Commit 6ab8b6f8b2
Changed files (10)
src/arch/wasm/CodeGen.zig
@@ -3088,11 +3088,15 @@ fn lowerConstant(func: *CodeGen, arg_val: Value, ty: Type) InnerError!WValue {
64 => return WValue{ .float64 = val.toFloat(f64) },
else => unreachable,
},
- .Pointer => switch (val.tag()) {
- .field_ptr, .elem_ptr, .opt_payload_ptr => return func.lowerParentPtr(val, 0),
- .int_u64, .one => return WValue{ .imm32 = @intCast(u32, val.toUnsignedInt(mod)) },
- .zero, .null_value => return WValue{ .imm32 = 0 },
- else => return func.fail("Wasm TODO: lowerConstant for other const pointer tag {}", .{val.tag()}),
+ .Pointer => switch (val.ip_index) {
+ .null_value => return WValue{ .imm32 = 0 },
+ .none => switch (val.tag()) {
+ .field_ptr, .elem_ptr, .opt_payload_ptr => return func.lowerParentPtr(val, 0),
+ .int_u64, .one => return WValue{ .imm32 = @intCast(u32, val.toUnsignedInt(mod)) },
+ .zero => return WValue{ .imm32 = 0 },
+ else => return func.fail("Wasm TODO: lowerConstant for other const pointer tag {}", .{val.tag()}),
+ },
+ else => unreachable,
},
.Enum => {
if (val.castTag(.enum_field_index)) |field_index| {
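
Note: the hunk above shows the shape every backend adopts in this commit. Constants are dispatched on the new InternPool index (val.ip_index) first, and only values that are not interned yet (.none) fall back to the legacy val.tag() switch; interned special cases such as .null_value are matched directly on the index, and any other interned index is unreachable in these pointer paths. The following is a minimal, self-contained sketch of that two-level dispatch; IpIndex, LegacyTag, Value and lowerPointerConstant are simplified stand-ins for illustration, not the compiler's real declarations.

const std = @import("std");

const IpIndex = enum { none, null_value, undef, unreachable_value };
const LegacyTag = enum { zero, one, field_ptr, slice };

const Value = struct {
    ip_index: IpIndex,
    legacy_tag: LegacyTag = .zero,

    fn tag(val: Value) LegacyTag {
        // Only meaningful for values that have no interned index yet.
        std.debug.assert(val.ip_index == .none);
        return val.legacy_tag;
    }
};

fn lowerPointerConstant(val: Value) u32 {
    return switch (val.ip_index) {
        // Interned special cases are matched on the index itself...
        .null_value => 0,
        // ...and only un-interned values still consult the legacy tag.
        .none => switch (val.tag()) {
            .zero => 0,
            .one => 1,
            else => @panic("TODO: lower other pointer tags"),
        },
        // No other interned index can be a pointer constant here.
        else => unreachable,
    };
}

pub fn main() void {
    std.debug.print("{} {}\n", .{
        lowerPointerConstant(.{ .ip_index = .null_value }),
        lowerPointerConstant(.{ .ip_index = .none, .legacy_tag = .one }),
    });
}
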
src/codegen/c.zig
@@ -1045,8 +1045,8 @@ pub const DeclGen = struct {
if (!empty) try writer.writeByte(')');
return;
},
- .Pointer => switch (val.tag()) {
- .null_value, .zero => if (ty.isSlice(mod)) {
+ .Pointer => switch (val.ip_index) {
+ .null_value => if (ty.isSlice(mod)) {
var slice_pl = Value.Payload.Slice{
.base = .{ .tag = .slice },
.data = .{ .ptr = val, .len = Value.undef },
@@ -1059,46 +1059,63 @@ pub const DeclGen = struct {
try dg.renderType(writer, ty);
try writer.writeAll(")NULL)");
},
- .variable => {
- const decl = val.castTag(.variable).?.data.owner_decl;
- return dg.renderDeclValue(writer, ty, val, decl, location);
- },
- .slice => {
- if (!location.isInitializer()) {
- try writer.writeByte('(');
+ .none => switch (val.tag()) {
+ .zero => if (ty.isSlice(mod)) {
+ var slice_pl = Value.Payload.Slice{
+ .base = .{ .tag = .slice },
+ .data = .{ .ptr = val, .len = Value.undef },
+ };
+ const slice_val = Value.initPayload(&slice_pl.base);
+
+ return dg.renderValue(writer, ty, slice_val, location);
+ } else {
+ try writer.writeAll("((");
try dg.renderType(writer, ty);
- try writer.writeByte(')');
- }
+ try writer.writeAll(")NULL)");
+ },
+ .variable => {
+ const decl = val.castTag(.variable).?.data.owner_decl;
+ return dg.renderDeclValue(writer, ty, val, decl, location);
+ },
+ .slice => {
+ if (!location.isInitializer()) {
+ try writer.writeByte('(');
+ try dg.renderType(writer, ty);
+ try writer.writeByte(')');
+ }
- const slice = val.castTag(.slice).?.data;
- var buf: Type.SlicePtrFieldTypeBuffer = undefined;
+ const slice = val.castTag(.slice).?.data;
+ var buf: Type.SlicePtrFieldTypeBuffer = undefined;
- try writer.writeByte('{');
- try dg.renderValue(writer, ty.slicePtrFieldType(&buf), slice.ptr, initializer_type);
- try writer.writeAll(", ");
- try dg.renderValue(writer, Type.usize, slice.len, initializer_type);
- try writer.writeByte('}');
- },
- .function => {
- const func = val.castTag(.function).?.data;
- try dg.renderDeclName(writer, func.owner_decl, 0);
- },
- .extern_fn => {
- const extern_fn = val.castTag(.extern_fn).?.data;
- try dg.renderDeclName(writer, extern_fn.owner_decl, 0);
- },
- .int_u64, .one, .int_big_positive, .lazy_align, .lazy_size => {
- try writer.writeAll("((");
- try dg.renderType(writer, ty);
- return writer.print("){x})", .{try dg.fmtIntLiteral(Type.usize, val, .Other)});
+ try writer.writeByte('{');
+ try dg.renderValue(writer, ty.slicePtrFieldType(&buf), slice.ptr, initializer_type);
+ try writer.writeAll(", ");
+ try dg.renderValue(writer, Type.usize, slice.len, initializer_type);
+ try writer.writeByte('}');
+ },
+ .function => {
+ const func = val.castTag(.function).?.data;
+ try dg.renderDeclName(writer, func.owner_decl, 0);
+ },
+ .extern_fn => {
+ const extern_fn = val.castTag(.extern_fn).?.data;
+ try dg.renderDeclName(writer, extern_fn.owner_decl, 0);
+ },
+ .int_u64, .one, .int_big_positive, .lazy_align, .lazy_size => {
+ try writer.writeAll("((");
+ try dg.renderType(writer, ty);
+ return writer.print("){x})", .{try dg.fmtIntLiteral(Type.usize, val, .Other)});
+ },
+ .field_ptr,
+ .elem_ptr,
+ .opt_payload_ptr,
+ .eu_payload_ptr,
+ .decl_ref_mut,
+ .decl_ref,
+ => try dg.renderParentPtr(writer, val, ty, location),
+
+ else => unreachable,
},
- .field_ptr,
- .elem_ptr,
- .opt_payload_ptr,
- .eu_payload_ptr,
- .decl_ref_mut,
- .decl_ref,
- => try dg.renderParentPtr(writer, val, ty, location),
else => unreachable,
},
.Array, .Vector => {
@@ -1109,8 +1126,8 @@ pub const DeclGen = struct {
}
// First try specific tag representations for more efficiency.
- switch (val.tag()) {
- .undef, .empty_struct_value, .empty_array => {
+ switch (val.ip_index) {
+ .undef => {
const ai = ty.arrayInfo(mod);
try writer.writeByte('{');
if (ai.sentinel) |s| {
@@ -1119,76 +1136,91 @@ pub const DeclGen = struct {
try writer.writeByte('0');
}
try writer.writeByte('}');
+ return;
},
- .bytes, .str_lit => |t| {
- const bytes = switch (t) {
- .bytes => val.castTag(.bytes).?.data,
- .str_lit => bytes: {
- const str_lit = val.castTag(.str_lit).?.data;
- break :bytes dg.module.string_literal_bytes.items[str_lit.index..][0..str_lit.len];
- },
- else => unreachable,
- };
- const sentinel = if (ty.sentinel(mod)) |sentinel| @intCast(u8, sentinel.toUnsignedInt(mod)) else null;
- try writer.print("{s}", .{
- fmtStringLiteral(bytes[0..@intCast(usize, ty.arrayLen(mod))], sentinel),
- });
- },
- else => {
- // Fall back to generic implementation.
- var arena = std.heap.ArenaAllocator.init(dg.gpa);
- defer arena.deinit();
- const arena_allocator = arena.allocator();
-
- // MSVC throws C2078 if an array of size 65536 or greater is initialized with a string literal
- const max_string_initializer_len = 65535;
-
- const ai = ty.arrayInfo(mod);
- if (ai.elem_type.eql(Type.u8, dg.module)) {
- if (ai.len <= max_string_initializer_len) {
- var literal = stringLiteral(writer);
- try literal.start();
- var index: usize = 0;
- while (index < ai.len) : (index += 1) {
- const elem_val = try val.elemValue(dg.module, arena_allocator, index);
- const elem_val_u8 = if (elem_val.isUndef()) undefPattern(u8) else @intCast(u8, elem_val.toUnsignedInt(mod));
- try literal.writeChar(elem_val_u8);
- }
- if (ai.sentinel) |s| {
- const s_u8 = @intCast(u8, s.toUnsignedInt(mod));
- if (s_u8 != 0) try literal.writeChar(s_u8);
- }
- try literal.end();
- } else {
- try writer.writeByte('{');
- var index: usize = 0;
- while (index < ai.len) : (index += 1) {
- if (index != 0) try writer.writeByte(',');
- const elem_val = try val.elemValue(dg.module, arena_allocator, index);
- const elem_val_u8 = if (elem_val.isUndef()) undefPattern(u8) else @intCast(u8, elem_val.toUnsignedInt(mod));
- try writer.print("'\\x{x}'", .{elem_val_u8});
- }
- if (ai.sentinel) |s| {
- if (index != 0) try writer.writeByte(',');
- try dg.renderValue(writer, ai.elem_type, s, initializer_type);
- }
- try writer.writeByte('}');
- }
- } else {
+ .none => switch (val.tag()) {
+ .empty_struct_value, .empty_array => {
+ const ai = ty.arrayInfo(mod);
try writer.writeByte('{');
- var index: usize = 0;
- while (index < ai.len) : (index += 1) {
- if (index != 0) try writer.writeByte(',');
- const elem_val = try val.elemValue(dg.module, arena_allocator, index);
- try dg.renderValue(writer, ai.elem_type, elem_val, initializer_type);
- }
if (ai.sentinel) |s| {
- if (index != 0) try writer.writeByte(',');
try dg.renderValue(writer, ai.elem_type, s, initializer_type);
+ } else {
+ try writer.writeByte('0');
}
try writer.writeByte('}');
- }
+ return;
+ },
+ .bytes, .str_lit => |t| {
+ const bytes = switch (t) {
+ .bytes => val.castTag(.bytes).?.data,
+ .str_lit => bytes: {
+ const str_lit = val.castTag(.str_lit).?.data;
+ break :bytes dg.module.string_literal_bytes.items[str_lit.index..][0..str_lit.len];
+ },
+ else => unreachable,
+ };
+ const sentinel = if (ty.sentinel(mod)) |sentinel| @intCast(u8, sentinel.toUnsignedInt(mod)) else null;
+ try writer.print("{s}", .{
+ fmtStringLiteral(bytes[0..@intCast(usize, ty.arrayLen(mod))], sentinel),
+ });
+ return;
+ },
+ else => {},
},
+ else => {},
+ }
+ // Fall back to generic implementation.
+ var arena = std.heap.ArenaAllocator.init(dg.gpa);
+ defer arena.deinit();
+ const arena_allocator = arena.allocator();
+
+ // MSVC throws C2078 if an array of size 65536 or greater is initialized with a string literal
+ const max_string_initializer_len = 65535;
+
+ const ai = ty.arrayInfo(mod);
+ if (ai.elem_type.eql(Type.u8, dg.module)) {
+ if (ai.len <= max_string_initializer_len) {
+ var literal = stringLiteral(writer);
+ try literal.start();
+ var index: usize = 0;
+ while (index < ai.len) : (index += 1) {
+ const elem_val = try val.elemValue(dg.module, arena_allocator, index);
+ const elem_val_u8 = if (elem_val.isUndef()) undefPattern(u8) else @intCast(u8, elem_val.toUnsignedInt(mod));
+ try literal.writeChar(elem_val_u8);
+ }
+ if (ai.sentinel) |s| {
+ const s_u8 = @intCast(u8, s.toUnsignedInt(mod));
+ if (s_u8 != 0) try literal.writeChar(s_u8);
+ }
+ try literal.end();
+ } else {
+ try writer.writeByte('{');
+ var index: usize = 0;
+ while (index < ai.len) : (index += 1) {
+ if (index != 0) try writer.writeByte(',');
+ const elem_val = try val.elemValue(dg.module, arena_allocator, index);
+ const elem_val_u8 = if (elem_val.isUndef()) undefPattern(u8) else @intCast(u8, elem_val.toUnsignedInt(mod));
+ try writer.print("'\\x{x}'", .{elem_val_u8});
+ }
+ if (ai.sentinel) |s| {
+ if (index != 0) try writer.writeByte(',');
+ try dg.renderValue(writer, ai.elem_type, s, initializer_type);
+ }
+ try writer.writeByte('}');
+ }
+ } else {
+ try writer.writeByte('{');
+ var index: usize = 0;
+ while (index < ai.len) : (index += 1) {
+ if (index != 0) try writer.writeByte(',');
+ const elem_val = try val.elemValue(dg.module, arena_allocator, index);
+ try dg.renderValue(writer, ai.elem_type, elem_val, initializer_type);
+ }
+ if (ai.sentinel) |s| {
+ if (index != 0) try writer.writeByte(',');
+ try dg.renderValue(writer, ai.elem_type, s, initializer_type);
+ }
+ try writer.writeByte('}');
}
},
.Bool => {
@@ -1201,7 +1233,7 @@ pub const DeclGen = struct {
.Optional => {
const payload_ty = ty.optionalChild(mod);
- const is_null_val = Value.makeBool(val.tag() == .null_value);
+ const is_null_val = Value.makeBool(val.ip_index == .null_value);
if (!payload_ty.hasRuntimeBitsIgnoreComptime(mod))
return dg.renderValue(writer, Type.bool, is_null_val, location);
@@ -7765,7 +7797,7 @@ fn lowerFnRetTy(ret_ty: Type, buffer: *LowerFnRetTyBuffer, mod: *const Module) T
if (lowersToArray(ret_ty, mod)) {
buffer.names = [1][]const u8{"array"};
buffer.types = [1]Type{ret_ty};
- buffer.values = [1]Value{Value.initTag(.unreachable_value)};
+ buffer.values = [1]Value{Value.@"unreachable"};
buffer.payload = .{ .data = .{
.names = &buffer.names,
.types = &buffer.types,
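
The Array/Vector hunk above also reshapes control flow: the specialized representations (.undef, .empty_struct_value/.empty_array, .bytes/.str_lit) now return from inside the switch, and the generic element-by-element loop that previously lived in the else branch moves after the switch, so both the .none => else arm and the interned else arm fall through to it. A rough sketch of that shape follows; renderArray, the writer, and the types are hypothetical simplifications, not the real c.zig API.

const std = @import("std");

const IpIndex = enum { none, undef, null_value };
const LegacyTag = enum { bytes, aggregate };

const Value = struct {
    ip_index: IpIndex,
    legacy_tag: LegacyTag = .aggregate,

    fn tag(val: Value) LegacyTag {
        return val.legacy_tag;
    }
};

fn renderArray(val: Value, writer: anytype, len: usize) !void {
    switch (val.ip_index) {
        .undef => {
            // Specialized representation: all-zero initializer, then done.
            try writer.writeAll("{0}");
            return;
        },
        .none => switch (val.tag()) {
            .bytes => {
                // Specialized representation: string literal, then done.
                try writer.writeAll("\"...\"");
                return;
            },
            else => {}, // fall through to the generic loop below
        },
        else => {}, // fall through to the generic loop below
    }
    // Generic fallback shared by every case that did not return above.
    try writer.writeByte('{');
    var i: usize = 0;
    while (i < len) : (i += 1) {
        if (i != 0) try writer.writeByte(',');
        try writer.writeByte('0');
    }
    try writer.writeByte('}');
}

pub fn main() !void {
    const stdout = std.io.getStdOut().writer();
    try renderArray(.{ .ip_index = .none }, stdout, 3);
    try stdout.writeByte('\n');
}
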
src/codegen/llvm.zig
@@ -2028,7 +2028,7 @@ pub const Object = struct {
for (tuple.types, 0..) |field_ty, i| {
const field_val = tuple.values[i];
- if (field_val.tag() != .unreachable_value or !field_ty.hasRuntimeBits(mod)) continue;
+ if (field_val.ip_index != .unreachable_value or !field_ty.hasRuntimeBits(mod)) continue;
const field_size = field_ty.abiSize(mod);
const field_align = field_ty.abiAlignment(mod);
@@ -2498,7 +2498,7 @@ pub const DeclGen = struct {
global.setGlobalConstant(.True);
break :init_val decl.val;
};
- if (init_val.tag() != .unreachable_value) {
+ if (init_val.ip_index != .unreachable_value) {
const llvm_init = try dg.lowerValue(.{ .ty = decl.ty, .val = init_val });
if (global.globalGetValueType() == llvm_init.typeOf()) {
global.setInitializer(llvm_init);
@@ -2954,7 +2954,7 @@ pub const DeclGen = struct {
for (tuple.types, 0..) |field_ty, i| {
const field_val = tuple.values[i];
- if (field_val.tag() != .unreachable_value or !field_ty.hasRuntimeBits(mod)) continue;
+ if (field_val.ip_index != .unreachable_value or !field_ty.hasRuntimeBits(mod)) continue;
const field_align = field_ty.abiAlignment(mod);
big_align = @max(big_align, field_align);
@@ -3359,58 +3359,65 @@ pub const DeclGen = struct {
else => unreachable,
}
},
- .Pointer => switch (tv.val.tag()) {
- .decl_ref_mut => return lowerDeclRefValue(dg, tv, tv.val.castTag(.decl_ref_mut).?.data.decl_index),
- .decl_ref => return lowerDeclRefValue(dg, tv, tv.val.castTag(.decl_ref).?.data),
- .variable => {
- const decl_index = tv.val.castTag(.variable).?.data.owner_decl;
- const decl = dg.module.declPtr(decl_index);
- dg.module.markDeclAlive(decl);
-
- const llvm_wanted_addrspace = toLlvmAddressSpace(decl.@"addrspace", target);
- const llvm_actual_addrspace = toLlvmGlobalAddressSpace(decl.@"addrspace", target);
-
- const val = try dg.resolveGlobalDecl(decl_index);
- const addrspace_casted_ptr = if (llvm_actual_addrspace != llvm_wanted_addrspace)
- val.constAddrSpaceCast(dg.context.pointerType(llvm_wanted_addrspace))
- else
- val;
- return addrspace_casted_ptr;
- },
- .slice => {
- const slice = tv.val.castTag(.slice).?.data;
- var buf: Type.SlicePtrFieldTypeBuffer = undefined;
- const fields: [2]*llvm.Value = .{
- try dg.lowerValue(.{
- .ty = tv.ty.slicePtrFieldType(&buf),
- .val = slice.ptr,
- }),
- try dg.lowerValue(.{
- .ty = Type.usize,
- .val = slice.len,
- }),
- };
- return dg.context.constStruct(&fields, fields.len, .False);
- },
- .int_u64, .one, .int_big_positive, .lazy_align, .lazy_size => {
- const llvm_usize = try dg.lowerType(Type.usize);
- const llvm_int = llvm_usize.constInt(tv.val.toUnsignedInt(mod), .False);
- return llvm_int.constIntToPtr(try dg.lowerType(tv.ty));
- },
- .field_ptr, .opt_payload_ptr, .eu_payload_ptr, .elem_ptr => {
- return dg.lowerParentPtr(tv.val, tv.ty.ptrInfo(mod).bit_offset % 8 == 0);
- },
- .null_value, .zero => {
+ .Pointer => switch (tv.val.ip_index) {
+ .null_value => {
const llvm_type = try dg.lowerType(tv.ty);
return llvm_type.constNull();
},
- .opt_payload => {
- const payload = tv.val.castTag(.opt_payload).?.data;
- return dg.lowerParentPtr(payload, tv.ty.ptrInfo(mod).bit_offset % 8 == 0);
+ .none => switch (tv.val.tag()) {
+ .decl_ref_mut => return lowerDeclRefValue(dg, tv, tv.val.castTag(.decl_ref_mut).?.data.decl_index),
+ .decl_ref => return lowerDeclRefValue(dg, tv, tv.val.castTag(.decl_ref).?.data),
+ .variable => {
+ const decl_index = tv.val.castTag(.variable).?.data.owner_decl;
+ const decl = dg.module.declPtr(decl_index);
+ dg.module.markDeclAlive(decl);
+
+ const llvm_wanted_addrspace = toLlvmAddressSpace(decl.@"addrspace", target);
+ const llvm_actual_addrspace = toLlvmGlobalAddressSpace(decl.@"addrspace", target);
+
+ const val = try dg.resolveGlobalDecl(decl_index);
+ const addrspace_casted_ptr = if (llvm_actual_addrspace != llvm_wanted_addrspace)
+ val.constAddrSpaceCast(dg.context.pointerType(llvm_wanted_addrspace))
+ else
+ val;
+ return addrspace_casted_ptr;
+ },
+ .slice => {
+ const slice = tv.val.castTag(.slice).?.data;
+ var buf: Type.SlicePtrFieldTypeBuffer = undefined;
+ const fields: [2]*llvm.Value = .{
+ try dg.lowerValue(.{
+ .ty = tv.ty.slicePtrFieldType(&buf),
+ .val = slice.ptr,
+ }),
+ try dg.lowerValue(.{
+ .ty = Type.usize,
+ .val = slice.len,
+ }),
+ };
+ return dg.context.constStruct(&fields, fields.len, .False);
+ },
+ .int_u64, .one, .int_big_positive, .lazy_align, .lazy_size => {
+ const llvm_usize = try dg.lowerType(Type.usize);
+ const llvm_int = llvm_usize.constInt(tv.val.toUnsignedInt(mod), .False);
+ return llvm_int.constIntToPtr(try dg.lowerType(tv.ty));
+ },
+ .field_ptr, .opt_payload_ptr, .eu_payload_ptr, .elem_ptr => {
+ return dg.lowerParentPtr(tv.val, tv.ty.ptrInfo(mod).bit_offset % 8 == 0);
+ },
+ .zero => {
+ const llvm_type = try dg.lowerType(tv.ty);
+ return llvm_type.constNull();
+ },
+ .opt_payload => {
+ const payload = tv.val.castTag(.opt_payload).?.data;
+ return dg.lowerParentPtr(payload, tv.ty.ptrInfo(mod).bit_offset % 8 == 0);
+ },
+ else => |tag| return dg.todo("implement const of pointer type '{}' ({})", .{
+ tv.ty.fmtDebug(), tag,
+ }),
},
- else => |tag| return dg.todo("implement const of pointer type '{}' ({})", .{
- tv.ty.fmtDebug(), tag,
- }),
+ else => unreachable,
},
.Array => switch (tv.val.tag()) {
.bytes => {
@@ -3555,7 +3562,7 @@ pub const DeclGen = struct {
var fields_buf: [3]*llvm.Value = undefined;
fields_buf[0] = try dg.lowerValue(.{
.ty = payload_ty,
- .val = if (tv.val.castTag(.opt_payload)) |pl| pl.data else Value.initTag(.undef),
+ .val = if (tv.val.castTag(.opt_payload)) |pl| pl.data else Value.undef,
});
fields_buf[1] = non_null_bit;
if (llvm_field_count > 2) {
@@ -3606,7 +3613,7 @@ pub const DeclGen = struct {
});
const llvm_payload_value = try dg.lowerValue(.{
.ty = payload_type,
- .val = if (tv.val.castTag(.eu_payload)) |pl| pl.data else Value.initTag(.undef),
+ .val = if (tv.val.castTag(.eu_payload)) |pl| pl.data else Value.undef,
});
var fields_buf: [3]*llvm.Value = undefined;
@@ -3645,7 +3652,7 @@ pub const DeclGen = struct {
var need_unnamed = false;
for (tuple.types, 0..) |field_ty, i| {
- if (tuple.values[i].tag() != .unreachable_value) continue;
+ if (tuple.values[i].ip_index != .unreachable_value) continue;
if (!field_ty.hasRuntimeBitsIgnoreComptime(mod)) continue;
const field_align = field_ty.abiAlignment(mod);
@@ -10501,7 +10508,7 @@ fn llvmFieldIndex(
const tuple = ty.tupleFields();
var llvm_field_index: c_uint = 0;
for (tuple.types, 0..) |field_ty, i| {
- if (tuple.values[i].tag() != .unreachable_value or !field_ty.hasRuntimeBits(mod)) continue;
+ if (tuple.values[i].ip_index != .unreachable_value or !field_ty.hasRuntimeBits(mod)) continue;
const field_align = field_ty.abiAlignment(mod);
big_align = @max(big_align, field_align);
@@ -11117,7 +11124,7 @@ fn isByRef(ty: Type, mod: *const Module) bool {
const tuple = ty.tupleFields();
var count: usize = 0;
for (tuple.values, 0..) |field_val, i| {
- if (field_val.tag() != .unreachable_value or !tuple.types[i].hasRuntimeBits(mod)) continue;
+ if (field_val.ip_index != .unreachable_value or !tuple.types[i].hasRuntimeBits(mod)) continue;
count += 1;
if (count > max_fields_byval) return true;
src/codegen/spirv.zig
@@ -674,7 +674,7 @@ pub const DeclGen = struct {
try self.lower(ptr_ty, slice.ptr);
try self.addInt(Type.usize, slice.len);
},
- .null_value, .zero => try self.addNullPtr(try dg.resolveType(ty, .indirect)),
+ .zero => try self.addNullPtr(try dg.resolveType(ty, .indirect)),
.int_u64, .one, .int_big_positive, .lazy_align, .lazy_size => {
try self.addInt(Type.usize, val);
},
@@ -813,7 +813,8 @@ pub const DeclGen = struct {
const error_size = Type.anyerror.abiAlignment(mod);
const ty_size = ty.abiSize(mod);
const padding = ty_size - payload_size - error_size;
- const payload_val = if (val.castTag(.eu_payload)) |pl| pl.data else Value.initTag(.undef);
+
+ const payload_val = if (val.castTag(.eu_payload)) |pl| pl.data else Value.undef;
if (eu_layout.error_first) {
try self.lower(Type.anyerror, error_val);
@@ -1021,7 +1022,7 @@ pub const DeclGen = struct {
return try self.constant(Type.anyerror, error_val, repr);
}
- const payload_val = if (val.castTag(.eu_payload)) |pl| pl.data else Value.initTag(.undef);
+ const payload_val = if (val.castTag(.eu_payload)) |pl| pl.data else Value.undef;
var members: [2]IdRef = undefined;
if (eu_layout.error_first) {
@@ -1292,7 +1293,7 @@ pub const DeclGen = struct {
var member_index: usize = 0;
for (tuple.types, 0..) |field_ty, i| {
const field_val = tuple.values[i];
- if (field_val.tag() != .unreachable_value or !field_ty.hasRuntimeBits(mod)) continue;
+ if (field_val.ip_index != .unreachable_value or !field_ty.hasRuntimeBits(mod)) continue;
member_types[member_index] = try self.resolveType(field_ty, .indirect);
member_index += 1;
@@ -1596,7 +1597,7 @@ pub const DeclGen = struct {
else
decl.val;
- if (init_val.tag() == .unreachable_value) {
+ if (init_val.ip_index == .unreachable_value) {
return self.todo("importing extern variables", .{});
}
src/codegen.zig
@@ -312,7 +312,7 @@ pub fn generateSymbol(
),
},
},
- .Pointer => switch (typed_value.val.tag()) {
+ .Pointer => switch (typed_value.val.ip_index) {
.null_value => {
switch (target.ptrBitWidth()) {
32 => {
@@ -327,76 +327,79 @@ pub fn generateSymbol(
}
return Result.ok;
},
- .zero, .one, .int_u64, .int_big_positive => {
- switch (target.ptrBitWidth()) {
- 32 => {
- const x = typed_value.val.toUnsignedInt(mod);
- mem.writeInt(u32, try code.addManyAsArray(4), @intCast(u32, x), endian);
- },
- 64 => {
- const x = typed_value.val.toUnsignedInt(mod);
- mem.writeInt(u64, try code.addManyAsArray(8), x, endian);
- },
- else => unreachable,
- }
- return Result.ok;
- },
- .variable, .decl_ref, .decl_ref_mut => |tag| return lowerDeclRef(
- bin_file,
- src_loc,
- typed_value,
- switch (tag) {
- .variable => typed_value.val.castTag(.variable).?.data.owner_decl,
- .decl_ref => typed_value.val.castTag(.decl_ref).?.data,
- .decl_ref_mut => typed_value.val.castTag(.decl_ref_mut).?.data.decl_index,
- else => unreachable,
+ .none => switch (typed_value.val.tag()) {
+ .zero, .one, .int_u64, .int_big_positive => {
+ switch (target.ptrBitWidth()) {
+ 32 => {
+ const x = typed_value.val.toUnsignedInt(mod);
+ mem.writeInt(u32, try code.addManyAsArray(4), @intCast(u32, x), endian);
+ },
+ 64 => {
+ const x = typed_value.val.toUnsignedInt(mod);
+ mem.writeInt(u64, try code.addManyAsArray(8), x, endian);
+ },
+ else => unreachable,
+ }
+ return Result.ok;
},
- code,
- debug_output,
- reloc_info,
- ),
- .slice => {
- const slice = typed_value.val.castTag(.slice).?.data;
+ .variable, .decl_ref, .decl_ref_mut => |tag| return lowerDeclRef(
+ bin_file,
+ src_loc,
+ typed_value,
+ switch (tag) {
+ .variable => typed_value.val.castTag(.variable).?.data.owner_decl,
+ .decl_ref => typed_value.val.castTag(.decl_ref).?.data,
+ .decl_ref_mut => typed_value.val.castTag(.decl_ref_mut).?.data.decl_index,
+ else => unreachable,
+ },
+ code,
+ debug_output,
+ reloc_info,
+ ),
+ .slice => {
+ const slice = typed_value.val.castTag(.slice).?.data;
- // generate ptr
- var buf: Type.SlicePtrFieldTypeBuffer = undefined;
- const slice_ptr_field_type = typed_value.ty.slicePtrFieldType(&buf);
- switch (try generateSymbol(bin_file, src_loc, .{
- .ty = slice_ptr_field_type,
- .val = slice.ptr,
- }, code, debug_output, reloc_info)) {
- .ok => {},
- .fail => |em| return Result{ .fail = em },
- }
+ // generate ptr
+ var buf: Type.SlicePtrFieldTypeBuffer = undefined;
+ const slice_ptr_field_type = typed_value.ty.slicePtrFieldType(&buf);
+ switch (try generateSymbol(bin_file, src_loc, .{
+ .ty = slice_ptr_field_type,
+ .val = slice.ptr,
+ }, code, debug_output, reloc_info)) {
+ .ok => {},
+ .fail => |em| return Result{ .fail = em },
+ }
- // generate length
- switch (try generateSymbol(bin_file, src_loc, .{
- .ty = Type.usize,
- .val = slice.len,
- }, code, debug_output, reloc_info)) {
- .ok => {},
- .fail => |em| return Result{ .fail = em },
- }
+ // generate length
+ switch (try generateSymbol(bin_file, src_loc, .{
+ .ty = Type.usize,
+ .val = slice.len,
+ }, code, debug_output, reloc_info)) {
+ .ok => {},
+ .fail => |em| return Result{ .fail = em },
+ }
- return Result.ok;
- },
- .field_ptr, .elem_ptr, .opt_payload_ptr => return lowerParentPtr(
- bin_file,
- src_loc,
- typed_value,
- typed_value.val,
- code,
- debug_output,
- reloc_info,
- ),
- else => return Result{
- .fail = try ErrorMsg.create(
- bin_file.allocator,
+ return Result.ok;
+ },
+ .field_ptr, .elem_ptr, .opt_payload_ptr => return lowerParentPtr(
+ bin_file,
src_loc,
- "TODO implement generateSymbol for pointer type value: '{s}'",
- .{@tagName(typed_value.val.tag())},
+ typed_value,
+ typed_value.val,
+ code,
+ debug_output,
+ reloc_info,
),
+ else => return Result{
+ .fail = try ErrorMsg.create(
+ bin_file.allocator,
+ src_loc,
+ "TODO implement generateSymbol for pointer type value: '{s}'",
+ .{@tagName(typed_value.val.tag())},
+ ),
+ },
},
+ else => unreachable,
},
.Int => {
const info = typed_value.ty.intInfo(mod);
@@ -652,7 +655,7 @@ pub fn generateSymbol(
}
const padding = abi_size - (math.cast(usize, payload_type.abiSize(mod)) orelse return error.Overflow) - 1;
- const value = if (typed_value.val.castTag(.opt_payload)) |payload| payload.data else Value.initTag(.undef);
+ const value = if (typed_value.val.castTag(.opt_payload)) |payload| payload.data else Value.undef;
switch (try generateSymbol(bin_file, src_loc, .{
.ty = payload_type,
.val = value,
@@ -696,7 +699,7 @@ pub fn generateSymbol(
// emit payload part of the error union
{
const begin = code.items.len;
- const payload_val = if (typed_value.val.castTag(.eu_payload)) |val| val.data else Value.initTag(.undef);
+ const payload_val = if (typed_value.val.castTag(.eu_payload)) |val| val.data else Value.undef;
switch (try generateSymbol(bin_file, src_loc, .{
.ty = payload_ty,
.val = payload_val,
@@ -1189,16 +1192,17 @@ pub fn genTypedValue(
.Void => return GenResult.mcv(.none),
.Pointer => switch (typed_value.ty.ptrSize(mod)) {
.Slice => {},
- else => {
- switch (typed_value.val.tag()) {
- .null_value => {
- return GenResult.mcv(.{ .immediate = 0 });
- },
+ else => switch (typed_value.val.ip_index) {
+ .null_value => {
+ return GenResult.mcv(.{ .immediate = 0 });
+ },
+ .none => switch (typed_value.val.tag()) {
.int_u64 => {
return GenResult.mcv(.{ .immediate = typed_value.val.toUnsignedInt(mod) });
},
else => {},
- }
+ },
+ else => {},
},
},
.Int => {
@@ -1216,7 +1220,7 @@ pub fn genTypedValue(
},
.Optional => {
if (typed_value.ty.isPtrLikeOptional(mod)) {
- if (typed_value.val.tag() == .null_value) return GenResult.mcv(.{ .immediate = 0 });
+ if (typed_value.val.ip_index == .null_value) return GenResult.mcv(.{ .immediate = 0 });
return genTypedValue(bin_file, src_loc, .{
.ty = typed_value.ty.optionalChild(mod),
src/Module.zig
@@ -932,7 +932,7 @@ pub const Decl = struct {
assert(decl.has_tv);
return switch (decl.val.tag()) {
.extern_fn => true,
- .variable => decl.val.castTag(.variable).?.data.init.tag() == .unreachable_value,
+ .variable => decl.val.castTag(.variable).?.data.init.ip_index == .unreachable_value,
else => false,
};
}
@@ -4849,6 +4849,8 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !bool {
var is_extern = false;
switch (decl_tv.val.ip_index) {
.generic_poison => unreachable,
+ .unreachable_value => unreachable,
+
.none => switch (decl_tv.val.tag()) {
.variable => {
const variable = decl_tv.val.castTag(.variable).?.data;
@@ -4869,8 +4871,6 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !bool {
}
},
- .unreachable_value => unreachable,
-
.function => {},
else => {
@@ -6592,7 +6592,7 @@ pub fn populateTestFunctions(
.len = try Value.Tag.int_u64.create(arena, test_name_slice.len),
}), // name
try Value.Tag.decl_ref.create(arena, test_decl_index), // func
- Value.initTag(.null_value), // async_frame_size
+ Value.null, // async_frame_size
};
test_fn_vals[i] = try Value.Tag.aggregate.create(arena, field_vals);
}
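
The Module.zig hunks above and the Sema.zig hunks below show the companion change: sentinel values are no longer built on the spot with Value.initTag(.unreachable_value) or Value.initTag(.null_value) but taken from named constants (Value.@"unreachable", Value.null), and the corresponding checks compare ip_index instead of tag(). A toy model of that idiom, using stand-in types rather than the real Value; only @"unreachable" is modeled here, and the helper mirrors the tuple-field checks in Sema.zig, where a default of unreachable_value marks a field with no comptime-known value.

const std = @import("std");

const IpIndex = enum { none, null_value, unreachable_value };

const Value = struct {
    ip_index: IpIndex,

    // Stand-in for the real Value.@"unreachable" constant used by the commit.
    pub const @"unreachable": Value = .{ .ip_index = .unreachable_value };
};

// Mirrors checks like `default_val.ip_index == .unreachable_value`:
// such a field has no comptime-known default, i.e. it is a runtime field.
fn hasNoComptimeValue(default_val: Value) bool {
    return default_val.ip_index == .unreachable_value;
}

pub fn main() void {
    std.debug.print("{} {}\n", .{
        hasNoComptimeValue(Value.@"unreachable"), // true
        hasNoComptimeValue(.{ .ip_index = .none }), // false
    });
}
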
src/Sema.zig
@@ -1877,8 +1877,8 @@ fn resolveConstValue(
if (try sema.resolveMaybeUndefValAllowVariables(air_ref)) |val| {
switch (val.ip_index) {
.generic_poison => return error.GenericPoison,
+ .undef => return sema.failWithUseOfUndef(block, src),
.none => switch (val.tag()) {
- .undef => return sema.failWithUseOfUndef(block, src),
.variable => return sema.failWithNeededComptime(block, src, reason),
else => return val,
},
@@ -4409,7 +4409,7 @@ fn validateStructInit(
if (field_ptr != 0) continue;
const default_val = struct_ty.structFieldDefaultValue(i);
- if (default_val.tag() == .unreachable_value) {
+ if (default_val.ip_index == .unreachable_value) {
if (struct_ty.isTuple()) {
const template = "missing tuple field with index {d}";
if (root_msg) |msg| {
@@ -4554,7 +4554,7 @@ fn validateStructInit(
}
const default_val = struct_ty.structFieldDefaultValue(i);
- if (default_val.tag() == .unreachable_value) {
+ if (default_val.ip_index == .unreachable_value) {
if (struct_ty.isTuple()) {
const template = "missing tuple field with index {d}";
if (root_msg) |msg| {
@@ -4644,7 +4644,7 @@ fn zirValidateArrayInit(
var i = instrs.len;
while (i < array_len) : (i += 1) {
const default_val = array_ty.structFieldDefaultValue(i);
- if (default_val.tag() == .unreachable_value) {
+ if (default_val.ip_index == .unreachable_value) {
const template = "missing tuple field with index {d}";
if (root_msg) |msg| {
try sema.errNote(block, init_src, msg, template, .{i});
@@ -7885,7 +7885,7 @@ fn resolveTupleLazyValues(sema: *Sema, block: *Block, src: LazySrcLoc, ty: Type)
const tuple = ty.tupleFields();
for (tuple.values, 0..) |field_val, i| {
try sema.resolveTupleLazyValues(block, src, tuple.types[i]);
- if (field_val.tag() == .unreachable_value) continue;
+ if (field_val.ip_index == .unreachable_value) continue;
try sema.resolveLazyValue(field_val);
}
}
@@ -12641,7 +12641,7 @@ fn analyzeTupleCat(
const default_val = lhs_ty.structFieldDefaultValue(i);
values[i] = default_val;
const operand_src = lhs_src; // TODO better source location
- if (default_val.tag() == .unreachable_value) {
+ if (default_val.ip_index == .unreachable_value) {
runtime_src = operand_src;
}
}
@@ -12651,7 +12651,7 @@ fn analyzeTupleCat(
const default_val = rhs_ty.structFieldDefaultValue(i);
values[i + lhs_len] = default_val;
const operand_src = rhs_src; // TODO better source location
- if (default_val.tag() == .unreachable_value) {
+ if (default_val.ip_index == .unreachable_value) {
runtime_src = operand_src;
}
}
@@ -12809,8 +12809,8 @@ fn zirArrayCat(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
while (elem_i < lhs_len) : (elem_i += 1) {
const lhs_elem_i = elem_i;
const elem_ty = if (lhs_is_tuple) lhs_ty.structFieldType(lhs_elem_i) else lhs_info.elem_type;
- const elem_default_val = if (lhs_is_tuple) lhs_ty.structFieldDefaultValue(lhs_elem_i) else Value.initTag(.unreachable_value);
- const elem_val = if (elem_default_val.tag() == .unreachable_value) try lhs_sub_val.elemValue(sema.mod, sema.arena, lhs_elem_i) else elem_default_val;
+ const elem_default_val = if (lhs_is_tuple) lhs_ty.structFieldDefaultValue(lhs_elem_i) else Value.@"unreachable";
+ const elem_val = if (elem_default_val.ip_index == .unreachable_value) try lhs_sub_val.elemValue(sema.mod, sema.arena, lhs_elem_i) else elem_default_val;
const elem_val_inst = try sema.addConstant(elem_ty, elem_val);
const coerced_elem_val_inst = try sema.coerce(block, resolved_elem_ty, elem_val_inst, .unneeded);
const coerced_elem_val = try sema.resolveConstMaybeUndefVal(block, .unneeded, coerced_elem_val_inst, "");
@@ -12819,8 +12819,8 @@ fn zirArrayCat(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
while (elem_i < result_len) : (elem_i += 1) {
const rhs_elem_i = elem_i - lhs_len;
const elem_ty = if (rhs_is_tuple) rhs_ty.structFieldType(rhs_elem_i) else rhs_info.elem_type;
- const elem_default_val = if (rhs_is_tuple) rhs_ty.structFieldDefaultValue(rhs_elem_i) else Value.initTag(.unreachable_value);
- const elem_val = if (elem_default_val.tag() == .unreachable_value) try rhs_sub_val.elemValue(sema.mod, sema.arena, rhs_elem_i) else elem_default_val;
+ const elem_default_val = if (rhs_is_tuple) rhs_ty.structFieldDefaultValue(rhs_elem_i) else Value.@"unreachable";
+ const elem_val = if (elem_default_val.ip_index == .unreachable_value) try rhs_sub_val.elemValue(sema.mod, sema.arena, rhs_elem_i) else elem_default_val;
const elem_val_inst = try sema.addConstant(elem_ty, elem_val);
const coerced_elem_val_inst = try sema.coerce(block, resolved_elem_ty, elem_val_inst, .unneeded);
const coerced_elem_val = try sema.resolveConstMaybeUndefVal(block, .unneeded, coerced_elem_val_inst, "");
@@ -12962,7 +12962,7 @@ fn analyzeTupleMul(
types[i] = operand_ty.structFieldType(i);
values[i] = operand_ty.structFieldDefaultValue(i);
const operand_src = lhs_src; // TODO better source location
- if (values[i].tag() == .unreachable_value) {
+ if (values[i].ip_index == .unreachable_value) {
runtime_src = operand_src;
}
}
@@ -14332,7 +14332,7 @@ fn zirOverflowArithmetic(
var result: struct {
inst: Air.Inst.Ref = .none,
- wrapped: Value = Value.initTag(.unreachable_value),
+ wrapped: Value = Value.@"unreachable",
overflow_bit: Value,
} = result: {
switch (zir_tag) {
@@ -14508,8 +14508,8 @@ fn overflowArithmeticTupleType(sema: *Sema, ty: Type) !Type {
types[0] = ty;
types[1] = ov_ty;
- values[0] = Value.initTag(.unreachable_value);
- values[1] = Value.initTag(.unreachable_value);
+ values[0] = Value.@"unreachable";
+ values[1] = Value.@"unreachable";
return tuple_ty;
}
@@ -15647,7 +15647,7 @@ fn zirClosureCapture(
// value only. In such case we preserve the type and use a dummy runtime value.
const operand = try sema.resolveInst(inst_data.operand);
const val = (try sema.resolveMaybeUndefValAllowVariables(operand)) orelse
- Value.initTag(.unreachable_value);
+ Value.@"unreachable";
try block.wip_capture_scope.captures.putNoClobber(sema.gpa, inst, .{
.ty = try sema.typeOf(operand).copy(sema.perm_arena),
@@ -15684,7 +15684,7 @@ fn zirClosureGet(
scope = scope.parent.?;
};
- if (tv.val.tag() == .unreachable_value and !block.is_typeof and sema.func == null) {
+ if (tv.val.ip_index == .unreachable_value and !block.is_typeof and sema.func == null) {
const msg = msg: {
const name = name: {
const file = sema.owner_decl.getFileScope();
@@ -15712,7 +15712,7 @@ fn zirClosureGet(
return sema.failWithOwnedErrorMsg(msg);
}
- if (tv.val.tag() == .unreachable_value and !block.is_typeof and !block.is_comptime and sema.func != null) {
+ if (tv.val.ip_index == .unreachable_value and !block.is_typeof and !block.is_comptime and sema.func != null) {
const msg = msg: {
const name = name: {
const file = sema.owner_decl.getFileScope();
@@ -15742,7 +15742,7 @@ fn zirClosureGet(
return sema.failWithOwnedErrorMsg(msg);
}
- if (tv.val.tag() == .unreachable_value) {
+ if (tv.val.ip_index == .unreachable_value) {
assert(block.is_typeof);
// We need a dummy runtime instruction with the correct type.
return block.addTy(.alloc, tv.ty);
@@ -16477,7 +16477,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
const struct_field_fields = try fields_anon_decl.arena().create([5]Value);
const field_val = tuple.values[i];
- const is_comptime = field_val.tag() != .unreachable_value;
+ const is_comptime = field_val.ip_index != .unreachable_value;
const opt_default_val = if (is_comptime) field_val else null;
const default_val_ptr = try sema.optRefValue(block, field_ty, opt_default_val);
struct_field_fields.* = .{
@@ -16518,7 +16518,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
};
const struct_field_fields = try fields_anon_decl.arena().create([5]Value);
- const opt_default_val = if (field.default_val.tag() == .unreachable_value)
+ const opt_default_val = if (field.default_val.ip_index == .unreachable_value)
null
else
field.default_val;
@@ -16570,7 +16570,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
const backing_int_ty_val = try Value.Tag.ty.create(sema.arena, struct_obj.backing_int_ty);
break :blk try Value.Tag.opt_payload.create(sema.arena, backing_int_ty_val);
} else {
- break :blk Value.initTag(.null_value);
+ break :blk Value.null;
}
};
@@ -17974,7 +17974,7 @@ fn finishStructInit(
for (struct_obj.values, 0..) |default_val, i| {
if (field_inits[i] != .none) continue;
- if (default_val.tag() == .unreachable_value) {
+ if (default_val.ip_index == .unreachable_value) {
const field_name = struct_obj.names[i];
const template = "missing struct field: {s}";
const args = .{field_name};
@@ -17994,7 +17994,7 @@ fn finishStructInit(
if (field_inits[i] != .none) continue;
const default_val = struct_ty.structFieldDefaultValue(i);
- if (default_val.tag() == .unreachable_value) {
+ if (default_val.ip_index == .unreachable_value) {
const template = "missing tuple field with index {d}";
if (root_msg) |msg| {
try sema.errNote(block, init_src, msg, template, .{i});
@@ -18010,7 +18010,7 @@ fn finishStructInit(
for (struct_obj.fields.values(), 0..) |field, i| {
if (field_inits[i] != .none) continue;
- if (field.default_val.tag() == .unreachable_value) {
+ if (field.default_val.ip_index == .unreachable_value) {
const field_name = struct_obj.fields.keys()[i];
const template = "missing struct field: {s}";
const args = .{field_name};
@@ -18145,7 +18145,7 @@ fn zirStructInitAnon(
if (try sema.resolveMaybeUndefVal(init)) |init_val| {
values[i] = init_val;
} else {
- values[i] = Value.initTag(.unreachable_value);
+ values[i] = Value.@"unreachable";
runtime_index = i;
}
}
@@ -18191,7 +18191,7 @@ fn zirStructInitAnon(
.@"addrspace" = target_util.defaultAddressSpace(target, .local),
.pointee_type = field_ty,
});
- if (values[i].tag() == .unreachable_value) {
+ if (values[i].ip_index == .unreachable_value) {
const init = try sema.resolveInst(item.data.init);
const field_ptr = try block.addStructFieldPtr(alloc, i, field_ptr_ty);
_ = try block.addBinOp(.store, field_ptr, init);
@@ -18357,7 +18357,7 @@ fn zirArrayInitAnon(
if (try sema.resolveMaybeUndefVal(elem)) |val| {
values[i] = val;
} else {
- values[i] = Value.initTag(.unreachable_value);
+ values[i] = Value.@"unreachable";
runtime_src = operand_src;
}
}
@@ -18390,7 +18390,7 @@ fn zirArrayInitAnon(
.@"addrspace" = target_util.defaultAddressSpace(target, .local),
.pointee_type = types[i],
});
- if (values[i].tag() == .unreachable_value) {
+ if (values[i].ip_index == .unreachable_value) {
const field_ptr = try block.addStructFieldPtr(alloc, i, field_ptr_ty);
_ = try block.addBinOp(.store, field_ptr, try sema.resolveInst(operand));
}
@@ -19545,8 +19545,8 @@ fn reifyStruct(
else
opt_val;
break :blk try payload_val.copy(new_decl_arena_allocator);
- } else Value.initTag(.unreachable_value);
- if (is_comptime_val.toBool(mod) and default_val.tag() == .unreachable_value) {
+ } else Value.@"unreachable";
+ if (is_comptime_val.toBool(mod) and default_val.ip_index == .unreachable_value) {
return sema.fail(block, src, "comptime field without default initialization value", .{});
}
@@ -22579,7 +22579,7 @@ fn zirVarExtended(
break :blk (try sema.resolveMaybeUndefVal(init)) orelse
return sema.failWithNeededComptime(block, init_src, "container level variable initializers must be comptime-known");
- } else Value.initTag(.unreachable_value);
+ } else Value.@"unreachable";
try sema.validateVarType(block, ty_src, var_ty, small.is_extern);
@@ -23080,7 +23080,7 @@ fn zirBuiltinExtern(
const new_var = try new_decl_arena_allocator.create(Module.Var);
new_var.* = .{
.owner_decl = sema.owner_decl_index,
- .init = Value.initTag(.unreachable_value),
+ .init = Value.@"unreachable",
.is_extern = true,
.is_mutable = false,
.is_threadlocal = options.is_thread_local,
@@ -25736,7 +25736,7 @@ fn coerceExtra(
}
} else {
in_memory_result = .{ .ptr_sentinel = .{
- .actual = Value.initTag(.unreachable_value),
+ .actual = Value.@"unreachable",
.wanted = dest_sent,
.ty = dst_elem_type,
} };
@@ -26116,26 +26116,28 @@ fn coerceExtra(
.ErrorUnion => switch (inst_ty.zigTypeTag(mod)) {
.ErrorUnion => eu: {
if (maybe_inst_val) |inst_val| {
- switch (inst_val.tag()) {
+ switch (inst_val.ip_index) {
.undef => return sema.addConstUndef(dest_ty),
- .eu_payload => {
- const payload = try sema.addConstant(
- inst_ty.errorUnionPayload(),
- inst_val.castTag(.eu_payload).?.data,
- );
- return sema.wrapErrorUnionPayload(block, dest_ty, payload, inst_src) catch |err| switch (err) {
- error.NotCoercible => break :eu,
- else => |e| return e,
- };
- },
- else => {
- const error_set = try sema.addConstant(
- inst_ty.errorUnionSet(),
- inst_val,
- );
- return sema.wrapErrorUnionSet(block, dest_ty, error_set, inst_src);
+ .none => switch (inst_val.tag()) {
+ .eu_payload => {
+ const payload = try sema.addConstant(
+ inst_ty.errorUnionPayload(),
+ inst_val.castTag(.eu_payload).?.data,
+ );
+ return sema.wrapErrorUnionPayload(block, dest_ty, payload, inst_src) catch |err| switch (err) {
+ error.NotCoercible => break :eu,
+ else => |e| return e,
+ };
+ },
+ else => {},
},
+ else => {},
}
+ const error_set = try sema.addConstant(
+ inst_ty.errorUnionSet(),
+ inst_val,
+ );
+ return sema.wrapErrorUnionSet(block, dest_ty, error_set, inst_src);
}
},
.ErrorSet => {
@@ -26413,7 +26415,7 @@ const InMemoryCoercionResult = union(enum) {
break;
},
.array_sentinel => |sentinel| {
- if (sentinel.actual.tag() != .unreachable_value) {
+ if (sentinel.actual.ip_index != .unreachable_value) {
try sema.errNote(block, src, msg, "array sentinel '{}' cannot cast into array sentinel '{}'", .{
sentinel.actual.fmtValue(sentinel.ty, sema.mod), sentinel.wanted.fmtValue(sentinel.ty, sema.mod),
});
@@ -26539,7 +26541,7 @@ const InMemoryCoercionResult = union(enum) {
break;
},
.ptr_sentinel => |sentinel| {
- if (sentinel.actual.tag() != .unreachable_value) {
+ if (sentinel.actual.ip_index != .unreachable_value) {
try sema.errNote(block, src, msg, "pointer sentinel '{}' cannot cast into pointer sentinel '{}'", .{
sentinel.actual.fmtValue(sentinel.ty, sema.mod), sentinel.wanted.fmtValue(sentinel.ty, sema.mod),
});
@@ -26747,8 +26749,8 @@ fn coerceInMemoryAllowed(
dest_info.sentinel.?.eql(src_info.sentinel.?, dest_info.elem_type, mod));
if (!ok_sent) {
return InMemoryCoercionResult{ .array_sentinel = .{
- .actual = src_info.sentinel orelse Value.initTag(.unreachable_value),
- .wanted = dest_info.sentinel orelse Value.initTag(.unreachable_value),
+ .actual = src_info.sentinel orelse Value.@"unreachable",
+ .wanted = dest_info.sentinel orelse Value.@"unreachable",
.ty = dest_info.elem_type,
} };
}
@@ -27129,8 +27131,8 @@ fn coerceInMemoryAllowedPtrs(
dest_info.sentinel.?.eql(src_info.sentinel.?, dest_info.pointee_type, sema.mod));
if (!ok_sent) {
return InMemoryCoercionResult{ .ptr_sentinel = .{
- .actual = src_info.sentinel orelse Value.initTag(.unreachable_value),
- .wanted = dest_info.sentinel orelse Value.initTag(.unreachable_value),
+ .actual = src_info.sentinel orelse Value.@"unreachable",
+ .wanted = dest_info.sentinel orelse Value.@"unreachable",
.ty = dest_info.pointee_type,
} };
}
@@ -27540,7 +27542,7 @@ fn beginComptimePtrMutation(
};
}
- switch (val_ptr.tag()) {
+ switch (val_ptr.ip_index) {
.undef => {
// An array has been initialized to undefined at comptime and now we
// are for the first time setting an element. We must change the representation
@@ -27565,127 +27567,130 @@ fn beginComptimePtrMutation(
parent.decl_ref_mut,
);
},
- .bytes => {
- // An array is memory-optimized to store a slice of bytes, but we are about
- // to modify an individual field and the representation has to change.
- // If we wanted to avoid this, there would need to be special detection
- // elsewhere to identify when writing a value to an array element that is stored
- // using the `bytes` tag, and handle it without making a call to this function.
- const arena = parent.beginArena(sema.mod);
- defer parent.finishArena(sema.mod);
-
- const bytes = val_ptr.castTag(.bytes).?.data;
- const dest_len = parent.ty.arrayLenIncludingSentinel(mod);
- // bytes.len may be one greater than dest_len because of the case when
- // assigning `[N:S]T` to `[N]T`. This is allowed; the sentinel is omitted.
- assert(bytes.len >= dest_len);
- const elems = try arena.alloc(Value, @intCast(usize, dest_len));
- for (elems, 0..) |*elem, i| {
- elem.* = try Value.Tag.int_u64.create(arena, bytes[i]);
- }
-
- val_ptr.* = try Value.Tag.aggregate.create(arena, elems);
+ .none => switch (val_ptr.tag()) {
+ .bytes => {
+ // An array is memory-optimized to store a slice of bytes, but we are about
+ // to modify an individual field and the representation has to change.
+ // If we wanted to avoid this, there would need to be special detection
+ // elsewhere to identify when writing a value to an array element that is stored
+ // using the `bytes` tag, and handle it without making a call to this function.
+ const arena = parent.beginArena(sema.mod);
+ defer parent.finishArena(sema.mod);
+
+ const bytes = val_ptr.castTag(.bytes).?.data;
+ const dest_len = parent.ty.arrayLenIncludingSentinel(mod);
+ // bytes.len may be one greater than dest_len because of the case when
+ // assigning `[N:S]T` to `[N]T`. This is allowed; the sentinel is omitted.
+ assert(bytes.len >= dest_len);
+ const elems = try arena.alloc(Value, @intCast(usize, dest_len));
+ for (elems, 0..) |*elem, i| {
+ elem.* = try Value.Tag.int_u64.create(arena, bytes[i]);
+ }
- return beginComptimePtrMutationInner(
- sema,
- block,
- src,
- elem_ty,
- &elems[elem_ptr.index],
- ptr_elem_ty,
- parent.decl_ref_mut,
- );
- },
- .str_lit => {
- // An array is memory-optimized to store a slice of bytes, but we are about
- // to modify an individual field and the representation has to change.
- // If we wanted to avoid this, there would need to be special detection
- // elsewhere to identify when writing a value to an array element that is stored
- // using the `str_lit` tag, and handle it without making a call to this function.
- const arena = parent.beginArena(sema.mod);
- defer parent.finishArena(sema.mod);
+ val_ptr.* = try Value.Tag.aggregate.create(arena, elems);
- const str_lit = val_ptr.castTag(.str_lit).?.data;
- const dest_len = parent.ty.arrayLenIncludingSentinel(mod);
- const bytes = sema.mod.string_literal_bytes.items[str_lit.index..][0..str_lit.len];
- const elems = try arena.alloc(Value, @intCast(usize, dest_len));
- for (bytes, 0..) |byte, i| {
- elems[i] = try Value.Tag.int_u64.create(arena, byte);
- }
- if (parent.ty.sentinel(mod)) |sent_val| {
- assert(elems.len == bytes.len + 1);
- elems[bytes.len] = sent_val;
- }
+ return beginComptimePtrMutationInner(
+ sema,
+ block,
+ src,
+ elem_ty,
+ &elems[elem_ptr.index],
+ ptr_elem_ty,
+ parent.decl_ref_mut,
+ );
+ },
+ .str_lit => {
+ // An array is memory-optimized to store a slice of bytes, but we are about
+ // to modify an individual field and the representation has to change.
+ // If we wanted to avoid this, there would need to be special detection
+ // elsewhere to identify when writing a value to an array element that is stored
+ // using the `str_lit` tag, and handle it without making a call to this function.
+ const arena = parent.beginArena(sema.mod);
+ defer parent.finishArena(sema.mod);
+
+ const str_lit = val_ptr.castTag(.str_lit).?.data;
+ const dest_len = parent.ty.arrayLenIncludingSentinel(mod);
+ const bytes = sema.mod.string_literal_bytes.items[str_lit.index..][0..str_lit.len];
+ const elems = try arena.alloc(Value, @intCast(usize, dest_len));
+ for (bytes, 0..) |byte, i| {
+ elems[i] = try Value.Tag.int_u64.create(arena, byte);
+ }
+ if (parent.ty.sentinel(mod)) |sent_val| {
+ assert(elems.len == bytes.len + 1);
+ elems[bytes.len] = sent_val;
+ }
- val_ptr.* = try Value.Tag.aggregate.create(arena, elems);
+ val_ptr.* = try Value.Tag.aggregate.create(arena, elems);
- return beginComptimePtrMutationInner(
- sema,
- block,
- src,
- elem_ty,
- &elems[elem_ptr.index],
- ptr_elem_ty,
- parent.decl_ref_mut,
- );
- },
- .repeated => {
- // An array is memory-optimized to store only a single element value, and
- // that value is understood to be the same for the entire length of the array.
- // However, now we want to modify an individual field and so the
- // representation has to change. If we wanted to avoid this, there would
- // need to be special detection elsewhere to identify when writing a value to an
- // array element that is stored using the `repeated` tag, and handle it
- // without making a call to this function.
- const arena = parent.beginArena(sema.mod);
- defer parent.finishArena(sema.mod);
+ return beginComptimePtrMutationInner(
+ sema,
+ block,
+ src,
+ elem_ty,
+ &elems[elem_ptr.index],
+ ptr_elem_ty,
+ parent.decl_ref_mut,
+ );
+ },
+ .repeated => {
+ // An array is memory-optimized to store only a single element value, and
+ // that value is understood to be the same for the entire length of the array.
+ // However, now we want to modify an individual field and so the
+ // representation has to change. If we wanted to avoid this, there would
+ // need to be special detection elsewhere to identify when writing a value to an
+ // array element that is stored using the `repeated` tag, and handle it
+ // without making a call to this function.
+ const arena = parent.beginArena(sema.mod);
+ defer parent.finishArena(sema.mod);
+
+ const repeated_val = try val_ptr.castTag(.repeated).?.data.copy(arena);
+ const array_len_including_sentinel =
+ try sema.usizeCast(block, src, parent.ty.arrayLenIncludingSentinel(mod));
+ const elems = try arena.alloc(Value, array_len_including_sentinel);
+ if (elems.len > 0) elems[0] = repeated_val;
+ for (elems[1..]) |*elem| {
+ elem.* = try repeated_val.copy(arena);
+ }
- const repeated_val = try val_ptr.castTag(.repeated).?.data.copy(arena);
- const array_len_including_sentinel =
- try sema.usizeCast(block, src, parent.ty.arrayLenIncludingSentinel(mod));
- const elems = try arena.alloc(Value, array_len_including_sentinel);
- if (elems.len > 0) elems[0] = repeated_val;
- for (elems[1..]) |*elem| {
- elem.* = try repeated_val.copy(arena);
- }
+ val_ptr.* = try Value.Tag.aggregate.create(arena, elems);
- val_ptr.* = try Value.Tag.aggregate.create(arena, elems);
+ return beginComptimePtrMutationInner(
+ sema,
+ block,
+ src,
+ elem_ty,
+ &elems[elem_ptr.index],
+ ptr_elem_ty,
+ parent.decl_ref_mut,
+ );
+ },
- return beginComptimePtrMutationInner(
+ .aggregate => return beginComptimePtrMutationInner(
sema,
block,
src,
elem_ty,
- &elems[elem_ptr.index],
+ &val_ptr.castTag(.aggregate).?.data[elem_ptr.index],
ptr_elem_ty,
parent.decl_ref_mut,
- );
- },
+ ),
- .aggregate => return beginComptimePtrMutationInner(
- sema,
- block,
- src,
- elem_ty,
- &val_ptr.castTag(.aggregate).?.data[elem_ptr.index],
- ptr_elem_ty,
- parent.decl_ref_mut,
- ),
+ .the_only_possible_value => {
+ const duped = try sema.arena.create(Value);
+ duped.* = Value.initTag(.the_only_possible_value);
+ return beginComptimePtrMutationInner(
+ sema,
+ block,
+ src,
+ elem_ty,
+ duped,
+ ptr_elem_ty,
+ parent.decl_ref_mut,
+ );
+ },
- .the_only_possible_value => {
- const duped = try sema.arena.create(Value);
- duped.* = Value.initTag(.the_only_possible_value);
- return beginComptimePtrMutationInner(
- sema,
- block,
- src,
- elem_ty,
- duped,
- ptr_elem_ty,
- parent.decl_ref_mut,
- );
+ else => unreachable,
},
-
else => unreachable,
}
},
@@ -27738,7 +27743,7 @@ fn beginComptimePtrMutation(
var parent = try sema.beginComptimePtrMutation(block, src, field_ptr.container_ptr, field_ptr.container_ty);
switch (parent.pointee) {
- .direct => |val_ptr| switch (val_ptr.tag()) {
+ .direct => |val_ptr| switch (val_ptr.ip_index) {
.undef => {
// A struct or union has been initialized to undefined at comptime and now we
// are for the first time setting a field. We must change the representation
@@ -27815,72 +27820,75 @@ fn beginComptimePtrMutation(
else => unreachable,
}
},
- .aggregate => return beginComptimePtrMutationInner(
- sema,
- block,
- src,
- parent.ty.structFieldType(field_index),
- &val_ptr.castTag(.aggregate).?.data[field_index],
- ptr_elem_ty,
- parent.decl_ref_mut,
- ),
-
- .@"union" => {
- // We need to set the active field of the union.
- const arena = parent.beginArena(sema.mod);
- defer parent.finishArena(sema.mod);
-
- const payload = &val_ptr.castTag(.@"union").?.data;
- payload.tag = try Value.Tag.enum_field_index.create(arena, field_index);
-
- return beginComptimePtrMutationInner(
+ .none => switch (val_ptr.tag()) {
+ .aggregate => return beginComptimePtrMutationInner(
sema,
block,
src,
parent.ty.structFieldType(field_index),
- &payload.val,
- ptr_elem_ty,
- parent.decl_ref_mut,
- );
- },
- .slice => switch (field_index) {
- Value.Payload.Slice.ptr_index => return beginComptimePtrMutationInner(
- sema,
- block,
- src,
- parent.ty.slicePtrFieldType(try sema.arena.create(Type.SlicePtrFieldTypeBuffer)),
- &val_ptr.castTag(.slice).?.data.ptr,
+ &val_ptr.castTag(.aggregate).?.data[field_index],
ptr_elem_ty,
parent.decl_ref_mut,
),
- Value.Payload.Slice.len_index => return beginComptimePtrMutationInner(
- sema,
- block,
- src,
- Type.usize,
- &val_ptr.castTag(.slice).?.data.len,
- ptr_elem_ty,
- parent.decl_ref_mut,
- ),
+ .@"union" => {
+ // We need to set the active field of the union.
+ const arena = parent.beginArena(sema.mod);
+ defer parent.finishArena(sema.mod);
- else => unreachable,
- },
+ const payload = &val_ptr.castTag(.@"union").?.data;
+ payload.tag = try Value.Tag.enum_field_index.create(arena, field_index);
- .empty_struct_value => {
- const duped = try sema.arena.create(Value);
- duped.* = Value.initTag(.the_only_possible_value);
- return beginComptimePtrMutationInner(
- sema,
- block,
- src,
- parent.ty.structFieldType(field_index),
- duped,
- ptr_elem_ty,
- parent.decl_ref_mut,
- );
- },
+ return beginComptimePtrMutationInner(
+ sema,
+ block,
+ src,
+ parent.ty.structFieldType(field_index),
+ &payload.val,
+ ptr_elem_ty,
+ parent.decl_ref_mut,
+ );
+ },
+ .slice => switch (field_index) {
+ Value.Payload.Slice.ptr_index => return beginComptimePtrMutationInner(
+ sema,
+ block,
+ src,
+ parent.ty.slicePtrFieldType(try sema.arena.create(Type.SlicePtrFieldTypeBuffer)),
+ &val_ptr.castTag(.slice).?.data.ptr,
+ ptr_elem_ty,
+ parent.decl_ref_mut,
+ ),
+
+ Value.Payload.Slice.len_index => return beginComptimePtrMutationInner(
+ sema,
+ block,
+ src,
+ Type.usize,
+ &val_ptr.castTag(.slice).?.data.len,
+ ptr_elem_ty,
+ parent.decl_ref_mut,
+ ),
+
+ else => unreachable,
+ },
+
+ .empty_struct_value => {
+ const duped = try sema.arena.create(Value);
+ duped.* = Value.initTag(.the_only_possible_value);
+ return beginComptimePtrMutationInner(
+ sema,
+ block,
+ src,
+ parent.ty.structFieldType(field_index),
+ duped,
+ ptr_elem_ty,
+ parent.decl_ref_mut,
+ );
+ },
+ else => unreachable,
+ },
else => unreachable,
},
.reinterpret => |reinterpret| {
@@ -27951,7 +27959,7 @@ fn beginComptimePtrMutation(
switch (parent.pointee) {
.direct => |val_ptr| {
const payload_ty = parent.ty.optionalChild(mod);
- switch (val_ptr.tag()) {
+ switch (val_ptr.ip_index) {
.undef, .null_value => {
// An optional has been initialized to undefined at comptime and now we
// are for the first time setting the payload. We must change the
@@ -27973,12 +27981,19 @@ fn beginComptimePtrMutation(
.ty = payload_ty,
};
},
- .opt_payload => return ComptimePtrMutationKit{
- .decl_ref_mut = parent.decl_ref_mut,
- .pointee = .{ .direct = &val_ptr.castTag(.opt_payload).?.data },
- .ty = payload_ty,
- },
+ .none => switch (val_ptr.tag()) {
+ .opt_payload => return ComptimePtrMutationKit{
+ .decl_ref_mut = parent.decl_ref_mut,
+ .pointee = .{ .direct = &val_ptr.castTag(.opt_payload).?.data },
+ .ty = payload_ty,
+ },
+ else => return ComptimePtrMutationKit{
+ .decl_ref_mut = parent.decl_ref_mut,
+ .pointee = .{ .direct = val_ptr },
+ .ty = payload_ty,
+ },
+ },
else => return ComptimePtrMutationKit{
.decl_ref_mut = parent.decl_ref_mut,
.pointee = .{ .direct = val_ptr },
@@ -28092,231 +28107,236 @@ fn beginComptimePtrLoad(
) ComptimePtrLoadError!ComptimePtrLoadKit {
const mod = sema.mod;
const target = sema.mod.getTarget();
- var deref: ComptimePtrLoadKit = switch (ptr_val.tag()) {
- .decl_ref,
- .decl_ref_mut,
- => blk: {
- const decl_index = switch (ptr_val.tag()) {
- .decl_ref => ptr_val.castTag(.decl_ref).?.data,
- .decl_ref_mut => ptr_val.castTag(.decl_ref_mut).?.data.decl_index,
- else => unreachable,
- };
- const is_mutable = ptr_val.tag() == .decl_ref_mut;
- const decl = sema.mod.declPtr(decl_index);
- const decl_tv = try decl.typedValue();
- if (decl_tv.val.tag() == .variable) return error.RuntimeLoad;
-
- const layout_defined = decl.ty.hasWellDefinedLayout(mod);
- break :blk ComptimePtrLoadKit{
- .parent = if (layout_defined) .{ .tv = decl_tv, .byte_offset = 0 } else null,
- .pointee = decl_tv,
- .is_mutable = is_mutable,
- .ty_without_well_defined_layout = if (!layout_defined) decl.ty else null,
- };
+
+ var deref: ComptimePtrLoadKit = switch (ptr_val.ip_index) {
+ .null_value => {
+ return sema.fail(block, src, "attempt to use null value", .{});
},
- .elem_ptr => blk: {
- const elem_ptr = ptr_val.castTag(.elem_ptr).?.data;
- const elem_ty = elem_ptr.elem_ty;
- var deref = try sema.beginComptimePtrLoad(block, src, elem_ptr.array_ptr, null);
+ .none => switch (ptr_val.tag()) {
+ .decl_ref,
+ .decl_ref_mut,
+ => blk: {
+ const decl_index = switch (ptr_val.tag()) {
+ .decl_ref => ptr_val.castTag(.decl_ref).?.data,
+ .decl_ref_mut => ptr_val.castTag(.decl_ref_mut).?.data.decl_index,
+ else => unreachable,
+ };
+ const is_mutable = ptr_val.tag() == .decl_ref_mut;
+ const decl = sema.mod.declPtr(decl_index);
+ const decl_tv = try decl.typedValue();
+ if (decl_tv.val.tag() == .variable) return error.RuntimeLoad;
+
+ const layout_defined = decl.ty.hasWellDefinedLayout(mod);
+ break :blk ComptimePtrLoadKit{
+ .parent = if (layout_defined) .{ .tv = decl_tv, .byte_offset = 0 } else null,
+ .pointee = decl_tv,
+ .is_mutable = is_mutable,
+ .ty_without_well_defined_layout = if (!layout_defined) decl.ty else null,
+ };
+ },
- // This code assumes that elem_ptrs have been "flattened" in order for direct dereference
- // to succeed, meaning that elem ptrs of the same elem_ty are coalesced. Here we check that
- // our parent is not an elem_ptr with the same elem_ty, since that would be "unflattened"
- if (elem_ptr.array_ptr.castTag(.elem_ptr)) |parent_elem_ptr| {
- assert(!(parent_elem_ptr.data.elem_ty.eql(elem_ty, sema.mod)));
- }
+ .elem_ptr => blk: {
+ const elem_ptr = ptr_val.castTag(.elem_ptr).?.data;
+ const elem_ty = elem_ptr.elem_ty;
+ var deref = try sema.beginComptimePtrLoad(block, src, elem_ptr.array_ptr, null);
+
+ // This code assumes that elem_ptrs have been "flattened" in order for direct dereference
+ // to succeed, meaning that elem ptrs of the same elem_ty are coalesced. Here we check that
+ // our parent is not an elem_ptr with the same elem_ty, since that would be "unflattened"
+ if (elem_ptr.array_ptr.castTag(.elem_ptr)) |parent_elem_ptr| {
+ assert(!(parent_elem_ptr.data.elem_ty.eql(elem_ty, sema.mod)));
+ }
+
+ if (elem_ptr.index != 0) {
+ if (elem_ty.hasWellDefinedLayout(mod)) {
+ if (deref.parent) |*parent| {
+ // Update the byte offset (in-place)
+ const elem_size = try sema.typeAbiSize(elem_ty);
+ const offset = parent.byte_offset + elem_size * elem_ptr.index;
+ parent.byte_offset = try sema.usizeCast(block, src, offset);
+ }
+ } else {
+ deref.parent = null;
+ deref.ty_without_well_defined_layout = elem_ty;
+ }
+ }
+
+ // If we're loading an elem_ptr that was derived from a different type
+ // than the true type of the underlying decl, we cannot deref directly
+ const ty_matches = if (deref.pointee != null and deref.pointee.?.ty.isArrayOrVector(mod)) x: {
+ const deref_elem_ty = deref.pointee.?.ty.childType(mod);
+ break :x (try sema.coerceInMemoryAllowed(block, deref_elem_ty, elem_ty, false, target, src, src)) == .ok or
+ (try sema.coerceInMemoryAllowed(block, elem_ty, deref_elem_ty, false, target, src, src)) == .ok;
+ } else false;
+ if (!ty_matches) {
+ deref.pointee = null;
+ break :blk deref;
+ }
+
+ var array_tv = deref.pointee.?;
+ const check_len = array_tv.ty.arrayLenIncludingSentinel(mod);
+ if (maybe_array_ty) |load_ty| {
+ // It's possible that we're loading a [N]T, in which case we'd like to slice
+ // the pointee array directly from our parent array.
+ if (load_ty.isArrayOrVector(mod) and load_ty.childType(mod).eql(elem_ty, sema.mod)) {
+ const N = try sema.usizeCast(block, src, load_ty.arrayLenIncludingSentinel(mod));
+ deref.pointee = if (elem_ptr.index + N <= check_len) TypedValue{
+ .ty = try Type.array(sema.arena, N, null, elem_ty, sema.mod),
+ .val = try array_tv.val.sliceArray(sema.mod, sema.arena, elem_ptr.index, elem_ptr.index + N),
+ } else null;
+ break :blk deref;
+ }
+ }
+
+ if (elem_ptr.index >= check_len) {
+ deref.pointee = null;
+ break :blk deref;
+ }
+ if (elem_ptr.index == check_len - 1) {
+ if (array_tv.ty.sentinel(mod)) |sent| {
+ deref.pointee = TypedValue{
+ .ty = elem_ty,
+ .val = sent,
+ };
+ break :blk deref;
+ }
+ }
+ deref.pointee = TypedValue{
+ .ty = elem_ty,
+ .val = try array_tv.val.elemValue(sema.mod, sema.arena, elem_ptr.index),
+ };
+ break :blk deref;
+ },
- if (elem_ptr.index != 0) {
- if (elem_ty.hasWellDefinedLayout(mod)) {
- if (deref.parent) |*parent| {
+ .slice => blk: {
+ const slice = ptr_val.castTag(.slice).?.data;
+ break :blk try sema.beginComptimePtrLoad(block, src, slice.ptr, null);
+ },
+
+ .field_ptr => blk: {
+ const field_ptr = ptr_val.castTag(.field_ptr).?.data;
+ const field_index = @intCast(u32, field_ptr.field_index);
+ var deref = try sema.beginComptimePtrLoad(block, src, field_ptr.container_ptr, field_ptr.container_ty);
+
+ if (field_ptr.container_ty.hasWellDefinedLayout(mod)) {
+ const struct_ty = field_ptr.container_ty.castTag(.@"struct");
+ if (struct_ty != null and struct_ty.?.data.layout == .Packed) {
+ // packed structs are not byte addressable
+ deref.parent = null;
+ } else if (deref.parent) |*parent| {
// Update the byte offset (in-place)
- const elem_size = try sema.typeAbiSize(elem_ty);
- const offset = parent.byte_offset + elem_size * elem_ptr.index;
- parent.byte_offset = try sema.usizeCast(block, src, offset);
+ try sema.resolveTypeLayout(field_ptr.container_ty);
+ const field_offset = field_ptr.container_ty.structFieldOffset(field_index, mod);
+ parent.byte_offset = try sema.usizeCast(block, src, parent.byte_offset + field_offset);
}
} else {
deref.parent = null;
- deref.ty_without_well_defined_layout = elem_ty;
+ deref.ty_without_well_defined_layout = field_ptr.container_ty;
}
- }
- // If we're loading an elem_ptr that was derived from a different type
- // than the true type of the underlying decl, we cannot deref directly
- const ty_matches = if (deref.pointee != null and deref.pointee.?.ty.isArrayOrVector(mod)) x: {
- const deref_elem_ty = deref.pointee.?.ty.childType(mod);
- break :x (try sema.coerceInMemoryAllowed(block, deref_elem_ty, elem_ty, false, target, src, src)) == .ok or
- (try sema.coerceInMemoryAllowed(block, elem_ty, deref_elem_ty, false, target, src, src)) == .ok;
- } else false;
- if (!ty_matches) {
- deref.pointee = null;
- break :blk deref;
- }
-
- var array_tv = deref.pointee.?;
- const check_len = array_tv.ty.arrayLenIncludingSentinel(mod);
- if (maybe_array_ty) |load_ty| {
- // It's possible that we're loading a [N]T, in which case we'd like to slice
- // the pointee array directly from our parent array.
- if (load_ty.isArrayOrVector(mod) and load_ty.childType(mod).eql(elem_ty, sema.mod)) {
- const N = try sema.usizeCast(block, src, load_ty.arrayLenIncludingSentinel(mod));
- deref.pointee = if (elem_ptr.index + N <= check_len) TypedValue{
- .ty = try Type.array(sema.arena, N, null, elem_ty, sema.mod),
- .val = try array_tv.val.sliceArray(sema.mod, sema.arena, elem_ptr.index, elem_ptr.index + N),
- } else null;
+ const tv = deref.pointee orelse {
+ deref.pointee = null;
+ break :blk deref;
+ };
+ const coerce_in_mem_ok =
+ (try sema.coerceInMemoryAllowed(block, field_ptr.container_ty, tv.ty, false, target, src, src)) == .ok or
+ (try sema.coerceInMemoryAllowed(block, tv.ty, field_ptr.container_ty, false, target, src, src)) == .ok;
+ if (!coerce_in_mem_ok) {
+ deref.pointee = null;
break :blk deref;
}
- }
- if (elem_ptr.index >= check_len) {
- deref.pointee = null;
- break :blk deref;
- }
- if (elem_ptr.index == check_len - 1) {
- if (array_tv.ty.sentinel(mod)) |sent| {
+ if (field_ptr.container_ty.isSlice(mod)) {
+ const slice_val = tv.val.castTag(.slice).?.data;
+ deref.pointee = switch (field_index) {
+ Value.Payload.Slice.ptr_index => TypedValue{
+ .ty = field_ptr.container_ty.slicePtrFieldType(try sema.arena.create(Type.SlicePtrFieldTypeBuffer)),
+ .val = slice_val.ptr,
+ },
+ Value.Payload.Slice.len_index => TypedValue{
+ .ty = Type.usize,
+ .val = slice_val.len,
+ },
+ else => unreachable,
+ };
+ } else {
+ const field_ty = field_ptr.container_ty.structFieldType(field_index);
deref.pointee = TypedValue{
- .ty = elem_ty,
- .val = sent,
+ .ty = field_ty,
+ .val = tv.val.fieldValue(tv.ty, mod, field_index),
};
- break :blk deref;
}
- }
- deref.pointee = TypedValue{
- .ty = elem_ty,
- .val = try array_tv.val.elemValue(sema.mod, sema.arena, elem_ptr.index),
- };
- break :blk deref;
- },
+ break :blk deref;
+ },
- .slice => blk: {
- const slice = ptr_val.castTag(.slice).?.data;
- break :blk try sema.beginComptimePtrLoad(block, src, slice.ptr, null);
- },
+ .comptime_field_ptr => blk: {
+ const comptime_field_ptr = ptr_val.castTag(.comptime_field_ptr).?.data;
+ break :blk ComptimePtrLoadKit{
+ .parent = null,
+ .pointee = .{ .ty = comptime_field_ptr.field_ty, .val = comptime_field_ptr.field_val },
+ .is_mutable = false,
+ .ty_without_well_defined_layout = comptime_field_ptr.field_ty,
+ };
+ },
- .field_ptr => blk: {
- const field_ptr = ptr_val.castTag(.field_ptr).?.data;
- const field_index = @intCast(u32, field_ptr.field_index);
- var deref = try sema.beginComptimePtrLoad(block, src, field_ptr.container_ptr, field_ptr.container_ty);
+ .opt_payload_ptr,
+ .eu_payload_ptr,
+ => blk: {
+ const payload_ptr = ptr_val.cast(Value.Payload.PayloadPtr).?.data;
+ const payload_ty = switch (ptr_val.tag()) {
+ .eu_payload_ptr => payload_ptr.container_ty.errorUnionPayload(),
+ .opt_payload_ptr => payload_ptr.container_ty.optionalChild(mod),
+ else => unreachable,
+ };
+ var deref = try sema.beginComptimePtrLoad(block, src, payload_ptr.container_ptr, payload_ptr.container_ty);
- if (field_ptr.container_ty.hasWellDefinedLayout(mod)) {
- const struct_ty = field_ptr.container_ty.castTag(.@"struct");
- if (struct_ty != null and struct_ty.?.data.layout == .Packed) {
- // packed structs are not byte addressable
+ // eu_payload_ptr and opt_payload_ptr never have a well-defined layout
+ if (deref.parent != null) {
deref.parent = null;
- } else if (deref.parent) |*parent| {
- // Update the byte offset (in-place)
- try sema.resolveTypeLayout(field_ptr.container_ty);
- const field_offset = field_ptr.container_ty.structFieldOffset(field_index, mod);
- parent.byte_offset = try sema.usizeCast(block, src, parent.byte_offset + field_offset);
+ deref.ty_without_well_defined_layout = payload_ptr.container_ty;
}
- } else {
- deref.parent = null;
- deref.ty_without_well_defined_layout = field_ptr.container_ty;
- }
- const tv = deref.pointee orelse {
- deref.pointee = null;
- break :blk deref;
- };
- const coerce_in_mem_ok =
- (try sema.coerceInMemoryAllowed(block, field_ptr.container_ty, tv.ty, false, target, src, src)) == .ok or
- (try sema.coerceInMemoryAllowed(block, tv.ty, field_ptr.container_ty, false, target, src, src)) == .ok;
- if (!coerce_in_mem_ok) {
+ if (deref.pointee) |*tv| {
+ const coerce_in_mem_ok =
+ (try sema.coerceInMemoryAllowed(block, payload_ptr.container_ty, tv.ty, false, target, src, src)) == .ok or
+ (try sema.coerceInMemoryAllowed(block, tv.ty, payload_ptr.container_ty, false, target, src, src)) == .ok;
+ if (coerce_in_mem_ok) {
+ const payload_val = switch (ptr_val.tag()) {
+ .eu_payload_ptr => if (tv.val.castTag(.eu_payload)) |some| some.data else {
+ return sema.fail(block, src, "attempt to unwrap error: {s}", .{tv.val.castTag(.@"error").?.data.name});
+ },
+ .opt_payload_ptr => if (tv.val.castTag(.opt_payload)) |some| some.data else opt: {
+ if (tv.val.isNull(mod)) return sema.fail(block, src, "attempt to use null value", .{});
+ break :opt tv.val;
+ },
+ else => unreachable,
+ };
+ tv.* = TypedValue{ .ty = payload_ty, .val = payload_val };
+ break :blk deref;
+ }
+ }
deref.pointee = null;
break :blk deref;
- }
-
- if (field_ptr.container_ty.isSlice(mod)) {
- const slice_val = tv.val.castTag(.slice).?.data;
- deref.pointee = switch (field_index) {
- Value.Payload.Slice.ptr_index => TypedValue{
- .ty = field_ptr.container_ty.slicePtrFieldType(try sema.arena.create(Type.SlicePtrFieldTypeBuffer)),
- .val = slice_val.ptr,
- },
- Value.Payload.Slice.len_index => TypedValue{
- .ty = Type.usize,
- .val = slice_val.len,
- },
- else => unreachable,
- };
- } else {
- const field_ty = field_ptr.container_ty.structFieldType(field_index);
- deref.pointee = TypedValue{
- .ty = field_ty,
- .val = tv.val.fieldValue(tv.ty, mod, field_index),
- };
- }
- break :blk deref;
- },
-
- .comptime_field_ptr => blk: {
- const comptime_field_ptr = ptr_val.castTag(.comptime_field_ptr).?.data;
- break :blk ComptimePtrLoadKit{
- .parent = null,
- .pointee = .{ .ty = comptime_field_ptr.field_ty, .val = comptime_field_ptr.field_val },
- .is_mutable = false,
- .ty_without_well_defined_layout = comptime_field_ptr.field_ty,
- };
- },
-
- .opt_payload_ptr,
- .eu_payload_ptr,
- => blk: {
- const payload_ptr = ptr_val.cast(Value.Payload.PayloadPtr).?.data;
- const payload_ty = switch (ptr_val.tag()) {
- .eu_payload_ptr => payload_ptr.container_ty.errorUnionPayload(),
- .opt_payload_ptr => payload_ptr.container_ty.optionalChild(mod),
- else => unreachable,
- };
- var deref = try sema.beginComptimePtrLoad(block, src, payload_ptr.container_ptr, payload_ptr.container_ty);
+ },
+ .opt_payload => blk: {
+ const opt_payload = ptr_val.castTag(.opt_payload).?.data;
+ break :blk try sema.beginComptimePtrLoad(block, src, opt_payload, null);
+ },
- // eu_payload_ptr and opt_payload_ptr never have a well-defined layout
- if (deref.parent != null) {
- deref.parent = null;
- deref.ty_without_well_defined_layout = payload_ptr.container_ty;
- }
+ .zero,
+ .one,
+ .int_u64,
+ .int_i64,
+ .int_big_positive,
+ .int_big_negative,
+ .variable,
+ .extern_fn,
+ .function,
+ => return error.RuntimeLoad,
- if (deref.pointee) |*tv| {
- const coerce_in_mem_ok =
- (try sema.coerceInMemoryAllowed(block, payload_ptr.container_ty, tv.ty, false, target, src, src)) == .ok or
- (try sema.coerceInMemoryAllowed(block, tv.ty, payload_ptr.container_ty, false, target, src, src)) == .ok;
- if (coerce_in_mem_ok) {
- const payload_val = switch (ptr_val.tag()) {
- .eu_payload_ptr => if (tv.val.castTag(.eu_payload)) |some| some.data else {
- return sema.fail(block, src, "attempt to unwrap error: {s}", .{tv.val.castTag(.@"error").?.data.name});
- },
- .opt_payload_ptr => if (tv.val.castTag(.opt_payload)) |some| some.data else opt: {
- if (tv.val.isNull(mod)) return sema.fail(block, src, "attempt to use null value", .{});
- break :opt tv.val;
- },
- else => unreachable,
- };
- tv.* = TypedValue{ .ty = payload_ty, .val = payload_val };
- break :blk deref;
- }
- }
- deref.pointee = null;
- break :blk deref;
- },
- .null_value => {
- return sema.fail(block, src, "attempt to use null value", .{});
- },
- .opt_payload => blk: {
- const opt_payload = ptr_val.castTag(.opt_payload).?.data;
- break :blk try sema.beginComptimePtrLoad(block, src, opt_payload, null);
+ else => unreachable,
},
-
- .zero,
- .one,
- .int_u64,
- .int_i64,
- .int_big_positive,
- .int_big_negative,
- .variable,
- .extern_fn,
- .function,
- => return error.RuntimeLoad,
-
else => unreachable,
};
@@ -28953,7 +28973,7 @@ fn coerceTupleToStruct(
const field_name = fields.keys()[i];
const field = fields.values()[i];
const field_src = inst_src; // TODO better source location
- if (field.default_val.tag() == .unreachable_value) {
+ if (field.default_val.ip_index == .unreachable_value) {
const template = "missing struct field: {s}";
const args = .{field_name};
if (root_msg) |msg| {
@@ -29023,7 +29043,7 @@ fn coerceTupleToTuple(
const elem_ref = try sema.tupleField(block, inst_src, inst, field_src, field_i);
const coerced = try sema.coerce(block, field_ty, elem_ref, field_src);
field_refs[field_index] = coerced;
- if (default_val.tag() != .unreachable_value) {
+ if (default_val.ip_index != .unreachable_value) {
const init_val = (try sema.resolveMaybeUndefVal(coerced)) orelse {
return sema.failWithNeededComptime(block, field_src, "value stored in comptime field must be comptime-known");
};
@@ -29052,7 +29072,7 @@ fn coerceTupleToTuple(
const field_ty = tuple_ty.structFieldType(i);
const field_src = inst_src; // TODO better source location
- if (default_val.tag() == .unreachable_value) {
+ if (default_val.ip_index == .unreachable_value) {
if (tuple_ty.isTuple()) {
const template = "missing tuple field: {d}";
if (root_msg) |msg| {
@@ -31557,7 +31577,7 @@ pub fn resolveTypeRequiresComptime(sema: *Sema, ty: Type) CompileError!bool {
.tuple, .anon_struct => {
const tuple = ty.tupleFields();
for (tuple.types, 0..) |field_ty, i| {
- const have_comptime_val = tuple.values[i].tag() != .unreachable_value;
+ const have_comptime_val = tuple.values[i].ip_index != .unreachable_value;
if (!have_comptime_val and try sema.resolveTypeRequiresComptime(field_ty)) {
return true;
}
@@ -32141,7 +32161,7 @@ fn semaStructFields(mod: *Module, struct_obj: *Module.Struct) CompileError!void
gop.value_ptr.* = .{
.ty = Type.noreturn,
.abi_align = 0,
- .default_val = Value.initTag(.unreachable_value),
+ .default_val = Value.@"unreachable",
.is_comptime = is_comptime,
.offset = undefined,
};
@@ -32965,7 +32985,7 @@ pub fn typeHasOnePossibleValue(sema: *Sema, ty: Type) CompileError!?Value {
=> return null,
.void => return Value.void,
- .noreturn => return Value.initTag(.unreachable_value),
+ .noreturn => return Value.@"unreachable",
.null => return Value.null,
.undefined => return Value.undef,
@@ -33027,7 +33047,7 @@ pub fn typeHasOnePossibleValue(sema: *Sema, ty: Type) CompileError!?Value {
.tuple, .anon_struct => {
const tuple = ty.tupleFields();
for (tuple.values, 0..) |val, i| {
- const is_comptime = val.tag() != .unreachable_value;
+ const is_comptime = val.ip_index != .unreachable_value;
if (is_comptime) continue;
if ((try sema.typeHasOnePossibleValue(tuple.types[i])) != null) continue;
return null;
@@ -33059,7 +33079,7 @@ pub fn typeHasOnePossibleValue(sema: *Sema, ty: Type) CompileError!?Value {
return null;
}
switch (enum_obj.fields.count()) {
- 0 => return Value.initTag(.unreachable_value),
+ 0 => return Value.@"unreachable",
1 => if (enum_obj.values.count() == 0) {
return Value.zero; // auto-numbered
} else {
@@ -33072,7 +33092,7 @@ pub fn typeHasOnePossibleValue(sema: *Sema, ty: Type) CompileError!?Value {
const resolved_ty = try sema.resolveTypeFields(ty);
const enum_simple = resolved_ty.castTag(.enum_simple).?.data;
switch (enum_simple.fields.count()) {
- 0 => return Value.initTag(.unreachable_value),
+ 0 => return Value.@"unreachable",
1 => return Value.zero,
else => return null,
}
@@ -33091,7 +33111,7 @@ pub fn typeHasOnePossibleValue(sema: *Sema, ty: Type) CompileError!?Value {
const tag_val = (try sema.typeHasOnePossibleValue(union_obj.tag_ty)) orelse
return null;
const fields = union_obj.fields.values();
- if (fields.len == 0) return Value.initTag(.unreachable_value);
+ if (fields.len == 0) return Value.@"unreachable";
const only_field = fields[0];
if (only_field.ty.eql(resolved_ty, sema.mod)) {
const msg = try Module.ErrorMsg.create(
@@ -33600,7 +33620,7 @@ pub fn typeRequiresComptime(sema: *Sema, ty: Type) CompileError!bool {
.tuple, .anon_struct => {
const tuple = ty.tupleFields();
for (tuple.types, 0..) |field_ty, i| {
- const have_comptime_val = tuple.values[i].tag() != .unreachable_value;
+ const have_comptime_val = tuple.values[i].ip_index != .unreachable_value;
if (!have_comptime_val and try sema.typeRequiresComptime(field_ty)) {
return true;
}
@@ -33814,7 +33834,7 @@ fn numberAddWrapScalar(
rhs: Value,
ty: Type,
) !Value {
- if (lhs.isUndef() or rhs.isUndef()) return Value.initTag(.undef);
+ if (lhs.isUndef() or rhs.isUndef()) return Value.undef;
const mod = sema.mod;
if (ty.zigTypeTag(mod) == .ComptimeInt) {
@@ -33874,7 +33894,7 @@ fn numberSubWrapScalar(
rhs: Value,
ty: Type,
) !Value {
- if (lhs.isUndef() or rhs.isUndef()) return Value.initTag(.undef);
+ if (lhs.isUndef() or rhs.isUndef()) return Value.undef;
const mod = sema.mod;
if (ty.zigTypeTag(mod) == .ComptimeInt) {
@@ -34156,12 +34176,16 @@ fn intFitsInType(
) CompileError!bool {
const mod = sema.mod;
const target = mod.getTarget();
- switch (val.tag()) {
- .zero,
+ switch (val.ip_index) {
.undef,
+ .zero,
+ .zero_usize,
+ .zero_u8,
=> return true,
- .one => switch (ty.zigTypeTag(mod)) {
+ .one,
+ .one_usize,
+ => switch (ty.zigTypeTag(mod)) {
.Int => {
const info = ty.intInfo(mod);
return switch (info.signedness) {
@@ -34173,111 +34197,129 @@ fn intFitsInType(
else => unreachable,
},
- .lazy_align => switch (ty.zigTypeTag(mod)) {
- .Int => {
- const info = ty.intInfo(mod);
- const max_needed_bits = @as(u16, 16) + @boolToInt(info.signedness == .signed);
- // If it is u16 or bigger we know the alignment fits without resolving it.
- if (info.bits >= max_needed_bits) return true;
- const x = try sema.typeAbiAlignment(val.castTag(.lazy_align).?.data);
- if (x == 0) return true;
- const actual_needed_bits = std.math.log2(x) + 1 + @boolToInt(info.signedness == .signed);
- return info.bits >= actual_needed_bits;
+ .none => switch (val.tag()) {
+ .zero => return true,
+
+ .one => switch (ty.zigTypeTag(mod)) {
+ .Int => {
+ const info = ty.intInfo(mod);
+ return switch (info.signedness) {
+ .signed => info.bits >= 2,
+ .unsigned => info.bits >= 1,
+ };
+ },
+ .ComptimeInt => return true,
+ else => unreachable,
},
- .ComptimeInt => return true,
- else => unreachable,
- },
- .lazy_size => switch (ty.zigTypeTag(mod)) {
- .Int => {
- const info = ty.intInfo(mod);
- const max_needed_bits = @as(u16, 64) + @boolToInt(info.signedness == .signed);
- // If it is u64 or bigger we know the size fits without resolving it.
- if (info.bits >= max_needed_bits) return true;
- const x = try sema.typeAbiSize(val.castTag(.lazy_size).?.data);
- if (x == 0) return true;
- const actual_needed_bits = std.math.log2(x) + 1 + @boolToInt(info.signedness == .signed);
- return info.bits >= actual_needed_bits;
+
+ .lazy_align => switch (ty.zigTypeTag(mod)) {
+ .Int => {
+ const info = ty.intInfo(mod);
+ const max_needed_bits = @as(u16, 16) + @boolToInt(info.signedness == .signed);
+ // If it is u16 or bigger we know the alignment fits without resolving it.
+ if (info.bits >= max_needed_bits) return true;
+ const x = try sema.typeAbiAlignment(val.castTag(.lazy_align).?.data);
+ if (x == 0) return true;
+ const actual_needed_bits = std.math.log2(x) + 1 + @boolToInt(info.signedness == .signed);
+ return info.bits >= actual_needed_bits;
+ },
+ .ComptimeInt => return true,
+ else => unreachable,
+ },
+ .lazy_size => switch (ty.zigTypeTag(mod)) {
+ .Int => {
+ const info = ty.intInfo(mod);
+ const max_needed_bits = @as(u16, 64) + @boolToInt(info.signedness == .signed);
+ // If it is u64 or bigger we know the size fits without resolving it.
+ if (info.bits >= max_needed_bits) return true;
+ const x = try sema.typeAbiSize(val.castTag(.lazy_size).?.data);
+ if (x == 0) return true;
+ const actual_needed_bits = std.math.log2(x) + 1 + @boolToInt(info.signedness == .signed);
+ return info.bits >= actual_needed_bits;
+ },
+ .ComptimeInt => return true,
+ else => unreachable,
},
- .ComptimeInt => return true,
- else => unreachable,
- },
- .int_u64 => switch (ty.zigTypeTag(mod)) {
- .Int => {
- const x = val.castTag(.int_u64).?.data;
- if (x == 0) return true;
- const info = ty.intInfo(mod);
- const needed_bits = std.math.log2(x) + 1 + @boolToInt(info.signedness == .signed);
- return info.bits >= needed_bits;
+ .int_u64 => switch (ty.zigTypeTag(mod)) {
+ .Int => {
+ const x = val.castTag(.int_u64).?.data;
+ if (x == 0) return true;
+ const info = ty.intInfo(mod);
+ const needed_bits = std.math.log2(x) + 1 + @boolToInt(info.signedness == .signed);
+ return info.bits >= needed_bits;
+ },
+ .ComptimeInt => return true,
+ else => unreachable,
},
- .ComptimeInt => return true,
- else => unreachable,
- },
- .int_i64 => switch (ty.zigTypeTag(mod)) {
- .Int => {
- const x = val.castTag(.int_i64).?.data;
- if (x == 0) return true;
- const info = ty.intInfo(mod);
- if (info.signedness == .unsigned and x < 0)
- return false;
- var buffer: Value.BigIntSpace = undefined;
- return (try val.toBigIntAdvanced(&buffer, mod, sema)).fitsInTwosComp(info.signedness, info.bits);
+ .int_i64 => switch (ty.zigTypeTag(mod)) {
+ .Int => {
+ const x = val.castTag(.int_i64).?.data;
+ if (x == 0) return true;
+ const info = ty.intInfo(mod);
+ if (info.signedness == .unsigned and x < 0)
+ return false;
+ var buffer: Value.BigIntSpace = undefined;
+ return (try val.toBigIntAdvanced(&buffer, mod, sema)).fitsInTwosComp(info.signedness, info.bits);
+ },
+ .ComptimeInt => return true,
+ else => unreachable,
},
- .ComptimeInt => return true,
- else => unreachable,
- },
- .int_big_positive => switch (ty.zigTypeTag(mod)) {
- .Int => {
- const info = ty.intInfo(mod);
- return val.castTag(.int_big_positive).?.asBigInt().fitsInTwosComp(info.signedness, info.bits);
+ .int_big_positive => switch (ty.zigTypeTag(mod)) {
+ .Int => {
+ const info = ty.intInfo(mod);
+ return val.castTag(.int_big_positive).?.asBigInt().fitsInTwosComp(info.signedness, info.bits);
+ },
+ .ComptimeInt => return true,
+ else => unreachable,
},
- .ComptimeInt => return true,
- else => unreachable,
- },
- .int_big_negative => switch (ty.zigTypeTag(mod)) {
- .Int => {
- const info = ty.intInfo(mod);
- return val.castTag(.int_big_negative).?.asBigInt().fitsInTwosComp(info.signedness, info.bits);
+ .int_big_negative => switch (ty.zigTypeTag(mod)) {
+ .Int => {
+ const info = ty.intInfo(mod);
+ return val.castTag(.int_big_negative).?.asBigInt().fitsInTwosComp(info.signedness, info.bits);
+ },
+ .ComptimeInt => return true,
+ else => unreachable,
},
- .ComptimeInt => return true,
- else => unreachable,
- },
- .the_only_possible_value => {
- assert(ty.intInfo(mod).bits == 0);
- return true;
- },
+ .the_only_possible_value => {
+ assert(ty.intInfo(mod).bits == 0);
+ return true;
+ },
- .decl_ref_mut,
- .extern_fn,
- .decl_ref,
- .function,
- .variable,
- => switch (ty.zigTypeTag(mod)) {
- .Int => {
- const info = ty.intInfo(mod);
- const ptr_bits = target.ptrBitWidth();
- return switch (info.signedness) {
- .signed => info.bits > ptr_bits,
- .unsigned => info.bits >= ptr_bits,
- };
+ .decl_ref_mut,
+ .extern_fn,
+ .decl_ref,
+ .function,
+ .variable,
+ => switch (ty.zigTypeTag(mod)) {
+ .Int => {
+ const info = ty.intInfo(mod);
+ const ptr_bits = target.ptrBitWidth();
+ return switch (info.signedness) {
+ .signed => info.bits > ptr_bits,
+ .unsigned => info.bits >= ptr_bits,
+ };
+ },
+ .ComptimeInt => return true,
+ else => unreachable,
},
- .ComptimeInt => return true,
- else => unreachable,
- },
- .aggregate => {
- assert(ty.zigTypeTag(mod) == .Vector);
- for (val.castTag(.aggregate).?.data, 0..) |elem, i| {
- if (!(try sema.intFitsInType(elem, ty.scalarType(mod), null))) {
- if (vector_index) |some| some.* = i;
- return false;
+ .aggregate => {
+ assert(ty.zigTypeTag(mod) == .Vector);
+ for (val.castTag(.aggregate).?.data, 0..) |elem, i| {
+ if (!(try sema.intFitsInType(elem, ty.scalarType(mod), null))) {
+ if (vector_index) |some| some.* = i;
+ return false;
+ }
}
- }
- return true;
+ return true;
+ },
+
+ else => unreachable,
},
- else => unreachable,
+ else => @panic("TODO"),
}
}
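
The Sema.zig hunks above all repeat one dispatch shape: switch on val.ip_index first, handle interned sentinels such as .undef, .null_value and .unreachable_value there, and only fall back to the legacy val.tag() payload switch under .none (in intFitsInType the remaining interned indices currently hit a temporary else => @panic("TODO")). A minimal standalone sketch of that shape, using toy stand-in types rather than the compiler's actual Value and InternPool:

const std = @import("std");

// Toy stand-ins for InternPool.Index and the legacy Value.Tag.
const IpIndex = enum { none, undef, null_value, unreachable_value };
const LegacyTag = enum { zero, one, opt_payload };

const ToyValue = struct {
    ip_index: IpIndex,
    legacy_tag: LegacyTag = .zero,

    fn tag(v: ToyValue) LegacyTag {
        // As in the commit, tag() is only meaningful for un-interned values.
        std.debug.assert(v.ip_index == .none);
        return v.legacy_tag;
    }
};

// Mirrors the two-level switch used by intFitsInType, beginComptimePtrLoad, etc.
fn describe(v: ToyValue) []const u8 {
    return switch (v.ip_index) {
        .undef => "undefined",
        .null_value => "null",
        .unreachable_value => "unreachable",
        // Only .none values still carry a legacy tag/payload representation.
        .none => switch (v.tag()) {
            .zero => "0",
            .one => "1",
            .opt_payload => "optional payload",
        },
    };
}

pub fn main() void {
    std.debug.print("{s}\n", .{describe(.{ .ip_index = .null_value })});
    std.debug.print("{s}\n", .{describe(.{ .ip_index = .none, .legacy_tag = .one })});
}
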
src/type.zig
@@ -533,14 +533,14 @@ pub const Type = struct {
for (a_tuple.values, 0..) |a_val, i| {
const ty = a_tuple.types[i];
const b_val = b_tuple.values[i];
- if (a_val.tag() == .unreachable_value) {
- if (b_val.tag() == .unreachable_value) {
+ if (a_val.ip_index == .unreachable_value) {
+ if (b_val.ip_index == .unreachable_value) {
continue;
} else {
return false;
}
} else {
- if (b_val.tag() == .unreachable_value) {
+ if (b_val.ip_index == .unreachable_value) {
return false;
} else {
if (!Value.eql(a_val, b_val, ty, mod)) return false;
@@ -569,14 +569,14 @@ pub const Type = struct {
for (a_struct_obj.values, 0..) |a_val, i| {
const ty = a_struct_obj.types[i];
const b_val = b_struct_obj.values[i];
- if (a_val.tag() == .unreachable_value) {
- if (b_val.tag() == .unreachable_value) {
+ if (a_val.ip_index == .unreachable_value) {
+ if (b_val.ip_index == .unreachable_value) {
continue;
} else {
return false;
}
} else {
- if (b_val.tag() == .unreachable_value) {
+ if (b_val.ip_index == .unreachable_value) {
return false;
} else {
if (!Value.eql(a_val, b_val, ty, mod)) return false;
@@ -750,7 +750,7 @@ pub const Type = struct {
for (tuple.types, 0..) |field_ty, i| {
hashWithHasher(field_ty, hasher, mod);
const field_val = tuple.values[i];
- if (field_val.tag() == .unreachable_value) continue;
+ if (field_val.ip_index == .unreachable_value) continue;
field_val.hash(field_ty, hasher, mod);
}
},
@@ -764,7 +764,7 @@ pub const Type = struct {
const field_val = struct_obj.values[i];
hasher.update(field_name);
hashWithHasher(field_ty, hasher, mod);
- if (field_val.tag() == .unreachable_value) continue;
+ if (field_val.ip_index == .unreachable_value) continue;
field_val.hash(field_ty, hasher, mod);
}
},
@@ -1139,11 +1139,11 @@ pub const Type = struct {
for (tuple.types, 0..) |field_ty, i| {
if (i != 0) try writer.writeAll(", ");
const val = tuple.values[i];
- if (val.tag() != .unreachable_value) {
+ if (val.ip_index != .unreachable_value) {
try writer.writeAll("comptime ");
}
try field_ty.dump("", .{}, writer);
- if (val.tag() != .unreachable_value) {
+ if (val.ip_index != .unreachable_value) {
try writer.print(" = {}", .{val.fmtDebug()});
}
}
@@ -1156,13 +1156,13 @@ pub const Type = struct {
for (anon_struct.types, 0..) |field_ty, i| {
if (i != 0) try writer.writeAll(", ");
const val = anon_struct.values[i];
- if (val.tag() != .unreachable_value) {
+ if (val.ip_index != .unreachable_value) {
try writer.writeAll("comptime ");
}
try writer.writeAll(anon_struct.names[i]);
try writer.writeAll(": ");
try field_ty.dump("", .{}, writer);
- if (val.tag() != .unreachable_value) {
+ if (val.ip_index != .unreachable_value) {
try writer.print(" = {}", .{val.fmtDebug()});
}
}
@@ -1408,11 +1408,11 @@ pub const Type = struct {
for (tuple.types, 0..) |field_ty, i| {
if (i != 0) try writer.writeAll(", ");
const val = tuple.values[i];
- if (val.tag() != .unreachable_value) {
+ if (val.ip_index != .unreachable_value) {
try writer.writeAll("comptime ");
}
try print(field_ty, writer, mod);
- if (val.tag() != .unreachable_value) {
+ if (val.ip_index != .unreachable_value) {
try writer.print(" = {}", .{val.fmtValue(field_ty, mod)});
}
}
@@ -1425,7 +1425,7 @@ pub const Type = struct {
for (anon_struct.types, 0..) |field_ty, i| {
if (i != 0) try writer.writeAll(", ");
const val = anon_struct.values[i];
- if (val.tag() != .unreachable_value) {
+ if (val.ip_index != .unreachable_value) {
try writer.writeAll("comptime ");
}
try writer.writeAll(anon_struct.names[i]);
@@ -1433,7 +1433,7 @@ pub const Type = struct {
try print(field_ty, writer, mod);
- if (val.tag() != .unreachable_value) {
+ if (val.ip_index != .unreachable_value) {
try writer.print(" = {}", .{val.fmtValue(field_ty, mod)});
}
}
@@ -1770,7 +1770,7 @@ pub const Type = struct {
const tuple = ty.tupleFields();
for (tuple.types, 0..) |field_ty, i| {
const val = tuple.values[i];
- if (val.tag() != .unreachable_value) continue; // comptime field
+ if (val.ip_index != .unreachable_value) continue; // comptime field
if (try field_ty.hasRuntimeBitsAdvanced(mod, ignore_comptime_only, strat)) return true;
}
return false;
@@ -2283,7 +2283,7 @@ pub const Type = struct {
var big_align: u32 = 0;
for (tuple.types, 0..) |field_ty, i| {
const val = tuple.values[i];
- if (val.tag() != .unreachable_value) continue; // comptime field
+ if (val.ip_index != .unreachable_value) continue; // comptime field
if (!(field_ty.hasRuntimeBits(mod))) continue;
switch (try field_ty.abiAlignmentAdvanced(mod, strat)) {
@@ -3845,7 +3845,7 @@ pub const Type = struct {
=> return null,
.void => return Value.void,
- .noreturn => return Value.initTag(.unreachable_value),
+ .noreturn => return Value.@"unreachable",
.null => return Value.null,
.undefined => return Value.undef,
@@ -3896,7 +3896,7 @@ pub const Type = struct {
.tuple, .anon_struct => {
const tuple = ty.tupleFields();
for (tuple.values, 0..) |val, i| {
- const is_comptime = val.tag() != .unreachable_value;
+ const is_comptime = val.ip_index != .unreachable_value;
if (is_comptime) continue;
if (tuple.types[i].onePossibleValue(mod) != null) continue;
return null;
@@ -3919,7 +3919,7 @@ pub const Type = struct {
return null;
}
switch (enum_full.fields.count()) {
- 0 => return Value.initTag(.unreachable_value),
+ 0 => return Value.@"unreachable",
1 => if (enum_full.values.count() == 0) {
return Value.zero; // auto-numbered
} else {
@@ -3931,7 +3931,7 @@ pub const Type = struct {
.enum_simple => {
const enum_simple = ty.castTag(.enum_simple).?.data;
switch (enum_simple.fields.count()) {
- 0 => return Value.initTag(.unreachable_value),
+ 0 => return Value.@"unreachable",
1 => return Value.zero,
else => return null,
}
@@ -3947,7 +3947,7 @@ pub const Type = struct {
.@"union", .union_safety_tagged, .union_tagged => {
const union_obj = ty.cast(Payload.Union).?.data;
const tag_val = union_obj.tag_ty.onePossibleValue(mod) orelse return null;
- if (union_obj.fields.count() == 0) return Value.initTag(.unreachable_value);
+ if (union_obj.fields.count() == 0) return Value.@"unreachable";
const only_field = union_obj.fields.values()[0];
const val_val = only_field.ty.onePossibleValue(mod) orelse return null;
_ = tag_val;
@@ -4075,7 +4075,7 @@ pub const Type = struct {
.tuple, .anon_struct => {
const tuple = ty.tupleFields();
for (tuple.types, 0..) |field_ty, i| {
- const have_comptime_val = tuple.values[i].tag() != .unreachable_value;
+ const have_comptime_val = tuple.values[i].ip_index != .unreachable_value;
if (!have_comptime_val and field_ty.comptimeOnly(mod)) return true;
}
return false;
@@ -4514,7 +4514,7 @@ pub const Type = struct {
.tuple => {
const tuple = ty.castTag(.tuple).?.data;
const val = tuple.values[index];
- if (val.tag() == .unreachable_value) {
+ if (val.ip_index == .unreachable_value) {
return tuple.types[index].onePossibleValue(mod);
} else {
return val;
@@ -4523,7 +4523,7 @@ pub const Type = struct {
.anon_struct => {
const anon_struct = ty.castTag(.anon_struct).?.data;
const val = anon_struct.values[index];
- if (val.tag() == .unreachable_value) {
+ if (val.ip_index == .unreachable_value) {
return anon_struct.types[index].onePossibleValue(mod);
} else {
return val;
@@ -4544,12 +4544,12 @@ pub const Type = struct {
.tuple => {
const tuple = ty.castTag(.tuple).?.data;
const val = tuple.values[index];
- return val.tag() != .unreachable_value;
+ return val.ip_index != .unreachable_value;
},
.anon_struct => {
const anon_struct = ty.castTag(.anon_struct).?.data;
const val = anon_struct.values[index];
- return val.tag() != .unreachable_value;
+ return val.ip_index != .unreachable_value;
},
else => unreachable,
}
@@ -4647,7 +4647,7 @@ pub const Type = struct {
for (tuple.types, 0..) |field_ty, i| {
const field_val = tuple.values[i];
- if (field_val.tag() != .unreachable_value or !field_ty.hasRuntimeBits(mod)) {
+ if (field_val.ip_index != .unreachable_value or !field_ty.hasRuntimeBits(mod)) {
// comptime field
if (i == index) return offset;
continue;
src/TypedValue.zig
@@ -76,34 +76,236 @@ pub fn print(
if (val.isVariable(mod))
return writer.writeAll("(variable)");
- while (true) switch (val.tag()) {
- .empty_struct_value, .aggregate => {
- if (level == 0) {
- return writer.writeAll(".{ ... }");
- }
- if (ty.zigTypeTag(mod) == .Struct) {
- try writer.writeAll(".{");
- const max_len = std.math.min(ty.structFieldCount(), max_aggregate_items);
+ while (true) switch (val.ip_index) {
+ .none => switch (val.tag()) {
+ .empty_struct_value, .aggregate => {
+ if (level == 0) {
+ return writer.writeAll(".{ ... }");
+ }
+ if (ty.zigTypeTag(mod) == .Struct) {
+ try writer.writeAll(".{");
+ const max_len = std.math.min(ty.structFieldCount(), max_aggregate_items);
- var i: u32 = 0;
- while (i < max_len) : (i += 1) {
- if (i != 0) try writer.writeAll(", ");
- switch (ty.tag()) {
- .anon_struct, .@"struct" => try writer.print(".{s} = ", .{ty.structFieldName(i)}),
- else => {},
+ var i: u32 = 0;
+ while (i < max_len) : (i += 1) {
+ if (i != 0) try writer.writeAll(", ");
+ switch (ty.tag()) {
+ .anon_struct, .@"struct" => try writer.print(".{s} = ", .{ty.structFieldName(i)}),
+ else => {},
+ }
+ try print(.{
+ .ty = ty.structFieldType(i),
+ .val = val.fieldValue(ty, mod, i),
+ }, writer, level - 1, mod);
}
+ if (ty.structFieldCount() > max_aggregate_items) {
+ try writer.writeAll(", ...");
+ }
+ return writer.writeAll("}");
+ } else {
+ const elem_ty = ty.elemType2(mod);
+ const len = ty.arrayLen(mod);
+
+ if (elem_ty.eql(Type.u8, mod)) str: {
+ const max_len = @intCast(usize, std.math.min(len, max_string_len));
+ var buf: [max_string_len]u8 = undefined;
+
+ var i: u32 = 0;
+ while (i < max_len) : (i += 1) {
+ const elem = val.fieldValue(ty, mod, i);
+ if (elem.isUndef()) break :str;
+ buf[i] = std.math.cast(u8, elem.toUnsignedInt(mod)) orelse break :str;
+ }
+
+ const truncated = if (len > max_string_len) " (truncated)" else "";
+ return writer.print("\"{}{s}\"", .{ std.zig.fmtEscapes(buf[0..max_len]), truncated });
+ }
+
+ try writer.writeAll(".{ ");
+
+ const max_len = std.math.min(len, max_aggregate_items);
+ var i: u32 = 0;
+ while (i < max_len) : (i += 1) {
+ if (i != 0) try writer.writeAll(", ");
+ try print(.{
+ .ty = elem_ty,
+ .val = val.fieldValue(ty, mod, i),
+ }, writer, level - 1, mod);
+ }
+ if (len > max_aggregate_items) {
+ try writer.writeAll(", ...");
+ }
+ return writer.writeAll(" }");
+ }
+ },
+ .@"union" => {
+ if (level == 0) {
+ return writer.writeAll(".{ ... }");
+ }
+ const union_val = val.castTag(.@"union").?.data;
+ try writer.writeAll(".{ ");
+
+ try print(.{
+ .ty = ty.cast(Type.Payload.Union).?.data.tag_ty,
+ .val = union_val.tag,
+ }, writer, level - 1, mod);
+ try writer.writeAll(" = ");
+ try print(.{
+ .ty = ty.unionFieldType(union_val.tag, mod),
+ .val = union_val.val,
+ }, writer, level - 1, mod);
+
+ return writer.writeAll(" }");
+ },
+ .zero => return writer.writeAll("0"),
+ .one => return writer.writeAll("1"),
+ .the_only_possible_value => return writer.writeAll("0"),
+ .ty => return val.castTag(.ty).?.data.print(writer, mod),
+ .int_u64 => return std.fmt.formatIntValue(val.castTag(.int_u64).?.data, "", .{}, writer),
+ .int_i64 => return std.fmt.formatIntValue(val.castTag(.int_i64).?.data, "", .{}, writer),
+ .int_big_positive => return writer.print("{}", .{val.castTag(.int_big_positive).?.asBigInt()}),
+ .int_big_negative => return writer.print("{}", .{val.castTag(.int_big_negative).?.asBigInt()}),
+ .lazy_align => {
+ const sub_ty = val.castTag(.lazy_align).?.data;
+ const x = sub_ty.abiAlignment(mod);
+ return writer.print("{d}", .{x});
+ },
+ .lazy_size => {
+ const sub_ty = val.castTag(.lazy_size).?.data;
+ const x = sub_ty.abiSize(mod);
+ return writer.print("{d}", .{x});
+ },
+ .function => return writer.print("(function '{s}')", .{
+ mod.declPtr(val.castTag(.function).?.data.owner_decl).name,
+ }),
+ .extern_fn => return writer.writeAll("(extern function)"),
+ .variable => unreachable,
+ .decl_ref_mut => {
+ const decl_index = val.castTag(.decl_ref_mut).?.data.decl_index;
+ const decl = mod.declPtr(decl_index);
+ if (level == 0) {
+ return writer.print("(decl ref mut '{s}')", .{decl.name});
+ }
+ return print(.{
+ .ty = decl.ty,
+ .val = decl.val,
+ }, writer, level - 1, mod);
+ },
+ .decl_ref => {
+ const decl_index = val.castTag(.decl_ref).?.data;
+ const decl = mod.declPtr(decl_index);
+ if (level == 0) {
+ return writer.print("(decl ref '{s}')", .{decl.name});
+ }
+ return print(.{
+ .ty = decl.ty,
+ .val = decl.val,
+ }, writer, level - 1, mod);
+ },
+ .comptime_field_ptr => {
+ const payload = val.castTag(.comptime_field_ptr).?.data;
+ if (level == 0) {
+ return writer.writeAll("(comptime field ptr)");
+ }
+ return print(.{
+ .ty = payload.field_ty,
+ .val = payload.field_val,
+ }, writer, level - 1, mod);
+ },
+ .elem_ptr => {
+ const elem_ptr = val.castTag(.elem_ptr).?.data;
+ try writer.writeAll("&");
+ if (level == 0) {
+ try writer.writeAll("(ptr)");
+ } else {
try print(.{
- .ty = ty.structFieldType(i),
- .val = val.fieldValue(ty, mod, i),
+ .ty = elem_ptr.elem_ty,
+ .val = elem_ptr.array_ptr,
}, writer, level - 1, mod);
}
- if (ty.structFieldCount() > max_aggregate_items) {
+ return writer.print("[{}]", .{elem_ptr.index});
+ },
+ .field_ptr => {
+ const field_ptr = val.castTag(.field_ptr).?.data;
+ try writer.writeAll("&");
+ if (level == 0) {
+ try writer.writeAll("(ptr)");
+ } else {
+ try print(.{
+ .ty = field_ptr.container_ty,
+ .val = field_ptr.container_ptr,
+ }, writer, level - 1, mod);
+ }
+
+ if (field_ptr.container_ty.zigTypeTag(mod) == .Struct) {
+ switch (field_ptr.container_ty.tag()) {
+ .tuple => return writer.print(".@\"{d}\"", .{field_ptr.field_index}),
+ else => {
+ const field_name = field_ptr.container_ty.structFieldName(field_ptr.field_index);
+ return writer.print(".{s}", .{field_name});
+ },
+ }
+ } else if (field_ptr.container_ty.zigTypeTag(mod) == .Union) {
+ const field_name = field_ptr.container_ty.unionFields().keys()[field_ptr.field_index];
+ return writer.print(".{s}", .{field_name});
+ } else if (field_ptr.container_ty.isSlice(mod)) {
+ switch (field_ptr.field_index) {
+ Value.Payload.Slice.ptr_index => return writer.writeAll(".ptr"),
+ Value.Payload.Slice.len_index => return writer.writeAll(".len"),
+ else => unreachable,
+ }
+ }
+ },
+ .empty_array => return writer.writeAll(".{}"),
+ .enum_literal => return writer.print(".{}", .{std.zig.fmtId(val.castTag(.enum_literal).?.data)}),
+ .enum_field_index => {
+ return writer.print(".{s}", .{ty.enumFieldName(val.castTag(.enum_field_index).?.data)});
+ },
+ .bytes => return writer.print("\"{}\"", .{std.zig.fmtEscapes(val.castTag(.bytes).?.data)}),
+ .str_lit => {
+ const str_lit = val.castTag(.str_lit).?.data;
+ const bytes = mod.string_literal_bytes.items[str_lit.index..][0..str_lit.len];
+ return writer.print("\"{}\"", .{std.zig.fmtEscapes(bytes)});
+ },
+ .repeated => {
+ if (level == 0) {
+ return writer.writeAll(".{ ... }");
+ }
+ var i: u32 = 0;
+ try writer.writeAll(".{ ");
+ const elem_tv = TypedValue{
+ .ty = ty.elemType2(mod),
+ .val = val.castTag(.repeated).?.data,
+ };
+ const len = ty.arrayLen(mod);
+ const max_len = std.math.min(len, max_aggregate_items);
+ while (i < max_len) : (i += 1) {
+ if (i != 0) try writer.writeAll(", ");
+ try print(elem_tv, writer, level - 1, mod);
+ }
+ if (len > max_aggregate_items) {
try writer.writeAll(", ...");
}
- return writer.writeAll("}");
- } else {
+ return writer.writeAll(" }");
+ },
+ .empty_array_sentinel => {
+ if (level == 0) {
+ return writer.writeAll(".{ (sentinel) }");
+ }
+ try writer.writeAll(".{ ");
+ try print(.{
+ .ty = ty.elemType2(mod),
+ .val = ty.sentinel(mod).?,
+ }, writer, level - 1, mod);
+ return writer.writeAll(" }");
+ },
+ .slice => {
+ if (level == 0) {
+ return writer.writeAll(".{ ... }");
+ }
+ const payload = val.castTag(.slice).?.data;
const elem_ty = ty.elemType2(mod);
- const len = ty.arrayLen(mod);
+ const len = payload.len.toUnsignedInt(mod);
if (elem_ty.eql(Type.u8, mod)) str: {
const max_len = @intCast(usize, std.math.min(len, max_string_len));
@@ -111,11 +313,13 @@ pub fn print(
var i: u32 = 0;
while (i < max_len) : (i += 1) {
- const elem = val.fieldValue(ty, mod, i);
- if (elem.isUndef()) break :str;
- buf[i] = std.math.cast(u8, elem.toUnsignedInt(mod)) orelse break :str;
+ var elem_buf: Value.ElemValueBuffer = undefined;
+ const elem_val = payload.ptr.elemValueBuffer(mod, i, &elem_buf);
+ if (elem_val.isUndef()) break :str;
+ buf[i] = std.math.cast(u8, elem_val.toUnsignedInt(mod)) orelse break :str;
}
+ // TODO would be nice if this had a bit of unicode awareness.
const truncated = if (len > max_string_len) " (truncated)" else "";
return writer.print("\"{}{s}\"", .{ std.zig.fmtEscapes(buf[0..max_len]), truncated });
}
@@ -126,292 +330,91 @@ pub fn print(
var i: u32 = 0;
while (i < max_len) : (i += 1) {
if (i != 0) try writer.writeAll(", ");
+ var buf: Value.ElemValueBuffer = undefined;
try print(.{
.ty = elem_ty,
- .val = val.fieldValue(ty, mod, i),
+ .val = payload.ptr.elemValueBuffer(mod, i, &buf),
}, writer, level - 1, mod);
}
if (len > max_aggregate_items) {
try writer.writeAll(", ...");
}
return writer.writeAll(" }");
- }
- },
- .@"union" => {
- if (level == 0) {
- return writer.writeAll(".{ ... }");
- }
- const union_val = val.castTag(.@"union").?.data;
- try writer.writeAll(".{ ");
-
- try print(.{
- .ty = ty.cast(Type.Payload.Union).?.data.tag_ty,
- .val = union_val.tag,
- }, writer, level - 1, mod);
- try writer.writeAll(" = ");
- try print(.{
- .ty = ty.unionFieldType(union_val.tag, mod),
- .val = union_val.val,
- }, writer, level - 1, mod);
-
- return writer.writeAll(" }");
- },
- .null_value => return writer.writeAll("null"),
- .undef => return writer.writeAll("undefined"),
- .zero => return writer.writeAll("0"),
- .one => return writer.writeAll("1"),
- .unreachable_value => return writer.writeAll("unreachable"),
- .the_only_possible_value => return writer.writeAll("0"),
- .ty => return val.castTag(.ty).?.data.print(writer, mod),
- .int_u64 => return std.fmt.formatIntValue(val.castTag(.int_u64).?.data, "", .{}, writer),
- .int_i64 => return std.fmt.formatIntValue(val.castTag(.int_i64).?.data, "", .{}, writer),
- .int_big_positive => return writer.print("{}", .{val.castTag(.int_big_positive).?.asBigInt()}),
- .int_big_negative => return writer.print("{}", .{val.castTag(.int_big_negative).?.asBigInt()}),
- .lazy_align => {
- const sub_ty = val.castTag(.lazy_align).?.data;
- const x = sub_ty.abiAlignment(mod);
- return writer.print("{d}", .{x});
- },
- .lazy_size => {
- const sub_ty = val.castTag(.lazy_size).?.data;
- const x = sub_ty.abiSize(mod);
- return writer.print("{d}", .{x});
- },
- .function => return writer.print("(function '{s}')", .{
- mod.declPtr(val.castTag(.function).?.data.owner_decl).name,
- }),
- .extern_fn => return writer.writeAll("(extern function)"),
- .variable => unreachable,
- .decl_ref_mut => {
- const decl_index = val.castTag(.decl_ref_mut).?.data.decl_index;
- const decl = mod.declPtr(decl_index);
- if (level == 0) {
- return writer.print("(decl ref mut '{s}')", .{decl.name});
- }
- return print(.{
- .ty = decl.ty,
- .val = decl.val,
- }, writer, level - 1, mod);
- },
- .decl_ref => {
- const decl_index = val.castTag(.decl_ref).?.data;
- const decl = mod.declPtr(decl_index);
- if (level == 0) {
- return writer.print("(decl ref '{s}')", .{decl.name});
- }
- return print(.{
- .ty = decl.ty,
- .val = decl.val,
- }, writer, level - 1, mod);
- },
- .comptime_field_ptr => {
- const payload = val.castTag(.comptime_field_ptr).?.data;
- if (level == 0) {
- return writer.writeAll("(comptime field ptr)");
- }
- return print(.{
- .ty = payload.field_ty,
- .val = payload.field_val,
- }, writer, level - 1, mod);
- },
- .elem_ptr => {
- const elem_ptr = val.castTag(.elem_ptr).?.data;
- try writer.writeAll("&");
- if (level == 0) {
- try writer.writeAll("(ptr)");
- } else {
+ },
+ .float_16 => return writer.print("{d}", .{val.castTag(.float_16).?.data}),
+ .float_32 => return writer.print("{d}", .{val.castTag(.float_32).?.data}),
+ .float_64 => return writer.print("{d}", .{val.castTag(.float_64).?.data}),
+ .float_80 => return writer.print("{d}", .{@floatCast(f64, val.castTag(.float_80).?.data)}),
+ .float_128 => return writer.print("{d}", .{@floatCast(f64, val.castTag(.float_128).?.data)}),
+ .@"error" => return writer.print("error.{s}", .{val.castTag(.@"error").?.data.name}),
+ .eu_payload => {
+ val = val.castTag(.eu_payload).?.data;
+ ty = ty.errorUnionPayload();
+ },
+ .opt_payload => {
+ val = val.castTag(.opt_payload).?.data;
+ ty = ty.optionalChild(mod);
+ return print(.{ .ty = ty, .val = val }, writer, level, mod);
+ },
+ .eu_payload_ptr => {
+ try writer.writeAll("&");
+
+ const data = val.castTag(.eu_payload_ptr).?.data;
+
+ var ty_val: Value.Payload.Ty = .{
+ .base = .{ .tag = .ty },
+ .data = ty,
+ };
+
+ try writer.writeAll("@as(");
try print(.{
- .ty = elem_ptr.elem_ty,
- .val = elem_ptr.array_ptr,
+ .ty = Type.type,
+ .val = Value.initPayload(&ty_val.base),
}, writer, level - 1, mod);
- }
- return writer.print("[{}]", .{elem_ptr.index});
- },
- .field_ptr => {
- const field_ptr = val.castTag(.field_ptr).?.data;
- try writer.writeAll("&");
- if (level == 0) {
- try writer.writeAll("(ptr)");
- } else {
+
+ try writer.writeAll(", &(payload of ");
+
try print(.{
- .ty = field_ptr.container_ty,
- .val = field_ptr.container_ptr,
+ .ty = mod.singleMutPtrType(data.container_ty) catch @panic("OOM"),
+ .val = data.container_ptr,
}, writer, level - 1, mod);
- }
-
- if (field_ptr.container_ty.zigTypeTag(mod) == .Struct) {
- switch (field_ptr.container_ty.tag()) {
- .tuple => return writer.print(".@\"{d}\"", .{field_ptr.field_index}),
- else => {
- const field_name = field_ptr.container_ty.structFieldName(field_ptr.field_index);
- return writer.print(".{s}", .{field_name});
- },
- }
- } else if (field_ptr.container_ty.zigTypeTag(mod) == .Union) {
- const field_name = field_ptr.container_ty.unionFields().keys()[field_ptr.field_index];
- return writer.print(".{s}", .{field_name});
- } else if (field_ptr.container_ty.isSlice(mod)) {
- switch (field_ptr.field_index) {
- Value.Payload.Slice.ptr_index => return writer.writeAll(".ptr"),
- Value.Payload.Slice.len_index => return writer.writeAll(".len"),
- else => unreachable,
- }
- }
- },
- .empty_array => return writer.writeAll(".{}"),
- .enum_literal => return writer.print(".{}", .{std.zig.fmtId(val.castTag(.enum_literal).?.data)}),
- .enum_field_index => {
- return writer.print(".{s}", .{ty.enumFieldName(val.castTag(.enum_field_index).?.data)});
- },
- .bytes => return writer.print("\"{}\"", .{std.zig.fmtEscapes(val.castTag(.bytes).?.data)}),
- .str_lit => {
- const str_lit = val.castTag(.str_lit).?.data;
- const bytes = mod.string_literal_bytes.items[str_lit.index..][0..str_lit.len];
- return writer.print("\"{}\"", .{std.zig.fmtEscapes(bytes)});
- },
- .repeated => {
- if (level == 0) {
- return writer.writeAll(".{ ... }");
- }
- var i: u32 = 0;
- try writer.writeAll(".{ ");
- const elem_tv = TypedValue{
- .ty = ty.elemType2(mod),
- .val = val.castTag(.repeated).?.data,
- };
- const len = ty.arrayLen(mod);
- const max_len = std.math.min(len, max_aggregate_items);
- while (i < max_len) : (i += 1) {
- if (i != 0) try writer.writeAll(", ");
- try print(elem_tv, writer, level - 1, mod);
- }
- if (len > max_aggregate_items) {
- try writer.writeAll(", ...");
- }
- return writer.writeAll(" }");
- },
- .empty_array_sentinel => {
- if (level == 0) {
- return writer.writeAll(".{ (sentinel) }");
- }
- try writer.writeAll(".{ ");
- try print(.{
- .ty = ty.elemType2(mod),
- .val = ty.sentinel(mod).?,
- }, writer, level - 1, mod);
- return writer.writeAll(" }");
- },
- .slice => {
- if (level == 0) {
- return writer.writeAll(".{ ... }");
- }
- const payload = val.castTag(.slice).?.data;
- const elem_ty = ty.elemType2(mod);
- const len = payload.len.toUnsignedInt(mod);
-
- if (elem_ty.eql(Type.u8, mod)) str: {
- const max_len = @intCast(usize, std.math.min(len, max_string_len));
- var buf: [max_string_len]u8 = undefined;
- var i: u32 = 0;
- while (i < max_len) : (i += 1) {
- var elem_buf: Value.ElemValueBuffer = undefined;
- const elem_val = payload.ptr.elemValueBuffer(mod, i, &elem_buf);
- if (elem_val.isUndef()) break :str;
- buf[i] = std.math.cast(u8, elem_val.toUnsignedInt(mod)) orelse break :str;
- }
-
- // TODO would be nice if this had a bit of unicode awareness.
- const truncated = if (len > max_string_len) " (truncated)" else "";
- return writer.print("\"{}{s}\"", .{ std.zig.fmtEscapes(buf[0..max_len]), truncated });
- }
+ try writer.writeAll("))");
+ return;
+ },
+ .opt_payload_ptr => {
+ const data = val.castTag(.opt_payload_ptr).?.data;
- try writer.writeAll(".{ ");
+ var ty_val: Value.Payload.Ty = .{
+ .base = .{ .tag = .ty },
+ .data = ty,
+ };
- const max_len = std.math.min(len, max_aggregate_items);
- var i: u32 = 0;
- while (i < max_len) : (i += 1) {
- if (i != 0) try writer.writeAll(", ");
- var buf: Value.ElemValueBuffer = undefined;
+ try writer.writeAll("@as(");
try print(.{
- .ty = elem_ty,
- .val = payload.ptr.elemValueBuffer(mod, i, &buf),
+ .ty = Type.type,
+ .val = Value.initPayload(&ty_val.base),
}, writer, level - 1, mod);
- }
- if (len > max_aggregate_items) {
- try writer.writeAll(", ...");
- }
- return writer.writeAll(" }");
- },
- .float_16 => return writer.print("{d}", .{val.castTag(.float_16).?.data}),
- .float_32 => return writer.print("{d}", .{val.castTag(.float_32).?.data}),
- .float_64 => return writer.print("{d}", .{val.castTag(.float_64).?.data}),
- .float_80 => return writer.print("{d}", .{@floatCast(f64, val.castTag(.float_80).?.data)}),
- .float_128 => return writer.print("{d}", .{@floatCast(f64, val.castTag(.float_128).?.data)}),
- .@"error" => return writer.print("error.{s}", .{val.castTag(.@"error").?.data.name}),
- .eu_payload => {
- val = val.castTag(.eu_payload).?.data;
- ty = ty.errorUnionPayload();
- },
- .opt_payload => {
- val = val.castTag(.opt_payload).?.data;
- ty = ty.optionalChild(mod);
- return print(.{ .ty = ty, .val = val }, writer, level, mod);
- },
- .eu_payload_ptr => {
- try writer.writeAll("&");
- const data = val.castTag(.eu_payload_ptr).?.data;
+ try writer.writeAll(", &(payload of ");
- var ty_val: Value.Payload.Ty = .{
- .base = .{ .tag = .ty },
- .data = ty,
- };
-
- try writer.writeAll("@as(");
- try print(.{
- .ty = Type.type,
- .val = Value.initPayload(&ty_val.base),
- }, writer, level - 1, mod);
-
- try writer.writeAll(", &(payload of ");
+ try print(.{
+ .ty = mod.singleMutPtrType(data.container_ty) catch @panic("OOM"),
+ .val = data.container_ptr,
+ }, writer, level - 1, mod);
- try print(.{
- .ty = mod.singleMutPtrType(data.container_ty) catch @panic("OOM"),
- .val = data.container_ptr,
- }, writer, level - 1, mod);
+ try writer.writeAll("))");
+ return;
+ },
- try writer.writeAll("))");
- return;
+ // TODO these should not appear in this function
+ .inferred_alloc => return writer.writeAll("(inferred allocation value)"),
+ .inferred_alloc_comptime => return writer.writeAll("(inferred comptime allocation value)"),
+ .runtime_value => return writer.writeAll("[runtime value]"),
},
- .opt_payload_ptr => {
- const data = val.castTag(.opt_payload_ptr).?.data;
-
- var ty_val: Value.Payload.Ty = .{
- .base = .{ .tag = .ty },
- .data = ty,
- };
-
- try writer.writeAll("@as(");
- try print(.{
- .ty = Type.type,
- .val = Value.initPayload(&ty_val.base),
- }, writer, level - 1, mod);
-
- try writer.writeAll(", &(payload of ");
-
- try print(.{
- .ty = mod.singleMutPtrType(data.container_ty) catch @panic("OOM"),
- .val = data.container_ptr,
- }, writer, level - 1, mod);
-
- try writer.writeAll("))");
+ else => {
+ try writer.print("(interned: {})", .{val.ip_index});
return;
},
-
- // TODO these should not appear in this function
- .inferred_alloc => return writer.writeAll("(inferred allocation value)"),
- .inferred_alloc_comptime => return writer.writeAll("(inferred comptime allocation value)"),
- .runtime_value => return writer.writeAll("[runtime value]"),
};
}
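
The value.zig hunk below drops undef, unreachable_value and null_value from the legacy Value.Tag enum; callers in the earlier hunks already moved to pre-interned constants (Value.undef, Value.null, Value.@"unreachable") and to ip_index comparisons such as default_val.ip_index == .unreachable_value. A rough sketch of that idea with toy types, not the real InternPool:

const std = @import("std");

// Toy model: sentinel values live at fixed, pre-interned indices, so they need
// no Tag member and no payload allocation.
const IpIndex = enum { none, undef, null_value, unreachable_value };

const ToyValue = struct {
    ip_index: IpIndex,

    // Analogous to Value.undef, Value.null and Value.@"unreachable" in the commit.
    pub const undef: ToyValue = .{ .ip_index = .undef };
    pub const null_value: ToyValue = .{ .ip_index = .null_value };
    pub const @"unreachable": ToyValue = .{ .ip_index = .unreachable_value };
};

// Mirrors checks like `default_val.ip_index == .unreachable_value`: the
// unreachable sentinel marks a tuple/struct field with no comptime value.
fn hasComptimeValue(default_val: ToyValue) bool {
    return default_val.ip_index != .unreachable_value;
}

pub fn main() void {
    std.debug.print("{}\n", .{hasComptimeValue(ToyValue.@"unreachable")}); // false
    std.debug.print("{}\n", .{hasComptimeValue(ToyValue.undef)}); // true
}
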
src/value.zig
@@ -33,13 +33,10 @@ pub const Value = struct {
// Keep in sync with tools/stage2_pretty_printers_common.py
pub const Tag = enum(usize) {
// The first section of this enum are tags that require no payload.
- undef,
zero,
one,
- unreachable_value,
/// The only possible value for a particular type, which is stored externally.
the_only_possible_value,
- null_value,
empty_struct_value,
empty_array, // See last_no_payload_tag below.
@@ -132,14 +129,11 @@ pub const Value = struct {
pub fn Type(comptime t: Tag) type {
return switch (t) {
- .undef,
.zero,
.one,
- .unreachable_value,
.the_only_possible_value,
.empty_struct_value,
.empty_array,
- .null_value,
=> @compileError("Value Tag " ++ @tagName(t) ++ " has no payload"),
.int_big_positive,
@@ -287,13 +281,10 @@ pub const Value = struct {
.legacy = .{ .tag_if_small_enough = self.legacy.tag_if_small_enough },
};
} else switch (self.legacy.ptr_otherwise.tag) {
- .undef,
.zero,
.one,
- .unreachable_value,
.the_only_possible_value,
.empty_array,
- .null_value,
.empty_struct_value,
=> unreachable,
@@ -522,7 +513,7 @@ pub const Value = struct {
) !void {
comptime assert(fmt.len == 0);
if (start_val.ip_index != .none) {
- try out_stream.print("(interned {d})", .{@enumToInt(start_val.ip_index)});
+ try out_stream.print("(interned: {})", .{start_val.ip_index});
return;
}
var val = start_val;
@@ -534,11 +525,8 @@ pub const Value = struct {
.@"union" => {
return out_stream.writeAll("(union value)");
},
- .null_value => return out_stream.writeAll("null"),
- .undef => return out_stream.writeAll("undefined"),
.zero => return out_stream.writeAll("0"),
.one => return out_stream.writeAll("1"),
- .unreachable_value => return out_stream.writeAll("unreachable"),
.the_only_possible_value => return out_stream.writeAll("(the only possible value)"),
.ty => return val.castTag(.ty).?.data.dump("", options, out_stream),
.lazy_align => {
@@ -811,8 +799,9 @@ pub const Value = struct {
switch (val.ip_index) {
.bool_false => return BigIntMutable.init(&space.limbs, 0).toConst(),
.bool_true => return BigIntMutable.init(&space.limbs, 1).toConst(),
+ .undef => unreachable,
+ .null_value => return BigIntMutable.init(&space.limbs, 0).toConst(),
.none => switch (val.tag()) {
- .null_value,
.zero,
.the_only_possible_value, // i0, u0
=> return BigIntMutable.init(&space.limbs, 0).toConst(),
@@ -832,8 +821,6 @@ pub const Value = struct {
.int_big_positive => return val.castTag(.int_big_positive).?.asBigInt(),
.int_big_negative => return val.castTag(.int_big_negative).?.asBigInt(),
- .undef => unreachable,
-
.lazy_align => {
const ty = val.castTag(.lazy_align).?.data;
if (opt_sema) |sema| {
@@ -880,6 +867,7 @@ pub const Value = struct {
switch (val.ip_index) {
.bool_false => return 0,
.bool_true => return 1,
+ .undef => unreachable,
.none => switch (val.tag()) {
.zero,
.the_only_possible_value, // i0, u0
@@ -892,8 +880,6 @@ pub const Value = struct {
.int_big_positive => return val.castTag(.int_big_positive).?.asBigInt().to(u64) catch null,
.int_big_negative => return val.castTag(.int_big_negative).?.asBigInt().to(u64) catch null,
- .undef => unreachable,
-
.lazy_align => {
const ty = val.castTag(.lazy_align).?.data;
if (opt_sema) |sema| {
@@ -913,9 +899,9 @@ pub const Value = struct {
else => return null,
},
- else => switch (mod.intern_pool.indexToKey(val.ip_index)) {
- .int => |int| return int.big_int.to(u64) catch null,
- else => unreachable,
+ else => return switch (mod.intern_pool.indexToKey(val.ip_index)) {
+ .int => |int| int.big_int.to(u64) catch null,
+ else => null,
},
}
}
@@ -930,6 +916,7 @@ pub const Value = struct {
switch (val.ip_index) {
.bool_false => return 0,
.bool_true => return 1,
+ .undef => unreachable,
.none => switch (val.tag()) {
.zero,
.the_only_possible_value, // i0, u0
@@ -951,7 +938,6 @@ pub const Value = struct {
return @intCast(i64, ty.abiSize(mod));
},
- .undef => unreachable,
else => unreachable,
},
else => switch (mod.intern_pool.indexToKey(val.ip_index)) {
@@ -2032,8 +2018,7 @@ pub const Value = struct {
const a_tag = a.tag();
const b_tag = b.tag();
if (a_tag == b_tag) switch (a_tag) {
- .undef => return true,
- .null_value, .the_only_possible_value, .empty_struct_value => return true,
+ .the_only_possible_value, .empty_struct_value => return true,
.enum_literal => {
const a_name = a.castTag(.enum_literal).?.data;
const b_name = b.castTag(.enum_literal).?.data;
@@ -2162,9 +2147,7 @@ pub const Value = struct {
return eqlAdvanced(a_union.val, active_field_ty, b_union.val, active_field_ty, mod, opt_sema);
},
else => {},
- } else if (b_tag == .null_value or b_tag == .@"error") {
- return false;
- } else if (a_tag == .undef or b_tag == .undef) {
+ } else if (b_tag == .@"error") {
return false;
}
@@ -2283,7 +2266,7 @@ pub const Value = struct {
if (a_nan) return true;
return a_float == b_float;
},
- .Optional => if (a_tag != .null_value and b_tag == .opt_payload) {
+ .Optional => if (b_tag == .opt_payload) {
var sub_pl: Payload.SubValue = .{
.base = .{ .tag = b.tag() },
.data = a,
@@ -2301,7 +2284,7 @@ pub const Value = struct {
},
else => {},
}
- if (a_tag == .null_value or a_tag == .@"error") return false;
+ if (a_tag == .@"error") return false;
return (try orderAdvanced(a, b, mod, opt_sema)).compare(.eq);
}
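
With `undef` and `null_value` carried in `ip_index` rather than as legacy tags, the tag-based fast paths in `eqlAdvanced` above no longer need those cases; for interned values, equality can be decided by comparing indices, because an intern pool hands out exactly one index per distinct value. A rough, self-contained illustration of that property follows, using a toy string `Pool` rather than the compiler's `InternPool`.

const std = @import("std");

/// Toy intern pool: each distinct string gets exactly one index,
/// so equality of interned items reduces to integer comparison.
const Pool = struct {
    map: std.StringHashMap(u32),
    items: std.ArrayList([]const u8),

    fn init(gpa: std.mem.Allocator) Pool {
        return .{
            .map = std.StringHashMap(u32).init(gpa),
            .items = std.ArrayList([]const u8).init(gpa),
        };
    }

    fn deinit(self: *Pool) void {
        self.map.deinit();
        self.items.deinit();
    }

    fn intern(self: *Pool, s: []const u8) !u32 {
        const gop = try self.map.getOrPut(s);
        if (!gop.found_existing) {
            gop.value_ptr.* = @intCast(u32, self.items.items.len);
            try self.items.append(s);
        }
        return gop.value_ptr.*;
    }
};

test "interned equality is index equality" {
    var pool = Pool.init(std.testing.allocator);
    defer pool.deinit();
    const a = try pool.intern("null");
    const b = try pool.intern("null");
    try std.testing.expect(a == b);
}
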
@@ -2642,7 +2625,6 @@ pub const Value = struct {
.zero,
.one,
- .null_value,
.int_u64,
.int_i64,
.int_big_positive,
@@ -2717,102 +2699,108 @@ pub const Value = struct {
arena: ?Allocator,
buffer: *ElemValueBuffer,
) error{OutOfMemory}!Value {
- switch (val.tag()) {
- // This is the case of accessing an element of an undef array.
+ switch (val.ip_index) {
.undef => return Value.undef,
- .empty_array => unreachable, // out of bounds array index
- .empty_struct_value => unreachable, // out of bounds array index
+ .none => switch (val.tag()) {
+ // This is the case of accessing an element of an undef array.
+ .empty_array => unreachable, // out of bounds array index
+ .empty_struct_value => unreachable, // out of bounds array index
- .empty_array_sentinel => {
- assert(index == 0); // The only valid index for an empty array with sentinel.
- return val.castTag(.empty_array_sentinel).?.data;
- },
+ .empty_array_sentinel => {
+ assert(index == 0); // The only valid index for an empty array with sentinel.
+ return val.castTag(.empty_array_sentinel).?.data;
+ },
- .bytes => {
- const byte = val.castTag(.bytes).?.data[index];
- if (arena) |a| {
- return Tag.int_u64.create(a, byte);
- } else {
- buffer.* = .{
- .base = .{ .tag = .int_u64 },
- .data = byte,
- };
- return initPayload(&buffer.base);
- }
- },
- .str_lit => {
- const str_lit = val.castTag(.str_lit).?.data;
- const bytes = mod.string_literal_bytes.items[str_lit.index..][0..str_lit.len];
- const byte = bytes[index];
- if (arena) |a| {
- return Tag.int_u64.create(a, byte);
- } else {
- buffer.* = .{
- .base = .{ .tag = .int_u64 },
- .data = byte,
- };
- return initPayload(&buffer.base);
- }
- },
+ .bytes => {
+ const byte = val.castTag(.bytes).?.data[index];
+ if (arena) |a| {
+ return Tag.int_u64.create(a, byte);
+ } else {
+ buffer.* = .{
+ .base = .{ .tag = .int_u64 },
+ .data = byte,
+ };
+ return initPayload(&buffer.base);
+ }
+ },
+ .str_lit => {
+ const str_lit = val.castTag(.str_lit).?.data;
+ const bytes = mod.string_literal_bytes.items[str_lit.index..][0..str_lit.len];
+ const byte = bytes[index];
+ if (arena) |a| {
+ return Tag.int_u64.create(a, byte);
+ } else {
+ buffer.* = .{
+ .base = .{ .tag = .int_u64 },
+ .data = byte,
+ };
+ return initPayload(&buffer.base);
+ }
+ },
- // No matter the index; all the elements are the same!
- .repeated => return val.castTag(.repeated).?.data,
+ // No matter the index; all the elements are the same!
+ .repeated => return val.castTag(.repeated).?.data,
- .aggregate => return val.castTag(.aggregate).?.data[index],
- .slice => return val.castTag(.slice).?.data.ptr.elemValueAdvanced(mod, index, arena, buffer),
+ .aggregate => return val.castTag(.aggregate).?.data[index],
+ .slice => return val.castTag(.slice).?.data.ptr.elemValueAdvanced(mod, index, arena, buffer),
- .decl_ref => return mod.declPtr(val.castTag(.decl_ref).?.data).val.elemValueAdvanced(mod, index, arena, buffer),
- .decl_ref_mut => return mod.declPtr(val.castTag(.decl_ref_mut).?.data.decl_index).val.elemValueAdvanced(mod, index, arena, buffer),
- .comptime_field_ptr => return val.castTag(.comptime_field_ptr).?.data.field_val.elemValueAdvanced(mod, index, arena, buffer),
- .elem_ptr => {
- const data = val.castTag(.elem_ptr).?.data;
- return data.array_ptr.elemValueAdvanced(mod, index + data.index, arena, buffer);
- },
- .field_ptr => {
- const data = val.castTag(.field_ptr).?.data;
- if (data.container_ptr.pointerDecl()) |decl_index| {
- const container_decl = mod.declPtr(decl_index);
- const field_type = data.container_ty.structFieldType(data.field_index);
- const field_val = container_decl.val.fieldValue(field_type, mod, data.field_index);
- return field_val.elemValueAdvanced(mod, index, arena, buffer);
- } else unreachable;
- },
+ .decl_ref => return mod.declPtr(val.castTag(.decl_ref).?.data).val.elemValueAdvanced(mod, index, arena, buffer),
+ .decl_ref_mut => return mod.declPtr(val.castTag(.decl_ref_mut).?.data.decl_index).val.elemValueAdvanced(mod, index, arena, buffer),
+ .comptime_field_ptr => return val.castTag(.comptime_field_ptr).?.data.field_val.elemValueAdvanced(mod, index, arena, buffer),
+ .elem_ptr => {
+ const data = val.castTag(.elem_ptr).?.data;
+ return data.array_ptr.elemValueAdvanced(mod, index + data.index, arena, buffer);
+ },
+ .field_ptr => {
+ const data = val.castTag(.field_ptr).?.data;
+ if (data.container_ptr.pointerDecl()) |decl_index| {
+ const container_decl = mod.declPtr(decl_index);
+ const field_type = data.container_ty.structFieldType(data.field_index);
+ const field_val = container_decl.val.fieldValue(field_type, mod, data.field_index);
+ return field_val.elemValueAdvanced(mod, index, arena, buffer);
+ } else unreachable;
+ },
- // The child type of arrays which have only one possible value need
- // to have only one possible value itself.
- .the_only_possible_value => return val,
+ // The child type of arrays which have only one possible value need
+ // to have only one possible value itself.
+ .the_only_possible_value => return val,
- .opt_payload_ptr => return val.castTag(.opt_payload_ptr).?.data.container_ptr.elemValueAdvanced(mod, index, arena, buffer),
- .eu_payload_ptr => return val.castTag(.eu_payload_ptr).?.data.container_ptr.elemValueAdvanced(mod, index, arena, buffer),
+ .opt_payload_ptr => return val.castTag(.opt_payload_ptr).?.data.container_ptr.elemValueAdvanced(mod, index, arena, buffer),
+ .eu_payload_ptr => return val.castTag(.eu_payload_ptr).?.data.container_ptr.elemValueAdvanced(mod, index, arena, buffer),
- .opt_payload => return val.castTag(.opt_payload).?.data.elemValueAdvanced(mod, index, arena, buffer),
- .eu_payload => return val.castTag(.eu_payload).?.data.elemValueAdvanced(mod, index, arena, buffer),
+ .opt_payload => return val.castTag(.opt_payload).?.data.elemValueAdvanced(mod, index, arena, buffer),
+ .eu_payload => return val.castTag(.eu_payload).?.data.elemValueAdvanced(mod, index, arena, buffer),
+ else => unreachable,
+ },
else => unreachable,
}
}
/// Returns true if a Value is backed by a variable
pub fn isVariable(val: Value, mod: *Module) bool {
- return switch (val.tag()) {
- .slice => val.castTag(.slice).?.data.ptr.isVariable(mod),
- .comptime_field_ptr => val.castTag(.comptime_field_ptr).?.data.field_val.isVariable(mod),
- .elem_ptr => val.castTag(.elem_ptr).?.data.array_ptr.isVariable(mod),
- .field_ptr => val.castTag(.field_ptr).?.data.container_ptr.isVariable(mod),
- .eu_payload_ptr => val.castTag(.eu_payload_ptr).?.data.container_ptr.isVariable(mod),
- .opt_payload_ptr => val.castTag(.opt_payload_ptr).?.data.container_ptr.isVariable(mod),
- .decl_ref => {
- const decl = mod.declPtr(val.castTag(.decl_ref).?.data);
- assert(decl.has_tv);
- return decl.val.isVariable(mod);
- },
- .decl_ref_mut => {
- const decl = mod.declPtr(val.castTag(.decl_ref_mut).?.data.decl_index);
- assert(decl.has_tv);
- return decl.val.isVariable(mod);
- },
+ return switch (val.ip_index) {
+ .none => switch (val.tag()) {
+ .slice => val.castTag(.slice).?.data.ptr.isVariable(mod),
+ .comptime_field_ptr => val.castTag(.comptime_field_ptr).?.data.field_val.isVariable(mod),
+ .elem_ptr => val.castTag(.elem_ptr).?.data.array_ptr.isVariable(mod),
+ .field_ptr => val.castTag(.field_ptr).?.data.container_ptr.isVariable(mod),
+ .eu_payload_ptr => val.castTag(.eu_payload_ptr).?.data.container_ptr.isVariable(mod),
+ .opt_payload_ptr => val.castTag(.opt_payload_ptr).?.data.container_ptr.isVariable(mod),
+ .decl_ref => {
+ const decl = mod.declPtr(val.castTag(.decl_ref).?.data);
+ assert(decl.has_tv);
+ return decl.val.isVariable(mod);
+ },
+ .decl_ref_mut => {
+ const decl = mod.declPtr(val.castTag(.decl_ref_mut).?.data.decl_index);
+ assert(decl.has_tv);
+ return decl.val.isVariable(mod);
+ },
- .variable => true,
+ .variable => true,
+ else => false,
+ },
else => false,
};
}
@@ -2878,39 +2866,46 @@ pub const Value = struct {
}
pub fn fieldValue(val: Value, ty: Type, mod: *const Module, index: usize) Value {
- switch (val.tag()) {
- .aggregate => {
- const field_values = val.castTag(.aggregate).?.data;
- return field_values[index];
- },
- .@"union" => {
- const payload = val.castTag(.@"union").?.data;
- // TODO assert the tag is correct
- return payload.val;
- },
+ switch (val.ip_index) {
+ .undef => return Value.undef,
+ .none => switch (val.tag()) {
+ .aggregate => {
+ const field_values = val.castTag(.aggregate).?.data;
+ return field_values[index];
+ },
+ .@"union" => {
+ const payload = val.castTag(.@"union").?.data;
+ // TODO assert the tag is correct
+ return payload.val;
+ },
- .the_only_possible_value => return ty.onePossibleValue(mod).?,
+ .the_only_possible_value => return ty.onePossibleValue(mod).?,
- .empty_struct_value => {
- if (ty.isSimpleTupleOrAnonStruct()) {
- const tuple = ty.tupleFields();
- return tuple.values[index];
- }
- if (ty.structFieldValueComptime(mod, index)) |some| {
- return some;
- }
- unreachable;
- },
- .undef => return Value.undef,
+ .empty_struct_value => {
+ if (ty.isSimpleTupleOrAnonStruct()) {
+ const tuple = ty.tupleFields();
+ return tuple.values[index];
+ }
+ if (ty.structFieldValueComptime(mod, index)) |some| {
+ return some;
+ }
+ unreachable;
+ },
+ else => unreachable,
+ },
else => unreachable,
}
}
pub fn unionTag(val: Value) Value {
- switch (val.tag()) {
- .undef, .enum_field_index => return val,
- .@"union" => return val.castTag(.@"union").?.data.tag,
+ switch (val.ip_index) {
+ .undef => return val,
+ .none => switch (val.tag()) {
+ .enum_field_index => return val,
+ .@"union" => return val.castTag(.@"union").?.data.tag,
+ else => unreachable,
+ },
else => unreachable,
}
}
@@ -2946,15 +2941,15 @@ pub const Value = struct {
});
}
- pub fn isUndef(self: Value) bool {
- return self.tag() == .undef;
+ pub fn isUndef(val: Value) bool {
+ return val.ip_index == .undef;
}
/// TODO: check for cases such as array that is not marked undef but all the element
/// values are marked undef, or struct that is not marked undef but all fields are marked
/// undef, etc.
- pub fn isUndefDeep(self: Value) bool {
- return self.isUndef();
+ pub fn isUndefDeep(val: Value) bool {
+ return val.isUndef();
}
/// Returns true if any value contained in `self` is undefined.
@@ -2962,27 +2957,29 @@ pub const Value = struct {
/// values are marked undef, or struct that is not marked undef but all fields are marked
/// undef, etc.
pub fn anyUndef(self: Value, mod: *Module) bool {
- switch (self.tag()) {
- .slice => {
- const payload = self.castTag(.slice).?;
- const len = payload.data.len.toUnsignedInt(mod);
-
- var elem_value_buf: ElemValueBuffer = undefined;
- var i: usize = 0;
- while (i < len) : (i += 1) {
- const elem_val = payload.data.ptr.elemValueBuffer(mod, i, &elem_value_buf);
- if (elem_val.anyUndef(mod)) return true;
- }
- },
+ switch (self.ip_index) {
+ .undef => return true,
+ .none => switch (self.tag()) {
+ .slice => {
+ const payload = self.castTag(.slice).?;
+ const len = payload.data.len.toUnsignedInt(mod);
+
+ var elem_value_buf: ElemValueBuffer = undefined;
+ var i: usize = 0;
+ while (i < len) : (i += 1) {
+ const elem_val = payload.data.ptr.elemValueBuffer(mod, i, &elem_value_buf);
+ if (elem_val.anyUndef(mod)) return true;
+ }
+ },
- .aggregate => {
- const payload = self.castTag(.aggregate).?;
- for (payload.data) |val| {
- if (val.anyUndef(mod)) return true;
- }
+ .aggregate => {
+ const payload = self.castTag(.aggregate).?;
+ for (payload.data) |val| {
+ if (val.anyUndef(mod)) return true;
+ }
+ },
+ else => {},
},
-
- .undef => return true,
else => {},
}
@@ -2992,30 +2989,33 @@ pub const Value = struct {
/// Asserts the value is not undefined and not unreachable.
/// Integer value 0 is considered null because of C pointers.
pub fn isNull(self: Value, mod: *const Module) bool {
- return switch (self.tag()) {
+ return switch (self.ip_index) {
+ .undef => unreachable,
+ .unreachable_value => unreachable,
.null_value => true,
- .opt_payload => false,
+ .none => switch (self.tag()) {
+ .opt_payload => false,
- // If it's not one of those two tags then it must be a C pointer value,
- // in which case the value 0 is null and other values are non-null.
+ // If it's not one of those two tags then it must be a C pointer value,
+ // in which case the value 0 is null and other values are non-null.
- .zero,
- .the_only_possible_value,
- => true,
+ .zero,
+ .the_only_possible_value,
+ => true,
- .one => false,
+ .one => false,
- .int_u64,
- .int_i64,
- .int_big_positive,
- .int_big_negative,
- => self.orderAgainstZero(mod).compare(.eq),
+ .int_u64,
+ .int_i64,
+ .int_big_positive,
+ .int_big_negative,
+ => self.orderAgainstZero(mod).compare(.eq),
- .undef => unreachable,
- .unreachable_value => unreachable,
- .inferred_alloc => unreachable,
- .inferred_alloc_comptime => unreachable,
+ .inferred_alloc => unreachable,
+ .inferred_alloc_comptime => unreachable,
+ else => false,
+ },
else => false,
};
}
@@ -3025,18 +3025,21 @@ pub const Value = struct {
/// something is an error or not because it works without having to figure out the
/// string.
pub fn getError(self: Value) ?[]const u8 {
- return switch (self.tag()) {
- .@"error" => self.castTag(.@"error").?.data.name,
- .int_u64 => @panic("TODO"),
- .int_i64 => @panic("TODO"),
- .int_big_positive => @panic("TODO"),
- .int_big_negative => @panic("TODO"),
- .one => @panic("TODO"),
+ return switch (self.ip_index) {
.undef => unreachable,
.unreachable_value => unreachable,
- .inferred_alloc => unreachable,
- .inferred_alloc_comptime => unreachable,
+ .none => switch (self.tag()) {
+ .@"error" => self.castTag(.@"error").?.data.name,
+ .int_u64 => @panic("TODO"),
+ .int_i64 => @panic("TODO"),
+ .int_big_positive => @panic("TODO"),
+ .int_big_negative => @panic("TODO"),
+ .one => @panic("TODO"),
+ .inferred_alloc => unreachable,
+ .inferred_alloc_comptime => unreachable,
+ else => null,
+ },
else => null,
};
}
@@ -3044,13 +3047,16 @@ pub const Value = struct {
/// Assumes the type is an error union. Returns true if and only if the value is
/// the error union payload, not an error.
pub fn errorUnionIsPayload(val: Value) bool {
- return switch (val.tag()) {
- .eu_payload => true,
- else => false,
-
+ return switch (val.ip_index) {
.undef => unreachable,
- .inferred_alloc => unreachable,
- .inferred_alloc_comptime => unreachable,
+ .none => switch (val.tag()) {
+ .eu_payload => true,
+ else => false,
+
+ .inferred_alloc => unreachable,
+ .inferred_alloc_comptime => unreachable,
+ },
+ else => false,
};
}
@@ -3065,17 +3071,20 @@ pub const Value = struct {
/// Valid for all types. Asserts the value is not undefined.
pub fn isFloat(self: Value) bool {
- return switch (self.tag()) {
+ return switch (self.ip_index) {
.undef => unreachable,
- .inferred_alloc => unreachable,
- .inferred_alloc_comptime => unreachable,
+ .none => switch (self.tag()) {
+ .inferred_alloc => unreachable,
+ .inferred_alloc_comptime => unreachable,
- .float_16,
- .float_32,
- .float_64,
- .float_80,
- .float_128,
- => true,
+ .float_16,
+ .float_32,
+ .float_64,
+ .float_80,
+ .float_128,
+ => true,
+ else => false,
+ },
else => false,
};
}
@@ -3102,40 +3111,44 @@ pub const Value = struct {
pub fn intToFloatScalar(val: Value, arena: Allocator, float_ty: Type, mod: *Module, opt_sema: ?*Sema) !Value {
const target = mod.getTarget();
- switch (val.tag()) {
- .undef, .zero, .one => return val,
- .the_only_possible_value => return Value.initTag(.zero), // for i0, u0
- .int_u64 => {
- return intToFloatInner(val.castTag(.int_u64).?.data, arena, float_ty, target);
- },
- .int_i64 => {
- return intToFloatInner(val.castTag(.int_i64).?.data, arena, float_ty, target);
- },
- .int_big_positive => {
- const limbs = val.castTag(.int_big_positive).?.data;
- const float = bigIntToFloat(limbs, true);
- return floatToValue(float, arena, float_ty, target);
- },
- .int_big_negative => {
- const limbs = val.castTag(.int_big_negative).?.data;
- const float = bigIntToFloat(limbs, false);
- return floatToValue(float, arena, float_ty, target);
- },
- .lazy_align => {
- const ty = val.castTag(.lazy_align).?.data;
- if (opt_sema) |sema| {
- return intToFloatInner((try ty.abiAlignmentAdvanced(mod, .{ .sema = sema })).scalar, arena, float_ty, target);
- } else {
- return intToFloatInner(ty.abiAlignment(mod), arena, float_ty, target);
- }
- },
- .lazy_size => {
- const ty = val.castTag(.lazy_size).?.data;
- if (opt_sema) |sema| {
- return intToFloatInner((try ty.abiSizeAdvanced(mod, .{ .sema = sema })).scalar, arena, float_ty, target);
- } else {
- return intToFloatInner(ty.abiSize(mod), arena, float_ty, target);
- }
+ switch (val.ip_index) {
+ .undef => return val,
+ .none => switch (val.tag()) {
+ .zero, .one => return val,
+ .the_only_possible_value => return Value.initTag(.zero), // for i0, u0
+ .int_u64 => {
+ return intToFloatInner(val.castTag(.int_u64).?.data, arena, float_ty, target);
+ },
+ .int_i64 => {
+ return intToFloatInner(val.castTag(.int_i64).?.data, arena, float_ty, target);
+ },
+ .int_big_positive => {
+ const limbs = val.castTag(.int_big_positive).?.data;
+ const float = bigIntToFloat(limbs, true);
+ return floatToValue(float, arena, float_ty, target);
+ },
+ .int_big_negative => {
+ const limbs = val.castTag(.int_big_negative).?.data;
+ const float = bigIntToFloat(limbs, false);
+ return floatToValue(float, arena, float_ty, target);
+ },
+ .lazy_align => {
+ const ty = val.castTag(.lazy_align).?.data;
+ if (opt_sema) |sema| {
+ return intToFloatInner((try ty.abiAlignmentAdvanced(mod, .{ .sema = sema })).scalar, arena, float_ty, target);
+ } else {
+ return intToFloatInner(ty.abiAlignment(mod), arena, float_ty, target);
+ }
+ },
+ .lazy_size => {
+ const ty = val.castTag(.lazy_size).?.data;
+ if (opt_sema) |sema| {
+ return intToFloatInner((try ty.abiSizeAdvanced(mod, .{ .sema = sema })).scalar, arena, float_ty, target);
+ } else {
+ return intToFloatInner(ty.abiSize(mod), arena, float_ty, target);
+ }
+ },
+ else => unreachable,
},
else => unreachable,
}
@@ -3381,7 +3394,7 @@ pub const Value = struct {
arena: Allocator,
mod: *Module,
) !Value {
- if (lhs.isUndef() or rhs.isUndef()) return Value.initTag(.undef);
+ if (lhs.isUndef() or rhs.isUndef()) return Value.undef;
if (ty.zigTypeTag(mod) == .ComptimeInt) {
return intMul(lhs, rhs, ty, arena, mod);
@@ -3492,7 +3505,7 @@ pub const Value = struct {
/// operands must be integers; handles undefined.
pub fn bitwiseNotScalar(val: Value, ty: Type, arena: Allocator, mod: *Module) !Value {
- if (val.isUndef()) return Value.initTag(.undef);
+ if (val.isUndef()) return Value.undef;
const info = ty.intInfo(mod);
@@ -3532,7 +3545,7 @@ pub const Value = struct {
/// operands must be integers; handles undefined.
pub fn bitwiseAndScalar(lhs: Value, rhs: Value, arena: Allocator, mod: *Module) !Value {
- if (lhs.isUndef() or rhs.isUndef()) return Value.initTag(.undef);
+ if (lhs.isUndef() or rhs.isUndef()) return Value.undef;
// TODO is this a performance issue? maybe we should try the operation without
// resorting to BigInt first.
@@ -3568,7 +3581,7 @@ pub const Value = struct {
/// operands must be integers; handles undefined.
pub fn bitwiseNandScalar(lhs: Value, rhs: Value, ty: Type, arena: Allocator, mod: *Module) !Value {
- if (lhs.isUndef() or rhs.isUndef()) return Value.initTag(.undef);
+ if (lhs.isUndef() or rhs.isUndef()) return Value.undef;
const anded = try bitwiseAnd(lhs, rhs, ty, arena, mod);
@@ -3598,7 +3611,7 @@ pub const Value = struct {
/// operands must be integers; handles undefined.
pub fn bitwiseOrScalar(lhs: Value, rhs: Value, arena: Allocator, mod: *Module) !Value {
- if (lhs.isUndef() or rhs.isUndef()) return Value.initTag(.undef);
+ if (lhs.isUndef() or rhs.isUndef()) return Value.undef;
// TODO is this a performance issue? maybe we should try the operation without
// resorting to BigInt first.
@@ -3633,7 +3646,7 @@ pub const Value = struct {
/// operands must be integers; handles undefined.
pub fn bitwiseXorScalar(lhs: Value, rhs: Value, arena: Allocator, mod: *Module) !Value {
- if (lhs.isUndef() or rhs.isUndef()) return Value.initTag(.undef);
+ if (lhs.isUndef() or rhs.isUndef()) return Value.undef;
// TODO is this a performance issue? maybe we should try the operation without
// resorting to BigInt first.
@@ -5393,11 +5406,12 @@ pub const Value = struct {
.ip_index = .none,
.legacy = .{ .ptr_otherwise = &negative_one_payload.base },
};
- pub const undef = initTag(.undef);
+ pub const undef: Value = .{ .ip_index = .undef, .legacy = undefined };
pub const @"void": Value = .{ .ip_index = .void_value, .legacy = undefined };
- pub const @"null" = initTag(.null_value);
+ pub const @"null": Value = .{ .ip_index = .null_value, .legacy = undefined };
pub const @"false": Value = .{ .ip_index = .bool_false, .legacy = undefined };
pub const @"true": Value = .{ .ip_index = .bool_true, .legacy = undefined };
+ pub const @"unreachable": Value = .{ .ip_index = .unreachable_value, .legacy = undefined };
pub const generic_poison: Value = .{ .ip_index = .generic_poison, .legacy = undefined };
pub const generic_poison_type: Value = .{ .ip_index = .generic_poison_type, .legacy = undefined };
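
The sentinel constants above now point directly at fixed intern-pool indices, which is what lets predicates such as `isUndef` earlier in the file become a single enum comparison instead of a payload-tag inspection. A small sketch of the same idea, with `undefined` standing in for legacy payload state that is never read on these paths; the names are illustrative, not the compiler's.

const std = @import("std");

const IpIndex = enum { none, undef, null_value, bool_false, bool_true, unreachable_value };

const Value = struct {
    ip_index: IpIndex,
    legacy: usize, // never read when ip_index != .none

    pub const undef: Value = .{ .ip_index = .undef, .legacy = undefined };
    pub const @"null": Value = .{ .ip_index = .null_value, .legacy = undefined };

    pub fn isUndef(val: Value) bool {
        return val.ip_index == .undef;
    }
};

test "undef is recognized by index alone" {
    try std.testing.expect(Value.undef.isUndef());
    try std.testing.expect(!Value.@"null".isUndef());
}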