Commit 405f7298ac
Changed files (2)
src/codegen/spirv/Module.zig
@@ -39,41 +39,58 @@ pub const Fn = struct {
/// This section should also contain the OpFunctionEnd instruction marking
/// the end of this function definition.
body: Section = .{},
+ /// The decl dependencies that this function depends on.
+ decl_deps: std.ArrayListUnmanaged(Decl.Index) = .{},
/// Reset this function without deallocating resources, so that
/// it may be used to emit code for another function.
pub fn reset(self: *Fn) void {
self.prologue.reset();
self.body.reset();
+ self.decl_deps.items.len = 0;
}
/// Free the resources owned by this function.
pub fn deinit(self: *Fn, a: Allocator) void {
self.prologue.deinit(a);
self.body.deinit(a);
+ self.decl_deps.deinit(a);
self.* = undefined;
}
};
+/// Declarations, both functions and globals, can have dependencies. These are used for two things:
+/// - Globals must be declared before they are used, including by other globals. The compiler
+///   processes globals in no particular order, so we use these dependencies to determine the
+///   order of the globals in the final module. The `Global` structure is also used for that.
+/// - Entry points must declare the complete list of OpVariable instructions that they access.
+///   For these we use the same dependency structure.
+/// In this mechanism, globals only depend on other globals, while functions may depend on
+/// globals or other functions.
+pub const Decl = struct {
+ /// Index to refer to a Decl by.
+ pub const Index = enum(u32) { _ };
+
+ /// The result-id to be used for this declaration. This is the final result-id
+ /// of the decl, which may be an OpFunction, OpVariable, or the result of a sequence
+ /// of OpSpecConstantOp operations.
+ result_id: IdRef,
+ /// The offset of the first dependency of this decl in the `decl_deps` array.
+ begin_dep: u32,
+ /// The past-end offset of the dependencies of this decl in the `decl_deps` array.
+ end_dep: u32,
+};
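The begin_dep/end_dep pair is an index range into the module-level `decl_deps` list. A minimal sketch of how a decl's dependencies are recovered (assuming hypothetical `module: *Module` and `index: Decl.Index` variables; this mirrors what orderGlobalsInto does below):

    const decl = module.declPtr(index);
    const deps = module.decl_deps.items[decl.begin_dep..decl.end_dep];
    for (deps) |dep| {
        // `dep` is itself a Decl.Index: for a global it names another global,
        // for a function it may name a global or another function.
        _ = dep;
    }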
+
/// Globals must be kept in order: operations involving globals must be ordered
/// so that the global declaration precedes any usage.
pub const Global = struct {
- /// Index type to refer to a global by.
- pub const Index = enum(u32) { _ };
-
- /// The result-id to be used for this global declaration. Note that this does not
- /// necessarily refer to an OpVariable instruction - it may also be the final result
- /// id of a number of OpSpecConstantOp instructions.
+ /// This is the result-id of the OpVariable instruction that declares the global.
result_id: IdRef,
/// The offset into `self.globals.section` of the first instruction of this global
/// declaration.
begin_inst: u32,
    /// The past-end offset into `self.globals.section`.
end_inst: u32,
- /// The first dependency in the `self.globals.dependencies` array list.
- begin_dep: u32,
- /// The past-end dependency in `self.globals.dependencies`.
- end_dep: u32,
};
/// A general-purpose allocator which may be used to allocate resources for this module
@@ -123,18 +140,19 @@ source_file_names: std.StringHashMapUnmanaged(IdRef) = .{},
/// Note: Uses ArrayHashMap which is insertion ordered, so that we may refer to other types by index (Type.Ref).
type_cache: TypeCache = .{},
+/// Set of Decls, referred to by Decl.Index.
+decls: std.ArrayListUnmanaged(Decl) = .{},
+
+decl_deps: std.ArrayListUnmanaged(Decl.Index) = .{},
+
/// The fields in this structure help to maintain the required order for global variables.
globals: struct {
- /// The graph nodes of global variables present in the module.
- nodes: std.ArrayListUnmanaged(Global) = .{},
+ /// Set of globals, referred to by Decl.Index.
+ globals: std.AutoArrayHashMapUnmanaged(Decl.Index, Global) = .{},
/// This pseudo-section contains the initialization code for all the globals. Instructions from
/// here are reordered when flushing the module. Its contents should be part of the
/// `types_globals_constants` SPIR-V section.
section: Section = .{},
- /// Holds a list of dependent global variables for each global variable.
- dependencies: std.ArrayListUnmanaged(Global.Index) = .{},
- /// The global that initialization code/dependencies are currently being generated for, if any.
- current_global: ?Global.Index = null,
} = .{},
pub fn init(gpa: Allocator, arena: Allocator) Module {
@@ -159,9 +177,11 @@ pub fn deinit(self: *Module) void {
self.source_file_names.deinit(self.gpa);
self.type_cache.deinit(self.gpa);
- self.globals.nodes.deinit(self.gpa);
+ self.decls.deinit(self.gpa);
+ self.decl_deps.deinit(self.gpa);
+
+ self.globals.globals.deinit(self.gpa);
self.globals.section.deinit(self.gpa);
- self.globals.dependencies.deinit(self.gpa);
self.* = undefined;
}
@@ -181,16 +201,17 @@ pub fn idBound(self: Module) Word {
}
fn orderGlobalsInto(
- self: Module,
- global_index: Global.Index,
+ self: *Module,
+ index: Decl.Index,
section: *Section,
seen: *std.DynamicBitSetUnmanaged,
) !void {
- const node = self.globals.nodes.items[@enumToInt(global_index)];
- const deps = self.globals.dependencies.items[node.begin_dep..node.end_dep];
- const insts = self.globals.section.instructions.items[node.begin_inst..node.end_inst];
+ const decl = self.declPtr(index);
+ const deps = self.decl_deps.items[decl.begin_dep..decl.end_dep];
+ const global = self.globalPtr(index).?;
+ const insts = self.globals.section.instructions.items[global.begin_inst..global.end_inst];
- seen.set(@enumToInt(global_index));
+ seen.set(@enumToInt(index));
for (deps) |dep| {
if (!seen.isSet(@enumToInt(dep))) {
@@ -201,17 +222,16 @@ fn orderGlobalsInto(
try section.instructions.appendSlice(self.gpa, insts);
}
-fn orderGlobals(self: Module) !Section {
- const nodes = self.globals.nodes.items;
+fn orderGlobals(self: *Module) !Section {
+ const globals = self.globals.globals.keys();
- var seen = try std.DynamicBitSetUnmanaged.initEmpty(self.gpa, nodes.len);
+ var seen = try std.DynamicBitSetUnmanaged.initEmpty(self.gpa, self.decls.items.len);
defer seen.deinit(self.gpa);
var ordered_globals = Section{};
-
- for (0..nodes.len) |global_index| {
- if (!seen.isSet(global_index)) {
- try self.orderGlobalsInto(@intToEnum(Global.Index, @intCast(u32, global_index)), &ordered_globals, &seen);
+ for (globals) |decl_index| {
+ if (!seen.isSet(@enumToInt(decl_index))) {
+ try self.orderGlobalsInto(decl_index, &ordered_globals, &seen);
}
}
@@ -219,12 +239,14 @@ fn orderGlobals(self: Module) !Section {
}
/// Emit this module as a SPIR-V binary.
-pub fn flush(self: Module, file: std.fs.File) !void {
+pub fn flush(self: *Module, file: std.fs.File) !void {
// See SPIR-V Spec section 2.3, "Physical Layout of a SPIR-V Module and Instruction"
const header = [_]Word{
spec.magic_number,
- (1 << 16) | (4 << 8), // TODO: From cpu features
+ // TODO: From cpu features
+ // Emit SPIR-V 1.4 for now. This is the highest version that Intel's CPU OpenCL supports.
+ (1 << 16) | (4 << 8),
0, // TODO: Register Zig compiler magic number.
self.idBound(),
0, // Schema (currently reserved for future use)
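For reference, the version word packs the major version in byte 2 and the minor version in byte 1 (per the SPIR-V spec), so a quick sanity check of the constant above (hypothetical local, not part of the commit):

    // (1 << 16) | (4 << 8) == 0x0001_0400, which encodes SPIR-V version 1.4.
    const version: Word = (1 << 16) | (4 << 8);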
@@ -265,9 +287,10 @@ pub fn flush(self: Module, file: std.fs.File) !void {
}
/// Merge the sections making up a function declaration into this module.
-pub fn addFunction(self: *Module, func: Fn) !void {
+pub fn addFunction(self: *Module, decl_index: Decl.Index, func: Fn) !void {
try self.sections.functions.append(self.gpa, func.prologue);
try self.sections.functions.append(self.gpa, func.body);
+ try self.declareDeclDeps(decl_index, func.decl_deps.items);
}
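On the codegen side this means dependencies are gathered on the Fn while its body is emitted and handed over together with the function. A rough sketch (hypothetical `spv`, `gpa`, `func`, and decl-index variables; the real flow is in the spirv.zig changes below):

    // While lowering the body, record every decl the function references:
    try func.decl_deps.append(gpa, callee_decl_index);
    // Handing the function to the module also registers those deps
    // for the decl via declareDeclDeps:
    try spv.addFunction(fn_decl_index, func);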
/// Fetch the result-id of an OpString instruction that encodes the path of the source
@@ -719,43 +742,56 @@ pub fn decorateMember(
});
}
-pub fn allocGlobal(self: *Module) !Global.Index {
- try self.globals.nodes.append(self.gpa, .{
+pub const DeclKind = enum {
+ func,
+ global,
+};
+
+pub fn allocDecl(self: *Module, kind: DeclKind) !Decl.Index {
+ try self.decls.append(self.gpa, .{
.result_id = self.allocId(),
- .begin_inst = undefined,
- .end_inst = undefined,
.begin_dep = undefined,
.end_dep = undefined,
});
- return @intToEnum(Global.Index, @intCast(u32, self.globals.nodes.items.len - 1));
+ const index = @intToEnum(Decl.Index, @intCast(u32, self.decls.items.len - 1));
+ switch (kind) {
+ .func => {},
+ // If the decl represents a global, also allocate a global node.
+ .global => try self.globals.globals.putNoClobber(self.gpa, index, .{
+ .result_id = undefined,
+ .begin_inst = undefined,
+ .end_inst = undefined,
+ }),
+ }
+
+ return index;
}
-pub fn globalPtr(self: *Module, index: Global.Index) *Global {
- return &self.globals.nodes.items[@enumToInt(index)];
+pub fn declPtr(self: *Module, index: Decl.Index) *Decl {
+ return &self.decls.items[@enumToInt(index)];
}
-/// Begin generating the global for `index`. The previous global is finalized
-/// at this point, and the global for `index` is made active. Any new calls to
-/// `addGlobalDependency` will affect this global. After a new call to this function,
-/// the prior active global cannot be modified again.
-pub fn beginGlobal(self: *Module, index: Global.Index) IdRef {
- const global = self.globalPtr(index);
- global.begin_inst = @intCast(u32, self.globals.section.instructions.items.len);
- global.begin_dep = @intCast(u32, self.globals.dependencies.items.len);
- self.globals.current_global = index;
- return global.result_id;
+pub fn globalPtr(self: *Module, index: Decl.Index) ?*Global {
+ return self.globals.globals.getPtr(index);
}
-/// Finalize the global. After this point, the current global cannot be modified anymore.
-pub fn endGlobal(self: *Module) void {
- const global = self.globalPtr(self.globals.current_global.?);
- global.end_inst = @intCast(u32, self.globals.section.instructions.items.len);
- global.end_dep = @intCast(u32, self.globals.dependencies.items.len);
- self.globals.current_global = null;
+/// Declare ALL dependencies for a decl.
+pub fn declareDeclDeps(self: *Module, decl_index: Decl.Index, deps: []const Decl.Index) !void {
+ const begin_dep = @intCast(u32, self.decl_deps.items.len);
+ try self.decl_deps.appendSlice(self.gpa, deps);
+ const end_dep = @intCast(u32, self.decl_deps.items.len);
+
+ const decl = self.declPtr(decl_index);
+ decl.begin_dep = begin_dep;
+ decl.end_dep = end_dep;
}
-pub fn addGlobalDependency(self: *Module, dependency: Global.Index) !void {
- assert(self.globals.current_global != null);
- assert(self.globals.current_global.? != dependency);
- try self.globals.dependencies.append(self.gpa, dependency);
+pub fn beginGlobal(self: *Module) u32 {
+ return @intCast(u32, self.globals.section.instructions.items.len);
+}
+
+pub fn endGlobal(self: *Module, global_index: Decl.Index, begin_inst: u32) void {
+ const global = self.globalPtr(global_index).?;
+ global.begin_inst = begin_inst;
+ global.end_inst = @intCast(u32, self.globals.section.instructions.items.len);
}
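Taken together, the new lifecycle for a global looks roughly like this from the caller's perspective (a sketch with hypothetical `spv`, `var_id`, and `deps` values; the real sequence lives in lowerIndirectConstant in spirv.zig below):

    const spv_decl_index = try spv.allocDecl(.global);
    const begin_inst = spv.beginGlobal();
    // ... emit the global's instructions into spv.globals.section,
    // storing the OpVariable's result-id in the Global entry:
    spv.globalPtr(spv_decl_index).?.result_id = var_id;
    // Record all dependencies and close the instruction range:
    try spv.declareDeclDeps(spv_decl_index, deps);
    spv.endGlobal(spv_decl_index, begin_inst);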
src/codegen/spirv.zig
@@ -19,6 +19,7 @@ const Word = spec.Word;
const IdRef = spec.IdRef;
const IdResult = spec.IdResult;
const IdResultType = spec.IdResultType;
+const StorageClass = spec.StorageClass;
const SpvModule = @import("spirv/Module.zig");
const SpvSection = @import("spirv/Section.zig");
@@ -37,23 +38,8 @@ const BlockMap = std.AutoHashMapUnmanaged(Air.Inst.Index, struct {
incoming_blocks: *std.ArrayListUnmanaged(IncomingBlock),
});
-/// Linking information about a particular decl.
-/// The active field of this enum depends on the type of the corresponding decl.
-const DeclLink = union {
- /// Linking information about a function.
- /// Active when the decl is a function.
- func: struct {
- /// Result-id of the OpFunction instruction.
- result_id: IdResult,
- },
- /// Linking information about a global. This index points into the
- /// SPIR-V module's `globals` array.
- /// Active when the decl is a variable.
- global: SpvModule.Global.Index,
-};
-
/// Maps Zig decl indices to SPIR-V linking information.
-pub const DeclLinkMap = std.AutoHashMap(Module.Decl.Index, DeclLink);
+pub const DeclLinkMap = std.AutoHashMap(Module.Decl.Index, SpvModule.Decl.Index);
/// This structure is used to compile a declaration, and contains all relevant meta-information to deal with that.
pub const DeclGen = struct {
@@ -251,8 +237,8 @@ pub const DeclGen = struct {
.function => val.castTag(.function).?.data.owner_decl,
else => unreachable,
};
- const link = try self.resolveDecl(fn_decl_index);
- return link.func.result_id;
+ const spv_decl_index = try self.resolveDecl(fn_decl_index);
+ return self.spv.declPtr(spv_decl_index).result_id;
}
return try self.constant(ty, val);
@@ -263,19 +249,19 @@ pub const DeclGen = struct {
/// Fetch or allocate a result id for decl index. This function also marks the decl as alive.
/// Note: Function does not actually generate the decl.
- fn resolveDecl(self: *DeclGen, decl_index: Module.Decl.Index) !DeclLink {
+ fn resolveDecl(self: *DeclGen, decl_index: Module.Decl.Index) !SpvModule.Decl.Index {
const decl = self.module.declPtr(decl_index);
self.module.markDeclAlive(decl);
const entry = try self.decl_link.getOrPut(decl_index);
- const result_id = self.spv.allocId();
-
if (!entry.found_existing) {
- if (decl.val.castTag(.function)) |_| {
- entry.value_ptr.* = .{ .func = .{ .result_id = result_id } };
- } else {
- entry.value_ptr.* = .{ .global = try self.spv.allocGlobal() };
- }
+ // TODO: Extern fn?
+ const kind: SpvModule.DeclKind = if (decl.val.tag() == .function)
+ .func
+ else
+ .global;
+
+ entry.value_ptr.* = try self.spv.allocDecl(kind);
}
return entry.value_ptr.*;
@@ -440,6 +426,8 @@ pub const DeclGen = struct {
/// The partially filled last constant.
/// If full, it's flushed.
partial_word: std.BoundedArray(u8, @sizeOf(Word)) = .{},
+ /// The declaration dependencies of the constant we are lowering.
+ decl_deps: std.ArrayList(SpvModule.Decl.Index),
/// Utility function to get the section that instructions should be lowered to.
fn section(self: *@This()) *SpvSection {
@@ -554,7 +542,7 @@ pub const DeclGen = struct {
const ty_id = dg.typeId(ty_ref);
const decl = dg.module.declPtr(decl_index);
- const link = try dg.resolveDecl(decl_index);
+ const spv_decl_index = try dg.resolveDecl(decl_index);
switch (decl.val.tag()) {
.function => {
@@ -569,14 +557,15 @@ pub const DeclGen = struct {
const result_id = dg.spv.allocId();
log.debug("addDeclRef {s} = {}", .{ decl.name, result_id.id });
- const global = dg.spv.globalPtr(link.global);
- try dg.spv.addGlobalDependency(link.global);
+ try self.decl_deps.append(spv_decl_index);
+
+ const decl_id = dg.spv.declPtr(spv_decl_index).result_id;
// TODO: Do we need a storage class cast here?
// TODO: We can probably eliminate these casts
try dg.spv.globals.section.emitSpecConstantOp(dg.spv.gpa, .OpBitcast, .{
.id_result_type = ty_id,
.id_result = result_id,
- .operand = global.result_id,
+ .operand = decl_id,
});
try self.addPtr(ty_ref, result_id);
@@ -810,10 +799,11 @@ pub const DeclGen = struct {
/// pointer points to. Note: result is not necessarily an OpVariable instruction!
fn lowerIndirectConstant(
self: *DeclGen,
- result_id: IdRef,
+ spv_decl_index: SpvModule.Decl.Index,
ty: Type,
val: Value,
- storage_class: spec.StorageClass,
+ storage_class: StorageClass,
+ cast_to_generic: bool,
alignment: u32,
) Error!void {
// To simplify constant generation, we're going to generate constants as a word-array, and
@@ -844,23 +834,27 @@ pub const DeclGen = struct {
const ty_ref = try self.resolveType(ty, .indirect);
const ptr_ty_ref = try self.spv.ptrType(ty_ref, storage_class, alignment);
- const target = self.getTarget();
-
- if (val.isUndef()) {
- // Special case: the entire value is undefined. In this case, we can just
- // generate an OpVariable with no initializer.
- return try section.emit(self.spv.gpa, .OpVariable, .{
- .id_result_type = self.typeId(ptr_ty_ref),
- .id_result = result_id,
- .storage_class = storage_class,
- });
- } else if (ty.abiSize(target) == 0) {
- // Special case: if the type has no size, then return an undefined pointer.
- return try section.emit(self.spv.gpa, .OpUndef, .{
- .id_result_type = self.typeId(ptr_ty_ref),
- .id_result = result_id,
- });
- }
+ // const target = self.getTarget();
+
+ // TODO: Fix the resulting global linking for these paths.
+ // if (val.isUndef()) {
+ // // Special case: the entire value is undefined. In this case, we can just
+ // // generate an OpVariable with no initializer.
+ // return try section.emit(self.spv.gpa, .OpVariable, .{
+ // .id_result_type = self.typeId(ptr_ty_ref),
+ // .id_result = result_id,
+ // .storage_class = storage_class,
+ // });
+ // } else if (ty.abiSize(target) == 0) {
+ // // Special case: if the type has no size, then return an undefined pointer.
+ // return try section.emit(self.spv.gpa, .OpUndef, .{
+ // .id_result_type = self.typeId(ptr_ty_ref),
+ // .id_result = result_id,
+ // });
+ // }
+
+ // TODO: Capture the above stuff in here as well...
+ const begin_inst = self.spv.beginGlobal();
const u32_ty_ref = try self.intType(.unsigned, 32);
var icl = IndirectConstantLowering{
@@ -869,10 +863,12 @@ pub const DeclGen = struct {
.u32_ty_id = self.typeId(u32_ty_ref),
.members = std.ArrayList(SpvType.Payload.Struct.Member).init(self.gpa),
.initializers = std.ArrayList(IdRef).init(self.gpa),
+ .decl_deps = std.ArrayList(SpvModule.Decl.Index).init(self.gpa),
};
defer icl.members.deinit();
defer icl.initializers.deinit();
+ defer icl.decl_deps.deinit();
try icl.lower(ty, val);
try icl.flush();
@@ -888,6 +884,7 @@ pub const DeclGen = struct {
});
const var_id = self.spv.allocId();
+ self.spv.globalPtr(spv_decl_index).?.result_id = var_id;
try section.emit(self.spv.gpa, .OpVariable, .{
.id_result_type = self.typeId(ptr_constant_struct_ty_ref),
.id_result = var_id,
@@ -896,12 +893,32 @@ pub const DeclGen = struct {
});
// TODO: Set alignment of OpVariable.
// TODO: We may be able to eliminate these casts.
+
const const_ptr_id = try self.makePointerConstant(section, ptr_constant_struct_ty_ref, var_id);
+ const result_id = self.spv.declPtr(spv_decl_index).result_id;
+
+ const bitcast_result_id = if (cast_to_generic)
+ self.spv.allocId()
+ else
+ result_id;
+
try section.emitSpecConstantOp(self.spv.gpa, .OpBitcast, .{
.id_result_type = self.typeId(ptr_ty_ref),
- .id_result = result_id,
+ .id_result = bitcast_result_id,
.operand = const_ptr_id,
});
+
+ if (cast_to_generic) {
+ const generic_ptr_ty_ref = try self.spv.ptrType(ty_ref, .Generic, alignment);
+ try section.emitSpecConstantOp(self.spv.gpa, .OpPtrCastToGeneric, .{
+ .id_result_type = self.typeId(generic_ptr_ty_ref),
+ .id_result = result_id,
+ .pointer = bitcast_result_id,
+ });
+ }
+
+ try self.spv.declareDeclDeps(spv_decl_index, icl.decl_deps.items);
+ self.spv.endGlobal(spv_decl_index, begin_inst);
}
/// This function generates a load for a constant in direct (i.e., non-memory) representation.
@@ -940,19 +957,28 @@ pub const DeclGen = struct {
try section.emit(self.spv.gpa, .OpConstantFalse, operands);
}
},
+ // TODO: We can handle most pointers here (decl refs etc), because now they emit an extra
+ // OpVariable that is not really required.
else => {
// The value cannot be generated directly, so generate it as an indirect constant,
// and then perform an OpLoad.
const alignment = ty.abiAlignment(target);
- const global_index = try self.spv.allocGlobal();
- log.debug("constant {}", .{global_index});
- const ptr_id = self.spv.beginGlobal(global_index);
- defer self.spv.endGlobal();
- try self.lowerIndirectConstant(ptr_id, ty, val, .UniformConstant, alignment);
+ const spv_decl_index = try self.spv.allocDecl(.global);
+
+ try self.lowerIndirectConstant(
+ spv_decl_index,
+ ty,
+ val,
+ .UniformConstant,
+ false,
+ alignment,
+ );
+ try self.func.decl_deps.append(self.spv.gpa, spv_decl_index);
+
try self.func.body.emit(self.spv.gpa, .OpLoad, .{
.id_result_type = result_ty_id,
.id_result = result_id,
- .pointer = ptr_id,
+ .pointer = self.spv.declPtr(spv_decl_index).result_id,
});
// TODO: Convert bools? This logic should hook into `load`. It should be a dead
// path though considering .Bool is handled above.
@@ -1289,7 +1315,7 @@ pub const DeclGen = struct {
}
}
- fn spvStorageClass(as: std.builtin.AddressSpace) spec.StorageClass {
+ fn spvStorageClass(as: std.builtin.AddressSpace) StorageClass {
return switch (as) {
.generic => .Generic, // TODO: Disallow?
.gs, .fs, .ss => unreachable,
@@ -1370,16 +1396,17 @@ pub const DeclGen = struct {
fn genDecl(self: *DeclGen) !void {
const decl = self.module.declPtr(self.decl_index);
- const link = try self.resolveDecl(self.decl_index);
+ const spv_decl_index = try self.resolveDecl(self.decl_index);
- if (decl.val.castTag(.function)) |_| {
- log.debug("genDecl function {s} = {}", .{ decl.name, link.func.result_id.id });
+ const decl_id = self.spv.declPtr(spv_decl_index).result_id;
+ log.debug("genDecl {s} = {}", .{ decl.name, decl_id });
+ if (decl.val.castTag(.function)) |_| {
assert(decl.ty.zigTypeTag() == .Fn);
const prototype_id = try self.resolveTypeId(decl.ty);
try self.func.prologue.emit(self.spv.gpa, .OpFunction, .{
.id_result_type = try self.resolveTypeId(decl.ty.fnReturnType()),
- .id_result = link.func.result_id,
+ .id_result = decl_id,
.function_control = .{}, // TODO: We can set inline here if the type requires it.
.function_type = prototype_id,
});
@@ -1413,18 +1440,18 @@ pub const DeclGen = struct {
// Append the actual code into the functions section.
try self.func.body.emit(self.spv.gpa, .OpFunctionEnd, {});
- try self.spv.addFunction(self.func);
+ try self.spv.addFunction(spv_decl_index, self.func);
const fqn = try decl.getFullyQualifiedName(self.module);
defer self.module.gpa.free(fqn);
try self.spv.sections.debug_names.emit(self.gpa, .OpName, .{
- .target = link.func.result_id,
+ .target = decl_id,
.name = fqn,
});
if (self.module.test_functions.contains(self.decl_index)) {
- try self.generateTestEntryPoint(fqn, link.func.result_id);
+ try self.generateTestEntryPoint(fqn, decl_id);
}
} else {
const init_val = if (decl.val.castTag(.variable)) |payload|
@@ -1438,41 +1465,33 @@ pub const DeclGen = struct {
// TODO: integrate with variable().
- const storage_class = spvStorageClass(decl.@"addrspace");
- const actual_storage_class = switch (storage_class) {
+ const final_storage_class = spvStorageClass(decl.@"addrspace");
+ const actual_storage_class = switch (final_storage_class) {
.Generic => .CrossWorkgroup,
- else => storage_class,
- };
-
- const global_result_id = self.spv.beginGlobal(link.global);
- defer self.spv.endGlobal();
- log.debug("genDecl {}", .{link.global});
-
- const var_result_id = switch (storage_class) {
- .Generic => self.spv.allocId(),
- else => global_result_id,
+ else => final_storage_class,
};
try self.lowerIndirectConstant(
- var_result_id,
+ spv_decl_index,
decl.ty,
init_val,
actual_storage_class,
+ final_storage_class == .Generic,
decl.@"align",
);
- if (storage_class == .Generic) {
- const section = &self.spv.globals.section;
- const ty_ref = try self.resolveType(decl.ty, .indirect);
- const ptr_ty_ref = try self.spv.ptrType(ty_ref, storage_class, decl.@"align");
- // TODO: Can we eliminate this cast?
- // TODO: Const-wash pointer
- try section.emitSpecConstantOp(self.spv.gpa, .OpPtrCastToGeneric, .{
- .id_result_type = self.typeId(ptr_ty_ref),
- .id_result = global_result_id,
- .pointer = var_result_id,
- });
- }
+ // if (storage_class == .Generic) {
+ // const section = &self.spv.globals.section;
+ // const ty_ref = try self.resolveType(decl.ty, .indirect);
+ // const ptr_ty_ref = try self.spv.ptrType(ty_ref, storage_class, decl.@"align");
+ // // TODO: Can we eliminate this cast?
+ // // TODO: Const-wash pointer?
+ // try section.emitSpecConstantOp(self.spv.gpa, .OpPtrCastToGeneric, .{
+ // .id_result_type = self.typeId(ptr_ty_ref),
+ // .id_result = global_result_id,
+ // .pointer = casted_result_id,
+ // });
+ // }
}
}