Commit b9e508c410
Changed files (7)
src/AstGen.zig
@@ -4149,12 +4149,14 @@ fn builtinCall(
},
.@"export" => {
- const target_fn = try expr(gz, scope, .none, params[0]);
- // FIXME: When structs work in stage2, actually implement this correctly!
- // Currently the name is always signifies Strong linkage.
+ // TODO: @export is supposed to be able to export things other than functions.
+ // Instead of `comptimeExpr` here we need `decl_ref`.
+ const fn_to_export = try comptimeExpr(gz, scope, .none, params[0]);
+ // TODO: the second parameter here is supposed to be
+ // `std.builtin.ExportOptions`, not a string.
const export_name = try comptimeExpr(gz, scope, .{ .ty = .const_slice_u8_type }, params[1]);
_ = try gz.addPlNode(.@"export", node, zir.Inst.Bin{
- .lhs = target_fn,
+ .lhs = fn_to_export,
.rhs = export_name,
});
return rvalue(gz, scope, rl, .void_value, node);
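For orientation, here is a minimal sketch of the user-level call shape this hunk handles at this commit (the `add` function and the export name are made up): the first argument is evaluated as a comptime expression that must resolve to a function, and the second argument is still a comptime `[]const u8` name rather than `std.builtin.ExportOptions`, exactly as the TODOs above note.

```zig
fn add(a: i32, b: i32) i32 {
    return a + b;
}

comptime {
    // Per the TODO above, the second operand is a plain name string for now;
    // it is lowered into the `rhs` of the ZIR `export` instruction.
    @export(add, "add");
}
```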
src/Compilation.zig
@@ -510,11 +510,11 @@ pub const InitOptions = struct {
fn addPackageTableToCacheHash(
hash: *Cache.HashHelper,
arena: *std.heap.ArenaAllocator,
- package: *Package,
+ pkg_table: Package.Table,
hash_type: union(enum) { path_bytes, files: *Cache.Manifest },
) (error{OutOfMemory} || std.os.GetCwdError)!void {
const allocator = &arena.allocator;
- const pkg_table = package.table;
+
const packages = try allocator.alloc(Package.Table.Entry, pkg_table.count());
{
// Copy over the hashmap entries to our slice
@@ -547,8 +547,7 @@ fn addPackageTableToCacheHash(
},
}
// Recurse to handle the package's dependencies
- if (package != pkg.value)
- try addPackageTableToCacheHash(hash, arena, pkg.value, hash_type);
+ try addPackageTableToCacheHash(hash, arena, pkg.value.table, hash_type);
}
}
@@ -886,7 +885,7 @@ pub fn create(gpa: *Allocator, options: InitOptions) !*Compilation {
{
var local_arena = std.heap.ArenaAllocator.init(gpa);
defer local_arena.deinit();
- try addPackageTableToCacheHash(&hash, &local_arena, root_pkg, .path_bytes);
+ try addPackageTableToCacheHash(&hash, &local_arena, root_pkg.table, .path_bytes);
}
hash.add(valgrind);
hash.add(single_threaded);
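To make the signature change concrete, here is a rough usage sketch assuming the `Package.create`/`Package.add` API visible elsewhere in this commit; the paths, `gpa`, and `hash` values are hypothetical. The caller now hands over a `Package.Table` directly, and the function recurses into each dependency's own table, which is safe as long as the package graph stays a tree (the old root → "root" self-mapping is gone).

```zig
const root = try Package.create(gpa, "src", "main.zig");
defer root.destroy(gpa); // recursively frees `dep` as well (see Package.zig below)
const dep = try Package.create(gpa, "deps/foo", "foo.zig");
try root.add(gpa, "foo", dep);

var local_arena = std.heap.ArenaAllocator.init(gpa);
defer local_arena.deinit();
// Hash every package reachable through the root's table.
try addPackageTableToCacheHash(&hash, &local_arena, root.table, .path_bytes);
```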
@@ -907,46 +906,38 @@ pub fn create(gpa: *Allocator, options: InitOptions) !*Compilation {
artifact_sub_dir,
};
- const builtin_pkg = try Package.create(gpa, zig_cache_artifact_directory.path.?, "builtin2.zig");
-
- const std_dir_path = try options.zig_lib_directory.join(gpa, &[_][]const u8{"std"});
- defer gpa.free(std_dir_path);
- const start_pkg = try Package.create(gpa, std_dir_path, "start2.zig");
-
- try root_pkg.add(gpa, "builtin", builtin_pkg);
- try root_pkg.add(gpa, "root", root_pkg);
-
- try start_pkg.add(gpa, "builtin", builtin_pkg);
- try start_pkg.add(gpa, "root", root_pkg);
-
// TODO when we implement serialization and deserialization of incremental compilation metadata,
// this is where we would load it. We have open a handle to the directory where
// the output either already is, or will be.
// However we currently do not have serialization of such metadata, so for now
// we set up an empty Module that does the entire compilation fresh.
- if (mem.endsWith(u8, root_pkg.root_src_path, ".zir")) return error.ZirFilesUnsupported;
-
- const start_scope = ss: {
- const start_scope = try gpa.create(Module.Scope.File);
- const struct_ty = try Type.Tag.empty_struct.create(
- gpa,
- &start_scope.root_container,
- );
- start_scope.* = .{
- // TODO this is duped so it can be freed in Container.deinit
- .sub_file_path = try gpa.dupe(u8, start_pkg.root_src_path),
- .source = .{ .unloaded = {} },
- .tree = undefined,
- .status = .never_loaded,
- .pkg = start_pkg,
- .root_container = .{
- .file_scope = start_scope,
- .decls = .{},
- .ty = struct_ty,
- },
- };
- break :ss start_scope;
+ const root_scope = rs: {
+ if (mem.endsWith(u8, root_pkg.root_src_path, ".zig")) {
+ const root_scope = try gpa.create(Module.Scope.File);
+ const struct_ty = try Type.Tag.empty_struct.create(
+ gpa,
+ &root_scope.root_container,
+ );
+ root_scope.* = .{
+ // TODO this is duped so it can be freed in Container.deinit
+ .sub_file_path = try gpa.dupe(u8, root_pkg.root_src_path),
+ .source = .{ .unloaded = {} },
+ .tree = undefined,
+ .status = .never_loaded,
+ .pkg = root_pkg,
+ .root_container = .{
+ .file_scope = root_scope,
+ .decls = .{},
+ .ty = struct_ty,
+ },
+ };
+ break :rs root_scope;
+ } else if (mem.endsWith(u8, root_pkg.root_src_path, ".zir")) {
+ return error.ZirFilesUnsupported;
+ } else {
+ unreachable;
+ }
};
const module = try arena.create(Module);
@@ -955,9 +946,7 @@ pub fn create(gpa: *Allocator, options: InitOptions) !*Compilation {
.gpa = gpa,
.comp = comp,
.root_pkg = root_pkg,
- .root_scope = null,
- .start_pkg = start_pkg,
- .start_scope = start_scope,
+ .root_scope = root_scope,
.zig_cache_artifact_directory = zig_cache_artifact_directory,
.emit_h = options.emit_h,
.error_name_list = try std.ArrayListUnmanaged([]const u8).initCapacity(gpa, 1),
@@ -1359,9 +1348,9 @@ pub fn update(self: *Compilation) !void {
// TODO Detect which source files changed.
// Until then we simulate a full cache miss. Source files could have been loaded
// for any reason; to force a refresh we unload now.
- module.unloadFile(module.start_scope);
+ module.unloadFile(module.root_scope);
module.failed_root_src_file = null;
- module.analyzeContainer(&module.start_scope.root_container) catch |err| switch (err) {
+ module.analyzeContainer(&module.root_scope.root_container) catch |err| switch (err) {
error.AnalysisFail => {
assert(self.totalErrorCount() != 0);
},
@@ -1422,7 +1411,7 @@ pub fn update(self: *Compilation) !void {
// to report error messages. Otherwise we unload all source files to save memory.
if (self.totalErrorCount() == 0 and !self.keep_source_files_loaded) {
if (self.bin_file.options.module) |module| {
- module.start_scope.unload(self.gpa);
+ module.root_scope.unload(self.gpa);
}
}
}
@@ -2833,11 +2822,6 @@ fn updateBuiltinZigFile(comp: *Compilation, mod: *Module) !void {
const source = try comp.generateBuiltinZigSource(comp.gpa);
defer comp.gpa.free(source);
try mod.zig_cache_artifact_directory.handle.writeFile("builtin.zig", source);
-
- // FIXME: Remove builtin2.zig when stage2 can correctly generate code for builtin.zig!
- const source2 = try comp.generateBuiltin2ZigSource(comp.gpa);
- defer comp.gpa.free(source2);
- try mod.zig_cache_artifact_directory.handle.writeFile("builtin2.zig", source2);
}
pub fn dump_argv(argv: []const []const u8) void {
@@ -2847,30 +2831,6 @@ pub fn dump_argv(argv: []const []const u8) void {
std.debug.print("{s}\n", .{argv[argv.len - 1]});
}
-fn generateBuiltin2ZigSource(comp: *Compilation, allocator: *Allocator) ![]u8 {
- var buffer = std.ArrayList(u8).init(allocator);
- defer buffer.deinit();
-
- const target = comp.getTarget();
-
- try buffer.writer().print(
- \\pub const link_libc = {};
- \\pub const arch = {};
- \\pub const os = {};
- \\pub const output_mode = {};
- \\pub const object_format = {};
- \\
- , .{
- comp.bin_file.options.link_libc,
- @enumToInt(target.cpu.arch),
- @enumToInt(target.os.tag),
- @enumToInt(comp.bin_file.options.output_mode),
- @enumToInt(comp.bin_file.options.object_format),
- });
-
- return buffer.toOwnedSlice();
-}
-
pub fn generateBuiltinZigSource(comp: *Compilation, allocator: *Allocator) ![]u8 {
const tracy = trace(@src());
defer tracy.end();
@@ -3215,7 +3175,7 @@ fn updateStage1Module(comp: *Compilation, main_progress_node: *std.Progress.Node
{
var local_arena = std.heap.ArenaAllocator.init(comp.gpa);
defer local_arena.deinit();
- try addPackageTableToCacheHash(&man.hash, &local_arena, mod.root_pkg, .{ .files = &man });
+ try addPackageTableToCacheHash(&man.hash, &local_arena, mod.root_pkg.table, .{ .files = &man });
}
man.hash.add(comp.bin_file.options.valgrind);
man.hash.add(comp.bin_file.options.single_threaded);
src/main.zig
@@ -1732,8 +1732,6 @@ fn buildOutputType(
},
}
- // This gets cleaned up, because root_pkg becomes part of the
- // package table of the start_pkg.
const root_pkg: ?*Package = if (root_src_file) |src_path| blk: {
if (main_pkg_path) |p| {
const rel_src_path = try fs.path.relative(gpa, p, src_path);
@@ -1743,6 +1741,7 @@ fn buildOutputType(
break :blk try Package.create(gpa, fs.path.dirname(src_path), fs.path.basename(src_path));
}
} else null;
+ defer if (root_pkg) |p| p.destroy(gpa);
// Transfer packages added with --pkg-begin/--pkg-end to the root package
if (root_pkg) |pkg| {
src/Module.zig
@@ -35,11 +35,8 @@ comp: *Compilation,
zig_cache_artifact_directory: Compilation.Directory,
/// Pointer to externally managed resource. `null` if there is no zig file being compiled.
root_pkg: *Package,
-/// This is populated when `@import("root")` is analysed.
-root_scope: ?*Scope.File,
-start_pkg: *Package,
/// Module owns this resource.
-start_scope: *Scope.File,
+root_scope: *Scope.File,
/// It's rare for a decl to be exported, so we save memory by having a sparse map of
/// Decl pointers to details about them being exported.
/// The Export memory is owned by the `export_owners` table; the slice itself is owned by this table.
@@ -2344,9 +2341,7 @@ pub fn deinit(mod: *Module) void {
mod.export_owners.deinit(gpa);
mod.symbol_exports.deinit(gpa);
-
- mod.start_scope.destroy(gpa);
- mod.start_pkg.destroy(gpa);
+ mod.root_scope.destroy(gpa);
var it = mod.global_error_set.iterator();
while (it.next()) |entry| {
@@ -2518,7 +2513,6 @@ fn astgenAndSemaDecl(mod: *Module, decl: *Decl) !bool {
const block_expr = node_datas[decl_node].lhs;
_ = try AstGen.comptimeExpr(&gen_scope, &gen_scope.base, .none, block_expr);
- _ = try gen_scope.addBreak(.break_inline, gen_scope.break_block, .void_value);
const code = try gen_scope.finish();
if (std.builtin.mode == .Debug and mod.comp.verbose_ir) {
@@ -2863,9 +2857,8 @@ fn astgenAndSemaFn(
_ = try AstGen.expr(&gen_scope, params_scope, .none, body_node);
- const inst_tags = astgen.instructions.items(.tag);
- if (inst_tags.len == 0 or
- !inst_tags[inst_tags.len - 1]
+ if (gen_scope.instructions.items.len == 0 or
+ !astgen.instructions.items(.tag)[gen_scope.instructions.items.len - 1]
.isNoReturn())
{
// astgen uses result location semantics to coerce return operands.
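In user-facing terms, this check decides whether AstGen has to append an implicit return after lowering a function body; the fix consults the body block's own instruction list instead of the tail of the whole `astgen` buffer. A hypothetical body that exercises the implicit-return path:

```zig
// The last instruction lowered for this body is the store through `flag`,
// which is not noreturn, so AstGen appends the implicit return itself.
fn setFlag(flag: *bool) void {
    flag.* = true;
}
```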
src/Package.zig
@@ -15,9 +15,6 @@ root_src_path: []const u8,
table: Table = .{},
parent: ?*Package = null,
-// Used when freeing packages
-seen: bool = false,
-
/// Allocate a Package. No references to the slices passed are kept.
pub fn create(
gpa: *Allocator,
@@ -58,20 +55,10 @@ pub fn destroy(pkg: *Package, gpa: *Allocator) void {
pkg.root_src_directory.handle.close();
}
- // First we recurse into all the packages and remove packages from the tables
- // once we have seen it before. We do this to make sure that that
- // a package can only be found once in the whole tree.
- if (!pkg.seen) {
- pkg.seen = true;
- pkg.markSeen(gpa);
- }
-
{
var it = pkg.table.iterator();
while (it.next()) |kv| {
- if (pkg != kv.value) {
- kv.value.destroy(gpa);
- }
+ kv.value.destroy(gpa);
gpa.free(kv.key);
}
}
@@ -80,20 +67,6 @@ pub fn destroy(pkg: *Package, gpa: *Allocator) void {
gpa.destroy(pkg);
}
-fn markSeen(pkg: *Package, gpa: *Allocator) void {
- var it = pkg.table.iterator();
- while (it.next()) |kv| {
- if (pkg != kv.value) {
- if (kv.value.seen) {
- pkg.table.removeAssertDiscard(kv.key);
- } else {
- kv.value.seen = true;
- kv.value.markSeen(gpa);
- }
- }
- }
-}
-
pub fn add(pkg: *Package, gpa: *Allocator, name: []const u8, package: *Package) !void {
try pkg.table.ensureCapacity(gpa, pkg.table.count() + 1);
const name_dupe = try mem.dupe(gpa, u8, name);
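With the `seen` bookkeeping gone, the ownership rule implied by this file is simply that every package owns the entries in its own table and the package graph is expected to form a tree. A small hypothetical sketch (names and paths made up):

```zig
const root = try Package.create(gpa, ".", "main.zig");
const foo = try Package.create(gpa, "deps/foo", "foo.zig");
const bar = try Package.create(gpa, "deps/bar", "bar.zig");
try root.add(gpa, "foo", foo);
try foo.add(gpa, "bar", bar);
// destroy() now recurses unconditionally: bar is freed, then foo, then root.
// This is only correct because no package appears in more than one table.
root.destroy(gpa);
```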
src/Sema.zig
@@ -1345,21 +1345,18 @@ fn zirExport(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!
const inst_data = sema.code.instructions.items(.data)[inst].pl_node;
const extra = sema.code.extraData(zir.Inst.Bin, inst_data.payload_index).data;
const src = inst_data.src();
+ const lhs_src: LazySrcLoc = .{ .node_offset_builtin_call_arg0 = inst_data.src_node };
+ const rhs_src: LazySrcLoc = .{ .node_offset_builtin_call_arg1 = inst_data.src_node };
- const target_fn = try sema.resolveInst(extra.lhs);
- const target_fn_val = try sema.resolveConstValue(
- block,
- .{ .node_offset_builtin_call_arg0 = inst_data.src_node },
- target_fn,
- );
-
- const export_name = try sema.resolveConstString(
- block,
- .{ .node_offset_builtin_call_arg1 = inst_data.src_node },
- extra.rhs,
- );
+ // TODO (see corresponding TODO in AstGen) this is supposed to be a `decl_ref`
+ // instruction, which could reference any decl, which is then supposed to get
+ // exported, regardless of whether or not it is a function.
+ const target_fn = try sema.resolveInstConst(block, lhs_src, extra.lhs);
+ // TODO (see corresponding TODO in AstGen) this is supposed to be
+ // `std.builtin.ExportOptions`, not a string.
+ const export_name = try sema.resolveConstString(block, rhs_src, extra.rhs);
- const actual_fn = target_fn_val.castTag(.function).?.data;
+ const actual_fn = target_fn.val.castTag(.function).?.data;
try sema.mod.analyzeExport(&block.base, src, export_name, actual_fn.owner_decl);
}
@@ -3636,26 +3633,21 @@ fn zirHasDecl(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError
const rhs_src: LazySrcLoc = .{ .node_offset_builtin_call_arg1 = inst_data.src_node };
const container_type = try sema.resolveType(block, lhs_src, extra.lhs);
const decl_name = try sema.resolveConstString(block, rhs_src, extra.rhs);
+ const mod = sema.mod;
+ const arena = sema.arena;
- const maybe_scope = container_type.getContainerScope();
- if (maybe_scope == null) {
- return sema.mod.fail(
- &block.base,
- src,
- "expected container (struct, enum, or union), found '{}'",
- .{container_type},
- );
+ const container_scope = container_type.getContainerScope() orelse return mod.fail(
+ &block.base,
+ lhs_src,
+ "expected struct, enum, union, or opaque, found '{}'",
+ .{container_type},
+ );
+ if (mod.lookupDeclName(&container_scope.base, decl_name)) |decl| {
+ // TODO if !decl.is_pub and inDifferentFiles() return false
+ return mod.constBool(arena, src, true);
+ } else {
+ return mod.constBool(arena, src, false);
}
-
- const found = blk: {
- for (maybe_scope.?.decls.items()) |kv| {
- if (mem.eql(u8, mem.spanZ(kv.key.name), decl_name))
- break :blk true;
- }
- break :blk false;
- };
-
- return sema.mod.constBool(sema.arena, src, found);
}
fn zirImport(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!*Inst {
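For reference, the user-facing behaviour implemented here (the `Wrapper` type is made up): `@hasDecl` takes a container type and a comptime name and answers whether such a declaration exists. As the TODO notes, `pub` visibility across files is not yet honoured.

```zig
const assert = @import("std").debug.assert;

const Wrapper = struct {
    pub fn init() Wrapper {
        return .{};
    }
};

comptime {
    assert(@hasDecl(Wrapper, "init"));
    assert(!@hasDecl(Wrapper, "deinit"));
}
```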
@@ -4699,7 +4691,7 @@ fn namedFieldPtr(
}
// TODO this will give false positives for structs inside the root file
- if (container_scope.file_scope == mod.root_scope.?) {
+ if (container_scope.file_scope == mod.root_scope) {
return mod.fail(
&block.base,
src,
@@ -5338,9 +5330,6 @@ fn analyzeImport(sema: *Sema, block: *Scope.Block, src: LazySrcLoc, target_strin
.ty = struct_ty,
},
};
- if (mem.eql(u8, target_string, "root")) {
- sema.mod.root_scope = file_scope;
- }
sema.mod.analyzeContainer(&file_scope.root_container) catch |err| switch (err) {
error.AnalysisFail => {
assert(sema.mod.comp.totalErrorCount() != 0);
src/zir.zig
@@ -328,8 +328,7 @@ pub const Inst = struct {
error_union_type,
/// `error.Foo` syntax. Uses the `str_tok` field of the Data union.
error_value,
- /// Exports a function with a specified name. This can be used at comptime
- /// to export a function conditionally.
+ /// Implements the `@export` builtin function.
/// Uses the `pl_node` union field. Payload is `Bin`.
@"export",
/// Given a pointer to a struct or object that contains virtual fields, returns a pointer
@@ -364,7 +363,7 @@ pub const Inst = struct {
fn_type_cc,
/// Same as `fn_type_cc` but the function is variadic.
fn_type_cc_var_args,
- /// Determines whether a container has a declaration matching name.
+ /// Implements the `@hasDecl` builtin.
/// Uses the `pl_node` union field. Payload is `Bin`.
has_decl,
/// `@import(operand)`.