Commit d2a8660d04
Changed files (16)
deps/aro/aro/Builtins/Builtin.def
@@ -17008,3 +17008,155 @@ wmemmove
.param_str = "w*w*wC*z"
.header = .wchar
.attributes = .{ .lib_function_without_prefix = true, .const_evaluable = true }
+
+__c11_atomic_init
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__c11_atomic_load
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__c11_atomic_store
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__c11_atomic_exchange
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__c11_atomic_compare_exchange_strong
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__c11_atomic_compare_exchange_weak
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__c11_atomic_fetch_add
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__c11_atomic_fetch_sub
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__c11_atomic_fetch_and
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__c11_atomic_fetch_or
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__c11_atomic_fetch_xor
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__c11_atomic_fetch_nand
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__c11_atomic_fetch_max
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__c11_atomic_fetch_min
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__atomic_load
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__atomic_load_n
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__atomic_store
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__atomic_store_n
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__atomic_exchange
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__atomic_exchange_n
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__atomic_compare_exchange
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__atomic_compare_exchange_n
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__atomic_fetch_add
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__atomic_fetch_sub
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__atomic_fetch_and
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__atomic_fetch_or
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__atomic_fetch_xor
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__atomic_fetch_nand
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__atomic_add_fetch
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__atomic_sub_fetch
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__atomic_and_fetch
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__atomic_or_fetch
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__atomic_xor_fetch
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__atomic_max_fetch
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__atomic_min_fetch
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__atomic_nand_fetch
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__atomic_fetch_min
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
+
+__atomic_fetch_max
+ .param_str = "v."
+ .attributes = .{ .custom_typecheck = true }
deps/aro/aro/Driver/Filesystem.zig
@@ -121,7 +121,7 @@ pub const Filesystem = union(enum) {
base: []const u8,
i: usize = 0,
- fn next(self: *@This()) !?std.fs.IterableDir.Entry {
+ fn next(self: *@This()) !?std.fs.Dir.Entry {
while (self.i < self.entries.len) {
const entry = self.entries[self.i];
self.i += 1;
@@ -130,7 +130,7 @@ pub const Filesystem = union(enum) {
const remaining = entry.path[self.base.len + 1 ..];
if (std.mem.indexOfScalar(u8, remaining, std.fs.path.sep) != null) continue;
const extension = std.fs.path.extension(remaining);
- const kind: std.fs.IterableDir.Entry.Kind = if (extension.len == 0) .directory else .file;
+ const kind: std.fs.Dir.Entry.Kind = if (extension.len == 0) .directory else .file;
return .{ .name = remaining, .kind = kind };
}
}
@@ -139,18 +139,18 @@ pub const Filesystem = union(enum) {
};
};
- const IterableDir = union(enum) {
- dir: std.fs.IterableDir,
+ const Dir = union(enum) {
+ dir: std.fs.Dir,
fake: FakeDir,
- pub fn iterate(self: IterableDir) Iterator {
+ pub fn iterate(self: Dir) Iterator {
return switch (self) {
.dir => |dir| .{ .iterator = dir.iterate() },
.fake => |fake| .{ .fake = fake.iterate() },
};
}
- pub fn close(self: *IterableDir) void {
+ pub fn close(self: *Dir) void {
switch (self.*) {
.dir => |*d| d.close(),
.fake => {},
@@ -159,10 +159,10 @@ pub const Filesystem = union(enum) {
};
const Iterator = union(enum) {
- iterator: std.fs.IterableDir.Iterator,
+ iterator: std.fs.Dir.Iterator,
fake: FakeDir.Iterator,
- pub fn next(self: *Iterator) std.fs.IterableDir.Iterator.Error!?std.fs.IterableDir.Entry {
+ pub fn next(self: *Iterator) std.fs.Dir.Iterator.Error!?std.fs.Dir.Entry {
return switch (self.*) {
.iterator => |*it| it.next(),
.fake => |*it| it.next(),
@@ -221,9 +221,9 @@ pub const Filesystem = union(enum) {
};
}
- pub fn openIterableDir(fs: Filesystem, dir_name: []const u8) std.fs.Dir.OpenError!IterableDir {
+ pub fn openDir(fs: Filesystem, dir_name: []const u8) std.fs.Dir.OpenError!Dir {
return switch (fs) {
- .real => .{ .dir = try std.fs.cwd().openIterableDir(dir_name, .{ .access_sub_paths = false }) },
+ .real => .{ .dir = try std.fs.cwd().openDir(dir_name, .{ .access_sub_paths = false, .iterate = true }) },
.fake => |entries| .{ .fake = .{ .entries = entries, .path = dir_name } },
};
}
deps/aro/aro/Driver/GCCDetector.zig
@@ -602,7 +602,7 @@ fn scanLibDirForGCCTriple(
const lib_suffix = std.fs.path.join(suffix_buf_fib.allocator(), &.{ base, candidate_triple }) catch continue;
const dir_name = std.fs.path.join(fib.allocator(), &.{ lib_dir, lib_suffix }) catch continue;
- var parent_dir = tc.filesystem.openIterableDir(dir_name) catch continue;
+ var parent_dir = tc.filesystem.openDir(dir_name) catch continue;
defer parent_dir.close();
var it = parent_dir.iterate();
deps/aro/aro/number_affixes.zig → deps/aro/aro/Tree/number_affixes.zig
@@ -77,6 +77,18 @@ pub const Suffix = enum {
// _Float16
F16,
+ // __float80
+ W,
+
+ // Imaginary __float80
+ IW,
+
+ // _Float128
+ Q, F128,
+
+ // Imaginary _Float128
+ IQ, IF128,
+
// Imaginary _Bitint
IWB, IUWB,
@@ -111,10 +123,16 @@ pub const Suffix = enum {
.{ .F16, &.{"F16"} },
.{ .F, &.{"F"} },
.{ .L, &.{"L"} },
+ .{ .W, &.{"W"} },
+ .{ .F128, &.{"F128"} },
+ .{ .Q, &.{"Q"} },
.{ .I, &.{"I"} },
.{ .IL, &.{ "I", "L" } },
.{ .IF, &.{ "I", "F" } },
+ .{ .IW, &.{ "I", "W" } },
+ .{ .IF128, &.{ "I", "F128" } },
+ .{ .IQ, &.{ "I", "Q" } },
};
pub fn fromString(buf: []const u8, suffix_kind: enum { int, float }) ?Suffix {
@@ -124,7 +142,7 @@ pub const Suffix = enum {
.float => FloatSuffixes,
.int => IntSuffixes,
};
- var scratch: [3]u8 = undefined;
+ var scratch: [4]u8 = undefined;
top: for (suffixes) |candidate| {
const tag = candidate[0];
const parts = candidate[1];
@@ -143,8 +161,8 @@ pub const Suffix = enum {
pub fn isImaginary(suffix: Suffix) bool {
return switch (suffix) {
- .I, .IL, .IF, .IU, .IUL, .ILL, .IULL, .IWB, .IUWB => true,
- .None, .L, .F16, .F, .U, .UL, .LL, .ULL, .WB, .UWB => false,
+ .I, .IL, .IF, .IU, .IUL, .ILL, .IULL, .IWB, .IUWB, .IF128, .IQ, .IW => true,
+ .None, .L, .F16, .F, .U, .UL, .LL, .ULL, .WB, .UWB, .F128, .Q, .W => false,
};
}
@@ -152,7 +170,7 @@ pub const Suffix = enum {
return switch (suffix) {
.None, .L, .LL, .I, .IL, .ILL, .WB, .IWB => true,
.U, .UL, .ULL, .IU, .IUL, .IULL, .UWB, .IUWB => false,
- .F, .IF, .F16 => unreachable,
+ .F, .IF, .F16, .F128, .IF128, .Q, .IQ, .W, .IW => unreachable,
};
}
deps/aro/aro/Compilation.zig
@@ -408,6 +408,17 @@ fn generateSystemDefines(comp: *Compilation, w: anytype) !void {
\\
);
+ // atomics
+ try w.writeAll(
+ \\#define __ATOMIC_RELAXED 0
+ \\#define __ATOMIC_CONSUME 1
+ \\#define __ATOMIC_ACQUIRE 2
+ \\#define __ATOMIC_RELEASE 3
+ \\#define __ATOMIC_ACQ_REL 4
+ \\#define __ATOMIC_SEQ_CST 5
+ \\
+ );
+
// types
if (comp.getCharSignedness() == .unsigned) try w.writeAll("#define __CHAR_UNSIGNED__ 1\n");
try w.writeAll("#define __CHAR_BIT__ 8\n");
@@ -445,6 +456,10 @@ fn generateSystemDefines(comp: *Compilation, w: anytype) !void {
try comp.generateSizeofType(w, "__SIZEOF_WCHAR_T__", comp.types.wchar);
// try comp.generateSizeofType(w, "__SIZEOF_WINT_T__", .{ .specifier = .pointer });
+ if (target_util.hasInt128(comp.target)) {
+ try comp.generateSizeofType(w, "__SIZEOF_INT128__", .{ .specifier = .int128 });
+ }
+
// various int types
const mapper = comp.string_interner.getSlowTypeMapper();
try generateTypeMacro(w, mapper, "__INTPTR_TYPE__", comp.types.intptr, comp.langopts);
@@ -461,6 +476,7 @@ fn generateSystemDefines(comp: *Compilation, w: anytype) !void {
try generateTypeMacro(w, mapper, "__WCHAR_TYPE__", comp.types.wchar, comp.langopts);
try comp.generateExactWidthTypes(w, mapper);
+ try comp.generateFastAndLeastWidthTypes(w, mapper);
if (target_util.FPSemantics.halfPrecisionType(comp.target)) |half| {
try generateFloatMacros(w, "FLT16", half, "F16");
@@ -497,10 +513,11 @@ pub fn generateBuiltinMacros(comp: *Compilation, system_defines_mode: SystemDefi
);
}
+ try buf.appendSlice("#define __STDC__ 1\n");
+ try buf.writer().print("#define __STDC_HOSTED__ {d}\n", .{@intFromBool(comp.target.os.tag != .freestanding)});
+
// standard macros
try buf.appendSlice(
- \\#define __STDC__ 1
- \\#define __STDC_HOSTED__ 1
\\#define __STDC_NO_ATOMICS__ 1
\\#define __STDC_NO_COMPLEX__ 1
\\#define __STDC_NO_THREADS__ 1
@@ -678,6 +695,14 @@ fn generateBuiltinTypes(comp: *Compilation) !void {
/// Smallest integer type with at least N bits
fn intLeastN(comp: *const Compilation, bits: usize, signedness: std.builtin.Signedness) Type {
+ if (bits == 64 and (comp.target.isDarwin() or comp.target.isWasm())) {
+ // WebAssembly and Darwin use `long long` for `int_least64_t` and `int_fast64_t`.
+ return .{ .specifier = if (signedness == .signed) .long_long else .ulong_long };
+ }
+ if (bits == 16 and comp.target.cpu.arch == .avr) {
+ // AVR uses int for int_least16_t and int_fast16_t.
+ return .{ .specifier = if (signedness == .signed) .int else .uint };
+ }
const candidates = switch (signedness) {
.signed => &[_]Type.Specifier{ .schar, .short, .int, .long, .long_long },
.unsigned => &[_]Type.Specifier{ .uchar, .ushort, .uint, .ulong, .ulong_long },
@@ -693,6 +718,52 @@ fn intSize(comp: *const Compilation, specifier: Type.Specifier) u64 {
return ty.sizeof(comp).?;
}
+fn generateFastOrLeastType(
+ comp: *Compilation,
+ bits: usize,
+ kind: enum { least, fast },
+ signedness: std.builtin.Signedness,
+ w: anytype,
+ mapper: StrInt.TypeMapper,
+) !void {
+ const ty = comp.intLeastN(bits, signedness); // defining the fast types as the least types is permitted
+
+ var buf: [32]u8 = undefined;
+ const suffix = "_TYPE__";
+ const base_name = switch (signedness) {
+ .signed => "__INT_",
+ .unsigned => "__UINT_",
+ };
+ const kind_str = switch (kind) {
+ .fast => "FAST",
+ .least => "LEAST",
+ };
+
+ const full = std.fmt.bufPrint(&buf, "{s}{s}{d}{s}", .{
+ base_name, kind_str, bits, suffix,
+ }) catch return error.OutOfMemory;
+
+ try generateTypeMacro(w, mapper, full, ty, comp.langopts);
+
+ const prefix = full[2 .. full.len - suffix.len]; // remove "__" and "_TYPE__"
+
+ switch (signedness) {
+ .signed => try comp.generateIntMaxAndWidth(w, prefix, ty),
+ .unsigned => try comp.generateIntMax(w, prefix, ty),
+ }
+ try comp.generateFmt(prefix, w, ty);
+}
+
+fn generateFastAndLeastWidthTypes(comp: *Compilation, w: anytype, mapper: StrInt.TypeMapper) !void {
+ const sizes = [_]usize{ 8, 16, 32, 64 };
+ for (sizes) |size| {
+ try comp.generateFastOrLeastType(size, .least, .signed, w, mapper);
+ try comp.generateFastOrLeastType(size, .least, .unsigned, w, mapper);
+ try comp.generateFastOrLeastType(size, .fast, .signed, w, mapper);
+ try comp.generateFastOrLeastType(size, .fast, .unsigned, w, mapper);
+ }
+}
+
fn generateExactWidthTypes(comp: *const Compilation, w: anytype, mapper: StrInt.TypeMapper) !void {
try comp.generateExactWidthType(w, mapper, .schar);
deps/aro/aro/Diagnostics.zig
@@ -236,7 +236,7 @@ pub fn set(d: *Diagnostics, name: []const u8, to: Kind) !void {
try d.addExtra(.{}, .{
.tag = .unknown_warning,
.extra = .{ .str = name },
- }, &.{});
+ }, &.{}, true);
}
pub fn init(gpa: Allocator) Diagnostics {
@@ -251,7 +251,7 @@ pub fn deinit(d: *Diagnostics) void {
}
pub fn add(comp: *Compilation, msg: Message, expansion_locs: []const Source.Location) Compilation.Error!void {
- return comp.diagnostics.addExtra(comp.langopts, msg, expansion_locs);
+ return comp.diagnostics.addExtra(comp.langopts, msg, expansion_locs, true);
}
pub fn addExtra(
@@ -259,6 +259,7 @@ pub fn addExtra(
langopts: LangOpts,
msg: Message,
expansion_locs: []const Source.Location,
+ note_msg_loc: bool,
) Compilation.Error!void {
const kind = d.tagKind(msg.tag, langopts);
if (kind == .off) return;
@@ -301,7 +302,7 @@ pub fn addExtra(
}
}
- d.list.appendAssumeCapacity(.{
+ if (note_msg_loc) d.list.appendAssumeCapacity(.{
.tag = .expanded_from_here,
.kind = .note,
.loc = msg.loc,
deps/aro/aro/Driver.zig
@@ -98,8 +98,10 @@ pub const usage =
\\ -fno-declspec Disable support for __declspec attributes
\\ -ffp-eval-method=[source|double|extended]
\\ Evaluation method to use for floating-point arithmetic
+ \\ -ffreestanding Compilation in a freestanding environment
\\ -fgnu-inline-asm Enable GNU style inline asm (default: enabled)
\\ -fno-gnu-inline-asm Disable GNU style inline asm
+ \\ -fhosted Compilation in a hosted environment
\\ -fms-extensions Enable support for Microsoft extensions
\\ -fno-ms-extensions Disable support for Microsoft extensions
\\ -fdollars-in-identifiers
@@ -177,6 +179,7 @@ pub fn parseArgs(
) !bool {
var i: usize = 1;
var comment_arg: []const u8 = "";
+ var hosted: ?bool = null;
while (i < args.len) : (i += 1) {
const arg = args[i];
if (mem.startsWith(u8, arg, "-") and arg.len > 1) {
@@ -277,6 +280,10 @@ pub fn parseArgs(
d.comp.langopts.declspec_attrs = true;
} else if (mem.eql(u8, arg, "-fno-declspec")) {
d.comp.langopts.declspec_attrs = false;
+ } else if (mem.eql(u8, arg, "-ffreestanding")) {
+ hosted = false;
+ } else if (mem.eql(u8, arg, "-fhosted")) {
+ hosted = true;
} else if (mem.eql(u8, arg, "-fms-extensions")) {
d.comp.langopts.enableMSExtensions();
} else if (mem.eql(u8, arg, "-fno-ms-extensions")) {
@@ -440,6 +447,15 @@ pub fn parseArgs(
if (d.comp.langopts.preserve_comments and !d.only_preprocess) {
return d.fatal("invalid argument '{s}' only allowed with '-E'", .{comment_arg});
}
+ if (hosted) |is_hosted| {
+ if (is_hosted) {
+ if (d.comp.target.os.tag == .freestanding) {
+ return d.fatal("Cannot use freestanding target with `-fhosted`", .{});
+ }
+ } else {
+ d.comp.target.os.tag = .freestanding;
+ }
+ }
return false;
}
deps/aro/aro/Parser.zig
@@ -9,6 +9,8 @@ const Tokenizer = @import("Tokenizer.zig");
const Preprocessor = @import("Preprocessor.zig");
const Tree = @import("Tree.zig");
const Token = Tree.Token;
+const NumberPrefix = Token.NumberPrefix;
+const NumberSuffix = Token.NumberSuffix;
const TokenIndex = Tree.TokenIndex;
const NodeIndex = Tree.NodeIndex;
const Type = @import("Type.zig");
@@ -24,9 +26,6 @@ const Symbol = SymbolStack.Symbol;
const record_layout = @import("record_layout.zig");
const StrInt = @import("StringInterner.zig");
const StringId = StrInt.StringId;
-const number_affixes = @import("number_affixes.zig");
-const NumberPrefix = number_affixes.Prefix;
-const NumberSuffix = number_affixes.Suffix;
const Builtins = @import("Builtins.zig");
const Builtin = Builtins.Builtin;
const target_util = @import("target.zig");
@@ -323,7 +322,7 @@ fn expectIdentifier(p: *Parser) Error!TokenIndex {
return p.errExpectedToken(.identifier, actual);
}
- return (try p.eatIdentifier()) orelse unreachable;
+ return (try p.eatIdentifier()) orelse error.ParsingFailed;
}
fn eatToken(p: *Parser, id: Token.Id) ?TokenIndex {
@@ -347,7 +346,7 @@ pub fn tokSlice(p: *Parser, tok: TokenIndex) []const u8 {
const loc = p.pp.tokens.items(.loc)[tok];
var tmp_tokenizer = Tokenizer{
.buf = p.comp.getSource(loc.id).buf,
- .comp = p.comp,
+ .langopts = p.comp.langopts,
.index = loc.byte_offset,
.source = .generated,
};
@@ -715,6 +714,9 @@ pub fn parse(pp: *Preprocessor) Compilation.Error!Tree {
p.field_attr_buf.deinit();
}
+ try p.syms.pushScope(&p);
+ defer p.syms.popScope();
+
// NodeIndex 0 must be invalid
_ = try p.addNode(.{ .tag = .invalid, .ty = undefined, .data = undefined });
@@ -1010,7 +1012,7 @@ fn decl(p: *Parser) Error!bool {
// Collect old style parameter declarations.
if (init_d.d.old_style_func != null) {
const attrs = init_d.d.ty.getAttributes();
- var base_ty = if (init_d.d.ty.specifier == .attributed) init_d.d.ty.elemType() else init_d.d.ty;
+ var base_ty = if (init_d.d.ty.specifier == .attributed) init_d.d.ty.data.attributed.base else init_d.d.ty;
base_ty.specifier = .func;
init_d.d.ty = try base_ty.withAttributes(p.arena, attrs);
@@ -1066,7 +1068,7 @@ fn decl(p: *Parser) Error!bool {
d.ty = try Attribute.applyParameterAttributes(p, d.ty, attr_buf_top_declarator, .alignas_on_param);
// bypass redefinition check to avoid duplicate errors
- try p.syms.syms.append(p.gpa, .{
+ try p.syms.define(p.gpa, .{
.kind = .def,
.name = interned_name,
.tok = d.name,
@@ -1088,7 +1090,7 @@ fn decl(p: *Parser) Error!bool {
}
// bypass redefinition check to avoid duplicate errors
- try p.syms.syms.append(p.gpa, .{
+ try p.syms.define(p.gpa, .{
.kind = .def,
.name = param.name,
.tok = param.name_tok,
@@ -1428,12 +1430,14 @@ fn typeof(p: *Parser) Error!?Type {
.data = typeof_expr.ty.data,
.qual = if (unqual) .{} else typeof_expr.ty.qual.inheritFromTypeof(),
.specifier = typeof_expr.ty.specifier,
+ .decayed = typeof_expr.ty.decayed,
},
};
return Type{
.data = .{ .expr = inner },
.specifier = .typeof_expr,
+ .decayed = typeof_expr.ty.decayed,
};
}
@@ -1814,6 +1818,7 @@ fn initDeclarator(p: *Parser, decl_spec: *DeclSpec, attr_buf_top: usize) Error!?
} else {
init_d.d.ty.specifier = init_d.initializer.ty.specifier;
init_d.d.ty.data = init_d.initializer.ty.data;
+ init_d.d.ty.decayed = init_d.initializer.ty.decayed;
}
}
if (apply_var_attributes) {
@@ -2105,7 +2110,7 @@ fn recordSpec(p: *Parser) Error!Type {
.specifier = if (is_struct) .@"struct" else .@"union",
.data = .{ .record = record_ty },
}, attr_buf_top, null);
- try p.syms.syms.append(p.gpa, .{
+ try p.syms.define(p.gpa, .{
.kind = if (is_struct) .@"struct" else .@"union",
.name = interned_name,
.tok = ident,
@@ -2151,10 +2156,8 @@ fn recordSpec(p: *Parser) Error!Type {
// declare a symbol for the type
// We need to replace the symbol's type if it has attributes
- var symbol_index: ?usize = null;
if (maybe_ident != null and !defined) {
- symbol_index = p.syms.syms.len;
- try p.syms.syms.append(p.gpa, .{
+ try p.syms.define(p.gpa, .{
.kind = if (is_struct) .@"struct" else .@"union",
.name = record_ty.name,
.tok = maybe_ident.?,
@@ -2216,8 +2219,11 @@ fn recordSpec(p: *Parser) Error!Type {
.specifier = if (is_struct) .@"struct" else .@"union",
.data = .{ .record = record_ty },
}, attr_buf_top, null);
- if (ty.specifier == .attributed and symbol_index != null) {
- p.syms.syms.items(.ty)[symbol_index.?] = ty;
+ if (ty.specifier == .attributed and maybe_ident != null) {
+ const ident_str = p.tokSlice(maybe_ident.?);
+ const interned_name = try StrInt.intern(p.comp, ident_str);
+ const ptr = p.syms.getPtr(interned_name, .tags);
+ ptr.ty = ty;
}
if (!ty.hasIncompleteSize()) {
@@ -2474,7 +2480,7 @@ fn enumSpec(p: *Parser) Error!Type {
.specifier = .@"enum",
.data = .{ .@"enum" = enum_ty },
}, attr_buf_top, null);
- try p.syms.syms.append(p.gpa, .{
+ try p.syms.define(p.gpa, .{
.kind = .@"enum",
.name = interned_name,
.tok = ident,
@@ -2525,7 +2531,6 @@ fn enumSpec(p: *Parser) Error!Type {
p.enum_buf.items.len = enum_buf_top;
}
- const sym_stack_top = p.syms.syms.len;
var e = Enumerator.init(fixed_ty);
while (try p.enumerator(&e)) |field_and_node| {
try p.enum_buf.append(field_and_node.field);
@@ -2551,13 +2556,12 @@ fn enumSpec(p: *Parser) Error!Type {
const field_nodes = p.list_buf.items[list_buf_top..];
if (fixed_ty == null) {
- const vals = p.syms.syms.items(.val)[sym_stack_top..];
- const types = p.syms.syms.items(.ty)[sym_stack_top..];
-
for (enum_fields, 0..) |*field, i| {
if (field.ty.eql(Type.int, p.comp, false)) continue;
- var res = Result{ .node = field.node, .ty = field.ty, .val = vals[i] };
+ const sym = p.syms.get(field.name, .vars) orelse continue;
+
+ var res = Result{ .node = field.node, .ty = field.ty, .val = sym.val };
const dest_ty = if (p.comp.fixedEnumTagSpecifier()) |some|
Type{ .specifier = some }
else if (try res.intFitsInType(p, Type.int))
@@ -2567,8 +2571,9 @@ fn enumSpec(p: *Parser) Error!Type {
else
continue;
- try vals[i].intCast(dest_ty, p.comp);
- types[i] = dest_ty;
+ const symbol = p.syms.getPtr(field.name, .vars);
+ try symbol.val.intCast(dest_ty, p.comp);
+ symbol.ty = dest_ty;
p.nodes.items(.ty)[@intFromEnum(field_nodes[i])] = dest_ty;
field.ty = dest_ty;
res.ty = dest_ty;
@@ -2585,7 +2590,7 @@ fn enumSpec(p: *Parser) Error!Type {
// declare a symbol for the type
if (maybe_ident != null and !defined) {
- try p.syms.syms.append(p.gpa, .{
+ try p.syms.define(p.gpa, .{
.kind = .@"enum",
.name = enum_ty.name,
.ty = ty,
@@ -2885,7 +2890,7 @@ fn declarator(
try res.ty.combine(outer);
try res.ty.validateCombinedType(p, suffix_start);
res.old_style_func = d.old_style_func;
- res.func_declarator = d.func_declarator;
+ if (d.func_declarator) |some| res.func_declarator = some;
return res;
}
@@ -4376,7 +4381,7 @@ fn stmt(p: *Parser) Error!NodeIndex {
/// | keyword_default ':' stmt
fn labeledStmt(p: *Parser) Error!?NodeIndex {
if ((p.tok_ids[p.tok_i] == .identifier or p.tok_ids[p.tok_i] == .extended_identifier) and p.tok_ids[p.tok_i + 1] == .colon) {
- const name_tok = p.expectIdentifier() catch unreachable;
+ const name_tok = try p.expectIdentifier();
const str = p.tokSlice(name_tok);
if (p.findLabel(str)) |some| {
try p.errStr(.duplicate_label, name_tok, str);
@@ -4814,10 +4819,23 @@ const CallExpr = union(enum) {
/// of arguments, `paramCountOverride` is used to tell us how many arguments we should actually expect to see for
/// these custom-typechecked functions.
fn paramCountOverride(self: CallExpr) ?u32 {
+ @setEvalBranchQuota(10_000);
return switch (self) {
.standard => null,
.builtin => |builtin| switch (builtin.tag) {
Builtin.tagFromName("__builtin_complex").? => 2,
+
+ Builtin.tagFromName("__atomic_fetch_add").?,
+ Builtin.tagFromName("__atomic_fetch_sub").?,
+ Builtin.tagFromName("__atomic_fetch_and").?,
+ Builtin.tagFromName("__atomic_fetch_xor").?,
+ Builtin.tagFromName("__atomic_fetch_or").?,
+ Builtin.tagFromName("__atomic_fetch_nand").?,
+ => 3,
+
+ Builtin.tagFromName("__atomic_compare_exchange").?,
+ Builtin.tagFromName("__atomic_compare_exchange_n").?,
+ => 6,
else => null,
},
};
@@ -4827,10 +4845,25 @@ const CallExpr = union(enum) {
return switch (self) {
.standard => callable_ty.returnType(),
.builtin => |builtin| switch (builtin.tag) {
+ Builtin.tagFromName("__atomic_fetch_add").?,
+ Builtin.tagFromName("__atomic_fetch_sub").?,
+ Builtin.tagFromName("__atomic_fetch_and").?,
+ Builtin.tagFromName("__atomic_fetch_xor").?,
+ Builtin.tagFromName("__atomic_fetch_or").?,
+ Builtin.tagFromName("__atomic_fetch_nand").?,
+ => {
+ if (p.list_buf.items.len < 2) return Type.invalid; // not enough arguments; already an error
+ const second_param = p.list_buf.items[p.list_buf.items.len - 2];
+ return p.nodes.items(.ty)[@intFromEnum(second_param)];
+ },
Builtin.tagFromName("__builtin_complex").? => {
+ if (p.list_buf.items.len < 1) return Type.invalid; // not enough arguments; already an error
const last_param = p.list_buf.items[p.list_buf.items.len - 1];
return p.nodes.items(.ty)[@intFromEnum(last_param)].makeComplex();
},
+ Builtin.tagFromName("__atomic_compare_exchange").?,
+ Builtin.tagFromName("__atomic_compare_exchange_n").?,
+ => .{ .specifier = .bool },
else => callable_ty.returnType(),
},
};
@@ -7458,7 +7491,7 @@ fn primaryExpr(p: *Parser) Error!Result {
}
switch (p.tok_ids[p.tok_i]) {
.identifier, .extended_identifier => {
- const name_tok = p.expectIdentifier() catch unreachable;
+ const name_tok = try p.expectIdentifier();
const name = p.tokSlice(name_tok);
const interned_name = try StrInt.intern(p.comp, name);
if (p.syms.findSymbol(interned_name)) |sym| {
@@ -7938,6 +7971,8 @@ fn parseFloat(p: *Parser, buf: []const u8, suffix: NumberSuffix) !Result {
.F, .IF => .float,
.F16 => .float16,
.L, .IL => .long_double,
+ .W, .IW => .float80,
+ .Q, .IQ, .F128, .IF128 => .float128,
else => unreachable,
} };
const val = try Value.intern(p.comp, key: {
@@ -7946,7 +7981,7 @@ fn parseFloat(p: *Parser, buf: []const u8, suffix: NumberSuffix) !Result {
const strings_top = p.strings.items.len;
defer p.strings.items.len = strings_top;
for (buf) |c| {
- if (c != '_') p.strings.appendAssumeCapacity(c);
+ if (c != '\'') p.strings.appendAssumeCapacity(c);
}
const float = std.fmt.parseFloat(f128, p.strings.items[strings_top..]) catch unreachable;
@@ -7971,6 +8006,8 @@ fn parseFloat(p: *Parser, buf: []const u8, suffix: NumberSuffix) !Result {
.I => .complex_double,
.IF => .complex_float,
.IL => .complex_long_double,
+ .IW => .complex_float80,
+ .IQ, .IF128 => .complex_float128,
else => unreachable,
} };
res.val = .{}; // TODO add complex values
@@ -8123,11 +8160,21 @@ fn bitInt(p: *Parser, base: u8, buf: []const u8, suffix: NumberSuffix, tok_i: To
var managed = try big.int.Managed.init(p.gpa);
defer managed.deinit();
- managed.setString(base, buf) catch |e| switch (e) {
- error.InvalidBase => unreachable, // `base` is one of 2, 8, 10, 16
- error.InvalidCharacter => unreachable, // digits validated by Tokenizer
- else => |er| return er,
- };
+ {
+ try p.strings.ensureUnusedCapacity(buf.len);
+
+ const strings_top = p.strings.items.len;
+ defer p.strings.items.len = strings_top;
+ for (buf) |c| {
+ if (c != '\'') p.strings.appendAssumeCapacity(c);
+ }
+
+ managed.setString(base, p.strings.items[strings_top..]) catch |e| switch (e) {
+ error.InvalidBase => unreachable, // `base` is one of 2, 8, 10, 16
+ error.InvalidCharacter => unreachable, // digits validated by Tokenizer
+ else => |er| return er,
+ };
+ }
const c = managed.toConst();
const bits_needed: std.math.IntFittingRange(0, Compilation.bit_int_max_bits) = blk: {
// Literal `0` requires at least 1 bit
deps/aro/aro/Preprocessor.zig
@@ -315,7 +315,7 @@ fn invalidTokenDiagnostic(tok_id: Token.Id) Diagnostics.Tag {
fn findIncludeGuard(pp: *Preprocessor, source: Source) ?[]const u8 {
var tokenizer = Tokenizer{
.buf = source.buf,
- .comp = pp.comp,
+ .langopts = pp.comp.langopts,
.source = source.id,
};
var hash = tokenizer.nextNoWS();
@@ -334,7 +334,7 @@ fn preprocessExtra(pp: *Preprocessor, source: Source) MacroError!Token {
pp.preprocess_count += 1;
var tokenizer = Tokenizer{
.buf = source.buf,
- .comp = pp.comp,
+ .langopts = pp.comp.langopts,
.source = source.id,
};
@@ -747,6 +747,17 @@ fn fatal(pp: *Preprocessor, raw: RawToken, comptime fmt: []const u8, args: anyty
return error.FatalError;
}
+fn fatalNotFound(pp: *Preprocessor, tok: Token, filename: []const u8) Compilation.Error {
+ const old = pp.comp.diagnostics.fatal_errors;
+ pp.comp.diagnostics.fatal_errors = true;
+ defer pp.comp.diagnostics.fatal_errors = old;
+
+ try pp.comp.diagnostics.addExtra(pp.comp.langopts, .{ .tag = .cli_error, .loc = tok.loc, .extra = .{
+ .str = try std.fmt.allocPrint(pp.comp.diagnostics.arena.allocator(), "'{s}' not found", .{filename}),
+ } }, tok.expansionSlice(), false);
+ unreachable; // addExtra should've returned FatalError
+}
+
fn verboseLog(pp: *Preprocessor, raw: RawToken, comptime fmt: []const u8, args: anytype) void {
const source = pp.comp.getSource(raw.source);
const line_col = source.lineCol(.{ .id = raw.source, .line = raw.line, .byte_offset = raw.start });
@@ -1185,7 +1196,7 @@ fn pragmaOperator(pp: *Preprocessor, arg_tok: Token, operator_loc: Source.Locati
try pp.comp.generated_buf.appendSlice(pp.gpa, pp.char_buf.items);
var tmp_tokenizer = Tokenizer{
.buf = pp.comp.generated_buf.items,
- .comp = pp.comp,
+ .langopts = pp.comp.langopts,
.index = @intCast(start),
.source = .generated,
.line = pp.generated_line,
@@ -1864,7 +1875,7 @@ fn expandVaOpt(
.buf = source.buf,
.index = raw.start,
.source = raw.source,
- .comp = pp.comp,
+ .langopts = pp.comp.langopts,
.line = raw.line,
};
while (tokenizer.index < raw.end) {
@@ -2282,11 +2293,11 @@ fn expandMacro(pp: *Preprocessor, tokenizer: *Tokenizer, raw: RawToken) MacroErr
fn expandedSliceExtra(pp: *const Preprocessor, tok: Token, macro_ws_handling: enum { single_macro_ws, preserve_macro_ws }) []const u8 {
if (tok.id.lexeme()) |some| {
- if (!tok.id.allowsDigraphs(pp.comp) and !(tok.id == .macro_ws and macro_ws_handling == .preserve_macro_ws)) return some;
+ if (!tok.id.allowsDigraphs(pp.comp.langopts) and !(tok.id == .macro_ws and macro_ws_handling == .preserve_macro_ws)) return some;
}
var tmp_tokenizer = Tokenizer{
.buf = pp.comp.getSource(tok.loc.id).buf,
- .comp = pp.comp,
+ .langopts = pp.comp.langopts,
.index = tok.loc.byte_offset,
.source = .generated,
};
@@ -2340,7 +2351,7 @@ fn pasteTokens(pp: *Preprocessor, lhs_toks: *ExpandBuf, rhs_toks: []const Token)
// Try to tokenize the result.
var tmp_tokenizer = Tokenizer{
.buf = pp.comp.generated_buf.items,
- .comp = pp.comp,
+ .langopts = pp.comp.langopts,
.index = @intCast(start),
.source = .generated,
};
@@ -2703,6 +2714,7 @@ fn embed(pp: *Preprocessor, tokenizer: *Tokenizer) MacroError!void {
error.InvalidInclude => return,
else => |e| return e,
};
+ defer Token.free(filename_tok.expansion_locs, pp.gpa);
// Check for empty filename.
const tok_slice = pp.expandedSliceExtra(filename_tok, .single_macro_ws);
@@ -2836,7 +2848,7 @@ fn embed(pp: *Preprocessor, tokenizer: *Tokenizer) MacroError!void {
}
const embed_bytes = (try pp.comp.findEmbed(filename, first.source, include_type, limit)) orelse
- return pp.fatal(first, "'{s}' not found", .{filename});
+ return pp.fatalNotFound(filename_tok, filename);
defer pp.comp.gpa.free(embed_bytes);
try Range.expand(prefix, pp, tokenizer);
@@ -2984,8 +2996,6 @@ fn findIncludeFilenameToken(
tokenizer: *Tokenizer,
trailing_token_behavior: enum { ignore_trailing_tokens, expect_nl_eof },
) !Token {
- const start = pp.tokens.len;
- defer pp.tokens.len = start;
var first = first_token;
if (first.id == .angle_bracket_left) to_end: {
@@ -3008,35 +3018,60 @@ fn findIncludeFilenameToken(
}, &.{});
try pp.err(first, .header_str_match);
}
- // Try to expand if the argument is a macro.
- try pp.expandMacro(tokenizer, first);
- // Check that we actually got a string.
- const filename_tok = pp.tokens.get(start);
- switch (filename_tok.id) {
- .string_literal, .macro_string => {},
- else => {
- try pp.err(first, .expected_filename);
- try pp.expectNl(tokenizer);
- return error.InvalidInclude;
+ const source_tok = tokFromRaw(first);
+ const filename_tok, const expanded_trailing = switch (source_tok.id) {
+ .string_literal, .macro_string => .{ source_tok, false },
+ else => expanded: {
+ // Try to expand if the argument is a macro.
+ pp.top_expansion_buf.items.len = 0;
+ defer for (pp.top_expansion_buf.items) |tok| Token.free(tok.expansion_locs, pp.gpa);
+ try pp.top_expansion_buf.append(source_tok);
+ pp.expansion_source_loc = source_tok.loc;
+
+ try pp.expandMacroExhaustive(tokenizer, &pp.top_expansion_buf, 0, 1, true, .non_expr);
+ var trailing_toks: []const Token = &.{};
+ const include_str = (try pp.reconstructIncludeString(pp.top_expansion_buf.items, &trailing_toks)) orelse {
+ try pp.err(first, .expected_filename);
+ try pp.expectNl(tokenizer);
+ return error.InvalidInclude;
+ };
+ const start = pp.comp.generated_buf.items.len;
+ try pp.comp.generated_buf.appendSlice(pp.gpa, include_str);
+
+ break :expanded .{ try pp.makeGeneratedToken(start, switch (include_str[0]) {
+ '"' => .string_literal,
+ '<' => .macro_string,
+ else => unreachable,
+ }, pp.top_expansion_buf.items[0]), trailing_toks.len != 0 };
},
- }
+ };
+
switch (trailing_token_behavior) {
.expect_nl_eof => {
// Error on extra tokens.
const nl = tokenizer.nextNoWS();
- if ((nl.id != .nl and nl.id != .eof) or pp.tokens.len > start + 1) {
+ if ((nl.id != .nl and nl.id != .eof) or expanded_trailing) {
skipToNl(tokenizer);
- try pp.err(first, .extra_tokens_directive_end);
+ try pp.comp.diagnostics.addExtra(pp.comp.langopts, .{
+ .tag = .extra_tokens_directive_end,
+ .loc = filename_tok.loc,
+ }, filename_tok.expansionSlice(), false);
}
},
- .ignore_trailing_tokens => {},
+ .ignore_trailing_tokens => if (expanded_trailing) {
+ try pp.comp.diagnostics.addExtra(pp.comp.langopts, .{
+ .tag = .extra_tokens_directive_end,
+ .loc = filename_tok.loc,
+ }, filename_tok.expansionSlice(), false);
+ },
}
return filename_tok;
}
fn findIncludeSource(pp: *Preprocessor, tokenizer: *Tokenizer, first: RawToken, which: Compilation.WhichInclude) !Source {
const filename_tok = try pp.findIncludeFilenameToken(first, tokenizer, .expect_nl_eof);
+ defer Token.free(filename_tok.expansion_locs, pp.gpa);
// Check for empty filename.
const tok_slice = pp.expandedSliceExtra(filename_tok, .single_macro_ws);
@@ -3054,7 +3089,7 @@ fn findIncludeSource(pp: *Preprocessor, tokenizer: *Tokenizer, first: RawToken,
};
return (try pp.comp.findInclude(filename, first, include_type, which)) orelse
- pp.fatal(first, "'{s}' not found", .{filename});
+ return pp.fatalNotFound(filename_tok, filename);
}
fn printLinemarker(
deps/aro/aro/SymbolStack.zig
@@ -11,6 +11,8 @@ const Parser = @import("Parser.zig");
const Value = @import("Value.zig");
const StringId = @import("StringInterner.zig").StringId;
+const SymbolStack = @This();
+
pub const Symbol = struct {
name: StringId,
ty: Type,
@@ -31,72 +33,74 @@ pub const Kind = enum {
constexpr,
};
-const SymbolStack = @This();
+scopes: std.ArrayListUnmanaged(Scope) = .{},
+/// allocations from nested scopes are retained after popping; `active_len` is the number
+/// of currently-active items in `scopes`.
+active_len: usize = 0,
-syms: std.MultiArrayList(Symbol) = .{},
-scopes: std.ArrayListUnmanaged(u32) = .{},
+const Scope = struct {
+ vars: std.AutoHashMapUnmanaged(StringId, Symbol) = .{},
+ tags: std.AutoHashMapUnmanaged(StringId, Symbol) = .{},
+
+ fn deinit(self: *Scope, allocator: Allocator) void {
+ self.vars.deinit(allocator);
+ self.tags.deinit(allocator);
+ }
+
+ fn clearRetainingCapacity(self: *Scope) void {
+ self.vars.clearRetainingCapacity();
+ self.tags.clearRetainingCapacity();
+ }
+};
pub fn deinit(s: *SymbolStack, gpa: Allocator) void {
- s.syms.deinit(gpa);
+ std.debug.assert(s.active_len == 0); // all scopes should have been popped
+ for (s.scopes.items) |*scope| {
+ scope.deinit(gpa);
+ }
s.scopes.deinit(gpa);
s.* = undefined;
}
-pub fn scopeEnd(s: SymbolStack) u32 {
- if (s.scopes.items.len == 0) return 0;
- return s.scopes.items[s.scopes.items.len - 1];
-}
-
pub fn pushScope(s: *SymbolStack, p: *Parser) !void {
- try s.scopes.append(p.gpa, @intCast(s.syms.len));
+ if (s.active_len + 1 > s.scopes.items.len) {
+ try s.scopes.append(p.gpa, .{});
+ s.active_len = s.scopes.items.len;
+ } else {
+ s.scopes.items[s.active_len].clearRetainingCapacity();
+ s.active_len += 1;
+ }
}
pub fn popScope(s: *SymbolStack) void {
- s.syms.len = s.scopes.pop();
+ s.active_len -= 1;
}
pub fn findTypedef(s: *SymbolStack, p: *Parser, name: StringId, name_tok: TokenIndex, no_type_yet: bool) !?Symbol {
- const kinds = s.syms.items(.kind);
- const names = s.syms.items(.name);
- var i = s.syms.len;
- while (i > 0) {
- i -= 1;
- switch (kinds[i]) {
- .typedef => if (names[i] == name) return s.syms.get(i),
- .@"struct" => if (names[i] == name) {
- if (no_type_yet) return null;
- try p.errStr(.must_use_struct, name_tok, p.tokSlice(name_tok));
- return s.syms.get(i);
- },
- .@"union" => if (names[i] == name) {
- if (no_type_yet) return null;
- try p.errStr(.must_use_union, name_tok, p.tokSlice(name_tok));
- return s.syms.get(i);
- },
- .@"enum" => if (names[i] == name) {
- if (no_type_yet) return null;
- try p.errStr(.must_use_enum, name_tok, p.tokSlice(name_tok));
- return s.syms.get(i);
- },
- .def, .decl, .constexpr => if (names[i] == name) return null,
- else => {},
- }
+ const prev = s.lookup(name, .vars) orelse s.lookup(name, .tags) orelse return null;
+ switch (prev.kind) {
+ .typedef => return prev,
+ .@"struct" => {
+ if (no_type_yet) return null;
+ try p.errStr(.must_use_struct, name_tok, p.tokSlice(name_tok));
+ return prev;
+ },
+ .@"union" => {
+ if (no_type_yet) return null;
+ try p.errStr(.must_use_union, name_tok, p.tokSlice(name_tok));
+ return prev;
+ },
+ .@"enum" => {
+ if (no_type_yet) return null;
+ try p.errStr(.must_use_enum, name_tok, p.tokSlice(name_tok));
+ return prev;
+ },
+ else => return null,
}
- return null;
}
pub fn findSymbol(s: *SymbolStack, name: StringId) ?Symbol {
- const kinds = s.syms.items(.kind);
- const names = s.syms.items(.name);
- var i = s.syms.len;
- while (i > 0) {
- i -= 1;
- switch (kinds[i]) {
- .def, .decl, .enumeration, .constexpr => if (names[i] == name) return s.syms.get(i),
- else => {},
- }
- }
- return null;
+ return s.lookup(name, .vars);
}
pub fn findTag(
@@ -107,36 +111,62 @@ pub fn findTag(
name_tok: TokenIndex,
next_tok_id: Token.Id,
) !?Symbol {
- const kinds = s.syms.items(.kind);
- const names = s.syms.items(.name);
// `tag Name;` should always result in a new type if in a new scope.
- const end = if (next_tok_id == .semicolon) s.scopeEnd() else 0;
- var i = s.syms.len;
- while (i > end) {
+ const prev = (if (next_tok_id == .semicolon) s.get(name, .tags) else s.lookup(name, .tags)) orelse return null;
+ switch (prev.kind) {
+ .@"enum" => if (kind == .keyword_enum) return prev,
+ .@"struct" => if (kind == .keyword_struct) return prev,
+ .@"union" => if (kind == .keyword_union) return prev,
+ else => unreachable,
+ }
+ if (s.get(name, .tags) == null) return null;
+ try p.errStr(.wrong_tag, name_tok, p.tokSlice(name_tok));
+ try p.errTok(.previous_definition, prev.tok);
+ return null;
+}
+
+const ScopeKind = enum {
+ /// structs, enums, unions
+ tags,
+ /// everything else
+ vars,
+};
+
+/// Return the Symbol for `name` (or null if not found) in the innermost scope
+pub fn get(s: *SymbolStack, name: StringId, kind: ScopeKind) ?Symbol {
+ return switch (kind) {
+ .vars => s.scopes.items[s.active_len - 1].vars.get(name),
+ .tags => s.scopes.items[s.active_len - 1].tags.get(name),
+ };
+}
+
+/// Return the Symbol for `name` (or null if not found) in the nearest active scope,
+/// starting at the innermost.
+fn lookup(s: *SymbolStack, name: StringId, kind: ScopeKind) ?Symbol {
+ var i = s.active_len;
+ while (i > 0) {
i -= 1;
- switch (kinds[i]) {
- .@"enum" => if (names[i] == name) {
- if (kind == .keyword_enum) return s.syms.get(i);
- break;
- },
- .@"struct" => if (names[i] == name) {
- if (kind == .keyword_struct) return s.syms.get(i);
- break;
- },
- .@"union" => if (names[i] == name) {
- if (kind == .keyword_union) return s.syms.get(i);
- break;
- },
- else => {},
+ switch (kind) {
+ .vars => if (s.scopes.items[i].vars.get(name)) |sym| return sym,
+ .tags => if (s.scopes.items[i].tags.get(name)) |sym| return sym,
}
- } else return null;
-
- if (i < s.scopeEnd()) return null;
- try p.errStr(.wrong_tag, name_tok, p.tokSlice(name_tok));
- try p.errTok(.previous_definition, s.syms.items(.tok)[i]);
+ }
return null;
}
+/// Define a symbol in the innermost scope. Does not issue diagnostics or check correctness
+/// with regard to the C standard.
+pub fn define(s: *SymbolStack, allocator: Allocator, symbol: Symbol) !void {
+ switch (symbol.kind) {
+ .constexpr, .def, .decl, .enumeration, .typedef => {
+ try s.scopes.items[s.active_len - 1].vars.put(allocator, symbol.name, symbol);
+ },
+ .@"struct", .@"union", .@"enum" => {
+ try s.scopes.items[s.active_len - 1].tags.put(allocator, symbol.name, symbol);
+ },
+ }
+}
+
pub fn defineTypedef(
s: *SymbolStack,
p: *Parser,
@@ -145,25 +175,22 @@ pub fn defineTypedef(
tok: TokenIndex,
node: NodeIndex,
) !void {
- const kinds = s.syms.items(.kind);
- const names = s.syms.items(.name);
- const end = s.scopeEnd();
- var i = s.syms.len;
- while (i > end) {
- i -= 1;
- switch (kinds[i]) {
- .typedef => if (names[i] == name) {
- const prev_ty = s.syms.items(.ty)[i];
- if (ty.eql(prev_ty, p.comp, true)) break;
- try p.errStr(.redefinition_of_typedef, tok, try p.typePairStrExtra(ty, " vs ", prev_ty));
- const previous_tok = s.syms.items(.tok)[i];
- if (previous_tok != 0) try p.errTok(.previous_definition, previous_tok);
- break;
+ if (s.get(name, .vars)) |prev| {
+ switch (prev.kind) {
+ .typedef => {
+ if (!ty.eql(prev.ty, p.comp, true)) {
+ try p.errStr(.redefinition_of_typedef, tok, try p.typePairStrExtra(ty, " vs ", prev.ty));
+ if (prev.tok != 0) try p.errTok(.previous_definition, prev.tok);
+ }
+ },
+ .enumeration, .decl, .def, .constexpr => {
+ try p.errStr(.redefinition_different_sym, tok, p.tokSlice(tok));
+ try p.errTok(.previous_definition, prev.tok);
},
- else => {},
+ else => unreachable,
}
}
- try s.syms.append(p.gpa, .{
+ try s.define(p.gpa, .{
.kind = .typedef,
.name = name,
.tok = tok,
@@ -183,35 +210,31 @@ pub fn defineSymbol(
val: Value,
constexpr: bool,
) !void {
- const kinds = s.syms.items(.kind);
- const names = s.syms.items(.name);
- const end = s.scopeEnd();
- var i = s.syms.len;
- while (i > end) {
- i -= 1;
- switch (kinds[i]) {
- .enumeration => if (names[i] == name) {
+ if (s.get(name, .vars)) |prev| {
+ switch (prev.kind) {
+ .enumeration => {
try p.errStr(.redefinition_different_sym, tok, p.tokSlice(tok));
- try p.errTok(.previous_definition, s.syms.items(.tok)[i]);
- break;
+ try p.errTok(.previous_definition, prev.tok);
},
- .decl => if (names[i] == name) {
- const prev_ty = s.syms.items(.ty)[i];
- if (!ty.eql(prev_ty, p.comp, true)) {
+ .decl => {
+ if (!ty.eql(prev.ty, p.comp, true)) {
try p.errStr(.redefinition_incompatible, tok, p.tokSlice(tok));
- try p.errTok(.previous_definition, s.syms.items(.tok)[i]);
+ try p.errTok(.previous_definition, prev.tok);
}
- break;
},
- .def, .constexpr => if (names[i] == name) {
+ .def, .constexpr => {
try p.errStr(.redefinition, tok, p.tokSlice(tok));
- try p.errTok(.previous_definition, s.syms.items(.tok)[i]);
- break;
+ try p.errTok(.previous_definition, prev.tok);
},
- else => {},
+ .typedef => {
+ try p.errStr(.redefinition_different_sym, tok, p.tokSlice(tok));
+ try p.errTok(.previous_definition, prev.tok);
+ },
+ else => unreachable,
}
}
- try s.syms.append(p.gpa, .{
+
+ try s.define(p.gpa, .{
.kind = if (constexpr) .constexpr else .def,
.name = name,
.tok = tok,
@@ -221,6 +244,15 @@ pub fn defineSymbol(
});
}
+/// Get a pointer to the named symbol in the innermost scope.
+/// Asserts that a symbol with the name exists.
+pub fn getPtr(s: *SymbolStack, name: StringId, kind: ScopeKind) *Symbol {
+ return switch (kind) {
+ .tags => s.scopes.items[s.active_len - 1].tags.getPtr(name).?,
+ .vars => s.scopes.items[s.active_len - 1].vars.getPtr(name).?,
+ };
+}
+
pub fn declareSymbol(
s: *SymbolStack,
p: *Parser,
@@ -229,39 +261,34 @@ pub fn declareSymbol(
tok: TokenIndex,
node: NodeIndex,
) !void {
- const kinds = s.syms.items(.kind);
- const names = s.syms.items(.name);
- const end = s.scopeEnd();
- var i = s.syms.len;
- while (i > end) {
- i -= 1;
- switch (kinds[i]) {
- .enumeration => if (names[i] == name) {
+ if (s.get(name, .vars)) |prev| {
+ switch (prev.kind) {
+ .enumeration => {
try p.errStr(.redefinition_different_sym, tok, p.tokSlice(tok));
- try p.errTok(.previous_definition, s.syms.items(.tok)[i]);
- break;
+ try p.errTok(.previous_definition, prev.tok);
},
- .decl => if (names[i] == name) {
- const prev_ty = s.syms.items(.ty)[i];
- if (!ty.eql(prev_ty, p.comp, true)) {
+ .decl => {
+ if (!ty.eql(prev.ty, p.comp, true)) {
try p.errStr(.redefinition_incompatible, tok, p.tokSlice(tok));
- try p.errTok(.previous_definition, s.syms.items(.tok)[i]);
+ try p.errTok(.previous_definition, prev.tok);
}
- break;
},
- .def, .constexpr => if (names[i] == name) {
- const prev_ty = s.syms.items(.ty)[i];
- if (!ty.eql(prev_ty, p.comp, true)) {
+ .def, .constexpr => {
+ if (!ty.eql(prev.ty, p.comp, true)) {
try p.errStr(.redefinition_incompatible, tok, p.tokSlice(tok));
- try p.errTok(.previous_definition, s.syms.items(.tok)[i]);
- break;
+ try p.errTok(.previous_definition, prev.tok);
+ } else {
+ return;
}
- return;
},
- else => {},
+ .typedef => {
+ try p.errStr(.redefinition_different_sym, tok, p.tokSlice(tok));
+ try p.errTok(.previous_definition, prev.tok);
+ },
+ else => unreachable,
}
}
- try s.syms.append(p.gpa, .{
+ try s.define(p.gpa, .{
.kind = .decl,
.name = name,
.tok = tok,
@@ -272,25 +299,23 @@ pub fn declareSymbol(
}
pub fn defineParam(s: *SymbolStack, p: *Parser, name: StringId, ty: Type, tok: TokenIndex) !void {
- const kinds = s.syms.items(.kind);
- const names = s.syms.items(.name);
- const end = s.scopeEnd();
- var i = s.syms.len;
- while (i > end) {
- i -= 1;
- switch (kinds[i]) {
- .enumeration, .decl, .def, .constexpr => if (names[i] == name) {
+ if (s.get(name, .vars)) |prev| {
+ switch (prev.kind) {
+ .enumeration, .decl, .def, .constexpr => {
try p.errStr(.redefinition_of_parameter, tok, p.tokSlice(tok));
- try p.errTok(.previous_definition, s.syms.items(.tok)[i]);
- break;
+ try p.errTok(.previous_definition, prev.tok);
+ },
+ .typedef => {
+ try p.errStr(.redefinition_different_sym, tok, p.tokSlice(tok));
+ try p.errTok(.previous_definition, prev.tok);
},
- else => {},
+ else => unreachable,
}
}
if (ty.is(.fp16) and !p.comp.hasHalfPrecisionFloatABI()) {
try p.errStr(.suggest_pointer_for_invalid_fp16, tok, "parameters");
}
- try s.syms.append(p.gpa, .{
+ try s.define(p.gpa, .{
.kind = .def,
.name = name,
.tok = tok,
@@ -306,35 +331,28 @@ pub fn defineTag(
kind: Token.Id,
tok: TokenIndex,
) !?Symbol {
- const kinds = s.syms.items(.kind);
- const names = s.syms.items(.name);
- const end = s.scopeEnd();
- var i = s.syms.len;
- while (i > end) {
- i -= 1;
- switch (kinds[i]) {
- .@"enum" => if (names[i] == name) {
- if (kind == .keyword_enum) return s.syms.get(i);
- try p.errStr(.wrong_tag, tok, p.tokSlice(tok));
- try p.errTok(.previous_definition, s.syms.items(.tok)[i]);
- return null;
- },
- .@"struct" => if (names[i] == name) {
- if (kind == .keyword_struct) return s.syms.get(i);
- try p.errStr(.wrong_tag, tok, p.tokSlice(tok));
- try p.errTok(.previous_definition, s.syms.items(.tok)[i]);
- return null;
- },
- .@"union" => if (names[i] == name) {
- if (kind == .keyword_union) return s.syms.get(i);
- try p.errStr(.wrong_tag, tok, p.tokSlice(tok));
- try p.errTok(.previous_definition, s.syms.items(.tok)[i]);
- return null;
- },
- else => {},
- }
+ const prev = s.get(name, .tags) orelse return null;
+ switch (prev.kind) {
+ .@"enum" => {
+ if (kind == .keyword_enum) return prev;
+ try p.errStr(.wrong_tag, tok, p.tokSlice(tok));
+ try p.errTok(.previous_definition, prev.tok);
+ return null;
+ },
+ .@"struct" => {
+ if (kind == .keyword_struct) return prev;
+ try p.errStr(.wrong_tag, tok, p.tokSlice(tok));
+ try p.errTok(.previous_definition, prev.tok);
+ return null;
+ },
+ .@"union" => {
+ if (kind == .keyword_union) return prev;
+ try p.errStr(.wrong_tag, tok, p.tokSlice(tok));
+ try p.errTok(.previous_definition, prev.tok);
+ return null;
+ },
+ else => unreachable,
}
- return null;
}
pub fn defineEnumeration(
@@ -345,27 +363,26 @@ pub fn defineEnumeration(
tok: TokenIndex,
val: Value,
) !void {
- const kinds = s.syms.items(.kind);
- const names = s.syms.items(.name);
- const end = s.scopeEnd();
- var i = s.syms.len;
- while (i > end) {
- i -= 1;
- switch (kinds[i]) {
- .enumeration => if (names[i] == name) {
+ if (s.get(name, .vars)) |prev| {
+ switch (prev.kind) {
+ .enumeration => {
try p.errStr(.redefinition, tok, p.tokSlice(tok));
- try p.errTok(.previous_definition, s.syms.items(.tok)[i]);
+ try p.errTok(.previous_definition, prev.tok);
return;
},
- .decl, .def, .constexpr => if (names[i] == name) {
+ .decl, .def, .constexpr => {
try p.errStr(.redefinition_different_sym, tok, p.tokSlice(tok));
- try p.errTok(.previous_definition, s.syms.items(.tok)[i]);
+ try p.errTok(.previous_definition, prev.tok);
return;
},
- else => {},
+ .typedef => {
+ try p.errStr(.redefinition_different_sym, tok, p.tokSlice(tok));
+ try p.errTok(.previous_definition, prev.tok);
+ },
+ else => unreachable,
}
}
- try s.syms.append(p.gpa, .{
+ try s.define(p.gpa, .{
.kind = .enumeration,
.name = name,
.tok = tok,
deps/aro/aro/target.zig
@@ -1,7 +1,6 @@
const std = @import("std");
const LangOpts = @import("LangOpts.zig");
const Type = @import("Type.zig");
-const llvm = @import("root").codegen.llvm;
const TargetSet = @import("Builtins/Properties.zig").TargetSet;
/// intmax_t for this target
deps/aro/aro/Tokenizer.zig
@@ -749,12 +749,15 @@ pub const Token = struct {
.string_literal_utf_8,
.string_literal_utf_32,
.string_literal_wide,
+ .unterminated_string_literal,
=> "a string literal",
.char_literal,
.char_literal_utf_8,
.char_literal_utf_16,
.char_literal_utf_32,
.char_literal_wide,
+ .unterminated_char_literal,
+ .empty_char_literal,
=> "a character literal",
.pp_num, .embed_byte => "A number",
else => id.lexeme().?,
@@ -798,7 +801,7 @@ pub const Token = struct {
};
}
- pub fn allowsDigraphs(id: Id, comp: *const Compilation) bool {
+ pub fn allowsDigraphs(id: Id, langopts: LangOpts) bool {
return switch (id) {
.l_bracket,
.r_bracket,
@@ -806,7 +809,7 @@ pub const Token = struct {
.r_brace,
.hash,
.hash_hash,
- => comp.langopts.hasDigraphs(),
+ => langopts.hasDigraphs(),
else => false,
};
}
@@ -829,15 +832,15 @@ pub const Token = struct {
/// double underscore and underscore + capital letter identifiers
/// belong to the implementation namespace, so we always convert them
/// to keywords.
- pub fn getTokenId(comp: *const Compilation, str: []const u8) Token.Id {
+ pub fn getTokenId(langopts: LangOpts, str: []const u8) Token.Id {
const kw = all_kws.get(str) orelse return .identifier;
- const standard = comp.langopts.standard;
+ const standard = langopts.standard;
return switch (kw) {
.keyword_inline => if (standard.isGNU() or standard.atLeast(.c99)) kw else .identifier,
.keyword_restrict => if (standard.atLeast(.c99)) kw else .identifier,
.keyword_typeof => if (standard.isGNU() or standard.atLeast(.c23)) kw else .identifier,
.keyword_asm => if (standard.isGNU()) kw else .identifier,
- .keyword_declspec => if (comp.langopts.declspec_attrs) kw else .identifier,
+ .keyword_declspec => if (langopts.declspec_attrs) kw else .identifier,
.keyword_c23_alignas,
.keyword_c23_alignof,
@@ -864,7 +867,7 @@ pub const Token = struct {
.keyword_stdcall2,
.keyword_thiscall2,
.keyword_vectorcall2,
- => if (comp.langopts.ms_extensions) kw else .identifier,
+ => if (langopts.ms_extensions) kw else .identifier,
else => kw,
};
}
@@ -1023,7 +1026,7 @@ const Tokenizer = @This();
buf: []const u8,
index: u32 = 0,
source: Source.Id,
-comp: *const Compilation,
+langopts: LangOpts,
line: u32 = 1,
pub fn next(self: *Tokenizer) Token {
@@ -1162,14 +1165,14 @@ pub fn next(self: *Tokenizer) Token {
'#' => state = .hash,
'0'...'9' => state = .pp_num,
'\t', '\x0B', '\x0C', ' ' => state = .whitespace,
- '$' => if (self.comp.langopts.dollars_in_identifiers) {
+ '$' => if (self.langopts.dollars_in_identifiers) {
state = .extended_identifier;
} else {
id = .invalid;
self.index += 1;
break;
},
- 0x1A => if (self.comp.langopts.ms_extensions) {
+ 0x1A => if (self.langopts.ms_extensions) {
id = .eof;
break;
} else {
@@ -1306,15 +1309,15 @@ pub fn next(self: *Tokenizer) Token {
},
.identifier, .extended_identifier => switch (c) {
'a'...'z', 'A'...'Z', '_', '0'...'9' => {},
- '$' => if (self.comp.langopts.dollars_in_identifiers) {
+ '$' => if (self.langopts.dollars_in_identifiers) {
state = .extended_identifier;
} else {
- id = if (state == .identifier) Token.getTokenId(self.comp, self.buf[start..self.index]) else .extended_identifier;
+ id = if (state == .identifier) Token.getTokenId(self.langopts, self.buf[start..self.index]) else .extended_identifier;
break;
},
0x80...0xFF => state = .extended_identifier,
else => {
- id = if (state == .identifier) Token.getTokenId(self.comp, self.buf[start..self.index]) else .extended_identifier;
+ id = if (state == .identifier) Token.getTokenId(self.langopts, self.buf[start..self.index]) else .extended_identifier;
break;
},
},
@@ -1358,7 +1361,7 @@ pub fn next(self: *Tokenizer) Token {
},
.colon => switch (c) {
'>' => {
- if (self.comp.langopts.hasDigraphs()) {
+ if (self.langopts.hasDigraphs()) {
id = .r_bracket;
self.index += 1;
} else {
@@ -1367,7 +1370,7 @@ pub fn next(self: *Tokenizer) Token {
break;
},
':' => {
- if (self.comp.langopts.standard.atLeast(.c23)) {
+ if (self.langopts.standard.atLeast(.c23)) {
id = .colon_colon;
self.index += 1;
break;
@@ -1388,7 +1391,7 @@ pub fn next(self: *Tokenizer) Token {
break;
},
'>' => {
- if (self.comp.langopts.hasDigraphs()) {
+ if (self.langopts.hasDigraphs()) {
id = .r_brace;
self.index += 1;
} else {
@@ -1397,7 +1400,7 @@ pub fn next(self: *Tokenizer) Token {
break;
},
':' => {
- if (self.comp.langopts.hasDigraphs()) {
+ if (self.langopts.hasDigraphs()) {
state = .hash_digraph;
} else {
id = .percent;
@@ -1444,7 +1447,7 @@ pub fn next(self: *Tokenizer) Token {
break;
},
':' => {
- if (self.comp.langopts.hasDigraphs()) {
+ if (self.langopts.hasDigraphs()) {
id = .l_bracket;
self.index += 1;
} else {
@@ -1453,7 +1456,7 @@ pub fn next(self: *Tokenizer) Token {
break;
},
'%' => {
- if (self.comp.langopts.hasDigraphs()) {
+ if (self.langopts.hasDigraphs()) {
id = .l_brace;
self.index += 1;
} else {
@@ -1613,7 +1616,7 @@ pub fn next(self: *Tokenizer) Token {
},
.line_comment => switch (c) {
'\n' => {
- if (self.comp.langopts.preserve_comments) {
+ if (self.langopts.preserve_comments) {
id = .comment;
break;
}
@@ -1629,7 +1632,7 @@ pub fn next(self: *Tokenizer) Token {
},
.multi_line_comment_asterisk => switch (c) {
'/' => {
- if (self.comp.langopts.preserve_comments) {
+ if (self.langopts.preserve_comments) {
self.index += 1;
id = .comment;
break;
@@ -1673,7 +1676,7 @@ pub fn next(self: *Tokenizer) Token {
'.',
=> {},
'e', 'E', 'p', 'P' => state = .pp_num_exponent,
- '\'' => if (self.comp.langopts.standard.atLeast(.c23)) {
+ '\'' => if (self.langopts.standard.atLeast(.c23)) {
state = .pp_num_digit_separator;
} else {
id = .pp_num;
@@ -1721,7 +1724,7 @@ pub fn next(self: *Tokenizer) Token {
} else if (self.index == self.buf.len) {
switch (state) {
.start, .line_comment => {},
- .u, .u8, .U, .L, .identifier => id = Token.getTokenId(self.comp, self.buf[start..self.index]),
+ .u, .u8, .U, .L, .identifier => id = Token.getTokenId(self.langopts, self.buf[start..self.index]),
.extended_identifier => id = .extended_identifier,
.period2 => {
@@ -2149,7 +2152,7 @@ fn expectTokensExtra(contents: []const u8, expected_tokens: []const Token.Id, st
var tokenizer = Tokenizer{
.buf = source.buf,
.source = source.id,
- .comp = &comp,
+ .langopts = comp.langopts,
};
var i: usize = 0;
while (i < expected_tokens.len) {
deps/aro/aro/Toolchain.zig
@@ -183,18 +183,15 @@ pub fn getLinkerPath(tc: *const Toolchain, buf: []u8) ![]const u8 {
return tc.getProgramPath(default_linker, buf);
}
-const TargetSpecificToolName = std.BoundedArray(u8, 64);
-
/// If an explicit target is provided, also check the prefixed tool-specific name
/// TODO: this isn't exactly right since our target names don't necessarily match up
/// with GCC's.
/// For example the Zig target `arm-freestanding-eabi` would need the `arm-none-eabi` tools
-fn possibleProgramNames(raw_triple: ?[]const u8, name: []const u8, target_specific: *TargetSpecificToolName) std.BoundedArray([]const u8, 2) {
+fn possibleProgramNames(raw_triple: ?[]const u8, name: []const u8, buf: *[64]u8) std.BoundedArray([]const u8, 2) {
var possible_names: std.BoundedArray([]const u8, 2) = .{};
if (raw_triple) |triple| {
- const w = target_specific.writer();
- if (w.print("{s}-{s}", .{ triple, name })) {
- possible_names.appendAssumeCapacity(target_specific.constSlice());
+ if (std.fmt.bufPrint(buf, "{s}-{s}", .{ triple, name })) |res| {
+ possible_names.appendAssumeCapacity(res);
} else |_| {}
}
possible_names.appendAssumeCapacity(name);
@@ -227,8 +224,8 @@ fn getProgramPath(tc: *const Toolchain, name: []const u8, buf: []u8) []const u8
var path_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
var fib = std.heap.FixedBufferAllocator.init(&path_buf);
- var tool_specific_name: TargetSpecificToolName = .{};
- const possible_names = possibleProgramNames(tc.driver.raw_target_triple, name, &tool_specific_name);
+ var tool_specific_buf: [64]u8 = undefined;
+ const possible_names = possibleProgramNames(tc.driver.raw_target_triple, name, &tool_specific_buf);
for (possible_names.constSlice()) |tool_name| {
for (tc.program_paths.items) |program_path| {
deps/aro/aro/Tree.zig
@@ -1,11 +1,12 @@
const std = @import("std");
const Interner = @import("backend").Interner;
-const Type = @import("Type.zig");
-const Tokenizer = @import("Tokenizer.zig");
+const Attribute = @import("Attribute.zig");
const CodeGen = @import("CodeGen.zig");
const Compilation = @import("Compilation.zig");
+const number_affixes = @import("Tree/number_affixes.zig");
const Source = @import("Source.zig");
-const Attribute = @import("Attribute.zig");
+const Tokenizer = @import("Tokenizer.zig");
+const Type = @import("Type.zig");
const Value = @import("Value.zig");
const StringInterner = @import("StringInterner.zig");
@@ -92,6 +93,8 @@ pub const Token = struct {
pub const List = std.MultiArrayList(Token);
pub const Id = Tokenizer.Token.Id;
+ pub const NumberPrefix = number_affixes.Prefix;
+ pub const NumberSuffix = number_affixes.Suffix;
};
pub const TokenIndex = u32;
@@ -669,7 +672,7 @@ pub fn tokSlice(tree: *const Tree, tok_i: TokenIndex) []const u8 {
const loc = tree.tokens.items(.loc)[tok_i];
var tmp_tokenizer = Tokenizer{
.buf = tree.comp.getSource(loc.id).buf,
- .comp = tree.comp,
+ .langopts = tree.comp.langopts,
.index = loc.byte_offset,
.source = .generated,
};
deps/aro/aro/Type.zig
@@ -363,7 +363,6 @@ pub const Specifier = enum {
// data.sub_type
pointer,
unspecified_variable_len_array,
- decayed_unspecified_variable_len_array,
// data.func
/// int foo(int bar, char baz) and int (void)
func,
@@ -375,15 +374,11 @@ pub const Specifier = enum {
// data.array
array,
- decayed_array,
static_array,
- decayed_static_array,
incomplete_array,
- decayed_incomplete_array,
vector,
// data.expr
variable_len_array,
- decayed_variable_len_array,
// data.record
@"struct",
@@ -394,13 +389,9 @@ pub const Specifier = enum {
/// typeof(type-name)
typeof_type,
- /// decayed array created with typeof(type-name)
- decayed_typeof_type,
/// typeof(expression)
typeof_expr,
- /// decayed array created with typeof(expression)
- decayed_typeof_expr,
/// data.attributed
attributed,
@@ -428,6 +419,7 @@ data: union {
} = .{ .none = {} },
specifier: Specifier,
qual: Qualifiers = .{},
+decayed: bool = false,
pub const int = Type{ .specifier = .int };
pub const invalid = Type{ .specifier = .invalid };
@@ -442,7 +434,7 @@ pub fn is(ty: Type, specifier: Specifier) bool {
pub fn withAttributes(self: Type, allocator: std.mem.Allocator, attributes: []const Attribute) !Type {
if (attributes.len == 0) return self;
const attributed_type = try Type.Attributed.create(allocator, self, self.getAttributes(), attributes);
- return Type{ .specifier = .attributed, .data = .{ .attributed = attributed_type } };
+ return Type{ .specifier = .attributed, .data = .{ .attributed = attributed_type }, .decayed = self.decayed };
}
pub fn isCallable(ty: Type) ?Type {
@@ -468,10 +460,10 @@ pub fn isFunc(ty: Type) bool {
pub fn isArray(ty: Type) bool {
return switch (ty.specifier) {
- .array, .static_array, .incomplete_array, .variable_len_array, .unspecified_variable_len_array => true,
- .typeof_type => ty.data.sub_type.isArray(),
- .typeof_expr => ty.data.expr.ty.isArray(),
- .attributed => ty.data.attributed.base.isArray(),
+ .array, .static_array, .incomplete_array, .variable_len_array, .unspecified_variable_len_array => !ty.isDecayed(),
+ .typeof_type => !ty.isDecayed() and ty.data.sub_type.isArray(),
+ .typeof_expr => !ty.isDecayed() and ty.data.expr.ty.isArray(),
+ .attributed => !ty.isDecayed() and ty.data.attributed.base.isArray(),
else => false,
};
}
@@ -502,35 +494,22 @@ pub fn isScalarNonInt(ty: Type) bool {
}
pub fn isDecayed(ty: Type) bool {
- const decayed = switch (ty.specifier) {
- .decayed_array,
- .decayed_static_array,
- .decayed_incomplete_array,
- .decayed_variable_len_array,
- .decayed_unspecified_variable_len_array,
- .decayed_typeof_type,
- .decayed_typeof_expr,
- => true,
- else => false,
- };
- std.debug.assert(decayed or !std.mem.startsWith(u8, @tagName(ty.specifier), "decayed"));
- return decayed;
+ return ty.decayed;
}
pub fn isPtr(ty: Type) bool {
return switch (ty.specifier) {
- .pointer,
- .decayed_array,
- .decayed_static_array,
- .decayed_incomplete_array,
- .decayed_variable_len_array,
- .decayed_unspecified_variable_len_array,
- .decayed_typeof_type,
- .decayed_typeof_expr,
- => true,
- .typeof_type => ty.data.sub_type.isPtr(),
- .typeof_expr => ty.data.expr.ty.isPtr(),
- .attributed => ty.data.attributed.base.isPtr(),
+ .pointer => true,
+
+ .array,
+ .static_array,
+ .incomplete_array,
+ .variable_len_array,
+ .unspecified_variable_len_array,
+ => ty.isDecayed(),
+ .typeof_type => ty.isDecayed() or ty.data.sub_type.isPtr(),
+ .typeof_expr => ty.isDecayed() or ty.data.expr.ty.isPtr(),
+ .attributed => ty.isDecayed() or ty.data.attributed.base.isPtr(),
else => false,
};
}
@@ -608,15 +587,15 @@ pub fn isVoidStar(ty: Type) bool {
pub fn isTypeof(ty: Type) bool {
return switch (ty.specifier) {
- .typeof_type, .typeof_expr, .decayed_typeof_type, .decayed_typeof_expr => true,
+ .typeof_type, .typeof_expr => true,
else => false,
};
}
pub fn isConst(ty: Type) bool {
return switch (ty.specifier) {
- .typeof_type, .decayed_typeof_type => ty.qual.@"const" or ty.data.sub_type.isConst(),
- .typeof_expr, .decayed_typeof_expr => ty.qual.@"const" or ty.data.expr.ty.isConst(),
+ .typeof_type => ty.qual.@"const" or ty.data.sub_type.isConst(),
+ .typeof_expr => ty.qual.@"const" or ty.data.expr.ty.isConst(),
.attributed => ty.data.attributed.base.isConst(),
else => ty.qual.@"const",
};
@@ -630,7 +609,7 @@ pub fn signedness(ty: Type, comp: *const Compilation) std.builtin.Signedness {
return switch (ty.specifier) {
// zig fmt: off
.char, .complex_char => return comp.getCharSignedness(),
- .uchar, .ushort, .uint, .ulong, .ulong_long, .bool, .complex_uchar, .complex_ushort,
+ .uchar, .ushort, .uint, .ulong, .ulong_long, .uint128, .bool, .complex_uchar, .complex_ushort,
.complex_uint, .complex_ulong, .complex_ulong_long, .complex_uint128 => .unsigned,
// zig fmt: on
.bit_int, .complex_bit_int => ty.data.int.signedness,
@@ -678,16 +657,16 @@ pub fn isAnonymousRecord(ty: Type, comp: *const Compilation) bool {
pub fn elemType(ty: Type) Type {
return switch (ty.specifier) {
- .pointer, .unspecified_variable_len_array, .decayed_unspecified_variable_len_array => ty.data.sub_type.*,
- .array, .static_array, .incomplete_array, .decayed_array, .decayed_static_array, .decayed_incomplete_array, .vector => ty.data.array.elem,
- .variable_len_array, .decayed_variable_len_array => ty.data.expr.ty,
- .typeof_type, .decayed_typeof_type, .typeof_expr, .decayed_typeof_expr => {
+ .pointer, .unspecified_variable_len_array => ty.data.sub_type.*,
+ .array, .static_array, .incomplete_array, .vector => ty.data.array.elem,
+ .variable_len_array => ty.data.expr.ty,
+ .typeof_type, .typeof_expr => {
const unwrapped = ty.canonicalize(.preserve_quals);
var elem = unwrapped.elemType();
elem.qual = elem.qual.mergeAll(unwrapped.qual);
return elem;
},
- .attributed => ty.data.attributed.base,
+ .attributed => ty.data.attributed.base.elemType(),
.invalid => Type.invalid,
// zig fmt: off
.complex_float, .complex_double, .complex_long_double, .complex_float80,
@@ -703,8 +682,8 @@ pub fn elemType(ty: Type) Type {
pub fn returnType(ty: Type) Type {
return switch (ty.specifier) {
.func, .var_args_func, .old_style_func => ty.data.func.return_type,
- .typeof_type, .decayed_typeof_type => ty.data.sub_type.returnType(),
- .typeof_expr, .decayed_typeof_expr => ty.data.expr.ty.returnType(),
+ .typeof_type => ty.data.sub_type.returnType(),
+ .typeof_expr => ty.data.expr.ty.returnType(),
.attributed => ty.data.attributed.base.returnType(),
.invalid => Type.invalid,
else => unreachable,
@@ -714,8 +693,8 @@ pub fn returnType(ty: Type) Type {
pub fn params(ty: Type) []Func.Param {
return switch (ty.specifier) {
.func, .var_args_func, .old_style_func => ty.data.func.params,
- .typeof_type, .decayed_typeof_type => ty.data.sub_type.params(),
- .typeof_expr, .decayed_typeof_expr => ty.data.expr.ty.params(),
+ .typeof_type => ty.data.sub_type.params(),
+ .typeof_expr => ty.data.expr.ty.params(),
.attributed => ty.data.attributed.base.params(),
.invalid => &.{},
else => unreachable,
@@ -724,9 +703,9 @@ pub fn params(ty: Type) []Func.Param {
pub fn arrayLen(ty: Type) ?u64 {
return switch (ty.specifier) {
- .array, .static_array, .decayed_array, .decayed_static_array => ty.data.array.len,
- .typeof_type, .decayed_typeof_type => ty.data.sub_type.arrayLen(),
- .typeof_expr, .decayed_typeof_expr => ty.data.expr.ty.arrayLen(),
+ .array, .static_array => ty.data.array.len,
+ .typeof_type => ty.data.sub_type.arrayLen(),
+ .typeof_expr => ty.data.expr.ty.arrayLen(),
.attributed => ty.data.attributed.base.arrayLen(),
else => null,
};
@@ -748,8 +727,8 @@ pub fn anyQual(ty: Type) bool {
pub fn getAttributes(ty: Type) []const Attribute {
return switch (ty.specifier) {
.attributed => ty.data.attributed.attributes,
- .typeof_type, .decayed_typeof_type => ty.data.sub_type.getAttributes(),
- .typeof_expr, .decayed_typeof_expr => ty.data.expr.ty.getAttributes(),
+ .typeof_type => ty.data.sub_type.getAttributes(),
+ .typeof_expr => ty.data.expr.ty.getAttributes(),
else => &.{},
};
}
@@ -757,8 +736,8 @@ pub fn getAttributes(ty: Type) []const Attribute {
pub fn getRecord(ty: Type) ?*const Type.Record {
return switch (ty.specifier) {
.attributed => ty.data.attributed.base.getRecord(),
- .typeof_type, .decayed_typeof_type => ty.data.sub_type.getRecord(),
- .typeof_expr, .decayed_typeof_expr => ty.data.expr.ty.getRecord(),
+ .typeof_type => ty.data.sub_type.getRecord(),
+ .typeof_expr => ty.data.expr.ty.getRecord(),
.@"struct", .@"union" => ty.data.record,
else => null,
};
@@ -901,6 +880,7 @@ pub fn bitfieldPromotion(ty: Type, comp: *Compilation, width: u32) ?Type {
}
pub fn hasIncompleteSize(ty: Type) bool {
+ if (ty.isDecayed()) return false;
return switch (ty.specifier) {
.void, .incomplete_array => true,
.@"enum" => ty.data.@"enum".isIncomplete() and !ty.data.@"enum".fixed,
@@ -917,20 +897,14 @@ pub fn hasUnboundVLA(ty: Type) bool {
var cur = ty;
while (true) {
switch (cur.specifier) {
- .unspecified_variable_len_array,
- .decayed_unspecified_variable_len_array,
- => return true,
+ .unspecified_variable_len_array => return true,
.array,
.static_array,
.incomplete_array,
.variable_len_array,
- .decayed_array,
- .decayed_static_array,
- .decayed_incomplete_array,
- .decayed_variable_len_array,
=> cur = cur.elemType(),
- .typeof_type, .decayed_typeof_type => cur = cur.data.sub_type.*,
- .typeof_expr, .decayed_typeof_expr => cur = cur.data.expr.ty,
+ .typeof_type => cur = cur.data.sub_type.*,
+ .typeof_expr => cur = cur.data.expr.ty,
.attributed => cur = cur.data.attributed.base,
else => return false,
}
@@ -1006,9 +980,11 @@ pub fn sizeCompare(a: Type, b: Type, comp: *Compilation) TypeSizeOrder {
/// Size of type as reported by sizeof
pub fn sizeof(ty: Type, comp: *const Compilation) ?u64 {
+ if (ty.isPtr()) return comp.target.ptrBitWidth() / 8;
+
return switch (ty.specifier) {
.auto_type, .c23_auto => unreachable,
- .variable_len_array, .unspecified_variable_len_array => return null,
+ .variable_len_array, .unspecified_variable_len_array => null,
.incomplete_array => return if (comp.langopts.emulate == .msvc) @as(?u64, 0) else null,
.func, .var_args_func, .old_style_func, .void, .bool => 1,
.char, .schar, .uchar => 1,
@@ -1037,14 +1013,7 @@ pub fn sizeof(ty: Type, comp: *const Compilation) ?u64 {
.complex_long_double, .complex_float80, .complex_float128, .complex_bit_int,
=> return 2 * ty.makeReal().sizeof(comp).?,
// zig fmt: on
- .pointer,
- .decayed_array,
- .decayed_static_array,
- .decayed_incomplete_array,
- .decayed_variable_len_array,
- .decayed_unspecified_variable_len_array,
- .decayed_typeof_type,
- .decayed_typeof_expr,
+ .pointer => unreachable,
.static_array,
.nullptr_t,
=> comp.target.ptrBitWidth() / 8,
@@ -1073,8 +1042,8 @@ pub fn sizeof(ty: Type, comp: *const Compilation) ?u64 {
pub fn bitSizeof(ty: Type, comp: *const Compilation) ?u64 {
return switch (ty.specifier) {
.bool => if (comp.langopts.emulate == .msvc) @as(u64, 8) else 1,
- .typeof_type, .decayed_typeof_type => ty.data.sub_type.bitSizeof(comp),
- .typeof_expr, .decayed_typeof_expr => ty.data.expr.ty.bitSizeof(comp),
+ .typeof_type => ty.data.sub_type.bitSizeof(comp),
+ .typeof_expr => ty.data.expr.ty.bitSizeof(comp),
.attributed => ty.data.attributed.base.bitSizeof(comp),
.bit_int => return ty.data.int.bits,
.long_double => comp.target.c_type_bit_size(.longdouble),
@@ -1117,7 +1086,10 @@ pub fn alignof(ty: Type, comp: *const Compilation) u29 {
.unspecified_variable_len_array,
.array,
.vector,
- => ty.elemType().alignof(comp),
+ => if (ty.isPtr()) switch (comp.target.cpu.arch) {
+ .avr => 1,
+ else => comp.target.ptrBitWidth() / 8,
+ } else ty.elemType().alignof(comp),
.func, .var_args_func, .old_style_func => target_util.defaultFunctionAlignment(comp.target),
.char, .schar, .uchar, .void, .bool => 1,
@@ -1153,11 +1125,6 @@ pub fn alignof(ty: Type, comp: *const Compilation) u29 {
.float80, .float128 => 16,
.pointer,
- .decayed_array,
- .decayed_static_array,
- .decayed_incomplete_array,
- .decayed_variable_len_array,
- .decayed_unspecified_variable_len_array,
.static_array,
.nullptr_t,
=> switch (comp.target.cpu.arch) {
@@ -1166,8 +1133,8 @@ pub fn alignof(ty: Type, comp: *const Compilation) u29 {
},
.@"struct", .@"union" => if (ty.data.record.isIncomplete()) 0 else @intCast(ty.data.record.type_layout.field_alignment_bits / 8),
.@"enum" => if (ty.data.@"enum".isIncomplete() and !ty.data.@"enum".fixed) 0 else ty.data.@"enum".tag_ty.alignof(comp),
- .typeof_type, .decayed_typeof_type => ty.data.sub_type.alignof(comp),
- .typeof_expr, .decayed_typeof_expr => ty.data.expr.ty.alignof(comp),
+ .typeof_type => ty.data.sub_type.alignof(comp),
+ .typeof_expr => ty.data.expr.ty.alignof(comp),
.attributed => ty.data.attributed.base.alignof(comp),
};
}
@@ -1179,7 +1146,10 @@ pub fn alignof(ty: Type, comp: *const Compilation) u29 {
/// arrays and pointers.
pub fn canonicalize(ty: Type, qual_handling: enum { standard, preserve_quals }) Type {
var cur = ty;
- if (cur.specifier == .attributed) cur = cur.data.attributed.base;
+ if (cur.specifier == .attributed) {
+ cur = cur.data.attributed.base;
+ cur.decayed = ty.decayed;
+ }
if (!cur.isTypeof()) return cur;
var qual = cur.qual;
@@ -1187,14 +1157,6 @@ pub fn canonicalize(ty: Type, qual_handling: enum { standard, preserve_quals })
switch (cur.specifier) {
.typeof_type => cur = cur.data.sub_type.*,
.typeof_expr => cur = cur.data.expr.ty,
- .decayed_typeof_type => {
- cur = cur.data.sub_type.*;
- cur.decayArray();
- },
- .decayed_typeof_expr => {
- cur = cur.data.expr.ty;
- cur.decayArray();
- },
else => break,
}
qual = qual.mergeAll(cur.qual);
@@ -1204,6 +1166,7 @@ pub fn canonicalize(ty: Type, qual_handling: enum { standard, preserve_quals })
} else {
cur.qual = qual;
}
+ cur.decayed = ty.decayed;
return cur;
}
@@ -1219,8 +1182,8 @@ pub fn get(ty: *const Type, specifier: Specifier) ?*const Type {
pub fn requestedAlignment(ty: Type, comp: *const Compilation) ?u29 {
return switch (ty.specifier) {
- .typeof_type, .decayed_typeof_type => ty.data.sub_type.requestedAlignment(comp),
- .typeof_expr, .decayed_typeof_expr => ty.data.expr.ty.requestedAlignment(comp),
+ .typeof_type => ty.data.sub_type.requestedAlignment(comp),
+ .typeof_expr => ty.data.expr.ty.requestedAlignment(comp),
.attributed => annotationAlignment(comp, ty.data.attributed.attributes),
else => null,
};
@@ -1265,14 +1228,11 @@ pub fn eql(a_param: Type, b_param: Type, comp: *const Compilation, check_qualifi
if (a.qual.@"volatile" != b.qual.@"volatile") return false;
}
+ if (a.isPtr()) {
+ return a_param.elemType().eql(b_param.elemType(), comp, check_qualifiers);
+ }
switch (a.specifier) {
- .pointer,
- .decayed_array,
- .decayed_static_array,
- .decayed_incomplete_array,
- .decayed_variable_len_array,
- .decayed_unspecified_variable_len_array,
- => if (!a_param.elemType().eql(b_param.elemType(), comp, check_qualifiers)) return false,
+ .pointer => unreachable,
.func,
.var_args_func,
@@ -1293,8 +1253,9 @@ pub fn eql(a_param: Type, b_param: Type, comp: *const Compilation, check_qualifi
}
if (!a.elemType().eql(b.elemType(), comp, false)) return false;
},
- .variable_len_array => if (!a.elemType().eql(b.elemType(), comp, check_qualifiers)) return false,
-
+ .variable_len_array => {
+ if (!a.elemType().eql(b.elemType(), comp, check_qualifiers)) return false;
+ },
.@"struct", .@"union" => if (a.data.record != b.data.record) return false,
.@"enum" => if (a.data.@"enum" != b.data.@"enum") return false,
.bit_int, .complex_bit_int => return a.data.int.bits == b.data.int.bits and a.data.int.signedness == b.data.int.signedness,
@@ -1306,14 +1267,14 @@ pub fn eql(a_param: Type, b_param: Type, comp: *const Compilation, check_qualifi
/// Decays an array to a pointer
pub fn decayArray(ty: *Type) void {
- // the decayed array type is the current specifier +1
- ty.specifier = @enumFromInt(@intFromEnum(ty.specifier) + 1);
+ std.debug.assert(ty.isArray());
+ ty.decayed = true;
}
pub fn originalTypeOfDecayedArray(ty: Type) Type {
std.debug.assert(ty.isDecayed());
var copy = ty;
- copy.specifier = @enumFromInt(@intFromEnum(ty.specifier) - 1);
+ copy.decayed = false;
return copy;
}
@@ -1405,25 +1366,23 @@ pub fn combine(inner: *Type, outer: Type) Parser.Error!void {
switch (inner.specifier) {
.pointer => return inner.data.sub_type.combine(outer),
.unspecified_variable_len_array => {
+ std.debug.assert(!inner.isDecayed());
try inner.data.sub_type.combine(outer);
},
.variable_len_array => {
+ std.debug.assert(!inner.isDecayed());
try inner.data.expr.ty.combine(outer);
},
.array, .static_array, .incomplete_array => {
+ std.debug.assert(!inner.isDecayed());
try inner.data.array.elem.combine(outer);
},
.func, .var_args_func, .old_style_func => {
try inner.data.func.return_type.combine(outer);
},
- .decayed_array,
- .decayed_static_array,
- .decayed_incomplete_array,
- .decayed_variable_len_array,
- .decayed_unspecified_variable_len_array,
- .decayed_typeof_type,
- .decayed_typeof_expr,
- => unreachable, // type should not be able to decay before being combined
+ .typeof_type,
+ .typeof_expr,
+ => std.debug.assert(!inner.isDecayed()),
.void, .invalid => inner.* = outer,
else => unreachable,
}
@@ -1474,8 +1433,8 @@ pub fn validateCombinedType(ty: Type, p: *Parser, source_tok: TokenIndex) Parser
try p.errStr(.suggest_pointer_for_invalid_fp16, source_tok, "function return value");
}
},
- .typeof_type, .decayed_typeof_type => return ty.data.sub_type.validateCombinedType(p, source_tok),
- .typeof_expr, .decayed_typeof_expr => return ty.data.expr.ty.validateCombinedType(p, source_tok),
+ .typeof_type => return ty.data.sub_type.validateCombinedType(p, source_tok),
+ .typeof_expr => return ty.data.expr.ty.validateCombinedType(p, source_tok),
.attributed => return ty.data.attributed.base.validateCombinedType(p, source_tok),
else => {},
}
@@ -1610,6 +1569,7 @@ pub const Builder = struct {
decayed_typeof_expr: *Expr,
attributed: *Attributed,
+ decayed_attributed: *Attributed,
pub fn str(spec: Builder.Specifier, langopts: LangOpts) ?[]const u8 {
return switch (spec) {
@@ -1835,13 +1795,10 @@ pub const Builder = struct {
ty.specifier = .pointer;
ty.data = .{ .sub_type = data };
},
- .unspecified_variable_len_array => |data| {
+ .unspecified_variable_len_array, .decayed_unspecified_variable_len_array => |data| {
ty.specifier = .unspecified_variable_len_array;
ty.data = .{ .sub_type = data };
- },
- .decayed_unspecified_variable_len_array => |data| {
- ty.specifier = .decayed_unspecified_variable_len_array;
- ty.data = .{ .sub_type = data };
+ ty.decayed = b.specifier == .decayed_unspecified_variable_len_array;
},
.func => |data| {
ty.specifier = .func;
@@ -1855,41 +1812,29 @@ pub const Builder = struct {
ty.specifier = .old_style_func;
ty.data = .{ .func = data };
},
- .array => |data| {
+ .array, .decayed_array => |data| {
ty.specifier = .array;
ty.data = .{ .array = data };
+ ty.decayed = b.specifier == .decayed_array;
},
- .decayed_array => |data| {
- ty.specifier = .decayed_array;
- ty.data = .{ .array = data };
- },
- .static_array => |data| {
+ .static_array, .decayed_static_array => |data| {
ty.specifier = .static_array;
ty.data = .{ .array = data };
+ ty.decayed = b.specifier == .decayed_static_array;
},
- .decayed_static_array => |data| {
- ty.specifier = .decayed_static_array;
- ty.data = .{ .array = data };
- },
- .incomplete_array => |data| {
+ .incomplete_array, .decayed_incomplete_array => |data| {
ty.specifier = .incomplete_array;
ty.data = .{ .array = data };
- },
- .decayed_incomplete_array => |data| {
- ty.specifier = .decayed_incomplete_array;
- ty.data = .{ .array = data };
+ ty.decayed = b.specifier == .decayed_incomplete_array;
},
.vector => |data| {
ty.specifier = .vector;
ty.data = .{ .array = data };
},
- .variable_len_array => |data| {
+ .variable_len_array, .decayed_variable_len_array => |data| {
ty.specifier = .variable_len_array;
ty.data = .{ .expr = data };
- },
- .decayed_variable_len_array => |data| {
- ty.specifier = .decayed_variable_len_array;
- ty.data = .{ .expr = data };
+ ty.decayed = b.specifier == .decayed_variable_len_array;
},
.@"struct" => |data| {
ty.specifier = .@"struct";
@@ -1903,25 +1848,20 @@ pub const Builder = struct {
ty.specifier = .@"enum";
ty.data = .{ .@"enum" = data };
},
- .typeof_type => |data| {
+ .typeof_type, .decayed_typeof_type => |data| {
ty.specifier = .typeof_type;
ty.data = .{ .sub_type = data };
+ ty.decayed = b.specifier == .decayed_typeof_type;
},
- .decayed_typeof_type => |data| {
- ty.specifier = .decayed_typeof_type;
- ty.data = .{ .sub_type = data };
- },
- .typeof_expr => |data| {
+ .typeof_expr, .decayed_typeof_expr => |data| {
ty.specifier = .typeof_expr;
ty.data = .{ .expr = data };
+ ty.decayed = b.specifier == .decayed_typeof_expr;
},
- .decayed_typeof_expr => |data| {
- ty.specifier = .decayed_typeof_expr;
- ty.data = .{ .expr = data };
- },
- .attributed => |data| {
+ .attributed, .decayed_attributed => |data| {
ty.specifier = .attributed;
ty.data = .{ .attributed = data };
+ ty.decayed = b.specifier == .decayed_attributed;
},
}
if (!ty.isReal() and ty.isInt()) {
@@ -2359,30 +2299,47 @@ pub const Builder = struct {
.complex_float128 => .complex_float128,
.pointer => .{ .pointer = ty.data.sub_type },
- .unspecified_variable_len_array => .{ .unspecified_variable_len_array = ty.data.sub_type },
- .decayed_unspecified_variable_len_array => .{ .decayed_unspecified_variable_len_array = ty.data.sub_type },
+ .unspecified_variable_len_array => if (ty.isDecayed())
+ .{ .decayed_unspecified_variable_len_array = ty.data.sub_type }
+ else
+ .{ .unspecified_variable_len_array = ty.data.sub_type },
.func => .{ .func = ty.data.func },
.var_args_func => .{ .var_args_func = ty.data.func },
.old_style_func => .{ .old_style_func = ty.data.func },
- .array => .{ .array = ty.data.array },
- .decayed_array => .{ .decayed_array = ty.data.array },
- .static_array => .{ .static_array = ty.data.array },
- .decayed_static_array => .{ .decayed_static_array = ty.data.array },
- .incomplete_array => .{ .incomplete_array = ty.data.array },
- .decayed_incomplete_array => .{ .decayed_incomplete_array = ty.data.array },
+ .array => if (ty.isDecayed())
+ .{ .decayed_array = ty.data.array }
+ else
+ .{ .array = ty.data.array },
+ .static_array => if (ty.isDecayed())
+ .{ .decayed_static_array = ty.data.array }
+ else
+ .{ .static_array = ty.data.array },
+ .incomplete_array => if (ty.isDecayed())
+ .{ .decayed_incomplete_array = ty.data.array }
+ else
+ .{ .incomplete_array = ty.data.array },
.vector => .{ .vector = ty.data.array },
- .variable_len_array => .{ .variable_len_array = ty.data.expr },
- .decayed_variable_len_array => .{ .decayed_variable_len_array = ty.data.expr },
+ .variable_len_array => if (ty.isDecayed())
+ .{ .decayed_variable_len_array = ty.data.expr }
+ else
+ .{ .variable_len_array = ty.data.expr },
.@"struct" => .{ .@"struct" = ty.data.record },
.@"union" => .{ .@"union" = ty.data.record },
.@"enum" => .{ .@"enum" = ty.data.@"enum" },
- .typeof_type => .{ .typeof_type = ty.data.sub_type },
- .decayed_typeof_type => .{ .decayed_typeof_type = ty.data.sub_type },
- .typeof_expr => .{ .typeof_expr = ty.data.expr },
- .decayed_typeof_expr => .{ .decayed_typeof_expr = ty.data.expr },
+ .typeof_type => if (ty.isDecayed())
+ .{ .decayed_typeof_type = ty.data.sub_type }
+ else
+ .{ .typeof_type = ty.data.sub_type },
+ .typeof_expr => if (ty.isDecayed())
+ .{ .decayed_typeof_expr = ty.data.expr }
+ else
+ .{ .typeof_expr = ty.data.expr },
- .attributed => .{ .attributed = ty.data.attributed },
+ .attributed => if (ty.isDecayed())
+ .{ .decayed_attributed = ty.data.attributed }
+ else
+ .{ .attributed = ty.data.attributed },
else => unreachable,
};
}
@@ -2472,24 +2429,17 @@ fn printPrologue(ty: Type, mapper: StringInterner.TypeMapper, langopts: LangOpts
try w.writeAll(")");
return true;
}
+ if (ty.isPtr()) {
+ const elem_ty = ty.elemType();
+ const simple = try elem_ty.printPrologue(mapper, langopts, w);
+ if (simple) try w.writeByte(' ');
+ if (elem_ty.isFunc() or elem_ty.isArray()) try w.writeByte('(');
+ try w.writeByte('*');
+ try ty.qual.dump(w);
+ return false;
+ }
switch (ty.specifier) {
- .pointer,
- .decayed_array,
- .decayed_static_array,
- .decayed_incomplete_array,
- .decayed_variable_len_array,
- .decayed_unspecified_variable_len_array,
- .decayed_typeof_type,
- .decayed_typeof_expr,
- => {
- const elem_ty = ty.elemType();
- const simple = try elem_ty.printPrologue(mapper, langopts, w);
- if (simple) try w.writeByte(' ');
- if (elem_ty.isFunc() or elem_ty.isArray()) try w.writeByte('(');
- try w.writeByte('*');
- try ty.qual.dump(w);
- return false;
- },
+ .pointer => unreachable,
.func, .var_args_func, .old_style_func => {
const ret_ty = ty.data.func.return_type;
const simple = try ret_ty.printPrologue(mapper, langopts, w);
@@ -2541,20 +2491,14 @@ fn printPrologue(ty: Type, mapper: StringInterner.TypeMapper, langopts: LangOpts
fn printEpilogue(ty: Type, mapper: StringInterner.TypeMapper, langopts: LangOpts, w: anytype) @TypeOf(w).Error!void {
if (ty.qual.atomic) return;
+ if (ty.isPtr()) {
+ const elem_ty = ty.elemType();
+ if (elem_ty.isFunc() or elem_ty.isArray()) try w.writeByte(')');
+ try elem_ty.printEpilogue(mapper, langopts, w);
+ return;
+ }
switch (ty.specifier) {
- .pointer,
- .decayed_array,
- .decayed_static_array,
- .decayed_incomplete_array,
- .decayed_variable_len_array,
- .decayed_unspecified_variable_len_array,
- .decayed_typeof_type,
- .decayed_typeof_expr,
- => {
- const elem_ty = ty.elemType();
- if (elem_ty.isFunc() or elem_ty.isArray()) try w.writeByte(')');
- try elem_ty.printEpilogue(mapper, langopts, w);
- },
+ .pointer => unreachable, // handled above
.func, .var_args_func, .old_style_func => {
try w.writeByte('(');
for (ty.data.func.params, 0..) |param, i| {
@@ -2637,10 +2581,10 @@ pub fn dump(ty: Type, mapper: StringInterner.TypeMapper, langopts: LangOpts, w:
try w.writeAll(") ");
try ty.data.func.return_type.dump(mapper, langopts, w);
},
- .array, .static_array, .decayed_array, .decayed_static_array => {
- if (ty.specifier == .decayed_array or ty.specifier == .decayed_static_array) try w.writeAll("*d");
+ .array, .static_array => {
+ if (ty.isDecayed()) try w.writeAll("*d");
try w.writeByte('[');
- if (ty.specifier == .static_array or ty.specifier == .decayed_static_array) try w.writeAll("static ");
+ if (ty.specifier == .static_array) try w.writeAll("static ");
try w.print("{d}]", .{ty.data.array.len});
try ty.data.array.elem.dump(mapper, langopts, w);
},
@@ -2649,8 +2593,8 @@ pub fn dump(ty: Type, mapper: StringInterner.TypeMapper, langopts: LangOpts, w:
try ty.data.array.elem.dump(mapper, langopts, w);
try w.writeAll(")");
},
- .incomplete_array, .decayed_incomplete_array => {
- if (ty.specifier == .decayed_incomplete_array) try w.writeAll("*d");
+ .incomplete_array => {
+ if (ty.isDecayed()) try w.writeAll("*d");
try w.writeAll("[]");
try ty.data.array.elem.dump(mapper, langopts, w);
},
@@ -2672,27 +2616,28 @@ pub fn dump(ty: Type, mapper: StringInterner.TypeMapper, langopts: LangOpts, w:
try w.print("union {s}", .{mapper.lookup(ty.data.record.name)});
if (dump_detailed_containers) try dumpRecord(ty.data.record, mapper, langopts, w);
},
- .unspecified_variable_len_array, .decayed_unspecified_variable_len_array => {
- if (ty.specifier == .decayed_unspecified_variable_len_array) try w.writeAll("*d");
+ .unspecified_variable_len_array => {
+ if (ty.isDecayed()) try w.writeAll("*d");
try w.writeAll("[*]");
try ty.data.sub_type.dump(mapper, langopts, w);
},
- .variable_len_array, .decayed_variable_len_array => {
- if (ty.specifier == .decayed_variable_len_array) try w.writeAll("*d");
+ .variable_len_array => {
+ if (ty.isDecayed()) try w.writeAll("*d");
try w.writeAll("[<expr>]");
try ty.data.expr.ty.dump(mapper, langopts, w);
},
- .typeof_type, .decayed_typeof_type => {
+ .typeof_type => {
try w.writeAll("typeof(");
try ty.data.sub_type.dump(mapper, langopts, w);
try w.writeAll(")");
},
- .typeof_expr, .decayed_typeof_expr => {
+ .typeof_expr => {
try w.writeAll("typeof(<expr>: ");
try ty.data.expr.ty.dump(mapper, langopts, w);
try w.writeAll(")");
},
.attributed => {
+ if (ty.isDecayed()) try w.writeAll("*d:");
try w.writeAll("attributed(");
try ty.data.attributed.base.dump(mapper, langopts, w);
try w.writeAll(")");
deps/aro/aro/Value.zig
@@ -215,7 +215,7 @@ pub fn intCast(v: *Value, dest_ty: Type, comp: *Compilation) !void {
const limbs = try comp.gpa.alloc(
std.math.big.Limb,
- std.math.big.int.calcTwosCompLimbCount(bits),
+ std.math.big.int.calcTwosCompLimbCount(@max(big.bitCountTwosComp(), bits)),
);
defer comp.gpa.free(limbs);
var result_bigint = std.math.big.int.Mutable{ .limbs = limbs, .positive = undefined, .len = undefined };