Commit 288fc3a8d3
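The hunks below are mostly a mechanical syntax and formatting migration: prefix pointer dereference (*p) becomes the postfix form (p.*), trailing /// doc comments move onto their own line above the declaration they document, struct literals lose the space before {, and short single-statement if bodies collapse onto one line. A minimal sketch of the dereference change, using hypothetical variables rather than code from the commit:

    var x: i32 = 1;
    const p = &x;  // 2018-era Zig address-of; p has pointer type &i32
    // old form, removed throughout this diff:  *p = 42;
    p.* = 42;      // new postfix dereference form, added in its place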
Changed files (6)
std/crypto/test.zig
@@ -14,9 +14,8 @@ pub fn assertEqualHash(comptime Hasher: var, comptime expected: []const u8, inpu
pub fn assertEqual(comptime expected: []const u8, input: []const u8) void {
var expected_bytes: [expected.len / 2]u8 = undefined;
for (expected_bytes) |*r, i| {
- *r = fmt.parseInt(u8, expected[2*i .. 2*i+2], 16) catch unreachable;
+ r.* = fmt.parseInt(u8, expected[2 * i .. 2 * i + 2], 16) catch unreachable;
}
debug.assert(mem.eql(u8, expected_bytes, input));
}
-
std/fmt/errol/index.zig
@@ -86,7 +86,7 @@ pub fn errol3(value: f64, buffer: []u8) FloatDecimal {
const data = enum3_data[i];
const digits = buffer[1..data.str.len + 1];
mem.copy(u8, digits, data.str);
- return FloatDecimal {
+ return FloatDecimal{
.digits = digits,
.exp = data.exp,
};
@@ -105,7 +105,6 @@ fn errol3u(val: f64, buffer: []u8) FloatDecimal {
return errolFixed(val, buffer);
}
-
// normalize the midpoint
const e = math.frexp(val).exponent;
@@ -137,11 +136,11 @@ fn errol3u(val: f64, buffer: []u8) FloatDecimal {
}
// compute boundaries
- var high = HP {
+ var high = HP{
.val = mid.val,
.off = mid.off + (fpnext(val) - val) * lten * ten / 2.0,
};
- var low = HP {
+ var low = HP{
.val = mid.val,
.off = mid.off + (fpprev(val) - val) * lten * ten / 2.0,
};
@@ -171,15 +170,12 @@ fn errol3u(val: f64, buffer: []u8) FloatDecimal {
var buf_index: usize = 1;
while (true) {
var hdig = u8(math.floor(high.val));
- if ((high.val == f64(hdig)) and (high.off < 0))
- hdig -= 1;
+ if ((high.val == f64(hdig)) and (high.off < 0)) hdig -= 1;
var ldig = u8(math.floor(low.val));
- if ((low.val == f64(ldig)) and (low.off < 0))
- ldig -= 1;
+ if ((low.val == f64(ldig)) and (low.off < 0)) ldig -= 1;
- if (ldig != hdig)
- break;
+ if (ldig != hdig) break;
buffer[buf_index] = hdig + '0';
buf_index += 1;
@@ -191,13 +187,12 @@ fn errol3u(val: f64, buffer: []u8) FloatDecimal {
const tmp = (high.val + low.val) / 2.0;
var mdig = u8(math.floor(tmp + 0.5));
- if ((f64(mdig) - tmp) == 0.5 and (mdig & 0x1) != 0)
- mdig -= 1;
+ if ((f64(mdig) - tmp) == 0.5 and (mdig & 0x1) != 0) mdig -= 1;
buffer[buf_index] = mdig + '0';
buf_index += 1;
- return FloatDecimal {
+ return FloatDecimal{
.digits = buffer[1..buf_index],
.exp = exp,
};
@@ -235,7 +230,7 @@ fn hpProd(in: &const HP, val: f64) HP {
const p = in.val * val;
const e = ((hi * hi2 - p) + lo * hi2 + hi * lo2) + lo * lo2;
- return HP {
+ return HP{
.val = p,
.off = in.off * val + e,
};
@@ -246,8 +241,8 @@ fn hpProd(in: &const HP, val: f64) HP {
/// @hi: The high bits.
/// @lo: The low bits.
fn split(val: f64, hi: &f64, lo: &f64) void {
- *hi = gethi(val);
- *lo = val - *hi;
+ hi.* = gethi(val);
+ lo.* = val - hi.*;
}
fn gethi(in: f64) f64 {
@@ -301,7 +296,6 @@ fn hpMul10(hp: &HP) void {
hpNormalize(hp);
}
-
/// Integer conversion algorithm, guaranteed correct, optimal, and best.
/// @val: The val.
/// @buf: The output buffer.
@@ -343,8 +337,7 @@ fn errolInt(val: f64, buffer: []u8) FloatDecimal {
}
const m64 = @truncate(u64, @divTrunc(mid, x));
- if (lf != hf)
- mi += 19;
+ if (lf != hf) mi += 19;
var buf_index = u64toa(m64, buffer) - 1;
@@ -354,7 +347,7 @@ fn errolInt(val: f64, buffer: []u8) FloatDecimal {
buf_index += 1;
}
- return FloatDecimal {
+ return FloatDecimal{
.digits = buffer[0..buf_index],
.exp = i32(buf_index) + mi,
};
@@ -396,25 +389,24 @@ fn errolFixed(val: f64, buffer: []u8) FloatDecimal {
buffer[j] = u8(mdig + '0');
j += 1;
- if(hdig != ldig or j > 50)
- break;
+ if (hdig != ldig or j > 50) break;
}
if (mid > 0.5) {
- buffer[j-1] += 1;
- } else if ((mid == 0.5) and (buffer[j-1] & 0x1) != 0) {
- buffer[j-1] += 1;
+ buffer[j - 1] += 1;
+ } else if ((mid == 0.5) and (buffer[j - 1] & 0x1) != 0) {
+ buffer[j - 1] += 1;
}
} else {
- while (buffer[j-1] == '0') {
- buffer[j-1] = 0;
+ while (buffer[j - 1] == '0') {
+ buffer[j - 1] = 0;
j -= 1;
}
}
buffer[j] = 0;
- return FloatDecimal {
+ return FloatDecimal{
.digits = buffer[0..j],
.exp = exp,
};
@@ -587,7 +579,7 @@ fn u64toa(value_param: u64, buffer: []u8) usize {
buffer[buf_index] = c_digits_lut[d8 + 1];
buf_index += 1;
} else {
- const a = u32(value / kTen16); // 1 to 1844
+ const a = u32(value / kTen16); // 1 to 1844
value %= kTen16;
if (a < 10) {
@@ -686,7 +678,6 @@ fn fpeint(from: f64) u128 {
return u128(1) << @truncate(u7, (bits >> 52) -% 1023);
}
-
/// Given two different integers with the same length in terms of the number
/// of decimal digits, index the digits from the right-most position starting
/// from zero, find the first index where the digits in the two integers
@@ -713,7 +704,6 @@ fn mismatch10(a: u64, b: u64) i32 {
a_copy /= 10;
b_copy /= 10;
- if (a_copy == b_copy)
- return i;
+ if (a_copy == b_copy) return i;
}
}
std/os/darwin.zig
@@ -10,33 +10,56 @@ pub const STDIN_FILENO = 0;
pub const STDOUT_FILENO = 1;
pub const STDERR_FILENO = 2;
-pub const PROT_NONE = 0x00; /// [MC2] no permissions
-pub const PROT_READ = 0x01; /// [MC2] pages can be read
-pub const PROT_WRITE = 0x02; /// [MC2] pages can be written
-pub const PROT_EXEC = 0x04; /// [MC2] pages can be executed
-
-pub const MAP_ANONYMOUS = 0x1000; /// allocated from memory, swap space
-pub const MAP_FILE = 0x0000; /// map from file (default)
-pub const MAP_FIXED = 0x0010; /// interpret addr exactly
-pub const MAP_HASSEMAPHORE = 0x0200; /// region may contain semaphores
-pub const MAP_PRIVATE = 0x0002; /// changes are private
-pub const MAP_SHARED = 0x0001; /// share changes
-pub const MAP_NOCACHE = 0x0400; /// don't cache pages for this mapping
-pub const MAP_NORESERVE = 0x0040; /// don't reserve needed swap area
+/// [MC2] no permissions
+pub const PROT_NONE = 0x00;
+/// [MC2] pages can be read
+pub const PROT_READ = 0x01;
+/// [MC2] pages can be written
+pub const PROT_WRITE = 0x02;
+/// [MC2] pages can be executed
+pub const PROT_EXEC = 0x04;
+
+/// allocated from memory, swap space
+pub const MAP_ANONYMOUS = 0x1000;
+/// map from file (default)
+pub const MAP_FILE = 0x0000;
+/// interpret addr exactly
+pub const MAP_FIXED = 0x0010;
+/// region may contain semaphores
+pub const MAP_HASSEMAPHORE = 0x0200;
+/// changes are private
+pub const MAP_PRIVATE = 0x0002;
+/// share changes
+pub const MAP_SHARED = 0x0001;
+/// don't cache pages for this mapping
+pub const MAP_NOCACHE = 0x0400;
+/// don't reserve needed swap area
+pub const MAP_NORESERVE = 0x0040;
pub const MAP_FAILED = @maxValue(usize);
-pub const WNOHANG = 0x00000001; /// [XSI] no hang in wait/no child to reap
-pub const WUNTRACED = 0x00000002; /// [XSI] notify on stop, untraced child
-
-pub const SA_ONSTACK = 0x0001; /// take signal on signal stack
-pub const SA_RESTART = 0x0002; /// restart system on signal return
-pub const SA_RESETHAND = 0x0004; /// reset to SIG_DFL when taking signal
-pub const SA_NOCLDSTOP = 0x0008; /// do not generate SIGCHLD on child stop
-pub const SA_NODEFER = 0x0010; /// don't mask the signal we're delivering
-pub const SA_NOCLDWAIT = 0x0020; /// don't keep zombies around
-pub const SA_SIGINFO = 0x0040; /// signal handler with SA_SIGINFO args
-pub const SA_USERTRAMP = 0x0100; /// do not bounce off kernel's sigtramp
-pub const SA_64REGSET = 0x0200; /// signal handler with SA_SIGINFO args with 64bit regs information
+/// [XSI] no hang in wait/no child to reap
+pub const WNOHANG = 0x00000001;
+/// [XSI] notify on stop, untraced child
+pub const WUNTRACED = 0x00000002;
+
+/// take signal on signal stack
+pub const SA_ONSTACK = 0x0001;
+/// restart system on signal return
+pub const SA_RESTART = 0x0002;
+/// reset to SIG_DFL when taking signal
+pub const SA_RESETHAND = 0x0004;
+/// do not generate SIGCHLD on child stop
+pub const SA_NOCLDSTOP = 0x0008;
+/// don't mask the signal we're delivering
+pub const SA_NODEFER = 0x0010;
+/// don't keep zombies around
+pub const SA_NOCLDWAIT = 0x0020;
+/// signal handler with SA_SIGINFO args
+pub const SA_SIGINFO = 0x0040;
+/// do not bounce off kernel's sigtramp
+pub const SA_USERTRAMP = 0x0100;
+/// signal handler with SA_SIGINFO args with 64bit regs information
+pub const SA_64REGSET = 0x0200;
pub const O_LARGEFILE = 0x0000;
pub const O_PATH = 0x0000;
@@ -46,20 +69,34 @@ pub const X_OK = 1;
pub const W_OK = 2;
pub const R_OK = 4;
-pub const O_RDONLY = 0x0000; /// open for reading only
-pub const O_WRONLY = 0x0001; /// open for writing only
-pub const O_RDWR = 0x0002; /// open for reading and writing
-pub const O_NONBLOCK = 0x0004; /// do not block on open or for data to become available
-pub const O_APPEND = 0x0008; /// append on each write
-pub const O_CREAT = 0x0200; /// create file if it does not exist
-pub const O_TRUNC = 0x0400; /// truncate size to 0
-pub const O_EXCL = 0x0800; /// error if O_CREAT and the file exists
-pub const O_SHLOCK = 0x0010; /// atomically obtain a shared lock
-pub const O_EXLOCK = 0x0020; /// atomically obtain an exclusive lock
-pub const O_NOFOLLOW = 0x0100; /// do not follow symlinks
-pub const O_SYMLINK = 0x200000; /// allow open of symlinks
-pub const O_EVTONLY = 0x8000; /// descriptor requested for event notifications only
-pub const O_CLOEXEC = 0x1000000; /// mark as close-on-exec
+/// open for reading only
+pub const O_RDONLY = 0x0000;
+/// open for writing only
+pub const O_WRONLY = 0x0001;
+/// open for reading and writing
+pub const O_RDWR = 0x0002;
+/// do not block on open or for data to become available
+pub const O_NONBLOCK = 0x0004;
+/// append on each write
+pub const O_APPEND = 0x0008;
+/// create file if it does not exist
+pub const O_CREAT = 0x0200;
+/// truncate size to 0
+pub const O_TRUNC = 0x0400;
+/// error if O_CREAT and the file exists
+pub const O_EXCL = 0x0800;
+/// atomically obtain a shared lock
+pub const O_SHLOCK = 0x0010;
+/// atomically obtain an exclusive lock
+pub const O_EXLOCK = 0x0020;
+/// do not follow symlinks
+pub const O_NOFOLLOW = 0x0100;
+/// allow open of symlinks
+pub const O_SYMLINK = 0x200000;
+/// descriptor requested for event notifications only
+pub const O_EVTONLY = 0x8000;
+/// mark as close-on-exec
+pub const O_CLOEXEC = 0x1000000;
pub const O_ACCMODE = 3;
pub const O_ALERT = 536870912;
@@ -87,52 +124,102 @@ pub const DT_LNK = 10;
pub const DT_SOCK = 12;
pub const DT_WHT = 14;
-pub const SIG_BLOCK = 1; /// block specified signal set
-pub const SIG_UNBLOCK = 2; /// unblock specified signal set
-pub const SIG_SETMASK = 3; /// set specified signal set
-
-pub const SIGHUP = 1; /// hangup
-pub const SIGINT = 2; /// interrupt
-pub const SIGQUIT = 3; /// quit
-pub const SIGILL = 4; /// illegal instruction (not reset when caught)
-pub const SIGTRAP = 5; /// trace trap (not reset when caught)
-pub const SIGABRT = 6; /// abort()
-pub const SIGPOLL = 7; /// pollable event ([XSR] generated, not supported)
-pub const SIGIOT = SIGABRT; /// compatibility
-pub const SIGEMT = 7; /// EMT instruction
-pub const SIGFPE = 8; /// floating point exception
-pub const SIGKILL = 9; /// kill (cannot be caught or ignored)
-pub const SIGBUS = 10; /// bus error
-pub const SIGSEGV = 11; /// segmentation violation
-pub const SIGSYS = 12; /// bad argument to system call
-pub const SIGPIPE = 13; /// write on a pipe with no one to read it
-pub const SIGALRM = 14; /// alarm clock
-pub const SIGTERM = 15; /// software termination signal from kill
-pub const SIGURG = 16; /// urgent condition on IO channel
-pub const SIGSTOP = 17; /// sendable stop signal not from tty
-pub const SIGTSTP = 18; /// stop signal from tty
-pub const SIGCONT = 19; /// continue a stopped process
-pub const SIGCHLD = 20; /// to parent on child stop or exit
-pub const SIGTTIN = 21; /// to readers pgrp upon background tty read
-pub const SIGTTOU = 22; /// like TTIN for output if (tp->t_local&LTOSTOP)
-pub const SIGIO = 23; /// input/output possible signal
-pub const SIGXCPU = 24; /// exceeded CPU time limit
-pub const SIGXFSZ = 25; /// exceeded file size limit
-pub const SIGVTALRM = 26; /// virtual time alarm
-pub const SIGPROF = 27; /// profiling time alarm
-pub const SIGWINCH = 28; /// window size changes
-pub const SIGINFO = 29; /// information request
-pub const SIGUSR1 = 30; /// user defined signal 1
-pub const SIGUSR2 = 31; /// user defined signal 2
-
-fn wstatus(x: i32) i32 { return x & 0o177; }
+/// block specified signal set
+pub const SIG_BLOCK = 1;
+/// unblock specified signal set
+pub const SIG_UNBLOCK = 2;
+/// set specified signal set
+pub const SIG_SETMASK = 3;
+
+/// hangup
+pub const SIGHUP = 1;
+/// interrupt
+pub const SIGINT = 2;
+/// quit
+pub const SIGQUIT = 3;
+/// illegal instruction (not reset when caught)
+pub const SIGILL = 4;
+/// trace trap (not reset when caught)
+pub const SIGTRAP = 5;
+/// abort()
+pub const SIGABRT = 6;
+/// pollable event ([XSR] generated, not supported)
+pub const SIGPOLL = 7;
+/// compatibility
+pub const SIGIOT = SIGABRT;
+/// EMT instruction
+pub const SIGEMT = 7;
+/// floating point exception
+pub const SIGFPE = 8;
+/// kill (cannot be caught or ignored)
+pub const SIGKILL = 9;
+/// bus error
+pub const SIGBUS = 10;
+/// segmentation violation
+pub const SIGSEGV = 11;
+/// bad argument to system call
+pub const SIGSYS = 12;
+/// write on a pipe with no one to read it
+pub const SIGPIPE = 13;
+/// alarm clock
+pub const SIGALRM = 14;
+/// software termination signal from kill
+pub const SIGTERM = 15;
+/// urgent condition on IO channel
+pub const SIGURG = 16;
+/// sendable stop signal not from tty
+pub const SIGSTOP = 17;
+/// stop signal from tty
+pub const SIGTSTP = 18;
+/// continue a stopped process
+pub const SIGCONT = 19;
+/// to parent on child stop or exit
+pub const SIGCHLD = 20;
+/// to readers pgrp upon background tty read
+pub const SIGTTIN = 21;
+/// like TTIN for output if (tp->t_local&LTOSTOP)
+pub const SIGTTOU = 22;
+/// input/output possible signal
+pub const SIGIO = 23;
+/// exceeded CPU time limit
+pub const SIGXCPU = 24;
+/// exceeded file size limit
+pub const SIGXFSZ = 25;
+/// virtual time alarm
+pub const SIGVTALRM = 26;
+/// profiling time alarm
+pub const SIGPROF = 27;
+/// window size changes
+pub const SIGWINCH = 28;
+/// information request
+pub const SIGINFO = 29;
+/// user defined signal 1
+pub const SIGUSR1 = 30;
+/// user defined signal 2
+pub const SIGUSR2 = 31;
+
+fn wstatus(x: i32) i32 {
+ return x & 0o177;
+}
const wstopped = 0o177;
-pub fn WEXITSTATUS(x: i32) i32 { return x >> 8; }
-pub fn WTERMSIG(x: i32) i32 { return wstatus(x); }
-pub fn WSTOPSIG(x: i32) i32 { return x >> 8; }
-pub fn WIFEXITED(x: i32) bool { return wstatus(x) == 0; }
-pub fn WIFSTOPPED(x: i32) bool { return wstatus(x) == wstopped and WSTOPSIG(x) != 0x13; }
-pub fn WIFSIGNALED(x: i32) bool { return wstatus(x) != wstopped and wstatus(x) != 0; }
+pub fn WEXITSTATUS(x: i32) i32 {
+ return x >> 8;
+}
+pub fn WTERMSIG(x: i32) i32 {
+ return wstatus(x);
+}
+pub fn WSTOPSIG(x: i32) i32 {
+ return x >> 8;
+}
+pub fn WIFEXITED(x: i32) bool {
+ return wstatus(x) == 0;
+}
+pub fn WIFSTOPPED(x: i32) bool {
+ return wstatus(x) == wstopped and WSTOPSIG(x) != 0x13;
+}
+pub fn WIFSIGNALED(x: i32) bool {
+ return wstatus(x) != wstopped and wstatus(x) != 0;
+}
/// Get the errno from a syscall return value, or 0 for no error.
pub fn getErrno(r: usize) usize {
@@ -184,11 +271,8 @@ pub fn write(fd: i32, buf: &const u8, nbyte: usize) usize {
return errnoWrap(c.write(fd, @ptrCast(&const c_void, buf), nbyte));
}
-pub fn mmap(address: ?&u8, length: usize, prot: usize, flags: u32, fd: i32,
- offset: isize) usize
-{
- const ptr_result = c.mmap(@ptrCast(&c_void, address), length,
- @bitCast(c_int, c_uint(prot)), @bitCast(c_int, c_uint(flags)), fd, offset);
+pub fn mmap(address: ?&u8, length: usize, prot: usize, flags: u32, fd: i32, offset: isize) usize {
+ const ptr_result = c.mmap(@ptrCast(&c_void, address), length, @bitCast(c_int, c_uint(prot)), @bitCast(c_int, c_uint(flags)), fd, offset);
const isize_result = @bitCast(isize, @ptrToInt(ptr_result));
return errnoWrap(isize_result);
}
@@ -202,7 +286,7 @@ pub fn unlink(path: &const u8) usize {
}
pub fn getcwd(buf: &u8, size: usize) usize {
- return if (c.getcwd(buf, size) == null) @bitCast(usize, -isize(*c._errno())) else 0;
+ return if (c.getcwd(buf, size) == null) @bitCast(usize, -isize(c._errno().*)) else 0;
}
pub fn waitpid(pid: i32, status: &i32, options: u32) usize {
@@ -223,7 +307,6 @@ pub fn pipe(fds: &[2]i32) usize {
return errnoWrap(c.pipe(@ptrCast(&c_int, fds)));
}
-
pub fn getdirentries64(fd: i32, buf_ptr: &u8, buf_len: usize, basep: &i64) usize {
return errnoWrap(@bitCast(isize, c.__getdirentries64(fd, buf_ptr, buf_len, basep)));
}
@@ -269,7 +352,7 @@ pub fn nanosleep(req: &const timespec, rem: ?&timespec) usize {
}
pub fn realpath(noalias filename: &const u8, noalias resolved_name: &u8) usize {
- return if (c.realpath(filename, resolved_name) == null) @bitCast(usize, -isize(*c._errno())) else 0;
+ return if (c.realpath(filename, resolved_name) == null) @bitCast(usize, -isize(c._errno().*)) else 0;
}
pub fn setreuid(ruid: u32, euid: u32) usize {
@@ -287,8 +370,8 @@ pub fn sigprocmask(flags: u32, noalias set: &const sigset_t, noalias oldset: ?&s
pub fn sigaction(sig: u5, noalias act: &const Sigaction, noalias oact: ?&Sigaction) usize {
assert(sig != SIGKILL);
assert(sig != SIGSTOP);
- var cact = c.Sigaction {
- .handler = @ptrCast(extern fn(c_int)void, act.handler),
+ var cact = c.Sigaction{
+ .handler = @ptrCast(extern fn(c_int) void, act.handler),
.sa_flags = @bitCast(c_int, act.flags),
.sa_mask = act.mask,
};
@@ -298,8 +381,8 @@ pub fn sigaction(sig: u5, noalias act: &const Sigaction, noalias oact: ?&Sigacti
return result;
}
if (oact) |old| {
- *old = Sigaction {
- .handler = @ptrCast(extern fn(i32)void, coact.handler),
+ old.* = Sigaction{
+ .handler = @ptrCast(extern fn(i32) void, coact.handler),
.flags = @bitCast(u32, coact.sa_flags),
.mask = coact.sa_mask,
};
@@ -319,23 +402,22 @@ pub const sockaddr = c.sockaddr;
/// Renamed from `sigaction` to `Sigaction` to avoid conflict with the syscall.
pub const Sigaction = struct {
- handler: extern fn(i32)void,
+ handler: extern fn(i32) void,
mask: sigset_t,
flags: u32,
};
pub fn sigaddset(set: &sigset_t, signo: u5) void {
- *set |= u32(1) << (signo - 1);
+ set.* |= u32(1) << (signo - 1);
}
/// Takes the return value from a syscall and formats it back in the way
/// that the kernel represents it to libc. Errno was a mistake, let's make
/// it go away forever.
fn errnoWrap(value: isize) usize {
- return @bitCast(usize, if (value == -1) -isize(*c._errno()) else value);
+ return @bitCast(usize, if (value == -1) -isize(c._errno().*) else value);
}
-
pub const timezone = c.timezone;
pub const timeval = c.timeval;
pub const mach_timebase_info_data = c.mach_timebase_info_data;
std/zig/ast.zig
@@ -40,7 +40,7 @@ pub const Tree = struct {
};
pub fn tokenLocationPtr(self: &Tree, start_index: usize, token: &const Token) Location {
- var loc = Location {
+ var loc = Location{
.line = 0,
.column = 0,
.line_start = start_index,
@@ -71,7 +71,6 @@ pub const Tree = struct {
pub fn dump(self: &Tree) void {
self.root_node.base.dump(0);
}
-
};
pub const Error = union(enum) {
@@ -95,7 +94,7 @@ pub const Error = union(enum) {
ExpectedCommaOrEnd: ExpectedCommaOrEnd,
pub fn render(self: &Error, tokens: &Tree.TokenList, stream: var) !void {
- switch (*self) {
+ switch (self.*) {
// TODO https://github.com/zig-lang/zig/issues/683
@TagType(Error).InvalidToken => |*x| return x.render(tokens, stream),
@TagType(Error).ExpectedVarDeclOrFn => |*x| return x.render(tokens, stream),
@@ -119,7 +118,7 @@ pub const Error = union(enum) {
}
pub fn loc(self: &Error) TokenIndex {
- switch (*self) {
+ switch (self.*) {
// TODO https://github.com/zig-lang/zig/issues/683
@TagType(Error).InvalidToken => |x| return x.token,
@TagType(Error).ExpectedVarDeclOrFn => |x| return x.token,
@@ -144,15 +143,12 @@ pub const Error = union(enum) {
pub const InvalidToken = SingleTokenError("Invalid token {}");
pub const ExpectedVarDeclOrFn = SingleTokenError("Expected variable declaration or function, found {}");
- pub const ExpectedAggregateKw = SingleTokenError("Expected " ++
- @tagName(Token.Id.Keyword_struct) ++ ", " ++ @tagName(Token.Id.Keyword_union) ++ ", or " ++
- @tagName(Token.Id.Keyword_enum) ++ ", found {}");
+ pub const ExpectedAggregateKw = SingleTokenError("Expected " ++ @tagName(Token.Id.Keyword_struct) ++ ", " ++ @tagName(Token.Id.Keyword_union) ++ ", or " ++ @tagName(Token.Id.Keyword_enum) ++ ", found {}");
pub const ExpectedEqOrSemi = SingleTokenError("Expected '=' or ';', found {}");
pub const ExpectedSemiOrLBrace = SingleTokenError("Expected ';' or '{{', found {}");
pub const ExpectedLabelable = SingleTokenError("Expected 'while', 'for', 'inline', 'suspend', or '{{', found {}");
pub const ExpectedInlinable = SingleTokenError("Expected 'while' or 'for', found {}");
- pub const ExpectedAsmOutputReturnOrType = SingleTokenError("Expected '->' or " ++
- @tagName(Token.Id.Identifier) ++ ", found {}");
+ pub const ExpectedAsmOutputReturnOrType = SingleTokenError("Expected '->' or " ++ @tagName(Token.Id.Identifier) ++ ", found {}");
pub const ExpectedSliceOrRBracket = SingleTokenError("Expected ']' or '..', found {}");
pub const ExpectedPrimaryExpr = SingleTokenError("Expected primary expression, found {}");
@@ -165,8 +161,7 @@ pub const Error = union(enum) {
node: &Node,
pub fn render(self: &ExpectedCall, tokens: &Tree.TokenList, stream: var) !void {
- return stream.print("expected " ++ @tagName(@TagType(Node.SuffixOp.Op).Call) ++ ", found {}",
- @tagName(self.node.id));
+ return stream.print("expected " ++ @tagName(@TagType(Node.SuffixOp.Op).Call) ++ ", found {}", @tagName(self.node.id));
}
};
@@ -174,8 +169,7 @@ pub const Error = union(enum) {
node: &Node,
pub fn render(self: &ExpectedCallOrFnProto, tokens: &Tree.TokenList, stream: var) !void {
- return stream.print("expected " ++ @tagName(@TagType(Node.SuffixOp.Op).Call) ++ " or " ++
- @tagName(Node.Id.FnProto) ++ ", found {}", @tagName(self.node.id));
+ return stream.print("expected " ++ @tagName(@TagType(Node.SuffixOp.Op).Call) ++ " or " ++ @tagName(Node.Id.FnProto) ++ ", found {}", @tagName(self.node.id));
}
};
@@ -445,17 +439,17 @@ pub const Node = struct {
pub fn iterate(self: &Root, index: usize) ?&Node {
if (index < self.decls.len) {
- return *self.decls.at(index);
+ return self.decls.at(index).*;
}
return null;
}
pub fn firstToken(self: &Root) TokenIndex {
- return if (self.decls.len == 0) self.eof_token else (*self.decls.at(0)).firstToken();
+ return if (self.decls.len == 0) self.eof_token else (self.decls.at(0).*).firstToken();
}
pub fn lastToken(self: &Root) TokenIndex {
- return if (self.decls.len == 0) self.eof_token else (*self.decls.at(self.decls.len - 1)).lastToken();
+ return if (self.decls.len == 0) self.eof_token else (self.decls.at(self.decls.len - 1).*).lastToken();
}
};
@@ -545,7 +539,7 @@ pub const Node = struct {
pub fn iterate(self: &ErrorSetDecl, index: usize) ?&Node {
var i = index;
- if (i < self.decls.len) return *self.decls.at(i);
+ if (i < self.decls.len) return self.decls.at(i).*;
i -= self.decls.len;
return null;
@@ -598,10 +592,10 @@ pub const Node = struct {
i -= 1;
},
InitArg.None,
- InitArg.Enum => { }
+ InitArg.Enum => {},
}
- if (i < self.fields_and_decls.len) return *self.fields_and_decls.at(i);
+ if (i < self.fields_and_decls.len) return self.fields_and_decls.at(i).*;
i -= self.fields_and_decls.len;
return null;
@@ -814,7 +808,7 @@ pub const Node = struct {
i -= 1;
}
- if (i < self.params.len) return *self.params.at(self.params.len - i - 1);
+ if (i < self.params.len) return self.params.at(self.params.len - i - 1).*;
i -= self.params.len;
if (self.align_expr) |align_expr| {
@@ -839,7 +833,6 @@ pub const Node = struct {
i -= 1;
}
-
return null;
}
@@ -934,7 +927,7 @@ pub const Node = struct {
pub fn iterate(self: &Block, index: usize) ?&Node {
var i = index;
- if (i < self.statements.len) return *self.statements.at(i);
+ if (i < self.statements.len) return self.statements.at(i).*;
i -= self.statements.len;
return null;
@@ -1119,6 +1112,7 @@ pub const Node = struct {
base: Node,
switch_token: TokenIndex,
expr: &Node,
+
/// these can be SwitchCase nodes or LineComment nodes
cases: CaseList,
rbrace: TokenIndex,
@@ -1131,7 +1125,7 @@ pub const Node = struct {
if (i < 1) return self.expr;
i -= 1;
- if (i < self.cases.len) return *self.cases.at(i);
+ if (i < self.cases.len) return self.cases.at(i).*;
i -= self.cases.len;
return null;
@@ -1157,7 +1151,7 @@ pub const Node = struct {
pub fn iterate(self: &SwitchCase, index: usize) ?&Node {
var i = index;
- if (i < self.items.len) return *self.items.at(i);
+ if (i < self.items.len) return self.items.at(i).*;
i -= self.items.len;
if (self.payload) |payload| {
@@ -1172,7 +1166,7 @@ pub const Node = struct {
}
pub fn firstToken(self: &SwitchCase) TokenIndex {
- return (*self.items.at(0)).firstToken();
+ return (self.items.at(0).*).firstToken();
}
pub fn lastToken(self: &SwitchCase) TokenIndex {
@@ -1616,7 +1610,7 @@ pub const Node = struct {
switch (self.op) {
@TagType(Op).Call => |*call_info| {
- if (i < call_info.params.len) return *call_info.params.at(i);
+ if (i < call_info.params.len) return call_info.params.at(i).*;
i -= call_info.params.len;
},
Op.ArrayAccess => |index_expr| {
@@ -1633,11 +1627,11 @@ pub const Node = struct {
}
},
Op.ArrayInitializer => |*exprs| {
- if (i < exprs.len) return *exprs.at(i);
+ if (i < exprs.len) return exprs.at(i).*;
i -= exprs.len;
},
Op.StructInitializer => |*fields| {
- if (i < fields.len) return *fields.at(i);
+ if (i < fields.len) return fields.at(i).*;
i -= fields.len;
},
}
@@ -1830,7 +1824,7 @@ pub const Node = struct {
pub fn iterate(self: &BuiltinCall, index: usize) ?&Node {
var i = index;
- if (i < self.params.len) return *self.params.at(i);
+ if (i < self.params.len) return self.params.at(i).*;
i -= self.params.len;
return null;
@@ -1873,11 +1867,11 @@ pub const Node = struct {
}
pub fn firstToken(self: &MultilineStringLiteral) TokenIndex {
- return *self.lines.at(0);
+ return self.lines.at(0).*;
}
pub fn lastToken(self: &MultilineStringLiteral) TokenIndex {
- return *self.lines.at(self.lines.len - 1);
+ return self.lines.at(self.lines.len - 1).*;
}
};
@@ -1974,7 +1968,7 @@ pub const Node = struct {
const Kind = union(enum) {
Variable: &Identifier,
- Return: &Node
+ Return: &Node,
};
pub fn iterate(self: &AsmOutput, index: usize) ?&Node {
@@ -1994,7 +1988,7 @@ pub const Node = struct {
Kind.Return => |return_type| {
if (i < 1) return return_type;
i -= 1;
- }
+ },
}
return null;
@@ -2059,13 +2053,13 @@ pub const Node = struct {
pub fn iterate(self: &Asm, index: usize) ?&Node {
var i = index;
- if (i < self.outputs.len) return &(*self.outputs.at(index)).base;
+ if (i < self.outputs.len) return &(self.outputs.at(index).*).base;
i -= self.outputs.len;
- if (i < self.inputs.len) return &(*self.inputs.at(index)).base;
+ if (i < self.inputs.len) return &(self.inputs.at(index).*).base;
i -= self.inputs.len;
- if (i < self.clobbers.len) return *self.clobbers.at(index);
+ if (i < self.clobbers.len) return self.clobbers.at(index).*;
i -= self.clobbers.len;
return null;
@@ -2159,11 +2153,11 @@ pub const Node = struct {
}
pub fn firstToken(self: &DocComment) TokenIndex {
- return *self.lines.at(0);
+ return self.lines.at(0).*;
}
pub fn lastToken(self: &DocComment) TokenIndex {
- return *self.lines.at(self.lines.len - 1);
+ return self.lines.at(self.lines.len - 1).*;
}
};
@@ -2192,4 +2186,3 @@ pub const Node = struct {
}
};
};
-
std/zig/parse.zig
@@ -17,15 +17,15 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
defer stack.deinit();
const arena = &tree_arena.allocator;
- const root_node = try arena.construct(ast.Node.Root {
- .base = ast.Node { .id = ast.Node.Id.Root },
+ const root_node = try arena.construct(ast.Node.Root{
+ .base = ast.Node{ .id = ast.Node.Id.Root },
.decls = ast.Node.Root.DeclList.init(arena),
.doc_comments = null,
// initialized when we get the eof token
.eof_token = undefined,
});
- var tree = ast.Tree {
+ var tree = ast.Tree{
.source = source,
.root_node = root_node,
.arena_allocator = tree_arena,
@@ -36,9 +36,8 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
var tokenizer = Tokenizer.init(tree.source);
while (true) {
const token_ptr = try tree.tokens.addOne();
- *token_ptr = tokenizer.next();
- if (token_ptr.id == Token.Id.Eof)
- break;
+ token_ptr.* = tokenizer.next();
+ if (token_ptr.id == Token.Id.Eof) break;
}
var tok_it = tree.tokens.iterator(0);
@@ -63,33 +62,27 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
Token.Id.Keyword_test => {
stack.append(State.TopLevel) catch unreachable;
- const block = try arena.construct(ast.Node.Block {
- .base = ast.Node {
- .id = ast.Node.Id.Block,
- },
+ const block = try arena.construct(ast.Node.Block{
+ .base = ast.Node{ .id = ast.Node.Id.Block },
.label = null,
.lbrace = undefined,
.statements = ast.Node.Block.StatementList.init(arena),
.rbrace = undefined,
});
- const test_node = try arena.construct(ast.Node.TestDecl {
- .base = ast.Node {
- .id = ast.Node.Id.TestDecl,
- },
+ const test_node = try arena.construct(ast.Node.TestDecl{
+ .base = ast.Node{ .id = ast.Node.Id.TestDecl },
.doc_comments = comments,
.test_token = token_index,
.name = undefined,
.body_node = &block.base,
});
try root_node.decls.push(&test_node.base);
- try stack.append(State { .Block = block });
- try stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.LBrace,
- .ptr = &block.rbrace,
- }
- });
- try stack.append(State { .StringLiteral = OptionalCtx { .Required = &test_node.name } });
+ try stack.append(State{ .Block = block });
+ try stack.append(State{ .ExpectTokenSave = ExpectTokenSave{
+ .id = Token.Id.LBrace,
+ .ptr = &block.rbrace,
+ } });
+ try stack.append(State{ .StringLiteral = OptionalCtx{ .Required = &test_node.name } });
continue;
},
Token.Id.Eof => {
@@ -99,29 +92,25 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
},
Token.Id.Keyword_pub => {
stack.append(State.TopLevel) catch unreachable;
- try stack.append(State {
- .TopLevelExtern = TopLevelDeclCtx {
- .decls = &root_node.decls,
- .visib_token = token_index,
- .extern_export_inline_token = null,
- .lib_name = null,
- .comments = comments,
- }
- });
+ try stack.append(State{ .TopLevelExtern = TopLevelDeclCtx{
+ .decls = &root_node.decls,
+ .visib_token = token_index,
+ .extern_export_inline_token = null,
+ .lib_name = null,
+ .comments = comments,
+ } });
continue;
},
Token.Id.Keyword_comptime => {
- const block = try arena.construct(ast.Node.Block {
- .base = ast.Node {.id = ast.Node.Id.Block },
+ const block = try arena.construct(ast.Node.Block{
+ .base = ast.Node{ .id = ast.Node.Id.Block },
.label = null,
.lbrace = undefined,
.statements = ast.Node.Block.StatementList.init(arena),
.rbrace = undefined,
});
- const node = try arena.construct(ast.Node.Comptime {
- .base = ast.Node {
- .id = ast.Node.Id.Comptime,
- },
+ const node = try arena.construct(ast.Node.Comptime{
+ .base = ast.Node{ .id = ast.Node.Id.Comptime },
.comptime_token = token_index,
.expr = &block.base,
.doc_comments = comments,
@@ -129,27 +118,23 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
try root_node.decls.push(&node.base);
stack.append(State.TopLevel) catch unreachable;
- try stack.append(State { .Block = block });
- try stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.LBrace,
- .ptr = &block.rbrace,
- }
- });
+ try stack.append(State{ .Block = block });
+ try stack.append(State{ .ExpectTokenSave = ExpectTokenSave{
+ .id = Token.Id.LBrace,
+ .ptr = &block.rbrace,
+ } });
continue;
},
else => {
putBackToken(&tok_it, &tree);
stack.append(State.TopLevel) catch unreachable;
- try stack.append(State {
- .TopLevelExtern = TopLevelDeclCtx {
- .decls = &root_node.decls,
- .visib_token = null,
- .extern_export_inline_token = null,
- .lib_name = null,
- .comments = comments,
- }
- });
+ try stack.append(State{ .TopLevelExtern = TopLevelDeclCtx{
+ .decls = &root_node.decls,
+ .visib_token = null,
+ .extern_export_inline_token = null,
+ .lib_name = null,
+ .comments = comments,
+ } });
continue;
},
}
@@ -159,41 +144,38 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const token_index = token.index;
const token_ptr = token.ptr;
switch (token_ptr.id) {
- Token.Id.Keyword_export, Token.Id.Keyword_inline => {
- stack.append(State {
- .TopLevelDecl = TopLevelDeclCtx {
- .decls = ctx.decls,
- .visib_token = ctx.visib_token,
- .extern_export_inline_token = AnnotatedToken {
- .index = token_index,
- .ptr = token_ptr,
- },
- .lib_name = null,
- .comments = ctx.comments,
+ Token.Id.Keyword_export,
+ Token.Id.Keyword_inline => {
+ stack.append(State{ .TopLevelDecl = TopLevelDeclCtx{
+ .decls = ctx.decls,
+ .visib_token = ctx.visib_token,
+ .extern_export_inline_token = AnnotatedToken{
+ .index = token_index,
+ .ptr = token_ptr,
},
- }) catch unreachable;
+ .lib_name = null,
+ .comments = ctx.comments,
+ } }) catch unreachable;
continue;
},
Token.Id.Keyword_extern => {
- stack.append(State {
- .TopLevelLibname = TopLevelDeclCtx {
- .decls = ctx.decls,
- .visib_token = ctx.visib_token,
- .extern_export_inline_token = AnnotatedToken {
- .index = token_index,
- .ptr = token_ptr,
- },
- .lib_name = null,
- .comments = ctx.comments,
+ stack.append(State{ .TopLevelLibname = TopLevelDeclCtx{
+ .decls = ctx.decls,
+ .visib_token = ctx.visib_token,
+ .extern_export_inline_token = AnnotatedToken{
+ .index = token_index,
+ .ptr = token_ptr,
},
- }) catch unreachable;
+ .lib_name = null,
+ .comments = ctx.comments,
+ } }) catch unreachable;
continue;
},
else => {
putBackToken(&tok_it, &tree);
- stack.append(State { .TopLevelDecl = ctx }) catch unreachable;
+ stack.append(State{ .TopLevelDecl = ctx }) catch unreachable;
continue;
- }
+ },
}
},
State.TopLevelLibname => |ctx| {
@@ -207,15 +189,13 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
};
};
- stack.append(State {
- .TopLevelDecl = TopLevelDeclCtx {
- .decls = ctx.decls,
- .visib_token = ctx.visib_token,
- .extern_export_inline_token = ctx.extern_export_inline_token,
- .lib_name = lib_name,
- .comments = ctx.comments,
- },
- }) catch unreachable;
+ stack.append(State{ .TopLevelDecl = TopLevelDeclCtx{
+ .decls = ctx.decls,
+ .visib_token = ctx.visib_token,
+ .extern_export_inline_token = ctx.extern_export_inline_token,
+ .lib_name = lib_name,
+ .comments = ctx.comments,
+ } }) catch unreachable;
continue;
},
State.TopLevelDecl => |ctx| {
@@ -225,14 +205,12 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
switch (token_ptr.id) {
Token.Id.Keyword_use => {
if (ctx.extern_export_inline_token) |annotated_token| {
- *(try tree.errors.addOne()) = Error {
- .InvalidToken = Error.InvalidToken { .token = annotated_token.index },
- };
+ ((try tree.errors.addOne())).* = Error{ .InvalidToken = Error.InvalidToken{ .token = annotated_token.index } };
return tree;
}
- const node = try arena.construct(ast.Node.Use {
- .base = ast.Node {.id = ast.Node.Id.Use },
+ const node = try arena.construct(ast.Node.Use{
+ .base = ast.Node{ .id = ast.Node.Id.Use },
.visib_token = ctx.visib_token,
.expr = undefined,
.semicolon_token = undefined,
@@ -240,44 +218,39 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
});
try ctx.decls.push(&node.base);
- stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.Semicolon,
- .ptr = &node.semicolon_token,
- }
- }) catch unreachable;
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.expr } });
+ stack.append(State{ .ExpectTokenSave = ExpectTokenSave{
+ .id = Token.Id.Semicolon,
+ .ptr = &node.semicolon_token,
+ } }) catch unreachable;
+ try stack.append(State{ .Expression = OptionalCtx{ .Required = &node.expr } });
continue;
},
- Token.Id.Keyword_var, Token.Id.Keyword_const => {
+ Token.Id.Keyword_var,
+ Token.Id.Keyword_const => {
if (ctx.extern_export_inline_token) |annotated_token| {
if (annotated_token.ptr.id == Token.Id.Keyword_inline) {
- *(try tree.errors.addOne()) = Error {
- .InvalidToken = Error.InvalidToken { .token = annotated_token.index },
- };
+ ((try tree.errors.addOne())).* = Error{ .InvalidToken = Error.InvalidToken{ .token = annotated_token.index } };
return tree;
}
}
- try stack.append(State {
- .VarDecl = VarDeclCtx {
- .comments = ctx.comments,
- .visib_token = ctx.visib_token,
- .lib_name = ctx.lib_name,
- .comptime_token = null,
- .extern_export_token = if (ctx.extern_export_inline_token) |at| at.index else null,
- .mut_token = token_index,
- .list = ctx.decls
- }
- });
- continue;
- },
- Token.Id.Keyword_fn, Token.Id.Keyword_nakedcc,
- Token.Id.Keyword_stdcallcc, Token.Id.Keyword_async => {
- const fn_proto = try arena.construct(ast.Node.FnProto {
- .base = ast.Node {
- .id = ast.Node.Id.FnProto,
- },
+ try stack.append(State{ .VarDecl = VarDeclCtx{
+ .comments = ctx.comments,
+ .visib_token = ctx.visib_token,
+ .lib_name = ctx.lib_name,
+ .comptime_token = null,
+ .extern_export_token = if (ctx.extern_export_inline_token) |at| at.index else null,
+ .mut_token = token_index,
+ .list = ctx.decls,
+ } });
+ continue;
+ },
+ Token.Id.Keyword_fn,
+ Token.Id.Keyword_nakedcc,
+ Token.Id.Keyword_stdcallcc,
+ Token.Id.Keyword_async => {
+ const fn_proto = try arena.construct(ast.Node.FnProto{
+ .base = ast.Node{ .id = ast.Node.Id.FnProto },
.doc_comments = ctx.comments,
.visib_token = ctx.visib_token,
.name_token = null,
@@ -293,36 +266,33 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
.align_expr = null,
});
try ctx.decls.push(&fn_proto.base);
- stack.append(State { .FnDef = fn_proto }) catch unreachable;
- try stack.append(State { .FnProto = fn_proto });
+ stack.append(State{ .FnDef = fn_proto }) catch unreachable;
+ try stack.append(State{ .FnProto = fn_proto });
switch (token_ptr.id) {
- Token.Id.Keyword_nakedcc, Token.Id.Keyword_stdcallcc => {
+ Token.Id.Keyword_nakedcc,
+ Token.Id.Keyword_stdcallcc => {
fn_proto.cc_token = token_index;
- try stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.Keyword_fn,
- .ptr = &fn_proto.fn_token,
- }
- });
+ try stack.append(State{ .ExpectTokenSave = ExpectTokenSave{
+ .id = Token.Id.Keyword_fn,
+ .ptr = &fn_proto.fn_token,
+ } });
continue;
},
Token.Id.Keyword_async => {
- const async_node = try arena.construct(ast.Node.AsyncAttribute {
- .base = ast.Node {.id = ast.Node.Id.AsyncAttribute },
+ const async_node = try arena.construct(ast.Node.AsyncAttribute{
+ .base = ast.Node{ .id = ast.Node.Id.AsyncAttribute },
.async_token = token_index,
.allocator_type = null,
.rangle_bracket = null,
});
fn_proto.async_attr = async_node;
- try stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.Keyword_fn,
- .ptr = &fn_proto.fn_token,
- }
- });
- try stack.append(State { .AsyncAllocator = async_node });
+ try stack.append(State{ .ExpectTokenSave = ExpectTokenSave{
+ .id = Token.Id.Keyword_fn,
+ .ptr = &fn_proto.fn_token,
+ } });
+ try stack.append(State{ .AsyncAllocator = async_node });
continue;
},
Token.Id.Keyword_fn => {
@@ -333,9 +303,7 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
}
},
else => {
- *(try tree.errors.addOne()) = Error {
- .ExpectedVarDeclOrFn = Error.ExpectedVarDeclOrFn { .token = token_index },
- };
+ ((try tree.errors.addOne())).* = Error{ .ExpectedVarDeclOrFn = Error.ExpectedVarDeclOrFn{ .token = token_index } };
return tree;
},
}
@@ -343,34 +311,30 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
State.TopLevelExternOrField => |ctx| {
if (eatToken(&tok_it, &tree, Token.Id.Identifier)) |identifier| {
std.debug.assert(ctx.container_decl.kind == ast.Node.ContainerDecl.Kind.Struct);
- const node = try arena.construct(ast.Node.StructField {
- .base = ast.Node {
- .id = ast.Node.Id.StructField,
- },
+ const node = try arena.construct(ast.Node.StructField{
+ .base = ast.Node{ .id = ast.Node.Id.StructField },
.doc_comments = ctx.comments,
.visib_token = ctx.visib_token,
.name_token = identifier,
.type_expr = undefined,
});
const node_ptr = try ctx.container_decl.fields_and_decls.addOne();
- *node_ptr = &node.base;
+ node_ptr.* = &node.base;
- stack.append(State { .FieldListCommaOrEnd = ctx.container_decl }) catch unreachable;
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.type_expr } });
- try stack.append(State { .ExpectToken = Token.Id.Colon });
+ stack.append(State{ .FieldListCommaOrEnd = ctx.container_decl }) catch unreachable;
+ try stack.append(State{ .Expression = OptionalCtx{ .Required = &node.type_expr } });
+ try stack.append(State{ .ExpectToken = Token.Id.Colon });
continue;
}
stack.append(State{ .ContainerDecl = ctx.container_decl }) catch unreachable;
- try stack.append(State {
- .TopLevelExtern = TopLevelDeclCtx {
- .decls = &ctx.container_decl.fields_and_decls,
- .visib_token = ctx.visib_token,
- .extern_export_inline_token = null,
- .lib_name = null,
- .comments = ctx.comments,
- }
- });
+ try stack.append(State{ .TopLevelExtern = TopLevelDeclCtx{
+ .decls = &ctx.container_decl.fields_and_decls,
+ .visib_token = ctx.visib_token,
+ .extern_export_inline_token = null,
+ .lib_name = null,
+ .comments = ctx.comments,
+ } });
continue;
},
@@ -382,7 +346,7 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
putBackToken(&tok_it, &tree);
continue;
}
- stack.append(State { .Expression = ctx }) catch unreachable;
+ stack.append(State{ .Expression = ctx }) catch unreachable;
continue;
},
@@ -390,8 +354,8 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const token = nextToken(&tok_it, &tree);
const token_index = token.index;
const token_ptr = token.ptr;
- const node = try arena.construct(ast.Node.ContainerDecl {
- .base = ast.Node {.id = ast.Node.Id.ContainerDecl },
+ const node = try arena.construct(ast.Node.ContainerDecl{
+ .base = ast.Node{ .id = ast.Node.Id.ContainerDecl },
.ltoken = ctx.ltoken,
.layout = ctx.layout,
.kind = switch (token_ptr.id) {
@@ -399,9 +363,7 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
Token.Id.Keyword_union => ast.Node.ContainerDecl.Kind.Union,
Token.Id.Keyword_enum => ast.Node.ContainerDecl.Kind.Enum,
else => {
- *(try tree.errors.addOne()) = Error {
- .ExpectedAggregateKw = Error.ExpectedAggregateKw { .token = token_index },
- };
+ ((try tree.errors.addOne())).* = Error{ .ExpectedAggregateKw = Error.ExpectedAggregateKw{ .token = token_index } };
return tree;
},
},
@@ -411,9 +373,9 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
});
ctx.opt_ctx.store(&node.base);
- stack.append(State { .ContainerDecl = node }) catch unreachable;
- try stack.append(State { .ExpectToken = Token.Id.LBrace });
- try stack.append(State { .ContainerInitArgStart = node });
+ stack.append(State{ .ContainerDecl = node }) catch unreachable;
+ try stack.append(State{ .ExpectToken = Token.Id.LBrace });
+ try stack.append(State{ .ContainerInitArgStart = node });
continue;
},
@@ -422,8 +384,8 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
continue;
}
- stack.append(State { .ExpectToken = Token.Id.RParen }) catch unreachable;
- try stack.append(State { .ContainerInitArg = container_decl });
+ stack.append(State{ .ExpectToken = Token.Id.RParen }) catch unreachable;
+ try stack.append(State{ .ContainerInitArg = container_decl });
continue;
},
@@ -433,23 +395,21 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const init_arg_token_ptr = init_arg_token.ptr;
switch (init_arg_token_ptr.id) {
Token.Id.Keyword_enum => {
- container_decl.init_arg_expr = ast.Node.ContainerDecl.InitArg {.Enum = null};
+ container_decl.init_arg_expr = ast.Node.ContainerDecl.InitArg{ .Enum = null };
const lparen_tok = nextToken(&tok_it, &tree);
const lparen_tok_index = lparen_tok.index;
const lparen_tok_ptr = lparen_tok.ptr;
if (lparen_tok_ptr.id == Token.Id.LParen) {
- try stack.append(State { .ExpectToken = Token.Id.RParen } );
- try stack.append(State { .Expression = OptionalCtx {
- .RequiredNull = &container_decl.init_arg_expr.Enum,
- } });
+ try stack.append(State{ .ExpectToken = Token.Id.RParen });
+ try stack.append(State{ .Expression = OptionalCtx{ .RequiredNull = &container_decl.init_arg_expr.Enum } });
} else {
putBackToken(&tok_it, &tree);
}
},
else => {
putBackToken(&tok_it, &tree);
- container_decl.init_arg_expr = ast.Node.ContainerDecl.InitArg { .Type = undefined };
- stack.append(State { .Expression = OptionalCtx { .Required = &container_decl.init_arg_expr.Type } }) catch unreachable;
+ container_decl.init_arg_expr = ast.Node.ContainerDecl.InitArg{ .Type = undefined };
+ stack.append(State{ .Expression = OptionalCtx{ .Required = &container_decl.init_arg_expr.Type } }) catch unreachable;
},
}
continue;
@@ -468,26 +428,24 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
Token.Id.Identifier => {
switch (container_decl.kind) {
ast.Node.ContainerDecl.Kind.Struct => {
- const node = try arena.construct(ast.Node.StructField {
- .base = ast.Node {
- .id = ast.Node.Id.StructField,
- },
+ const node = try arena.construct(ast.Node.StructField{
+ .base = ast.Node{ .id = ast.Node.Id.StructField },
.doc_comments = comments,
.visib_token = null,
.name_token = token_index,
.type_expr = undefined,
});
const node_ptr = try container_decl.fields_and_decls.addOne();
- *node_ptr = &node.base;
+ node_ptr.* = &node.base;
- try stack.append(State { .FieldListCommaOrEnd = container_decl });
- try stack.append(State { .TypeExprBegin = OptionalCtx { .Required = &node.type_expr } });
- try stack.append(State { .ExpectToken = Token.Id.Colon });
+ try stack.append(State{ .FieldListCommaOrEnd = container_decl });
+ try stack.append(State{ .TypeExprBegin = OptionalCtx{ .Required = &node.type_expr } });
+ try stack.append(State{ .ExpectToken = Token.Id.Colon });
continue;
},
ast.Node.ContainerDecl.Kind.Union => {
- const node = try arena.construct(ast.Node.UnionTag {
- .base = ast.Node {.id = ast.Node.Id.UnionTag },
+ const node = try arena.construct(ast.Node.UnionTag{
+ .base = ast.Node{ .id = ast.Node.Id.UnionTag },
.name_token = token_index,
.type_expr = null,
.value_expr = null,
@@ -495,24 +453,24 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
});
try container_decl.fields_and_decls.push(&node.base);
- stack.append(State { .FieldListCommaOrEnd = container_decl }) catch unreachable;
- try stack.append(State { .FieldInitValue = OptionalCtx { .RequiredNull = &node.value_expr } });
- try stack.append(State { .TypeExprBegin = OptionalCtx { .RequiredNull = &node.type_expr } });
- try stack.append(State { .IfToken = Token.Id.Colon });
+ stack.append(State{ .FieldListCommaOrEnd = container_decl }) catch unreachable;
+ try stack.append(State{ .FieldInitValue = OptionalCtx{ .RequiredNull = &node.value_expr } });
+ try stack.append(State{ .TypeExprBegin = OptionalCtx{ .RequiredNull = &node.type_expr } });
+ try stack.append(State{ .IfToken = Token.Id.Colon });
continue;
},
ast.Node.ContainerDecl.Kind.Enum => {
- const node = try arena.construct(ast.Node.EnumTag {
- .base = ast.Node { .id = ast.Node.Id.EnumTag },
+ const node = try arena.construct(ast.Node.EnumTag{
+ .base = ast.Node{ .id = ast.Node.Id.EnumTag },
.name_token = token_index,
.value = null,
.doc_comments = comments,
});
try container_decl.fields_and_decls.push(&node.base);
- stack.append(State { .FieldListCommaOrEnd = container_decl }) catch unreachable;
- try stack.append(State { .Expression = OptionalCtx { .RequiredNull = &node.value } });
- try stack.append(State { .IfToken = Token.Id.Equal });
+ stack.append(State{ .FieldListCommaOrEnd = container_decl }) catch unreachable;
+ try stack.append(State{ .Expression = OptionalCtx{ .RequiredNull = &node.value } });
+ try stack.append(State{ .IfToken = Token.Id.Equal });
continue;
},
}
@@ -520,48 +478,40 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
Token.Id.Keyword_pub => {
switch (container_decl.kind) {
ast.Node.ContainerDecl.Kind.Struct => {
- try stack.append(State {
- .TopLevelExternOrField = TopLevelExternOrFieldCtx {
- .visib_token = token_index,
- .container_decl = container_decl,
- .comments = comments,
- }
- });
+ try stack.append(State{ .TopLevelExternOrField = TopLevelExternOrFieldCtx{
+ .visib_token = token_index,
+ .container_decl = container_decl,
+ .comments = comments,
+ } });
continue;
},
else => {
stack.append(State{ .ContainerDecl = container_decl }) catch unreachable;
- try stack.append(State {
- .TopLevelExtern = TopLevelDeclCtx {
- .decls = &container_decl.fields_and_decls,
- .visib_token = token_index,
- .extern_export_inline_token = null,
- .lib_name = null,
- .comments = comments,
- }
- });
+ try stack.append(State{ .TopLevelExtern = TopLevelDeclCtx{
+ .decls = &container_decl.fields_and_decls,
+ .visib_token = token_index,
+ .extern_export_inline_token = null,
+ .lib_name = null,
+ .comments = comments,
+ } });
continue;
- }
+ },
}
},
Token.Id.Keyword_export => {
stack.append(State{ .ContainerDecl = container_decl }) catch unreachable;
- try stack.append(State {
- .TopLevelExtern = TopLevelDeclCtx {
- .decls = &container_decl.fields_and_decls,
- .visib_token = token_index,
- .extern_export_inline_token = null,
- .lib_name = null,
- .comments = comments,
- }
- });
+ try stack.append(State{ .TopLevelExtern = TopLevelDeclCtx{
+ .decls = &container_decl.fields_and_decls,
+ .visib_token = token_index,
+ .extern_export_inline_token = null,
+ .lib_name = null,
+ .comments = comments,
+ } });
continue;
},
Token.Id.RBrace => {
if (comments != null) {
- *(try tree.errors.addOne()) = Error {
- .UnattachedDocComment = Error.UnattachedDocComment { .token = token_index },
- };
+ ((try tree.errors.addOne())).* = Error{ .UnattachedDocComment = Error.UnattachedDocComment{ .token = token_index } };
return tree;
}
container_decl.rbrace_token = token_index;
@@ -570,26 +520,21 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
else => {
putBackToken(&tok_it, &tree);
stack.append(State{ .ContainerDecl = container_decl }) catch unreachable;
- try stack.append(State {
- .TopLevelExtern = TopLevelDeclCtx {
- .decls = &container_decl.fields_and_decls,
- .visib_token = null,
- .extern_export_inline_token = null,
- .lib_name = null,
- .comments = comments,
- }
- });
+ try stack.append(State{ .TopLevelExtern = TopLevelDeclCtx{
+ .decls = &container_decl.fields_and_decls,
+ .visib_token = null,
+ .extern_export_inline_token = null,
+ .lib_name = null,
+ .comments = comments,
+ } });
continue;
- }
+ },
}
},
-
State.VarDecl => |ctx| {
- const var_decl = try arena.construct(ast.Node.VarDecl {
- .base = ast.Node {
- .id = ast.Node.Id.VarDecl,
- },
+ const var_decl = try arena.construct(ast.Node.VarDecl{
+ .base = ast.Node{ .id = ast.Node.Id.VarDecl },
.doc_comments = ctx.comments,
.visib_token = ctx.visib_token,
.mut_token = ctx.mut_token,
@@ -606,27 +551,25 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
});
try ctx.list.push(&var_decl.base);
- try stack.append(State { .VarDeclAlign = var_decl });
- try stack.append(State { .TypeExprBegin = OptionalCtx { .RequiredNull = &var_decl.type_node} });
- try stack.append(State { .IfToken = Token.Id.Colon });
- try stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.Identifier,
- .ptr = &var_decl.name_token,
- }
- });
+ try stack.append(State{ .VarDeclAlign = var_decl });
+ try stack.append(State{ .TypeExprBegin = OptionalCtx{ .RequiredNull = &var_decl.type_node } });
+ try stack.append(State{ .IfToken = Token.Id.Colon });
+ try stack.append(State{ .ExpectTokenSave = ExpectTokenSave{
+ .id = Token.Id.Identifier,
+ .ptr = &var_decl.name_token,
+ } });
continue;
},
State.VarDeclAlign => |var_decl| {
- try stack.append(State { .VarDeclEq = var_decl });
+ try stack.append(State{ .VarDeclEq = var_decl });
const next_token = nextToken(&tok_it, &tree);
const next_token_index = next_token.index;
const next_token_ptr = next_token.ptr;
if (next_token_ptr.id == Token.Id.Keyword_align) {
- try stack.append(State { .ExpectToken = Token.Id.RParen });
- try stack.append(State { .Expression = OptionalCtx { .RequiredNull = &var_decl.align_node} });
- try stack.append(State { .ExpectToken = Token.Id.LParen });
+ try stack.append(State{ .ExpectToken = Token.Id.RParen });
+ try stack.append(State{ .Expression = OptionalCtx{ .RequiredNull = &var_decl.align_node } });
+ try stack.append(State{ .ExpectToken = Token.Id.LParen });
continue;
}
@@ -640,8 +583,8 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
switch (token_ptr.id) {
Token.Id.Equal => {
var_decl.eq_token = token_index;
- stack.append(State { .VarDeclSemiColon = var_decl }) catch unreachable;
- try stack.append(State { .Expression = OptionalCtx { .RequiredNull = &var_decl.init_node } });
+ stack.append(State{ .VarDeclSemiColon = var_decl }) catch unreachable;
+ try stack.append(State{ .Expression = OptionalCtx{ .RequiredNull = &var_decl.init_node } });
continue;
},
Token.Id.Semicolon => {
@@ -649,11 +592,9 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
continue;
},
else => {
- *(try tree.errors.addOne()) = Error {
- .ExpectedEqOrSemi = Error.ExpectedEqOrSemi { .token = token_index },
- };
+ ((try tree.errors.addOne())).* = Error{ .ExpectedEqOrSemi = Error.ExpectedEqOrSemi{ .token = token_index } };
return tree;
- }
+ },
}
},
@@ -661,12 +602,10 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const semicolon_token = nextToken(&tok_it, &tree);
if (semicolon_token.ptr.id != Token.Id.Semicolon) {
- *(try tree.errors.addOne()) = Error {
- .ExpectedToken = Error.ExpectedToken {
- .token = semicolon_token.index,
- .expected_id = Token.Id.Semicolon,
- },
- };
+ ((try tree.errors.addOne())).* = Error{ .ExpectedToken = Error.ExpectedToken{
+ .token = semicolon_token.index,
+ .expected_id = Token.Id.Semicolon,
+ } };
return tree;
}
@@ -686,32 +625,30 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const token = nextToken(&tok_it, &tree);
const token_index = token.index;
const token_ptr = token.ptr;
- switch(token_ptr.id) {
+ switch (token_ptr.id) {
Token.Id.LBrace => {
- const block = try arena.construct(ast.Node.Block {
- .base = ast.Node { .id = ast.Node.Id.Block },
+ const block = try arena.construct(ast.Node.Block{
+ .base = ast.Node{ .id = ast.Node.Id.Block },
.label = null,
.lbrace = token_index,
.statements = ast.Node.Block.StatementList.init(arena),
.rbrace = undefined,
});
fn_proto.body_node = &block.base;
- stack.append(State { .Block = block }) catch unreachable;
+ stack.append(State{ .Block = block }) catch unreachable;
continue;
},
Token.Id.Semicolon => continue,
else => {
- *(try tree.errors.addOne()) = Error {
- .ExpectedSemiOrLBrace = Error.ExpectedSemiOrLBrace { .token = token_index },
- };
+ ((try tree.errors.addOne())).* = Error{ .ExpectedSemiOrLBrace = Error.ExpectedSemiOrLBrace{ .token = token_index } };
return tree;
},
}
},
State.FnProto => |fn_proto| {
- stack.append(State { .FnProtoAlign = fn_proto }) catch unreachable;
- try stack.append(State { .ParamDecl = fn_proto });
- try stack.append(State { .ExpectToken = Token.Id.LParen });
+ stack.append(State{ .FnProtoAlign = fn_proto }) catch unreachable;
+ try stack.append(State{ .ParamDecl = fn_proto });
+ try stack.append(State{ .ExpectToken = Token.Id.LParen });
if (eatToken(&tok_it, &tree, Token.Id.Identifier)) |name_token| {
fn_proto.name_token = name_token;
@@ -719,12 +656,12 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
continue;
},
State.FnProtoAlign => |fn_proto| {
- stack.append(State { .FnProtoReturnType = fn_proto }) catch unreachable;
+ stack.append(State{ .FnProtoReturnType = fn_proto }) catch unreachable;
if (eatToken(&tok_it, &tree, Token.Id.Keyword_align)) |align_token| {
- try stack.append(State { .ExpectToken = Token.Id.RParen });
- try stack.append(State { .Expression = OptionalCtx { .RequiredNull = &fn_proto.align_expr } });
- try stack.append(State { .ExpectToken = Token.Id.LParen });
+ try stack.append(State{ .ExpectToken = Token.Id.RParen });
+ try stack.append(State{ .Expression = OptionalCtx{ .RequiredNull = &fn_proto.align_expr } });
+ try stack.append(State{ .ExpectToken = Token.Id.LParen });
}
continue;
},
@@ -734,42 +671,37 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const token_ptr = token.ptr;
switch (token_ptr.id) {
Token.Id.Bang => {
- fn_proto.return_type = ast.Node.FnProto.ReturnType { .InferErrorSet = undefined };
- stack.append(State {
- .TypeExprBegin = OptionalCtx { .Required = &fn_proto.return_type.InferErrorSet },
- }) catch unreachable;
+ fn_proto.return_type = ast.Node.FnProto.ReturnType{ .InferErrorSet = undefined };
+ stack.append(State{ .TypeExprBegin = OptionalCtx{ .Required = &fn_proto.return_type.InferErrorSet } }) catch unreachable;
continue;
},
else => {
// TODO: this is a special case. Remove this when #760 is fixed
if (token_ptr.id == Token.Id.Keyword_error) {
if ((??tok_it.peek()).id == Token.Id.LBrace) {
- const error_type_node = try arena.construct(ast.Node.ErrorType {
- .base = ast.Node { .id = ast.Node.Id.ErrorType },
+ const error_type_node = try arena.construct(ast.Node.ErrorType{
+ .base = ast.Node{ .id = ast.Node.Id.ErrorType },
.token = token_index,
});
- fn_proto.return_type = ast.Node.FnProto.ReturnType {
- .Explicit = &error_type_node.base,
- };
+ fn_proto.return_type = ast.Node.FnProto.ReturnType{ .Explicit = &error_type_node.base };
continue;
}
}
putBackToken(&tok_it, &tree);
- fn_proto.return_type = ast.Node.FnProto.ReturnType { .Explicit = undefined };
- stack.append(State { .TypeExprBegin = OptionalCtx { .Required = &fn_proto.return_type.Explicit }, }) catch unreachable;
+ fn_proto.return_type = ast.Node.FnProto.ReturnType{ .Explicit = undefined };
+ stack.append(State{ .TypeExprBegin = OptionalCtx{ .Required = &fn_proto.return_type.Explicit } }) catch unreachable;
continue;
},
}
},
-
State.ParamDecl => |fn_proto| {
if (eatToken(&tok_it, &tree, Token.Id.RParen)) |_| {
continue;
}
- const param_decl = try arena.construct(ast.Node.ParamDecl {
- .base = ast.Node {.id = ast.Node.Id.ParamDecl },
+ const param_decl = try arena.construct(ast.Node.ParamDecl{
+ .base = ast.Node{ .id = ast.Node.Id.ParamDecl },
.comptime_token = null,
.noalias_token = null,
.name_token = null,
@@ -778,14 +710,12 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
});
            try fn_proto.params.push(&param_decl.base);
- stack.append(State {
- .ParamDeclEnd = ParamDeclEndCtx {
- .param_decl = param_decl,
- .fn_proto = fn_proto,
- }
- }) catch unreachable;
- try stack.append(State { .ParamDeclName = param_decl });
- try stack.append(State { .ParamDeclAliasOrComptime = param_decl });
+ stack.append(State{ .ParamDeclEnd = ParamDeclEndCtx{
+ .param_decl = param_decl,
+ .fn_proto = fn_proto,
+ } }) catch unreachable;
+ try stack.append(State{ .ParamDeclName = param_decl });
+ try stack.append(State{ .ParamDeclAliasOrComptime = param_decl });
continue;
},
State.ParamDeclAliasOrComptime => |param_decl| {
@@ -811,21 +741,19 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
State.ParamDeclEnd => |ctx| {
if (eatToken(&tok_it, &tree, Token.Id.Ellipsis3)) |ellipsis3| {
ctx.param_decl.var_args_token = ellipsis3;
- stack.append(State { .ExpectToken = Token.Id.RParen }) catch unreachable;
+ stack.append(State{ .ExpectToken = Token.Id.RParen }) catch unreachable;
continue;
}
- try stack.append(State { .ParamDeclComma = ctx.fn_proto });
- try stack.append(State {
- .TypeExprBegin = OptionalCtx { .Required = &ctx.param_decl.type_node }
- });
+ try stack.append(State{ .ParamDeclComma = ctx.fn_proto });
+ try stack.append(State{ .TypeExprBegin = OptionalCtx{ .Required = &ctx.param_decl.type_node } });
continue;
},
State.ParamDeclComma => |fn_proto| {
switch (expectCommaOrEnd(&tok_it, &tree, Token.Id.RParen)) {
ExpectCommaOrEndResult.end_token => |t| {
if (t == null) {
- stack.append(State { .ParamDecl = fn_proto }) catch unreachable;
+ stack.append(State{ .ParamDecl = fn_proto }) catch unreachable;
}
continue;
},
@@ -838,12 +766,10 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
State.MaybeLabeledExpression => |ctx| {
if (eatToken(&tok_it, &tree, Token.Id.Colon)) |_| {
- stack.append(State {
- .LabeledExpression = LabelCtx {
- .label = ctx.label,
- .opt_ctx = ctx.opt_ctx,
- }
- }) catch unreachable;
+ stack.append(State{ .LabeledExpression = LabelCtx{
+ .label = ctx.label,
+ .opt_ctx = ctx.opt_ctx,
+ } }) catch unreachable;
continue;
}
@@ -856,69 +782,59 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const token_ptr = token.ptr;
switch (token_ptr.id) {
Token.Id.LBrace => {
- const block = try arena.construct(ast.Node.Block {
- .base = ast.Node {.id = ast.Node.Id.Block},
+ const block = try arena.construct(ast.Node.Block{
+ .base = ast.Node{ .id = ast.Node.Id.Block },
.label = ctx.label,
.lbrace = token_index,
.statements = ast.Node.Block.StatementList.init(arena),
.rbrace = undefined,
});
ctx.opt_ctx.store(&block.base);
- stack.append(State { .Block = block }) catch unreachable;
+ stack.append(State{ .Block = block }) catch unreachable;
continue;
},
Token.Id.Keyword_while => {
- stack.append(State {
- .While = LoopCtx {
- .label = ctx.label,
- .inline_token = null,
- .loop_token = token_index,
- .opt_ctx = ctx.opt_ctx.toRequired(),
- }
- }) catch unreachable;
+ stack.append(State{ .While = LoopCtx{
+ .label = ctx.label,
+ .inline_token = null,
+ .loop_token = token_index,
+ .opt_ctx = ctx.opt_ctx.toRequired(),
+ } }) catch unreachable;
continue;
},
Token.Id.Keyword_for => {
- stack.append(State {
- .For = LoopCtx {
- .label = ctx.label,
- .inline_token = null,
- .loop_token = token_index,
- .opt_ctx = ctx.opt_ctx.toRequired(),
- }
- }) catch unreachable;
+ stack.append(State{ .For = LoopCtx{
+ .label = ctx.label,
+ .inline_token = null,
+ .loop_token = token_index,
+ .opt_ctx = ctx.opt_ctx.toRequired(),
+ } }) catch unreachable;
continue;
},
Token.Id.Keyword_suspend => {
- const node = try arena.construct(ast.Node.Suspend {
- .base = ast.Node {
- .id = ast.Node.Id.Suspend,
- },
+ const node = try arena.construct(ast.Node.Suspend{
+ .base = ast.Node{ .id = ast.Node.Id.Suspend },
.label = ctx.label,
.suspend_token = token_index,
.payload = null,
.body = null,
});
ctx.opt_ctx.store(&node.base);
- stack.append(State { .SuspendBody = node }) catch unreachable;
- try stack.append(State { .Payload = OptionalCtx { .Optional = &node.payload } });
+ stack.append(State{ .SuspendBody = node }) catch unreachable;
+ try stack.append(State{ .Payload = OptionalCtx{ .Optional = &node.payload } });
continue;
},
Token.Id.Keyword_inline => {
- stack.append(State {
- .Inline = InlineCtx {
- .label = ctx.label,
- .inline_token = token_index,
- .opt_ctx = ctx.opt_ctx.toRequired(),
- }
- }) catch unreachable;
+ stack.append(State{ .Inline = InlineCtx{
+ .label = ctx.label,
+ .inline_token = token_index,
+ .opt_ctx = ctx.opt_ctx.toRequired(),
+ } }) catch unreachable;
continue;
},
else => {
if (ctx.opt_ctx != OptionalCtx.Optional) {
- *(try tree.errors.addOne()) = Error {
- .ExpectedLabelable = Error.ExpectedLabelable { .token = token_index },
- };
+ ((try tree.errors.addOne())).* = Error{ .ExpectedLabelable = Error.ExpectedLabelable{ .token = token_index } };
return tree;
}
@@ -933,32 +849,26 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const token_ptr = token.ptr;
switch (token_ptr.id) {
Token.Id.Keyword_while => {
- stack.append(State {
- .While = LoopCtx {
- .inline_token = ctx.inline_token,
- .label = ctx.label,
- .loop_token = token_index,
- .opt_ctx = ctx.opt_ctx.toRequired(),
- }
- }) catch unreachable;
+ stack.append(State{ .While = LoopCtx{
+ .inline_token = ctx.inline_token,
+ .label = ctx.label,
+ .loop_token = token_index,
+ .opt_ctx = ctx.opt_ctx.toRequired(),
+ } }) catch unreachable;
continue;
},
Token.Id.Keyword_for => {
- stack.append(State {
- .For = LoopCtx {
- .inline_token = ctx.inline_token,
- .label = ctx.label,
- .loop_token = token_index,
- .opt_ctx = ctx.opt_ctx.toRequired(),
- }
- }) catch unreachable;
+ stack.append(State{ .For = LoopCtx{
+ .inline_token = ctx.inline_token,
+ .label = ctx.label,
+ .loop_token = token_index,
+ .opt_ctx = ctx.opt_ctx.toRequired(),
+ } }) catch unreachable;
continue;
},
else => {
if (ctx.opt_ctx != OptionalCtx.Optional) {
- *(try tree.errors.addOne()) = Error {
- .ExpectedInlinable = Error.ExpectedInlinable { .token = token_index },
- };
+ ((try tree.errors.addOne())).* = Error{ .ExpectedInlinable = Error.ExpectedInlinable{ .token = token_index } };
return tree;
}
@@ -968,8 +878,8 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
}
},
State.While => |ctx| {
- const node = try arena.construct(ast.Node.While {
- .base = ast.Node {.id = ast.Node.Id.While },
+ const node = try arena.construct(ast.Node.While{
+ .base = ast.Node{ .id = ast.Node.Id.While },
.label = ctx.label,
.inline_token = ctx.inline_token,
.while_token = ctx.loop_token,
@@ -980,25 +890,25 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
.@"else" = null,
});
ctx.opt_ctx.store(&node.base);
- stack.append(State { .Else = &node.@"else" }) catch unreachable;
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.body } });
- try stack.append(State { .WhileContinueExpr = &node.continue_expr });
- try stack.append(State { .IfToken = Token.Id.Colon });
- try stack.append(State { .PointerPayload = OptionalCtx { .Optional = &node.payload } });
- try stack.append(State { .ExpectToken = Token.Id.RParen });
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.condition } });
- try stack.append(State { .ExpectToken = Token.Id.LParen });
+ stack.append(State{ .Else = &node.@"else" }) catch unreachable;
+ try stack.append(State{ .Expression = OptionalCtx{ .Required = &node.body } });
+ try stack.append(State{ .WhileContinueExpr = &node.continue_expr });
+ try stack.append(State{ .IfToken = Token.Id.Colon });
+ try stack.append(State{ .PointerPayload = OptionalCtx{ .Optional = &node.payload } });
+ try stack.append(State{ .ExpectToken = Token.Id.RParen });
+ try stack.append(State{ .Expression = OptionalCtx{ .Required = &node.condition } });
+ try stack.append(State{ .ExpectToken = Token.Id.LParen });
continue;
},
State.WhileContinueExpr => |dest| {
- stack.append(State { .ExpectToken = Token.Id.RParen }) catch unreachable;
- try stack.append(State { .AssignmentExpressionBegin = OptionalCtx { .RequiredNull = dest } });
- try stack.append(State { .ExpectToken = Token.Id.LParen });
+ stack.append(State{ .ExpectToken = Token.Id.RParen }) catch unreachable;
+ try stack.append(State{ .AssignmentExpressionBegin = OptionalCtx{ .RequiredNull = dest } });
+ try stack.append(State{ .ExpectToken = Token.Id.LParen });
continue;
},
State.For => |ctx| {
- const node = try arena.construct(ast.Node.For {
- .base = ast.Node {.id = ast.Node.Id.For },
+ const node = try arena.construct(ast.Node.For{
+ .base = ast.Node{ .id = ast.Node.Id.For },
.label = ctx.label,
.inline_token = ctx.inline_token,
.for_token = ctx.loop_token,
@@ -1008,33 +918,32 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
.@"else" = null,
});
ctx.opt_ctx.store(&node.base);
- stack.append(State { .Else = &node.@"else" }) catch unreachable;
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.body } });
- try stack.append(State { .PointerIndexPayload = OptionalCtx { .Optional = &node.payload } });
- try stack.append(State { .ExpectToken = Token.Id.RParen });
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.array_expr } });
- try stack.append(State { .ExpectToken = Token.Id.LParen });
+ stack.append(State{ .Else = &node.@"else" }) catch unreachable;
+ try stack.append(State{ .Expression = OptionalCtx{ .Required = &node.body } });
+ try stack.append(State{ .PointerIndexPayload = OptionalCtx{ .Optional = &node.payload } });
+ try stack.append(State{ .ExpectToken = Token.Id.RParen });
+ try stack.append(State{ .Expression = OptionalCtx{ .Required = &node.array_expr } });
+ try stack.append(State{ .ExpectToken = Token.Id.LParen });
continue;
},
State.Else => |dest| {
if (eatToken(&tok_it, &tree, Token.Id.Keyword_else)) |else_token| {
- const node = try arena.construct(ast.Node.Else {
- .base = ast.Node {.id = ast.Node.Id.Else },
+ const node = try arena.construct(ast.Node.Else{
+ .base = ast.Node{ .id = ast.Node.Id.Else },
.else_token = else_token,
.payload = null,
.body = undefined,
});
- *dest = node;
+ dest.* = node;
- stack.append(State { .Expression = OptionalCtx { .Required = &node.body } }) catch unreachable;
- try stack.append(State { .Payload = OptionalCtx { .Optional = &node.payload } });
+ stack.append(State{ .Expression = OptionalCtx{ .Required = &node.body } }) catch unreachable;
+ try stack.append(State{ .Payload = OptionalCtx{ .Optional = &node.payload } });
continue;
} else {
continue;
}
},
-
State.Block => |block| {
const token = nextToken(&tok_it, &tree);
const token_index = token.index;
@@ -1046,7 +955,7 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
},
else => {
putBackToken(&tok_it, &tree);
- stack.append(State { .Block = block }) catch unreachable;
+ stack.append(State{ .Block = block }) catch unreachable;
var any_comments = false;
while (try eatLineComment(arena, &tok_it, &tree)) |line_comment| {
@@ -1055,7 +964,7 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
}
if (any_comments) continue;
- try stack.append(State { .Statement = block });
+ try stack.append(State{ .Statement = block });
continue;
},
}
@@ -1066,33 +975,29 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const token_ptr = token.ptr;
switch (token_ptr.id) {
Token.Id.Keyword_comptime => {
- stack.append(State {
- .ComptimeStatement = ComptimeStatementCtx {
- .comptime_token = token_index,
- .block = block,
- }
- }) catch unreachable;
- continue;
- },
- Token.Id.Keyword_var, Token.Id.Keyword_const => {
- stack.append(State {
- .VarDecl = VarDeclCtx {
- .comments = null,
- .visib_token = null,
- .comptime_token = null,
- .extern_export_token = null,
- .lib_name = null,
- .mut_token = token_index,
- .list = &block.statements,
- }
- }) catch unreachable;
+ stack.append(State{ .ComptimeStatement = ComptimeStatementCtx{
+ .comptime_token = token_index,
+ .block = block,
+ } }) catch unreachable;
continue;
},
- Token.Id.Keyword_defer, Token.Id.Keyword_errdefer => {
- const node = try arena.construct(ast.Node.Defer {
- .base = ast.Node {
- .id = ast.Node.Id.Defer,
- },
+ Token.Id.Keyword_var,
+ Token.Id.Keyword_const => {
+ stack.append(State{ .VarDecl = VarDeclCtx{
+ .comments = null,
+ .visib_token = null,
+ .comptime_token = null,
+ .extern_export_token = null,
+ .lib_name = null,
+ .mut_token = token_index,
+ .list = &block.statements,
+ } }) catch unreachable;
+ continue;
+ },
+ Token.Id.Keyword_defer,
+ Token.Id.Keyword_errdefer => {
+ const node = try arena.construct(ast.Node.Defer{
+ .base = ast.Node{ .id = ast.Node.Id.Defer },
.defer_token = token_index,
.kind = switch (token_ptr.id) {
Token.Id.Keyword_defer => ast.Node.Defer.Kind.Unconditional,
@@ -1102,15 +1007,15 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
.expr = undefined,
});
const node_ptr = try block.statements.addOne();
- *node_ptr = &node.base;
+ node_ptr.* = &node.base;
- stack.append(State { .Semicolon = node_ptr }) catch unreachable;
- try stack.append(State { .AssignmentExpressionBegin = OptionalCtx{ .Required = &node.expr } });
+ stack.append(State{ .Semicolon = node_ptr }) catch unreachable;
+ try stack.append(State{ .AssignmentExpressionBegin = OptionalCtx{ .Required = &node.expr } });
continue;
},
Token.Id.LBrace => {
- const inner_block = try arena.construct(ast.Node.Block {
- .base = ast.Node { .id = ast.Node.Id.Block },
+ const inner_block = try arena.construct(ast.Node.Block{
+ .base = ast.Node{ .id = ast.Node.Id.Block },
.label = null,
.lbrace = token_index,
.statements = ast.Node.Block.StatementList.init(arena),
@@ -1118,16 +1023,16 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
});
try block.statements.push(&inner_block.base);
- stack.append(State { .Block = inner_block }) catch unreachable;
+ stack.append(State{ .Block = inner_block }) catch unreachable;
continue;
},
else => {
putBackToken(&tok_it, &tree);
const statement = try block.statements.addOne();
- try stack.append(State { .Semicolon = statement });
- try stack.append(State { .AssignmentExpressionBegin = OptionalCtx{ .Required = statement } });
+ try stack.append(State{ .Semicolon = statement });
+ try stack.append(State{ .AssignmentExpressionBegin = OptionalCtx{ .Required = statement } });
continue;
- }
+ },
}
},
State.ComptimeStatement => |ctx| {
@@ -1135,34 +1040,33 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const token_index = token.index;
const token_ptr = token.ptr;
switch (token_ptr.id) {
- Token.Id.Keyword_var, Token.Id.Keyword_const => {
- stack.append(State {
- .VarDecl = VarDeclCtx {
- .comments = null,
- .visib_token = null,
- .comptime_token = ctx.comptime_token,
- .extern_export_token = null,
- .lib_name = null,
- .mut_token = token_index,
- .list = &ctx.block.statements,
- }
- }) catch unreachable;
+ Token.Id.Keyword_var,
+ Token.Id.Keyword_const => {
+ stack.append(State{ .VarDecl = VarDeclCtx{
+ .comments = null,
+ .visib_token = null,
+ .comptime_token = ctx.comptime_token,
+ .extern_export_token = null,
+ .lib_name = null,
+ .mut_token = token_index,
+ .list = &ctx.block.statements,
+ } }) catch unreachable;
continue;
},
else => {
putBackToken(&tok_it, &tree);
putBackToken(&tok_it, &tree);
const statement = try ctx.block.statements.addOne();
- try stack.append(State { .Semicolon = statement });
- try stack.append(State { .Expression = OptionalCtx { .Required = statement } });
+ try stack.append(State{ .Semicolon = statement });
+ try stack.append(State{ .Expression = OptionalCtx{ .Required = statement } });
continue;
- }
+ },
}
},
State.Semicolon => |node_ptr| {
- const node = *node_ptr;
+ const node = node_ptr.*;
if (node.requireSemiColon()) {
- stack.append(State { .ExpectToken = Token.Id.Semicolon }) catch unreachable;
+ stack.append(State{ .ExpectToken = Token.Id.Semicolon }) catch unreachable;
continue;
}
continue;
@@ -1177,22 +1081,22 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
continue;
}
- const node = try arena.construct(ast.Node.AsmOutput {
- .base = ast.Node {.id = ast.Node.Id.AsmOutput },
+ const node = try arena.construct(ast.Node.AsmOutput{
+ .base = ast.Node{ .id = ast.Node.Id.AsmOutput },
.symbolic_name = undefined,
.constraint = undefined,
.kind = undefined,
});
try items.push(node);
- stack.append(State { .AsmOutputItems = items }) catch unreachable;
- try stack.append(State { .IfToken = Token.Id.Comma });
- try stack.append(State { .ExpectToken = Token.Id.RParen });
- try stack.append(State { .AsmOutputReturnOrType = node });
- try stack.append(State { .ExpectToken = Token.Id.LParen });
- try stack.append(State { .StringLiteral = OptionalCtx { .Required = &node.constraint } });
- try stack.append(State { .ExpectToken = Token.Id.RBracket });
- try stack.append(State { .Identifier = OptionalCtx { .Required = &node.symbolic_name } });
+ stack.append(State{ .AsmOutputItems = items }) catch unreachable;
+ try stack.append(State{ .IfToken = Token.Id.Comma });
+ try stack.append(State{ .ExpectToken = Token.Id.RParen });
+ try stack.append(State{ .AsmOutputReturnOrType = node });
+ try stack.append(State{ .ExpectToken = Token.Id.LParen });
+ try stack.append(State{ .StringLiteral = OptionalCtx{ .Required = &node.constraint } });
+ try stack.append(State{ .ExpectToken = Token.Id.RBracket });
+ try stack.append(State{ .Identifier = OptionalCtx{ .Required = &node.symbolic_name } });
continue;
},
State.AsmOutputReturnOrType => |node| {
@@ -1201,20 +1105,16 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const token_ptr = token.ptr;
switch (token_ptr.id) {
Token.Id.Identifier => {
- node.kind = ast.Node.AsmOutput.Kind { .Variable = try createLiteral(arena, ast.Node.Identifier, token_index) };
+ node.kind = ast.Node.AsmOutput.Kind{ .Variable = try createLiteral(arena, ast.Node.Identifier, token_index) };
continue;
},
Token.Id.Arrow => {
- node.kind = ast.Node.AsmOutput.Kind { .Return = undefined };
- try stack.append(State { .TypeExprBegin = OptionalCtx { .Required = &node.kind.Return } });
+ node.kind = ast.Node.AsmOutput.Kind{ .Return = undefined };
+ try stack.append(State{ .TypeExprBegin = OptionalCtx{ .Required = &node.kind.Return } });
continue;
},
else => {
- *(try tree.errors.addOne()) = Error {
- .ExpectedAsmOutputReturnOrType = Error.ExpectedAsmOutputReturnOrType {
- .token = token_index,
- },
- };
+ ((try tree.errors.addOne())).* = Error{ .ExpectedAsmOutputReturnOrType = Error.ExpectedAsmOutputReturnOrType{ .token = token_index } };
return tree;
},
}
@@ -1228,49 +1128,48 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
continue;
}
- const node = try arena.construct(ast.Node.AsmInput {
- .base = ast.Node {.id = ast.Node.Id.AsmInput },
+ const node = try arena.construct(ast.Node.AsmInput{
+ .base = ast.Node{ .id = ast.Node.Id.AsmInput },
.symbolic_name = undefined,
.constraint = undefined,
.expr = undefined,
});
try items.push(node);
- stack.append(State { .AsmInputItems = items }) catch unreachable;
- try stack.append(State { .IfToken = Token.Id.Comma });
- try stack.append(State { .ExpectToken = Token.Id.RParen });
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.expr } });
- try stack.append(State { .ExpectToken = Token.Id.LParen });
- try stack.append(State { .StringLiteral = OptionalCtx { .Required = &node.constraint } });
- try stack.append(State { .ExpectToken = Token.Id.RBracket });
- try stack.append(State { .Identifier = OptionalCtx { .Required = &node.symbolic_name } });
+ stack.append(State{ .AsmInputItems = items }) catch unreachable;
+ try stack.append(State{ .IfToken = Token.Id.Comma });
+ try stack.append(State{ .ExpectToken = Token.Id.RParen });
+ try stack.append(State{ .Expression = OptionalCtx{ .Required = &node.expr } });
+ try stack.append(State{ .ExpectToken = Token.Id.LParen });
+ try stack.append(State{ .StringLiteral = OptionalCtx{ .Required = &node.constraint } });
+ try stack.append(State{ .ExpectToken = Token.Id.RBracket });
+ try stack.append(State{ .Identifier = OptionalCtx{ .Required = &node.symbolic_name } });
continue;
},
State.AsmClobberItems => |items| {
- stack.append(State { .AsmClobberItems = items }) catch unreachable;
- try stack.append(State { .IfToken = Token.Id.Comma });
- try stack.append(State { .StringLiteral = OptionalCtx { .Required = try items.addOne() } });
+ stack.append(State{ .AsmClobberItems = items }) catch unreachable;
+ try stack.append(State{ .IfToken = Token.Id.Comma });
+ try stack.append(State{ .StringLiteral = OptionalCtx{ .Required = try items.addOne() } });
continue;
},
-
State.ExprListItemOrEnd => |list_state| {
if (eatToken(&tok_it, &tree, list_state.end)) |token_index| {
- *list_state.ptr = token_index;
+ (list_state.ptr).* = token_index;
continue;
}
- stack.append(State { .ExprListCommaOrEnd = list_state }) catch unreachable;
- try stack.append(State { .Expression = OptionalCtx { .Required = try list_state.list.addOne() } });
+ stack.append(State{ .ExprListCommaOrEnd = list_state }) catch unreachable;
+ try stack.append(State{ .Expression = OptionalCtx{ .Required = try list_state.list.addOne() } });
continue;
},
State.ExprListCommaOrEnd => |list_state| {
switch (expectCommaOrEnd(&tok_it, &tree, list_state.end)) {
ExpectCommaOrEndResult.end_token => |maybe_end| if (maybe_end) |end| {
- *list_state.ptr = end;
+ (list_state.ptr).* = end;
continue;
} else {
- stack.append(State { .ExprListItemOrEnd = list_state }) catch unreachable;
+ stack.append(State{ .ExprListItemOrEnd = list_state }) catch unreachable;
continue;
},
ExpectCommaOrEndResult.parse_error => |e| {
@@ -1285,44 +1184,38 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
}
if (eatToken(&tok_it, &tree, Token.Id.RBrace)) |rbrace| {
- *list_state.ptr = rbrace;
+ (list_state.ptr).* = rbrace;
continue;
}
- const node = try arena.construct(ast.Node.FieldInitializer {
- .base = ast.Node {
- .id = ast.Node.Id.FieldInitializer,
- },
+ const node = try arena.construct(ast.Node.FieldInitializer{
+ .base = ast.Node{ .id = ast.Node.Id.FieldInitializer },
.period_token = undefined,
.name_token = undefined,
.expr = undefined,
});
try list_state.list.push(&node.base);
- stack.append(State { .FieldInitListCommaOrEnd = list_state }) catch unreachable;
- try stack.append(State { .Expression = OptionalCtx{ .Required = &node.expr } });
- try stack.append(State { .ExpectToken = Token.Id.Equal });
- try stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.Identifier,
- .ptr = &node.name_token,
- }
- });
- try stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.Period,
- .ptr = &node.period_token,
- }
- });
+ stack.append(State{ .FieldInitListCommaOrEnd = list_state }) catch unreachable;
+ try stack.append(State{ .Expression = OptionalCtx{ .Required = &node.expr } });
+ try stack.append(State{ .ExpectToken = Token.Id.Equal });
+ try stack.append(State{ .ExpectTokenSave = ExpectTokenSave{
+ .id = Token.Id.Identifier,
+ .ptr = &node.name_token,
+ } });
+ try stack.append(State{ .ExpectTokenSave = ExpectTokenSave{
+ .id = Token.Id.Period,
+ .ptr = &node.period_token,
+ } });
continue;
},
State.FieldInitListCommaOrEnd => |list_state| {
switch (expectCommaOrEnd(&tok_it, &tree, Token.Id.RBrace)) {
ExpectCommaOrEndResult.end_token => |maybe_end| if (maybe_end) |end| {
- *list_state.ptr = end;
+ (list_state.ptr).* = end;
continue;
} else {
- stack.append(State { .FieldInitListItemOrEnd = list_state }) catch unreachable;
+ stack.append(State{ .FieldInitListItemOrEnd = list_state }) catch unreachable;
continue;
},
ExpectCommaOrEndResult.parse_error => |e| {
@@ -1337,7 +1230,7 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
container_decl.rbrace_token = end;
continue;
} else {
- try stack.append(State { .ContainerDecl = container_decl });
+ try stack.append(State{ .ContainerDecl = container_decl });
continue;
},
ExpectCommaOrEndResult.parse_error => |e| {
@@ -1352,23 +1245,23 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
}
if (eatToken(&tok_it, &tree, Token.Id.RBrace)) |rbrace| {
- *list_state.ptr = rbrace;
+ (list_state.ptr).* = rbrace;
continue;
}
const node_ptr = try list_state.list.addOne();
- try stack.append(State { .ErrorTagListCommaOrEnd = list_state });
- try stack.append(State { .ErrorTag = node_ptr });
+ try stack.append(State{ .ErrorTagListCommaOrEnd = list_state });
+ try stack.append(State{ .ErrorTag = node_ptr });
continue;
},
State.ErrorTagListCommaOrEnd => |list_state| {
switch (expectCommaOrEnd(&tok_it, &tree, Token.Id.RBrace)) {
ExpectCommaOrEndResult.end_token => |maybe_end| if (maybe_end) |end| {
- *list_state.ptr = end;
+ (list_state.ptr).* = end;
continue;
} else {
- stack.append(State { .ErrorTagListItemOrEnd = list_state }) catch unreachable;
+ stack.append(State{ .ErrorTagListItemOrEnd = list_state }) catch unreachable;
continue;
},
ExpectCommaOrEndResult.parse_error => |e| {
@@ -1383,24 +1276,22 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
}
if (eatToken(&tok_it, &tree, Token.Id.RBrace)) |rbrace| {
- *list_state.ptr = rbrace;
+ (list_state.ptr).* = rbrace;
continue;
}
const comments = try eatDocComments(arena, &tok_it, &tree);
- const node = try arena.construct(ast.Node.SwitchCase {
- .base = ast.Node {
- .id = ast.Node.Id.SwitchCase,
- },
+ const node = try arena.construct(ast.Node.SwitchCase{
+ .base = ast.Node{ .id = ast.Node.Id.SwitchCase },
.items = ast.Node.SwitchCase.ItemList.init(arena),
.payload = null,
.expr = undefined,
});
try list_state.list.push(&node.base);
- try stack.append(State { .SwitchCaseCommaOrEnd = list_state });
- try stack.append(State { .AssignmentExpressionBegin = OptionalCtx { .Required = &node.expr } });
- try stack.append(State { .PointerPayload = OptionalCtx { .Optional = &node.payload } });
- try stack.append(State { .SwitchCaseFirstItem = &node.items });
+ try stack.append(State{ .SwitchCaseCommaOrEnd = list_state });
+ try stack.append(State{ .AssignmentExpressionBegin = OptionalCtx{ .Required = &node.expr } });
+ try stack.append(State{ .PointerPayload = OptionalCtx{ .Optional = &node.payload } });
+ try stack.append(State{ .SwitchCaseFirstItem = &node.items });
continue;
},
@@ -1408,10 +1299,10 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
State.SwitchCaseCommaOrEnd => |list_state| {
switch (expectCommaOrEnd(&tok_it, &tree, Token.Id.RParen)) {
ExpectCommaOrEndResult.end_token => |maybe_end| if (maybe_end) |end| {
- *list_state.ptr = end;
+ (list_state.ptr).* = end;
continue;
} else {
- try stack.append(State { .SwitchCaseOrEnd = list_state });
+ try stack.append(State{ .SwitchCaseOrEnd = list_state });
continue;
},
ExpectCommaOrEndResult.parse_error => |e| {
@@ -1426,29 +1317,29 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const token_index = token.index;
const token_ptr = token.ptr;
if (token_ptr.id == Token.Id.Keyword_else) {
- const else_node = try arena.construct(ast.Node.SwitchElse {
- .base = ast.Node{ .id = ast.Node.Id.SwitchElse},
+ const else_node = try arena.construct(ast.Node.SwitchElse{
+ .base = ast.Node{ .id = ast.Node.Id.SwitchElse },
.token = token_index,
});
try case_items.push(&else_node.base);
- try stack.append(State { .ExpectToken = Token.Id.EqualAngleBracketRight });
+ try stack.append(State{ .ExpectToken = Token.Id.EqualAngleBracketRight });
continue;
} else {
putBackToken(&tok_it, &tree);
- try stack.append(State { .SwitchCaseItem = case_items });
+ try stack.append(State{ .SwitchCaseItem = case_items });
continue;
}
},
State.SwitchCaseItem => |case_items| {
- stack.append(State { .SwitchCaseItemCommaOrEnd = case_items }) catch unreachable;
- try stack.append(State { .RangeExpressionBegin = OptionalCtx { .Required = try case_items.addOne() } });
+ stack.append(State{ .SwitchCaseItemCommaOrEnd = case_items }) catch unreachable;
+ try stack.append(State{ .RangeExpressionBegin = OptionalCtx{ .Required = try case_items.addOne() } });
},
State.SwitchCaseItemCommaOrEnd => |case_items| {
switch (expectCommaOrEnd(&tok_it, &tree, Token.Id.EqualAngleBracketRight)) {
ExpectCommaOrEndResult.end_token => |t| {
if (t == null) {
- stack.append(State { .SwitchCaseItem = case_items }) catch unreachable;
+ stack.append(State{ .SwitchCaseItem = case_items }) catch unreachable;
}
continue;
},
@@ -1460,10 +1351,9 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
continue;
},
-
State.SuspendBody => |suspend_node| {
if (suspend_node.payload != null) {
- try stack.append(State { .AssignmentExpressionBegin = OptionalCtx { .RequiredNull = &suspend_node.body } });
+ try stack.append(State{ .AssignmentExpressionBegin = OptionalCtx{ .RequiredNull = &suspend_node.body } });
}
continue;
},
@@ -1473,13 +1363,11 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
}
async_node.rangle_bracket = TokenIndex(0);
- try stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.AngleBracketRight,
- .ptr = &??async_node.rangle_bracket,
- }
- });
- try stack.append(State { .TypeExprBegin = OptionalCtx { .RequiredNull = &async_node.allocator_type } });
+ try stack.append(State{ .ExpectTokenSave = ExpectTokenSave{
+ .id = Token.Id.AngleBracketRight,
+ .ptr = &??async_node.rangle_bracket,
+ } });
+ try stack.append(State{ .TypeExprBegin = OptionalCtx{ .RequiredNull = &async_node.allocator_type } });
continue;
},
State.AsyncEnd => |ctx| {
@@ -1498,27 +1386,20 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
continue;
}
- *(try tree.errors.addOne()) = Error {
- .ExpectedCall = Error.ExpectedCall { .node = node },
- };
+ ((try tree.errors.addOne())).* = Error{ .ExpectedCall = Error.ExpectedCall{ .node = node } };
return tree;
},
else => {
- *(try tree.errors.addOne()) = Error {
- .ExpectedCallOrFnProto = Error.ExpectedCallOrFnProto { .node = node },
- };
+ ((try tree.errors.addOne())).* = Error{ .ExpectedCallOrFnProto = Error.ExpectedCallOrFnProto{ .node = node } };
return tree;
- }
+ },
}
},
-
State.ExternType => |ctx| {
if (eatToken(&tok_it, &tree, Token.Id.Keyword_fn)) |fn_token| {
- const fn_proto = try arena.construct(ast.Node.FnProto {
- .base = ast.Node {
- .id = ast.Node.Id.FnProto,
- },
+ const fn_proto = try arena.construct(ast.Node.FnProto{
+ .base = ast.Node{ .id = ast.Node.Id.FnProto },
.doc_comments = ctx.comments,
.visib_token = null,
.name_token = null,
@@ -1534,17 +1415,15 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
.align_expr = null,
});
ctx.opt_ctx.store(&fn_proto.base);
- stack.append(State { .FnProto = fn_proto }) catch unreachable;
+ stack.append(State{ .FnProto = fn_proto }) catch unreachable;
continue;
}
- stack.append(State {
- .ContainerKind = ContainerKindCtx {
- .opt_ctx = ctx.opt_ctx,
- .ltoken = ctx.extern_token,
- .layout = ast.Node.ContainerDecl.Layout.Extern,
- },
- }) catch unreachable;
+ stack.append(State{ .ContainerKind = ContainerKindCtx{
+ .opt_ctx = ctx.opt_ctx,
+ .ltoken = ctx.extern_token,
+ .layout = ast.Node.ContainerDecl.Layout.Extern,
+ } }) catch unreachable;
continue;
},
State.SliceOrArrayAccess => |node| {
@@ -1554,20 +1433,16 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
switch (token_ptr.id) {
Token.Id.Ellipsis2 => {
const start = node.op.ArrayAccess;
- node.op = ast.Node.SuffixOp.Op {
- .Slice = ast.Node.SuffixOp.Op.Slice {
- .start = start,
- .end = null,
- }
- };
+ node.op = ast.Node.SuffixOp.Op{ .Slice = ast.Node.SuffixOp.Op.Slice{
+ .start = start,
+ .end = null,
+ } };
- stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.RBracket,
- .ptr = &node.rtoken,
- }
- }) catch unreachable;
- try stack.append(State { .Expression = OptionalCtx { .Optional = &node.op.Slice.end } });
+ stack.append(State{ .ExpectTokenSave = ExpectTokenSave{
+ .id = Token.Id.RBracket,
+ .ptr = &node.rtoken,
+ } }) catch unreachable;
+ try stack.append(State{ .Expression = OptionalCtx{ .Optional = &node.op.Slice.end } });
continue;
},
Token.Id.RBracket => {
@@ -1575,33 +1450,29 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
continue;
},
else => {
- *(try tree.errors.addOne()) = Error {
- .ExpectedSliceOrRBracket = Error.ExpectedSliceOrRBracket { .token = token_index },
- };
+ ((try tree.errors.addOne())).* = Error{ .ExpectedSliceOrRBracket = Error.ExpectedSliceOrRBracket{ .token = token_index } };
return tree;
- }
+ },
}
},
State.SliceOrArrayType => |node| {
if (eatToken(&tok_it, &tree, Token.Id.RBracket)) |_| {
- node.op = ast.Node.PrefixOp.Op {
- .SliceType = ast.Node.PrefixOp.AddrOfInfo {
- .align_expr = null,
- .bit_offset_start_token = null,
- .bit_offset_end_token = null,
- .const_token = null,
- .volatile_token = null,
- }
- };
- stack.append(State { .TypeExprBegin = OptionalCtx { .Required = &node.rhs } }) catch unreachable;
- try stack.append(State { .AddrOfModifiers = &node.op.SliceType });
+ node.op = ast.Node.PrefixOp.Op{ .SliceType = ast.Node.PrefixOp.AddrOfInfo{
+ .align_expr = null,
+ .bit_offset_start_token = null,
+ .bit_offset_end_token = null,
+ .const_token = null,
+ .volatile_token = null,
+ } };
+ stack.append(State{ .TypeExprBegin = OptionalCtx{ .Required = &node.rhs } }) catch unreachable;
+ try stack.append(State{ .AddrOfModifiers = &node.op.SliceType });
continue;
}
- node.op = ast.Node.PrefixOp.Op { .ArrayType = undefined };
- stack.append(State { .TypeExprBegin = OptionalCtx { .Required = &node.rhs } }) catch unreachable;
- try stack.append(State { .ExpectToken = Token.Id.RBracket });
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.op.ArrayType } });
+ node.op = ast.Node.PrefixOp.Op{ .ArrayType = undefined };
+ stack.append(State{ .TypeExprBegin = OptionalCtx{ .Required = &node.rhs } }) catch unreachable;
+ try stack.append(State{ .ExpectToken = Token.Id.RBracket });
+ try stack.append(State{ .Expression = OptionalCtx{ .Required = &node.op.ArrayType } });
continue;
},
State.AddrOfModifiers => |addr_of_info| {
@@ -1612,22 +1483,18 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
Token.Id.Keyword_align => {
stack.append(state) catch unreachable;
if (addr_of_info.align_expr != null) {
- *(try tree.errors.addOne()) = Error {
- .ExtraAlignQualifier = Error.ExtraAlignQualifier { .token = token_index },
- };
+ ((try tree.errors.addOne())).* = Error{ .ExtraAlignQualifier = Error.ExtraAlignQualifier{ .token = token_index } };
return tree;
}
- try stack.append(State { .ExpectToken = Token.Id.RParen });
- try stack.append(State { .Expression = OptionalCtx { .RequiredNull = &addr_of_info.align_expr} });
- try stack.append(State { .ExpectToken = Token.Id.LParen });
+ try stack.append(State{ .ExpectToken = Token.Id.RParen });
+ try stack.append(State{ .Expression = OptionalCtx{ .RequiredNull = &addr_of_info.align_expr } });
+ try stack.append(State{ .ExpectToken = Token.Id.LParen });
continue;
},
Token.Id.Keyword_const => {
stack.append(state) catch unreachable;
if (addr_of_info.const_token != null) {
- *(try tree.errors.addOne()) = Error {
- .ExtraConstQualifier = Error.ExtraConstQualifier { .token = token_index },
- };
+ ((try tree.errors.addOne())).* = Error{ .ExtraConstQualifier = Error.ExtraConstQualifier{ .token = token_index } };
return tree;
}
addr_of_info.const_token = token_index;
@@ -1636,9 +1503,7 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
Token.Id.Keyword_volatile => {
stack.append(state) catch unreachable;
if (addr_of_info.volatile_token != null) {
- *(try tree.errors.addOne()) = Error {
- .ExtraVolatileQualifier = Error.ExtraVolatileQualifier { .token = token_index },
- };
+ ((try tree.errors.addOne())).* = Error{ .ExtraVolatileQualifier = Error.ExtraVolatileQualifier{ .token = token_index } };
return tree;
}
addr_of_info.volatile_token = token_index;
@@ -1651,19 +1516,16 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
}
},
-
State.Payload => |opt_ctx| {
const token = nextToken(&tok_it, &tree);
const token_index = token.index;
const token_ptr = token.ptr;
if (token_ptr.id != Token.Id.Pipe) {
if (opt_ctx != OptionalCtx.Optional) {
- *(try tree.errors.addOne()) = Error {
- .ExpectedToken = Error.ExpectedToken {
- .token = token_index,
- .expected_id = Token.Id.Pipe,
- },
- };
+ ((try tree.errors.addOne())).* = Error{ .ExpectedToken = Error.ExpectedToken{
+ .token = token_index,
+ .expected_id = Token.Id.Pipe,
+ } };
return tree;
}
@@ -1671,21 +1533,19 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
continue;
}
- const node = try arena.construct(ast.Node.Payload {
- .base = ast.Node {.id = ast.Node.Id.Payload },
+ const node = try arena.construct(ast.Node.Payload{
+ .base = ast.Node{ .id = ast.Node.Id.Payload },
.lpipe = token_index,
.error_symbol = undefined,
- .rpipe = undefined
+ .rpipe = undefined,
});
opt_ctx.store(&node.base);
- stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.Pipe,
- .ptr = &node.rpipe,
- }
- }) catch unreachable;
- try stack.append(State { .Identifier = OptionalCtx { .Required = &node.error_symbol } });
+ stack.append(State{ .ExpectTokenSave = ExpectTokenSave{
+ .id = Token.Id.Pipe,
+ .ptr = &node.rpipe,
+ } }) catch unreachable;
+ try stack.append(State{ .Identifier = OptionalCtx{ .Required = &node.error_symbol } });
continue;
},
State.PointerPayload => |opt_ctx| {
@@ -1694,12 +1554,10 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const token_ptr = token.ptr;
if (token_ptr.id != Token.Id.Pipe) {
if (opt_ctx != OptionalCtx.Optional) {
- *(try tree.errors.addOne()) = Error {
- .ExpectedToken = Error.ExpectedToken {
- .token = token_index,
- .expected_id = Token.Id.Pipe,
- },
- };
+ ((try tree.errors.addOne())).* = Error{ .ExpectedToken = Error.ExpectedToken{
+ .token = token_index,
+ .expected_id = Token.Id.Pipe,
+ } };
return tree;
}
@@ -1707,28 +1565,24 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
continue;
}
- const node = try arena.construct(ast.Node.PointerPayload {
- .base = ast.Node {.id = ast.Node.Id.PointerPayload },
+ const node = try arena.construct(ast.Node.PointerPayload{
+ .base = ast.Node{ .id = ast.Node.Id.PointerPayload },
.lpipe = token_index,
.ptr_token = null,
.value_symbol = undefined,
- .rpipe = undefined
+ .rpipe = undefined,
});
opt_ctx.store(&node.base);
- try stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.Pipe,
- .ptr = &node.rpipe,
- }
- });
- try stack.append(State { .Identifier = OptionalCtx { .Required = &node.value_symbol } });
- try stack.append(State {
- .OptionalTokenSave = OptionalTokenSave {
- .id = Token.Id.Asterisk,
- .ptr = &node.ptr_token,
- }
- });
+ try stack.append(State{ .ExpectTokenSave = ExpectTokenSave{
+ .id = Token.Id.Pipe,
+ .ptr = &node.rpipe,
+ } });
+ try stack.append(State{ .Identifier = OptionalCtx{ .Required = &node.value_symbol } });
+ try stack.append(State{ .OptionalTokenSave = OptionalTokenSave{
+ .id = Token.Id.Asterisk,
+ .ptr = &node.ptr_token,
+ } });
continue;
},
State.PointerIndexPayload => |opt_ctx| {
@@ -1737,12 +1591,10 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const token_ptr = token.ptr;
if (token_ptr.id != Token.Id.Pipe) {
if (opt_ctx != OptionalCtx.Optional) {
- *(try tree.errors.addOne()) = Error {
- .ExpectedToken = Error.ExpectedToken {
- .token = token_index,
- .expected_id = Token.Id.Pipe,
- },
- };
+ ((try tree.errors.addOne())).* = Error{ .ExpectedToken = Error.ExpectedToken{
+ .token = token_index,
+ .expected_id = Token.Id.Pipe,
+ } };
return tree;
}
@@ -1750,61 +1602,58 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
continue;
}
- const node = try arena.construct(ast.Node.PointerIndexPayload {
- .base = ast.Node {.id = ast.Node.Id.PointerIndexPayload },
+ const node = try arena.construct(ast.Node.PointerIndexPayload{
+ .base = ast.Node{ .id = ast.Node.Id.PointerIndexPayload },
.lpipe = token_index,
.ptr_token = null,
.value_symbol = undefined,
.index_symbol = null,
- .rpipe = undefined
+ .rpipe = undefined,
});
opt_ctx.store(&node.base);
- stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.Pipe,
- .ptr = &node.rpipe,
- }
- }) catch unreachable;
- try stack.append(State { .Identifier = OptionalCtx { .RequiredNull = &node.index_symbol } });
- try stack.append(State { .IfToken = Token.Id.Comma });
- try stack.append(State { .Identifier = OptionalCtx { .Required = &node.value_symbol } });
- try stack.append(State {
- .OptionalTokenSave = OptionalTokenSave {
- .id = Token.Id.Asterisk,
- .ptr = &node.ptr_token,
- }
- });
+ stack.append(State{ .ExpectTokenSave = ExpectTokenSave{
+ .id = Token.Id.Pipe,
+ .ptr = &node.rpipe,
+ } }) catch unreachable;
+ try stack.append(State{ .Identifier = OptionalCtx{ .RequiredNull = &node.index_symbol } });
+ try stack.append(State{ .IfToken = Token.Id.Comma });
+ try stack.append(State{ .Identifier = OptionalCtx{ .Required = &node.value_symbol } });
+ try stack.append(State{ .OptionalTokenSave = OptionalTokenSave{
+ .id = Token.Id.Asterisk,
+ .ptr = &node.ptr_token,
+ } });
continue;
},
-
State.Expression => |opt_ctx| {
const token = nextToken(&tok_it, &tree);
const token_index = token.index;
const token_ptr = token.ptr;
switch (token_ptr.id) {
- Token.Id.Keyword_return, Token.Id.Keyword_break, Token.Id.Keyword_continue => {
- const node = try arena.construct(ast.Node.ControlFlowExpression {
- .base = ast.Node {.id = ast.Node.Id.ControlFlowExpression },
+ Token.Id.Keyword_return,
+ Token.Id.Keyword_break,
+ Token.Id.Keyword_continue => {
+ const node = try arena.construct(ast.Node.ControlFlowExpression{
+ .base = ast.Node{ .id = ast.Node.Id.ControlFlowExpression },
.ltoken = token_index,
.kind = undefined,
.rhs = null,
});
opt_ctx.store(&node.base);
- stack.append(State { .Expression = OptionalCtx { .Optional = &node.rhs } }) catch unreachable;
+ stack.append(State{ .Expression = OptionalCtx{ .Optional = &node.rhs } }) catch unreachable;
switch (token_ptr.id) {
Token.Id.Keyword_break => {
- node.kind = ast.Node.ControlFlowExpression.Kind { .Break = null };
- try stack.append(State { .Identifier = OptionalCtx { .RequiredNull = &node.kind.Break } });
- try stack.append(State { .IfToken = Token.Id.Colon });
+ node.kind = ast.Node.ControlFlowExpression.Kind{ .Break = null };
+ try stack.append(State{ .Identifier = OptionalCtx{ .RequiredNull = &node.kind.Break } });
+ try stack.append(State{ .IfToken = Token.Id.Colon });
},
Token.Id.Keyword_continue => {
- node.kind = ast.Node.ControlFlowExpression.Kind { .Continue = null };
- try stack.append(State { .Identifier = OptionalCtx { .RequiredNull = &node.kind.Continue } });
- try stack.append(State { .IfToken = Token.Id.Colon });
+ node.kind = ast.Node.ControlFlowExpression.Kind{ .Continue = null };
+ try stack.append(State{ .Identifier = OptionalCtx{ .RequiredNull = &node.kind.Continue } });
+ try stack.append(State{ .IfToken = Token.Id.Colon });
},
Token.Id.Keyword_return => {
node.kind = ast.Node.ControlFlowExpression.Kind.Return;
@@ -1813,56 +1662,58 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
}
continue;
},
- Token.Id.Keyword_try, Token.Id.Keyword_cancel, Token.Id.Keyword_resume => {
- const node = try arena.construct(ast.Node.PrefixOp {
- .base = ast.Node {.id = ast.Node.Id.PrefixOp },
+ Token.Id.Keyword_try,
+ Token.Id.Keyword_cancel,
+ Token.Id.Keyword_resume => {
+ const node = try arena.construct(ast.Node.PrefixOp{
+ .base = ast.Node{ .id = ast.Node.Id.PrefixOp },
.op_token = token_index,
.op = switch (token_ptr.id) {
- Token.Id.Keyword_try => ast.Node.PrefixOp.Op { .Try = void{} },
- Token.Id.Keyword_cancel => ast.Node.PrefixOp.Op { .Cancel = void{} },
- Token.Id.Keyword_resume => ast.Node.PrefixOp.Op { .Resume = void{} },
+ Token.Id.Keyword_try => ast.Node.PrefixOp.Op{ .Try = void{} },
+ Token.Id.Keyword_cancel => ast.Node.PrefixOp.Op{ .Cancel = void{} },
+ Token.Id.Keyword_resume => ast.Node.PrefixOp.Op{ .Resume = void{} },
else => unreachable,
},
.rhs = undefined,
});
opt_ctx.store(&node.base);
- stack.append(State { .Expression = OptionalCtx { .Required = &node.rhs } }) catch unreachable;
+ stack.append(State{ .Expression = OptionalCtx{ .Required = &node.rhs } }) catch unreachable;
continue;
},
else => {
if (!try parseBlockExpr(&stack, arena, opt_ctx, token_ptr, token_index)) {
putBackToken(&tok_it, &tree);
- stack.append(State { .UnwrapExpressionBegin = opt_ctx }) catch unreachable;
+ stack.append(State{ .UnwrapExpressionBegin = opt_ctx }) catch unreachable;
}
continue;
- }
+ },
}
},
State.RangeExpressionBegin => |opt_ctx| {
- stack.append(State { .RangeExpressionEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .Expression = opt_ctx });
+ stack.append(State{ .RangeExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State{ .Expression = opt_ctx });
continue;
},
State.RangeExpressionEnd => |opt_ctx| {
const lhs = opt_ctx.get() ?? continue;
if (eatToken(&tok_it, &tree, Token.Id.Ellipsis3)) |ellipsis3| {
- const node = try arena.construct(ast.Node.InfixOp {
- .base = ast.Node {.id = ast.Node.Id.InfixOp },
+ const node = try arena.construct(ast.Node.InfixOp{
+ .base = ast.Node{ .id = ast.Node.Id.InfixOp },
.lhs = lhs,
.op_token = ellipsis3,
.op = ast.Node.InfixOp.Op.Range,
.rhs = undefined,
});
opt_ctx.store(&node.base);
- stack.append(State { .Expression = OptionalCtx { .Required = &node.rhs } }) catch unreachable;
+ stack.append(State{ .Expression = OptionalCtx{ .Required = &node.rhs } }) catch unreachable;
continue;
}
},
State.AssignmentExpressionBegin => |opt_ctx| {
- stack.append(State { .AssignmentExpressionEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .Expression = opt_ctx });
+ stack.append(State{ .AssignmentExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State{ .Expression = opt_ctx });
continue;
},
@@ -1873,16 +1724,16 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const token_index = token.index;
const token_ptr = token.ptr;
if (tokenIdToAssignment(token_ptr.id)) |ass_id| {
- const node = try arena.construct(ast.Node.InfixOp {
- .base = ast.Node {.id = ast.Node.Id.InfixOp },
+ const node = try arena.construct(ast.Node.InfixOp{
+ .base = ast.Node{ .id = ast.Node.Id.InfixOp },
.lhs = lhs,
.op_token = token_index,
.op = ass_id,
.rhs = undefined,
});
opt_ctx.store(&node.base);
- stack.append(State { .AssignmentExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.rhs } });
+ stack.append(State{ .AssignmentExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State{ .Expression = OptionalCtx{ .Required = &node.rhs } });
continue;
} else {
putBackToken(&tok_it, &tree);
@@ -1891,8 +1742,8 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
},
State.UnwrapExpressionBegin => |opt_ctx| {
- stack.append(State { .UnwrapExpressionEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .BoolOrExpressionBegin = opt_ctx });
+ stack.append(State{ .UnwrapExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State{ .BoolOrExpressionBegin = opt_ctx });
continue;
},
@@ -1903,8 +1754,8 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const token_index = token.index;
const token_ptr = token.ptr;
if (tokenIdToUnwrapExpr(token_ptr.id)) |unwrap_id| {
- const node = try arena.construct(ast.Node.InfixOp {
- .base = ast.Node {.id = ast.Node.Id.InfixOp },
+ const node = try arena.construct(ast.Node.InfixOp{
+ .base = ast.Node{ .id = ast.Node.Id.InfixOp },
.lhs = lhs,
.op_token = token_index,
.op = unwrap_id,
@@ -1912,11 +1763,11 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
});
opt_ctx.store(&node.base);
- stack.append(State { .UnwrapExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.rhs } });
+ stack.append(State{ .UnwrapExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State{ .Expression = OptionalCtx{ .Required = &node.rhs } });
if (node.op == ast.Node.InfixOp.Op.Catch) {
- try stack.append(State { .Payload = OptionalCtx { .Optional = &node.op.Catch } });
+ try stack.append(State{ .Payload = OptionalCtx{ .Optional = &node.op.Catch } });
}
continue;
} else {
@@ -1926,8 +1777,8 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
},
State.BoolOrExpressionBegin => |opt_ctx| {
- stack.append(State { .BoolOrExpressionEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .BoolAndExpressionBegin = opt_ctx });
+ stack.append(State{ .BoolOrExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State{ .BoolAndExpressionBegin = opt_ctx });
continue;
},
@@ -1935,23 +1786,23 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const lhs = opt_ctx.get() ?? continue;
if (eatToken(&tok_it, &tree, Token.Id.Keyword_or)) |or_token| {
- const node = try arena.construct(ast.Node.InfixOp {
- .base = ast.Node {.id = ast.Node.Id.InfixOp },
+ const node = try arena.construct(ast.Node.InfixOp{
+ .base = ast.Node{ .id = ast.Node.Id.InfixOp },
.lhs = lhs,
.op_token = or_token,
.op = ast.Node.InfixOp.Op.BoolOr,
.rhs = undefined,
});
opt_ctx.store(&node.base);
- stack.append(State { .BoolOrExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .BoolAndExpressionBegin = OptionalCtx { .Required = &node.rhs } });
+ stack.append(State{ .BoolOrExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State{ .BoolAndExpressionBegin = OptionalCtx{ .Required = &node.rhs } });
continue;
}
},
State.BoolAndExpressionBegin => |opt_ctx| {
- stack.append(State { .BoolAndExpressionEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .ComparisonExpressionBegin = opt_ctx });
+ stack.append(State{ .BoolAndExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State{ .ComparisonExpressionBegin = opt_ctx });
continue;
},
@@ -1959,23 +1810,23 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const lhs = opt_ctx.get() ?? continue;
if (eatToken(&tok_it, &tree, Token.Id.Keyword_and)) |and_token| {
- const node = try arena.construct(ast.Node.InfixOp {
- .base = ast.Node {.id = ast.Node.Id.InfixOp },
+ const node = try arena.construct(ast.Node.InfixOp{
+ .base = ast.Node{ .id = ast.Node.Id.InfixOp },
.lhs = lhs,
.op_token = and_token,
.op = ast.Node.InfixOp.Op.BoolAnd,
.rhs = undefined,
});
opt_ctx.store(&node.base);
- stack.append(State { .BoolAndExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .ComparisonExpressionBegin = OptionalCtx { .Required = &node.rhs } });
+ stack.append(State{ .BoolAndExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State{ .ComparisonExpressionBegin = OptionalCtx{ .Required = &node.rhs } });
continue;
}
},
State.ComparisonExpressionBegin => |opt_ctx| {
- stack.append(State { .ComparisonExpressionEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .BinaryOrExpressionBegin = opt_ctx });
+ stack.append(State{ .ComparisonExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State{ .BinaryOrExpressionBegin = opt_ctx });
continue;
},
@@ -1986,16 +1837,16 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const token_index = token.index;
const token_ptr = token.ptr;
if (tokenIdToComparison(token_ptr.id)) |comp_id| {
- const node = try arena.construct(ast.Node.InfixOp {
- .base = ast.Node {.id = ast.Node.Id.InfixOp },
+ const node = try arena.construct(ast.Node.InfixOp{
+ .base = ast.Node{ .id = ast.Node.Id.InfixOp },
.lhs = lhs,
.op_token = token_index,
.op = comp_id,
.rhs = undefined,
});
opt_ctx.store(&node.base);
- stack.append(State { .ComparisonExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .BinaryOrExpressionBegin = OptionalCtx { .Required = &node.rhs } });
+ stack.append(State{ .ComparisonExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State{ .BinaryOrExpressionBegin = OptionalCtx{ .Required = &node.rhs } });
continue;
} else {
putBackToken(&tok_it, &tree);
@@ -2004,8 +1855,8 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
},
State.BinaryOrExpressionBegin => |opt_ctx| {
- stack.append(State { .BinaryOrExpressionEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .BinaryXorExpressionBegin = opt_ctx });
+ stack.append(State{ .BinaryOrExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State{ .BinaryXorExpressionBegin = opt_ctx });
continue;
},
@@ -2013,23 +1864,23 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const lhs = opt_ctx.get() ?? continue;
if (eatToken(&tok_it, &tree, Token.Id.Pipe)) |pipe| {
- const node = try arena.construct(ast.Node.InfixOp {
- .base = ast.Node {.id = ast.Node.Id.InfixOp },
+ const node = try arena.construct(ast.Node.InfixOp{
+ .base = ast.Node{ .id = ast.Node.Id.InfixOp },
.lhs = lhs,
.op_token = pipe,
.op = ast.Node.InfixOp.Op.BitOr,
.rhs = undefined,
});
opt_ctx.store(&node.base);
- stack.append(State { .BinaryOrExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .BinaryXorExpressionBegin = OptionalCtx { .Required = &node.rhs } });
+ stack.append(State{ .BinaryOrExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State{ .BinaryXorExpressionBegin = OptionalCtx{ .Required = &node.rhs } });
continue;
}
},
State.BinaryXorExpressionBegin => |opt_ctx| {
- stack.append(State { .BinaryXorExpressionEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .BinaryAndExpressionBegin = opt_ctx });
+ stack.append(State{ .BinaryXorExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State{ .BinaryAndExpressionBegin = opt_ctx });
continue;
},
@@ -2037,23 +1888,23 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const lhs = opt_ctx.get() ?? continue;
if (eatToken(&tok_it, &tree, Token.Id.Caret)) |caret| {
- const node = try arena.construct(ast.Node.InfixOp {
- .base = ast.Node {.id = ast.Node.Id.InfixOp },
+ const node = try arena.construct(ast.Node.InfixOp{
+ .base = ast.Node{ .id = ast.Node.Id.InfixOp },
.lhs = lhs,
.op_token = caret,
.op = ast.Node.InfixOp.Op.BitXor,
.rhs = undefined,
});
opt_ctx.store(&node.base);
- stack.append(State { .BinaryXorExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .BinaryAndExpressionBegin = OptionalCtx { .Required = &node.rhs } });
+ stack.append(State{ .BinaryXorExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State{ .BinaryAndExpressionBegin = OptionalCtx{ .Required = &node.rhs } });
continue;
}
},
State.BinaryAndExpressionBegin => |opt_ctx| {
- stack.append(State { .BinaryAndExpressionEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .BitShiftExpressionBegin = opt_ctx });
+ stack.append(State{ .BinaryAndExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State{ .BitShiftExpressionBegin = opt_ctx });
continue;
},
@@ -2061,23 +1912,23 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const lhs = opt_ctx.get() ?? continue;
if (eatToken(&tok_it, &tree, Token.Id.Ampersand)) |ampersand| {
- const node = try arena.construct(ast.Node.InfixOp {
- .base = ast.Node {.id = ast.Node.Id.InfixOp },
+ const node = try arena.construct(ast.Node.InfixOp{
+ .base = ast.Node{ .id = ast.Node.Id.InfixOp },
.lhs = lhs,
.op_token = ampersand,
.op = ast.Node.InfixOp.Op.BitAnd,
.rhs = undefined,
});
opt_ctx.store(&node.base);
- stack.append(State { .BinaryAndExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .BitShiftExpressionBegin = OptionalCtx { .Required = &node.rhs } });
+ stack.append(State{ .BinaryAndExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State{ .BitShiftExpressionBegin = OptionalCtx{ .Required = &node.rhs } });
continue;
}
},
State.BitShiftExpressionBegin => |opt_ctx| {
- stack.append(State { .BitShiftExpressionEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .AdditionExpressionBegin = opt_ctx });
+ stack.append(State{ .BitShiftExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State{ .AdditionExpressionBegin = opt_ctx });
continue;
},
@@ -2088,16 +1939,16 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const token_index = token.index;
const token_ptr = token.ptr;
if (tokenIdToBitShift(token_ptr.id)) |bitshift_id| {
- const node = try arena.construct(ast.Node.InfixOp {
- .base = ast.Node {.id = ast.Node.Id.InfixOp },
+ const node = try arena.construct(ast.Node.InfixOp{
+ .base = ast.Node{ .id = ast.Node.Id.InfixOp },
.lhs = lhs,
.op_token = token_index,
.op = bitshift_id,
.rhs = undefined,
});
opt_ctx.store(&node.base);
- stack.append(State { .BitShiftExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .AdditionExpressionBegin = OptionalCtx { .Required = &node.rhs } });
+ stack.append(State{ .BitShiftExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State{ .AdditionExpressionBegin = OptionalCtx{ .Required = &node.rhs } });
continue;
} else {
putBackToken(&tok_it, &tree);
@@ -2106,8 +1957,8 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
},
State.AdditionExpressionBegin => |opt_ctx| {
- stack.append(State { .AdditionExpressionEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .MultiplyExpressionBegin = opt_ctx });
+ stack.append(State{ .AdditionExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State{ .MultiplyExpressionBegin = opt_ctx });
continue;
},
@@ -2118,16 +1969,16 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const token_index = token.index;
const token_ptr = token.ptr;
if (tokenIdToAddition(token_ptr.id)) |add_id| {
- const node = try arena.construct(ast.Node.InfixOp {
- .base = ast.Node {.id = ast.Node.Id.InfixOp },
+ const node = try arena.construct(ast.Node.InfixOp{
+ .base = ast.Node{ .id = ast.Node.Id.InfixOp },
.lhs = lhs,
.op_token = token_index,
.op = add_id,
.rhs = undefined,
});
opt_ctx.store(&node.base);
- stack.append(State { .AdditionExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .MultiplyExpressionBegin = OptionalCtx { .Required = &node.rhs } });
+ stack.append(State{ .AdditionExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State{ .MultiplyExpressionBegin = OptionalCtx{ .Required = &node.rhs } });
continue;
} else {
putBackToken(&tok_it, &tree);
@@ -2136,8 +1987,8 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
},
State.MultiplyExpressionBegin => |opt_ctx| {
- stack.append(State { .MultiplyExpressionEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .CurlySuffixExpressionBegin = opt_ctx });
+ stack.append(State{ .MultiplyExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State{ .CurlySuffixExpressionBegin = opt_ctx });
continue;
},
@@ -2148,16 +1999,16 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const token_index = token.index;
const token_ptr = token.ptr;
if (tokenIdToMultiply(token_ptr.id)) |mult_id| {
- const node = try arena.construct(ast.Node.InfixOp {
- .base = ast.Node {.id = ast.Node.Id.InfixOp },
+ const node = try arena.construct(ast.Node.InfixOp{
+ .base = ast.Node{ .id = ast.Node.Id.InfixOp },
.lhs = lhs,
.op_token = token_index,
.op = mult_id,
.rhs = undefined,
});
opt_ctx.store(&node.base);
- stack.append(State { .MultiplyExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .CurlySuffixExpressionBegin = OptionalCtx { .Required = &node.rhs } });
+ stack.append(State{ .MultiplyExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State{ .CurlySuffixExpressionBegin = OptionalCtx{ .Required = &node.rhs } });
continue;
} else {
putBackToken(&tok_it, &tree);
@@ -2166,9 +2017,9 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
},
State.CurlySuffixExpressionBegin => |opt_ctx| {
- stack.append(State { .CurlySuffixExpressionEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .IfToken = Token.Id.LBrace });
- try stack.append(State { .TypeExprBegin = opt_ctx });
+ stack.append(State{ .CurlySuffixExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State{ .IfToken = Token.Id.LBrace });
+ try stack.append(State{ .TypeExprBegin = opt_ctx });
continue;
},
@@ -2176,51 +2027,43 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const lhs = opt_ctx.get() ?? continue;
if ((??tok_it.peek()).id == Token.Id.Period) {
- const node = try arena.construct(ast.Node.SuffixOp {
- .base = ast.Node { .id = ast.Node.Id.SuffixOp },
+ const node = try arena.construct(ast.Node.SuffixOp{
+ .base = ast.Node{ .id = ast.Node.Id.SuffixOp },
.lhs = lhs,
- .op = ast.Node.SuffixOp.Op {
- .StructInitializer = ast.Node.SuffixOp.Op.InitList.init(arena),
- },
+ .op = ast.Node.SuffixOp.Op{ .StructInitializer = ast.Node.SuffixOp.Op.InitList.init(arena) },
.rtoken = undefined,
});
opt_ctx.store(&node.base);
- stack.append(State { .CurlySuffixExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .IfToken = Token.Id.LBrace });
- try stack.append(State {
- .FieldInitListItemOrEnd = ListSave(@typeOf(node.op.StructInitializer)) {
- .list = &node.op.StructInitializer,
- .ptr = &node.rtoken,
- }
- });
+ stack.append(State{ .CurlySuffixExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State{ .IfToken = Token.Id.LBrace });
+ try stack.append(State{ .FieldInitListItemOrEnd = ListSave(@typeOf(node.op.StructInitializer)){
+ .list = &node.op.StructInitializer,
+ .ptr = &node.rtoken,
+ } });
continue;
}
- const node = try arena.construct(ast.Node.SuffixOp {
- .base = ast.Node {.id = ast.Node.Id.SuffixOp },
+ const node = try arena.construct(ast.Node.SuffixOp{
+ .base = ast.Node{ .id = ast.Node.Id.SuffixOp },
.lhs = lhs,
- .op = ast.Node.SuffixOp.Op {
- .ArrayInitializer = ast.Node.SuffixOp.Op.InitList.init(arena),
- },
+ .op = ast.Node.SuffixOp.Op{ .ArrayInitializer = ast.Node.SuffixOp.Op.InitList.init(arena) },
.rtoken = undefined,
});
opt_ctx.store(&node.base);
- stack.append(State { .CurlySuffixExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .IfToken = Token.Id.LBrace });
- try stack.append(State {
- .ExprListItemOrEnd = ExprListCtx {
- .list = &node.op.ArrayInitializer,
- .end = Token.Id.RBrace,
- .ptr = &node.rtoken,
- }
- });
+ stack.append(State{ .CurlySuffixExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State{ .IfToken = Token.Id.LBrace });
+ try stack.append(State{ .ExprListItemOrEnd = ExprListCtx{
+ .list = &node.op.ArrayInitializer,
+ .end = Token.Id.RBrace,
+ .ptr = &node.rtoken,
+ } });
continue;
},
State.TypeExprBegin => |opt_ctx| {
- stack.append(State { .TypeExprEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .PrefixOpExpression = opt_ctx });
+ stack.append(State{ .TypeExprEnd = opt_ctx }) catch unreachable;
+ try stack.append(State{ .PrefixOpExpression = opt_ctx });
continue;
},
@@ -2228,16 +2071,16 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const lhs = opt_ctx.get() ?? continue;
if (eatToken(&tok_it, &tree, Token.Id.Bang)) |bang| {
- const node = try arena.construct(ast.Node.InfixOp {
- .base = ast.Node {.id = ast.Node.Id.InfixOp },
+ const node = try arena.construct(ast.Node.InfixOp{
+ .base = ast.Node{ .id = ast.Node.Id.InfixOp },
.lhs = lhs,
.op_token = bang,
.op = ast.Node.InfixOp.Op.ErrorUnion,
.rhs = undefined,
});
opt_ctx.store(&node.base);
- stack.append(State { .TypeExprEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .PrefixOpExpression = OptionalCtx { .Required = &node.rhs } });
+ stack.append(State{ .TypeExprEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State{ .PrefixOpExpression = OptionalCtx{ .Required = &node.rhs } });
continue;
}
},
@@ -2247,8 +2090,8 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const token_index = token.index;
const token_ptr = token.ptr;
if (tokenIdToPrefixOp(token_ptr.id)) |prefix_id| {
- var node = try arena.construct(ast.Node.PrefixOp {
- .base = ast.Node {.id = ast.Node.Id.PrefixOp },
+ var node = try arena.construct(ast.Node.PrefixOp{
+ .base = ast.Node{ .id = ast.Node.Id.PrefixOp },
.op_token = token_index,
.op = prefix_id,
.rhs = undefined,
@@ -2257,8 +2100,8 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
// Treat '**' token as two derefs
if (token_ptr.id == Token.Id.AsteriskAsterisk) {
- const child = try arena.construct(ast.Node.PrefixOp {
- .base = ast.Node {.id = ast.Node.Id.PrefixOp},
+ const child = try arena.construct(ast.Node.PrefixOp{
+ .base = ast.Node{ .id = ast.Node.Id.PrefixOp },
.op_token = token_index,
.op = prefix_id,
.rhs = undefined,
@@ -2267,40 +2110,38 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
node = child;
}
- stack.append(State { .TypeExprBegin = OptionalCtx { .Required = &node.rhs } }) catch unreachable;
+ stack.append(State{ .TypeExprBegin = OptionalCtx{ .Required = &node.rhs } }) catch unreachable;
if (node.op == ast.Node.PrefixOp.Op.AddrOf) {
- try stack.append(State { .AddrOfModifiers = &node.op.AddrOf });
+ try stack.append(State{ .AddrOfModifiers = &node.op.AddrOf });
}
continue;
} else {
putBackToken(&tok_it, &tree);
- stack.append(State { .SuffixOpExpressionBegin = opt_ctx }) catch unreachable;
+ stack.append(State{ .SuffixOpExpressionBegin = opt_ctx }) catch unreachable;
continue;
}
},
State.SuffixOpExpressionBegin => |opt_ctx| {
if (eatToken(&tok_it, &tree, Token.Id.Keyword_async)) |async_token| {
- const async_node = try arena.construct(ast.Node.AsyncAttribute {
- .base = ast.Node {.id = ast.Node.Id.AsyncAttribute},
+ const async_node = try arena.construct(ast.Node.AsyncAttribute{
+ .base = ast.Node{ .id = ast.Node.Id.AsyncAttribute },
.async_token = async_token,
.allocator_type = null,
.rangle_bracket = null,
});
- stack.append(State {
- .AsyncEnd = AsyncEndCtx {
- .ctx = opt_ctx,
- .attribute = async_node,
- }
- }) catch unreachable;
- try stack.append(State { .SuffixOpExpressionEnd = opt_ctx.toRequired() });
- try stack.append(State { .PrimaryExpression = opt_ctx.toRequired() });
- try stack.append(State { .AsyncAllocator = async_node });
+ stack.append(State{ .AsyncEnd = AsyncEndCtx{
+ .ctx = opt_ctx,
+ .attribute = async_node,
+ } }) catch unreachable;
+ try stack.append(State{ .SuffixOpExpressionEnd = opt_ctx.toRequired() });
+ try stack.append(State{ .PrimaryExpression = opt_ctx.toRequired() });
+ try stack.append(State{ .AsyncAllocator = async_node });
continue;
}
- stack.append(State { .SuffixOpExpressionEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .PrimaryExpression = opt_ctx });
+ stack.append(State{ .SuffixOpExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State{ .PrimaryExpression = opt_ctx });
continue;
},
@@ -2312,48 +2153,42 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const token_ptr = token.ptr;
switch (token_ptr.id) {
Token.Id.LParen => {
- const node = try arena.construct(ast.Node.SuffixOp {
- .base = ast.Node {.id = ast.Node.Id.SuffixOp },
+ const node = try arena.construct(ast.Node.SuffixOp{
+ .base = ast.Node{ .id = ast.Node.Id.SuffixOp },
.lhs = lhs,
- .op = ast.Node.SuffixOp.Op {
- .Call = ast.Node.SuffixOp.Op.Call {
- .params = ast.Node.SuffixOp.Op.Call.ParamList.init(arena),
- .async_attr = null,
- }
- },
+ .op = ast.Node.SuffixOp.Op{ .Call = ast.Node.SuffixOp.Op.Call{
+ .params = ast.Node.SuffixOp.Op.Call.ParamList.init(arena),
+ .async_attr = null,
+ } },
.rtoken = undefined,
});
opt_ctx.store(&node.base);
- stack.append(State { .SuffixOpExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State {
- .ExprListItemOrEnd = ExprListCtx {
- .list = &node.op.Call.params,
- .end = Token.Id.RParen,
- .ptr = &node.rtoken,
- }
- });
+ stack.append(State{ .SuffixOpExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State{ .ExprListItemOrEnd = ExprListCtx{
+ .list = &node.op.Call.params,
+ .end = Token.Id.RParen,
+ .ptr = &node.rtoken,
+ } });
continue;
},
Token.Id.LBracket => {
- const node = try arena.construct(ast.Node.SuffixOp {
- .base = ast.Node {.id = ast.Node.Id.SuffixOp },
+ const node = try arena.construct(ast.Node.SuffixOp{
+ .base = ast.Node{ .id = ast.Node.Id.SuffixOp },
.lhs = lhs,
- .op = ast.Node.SuffixOp.Op {
- .ArrayAccess = undefined,
- },
- .rtoken = undefined
+ .op = ast.Node.SuffixOp.Op{ .ArrayAccess = undefined },
+ .rtoken = undefined,
});
opt_ctx.store(&node.base);
- stack.append(State { .SuffixOpExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .SliceOrArrayAccess = node });
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.op.ArrayAccess }});
+ stack.append(State{ .SuffixOpExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State{ .SliceOrArrayAccess = node });
+ try stack.append(State{ .Expression = OptionalCtx{ .Required = &node.op.ArrayAccess } });
continue;
},
Token.Id.Period => {
- const node = try arena.construct(ast.Node.InfixOp {
- .base = ast.Node {.id = ast.Node.Id.InfixOp },
+ const node = try arena.construct(ast.Node.InfixOp{
+ .base = ast.Node{ .id = ast.Node.Id.InfixOp },
.lhs = lhs,
.op_token = token_index,
.op = ast.Node.InfixOp.Op.Period,
@@ -2361,8 +2196,8 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
});
opt_ctx.store(&node.base);
- stack.append(State { .SuffixOpExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .Identifier = OptionalCtx { .Required = &node.rhs } });
+ stack.append(State{ .SuffixOpExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State{ .Identifier = OptionalCtx{ .Required = &node.rhs } });
continue;
},
else => {
@@ -2391,7 +2226,8 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
_ = try createToCtxLiteral(arena, opt_ctx, ast.Node.UndefinedLiteral, token.index);
continue;
},
- Token.Id.Keyword_true, Token.Id.Keyword_false => {
+ Token.Id.Keyword_true,
+ Token.Id.Keyword_false => {
_ = try createToCtxLiteral(arena, opt_ctx, ast.Node.BoolLiteral, token.index);
continue;
},
@@ -2412,10 +2248,8 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
continue;
},
Token.Id.Keyword_promise => {
- const node = try arena.construct(ast.Node.PromiseType {
- .base = ast.Node {
- .id = ast.Node.Id.PromiseType,
- },
+ const node = try arena.construct(ast.Node.PromiseType{
+ .base = ast.Node{ .id = ast.Node.Id.PromiseType },
.promise_token = token.index,
.result = null,
});
@@ -2427,121 +2261,108 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
putBackToken(&tok_it, &tree);
continue;
}
- node.result = ast.Node.PromiseType.Result {
+ node.result = ast.Node.PromiseType.Result{
.arrow_token = next_token_index,
.return_type = undefined,
};
const return_type_ptr = &((??node.result).return_type);
- try stack.append(State { .Expression = OptionalCtx { .Required = return_type_ptr, } });
+ try stack.append(State{ .Expression = OptionalCtx{ .Required = return_type_ptr } });
continue;
},
- Token.Id.StringLiteral, Token.Id.MultilineStringLiteralLine => {
+ Token.Id.StringLiteral,
+ Token.Id.MultilineStringLiteralLine => {
opt_ctx.store((try parseStringLiteral(arena, &tok_it, token.ptr, token.index, &tree)) ?? unreachable);
continue;
},
Token.Id.LParen => {
- const node = try arena.construct(ast.Node.GroupedExpression {
- .base = ast.Node {.id = ast.Node.Id.GroupedExpression },
+ const node = try arena.construct(ast.Node.GroupedExpression{
+ .base = ast.Node{ .id = ast.Node.Id.GroupedExpression },
.lparen = token.index,
.expr = undefined,
.rparen = undefined,
});
opt_ctx.store(&node.base);
- stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.RParen,
- .ptr = &node.rparen,
- }
- }) catch unreachable;
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.expr } });
+ stack.append(State{ .ExpectTokenSave = ExpectTokenSave{
+ .id = Token.Id.RParen,
+ .ptr = &node.rparen,
+ } }) catch unreachable;
+ try stack.append(State{ .Expression = OptionalCtx{ .Required = &node.expr } });
continue;
},
Token.Id.Builtin => {
- const node = try arena.construct(ast.Node.BuiltinCall {
- .base = ast.Node {.id = ast.Node.Id.BuiltinCall },
+ const node = try arena.construct(ast.Node.BuiltinCall{
+ .base = ast.Node{ .id = ast.Node.Id.BuiltinCall },
.builtin_token = token.index,
.params = ast.Node.BuiltinCall.ParamList.init(arena),
.rparen_token = undefined,
});
opt_ctx.store(&node.base);
- stack.append(State {
- .ExprListItemOrEnd = ExprListCtx {
- .list = &node.params,
- .end = Token.Id.RParen,
- .ptr = &node.rparen_token,
- }
- }) catch unreachable;
- try stack.append(State { .ExpectToken = Token.Id.LParen, });
+ stack.append(State{ .ExprListItemOrEnd = ExprListCtx{
+ .list = &node.params,
+ .end = Token.Id.RParen,
+ .ptr = &node.rparen_token,
+ } }) catch unreachable;
+ try stack.append(State{ .ExpectToken = Token.Id.LParen });
continue;
},
Token.Id.LBracket => {
- const node = try arena.construct(ast.Node.PrefixOp {
- .base = ast.Node {.id = ast.Node.Id.PrefixOp },
+ const node = try arena.construct(ast.Node.PrefixOp{
+ .base = ast.Node{ .id = ast.Node.Id.PrefixOp },
.op_token = token.index,
.op = undefined,
.rhs = undefined,
});
opt_ctx.store(&node.base);
- stack.append(State { .SliceOrArrayType = node }) catch unreachable;
+ stack.append(State{ .SliceOrArrayType = node }) catch unreachable;
continue;
},
Token.Id.Keyword_error => {
- stack.append(State {
- .ErrorTypeOrSetDecl = ErrorTypeOrSetDeclCtx {
- .error_token = token.index,
- .opt_ctx = opt_ctx
- }
- }) catch unreachable;
+ stack.append(State{ .ErrorTypeOrSetDecl = ErrorTypeOrSetDeclCtx{
+ .error_token = token.index,
+ .opt_ctx = opt_ctx,
+ } }) catch unreachable;
continue;
},
Token.Id.Keyword_packed => {
- stack.append(State {
- .ContainerKind = ContainerKindCtx {
- .opt_ctx = opt_ctx,
- .ltoken = token.index,
- .layout = ast.Node.ContainerDecl.Layout.Packed,
- },
- }) catch unreachable;
+ stack.append(State{ .ContainerKind = ContainerKindCtx{
+ .opt_ctx = opt_ctx,
+ .ltoken = token.index,
+ .layout = ast.Node.ContainerDecl.Layout.Packed,
+ } }) catch unreachable;
continue;
},
Token.Id.Keyword_extern => {
- stack.append(State {
- .ExternType = ExternTypeCtx {
- .opt_ctx = opt_ctx,
- .extern_token = token.index,
- .comments = null,
- },
- }) catch unreachable;
+ stack.append(State{ .ExternType = ExternTypeCtx{
+ .opt_ctx = opt_ctx,
+ .extern_token = token.index,
+ .comments = null,
+ } }) catch unreachable;
continue;
},
- Token.Id.Keyword_struct, Token.Id.Keyword_union, Token.Id.Keyword_enum => {
+ Token.Id.Keyword_struct,
+ Token.Id.Keyword_union,
+ Token.Id.Keyword_enum => {
putBackToken(&tok_it, &tree);
- stack.append(State {
- .ContainerKind = ContainerKindCtx {
- .opt_ctx = opt_ctx,
- .ltoken = token.index,
- .layout = ast.Node.ContainerDecl.Layout.Auto,
- },
- }) catch unreachable;
+ stack.append(State{ .ContainerKind = ContainerKindCtx{
+ .opt_ctx = opt_ctx,
+ .ltoken = token.index,
+ .layout = ast.Node.ContainerDecl.Layout.Auto,
+ } }) catch unreachable;
continue;
},
Token.Id.Identifier => {
- stack.append(State {
- .MaybeLabeledExpression = MaybeLabeledExpressionCtx {
- .label = token.index,
- .opt_ctx = opt_ctx
- }
- }) catch unreachable;
+ stack.append(State{ .MaybeLabeledExpression = MaybeLabeledExpressionCtx{
+ .label = token.index,
+ .opt_ctx = opt_ctx,
+ } }) catch unreachable;
continue;
},
Token.Id.Keyword_fn => {
- const fn_proto = try arena.construct(ast.Node.FnProto {
- .base = ast.Node {
- .id = ast.Node.Id.FnProto,
- },
+ const fn_proto = try arena.construct(ast.Node.FnProto{
+ .base = ast.Node{ .id = ast.Node.Id.FnProto },
.doc_comments = null,
.visib_token = null,
.name_token = null,
@@ -2557,14 +2378,13 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
.align_expr = null,
});
opt_ctx.store(&fn_proto.base);
- stack.append(State { .FnProto = fn_proto }) catch unreachable;
+ stack.append(State{ .FnProto = fn_proto }) catch unreachable;
continue;
},
- Token.Id.Keyword_nakedcc, Token.Id.Keyword_stdcallcc => {
- const fn_proto = try arena.construct(ast.Node.FnProto {
- .base = ast.Node {
- .id = ast.Node.Id.FnProto,
- },
+ Token.Id.Keyword_nakedcc,
+ Token.Id.Keyword_stdcallcc => {
+ const fn_proto = try arena.construct(ast.Node.FnProto{
+ .base = ast.Node{ .id = ast.Node.Id.FnProto },
.doc_comments = null,
.visib_token = null,
.name_token = null,
@@ -2580,18 +2400,16 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
.align_expr = null,
});
opt_ctx.store(&fn_proto.base);
- stack.append(State { .FnProto = fn_proto }) catch unreachable;
- try stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.Keyword_fn,
- .ptr = &fn_proto.fn_token
- }
- });
+ stack.append(State{ .FnProto = fn_proto }) catch unreachable;
+ try stack.append(State{ .ExpectTokenSave = ExpectTokenSave{
+ .id = Token.Id.Keyword_fn,
+ .ptr = &fn_proto.fn_token,
+ } });
continue;
},
Token.Id.Keyword_asm => {
- const node = try arena.construct(ast.Node.Asm {
- .base = ast.Node {.id = ast.Node.Id.Asm },
+ const node = try arena.construct(ast.Node.Asm{
+ .base = ast.Node{ .id = ast.Node.Id.Asm },
.asm_token = token.index,
.volatile_token = null,
.template = undefined,
@@ -2602,94 +2420,77 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
});
opt_ctx.store(&node.base);
- stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.RParen,
- .ptr = &node.rparen,
- }
- }) catch unreachable;
- try stack.append(State { .AsmClobberItems = &node.clobbers });
- try stack.append(State { .IfToken = Token.Id.Colon });
- try stack.append(State { .AsmInputItems = &node.inputs });
- try stack.append(State { .IfToken = Token.Id.Colon });
- try stack.append(State { .AsmOutputItems = &node.outputs });
- try stack.append(State { .IfToken = Token.Id.Colon });
- try stack.append(State { .StringLiteral = OptionalCtx { .Required = &node.template } });
- try stack.append(State { .ExpectToken = Token.Id.LParen });
- try stack.append(State {
- .OptionalTokenSave = OptionalTokenSave {
- .id = Token.Id.Keyword_volatile,
- .ptr = &node.volatile_token,
- }
- });
+ stack.append(State{ .ExpectTokenSave = ExpectTokenSave{
+ .id = Token.Id.RParen,
+ .ptr = &node.rparen,
+ } }) catch unreachable;
+ try stack.append(State{ .AsmClobberItems = &node.clobbers });
+ try stack.append(State{ .IfToken = Token.Id.Colon });
+ try stack.append(State{ .AsmInputItems = &node.inputs });
+ try stack.append(State{ .IfToken = Token.Id.Colon });
+ try stack.append(State{ .AsmOutputItems = &node.outputs });
+ try stack.append(State{ .IfToken = Token.Id.Colon });
+ try stack.append(State{ .StringLiteral = OptionalCtx{ .Required = &node.template } });
+ try stack.append(State{ .ExpectToken = Token.Id.LParen });
+ try stack.append(State{ .OptionalTokenSave = OptionalTokenSave{
+ .id = Token.Id.Keyword_volatile,
+ .ptr = &node.volatile_token,
+ } });
},
Token.Id.Keyword_inline => {
- stack.append(State {
- .Inline = InlineCtx {
- .label = null,
- .inline_token = token.index,
- .opt_ctx = opt_ctx,
- }
- }) catch unreachable;
+ stack.append(State{ .Inline = InlineCtx{
+ .label = null,
+ .inline_token = token.index,
+ .opt_ctx = opt_ctx,
+ } }) catch unreachable;
continue;
},
else => {
if (!try parseBlockExpr(&stack, arena, opt_ctx, token.ptr, token.index)) {
putBackToken(&tok_it, &tree);
if (opt_ctx != OptionalCtx.Optional) {
- *(try tree.errors.addOne()) = Error {
- .ExpectedPrimaryExpr = Error.ExpectedPrimaryExpr { .token = token.index },
- };
+ ((try tree.errors.addOne())).* = Error{ .ExpectedPrimaryExpr = Error.ExpectedPrimaryExpr{ .token = token.index } };
return tree;
}
}
continue;
- }
+ },
}
},
-
State.ErrorTypeOrSetDecl => |ctx| {
if (eatToken(&tok_it, &tree, Token.Id.LBrace) == null) {
_ = try createToCtxLiteral(arena, ctx.opt_ctx, ast.Node.ErrorType, ctx.error_token);
continue;
}
- const node = try arena.construct(ast.Node.ErrorSetDecl {
- .base = ast.Node {
- .id = ast.Node.Id.ErrorSetDecl,
- },
+ const node = try arena.construct(ast.Node.ErrorSetDecl{
+ .base = ast.Node{ .id = ast.Node.Id.ErrorSetDecl },
.error_token = ctx.error_token,
.decls = ast.Node.ErrorSetDecl.DeclList.init(arena),
.rbrace_token = undefined,
});
ctx.opt_ctx.store(&node.base);
- stack.append(State {
- .ErrorTagListItemOrEnd = ListSave(@typeOf(node.decls)) {
- .list = &node.decls,
- .ptr = &node.rbrace_token,
- }
- }) catch unreachable;
+ stack.append(State{ .ErrorTagListItemOrEnd = ListSave(@typeOf(node.decls)){
+ .list = &node.decls,
+ .ptr = &node.rbrace_token,
+ } }) catch unreachable;
continue;
},
State.StringLiteral => |opt_ctx| {
const token = nextToken(&tok_it, &tree);
const token_index = token.index;
const token_ptr = token.ptr;
- opt_ctx.store(
- (try parseStringLiteral(arena, &tok_it, token_ptr, token_index, &tree)) ?? {
- putBackToken(&tok_it, &tree);
- if (opt_ctx != OptionalCtx.Optional) {
- *(try tree.errors.addOne()) = Error {
- .ExpectedPrimaryExpr = Error.ExpectedPrimaryExpr { .token = token_index },
- };
- return tree;
- }
-
- continue;
+ opt_ctx.store((try parseStringLiteral(arena, &tok_it, token_ptr, token_index, &tree)) ?? {
+ putBackToken(&tok_it, &tree);
+ if (opt_ctx != OptionalCtx.Optional) {
+ ((try tree.errors.addOne())).* = Error{ .ExpectedPrimaryExpr = Error.ExpectedPrimaryExpr{ .token = token_index } };
+ return tree;
}
- );
+
+ continue;
+ });
},
State.Identifier => |opt_ctx| {
@@ -2702,12 +2503,10 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const token = nextToken(&tok_it, &tree);
const token_index = token.index;
const token_ptr = token.ptr;
- *(try tree.errors.addOne()) = Error {
- .ExpectedToken = Error.ExpectedToken {
- .token = token_index,
- .expected_id = Token.Id.Identifier,
- },
- };
+ ((try tree.errors.addOne())).* = Error{ .ExpectedToken = Error.ExpectedToken{
+ .token = token_index,
+ .expected_id = Token.Id.Identifier,
+ } };
return tree;
}
},
@@ -2718,23 +2517,19 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const ident_token_index = ident_token.index;
const ident_token_ptr = ident_token.ptr;
if (ident_token_ptr.id != Token.Id.Identifier) {
- *(try tree.errors.addOne()) = Error {
- .ExpectedToken = Error.ExpectedToken {
- .token = ident_token_index,
- .expected_id = Token.Id.Identifier,
- },
- };
+ ((try tree.errors.addOne())).* = Error{ .ExpectedToken = Error.ExpectedToken{
+ .token = ident_token_index,
+ .expected_id = Token.Id.Identifier,
+ } };
return tree;
}
- const node = try arena.construct(ast.Node.ErrorTag {
- .base = ast.Node {
- .id = ast.Node.Id.ErrorTag,
- },
+ const node = try arena.construct(ast.Node.ErrorTag{
+ .base = ast.Node{ .id = ast.Node.Id.ErrorTag },
.doc_comments = comments,
.name_token = ident_token_index,
});
- *node_ptr = &node.base;
+ node_ptr.* = &node.base;
continue;
},
@@ -2743,12 +2538,10 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const token_index = token.index;
const token_ptr = token.ptr;
if (token_ptr.id != token_id) {
- *(try tree.errors.addOne()) = Error {
- .ExpectedToken = Error.ExpectedToken {
- .token = token_index,
- .expected_id = token_id,
- },
- };
+ ((try tree.errors.addOne())).* = Error{ .ExpectedToken = Error.ExpectedToken{
+ .token = token_index,
+ .expected_id = token_id,
+ } };
return tree;
}
continue;
@@ -2758,15 +2551,13 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
const token_index = token.index;
const token_ptr = token.ptr;
if (token_ptr.id != expect_token_save.id) {
- *(try tree.errors.addOne()) = Error {
- .ExpectedToken = Error.ExpectedToken {
- .token = token_index,
- .expected_id = expect_token_save.id,
- },
- };
+ ((try tree.errors.addOne())).* = Error{ .ExpectedToken = Error.ExpectedToken{
+ .token = token_index,
+ .expected_id = expect_token_save.id,
+ } };
return tree;
}
- *expect_token_save.ptr = token_index;
+ (expect_token_save.ptr).* = token_index;
continue;
},
State.IfToken => |token_id| {
@@ -2779,7 +2570,7 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
},
State.IfTokenSave => |if_token_save| {
if (eatToken(&tok_it, &tree, if_token_save.id)) |token_index| {
- *if_token_save.ptr = token_index;
+ (if_token_save.ptr).* = token_index;
continue;
}
@@ -2788,7 +2579,7 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
},
State.OptionalTokenSave => |optional_token_save| {
if (eatToken(&tok_it, &tree, optional_token_save.id)) |token_index| {
- *optional_token_save.ptr = token_index;
+ (optional_token_save.ptr).* = token_index;
continue;
}
@@ -2911,28 +2702,28 @@ const OptionalCtx = union(enum) {
Required: &&ast.Node,
pub fn store(self: &const OptionalCtx, value: &ast.Node) void {
- switch (*self) {
- OptionalCtx.Optional => |ptr| *ptr = value,
- OptionalCtx.RequiredNull => |ptr| *ptr = value,
- OptionalCtx.Required => |ptr| *ptr = value,
+ switch (self.*) {
+ OptionalCtx.Optional => |ptr| ptr.* = value,
+ OptionalCtx.RequiredNull => |ptr| ptr.* = value,
+ OptionalCtx.Required => |ptr| ptr.* = value,
}
}
pub fn get(self: &const OptionalCtx) ?&ast.Node {
- switch (*self) {
- OptionalCtx.Optional => |ptr| return *ptr,
- OptionalCtx.RequiredNull => |ptr| return ??*ptr,
- OptionalCtx.Required => |ptr| return *ptr,
+ switch (self.*) {
+ OptionalCtx.Optional => |ptr| return ptr.*,
+ OptionalCtx.RequiredNull => |ptr| return ??ptr.*,
+ OptionalCtx.Required => |ptr| return ptr.*,
}
}
pub fn toRequired(self: &const OptionalCtx) OptionalCtx {
- switch (*self) {
+ switch (self.*) {
OptionalCtx.Optional => |ptr| {
- return OptionalCtx { .RequiredNull = ptr };
+ return OptionalCtx{ .RequiredNull = ptr };
},
- OptionalCtx.RequiredNull => |ptr| return *self,
- OptionalCtx.Required => |ptr| return *self,
+ OptionalCtx.RequiredNull => |ptr| return self.*,
+ OptionalCtx.Required => |ptr| return self.*,
}
}
};
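
Aside: the OptionalCtx hunk above is representative of the pointer-dereference migration that runs through this whole commit: every prefix *ptr read or write becomes a postfix ptr.* access. A minimal sketch of the spelling change, not taken from this commit (the function and parameter names are made up):

    // Illustrative only: prefix deref becomes postfix deref.
    fn bump(counter: &usize) void {
        // before this commit: *counter = *counter + 1;
        counter.* = counter.* + 1;
    }
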
@@ -3054,7 +2845,6 @@ const State = union(enum) {
Identifier: OptionalCtx,
ErrorTag: &&ast.Node,
-
IfToken: @TagType(Token.Id),
IfTokenSave: ExpectTokenSave,
ExpectToken: @TagType(Token.Id),
@@ -3064,16 +2854,14 @@ const State = union(enum) {
fn pushDocComment(arena: &mem.Allocator, line_comment: TokenIndex, result: &?&ast.Node.DocComment) !void {
const node = blk: {
- if (*result) |comment_node| {
+ if (result.*) |comment_node| {
break :blk comment_node;
} else {
- const comment_node = try arena.construct(ast.Node.DocComment {
- .base = ast.Node {
- .id = ast.Node.Id.DocComment,
- },
+ const comment_node = try arena.construct(ast.Node.DocComment{
+ .base = ast.Node{ .id = ast.Node.Id.DocComment },
.lines = ast.Node.DocComment.LineList.init(arena),
});
- *result = comment_node;
+ result.* = comment_node;
break :blk comment_node;
}
};
@@ -3094,24 +2882,20 @@ fn eatDocComments(arena: &mem.Allocator, tok_it: &ast.Tree.TokenList.Iterator, t
fn eatLineComment(arena: &mem.Allocator, tok_it: &ast.Tree.TokenList.Iterator, tree: &ast.Tree) !?&ast.Node.LineComment {
const token = eatToken(tok_it, tree, Token.Id.LineComment) ?? return null;
- return try arena.construct(ast.Node.LineComment {
- .base = ast.Node {
- .id = ast.Node.Id.LineComment,
- },
+ return try arena.construct(ast.Node.LineComment{
+ .base = ast.Node{ .id = ast.Node.Id.LineComment },
.token = token,
});
}
-fn parseStringLiteral(arena: &mem.Allocator, tok_it: &ast.Tree.TokenList.Iterator,
- token_ptr: &const Token, token_index: TokenIndex, tree: &ast.Tree) !?&ast.Node
-{
+fn parseStringLiteral(arena: &mem.Allocator, tok_it: &ast.Tree.TokenList.Iterator, token_ptr: &const Token, token_index: TokenIndex, tree: &ast.Tree) !?&ast.Node {
switch (token_ptr.id) {
Token.Id.StringLiteral => {
return &(try createLiteral(arena, ast.Node.StringLiteral, token_index)).base;
},
Token.Id.MultilineStringLiteralLine => {
- const node = try arena.construct(ast.Node.MultilineStringLiteral {
- .base = ast.Node { .id = ast.Node.Id.MultilineStringLiteral },
+ const node = try arena.construct(ast.Node.MultilineStringLiteral{
+ .base = ast.Node{ .id = ast.Node.Id.MultilineStringLiteral },
.lines = ast.Node.MultilineStringLiteral.LineList.init(arena),
});
try node.lines.push(token_index);
@@ -3135,12 +2919,11 @@ fn parseStringLiteral(arena: &mem.Allocator, tok_it: &ast.Tree.TokenList.Iterato
}
}
-fn parseBlockExpr(stack: &std.ArrayList(State), arena: &mem.Allocator, ctx: &const OptionalCtx,
- token_ptr: &const Token, token_index: TokenIndex) !bool {
+fn parseBlockExpr(stack: &std.ArrayList(State), arena: &mem.Allocator, ctx: &const OptionalCtx, token_ptr: &const Token, token_index: TokenIndex) !bool {
switch (token_ptr.id) {
Token.Id.Keyword_suspend => {
- const node = try arena.construct(ast.Node.Suspend {
- .base = ast.Node {.id = ast.Node.Id.Suspend },
+ const node = try arena.construct(ast.Node.Suspend{
+ .base = ast.Node{ .id = ast.Node.Id.Suspend },
.label = null,
.suspend_token = token_index,
.payload = null,
@@ -3148,13 +2931,13 @@ fn parseBlockExpr(stack: &std.ArrayList(State), arena: &mem.Allocator, ctx: &con
});
ctx.store(&node.base);
- stack.append(State { .SuspendBody = node }) catch unreachable;
- try stack.append(State { .Payload = OptionalCtx { .Optional = &node.payload } });
+ stack.append(State{ .SuspendBody = node }) catch unreachable;
+ try stack.append(State{ .Payload = OptionalCtx{ .Optional = &node.payload } });
return true;
},
Token.Id.Keyword_if => {
- const node = try arena.construct(ast.Node.If {
- .base = ast.Node {.id = ast.Node.Id.If },
+ const node = try arena.construct(ast.Node.If{
+ .base = ast.Node{ .id = ast.Node.Id.If },
.if_token = token_index,
.condition = undefined,
.payload = null,
@@ -3163,41 +2946,35 @@ fn parseBlockExpr(stack: &std.ArrayList(State), arena: &mem.Allocator, ctx: &con
});
ctx.store(&node.base);
- stack.append(State { .Else = &node.@"else" }) catch unreachable;
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.body } });
- try stack.append(State { .PointerPayload = OptionalCtx { .Optional = &node.payload } });
- try stack.append(State { .ExpectToken = Token.Id.RParen });
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.condition } });
- try stack.append(State { .ExpectToken = Token.Id.LParen });
+ stack.append(State{ .Else = &node.@"else" }) catch unreachable;
+ try stack.append(State{ .Expression = OptionalCtx{ .Required = &node.body } });
+ try stack.append(State{ .PointerPayload = OptionalCtx{ .Optional = &node.payload } });
+ try stack.append(State{ .ExpectToken = Token.Id.RParen });
+ try stack.append(State{ .Expression = OptionalCtx{ .Required = &node.condition } });
+ try stack.append(State{ .ExpectToken = Token.Id.LParen });
return true;
},
Token.Id.Keyword_while => {
- stack.append(State {
- .While = LoopCtx {
- .label = null,
- .inline_token = null,
- .loop_token = token_index,
- .opt_ctx = *ctx,
- }
- }) catch unreachable;
+ stack.append(State{ .While = LoopCtx{
+ .label = null,
+ .inline_token = null,
+ .loop_token = token_index,
+ .opt_ctx = ctx.*,
+ } }) catch unreachable;
return true;
},
Token.Id.Keyword_for => {
- stack.append(State {
- .For = LoopCtx {
- .label = null,
- .inline_token = null,
- .loop_token = token_index,
- .opt_ctx = *ctx,
- }
- }) catch unreachable;
+ stack.append(State{ .For = LoopCtx{
+ .label = null,
+ .inline_token = null,
+ .loop_token = token_index,
+ .opt_ctx = ctx.*,
+ } }) catch unreachable;
return true;
},
Token.Id.Keyword_switch => {
- const node = try arena.construct(ast.Node.Switch {
- .base = ast.Node {
- .id = ast.Node.Id.Switch,
- },
+ const node = try arena.construct(ast.Node.Switch{
+ .base = ast.Node{ .id = ast.Node.Id.Switch },
.switch_token = token_index,
.expr = undefined,
.cases = ast.Node.Switch.CaseList.init(arena),
@@ -3205,45 +2982,43 @@ fn parseBlockExpr(stack: &std.ArrayList(State), arena: &mem.Allocator, ctx: &con
});
ctx.store(&node.base);
- stack.append(State {
- .SwitchCaseOrEnd = ListSave(@typeOf(node.cases)) {
- .list = &node.cases,
- .ptr = &node.rbrace,
- },
- }) catch unreachable;
- try stack.append(State { .ExpectToken = Token.Id.LBrace });
- try stack.append(State { .ExpectToken = Token.Id.RParen });
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.expr } });
- try stack.append(State { .ExpectToken = Token.Id.LParen });
+ stack.append(State{ .SwitchCaseOrEnd = ListSave(@typeOf(node.cases)){
+ .list = &node.cases,
+ .ptr = &node.rbrace,
+ } }) catch unreachable;
+ try stack.append(State{ .ExpectToken = Token.Id.LBrace });
+ try stack.append(State{ .ExpectToken = Token.Id.RParen });
+ try stack.append(State{ .Expression = OptionalCtx{ .Required = &node.expr } });
+ try stack.append(State{ .ExpectToken = Token.Id.LParen });
return true;
},
Token.Id.Keyword_comptime => {
- const node = try arena.construct(ast.Node.Comptime {
- .base = ast.Node {.id = ast.Node.Id.Comptime },
+ const node = try arena.construct(ast.Node.Comptime{
+ .base = ast.Node{ .id = ast.Node.Id.Comptime },
.comptime_token = token_index,
.expr = undefined,
.doc_comments = null,
});
ctx.store(&node.base);
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.expr } });
+ try stack.append(State{ .Expression = OptionalCtx{ .Required = &node.expr } });
return true;
},
Token.Id.LBrace => {
- const block = try arena.construct(ast.Node.Block {
- .base = ast.Node {.id = ast.Node.Id.Block },
+ const block = try arena.construct(ast.Node.Block{
+ .base = ast.Node{ .id = ast.Node.Id.Block },
.label = null,
.lbrace = token_index,
.statements = ast.Node.Block.StatementList.init(arena),
.rbrace = undefined,
});
ctx.store(&block.base);
- stack.append(State { .Block = block }) catch unreachable;
+ stack.append(State{ .Block = block }) catch unreachable;
return true;
},
else => {
return false;
- }
+ },
}
}
@@ -3257,20 +3032,16 @@ fn expectCommaOrEnd(tok_it: &ast.Tree.TokenList.Iterator, tree: &ast.Tree, end:
const token_index = token.index;
const token_ptr = token.ptr;
switch (token_ptr.id) {
- Token.Id.Comma => return ExpectCommaOrEndResult { .end_token = null},
+ Token.Id.Comma => return ExpectCommaOrEndResult{ .end_token = null },
else => {
if (end == token_ptr.id) {
- return ExpectCommaOrEndResult { .end_token = token_index };
+ return ExpectCommaOrEndResult{ .end_token = token_index };
}
- return ExpectCommaOrEndResult {
- .parse_error = Error {
- .ExpectedCommaOrEnd = Error.ExpectedCommaOrEnd {
- .token = token_index,
- .end_id = end,
- },
- },
- };
+ return ExpectCommaOrEndResult{ .parse_error = Error{ .ExpectedCommaOrEnd = Error.ExpectedCommaOrEnd{
+ .token = token_index,
+ .end_id = end,
+ } } };
},
}
}
@@ -3278,103 +3049,102 @@ fn expectCommaOrEnd(tok_it: &ast.Tree.TokenList.Iterator, tree: &ast.Tree, end:
fn tokenIdToAssignment(id: &const Token.Id) ?ast.Node.InfixOp.Op {
// TODO: We have to cast all cases because of this:
// error: expected type '?InfixOp', found '?@TagType(InfixOp)'
- return switch (*id) {
- Token.Id.AmpersandEqual => ast.Node.InfixOp.Op { .AssignBitAnd = {} },
- Token.Id.AngleBracketAngleBracketLeftEqual => ast.Node.InfixOp.Op { .AssignBitShiftLeft = {} },
- Token.Id.AngleBracketAngleBracketRightEqual => ast.Node.InfixOp.Op { .AssignBitShiftRight = {} },
- Token.Id.AsteriskEqual => ast.Node.InfixOp.Op { .AssignTimes = {} },
- Token.Id.AsteriskPercentEqual => ast.Node.InfixOp.Op { .AssignTimesWarp = {} },
- Token.Id.CaretEqual => ast.Node.InfixOp.Op { .AssignBitXor = {} },
- Token.Id.Equal => ast.Node.InfixOp.Op { .Assign = {} },
- Token.Id.MinusEqual => ast.Node.InfixOp.Op { .AssignMinus = {} },
- Token.Id.MinusPercentEqual => ast.Node.InfixOp.Op { .AssignMinusWrap = {} },
- Token.Id.PercentEqual => ast.Node.InfixOp.Op { .AssignMod = {} },
- Token.Id.PipeEqual => ast.Node.InfixOp.Op { .AssignBitOr = {} },
- Token.Id.PlusEqual => ast.Node.InfixOp.Op { .AssignPlus = {} },
- Token.Id.PlusPercentEqual => ast.Node.InfixOp.Op { .AssignPlusWrap = {} },
- Token.Id.SlashEqual => ast.Node.InfixOp.Op { .AssignDiv = {} },
+ return switch (id.*) {
+ Token.Id.AmpersandEqual => ast.Node.InfixOp.Op{ .AssignBitAnd = {} },
+ Token.Id.AngleBracketAngleBracketLeftEqual => ast.Node.InfixOp.Op{ .AssignBitShiftLeft = {} },
+ Token.Id.AngleBracketAngleBracketRightEqual => ast.Node.InfixOp.Op{ .AssignBitShiftRight = {} },
+ Token.Id.AsteriskEqual => ast.Node.InfixOp.Op{ .AssignTimes = {} },
+ Token.Id.AsteriskPercentEqual => ast.Node.InfixOp.Op{ .AssignTimesWarp = {} },
+ Token.Id.CaretEqual => ast.Node.InfixOp.Op{ .AssignBitXor = {} },
+ Token.Id.Equal => ast.Node.InfixOp.Op{ .Assign = {} },
+ Token.Id.MinusEqual => ast.Node.InfixOp.Op{ .AssignMinus = {} },
+ Token.Id.MinusPercentEqual => ast.Node.InfixOp.Op{ .AssignMinusWrap = {} },
+ Token.Id.PercentEqual => ast.Node.InfixOp.Op{ .AssignMod = {} },
+ Token.Id.PipeEqual => ast.Node.InfixOp.Op{ .AssignBitOr = {} },
+ Token.Id.PlusEqual => ast.Node.InfixOp.Op{ .AssignPlus = {} },
+ Token.Id.PlusPercentEqual => ast.Node.InfixOp.Op{ .AssignPlusWrap = {} },
+ Token.Id.SlashEqual => ast.Node.InfixOp.Op{ .AssignDiv = {} },
else => null,
};
}
fn tokenIdToUnwrapExpr(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
return switch (id) {
- Token.Id.Keyword_catch => ast.Node.InfixOp.Op { .Catch = null },
- Token.Id.QuestionMarkQuestionMark => ast.Node.InfixOp.Op { .UnwrapMaybe = void{} },
+ Token.Id.Keyword_catch => ast.Node.InfixOp.Op{ .Catch = null },
+ Token.Id.QuestionMarkQuestionMark => ast.Node.InfixOp.Op{ .UnwrapMaybe = void{} },
else => null,
};
}
fn tokenIdToComparison(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
return switch (id) {
- Token.Id.BangEqual => ast.Node.InfixOp.Op { .BangEqual = void{} },
- Token.Id.EqualEqual => ast.Node.InfixOp.Op { .EqualEqual = void{} },
- Token.Id.AngleBracketLeft => ast.Node.InfixOp.Op { .LessThan = void{} },
- Token.Id.AngleBracketLeftEqual => ast.Node.InfixOp.Op { .LessOrEqual = void{} },
- Token.Id.AngleBracketRight => ast.Node.InfixOp.Op { .GreaterThan = void{} },
- Token.Id.AngleBracketRightEqual => ast.Node.InfixOp.Op { .GreaterOrEqual = void{} },
+ Token.Id.BangEqual => ast.Node.InfixOp.Op{ .BangEqual = void{} },
+ Token.Id.EqualEqual => ast.Node.InfixOp.Op{ .EqualEqual = void{} },
+ Token.Id.AngleBracketLeft => ast.Node.InfixOp.Op{ .LessThan = void{} },
+ Token.Id.AngleBracketLeftEqual => ast.Node.InfixOp.Op{ .LessOrEqual = void{} },
+ Token.Id.AngleBracketRight => ast.Node.InfixOp.Op{ .GreaterThan = void{} },
+ Token.Id.AngleBracketRightEqual => ast.Node.InfixOp.Op{ .GreaterOrEqual = void{} },
else => null,
};
}
fn tokenIdToBitShift(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
return switch (id) {
- Token.Id.AngleBracketAngleBracketLeft => ast.Node.InfixOp.Op { .BitShiftLeft = void{} },
- Token.Id.AngleBracketAngleBracketRight => ast.Node.InfixOp.Op { .BitShiftRight = void{} },
+ Token.Id.AngleBracketAngleBracketLeft => ast.Node.InfixOp.Op{ .BitShiftLeft = void{} },
+ Token.Id.AngleBracketAngleBracketRight => ast.Node.InfixOp.Op{ .BitShiftRight = void{} },
else => null,
};
}
fn tokenIdToAddition(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
return switch (id) {
- Token.Id.Minus => ast.Node.InfixOp.Op { .Sub = void{} },
- Token.Id.MinusPercent => ast.Node.InfixOp.Op { .SubWrap = void{} },
- Token.Id.Plus => ast.Node.InfixOp.Op { .Add = void{} },
- Token.Id.PlusPercent => ast.Node.InfixOp.Op { .AddWrap = void{} },
- Token.Id.PlusPlus => ast.Node.InfixOp.Op { .ArrayCat = void{} },
+ Token.Id.Minus => ast.Node.InfixOp.Op{ .Sub = void{} },
+ Token.Id.MinusPercent => ast.Node.InfixOp.Op{ .SubWrap = void{} },
+ Token.Id.Plus => ast.Node.InfixOp.Op{ .Add = void{} },
+ Token.Id.PlusPercent => ast.Node.InfixOp.Op{ .AddWrap = void{} },
+ Token.Id.PlusPlus => ast.Node.InfixOp.Op{ .ArrayCat = void{} },
else => null,
};
}
fn tokenIdToMultiply(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
return switch (id) {
- Token.Id.Slash => ast.Node.InfixOp.Op { .Div = void{} },
- Token.Id.Asterisk => ast.Node.InfixOp.Op { .Mult = void{} },
- Token.Id.AsteriskAsterisk => ast.Node.InfixOp.Op { .ArrayMult = void{} },
- Token.Id.AsteriskPercent => ast.Node.InfixOp.Op { .MultWrap = void{} },
- Token.Id.Percent => ast.Node.InfixOp.Op { .Mod = void{} },
- Token.Id.PipePipe => ast.Node.InfixOp.Op { .MergeErrorSets = void{} },
+ Token.Id.Slash => ast.Node.InfixOp.Op{ .Div = void{} },
+ Token.Id.Asterisk => ast.Node.InfixOp.Op{ .Mult = void{} },
+ Token.Id.AsteriskAsterisk => ast.Node.InfixOp.Op{ .ArrayMult = void{} },
+ Token.Id.AsteriskPercent => ast.Node.InfixOp.Op{ .MultWrap = void{} },
+ Token.Id.Percent => ast.Node.InfixOp.Op{ .Mod = void{} },
+ Token.Id.PipePipe => ast.Node.InfixOp.Op{ .MergeErrorSets = void{} },
else => null,
};
}
fn tokenIdToPrefixOp(id: @TagType(Token.Id)) ?ast.Node.PrefixOp.Op {
return switch (id) {
- Token.Id.Bang => ast.Node.PrefixOp.Op { .BoolNot = void{} },
- Token.Id.Tilde => ast.Node.PrefixOp.Op { .BitNot = void{} },
- Token.Id.Minus => ast.Node.PrefixOp.Op { .Negation = void{} },
- Token.Id.MinusPercent => ast.Node.PrefixOp.Op { .NegationWrap = void{} },
- Token.Id.Asterisk, Token.Id.AsteriskAsterisk => ast.Node.PrefixOp.Op { .Deref = void{} },
- Token.Id.Ampersand => ast.Node.PrefixOp.Op {
- .AddrOf = ast.Node.PrefixOp.AddrOfInfo {
- .align_expr = null,
- .bit_offset_start_token = null,
- .bit_offset_end_token = null,
- .const_token = null,
- .volatile_token = null,
- },
- },
- Token.Id.QuestionMark => ast.Node.PrefixOp.Op { .MaybeType = void{} },
- Token.Id.QuestionMarkQuestionMark => ast.Node.PrefixOp.Op { .UnwrapMaybe = void{} },
- Token.Id.Keyword_await => ast.Node.PrefixOp.Op { .Await = void{} },
- Token.Id.Keyword_try => ast.Node.PrefixOp.Op { .Try = void{ } },
+ Token.Id.Bang => ast.Node.PrefixOp.Op{ .BoolNot = void{} },
+ Token.Id.Tilde => ast.Node.PrefixOp.Op{ .BitNot = void{} },
+ Token.Id.Minus => ast.Node.PrefixOp.Op{ .Negation = void{} },
+ Token.Id.MinusPercent => ast.Node.PrefixOp.Op{ .NegationWrap = void{} },
+ Token.Id.Asterisk,
+ Token.Id.AsteriskAsterisk => ast.Node.PrefixOp.Op{ .Deref = void{} },
+ Token.Id.Ampersand => ast.Node.PrefixOp.Op{ .AddrOf = ast.Node.PrefixOp.AddrOfInfo{
+ .align_expr = null,
+ .bit_offset_start_token = null,
+ .bit_offset_end_token = null,
+ .const_token = null,
+ .volatile_token = null,
+ } },
+ Token.Id.QuestionMark => ast.Node.PrefixOp.Op{ .MaybeType = void{} },
+ Token.Id.QuestionMarkQuestionMark => ast.Node.PrefixOp.Op{ .UnwrapMaybe = void{} },
+ Token.Id.Keyword_await => ast.Node.PrefixOp.Op{ .Await = void{} },
+ Token.Id.Keyword_try => ast.Node.PrefixOp.Op{ .Try = void{} },
else => null,
};
}
fn createLiteral(arena: &mem.Allocator, comptime T: type, token_index: TokenIndex) !&T {
- return arena.construct(T {
- .base = ast.Node {.id = ast.Node.typeToId(T)},
+ return arena.construct(T{
+ .base = ast.Node{ .id = ast.Node.typeToId(T) },
.token = token_index,
});
}
@@ -3389,15 +3159,14 @@ fn createToCtxLiteral(arena: &mem.Allocator, opt_ctx: &const OptionalCtx, compti
fn eatToken(tok_it: &ast.Tree.TokenList.Iterator, tree: &ast.Tree, id: @TagType(Token.Id)) ?TokenIndex {
const token = nextToken(tok_it, tree);
- if (token.ptr.id == id)
- return token.index;
+ if (token.ptr.id == id) return token.index;
putBackToken(tok_it, tree);
return null;
}
fn nextToken(tok_it: &ast.Tree.TokenList.Iterator, tree: &ast.Tree) AnnotatedToken {
- const result = AnnotatedToken {
+ const result = AnnotatedToken{
.index = tok_it.index,
.ptr = ??tok_it.next(),
};
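
Aside: the other dominant change in the parse.zig hunks above is aggregate-initializer formatting: no space between a type name and its opening brace, single-line literals where they fit, and trailing commas on the multi-line form. A hedged before/after sketch with a made-up type, not code from this commit:

    const Point = struct {
        x: i32,
        y: i32,
    };

    // before: const origin = Point { .x = 0, .y = 0 };
    const origin = Point{ .x = 0, .y = 0 };
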
std/zig/render.zig
@@ -7,7 +7,7 @@ const Token = std.zig.Token;
const indent_delta = 4;
-pub const Error = error {
+pub const Error = error{
/// Ran out of memory allocating call stack frames to complete rendering.
OutOfMemory,
};
@@ -17,9 +17,9 @@ pub fn render(allocator: &mem.Allocator, stream: var, tree: &ast.Tree) (@typeOf(
var it = tree.root_node.decls.iterator(0);
while (it.next()) |decl| {
- try renderTopLevelDecl(allocator, stream, tree, 0, *decl);
+ try renderTopLevelDecl(allocator, stream, tree, 0, decl.*);
if (it.peek()) |next_decl| {
- const n = if (nodeLineOffset(tree, *decl, *next_decl) >= 2) u8(2) else u8(1);
+ const n = if (nodeLineOffset(tree, decl.*, next_decl.*) >= 2) u8(2) else u8(1);
try stream.writeByteNTimes('\n', n);
}
}
@@ -154,10 +154,10 @@ fn renderExpression(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, ind
var it = block.statements.iterator(0);
while (it.next()) |statement| {
try stream.writeByteNTimes(' ', block_indent);
- try renderStatement(allocator, stream, tree, block_indent, *statement);
+ try renderStatement(allocator, stream, tree, block_indent, statement.*);
if (it.peek()) |next_statement| {
- const n = if (nodeLineOffset(tree, *statement, *next_statement) >= 2) u8(2) else u8(1);
+ const n = if (nodeLineOffset(tree, statement.*, next_statement.*) >= 2) u8(2) else u8(1);
try stream.writeByteNTimes('\n', n);
}
}
@@ -203,7 +203,6 @@ fn renderExpression(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, ind
try stream.write(" ");
try renderExpression(allocator, stream, tree, indent, body);
}
-
},
ast.Node.Id.InfixOp => {
@@ -335,7 +334,7 @@ fn renderExpression(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, ind
var it = call_info.params.iterator(0);
while (it.next()) |param_node| {
- try renderExpression(allocator, stream, tree, indent, *param_node);
+ try renderExpression(allocator, stream, tree, indent, param_node.*);
if (it.peek() != null) {
try stream.write(", ");
}
@@ -351,7 +350,7 @@ fn renderExpression(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, ind
try stream.write("]");
},
- ast.Node.SuffixOp.Op.SuffixOp {
+ ast.Node.SuffixOp.Op.SuffixOp => {
try renderExpression(allocator, stream, tree, indent, suffix_op.lhs);
try stream.write(".*");
},
@@ -375,7 +374,7 @@ fn renderExpression(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, ind
}
if (field_inits.len == 1) {
- const field_init = *field_inits.at(0);
+ const field_init = field_inits.at(0).*;
try renderExpression(allocator, stream, tree, indent, suffix_op.lhs);
try stream.write("{ ");
@@ -392,12 +391,12 @@ fn renderExpression(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, ind
var it = field_inits.iterator(0);
while (it.next()) |field_init| {
try stream.writeByteNTimes(' ', new_indent);
- try renderExpression(allocator, stream, tree, new_indent, *field_init);
- if ((*field_init).id != ast.Node.Id.LineComment) {
+ try renderExpression(allocator, stream, tree, new_indent, field_init.*);
+ if ((field_init.*).id != ast.Node.Id.LineComment) {
try stream.write(",");
}
if (it.peek()) |next_field_init| {
- const n = if (nodeLineOffset(tree, *field_init, *next_field_init) >= 2) u8(2) else u8(1);
+ const n = if (nodeLineOffset(tree, field_init.*, next_field_init.*) >= 2) u8(2) else u8(1);
try stream.writeByteNTimes('\n', n);
}
}
@@ -408,14 +407,13 @@ fn renderExpression(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, ind
},
ast.Node.SuffixOp.Op.ArrayInitializer => |*exprs| {
-
if (exprs.len == 0) {
try renderExpression(allocator, stream, tree, indent, suffix_op.lhs);
try stream.write("{}");
return;
}
if (exprs.len == 1) {
- const expr = *exprs.at(0);
+ const expr = exprs.at(0).*;
try renderExpression(allocator, stream, tree, indent, suffix_op.lhs);
try stream.write("{");
@@ -432,11 +430,11 @@ fn renderExpression(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, ind
var it = exprs.iterator(0);
while (it.next()) |expr| {
try stream.writeByteNTimes(' ', new_indent);
- try renderExpression(allocator, stream, tree, new_indent, *expr);
+ try renderExpression(allocator, stream, tree, new_indent, expr.*);
try stream.write(",");
if (it.peek()) |next_expr| {
- const n = if (nodeLineOffset(tree, *expr, *next_expr) >= 2) u8(2) else u8(1);
+ const n = if (nodeLineOffset(tree, expr.*, next_expr.*) >= 2) u8(2) else u8(1);
try stream.writeByteNTimes('\n', n);
}
}
@@ -469,7 +467,6 @@ fn renderExpression(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, ind
ast.Node.ControlFlowExpression.Kind.Return => {
try stream.print("return");
},
-
}
if (flow_expr.rhs) |rhs| {
@@ -575,7 +572,7 @@ fn renderExpression(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, ind
switch (container_decl.layout) {
ast.Node.ContainerDecl.Layout.Packed => try stream.print("packed "),
ast.Node.ContainerDecl.Layout.Extern => try stream.print("extern "),
- ast.Node.ContainerDecl.Layout.Auto => { },
+ ast.Node.ContainerDecl.Layout.Auto => {},
}
switch (container_decl.kind) {
@@ -611,10 +608,10 @@ fn renderExpression(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, ind
var it = container_decl.fields_and_decls.iterator(0);
while (it.next()) |decl| {
try stream.writeByteNTimes(' ', new_indent);
- try renderTopLevelDecl(allocator, stream, tree, new_indent, *decl);
+ try renderTopLevelDecl(allocator, stream, tree, new_indent, decl.*);
if (it.peek()) |next_decl| {
- const n = if (nodeLineOffset(tree, *decl, *next_decl) >= 2) u8(2) else u8(1);
+ const n = if (nodeLineOffset(tree, decl.*, next_decl.*) >= 2) u8(2) else u8(1);
try stream.writeByteNTimes('\n', n);
}
}
@@ -634,7 +631,7 @@ fn renderExpression(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, ind
}
if (err_set_decl.decls.len == 1) blk: {
- const node = *err_set_decl.decls.at(0);
+ const node = err_set_decl.decls.at(0).*;
// if there are any doc comments or same line comments
// don't try to put it all on one line
@@ -644,7 +641,6 @@ fn renderExpression(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, ind
break :blk;
}
-
try stream.write("error{");
try renderTopLevelDecl(allocator, stream, tree, indent, node);
try stream.write("}");
@@ -657,12 +653,12 @@ fn renderExpression(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, ind
var it = err_set_decl.decls.iterator(0);
while (it.next()) |node| {
try stream.writeByteNTimes(' ', new_indent);
- try renderTopLevelDecl(allocator, stream, tree, new_indent, *node);
- if ((*node).id != ast.Node.Id.LineComment) {
+ try renderTopLevelDecl(allocator, stream, tree, new_indent, node.*);
+ if ((node.*).id != ast.Node.Id.LineComment) {
try stream.write(",");
}
if (it.peek()) |next_node| {
- const n = if (nodeLineOffset(tree, *node, *next_node) >= 2) u8(2) else u8(1);
+ const n = if (nodeLineOffset(tree, node.*, next_node.*) >= 2) u8(2) else u8(1);
try stream.writeByteNTimes('\n', n);
}
}
@@ -676,9 +672,9 @@ fn renderExpression(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, ind
const multiline_str_literal = @fieldParentPtr(ast.Node.MultilineStringLiteral, "base", base);
try stream.print("\n");
- var i : usize = 0;
+ var i: usize = 0;
while (i < multiline_str_literal.lines.len) : (i += 1) {
- const t = *multiline_str_literal.lines.at(i);
+ const t = multiline_str_literal.lines.at(i).*;
try stream.writeByteNTimes(' ', indent + indent_delta);
try stream.print("{}", tree.tokenSlice(t));
}
@@ -695,7 +691,7 @@ fn renderExpression(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, ind
var it = builtin_call.params.iterator(0);
while (it.next()) |param_node| {
- try renderExpression(allocator, stream, tree, indent, *param_node);
+ try renderExpression(allocator, stream, tree, indent, param_node.*);
if (it.peek() != null) {
try stream.write(", ");
}
@@ -740,7 +736,7 @@ fn renderExpression(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, ind
var it = fn_proto.params.iterator(0);
while (it.next()) |param_decl_node| {
- try renderParamDecl(allocator, stream, tree, indent, *param_decl_node);
+ try renderParamDecl(allocator, stream, tree, indent, param_decl_node.*);
if (it.peek() != null) {
try stream.write(", ");
@@ -764,7 +760,6 @@ fn renderExpression(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, ind
try renderExpression(allocator, stream, tree, indent, node);
},
}
-
},
ast.Node.Id.PromiseType => {
@@ -801,10 +796,10 @@ fn renderExpression(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, ind
var it = switch_node.cases.iterator(0);
while (it.next()) |node| {
try stream.writeByteNTimes(' ', new_indent);
- try renderExpression(allocator, stream, tree, new_indent, *node);
+ try renderExpression(allocator, stream, tree, new_indent, node.*);
if (it.peek()) |next_node| {
- const n = if (nodeLineOffset(tree, *node, *next_node) >= 2) u8(2) else u8(1);
+ const n = if (nodeLineOffset(tree, node.*, next_node.*) >= 2) u8(2) else u8(1);
try stream.writeByteNTimes('\n', n);
}
}
@@ -819,7 +814,7 @@ fn renderExpression(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, ind
var it = switch_case.items.iterator(0);
while (it.next()) |node| {
- try renderExpression(allocator, stream, tree, indent, *node);
+ try renderExpression(allocator, stream, tree, indent, node.*);
if (it.peek() != null) {
try stream.write(",\n");
@@ -846,8 +841,10 @@ fn renderExpression(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, ind
try stream.print("{}", tree.tokenSlice(else_node.else_token));
const block_body = switch (else_node.body.id) {
- ast.Node.Id.Block, ast.Node.Id.If,
- ast.Node.Id.For, ast.Node.Id.While,
+ ast.Node.Id.Block,
+ ast.Node.Id.If,
+ ast.Node.Id.For,
+ ast.Node.Id.While,
ast.Node.Id.Switch => true,
else => false,
};
@@ -972,7 +969,11 @@ fn renderExpression(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, ind
try renderExpression(allocator, stream, tree, indent, if_node.body);
switch (if_node.body.id) {
- ast.Node.Id.Block, ast.Node.Id.If, ast.Node.Id.For, ast.Node.Id.While, ast.Node.Id.Switch => {
+ ast.Node.Id.Block,
+ ast.Node.Id.If,
+ ast.Node.Id.For,
+ ast.Node.Id.While,
+ ast.Node.Id.Switch => {
if (if_node.@"else") |@"else"| {
if (if_node.body.id == ast.Node.Id.Block) {
try stream.write(" ");
@@ -995,7 +996,7 @@ fn renderExpression(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, ind
try renderExpression(allocator, stream, tree, indent, @"else".body);
}
- }
+ },
}
},
@@ -1018,11 +1019,11 @@ fn renderExpression(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, ind
{
var it = asm_node.outputs.iterator(0);
while (it.next()) |asm_output| {
- const node = &(*asm_output).base;
+ const node = &(asm_output.*).base;
try renderExpression(allocator, stream, tree, indent_extra, node);
if (it.peek()) |next_asm_output| {
- const next_node = &(*next_asm_output).base;
+ const next_node = &(next_asm_output.*).base;
const n = if (nodeLineOffset(tree, node, next_node) >= 2) u8(2) else u8(1);
try stream.writeByte(',');
try stream.writeByteNTimes('\n', n);
@@ -1038,11 +1039,11 @@ fn renderExpression(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, ind
{
var it = asm_node.inputs.iterator(0);
while (it.next()) |asm_input| {
- const node = &(*asm_input).base;
+ const node = &(asm_input.*).base;
try renderExpression(allocator, stream, tree, indent_extra, node);
if (it.peek()) |next_asm_input| {
- const next_node = &(*next_asm_input).base;
+ const next_node = &(next_asm_input.*).base;
const n = if (nodeLineOffset(tree, node, next_node) >= 2) u8(2) else u8(1);
try stream.writeByte(',');
try stream.writeByteNTimes('\n', n);
@@ -1058,7 +1059,7 @@ fn renderExpression(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, ind
{
var it = asm_node.clobbers.iterator(0);
while (it.next()) |node| {
- try renderExpression(allocator, stream, tree, indent_once, *node);
+ try renderExpression(allocator, stream, tree, indent_once, node.*);
if (it.peek() != null) {
try stream.write(", ");
@@ -1220,8 +1221,7 @@ fn renderComments(tree: &ast.Tree, stream: var, node: var, indent: usize) (@type
const comment = node.doc_comments ?? return;
var it = comment.lines.iterator(0);
while (it.next()) |line_token_index| {
- try stream.print("{}\n", tree.tokenSlice(*line_token_index));
+ try stream.print("{}\n", tree.tokenSlice(line_token_index.*));
try stream.writeByteNTimes(' ', indent);
}
}
-
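
The recurring change across these hunks is Zig's pointer dereference moving from the prefix form (*p) to the postfix form (p.*), alongside formatting cleanups (no space before struct-initializer braces, single-statement if bodies kept on one line, one switch prong per line). Below is a minimal sketch of the postfix dereference form only; the function, test name, and values are hypothetical and not taken from this commit, and it is written loosely in the Zig syntax of this era, so details may differ.

const std = @import("std");

// Hypothetical helper: increments every element through a pointer capture,
// using the postfix dereference (item.*) that these hunks migrate to.
fn bumpAll(items: []i32) void {
    for (items) |*item| {
        // old prefix syntax would have been: *item += 1;
        item.* += 1;
    }
}

test "postfix dereference sketch" {
    var data = []i32{ 1, 2, 3 };
    bumpAll(data[0..]);
    std.debug.assert(data[0] == 2);
}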