Commit e1b01d32f0

Vexu <git@vexu.eu>
2020-01-04 13:34:00
std-c ast base
1 parent 2183c4b
Changed files (3)
lib/std/c/ast.zig
@@ -0,0 +1,66 @@
+const std = @import("std.zig");
+const SegmentedList = std.SegmentedList;
+const Token = std.c.Token;
+const Source = std.c.tokenizer.Source;
+
+pub const TokenIndex = usize; // index into a Tree.TokenList (used e.g. by SingleTokenError.render via tokens.at)
+
+pub const Tree = struct { // result of parsing: tokens, sources, AST root, errors — all arena-owned
+    tokens: TokenList, // token stream; TokenList = Source.TokenList (a SegmentedList of Token)
+    sources: SourceList, // source files contributing to this tree
+    root_node: *Node.Root, // NOTE(review): no `Node` namespace exists in this file — `Root` below is file-scope; confirm intended nesting
+    arena_allocator: std.heap.ArenaAllocator, // owns the tree's memory; freed all at once in deinit()
+    errors: ErrorList, // accumulated errors, renderable via Error.render
+
+    pub const SourceList = SegmentedList(Source, 4);
+    pub const TokenList = Source.TokenList;
+    pub const ErrorList = SegmentedList(Error, 0);
+
+    pub fn deinit(self: *Tree) void { // frees all tree memory; `self` is invalid afterwards
+        // Here we copy the arena allocator into stack memory, because
+        // otherwise it would destroy itself while it was still working.
+        var arena_allocator = self.arena_allocator;
+        arena_allocator.deinit();
+        // self is destroyed
+    }
+};
+
+pub const Error = union(enum) { // tagged union of all error kinds; one variant so far
+    InvalidToken: InvalidToken,
+
+    pub fn render(self: *const Error, tokens: *Tree.TokenList, stream: var) !void { // write a human-readable message to `stream` (`var` param = generic, pre-0.6 spelling of `anytype`)
+        switch (self.*) {
+            .InvalidToken => |*x| return x.render(tokens, stream),
+        }
+    }
+
+    pub fn loc(self: *const Error) TokenIndex { // index of the token this error points at
+        switch (self.*) {
+            .InvalidToken => |x| return x.token,
+        }
+    }
+
+    pub const InvalidToken = SingleTokenError("Invalid token '{}'");
+
+    fn SingleTokenError(comptime msg: []const u8) type { // comptime type constructor: error payload carrying one token index
+        return struct {
+            token: TokenIndex,
+
+            pub fn render(self: *const @This(), tokens: *Tree.TokenList, stream: var) !void { // formats `msg` with the token's symbol text
+                const actual_token = tokens.at(self.token);
+                return stream.print(msg, .{actual_token.id.symbol()});
+            }
+        };
+    }
+};
+
+pub const Root = struct { // NOTE(review): Tree.root_node is typed *Node.Root, but this decl is file-scope — confirm it should be nested under a Node namespace
+    decls: DeclList, // declarations owned by the root node
+    eof_token: TokenIndex, // token index of end-of-file
+
+    pub const DeclList = SegmentedList(*Decl, 4);
+};
+
+pub const Decl = struct { // placeholder — empty in this base commit; fields presumably added later
+
+};
\ No newline at end of file
lib/std/c/tokenizer.zig
@@ -4,6 +4,9 @@ const mem = std.mem;
 pub const Source = struct {
     buffer: []const u8,
     file_name: []const u8,
+    tokens: TokenList, // tokens produced from `buffer`
+
+    pub const TokenList = SegmentedList(Token, 64); // NOTE(review): assumes SegmentedList is in scope in tokenizer.zig — not visible in this hunk, verify
 };
 
 pub const Token = struct {
lib/std/c.zig
@@ -2,6 +2,11 @@ const builtin = @import("builtin");
 const std = @import("std");
 const page_size = std.mem.page_size;
 
+const tokenizer = @import("c/tokenizer.zig");
+pub const Token = tokenizer.Token;
+pub const Tokenizer = tokenizer.Tokenizer;
+pub const ast = @import("c/ast.zig");
+
 pub usingnamespace @import("os/bits.zig");
 
 pub usingnamespace switch (builtin.os) {