Commit 9ea04f4f1c

Jakub Konka <kubkon@jakubkonka.com>
2023-03-31 22:12:22
tapi: update yaml parser
https://github.com/kubkon/zig-yaml/commit/5de8b0b3a2cdb86f9a173118efa7e5e0747cca14
1 parent 9cb2919
Changed files (5)
src/link/tapi/parse/test.zig
@@ -21,45 +21,45 @@ test "explicit doc" {
     try testing.expectEqual(tree.docs.items.len, 1);
 
     const doc = tree.docs.items[0].cast(Node.Doc).?;
-    try testing.expectEqual(doc.start.?, 0);
-    try testing.expectEqual(doc.end.?, tree.tokens.len - 2);
+    try testing.expectEqual(doc.base.start, 0);
+    try testing.expectEqual(doc.base.end, tree.tokens.len - 2);
 
     const directive = tree.tokens[doc.directive.?];
-    try testing.expectEqual(directive.id, .Literal);
-    try testing.expect(mem.eql(u8, "tapi-tbd", tree.source[directive.start..directive.end]));
+    try testing.expectEqual(directive.id, .literal);
+    try testing.expectEqualStrings("tapi-tbd", tree.source[directive.start..directive.end]);
 
     try testing.expect(doc.value != null);
     try testing.expectEqual(doc.value.?.tag, .map);
 
     const map = doc.value.?.cast(Node.Map).?;
-    try testing.expectEqual(map.start.?, 5);
-    try testing.expectEqual(map.end.?, 14);
+    try testing.expectEqual(map.base.start, 5);
+    try testing.expectEqual(map.base.end, 14);
     try testing.expectEqual(map.values.items.len, 2);
 
     {
         const entry = map.values.items[0];
 
         const key = tree.tokens[entry.key];
-        try testing.expectEqual(key.id, .Literal);
-        try testing.expect(mem.eql(u8, "tbd-version", tree.source[key.start..key.end]));
+        try testing.expectEqual(key.id, .literal);
+        try testing.expectEqualStrings("tbd-version", tree.source[key.start..key.end]);
 
-        const value = entry.value.cast(Node.Value).?;
-        const value_tok = tree.tokens[value.start.?];
-        try testing.expectEqual(value_tok.id, .Literal);
-        try testing.expect(mem.eql(u8, "4", tree.source[value_tok.start..value_tok.end]));
+        const value = entry.value.?.cast(Node.Value).?;
+        const value_tok = tree.tokens[value.base.start];
+        try testing.expectEqual(value_tok.id, .literal);
+        try testing.expectEqualStrings("4", tree.source[value_tok.start..value_tok.end]);
     }
 
     {
         const entry = map.values.items[1];
 
         const key = tree.tokens[entry.key];
-        try testing.expectEqual(key.id, .Literal);
-        try testing.expect(mem.eql(u8, "abc-version", tree.source[key.start..key.end]));
+        try testing.expectEqual(key.id, .literal);
+        try testing.expectEqualStrings("abc-version", tree.source[key.start..key.end]);
 
-        const value = entry.value.cast(Node.Value).?;
-        const value_tok = tree.tokens[value.start.?];
-        try testing.expectEqual(value_tok.id, .Literal);
-        try testing.expect(mem.eql(u8, "5", tree.source[value_tok.start..value_tok.end]));
+        const value = entry.value.?.cast(Node.Value).?;
+        const value_tok = tree.tokens[value.base.start];
+        try testing.expectEqual(value_tok.id, .literal);
+        try testing.expectEqualStrings("5", tree.source[value_tok.start..value_tok.end]);
     }
 }
 
@@ -77,39 +77,31 @@ test "leaf in quotes" {
     try testing.expectEqual(tree.docs.items.len, 1);
 
     const doc = tree.docs.items[0].cast(Node.Doc).?;
-    try testing.expectEqual(doc.start.?, 0);
-    try testing.expectEqual(doc.end.?, tree.tokens.len - 2);
+    try testing.expectEqual(doc.base.start, 0);
+    try testing.expectEqual(doc.base.end, tree.tokens.len - 2);
     try testing.expect(doc.directive == null);
 
     try testing.expect(doc.value != null);
     try testing.expectEqual(doc.value.?.tag, .map);
 
     const map = doc.value.?.cast(Node.Map).?;
-    try testing.expectEqual(map.start.?, 0);
-    try testing.expectEqual(map.end.?, tree.tokens.len - 2);
+    try testing.expectEqual(map.base.start, 0);
+    try testing.expectEqual(map.base.end, tree.tokens.len - 2);
     try testing.expectEqual(map.values.items.len, 3);
 
     {
         const entry = map.values.items[0];
 
         const key = tree.tokens[entry.key];
-        try testing.expectEqual(key.id, .Literal);
-        try testing.expect(mem.eql(
-            u8,
-            "key1",
-            tree.source[key.start..key.end],
-        ));
-
-        const value = entry.value.cast(Node.Value).?;
-        const start = tree.tokens[value.start.?];
-        const end = tree.tokens[value.end.?];
-        try testing.expectEqual(start.id, .Literal);
-        try testing.expectEqual(end.id, .Literal);
-        try testing.expect(mem.eql(
-            u8,
-            "no quotes",
-            tree.source[start.start..end.end],
-        ));
+        try testing.expectEqual(key.id, .literal);
+        try testing.expectEqualStrings("key1", tree.source[key.start..key.end]);
+
+        const value = entry.value.?.cast(Node.Value).?;
+        const start = tree.tokens[value.base.start];
+        const end = tree.tokens[value.base.end];
+        try testing.expectEqual(start.id, .literal);
+        try testing.expectEqual(end.id, .literal);
+        try testing.expectEqualStrings("no quotes", tree.source[start.start..end.end]);
     }
 }
 
@@ -128,70 +120,60 @@ test "nested maps" {
     try testing.expectEqual(tree.docs.items.len, 1);
 
     const doc = tree.docs.items[0].cast(Node.Doc).?;
-    try testing.expectEqual(doc.start.?, 0);
-    try testing.expectEqual(doc.end.?, tree.tokens.len - 2);
+    try testing.expectEqual(doc.base.start, 0);
+    try testing.expectEqual(doc.base.end, tree.tokens.len - 2);
     try testing.expect(doc.directive == null);
 
     try testing.expect(doc.value != null);
     try testing.expectEqual(doc.value.?.tag, .map);
 
     const map = doc.value.?.cast(Node.Map).?;
-    try testing.expectEqual(map.start.?, 0);
-    try testing.expectEqual(map.end.?, tree.tokens.len - 2);
+    try testing.expectEqual(map.base.start, 0);
+    try testing.expectEqual(map.base.end, tree.tokens.len - 2);
     try testing.expectEqual(map.values.items.len, 2);
 
     {
         const entry = map.values.items[0];
 
         const key = tree.tokens[entry.key];
-        try testing.expectEqual(key.id, .Literal);
-        try testing.expect(mem.eql(u8, "key1", tree.source[key.start..key.end]));
+        try testing.expectEqual(key.id, .literal);
+        try testing.expectEqualStrings("key1", tree.source[key.start..key.end]);
 
-        const nested_map = entry.value.cast(Node.Map).?;
-        try testing.expectEqual(nested_map.start.?, 4);
-        try testing.expectEqual(nested_map.end.?, 16);
+        const nested_map = entry.value.?.cast(Node.Map).?;
+        try testing.expectEqual(nested_map.base.start, 4);
+        try testing.expectEqual(nested_map.base.end, 16);
         try testing.expectEqual(nested_map.values.items.len, 2);
 
         {
             const nested_entry = nested_map.values.items[0];
 
             const nested_key = tree.tokens[nested_entry.key];
-            try testing.expectEqual(nested_key.id, .Literal);
-            try testing.expect(mem.eql(
-                u8,
-                "key1_1",
-                tree.source[nested_key.start..nested_key.end],
-            ));
-
-            const nested_value = nested_entry.value.cast(Node.Value).?;
-            const nested_value_tok = tree.tokens[nested_value.start.?];
-            try testing.expectEqual(nested_value_tok.id, .Literal);
-            try testing.expect(mem.eql(
-                u8,
+            try testing.expectEqual(nested_key.id, .literal);
+            try testing.expectEqualStrings("key1_1", tree.source[nested_key.start..nested_key.end]);
+
+            const nested_value = nested_entry.value.?.cast(Node.Value).?;
+            const nested_value_tok = tree.tokens[nested_value.base.start];
+            try testing.expectEqual(nested_value_tok.id, .literal);
+            try testing.expectEqualStrings(
                 "value1_1",
                 tree.source[nested_value_tok.start..nested_value_tok.end],
-            ));
+            );
         }
 
         {
             const nested_entry = nested_map.values.items[1];
 
             const nested_key = tree.tokens[nested_entry.key];
-            try testing.expectEqual(nested_key.id, .Literal);
-            try testing.expect(mem.eql(
-                u8,
-                "key1_2",
-                tree.source[nested_key.start..nested_key.end],
-            ));
-
-            const nested_value = nested_entry.value.cast(Node.Value).?;
-            const nested_value_tok = tree.tokens[nested_value.start.?];
-            try testing.expectEqual(nested_value_tok.id, .Literal);
-            try testing.expect(mem.eql(
-                u8,
+            try testing.expectEqual(nested_key.id, .literal);
+            try testing.expectEqualStrings("key1_2", tree.source[nested_key.start..nested_key.end]);
+
+            const nested_value = nested_entry.value.?.cast(Node.Value).?;
+            const nested_value_tok = tree.tokens[nested_value.base.start];
+            try testing.expectEqual(nested_value_tok.id, .literal);
+            try testing.expectEqualStrings(
                 "value1_2",
                 tree.source[nested_value_tok.start..nested_value_tok.end],
-            ));
+            );
         }
     }
 
@@ -199,17 +181,13 @@ test "nested maps" {
         const entry = map.values.items[1];
 
         const key = tree.tokens[entry.key];
-        try testing.expectEqual(key.id, .Literal);
-        try testing.expect(mem.eql(u8, "key2", tree.source[key.start..key.end]));
-
-        const value = entry.value.cast(Node.Value).?;
-        const value_tok = tree.tokens[value.start.?];
-        try testing.expectEqual(value_tok.id, .Literal);
-        try testing.expect(mem.eql(
-            u8,
-            "value2",
-            tree.source[value_tok.start..value_tok.end],
-        ));
+        try testing.expectEqual(key.id, .literal);
+        try testing.expectEqualStrings("key2", tree.source[key.start..key.end]);
+
+        const value = entry.value.?.cast(Node.Value).?;
+        const value_tok = tree.tokens[value.base.start];
+        try testing.expectEqual(value_tok.id, .literal);
+        try testing.expectEqualStrings("value2", tree.source[value_tok.start..value_tok.end]);
     }
 }
 
@@ -227,46 +205,46 @@ test "map of list of values" {
     try testing.expectEqual(tree.docs.items.len, 1);
 
     const doc = tree.docs.items[0].cast(Node.Doc).?;
-    try testing.expectEqual(doc.start.?, 0);
-    try testing.expectEqual(doc.end.?, tree.tokens.len - 2);
+    try testing.expectEqual(doc.base.start, 0);
+    try testing.expectEqual(doc.base.end, tree.tokens.len - 2);
 
     try testing.expect(doc.value != null);
     try testing.expectEqual(doc.value.?.tag, .map);
 
     const map = doc.value.?.cast(Node.Map).?;
-    try testing.expectEqual(map.start.?, 0);
-    try testing.expectEqual(map.end.?, tree.tokens.len - 2);
+    try testing.expectEqual(map.base.start, 0);
+    try testing.expectEqual(map.base.end, tree.tokens.len - 2);
     try testing.expectEqual(map.values.items.len, 1);
 
     const entry = map.values.items[0];
     const key = tree.tokens[entry.key];
-    try testing.expectEqual(key.id, .Literal);
-    try testing.expect(mem.eql(u8, "ints", tree.source[key.start..key.end]));
+    try testing.expectEqual(key.id, .literal);
+    try testing.expectEqualStrings("ints", tree.source[key.start..key.end]);
 
-    const value = entry.value.cast(Node.List).?;
-    try testing.expectEqual(value.start.?, 4);
-    try testing.expectEqual(value.end.?, tree.tokens.len - 2);
+    const value = entry.value.?.cast(Node.List).?;
+    try testing.expectEqual(value.base.start, 4);
+    try testing.expectEqual(value.base.end, tree.tokens.len - 2);
     try testing.expectEqual(value.values.items.len, 3);
 
     {
         const elem = value.values.items[0].cast(Node.Value).?;
-        const leaf = tree.tokens[elem.start.?];
-        try testing.expectEqual(leaf.id, .Literal);
-        try testing.expect(mem.eql(u8, "0", tree.source[leaf.start..leaf.end]));
+        const leaf = tree.tokens[elem.base.start];
+        try testing.expectEqual(leaf.id, .literal);
+        try testing.expectEqualStrings("0", tree.source[leaf.start..leaf.end]);
     }
 
     {
         const elem = value.values.items[1].cast(Node.Value).?;
-        const leaf = tree.tokens[elem.start.?];
-        try testing.expectEqual(leaf.id, .Literal);
-        try testing.expect(mem.eql(u8, "1", tree.source[leaf.start..leaf.end]));
+        const leaf = tree.tokens[elem.base.start];
+        try testing.expectEqual(leaf.id, .literal);
+        try testing.expectEqualStrings("1", tree.source[leaf.start..leaf.end]);
     }
 
     {
         const elem = value.values.items[2].cast(Node.Value).?;
-        const leaf = tree.tokens[elem.start.?];
-        try testing.expectEqual(leaf.id, .Literal);
-        try testing.expect(mem.eql(u8, "2", tree.source[leaf.start..leaf.end]));
+        const leaf = tree.tokens[elem.base.start];
+        try testing.expectEqual(leaf.id, .literal);
+        try testing.expectEqualStrings("2", tree.source[leaf.start..leaf.end]);
     }
 }
 
@@ -285,64 +263,64 @@ test "map of list of maps" {
     try testing.expectEqual(tree.docs.items.len, 1);
 
     const doc = tree.docs.items[0].cast(Node.Doc).?;
-    try testing.expectEqual(doc.start.?, 0);
-    try testing.expectEqual(doc.end.?, tree.tokens.len - 2);
+    try testing.expectEqual(doc.base.start, 0);
+    try testing.expectEqual(doc.base.end, tree.tokens.len - 2);
 
     try testing.expect(doc.value != null);
     try testing.expectEqual(doc.value.?.tag, .map);
 
     const map = doc.value.?.cast(Node.Map).?;
-    try testing.expectEqual(map.start.?, 0);
-    try testing.expectEqual(map.end.?, tree.tokens.len - 2);
+    try testing.expectEqual(map.base.start, 0);
+    try testing.expectEqual(map.base.end, tree.tokens.len - 2);
     try testing.expectEqual(map.values.items.len, 1);
 
     const entry = map.values.items[0];
     const key = tree.tokens[entry.key];
-    try testing.expectEqual(key.id, .Literal);
-    try testing.expect(mem.eql(u8, "key1", tree.source[key.start..key.end]));
+    try testing.expectEqual(key.id, .literal);
+    try testing.expectEqualStrings("key1", tree.source[key.start..key.end]);
 
-    const value = entry.value.cast(Node.List).?;
-    try testing.expectEqual(value.start.?, 3);
-    try testing.expectEqual(value.end.?, tree.tokens.len - 2);
+    const value = entry.value.?.cast(Node.List).?;
+    try testing.expectEqual(value.base.start, 3);
+    try testing.expectEqual(value.base.end, tree.tokens.len - 2);
     try testing.expectEqual(value.values.items.len, 3);
 
     {
         const elem = value.values.items[0].cast(Node.Map).?;
         const nested = elem.values.items[0];
         const nested_key = tree.tokens[nested.key];
-        try testing.expectEqual(nested_key.id, .Literal);
-        try testing.expect(mem.eql(u8, "key2", tree.source[nested_key.start..nested_key.end]));
+        try testing.expectEqual(nested_key.id, .literal);
+        try testing.expectEqualStrings("key2", tree.source[nested_key.start..nested_key.end]);
 
-        const nested_v = nested.value.cast(Node.Value).?;
-        const leaf = tree.tokens[nested_v.start.?];
-        try testing.expectEqual(leaf.id, .Literal);
-        try testing.expect(mem.eql(u8, "value2", tree.source[leaf.start..leaf.end]));
+        const nested_v = nested.value.?.cast(Node.Value).?;
+        const leaf = tree.tokens[nested_v.base.start];
+        try testing.expectEqual(leaf.id, .literal);
+        try testing.expectEqualStrings("value2", tree.source[leaf.start..leaf.end]);
     }
 
     {
         const elem = value.values.items[1].cast(Node.Map).?;
         const nested = elem.values.items[0];
         const nested_key = tree.tokens[nested.key];
-        try testing.expectEqual(nested_key.id, .Literal);
-        try testing.expect(mem.eql(u8, "key3", tree.source[nested_key.start..nested_key.end]));
+        try testing.expectEqual(nested_key.id, .literal);
+        try testing.expectEqualStrings("key3", tree.source[nested_key.start..nested_key.end]);
 
-        const nested_v = nested.value.cast(Node.Value).?;
-        const leaf = tree.tokens[nested_v.start.?];
-        try testing.expectEqual(leaf.id, .Literal);
-        try testing.expect(mem.eql(u8, "value3", tree.source[leaf.start..leaf.end]));
+        const nested_v = nested.value.?.cast(Node.Value).?;
+        const leaf = tree.tokens[nested_v.base.start];
+        try testing.expectEqual(leaf.id, .literal);
+        try testing.expectEqualStrings("value3", tree.source[leaf.start..leaf.end]);
     }
 
     {
         const elem = value.values.items[2].cast(Node.Map).?;
         const nested = elem.values.items[0];
         const nested_key = tree.tokens[nested.key];
-        try testing.expectEqual(nested_key.id, .Literal);
-        try testing.expect(mem.eql(u8, "key4", tree.source[nested_key.start..nested_key.end]));
+        try testing.expectEqual(nested_key.id, .literal);
+        try testing.expectEqualStrings("key4", tree.source[nested_key.start..nested_key.end]);
 
-        const nested_v = nested.value.cast(Node.Value).?;
-        const leaf = tree.tokens[nested_v.start.?];
-        try testing.expectEqual(leaf.id, .Literal);
-        try testing.expect(mem.eql(u8, "value4", tree.source[leaf.start..leaf.end]));
+        const nested_v = nested.value.?.cast(Node.Value).?;
+        const leaf = tree.tokens[nested_v.base.start];
+        try testing.expectEqual(leaf.id, .literal);
+        try testing.expectEqualStrings("value4", tree.source[leaf.start..leaf.end]);
     }
 }
 
@@ -360,15 +338,15 @@ test "list of lists" {
     try testing.expectEqual(tree.docs.items.len, 1);
 
     const doc = tree.docs.items[0].cast(Node.Doc).?;
-    try testing.expectEqual(doc.start.?, 0);
-    try testing.expectEqual(doc.end.?, tree.tokens.len - 2);
+    try testing.expectEqual(doc.base.start, 0);
+    try testing.expectEqual(doc.base.end, tree.tokens.len - 2);
 
     try testing.expect(doc.value != null);
     try testing.expectEqual(doc.value.?.tag, .list);
 
     const list = doc.value.?.cast(Node.List).?;
-    try testing.expectEqual(list.start.?, 0);
-    try testing.expectEqual(list.end.?, tree.tokens.len - 2);
+    try testing.expectEqual(list.base.start, 0);
+    try testing.expectEqual(list.base.end, tree.tokens.len - 2);
     try testing.expectEqual(list.values.items.len, 3);
 
     {
@@ -379,22 +357,22 @@ test "list of lists" {
         {
             try testing.expectEqual(nested.values.items[0].tag, .value);
             const value = nested.values.items[0].cast(Node.Value).?;
-            const leaf = tree.tokens[value.start.?];
-            try testing.expect(mem.eql(u8, "name", tree.source[leaf.start..leaf.end]));
+            const leaf = tree.tokens[value.base.start];
+            try testing.expectEqualStrings("name", tree.source[leaf.start..leaf.end]);
         }
 
         {
             try testing.expectEqual(nested.values.items[1].tag, .value);
             const value = nested.values.items[1].cast(Node.Value).?;
-            const leaf = tree.tokens[value.start.?];
-            try testing.expect(mem.eql(u8, "hr", tree.source[leaf.start..leaf.end]));
+            const leaf = tree.tokens[value.base.start];
+            try testing.expectEqualStrings("hr", tree.source[leaf.start..leaf.end]);
         }
 
         {
             try testing.expectEqual(nested.values.items[2].tag, .value);
             const value = nested.values.items[2].cast(Node.Value).?;
-            const leaf = tree.tokens[value.start.?];
-            try testing.expect(mem.eql(u8, "avg", tree.source[leaf.start..leaf.end]));
+            const leaf = tree.tokens[value.base.start];
+            try testing.expectEqualStrings("avg", tree.source[leaf.start..leaf.end]);
         }
     }
 
@@ -406,23 +384,23 @@ test "list of lists" {
         {
             try testing.expectEqual(nested.values.items[0].tag, .value);
             const value = nested.values.items[0].cast(Node.Value).?;
-            const start = tree.tokens[value.start.?];
-            const end = tree.tokens[value.end.?];
-            try testing.expect(mem.eql(u8, "Mark McGwire", tree.source[start.start..end.end]));
+            const start = tree.tokens[value.base.start];
+            const end = tree.tokens[value.base.end];
+            try testing.expectEqualStrings("Mark McGwire", tree.source[start.start..end.end]);
         }
 
         {
             try testing.expectEqual(nested.values.items[1].tag, .value);
             const value = nested.values.items[1].cast(Node.Value).?;
-            const leaf = tree.tokens[value.start.?];
-            try testing.expect(mem.eql(u8, "65", tree.source[leaf.start..leaf.end]));
+            const leaf = tree.tokens[value.base.start];
+            try testing.expectEqualStrings("65", tree.source[leaf.start..leaf.end]);
         }
 
         {
             try testing.expectEqual(nested.values.items[2].tag, .value);
             const value = nested.values.items[2].cast(Node.Value).?;
-            const leaf = tree.tokens[value.start.?];
-            try testing.expect(mem.eql(u8, "0.278", tree.source[leaf.start..leaf.end]));
+            const leaf = tree.tokens[value.base.start];
+            try testing.expectEqualStrings("0.278", tree.source[leaf.start..leaf.end]);
         }
     }
 
@@ -434,23 +412,23 @@ test "list of lists" {
         {
             try testing.expectEqual(nested.values.items[0].tag, .value);
             const value = nested.values.items[0].cast(Node.Value).?;
-            const start = tree.tokens[value.start.?];
-            const end = tree.tokens[value.end.?];
-            try testing.expect(mem.eql(u8, "Sammy Sosa", tree.source[start.start..end.end]));
+            const start = tree.tokens[value.base.start];
+            const end = tree.tokens[value.base.end];
+            try testing.expectEqualStrings("Sammy Sosa", tree.source[start.start..end.end]);
         }
 
         {
             try testing.expectEqual(nested.values.items[1].tag, .value);
             const value = nested.values.items[1].cast(Node.Value).?;
-            const leaf = tree.tokens[value.start.?];
-            try testing.expect(mem.eql(u8, "63", tree.source[leaf.start..leaf.end]));
+            const leaf = tree.tokens[value.base.start];
+            try testing.expectEqualStrings("63", tree.source[leaf.start..leaf.end]);
         }
 
         {
             try testing.expectEqual(nested.values.items[2].tag, .value);
             const value = nested.values.items[2].cast(Node.Value).?;
-            const leaf = tree.tokens[value.start.?];
-            try testing.expect(mem.eql(u8, "0.288", tree.source[leaf.start..leaf.end]));
+            const leaf = tree.tokens[value.base.start];
+            try testing.expectEqualStrings("0.288", tree.source[leaf.start..leaf.end]);
         }
     }
 }
@@ -467,36 +445,36 @@ test "inline list" {
     try testing.expectEqual(tree.docs.items.len, 1);
 
     const doc = tree.docs.items[0].cast(Node.Doc).?;
-    try testing.expectEqual(doc.start.?, 0);
-    try testing.expectEqual(doc.end.?, tree.tokens.len - 2);
+    try testing.expectEqual(doc.base.start, 0);
+    try testing.expectEqual(doc.base.end, tree.tokens.len - 2);
 
     try testing.expect(doc.value != null);
     try testing.expectEqual(doc.value.?.tag, .list);
 
     const list = doc.value.?.cast(Node.List).?;
-    try testing.expectEqual(list.start.?, 0);
-    try testing.expectEqual(list.end.?, tree.tokens.len - 2);
+    try testing.expectEqual(list.base.start, 0);
+    try testing.expectEqual(list.base.end, tree.tokens.len - 2);
     try testing.expectEqual(list.values.items.len, 3);
 
     {
         try testing.expectEqual(list.values.items[0].tag, .value);
         const value = list.values.items[0].cast(Node.Value).?;
-        const leaf = tree.tokens[value.start.?];
-        try testing.expect(mem.eql(u8, "name", tree.source[leaf.start..leaf.end]));
+        const leaf = tree.tokens[value.base.start];
+        try testing.expectEqualStrings("name", tree.source[leaf.start..leaf.end]);
     }
 
     {
         try testing.expectEqual(list.values.items[1].tag, .value);
         const value = list.values.items[1].cast(Node.Value).?;
-        const leaf = tree.tokens[value.start.?];
-        try testing.expect(mem.eql(u8, "hr", tree.source[leaf.start..leaf.end]));
+        const leaf = tree.tokens[value.base.start];
+        try testing.expectEqualStrings("hr", tree.source[leaf.start..leaf.end]);
     }
 
     {
         try testing.expectEqual(list.values.items[2].tag, .value);
         const value = list.values.items[2].cast(Node.Value).?;
-        const leaf = tree.tokens[value.start.?];
-        try testing.expect(mem.eql(u8, "avg", tree.source[leaf.start..leaf.end]));
+        const leaf = tree.tokens[value.base.start];
+        try testing.expectEqualStrings("avg", tree.source[leaf.start..leaf.end]);
     }
 }
 
@@ -514,45 +492,273 @@ test "inline list as mapping value" {
     try testing.expectEqual(tree.docs.items.len, 1);
 
     const doc = tree.docs.items[0].cast(Node.Doc).?;
-    try testing.expectEqual(doc.start.?, 0);
-    try testing.expectEqual(doc.end.?, tree.tokens.len - 2);
+    try testing.expectEqual(doc.base.start, 0);
+    try testing.expectEqual(doc.base.end, tree.tokens.len - 2);
 
     try testing.expect(doc.value != null);
     try testing.expectEqual(doc.value.?.tag, .map);
 
     const map = doc.value.?.cast(Node.Map).?;
-    try testing.expectEqual(map.start.?, 0);
-    try testing.expectEqual(map.end.?, tree.tokens.len - 2);
+    try testing.expectEqual(map.base.start, 0);
+    try testing.expectEqual(map.base.end, tree.tokens.len - 2);
     try testing.expectEqual(map.values.items.len, 1);
 
     const entry = map.values.items[0];
     const key = tree.tokens[entry.key];
-    try testing.expectEqual(key.id, .Literal);
-    try testing.expect(mem.eql(u8, "key", tree.source[key.start..key.end]));
+    try testing.expectEqual(key.id, .literal);
+    try testing.expectEqualStrings("key", tree.source[key.start..key.end]);
 
-    const list = entry.value.cast(Node.List).?;
-    try testing.expectEqual(list.start.?, 4);
-    try testing.expectEqual(list.end.?, tree.tokens.len - 2);
+    const list = entry.value.?.cast(Node.List).?;
+    try testing.expectEqual(list.base.start, 4);
+    try testing.expectEqual(list.base.end, tree.tokens.len - 2);
     try testing.expectEqual(list.values.items.len, 3);
 
     {
         try testing.expectEqual(list.values.items[0].tag, .value);
         const value = list.values.items[0].cast(Node.Value).?;
-        const leaf = tree.tokens[value.start.?];
-        try testing.expect(mem.eql(u8, "name", tree.source[leaf.start..leaf.end]));
+        const leaf = tree.tokens[value.base.start];
+        try testing.expectEqualStrings("name", tree.source[leaf.start..leaf.end]);
     }
 
     {
         try testing.expectEqual(list.values.items[1].tag, .value);
         const value = list.values.items[1].cast(Node.Value).?;
-        const leaf = tree.tokens[value.start.?];
-        try testing.expect(mem.eql(u8, "hr", tree.source[leaf.start..leaf.end]));
+        const leaf = tree.tokens[value.base.start];
+        try testing.expectEqualStrings("hr", tree.source[leaf.start..leaf.end]);
     }
 
     {
         try testing.expectEqual(list.values.items[2].tag, .value);
         const value = list.values.items[2].cast(Node.Value).?;
-        const leaf = tree.tokens[value.start.?];
-        try testing.expect(mem.eql(u8, "avg", tree.source[leaf.start..leaf.end]));
+        const leaf = tree.tokens[value.base.start];
+        try testing.expectEqualStrings("avg", tree.source[leaf.start..leaf.end]);
     }
 }
+
+fn parseSuccess(comptime source: []const u8) !void {
+    var tree = Tree.init(testing.allocator);
+    defer tree.deinit();
+    try tree.parse(source);
+}
+
+fn parseError(comptime source: []const u8, err: parse.ParseError) !void {
+    var tree = Tree.init(testing.allocator);
+    defer tree.deinit();
+    try testing.expectError(err, tree.parse(source));
+}
+
+test "empty doc with spaces and comments" {
+    try parseSuccess(
+        \\
+        \\
+        \\   # this is a comment in a weird place
+        \\# and this one is too
+    );
+}
+
+test "comment between --- and ! in document start" {
+    try parseError(
+        \\--- # what is it?
+        \\!
+    , error.UnexpectedToken);
+}
+
+test "correct doc start with tag" {
+    try parseSuccess(
+        \\--- !some-tag
+        \\
+    );
+}
+
+test "doc close without explicit doc open" {
+    try parseError(
+        \\
+        \\
+        \\# something cool
+        \\...
+    , error.UnexpectedToken);
+}
+
+test "doc open and close are ok" {
+    try parseSuccess(
+        \\---
+        \\# first doc
+        \\
+        \\
+        \\---
+        \\# second doc
+        \\
+        \\
+        \\...
+    );
+}
+
+test "doc with a single string is ok" {
+    try parseSuccess(
+        \\a string of some sort
+        \\
+    );
+}
+
+test "explicit doc with a single string is ok" {
+    try parseSuccess(
+        \\--- !anchor
+        \\# nothing to see here except one string
+        \\  # not a lot to go on with
+        \\a single string
+        \\...
+    );
+}
+
+test "doc with two string is bad" {
+    try parseError(
+        \\first
+        \\second
+        \\# this should fail already
+    , error.UnexpectedToken);
+}
+
+test "single quote string can have new lines" {
+    try parseSuccess(
+        \\'what is this
+        \\ thing?'
+    );
+}
+
+test "single quote string on one line is fine" {
+    try parseSuccess(
+        \\'here''s an apostrophe'
+    );
+}
+
+test "double quote string can have new lines" {
+    try parseSuccess(
+        \\"what is this
+        \\ thing?"
+    );
+}
+
+test "double quote string on one line is fine" {
+    try parseSuccess(
+        \\"a newline\nand a\ttab"
+    );
+}
+
+test "map with key and value literals" {
+    try parseSuccess(
+        \\key1: val1
+        \\key2 : val2
+    );
+}
+
+test "map of maps" {
+    try parseSuccess(
+        \\
+        \\# the first key
+        \\key1:
+        \\  # the first subkey
+        \\  key1_1: 0
+        \\  key1_2: 1
+        \\# the second key
+        \\key2:
+        \\  key2_1: -1
+        \\  key2_2: -2
+        \\# the end of map
+    );
+}
+
+test "map value indicator needs to be on the same line" {
+    try parseError(
+        \\a
+        \\  : b
+    , error.UnexpectedToken);
+}
+
+test "value needs to be indented" {
+    try parseError(
+        \\a:
+        \\b
+    , error.MalformedYaml);
+}
+
+test "comment between a key and a value is fine" {
+    try parseSuccess(
+        \\a:
+        \\  # this is a value
+        \\  b
+    );
+}
+
+test "simple list" {
+    try parseSuccess(
+        \\# first el
+        \\- a
+        \\# second el
+        \\-  b
+        \\# third el
+        \\-   c
+    );
+}
+
+test "list indentation matters" {
+    try parseSuccess(
+        \\  - a
+        \\- b
+    );
+
+    try parseSuccess(
+        \\- a
+        \\  - b
+    );
+}
+
+test "unindented list is fine too" {
+    try parseSuccess(
+        \\a:
+        \\- 0
+        \\- 1
+    );
+}
+
+test "empty values in a map" {
+    try parseSuccess(
+        \\a:
+        \\b:
+        \\- 0
+    );
+}
+
+test "weirdly nested map of maps of lists" {
+    try parseSuccess(
+        \\a:
+        \\ b:
+        \\  - 0
+        \\  - 1
+    );
+}
+
+test "square brackets denote a list" {
+    try parseSuccess(
+        \\[ a,
+        \\  b, c ]
+    );
+}
+
+test "empty list" {
+    try parseSuccess(
+        \\[ ]
+    );
+}
+
+test "comment within a bracketed list is an error" {
+    try parseError(
+        \\[ # something
+        \\]
+    , error.MalformedYaml);
+}
+
+test "mixed ints with floats in a list" {
+    try parseSuccess(
+        \\[0, 1.0]
+    );
+}
src/link/tapi/yaml/test.zig
@@ -0,0 +1,475 @@
+const std = @import("std");
+const mem = std.mem;
+const testing = std.testing;
+
+const yaml_mod = @import("../yaml.zig");
+const Yaml = yaml_mod.Yaml;
+
+test "simple list" {
+    const source =
+        \\- a
+        \\- b
+        \\- c
+    ;
+
+    var yaml = try Yaml.load(testing.allocator, source);
+    defer yaml.deinit();
+
+    try testing.expectEqual(yaml.docs.items.len, 1);
+
+    const list = yaml.docs.items[0].list;
+    try testing.expectEqual(list.len, 3);
+
+    try testing.expectEqualStrings("a", list[0].string);
+    try testing.expectEqualStrings("b", list[1].string);
+    try testing.expectEqualStrings("c", list[2].string);
+}
+
+test "simple list typed as array of strings" {
+    const source =
+        \\- a
+        \\- b
+        \\- c
+    ;
+
+    var yaml = try Yaml.load(testing.allocator, source);
+    defer yaml.deinit();
+
+    try testing.expectEqual(yaml.docs.items.len, 1);
+
+    const arr = try yaml.parse([3][]const u8);
+    try testing.expectEqual(3, arr.len);
+    try testing.expectEqualStrings("a", arr[0]);
+    try testing.expectEqualStrings("b", arr[1]);
+    try testing.expectEqualStrings("c", arr[2]);
+}
+
+test "simple list typed as array of ints" {
+    const source =
+        \\- 0
+        \\- 1
+        \\- 2
+    ;
+
+    var yaml = try Yaml.load(testing.allocator, source);
+    defer yaml.deinit();
+
+    try testing.expectEqual(yaml.docs.items.len, 1);
+
+    const arr = try yaml.parse([3]u8);
+    try testing.expectEqualSlices(u8, &[_]u8{ 0, 1, 2 }, &arr);
+}
+
+test "list of mixed sign integer" {
+    const source =
+        \\- 0
+        \\- -1
+        \\- 2
+    ;
+
+    var yaml = try Yaml.load(testing.allocator, source);
+    defer yaml.deinit();
+
+    try testing.expectEqual(yaml.docs.items.len, 1);
+
+    const arr = try yaml.parse([3]i8);
+    try testing.expectEqualSlices(i8, &[_]i8{ 0, -1, 2 }, &arr);
+}
+
+test "simple map untyped" {
+    const source =
+        \\a: 0
+    ;
+
+    var yaml = try Yaml.load(testing.allocator, source);
+    defer yaml.deinit();
+
+    try testing.expectEqual(yaml.docs.items.len, 1);
+
+    const map = yaml.docs.items[0].map;
+    try testing.expect(map.contains("a"));
+    try testing.expectEqual(@as(i64, 0), map.get("a").?.int);
+}
+
+test "simple map untyped with a list of maps" {
+    const source =
+        \\a: 0
+        \\b:
+        \\  - foo: 1
+        \\    bar: 2
+        \\  - foo: 3
+        \\    bar: 4
+        \\c: 1
+    ;
+
+    var yaml = try Yaml.load(testing.allocator, source);
+    defer yaml.deinit();
+
+    try testing.expectEqual(yaml.docs.items.len, 1);
+
+    const map = yaml.docs.items[0].map;
+    try testing.expect(map.contains("a"));
+    try testing.expect(map.contains("b"));
+    try testing.expect(map.contains("c"));
+    try testing.expectEqual(@as(i64, 0), map.get("a").?.int);
+    try testing.expectEqual(@as(i64, 1), map.get("c").?.int);
+    try testing.expectEqual(@as(i64, 1), map.get("b").?.list[0].map.get("foo").?.int);
+    try testing.expectEqual(@as(i64, 2), map.get("b").?.list[0].map.get("bar").?.int);
+    try testing.expectEqual(@as(i64, 3), map.get("b").?.list[1].map.get("foo").?.int);
+    try testing.expectEqual(@as(i64, 4), map.get("b").?.list[1].map.get("bar").?.int);
+}
+
+test "simple map untyped with a list of maps. no indent" {
+    const source =
+        \\b:
+        \\- foo: 1
+        \\c: 1
+    ;
+
+    var yaml = try Yaml.load(testing.allocator, source);
+    defer yaml.deinit();
+
+    try testing.expectEqual(yaml.docs.items.len, 1);
+
+    const map = yaml.docs.items[0].map;
+    try testing.expect(map.contains("b"));
+    try testing.expect(map.contains("c"));
+    try testing.expectEqual(@as(i64, 1), map.get("c").?.int);
+    try testing.expectEqual(@as(i64, 1), map.get("b").?.list[0].map.get("foo").?.int);
+}
+
+test "simple map untyped with a list of maps. no indent 2" {
+    const source =
+        \\a: 0
+        \\b:
+        \\- foo: 1
+        \\  bar: 2
+        \\- foo: 3
+        \\  bar: 4
+        \\c: 1
+    ;
+
+    var yaml = try Yaml.load(testing.allocator, source);
+    defer yaml.deinit();
+
+    try testing.expectEqual(yaml.docs.items.len, 1);
+
+    const map = yaml.docs.items[0].map;
+    try testing.expect(map.contains("a"));
+    try testing.expect(map.contains("b"));
+    try testing.expect(map.contains("c"));
+    try testing.expectEqual(@as(i64, 0), map.get("a").?.int);
+    try testing.expectEqual(@as(i64, 1), map.get("c").?.int);
+    try testing.expectEqual(@as(i64, 1), map.get("b").?.list[0].map.get("foo").?.int);
+    try testing.expectEqual(@as(i64, 2), map.get("b").?.list[0].map.get("bar").?.int);
+    try testing.expectEqual(@as(i64, 3), map.get("b").?.list[1].map.get("foo").?.int);
+    try testing.expectEqual(@as(i64, 4), map.get("b").?.list[1].map.get("bar").?.int);
+}
+
+test "simple map typed" {
+    const source =
+        \\a: 0
+        \\b: hello there
+        \\c: 'wait, what?'
+    ;
+
+    var yaml = try Yaml.load(testing.allocator, source);
+    defer yaml.deinit();
+
+    const simple = try yaml.parse(struct { a: usize, b: []const u8, c: []const u8 });
+    try testing.expectEqual(@as(usize, 0), simple.a);
+    try testing.expectEqualStrings("hello there", simple.b);
+    try testing.expectEqualStrings("wait, what?", simple.c);
+}
+
+test "typed nested structs" {
+    const source =
+        \\a:
+        \\  b: hello there
+        \\  c: 'wait, what?'
+    ;
+
+    var yaml = try Yaml.load(testing.allocator, source);
+    defer yaml.deinit();
+
+    const simple = try yaml.parse(struct {
+        a: struct {
+            b: []const u8,
+            c: []const u8,
+        },
+    });
+    try testing.expectEqualStrings("hello there", simple.a.b);
+    try testing.expectEqualStrings("wait, what?", simple.a.c);
+}
+
+test "single quoted string" {
+    const source =
+        \\- 'hello'
+        \\- 'here''s an escaped quote'
+        \\- 'newlines and tabs\nare not\tsupported'
+    ;
+
+    var yaml = try Yaml.load(testing.allocator, source);
+    defer yaml.deinit();
+
+    const arr = try yaml.parse([3][]const u8);
+    try testing.expectEqual(arr.len, 3);
+    try testing.expectEqualStrings("hello", arr[0]);
+    try testing.expectEqualStrings("here's an escaped quote", arr[1]);
+    try testing.expectEqualStrings("newlines and tabs\\nare not\\tsupported", arr[2]);
+}
+
+test "double quoted string" {
+    const source =
+        \\- "hello"
+        \\- "\"here\" are some escaped quotes"
+        \\- "newlines and tabs\nare\tsupported"
+        \\- "let's have
+        \\some fun!"
+    ;
+
+    var yaml = try Yaml.load(testing.allocator, source);
+    defer yaml.deinit();
+
+    const arr = try yaml.parse([4][]const u8);
+    try testing.expectEqual(arr.len, 4);
+    try testing.expectEqualStrings("hello", arr[0]);
+    try testing.expectEqualStrings(
+        \\"here" are some escaped quotes
+    , arr[1]);
+    try testing.expectEqualStrings(
+        \\newlines and tabs
+        \\are	supported
+    , arr[2]);
+    try testing.expectEqualStrings(
+        \\let's have
+        \\some fun!
+    , arr[3]);
+}
+
+test "multidoc typed as a slice of structs" {
+    const source =
+        \\---
+        \\a: 0
+        \\---
+        \\a: 1
+        \\...
+    ;
+
+    var yaml = try Yaml.load(testing.allocator, source);
+    defer yaml.deinit();
+
+    {
+        const result = try yaml.parse([2]struct { a: usize });
+        try testing.expectEqual(result.len, 2);
+        try testing.expectEqual(result[0].a, 0);
+        try testing.expectEqual(result[1].a, 1);
+    }
+
+    {
+        const result = try yaml.parse([]struct { a: usize });
+        try testing.expectEqual(result.len, 2);
+        try testing.expectEqual(result[0].a, 0);
+        try testing.expectEqual(result[1].a, 1);
+    }
+}
+
+test "multidoc typed as a struct is an error" {
+    const source =
+        \\---
+        \\a: 0
+        \\---
+        \\b: 1
+        \\...
+    ;
+
+    var yaml = try Yaml.load(testing.allocator, source);
+    defer yaml.deinit();
+
+    try testing.expectError(Yaml.Error.TypeMismatch, yaml.parse(struct { a: usize }));
+    try testing.expectError(Yaml.Error.TypeMismatch, yaml.parse(struct { b: usize }));
+    try testing.expectError(Yaml.Error.TypeMismatch, yaml.parse(struct { a: usize, b: usize }));
+}
+
+test "multidoc typed as a slice of structs with optionals" {
+    const source =
+        \\---
+        \\a: 0
+        \\c: 1.0
+        \\---
+        \\a: 1
+        \\b: different field
+        \\...
+    ;
+
+    var yaml = try Yaml.load(testing.allocator, source);
+    defer yaml.deinit();
+
+    const result = try yaml.parse([]struct { a: usize, b: ?[]const u8, c: ?f16 });
+    try testing.expectEqual(result.len, 2);
+
+    try testing.expectEqual(result[0].a, 0);
+    try testing.expect(result[0].b == null);
+    try testing.expect(result[0].c != null);
+    try testing.expectEqual(result[0].c.?, 1.0);
+
+    try testing.expectEqual(result[1].a, 1);
+    try testing.expect(result[1].b != null);
+    try testing.expectEqualStrings("different field", result[1].b.?);
+    try testing.expect(result[1].c == null);
+}
+
+test "empty yaml can be represented as void" {
+    const source = "";
+    var yaml = try Yaml.load(testing.allocator, source);
+    defer yaml.deinit();
+    const result = try yaml.parse(void);
+    try testing.expect(@TypeOf(result) == void);
+}
+
+test "nonempty yaml cannot be represented as void" {
+    const source =
+        \\a: b
+    ;
+
+    var yaml = try Yaml.load(testing.allocator, source);
+    defer yaml.deinit();
+
+    try testing.expectError(Yaml.Error.TypeMismatch, yaml.parse(void));
+}
+
+test "typed array size mismatch" {
+    const source =
+        \\- 0
+        \\- 0
+    ;
+
+    var yaml = try Yaml.load(testing.allocator, source);
+    defer yaml.deinit();
+
+    try testing.expectError(Yaml.Error.ArraySizeMismatch, yaml.parse([1]usize));
+    try testing.expectError(Yaml.Error.ArraySizeMismatch, yaml.parse([5]usize));
+}
+
+test "comments" {
+    const source =
+        \\
+        \\key: # this is the key
+        \\# first value
+        \\
+        \\- val1
+        \\
+        \\# second value
+        \\- val2
+    ;
+
+    var yaml = try Yaml.load(testing.allocator, source);
+    defer yaml.deinit();
+
+    const simple = try yaml.parse(struct {
+        key: []const []const u8,
+    });
+    try testing.expect(simple.key.len == 2);
+    try testing.expectEqualStrings("val1", simple.key[0]);
+    try testing.expectEqualStrings("val2", simple.key[1]);
+}
+
+test "promote ints to floats in a list mixed numeric types" {
+    const source =
+        \\a_list: [0, 1.0]
+    ;
+
+    var yaml = try Yaml.load(testing.allocator, source);
+    defer yaml.deinit();
+
+    const simple = try yaml.parse(struct {
+        a_list: []const f64,
+    });
+    try testing.expectEqualSlices(f64, &[_]f64{ 0.0, 1.0 }, simple.a_list);
+}
+
+test "demoting floats to ints in a list is an error" {
+    const source =
+        \\a_list: [0, 1.0]
+    ;
+
+    var yaml = try Yaml.load(testing.allocator, source);
+    defer yaml.deinit();
+
+    try testing.expectError(error.TypeMismatch, yaml.parse(struct {
+        a_list: []const u64,
+    }));
+}
+
+test "duplicate map keys" {
+    const source =
+        \\a: b
+        \\a: c
+    ;
+    try testing.expectError(error.DuplicateMapKey, Yaml.load(testing.allocator, source));
+}
+
+fn testStringify(expected: []const u8, input: anytype) !void {
+    var output = std.ArrayList(u8).init(testing.allocator);
+    defer output.deinit();
+
+    try yaml_mod.stringify(testing.allocator, input, output.writer());
+    try testing.expectEqualStrings(expected, output.items);
+}
+
+test "stringify an int" {
+    try testStringify("128", @as(u32, 128));
+}
+
+test "stringify a simple struct" {
+    try testStringify(
+        \\a: 1
+        \\b: 2
+        \\c: 2.5
+    , struct { a: i64, b: f64, c: f64 }{ .a = 1, .b = 2.0, .c = 2.5 });
+}
+
+test "stringify a struct with an optional" {
+    try testStringify(
+        \\a: 1
+        \\b: 2
+        \\c: 2.5
+    , struct { a: i64, b: ?f64, c: f64 }{ .a = 1, .b = 2.0, .c = 2.5 });
+
+    try testStringify(
+        \\a: 1
+        \\c: 2.5
+    , struct { a: i64, b: ?f64, c: f64 }{ .a = 1, .b = null, .c = 2.5 });
+}
+
+test "stringify a struct with all optionals" {
+    try testStringify("", struct { a: ?i64, b: ?f64 }{ .a = null, .b = null });
+}
+
+test "stringify an optional" {
+    try testStringify("", null);
+    try testStringify("", @as(?u64, null));
+}
+
+test "stringify a union" {
+    const Dummy = union(enum) {
+        x: u64,
+        y: f64,
+    };
+    try testStringify("a: 1", struct { a: Dummy }{ .a = .{ .x = 1 } });
+    try testStringify("a: 2.1", struct { a: Dummy }{ .a = .{ .y = 2.1 } });
+}
+
+test "stringify a string" {
+    try testStringify("a: name", struct { a: []const u8 }{ .a = "name" });
+    try testStringify("name", "name");
+}
+
+test "stringify a list" {
+    try testStringify("[ 1, 2, 3 ]", @as([]const u64, &.{ 1, 2, 3 }));
+    try testStringify("[ 1, 2, 3 ]", .{ @as(i64, 1), 2, 3 });
+    try testStringify("[ 1, name, 3 ]", .{ 1, "name", 3 });
+
+    const arr: [3]i64 = .{ 1, 2, 3 };
+    try testStringify("[ 1, 2, 3 ]", arr);
+}
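
The new `yaml/test.zig` above exercises the higher-level typed API end to end. A minimal usage sketch, again illustrative rather than part of the commit, assuming this file's imports (`std`, `testing`, `yaml_mod`, `Yaml`):

```zig
// Illustrative sketch: load a document, read it untyped, map it onto a
// struct, and stringify a value back out, mirroring the tests above.
test "sketch: load, typed parse, and stringify" {
    const source =
        \\a: 0
        \\b: hello there
    ;

    var yaml = try Yaml.load(testing.allocator, source);
    defer yaml.deinit();

    // Untyped access goes through the value union (.map, .list, .int, ...).
    const map = yaml.docs.items[0].map;
    try testing.expectEqual(@as(i64, 0), map.get("a").?.int);

    // Typed access maps the document onto a caller-supplied struct.
    const typed = try yaml.parse(struct { a: usize, b: []const u8 });
    try testing.expectEqual(@as(usize, 0), typed.a);
    try testing.expectEqualStrings("hello there", typed.b);

    // stringify writes YAML to any writer.
    var out = std.ArrayList(u8).init(testing.allocator);
    defer out.deinit();
    try yaml_mod.stringify(testing.allocator, struct { a: i64, b: f64 }{ .a = 1, .b = 2.5 }, out.writer());
    try testing.expectEqualStrings(
        \\a: 1
        \\b: 2.5
    , out.items);
}
```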
src/link/tapi/parse.zig
@@ -1,8 +1,7 @@
 const std = @import("std");
 const assert = std.debug.assert;
-const log = std.log.scoped(.tapi);
+const log = std.log.scoped(.yaml);
 const mem = std.mem;
-const testing = std.testing;
 
 const Allocator = mem.Allocator;
 const Tokenizer = @import("Tokenizer.zig");
@@ -11,9 +10,9 @@ const TokenIndex = Tokenizer.TokenIndex;
 const TokenIterator = Tokenizer.TokenIterator;
 
 pub const ParseError = error{
+    InvalidEscapeSequence,
     MalformedYaml,
     NestedDocuments,
-    UnexpectedTag,
     UnexpectedEof,
     UnexpectedToken,
     Unhandled,
@@ -22,6 +21,8 @@ pub const ParseError = error{
 pub const Node = struct {
     tag: Tag,
     tree: *const Tree,
+    start: TokenIndex,
+    end: TokenIndex,
 
     pub const Tag = enum {
         doc,
@@ -61,9 +62,12 @@ pub const Node = struct {
     }
 
     pub const Doc = struct {
-        base: Node = Node{ .tag = Tag.doc, .tree = undefined },
-        start: ?TokenIndex = null,
-        end: ?TokenIndex = null,
+        base: Node = Node{
+            .tag = Tag.doc,
+            .tree = undefined,
+            .start = undefined,
+            .end = undefined,
+        },
         directive: ?TokenIndex = null,
         value: ?*Node = null,
 
@@ -86,10 +90,8 @@ pub const Node = struct {
             _ = fmt;
             if (self.directive) |id| {
                 try std.fmt.format(writer, "{{ ", .{});
-                const directive = self.base.tree.tokens[id];
-                try std.fmt.format(writer, ".directive = {s}, ", .{
-                    self.base.tree.source[directive.start..directive.end],
-                });
+                const directive = self.base.tree.getRaw(id, id);
+                try std.fmt.format(writer, ".directive = {s}, ", .{directive});
             }
             if (self.value) |node| {
                 try std.fmt.format(writer, "{}", .{node});
@@ -101,22 +103,27 @@ pub const Node = struct {
     };
 
     pub const Map = struct {
-        base: Node = Node{ .tag = Tag.map, .tree = undefined },
-        start: ?TokenIndex = null,
-        end: ?TokenIndex = null,
+        base: Node = Node{
+            .tag = Tag.map,
+            .tree = undefined,
+            .start = undefined,
+            .end = undefined,
+        },
         values: std.ArrayListUnmanaged(Entry) = .{},
 
         pub const base_tag: Node.Tag = .map;
 
         pub const Entry = struct {
             key: TokenIndex,
-            value: *Node,
+            value: ?*Node,
         };
 
         pub fn deinit(self: *Map, allocator: Allocator) void {
             for (self.values.items) |entry| {
-                entry.value.deinit(allocator);
-                allocator.destroy(entry.value);
+                if (entry.value) |value| {
+                    value.deinit(allocator);
+                    allocator.destroy(value);
+                }
             }
             self.values.deinit(allocator);
         }
@@ -131,20 +138,24 @@ pub const Node = struct {
             _ = fmt;
             try std.fmt.format(writer, "{{ ", .{});
             for (self.values.items) |entry| {
-                const key = self.base.tree.tokens[entry.key];
-                try std.fmt.format(writer, "{s} => {}, ", .{
-                    self.base.tree.source[key.start..key.end],
-                    entry.value,
-                });
+                const key = self.base.tree.getRaw(entry.key, entry.key);
+                if (entry.value) |value| {
+                    try std.fmt.format(writer, "{s} => {}, ", .{ key, value });
+                } else {
+                    try std.fmt.format(writer, "{s} => null, ", .{key});
+                }
             }
             return std.fmt.format(writer, " }}", .{});
         }
     };
 
     pub const List = struct {
-        base: Node = Node{ .tag = Tag.list, .tree = undefined },
-        start: ?TokenIndex = null,
-        end: ?TokenIndex = null,
+        base: Node = Node{
+            .tag = Tag.list,
+            .tree = undefined,
+            .start = undefined,
+            .end = undefined,
+        },
         values: std.ArrayListUnmanaged(*Node) = .{},
 
         pub const base_tag: Node.Tag = .list;
@@ -174,15 +185,18 @@ pub const Node = struct {
     };
 
     pub const Value = struct {
-        base: Node = Node{ .tag = Tag.value, .tree = undefined },
-        start: ?TokenIndex = null,
-        end: ?TokenIndex = null,
+        base: Node = Node{
+            .tag = Tag.value,
+            .tree = undefined,
+            .start = undefined,
+            .end = undefined,
+        },
+        string_value: std.ArrayListUnmanaged(u8) = .{},
 
         pub const base_tag: Node.Tag = .value;
 
         pub fn deinit(self: *Value, allocator: Allocator) void {
-            _ = self;
-            _ = allocator;
+            self.string_value.deinit(allocator);
         }
 
         pub fn format(
@@ -193,11 +207,8 @@ pub const Node = struct {
         ) !void {
             _ = options;
             _ = fmt;
-            const start = self.base.tree.tokens[self.start.?];
-            const end = self.base.tree.tokens[self.end.?];
-            return std.fmt.format(writer, "{s}", .{
-                self.base.tree.source[start.start..end.end],
-            });
+            const raw = self.base.tree.getRaw(self.base.start, self.base.end);
+            return std.fmt.format(writer, "{s}", .{raw});
         }
     };
 };
@@ -233,6 +244,21 @@ pub const Tree = struct {
         self.docs.deinit(self.allocator);
     }
 
+    pub fn getDirective(self: Tree, doc_index: usize) ?[]const u8 {
+        assert(doc_index < self.docs.items.len);
+        const doc = self.docs.items[doc_index].cast(Node.Doc) orelse return null;
+        const id = doc.directive orelse return null;
+        return self.getRaw(id, id);
+    }
+
+    pub fn getRaw(self: Tree, start: TokenIndex, end: TokenIndex) []const u8 {
+        assert(start <= end);
+        assert(start < self.tokens.len and end < self.tokens.len);
+        const start_token = self.tokens[start];
+        const end_token = self.tokens[end];
+        return self.source[start_token.start..end_token.end];
+    }
+
     pub fn parse(self: *Tree, source: []const u8) !void {
         var tokenizer = Tokenizer{ .buffer = source };
         var tokens = std.ArrayList(Token).init(self.allocator);
@@ -252,8 +278,8 @@ pub const Tree = struct {
             });
 
             switch (token.id) {
-                .Eof => break,
-                .NewLine => {
+                .eof => break,
+                .new_line => {
                     line += 1;
                     prev_line_last_col = token.end;
                 },
@@ -272,20 +298,20 @@ pub const Tree = struct {
             .line_cols = &self.line_cols,
         };
 
-        while (true) {
-            if (parser.token_it.peek() == null) return;
+        parser.eatCommentsAndSpace(&.{});
 
-            const pos = parser.token_it.pos;
-            const token = parser.token_it.next();
+        while (true) {
+            parser.eatCommentsAndSpace(&.{});
+            const token = parser.token_it.next() orelse break;
 
-            log.debug("Next token: {}, {}", .{ pos, token });
+            log.debug("(main) next {s}@{d}", .{ @tagName(token.id), parser.token_it.pos - 1 });
 
             switch (token.id) {
-                .Space, .Comment, .NewLine => {},
-                .Eof => break,
+                .eof => break,
                 else => {
-                    const doc = try parser.doc(pos);
-                    try self.docs.append(self.allocator, &doc.base);
+                    parser.token_it.seekBy(-1);
+                    const doc = try parser.doc();
+                    try self.docs.append(self.allocator, doc);
                 },
             }
         }
@@ -298,355 +324,308 @@ const Parser = struct {
     token_it: *TokenIterator,
     line_cols: *const std.AutoHashMap(TokenIndex, LineCol),
 
-    fn doc(self: *Parser, start: TokenIndex) ParseError!*Node.Doc {
+    fn value(self: *Parser) ParseError!?*Node {
+        self.eatCommentsAndSpace(&.{});
+
+        const pos = self.token_it.pos;
+        const token = self.token_it.next() orelse return error.UnexpectedEof;
+
+        log.debug("  next {s}@{d}", .{ @tagName(token.id), pos });
+
+        switch (token.id) {
+            .literal => if (self.eatToken(.map_value_ind, &.{ .new_line, .comment })) |_| {
+                // map
+                self.token_it.seekTo(pos);
+                return self.map();
+            } else {
+                // leaf value
+                self.token_it.seekTo(pos);
+                return self.leaf_value();
+            },
+            .single_quoted, .double_quoted => {
+                // leaf value
+                self.token_it.seekBy(-1);
+                return self.leaf_value();
+            },
+            .seq_item_ind => {
+                // list
+                self.token_it.seekBy(-1);
+                return self.list();
+            },
+            .flow_seq_start => {
+                // list
+                self.token_it.seekBy(-1);
+                return self.list_bracketed();
+            },
+            else => return null,
+        }
+    }
+
+    fn doc(self: *Parser) ParseError!*Node {
         const node = try self.allocator.create(Node.Doc);
         errdefer self.allocator.destroy(node);
-        node.* = .{ .start = start };
+        node.* = .{};
         node.base.tree = self.tree;
+        node.base.start = self.token_it.pos;
 
-        self.token_it.seekTo(start);
-
-        log.debug("Doc start: {}, {}", .{ start, self.tree.tokens[start] });
+        log.debug("(doc) begin {s}@{d}", .{ @tagName(self.tree.tokens[node.base.start].id), node.base.start });
 
-        const explicit_doc: bool = if (self.eatToken(.DocStart)) |_| explicit_doc: {
-            if (self.eatToken(.Tag)) |_| {
-                node.directive = try self.expectToken(.Literal);
+        // Parse header
+        const explicit_doc: bool = if (self.eatToken(.doc_start, &.{})) |doc_pos| explicit_doc: {
+            if (self.getCol(doc_pos) > 0) return error.MalformedYaml;
+            if (self.eatToken(.tag, &.{ .new_line, .comment })) |_| {
+                node.directive = try self.expectToken(.literal, &.{ .new_line, .comment });
             }
-            _ = try self.expectToken(.NewLine);
             break :explicit_doc true;
         } else false;
 
-        while (true) {
-            const pos = self.token_it.pos;
-            const token = self.token_it.next();
-
-            log.debug("Next token: {}, {}", .{ pos, token });
+        // Parse value
+        node.value = try self.value();
+        if (node.value == null) {
+            self.token_it.seekBy(-1);
+        }
+        errdefer if (node.value) |val| {
+            val.deinit(self.allocator);
+            self.allocator.destroy(val);
+        };
 
-            switch (token.id) {
-                .Tag => {
-                    return error.UnexpectedTag;
-                },
-                .Literal, .SingleQuote, .DoubleQuote => {
-                    _ = try self.expectToken(.MapValueInd);
-                    const map_node = try self.map(pos);
-                    node.value = &map_node.base;
-                },
-                .SeqItemInd => {
-                    const list_node = try self.list(pos);
-                    node.value = &list_node.base;
-                },
-                .FlowSeqStart => {
-                    const list_node = try self.list_bracketed(pos);
-                    node.value = &list_node.base;
-                },
-                .DocEnd => {
-                    if (explicit_doc) break;
-                    return error.UnexpectedToken;
-                },
-                .DocStart, .Eof => {
-                    self.token_it.seekBy(-1);
-                    break;
-                },
-                else => {
-                    return error.UnexpectedToken;
-                },
+        // Parse footer
+        footer: {
+            if (self.eatToken(.doc_end, &.{})) |pos| {
+                if (!explicit_doc) return error.UnexpectedToken;
+                if (self.getCol(pos) > 0) return error.MalformedYaml;
+                node.base.end = pos;
+                break :footer;
+            }
+            if (self.eatToken(.doc_start, &.{})) |pos| {
+                if (!explicit_doc) return error.UnexpectedToken;
+                if (self.getCol(pos) > 0) return error.MalformedYaml;
+                self.token_it.seekBy(-1);
+                node.base.end = pos - 1;
+                break :footer;
+            }
+            if (self.eatToken(.eof, &.{})) |pos| {
+                node.base.end = pos - 1;
+                break :footer;
             }
+            return error.UnexpectedToken;
         }
 
-        node.end = self.token_it.pos - 1;
+        log.debug("(doc) end {s}@{d}", .{ @tagName(self.tree.tokens[node.base.end].id), node.base.end });
 
-        log.debug("Doc end: {}, {}", .{ node.end.?, self.tree.tokens[node.end.?] });
-
-        return node;
+        return &node.base;
     }
 
-    fn map(self: *Parser, start: TokenIndex) ParseError!*Node.Map {
+    fn map(self: *Parser) ParseError!*Node {
         const node = try self.allocator.create(Node.Map);
         errdefer self.allocator.destroy(node);
-        node.* = .{ .start = start };
+        node.* = .{};
         node.base.tree = self.tree;
+        node.base.start = self.token_it.pos;
+        errdefer {
+            for (node.values.items) |entry| {
+                if (entry.value) |val| {
+                    val.deinit(self.allocator);
+                    self.allocator.destroy(val);
+                }
+            }
+            node.values.deinit(self.allocator);
+        }
 
-        self.token_it.seekTo(start);
-
-        log.debug("Map start: {}, {}", .{ start, self.tree.tokens[start] });
+        log.debug("(map) begin {s}@{d}", .{ @tagName(self.tree.tokens[node.base.start].id), node.base.start });
 
-        const col = self.getCol(start);
+        const col = self.getCol(node.base.start);
 
         while (true) {
-            self.eatCommentsAndSpace();
+            self.eatCommentsAndSpace(&.{});
 
-            // Parse key.
+            // Parse key
             const key_pos = self.token_it.pos;
-            if (self.getCol(key_pos) != col) {
+            if (self.getCol(key_pos) < col) {
                 break;
             }
 
-            const key = self.token_it.next();
+            const key = self.token_it.next() orelse return error.UnexpectedEof;
             switch (key.id) {
-                .Literal => {},
-                else => {
+                .literal => {},
+                .doc_start, .doc_end, .eof => {
                     self.token_it.seekBy(-1);
                     break;
                 },
+                else => {
+                    // TODO key not being a literal
+                    return error.Unhandled;
+                },
             }
 
-            log.debug("Map key: {}, '{s}'", .{ key, self.tree.source[key.start..key.end] });
+            log.debug("(map) key {s}@{d}", .{ self.tree.getRaw(key_pos, key_pos), key_pos });
 
             // Separator
-            _ = try self.expectToken(.MapValueInd);
-
-            // Parse value.
-            const value: *Node = value: {
-                if (self.eatToken(.NewLine)) |_| {
-                    self.eatCommentsAndSpace();
-
-                    // Explicit, complex value such as list or map.
-                    const value_pos = self.token_it.pos;
-                    const value = self.token_it.next();
-                    switch (value.id) {
-                        .Literal, .SingleQuote, .DoubleQuote => {
-                            // Assume nested map.
-                            const map_node = try self.map(value_pos);
-                            break :value &map_node.base;
-                        },
-                        .SeqItemInd => {
-                            // Assume list of values.
-                            const list_node = try self.list(value_pos);
-                            break :value &list_node.base;
-                        },
-                        else => {
-                            log.err("{}", .{key});
-                            return error.Unhandled;
-                        },
-                    }
-                } else {
-                    self.eatCommentsAndSpace();
-
-                    const value_pos = self.token_it.pos;
-                    const value = self.token_it.next();
-                    switch (value.id) {
-                        .Literal, .SingleQuote, .DoubleQuote => {
-                            // Assume leaf value.
-                            const leaf_node = try self.leaf_value(value_pos);
-                            break :value &leaf_node.base;
-                        },
-                        .FlowSeqStart => {
-                            const list_node = try self.list_bracketed(value_pos);
-                            break :value &list_node.base;
-                        },
-                        else => {
-                            log.err("{}", .{key});
-                            return error.Unhandled;
-                        },
+            _ = try self.expectToken(.map_value_ind, &.{ .new_line, .comment });
+
+            // Parse value
+            const val = try self.value();
+            errdefer if (val) |v| {
+                v.deinit(self.allocator);
+                self.allocator.destroy(v);
+            };
+
+            if (val) |v| {
+                if (self.getCol(v.start) < self.getCol(key_pos)) {
+                    return error.MalformedYaml;
+                }
+                if (v.cast(Node.Value)) |_| {
+                    if (self.getCol(v.start) == self.getCol(key_pos)) {
+                        return error.MalformedYaml;
                     }
                 }
-            };
-            log.debug("Map value: {}", .{value});
+            }
 
             try node.values.append(self.allocator, .{
                 .key = key_pos,
-                .value = value,
+                .value = val,
             });
-
-            _ = self.eatToken(.NewLine);
         }
 
-        node.end = self.token_it.pos - 1;
+        node.base.end = self.token_it.pos - 1;
 
-        log.debug("Map end: {}, {}", .{ node.end.?, self.tree.tokens[node.end.?] });
+        log.debug("(map) end {s}@{d}", .{ @tagName(self.tree.tokens[node.base.end].id), node.base.end });
 
-        return node;
+        return &node.base;
     }
 
-    fn list(self: *Parser, start: TokenIndex) ParseError!*Node.List {
+    fn list(self: *Parser) ParseError!*Node {
         const node = try self.allocator.create(Node.List);
         errdefer self.allocator.destroy(node);
-        node.* = .{
-            .start = start,
-        };
+        node.* = .{};
         node.base.tree = self.tree;
+        node.base.start = self.token_it.pos;
+        errdefer {
+            for (node.values.items) |val| {
+                val.deinit(self.allocator);
+                self.allocator.destroy(val);
+            }
+            node.values.deinit(self.allocator);
+        }
 
-        self.token_it.seekTo(start);
-
-        log.debug("List start: {}, {}", .{ start, self.tree.tokens[start] });
-
-        const col = self.getCol(start);
+        log.debug("(list) begin {s}@{d}", .{ @tagName(self.tree.tokens[node.base.start].id), node.base.start });
 
         while (true) {
-            self.eatCommentsAndSpace();
+            self.eatCommentsAndSpace(&.{});
 
-            if (self.getCol(self.token_it.pos) != col) {
-                break;
-            }
-            _ = self.eatToken(.SeqItemInd) orelse {
-                break;
-            };
+            _ = self.eatToken(.seq_item_ind, &.{}) orelse break;
 
-            const pos = self.token_it.pos;
-            const token = self.token_it.next();
-            const value: *Node = value: {
-                switch (token.id) {
-                    .Literal, .SingleQuote, .DoubleQuote => {
-                        if (self.eatToken(.MapValueInd)) |_| {
-                            // nested map
-                            const map_node = try self.map(pos);
-                            break :value &map_node.base;
-                        } else {
-                            // standalone (leaf) value
-                            const leaf_node = try self.leaf_value(pos);
-                            break :value &leaf_node.base;
-                        }
-                    },
-                    .FlowSeqStart => {
-                        const list_node = try self.list_bracketed(pos);
-                        break :value &list_node.base;
-                    },
-                    else => {
-                        log.err("{}", .{token});
-                        return error.Unhandled;
-                    },
-                }
-            };
-            try node.values.append(self.allocator, value);
-
-            _ = self.eatToken(.NewLine);
+            const val = (try self.value()) orelse return error.MalformedYaml;
+            try node.values.append(self.allocator, val);
         }
 
-        node.end = self.token_it.pos - 1;
+        node.base.end = self.token_it.pos - 1;
 
-        log.debug("List end: {}, {}", .{ node.end.?, self.tree.tokens[node.end.?] });
+        log.debug("(list) end {s}@{d}", .{ @tagName(self.tree.tokens[node.base.end].id), node.base.end });
 
-        return node;
+        return &node.base;
     }
 
-    fn list_bracketed(self: *Parser, start: TokenIndex) ParseError!*Node.List {
+    fn list_bracketed(self: *Parser) ParseError!*Node {
         const node = try self.allocator.create(Node.List);
         errdefer self.allocator.destroy(node);
-        node.* = .{ .start = start };
+        node.* = .{};
         node.base.tree = self.tree;
+        node.base.start = self.token_it.pos;
+        errdefer {
+            for (node.values.items) |val| {
+                val.deinit(self.allocator);
+                self.allocator.destroy(val);
+            }
+            node.values.deinit(self.allocator);
+        }
 
-        self.token_it.seekTo(start);
-
-        log.debug("List start: {}, {}", .{ start, self.tree.tokens[start] });
+        log.debug("(list) begin {s}@{d}", .{ @tagName(self.tree.tokens[node.base.start].id), node.base.start });
 
-        _ = try self.expectToken(.FlowSeqStart);
+        _ = try self.expectToken(.flow_seq_start, &.{});
 
         while (true) {
-            _ = self.eatToken(.NewLine);
-            self.eatCommentsAndSpace();
+            self.eatCommentsAndSpace(&.{.comment});
 
-            const pos = self.token_it.pos;
-            const token = self.token_it.next();
-
-            log.debug("Next token: {}, {}", .{ pos, token });
+            if (self.eatToken(.flow_seq_end, &.{.comment})) |pos| {
+                node.base.end = pos;
+                break;
+            }
+            _ = self.eatToken(.comma, &.{.comment});
 
-            const value: *Node = value: {
-                switch (token.id) {
-                    .FlowSeqStart => {
-                        const list_node = try self.list_bracketed(pos);
-                        break :value &list_node.base;
-                    },
-                    .FlowSeqEnd => {
-                        break;
-                    },
-                    .Literal, .SingleQuote, .DoubleQuote => {
-                        const leaf_node = try self.leaf_value(pos);
-                        _ = self.eatToken(.Comma);
-                        // TODO newline
-                        break :value &leaf_node.base;
-                    },
-                    else => {
-                        log.err("{}", .{token});
-                        return error.Unhandled;
-                    },
-                }
-            };
-            try node.values.append(self.allocator, value);
+            const val = (try self.value()) orelse return error.MalformedYaml;
+            try node.values.append(self.allocator, val);
         }
 
-        node.end = self.token_it.pos - 1;
-
-        log.debug("List end: {}, {}", .{ node.end.?, self.tree.tokens[node.end.?] });
+        log.debug("(list) end {s}@{d}", .{ @tagName(self.tree.tokens[node.base.end].id), node.base.end });
 
-        return node;
+        return &node.base;
     }
 
-    fn leaf_value(self: *Parser, start: TokenIndex) ParseError!*Node.Value {
+    fn leaf_value(self: *Parser) ParseError!*Node {
         const node = try self.allocator.create(Node.Value);
         errdefer self.allocator.destroy(node);
-        node.* = .{ .start = start };
+        node.* = .{ .string_value = .{} };
         node.base.tree = self.tree;
-
-        self.token_it.seekTo(start);
-
-        log.debug("Leaf start: {}, {}", .{ node.start.?, self.tree.tokens[node.start.?] });
-
-        parse: {
-            if (self.eatToken(.SingleQuote)) |_| {
-                node.start = node.start.? + 1;
-                while (true) {
-                    const tok = self.token_it.next();
-                    switch (tok.id) {
-                        .SingleQuote => {
-                            node.end = self.token_it.pos - 2;
-                            break :parse;
-                        },
-                        .NewLine => return error.UnexpectedToken,
-                        else => {},
-                    }
-                }
-            }
-
-            if (self.eatToken(.DoubleQuote)) |_| {
-                node.start = node.start.? + 1;
-                while (true) {
-                    const tok = self.token_it.next();
-                    switch (tok.id) {
-                        .DoubleQuote => {
-                            node.end = self.token_it.pos - 2;
-                            break :parse;
-                        },
-                        .NewLine => return error.UnexpectedToken,
-                        else => {},
-                    }
-                }
-            }
-
-            // TODO handle multiline strings in new block scope
-            while (true) {
-                const tok = self.token_it.next();
-                switch (tok.id) {
-                    .Literal => {},
-                    .Space => {
-                        const trailing = self.token_it.pos - 2;
-                        self.eatCommentsAndSpace();
-                        if (self.token_it.peek()) |peek| {
-                            if (peek.id != .Literal) {
-                                node.end = trailing;
-                                break;
-                            }
+        node.base.start = self.token_it.pos;
+        errdefer node.string_value.deinit(self.allocator);
+
+        // TODO handle multiline strings in new block scope
+        while (self.token_it.next()) |tok| {
+            switch (tok.id) {
+                .single_quoted => {
+                    node.base.end = self.token_it.pos - 1;
+                    const raw = self.tree.getRaw(node.base.start, node.base.end);
+                    try self.parseSingleQuoted(node, raw);
+                    break;
+                },
+                .double_quoted => {
+                    node.base.end = self.token_it.pos - 1;
+                    const raw = self.tree.getRaw(node.base.start, node.base.end);
+                    try self.parseDoubleQuoted(node, raw);
+                    break;
+                },
+                .literal => {},
+                .space => {
+                    const trailing = self.token_it.pos - 2;
+                    self.eatCommentsAndSpace(&.{});
+                    if (self.token_it.peek()) |peek| {
+                        if (peek.id != .literal) {
+                            node.base.end = trailing;
+                            const raw = self.tree.getRaw(node.base.start, node.base.end);
+                            try node.string_value.appendSlice(self.allocator, raw);
+                            break;
                         }
-                    },
-                    else => {
-                        self.token_it.seekBy(-1);
-                        node.end = self.token_it.pos - 1;
-                        break;
-                    },
-                }
+                    }
+                },
+                else => {
+                    self.token_it.seekBy(-1);
+                    node.base.end = self.token_it.pos - 1;
+                    const raw = self.tree.getRaw(node.base.start, node.base.end);
+                    try node.string_value.appendSlice(self.allocator, raw);
+                    break;
+                },
             }
         }
 
-        log.debug("Leaf end: {}, {}", .{ node.end.?, self.tree.tokens[node.end.?] });
+        log.debug("(leaf) {s}", .{self.tree.getRaw(node.base.start, node.base.end)});
 
-        return node;
+        return &node.base;
     }
 
-    fn eatCommentsAndSpace(self: *Parser) void {
-        while (true) {
-            _ = self.token_it.peek() orelse return;
-            const token = self.token_it.next();
+    fn eatCommentsAndSpace(self: *Parser, comptime exclusions: []const Token.Id) void {
+        log.debug("eatCommentsAndSpace", .{});
+        outer: while (self.token_it.next()) |token| {
+            log.debug("  (token '{s}')", .{@tagName(token.id)});
             switch (token.id) {
-                .Comment, .Space => {},
+                .comment, .space, .new_line => |space| {
+                    inline for (exclusions) |excl| {
+                        if (excl == space) {
+                            self.token_it.seekBy(-1);
+                            break :outer;
+                        }
+                    } else continue;
+                },
                 else => {
                     self.token_it.seekBy(-1);
                     break;
@@ -655,25 +634,24 @@ const Parser = struct {
         }
     }
 
-    fn eatToken(self: *Parser, id: Token.Id) ?TokenIndex {
-        while (true) {
-            const pos = self.token_it.pos;
-            _ = self.token_it.peek() orelse return null;
-            const token = self.token_it.next();
-            switch (token.id) {
-                .Comment, .Space => continue,
-                else => |next_id| if (next_id == id) {
-                    return pos;
-                } else {
-                    self.token_it.seekTo(pos);
-                    return null;
-                },
-            }
+    fn eatToken(self: *Parser, id: Token.Id, comptime exclusions: []const Token.Id) ?TokenIndex {
+        log.debug("eatToken('{s}')", .{@tagName(id)});
+        self.eatCommentsAndSpace(exclusions);
+        const pos = self.token_it.pos;
+        const token = self.token_it.next() orelse return null;
+        if (token.id == id) {
+            log.debug("  (found at {d})", .{pos});
+            return pos;
+        } else {
+            log.debug("  (not found)", .{});
+            self.token_it.seekBy(-1);
+            return null;
         }
     }
 
-    fn expectToken(self: *Parser, id: Token.Id) ParseError!TokenIndex {
-        return self.eatToken(id) orelse error.UnexpectedToken;
+    fn expectToken(self: *Parser, id: Token.Id, comptime exclusions: []const Token.Id) ParseError!TokenIndex {
+        log.debug("expectToken('{s}')", .{@tagName(id)});
+        return self.eatToken(id, exclusions) orelse error.UnexpectedToken;
     }
 
     fn getLine(self: *Parser, index: TokenIndex) usize {
@@ -683,8 +661,85 @@ const Parser = struct {
     fn getCol(self: *Parser, index: TokenIndex) usize {
         return self.line_cols.get(index).?.col;
     }
+
+    fn parseSingleQuoted(self: *Parser, node: *Node.Value, raw: []const u8) ParseError!void {
+        assert(raw[0] == '\'' and raw[raw.len - 1] == '\'');
+
+        const raw_no_quotes = raw[1 .. raw.len - 1];
+        try node.string_value.ensureTotalCapacity(self.allocator, raw_no_quotes.len);
+
+        var state: enum {
+            start,
+            escape,
+        } = .start;
+        var index: usize = 0;
+
+        while (index < raw_no_quotes.len) : (index += 1) {
+            const c = raw_no_quotes[index];
+            switch (state) {
+                .start => switch (c) {
+                    '\'' => {
+                        state = .escape;
+                    },
+                    else => {
+                        node.string_value.appendAssumeCapacity(c);
+                    },
+                },
+                .escape => switch (c) {
+                    '\'' => {
+                        state = .start;
+                        node.string_value.appendAssumeCapacity(c);
+                    },
+                    else => return error.InvalidEscapeSequence,
+                },
+            }
+        }
+    }
+
+    fn parseDoubleQuoted(self: *Parser, node: *Node.Value, raw: []const u8) ParseError!void {
+        assert(raw[0] == '"' and raw[raw.len - 1] == '"');
+
+        const raw_no_quotes = raw[1 .. raw.len - 1];
+        try node.string_value.ensureTotalCapacity(self.allocator, raw_no_quotes.len);
+
+        var state: enum {
+            start,
+            escape,
+        } = .start;
+
+        var index: usize = 0;
+        while (index < raw_no_quotes.len) : (index += 1) {
+            const c = raw_no_quotes[index];
+            switch (state) {
+                .start => switch (c) {
+                    '\\' => {
+                        state = .escape;
+                    },
+                    else => {
+                        node.string_value.appendAssumeCapacity(c);
+                    },
+                },
+                .escape => switch (c) {
+                    'n' => {
+                        state = .start;
+                        node.string_value.appendAssumeCapacity('\n');
+                    },
+                    't' => {
+                        state = .start;
+                        node.string_value.appendAssumeCapacity('\t');
+                    },
+                    '"' => {
+                        state = .start;
+                        node.string_value.appendAssumeCapacity('"');
+                    },
+                    else => return error.InvalidEscapeSequence,
+                },
+            }
+        }
+    }
 };
 
 test {
+    std.testing.refAllDecls(@This());
     _ = @import("parse/test.zig");
 }
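A minimal, standalone sketch of the single-quoted escape rule that the new parseSingleQuoted helper above implements: inside '...'-scalars the only escape is '' for a literal apostrophe. The function name and test below are illustrative only and are not part of the library.

```zig
const std = @import("std");

// Illustrative sketch, not library API: unescape the body of a single-quoted
// scalar, collapsing '' to a literal apostrophe and rejecting a lone quote.
fn unescapeSingleQuoted(allocator: std.mem.Allocator, raw: []const u8) ![]u8 {
    std.debug.assert(raw.len >= 2 and raw[0] == '\'' and raw[raw.len - 1] == '\'');
    const body = raw[1 .. raw.len - 1];

    var out = std.ArrayList(u8).init(allocator);
    errdefer out.deinit();

    var i: usize = 0;
    while (i < body.len) : (i += 1) {
        if (body[i] == '\'') {
            // only valid as the first half of an escaped '' pair
            if (i + 1 >= body.len or body[i + 1] != '\'') return error.InvalidEscapeSequence;
            i += 1;
        }
        try out.append(body[i]);
    }
    return out.toOwnedSlice();
}

test "'' unescapes to a single apostrophe" {
    const got = try unescapeSingleQuoted(std.testing.allocator, "'here''s'");
    defer std.testing.allocator.free(got);
    try std.testing.expectEqualStrings("here's", got);
}
```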
src/link/tapi/Tokenizer.zig
@@ -1,7 +1,7 @@
 const Tokenizer = @This();
 
 const std = @import("std");
-const log = std.log.scoped(.tapi);
+const log = std.log.scoped(.yaml);
 const testing = std.testing;
 
 buffer: []const u8,
@@ -13,29 +13,31 @@ pub const Token = struct {
     end: usize,
 
     pub const Id = enum {
-        Eof,
-
-        NewLine,
-        DocStart, // ---
-        DocEnd, // ...
-        SeqItemInd, // -
-        MapValueInd, // :
-        FlowMapStart, // {
-        FlowMapEnd, // }
-        FlowSeqStart, // [
-        FlowSeqEnd, // ]
-
-        Comma,
-        Space,
-        Tab,
-        Comment, // #
-        Alias, // *
-        Anchor, // &
-        Tag, // !
-        SingleQuote, // '
-        DoubleQuote, // "
-
-        Literal,
+        // zig fmt: off
+        eof,
+
+        new_line,
+        doc_start,      // ---
+        doc_end,        // ...
+        seq_item_ind,   // -
+        map_value_ind,  // :
+        flow_map_start, // {
+        flow_map_end,   // }
+        flow_seq_start, // [
+        flow_seq_end,   // ]
+
+        comma,
+        space,
+        tab,
+        comment,        // #
+        alias,          // *
+        anchor,         // &
+        tag,            // !
+
+        single_quoted,   // '...'
+        double_quoted,   // "..."
+        literal,
+        // zig fmt: on
     };
 };
 
@@ -45,8 +47,8 @@ pub const TokenIterator = struct {
     buffer: []const Token,
     pos: TokenIndex = 0,
 
-    pub fn next(self: *TokenIterator) Token {
-        const token = self.buffer[self.pos];
+    pub fn next(self: *TokenIterator) ?Token {
+        const token = self.peek() orelse return null;
         self.pos += 1;
         return token;
     }
@@ -74,180 +76,212 @@ pub const TokenIterator = struct {
     }
 };
 
+fn stringMatchesPattern(comptime pattern: []const u8, slice: []const u8) bool {
+    comptime var count: usize = 0;
+    inline while (count < pattern.len) : (count += 1) {
+        if (count >= slice.len) return false;
+        const c = slice[count];
+        if (pattern[count] != c) return false;
+    }
+    return true;
+}
+
+fn matchesPattern(self: Tokenizer, comptime pattern: []const u8) bool {
+    return stringMatchesPattern(pattern, self.buffer[self.index..]);
+}
+
 pub fn next(self: *Tokenizer) Token {
     var result = Token{
-        .id = .Eof,
+        .id = .eof,
         .start = self.index,
         .end = undefined,
     };
 
-    var state: union(enum) {
-        Start,
-        NewLine,
-        Space,
-        Tab,
-        Hyphen: usize,
-        Dot: usize,
-        Literal,
-    } = .Start;
+    var state: enum {
+        start,
+        new_line,
+        space,
+        tab,
+        comment,
+        single_quoted,
+        double_quoted,
+        literal,
+    } = .start;
 
     while (self.index < self.buffer.len) : (self.index += 1) {
         const c = self.buffer[self.index];
         switch (state) {
-            .Start => switch (c) {
+            .start => switch (c) {
                 ' ' => {
-                    state = .Space;
+                    state = .space;
                 },
                 '\t' => {
-                    state = .Tab;
+                    state = .tab;
                 },
                 '\n' => {
-                    result.id = .NewLine;
+                    result.id = .new_line;
                     self.index += 1;
                     break;
                 },
                 '\r' => {
-                    state = .NewLine;
+                    state = .new_line;
                 },
-                '-' => {
-                    state = .{ .Hyphen = 1 };
+
+                '-' => if (self.matchesPattern("---")) {
+                    result.id = .doc_start;
+                    self.index += "---".len;
+                    break;
+                } else if (self.matchesPattern("- ")) {
+                    result.id = .seq_item_ind;
+                    self.index += "- ".len;
+                    break;
+                } else {
+                    state = .literal;
                 },
-                '.' => {
-                    state = .{ .Dot = 1 };
+
+                '.' => if (self.matchesPattern("...")) {
+                    result.id = .doc_end;
+                    self.index += "...".len;
+                    break;
+                } else {
+                    state = .literal;
                 },
+
                 ',' => {
-                    result.id = .Comma;
+                    result.id = .comma;
                     self.index += 1;
                     break;
                 },
                 '#' => {
-                    result.id = .Comment;
-                    self.index += 1;
-                    break;
+                    state = .comment;
                 },
                 '*' => {
-                    result.id = .Alias;
+                    result.id = .alias;
                     self.index += 1;
                     break;
                 },
                 '&' => {
-                    result.id = .Anchor;
+                    result.id = .anchor;
                     self.index += 1;
                     break;
                 },
                 '!' => {
-                    result.id = .Tag;
-                    self.index += 1;
-                    break;
-                },
-                '\'' => {
-                    result.id = .SingleQuote;
-                    self.index += 1;
-                    break;
-                },
-                '"' => {
-                    result.id = .DoubleQuote;
+                    result.id = .tag;
                     self.index += 1;
                     break;
                 },
                 '[' => {
-                    result.id = .FlowSeqStart;
+                    result.id = .flow_seq_start;
                     self.index += 1;
                     break;
                 },
                 ']' => {
-                    result.id = .FlowSeqEnd;
+                    result.id = .flow_seq_end;
                     self.index += 1;
                     break;
                 },
                 ':' => {
-                    result.id = .MapValueInd;
+                    result.id = .map_value_ind;
                     self.index += 1;
                     break;
                 },
                 '{' => {
-                    result.id = .FlowMapStart;
+                    result.id = .flow_map_start;
                     self.index += 1;
                     break;
                 },
                 '}' => {
-                    result.id = .FlowMapEnd;
+                    result.id = .flow_map_end;
                     self.index += 1;
                     break;
                 },
+                '\'' => {
+                    state = .single_quoted;
+                },
+                '"' => {
+                    state = .double_quoted;
+                },
                 else => {
-                    state = .Literal;
+                    state = .literal;
+                },
+            },
+
+            .comment => switch (c) {
+                '\r', '\n' => {
+                    result.id = .comment;
+                    break;
                 },
+                else => {},
             },
-            .Space => switch (c) {
+
+            .space => switch (c) {
                 ' ' => {},
                 else => {
-                    result.id = .Space;
+                    result.id = .space;
                     break;
                 },
             },
-            .Tab => switch (c) {
+
+            .tab => switch (c) {
                 '\t' => {},
                 else => {
-                    result.id = .Tab;
+                    result.id = .tab;
                     break;
                 },
             },
-            .NewLine => switch (c) {
+
+            .new_line => switch (c) {
                 '\n' => {
-                    result.id = .NewLine;
+                    result.id = .new_line;
                     self.index += 1;
                     break;
                 },
                 else => {}, // TODO this should be an error condition
             },
-            .Hyphen => |*count| switch (c) {
-                ' ' => {
-                    result.id = .SeqItemInd;
+
+            .single_quoted => switch (c) {
+                '\'' => if (!self.matchesPattern("''")) {
+                    result.id = .single_quoted;
                     self.index += 1;
                     break;
+                } else {
+                    self.index += "''".len - 1;
                 },
-                '-' => {
-                    count.* += 1;
-
-                    if (count.* == 3) {
-                        result.id = .DocStart;
-                        self.index += 1;
-                        break;
-                    }
-                },
-                else => {
-                    state = .Literal;
-                },
+                else => {},
             },
-            .Dot => |*count| switch (c) {
-                '.' => {
-                    count.* += 1;
 
-                    if (count.* == 3) {
-                        result.id = .DocEnd;
+            .double_quoted => switch (c) {
+                '"' => {
+                    if (stringMatchesPattern("\\", self.buffer[self.index - 1 ..])) {
+                        self.index += 1;
+                    } else {
+                        result.id = .double_quoted;
                         self.index += 1;
                         break;
                     }
                 },
-                else => {
-                    state = .Literal;
-                },
+                else => {},
             },
-            .Literal => switch (c) {
+
+            .literal => switch (c) {
                 '\r', '\n', ' ', '\'', '"', ',', ':', ']', '}' => {
-                    result.id = .Literal;
+                    result.id = .literal;
                     break;
                 },
                 else => {
-                    result.id = .Literal;
+                    result.id = .literal;
                 },
             },
         }
     }
 
-    if (state == .Literal and result.id == .Eof) {
-        result.id = .Literal;
+    if (self.index >= self.buffer.len) {
+        switch (state) {
+            .literal => {
+                result.id = .literal;
+            },
+            else => {},
+        }
     }
 
     result.end = self.index;
@@ -263,22 +297,24 @@ fn testExpected(source: []const u8, expected: []const Token.Id) !void {
         .buffer = source,
     };
 
-    var token_len: usize = 0;
-    for (expected) |exp| {
-        token_len += 1;
+    var given = std.ArrayList(Token.Id).init(testing.allocator);
+    defer given.deinit();
+
+    while (true) {
         const token = tokenizer.next();
-        try testing.expectEqual(exp, token.id);
+        try given.append(token.id);
+        if (token.id == .eof) break;
     }
 
-    while (tokenizer.next().id != .Eof) {
-        token_len += 1; // consume all tokens
-    }
+    try testing.expectEqualSlices(Token.Id, expected, given.items);
+}
 
-    try testing.expectEqual(expected.len, token_len);
+test {
+    std.testing.refAllDecls(@This());
 }
 
 test "empty doc" {
-    try testExpected("", &[_]Token.Id{.Eof});
+    try testExpected("", &[_]Token.Id{.eof});
 }
 
 test "empty doc with explicit markers" {
@@ -286,7 +322,22 @@ test "empty doc with explicit markers" {
         \\---
         \\...
     , &[_]Token.Id{
-        .DocStart, .NewLine, .DocEnd, .Eof,
+        .doc_start, .new_line, .doc_end, .eof,
+    });
+}
+
+test "empty doc with explicit markers and a directive" {
+    try testExpected(
+        \\--- !tbd-v1
+        \\...
+    , &[_]Token.Id{
+        .doc_start,
+        .space,
+        .tag,
+        .literal,
+        .new_line,
+        .doc_end,
+        .eof,
     });
 }
 
@@ -296,15 +347,15 @@ test "sequence of values" {
         \\- 1
         \\- 2
     , &[_]Token.Id{
-        .SeqItemInd,
-        .Literal,
-        .NewLine,
-        .SeqItemInd,
-        .Literal,
-        .NewLine,
-        .SeqItemInd,
-        .Literal,
-        .Eof,
+        .seq_item_ind,
+        .literal,
+        .new_line,
+        .seq_item_ind,
+        .literal,
+        .new_line,
+        .seq_item_ind,
+        .literal,
+        .eof,
     });
 }
 
@@ -313,24 +364,24 @@ test "sequence of sequences" {
         \\- [ val1, val2]
         \\- [val3, val4 ]
     , &[_]Token.Id{
-        .SeqItemInd,
-        .FlowSeqStart,
-        .Space,
-        .Literal,
-        .Comma,
-        .Space,
-        .Literal,
-        .FlowSeqEnd,
-        .NewLine,
-        .SeqItemInd,
-        .FlowSeqStart,
-        .Literal,
-        .Comma,
-        .Space,
-        .Literal,
-        .Space,
-        .FlowSeqEnd,
-        .Eof,
+        .seq_item_ind,
+        .flow_seq_start,
+        .space,
+        .literal,
+        .comma,
+        .space,
+        .literal,
+        .flow_seq_end,
+        .new_line,
+        .seq_item_ind,
+        .flow_seq_start,
+        .literal,
+        .comma,
+        .space,
+        .literal,
+        .space,
+        .flow_seq_end,
+        .eof,
     });
 }
 
@@ -339,16 +390,16 @@ test "mappings" {
         \\key1: value1
         \\key2: value2
     , &[_]Token.Id{
-        .Literal,
-        .MapValueInd,
-        .Space,
-        .Literal,
-        .NewLine,
-        .Literal,
-        .MapValueInd,
-        .Space,
-        .Literal,
-        .Eof,
+        .literal,
+        .map_value_ind,
+        .space,
+        .literal,
+        .new_line,
+        .literal,
+        .map_value_ind,
+        .space,
+        .literal,
+        .eof,
     });
 }
 
@@ -357,21 +408,21 @@ test "inline mapped sequence of values" {
         \\key :  [ val1, 
         \\          val2 ]
     , &[_]Token.Id{
-        .Literal,
-        .Space,
-        .MapValueInd,
-        .Space,
-        .FlowSeqStart,
-        .Space,
-        .Literal,
-        .Comma,
-        .Space,
-        .NewLine,
-        .Space,
-        .Literal,
-        .Space,
-        .FlowSeqEnd,
-        .Eof,
+        .literal,
+        .space,
+        .map_value_ind,
+        .space,
+        .flow_seq_start,
+        .space,
+        .literal,
+        .comma,
+        .space,
+        .new_line,
+        .space,
+        .literal,
+        .space,
+        .flow_seq_end,
+        .eof,
     });
 }
 
@@ -388,52 +439,50 @@ test "part of tbd" {
         \\install-name:    '/usr/lib/libSystem.B.dylib'
         \\...
     , &[_]Token.Id{
-        .DocStart,
-        .Space,
-        .Tag,
-        .Literal,
-        .NewLine,
-        .Literal,
-        .MapValueInd,
-        .Space,
-        .Literal,
-        .NewLine,
-        .Literal,
-        .MapValueInd,
-        .Space,
-        .FlowSeqStart,
-        .Space,
-        .Literal,
-        .Space,
-        .FlowSeqEnd,
-        .NewLine,
-        .NewLine,
-        .Literal,
-        .MapValueInd,
-        .NewLine,
-        .Space,
-        .SeqItemInd,
-        .Literal,
-        .MapValueInd,
-        .Space,
-        .Literal,
-        .NewLine,
-        .Space,
-        .Literal,
-        .MapValueInd,
-        .Space,
-        .Literal,
-        .NewLine,
-        .NewLine,
-        .Literal,
-        .MapValueInd,
-        .Space,
-        .SingleQuote,
-        .Literal,
-        .SingleQuote,
-        .NewLine,
-        .DocEnd,
-        .Eof,
+        .doc_start,
+        .space,
+        .tag,
+        .literal,
+        .new_line,
+        .literal,
+        .map_value_ind,
+        .space,
+        .literal,
+        .new_line,
+        .literal,
+        .map_value_ind,
+        .space,
+        .flow_seq_start,
+        .space,
+        .literal,
+        .space,
+        .flow_seq_end,
+        .new_line,
+        .new_line,
+        .literal,
+        .map_value_ind,
+        .new_line,
+        .space,
+        .seq_item_ind,
+        .literal,
+        .map_value_ind,
+        .space,
+        .literal,
+        .new_line,
+        .space,
+        .literal,
+        .map_value_ind,
+        .space,
+        .literal,
+        .new_line,
+        .new_line,
+        .literal,
+        .map_value_ind,
+        .space,
+        .single_quoted,
+        .new_line,
+        .doc_end,
+        .eof,
     });
 }
 
@@ -443,18 +492,84 @@ test "Unindented list" {
         \\- foo: 1
         \\c: 1
     , &[_]Token.Id{
-        .Literal,
-        .MapValueInd,
-        .NewLine,
-        .SeqItemInd,
-        .Literal,
-        .MapValueInd,
-        .Space,
-        .Literal,
-        .NewLine,
-        .Literal,
-        .MapValueInd,
-        .Space,
-        .Literal,
+        .literal,
+        .map_value_ind,
+        .new_line,
+        .seq_item_ind,
+        .literal,
+        .map_value_ind,
+        .space,
+        .literal,
+        .new_line,
+        .literal,
+        .map_value_ind,
+        .space,
+        .literal,
+        .eof,
+    });
+}
+
+test "escape sequences" {
+    try testExpected(
+        \\a: 'here''s an apostrophe'
+        \\b: "a newline\nand a\ttab"
+        \\c: "\"here\" and there"
+    , &[_]Token.Id{
+        .literal,
+        .map_value_ind,
+        .space,
+        .single_quoted,
+        .new_line,
+        .literal,
+        .map_value_ind,
+        .space,
+        .double_quoted,
+        .new_line,
+        .literal,
+        .map_value_ind,
+        .space,
+        .double_quoted,
+        .eof,
+    });
+}
+
+test "comments" {
+    try testExpected(
+        \\key: # some comment about the key
+        \\# first value
+        \\- val1
+        \\# second value
+        \\- val2
+    , &[_]Token.Id{
+        .literal,
+        .map_value_ind,
+        .space,
+        .comment,
+        .new_line,
+        .comment,
+        .new_line,
+        .seq_item_ind,
+        .literal,
+        .new_line,
+        .comment,
+        .new_line,
+        .seq_item_ind,
+        .literal,
+        .eof,
+    });
+}
+
+test "quoted literals" {
+    try testExpected(
+        \\'#000000'
+        \\'[000000'
+        \\"&someString"
+    , &[_]Token.Id{
+        .single_quoted,
+        .new_line,
+        .single_quoted,
+        .new_line,
+        .double_quoted,
+        .eof,
     });
 }
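With quote handling moved into the tokenizer, a '...'-scalar now becomes a single .single_quoted token: a ' only closes the scalar when it is not immediately followed by another ' (an escaped apostrophe). A self-contained sketch of that lookahead, using a hypothetical helper rather than the Tokenizer itself:

```zig
const std = @import("std");

// Hypothetical helper for illustration: given the index of an opening ',
// return the index one past the matching closing ', skipping '' pairs,
// or null if the scalar is unterminated.
fn singleQuotedEnd(buffer: []const u8, start: usize) ?usize {
    std.debug.assert(buffer[start] == '\'');
    var i = start + 1;
    while (i < buffer.len) : (i += 1) {
        if (buffer[i] != '\'') continue;
        if (i + 1 < buffer.len and buffer[i + 1] == '\'') {
            i += 1; // escaped apostrophe, keep scanning
            continue;
        }
        return i + 1;
    }
    return null;
}

test "escaped apostrophe does not close the scalar" {
    const src = "'here''s an apostrophe' rest";
    try std.testing.expectEqual(@as(?usize, 23), singleQuotedEnd(src, 0));
    try std.testing.expectEqual(@as(?usize, null), singleQuotedEnd("'oops", 0));
}
```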
src/link/tapi/yaml.zig
@@ -2,8 +2,7 @@ const std = @import("std");
 const assert = std.debug.assert;
 const math = std.math;
 const mem = std.mem;
-const testing = std.testing;
-const log = std.log.scoped(.tapi);
+const log = std.log.scoped(.yaml);
 
 const Allocator = mem.Allocator;
 const ArenaAllocator = std.heap.ArenaAllocator;
@@ -17,22 +16,15 @@ const ParseError = parse.ParseError;
 
 pub const YamlError = error{
     UnexpectedNodeType,
+    DuplicateMapKey,
     OutOfMemory,
+    CannotEncodeValue,
 } || ParseError || std.fmt.ParseIntError;
 
-pub const ValueType = enum {
-    empty,
-    int,
-    float,
-    string,
-    list,
-    map,
-};
-
 pub const List = []Value;
-pub const Map = std.StringArrayHashMap(Value);
+pub const Map = std.StringHashMap(Value);
 
-pub const Value = union(ValueType) {
+pub const Value = union(enum) {
     empty,
     int: i64,
     float: f64,
@@ -70,9 +62,7 @@ pub const Value = union(ValueType) {
         should_inline_first_key: bool = false,
     };
 
-    pub const StringifyError = std.os.WriteError;
-
-    pub fn stringify(self: Value, writer: anytype, args: StringifyArgs) StringifyError!void {
+    pub fn stringify(self: Value, writer: anytype, args: StringifyArgs) anyerror!void {
         switch (self) {
             .empty => return,
             .int => |int| return writer.print("{}", .{int}),
@@ -83,7 +73,7 @@ pub const Value = union(ValueType) {
                 if (len == 0) return;
 
                 const first = list[0];
-                if (first.is_compound()) {
+                if (first.isCompound()) {
                     for (list, 0..) |elem, i| {
                         try writer.writeByteNTimes(' ', args.indentation);
                         try writer.writeAll("- ");
@@ -108,20 +98,23 @@ pub const Value = union(ValueType) {
                 try writer.writeAll(" ]");
             },
             .map => |map| {
-                const keys = map.keys();
-                const len = keys.len;
+                const len = map.count();
                 if (len == 0) return;
 
-                for (keys, 0..) |key, i| {
+                var i: usize = 0;
+                var it = map.iterator();
+                while (it.next()) |entry| {
+                    const key = entry.key_ptr.*;
+                    const value = entry.value_ptr.*;
+
                     if (!args.should_inline_first_key or i != 0) {
                         try writer.writeByteNTimes(' ', args.indentation);
                     }
                     try writer.print("{s}: ", .{key});
 
-                    const value = map.get(key) orelse unreachable;
                     const should_inline = blk: {
-                        if (!value.is_compound()) break :blk true;
-                        if (value == .list and value.list.len > 0 and !value.list[0].is_compound()) break :blk true;
+                        if (!value.isCompound()) break :blk true;
+                        if (value == .list and value.list.len > 0 and !value.list[0].isCompound()) break :blk true;
                         break :blk false;
                     };
 
@@ -137,35 +130,44 @@ pub const Value = union(ValueType) {
                     if (i < len - 1) {
                         try writer.writeByte('\n');
                     }
+
+                    i += 1;
                 }
             },
         }
     }
 
-    fn is_compound(self: Value) bool {
+    fn isCompound(self: Value) bool {
         return switch (self) {
             .list, .map => true,
             else => false,
         };
     }
 
-    fn fromNode(arena: Allocator, tree: *const Tree, node: *const Node, type_hint: ?ValueType) YamlError!Value {
+    fn fromNode(arena: Allocator, tree: *const Tree, node: *const Node) YamlError!Value {
         if (node.cast(Node.Doc)) |doc| {
             const inner = doc.value orelse {
                 // empty doc
                 return Value{ .empty = {} };
             };
-            return Value.fromNode(arena, tree, inner, null);
+            return Value.fromNode(arena, tree, inner);
         } else if (node.cast(Node.Map)) |map| {
-            var out_map = std.StringArrayHashMap(Value).init(arena);
-            try out_map.ensureUnusedCapacity(map.values.items.len);
+            // TODO use ContextAdapted HashMap and do not duplicate keys, intern
+            // in a contiguous string buffer.
+            var out_map = std.StringHashMap(Value).init(arena);
+            try out_map.ensureUnusedCapacity(math.cast(u32, map.values.items.len) orelse return error.Overflow);
 
             for (map.values.items) |entry| {
-                const key_tok = tree.tokens[entry.key];
-                const key = try arena.dupe(u8, tree.source[key_tok.start..key_tok.end]);
-                const value = try Value.fromNode(arena, tree, entry.value, null);
-
-                out_map.putAssumeCapacityNoClobber(key, value);
+                const key = try arena.dupe(u8, tree.getRaw(entry.key, entry.key));
+                const gop = out_map.getOrPutAssumeCapacity(key);
+                if (gop.found_existing) {
+                    return error.DuplicateMapKey;
+                }
+                const value = if (entry.value) |value|
+                    try Value.fromNode(arena, tree, value)
+                else
+                    .empty;
+                gop.value_ptr.* = value;
             }
 
             return Value{ .map = out_map };
@@ -173,56 +175,124 @@ pub const Value = union(ValueType) {
             var out_list = std.ArrayList(Value).init(arena);
             try out_list.ensureUnusedCapacity(list.values.items.len);
 
-            if (list.values.items.len > 0) {
-                const hint = if (list.values.items[0].cast(Node.Value)) |value| hint: {
-                    const start = tree.tokens[value.start.?];
-                    const end = tree.tokens[value.end.?];
-                    const raw = tree.source[start.start..end.end];
-                    _ = std.fmt.parseInt(i64, raw, 10) catch {
-                        _ = std.fmt.parseFloat(f64, raw) catch {
-                            break :hint ValueType.string;
-                        };
-                        break :hint ValueType.float;
-                    };
-                    break :hint ValueType.int;
-                } else null;
-
-                for (list.values.items) |elem| {
-                    const value = try Value.fromNode(arena, tree, elem, hint);
-                    out_list.appendAssumeCapacity(value);
-                }
+            for (list.values.items) |elem| {
+                const value = try Value.fromNode(arena, tree, elem);
+                out_list.appendAssumeCapacity(value);
             }
 
             return Value{ .list = try out_list.toOwnedSlice() };
         } else if (node.cast(Node.Value)) |value| {
-            const start = tree.tokens[value.start.?];
-            const end = tree.tokens[value.end.?];
-            const raw = tree.source[start.start..end.end];
-
-            if (type_hint) |hint| {
-                return switch (hint) {
-                    .int => Value{ .int = try std.fmt.parseInt(i64, raw, 10) },
-                    .float => Value{ .float = try std.fmt.parseFloat(f64, raw) },
-                    .string => Value{ .string = try arena.dupe(u8, raw) },
-                    else => unreachable,
-                };
-            }
+            const raw = tree.getRaw(node.start, node.end);
 
             try_int: {
                 // TODO infer base for int
                 const int = std.fmt.parseInt(i64, raw, 10) catch break :try_int;
                 return Value{ .int = int };
             }
+
             try_float: {
                 const float = std.fmt.parseFloat(f64, raw) catch break :try_float;
                 return Value{ .float = float };
             }
-            return Value{ .string = try arena.dupe(u8, raw) };
+
+            return Value{ .string = try arena.dupe(u8, value.string_value.items) };
         } else {
             log.err("Unexpected node type: {}", .{node.tag});
             return error.UnexpectedNodeType;
         }
     }
+
+    fn encode(arena: Allocator, input: anytype) YamlError!?Value {
+        switch (@typeInfo(@TypeOf(input))) {
+            .ComptimeInt,
+            .Int,
+            => return Value{ .int = math.cast(i64, input) orelse return error.Overflow },
+
+            .Float => return Value{ .float = math.lossyCast(f64, input) },
+
+            .Struct => |info| if (info.is_tuple) {
+                var list = std.ArrayList(Value).init(arena);
+                errdefer list.deinit();
+                try list.ensureTotalCapacityPrecise(info.fields.len);
+
+                inline for (info.fields) |field| {
+                    if (try encode(arena, @field(input, field.name))) |value| {
+                        list.appendAssumeCapacity(value);
+                    }
+                }
+
+                return Value{ .list = try list.toOwnedSlice() };
+            } else {
+                var map = Map.init(arena);
+                errdefer map.deinit();
+                try map.ensureTotalCapacity(info.fields.len);
+
+                inline for (info.fields) |field| {
+                    if (try encode(arena, @field(input, field.name))) |value| {
+                        const key = try arena.dupe(u8, field.name);
+                        map.putAssumeCapacityNoClobber(key, value);
+                    }
+                }
+
+                return Value{ .map = map };
+            },
+
+            .Union => |info| if (info.tag_type) |tag_type| {
+                inline for (info.fields) |field| {
+                    if (@field(tag_type, field.name) == input) {
+                        return try encode(arena, @field(input, field.name));
+                    }
+                } else unreachable;
+            } else return error.UntaggedUnion,
+
+            .Array => return encode(arena, &input),
+
+            .Pointer => |info| switch (info.size) {
+                .One => switch (@typeInfo(info.child)) {
+                    .Array => |child_info| {
+                        const Slice = []const child_info.child;
+                        return encode(arena, @as(Slice, input));
+                    },
+                    else => {
+                        @compileError("Unhandled type: " ++ @typeName(info.child));
+                    },
+                },
+                .Slice => {
+                    if (info.child == u8) {
+                        return Value{ .string = try arena.dupe(u8, input) };
+                    }
+
+                    var list = std.ArrayList(Value).init(arena);
+                    errdefer list.deinit();
+                    try list.ensureTotalCapacityPrecise(input.len);
+
+                    for (input) |elem| {
+                        if (try encode(arena, elem)) |value| {
+                            list.appendAssumeCapacity(value);
+                        } else {
+                            log.err("Could not encode value in a list: {any}", .{elem});
+                            return error.CannotEncodeValue;
+                        }
+                    }
+
+                    return Value{ .list = try list.toOwnedSlice() };
+                },
+                else => {
+                    @compileError("Unhandled type: " ++ @typeName(@TypeOf(input)));
+                },
+            },
+
+            // TODO we should probably have an option to encode `null` and also
+            // allow for some default value too.
+            .Optional => return if (input) |val| encode(arena, val) else null,
+
+            .Null => return null,
+
+            else => {
+                @compileError("Unhandled type: " ++ @typeName(@TypeOf(input)));
+            },
+        }
+    }
 };
 
 pub const Yaml = struct {
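The new encode routine above maps Zig values onto YAML values by switching on @typeInfo at compile time. A toy, self-contained illustration of that style of type-directed dispatch; describe is a hypothetical helper and deliberately much simpler than encode:

```zig
const std = @import("std");

// Hypothetical helper, not library API: classify a Zig type roughly the way
// encode picks a YAML representation, by switching on @typeInfo.
fn describe(comptime T: type) []const u8 {
    return switch (@typeInfo(T)) {
        .Int, .ComptimeInt => "int",
        .Float, .ComptimeFloat => "float",
        .Pointer => |info| if (info.size == .Slice and info.child == u8) "string" else "list",
        .Struct => "map",
        .Optional => |info| describe(info.child),
        else => "unsupported",
    };
}

test "type-directed dispatch" {
    try std.testing.expectEqualStrings("int", describe(u32));
    try std.testing.expectEqualStrings("string", describe([]const u8));
    try std.testing.expectEqualStrings("list", describe([]const u32));
    try std.testing.expectEqualStrings("map", describe(struct { a: u8 }));
    try std.testing.expectEqualStrings("int", describe(?i64));
}
```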
@@ -234,30 +304,18 @@ pub const Yaml = struct {
         self.arena.deinit();
     }
 
-    pub fn stringify(self: Yaml, writer: anytype) !void {
-        for (self.docs.items) |doc| {
-            // if (doc.directive) |directive| {
-            //     try writer.print("--- !{s}\n", .{directive});
-            // }
-            try doc.stringify(writer, .{});
-            // if (doc.directive != null) {
-            //     try writer.writeAll("...\n");
-            // }
-        }
-    }
-
     pub fn load(allocator: Allocator, source: []const u8) !Yaml {
         var arena = ArenaAllocator.init(allocator);
-        const arena_allocator = arena.allocator();
+        errdefer arena.deinit();
 
-        var tree = Tree.init(arena_allocator);
+        var tree = Tree.init(arena.allocator());
         try tree.parse(source);
 
-        var docs = std.ArrayList(Value).init(arena_allocator);
-        try docs.ensureUnusedCapacity(tree.docs.items.len);
+        var docs = std.ArrayList(Value).init(arena.allocator());
+        try docs.ensureTotalCapacityPrecise(tree.docs.items.len);
 
         for (tree.docs.items) |node| {
-            const value = try Value.fromNode(arena_allocator, &tree, node, null);
+            const value = try Value.fromNode(arena.allocator(), &tree, node);
             docs.appendAssumeCapacity(value);
         }
 
@@ -316,17 +374,19 @@ pub const Yaml = struct {
 
     fn parseValue(self: *Yaml, comptime T: type, value: Value) Error!T {
         return switch (@typeInfo(T)) {
-            .Int => math.cast(T, try value.asInt()) orelse error.Overflow,
-            .Float => math.lossyCast(T, try value.asFloat()),
+            .Int => math.cast(T, try value.asInt()) orelse return error.Overflow,
+            .Float => if (value.asFloat()) |float| {
+                return math.lossyCast(T, float);
+            } else |_| {
+                return math.lossyCast(T, try value.asInt());
+            },
             .Struct => self.parseStruct(T, try value.asMap()),
             .Union => self.parseUnion(T, value),
             .Array => self.parseArray(T, try value.asList()),
-            .Pointer => {
-                if (value.asList()) |list| {
-                    return self.parsePointer(T, .{ .list = list });
-                } else |_| {
-                    return self.parsePointer(T, .{ .string = try value.asString() });
-                }
+            .Pointer => if (value.asList()) |list| {
+                return self.parsePointer(T, .{ .list = list });
+            } else |_| {
+                return self.parsePointer(T, .{ .string = try value.asString() });
             },
             .Void => error.TypeMismatch,
             .Optional => unreachable,
@@ -372,7 +432,7 @@ pub const Yaml = struct {
             }
 
             const unwrapped = value orelse {
-                log.debug("missing struct field: {s}: {s}", .{ field.name, @typeName(field.type) });
+                log.err("missing struct field: {s}: {s}", .{ field.name, @typeName(field.type) });
                 return error.StructFieldMissing;
             };
             @field(parsed, field.name) = try self.parseValue(field.type, unwrapped);
@@ -387,8 +447,7 @@ pub const Yaml = struct {
 
         switch (ptr_info.size) {
             .Slice => {
-                const child_info = @typeInfo(ptr_info.child);
-                if (child_info == .Int and child_info.Int.bits == 8) {
+                if (ptr_info.child == u8) {
                     return value.asString();
                 }
 
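The simplified slice rule above sends only u8 element types down the string path; every other slice is decoded element by element from a YAML list, with storage coming from the Yaml arena. A sketch of the non-u8 case, assuming a single-doc top-level list reaches parseValue directly:

test "non-u8 slice decodes from a list (sketch)" {
    const source =
        \\- 1
        \\- 2
    ;

    var yaml = try Yaml.load(std.testing.allocator, source);
    defer yaml.deinit();

    // u64 is not u8, so the values come back through the list path rather than asString.
    const ints = try yaml.parse([]const u64);
    try std.testing.expectEqual(@as(usize, 2), ints.len);
    try std.testing.expectEqual(@as(u64, 1), ints[0]);
}
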
@@ -413,315 +472,36 @@ pub const Yaml = struct {
 
         return parsed;
     }
-};
-
-test {
-    testing.refAllDecls(@This());
-}
-
-test "simple list" {
-    const source =
-        \\- a
-        \\- b
-        \\- c
-    ;
-
-    var yaml = try Yaml.load(testing.allocator, source);
-    defer yaml.deinit();
 
-    try testing.expectEqual(yaml.docs.items.len, 1);
-
-    const list = yaml.docs.items[0].list;
-    try testing.expectEqual(list.len, 3);
-
-    try testing.expect(mem.eql(u8, list[0].string, "a"));
-    try testing.expect(mem.eql(u8, list[1].string, "b"));
-    try testing.expect(mem.eql(u8, list[2].string, "c"));
-}
-
-test "simple list typed as array of strings" {
-    const source =
-        \\- a
-        \\- b
-        \\- c
-    ;
-
-    var yaml = try Yaml.load(testing.allocator, source);
-    defer yaml.deinit();
-
-    try testing.expectEqual(yaml.docs.items.len, 1);
-
-    const arr = try yaml.parse([3][]const u8);
-    try testing.expectEqual(arr.len, 3);
-    try testing.expect(mem.eql(u8, arr[0], "a"));
-    try testing.expect(mem.eql(u8, arr[1], "b"));
-    try testing.expect(mem.eql(u8, arr[2], "c"));
-}
-
-test "simple list typed as array of ints" {
-    const source =
-        \\- 0
-        \\- 1
-        \\- 2
-    ;
-
-    var yaml = try Yaml.load(testing.allocator, source);
-    defer yaml.deinit();
-
-    try testing.expectEqual(yaml.docs.items.len, 1);
-
-    const arr = try yaml.parse([3]u8);
-    try testing.expectEqual(arr.len, 3);
-    try testing.expectEqual(arr[0], 0);
-    try testing.expectEqual(arr[1], 1);
-    try testing.expectEqual(arr[2], 2);
-}
-
-test "list of mixed sign integer" {
-    const source =
-        \\- 0
-        \\- -1
-        \\- 2
-    ;
-
-    var yaml = try Yaml.load(testing.allocator, source);
-    defer yaml.deinit();
-
-    try testing.expectEqual(yaml.docs.items.len, 1);
-
-    const arr = try yaml.parse([3]i8);
-    try testing.expectEqual(arr.len, 3);
-    try testing.expectEqual(arr[0], 0);
-    try testing.expectEqual(arr[1], -1);
-    try testing.expectEqual(arr[2], 2);
-}
-
-test "simple map untyped" {
-    const source =
-        \\a: 0
-    ;
-
-    var yaml = try Yaml.load(testing.allocator, source);
-    defer yaml.deinit();
-
-    try testing.expectEqual(yaml.docs.items.len, 1);
-
-    const map = yaml.docs.items[0].map;
-    try testing.expect(map.contains("a"));
-    try testing.expectEqual(map.get("a").?.int, 0);
-}
-
-test "simple map untyped with a list of maps" {
-    const source =
-        \\a: 0
-        \\b: 
-        \\  - foo: 1
-        \\    bar: 2
-        \\  - foo: 3
-        \\    bar: 4
-        \\c: 1
-    ;
-
-    var yaml = try Yaml.load(testing.allocator, source);
-    defer yaml.deinit();
-
-    try testing.expectEqual(yaml.docs.items.len, 1);
-
-    const map = yaml.docs.items[0].map;
-    try testing.expect(map.contains("a"));
-    try testing.expect(map.contains("b"));
-    try testing.expect(map.contains("c"));
-    try testing.expectEqual(map.get("a").?.int, 0);
-    try testing.expectEqual(map.get("c").?.int, 1);
-    try testing.expectEqual(map.get("b").?.list[0].map.get("foo").?.int, 1);
-    try testing.expectEqual(map.get("b").?.list[0].map.get("bar").?.int, 2);
-    try testing.expectEqual(map.get("b").?.list[1].map.get("foo").?.int, 3);
-    try testing.expectEqual(map.get("b").?.list[1].map.get("bar").?.int, 4);
-}
-
-test "simple map untyped with a list of maps. no indent" {
-    const source =
-        \\b: 
-        \\- foo: 1
-        \\c: 1
-    ;
-
-    var yaml = try Yaml.load(testing.allocator, source);
-    defer yaml.deinit();
-
-    try testing.expectEqual(yaml.docs.items.len, 1);
-
-    const map = yaml.docs.items[0].map;
-    try testing.expect(map.contains("b"));
-    try testing.expect(map.contains("c"));
-    try testing.expectEqual(map.get("c").?.int, 1);
-    try testing.expectEqual(map.get("b").?.list[0].map.get("foo").?.int, 1);
-}
-
-test "simple map untyped with a list of maps. no indent 2" {
-    const source =
-        \\a: 0
-        \\b:
-        \\- foo: 1
-        \\  bar: 2
-        \\- foo: 3
-        \\  bar: 4
-        \\c: 1
-    ;
-
-    var yaml = try Yaml.load(testing.allocator, source);
-    defer yaml.deinit();
-
-    try testing.expectEqual(yaml.docs.items.len, 1);
-
-    const map = yaml.docs.items[0].map;
-    try testing.expect(map.contains("a"));
-    try testing.expect(map.contains("b"));
-    try testing.expect(map.contains("c"));
-    try testing.expectEqual(map.get("a").?.int, 0);
-    try testing.expectEqual(map.get("c").?.int, 1);
-    try testing.expectEqual(map.get("b").?.list[0].map.get("foo").?.int, 1);
-    try testing.expectEqual(map.get("b").?.list[0].map.get("bar").?.int, 2);
-    try testing.expectEqual(map.get("b").?.list[1].map.get("foo").?.int, 3);
-    try testing.expectEqual(map.get("b").?.list[1].map.get("bar").?.int, 4);
-}
-
-test "simple map typed" {
-    const source =
-        \\a: 0
-        \\b: hello there
-        \\c: 'wait, what?'
-    ;
-
-    var yaml = try Yaml.load(testing.allocator, source);
-    defer yaml.deinit();
-
-    const simple = try yaml.parse(struct { a: usize, b: []const u8, c: []const u8 });
-    try testing.expectEqual(simple.a, 0);
-    try testing.expect(mem.eql(u8, simple.b, "hello there"));
-    try testing.expect(mem.eql(u8, simple.c, "wait, what?"));
-}
-
-test "typed nested structs" {
-    const source =
-        \\a:
-        \\  b: hello there
-        \\  c: 'wait, what?'
-    ;
-
-    var yaml = try Yaml.load(testing.allocator, source);
-    defer yaml.deinit();
-
-    const simple = try yaml.parse(struct {
-        a: struct {
-            b: []const u8,
-            c: []const u8,
-        },
-    });
-    try testing.expect(mem.eql(u8, simple.a.b, "hello there"));
-    try testing.expect(mem.eql(u8, simple.a.c, "wait, what?"));
-}
-
-test "multidoc typed as a slice of structs" {
-    const source =
-        \\---
-        \\a: 0
-        \\---
-        \\a: 1
-        \\...
-    ;
-
-    var yaml = try Yaml.load(testing.allocator, source);
-    defer yaml.deinit();
-
-    {
-        const result = try yaml.parse([2]struct { a: usize });
-        try testing.expectEqual(result.len, 2);
-        try testing.expectEqual(result[0].a, 0);
-        try testing.expectEqual(result[1].a, 1);
-    }
-
-    {
-        const result = try yaml.parse([]struct { a: usize });
-        try testing.expectEqual(result.len, 2);
-        try testing.expectEqual(result[0].a, 0);
-        try testing.expectEqual(result[1].a, 1);
+    pub fn stringify(self: Yaml, writer: anytype) !void {
+        for (self.docs.items, 0..) |doc, i| {
+            try writer.writeAll("---");
+            if (self.tree.?.getDirective(i)) |directive| {
+                try writer.print(" !{s}", .{directive});
+            }
+            try writer.writeByte('\n');
+            try doc.stringify(writer, .{});
+            try writer.writeByte('\n');
+        }
+        try writer.writeAll("...\n");
     }
-}
-
-test "multidoc typed as a struct is an error" {
-    const source =
-        \\---
-        \\a: 0
-        \\---
-        \\b: 1
-        \\...
-    ;
-
-    var yaml = try Yaml.load(testing.allocator, source);
-    defer yaml.deinit();
-
-    try testing.expectError(Yaml.Error.TypeMismatch, yaml.parse(struct { a: usize }));
-    try testing.expectError(Yaml.Error.TypeMismatch, yaml.parse(struct { b: usize }));
-    try testing.expectError(Yaml.Error.TypeMismatch, yaml.parse(struct { a: usize, b: usize }));
-}
-
-test "multidoc typed as a slice of structs with optionals" {
-    const source =
-        \\---
-        \\a: 0
-        \\c: 1.0
-        \\---
-        \\a: 1
-        \\b: different field
-        \\...
-    ;
-
-    var yaml = try Yaml.load(testing.allocator, source);
-    defer yaml.deinit();
-
-    const result = try yaml.parse([]struct { a: usize, b: ?[]const u8, c: ?f16 });
-    try testing.expectEqual(result.len, 2);
-
-    try testing.expectEqual(result[0].a, 0);
-    try testing.expect(result[0].b == null);
-    try testing.expect(result[0].c != null);
-    try testing.expectEqual(result[0].c.?, 1.0);
-
-    try testing.expectEqual(result[1].a, 1);
-    try testing.expect(result[1].b != null);
-    try testing.expect(mem.eql(u8, result[1].b.?, "different field"));
-    try testing.expect(result[1].c == null);
-}
-
-test "empty yaml can be represented as void" {
-    const source = "";
-    var yaml = try Yaml.load(testing.allocator, source);
-    defer yaml.deinit();
-    const result = try yaml.parse(void);
-    try testing.expect(@TypeOf(result) == void);
-}
+};
 
-test "nonempty yaml cannot be represented as void" {
-    const source =
-        \\a: b
-    ;
+pub fn stringify(allocator: Allocator, input: anytype, writer: anytype) !void {
+    var arena = ArenaAllocator.init(allocator);
+    defer arena.deinit();
 
-    var yaml = try Yaml.load(testing.allocator, source);
-    defer yaml.deinit();
+    var maybe_value = try Value.encode(arena.allocator(), input);
 
-    try testing.expectError(Yaml.Error.TypeMismatch, yaml.parse(void));
+    if (maybe_value) |value| {
+        // TODO should we output as an explicit doc?
+        // How can we allow the user to specify this?
+        try value.stringify(writer, .{});
+    }
 }
 
-test "typed array size mismatch" {
-    const source =
-        \\- 0
-        \\- 0
-    ;
-
-    var yaml = try Yaml.load(testing.allocator, source);
-    defer yaml.deinit();
-
-    try testing.expectError(Yaml.Error.ArraySizeMismatch, yaml.parse([1]usize));
-    try testing.expectError(Yaml.Error.ArraySizeMismatch, yaml.parse([5]usize));
+test {
+    std.testing.refAllDecls(Tokenizer);
+    std.testing.refAllDecls(parse);
+    _ = @import("yaml/test.zig");
 }
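
The new top-level stringify entry point encodes an arbitrary Zig value into a Value via Value.encode and then writes it out. A minimal sketch; the struct shape and the ArrayList-backed writer are illustrative, and encode's struct and integer branches are assumed from the rest of the commit (only the tail of its switch appears in this excerpt):

test "stringify an inline struct (sketch)" {
    var buf = std.ArrayList(u8).init(std.testing.allocator);
    defer buf.deinit();

    // Concrete integer types keep comptime_int away from encode's else branch.
    try stringify(std.testing.allocator, .{ .a = @as(usize, 0), .b = @as(usize, 1) }, buf.writer());

    // The exact layout is up to Value.stringify; we only check that output was produced.
    try std.testing.expect(buf.items.len != 0);
}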