Commit 0f2339f55b

Josh Wolfe <thejoshwolfe@gmail.com>
2023-06-21 01:01:34
std: json.parseFromValue() (#15981)
1 parent d2b2567
Changed files (3)
lib/std/json/dynamic_test.zig
@@ -1,6 +1,8 @@
 const std = @import("std");
 const mem = std.mem;
 const testing = std.testing;
+const ArenaAllocator = std.heap.ArenaAllocator;
+const Allocator = std.mem.Allocator;
 
 const ObjectMap = @import("dynamic.zig").ObjectMap;
 const Array = @import("dynamic.zig").Array;
@@ -9,6 +11,8 @@ const Value = @import("dynamic.zig").Value;
 const parseFromSlice = @import("static.zig").parseFromSlice;
 const parseFromSliceLeaky = @import("static.zig").parseFromSliceLeaky;
 const parseFromTokenSource = @import("static.zig").parseFromTokenSource;
+const parseFromValueLeaky = @import("static.zig").parseFromValueLeaky;
+const ParseOptions = @import("static.zig").ParseOptions;
 
 const jsonReader = @import("scanner.zig").reader;
 
@@ -240,3 +244,48 @@ test "Value.jsonStringify" {
         try testing.expectEqualSlices(u8, fbs.getWritten(), "{\"a\":\"b\"}");
     }
 }
+
+test "polymorphic parsing" {
+    if (true) return error.SkipZigTest; // See https://github.com/ziglang/zig/issues/16108
+    const doc =
+        \\{ "type": "div",
+        \\  "color": "blue",
+        \\  "children": [
+        \\    { "type": "button",
+        \\      "caption": "OK" },
+        \\    { "type": "button",
+        \\      "caption": "Cancel" } ] }
+    ;
+    const Node = union(enum) {
+        div: Div,
+        button: Button,
+        const Self = @This();
+        const Div = struct {
+            color: enum { red, blue },
+            children: []Self,
+        };
+        const Button = struct {
+            caption: []const u8,
+        };
+
+        pub fn jsonParseFromValue(allocator: Allocator, source: Value, options: ParseOptions) !@This() {
+            if (source != .object) return error.UnexpectedToken;
+            const type_value = source.object.get("type") orelse return error.UnexpectedToken; // Missing "type" field.
+            if (type_value != .string) return error.UnexpectedToken; // "type" expected to be string.
+            const type_str = type_value.string;
+            var child_options = options;
+            child_options.ignore_unknown_fields = true;
+            if (std.mem.eql(u8, type_str, "div")) return .{ .div = try parseFromValueLeaky(Div, allocator, source, child_options) };
+            if (std.mem.eql(u8, type_str, "button")) return .{ .button = try parseFromValueLeaky(Button, allocator, source, child_options) };
+            return error.UnexpectedToken; // unknown type.
+        }
+    };
+
+    var arena = ArenaAllocator.init(testing.allocator);
+    defer arena.deinit();
+    const dynamic_tree = try parseFromSliceLeaky(Value, arena.allocator(), doc, .{});
+    const tree = try parseFromValueLeaky(Node, arena.allocator(), dynamic_tree, .{});
+
+    try testing.expect(tree.div.color == .blue);
+    try testing.expectEqualStrings("Cancel", tree.div.children[1].button.caption);
+}
lib/std/json/static.zig
@@ -10,21 +10,29 @@ const AllocWhen = @import("./scanner.zig").AllocWhen;
 const default_max_value_len = @import("./scanner.zig").default_max_value_len;
 const isNumberFormattedLikeAnInteger = @import("./scanner.zig").isNumberFormattedLikeAnInteger;
 
+const Value = @import("./dynamic.zig").Value;
+const Array = @import("./dynamic.zig").Array;
+
+/// Controls how to deal with various inconsistencies between the JSON document and the Zig struct type passed in.
+/// For duplicate fields or unknown fields, set options in this struct.
+/// For missing fields, give the Zig struct fields default values.
 pub const ParseOptions = struct {
     /// Behaviour when a duplicate field is encountered.
+    /// The default is to return `error.DuplicateField`.
     duplicate_field_behavior: enum {
         use_first,
         @"error",
         use_last,
     } = .@"error",
 
-    /// If false, finding an unknown field returns an error.
+    /// If false, finding an unknown field returns `error.UnknownField`.
     ignore_unknown_fields: bool = false,
 
-    /// Passed to json.Scanner.nextAllocMax() or json.Reader.nextAllocMax().
-    /// The default for parseFromSlice() or parseFromTokenSource() with a *json.Scanner input
-    /// is the length of the input slice, which means error.ValueTooLong will never be returned.
-    /// The default for parseFromTokenSource() with a *json.Reader is default_max_value_len.
+    /// Passed to `std.json.Scanner.nextAllocMax` or `std.json.Reader.nextAllocMax`.
+    /// The default for `parseFromSlice` or `parseFromTokenSource` with a `*std.json.Scanner` input
+    /// is the length of the input slice, which means `error.ValueTooLong` will never be returned.
+    /// The default for `parseFromTokenSource` with a `*std.json.Reader` is `std.json.default_max_value_len`.
+    /// Ignored for `parseFromValue` and `parseFromValueLeaky`.
     max_value_len: ?usize = null,
 };
 
@@ -43,6 +51,7 @@ pub fn Parsed(comptime T: type) type {
 
 /// Parses the json document from `s` and returns the result packaged in a `std.json.Parsed`.
 /// You must call `deinit()` of the returned object to clean up allocated resources.
+/// If you are using a `std.heap.ArenaAllocator` or similar, consider calling `parseFromSliceLeaky` instead.
 /// Note that `error.BufferUnderrun` is not actually possible to return from this function.
 pub fn parseFromSlice(
     comptime T: type,
@@ -114,33 +123,65 @@ pub fn parseFromTokenSourceLeaky(
         }
     }
 
-    const value = try parseInternal(T, allocator, scanner_or_reader, resolved_options);
+    const value = try internalParse(T, allocator, scanner_or_reader, resolved_options);
 
     assert(.end_of_document == try scanner_or_reader.next());
 
     return value;
 }
 
+/// Like `parseFromSlice`, but the input is an already-parsed `std.json.Value` object.
+pub fn parseFromValue(
+    comptime T: type,
+    allocator: Allocator,
+    source: Value,
+    options: ParseOptions,
+) ParseFromValueError!Parsed(T) {
+    var parsed = Parsed(T){
+        .arena = try allocator.create(ArenaAllocator),
+        .value = undefined,
+    };
+    errdefer allocator.destroy(parsed.arena);
+    parsed.arena.* = ArenaAllocator.init(allocator);
+    errdefer parsed.arena.deinit();
+
+    parsed.value = try parseFromValueLeaky(T, parsed.arena.allocator(), source, options);
+
+    return parsed;
+}
+
+pub fn parseFromValueLeaky(
+    comptime T: type,
+    allocator: Allocator,
+    source: Value,
+    options: ParseOptions,
+) ParseFromValueError!T {
+    // I guess this function doesn't need to exist,
+    // but the flow of the sourcecode is easy to follow and grouped nicely with
+    // this pub redirect function near the top and the implementation near the bottom.
+    return internalParseFromValue(T, allocator, source, options);
+}
+
 /// The error set that will be returned when parsing from `*Source`.
 /// Note that this may contain `error.BufferUnderrun`, but that error will never actually be returned.
 pub fn ParseError(comptime Source: type) type {
     // A few of these will either always be present or present enough of the time that
     // omitting them is more confusing than always including them.
-    return error{
-        UnexpectedToken,
-        InvalidNumber,
-        Overflow,
-        InvalidEnumTag,
-        DuplicateField,
-        UnknownField,
-        MissingField,
-        LengthMismatch,
-    } ||
-        std.fmt.ParseIntError || std.fmt.ParseFloatError ||
-        Source.NextError || Source.PeekError || Source.AllocError;
+    return ParseFromValueError || Source.NextError || Source.PeekError || Source.AllocError;
 }
 
-fn parseInternal(
+pub const ParseFromValueError = std.fmt.ParseIntError || std.fmt.ParseFloatError || Allocator.Error || error{
+    UnexpectedToken,
+    InvalidNumber,
+    Overflow,
+    InvalidEnumTag,
+    DuplicateField,
+    UnknownField,
+    MissingField,
+    LengthMismatch,
+};
+
+fn internalParse(
     comptime T: type,
     allocator: Allocator,
     source: anytype,
@@ -170,13 +211,7 @@ fn parseInternal(
                 inline .number, .allocated_number, .string, .allocated_string => |slice| slice,
                 else => return error.UnexpectedToken,
             };
-            if (isNumberFormattedLikeAnInteger(slice))
-                return std.fmt.parseInt(T, slice, 10);
-            // Try to coerce a float to an integer.
-            const float = try std.fmt.parseFloat(f128, slice);
-            if (@round(float) != float) return error.InvalidNumber;
-            if (float > std.math.maxInt(T) or float < std.math.minInt(T)) return error.Overflow;
-            return @intFromFloat(T, float);
+            return sliceToInt(T, slice);
         },
         .Optional => |optionalInfo| {
             switch (try source.peekNextTokenType()) {
@@ -185,11 +220,11 @@ fn parseInternal(
                     return null;
                 },
                 else => {
-                    return try parseInternal(optionalInfo.child, allocator, source, options);
+                    return try internalParse(optionalInfo.child, allocator, source, options);
                 },
             }
         },
-        .Enum => |enumInfo| {
+        .Enum => {
             if (comptime std.meta.trait.hasFn("jsonParse")(T)) {
                 return T.jsonParse(allocator, source, options);
             }
@@ -200,12 +235,7 @@ fn parseInternal(
                 inline .number, .allocated_number, .string, .allocated_string => |slice| slice,
                 else => return error.UnexpectedToken,
             };
-            // Check for a named value.
-            if (std.meta.stringToEnum(T, slice)) |value| return value;
-            // Check for a numeric value.
-            if (!isNumberFormattedLikeAnInteger(slice)) return error.InvalidEnumTag;
-            const n = std.fmt.parseInt(enumInfo.tag_type, slice, 10) catch return error.InvalidEnumTag;
-            return try std.meta.intToEnum(T, n);
+            return sliceToEnum(T, slice);
         },
         .Union => |unionInfo| {
             if (comptime std.meta.trait.hasFn("jsonParse")(T)) {
@@ -226,7 +256,7 @@ fn parseInternal(
             inline for (unionInfo.fields) |u_field| {
                 if (std.mem.eql(u8, u_field.name, field_name)) {
                     // Free the name token now in case we're using an allocator that optimizes freeing the last allocated object.
-                    // (Recursing into parseInternal() might trigger more allocations.)
+                    // (Recursing into internalParse() might trigger more allocations.)
                     freeAllocated(allocator, name_token.?);
                     name_token = null;
 
@@ -237,7 +267,7 @@ fn parseInternal(
                         result = @unionInit(T, u_field.name, {});
                     } else {
                         // Recurse.
-                        result = @unionInit(T, u_field.name, try parseInternal(u_field.type, allocator, source, options));
+                        result = @unionInit(T, u_field.name, try internalParse(u_field.type, allocator, source, options));
                     }
                     break;
                 }
@@ -256,10 +286,8 @@ fn parseInternal(
                 if (.array_begin != try source.next()) return error.UnexpectedToken;
 
                 var r: T = undefined;
-                var fields_seen: usize = 0;
                 inline for (0..structInfo.fields.len) |i| {
-                    r[i] = try parseInternal(structInfo.fields[i].type, allocator, source, options);
-                    fields_seen = i + 1;
+                    r[i] = try internalParse(structInfo.fields[i].type, allocator, source, options);
                 }
 
                 if (.array_end != try source.next()) return error.UnexpectedToken;
@@ -288,7 +316,7 @@ fn parseInternal(
                     if (field.is_comptime) @compileError("comptime fields are not supported: " ++ @typeName(T) ++ "." ++ field.name);
                     if (std.mem.eql(u8, field.name, field_name)) {
                         // Free the name token now in case we're using an allocator that optimizes freeing the last allocated object.
-                        // (Recursing into parseInternal() might trigger more allocations.)
+                        // (Recursing into internalParse() might trigger more allocations.)
                         freeAllocated(allocator, name_token.?);
                         name_token = null;
 
@@ -297,14 +325,14 @@ fn parseInternal(
                                 .use_first => {
                                     // Parse and ignore the redundant value.
                                     // We don't want to skip the value, because we want type checking.
-                                    _ = try parseInternal(field.type, allocator, source, options);
+                                    _ = try internalParse(field.type, allocator, source, options);
                                     break;
                                 },
                                 .@"error" => return error.DuplicateField,
                                 .use_last => {},
                             }
                         }
-                        @field(r, field.name) = try parseInternal(field.type, allocator, source, options);
+                        @field(r, field.name) = try internalParse(field.type, allocator, source, options);
                         fields_seen[i] = true;
                         break;
                     }
@@ -318,16 +346,7 @@ fn parseInternal(
                     }
                 }
             }
-            inline for (structInfo.fields, 0..) |field, i| {
-                if (!fields_seen[i]) {
-                    if (field.default_value) |default_ptr| {
-                        const default = @ptrCast(*align(1) const field.type, default_ptr).*;
-                        @field(r, field.name) = default;
-                    } else {
-                        return error.MissingField;
-                    }
-                }
-            }
+            try fillDefaultStructValues(T, &r, &fields_seen);
             return r;
         },
 
@@ -335,7 +354,7 @@ fn parseInternal(
             switch (try source.peekNextTokenType()) {
                 .array_begin => {
                     // Typical array.
-                    return parseInternalArray(T, arrayInfo.child, arrayInfo.len, allocator, source, options);
+                    return internalParseArray(T, arrayInfo.child, arrayInfo.len, allocator, source, options);
                 },
                 .string => {
                     if (arrayInfo.child != u8) return error.UnexpectedToken;
@@ -389,7 +408,7 @@ fn parseInternal(
         .Vector => |vecInfo| {
             switch (try source.peekNextTokenType()) {
                 .array_begin => {
-                    return parseInternalArray(T, vecInfo.child, vecInfo.len, allocator, source, options);
+                    return internalParseArray(T, vecInfo.child, vecInfo.len, allocator, source, options);
                 },
                 else => return error.UnexpectedToken,
             }
@@ -399,7 +418,7 @@ fn parseInternal(
             switch (ptrInfo.size) {
                 .One => {
                     const r: *ptrInfo.child = try allocator.create(ptrInfo.child);
-                    r.* = try parseInternal(ptrInfo.child, allocator, source, options);
+                    r.* = try internalParse(ptrInfo.child, allocator, source, options);
                     return r;
                 },
                 .Slice => {
@@ -419,7 +438,7 @@ fn parseInternal(
                                 }
 
                                 try arraylist.ensureUnusedCapacity(1);
-                                arraylist.appendAssumeCapacity(try parseInternal(ptrInfo.child, allocator, source, options));
+                                arraylist.appendAssumeCapacity(try internalParse(ptrInfo.child, allocator, source, options));
                             }
 
                             if (ptrInfo.sentinel) |some| {
@@ -463,7 +482,7 @@ fn parseInternal(
     unreachable;
 }
 
-fn parseInternalArray(
+fn internalParseArray(
     comptime T: type,
     comptime Child: type,
     comptime len: comptime_int,
@@ -476,7 +495,7 @@ fn parseInternalArray(
     var r: T = undefined;
     var i: usize = 0;
     while (i < len) : (i += 1) {
-        r[i] = try parseInternal(Child, allocator, source, options);
+        r[i] = try internalParse(Child, allocator, source, options);
     }
 
     if (.array_end != try source.next()) return error.UnexpectedToken;
@@ -484,6 +503,271 @@ fn parseInternalArray(
     return r;
 }
 
+fn internalParseFromValue(
+    comptime T: type,
+    allocator: Allocator,
+    source: Value,
+    options: ParseOptions,
+) ParseFromValueError!T {
+    switch (@typeInfo(T)) {
+        .Bool => {
+            switch (source) {
+                .bool => |b| return b,
+                else => return error.UnexpectedToken,
+            }
+        },
+        .Float, .ComptimeFloat => {
+            switch (source) {
+                .float => |f| return @floatCast(T, f),
+                .integer => |i| return @floatFromInt(T, i),
+                .number_string, .string => |s| return std.fmt.parseFloat(T, s),
+                else => return error.UnexpectedToken,
+            }
+        },
+        .Int, .ComptimeInt => {
+            switch (source) {
+                .float => |f| {
+                    if (@round(f) != f) return error.InvalidNumber;
+                    if (f > std.math.maxInt(T)) return error.Overflow;
+                    if (f < std.math.minInt(T)) return error.Overflow;
+                    return @intFromFloat(T, f);
+                },
+                .integer => |i| {
+                    if (i > std.math.maxInt(T)) return error.Overflow;
+                    if (i < std.math.minInt(T)) return error.Overflow;
+                    return @intCast(T, i);
+                },
+                .number_string, .string => |s| {
+                    return sliceToInt(T, s);
+                },
+                else => return error.UnexpectedToken,
+            }
+        },
+        .Optional => |optionalInfo| {
+            switch (source) {
+                .null => return null,
+                else => return try internalParseFromValue(optionalInfo.child, allocator, source, options),
+            }
+        },
+        .Enum => {
+            if (comptime std.meta.trait.hasFn("jsonParseFromValue")(T)) {
+                return T.jsonParseFromValue(allocator, source, options);
+            }
+
+            switch (source) {
+                .float => return error.InvalidEnumTag,
+                .integer => |i| return std.meta.intToEnum(T, i),
+                .number_string, .string => |s| return sliceToEnum(T, s),
+                else => return error.UnexpectedToken,
+            }
+        },
+        .Union => |unionInfo| {
+            if (comptime std.meta.trait.hasFn("jsonParseFromValue")(T)) {
+                return T.jsonParseFromValue(allocator, source, options);
+            }
+
+            if (unionInfo.tag_type == null) @compileError("Unable to parse into untagged union '" ++ @typeName(T) ++ "'");
+
+            if (source != .object) return error.UnexpectedToken;
+            if (source.object.count() != 1) return error.UnexpectedToken;
+
+            var it = source.object.iterator();
+            const kv = it.next().?;
+            const field_name = kv.key_ptr.*;
+
+            inline for (unionInfo.fields) |u_field| {
+                if (std.mem.eql(u8, u_field.name, field_name)) {
+                    if (u_field.type == void) {
+                        // void isn't really a json type, but we can support void payload union tags with {} as a value.
+                        if (kv.value_ptr.* != .object) return error.UnexpectedToken;
+                        if (kv.value_ptr.*.object.count() != 0) return error.UnexpectedToken;
+                        return @unionInit(T, u_field.name, {});
+                    }
+                    // Recurse.
+                    return @unionInit(T, u_field.name, try internalParseFromValue(u_field.type, allocator, kv.value_ptr.*, options));
+                }
+            }
+            // Didn't match anything.
+            return error.UnknownField;
+        },
+
+        .Struct => |structInfo| {
+            if (structInfo.is_tuple) {
+                if (source != .array) return error.UnexpectedToken;
+                if (source.array.items.len != structInfo.fields.len) return error.UnexpectedToken;
+
+                var r: T = undefined;
+                inline for (0..structInfo.fields.len, source.array.items) |i, item| {
+                    r[i] = try internalParseFromValue(structInfo.fields[i].type, allocator, item, options);
+                }
+
+                return r;
+            }
+
+            if (comptime std.meta.trait.hasFn("jsonParseFromValue")(T)) {
+                return T.jsonParseFromValue(allocator, source, options);
+            }
+
+            if (source != .object) return error.UnexpectedToken;
+
+            var r: T = undefined;
+            var fields_seen = [_]bool{false} ** structInfo.fields.len;
+
+            var it = source.object.iterator();
+            while (it.next()) |kv| {
+                const field_name = kv.key_ptr.*;
+
+                inline for (structInfo.fields, 0..) |field, i| {
+                    if (field.is_comptime) @compileError("comptime fields are not supported: " ++ @typeName(T) ++ "." ++ field.name);
+                    if (std.mem.eql(u8, field.name, field_name)) {
+                        if (fields_seen[i]) {
+                            switch (options.duplicate_field_behavior) {
+                                .use_first => {
+                                    // Parse and ignore the redundant value.
+                                    // We don't want to skip the value, because we want type checking.
+                                    _ = try internalParseFromValue(field.type, allocator, kv.value_ptr.*, options);
+                                    break;
+                                },
+                                .@"error" => return error.DuplicateField,
+                                .use_last => {},
+                            }
+                        }
+                        @field(r, field.name) = try internalParseFromValue(field.type, allocator, kv.value_ptr.*, options);
+                        fields_seen[i] = true;
+                        break;
+                    }
+                } else {
+                    // Didn't match anything.
+                    if (!options.ignore_unknown_fields) return error.UnknownField;
+                }
+            }
+            try fillDefaultStructValues(T, &r, &fields_seen);
+            return r;
+        },
+
+        .Array => |arrayInfo| {
+            switch (source) {
+                .array => |array| {
+                    // Typical array.
+                    return internalParseArrayFromArrayValue(T, arrayInfo.child, arrayInfo.len, allocator, array, options);
+                },
+                .string => |s| {
+                    if (arrayInfo.child != u8) return error.UnexpectedToken;
+                    // Fixed-length string.
+
+                    if (s.len != arrayInfo.len) return error.LengthMismatch;
+
+                    var r: T = undefined;
+                    @memcpy(r[0..], s);
+                    return r;
+                },
+
+                else => return error.UnexpectedToken,
+            }
+        },
+
+        .Vector => |vecInfo| {
+            switch (source) {
+                .array => |array| {
+                    return internalParseArrayFromArrayValue(T, vecInfo.child, vecInfo.len, allocator, array, options);
+                },
+                else => return error.UnexpectedToken,
+            }
+        },
+
+        .Pointer => |ptrInfo| {
+            switch (ptrInfo.size) {
+                .One => {
+                    const r: *ptrInfo.child = try allocator.create(ptrInfo.child);
+                    r.* = try internalParseFromValue(ptrInfo.child, allocator, source, options);
+                    return r;
+                },
+                .Slice => {
+                    switch (source) {
+                        .array => |array| {
+                            const r = if (ptrInfo.sentinel) |sentinel_ptr|
+                                try allocator.allocSentinel(ptrInfo.child, array.items.len, @ptrCast(*align(1) const ptrInfo.child, sentinel_ptr).*)
+                            else
+                                try allocator.alloc(ptrInfo.child, array.items.len);
+
+                            for (array.items, r) |item, *dest| {
+                                dest.* = try internalParseFromValue(ptrInfo.child, allocator, item, options);
+                            }
+
+                            return r;
+                        },
+                        .string => |s| {
+                            if (ptrInfo.child != u8) return error.UnexpectedToken;
+                            // Dynamic length string.
+
+                            const r = if (ptrInfo.sentinel) |sentinel_ptr|
+                                try allocator.allocSentinel(ptrInfo.child, s.len, @ptrCast(*align(1) const ptrInfo.child, sentinel_ptr).*)
+                            else
+                                try allocator.alloc(ptrInfo.child, s.len);
+                            @memcpy(r[0..], s);
+
+                            return r;
+                        },
+                        else => return error.UnexpectedToken,
+                    }
+                },
+                else => @compileError("Unable to parse into type '" ++ @typeName(T) ++ "'"),
+            }
+        },
+        else => @compileError("Unable to parse into type '" ++ @typeName(T) ++ "'"),
+    }
+}
+
+fn internalParseArrayFromArrayValue(
+    comptime T: type,
+    comptime Child: type,
+    comptime len: comptime_int,
+    allocator: Allocator,
+    array: Array,
+    options: ParseOptions,
+) !T {
+    if (array.items.len != len) return error.LengthMismatch;
+
+    var r: T = undefined;
+    for (array.items, 0..) |item, i| {
+        r[i] = try internalParseFromValue(Child, allocator, item, options);
+    }
+
+    return r;
+}
+
+fn sliceToInt(comptime T: type, slice: []const u8) !T {
+    if (isNumberFormattedLikeAnInteger(slice))
+        return std.fmt.parseInt(T, slice, 10);
+    // Try to coerce a float to an integer.
+    const float = try std.fmt.parseFloat(f128, slice);
+    if (@round(float) != float) return error.InvalidNumber;
+    if (float > std.math.maxInt(T) or float < std.math.minInt(T)) return error.Overflow;
+    return @intCast(T, @intFromFloat(i128, float));
+}
+
+fn sliceToEnum(comptime T: type, slice: []const u8) !T {
+    // Check for a named value.
+    if (std.meta.stringToEnum(T, slice)) |value| return value;
+    // Check for a numeric value.
+    if (!isNumberFormattedLikeAnInteger(slice)) return error.InvalidEnumTag;
+    const n = std.fmt.parseInt(@typeInfo(T).Enum.tag_type, slice, 10) catch return error.InvalidEnumTag;
+    return std.meta.intToEnum(T, n);
+}
+
+fn fillDefaultStructValues(comptime T: type, r: *T, fields_seen: *[@typeInfo(T).Struct.fields.len]bool) !void {
+    inline for (@typeInfo(T).Struct.fields, 0..) |field, i| {
+        if (!fields_seen[i]) {
+            if (field.default_value) |default_ptr| {
+                const default = @ptrCast(*align(1) const field.type, default_ptr).*;
+                @field(r, field.name) = default;
+            } else {
+                return error.MissingField;
+            }
+        }
+    }
+}
+
 fn freeAllocated(allocator: Allocator, token: Token) void {
     switch (token) {
         .allocated_number, .allocated_string => |slice| {
lib/std/json/static_test.zig
@@ -1,14 +1,373 @@
 const std = @import("std");
 const testing = std.testing;
 const ArenaAllocator = std.heap.ArenaAllocator;
+const Allocator = std.mem.Allocator;
 
 const parseFromSlice = @import("./static.zig").parseFromSlice;
 const parseFromSliceLeaky = @import("./static.zig").parseFromSliceLeaky;
 const parseFromTokenSource = @import("./static.zig").parseFromTokenSource;
 const parseFromTokenSourceLeaky = @import("./static.zig").parseFromTokenSourceLeaky;
+const parseFromValue = @import("./static.zig").parseFromValue;
+const parseFromValueLeaky = @import("./static.zig").parseFromValueLeaky;
 const ParseOptions = @import("./static.zig").ParseOptions;
+
 const JsonScanner = @import("./scanner.zig").Scanner;
 const jsonReader = @import("./scanner.zig").reader;
+const Diagnostics = @import("./scanner.zig").Diagnostics;
+
+const Value = @import("./dynamic.zig").Value;
+
/// Fixture covering every primitive scalar category the parser handles:
/// bool, two float widths, and signed/unsigned integers from 0 bits up to
/// wider than 128 bits (i130).
const Primitives = struct {
    bool: bool,
    // f16, f80, f128: don't work in std.fmt.parseFloat(T).
    f32: f32,
    f64: f64,
    u0: u0,
    i0: i0,
    u1: u1,
    i1: i1,
    u8: u8,
    i8: i8,
    i130: i130,
};
+
// All-zero/false value of Primitives, with two JSON documents that must both
// decode to it: doc_0 uses integer literals, doc_1 uses float-style literals.
const primitives_0 = Primitives{
    .bool = false,
    .f32 = 0,
    .f64 = 0,
    .u0 = 0,
    .i0 = 0,
    .u1 = 0,
    .i1 = 0,
    .u8 = 0,
    .i8 = 0,
    .i130 = 0,
};
const primitives_0_doc_0 =
    \\{
    \\  "bool": false,
    \\  "f32": 0,
    \\  "f64": 0,
    \\  "u0": 0,
    \\  "i0": 0,
    \\  "u1": 0,
    \\  "i1": 0,
    \\  "u8": 0,
    \\  "i8": 0,
    \\  "i130": 0
    \\}
;
const primitives_0_doc_1 = // looks like a float.
    \\{
    \\  "bool": false,
    \\  "f32": 0.0,
    \\  "f64": 0.0,
    \\  "u0": 0.0,
    \\  "i0": 0.0,
    \\  "u1": 0.0,
    \\  "i1": 0.0,
    \\  "u8": 0.0,
    \\  "i8": 0.0,
    \\  "i130": 0.0
    \\}
;
+
// Extreme values for each primitive field. doc_1 perturbs the large float
// fields by one so that decoding exercises float rounding back to the same
// representable values.
const primitives_1 = Primitives{
    .bool = true,
    .f32 = 1073741824,
    .f64 = 1152921504606846976,
    .u0 = 0,
    .i0 = 0,
    .u1 = 1,
    .i1 = -1,
    .u8 = 255,
    .i8 = -128,
    .i130 = -680564733841876926926749214863536422911,
};
const primitives_1_doc_0 =
    \\{
    \\  "bool": true,
    \\  "f32": 1073741824,
    \\  "f64": 1152921504606846976,
    \\  "u0": 0,
    \\  "i0": 0,
    \\  "u1": 1,
    \\  "i1": -1,
    \\  "u8": 255,
    \\  "i8": -128,
    \\  "i130": -680564733841876926926749214863536422911
    \\}
;
const primitives_1_doc_1 = // float rounding.
    \\{
    \\  "bool": true,
    \\  "f32": 1073741825,
    \\  "f64": 1152921504606846977,
    \\  "u0": 0,
    \\  "i0": 0,
    \\  "u1": 1,
    \\  "i1": -1,
    \\  "u8": 255,
    \\  "i8": -128,
    \\  "i130": -680564733841876926926749214863536422911
    \\}
;
+
/// Fixture covering aggregate/indirect types: optionals, arrays, vectors,
/// single-item pointers (mutable and const), and slices with and without a
/// 0 sentinel.
const Aggregates = struct {
    optional: ?i32,
    array: [4]i32,
    vector: @Vector(4, i32),
    pointer: *i32,
    pointer_const: *const i32,
    slice: []i32,
    slice_const: []const i32,
    slice_sentinel: [:0]i32,
    slice_sentinel_const: [:0]const i32,
};
+
// Backing storage for the pointer/slice fields of the Aggregates fixtures.
// The `var` declarations exist because Aggregates holds non-const pointers
// and slices, which cannot point at const data.
var zero: i32 = 0;
const zero_const: i32 = 0;
var array_of_zeros: [4:0]i32 = [_:0]i32{ 0, 0, 0, 0 };
var one: i32 = 1;
const one_const: i32 = 1;
var array_countdown: [4:0]i32 = [_:0]i32{ 4, 3, 2, 1 };
+
// Aggregates value where every element is zero, empty, or null, paired with
// the JSON document that should decode to it.
const aggregates_0 = Aggregates{
    .optional = null,
    .array = [4]i32{ 0, 0, 0, 0 },
    .vector = @Vector(4, i32){ 0, 0, 0, 0 },
    .pointer = &zero,
    .pointer_const = &zero_const,
    .slice = array_of_zeros[0..0],
    .slice_const = &[_]i32{},
    .slice_sentinel = array_of_zeros[0..0 :0],
    .slice_sentinel_const = &[_:0]i32{},
};
const aggregates_0_doc =
    \\{
    \\  "optional": null,
    \\  "array": [0, 0, 0, 0],
    \\  "vector": [0, 0, 0, 0],
    \\  "pointer": 0,
    \\  "pointer_const": 0,
    \\  "slice": [],
    \\  "slice_const": [],
    \\  "slice_sentinel": [],
    \\  "slice_sentinel_const": []
    \\}
;
+
// Aggregates value with distinct nonzero elements, paired with the JSON
// document that should decode to it.
const aggregates_1 = Aggregates{
    .optional = 1,
    .array = [4]i32{ 1, 2, 3, 4 },
    .vector = @Vector(4, i32){ 1, 2, 3, 4 },
    .pointer = &one,
    .pointer_const = &one_const,
    .slice = array_countdown[0..],
    .slice_const = array_countdown[0..],
    .slice_sentinel = array_countdown[0.. :0],
    .slice_sentinel_const = array_countdown[0.. :0],
};
const aggregates_1_doc =
    \\{
    \\  "optional": 1,
    \\  "array": [1, 2, 3, 4],
    \\  "vector": [1, 2, 3, 4],
    \\  "pointer": 1,
    \\  "pointer_const": 1,
    \\  "slice": [4, 3, 2, 1],
    \\  "slice_const": [4, 3, 2, 1],
    \\  "slice_sentinel": [4, 3, 2, 1],
    \\  "slice_sentinel_const": [4, 3, 2, 1]
    \\}
;
+
/// Fixture covering the u8 sequence types that JSON strings may decode into:
/// slices (mutable/const, with and without 0 sentinel) and fixed arrays.
const Strings = struct {
    slice_u8: []u8,
    slice_const_u8: []const u8,
    array_u8: [4]u8,
    slice_sentinel_u8: [:0]u8,
    slice_const_sentinel_u8: [:0]const u8,
    array_sentinel_u8: [4:0]u8,
};
+
// Mutable backing bytes for the non-const string fields.
var abcd = [4:0]u8{ 'a', 'b', 'c', 'd' };
// One Strings value with two equivalent documents: doc_0 uses JSON string
// syntax, doc_1 spells the same bytes as an array of integers (97..100 is
// "abcd" in ASCII).
const strings_0 = Strings{
    .slice_u8 = abcd[0..],
    .slice_const_u8 = "abcd",
    .array_u8 = [4]u8{ 'a', 'b', 'c', 'd' },
    .slice_sentinel_u8 = abcd[0..],
    .slice_const_sentinel_u8 = "abcd",
    .array_sentinel_u8 = [4:0]u8{ 'a', 'b', 'c', 'd' },
};
const strings_0_doc_0 =
    \\{
    \\  "slice_u8": "abcd",
    \\  "slice_const_u8": "abcd",
    \\  "array_u8": "abcd",
    \\  "slice_sentinel_u8": "abcd",
    \\  "slice_const_sentinel_u8": "abcd",
    \\  "array_sentinel_u8": "abcd"
    \\}
;
const strings_0_doc_1 =
    \\{
    \\  "slice_u8": [97, 98, 99, 100],
    \\  "slice_const_u8": [97, 98, 99, 100],
    \\  "array_u8": [97, 98, 99, 100],
    \\  "slice_sentinel_u8": [97, 98, 99, 100],
    \\  "slice_const_sentinel_u8": [97, 98, 99, 100],
    \\  "array_sentinel_u8": [97, 98, 99, 100]
    \\}
;
+
/// Fixture covering types declared inside a struct namespace: packed structs,
/// tagged unions, enums, and types that override parsing through the
/// jsonParse (streaming) and jsonParseFromValue (Value-based) hooks.
const Subnamespaces = struct {
    packed_struct: packed struct { a: u32, b: u32 },
    union_enum: union(enum) { i: i32, s: []const u8, v },
    inferred_enum: enum { a, b },
    explicit_enum: enum(u8) { a = 0, b = 1 },

    // The custom hooks below ignore their input (jsonParse consumes it with
    // skipValue) and return a fixed value, so the fixture document can put
    // `null` in these fields.
    custom_struct: struct {
        pub fn jsonParse(allocator: Allocator, source: anytype, options: ParseOptions) !@This() {
            _ = allocator;
            _ = options;
            try source.skipValue();
            return @This(){};
        }
        pub fn jsonParseFromValue(allocator: Allocator, source: Value, options: ParseOptions) !@This() {
            _ = allocator;
            _ = source;
            _ = options;
            return @This(){};
        }
    },
    custom_union: union(enum) {
        i: i32,
        s: []const u8,
        pub fn jsonParse(allocator: Allocator, source: anytype, options: ParseOptions) !@This() {
            _ = allocator;
            _ = options;
            try source.skipValue();
            return @This(){ .i = 0 };
        }
        pub fn jsonParseFromValue(allocator: Allocator, source: Value, options: ParseOptions) !@This() {
            _ = allocator;
            _ = source;
            _ = options;
            return @This(){ .i = 0 };
        }
    },
    custom_enum: enum {
        a,
        b,
        pub fn jsonParse(allocator: Allocator, source: anytype, options: ParseOptions) !@This() {
            _ = allocator;
            _ = options;
            try source.skipValue();
            return .a;
        }
        pub fn jsonParseFromValue(allocator: Allocator, source: Value, options: ParseOptions) !@This() {
            _ = allocator;
            _ = source;
            _ = options;
            return .a;
        }
    },
};
+
// Expected Subnamespaces value for subnamespaces_0_doc. The three custom_*
// fields carry `null` in the document because their parse hooks ignore the
// input and always return the fixed values asserted here.
const subnamespaces_0 = Subnamespaces{
    .packed_struct = .{ .a = 0, .b = 0 },
    .union_enum = .{ .i = 0 },
    .inferred_enum = .a,
    .explicit_enum = .a,
    .custom_struct = .{},
    .custom_union = .{ .i = 0 },
    .custom_enum = .a,
};
const subnamespaces_0_doc =
    \\{
    \\  "packed_struct": {"a": 0, "b": 0},
    \\  "union_enum": {"i": 0},
    \\  "inferred_enum": "a",
    \\  "explicit_enum": "a",
    \\  "custom_struct": null,
    \\  "custom_union": null,
    \\  "custom_enum": null
    \\}
;
+
/// Round-trips `doc` through every public parse entry point and checks that
/// each produces `expected`: managed (Parsed) and leaky (arena) variants,
/// slice/scanner/reader token sources, and Value-based parsing.
fn testAllParseFunctions(comptime T: type, expected: T, doc: []const u8) !void {
    // The diagnostics-enabled pass runs first so a SyntaxError (or similar)
    // reports where in the document it happened.
    {
        var scanner = JsonScanner.initCompleteInput(testing.allocator, doc);
        defer scanner.deinit();
        var diagnostics = Diagnostics{};
        scanner.enableDiagnostics(&diagnostics);
        var result = parseFromTokenSource(T, testing.allocator, &scanner, .{}) catch |e| {
            std.debug.print("at line,col: {}:{}\n", .{ diagnostics.getLine(), diagnostics.getColumn() });
            return e;
        };
        defer result.deinit();
        try testing.expectEqualDeep(expected, result.value);
    }
    // Managed variants: from a slice, then from a streaming reader.
    {
        const result = try parseFromSlice(T, testing.allocator, doc, .{});
        defer result.deinit();
        try testing.expectEqualDeep(expected, result.value);
    }
    {
        var fbs = std.io.fixedBufferStream(doc);
        var json_reader = jsonReader(testing.allocator, fbs.reader());
        defer json_reader.deinit();
        var result = try parseFromTokenSource(T, testing.allocator, &json_reader, .{});
        defer result.deinit();
        try testing.expectEqualDeep(expected, result.value);
    }

    // Leaky variants all share one arena, freed at scope exit.
    var arena = ArenaAllocator.init(testing.allocator);
    defer arena.deinit();
    try testing.expectEqualDeep(expected, try parseFromSliceLeaky(T, arena.allocator(), doc, .{}));
    {
        var scanner = JsonScanner.initCompleteInput(testing.allocator, doc);
        defer scanner.deinit();
        try testing.expectEqualDeep(expected, try parseFromTokenSourceLeaky(T, arena.allocator(), &scanner, .{}));
    }
    {
        var fbs = std.io.fixedBufferStream(doc);
        var json_reader = jsonReader(testing.allocator, fbs.reader());
        defer json_reader.deinit();
        try testing.expectEqualDeep(expected, try parseFromTokenSourceLeaky(T, arena.allocator(), &json_reader, .{}));
    }

    // Finally, go through the dynamic Value representation.
    const parsed_dynamic = try parseFromSlice(Value, testing.allocator, doc, .{});
    defer parsed_dynamic.deinit();
    {
        const result = try parseFromValue(T, testing.allocator, parsed_dynamic.value, .{});
        defer result.deinit();
        try testing.expectEqualDeep(expected, result.value);
    }
    try testing.expectEqualDeep(expected, try parseFromValueLeaky(T, arena.allocator(), parsed_dynamic.value, .{}));
}
+
test "test all types" {
    if (true) return error.SkipZigTest; // See https://github.com/ziglang/zig/issues/16108
    // Each call checks one fixture value against one JSON document across
    // every parse entry point (see testAllParseFunctions). Fixtures with two
    // documents verify that both spellings decode to the same value.
    try testAllParseFunctions(Primitives, primitives_0, primitives_0_doc_0);
    try testAllParseFunctions(Primitives, primitives_0, primitives_0_doc_1);
    try testAllParseFunctions(Primitives, primitives_1, primitives_1_doc_0);
    try testAllParseFunctions(Primitives, primitives_1, primitives_1_doc_1);

    try testAllParseFunctions(Aggregates, aggregates_0, aggregates_0_doc);
    try testAllParseFunctions(Aggregates, aggregates_1, aggregates_1_doc);

    try testAllParseFunctions(Strings, strings_0, strings_0_doc_0);
    try testAllParseFunctions(Strings, strings_0, strings_0_doc_1);

    try testAllParseFunctions(Subnamespaces, subnamespaces_0, subnamespaces_0_doc);
}
 
 test "parse" {
     try testing.expectEqual(false, try parseFromSliceLeaky(bool, testing.allocator, "false", .{}));