const Manifest = @This();
const std = @import("std");
const mem = std.mem;
const Allocator = std.mem.Allocator;
const assert = std.debug.assert;
const Ast = std.zig.Ast;
const testing = std.testing;
const Package = @import("../Package.zig");

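/// Maximum number of bytes allowed in a build.zig.zon manifest file.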
pub const max_bytes = 10 * 1024 * 1024;
pub const basename = "build.zig.zon";
pub const max_name_len = 32;
pub const max_version_len = 32;

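/// A single entry of the top-level `dependencies` field in build.zig.zon.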
pub const Dependency = struct {
    location: Location,
    location_tok: Ast.TokenIndex,
    location_node: Ast.Node.Index,
    hash: ?[]const u8,
    hash_tok: Ast.OptionalTokenIndex,
    hash_node: Ast.Node.OptionalIndex,
    node: Ast.Node.Index,
    name_tok: Ast.TokenIndex,
    lazy: bool,

    pub const Location = union(enum) {
        url: []const u8,
        path: []const u8,
    };
};

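/// A problem found while parsing the manifest, reported against `tok` with a
/// byte offset `off` into that token.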
pub const ErrorMessage = struct {
    msg: []const u8,
    tok: Ast.TokenIndex,
    off: u32,
};

name: []const u8,
id: u32,
version: std.SemanticVersion,
version_node: Ast.Node.Index,
dependencies: std.StringArrayHashMapUnmanaged(Dependency),
dependencies_node: Ast.Node.OptionalIndex,
paths: std.StringArrayHashMapUnmanaged(void),
minimum_zig_version: ?std.SemanticVersion,

errors: []ErrorMessage,
arena_state: std.heap.ArenaAllocator.State,

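/// Options controlling how leniently `parse` validates the manifest.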
pub const ParseOptions = struct {
    allow_missing_paths_field: bool = false,
    /// Deprecated, to be removed after 0.14.0 is tagged.
    allow_name_string: bool = true,
    /// Deprecated, to be removed after 0.14.0 is tagged.
    allow_missing_fingerprint: bool = true,
};

pub const Error = Allocator.Error;

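/// Parses `ast`, a build.zig.zon file parsed in ZON mode, into a `Manifest`.
/// Most problems are recorded in the `errors` field of the result rather than
/// returned as an error; callers should check that it is empty.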
pub fn parse(gpa: Allocator, ast: Ast, options: ParseOptions) Error!Manifest {
    const main_node_index = ast.nodeData(.root).node;

    var arena_instance = std.heap.ArenaAllocator.init(gpa);
    errdefer arena_instance.deinit();

    var p: Parse = .{
        .gpa = gpa,
        .ast = ast,
        .arena = arena_instance.allocator(),
        .errors = .{},

        .name = undefined,
        .id = 0,
        .version = undefined,
        .version_node = undefined,
        .dependencies = .{},
        .dependencies_node = .none,
        .paths = .{},
        .allow_missing_paths_field = options.allow_missing_paths_field,
        .allow_name_string = options.allow_name_string,
        .allow_missing_fingerprint = options.allow_missing_fingerprint,
        .minimum_zig_version = null,
        .buf = .{},
    };
    defer p.buf.deinit(gpa);
    defer p.errors.deinit(gpa);
    defer p.dependencies.deinit(gpa);
    defer p.paths.deinit(gpa);

    p.parseRoot(main_node_index) catch |err| switch (err) {
        error.ParseFailure => assert(p.errors.items.len > 0),
        else => |e| return e,
    };

    return .{
        .name = p.name,
        .id = p.id,
        .version = p.version,
        .version_node = p.version_node,
        .dependencies = try p.dependencies.clone(p.arena),
        .dependencies_node = p.dependencies_node,
        .paths = try p.paths.clone(p.arena),
        .minimum_zig_version = p.minimum_zig_version,
        .errors = try p.arena.dupe(ErrorMessage, p.errors.items),
        .arena_state = arena_instance.state,
    };
}

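/// Frees all memory allocated by `parse` for this `Manifest`.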
pub fn deinit(man: *Manifest, gpa: Allocator) void {
    man.arena_state.promote(gpa).deinit();
    man.* = undefined;
}

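/// Copies the parse errors recorded in `man` into `eb`, using `ast` to
/// recover source locations for each message.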
pub fn copyErrorsIntoBundle(
    man: Manifest,
    ast: Ast,
    /// ErrorBundle null-terminated string index
    src_path: u32,
    eb: *std.zig.ErrorBundle.Wip,
) Allocator.Error!void {
    for (man.errors) |msg| {
        const start_loc = ast.tokenLocation(0, msg.tok);

        try eb.addRootErrorMessage(.{
            .msg = try eb.addString(msg.msg),
            .src_loc = try eb.addSourceLocation(.{
                .src_path = src_path,
                .span_start = ast.tokenStart(msg.tok),
                .span_end = @intCast(ast.tokenStart(msg.tok) + ast.tokenSlice(msg.tok).len),
                .span_main = ast.tokenStart(msg.tok) + msg.off,
                .line = @intCast(start_loc.line),
                .column = @intCast(start_loc.column),
                .source_line = try eb.addString(ast.source[start_loc.line_start..start_loc.line_end]),
            }),
        });
    }
}

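/// Transient state used during parsing; the results are copied into the
/// returned `Manifest` at the end of `parse`.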
const Parse = struct {
    gpa: Allocator,
    ast: Ast,
    arena: Allocator,
    buf: std.ArrayList(u8),
    errors: std.ArrayList(ErrorMessage),

    name: []const u8,
    id: u32,
    version: std.SemanticVersion,
    version_node: Ast.Node.Index,
    dependencies: std.StringArrayHashMapUnmanaged(Dependency),
    dependencies_node: Ast.Node.OptionalIndex,
    paths: std.StringArrayHashMapUnmanaged(void),
    allow_missing_paths_field: bool,
    allow_name_string: bool,
    allow_missing_fingerprint: bool,
    minimum_zig_version: ?std.SemanticVersion,

    const InnerError = error{ ParseFailure, OutOfMemory };

    fn parseRoot(p: *Parse, node: Ast.Node.Index) !void {
        const ast = p.ast;
        const main_token = ast.nodeMainToken(node);

        var buf: [2]Ast.Node.Index = undefined;
        const struct_init = ast.fullStructInit(&buf, node) orelse {
            return fail(p, main_token, "expected top level expression to be a struct", .{});
        };

        var have_name = false;
        var have_version = false;
        var have_included_paths = false;
        var fingerprint: ?Package.Fingerprint = null;

        for (struct_init.ast.fields) |field_init| {
            const name_token = ast.firstToken(field_init) - 2;
            const field_name = try identifierTokenString(p, name_token);
            // We could get fancy with reflection and comptime logic here but doing
            // things manually provides an opportunity to do any additional verification
            // that is desirable on a per-field basis.
            if (mem.eql(u8, field_name, "dependencies")) {
                p.dependencies_node = field_init.toOptional();
                try parseDependencies(p, field_init);
            } else if (mem.eql(u8, field_name, "paths")) {
                have_included_paths = true;
                try parseIncludedPaths(p, field_init);
            } else if (mem.eql(u8, field_name, "name")) {
                p.name = try parseName(p, field_init);
                have_name = true;
            } else if (mem.eql(u8, field_name, "fingerprint")) {
                fingerprint = try parseFingerprint(p, field_init);
            } else if (mem.eql(u8, field_name, "version")) {
                p.version_node = field_init;
                const version_text = try parseString(p, field_init);
                if (version_text.len > max_version_len) {
                    try appendError(p, ast.nodeMainToken(field_init), "version string length {d} exceeds maximum of {d}", .{ version_text.len, max_version_len });
                }
                p.version = std.SemanticVersion.parse(version_text) catch |err| v: {
                    try appendError(p, ast.nodeMainToken(field_init), "unable to parse semantic version: {s}", .{@errorName(err)});
                    break :v undefined;
                };
                have_version = true;
            } else if (mem.eql(u8, field_name, "minimum_zig_version")) {
                const version_text = try parseString(p, field_init);
                p.minimum_zig_version = std.SemanticVersion.parse(version_text) catch |err| v: {
                    try appendError(p, ast.nodeMainToken(field_init), "unable to parse semantic version: {s}", .{@errorName(err)});
                    break :v null;
                };
            } else {
                // Ignore unknown fields so that we can add fields in future zig
                // versions without breaking older zig versions.
            }
        }

        if (!have_name) {
            try appendError(p, main_token, "missing top-level 'name' field", .{});
        } else {
            if (fingerprint) |n| {
                if (!n.validate(p.name)) {
                    return fail(p, main_token, "invalid fingerprint: 0x{x}; if this is a new or forked package, use this value: 0x{x}", .{
                        n.int(), Package.Fingerprint.generate(p.name).int(),
                    });
                }
                p.id = n.id;
            } else if (!p.allow_missing_fingerprint) {
                try appendError(p, main_token, "missing top-level 'fingerprint' field; suggested value: 0x{x}", .{
                    Package.Fingerprint.generate(p.name).int(),
                });
            } else {
                p.id = 0;
            }
        }

        if (!have_version) {
            try appendError(p, main_token, "missing top-level 'version' field", .{});
        }

        if (!have_included_paths) {
            if (p.allow_missing_paths_field) {
                try p.paths.put(p.gpa, "", {});
            } else {
                try appendError(p, main_token, "missing top-level 'paths' field", .{});
            }
        }
    }

    fn parseDependencies(p: *Parse, node: Ast.Node.Index) !void {
        const ast = p.ast;

        var buf: [2]Ast.Node.Index = undefined;
        const struct_init = ast.fullStructInit(&buf, node) orelse {
            const tok = ast.nodeMainToken(node);
            return fail(p, tok, "expected dependencies expression to be a struct", .{});
        };

        for (struct_init.ast.fields) |field_init| {
            const name_token = ast.firstToken(field_init) - 2;
            const dep_name = try identifierTokenString(p, name_token);
            const dep = try parseDependency(p, field_init);
            try p.dependencies.put(p.gpa, dep_name, dep);
        }
    }

    fn parseDependency(p: *Parse, node: Ast.Node.Index) !Dependency {
        const ast = p.ast;

        var buf: [2]Ast.Node.Index = undefined;
        const struct_init = ast.fullStructInit(&buf, node) orelse {
            const tok = ast.nodeMainToken(node);
            return fail(p, tok, "expected dependency expression to be a struct", .{});
        };

        var dep: Dependency = .{
            .location = undefined,
            .location_tok = undefined,
            .location_node = undefined,
            .hash = null,
            .hash_tok = .none,
            .hash_node = .none,
            .node = node,
            .name_tok = undefined,
            .lazy = false,
        };
        var has_location = false;

        for (struct_init.ast.fields) |field_init| {
            const name_token = ast.firstToken(field_init) - 2;
            dep.name_tok = name_token;
            const field_name = try identifierTokenString(p, name_token);
            // We could get fancy with reflection and comptime logic here but doing
            // things manually provides an opportunity to do any additional verification
            // that is desirable on a per-field basis.
            if (mem.eql(u8, field_name, "url")) {
                if (has_location) {
                    return fail(p, ast.nodeMainToken(field_init), "dependency should specify only one of 'url' and 'path' fields.", .{});
                }
                dep.location = .{
                    .url = parseString(p, field_init) catch |err| switch (err) {
                        error.ParseFailure => continue,
                        else => |e| return e,
                    },
                };
                has_location = true;
                dep.location_tok = ast.nodeMainToken(field_init);
                dep.location_node = field_init;
            } else if (mem.eql(u8, field_name, "path")) {
                if (has_location) {
                    return fail(p, ast.nodeMainToken(field_init), "dependency should specify only one of 'url' and 'path' fields.", .{});
                }
                dep.location = .{
                    .path = parseString(p, field_init) catch |err| switch (err) {
                        error.ParseFailure => continue,
                        else => |e| return e,
                    },
                };
                has_location = true;
                dep.location_tok = ast.nodeMainToken(field_init);
                dep.location_node = field_init;
            } else if (mem.eql(u8, field_name, "hash")) {
                dep.hash = parseHash(p, field_init) catch |err| switch (err) {
                    error.ParseFailure => continue,
                    else => |e| return e,
                };
                dep.hash_tok = .fromToken(ast.nodeMainToken(field_init));
                dep.hash_node = field_init.toOptional();
            } else if (mem.eql(u8, field_name, "lazy")) {
                dep.lazy = parseBool(p, field_init) catch |err| switch (err) {
                    error.ParseFailure => continue,
                    else => |e| return e,
                };
            } else {
                // Ignore unknown fields so that we can add fields in future zig
                // versions without breaking older zig versions.
            }
        }

        if (!has_location) {
            try appendError(p, ast.nodeMainToken(node), "dependency requires location field, one of 'url' or 'path'.", .{});
        }

        return dep;
    }

    fn parseIncludedPaths(p: *Parse, node: Ast.Node.Index) !void {
        const ast = p.ast;

        var buf: [2]Ast.Node.Index = undefined;
        const array_init = ast.fullArrayInit(&buf, node) orelse {
            const tok = ast.nodeMainToken(node);
            return fail(p, tok, "expected paths expression to be a list of strings", .{});
        };

        for (array_init.ast.elements) |elem_node| {
            const path_string = try parseString(p, elem_node);
            // This is normalized so that it can be used in string comparisons
            // against file system paths.
            const normalized = try std.fs.path.resolve(p.arena, &.{path_string});
            try p.paths.put(p.gpa, normalized, {});
        }
    }

    fn parseBool(p: *Parse, node: Ast.Node.Index) !bool {
        const ast = p.ast;
        if (ast.nodeTag(node) != .identifier) {
            return fail(p, ast.nodeMainToken(node), "expected identifier", .{});
        }
        const ident_token = ast.nodeMainToken(node);
        const token_bytes = ast.tokenSlice(ident_token);
        if (mem.eql(u8, token_bytes, "true")) {
            return true;
        } else if (mem.eql(u8, token_bytes, "false")) {
            return false;
        } else {
            return fail(p, ident_token, "expected boolean", .{});
        }
    }

    fn parseFingerprint(p: *Parse, node: Ast.Node.Index) !Package.Fingerprint {
        const ast = p.ast;
        const main_token = ast.nodeMainToken(node);
        if (ast.nodeTag(node) != .number_literal) {
            return fail(p, main_token, "expected integer literal", .{});
        }
        const token_bytes = ast.tokenSlice(main_token);
        const parsed = std.zig.parseNumberLiteral(token_bytes);
        switch (parsed) {
            .int => |n| return @bitCast(n),
            .big_int, .float => return fail(p, main_token, "expected u64 integer literal, found {s}", .{
                @tagName(parsed),
            }),
            .failure => |err| return fail(p, main_token, "bad integer literal: {s}", .{@tagName(err)}),
        }
    }

    fn parseName(p: *Parse, node: Ast.Node.Index) ![]const u8 {
        const ast = p.ast;
        const main_token = ast.nodeMainToken(node);

        if (p.allow_name_string and ast.nodeTag(node) == .string_literal) {
            const name = try parseString(p, node);
            if (!std.zig.isValidId(name))
                return fail(p, main_token, "name must be a valid bare zig identifier (hint: switch from string to enum literal)", .{});

            if (name.len > max_name_len)
                return fail(p, main_token, "name '{f}' exceeds max length of {d}", .{
                    std.zig.fmtId(name), max_name_len,
                });

            return name;
        }

        if (ast.nodeTag(node) != .enum_literal)
            return fail(p, main_token, "expected enum literal", .{});

        const ident_name = ast.tokenSlice(main_token);
        if (mem.startsWith(u8, ident_name, "@"))
            return fail(p, main_token, "name must be a valid bare zig identifier", .{});

        if (ident_name.len > max_name_len)
            return fail(p, main_token, "name '{f}' exceeds max length of {d}", .{
                std.zig.fmtId(ident_name), max_name_len,
            });

        return ident_name;
    }

    fn parseString(p: *Parse, node: Ast.Node.Index) ![]const u8 {
        const ast = p.ast;
        if (ast.nodeTag(node) != .string_literal) {
            return fail(p, ast.nodeMainToken(node), "expected string literal", .{});
        }
        const str_lit_token = ast.nodeMainToken(node);
        const token_bytes = ast.tokenSlice(str_lit_token);
        p.buf.clearRetainingCapacity();
        try parseStrLit(p, str_lit_token, &p.buf, token_bytes, 0);
        const duped = try p.arena.dupe(u8, p.buf.items);
        return duped;
    }

    fn parseHash(p: *Parse, node: Ast.Node.Index) ![]const u8 {
        const ast = p.ast;
        const tok = ast.nodeMainToken(node);
        const h = try parseString(p, node);

        if (h.len > Package.Hash.max_len) {
            return fail(p, tok, "hash length exceeds maximum: {d}", .{h.len});
        }

        return h;
    }

    /// TODO: try to DRY this with AstGen.identifierTokenString
    fn identifierTokenString(p: *Parse, token: Ast.TokenIndex) InnerError![]const u8 {
        const ast = p.ast;
        assert(ast.tokenTag(token) == .identifier);
        const ident_name = ast.tokenSlice(token);
        if (!mem.startsWith(u8, ident_name, "@")) {
            return ident_name;
        }
        p.buf.clearRetainingCapacity();
        try parseStrLit(p, token, &p.buf, ident_name, 1);
        const duped = try p.arena.dupe(u8, p.buf.items);
        return duped;
    }

    /// TODO: try to DRY this with AstGen.parseStrLit
    fn parseStrLit(
        p: *Parse,
        token: Ast.TokenIndex,
        buf: *std.ArrayList(u8),
        bytes: []const u8,
        offset: u32,
    ) InnerError!void {
        const raw_string = bytes[offset..];
        const result = r: {
            var aw: std.Io.Writer.Allocating = .fromArrayList(p.gpa, buf);
            defer buf.* = aw.toArrayList();
            break :r std.zig.string_literal.parseWrite(&aw.writer, raw_string) catch |err| switch (err) {
                error.WriteFailed => return error.OutOfMemory,
            };
        };
        switch (result) {
            .success => {},
            .failure => |err| try p.appendStrLitError(err, token, bytes, offset),
        }
    }

    /// TODO: try to DRY this with AstGen.failWithStrLitError
    fn appendStrLitError(
        p: *Parse,
        err: std.zig.string_literal.Error,
        token: Ast.TokenIndex,
        bytes: []const u8,
        offset: u32,
    ) Allocator.Error!void {
        const raw_string = bytes[offset..];
        switch (err) {
            .invalid_escape_character => |bad_index| {
                try p.appendErrorOff(
                    token,
                    offset + @as(u32, @intCast(bad_index)),
                    "invalid escape character: '{c}'",
                    .{raw_string[bad_index]},
                );
            },
            .expected_hex_digit => |bad_index| {
                try p.appendErrorOff(
                    token,
                    offset + @as(u32, @intCast(bad_index)),
                    "expected hex digit, found '{c}'",
                    .{raw_string[bad_index]},
                );
            },
            .empty_unicode_escape_sequence => |bad_index| {
                try p.appendErrorOff(
                    token,
                    offset + @as(u32, @intCast(bad_index)),
                    "empty unicode escape sequence",
                    .{},
                );
            },
            .expected_hex_digit_or_rbrace => |bad_index| {
                try p.appendErrorOff(
                    token,
                    offset + @as(u32, @intCast(bad_index)),
                    "expected hex digit or '}}', found '{c}'",
                    .{raw_string[bad_index]},
                );
            },
            .invalid_unicode_codepoint => |bad_index| {
                try p.appendErrorOff(
                    token,
                    offset + @as(u32, @intCast(bad_index)),
                    "unicode escape does not correspond to a valid unicode scalar value",
                    .{},
                );
            },
            .expected_lbrace => |bad_index| {
                try p.appendErrorOff(
                    token,
                    offset + @as(u32, @intCast(bad_index)),
                    "expected '{{', found '{c}'",
                    .{raw_string[bad_index]},
                );
            },
            .expected_rbrace => |bad_index| {
                try p.appendErrorOff(
                    token,
                    offset + @as(u32, @intCast(bad_index)),
                    "expected '}}', found '{c}'",
                    .{raw_string[bad_index]},
                );
            },
            .expected_single_quote => |bad_index| {
                try p.appendErrorOff(
                    token,
                    offset + @as(u32, @intCast(bad_index)),
                    "expected single quote ('), found '{c}'",
                    .{raw_string[bad_index]},
                );
            },
            .invalid_character => |bad_index| {
                try p.appendErrorOff(
                    token,
                    offset + @as(u32, @intCast(bad_index)),
                    "invalid byte in string or character literal: '{c}'",
                    .{raw_string[bad_index]},
                );
            },
            .empty_char_literal => {
                try p.appendErrorOff(token, offset, "empty character literal", .{});
            },
        }
    }

    fn fail(
        p: *Parse,
        tok: Ast.TokenIndex,
        comptime fmt: []const u8,
        args: anytype,
    ) InnerError {
        try appendError(p, tok, fmt, args);
        return error.ParseFailure;
    }

    fn appendError(p: *Parse, tok: Ast.TokenIndex, comptime fmt: []const u8, args: anytype) !void {
        return appendErrorOff(p, tok, 0, fmt, args);
    }

    fn appendErrorOff(
        p: *Parse,
        tok: Ast.TokenIndex,
        byte_offset: u32,
        comptime fmt: []const u8,
        args: anytype,
    ) Allocator.Error!void {
        try p.errors.append(p.gpa, .{
            .msg = try std.fmt.allocPrint(p.arena, fmt, args),
            .tok = tok,
            .off = byte_offset,
        });
    }
};

test "basic" {
    const gpa = testing.allocator;

    const example =
        \\.{
        \\    .name = "foo",
        \\    .version = "3.2.1",
        \\    .paths = .{""},
        \\    .dependencies = .{
        \\        .bar = .{
        \\            .url = "https://example.com/baz.tar.gz",
        \\            .hash = "1220f1b680b6065fcfc94fe777f22e73bcb7e2767e5f4d99d4255fe76ded69c7a35f",
        \\        },
        \\    },
        \\}
    ;

    var ast = try Ast.parse(gpa, example, .zon);
    defer ast.deinit(gpa);

    try testing.expect(ast.errors.len == 0);

    var manifest = try Manifest.parse(gpa, ast, .{});
    defer manifest.deinit(gpa);

    try testing.expect(manifest.errors.len == 0);
    try testing.expectEqualStrings("foo", manifest.name);

    try testing.expectEqual(@as(std.SemanticVersion, .{
        .major = 3,
        .minor = 2,
        .patch = 1,
    }), manifest.version);

    try testing.expect(manifest.dependencies.count() == 1);
    try testing.expectEqualStrings("bar", manifest.dependencies.keys()[0]);
    try testing.expectEqualStrings(
        "https://example.com/baz.tar.gz",
        manifest.dependencies.values()[0].location.url,
    );
    try testing.expectEqualStrings(
        "1220f1b680b6065fcfc94fe777f22e73bcb7e2767e5f4d99d4255fe76ded69c7a35f",
        manifest.dependencies.values()[0].hash orelse return error.TestFailed,
    );

    try testing.expect(manifest.minimum_zig_version == null);
}
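
// Additional example (sketch): a `.path` location combined with `.lazy = true`,
// following the same pattern as the "basic" test above.
test "path dependency" {
    const gpa = testing.allocator;

    const example =
        \\.{
        \\    .name = "foo",
        \\    .version = "3.2.1",
        \\    .paths = .{""},
        \\    .dependencies = .{
        \\        .bar = .{
        \\            .path = "../bar",
        \\            .lazy = true,
        \\        },
        \\    },
        \\}
    ;

    var ast = try Ast.parse(gpa, example, .zon);
    defer ast.deinit(gpa);

    try testing.expect(ast.errors.len == 0);

    var manifest = try Manifest.parse(gpa, ast, .{});
    defer manifest.deinit(gpa);

    try testing.expect(manifest.errors.len == 0);
    try testing.expect(manifest.dependencies.count() == 1);
    try testing.expectEqualStrings("../bar", manifest.dependencies.values()[0].location.path);
    try testing.expect(manifest.dependencies.values()[0].lazy);
}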

test "minimum_zig_version" {
    const gpa = testing.allocator;

    const example =
        \\.{
        \\    .name = "foo",
        \\    .version = "3.2.1",
        \\    .paths = .{""},
        \\    .minimum_zig_version = "0.11.1",
        \\}
    ;

    var ast = try Ast.parse(gpa, example, .zon);
    defer ast.deinit(gpa);

    try testing.expect(ast.errors.len == 0);

    var manifest = try Manifest.parse(gpa, ast, .{});
    defer manifest.deinit(gpa);

    try testing.expect(manifest.errors.len == 0);
    try testing.expect(manifest.dependencies.count() == 0);

    try testing.expect(manifest.minimum_zig_version != null);

    try testing.expectEqual(@as(std.SemanticVersion, .{
        .major = 0,
        .minor = 11,
        .patch = 1,
    }), manifest.minimum_zig_version.?);
}

test "minimum_zig_version - invalid version" {
    const gpa = testing.allocator;

    const example =
        \\.{
        \\    .name = "foo",
        \\    .version = "3.2.1",
        \\    .minimum_zig_version = "X.11.1",
        \\    .paths = .{""},
        \\}
    ;

    var ast = try Ast.parse(gpa, example, .zon);
    defer ast.deinit(gpa);

    try testing.expect(ast.errors.len == 0);

    var manifest = try Manifest.parse(gpa, ast, .{});
    defer manifest.deinit(gpa);

    try testing.expect(manifest.errors.len == 1);
    try testing.expect(manifest.dependencies.count() == 0);

    try testing.expect(manifest.minimum_zig_version == null);
}
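
// Additional example (sketch): with `allow_missing_paths_field = true`, a manifest
// without a `paths` field is accepted and an implicit "" entry is recorded
// instead of reporting an error.
test "allow_missing_paths_field" {
    const gpa = testing.allocator;

    const example =
        \\.{
        \\    .name = "foo",
        \\    .version = "3.2.1",
        \\}
    ;

    var ast = try Ast.parse(gpa, example, .zon);
    defer ast.deinit(gpa);

    try testing.expect(ast.errors.len == 0);

    var manifest = try Manifest.parse(gpa, ast, .{ .allow_missing_paths_field = true });
    defer manifest.deinit(gpa);

    try testing.expect(manifest.errors.len == 0);
    try testing.expect(manifest.paths.count() == 1);
    try testing.expect(manifest.paths.contains(""));
}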