const std = @import("std");
const builtin = @import("builtin");
const fs = std.fs;
const process = std.process;
const Progress = std.Progress;
const print = std.debug.print;
const mem = std.mem;
const testing = std.testing;
const Allocator = std.mem.Allocator;
const ArrayList = std.ArrayList;
const getExternalExecutor = std.zig.system.getExternalExecutor;
const fatal = std.process.fatal;
const Writer = std.Io.Writer;

const max_doc_file_size = 10 * 1024 * 1024;

const obj_ext = builtin.object_format.fileExt(builtin.cpu.arch);

const usage =
    \\Usage: docgen [options] input output
    \\
    \\   Generates an HTML document from a docgen template.
    \\
    \\Options:
    \\   --code-dir dir         Path to directory containing code example outputs
    \\   -h, --help             Print this help and exit
    \\
;

pub fn main() !void {
    var arena_instance = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena_instance.deinit();

    const arena = arena_instance.allocator();

    var args_it = try process.argsWithAllocator(arena);
    if (!args_it.skip()) @panic("expected self arg");

    const gpa = arena;

    var threaded: std.Io.Threaded = .init(gpa);
    defer threaded.deinit();
    const io = threaded.io();

    var opt_code_dir: ?[]const u8 = null;
    var opt_input: ?[]const u8 = null;
    var opt_output: ?[]const u8 = null;

    while (args_it.next()) |arg| {
        if (mem.startsWith(u8, arg, "-")) {
            if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) {
                try fs.File.stdout().writeAll(usage);
                process.exit(0);
            } else if (mem.eql(u8, arg, "--code-dir")) {
                if (args_it.next()) |param| {
                    opt_code_dir = param;
                } else {
                    fatal("expected parameter after --code-dir", .{});
                }
            } else {
                fatal("unrecognized option: '{s}'", .{arg});
            }
        } else if (opt_input == null) {
            opt_input = arg;
        } else if (opt_output == null) {
            opt_output = arg;
        } else {
            fatal("unexpected positional argument: '{s}'", .{arg});
        }
    }
    const input_path = opt_input orelse fatal("missing input file", .{});
    const output_path = opt_output orelse fatal("missing output file", .{});
    const code_dir_path = opt_code_dir orelse fatal("missing --code-dir argument", .{});

    var in_file = try fs.cwd().openFile(input_path, .{});
    defer in_file.close();

    var out_file = try fs.cwd().createFile(output_path, .{});
    defer out_file.close();
    var out_file_buffer: [4096]u8 = undefined;
    var out_file_writer = out_file.writer(&out_file_buffer);

    var code_dir = try fs.cwd().openDir(code_dir_path, .{});
    defer code_dir.close();

    var in_file_reader = in_file.reader(io, &.{});
    const input_file_bytes = try in_file_reader.interface.allocRemaining(arena, .limited(max_doc_file_size));

    var tokenizer = Tokenizer.init(input_path, input_file_bytes);
    var toc = try genToc(arena, &tokenizer);

    try genHtml(arena, &tokenizer, &toc, code_dir, &out_file_writer.interface);
    try out_file_writer.end();
}

const Token = struct {
    id: Id,
    start: usize,
    end: usize,

    const Id = enum {
        invalid,
        content,
        bracket_open,
        tag_content,
        separator,
        bracket_close,
        eof,
    };
};

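/// Splits a docgen template into a stream of tokens: plain content, and the
/// pieces of {#tag_name|param#} tags.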
const Tokenizer = struct {
    buffer: []const u8,
    index: usize,
    state: State,
    source_file_name: []const u8,

    const State = enum {
        start,
        l_bracket,
        hash,
        tag_name,
        eof,
    };

    fn init(source_file_name: []const u8, buffer: []const u8) Tokenizer {
        return Tokenizer{
            .buffer = buffer,
            .index = 0,
            .state = .start,
            .source_file_name = source_file_name,
        };
    }

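    /// Returns the next token, advancing the tokenizer's position and state;
    /// once the end of the buffer is reached, every call returns .eof.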
    fn next(self: *Tokenizer) Token {
        var result = Token{
            .id = .eof,
            .start = self.index,
            .end = undefined,
        };
        while (self.index < self.buffer.len) : (self.index += 1) {
            const c = self.buffer[self.index];
            switch (self.state) {
                .start => switch (c) {
                    '{' => {
                        self.state = .l_bracket;
                    },
                    else => {
                        result.id = .content;
                    },
                },
                .l_bracket => switch (c) {
                    '#' => {
                        if (result.id != .eof) {
                            self.index -= 1;
                            self.state = .start;
                            break;
                        } else {
                            result.id = .bracket_open;
                            self.index += 1;
                            self.state = .tag_name;
                            break;
                        }
                    },
                    else => {
                        result.id = .content;
                        self.state = .start;
                    },
                },
                .tag_name => switch (c) {
                    '|' => {
                        if (result.id != .eof) {
                            break;
                        } else {
                            result.id = .separator;
                            self.index += 1;
                            break;
                        }
                    },
                    '#' => {
                        self.state = .hash;
                    },
                    else => {
                        result.id = .tag_content;
                    },
                },
                .hash => switch (c) {
                    '}' => {
                        if (result.id != .eof) {
                            self.index -= 1;
                            self.state = .tag_name;
                            break;
                        } else {
                            result.id = .bracket_close;
                            self.index += 1;
                            self.state = .start;
                            break;
                        }
                    },
                    else => {
                        result.id = .tag_content;
                        self.state = .tag_name;
                    },
                },
                .eof => unreachable,
            }
        } else {
            switch (self.state) {
                .start, .l_bracket, .eof => {},
                else => {
                    result.id = .invalid;
                },
            }
            self.state = .eof;
        }
        result.end = self.index;
        return result;
    }

    const Location = struct {
        line: usize,
        column: usize,
        line_start: usize,
        line_end: usize,
    };

    fn getTokenLocation(self: *Tokenizer, token: Token) Location {
        var loc = Location{
            .line = 0,
            .column = 0,
            .line_start = 0,
            .line_end = 0,
        };
        for (self.buffer, 0..) |c, i| {
            if (i == token.start) {
                loc.line_end = i;
                while (loc.line_end < self.buffer.len and self.buffer[loc.line_end] != '\n') : (loc.line_end += 1) {}
                return loc;
            }
            if (c == '\n') {
                loc.line += 1;
                loc.column = 0;
                loc.line_start = i + 1;
            } else {
                loc.column += 1;
            }
        }
        return loc;
    }
};
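
// A small example of the tokenizer contract, following the state machine in
// `next` above: plain text becomes a .content token, and "{#name#}" becomes
// .bracket_open, .tag_content, .bracket_close.
test "Tokenizer basic template" {
    var t = Tokenizer.init("test.html", "abc{#nav#}");

    const content = t.next();
    try testing.expectEqual(Token.Id.content, content.id);
    try testing.expectEqualStrings("abc", t.buffer[content.start..content.end]);

    try testing.expectEqual(Token.Id.bracket_open, t.next().id);

    const tag = t.next();
    try testing.expectEqual(Token.Id.tag_content, tag.id);
    try testing.expectEqualStrings("nav", t.buffer[tag.start..tag.end]);

    try testing.expectEqual(Token.Id.bracket_close, t.next().id);
    try testing.expectEqual(Token.Id.eof, t.next().id);
}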

fn parseError(tokenizer: *Tokenizer, token: Token, comptime fmt: []const u8, args: anytype) anyerror {
    const loc = tokenizer.getTokenLocation(token);
    const args_prefix = .{ tokenizer.source_file_name, loc.line + 1, loc.column + 1 };
    print("{s}:{d}:{d}: error: " ++ fmt ++ "\n", args_prefix ++ args);
    if (loc.line_start <= loc.line_end) {
        print("{s}\n", .{tokenizer.buffer[loc.line_start..loc.line_end]});
        {
            var i: usize = 0;
            while (i < loc.column) : (i += 1) {
                print(" ", .{});
            }
        }
        {
            const caret_count = @min(token.end, loc.line_end) - token.start;
            var i: usize = 0;
            while (i < caret_count) : (i += 1) {
                print("~", .{});
            }
        }
        print("\n", .{});
    }
    return error.ParseError;
}

fn assertToken(tokenizer: *Tokenizer, token: Token, id: Token.Id) !void {
    if (token.id != id) {
        return parseError(tokenizer, token, "expected {s}, found {s}", .{ @tagName(id), @tagName(token.id) });
    }
}

fn eatToken(tokenizer: *Tokenizer, id: Token.Id) !Token {
    const token = tokenizer.next();
    try assertToken(tokenizer, token, id);
    return token;
}

const HeaderOpen = struct {
    name: []const u8,
    url: []const u8,
    n: usize,
};

const SeeAlsoItem = struct {
    name: []const u8,
    token: Token,
};

const Code = struct {
    name: []const u8,
    token: Token,
};

const Link = struct {
    url: []const u8,
    name: []const u8,
    token: Token,
};

const SyntaxBlock = struct {
    source_type: SourceType,
    name: []const u8,
    source_token: Token,

    const SourceType = enum {
        zig,
        c,
        peg,
        javascript,
    };
};

const Node = union(enum) {
    Content: []const u8,
    Nav,
    Builtin: Token,
    HeaderOpen: HeaderOpen,
    SeeAlso: []const SeeAlsoItem,
    Code: Code,
    Link: Link,
    InlineSyntax: Token,
    Shell: Token,
    SyntaxBlock: SyntaxBlock,
};

const Toc = struct {
    nodes: []Node,
    toc: []u8,
    urls: std.StringHashMap(Token),
};

const Action = enum {
    open,
    close,
};

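/// First pass over the template: parses it into a list of nodes and renders
/// the table-of-contents HTML that the {#nav#} tag later expands to.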
fn genToc(allocator: Allocator, tokenizer: *Tokenizer) !Toc {
    var urls = std.StringHashMap(Token).init(allocator);
    errdefer urls.deinit();

    var header_stack_size: usize = 0;
    var last_action: Action = .open;
    var last_columns: ?u8 = null;

    var toc_buf: Writer.Allocating = .init(allocator);
    defer toc_buf.deinit();

    const toc = &toc_buf.writer;

    var nodes = std.array_list.Managed(Node).init(allocator);
    defer nodes.deinit();

    try toc.writeByte('\n');

    while (true) {
        const token = tokenizer.next();
        switch (token.id) {
            .eof => {
                if (header_stack_size != 0) {
                    return parseError(tokenizer, token, "unbalanced headers", .{});
                }
                try toc.writeAll("    </ul>\n");
                break;
            },
            .content => {
                try nodes.append(Node{ .Content = tokenizer.buffer[token.start..token.end] });
            },
            .bracket_open => {
                const tag_token = try eatToken(tokenizer, .tag_content);
                const tag_name = tokenizer.buffer[tag_token.start..tag_token.end];

                if (mem.eql(u8, tag_name, "nav")) {
                    _ = try eatToken(tokenizer, .bracket_close);

                    try nodes.append(Node.Nav);
                } else if (mem.eql(u8, tag_name, "builtin")) {
                    _ = try eatToken(tokenizer, .bracket_close);
                    try nodes.append(Node{ .Builtin = tag_token });
                } else if (mem.eql(u8, tag_name, "header_open")) {
                    _ = try eatToken(tokenizer, .separator);
                    const content_token = try eatToken(tokenizer, .tag_content);
                    const content = tokenizer.buffer[content_token.start..content_token.end];
                    var columns: ?u8 = null;
                    while (true) {
                        const bracket_tok = tokenizer.next();
                        switch (bracket_tok.id) {
                            .bracket_close => break,
                            .separator => continue,
                            .tag_content => {
                                const param = tokenizer.buffer[bracket_tok.start..bracket_tok.end];
                                if (mem.eql(u8, param, "2col")) {
                                    columns = 2;
                                } else {
                                    return parseError(
                                        tokenizer,
                                        bracket_tok,
                                        "unrecognized header_open param: {s}",
                                        .{param},
                                    );
                                }
                            },
                            else => return parseError(tokenizer, bracket_tok, "invalid header_open token", .{}),
                        }
                    }

                    header_stack_size += 1;

                    const urlized = try urlize(allocator, content);
                    try nodes.append(Node{
                        .HeaderOpen = HeaderOpen{
                            .name = content,
                            .url = urlized,
                            .n = header_stack_size + 1, // highest-level section headers start at h2
                        },
                    });
                    if (try urls.fetchPut(urlized, tag_token)) |kv| {
                        parseError(tokenizer, tag_token, "duplicate header url: #{s}", .{urlized}) catch {};
                        parseError(tokenizer, kv.value, "other tag here", .{}) catch {};
                        return error.ParseError;
                    }
                    if (last_action == .open) {
                        try toc.writeByte('\n');
                        try toc.splatByteAll(' ', header_stack_size * 4);
                        if (last_columns) |n| {
                            try toc.print("<ul style=\"columns: {d}\">\n", .{n});
                        } else {
                            try toc.writeAll("<ul>\n");
                        }
                    } else {
                        last_action = .open;
                    }
                    last_columns = columns;
                    try toc.splatByteAll(' ', 4 + header_stack_size * 4);
                    try toc.print("<li><a id=\"toc-{s}\" href=\"#{s}\">{s}</a>", .{ urlized, urlized, content });
                } else if (mem.eql(u8, tag_name, "header_close")) {
                    if (header_stack_size == 0) {
                        return parseError(tokenizer, tag_token, "unbalanced close header", .{});
                    }
                    header_stack_size -= 1;
                    _ = try eatToken(tokenizer, .bracket_close);

                    if (last_action == .close) {
                        try toc.splatByteAll(' ', 8 + header_stack_size * 4);
                        try toc.writeAll("</ul></li>\n");
                    } else {
                        try toc.writeAll("</li>\n");
                        last_action = .close;
                    }
                } else if (mem.eql(u8, tag_name, "see_also")) {
                    var list = std.array_list.Managed(SeeAlsoItem).init(allocator);
                    errdefer list.deinit();

                    while (true) {
                        const see_also_tok = tokenizer.next();
                        switch (see_also_tok.id) {
                            .tag_content => {
                                const content = tokenizer.buffer[see_also_tok.start..see_also_tok.end];
                                try list.append(SeeAlsoItem{
                                    .name = content,
                                    .token = see_also_tok,
                                });
                            },
                            .separator => {},
                            .bracket_close => {
                                try nodes.append(Node{ .SeeAlso = try list.toOwnedSlice() });
                                break;
                            },
                            else => return parseError(tokenizer, see_also_tok, "invalid see_also token", .{}),
                        }
                    }
                } else if (mem.eql(u8, tag_name, "link")) {
                    _ = try eatToken(tokenizer, .separator);
                    const name_tok = try eatToken(tokenizer, .tag_content);
                    const name = tokenizer.buffer[name_tok.start..name_tok.end];

                    const url_name = blk: {
                        const tok = tokenizer.next();
                        switch (tok.id) {
                            .bracket_close => break :blk name,
                            .separator => {
                                const explicit_text = try eatToken(tokenizer, .tag_content);
                                _ = try eatToken(tokenizer, .bracket_close);
                                break :blk tokenizer.buffer[explicit_text.start..explicit_text.end];
                            },
                            else => return parseError(tokenizer, tok, "invalid link token", .{}),
                        }
                    };

                    try nodes.append(Node{
                        .Link = Link{
                            .url = try urlize(allocator, url_name),
                            .name = name,
                            .token = name_tok,
                        },
                    });
                } else if (mem.eql(u8, tag_name, "code")) {
                    _ = try eatToken(tokenizer, .separator);
                    const name_tok = try eatToken(tokenizer, .tag_content);
                    _ = try eatToken(tokenizer, .bracket_close);
                    try nodes.append(.{
                        .Code = .{
                            .name = tokenizer.buffer[name_tok.start..name_tok.end],
                            .token = name_tok,
                        },
                    });
                } else if (mem.eql(u8, tag_name, "syntax")) {
                    _ = try eatToken(tokenizer, .bracket_close);
                    const content_tok = try eatToken(tokenizer, .content);
                    _ = try eatToken(tokenizer, .bracket_open);
                    const end_syntax_tag = try eatToken(tokenizer, .tag_content);
                    const end_tag_name = tokenizer.buffer[end_syntax_tag.start..end_syntax_tag.end];
                    if (!mem.eql(u8, end_tag_name, "endsyntax")) {
                        return parseError(
                            tokenizer,
                            end_syntax_tag,
                            "invalid token inside syntax: {s}",
                            .{end_tag_name},
                        );
                    }
                    _ = try eatToken(tokenizer, .bracket_close);
                    try nodes.append(Node{ .InlineSyntax = content_tok });
                } else if (mem.eql(u8, tag_name, "shell_samp")) {
                    _ = try eatToken(tokenizer, .bracket_close);
                    const content_tok = try eatToken(tokenizer, .content);
                    _ = try eatToken(tokenizer, .bracket_open);
                    const end_syntax_tag = try eatToken(tokenizer, .tag_content);
                    const end_tag_name = tokenizer.buffer[end_syntax_tag.start..end_syntax_tag.end];
                    if (!mem.eql(u8, end_tag_name, "end_shell_samp")) {
                        return parseError(
                            tokenizer,
                            end_syntax_tag,
                            "invalid token inside shell_samp: {s}",
                            .{end_tag_name},
                        );
                    }
                    _ = try eatToken(tokenizer, .bracket_close);
                    try nodes.append(Node{ .Shell = content_tok });
                } else if (mem.eql(u8, tag_name, "syntax_block")) {
                    _ = try eatToken(tokenizer, .separator);
                    const source_type_tok = try eatToken(tokenizer, .tag_content);
                    var name: []const u8 = "sample_code";
                    const maybe_sep = tokenizer.next();
                    switch (maybe_sep.id) {
                        .separator => {
                            const name_tok = try eatToken(tokenizer, .tag_content);
                            name = tokenizer.buffer[name_tok.start..name_tok.end];
                            _ = try eatToken(tokenizer, .bracket_close);
                        },
                        .bracket_close => {},
                        else => return parseError(tokenizer, maybe_sep, "invalid token", .{}),
                    }
                    const source_type_str = tokenizer.buffer[source_type_tok.start..source_type_tok.end];
                    var source_type: SyntaxBlock.SourceType = undefined;
                    if (mem.eql(u8, source_type_str, "zig")) {
                        source_type = SyntaxBlock.SourceType.zig;
                    } else if (mem.eql(u8, source_type_str, "c")) {
                        source_type = SyntaxBlock.SourceType.c;
                    } else if (mem.eql(u8, source_type_str, "peg")) {
                        source_type = SyntaxBlock.SourceType.peg;
                    } else if (mem.eql(u8, source_type_str, "javascript")) {
                        source_type = SyntaxBlock.SourceType.javascript;
                    } else {
                        return parseError(tokenizer, source_type_tok, "unrecognized code kind: {s}", .{source_type_str});
                    }
                    const source_token = while (true) {
                        const content_tok = try eatToken(tokenizer, .content);
                        _ = try eatToken(tokenizer, .bracket_open);
                        const end_code_tag = try eatToken(tokenizer, .tag_content);
                        const end_tag_name = tokenizer.buffer[end_code_tag.start..end_code_tag.end];
                        if (mem.eql(u8, end_tag_name, "end_syntax_block")) {
                            _ = try eatToken(tokenizer, .bracket_close);
                            break content_tok;
                        } else {
                            return parseError(
                                tokenizer,
                                end_code_tag,
                                "invalid token inside syntax_block: {s}",
                                .{end_tag_name},
                            );
                        }
                    };
                    try nodes.append(Node{ .SyntaxBlock = SyntaxBlock{ .source_type = source_type, .name = name, .source_token = source_token } });
                } else {
                    return parseError(tokenizer, tag_token, "unrecognized tag name: {s}", .{tag_name});
                }
            },
            else => return parseError(tokenizer, token, "invalid token", .{}),
        }
    }

    return .{
        .nodes = try nodes.toOwnedSlice(),
        .toc = try toc_buf.toOwnedSlice(),
        .urls = urls,
    };
}

fn urlize(gpa: Allocator, input: []const u8) ![]u8 {
    var buf: ArrayList(u8) = .empty;
    defer buf.deinit(gpa);

    for (input) |c| {
        switch (c) {
            'a'...'z', 'A'...'Z', '_', '-', '0'...'9' => {
                try buf.append(gpa, c);
            },
            ' ' => {
                try buf.append(gpa, '-');
            },
            else => {},
        }
    }
    return try buf.toOwnedSlice(gpa);
}
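
// Example of the anchor-name scheme urlize implements: alphanumerics, '_',
// and '-' are kept, spaces become dashes, everything else is dropped.
test "urlize" {
    const url = try urlize(testing.allocator, "Pointers and Slices!");
    defer testing.allocator.free(url);
    try testing.expectEqualStrings("Pointers-and-Slices", url);
}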

fn escapeHtml(allocator: Allocator, input: []const u8) ![]u8 {
    var buf: Writer.Allocating = .init(allocator);
    defer buf.deinit();

    try writeEscaped(&buf.writer, input);
    return try buf.toOwnedSlice();
}

fn writeEscaped(out: *Writer, input: []const u8) !void {
    for (input) |c| {
        try switch (c) {
            '&' => out.writeAll("&amp;"),
            '<' => out.writeAll("&lt;"),
            '>' => out.writeAll("&gt;"),
            '"' => out.writeAll("&quot;"),
            else => out.writeByte(c),
        };
    }
}
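
// Example of the escaping contract above, relying only on the
// Writer.Allocating pattern escapeHtml itself uses.
test "escapeHtml" {
    const escaped = try escapeHtml(testing.allocator, "a < b & c");
    defer testing.allocator.free(escaped);
    try testing.expectEqualStrings("a &lt; b &amp; c", escaped);
}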

// Returns true if number is in slice.
fn in(slice: []const u8, number: u8) bool {
    for (slice) |n| {
        if (number == n) return true;
    }
    return false;
}
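
// Sanity check for the membership helper above.
test "in" {
    try testing.expect(in("abc", 'b'));
    try testing.expect(!in("abc", 'x'));
}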

const builtin_types = [_][]const u8{
    "f16",          "f32",     "f64",        "f80",          "f128",
    "c_longdouble", "c_short", "c_ushort",   "c_int",        "c_uint",
    "c_long",       "c_ulong", "c_longlong", "c_ulonglong",  "c_char",
    "anyopaque",    "void",    "bool",       "isize",        "usize",
    "noreturn",     "type",    "anyerror",   "comptime_int", "comptime_float",
};

fn isType(name: []const u8) bool {
    for (builtin_types) |t| {
        if (mem.eql(u8, t, name))
            return true;
    }
    return false;
}
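
// isType only recognizes the primitive type names listed in builtin_types;
// arbitrary-width integers like u7 are handled separately by the
// highlighter's is_int check below.
test "isType" {
    try testing.expect(isType("usize"));
    try testing.expect(isType("comptime_float"));
    try testing.expect(!isType("MyCustomType"));
}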

fn writeEscapedLines(out: *Writer, text: []const u8) !void {
    return writeEscaped(out, text);
}

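/// Runs raw_src through the Zig tokenizer and emits syntax-highlighted HTML,
/// wrapping each token class in a <span class="tok-..."> element. Line
/// comments are handled specially, since the Zig tokenizer skips them.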
fn tokenizeAndPrintRaw(
    allocator: Allocator,
    docgen_tokenizer: *Tokenizer,
    out: *Writer,
    source_token: Token,
    raw_src: []const u8,
) !void {
    const src_non_terminated = mem.trim(u8, raw_src, " \r\n");
    const src = try allocator.dupeZ(u8, src_non_terminated);

    try out.writeAll("<code>");
    var tokenizer = std.zig.Tokenizer.init(src);
    var index: usize = 0;
    var next_tok_is_fn = false;
    while (true) {
        const prev_tok_was_fn = next_tok_is_fn;
        next_tok_is_fn = false;

        const token = tokenizer.next();
        if (mem.indexOf(u8, src[index..token.loc.start], "//")) |comment_start_off| {
            // render one comment
            const comment_start = index + comment_start_off;
            const comment_end_off = mem.indexOf(u8, src[comment_start..token.loc.start], "\n");
            const comment_end = if (comment_end_off) |o| comment_start + o else token.loc.start;

            try writeEscapedLines(out, src[index..comment_start]);
            try out.writeAll("<span class=\"tok-comment\">");
            try writeEscaped(out, src[comment_start..comment_end]);
            try out.writeAll("</span>");
            index = comment_end;
            tokenizer.index = index;
            continue;
        }

        try writeEscapedLines(out, src[index..token.loc.start]);
        switch (token.tag) {
            .eof => break,

            .keyword_addrspace,
            .keyword_align,
            .keyword_and,
            .keyword_asm,
            .keyword_break,
            .keyword_catch,
            .keyword_comptime,
            .keyword_const,
            .keyword_continue,
            .keyword_defer,
            .keyword_else,
            .keyword_enum,
            .keyword_errdefer,
            .keyword_error,
            .keyword_export,
            .keyword_extern,
            .keyword_for,
            .keyword_if,
            .keyword_inline,
            .keyword_noalias,
            .keyword_noinline,
            .keyword_nosuspend,
            .keyword_opaque,
            .keyword_or,
            .keyword_orelse,
            .keyword_packed,
            .keyword_anyframe,
            .keyword_pub,
            .keyword_resume,
            .keyword_return,
            .keyword_linksection,
            .keyword_callconv,
            .keyword_struct,
            .keyword_suspend,
            .keyword_switch,
            .keyword_test,
            .keyword_threadlocal,
            .keyword_try,
            .keyword_union,
            .keyword_unreachable,
            .keyword_var,
            .keyword_volatile,
            .keyword_allowzero,
            .keyword_while,
            .keyword_anytype,
            => {
                try out.writeAll("<span class=\"tok-kw\">");
                try writeEscaped(out, src[token.loc.start..token.loc.end]);
                try out.writeAll("</span>");
            },

            .keyword_fn => {
                try out.writeAll("<span class=\"tok-kw\">");
                try writeEscaped(out, src[token.loc.start..token.loc.end]);
                try out.writeAll("</span>");
                next_tok_is_fn = true;
            },

            .string_literal,
            .multiline_string_literal_line,
            .char_literal,
            => {
                try out.writeAll("<span class=\"tok-str\">");
                try writeEscaped(out, src[token.loc.start..token.loc.end]);
                try out.writeAll("</span>");
            },

            .builtin => {
                try out.writeAll("<span class=\"tok-builtin\">");
                try writeEscaped(out, src[token.loc.start..token.loc.end]);
                try out.writeAll("</span>");
            },

            .doc_comment,
            .container_doc_comment,
            => {
                try out.writeAll("<span class=\"tok-comment\">");
                try writeEscaped(out, src[token.loc.start..token.loc.end]);
                try out.writeAll("</span>");
            },

            .identifier => {
                const tok_bytes = src[token.loc.start..token.loc.end];
                if (mem.eql(u8, tok_bytes, "undefined") or
                    mem.eql(u8, tok_bytes, "null") or
                    mem.eql(u8, tok_bytes, "true") or
                    mem.eql(u8, tok_bytes, "false"))
                {
                    try out.writeAll("<span class=\"tok-null\">");
                    try writeEscaped(out, tok_bytes);
                    try out.writeAll("</span>");
                } else if (prev_tok_was_fn) {
                    try out.writeAll("<span class=\"tok-fn\">");
                    try writeEscaped(out, tok_bytes);
                    try out.writeAll("</span>");
                } else {
                    const is_int = blk: {
                        if (src[token.loc.start] != 'i' and src[token.loc.start] != 'u')
                            break :blk false;
                        var i = token.loc.start + 1;
                        if (i == token.loc.end)
                            break :blk false;
                        while (i != token.loc.end) : (i += 1) {
                            if (src[i] < '0' or src[i] > '9')
                                break :blk false;
                        }
                        break :blk true;
                    };
                    if (is_int or isType(tok_bytes)) {
                        try out.writeAll("<span class=\"tok-type\">");
                        try writeEscaped(out, tok_bytes);
                        try out.writeAll("</span>");
                    } else {
                        try writeEscaped(out, tok_bytes);
                    }
                }
            },

            .number_literal => {
                try out.writeAll("<span class=\"tok-number\">");
                try writeEscaped(out, src[token.loc.start..token.loc.end]);
                try out.writeAll("</span>");
            },

            .bang,
            .pipe,
            .pipe_pipe,
            .pipe_equal,
            .equal,
            .equal_equal,
            .equal_angle_bracket_right,
            .bang_equal,
            .l_paren,
            .r_paren,
            .semicolon,
            .percent,
            .percent_equal,
            .l_brace,
            .r_brace,
            .l_bracket,
            .r_bracket,
            .period,
            .period_asterisk,
            .ellipsis2,
            .ellipsis3,
            .caret,
            .caret_equal,
            .plus,
            .plus_plus,
            .plus_equal,
            .plus_percent,
            .plus_percent_equal,
            .plus_pipe,
            .plus_pipe_equal,
            .minus,
            .minus_equal,
            .minus_percent,
            .minus_percent_equal,
            .minus_pipe,
            .minus_pipe_equal,
            .asterisk,
            .asterisk_equal,
            .asterisk_asterisk,
            .asterisk_percent,
            .asterisk_percent_equal,
            .asterisk_pipe,
            .asterisk_pipe_equal,
            .arrow,
            .colon,
            .slash,
            .slash_equal,
            .comma,
            .ampersand,
            .ampersand_equal,
            .question_mark,
            .angle_bracket_left,
            .angle_bracket_left_equal,
            .angle_bracket_angle_bracket_left,
            .angle_bracket_angle_bracket_left_equal,
            .angle_bracket_angle_bracket_left_pipe,
            .angle_bracket_angle_bracket_left_pipe_equal,
            .angle_bracket_right,
            .angle_bracket_right_equal,
            .angle_bracket_angle_bracket_right,
            .angle_bracket_angle_bracket_right_equal,
            .tilde,
            => try writeEscaped(out, src[token.loc.start..token.loc.end]),

            .invalid, .invalid_periodasterisks => return parseError(
                docgen_tokenizer,
                source_token,
                "syntax error",
                .{},
            ),
        }
        index = token.loc.end;
    }
    try out.writeAll("</code>");
}

fn tokenizeAndPrint(
    allocator: Allocator,
    docgen_tokenizer: *Tokenizer,
    out: *Writer,
    source_token: Token,
) !void {
    const raw_src = docgen_tokenizer.buffer[source_token.start..source_token.end];
    return tokenizeAndPrintRaw(allocator, docgen_tokenizer, out, source_token, raw_src);
}

fn printSourceBlock(allocator: Allocator, docgen_tokenizer: *Tokenizer, out: *Writer, syntax_block: SyntaxBlock) !void {
    const source_type = @tagName(syntax_block.source_type);

    try out.print("<figure><figcaption class=\"{s}-cap\"><cite class=\"file\">{s}</cite></figcaption><pre>", .{ source_type, syntax_block.name });
    switch (syntax_block.source_type) {
        .zig => try tokenizeAndPrint(allocator, docgen_tokenizer, out, syntax_block.source_token),
        else => {
            const raw_source = docgen_tokenizer.buffer[syntax_block.source_token.start..syntax_block.source_token.end];
            const trimmed_raw_source = mem.trim(u8, raw_source, " \r\n");

            try out.writeAll("<code>");
            try writeEscapedLines(out, trimmed_raw_source);
            try out.writeAll("</code>");
        },
    }
    try out.writeAll("</pre></figure>");
}

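/// Renders a shell transcript: lines beginning with "$ " become <kbd>
/// commands (with backslash line continuations kept inside the same <kbd>),
/// and all other lines pass through as program output.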
fn printShell(out: *Writer, shell_content: []const u8, escape: bool) !void {
    const trimmed_shell_content = mem.trim(u8, shell_content, " \r\n");
    try out.writeAll("<figure><figcaption class=\"shell-cap\">Shell</figcaption><pre><samp>");
    var cmd_cont: bool = false;
    var iter = std.mem.splitScalar(u8, trimmed_shell_content, '\n');
    while (iter.next()) |orig_line| {
        const line = mem.trimEnd(u8, orig_line, " \r");
        if (!cmd_cont and line.len > 1 and mem.eql(u8, line[0..2], "$ ") and line[line.len - 1] != '\\') {
            try out.writeAll("$ <kbd>");
            const s = std.mem.trimStart(u8, line[1..], " ");
            if (escape) {
                try writeEscaped(out, s);
            } else {
                try out.writeAll(s);
            }
            try out.writeAll("</kbd>" ++ "\n");
        } else if (!cmd_cont and line.len > 1 and mem.eql(u8, line[0..2], "$ ") and line[line.len - 1] == '\\') {
            try out.writeAll("$ <kbd>");
            const s = std.mem.trimStart(u8, line[1..], " ");
            if (escape) {
                try writeEscaped(out, s);
            } else {
                try out.writeAll(s);
            }
            try out.writeAll("\n");
            cmd_cont = true;
        } else if (line.len > 0 and line[line.len - 1] != '\\' and cmd_cont) {
            if (escape) {
                try writeEscaped(out, line);
            } else {
                try out.writeAll(line);
            }
            try out.writeAll("</kbd>" ++ "\n");
            cmd_cont = false;
        } else {
            if (escape) {
                try writeEscaped(out, line);
            } else {
                try out.writeAll(line);
            }
            try out.writeAll("\n");
        }
    }

    try out.writeAll("</samp></pre></figure>");
}

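// A sketch of printShell's HTML shape, using the Writer.Allocating API the
// way genToc does; the sample command and version string are arbitrary.
test "printShell" {
    var buf: Writer.Allocating = .init(testing.allocator);
    defer buf.deinit();

    try printShell(&buf.writer, "$ zig version\n0.15.0\n", true);

    const actual = try buf.toOwnedSlice();
    defer testing.allocator.free(actual);
    const expected = "<figure><figcaption class=\"shell-cap\">Shell</figcaption><pre><samp>" ++
        "$ <kbd>zig version</kbd>\n" ++
        "0.15.0\n" ++
        "</samp></pre></figure>";
    try testing.expectEqualStrings(expected, actual);
}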
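
/// Second pass: walks the parsed nodes and writes the final HTML document.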
fn genHtml(
    allocator: Allocator,
    tokenizer: *Tokenizer,
    toc: *Toc,
    code_dir: std.fs.Dir,
    out: *Writer,
) !void {
    for (toc.nodes) |node| {
        switch (node) {
            .Content => |data| {
                try out.writeAll(data);
            },
            .Link => |info| {
                if (!toc.urls.contains(info.url)) {
                    return parseError(tokenizer, info.token, "url not found: {s}", .{info.url});
                }
                try out.print("<a href=\"#{s}\">{s}</a>", .{ info.url, info.name });
            },
            .Nav => {
                try out.writeAll(toc.toc);
            },
            .Builtin => |tok| {
                try out.writeAll("<figure><figcaption class=\"zig-cap\"><cite>@import(\"builtin\")</cite></figcaption><pre>");
                const builtin_code = @embedFile("builtin"); // 😎
                try tokenizeAndPrintRaw(allocator, tokenizer, out, tok, builtin_code);
                try out.writeAll("</pre></figure>");
            },
            .HeaderOpen => |info| {
                try out.print(
                    "<h{d} id=\"{s}\"><a href=\"#toc-{s}\">{s}</a> <a class=\"hdr\" href=\"#{s}\">§</a></h{d}>\n",
                    .{ info.n, info.url, info.url, info.name, info.url, info.n },
                );
            },
            .SeeAlso => |items| {
                try out.writeAll("<p>See also:</p><ul>\n");
                for (items) |item| {
                    const url = try urlize(allocator, item.name);
                    if (!toc.urls.contains(url)) {
                        return parseError(tokenizer, item.token, "url not found: {s}", .{url});
                    }
                    try out.print("<li><a href=\"#{s}\">{s}</a></li>\n", .{ url, item.name });
                }
                try out.writeAll("</ul>\n");
            },
            .InlineSyntax => |content_tok| {
                try tokenizeAndPrint(allocator, tokenizer, out, content_tok);
            },
            .Shell => |content_tok| {
                const raw_shell_content = tokenizer.buffer[content_tok.start..content_tok.end];
                try printShell(out, raw_shell_content, true);
            },
            .SyntaxBlock => |syntax_block| {
                try printSourceBlock(allocator, tokenizer, out, syntax_block);
            },
            .Code => |code| {
                const out_basename = try std.fmt.allocPrint(allocator, "{s}.out", .{
                    fs.path.stem(code.name),
                });
                defer allocator.free(out_basename);

                const contents = code_dir.readFileAlloc(out_basename, allocator, .limited(std.math.maxInt(u32))) catch |err| {
                    return parseError(tokenizer, code.token, "unable to open '{s}': {t}", .{ out_basename, err });
                };
                defer allocator.free(contents);

                try out.writeAll(contents);
            },
        }
    }
}