const std = @import("std");
const log = std.log;
const assert = std.debug.assert;
const Ast = std.zig.Ast;
const Walk = @import("Walk");
const markdown = @import("markdown.zig");
const Decl = Walk.Decl;
const ArrayList = std.ArrayList;
const Writer = std.Io.Writer;

const fileSourceHtml = @import("html_render.zig").fileSourceHtml;
const appendEscaped = @import("html_render.zig").appendEscaped;
const resolveDeclLink = @import("html_render.zig").resolveDeclLink;
const missing_feature_url_escape = @import("html_render.zig").missing_feature_url_escape;

const gpa = std.heap.wasm_allocator;

const js = struct {
    /// Keep in sync with the `LOG_` constants in `main.js`.
    const LogLevel = enum(u8) {
        err,
        warn,
        info,
        debug,
    };

    extern "js" fn log(level: LogLevel, ptr: [*]const u8, len: usize) void;
};

pub const std_options: std.Options = .{
    .logFn = logFn,
    //.log_level = .debug,
};

pub fn panic(msg: []const u8, st: ?*std.builtin.StackTrace, addr: ?usize) noreturn {
    _ = st;
    _ = addr;
    log.err("panic: {s}", .{msg});
    @trap();
}

fn logFn(
    comptime message_level: log.Level,
    comptime scope: @EnumLiteral(),
    comptime format: []const u8,
    args: anytype,
) void {
    const prefix = if (scope == .default) "" else @tagName(scope) ++ ": ";
    var buf: [500]u8 = undefined;
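    // If the formatted message does not fit in `buf`, report the truncated
    // buffer instead, overwriting its final three bytes with "..." so the
    // cut-off is visible in the console.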
    const line = std.fmt.bufPrint(&buf, prefix ++ format, args) catch l: {
        buf[buf.len - 3 ..][0..3].* = "...".*;
        break :l &buf;
    };
    js.log(@field(js.LogLevel, @tagName(message_level)), line.ptr, line.len);
}

export fn alloc(n: usize) [*]u8 {
    const slice = gpa.alloc(u8, n) catch @panic("OOM");
    return slice.ptr;
}

export fn unpack(tar_ptr: [*]u8, tar_len: usize) void {
    const tar_bytes = tar_ptr[0..tar_len];
    //log.debug("received {d} bytes of tar file", .{tar_bytes.len});

    unpackInner(tar_bytes) catch |err| {
        std.debug.panic("unable to unpack tar: {s}", .{@errorName(err)});
    };
}

var query_string: ArrayList(u8) = .empty;
var query_results: ArrayList(Decl.Index) = .empty;

/// Resizes the query string to be the correct length; returns the pointer to
/// the query string.
export fn query_begin(query_string_len: usize) [*]u8 {
    query_string.resize(gpa, query_string_len) catch @panic("OOM");
    return query_string.items.ptr;
}

/// Executes the query. Returns the pointer to the query results which is an
/// array of u32.
/// The first element is the length of the array.
/// Subsequent elements are Decl.Index values which are all public
/// declarations.
export fn query_exec(ignore_case: bool) [*]Decl.Index {
    const query = query_string.items;
    log.debug("querying '{s}'", .{query});
    query_exec_fallible(query, ignore_case) catch |err| switch (err) {
        error.OutOfMemory => @panic("OOM"),
    };
    query_results.items[0] = @enumFromInt(query_results.items.len - 1);
    return query_results.items.ptr;
}

const max_matched_items = 1000;

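/// Scores every public decl against the space-separated query terms: 4 points
/// for an exact full-path match, 3 for an exact name match, 2 for a substring
/// of the (optionally lower-cased) full path, 1 for a substring of the doc
/// comments; a term that matches nothing rejects the decl. Results are sorted
/// by points, then by fewer path segments, then by file path.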
fn query_exec_fallible(query: []const u8, ignore_case: bool) !void {
    const Score = packed struct(u32) {
        points: u16,
        segments: u16,
    };
    const g = struct {
        var full_path_search_text: ArrayList(u8) = .empty;
        var full_path_search_text_lower: ArrayList(u8) = .empty;
        var doc_search_text: ArrayList(u8) = .empty;
        /// Each element matches a corresponding query_results element.
        var scores: ArrayList(Score) = .empty;
    };

    // First element stores the size of the list.
    try query_results.resize(gpa, 1);
    // Corresponding point value is meaningless and therefore undefined.
    try g.scores.resize(gpa, 1);

    decl_loop: for (Walk.decls.items, 0..) |*decl, decl_index| {
        const info = decl.extra_info();
        if (!info.is_pub) continue;

        try decl.reset_with_path(&g.full_path_search_text);
        if (decl.parent != .none)
            try Decl.append_parent_ns(&g.full_path_search_text, decl.parent);
        try g.full_path_search_text.appendSlice(gpa, info.name);

        try g.full_path_search_text_lower.resize(gpa, g.full_path_search_text.items.len);
        @memcpy(g.full_path_search_text_lower.items, g.full_path_search_text.items);

        const ast = decl.file.get_ast();
        if (info.first_doc_comment.unwrap()) |first_doc_comment| {
            try collect_docs(&g.doc_search_text, ast, first_doc_comment);
        }

        if (ignore_case) {
            ascii_lower(g.full_path_search_text_lower.items);
            ascii_lower(g.doc_search_text.items);
        }

        var it = std.mem.tokenizeScalar(u8, query, ' ');
        var points: u16 = 0;
        var bypass_limit = false;
        while (it.next()) |term| {
            // exact, case sensitive match of full decl path
            if (std.mem.eql(u8, g.full_path_search_text.items, term)) {
                points += 4;
                bypass_limit = true;
                continue;
            }
            // exact, case sensitive match of just decl name
            if (std.mem.eql(u8, info.name, term)) {
                points += 3;
                bypass_limit = true;
                continue;
            }
            // substring, case insensitive match of full decl path
            if (std.mem.indexOf(u8, g.full_path_search_text_lower.items, term) != null) {
                points += 2;
                continue;
            }
            if (std.mem.indexOf(u8, g.doc_search_text.items, term) != null) {
                points += 1;
                continue;
            }
            continue :decl_loop;
        }

        if (query_results.items.len < max_matched_items or bypass_limit) {
            try query_results.append(gpa, @enumFromInt(decl_index));
            try g.scores.append(gpa, .{
                .points = points,
                .segments = @intCast(count_scalar(g.full_path_search_text.items, '.')),
            });
        }
    }

    const sort_context: struct {
        pub fn swap(sc: @This(), a_index: usize, b_index: usize) void {
            _ = sc;
            std.mem.swap(Score, &g.scores.items[a_index], &g.scores.items[b_index]);
            std.mem.swap(Decl.Index, &query_results.items[a_index], &query_results.items[b_index]);
        }

        pub fn lessThan(sc: @This(), a_index: usize, b_index: usize) bool {
            _ = sc;
            const a_score = g.scores.items[a_index];
            const b_score = g.scores.items[b_index];
            if (b_score.points < a_score.points) {
                return true;
            } else if (b_score.points > a_score.points) {
                return false;
            } else if (a_score.segments < b_score.segments) {
                return true;
            } else if (a_score.segments > b_score.segments) {
                return false;
            } else {
                const a_decl = query_results.items[a_index];
                const b_decl = query_results.items[b_index];
                const a_file_path = a_decl.get().file.path();
                const b_file_path = b_decl.get().file.path();
                // This neglects to check the local namespace inside the file.
                return std.mem.lessThan(u8, b_file_path, a_file_path);
            }
        }
    } = .{};

    std.mem.sortUnstableContext(1, query_results.items.len, sort_context);

    if (query_results.items.len > max_matched_items)
        query_results.shrinkRetainingCapacity(max_matched_items);
}

const String = Slice(u8);

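/// Slices are returned to the host as a single u64: in this packed layout
/// `ptr` occupies the low 32 bits and `len` the high 32 bits, and the
/// JavaScript side is assumed to split the value back into pointer and length.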
fn Slice(T: type) type {
    return packed struct(u64) {
        ptr: u32,
        len: u32,

        fn init(s: []const T) @This() {
            return .{
                .ptr = @intFromPtr(s.ptr),
                .len = s.len,
            };
        }
    };
}

const ErrorIdentifier = packed struct(u64) {
    token_index: Ast.TokenIndex,
    decl_index: Decl.Index,

    fn hasDocs(ei: ErrorIdentifier) bool {
        const decl_index = ei.decl_index;
        const ast = decl_index.get().file.get_ast();
        const token_index = ei.token_index;
        if (token_index == 0) return false;
        return ast.tokenTag(token_index - 1) == .doc_comment;
    }

    fn html(ei: ErrorIdentifier, base_decl: Decl.Index, out: *ArrayList(u8)) Oom!void {
        const decl_index = ei.decl_index;
        const ast = decl_index.get().file.get_ast();
        const name = ast.tokenSlice(ei.token_index);
        const has_link = base_decl != decl_index;

        try out.appendSlice(gpa, "<dt>");
        try out.appendSlice(gpa, name);
        if (has_link) {
            try out.appendSlice(gpa, " <a href=\"#");
            _ = missing_feature_url_escape;
            try decl_index.get().fqn(out);
            try out.appendSlice(gpa, "\">");
            try out.appendSlice(gpa, decl_index.get().extra_info().name);
            try out.appendSlice(gpa, "</a>");
        }
        try out.appendSlice(gpa, "</dt>");

        if (Decl.findFirstDocComment(ast, ei.token_index).unwrap()) |first_doc_comment| {
            try out.appendSlice(gpa, "<dd>");
            try render_docs(out, decl_index, first_doc_comment, false);
            try out.appendSlice(gpa, "</dd>");
        }
    }
};

var string_result: ArrayList(u8) = .empty;
var error_set_result: std.StringArrayHashMapUnmanaged(ErrorIdentifier) = .empty;

export fn decl_error_set(decl_index: Decl.Index) Slice(ErrorIdentifier) {
    return Slice(ErrorIdentifier).init(decl_error_set_fallible(decl_index) catch @panic("OOM"));
}

export fn error_set_node_list(base_decl: Decl.Index, node: Ast.Node.Index) Slice(ErrorIdentifier) {
    error_set_result.clearRetainingCapacity();
    addErrorsFromExpr(base_decl, &error_set_result, node) catch @panic("OOM");
    sort_error_set_result();
    return Slice(ErrorIdentifier).init(error_set_result.values());
}

export fn fn_error_set_decl(decl_index: Decl.Index, node: Ast.Node.Index) Decl.Index {
    return switch (decl_index.get().file.categorize_expr(node)) {
        .alias => |aliasee| fn_error_set_decl(aliasee, aliasee.get().ast_node),
        else => decl_index,
    };
}

fn decl_error_set_fallible(decl_index: Decl.Index) Oom![]ErrorIdentifier {
    error_set_result.clearRetainingCapacity();
    try addErrorsFromDecl(decl_index, &error_set_result);
    sort_error_set_result();
    return error_set_result.values();
}

fn sort_error_set_result() void {
    const sort_context: struct {
        pub fn lessThan(sc: @This(), a_index: usize, b_index: usize) bool {
            _ = sc;
            const a_name = error_set_result.keys()[a_index];
            const b_name = error_set_result.keys()[b_index];
            return std.mem.lessThan(u8, a_name, b_name);
        }
    } = .{};
    error_set_result.sortUnstable(sort_context);
}

fn addErrorsFromDecl(
    decl_index: Decl.Index,
    out: *std.StringArrayHashMapUnmanaged(ErrorIdentifier),
) Oom!void {
    switch (decl_index.get().categorize()) {
        .error_set => |node| try addErrorsFromExpr(decl_index, out, node),
        .alias => |aliasee| try addErrorsFromDecl(aliasee, out),
        else => |cat| log.debug("unable to addErrorsFromDecl: {any}", .{cat}),
    }
}

fn addErrorsFromExpr(
    decl_index: Decl.Index,
    out: *std.StringArrayHashMapUnmanaged(ErrorIdentifier),
    node: Ast.Node.Index,
) Oom!void {
    const decl = decl_index.get();
    const ast = decl.file.get_ast();

    switch (decl.file.categorize_expr(node)) {
        .error_set => |n| switch (ast.nodeTag(n)) {
            .error_set_decl => {
                try addErrorsFromNode(decl_index, out, node);
            },
            .merge_error_sets => {
                const lhs, const rhs = ast.nodeData(n).node_and_node;
                try addErrorsFromExpr(decl_index, out, lhs);
                try addErrorsFromExpr(decl_index, out, rhs);
            },
            else => unreachable,
        },
        .alias => |aliasee| {
            try addErrorsFromDecl(aliasee, out);
        },
        else => return,
    }
}

fn addErrorsFromNode(
    decl_index: Decl.Index,
    out: *std.StringArrayHashMapUnmanaged(ErrorIdentifier),
    node: Ast.Node.Index,
) Oom!void {
    const decl = decl_index.get();
    const ast = decl.file.get_ast();
    const error_token = ast.nodeMainToken(node);
    var tok_i = error_token + 2;
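    // `error_token` is the `error` keyword; skipping two tokens moves past it
    // and the opening `{`, so the loop below walks the member tokens until the
    // closing `}` (assumed token layout of an `error { ... }` literal).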
    while (true) : (tok_i += 1) switch (ast.tokenTag(tok_i)) {
        .doc_comment, .comma => {},
        .identifier => {
            const name = ast.tokenSlice(tok_i);
            const gop = try out.getOrPut(gpa, name);
            // If there are more than one, take the one with doc comments.
            // If they both have doc comments, prefer the existing one.
            const new: ErrorIdentifier = .{
                .token_index = tok_i,
                .decl_index = decl_index,
            };
            if (!gop.found_existing or
                (!gop.value_ptr.hasDocs() and new.hasDocs()))
            {
                gop.value_ptr.* = new;
            }
        },
        .r_brace => break,
        else => unreachable,
    };
}

export fn type_fn_fields(decl_index: Decl.Index) Slice(Ast.Node.Index) {
    return decl_fields(decl_index);
}

export fn decl_fields(decl_index: Decl.Index) Slice(Ast.Node.Index) {
    return Slice(Ast.Node.Index).init(decl_fields_fallible(decl_index) catch @panic("OOM"));
}

export fn decl_params(decl_index: Decl.Index) Slice(Ast.Node.Index) {
    return Slice(Ast.Node.Index).init(decl_params_fallible(decl_index) catch @panic("OOM"));
}

fn decl_fields_fallible(decl_index: Decl.Index) ![]Ast.Node.Index {
    const decl = decl_index.get();
    const ast = decl.file.get_ast();

    switch (decl.categorize()) {
        .type_function => {
            // If the type function returns a reference to another type function, get the fields from there
            if (decl.get_type_fn_return_type_fn()) |function_decl| {
                return decl_fields_fallible(function_decl);
            }
            // If the type function returns a container, such as a `struct`, read that container's fields
            if (decl.get_type_fn_return_expr()) |return_expr| {
                switch (ast.nodeTag(return_expr)) {
                    .container_decl, .container_decl_trailing, .container_decl_two, .container_decl_two_trailing, .container_decl_arg, .container_decl_arg_trailing => {
                        return ast_decl_fields_fallible(ast, return_expr);
                    },
                    else => {},
                }
            }
            return &.{};
        },
        else => {
            const value_node = decl.value_node() orelse return &.{};
            return ast_decl_fields_fallible(ast, value_node);
        },
    }
}

fn ast_decl_fields_fallible(ast: *Ast, ast_index: Ast.Node.Index) ![]Ast.Node.Index {
    const g = struct {
        var result: ArrayList(Ast.Node.Index) = .empty;
    };
    g.result.clearRetainingCapacity();
    var buf: [2]Ast.Node.Index = undefined;
    const container_decl = ast.fullContainerDecl(&buf, ast_index) orelse return &.{};
    for (container_decl.ast.members) |member_node| switch (ast.nodeTag(member_node)) {
        .container_field_init,
        .container_field_align,
        .container_field,
        => try g.result.append(gpa, member_node),

        else => continue,
    };
    return g.result.items;
}

fn decl_params_fallible(decl_index: Decl.Index) ![]Ast.Node.Index {
    const g = struct {
        var result: ArrayList(Ast.Node.Index) = .empty;
    };
    g.result.clearRetainingCapacity();
    const decl = decl_index.get();
    const ast = decl.file.get_ast();
    const value_node = decl.value_node() orelse return &.{};
    var buf: [1]Ast.Node.Index = undefined;
    const fn_proto = ast.fullFnProto(&buf, value_node) orelse return &.{};
    try g.result.appendSlice(gpa, fn_proto.ast.params);
    return g.result.items;
}

export fn error_html(base_decl: Decl.Index, error_identifier: ErrorIdentifier) String {
    string_result.clearRetainingCapacity();
    error_identifier.html(base_decl, &string_result) catch @panic("OOM");
    return String.init(string_result.items);
}

export fn decl_field_html(decl_index: Decl.Index, field_node: Ast.Node.Index) String {
    string_result.clearRetainingCapacity();
    decl_field_html_fallible(&string_result, decl_index, field_node) catch @panic("OOM");
    return String.init(string_result.items);
}

export fn decl_param_html(decl_index: Decl.Index, param_node: Ast.Node.Index) String {
    string_result.clearRetainingCapacity();
    decl_param_html_fallible(&string_result, decl_index, param_node) catch @panic("OOM");
    return String.init(string_result.items);
}

fn decl_field_html_fallible(
    out: *ArrayList(u8),
    decl_index: Decl.Index,
    field_node: Ast.Node.Index,
) !void {
    const decl = decl_index.get();
    const ast = decl.file.get_ast();
    try out.appendSlice(gpa, "<pre><code>");
    try fileSourceHtml(decl.file, out, field_node, .{});
    try out.appendSlice(gpa, "</code></pre>");

    const field = ast.fullContainerField(field_node).?;

    if (Decl.findFirstDocComment(ast, field.firstToken()).unwrap()) |first_doc_comment| {
        try out.appendSlice(gpa, "<div class=\"fieldDocs\">");
        try render_docs(out, decl_index, first_doc_comment, false);
        try out.appendSlice(gpa, "</div>");
    }
}

fn decl_param_html_fallible(
    out: *ArrayList(u8),
    decl_index: Decl.Index,
    param_node: Ast.Node.Index,
) !void {
    const decl = decl_index.get();
    const ast = decl.file.get_ast();
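    // A parameter node refers to the parameter's type expression, so the `:`
    // and the parameter name immediately precede its first token (assumption
    // based on how the tokens are walked below).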
    const colon = ast.firstToken(param_node) - 1;
    const name_token = colon - 1;
    const first_doc_comment = f: {
        var it = ast.firstToken(param_node);
        while (it > 0) {
            it -= 1;
            switch (ast.tokenTag(it)) {
                .doc_comment, .colon, .identifier, .keyword_comptime, .keyword_noalias => {},
                else => break,
            }
        }
        break :f it + 1;
    };
    const name = ast.tokenSlice(name_token);

    try out.appendSlice(gpa, "<pre><code>");
    try appendEscaped(out, name);
    try out.appendSlice(gpa, ": ");
    try fileSourceHtml(decl.file, out, param_node, .{});
    try out.appendSlice(gpa, "</code></pre>");

    if (ast.tokenTag(first_doc_comment) == .doc_comment) {
        try out.appendSlice(gpa, "<div class=\"fieldDocs\">");
        try render_docs(out, decl_index, first_doc_comment, false);
        try out.appendSlice(gpa, "</div>");
    }
}

export fn decl_fn_proto_html(decl_index: Decl.Index, linkify_fn_name: bool) String {
    const decl = decl_index.get();
    const ast = decl.file.get_ast();
    const proto_node = switch (ast.nodeTag(decl.ast_node)) {
        .fn_decl => ast.nodeData(decl.ast_node).node_and_node[0],

        .fn_proto,
        .fn_proto_one,
        .fn_proto_simple,
        .fn_proto_multi,
        => decl.ast_node,

        else => unreachable,
    };

    string_result.clearRetainingCapacity();
    fileSourceHtml(decl.file, &string_result, proto_node, .{
        .skip_doc_comments = true,
        .skip_comments = true,
        .collapse_whitespace = true,
        .fn_link = if (linkify_fn_name) decl_index else .none,
    }) catch |err| {
        std.debug.panic("unable to render source: {s}", .{@errorName(err)});
    };
    return String.init(string_result.items);
}

export fn decl_source_html(decl_index: Decl.Index) String {
    const decl = decl_index.get();

    string_result.clearRetainingCapacity();
    fileSourceHtml(decl.file, &string_result, decl.ast_node, .{}) catch |err| {
        std.debug.panic("unable to render source: {s}", .{@errorName(err)});
    };
    return String.init(string_result.items);
}

export fn decl_doctest_html(decl_index: Decl.Index) String {
    const decl = decl_index.get();
    const doctest_ast_node = decl.file.get().doctests.get(decl.ast_node) orelse
        return String.init("");

    string_result.clearRetainingCapacity();
    fileSourceHtml(decl.file, &string_result, doctest_ast_node, .{}) catch |err| {
        std.debug.panic("unable to render source: {s}", .{@errorName(err)});
    };
    return String.init(string_result.items);
}

export fn decl_fqn(decl_index: Decl.Index) String {
    const decl = decl_index.get();
    string_result.clearRetainingCapacity();
    decl.fqn(&string_result) catch @panic("OOM");
    return String.init(string_result.items);
}

export fn decl_parent(decl_index: Decl.Index) Decl.Index {
    const decl = decl_index.get();
    return decl.parent;
}

export fn fn_error_set(decl_index: Decl.Index) Ast.Node.OptionalIndex {
    const decl = decl_index.get();
    const ast = decl.file.get_ast();
    var buf: [1]Ast.Node.Index = undefined;
    const full = ast.fullFnProto(&buf, decl.ast_node).?;
    const return_type = full.ast.return_type.unwrap().?;
    return switch (ast.nodeTag(return_type)) {
        .error_set_decl => return_type.toOptional(),
        .error_union => ast.nodeData(return_type).node_and_node[0].toOptional(),
        else => .none,
    };
}

export fn decl_file_path(decl_index: Decl.Index) String {
    string_result.clearRetainingCapacity();
    string_result.appendSlice(gpa, decl_index.get().file.path()) catch @panic("OOM");
    return String.init(string_result.items);
}

export fn decl_category_name(decl_index: Decl.Index) String {
    const decl = decl_index.get();
    const ast = decl.file.get_ast();
    const name = switch (decl.categorize()) {
        .namespace, .container => |node| {
            if (ast.nodeTag(decl.ast_node) == .root)
                return String.init("struct");
            string_result.clearRetainingCapacity();
            var buf: [2]Ast.Node.Index = undefined;
            const container_decl = ast.fullContainerDecl(&buf, node).?;
            if (container_decl.layout_token) |t| {
                if (ast.tokenTag(t) == .keyword_extern) {
                    string_result.appendSlice(gpa, "extern ") catch @panic("OOM");
                }
            }
            const main_token_tag = ast.tokenTag(container_decl.ast.main_token);
            string_result.appendSlice(gpa, main_token_tag.lexeme().?) catch @panic("OOM");
            return String.init(string_result.items);
        },
        .global_variable => "Global Variable",
        .function => "Function",
        .type_function => "Type Function",
        .type, .type_type => "Type",
        .error_set => "Error Set",
        .global_const => "Constant",
        .primitive => "Primitive Value",
        .alias => "Alias",
    };
    return String.init(name);
}

export fn decl_name(decl_index: Decl.Index) String {
    const decl = decl_index.get();
    string_result.clearRetainingCapacity();
    const name = n: {
        if (decl.parent == .none) {
            // Then it is the root struct of a file.
            break :n std.fs.path.stem(decl.file.path());
        }
        break :n decl.extra_info().name;
    };
    string_result.appendSlice(gpa, name) catch @panic("OOM");
    return String.init(string_result.items);
}

export fn decl_docs_html(decl_index: Decl.Index, short: bool) String {
    const decl = decl_index.get();
    string_result.clearRetainingCapacity();
    if (decl.extra_info().first_doc_comment.unwrap()) |first_doc_comment| {
        render_docs(&string_result, decl_index, first_doc_comment, short) catch @panic("OOM");
    }
    return String.init(string_result.items);
}

fn collect_docs(
    list: *ArrayList(u8),
    ast: *const Ast,
    first_doc_comment: Ast.TokenIndex,
) Oom!void {
    list.clearRetainingCapacity();
    var it = first_doc_comment;
    while (true) : (it += 1) switch (ast.tokenTag(it)) {
        .doc_comment, .container_doc_comment => {
            // It is tempting to trim this string but think carefully about how
            // that will affect the markdown parser.
            const line = ast.tokenSlice(it)[3..];
            try list.appendSlice(gpa, line);
        },
        else => break,
    };
}

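/// Renders the doc comments starting at `first_doc_comment` as HTML into `out`.
/// When `short` is true, only the lines up to the first blank doc-comment line
/// (roughly the first paragraph) are fed to the markdown parser.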
fn render_docs(
    out: *ArrayList(u8),
    decl_index: Decl.Index,
    first_doc_comment: Ast.TokenIndex,
    short: bool,
) Oom!void {
    const decl = decl_index.get();
    const ast = decl.file.get_ast();

    var parser = try markdown.Parser.init(gpa);
    defer parser.deinit();
    var it = first_doc_comment;
    while (true) : (it += 1) switch (ast.tokenTag(it)) {
        .doc_comment, .container_doc_comment => {
            const line = ast.tokenSlice(it)[3..];
            if (short and line.len == 0) break;
            try parser.feedLine(line);
        },
        else => break,
    };

    var parsed_doc = try parser.endInput();
    defer parsed_doc.deinit(gpa);

    const g = struct {
        var link_buffer: ArrayList(u8) = .empty;
    };

    const Renderer = markdown.Renderer(Decl.Index);
    const renderer: Renderer = .{
        .context = decl_index,
        .renderFn = struct {
            fn render(
                r: Renderer,
                doc: markdown.Document,
                node: markdown.Document.Node.Index,
                writer: *Writer,
            ) Writer.Error!void {
                const data = doc.nodes.items(.data)[@intFromEnum(node)];
                switch (doc.nodes.items(.tag)[@intFromEnum(node)]) {
                    .code_span => {
                        try writer.writeAll("<code>");
                        const content = doc.string(data.text.content);
                        if (resolve_decl_path(r.context, content)) |resolved_decl_index| {
                            g.link_buffer.clearRetainingCapacity();
                            resolveDeclLink(resolved_decl_index, &g.link_buffer) catch return error.WriteFailed;

                            try writer.writeAll("<a href=\"#");
                            _ = missing_feature_url_escape;
                            try writer.writeAll(g.link_buffer.items);
                            try writer.print("\">{f}</a>", .{markdown.fmtHtml(content)});
                        } else {
                            try writer.print("{f}", .{markdown.fmtHtml(content)});
                        }

                        try writer.writeAll("</code>");
                    },

                    else => try Renderer.renderDefault(r, doc, node, writer),
                }
            }
        }.render,
    };

    var allocating = Writer.Allocating.fromArrayList(gpa, out);
    defer out.* = allocating.toArrayList();
    renderer.render(parsed_doc, &allocating.writer) catch |err| switch (err) {
        error.WriteFailed => return error.OutOfMemory,
    };
}

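/// Resolves a dotted path (e.g. a hypothetical `Io.Writer.print`) relative to
/// `decl_index`: the first component via scope lookup, the rest as children,
/// following an alias whenever an intermediate component resolves to one.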
fn resolve_decl_path(decl_index: Decl.Index, path: []const u8) ?Decl.Index {
    var path_components = std.mem.splitScalar(u8, path, '.');
    var current_decl_index = decl_index.get().lookup(path_components.first()) orelse return null;
    while (path_components.next()) |component| {
        switch (current_decl_index.get().categorize()) {
            .alias => |aliasee| current_decl_index = aliasee,
            else => {},
        }
        current_decl_index = current_decl_index.get().get_child(component) orelse return null;
    }
    return current_decl_index;
}

export fn decl_type_html(decl_index: Decl.Index) String {
    const decl = decl_index.get();
    const ast = decl.file.get_ast();
    string_result.clearRetainingCapacity();
    t: {
        // If there is an explicit type, use it.
        if (ast.fullVarDecl(decl.ast_node)) |var_decl| {
            if (var_decl.ast.type_node.unwrap()) |type_node| {
                string_result.appendSlice(gpa, "<code>") catch @panic("OOM");
                fileSourceHtml(decl.file, &string_result, type_node, .{
                    .skip_comments = true,
                    .collapse_whitespace = true,
                }) catch |e| {
                    std.debug.panic("unable to render html: {s}", .{@errorName(e)});
                };
                string_result.appendSlice(gpa, "</code>") catch @panic("OOM");
                break :t;
            }
        }
    }
    return String.init(string_result.items);
}

const Oom = error{OutOfMemory};

fn unpackInner(tar_bytes: []u8) !void {
    var reader: std.Io.Reader = .fixed(tar_bytes);
    var file_name_buffer: [1024]u8 = undefined;
    var link_name_buffer: [1024]u8 = undefined;
    var it: std.tar.Iterator = .init(&reader, .{
        .file_name_buffer = &file_name_buffer,
        .link_name_buffer = &link_name_buffer,
    });
    while (try it.next()) |tar_file| {
        switch (tar_file.kind) {
            .file => {
                if (tar_file.size == 0 and tar_file.name.len == 0) break;
                if (std.mem.endsWith(u8, tar_file.name, ".zig")) {
                    log.debug("found file: '{s}'", .{tar_file.name});
                    const file_name = try gpa.dupe(u8, tar_file.name);
                    if (std.mem.indexOfScalar(u8, file_name, '/')) |pkg_name_end| {
                        const pkg_name = file_name[0..pkg_name_end];
                        const gop = try Walk.modules.getOrPut(gpa, pkg_name);
                        const file: Walk.File.Index = @enumFromInt(Walk.files.entries.len);
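                        // Treat this file as the module root if it is the
                        // first file seen for the package, it is the package's
                        // root.zig, or its basename matches the package name
                        // (e.g. a hypothetical pkg/pkg.zig).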
                        if (!gop.found_existing or
                            std.mem.eql(u8, file_name[pkg_name_end..], "/root.zig") or
                            std.mem.eql(u8, file_name[pkg_name_end + 1 .. file_name.len - ".zig".len], pkg_name))
                        {
                            gop.value_ptr.* = file;
                        }
                        const file_bytes = tar_bytes[reader.seek..][0..@intCast(tar_file.size)];
                        assert(file == try Walk.add_file(file_name, file_bytes));
                    }
                } else {
                    log.warn("skipping: '{s}' - the tar creation should have done that", .{
                        tar_file.name,
                    });
                }
            },
            else => continue,
        }
    }
}

fn ascii_lower(bytes: []u8) void {
    for (bytes) |*b| b.* = std.ascii.toLower(b.*);
}
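
// A minimal sanity check for the helper above (illustrative sketch).
test ascii_lower {
    var buf = "MiXeD123".*;
    ascii_lower(&buf);
    try std.testing.expectEqualStrings("mixed123", &buf);
}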

export fn module_name(index: u32) String {
    const names = Walk.modules.keys();
    return String.init(if (index >= names.len) "" else names[index]);
}

export fn find_module_root(pkg: Walk.ModuleIndex) Decl.Index {
    const root_file = Walk.modules.values()[@intFromEnum(pkg)];
    const result = root_file.findRootDecl();
    assert(result != .none);
    return result;
}

/// Set by `set_input_string`.
var input_string: ArrayList(u8) = .empty;

export fn set_input_string(len: usize) [*]u8 {
    input_string.resize(gpa, len) catch @panic("OOM");
    return input_string.items.ptr;
}

/// Looks up the root struct decl corresponding to a file by path.
/// Uses `input_string`.
export fn find_file_root() Decl.Index {
    const file: Walk.File.Index = @enumFromInt(Walk.files.getIndex(input_string.items) orelse return .none);
    return file.findRootDecl();
}

/// Uses `input_string`.
/// Tries to look up the Decl component-wise but then falls back to a file path
/// based scan.
export fn find_decl() Decl.Index {
    const result = Decl.find(input_string.items);
    if (result != .none) return result;

    const g = struct {
        var match_fqn: ArrayList(u8) = .empty;
    };
    for (Walk.decls.items, 0..) |*decl, decl_index| {
        g.match_fqn.clearRetainingCapacity();
        decl.fqn(&g.match_fqn) catch @panic("OOM");
        if (std.mem.eql(u8, g.match_fqn.items, input_string.items)) {
            //const path = @as(Decl.Index, @enumFromInt(decl_index)).get().file.path();
            //log.debug("find_decl '{s}' found in {s}", .{ input_string.items, path });
            return @enumFromInt(decl_index);
        }
    }
    return .none;
}

/// Set only by `categorize_decl`; read only by `get_aliasee`, valid only
/// when `categorize_decl` returns `.alias`.
var global_aliasee: Decl.Index = .none;

export fn get_aliasee() Decl.Index {
    return global_aliasee;
}
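
/// Returns the category of `decl_index`, following at most `resolve_alias_count`
/// aliases. `global_aliasee` is updated whenever an alias is seen, so a result
/// of `.alias` means the chase budget ran out and `get_aliasee` yields the next
/// target in the chain.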
export fn categorize_decl(decl_index: Decl.Index, resolve_alias_count: usize) Walk.Category.Tag {
    global_aliasee = .none;
    var chase_alias_n = resolve_alias_count;
    var decl = decl_index.get();
    while (true) {
        const result = decl.categorize();
        switch (result) {
            .alias => |new_index| {
                assert(new_index != .none);
                global_aliasee = new_index;
                if (chase_alias_n > 0) {
                    chase_alias_n -= 1;
                    decl = new_index.get();
                    continue;
                }
            },
            else => {},
        }
        return result;
    }
}

export fn type_fn_members(parent: Decl.Index, include_private: bool) Slice(Decl.Index) {
    const decl = parent.get();

    // If the type function returns another type function, get the members of that function
    if (decl.get_type_fn_return_type_fn()) |function_decl| {
        return namespace_members(function_decl, include_private);
    }

    return namespace_members(parent, include_private);
}

export fn namespace_members(parent: Decl.Index, include_private: bool) Slice(Decl.Index) {
    const g = struct {
        var members: ArrayList(Decl.Index) = .empty;
    };

    g.members.clearRetainingCapacity();

    for (Walk.decls.items, 0..) |*decl, i| {
        if (decl.parent == parent) {
            if (include_private or decl.is_pub()) {
                g.members.append(gpa, @enumFromInt(i)) catch @panic("OOM");
            }
        }
    }

    return Slice(Decl.Index).init(g.members.items);
}

fn count_scalar(haystack: []const u8, needle: u8) usize {
    var total: usize = 0;
    for (haystack) |elem| {
        if (elem == needle)
            total += 1;
    }
    return total;
}
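
// A minimal sanity check for the helper above (illustrative sketch); counting
// '.' separators like this is how query scoring derives the segment count.
test count_scalar {
    try std.testing.expectEqual(@as(usize, 2), count_scalar("a.b.c", '.'));
    try std.testing.expectEqual(@as(usize, 0), count_scalar("abc", '.'));
}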