//! Lowers this backend's Mir into the final wasm bytecode written to `code`.
1const Emit = @This();
2
3const std = @import("std");
4const assert = std.debug.assert;
5const Allocator = std.mem.Allocator;
6const ArrayList = std.ArrayList;
7
8const Wasm = link.File.Wasm;
9const Mir = @import("Mir.zig");
10const link = @import("../../link.zig");
11const Zcu = @import("../../Zcu.zig");
12const InternPool = @import("../../InternPool.zig");
13const codegen = @import("../../codegen.zig");
14
/// The machine IR that `lowerToCode` consumes.
mir: Mir,
/// Linker state: supplies the allocator, output relocations, and
/// symbol/address lookups used while emitting.
wasm: *Wasm,
/// The binary representation that will be emitted by this module.
code: *ArrayList(u8),

/// `lowerToCode` can only fail by running out of memory; unimplemented
/// instruction encodings panic with "TODO" instead of returning an error.
pub const Error = error{
    OutOfMemory,
};
23
/// Lowers the MIR instruction stream into wasm bytecode, appending to
/// `emit.code` until a `.dbg_epilogue_begin` instruction is reached.
///
/// Dispatch is a labeled `switch` loop: each case advances `inst` and
/// re-dispatches via `continue :loop tags[inst]`.
///
/// For relocatable output (`is_obj`), address-like immediates are emitted as
/// zero padding plus an entry in `wasm.out_relocs` for the linker to patch;
/// for executables the final values are encoded directly.
///
/// Capacity discipline: every case reserves capacity with
/// `ensureUnusedCapacity` before using `appendAssumeCapacity`/`writeUleb128`/
/// `writeSleb128`, which all assume preallocated space.
pub fn lowerToCode(emit: *Emit) Error!void {
    const mir = &emit.mir;
    const code = emit.code;
    const wasm = emit.wasm;
    const comp = wasm.base.comp;
    const gpa = comp.gpa;
    const is_obj = comp.config.output_mode == .Obj;
    const target = &comp.root_mod.resolved_target.result;
    const is_wasm32 = target.cpu.arch == .wasm32;

    const tags = mir.instructions.items(.tag);
    const datas = mir.instructions.items(.data);
    var inst: u32 = 0;

    loop: switch (tags[inst]) {
        .dbg_epilogue_begin => {
            return;
        },
        .block, .loop => {
            const block_type = datas[inst].block_type;
            try code.ensureUnusedCapacity(gpa, 2);
            // The tag's integer value is used directly as the wasm opcode byte
            // here and below — presumably Mir defines these tags to match the
            // wasm opcode encoding; see Mir.zig to confirm.
            code.appendAssumeCapacity(@intFromEnum(tags[inst]));
            code.appendAssumeCapacity(@intFromEnum(block_type));

            inst += 1;
            continue :loop tags[inst];
        },
        .uav_ref => {
            if (is_obj) {
                try uavRefObj(wasm, code, datas[inst].ip_index, 0, is_wasm32);
            } else {
                try uavRefExe(wasm, code, datas[inst].ip_index, 0, is_wasm32);
            }
            inst += 1;
            continue :loop tags[inst];
        },
        .uav_ref_off => {
            const extra = mir.extraData(Mir.UavRefOff, datas[inst].payload).data;
            if (is_obj) {
                try uavRefObj(wasm, code, extra.value, extra.offset, is_wasm32);
            } else {
                try uavRefExe(wasm, code, extra.value, extra.offset, is_wasm32);
            }
            inst += 1;
            continue :loop tags[inst];
        },
        .nav_ref => {
            try navRefOff(wasm, code, .{ .nav_index = datas[inst].nav_index, .offset = 0 }, is_wasm32);
            inst += 1;
            continue :loop tags[inst];
        },
        .nav_ref_off => {
            try navRefOff(wasm, code, mir.extraData(Mir.NavRefOff, datas[inst].payload).data, is_wasm32);
            inst += 1;
            continue :loop tags[inst];
        },
        .func_ref => {
            const indirect_func_idx: Wasm.ZcuIndirectFunctionSetIndex = @enumFromInt(
                wasm.zcu_indirect_function_set.getIndex(datas[inst].nav_index).?,
            );
            // Fix: capacity must be reserved before appendAssumeCapacity and
            // writeUleb128 (both assume preallocated space); this case was the
            // only one missing the reservation. 1 opcode byte + up to 5 ULEB
            // bytes for the u32 index.
            try code.ensureUnusedCapacity(gpa, 6);
            code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.i32_const));
            if (is_obj) {
                @panic("TODO");
            } else {
                writeUleb128(code, 1 + @intFromEnum(indirect_func_idx));
            }
            inst += 1;
            continue :loop tags[inst];
        },
        .dbg_line => {
            inst += 1;
            continue :loop tags[inst];
        },
        .errors_len => {
            try code.ensureUnusedCapacity(gpa, 6);
            code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.i32_const));
            // MIR is lowered during flush, so there is indeed only one thread at this time.
            const errors_len = 1 + comp.zcu.?.intern_pool.global_error_set.getNamesFromMainThread().len;
            writeSleb128(code, errors_len);

            inst += 1;
            continue :loop tags[inst];
        },
        .error_name_table_ref => {
            wasm.error_name_table_ref_count += 1;
            try code.ensureUnusedCapacity(gpa, 11);
            const opcode: std.wasm.Opcode = if (is_wasm32) .i32_const else .i64_const;
            code.appendAssumeCapacity(@intFromEnum(opcode));
            if (is_obj) {
                try wasm.out_relocs.append(gpa, .{
                    .offset = @intCast(code.items.len),
                    .pointee = .{ .symbol_index = try wasm.errorNameTableSymbolIndex() },
                    .tag = if (is_wasm32) .memory_addr_leb else .memory_addr_leb64,
                    .addend = 0,
                });
                code.appendNTimesAssumeCapacity(0, if (is_wasm32) 5 else 10);

                inst += 1;
                continue :loop tags[inst];
            } else {
                const addr: u32 = wasm.errorNameTableAddr();
                writeSleb128(code, addr);

                inst += 1;
                continue :loop tags[inst];
            }
        },
        .br_if, .br, .memory_grow, .memory_size => {
            try code.ensureUnusedCapacity(gpa, 11);
            code.appendAssumeCapacity(@intFromEnum(tags[inst]));
            writeUleb128(code, datas[inst].label);

            inst += 1;
            continue :loop tags[inst];
        },

        .local_get, .local_set, .local_tee => {
            try code.ensureUnusedCapacity(gpa, 11);
            code.appendAssumeCapacity(@intFromEnum(tags[inst]));
            writeUleb128(code, datas[inst].local);

            inst += 1;
            continue :loop tags[inst];
        },

        .br_table => {
            const extra_index = datas[inst].payload;
            const extra = mir.extraData(Mir.JumpTable, extra_index);
            const labels = mir.extra[extra.end..][0..extra.data.length];
            try code.ensureUnusedCapacity(gpa, 11 + 10 * labels.len);
            code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.br_table));
            // -1 because default label is not part of length/depth.
            writeUleb128(code, extra.data.length - 1);
            for (labels) |label| writeUleb128(code, label);

            inst += 1;
            continue :loop tags[inst];
        },

        .call_nav => {
            try code.ensureUnusedCapacity(gpa, 6);
            code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.call));
            if (is_obj) {
                try wasm.out_relocs.append(gpa, .{
                    .offset = @intCast(code.items.len),
                    .pointee = .{ .symbol_index = try wasm.navSymbolIndex(datas[inst].nav_index) },
                    .tag = .function_index_leb,
                    .addend = 0,
                });
                code.appendNTimesAssumeCapacity(0, 5);
            } else {
                appendOutputFunctionIndex(code, .fromIpNav(wasm, datas[inst].nav_index));
            }

            inst += 1;
            continue :loop tags[inst];
        },

        .call_indirect => {
            try code.ensureUnusedCapacity(gpa, 11);
            const fn_info = comp.zcu.?.typeToFunc(.fromInterned(datas[inst].ip_index)).?;
            const func_ty_index = wasm.getExistingFunctionType(
                fn_info.cc,
                fn_info.param_types.get(&comp.zcu.?.intern_pool),
                .fromInterned(fn_info.return_type),
                target,
            ).?;
            if (is_obj) {
                code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.call_indirect));
                try wasm.out_relocs.append(gpa, .{
                    .offset = @intCast(code.items.len),
                    .pointee = .{ .type_index = func_ty_index },
                    .tag = .type_index_leb,
                    .addend = 0,
                });
                code.appendNTimesAssumeCapacity(0, 5);
            } else {
                const index: Wasm.Flush.FuncTypeIndex = @enumFromInt(wasm.flush_buffer.func_types.getIndex(func_ty_index) orelse {
                    // In this case we tried to call a function pointer for
                    // which the type signature does not match any function
                    // body or function import in the entire wasm executable.
                    //
                    // Since there is no way to create a reference to a
                    // function without it being in the function table or
                    // import table, this instruction is unreachable.
                    code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.@"unreachable"));
                    inst += 1;
                    continue :loop tags[inst];
                });
                code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.call_indirect));
                writeUleb128(code, @intFromEnum(index));
            }
            writeUleb128(code, @as(u32, 0)); // table index

            inst += 1;
            continue :loop tags[inst];
        },

        .call_tag_name => {
            try code.ensureUnusedCapacity(gpa, 6);
            code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.call));
            if (is_obj) {
                try wasm.out_relocs.append(gpa, .{
                    .offset = @intCast(code.items.len),
                    .pointee = .{ .symbol_index = try wasm.tagNameSymbolIndex(datas[inst].ip_index) },
                    .tag = .function_index_leb,
                    .addend = 0,
                });
                code.appendNTimesAssumeCapacity(0, 5);
            } else {
                appendOutputFunctionIndex(code, .fromTagNameType(wasm, datas[inst].ip_index));
            }

            inst += 1;
            continue :loop tags[inst];
        },

        .call_intrinsic => {
            // Although this currently uses `wasm.internString`, note that it
            // *could* be changed to directly index into a preloaded strings
            // table initialized based on the `Mir.Intrinsic` enum.
            const symbol_name = try wasm.internString(@tagName(datas[inst].intrinsic));

            try code.ensureUnusedCapacity(gpa, 6);
            code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.call));
            if (is_obj) {
                try wasm.out_relocs.append(gpa, .{
                    .offset = @intCast(code.items.len),
                    .pointee = .{ .symbol_index = try wasm.symbolNameIndex(symbol_name) },
                    .tag = .function_index_leb,
                    .addend = 0,
                });
                code.appendNTimesAssumeCapacity(0, 5);
            } else {
                appendOutputFunctionIndex(code, .fromSymbolName(wasm, symbol_name));
            }

            inst += 1;
            continue :loop tags[inst];
        },

        .global_set_sp => {
            try code.ensureUnusedCapacity(gpa, 6);
            code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.global_set));
            if (is_obj) {
                try wasm.out_relocs.append(gpa, .{
                    .offset = @intCast(code.items.len),
                    .pointee = .{ .symbol_index = try wasm.stackPointerSymbolIndex() },
                    .tag = .global_index_leb,
                    .addend = 0,
                });
                code.appendNTimesAssumeCapacity(0, 5);
            } else {
                const sp_global: Wasm.GlobalIndex = .stack_pointer;
                writeUleb128(code, @intFromEnum(sp_global));
            }

            inst += 1;
            continue :loop tags[inst];
        },

        .f32_const => {
            try code.ensureUnusedCapacity(gpa, 5);
            code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.f32_const));
            // f32/f64 immediates are not LEB-encoded; they are raw little-endian IEEE bits.
            std.mem.writeInt(u32, code.addManyAsArrayAssumeCapacity(4), @bitCast(datas[inst].float32), .little);

            inst += 1;
            continue :loop tags[inst];
        },

        .f64_const => {
            try code.ensureUnusedCapacity(gpa, 9);
            code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.f64_const));
            const float64 = mir.extraData(Mir.Float64, datas[inst].payload).data;
            std.mem.writeInt(u64, code.addManyAsArrayAssumeCapacity(8), float64.toInt(), .little);

            inst += 1;
            continue :loop tags[inst];
        },
        .i32_const => {
            try code.ensureUnusedCapacity(gpa, 6);
            code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.i32_const));
            writeSleb128(code, datas[inst].imm32);

            inst += 1;
            continue :loop tags[inst];
        },
        .i64_const => {
            try code.ensureUnusedCapacity(gpa, 11);
            code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.i64_const));
            const int64: i64 = @bitCast(mir.extraData(Mir.Imm64, datas[inst].payload).data.toInt());
            writeSleb128(code, int64);

            inst += 1;
            continue :loop tags[inst];
        },

        // Loads and stores: opcode byte followed by a memarg immediate pair.
        .i32_load,
        .i64_load,
        .f32_load,
        .f64_load,
        .i32_load8_s,
        .i32_load8_u,
        .i32_load16_s,
        .i32_load16_u,
        .i64_load8_s,
        .i64_load8_u,
        .i64_load16_s,
        .i64_load16_u,
        .i64_load32_s,
        .i64_load32_u,
        .i32_store,
        .i64_store,
        .f32_store,
        .f64_store,
        .i32_store8,
        .i32_store16,
        .i64_store8,
        .i64_store16,
        .i64_store32,
        => {
            try code.ensureUnusedCapacity(gpa, 1 + 20);
            code.appendAssumeCapacity(@intFromEnum(tags[inst]));
            encodeMemArg(code, mir.extraData(Mir.MemArg, datas[inst].payload).data);
            inst += 1;
            continue :loop tags[inst];
        },

        // Single-byte instructions with no immediates.
        .end,
        .@"return",
        .@"unreachable",
        .select,
        .i32_eqz,
        .i32_eq,
        .i32_ne,
        .i32_lt_s,
        .i32_lt_u,
        .i32_gt_s,
        .i32_gt_u,
        .i32_le_s,
        .i32_le_u,
        .i32_ge_s,
        .i32_ge_u,
        .i64_eqz,
        .i64_eq,
        .i64_ne,
        .i64_lt_s,
        .i64_lt_u,
        .i64_gt_s,
        .i64_gt_u,
        .i64_le_s,
        .i64_le_u,
        .i64_ge_s,
        .i64_ge_u,
        .f32_eq,
        .f32_ne,
        .f32_lt,
        .f32_gt,
        .f32_le,
        .f32_ge,
        .f64_eq,
        .f64_ne,
        .f64_lt,
        .f64_gt,
        .f64_le,
        .f64_ge,
        .i32_add,
        .i32_sub,
        .i32_mul,
        .i32_div_s,
        .i32_div_u,
        .i32_and,
        .i32_or,
        .i32_xor,
        .i32_shl,
        .i32_shr_s,
        .i32_shr_u,
        .i64_add,
        .i64_sub,
        .i64_mul,
        .i64_div_s,
        .i64_div_u,
        .i64_and,
        .i64_or,
        .i64_xor,
        .i64_shl,
        .i64_shr_s,
        .i64_shr_u,
        .f32_abs,
        .f32_neg,
        .f32_ceil,
        .f32_floor,
        .f32_trunc,
        .f32_nearest,
        .f32_sqrt,
        .f32_add,
        .f32_sub,
        .f32_mul,
        .f32_div,
        .f32_min,
        .f32_max,
        .f32_copysign,
        .f64_abs,
        .f64_neg,
        .f64_ceil,
        .f64_floor,
        .f64_trunc,
        .f64_nearest,
        .f64_sqrt,
        .f64_add,
        .f64_sub,
        .f64_mul,
        .f64_div,
        .f64_min,
        .f64_max,
        .f64_copysign,
        .i32_wrap_i64,
        .i64_extend_i32_s,
        .i64_extend_i32_u,
        .i32_extend8_s,
        .i32_extend16_s,
        .i64_extend8_s,
        .i64_extend16_s,
        .i64_extend32_s,
        .f32_demote_f64,
        .f64_promote_f32,
        .i32_reinterpret_f32,
        .i64_reinterpret_f64,
        .f32_reinterpret_i32,
        .f64_reinterpret_i64,
        .i32_trunc_f32_s,
        .i32_trunc_f32_u,
        .i32_trunc_f64_s,
        .i32_trunc_f64_u,
        .i64_trunc_f32_s,
        .i64_trunc_f32_u,
        .i64_trunc_f64_s,
        .i64_trunc_f64_u,
        .f32_convert_i32_s,
        .f32_convert_i32_u,
        .f32_convert_i64_s,
        .f32_convert_i64_u,
        .f64_convert_i32_s,
        .f64_convert_i32_u,
        .f64_convert_i64_s,
        .f64_convert_i64_u,
        .i32_rem_s,
        .i32_rem_u,
        .i64_rem_s,
        .i64_rem_u,
        .i32_popcnt,
        .i64_popcnt,
        .i32_clz,
        .i32_ctz,
        .i64_clz,
        .i64_ctz,
        => {
            try code.append(gpa, @intFromEnum(tags[inst]));
            inst += 1;
            continue :loop tags[inst];
        },

        .misc_prefix => {
            try code.ensureUnusedCapacity(gpa, 6 + 6);
            const extra_index = datas[inst].payload;
            const opcode = mir.extra[extra_index];
            code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.misc_prefix));
            writeUleb128(code, opcode);
            switch (@as(std.wasm.MiscOpcode, @enumFromInt(opcode))) {
                // bulk-memory opcodes
                .data_drop => {
                    const segment = mir.extra[extra_index + 1];
                    writeUleb128(code, segment);

                    inst += 1;
                    continue :loop tags[inst];
                },
                .memory_init => {
                    const segment = mir.extra[extra_index + 1];
                    writeUleb128(code, segment);
                    writeUleb128(code, @as(u32, 0)); // memory index

                    inst += 1;
                    continue :loop tags[inst];
                },
                .memory_fill => {
                    writeUleb128(code, @as(u32, 0)); // memory index

                    inst += 1;
                    continue :loop tags[inst];
                },
                .memory_copy => {
                    writeUleb128(code, @as(u32, 0)); // dst memory index
                    writeUleb128(code, @as(u32, 0)); // src memory index

                    inst += 1;
                    continue :loop tags[inst];
                },

                // nontrapping-float-to-int-conversion opcodes
                .i32_trunc_sat_f32_s,
                .i32_trunc_sat_f32_u,
                .i32_trunc_sat_f64_s,
                .i32_trunc_sat_f64_u,
                .i64_trunc_sat_f32_s,
                .i64_trunc_sat_f32_u,
                .i64_trunc_sat_f64_s,
                .i64_trunc_sat_f64_u,
                => {
                    inst += 1;
                    continue :loop tags[inst];
                },

                .table_init => @panic("TODO"),
                .elem_drop => @panic("TODO"),
                .table_copy => @panic("TODO"),
                .table_grow => @panic("TODO"),
                .table_size => @panic("TODO"),
                .table_fill => @panic("TODO"),

                _ => unreachable,
            }
            comptime unreachable;
        },
        .simd_prefix => {
            try code.ensureUnusedCapacity(gpa, 6 + 20);
            const extra_index = datas[inst].payload;
            const opcode = mir.extra[extra_index];
            code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.simd_prefix));
            writeUleb128(code, opcode);
            switch (@as(std.wasm.SimdOpcode, @enumFromInt(opcode))) {
                .v128_store,
                .v128_load,
                .v128_load8_splat,
                .v128_load16_splat,
                .v128_load32_splat,
                .v128_load64_splat,
                => {
                    encodeMemArg(code, mir.extraData(Mir.MemArg, extra_index + 1).data);
                    inst += 1;
                    continue :loop tags[inst];
                },
                .v128_const, .i8x16_shuffle => {
                    // 16 raw immediate bytes stored as 4 u32s in `extra`.
                    code.appendSliceAssumeCapacity(std.mem.asBytes(mir.extra[extra_index + 1 ..][0..4]));
                    inst += 1;
                    continue :loop tags[inst];
                },
                .i8x16_extract_lane_s,
                .i8x16_extract_lane_u,
                .i8x16_replace_lane,
                .i16x8_extract_lane_s,
                .i16x8_extract_lane_u,
                .i16x8_replace_lane,
                .i32x4_extract_lane,
                .i32x4_replace_lane,
                .i64x2_extract_lane,
                .i64x2_replace_lane,
                .f32x4_extract_lane,
                .f32x4_replace_lane,
                .f64x2_extract_lane,
                .f64x2_replace_lane,
                => {
                    // Single lane-index immediate byte.
                    code.appendAssumeCapacity(@intCast(mir.extra[extra_index + 1]));
                    inst += 1;
                    continue :loop tags[inst];
                },
                .i8x16_splat,
                .i16x8_splat,
                .i32x4_splat,
                .i64x2_splat,
                .f32x4_splat,
                .f64x2_splat,
                => {
                    inst += 1;
                    continue :loop tags[inst];
                },

                .v128_load8x8_s => @panic("TODO"),
                .v128_load8x8_u => @panic("TODO"),
                .v128_load16x4_s => @panic("TODO"),
                .v128_load16x4_u => @panic("TODO"),
                .v128_load32x2_s => @panic("TODO"),
                .v128_load32x2_u => @panic("TODO"),
                .i8x16_swizzle => @panic("TODO"),
                .i8x16_eq => @panic("TODO"),
                .i16x8_eq => @panic("TODO"),
                .i32x4_eq => @panic("TODO"),
                .i8x16_ne => @panic("TODO"),
                .i16x8_ne => @panic("TODO"),
                .i32x4_ne => @panic("TODO"),
                .i8x16_lt_s => @panic("TODO"),
                .i16x8_lt_s => @panic("TODO"),
                .i32x4_lt_s => @panic("TODO"),
                .i8x16_lt_u => @panic("TODO"),
                .i16x8_lt_u => @panic("TODO"),
                .i32x4_lt_u => @panic("TODO"),
                .i8x16_gt_s => @panic("TODO"),
                .i16x8_gt_s => @panic("TODO"),
                .i32x4_gt_s => @panic("TODO"),
                .i8x16_gt_u => @panic("TODO"),
                .i16x8_gt_u => @panic("TODO"),
                .i32x4_gt_u => @panic("TODO"),
                .i8x16_le_s => @panic("TODO"),
                .i16x8_le_s => @panic("TODO"),
                .i32x4_le_s => @panic("TODO"),
                .i8x16_le_u => @panic("TODO"),
                .i16x8_le_u => @panic("TODO"),
                .i32x4_le_u => @panic("TODO"),
                .i8x16_ge_s => @panic("TODO"),
                .i16x8_ge_s => @panic("TODO"),
                .i32x4_ge_s => @panic("TODO"),
                .i8x16_ge_u => @panic("TODO"),
                .i16x8_ge_u => @panic("TODO"),
                .i32x4_ge_u => @panic("TODO"),
                .f32x4_eq => @panic("TODO"),
                .f64x2_eq => @panic("TODO"),
                .f32x4_ne => @panic("TODO"),
                .f64x2_ne => @panic("TODO"),
                .f32x4_lt => @panic("TODO"),
                .f64x2_lt => @panic("TODO"),
                .f32x4_gt => @panic("TODO"),
                .f64x2_gt => @panic("TODO"),
                .f32x4_le => @panic("TODO"),
                .f64x2_le => @panic("TODO"),
                .f32x4_ge => @panic("TODO"),
                .f64x2_ge => @panic("TODO"),
                .v128_not => @panic("TODO"),
                .v128_and => @panic("TODO"),
                .v128_andnot => @panic("TODO"),
                .v128_or => @panic("TODO"),
                .v128_xor => @panic("TODO"),
                .v128_bitselect => @panic("TODO"),
                .v128_any_true => @panic("TODO"),
                .v128_load8_lane => @panic("TODO"),
                .v128_load16_lane => @panic("TODO"),
                .v128_load32_lane => @panic("TODO"),
                .v128_load64_lane => @panic("TODO"),
                .v128_store8_lane => @panic("TODO"),
                .v128_store16_lane => @panic("TODO"),
                .v128_store32_lane => @panic("TODO"),
                .v128_store64_lane => @panic("TODO"),
                .v128_load32_zero => @panic("TODO"),
                .v128_load64_zero => @panic("TODO"),
                .f32x4_demote_f64x2_zero => @panic("TODO"),
                .f64x2_promote_low_f32x4 => @panic("TODO"),
                .i8x16_abs => @panic("TODO"),
                .i16x8_abs => @panic("TODO"),
                .i32x4_abs => @panic("TODO"),
                .i64x2_abs => @panic("TODO"),
                .i8x16_neg => @panic("TODO"),
                .i16x8_neg => @panic("TODO"),
                .i32x4_neg => @panic("TODO"),
                .i64x2_neg => @panic("TODO"),
                .i8x16_popcnt => @panic("TODO"),
                .i16x8_q15mulr_sat_s => @panic("TODO"),
                .i8x16_all_true => @panic("TODO"),
                .i16x8_all_true => @panic("TODO"),
                .i32x4_all_true => @panic("TODO"),
                .i64x2_all_true => @panic("TODO"),
                .i8x16_bitmask => @panic("TODO"),
                .i16x8_bitmask => @panic("TODO"),
                .i32x4_bitmask => @panic("TODO"),
                .i64x2_bitmask => @panic("TODO"),
                .i8x16_narrow_i16x8_s => @panic("TODO"),
                .i16x8_narrow_i32x4_s => @panic("TODO"),
                .i8x16_narrow_i16x8_u => @panic("TODO"),
                .i16x8_narrow_i32x4_u => @panic("TODO"),
                .f32x4_ceil => @panic("TODO"),
                .i16x8_extend_low_i8x16_s => @panic("TODO"),
                .i32x4_extend_low_i16x8_s => @panic("TODO"),
                .i64x2_extend_low_i32x4_s => @panic("TODO"),
                .f32x4_floor => @panic("TODO"),
                .i16x8_extend_high_i8x16_s => @panic("TODO"),
                .i32x4_extend_high_i16x8_s => @panic("TODO"),
                .i64x2_extend_high_i32x4_s => @panic("TODO"),
                .f32x4_trunc => @panic("TODO"),
                .i16x8_extend_low_i8x16_u => @panic("TODO"),
                .i32x4_extend_low_i16x8_u => @panic("TODO"),
                .i64x2_extend_low_i32x4_u => @panic("TODO"),
                .f32x4_nearest => @panic("TODO"),
                .i16x8_extend_high_i8x16_u => @panic("TODO"),
                .i32x4_extend_high_i16x8_u => @panic("TODO"),
                .i64x2_extend_high_i32x4_u => @panic("TODO"),
                .i8x16_shl => @panic("TODO"),
                .i16x8_shl => @panic("TODO"),
                .i32x4_shl => @panic("TODO"),
                .i64x2_shl => @panic("TODO"),
                .i8x16_shr_s => @panic("TODO"),
                .i16x8_shr_s => @panic("TODO"),
                .i32x4_shr_s => @panic("TODO"),
                .i64x2_shr_s => @panic("TODO"),
                .i8x16_shr_u => @panic("TODO"),
                .i16x8_shr_u => @panic("TODO"),
                .i32x4_shr_u => @panic("TODO"),
                .i64x2_shr_u => @panic("TODO"),
                .i8x16_add => @panic("TODO"),
                .i16x8_add => @panic("TODO"),
                .i32x4_add => @panic("TODO"),
                .i64x2_add => @panic("TODO"),
                .i8x16_add_sat_s => @panic("TODO"),
                .i16x8_add_sat_s => @panic("TODO"),
                .i8x16_add_sat_u => @panic("TODO"),
                .i16x8_add_sat_u => @panic("TODO"),
                .i8x16_sub => @panic("TODO"),
                .i16x8_sub => @panic("TODO"),
                .i32x4_sub => @panic("TODO"),
                .i64x2_sub => @panic("TODO"),
                .i8x16_sub_sat_s => @panic("TODO"),
                .i16x8_sub_sat_s => @panic("TODO"),
                .i8x16_sub_sat_u => @panic("TODO"),
                .i16x8_sub_sat_u => @panic("TODO"),
                .f64x2_ceil => @panic("TODO"),
                .f64x2_nearest => @panic("TODO"),
                .f64x2_floor => @panic("TODO"),
                .i16x8_mul => @panic("TODO"),
                .i32x4_mul => @panic("TODO"),
                .i64x2_mul => @panic("TODO"),
                .i8x16_min_s => @panic("TODO"),
                .i16x8_min_s => @panic("TODO"),
                .i32x4_min_s => @panic("TODO"),
                .i64x2_eq => @panic("TODO"),
                .i8x16_min_u => @panic("TODO"),
                .i16x8_min_u => @panic("TODO"),
                .i32x4_min_u => @panic("TODO"),
                .i64x2_ne => @panic("TODO"),
                .i8x16_max_s => @panic("TODO"),
                .i16x8_max_s => @panic("TODO"),
                .i32x4_max_s => @panic("TODO"),
                .i64x2_lt_s => @panic("TODO"),
                .i8x16_max_u => @panic("TODO"),
                .i16x8_max_u => @panic("TODO"),
                .i32x4_max_u => @panic("TODO"),
                .i64x2_gt_s => @panic("TODO"),
                .f64x2_trunc => @panic("TODO"),
                .i32x4_dot_i16x8_s => @panic("TODO"),
                .i64x2_le_s => @panic("TODO"),
                .i8x16_avgr_u => @panic("TODO"),
                .i16x8_avgr_u => @panic("TODO"),
                .i64x2_ge_s => @panic("TODO"),
                .i16x8_extadd_pairwise_i8x16_s => @panic("TODO"),
                .i16x8_extmul_low_i8x16_s => @panic("TODO"),
                .i32x4_extmul_low_i16x8_s => @panic("TODO"),
                .i64x2_extmul_low_i32x4_s => @panic("TODO"),
                .i16x8_extadd_pairwise_i8x16_u => @panic("TODO"),
                .i16x8_extmul_high_i8x16_s => @panic("TODO"),
                .i32x4_extmul_high_i16x8_s => @panic("TODO"),
                .i64x2_extmul_high_i32x4_s => @panic("TODO"),
                .i32x4_extadd_pairwise_i16x8_s => @panic("TODO"),
                .i16x8_extmul_low_i8x16_u => @panic("TODO"),
                .i32x4_extmul_low_i16x8_u => @panic("TODO"),
                .i64x2_extmul_low_i32x4_u => @panic("TODO"),
                .i32x4_extadd_pairwise_i16x8_u => @panic("TODO"),
                .i16x8_extmul_high_i8x16_u => @panic("TODO"),
                .i32x4_extmul_high_i16x8_u => @panic("TODO"),
                .i64x2_extmul_high_i32x4_u => @panic("TODO"),
                .f32x4_abs => @panic("TODO"),
                .f64x2_abs => @panic("TODO"),
                .f32x4_neg => @panic("TODO"),
                .f64x2_neg => @panic("TODO"),
                .f32x4_sqrt => @panic("TODO"),
                .f64x2_sqrt => @panic("TODO"),
                .f32x4_add => @panic("TODO"),
                .f64x2_add => @panic("TODO"),
                .f32x4_sub => @panic("TODO"),
                .f64x2_sub => @panic("TODO"),
                .f32x4_mul => @panic("TODO"),
                .f64x2_mul => @panic("TODO"),
                .f32x4_div => @panic("TODO"),
                .f64x2_div => @panic("TODO"),
                .f32x4_min => @panic("TODO"),
                .f64x2_min => @panic("TODO"),
                .f32x4_max => @panic("TODO"),
                .f64x2_max => @panic("TODO"),
                .f32x4_pmin => @panic("TODO"),
                .f64x2_pmin => @panic("TODO"),
                .f32x4_pmax => @panic("TODO"),
                .f64x2_pmax => @panic("TODO"),
                .i32x4_trunc_sat_f32x4_s => @panic("TODO"),
                .i32x4_trunc_sat_f32x4_u => @panic("TODO"),
                .f32x4_convert_i32x4_s => @panic("TODO"),
                .f32x4_convert_i32x4_u => @panic("TODO"),
                .i32x4_trunc_sat_f64x2_s_zero => @panic("TODO"),
                .i32x4_trunc_sat_f64x2_u_zero => @panic("TODO"),
                .f64x2_convert_low_i32x4_s => @panic("TODO"),
                .f64x2_convert_low_i32x4_u => @panic("TODO"),
                .i8x16_relaxed_swizzle => @panic("TODO"),
                .i32x4_relaxed_trunc_f32x4_s => @panic("TODO"),
                .i32x4_relaxed_trunc_f32x4_u => @panic("TODO"),
                .i32x4_relaxed_trunc_f64x2_s_zero => @panic("TODO"),
                .i32x4_relaxed_trunc_f64x2_u_zero => @panic("TODO"),
                .f32x4_relaxed_madd => @panic("TODO"),
                .f32x4_relaxed_nmadd => @panic("TODO"),
                .f64x2_relaxed_madd => @panic("TODO"),
                .f64x2_relaxed_nmadd => @panic("TODO"),
                .i8x16_relaxed_laneselect => @panic("TODO"),
                .i16x8_relaxed_laneselect => @panic("TODO"),
                .i32x4_relaxed_laneselect => @panic("TODO"),
                .i64x2_relaxed_laneselect => @panic("TODO"),
                .f32x4_relaxed_min => @panic("TODO"),
                .f32x4_relaxed_max => @panic("TODO"),
                .f64x2_relaxed_min => @panic("TODO"),
                .f64x2_relaxed_max => @panic("TODO"),
                .i16x8_relaxed_q15mulr_s => @panic("TODO"),
                .i16x8_relaxed_dot_i8x16_i7x16_s => @panic("TODO"),
                .i32x4_relaxed_dot_i8x16_i7x16_add_s => @panic("TODO"),
                .f32x4_relaxed_dot_bf16x8_add_f32x4 => @panic("TODO"),
            }
            comptime unreachable;
        },
        .atomics_prefix => {
            try code.ensureUnusedCapacity(gpa, 6 + 20);

            const extra_index = datas[inst].payload;
            const opcode = mir.extra[extra_index];
            code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.atomics_prefix));
            writeUleb128(code, opcode);
            switch (@as(std.wasm.AtomicsOpcode, @enumFromInt(opcode))) {
                .i32_atomic_load,
                .i64_atomic_load,
                .i32_atomic_load8_u,
                .i32_atomic_load16_u,
                .i64_atomic_load8_u,
                .i64_atomic_load16_u,
                .i64_atomic_load32_u,
                .i32_atomic_store,
                .i64_atomic_store,
                .i32_atomic_store8,
                .i32_atomic_store16,
                .i64_atomic_store8,
                .i64_atomic_store16,
                .i64_atomic_store32,
                .i32_atomic_rmw_add,
                .i64_atomic_rmw_add,
                .i32_atomic_rmw8_add_u,
                .i32_atomic_rmw16_add_u,
                .i64_atomic_rmw8_add_u,
                .i64_atomic_rmw16_add_u,
                .i64_atomic_rmw32_add_u,
                .i32_atomic_rmw_sub,
                .i64_atomic_rmw_sub,
                .i32_atomic_rmw8_sub_u,
                .i32_atomic_rmw16_sub_u,
                .i64_atomic_rmw8_sub_u,
                .i64_atomic_rmw16_sub_u,
                .i64_atomic_rmw32_sub_u,
                .i32_atomic_rmw_and,
                .i64_atomic_rmw_and,
                .i32_atomic_rmw8_and_u,
                .i32_atomic_rmw16_and_u,
                .i64_atomic_rmw8_and_u,
                .i64_atomic_rmw16_and_u,
                .i64_atomic_rmw32_and_u,
                .i32_atomic_rmw_or,
                .i64_atomic_rmw_or,
                .i32_atomic_rmw8_or_u,
                .i32_atomic_rmw16_or_u,
                .i64_atomic_rmw8_or_u,
                .i64_atomic_rmw16_or_u,
                .i64_atomic_rmw32_or_u,
                .i32_atomic_rmw_xor,
                .i64_atomic_rmw_xor,
                .i32_atomic_rmw8_xor_u,
                .i32_atomic_rmw16_xor_u,
                .i64_atomic_rmw8_xor_u,
                .i64_atomic_rmw16_xor_u,
                .i64_atomic_rmw32_xor_u,
                .i32_atomic_rmw_xchg,
                .i64_atomic_rmw_xchg,
                .i32_atomic_rmw8_xchg_u,
                .i32_atomic_rmw16_xchg_u,
                .i64_atomic_rmw8_xchg_u,
                .i64_atomic_rmw16_xchg_u,
                .i64_atomic_rmw32_xchg_u,

                .i32_atomic_rmw_cmpxchg,
                .i64_atomic_rmw_cmpxchg,
                .i32_atomic_rmw8_cmpxchg_u,
                .i32_atomic_rmw16_cmpxchg_u,
                .i64_atomic_rmw8_cmpxchg_u,
                .i64_atomic_rmw16_cmpxchg_u,
                .i64_atomic_rmw32_cmpxchg_u,
                => {
                    const mem_arg = mir.extraData(Mir.MemArg, extra_index + 1).data;
                    encodeMemArg(code, mem_arg);
                    inst += 1;
                    continue :loop tags[inst];
                },
                .atomic_fence => {
                    // Hard-codes memory index 0 since multi-memory proposal is
                    // not yet accepted nor implemented.
                    const memory_index: u32 = 0;
                    writeUleb128(code, memory_index);
                    inst += 1;
                    continue :loop tags[inst];
                },
                .memory_atomic_notify => @panic("TODO"),
                .memory_atomic_wait32 => @panic("TODO"),
                .memory_atomic_wait64 => @panic("TODO"),
            }
            comptime unreachable;
        },
    }
    comptime unreachable;
}
928
/// Appends a wasm "memarg" immediate pair (alignment hint + byte offset).
/// Asserts 20 unused capacity.
fn encodeMemArg(code: *ArrayList(u8), mem_arg: Mir.MemArg) void {
    assert(code.unusedCapacitySlice().len >= 20);
    // The binary format wants the power-of-two exponent, while MIR carries
    // the natural (byte) alignment, so take the log2 via count-trailing-zeros.
    const align_log2 = @ctz(mem_arg.alignment);
    writeUleb128(code, align_log2);
    writeUleb128(code, mem_arg.offset);
}
937
/// Emits a pointer to an unnamed (anonymous) value for relocatable object
/// output: a pointer-sized const opcode followed by zero bytes, with a
/// relocation entry so the linker can patch in the final memory address.
fn uavRefObj(wasm: *Wasm, code: *ArrayList(u8), value: InternPool.Index, offset: i32, is_wasm32: bool) !void {
    const gpa = wasm.base.comp.gpa;
    const const_opcode: std.wasm.Opcode = if (is_wasm32) .i32_const else .i64_const;

    try code.ensureUnusedCapacity(gpa, 11);
    code.appendAssumeCapacity(@intFromEnum(const_opcode));

    // The relocation target is the immediate that follows the opcode byte.
    try wasm.out_relocs.append(gpa, .{
        .offset = @intCast(code.items.len),
        .pointee = .{ .symbol_index = try wasm.uavSymbolIndex(value) },
        .tag = if (is_wasm32) .memory_addr_leb else .memory_addr_leb64,
        .addend = offset,
    });
    // Placeholder bytes sized for a full-width (non-canonical) LEB encoding.
    code.appendNTimesAssumeCapacity(0, if (is_wasm32) 5 else 10);
}
954
/// Emits a pointer to an unnamed (anonymous) value for executable output,
/// where addresses are final: a pointer-sized const opcode followed by the
/// absolute address (plus `offset`) encoded directly.
fn uavRefExe(wasm: *Wasm, code: *ArrayList(u8), value: InternPool.Index, offset: i32, is_wasm32: bool) !void {
    const gpa = wasm.base.comp.gpa;
    const const_opcode: std.wasm.Opcode = if (is_wasm32) .i32_const else .i64_const;

    try code.ensureUnusedCapacity(gpa, 11);
    code.appendAssumeCapacity(@intFromEnum(const_opcode));

    // Apply the byte offset in i64 arithmetic, then narrow back to u32.
    const base_addr = wasm.uavAddr(value);
    writeUleb128(code, @as(u32, @intCast(@as(i64, base_addr) + offset)));
}
966
/// Emits a pointer (plus byte offset) to a named value (nav). For object
/// output the immediate is relocated zeros for the linker to patch; for
/// executables the final address is encoded directly.
fn navRefOff(wasm: *Wasm, code: *ArrayList(u8), data: Mir.NavRefOff, is_wasm32: bool) !void {
    const comp = wasm.base.comp;
    const zcu = comp.zcu.?;
    const ip = &zcu.intern_pool;
    const gpa = comp.gpa;
    const is_obj = comp.config.output_mode == .Obj;

    // Function references never come through here (they use `.func_ref`).
    const nav_ty = ip.getNav(data.nav_index).typeOf(ip);
    assert(!ip.isFunctionType(nav_ty));

    try code.ensureUnusedCapacity(gpa, 11);

    const const_opcode: std.wasm.Opcode = if (is_wasm32) .i32_const else .i64_const;
    code.appendAssumeCapacity(@intFromEnum(const_opcode));
    if (is_obj) {
        // Relocation covers the immediate following the opcode byte.
        try wasm.out_relocs.append(gpa, .{
            .offset = @intCast(code.items.len),
            .pointee = .{ .symbol_index = try wasm.navSymbolIndex(data.nav_index) },
            .tag = if (is_wasm32) .memory_addr_leb else .memory_addr_leb64,
            .addend = data.offset,
        });
        code.appendNTimesAssumeCapacity(0, if (is_wasm32) 5 else 10);
    } else {
        // Address is final; fold in the offset with i64 arithmetic.
        const base_addr = wasm.navAddr(data.nav_index);
        writeUleb128(code, @as(u32, @intCast(@as(i64, base_addr) + data.offset)));
    }
}
993
/// Appends the final (post-link) function index as a ULEB128 immediate.
/// Caller is responsible for having reserved enough unused capacity.
fn appendOutputFunctionIndex(code: *ArrayList(u8), i: Wasm.OutputFunctionIndex) void {
    const raw_index = @intFromEnum(i);
    writeUleb128(code, raw_index);
}
997
/// Encodes `arg` as unsigned LEB128 directly into `code`'s unused capacity
/// and bumps the length accordingly. Never allocates; the caller must have
/// already reserved enough capacity via `ensureUnusedCapacity`.
fn writeUleb128(code: *ArrayList(u8), arg: anytype) void {
    var writer: std.Io.Writer = .fixed(code.unusedCapacitySlice());
    // Cannot fail: the fixed buffer is sized by the caller's reservation.
    writer.writeUleb128(arg) catch unreachable;
    code.items.len += writer.end;
}
1003
/// Encodes `arg` as signed LEB128 directly into `code`'s unused capacity
/// and bumps the length accordingly. Never allocates; the caller must have
/// already reserved enough capacity via `ensureUnusedCapacity`.
fn writeSleb128(code: *ArrayList(u8), arg: anytype) void {
    var writer: std.Io.Writer = .fixed(code.unusedCapacitySlice());
    // Cannot fail: the fixed buffer is sized by the caller's reservation.
    writer.writeSleb128(arg) catch unreachable;
    code.items.len += writer.end;
}