#undef linux

#include <stdarg.h>
#include <stddef.h>

#if defined(_MSC_VER)
#define zig_msvc
#elif defined(__clang__)
#define zig_clang
#define zig_gnuc
#elif defined(__GNUC__)
#define zig_gcc
#define zig_gnuc
#elif defined(__IBMC__)
#define zig_xlc
#elif defined(__TINYC__)
#define zig_tinyc
#elif defined(__slimcc__)
#define zig_slimcc
#endif

#if defined(__aarch64__) || (defined(zig_msvc) && defined(_M_ARM64))
#define zig_aarch64
#elif defined(__thumb__) || (defined(zig_msvc) && defined(_M_ARM))
#define zig_thumb
#define zig_arm
#elif defined(__arm__)
#define zig_arm
#elif defined(__hexagon__)
#define zig_hexagon
#elif defined(__kvx__)
#define zig_kvx
#elif defined(__loongarch32)
#define zig_loongarch32
#define zig_loongarch
#elif defined(__loongarch64)
#define zig_loongarch64
#define zig_loongarch
#elif defined(__mips64)
#define zig_mips64
#define zig_mips
#elif defined(__mips__)
#define zig_mips32
#define zig_mips
#elif defined(__or1k__)
#define zig_or1k
#elif defined(__powerpc64__)
#define zig_powerpc64
#define zig_powerpc
#elif defined(__powerpc__)
#define zig_powerpc32
#define zig_powerpc
#elif defined(__riscv) && __riscv_xlen == 32
#define zig_riscv32
#define zig_riscv
#elif defined(__riscv) && __riscv_xlen == 64
#define zig_riscv64
#define zig_riscv
#elif defined(__s390x__)
#define zig_s390x
#elif defined(__sparc__) && defined(__arch64__)
#define zig_sparc64
#define zig_sparc
#elif defined(__sparc__)
#define zig_sparc32
#define zig_sparc
#elif defined(__wasm32__)
#define zig_wasm32
#define zig_wasm
#elif defined(__wasm64__)
#define zig_wasm64
#define zig_wasm
#elif defined(__i386__) || (defined(zig_msvc) && defined(_M_IX86))
#define zig_x86_32
#define zig_x86
#elif defined(__x86_64__) || (defined(zig_msvc) && defined(_M_X64))
#define zig_x86_64
#define zig_x86
#elif defined(__I86__)
#define zig_x86_16
#define zig_x86
#endif

#if defined(zig_msvc) || __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
#define zig_little_endian 1
#define zig_big_endian 0
#else
#define zig_little_endian 0
#define zig_big_endian 1
#endif

#if defined(__MACH__)
#define zig_darwin
#elif defined(__DragonFly__)
#define zig_dragonfly
#define zig_bsd
#elif defined(__EMSCRIPTEN__)
#define zig_emscripten
#elif defined(__FreeBSD__)
#define zig_freebsd
#define zig_bsd
#elif defined(__Fuchsia__)
#define zig_fuchsia
#elif defined(__HAIKU__)
#define zig_haiku
#elif defined(__gnu_hurd__)
#define zig_hurd
#elif defined(__linux__)
#define zig_linux
#elif defined(__NetBSD__)
#define zig_netbsd
#define zig_bsd
#elif defined(__OpenBSD__)
#define zig_openbsd
#define zig_bsd
#elif defined(__SVR4)
#define zig_solaris
#elif defined(__wasi__)
#define zig_wasi
#elif defined(_WIN32)
#define zig_windows
#endif

#if defined(zig_windows)
#define zig_coff
#elif defined(__ELF__)
#define zig_elf
#elif defined(zig_darwin)
#define zig_macho
#endif

#define zig_concat(lhs, rhs) lhs##rhs
#define zig_expand_concat(lhs, rhs) zig_concat(lhs, rhs)

#if defined(__has_include)
#define zig_has_include(include) __has_include(include)
#else
#define zig_has_include(include) 0
#endif

#if defined(__has_builtin)
#define zig_has_builtin(builtin) __has_builtin(__builtin_##builtin)
#else
#define zig_has_builtin(builtin) 0
#endif
#define zig_expand_has_builtin(b) zig_has_builtin(b)

#if defined(__has_attribute)
#define zig_has_attribute(attribute) __has_attribute(attribute)
#else
#define zig_has_attribute(attribute) 0
#endif

#if __STDC_VERSION__ >= 202311L
#define zig_threadlocal thread_local
#elif __STDC_VERSION__ >= 201112L
#define zig_threadlocal _Thread_local
#elif defined(zig_gnuc) || defined(zig_slimcc)
#define zig_threadlocal __thread
#elif defined(zig_msvc)
#define zig_threadlocal __declspec(thread)
#else
#define zig_threadlocal zig_threadlocal_unavailable
#endif

#if defined(zig_msvc)
#define zig_const_arr
#define zig_callconv(c) __##c
#else
#define zig_const_arr static const
#define zig_callconv(c) __attribute__((c))
#endif

#if zig_has_attribute(naked) || defined(zig_gcc)
#define zig_naked_decl __attribute__((naked))
#define zig_naked __attribute__((naked))
#elif defined(zig_msvc)
#define zig_naked_decl
#define zig_naked __declspec(naked)
#else
#define zig_naked_decl zig_naked_unavailable
#define zig_naked zig_naked_unavailable
#endif

#if zig_has_attribute(cold)
#define zig_cold __attribute__((cold))
#else
#define zig_cold
#endif

#if zig_has_attribute(flatten)
#define zig_maybe_flatten __attribute__((flatten))
#else
#define zig_maybe_flatten
#endif

#if zig_has_attribute(noinline)
#define zig_never_inline __attribute__((noinline)) zig_maybe_flatten
#elif defined(zig_msvc)
#define zig_never_inline __declspec(noinline) zig_maybe_flatten
#else
#define zig_never_inline zig_never_inline_unavailable
#endif

#if zig_has_attribute(not_tail_called)
#define zig_never_tail __attribute__((not_tail_called)) zig_never_inline
#else
#define zig_never_tail zig_never_tail_unavailable
#endif

#if zig_has_attribute(musttail)
#define zig_always_tail __attribute__((musttail))
#else
#define zig_always_tail zig_always_tail_unavailable
#endif

#if __STDC_VERSION__ >= 199901L
#define zig_restrict restrict
#elif defined(zig_gnuc) || defined(zig_tinyc)
#define zig_restrict __restrict
#else
#define zig_restrict
#endif

#if zig_has_attribute(no_builtin)
#define zig_no_builtin __attribute__((no_builtin))
#else
#define zig_no_builtin
#endif

#if zig_has_attribute(aligned) || defined(zig_tinyc)
#define zig_under_align(alignment) __attribute__((aligned(alignment)))
#elif defined(zig_msvc)
#define zig_under_align(alignment) __declspec(align(alignment))
#else
#define zig_under_align zig_align_unavailable
#endif

#if __STDC_VERSION__ >= 202311L
#define zig_align(alignment) alignas(alignment)
#elif __STDC_VERSION__ >= 201112L
#define zig_align(alignment) _Alignas(alignment)
#else
#define zig_align(alignment) zig_under_align(alignment)
#endif

#if zig_has_attribute(aligned) || defined(zig_tinyc)
#define zig_align_fn(alignment) __attribute__((aligned(alignment)))
#elif defined(zig_msvc)
#define zig_align_fn(alignment)
#else
#define zig_align_fn zig_align_fn_unavailable
#endif

#if zig_has_attribute(nonstring)
#define zig_nonstring __attribute__((nonstring))
#else
#define zig_nonstring
#endif

#if zig_has_attribute(packed) || defined(zig_tinyc)
#define zig_packed(definition) __attribute__((packed)) definition
#elif defined(zig_msvc)
#define zig_packed(definition) __pragma(pack(1)) definition __pragma(pack())
#else
#define zig_packed(definition) zig_packed_unavailable
#endif

#if zig_has_attribute(section) || defined(zig_tinyc)
#define zig_linksection(name) __attribute__((section(name)))
#define zig_linksection_fn zig_linksection
#elif defined(zig_msvc)
#define zig_linksection(name) __pragma(section(name, read, write)) __declspec(allocate(name))
#define zig_linksection_fn(name) __pragma(section(name, read, execute)) __declspec(code_seg(name))
#else
#define zig_linksection(name) zig_linksection_unavailable
#define zig_linksection_fn zig_linksection
#endif

#if zig_has_attribute(visibility)
#define zig_visibility(name) __attribute__((visibility(#name)))
#else
#define zig_visibility(name) zig_visibility_##name
#define zig_visibility_default
#define zig_visibility_hidden zig_visibility_hidden_unavailable
#define zig_visibility_protected zig_visibility_protected_unavailable
#endif

#if zig_has_builtin(unreachable) || defined(zig_gcc) || defined(zig_tinyc)
#define zig_unreachable() __builtin_unreachable()
#elif defined(zig_msvc)
#define zig_unreachable() __assume(0)
#else
#define zig_unreachable()
#endif

#if defined(__cplusplus)
#define zig_extern extern "C"
#else
#define zig_extern extern
#endif

#if defined(zig_msvc)
#if defined(zig_x86_64)
#define zig_mangle_c(symbol) symbol
#else /* zig_x86_64 */
#define zig_mangle_c(symbol) "_" symbol
#endif /* zig_x86_64 */
#else /* zig_msvc */
#if defined(zig_macho)
#define zig_mangle_c(symbol) "_" symbol
#else /* zig_macho */
#define zig_mangle_c(symbol) symbol
#endif /* zig_macho */
#endif /* zig_msvc */
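/* Illustration (added note, not produced by the compiler): zig_mangle_c
 * applies the platform's C symbol prefix, so on Mach-O targets (and 32-bit
 * MSVC) zig_mangle_c("foo") expands to "_" "foo", i.e. the literal "_foo",
 * while on x86_64 MSVC and ELF-style targets it stays "foo". */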

#if defined(zig_msvc)
#define zig_export(symbol, name) ; \
    __pragma(comment(linker, "/alternatename:" zig_mangle_c(name) "=" zig_mangle_c(symbol)))
#elif (zig_has_attribute(alias) || defined(zig_tinyc)) && !defined(zig_macho)
#define zig_export(symbol, name) __attribute__((alias(symbol)))
#else
#define zig_export(symbol, name) ; \
    __asm(zig_mangle_c(name) " = " zig_mangle_c(symbol))
#endif

#define zig_mangled_tentative zig_mangled
#define zig_mangled_final zig_mangled
#if defined(zig_msvc)
#define zig_mangled(mangled, unmangled) ; \
    zig_export(#mangled, unmangled)
#define zig_mangled_export(mangled, unmangled, symbol) \
    zig_export(unmangled, #mangled) \
    zig_export(symbol, unmangled)
#else /* zig_msvc */
#define zig_mangled(mangled, unmangled) __asm(zig_mangle_c(unmangled))
#define zig_mangled_export(mangled, unmangled, symbol) \
    zig_mangled_final(mangled, unmangled) \
    zig_export(symbol, unmangled)
#endif /* zig_msvc */

#if defined(zig_msvc)
#define zig_import(Type, fn_name, libc_name, sig_args, call_args) zig_extern Type fn_name sig_args;\
    __pragma(comment(linker, "/alternatename:" zig_mangle_c(#fn_name) "=" zig_mangle_c(#libc_name)));
#define zig_import_builtin(Type, fn_name, libc_name, sig_args, call_args) zig_import(Type, fn_name, libc_name, sig_args, call_args)
#else /* zig_msvc */
#define zig_import(Type, fn_name, libc_name, sig_args, call_args) zig_extern Type fn_name sig_args __asm(zig_mangle_c(#libc_name));
#define zig_import_builtin(Type, fn_name, libc_name, sig_args, call_args) zig_extern Type libc_name sig_args; \
    static inline Type fn_name sig_args { return libc_name call_args; }
#endif

#define zig_expand_import_0(Type, fn_name, libc_name, sig_args, call_args) zig_import(Type, fn_name, libc_name, sig_args, call_args)
#define zig_expand_import_1(Type, fn_name, libc_name, sig_args, call_args) zig_import_builtin(Type, fn_name, libc_name, sig_args, call_args)

#if zig_has_attribute(weak) || defined(zig_gcc) || defined(zig_tinyc)
#define zig_weak_linkage __attribute__((weak))
#define zig_weak_linkage_fn __attribute__((weak))
#elif defined(zig_msvc)
#define zig_weak_linkage __declspec(selectany)
#define zig_weak_linkage_fn
#else
#define zig_weak_linkage zig_weak_linkage_unavailable
#define zig_weak_linkage_fn zig_weak_linkage_unavailable
#endif

#if defined(zig_gnuc) || defined(zig_tinyc) || defined(zig_slimcc)
#define zig_gnuc_asm
#endif

#if zig_has_builtin(trap)
#define zig_trap() __builtin_trap()
#elif defined(zig_msvc)

#if defined(zig_x86)
#define zig_trap() __ud2()
#else
#define zig_trap() __fastfail(7)
#endif

#elif defined(zig_gnuc_asm)

#if defined(zig_thumb)
#define zig_trap() __asm__ volatile("udf #0xfe")
#elif defined(zig_arm) || defined(zig_aarch64)
#define zig_trap() __asm__ volatile("udf #0xfdee")
#elif defined(zig_hexagon)
#define zig_trap() __asm__ volatile("r27:26 = memd(#0xbadc0fee)")
#elif defined(zig_kvx) || defined(zig_loongarch) || defined(zig_powerpc)
#define zig_trap() __asm__ volatile(".word 0x0")
#elif defined(zig_mips)
#define zig_trap() __asm__ volatile(".word 0x3d")
#elif defined(zig_or1k)
#define zig_trap() __asm__ volatile("l.cust8")
#elif defined(zig_riscv)
#define zig_trap() __asm__ volatile("unimp")
#elif defined(zig_s390x)
#define zig_trap() __asm__ volatile("j 0x2")
#elif defined(zig_sparc)
#define zig_trap() __asm__ volatile("illtrap")
#elif defined(zig_x86_16)
#define zig_trap() __asm__ volatile("int $0x3")
#elif defined(zig_x86)
#define zig_trap() __asm__ volatile("ud2")
#else
#define zig_trap() zig_trap_unavailable
#endif

#else
#define zig_trap() zig_trap_unavailable
#endif

#if zig_has_builtin(debugtrap)
#define zig_breakpoint() __builtin_debugtrap()
#elif defined(zig_msvc)
#define zig_breakpoint() __debugbreak()
#elif defined(zig_gnuc_asm)

#if defined(zig_arm)
#define zig_breakpoint() __asm__ volatile("bkpt #0x0")
#elif defined(zig_aarch64)
#define zig_breakpoint() __asm__ volatile("brk #0xf000")
#elif defined(zig_hexagon)
#define zig_breakpoint() __asm__ volatile("brkpt")
#elif defined(zig_kvx) || defined(zig_loongarch)
#define zig_breakpoint() __asm__ volatile("break 0x0")
#elif defined(zig_mips)
#define zig_breakpoint() __asm__ volatile("break")
#elif defined(zig_or1k)
#define zig_breakpoint() __asm__ volatile("l.trap 0x0")
#elif defined(zig_powerpc)
#define zig_breakpoint() __asm__ volatile("trap")
#elif defined(zig_riscv)
#define zig_breakpoint() __asm__ volatile("ebreak")
#elif defined(zig_s390x)
#define zig_breakpoint() __asm__ volatile("j 0x6")
#elif defined(zig_sparc)
#define zig_breakpoint() __asm__ volatile("ta 0x1")
#elif defined(zig_x86)
#define zig_breakpoint() __asm__ volatile("int $0x3")
#else
#define zig_breakpoint() zig_breakpoint_unavailable
#endif

#else
#define zig_breakpoint() zig_breakpoint_unavailable
#endif

#if zig_has_builtin(return_address) || defined(zig_gcc) || defined(zig_tinyc)
#define zig_return_address() __builtin_extract_return_addr(__builtin_return_address(0))
#elif defined(zig_msvc)
#define zig_return_address() _ReturnAddress()
#else
#define zig_return_address() 0
#endif

#if zig_has_builtin(frame_address) || defined(zig_gcc) || defined(zig_tinyc)
#define zig_frame_address() __builtin_frame_address(0)
#elif defined(zig_msvc)
#define zig_frame_address() _AddressOfReturnAddress()
#else
#define zig_frame_address() 0
#endif

#if zig_has_builtin(prefetch) || defined(zig_gcc)
#define zig_prefetch(addr, rw, locality) __builtin_prefetch(addr, rw, locality)
#else
#define zig_prefetch(addr, rw, locality)
#endif

#if zig_has_builtin(wasm_memory_size) && zig_has_builtin(wasm_memory_grow)
#define zig_wasm_memory_size(index) __builtin_wasm_memory_size(index)
#define zig_wasm_memory_grow(index, delta) __builtin_wasm_memory_grow(index, delta)
#else
#define zig_wasm_memory_size(index) zig_unimplemented()
#define zig_wasm_memory_grow(index, delta) zig_unimplemented()
#endif

#if __STDC_VERSION__ >= 202311L
#define zig_noreturn [[noreturn]]
#elif __STDC_VERSION__ >= 201112L
#define zig_noreturn _Noreturn
#elif zig_has_attribute(noreturn) || defined(zig_gcc) || defined(zig_tinyc)
#define zig_noreturn __attribute__((noreturn))
#elif defined(zig_msvc)
#define zig_noreturn __declspec(noreturn)
#else
#define zig_noreturn
#endif

#define zig_compiler_rt_abbrev_uint32_t si
#define zig_compiler_rt_abbrev_int32_t si
#define zig_compiler_rt_abbrev_uint64_t di
#define zig_compiler_rt_abbrev_int64_t di
#define zig_compiler_rt_abbrev_zig_u128 ti
#define zig_compiler_rt_abbrev_zig_i128 ti
#define zig_compiler_rt_abbrev_zig_f16 hf
#define zig_compiler_rt_abbrev_zig_f32 sf
#define zig_compiler_rt_abbrev_zig_f64 df
#define zig_compiler_rt_abbrev_zig_f80 xf
#define zig_compiler_rt_abbrev_zig_f128 tf

zig_extern void *memcpy (void *zig_restrict, void const *zig_restrict, size_t);
zig_extern void *memset (void *, int, size_t);
zig_extern void *memmove (void *, void const *, size_t);

/* ================ Bool and 8/16/32/64-bit Integer Support ================= */

#include <limits.h>

#define zig_bitSizeOf(T) (CHAR_BIT * sizeof(T))

#if __STDC_VERSION__ >= 202311L
/* bool, true, and false are provided by the language. */
#elif __STDC_VERSION__ >= 199901L || zig_has_include(<stdbool.h>)
#include <stdbool.h>
#else
typedef char bool;
#define false 0
#define true 1
#endif

#if __STDC_VERSION__ >= 199901L || defined(zig_msvc) || zig_has_include(<stdint.h>)
#include <stdint.h>
#else
#if SCHAR_MIN == ~0x7F && SCHAR_MAX == 0x7F && UCHAR_MAX == 0xFF
typedef unsigned char uint8_t;
typedef signed char int8_t;
#define INT8_C(c) c
#define UINT8_C(c) c##U
#elif SHRT_MIN == ~0x7F && SHRT_MAX == 0x7F && USHRT_MAX == 0xFF
typedef unsigned short uint8_t;
typedef signed short int8_t;
#define INT8_C(c) c
#define UINT8_C(c) c##U
#elif INT_MIN == ~0x7F && INT_MAX == 0x7F && UINT_MAX == 0xFF
typedef unsigned int uint8_t;
typedef signed int int8_t;
#define INT8_C(c) c
#define UINT8_C(c) c##U
#elif LONG_MIN == ~0x7F && LONG_MAX == 0x7F && ULONG_MAX == 0xFF
typedef unsigned long uint8_t;
typedef signed long int8_t;
#define INT8_C(c) c##L
#define UINT8_C(c) c##LU
#elif LLONG_MIN == ~0x7F && LLONG_MAX == 0x7F && ULLONG_MAX == 0xFF
typedef unsigned long long uint8_t;
typedef signed long long int8_t;
#define INT8_C(c) c##LL
#define UINT8_C(c) c##LLU
#endif
#define INT8_MIN (~INT8_C(0x7F))
#define INT8_MAX ( INT8_C(0x7F))
#define UINT8_MAX ( UINT8_C(0xFF))

#if SCHAR_MIN == ~0x7FFF && SCHAR_MAX == 0x7FFF && UCHAR_MAX == 0xFFFF
typedef unsigned char uint16_t;
typedef signed char int16_t;
#define INT16_C(c) c
#define UINT16_C(c) c##U
#elif SHRT_MIN == ~0x7FFF && SHRT_MAX == 0x7FFF && USHRT_MAX == 0xFFFF
typedef unsigned short uint16_t;
typedef signed short int16_t;
#define INT16_C(c) c
#define UINT16_C(c) c##U
#elif INT_MIN == ~0x7FFF && INT_MAX == 0x7FFF && UINT_MAX == 0xFFFF
typedef unsigned int uint16_t;
typedef signed int int16_t;
#define INT16_C(c) c
#define UINT16_C(c) c##U
#elif LONG_MIN == ~0x7FFF && LONG_MAX == 0x7FFF && ULONG_MAX == 0xFFFF
typedef unsigned long uint16_t;
typedef signed long int16_t;
#define INT16_C(c) c##L
#define UINT16_C(c) c##LU
#elif LLONG_MIN == ~0x7FFF && LLONG_MAX == 0x7FFF && ULLONG_MAX == 0xFFFF
typedef unsigned long long uint16_t;
typedef signed long long int16_t;
#define INT16_C(c) c##LL
#define UINT16_C(c) c##LLU
#endif
#define INT16_MIN (~INT16_C(0x7FFF))
#define INT16_MAX ( INT16_C(0x7FFF))
#define UINT16_MAX ( UINT16_C(0xFFFF))

#if SCHAR_MIN == ~0x7FFFFFFF && SCHAR_MAX == 0x7FFFFFFF && UCHAR_MAX == 0xFFFFFFFF
typedef unsigned char uint32_t;
typedef signed char int32_t;
#define INT32_C(c) c
#define UINT32_C(c) c##U
#elif SHRT_MIN == ~0x7FFFFFFF && SHRT_MAX == 0x7FFFFFFF && USHRT_MAX == 0xFFFFFFFF
typedef unsigned short uint32_t;
typedef signed short int32_t;
#define INT32_C(c) c
#define UINT32_C(c) c##U
#elif INT_MIN == ~0x7FFFFFFF && INT_MAX == 0x7FFFFFFF && UINT_MAX == 0xFFFFFFFF
typedef unsigned int uint32_t;
typedef signed int int32_t;
#define INT32_C(c) c
#define UINT32_C(c) c##U
#elif LONG_MIN == ~0x7FFFFFFF && LONG_MAX == 0x7FFFFFFF && ULONG_MAX == 0xFFFFFFFF
typedef unsigned long uint32_t;
typedef signed long int32_t;
#define INT32_C(c) c##L
#define UINT32_C(c) c##LU
#elif LLONG_MIN == ~0x7FFFFFFF && LLONG_MAX == 0x7FFFFFFF && ULLONG_MAX == 0xFFFFFFFF
typedef unsigned long long uint32_t;
typedef signed long long int32_t;
#define INT32_C(c) c##LL
#define UINT32_C(c) c##LLU
#endif
#define INT32_MIN (~INT32_C(0x7FFFFFFF))
#define INT32_MAX ( INT32_C(0x7FFFFFFF))
#define UINT32_MAX ( UINT32_C(0xFFFFFFFF))

#if SCHAR_MIN == ~0x7FFFFFFFFFFFFFFF && SCHAR_MAX == 0x7FFFFFFFFFFFFFFF && UCHAR_MAX == 0xFFFFFFFFFFFFFFFF
typedef unsigned char uint64_t;
typedef signed char int64_t;
#define INT64_C(c) c
#define UINT64_C(c) c##U
#elif SHRT_MIN == ~0x7FFFFFFFFFFFFFFF && SHRT_MAX == 0x7FFFFFFFFFFFFFFF && USHRT_MAX == 0xFFFFFFFFFFFFFFFF
typedef unsigned short uint64_t;
typedef signed short int64_t;
#define INT64_C(c) c
#define UINT64_C(c) c##U
#elif INT_MIN == ~0x7FFFFFFFFFFFFFFF && INT_MAX == 0x7FFFFFFFFFFFFFFF && UINT_MAX == 0xFFFFFFFFFFFFFFFF
typedef unsigned int uint64_t;
typedef signed int int64_t;
#define INT64_C(c) c
#define UINT64_C(c) c##U
#elif LONG_MIN == ~0x7FFFFFFFFFFFFFFF && LONG_MAX == 0x7FFFFFFFFFFFFFFF && ULONG_MAX == 0xFFFFFFFFFFFFFFFF
typedef unsigned long uint64_t;
typedef signed long int64_t;
#define INT64_C(c) c##L
#define UINT64_C(c) c##LU
#elif LLONG_MIN == ~0x7FFFFFFFFFFFFFFF && LLONG_MAX == 0x7FFFFFFFFFFFFFFF && ULLONG_MAX == 0xFFFFFFFFFFFFFFFF
typedef unsigned long long uint64_t;
typedef signed long long int64_t;
#define INT64_C(c) c##LL
#define UINT64_C(c) c##LLU
#endif
#define INT64_MIN (~INT64_C(0x7FFFFFFFFFFFFFFF))
#define INT64_MAX ( INT64_C(0x7FFFFFFFFFFFFFFF))
#define UINT64_MAX ( UINT64_C(0xFFFFFFFFFFFFFFFF))

typedef size_t uintptr_t;
typedef ptrdiff_t intptr_t;

#endif

#define zig_minInt_i8 INT8_MIN
#define zig_maxInt_i8 INT8_MAX
#define zig_minInt_u8 UINT8_C(0)
#define zig_maxInt_u8 UINT8_MAX
#define zig_minInt_i16 INT16_MIN
#define zig_maxInt_i16 INT16_MAX
#define zig_minInt_u16 UINT16_C(0)
#define zig_maxInt_u16 UINT16_MAX
#define zig_minInt_i32 INT32_MIN
#define zig_maxInt_i32 INT32_MAX
#define zig_minInt_u32 UINT32_C(0)
#define zig_maxInt_u32 UINT32_MAX
#define zig_minInt_i64 INT64_MIN
#define zig_maxInt_i64 INT64_MAX
#define zig_minInt_u64 UINT64_C(0)
#define zig_maxInt_u64 UINT64_MAX

#define zig_intLimit(s, w, limit, bits) zig_shr_##s##w(zig_##limit##Int_##s##w, w - (bits))
#define zig_minInt_i(w, bits) zig_intLimit(i, w, min, bits)
#define zig_maxInt_i(w, bits) zig_intLimit(i, w, max, bits)
#define zig_minInt_u(w, bits) zig_intLimit(u, w, min, bits)
#define zig_maxInt_u(w, bits) zig_intLimit(u, w, max, bits)
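/* Worked example (illustrative only): for Zig's u3/i3, zig_maxInt_u(8, 3)
 * expands to zig_shr_u8(zig_maxInt_u8, 8 - (3)), i.e. 0xFF >> 5 == 7, and
 * zig_minInt_i(8, 3) is the arithmetic shift zig_shr_i8(INT8_MIN, 5) == -4,
 * so the limits of any bit width are derived from the 8/16/32/64-bit ones. */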

#define zig_operator(Type, RhsType, operation, operator) \
    static inline Type zig_##operation(Type lhs, RhsType rhs) { \
        return lhs operator rhs; \
    }
#define zig_basic_operator(Type, operation, operator) \
    zig_operator(Type, Type, operation, operator)
#define zig_shift_operator(Type, operation, operator) \
    zig_operator(Type, uint8_t, operation, operator)
#define zig_int_helpers(w, PromotedUnsigned) \
    zig_basic_operator(uint##w##_t, and_u##w, &) \
    zig_basic_operator( int##w##_t, and_i##w, &) \
    zig_basic_operator(uint##w##_t, or_u##w, |) \
    zig_basic_operator( int##w##_t, or_i##w, |) \
    zig_basic_operator(uint##w##_t, xor_u##w, ^) \
    zig_basic_operator( int##w##_t, xor_i##w, ^) \
    zig_shift_operator(uint##w##_t, shl_u##w, <<) \
    zig_shift_operator( int##w##_t, shl_i##w, <<) \
    zig_shift_operator(uint##w##_t, shr_u##w, >>) \
\
    static inline int##w##_t zig_shr_i##w(int##w##_t lhs, uint8_t rhs) { \
        int##w##_t sign_mask = lhs < INT##w##_C(0) ? -INT##w##_C(1) : INT##w##_C(0); \
        return ((lhs ^ sign_mask) >> rhs) ^ sign_mask; \
    } \
\
    static inline uint##w##_t zig_not_u##w(uint##w##_t val, uint8_t bits) { \
        return val ^ zig_maxInt_u(w, bits); \
    } \
\
    static inline int##w##_t zig_not_i##w(int##w##_t val, uint8_t bits) { \
        (void)bits; \
        return ~val; \
    } \
\
    static inline uint##w##_t zig_wrap_u##w(uint##w##_t val, uint8_t bits) { \
        return val & zig_maxInt_u(w, bits); \
    } \
\
    static inline int##w##_t zig_wrap_i##w(int##w##_t val, uint8_t bits) { \
        return (val & UINT##w##_C(1) << (bits - UINT8_C(1))) != 0 \
            ? val | zig_minInt_i(w, bits) : val & zig_maxInt_i(w, bits); \
    } \
\
    static inline uint##w##_t zig_abs_i##w(int##w##_t val) { \
        return (val < 0) ? -(uint##w##_t)val : (uint##w##_t)val; \
    } \
\
    zig_basic_operator(uint##w##_t, div_floor_u##w, /) \
\
    static inline int##w##_t zig_div_floor_i##w(int##w##_t lhs, int##w##_t rhs) { \
        return lhs / rhs + (lhs % rhs != INT##w##_C(0) ? zig_shr_i##w(lhs ^ rhs, UINT8_C(w) - UINT8_C(1)) : INT##w##_C(0)); \
    } \
\
    zig_basic_operator(uint##w##_t, mod_u##w, %) \
\
    static inline int##w##_t zig_mod_i##w(int##w##_t lhs, int##w##_t rhs) { \
        int##w##_t rem = lhs % rhs; \
        return rem + (rem != INT##w##_C(0) ? rhs & zig_shr_i##w(lhs ^ rhs, UINT8_C(w) - UINT8_C(1)) : INT##w##_C(0)); \
    } \
\
    static inline uint##w##_t zig_shlw_u##w(uint##w##_t lhs, uint8_t rhs, uint8_t bits) { \
        return zig_wrap_u##w(zig_shl_u##w(lhs, rhs), bits); \
    } \
\
    static inline int##w##_t zig_shlw_i##w(int##w##_t lhs, uint8_t rhs, uint8_t bits) { \
        return zig_wrap_i##w((int##w##_t)zig_shl_u##w((uint##w##_t)lhs, rhs), bits); \
    } \
\
    static inline uint##w##_t zig_addw_u##w(uint##w##_t lhs, uint##w##_t rhs, uint8_t bits) { \
        return zig_wrap_u##w(lhs + rhs, bits); \
    } \
\
    static inline int##w##_t zig_addw_i##w(int##w##_t lhs, int##w##_t rhs, uint8_t bits) { \
        return zig_wrap_i##w((int##w##_t)((uint##w##_t)lhs + (uint##w##_t)rhs), bits); \
    } \
\
    static inline uint##w##_t zig_subw_u##w(uint##w##_t lhs, uint##w##_t rhs, uint8_t bits) { \
        return zig_wrap_u##w(lhs - rhs, bits); \
    } \
\
    static inline int##w##_t zig_subw_i##w(int##w##_t lhs, int##w##_t rhs, uint8_t bits) { \
        return zig_wrap_i##w((int##w##_t)((uint##w##_t)lhs - (uint##w##_t)rhs), bits); \
    } \
\
    static inline uint##w##_t zig_mulw_u##w(uint##w##_t lhs, uint##w##_t rhs, uint8_t bits) { \
        return zig_wrap_u##w((PromotedUnsigned)lhs * rhs, bits); \
    } \
\
    static inline int##w##_t zig_mulw_i##w(int##w##_t lhs, int##w##_t rhs, uint8_t bits) { \
        return zig_wrap_i##w((int##w##_t)((uint##w##_t)lhs * (uint##w##_t)rhs), bits); \
    }
#if UINT8_MAX <= UINT_MAX
zig_int_helpers(8, unsigned int)
#elif UINT8_MAX <= ULONG_MAX
zig_int_helpers(8, unsigned long)
#elif UINT8_MAX <= ULLONG_MAX
zig_int_helpers(8, unsigned long long)
#else
zig_int_helpers(8, uint8_t)
#endif
#if UINT16_MAX <= UINT_MAX
zig_int_helpers(16, unsigned int)
#elif UINT16_MAX <= ULONG_MAX
zig_int_helpers(16, unsigned long)
#elif UINT16_MAX <= ULLONG_MAX
zig_int_helpers(16, unsigned long long)
#else
zig_int_helpers(16, uint16_t)
#endif
#if UINT32_MAX <= UINT_MAX
zig_int_helpers(32, unsigned int)
#elif UINT32_MAX <= ULONG_MAX
zig_int_helpers(32, unsigned long)
#elif UINT32_MAX <= ULLONG_MAX
zig_int_helpers(32, unsigned long long)
#else
zig_int_helpers(32, uint32_t)
#endif
#if UINT64_MAX <= UINT_MAX
zig_int_helpers(64, unsigned int)
#elif UINT64_MAX <= ULONG_MAX
zig_int_helpers(64, unsigned long)
#elif UINT64_MAX <= ULLONG_MAX
zig_int_helpers(64, unsigned long long)
#else
zig_int_helpers(64, uint64_t)
#endif

static inline bool zig_addo_u32(uint32_t *res, uint32_t lhs, uint32_t rhs, uint8_t bits) {
#if zig_has_builtin(add_overflow) || defined(zig_gcc)
    uint32_t full_res;
    bool overflow = __builtin_add_overflow(lhs, rhs, &full_res);
    *res = zig_wrap_u32(full_res, bits);
    return overflow || full_res < zig_minInt_u(32, bits) || full_res > zig_maxInt_u(32, bits);
#else
    *res = zig_addw_u32(lhs, rhs, bits);
    return *res < lhs;
#endif
}

static inline bool zig_addo_i32(int32_t *res, int32_t lhs, int32_t rhs, uint8_t bits) {
#if zig_has_builtin(add_overflow) || defined(zig_gcc)
    int32_t full_res;
    bool overflow = __builtin_add_overflow(lhs, rhs, &full_res);
#else
    int32_t full_res = (int32_t)((uint32_t)lhs + (uint32_t)rhs);
    bool overflow = ((full_res ^ lhs) & (full_res ^ rhs)) < 0;
#endif
    *res = zig_wrap_i32(full_res, bits);
    return overflow || full_res < zig_minInt_i(32, bits) || full_res > zig_maxInt_i(32, bits);
}
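/* Note on the fallback branch above: without __builtin_add_overflow, the sum
 * is formed in unsigned arithmetic and overflow is detected by
 * ((full_res ^ lhs) & (full_res ^ rhs)) < 0, which is negative exactly when
 * the result's sign differs from both operands' signs; e.g. INT32_MAX + 1
 * wraps to INT32_MIN and trips the check. */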

static inline bool zig_addo_u64(uint64_t *res, uint64_t lhs, uint64_t rhs, uint8_t bits) {
#if zig_has_builtin(add_overflow) || defined(zig_gcc)
    uint64_t full_res;
    bool overflow = __builtin_add_overflow(lhs, rhs, &full_res);
    *res = zig_wrap_u64(full_res, bits);
    return overflow || full_res < zig_minInt_u(64, bits) || full_res > zig_maxInt_u(64, bits);
#else
    *res = zig_addw_u64(lhs, rhs, bits);
    return *res < lhs;
#endif
}

static inline bool zig_addo_i64(int64_t *res, int64_t lhs, int64_t rhs, uint8_t bits) {
#if zig_has_builtin(add_overflow) || defined(zig_gcc)
    int64_t full_res;
    bool overflow = __builtin_add_overflow(lhs, rhs, &full_res);
#else
    int64_t full_res = (int64_t)((uint64_t)lhs + (uint64_t)rhs);
    bool overflow = ((full_res ^ lhs) & (full_res ^ rhs)) < 0;
#endif
    *res = zig_wrap_i64(full_res, bits);
    return overflow || full_res < zig_minInt_i(64, bits) || full_res > zig_maxInt_i(64, bits);
}

static inline bool zig_addo_u8(uint8_t *res, uint8_t lhs, uint8_t rhs, uint8_t bits) {
#if zig_has_builtin(add_overflow) || defined(zig_gcc)
    uint8_t full_res;
    bool overflow = __builtin_add_overflow(lhs, rhs, &full_res);
    *res = zig_wrap_u8(full_res, bits);
    return overflow || full_res < zig_minInt_u(8, bits) || full_res > zig_maxInt_u(8, bits);
#else
    uint32_t full_res;
    bool overflow = zig_addo_u32(&full_res, lhs, rhs, bits);
    *res = (uint8_t)full_res;
    return overflow;
#endif
}

static inline bool zig_addo_i8(int8_t *res, int8_t lhs, int8_t rhs, uint8_t bits) {
#if zig_has_builtin(add_overflow) || defined(zig_gcc)
    int8_t full_res;
    bool overflow = __builtin_add_overflow(lhs, rhs, &full_res);
    *res = zig_wrap_i8(full_res, bits);
    return overflow || full_res < zig_minInt_i(8, bits) || full_res > zig_maxInt_i(8, bits);
#else
    int32_t full_res;
    bool overflow = zig_addo_i32(&full_res, lhs, rhs, bits);
    *res = (int8_t)full_res;
    return overflow;
#endif
}

static inline bool zig_addo_u16(uint16_t *res, uint16_t lhs, uint16_t rhs, uint8_t bits) {
#if zig_has_builtin(add_overflow) || defined(zig_gcc)
    uint16_t full_res;
    bool overflow = __builtin_add_overflow(lhs, rhs, &full_res);
    *res = zig_wrap_u16(full_res, bits);
    return overflow || full_res < zig_minInt_u(16, bits) || full_res > zig_maxInt_u(16, bits);
#else
    uint32_t full_res;
    bool overflow = zig_addo_u32(&full_res, lhs, rhs, bits);
    *res = (uint16_t)full_res;
    return overflow;
#endif
}

static inline bool zig_addo_i16(int16_t *res, int16_t lhs, int16_t rhs, uint8_t bits) {
#if zig_has_builtin(add_overflow) || defined(zig_gcc)
    int16_t full_res;
    bool overflow = __builtin_add_overflow(lhs, rhs, &full_res);
    *res = zig_wrap_i16(full_res, bits);
    return overflow || full_res < zig_minInt_i(16, bits) || full_res > zig_maxInt_i(16, bits);
#else
    int32_t full_res;
    bool overflow = zig_addo_i32(&full_res, lhs, rhs, bits);
    *res = (int16_t)full_res;
    return overflow;
#endif
}

static inline bool zig_subo_u32(uint32_t *res, uint32_t lhs, uint32_t rhs, uint8_t bits) {
#if zig_has_builtin(sub_overflow) || defined(zig_gcc)
    uint32_t full_res;
    bool overflow = __builtin_sub_overflow(lhs, rhs, &full_res);
    *res = zig_wrap_u32(full_res, bits);
    return overflow || full_res < zig_minInt_u(32, bits) || full_res > zig_maxInt_u(32, bits);
#else
    *res = zig_subw_u32(lhs, rhs, bits);
    return *res > lhs;
#endif
}

static inline bool zig_subo_i32(int32_t *res, int32_t lhs, int32_t rhs, uint8_t bits) {
#if zig_has_builtin(sub_overflow) || defined(zig_gcc)
    int32_t full_res;
    bool overflow = __builtin_sub_overflow(lhs, rhs, &full_res);
#else
    int32_t full_res = (int32_t)((uint32_t)lhs - (uint32_t)rhs);
    bool overflow = ((lhs ^ rhs) & (full_res ^ lhs)) < 0;
#endif
    *res = zig_wrap_i32(full_res, bits);
    return overflow || full_res < zig_minInt_i(32, bits) || full_res > zig_maxInt_i(32, bits);
}

static inline bool zig_subo_u64(uint64_t *res, uint64_t lhs, uint64_t rhs, uint8_t bits) {
#if zig_has_builtin(sub_overflow) || defined(zig_gcc)
    uint64_t full_res;
    bool overflow = __builtin_sub_overflow(lhs, rhs, &full_res);
    *res = zig_wrap_u64(full_res, bits);
    return overflow || full_res < zig_minInt_u(64, bits) || full_res > zig_maxInt_u(64, bits);
#else
    *res = zig_subw_u64(lhs, rhs, bits);
    return *res > lhs;
#endif
}

static inline bool zig_subo_i64(int64_t *res, int64_t lhs, int64_t rhs, uint8_t bits) {
#if zig_has_builtin(sub_overflow) || defined(zig_gcc)
    int64_t full_res;
    bool overflow = __builtin_sub_overflow(lhs, rhs, &full_res);
#else
    int64_t full_res = (int64_t)((uint64_t)lhs - (uint64_t)rhs);
    bool overflow = ((lhs ^ rhs) & (full_res ^ lhs)) < 0;
#endif
    *res = zig_wrap_i64(full_res, bits);
    return overflow || full_res < zig_minInt_i(64, bits) || full_res > zig_maxInt_i(64, bits);
}

static inline bool zig_subo_u8(uint8_t *res, uint8_t lhs, uint8_t rhs, uint8_t bits) {
#if zig_has_builtin(sub_overflow) || defined(zig_gcc)
    uint8_t full_res;
    bool overflow = __builtin_sub_overflow(lhs, rhs, &full_res);
    *res = zig_wrap_u8(full_res, bits);
    return overflow || full_res < zig_minInt_u(8, bits) || full_res > zig_maxInt_u(8, bits);
#else
    uint32_t full_res;
    bool overflow = zig_subo_u32(&full_res, lhs, rhs, bits);
    *res = (uint8_t)full_res;
    return overflow;
#endif
}

static inline bool zig_subo_i8(int8_t *res, int8_t lhs, int8_t rhs, uint8_t bits) {
#if zig_has_builtin(sub_overflow) || defined(zig_gcc)
    int8_t full_res;
    bool overflow = __builtin_sub_overflow(lhs, rhs, &full_res);
    *res = zig_wrap_i8(full_res, bits);
    return overflow || full_res < zig_minInt_i(8, bits) || full_res > zig_maxInt_i(8, bits);
#else
    int32_t full_res;
    bool overflow = zig_subo_i32(&full_res, lhs, rhs, bits);
    *res = (int8_t)full_res;
    return overflow;
#endif
}

static inline bool zig_subo_u16(uint16_t *res, uint16_t lhs, uint16_t rhs, uint8_t bits) {
#if zig_has_builtin(sub_overflow) || defined(zig_gcc)
    uint16_t full_res;
    bool overflow = __builtin_sub_overflow(lhs, rhs, &full_res);
    *res = zig_wrap_u16(full_res, bits);
    return overflow || full_res < zig_minInt_u(16, bits) || full_res > zig_maxInt_u(16, bits);
#else
    uint32_t full_res;
    bool overflow = zig_subo_u32(&full_res, lhs, rhs, bits);
    *res = (uint16_t)full_res;
    return overflow;
#endif
}

static inline bool zig_subo_i16(int16_t *res, int16_t lhs, int16_t rhs, uint8_t bits) {
#if zig_has_builtin(sub_overflow) || defined(zig_gcc)
    int16_t full_res;
    bool overflow = __builtin_sub_overflow(lhs, rhs, &full_res);
    *res = zig_wrap_i16(full_res, bits);
    return overflow || full_res < zig_minInt_i(16, bits) || full_res > zig_maxInt_i(16, bits);
#else
    int32_t full_res;
    bool overflow = zig_subo_i32(&full_res, lhs, rhs, bits);
    *res = (int16_t)full_res;
    return overflow;
#endif
}

static inline bool zig_mulo_u32(uint32_t *res, uint32_t lhs, uint32_t rhs, uint8_t bits) {
#if zig_has_builtin(mul_overflow) || defined(zig_gcc)
    uint32_t full_res;
    bool overflow = __builtin_mul_overflow(lhs, rhs, &full_res);
    *res = zig_wrap_u32(full_res, bits);
    return overflow || full_res < zig_minInt_u(32, bits) || full_res > zig_maxInt_u(32, bits);
#else
    *res = zig_mulw_u32(lhs, rhs, bits);
    return rhs != UINT32_C(0) && lhs > zig_maxInt_u(32, bits) / rhs;
#endif
}

zig_extern int32_t __mulosi4(int32_t lhs, int32_t rhs, int *overflow);
static inline bool zig_mulo_i32(int32_t *res, int32_t lhs, int32_t rhs, uint8_t bits) {
#if zig_has_builtin(mul_overflow) || defined(zig_gcc)
    int32_t full_res;
    bool overflow = __builtin_mul_overflow(lhs, rhs, &full_res);
#else
    int overflow_int;
    int32_t full_res = __mulosi4(lhs, rhs, &overflow_int);
    bool overflow = overflow_int != 0;
#endif
    *res = zig_wrap_i32(full_res, bits);
    return overflow || full_res < zig_minInt_i(32, bits) || full_res > zig_maxInt_i(32, bits);
}

static inline bool zig_mulo_u64(uint64_t *res, uint64_t lhs, uint64_t rhs, uint8_t bits) {
#if zig_has_builtin(mul_overflow) || defined(zig_gcc)
    uint64_t full_res;
    bool overflow = __builtin_mul_overflow(lhs, rhs, &full_res);
    *res = zig_wrap_u64(full_res, bits);
    return overflow || full_res < zig_minInt_u(64, bits) || full_res > zig_maxInt_u(64, bits);
#else
    *res = zig_mulw_u64(lhs, rhs, bits);
    return rhs != UINT64_C(0) && lhs > zig_maxInt_u(64, bits) / rhs;
#endif
}

zig_extern int64_t __mulodi4(int64_t lhs, int64_t rhs, int *overflow);
static inline bool zig_mulo_i64(int64_t *res, int64_t lhs, int64_t rhs, uint8_t bits) {
#if zig_has_builtin(mul_overflow) || defined(zig_gcc)
    int64_t full_res;
    bool overflow = __builtin_mul_overflow(lhs, rhs, &full_res);
#else
    int overflow_int;
    int64_t full_res = __mulodi4(lhs, rhs, &overflow_int);
    bool overflow = overflow_int != 0;
#endif
    *res = zig_wrap_i64(full_res, bits);
    return overflow || full_res < zig_minInt_i(64, bits) || full_res > zig_maxInt_i(64, bits);
}

static inline bool zig_mulo_u8(uint8_t *res, uint8_t lhs, uint8_t rhs, uint8_t bits) {
#if zig_has_builtin(mul_overflow) || defined(zig_gcc)
    uint8_t full_res;
    bool overflow = __builtin_mul_overflow(lhs, rhs, &full_res);
    *res = zig_wrap_u8(full_res, bits);
    return overflow || full_res < zig_minInt_u(8, bits) || full_res > zig_maxInt_u(8, bits);
#else
    uint32_t full_res;
    bool overflow = zig_mulo_u32(&full_res, lhs, rhs, bits);
    *res = (uint8_t)full_res;
    return overflow;
#endif
}

static inline bool zig_mulo_i8(int8_t *res, int8_t lhs, int8_t rhs, uint8_t bits) {
#if zig_has_builtin(mul_overflow) || defined(zig_gcc)
    int8_t full_res;
    bool overflow = __builtin_mul_overflow(lhs, rhs, &full_res);
    *res = zig_wrap_i8(full_res, bits);
    return overflow || full_res < zig_minInt_i(8, bits) || full_res > zig_maxInt_i(8, bits);
#else
    int32_t full_res;
    bool overflow = zig_mulo_i32(&full_res, lhs, rhs, bits);
    *res = (int8_t)full_res;
    return overflow;
#endif
}

static inline bool zig_mulo_u16(uint16_t *res, uint16_t lhs, uint16_t rhs, uint8_t bits) {
#if zig_has_builtin(mul_overflow) || defined(zig_gcc)
    uint16_t full_res;
    bool overflow = __builtin_mul_overflow(lhs, rhs, &full_res);
    *res = zig_wrap_u16(full_res, bits);
    return overflow || full_res < zig_minInt_u(16, bits) || full_res > zig_maxInt_u(16, bits);
#else
    uint32_t full_res;
    bool overflow = zig_mulo_u32(&full_res, lhs, rhs, bits);
    *res = (uint16_t)full_res;
    return overflow;
#endif
}

static inline bool zig_mulo_i16(int16_t *res, int16_t lhs, int16_t rhs, uint8_t bits) {
#if zig_has_builtin(mul_overflow) || defined(zig_gcc)
    int16_t full_res;
    bool overflow = __builtin_mul_overflow(lhs, rhs, &full_res);
    *res = zig_wrap_i16(full_res, bits);
    return overflow || full_res < zig_minInt_i(16, bits) || full_res > zig_maxInt_i(16, bits);
#else
    int32_t full_res;
    bool overflow = zig_mulo_i32(&full_res, lhs, rhs, bits);
    *res = (int16_t)full_res;
    return overflow;
#endif
}

#define zig_int_builtins(w) \
    static inline bool zig_shlo_u##w(uint##w##_t *res, uint##w##_t lhs, uint8_t rhs, uint8_t bits) { \
        *res = zig_shlw_u##w(lhs, rhs, bits); \
        return lhs > zig_maxInt_u(w, bits) >> rhs; \
    } \
\
    static inline bool zig_shlo_i##w(int##w##_t *res, int##w##_t lhs, uint8_t rhs, uint8_t bits) { \
        *res = zig_shlw_i##w(lhs, rhs, bits); \
        int##w##_t mask = (int##w##_t)(UINT##w##_MAX << (bits - rhs - 1)); \
        return (lhs & mask) != INT##w##_C(0) && (lhs & mask) != mask; \
    } \
\
    static inline uint##w##_t zig_shls_u##w(uint##w##_t lhs, uint##w##_t rhs, uint8_t bits) { \
        uint##w##_t res; \
        if (rhs < bits && !zig_shlo_u##w(&res, lhs, rhs, bits)) return res; \
        return lhs == INT##w##_C(0) ? INT##w##_C(0) : zig_maxInt_u(w, bits); \
    } \
\
    static inline int##w##_t zig_shls_i##w(int##w##_t lhs, uint##w##_t rhs, uint8_t bits) { \
        int##w##_t res; \
        if (rhs < bits && !zig_shlo_i##w(&res, lhs, rhs, bits)) return res; \
        return lhs == INT##w##_C(0) ? INT##w##_C(0) : \
            lhs < INT##w##_C(0) ? zig_minInt_i(w, bits) : zig_maxInt_i(w, bits); \
    } \
\
    static inline uint##w##_t zig_adds_u##w(uint##w##_t lhs, uint##w##_t rhs, uint8_t bits) { \
        uint##w##_t res; \
        return zig_addo_u##w(&res, lhs, rhs, bits) ? zig_maxInt_u(w, bits) : res; \
    } \
\
    static inline int##w##_t zig_adds_i##w(int##w##_t lhs, int##w##_t rhs, uint8_t bits) { \
        int##w##_t res; \
        if (!zig_addo_i##w(&res, lhs, rhs, bits)) return res; \
        return res >= INT##w##_C(0) ? zig_minInt_i(w, bits) : zig_maxInt_i(w, bits); \
    } \
\
    static inline uint##w##_t zig_subs_u##w(uint##w##_t lhs, uint##w##_t rhs, uint8_t bits) { \
        uint##w##_t res; \
        return zig_subo_u##w(&res, lhs, rhs, bits) ? zig_minInt_u(w, bits) : res; \
    } \
\
    static inline int##w##_t zig_subs_i##w(int##w##_t lhs, int##w##_t rhs, uint8_t bits) { \
        int##w##_t res; \
        if (!zig_subo_i##w(&res, lhs, rhs, bits)) return res; \
        return res >= INT##w##_C(0) ? zig_minInt_i(w, bits) : zig_maxInt_i(w, bits); \
    } \
\
    static inline uint##w##_t zig_muls_u##w(uint##w##_t lhs, uint##w##_t rhs, uint8_t bits) { \
        uint##w##_t res; \
        return zig_mulo_u##w(&res, lhs, rhs, bits) ? zig_maxInt_u(w, bits) : res; \
    } \
\
    static inline int##w##_t zig_muls_i##w(int##w##_t lhs, int##w##_t rhs, uint8_t bits) { \
        int##w##_t res; \
        if (!zig_mulo_i##w(&res, lhs, rhs, bits)) return res; \
        return (lhs ^ rhs) < INT##w##_C(0) ? zig_minInt_i(w, bits) : zig_maxInt_i(w, bits); \
    }
zig_int_builtins(8)
zig_int_builtins(16)
zig_int_builtins(32)
zig_int_builtins(64)
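/* Usage sketch (illustrative values): the saturating helpers clamp to the
 * limits of the bits-wide type, mirroring Zig's +|, -|, *| and <<| operators;
 * e.g. zig_adds_i8(100, 100, 8) returns 127 (INT8_MAX) and
 * zig_subs_u8(1, 2, 8) returns 0. */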

#define zig_builtin8(name, val) __builtin_##name(val)
typedef unsigned int zig_Builtin8;

#define zig_builtin16(name, val) __builtin_##name(val)
typedef unsigned int zig_Builtin16;

#if INT_MIN <= INT32_MIN
#define zig_builtin32(name, val) __builtin_##name(val)
typedef unsigned int zig_Builtin32;
#elif LONG_MIN <= INT32_MIN
#define zig_builtin32(name, val) __builtin_##name##l(val)
typedef unsigned long zig_Builtin32;
#endif

#if INT_MIN <= INT64_MIN
#define zig_builtin64(name, val) __builtin_##name(val)
typedef unsigned int zig_Builtin64;
#elif LONG_MIN <= INT64_MIN
#define zig_builtin64(name, val) __builtin_##name##l(val)
typedef unsigned long zig_Builtin64;
#elif LLONG_MIN <= INT64_MIN
#define zig_builtin64(name, val) __builtin_##name##ll(val)
typedef unsigned long long zig_Builtin64;
#endif

static inline uint8_t zig_byte_swap_u8(uint8_t val, uint8_t bits) {
    return zig_wrap_u8(val >> (8 - bits), bits);
}

static inline int8_t zig_byte_swap_i8(int8_t val, uint8_t bits) {
    return zig_wrap_i8((int8_t)zig_byte_swap_u8((uint8_t)val, bits), bits);
}

static inline uint16_t zig_byte_swap_u16(uint16_t val, uint8_t bits) {
    uint16_t full_res;
#if zig_has_builtin(bswap16) || defined(zig_gcc)
    full_res = __builtin_bswap16(val);
#else
    full_res = (uint16_t)zig_byte_swap_u8((uint8_t)(val >> 0), 8) << 8 |
               (uint16_t)zig_byte_swap_u8((uint8_t)(val >> 8), 8) >> 0;
#endif
    return zig_wrap_u16(full_res >> (16 - bits), bits);
}

static inline int16_t zig_byte_swap_i16(int16_t val, uint8_t bits) {
    return zig_wrap_i16((int16_t)zig_byte_swap_u16((uint16_t)val, bits), bits);
}

static inline uint32_t zig_byte_swap_u32(uint32_t val, uint8_t bits) {
    uint32_t full_res;
#if zig_has_builtin(bswap32) || defined(zig_gcc)
    full_res = __builtin_bswap32(val);
#else
    full_res = (uint32_t)zig_byte_swap_u16((uint16_t)(val >> 0), 16) << 16 |
               (uint32_t)zig_byte_swap_u16((uint16_t)(val >> 16), 16) >> 0;
#endif
    return zig_wrap_u32(full_res >> (32 - bits), bits);
}

static inline int32_t zig_byte_swap_i32(int32_t val, uint8_t bits) {
    return zig_wrap_i32((int32_t)zig_byte_swap_u32((uint32_t)val, bits), bits);
}

static inline uint64_t zig_byte_swap_u64(uint64_t val, uint8_t bits) {
    uint64_t full_res;
#if zig_has_builtin(bswap64) || defined(zig_gcc)
    full_res = __builtin_bswap64(val);
#else
    full_res = (uint64_t)zig_byte_swap_u32((uint32_t)(val >> 0), 32) << 32 |
               (uint64_t)zig_byte_swap_u32((uint32_t)(val >> 32), 32) >> 0;
#endif
    return zig_wrap_u64(full_res >> (64 - bits), bits);
}

static inline int64_t zig_byte_swap_i64(int64_t val, uint8_t bits) {
    return zig_wrap_i64((int64_t)zig_byte_swap_u64((uint64_t)val, bits), bits);
}

static inline uint8_t zig_bit_reverse_u8(uint8_t val, uint8_t bits) {
    uint8_t full_res;
#if zig_has_builtin(bitreverse8)
    full_res = __builtin_bitreverse8(val);
#else
    static uint8_t const lut[0x10] = {
        0x0, 0x8, 0x4, 0xc, 0x2, 0xa, 0x6, 0xe,
        0x1, 0x9, 0x5, 0xd, 0x3, 0xb, 0x7, 0xf
    };
    full_res = lut[val >> 0 & 0xF] << 4 | lut[val >> 4 & 0xF] << 0;
#endif
    return zig_wrap_u8(full_res >> (8 - bits), bits);
}

static inline int8_t zig_bit_reverse_i8(int8_t val, uint8_t bits) {
    return zig_wrap_i8((int8_t)zig_bit_reverse_u8((uint8_t)val, bits), bits);
}

static inline uint16_t zig_bit_reverse_u16(uint16_t val, uint8_t bits) {
    uint16_t full_res;
#if zig_has_builtin(bitreverse16)
    full_res = __builtin_bitreverse16(val);
#else
    full_res = (uint16_t)zig_bit_reverse_u8((uint8_t)(val >> 0), 8) << 8 |
               (uint16_t)zig_bit_reverse_u8((uint8_t)(val >> 8), 8) >> 0;
#endif
    return zig_wrap_u16(full_res >> (16 - bits), bits);
}

static inline int16_t zig_bit_reverse_i16(int16_t val, uint8_t bits) {
    return zig_wrap_i16((int16_t)zig_bit_reverse_u16((uint16_t)val, bits), bits);
}

static inline uint32_t zig_bit_reverse_u32(uint32_t val, uint8_t bits) {
    uint32_t full_res;
#if zig_has_builtin(bitreverse32)
    full_res = __builtin_bitreverse32(val);
#else
    full_res = (uint32_t)zig_bit_reverse_u16((uint16_t)(val >> 0), 16) << 16 |
               (uint32_t)zig_bit_reverse_u16((uint16_t)(val >> 16), 16) >> 0;
#endif
    return zig_wrap_u32(full_res >> (32 - bits), bits);
}

static inline int32_t zig_bit_reverse_i32(int32_t val, uint8_t bits) {
    return zig_wrap_i32((int32_t)zig_bit_reverse_u32((uint32_t)val, bits), bits);
}

static inline uint64_t zig_bit_reverse_u64(uint64_t val, uint8_t bits) {
    uint64_t full_res;
#if zig_has_builtin(bitreverse64)
    full_res = __builtin_bitreverse64(val);
#else
    full_res = (uint64_t)zig_bit_reverse_u32((uint32_t)(val >> 0), 32) << 32 |
               (uint64_t)zig_bit_reverse_u32((uint32_t)(val >> 32), 32) >> 0;
#endif
    return zig_wrap_u64(full_res >> (64 - bits), bits);
}

static inline int64_t zig_bit_reverse_i64(int64_t val, uint8_t bits) {
    return zig_wrap_i64((int64_t)zig_bit_reverse_u64((uint64_t)val, bits), bits);
}

#define zig_builtin_popcount_common(w) \
    static inline uint8_t zig_popcount_i##w(int##w##_t val, uint8_t bits) { \
        return zig_popcount_u##w((uint##w##_t)val, bits); \
    }
#if zig_has_builtin(popcount) || defined(zig_gcc) || defined(zig_tinyc)
#define zig_builtin_popcount(w) \
    static inline uint8_t zig_popcount_u##w(uint##w##_t val, uint8_t bits) { \
        (void)bits; \
        return zig_builtin##w(popcount, val); \
    } \
\
    zig_builtin_popcount_common(w)
#else
#define zig_builtin_popcount(w) \
    static inline uint8_t zig_popcount_u##w(uint##w##_t val, uint8_t bits) { \
        (void)bits; \
        uint##w##_t temp = val - ((val >> 1) & (UINT##w##_MAX / 3)); \
        temp = (temp & (UINT##w##_MAX / 5)) + ((temp >> 2) & (UINT##w##_MAX / 5)); \
        temp = (temp + (temp >> 4)) & (UINT##w##_MAX / 17); \
        return temp * (UINT##w##_MAX / 255) >> (UINT8_C(w) - UINT8_C(8)); \
    } \
\
    zig_builtin_popcount_common(w)
#endif
zig_builtin_popcount(8)
zig_builtin_popcount(16)
zig_builtin_popcount(32)
zig_builtin_popcount(64)
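/* The fallback above is the classic SWAR popcount: subtracting the shifted
 * value folds each 2-bit group into its bit count (UINT##w##_MAX / 3 is
 * 0x55...55), the next two steps accumulate 4-bit and then per-byte counts
 * (the 0x33...33 and 0x0F...0F masks), and multiplying by 0x01...01 sums all
 * bytes into the top byte, which the final shift extracts. */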

#define zig_builtin_ctz_common(w) \
    static inline uint8_t zig_ctz_i##w(int##w##_t val, uint8_t bits) { \
        return zig_ctz_u##w((uint##w##_t)val, bits); \
    }
#if zig_has_builtin(ctz) || defined(zig_gcc) || defined(zig_tinyc)
#define zig_builtin_ctz(w) \
    static inline uint8_t zig_ctz_u##w(uint##w##_t val, uint8_t bits) { \
        if (val == 0) return bits; \
        return zig_builtin##w(ctz, val); \
    } \
\
    zig_builtin_ctz_common(w)
#else
#define zig_builtin_ctz(w) \
    static inline uint8_t zig_ctz_u##w(uint##w##_t val, uint8_t bits) { \
        return zig_popcount_u##w(zig_not_u##w(val, bits) & zig_subw_u##w(val, 1, bits), bits); \
    } \
\
    zig_builtin_ctz_common(w)
#endif
zig_builtin_ctz(8)
zig_builtin_ctz(16)
zig_builtin_ctz(32)
zig_builtin_ctz(64)
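/* The non-builtin path uses the identity ctz(v) == popcount(~v & (v - 1)):
 * v - 1 turns the trailing zeros into ones and clears the lowest set bit,
 * and masking with ~v keeps only those trailing-zero positions. For v == 0
 * the masked value is all ones of the requested width, so the result is
 * `bits`, matching the builtin branch above. */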

#define zig_builtin_clz_common(w) \
    static inline uint8_t zig_clz_i##w(int##w##_t val, uint8_t bits) { \
        return zig_clz_u##w((uint##w##_t)val, bits); \
    }
#if zig_has_builtin(clz) || defined(zig_gcc) || defined(zig_tinyc)
#define zig_builtin_clz(w) \
    static inline uint8_t zig_clz_u##w(uint##w##_t val, uint8_t bits) { \
        if (val == 0) return bits; \
        return zig_builtin##w(clz, val) - (zig_bitSizeOf(zig_Builtin##w) - bits); \
    } \
\
    zig_builtin_clz_common(w)
#else
#define zig_builtin_clz(w) \
    static inline uint8_t zig_clz_u##w(uint##w##_t val, uint8_t bits) { \
        return zig_ctz_u##w(zig_bit_reverse_u##w(val, bits), bits); \
    } \
\
    zig_builtin_clz_common(w)
#endif
zig_builtin_clz(8)
zig_builtin_clz(16)
zig_builtin_clz(32)
zig_builtin_clz(64)

/* ======================== 128-bit Integer Support ========================= */

#if !defined(zig_has_int128)
# if defined(__SIZEOF_INT128__)
# define zig_has_int128 1
# else
# define zig_has_int128 0
# endif
#endif

#if zig_has_int128

typedef unsigned __int128 zig_u128;
typedef signed __int128 zig_i128;

#define zig_make_u128(hi, lo) ((zig_u128)(hi)<<64|(lo))
#define zig_make_i128(hi, lo) ((zig_i128)zig_make_u128(hi, lo))
#define zig_init_u128(hi, lo) zig_make_u128(hi, lo)
#define zig_init_i128(hi, lo) zig_make_i128(hi, lo)
#define zig_hi_u128(val) ((uint64_t)((val) >> 64))
#define zig_lo_u128(val) ((uint64_t)((val) >> 0))
#define zig_hi_i128(val) (( int64_t)((val) >> 64))
#define zig_lo_i128(val) ((uint64_t)((val) >> 0))
#define zig_bitCast_u128(val) ((zig_u128)(val))
#define zig_bitCast_i128(val) ((zig_i128)(val))
#define zig_cmp_int128(Type) \
    static inline int32_t zig_cmp_##Type(zig_##Type lhs, zig_##Type rhs) { \
        return (lhs > rhs) - (lhs < rhs); \
    }
#define zig_bit_int128(Type, operation, operator) \
    static inline zig_##Type zig_##operation##_##Type(zig_##Type lhs, zig_##Type rhs) { \
        return lhs operator rhs; \
    }

#else /* zig_has_int128 */

#if zig_little_endian
typedef struct { zig_align(16) uint64_t lo; uint64_t hi; } zig_u128;
typedef struct { zig_align(16) uint64_t lo; int64_t hi; } zig_i128;
#else
typedef struct { zig_align(16) uint64_t hi; uint64_t lo; } zig_u128;
typedef struct { zig_align(16) int64_t hi; uint64_t lo; } zig_i128;
#endif

#define zig_make_u128(hi, lo) ((zig_u128){ .h##i = (hi), .l##o = (lo) })
#define zig_make_i128(hi, lo) ((zig_i128){ .h##i = (hi), .l##o = (lo) })

#if defined(zig_msvc) /* MSVC doesn't allow struct literals in constant expressions */
#define zig_init_u128(hi, lo) { .h##i = (hi), .l##o = (lo) }
#define zig_init_i128(hi, lo) { .h##i = (hi), .l##o = (lo) }
#else /* But non-MSVC doesn't like the unprotected commas */
#define zig_init_u128(hi, lo) zig_make_u128(hi, lo)
#define zig_init_i128(hi, lo) zig_make_i128(hi, lo)
#endif
#define zig_hi_u128(val) ((val).hi)
#define zig_lo_u128(val) ((val).lo)
#define zig_hi_i128(val) ((val).hi)
#define zig_lo_i128(val) ((val).lo)
#define zig_bitCast_u128(val) zig_make_u128((uint64_t)(val).hi, (val).lo)
#define zig_bitCast_i128(val) zig_make_i128(( int64_t)(val).hi, (val).lo)
#define zig_cmp_int128(Type) \
    static inline int32_t zig_cmp_##Type(zig_##Type lhs, zig_##Type rhs) { \
        return (lhs.hi == rhs.hi) \
            ? (lhs.lo > rhs.lo) - (lhs.lo < rhs.lo) \
            : (lhs.hi > rhs.hi) - (lhs.hi < rhs.hi); \
    }
#define zig_bit_int128(Type, operation, operator) \
    static inline zig_##Type zig_##operation##_##Type(zig_##Type lhs, zig_##Type rhs) { \
        return (zig_##Type){ .hi = lhs.hi operator rhs.hi, .lo = lhs.lo operator rhs.lo }; \
    }

#endif /* zig_has_int128 */

#define zig_minInt_u128 zig_make_u128(zig_minInt_u64, zig_minInt_u64)
#define zig_maxInt_u128 zig_make_u128(zig_maxInt_u64, zig_maxInt_u64)
#define zig_minInt_i128 zig_make_i128(zig_minInt_i64, zig_minInt_u64)
#define zig_maxInt_i128 zig_make_i128(zig_maxInt_i64, zig_maxInt_u64)
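/* For example, zig_make_u128(1, 0) represents 1 << 64 (hi = 1, lo = 0), and
 * zig_hi_u128/zig_lo_u128 recover the two halves again, whether zig_u128 is
 * the native unsigned __int128 or the two-uint64_t struct fallback. */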

zig_cmp_int128(u128)
zig_cmp_int128(i128)

zig_bit_int128(u128, and, &)
zig_bit_int128(i128, and, &)

zig_bit_int128(u128, or, |)
zig_bit_int128(i128, or, |)

zig_bit_int128(u128, xor, ^)
zig_bit_int128(i128, xor, ^)

static inline zig_u128 zig_shr_u128(zig_u128 lhs, uint8_t rhs);

#if zig_has_int128

static inline zig_u128 zig_not_u128(zig_u128 val, uint8_t bits) {
    return val ^ zig_maxInt_u(128, bits);
}

static inline zig_i128 zig_not_i128(zig_i128 val, uint8_t bits) {
    (void)bits;
    return ~val;
}

static inline zig_u128 zig_shr_u128(zig_u128 lhs, uint8_t rhs) {
    return lhs >> rhs;
}

static inline zig_u128 zig_shl_u128(zig_u128 lhs, uint8_t rhs) {
    return lhs << rhs;
}

static inline zig_i128 zig_shr_i128(zig_i128 lhs, uint8_t rhs) {
    // This works around a GCC miscompilation, but it has the side benefit of
    // emitting better code. It is behind the `#if` because it depends on
    // arithmetic right shift, which is implementation-defined in C, but should
    // be guaranteed on any GCC-compatible compiler.
#if defined(zig_gnuc)
    return lhs >> rhs;
#else
    zig_i128 sign_mask = lhs < zig_make_i128(0, 0) ? -zig_make_i128(0, 1) : zig_make_i128(0, 0);
    return ((lhs ^ sign_mask) >> rhs) ^ sign_mask;
#endif
}

static inline zig_i128 zig_shl_i128(zig_i128 lhs, uint8_t rhs) {
    return lhs << rhs;
}

static inline zig_u128 zig_add_u128(zig_u128 lhs, zig_u128 rhs) {
    return lhs + rhs;
}

static inline zig_i128 zig_add_i128(zig_i128 lhs, zig_i128 rhs) {
    return lhs + rhs;
}

static inline zig_u128 zig_sub_u128(zig_u128 lhs, zig_u128 rhs) {
    return lhs - rhs;
}

static inline zig_i128 zig_sub_i128(zig_i128 lhs, zig_i128 rhs) {
    return lhs - rhs;
}

static inline zig_u128 zig_mul_u128(zig_u128 lhs, zig_u128 rhs) {
    return lhs * rhs;
}

static inline zig_i128 zig_mul_i128(zig_i128 lhs, zig_i128 rhs) {
    return lhs * rhs;
}

static inline zig_u128 zig_div_trunc_u128(zig_u128 lhs, zig_u128 rhs) {
    return lhs / rhs;
}

static inline zig_i128 zig_div_trunc_i128(zig_i128 lhs, zig_i128 rhs) {
    return lhs / rhs;
}

static inline zig_u128 zig_rem_u128(zig_u128 lhs, zig_u128 rhs) {
    return lhs % rhs;
}

static inline zig_i128 zig_rem_i128(zig_i128 lhs, zig_i128 rhs) {
    return lhs % rhs;
}

#else /* zig_has_int128 */

static inline zig_u128 zig_not_u128(zig_u128 val, uint8_t bits) {
    return (zig_u128){ .hi = zig_not_u64(val.hi, bits - UINT8_C(64)), .lo = zig_not_u64(val.lo, UINT8_C(64)) };
}

static inline zig_i128 zig_not_i128(zig_i128 val, uint8_t bits) {
    return (zig_i128){ .hi = zig_not_i64(val.hi, bits - UINT8_C(64)), .lo = zig_not_u64(val.lo, UINT8_C(64)) };
}

static inline zig_u128 zig_shr_u128(zig_u128 lhs, uint8_t rhs) {
    if (rhs == UINT8_C(0)) return lhs;
    if (rhs >= UINT8_C(64)) return (zig_u128){ .hi = zig_minInt_u64, .lo = lhs.hi >> (rhs - UINT8_C(64)) };
    return (zig_u128){ .hi = lhs.hi >> rhs, .lo = lhs.hi << (UINT8_C(64) - rhs) | lhs.lo >> rhs };
}

static inline zig_u128 zig_shl_u128(zig_u128 lhs, uint8_t rhs) {
    if (rhs == UINT8_C(0)) return lhs;
    if (rhs >= UINT8_C(64)) return (zig_u128){ .hi = lhs.lo << (rhs - UINT8_C(64)), .lo = zig_minInt_u64 };
    return (zig_u128){ .hi = lhs.hi << rhs | lhs.lo >> (UINT8_C(64) - rhs), .lo = lhs.lo << rhs };
}

static inline zig_i128 zig_shr_i128(zig_i128 lhs, uint8_t rhs) {
    if (rhs == UINT8_C(0)) return lhs;
    if (rhs >= UINT8_C(64)) return (zig_i128){ .hi = zig_shr_i64(lhs.hi, 63), .lo = zig_shr_i64(lhs.hi, (rhs - UINT8_C(64))) };
    return (zig_i128){ .hi = zig_shr_i64(lhs.hi, rhs), .lo = lhs.lo >> rhs | (uint64_t)lhs.hi << (UINT8_C(64) - rhs) };
}

static inline zig_i128 zig_shl_i128(zig_i128 lhs, uint8_t rhs) {
    if (rhs == UINT8_C(0)) return lhs;
    if (rhs >= UINT8_C(64)) return (zig_i128){ .hi = lhs.lo << (rhs - UINT8_C(64)), .lo = zig_minInt_u64 };
    return (zig_i128){ .hi = lhs.hi << rhs | lhs.lo >> (UINT8_C(64) - rhs), .lo = lhs.lo << rhs };
}

static inline zig_u128 zig_add_u128(zig_u128 lhs, zig_u128 rhs) {
    zig_u128 res;
    res.hi = lhs.hi + rhs.hi + zig_addo_u64(&res.lo, lhs.lo, rhs.lo, 64);
    return res;
}

static inline zig_i128 zig_add_i128(zig_i128 lhs, zig_i128 rhs) {
    zig_i128 res;
    res.hi = lhs.hi + rhs.hi + zig_addo_u64(&res.lo, lhs.lo, rhs.lo, 64);
    return res;
}

static inline zig_u128 zig_sub_u128(zig_u128 lhs, zig_u128 rhs) {
    zig_u128 res;
    res.hi = lhs.hi - rhs.hi - zig_subo_u64(&res.lo, lhs.lo, rhs.lo, 64);
    return res;
}

static inline zig_i128 zig_sub_i128(zig_i128 lhs, zig_i128 rhs) {
    zig_i128 res;
    res.hi = lhs.hi - rhs.hi - zig_subo_u64(&res.lo, lhs.lo, rhs.lo, 64);
    return res;
}

zig_extern zig_i128 __multi3(zig_i128 lhs, zig_i128 rhs);
static zig_i128 zig_mul_i128(zig_i128 lhs, zig_i128 rhs) {
    return __multi3(lhs, rhs);
}

static zig_u128 zig_mul_u128(zig_u128 lhs, zig_u128 rhs) {
    return zig_bitCast_u128(zig_mul_i128(zig_bitCast_i128(lhs), zig_bitCast_i128(rhs)));
}

zig_extern zig_u128 __udivti3(zig_u128 lhs, zig_u128 rhs);
static zig_u128 zig_div_trunc_u128(zig_u128 lhs, zig_u128 rhs) {
    return __udivti3(lhs, rhs);
}

zig_extern zig_i128 __divti3(zig_i128 lhs, zig_i128 rhs);
static zig_i128 zig_div_trunc_i128(zig_i128 lhs, zig_i128 rhs) {
    return __divti3(lhs, rhs);
}

zig_extern zig_u128 __umodti3(zig_u128 lhs, zig_u128 rhs);
static zig_u128 zig_rem_u128(zig_u128 lhs, zig_u128 rhs) {
    return __umodti3(lhs, rhs);
}

zig_extern zig_i128 __modti3(zig_i128 lhs, zig_i128 rhs);
1649static zig_i128 zig_rem_i128(zig_i128 lhs, zig_i128 rhs) {
1650 return __modti3(lhs, rhs);
1651}
1652
1653#endif /* zig_has_int128 */
1654
1655#define zig_div_floor_u128 zig_div_trunc_u128
1656
1657static inline zig_i128 zig_div_floor_i128(zig_i128 lhs, zig_i128 rhs) {
1658 zig_i128 rem = zig_rem_i128(lhs, rhs);
1659 int64_t mask = zig_or_u64((uint64_t)zig_hi_i128(rem), zig_lo_i128(rem)) != UINT64_C(0)
1660 ? zig_shr_i64(zig_xor_i64(zig_hi_i128(lhs), zig_hi_i128(rhs)), UINT8_C(63)) : INT64_C(0);
1661 return zig_add_i128(zig_div_trunc_i128(lhs, rhs), zig_make_i128(mask, (uint64_t)mask));
1662}
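
/* For example, with lhs == -7 and rhs == 2 the truncated quotient is -3 and
 * the remainder is -1; the remainder is nonzero and the operands' signs
 * differ, so mask == -1 and the result is -3 + (-1) == -4 == floor(-7 / 2). */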
1663
1664#define zig_mod_u128 zig_rem_u128
1665
1666static inline zig_i128 zig_mod_i128(zig_i128 lhs, zig_i128 rhs) {
1667 zig_i128 rem = zig_rem_i128(lhs, rhs);
1668 int64_t mask = zig_or_u64((uint64_t)zig_hi_i128(rem), zig_lo_i128(rem)) != UINT64_C(0)
1669 ? zig_shr_i64(zig_xor_i64(zig_hi_i128(lhs), zig_hi_i128(rhs)), UINT8_C(63)) : INT64_C(0);
1670 return zig_add_i128(rem, zig_and_i128(rhs, zig_make_i128(mask, (uint64_t)mask)));
1671}
1672
1673static inline zig_u128 zig_min_u128(zig_u128 lhs, zig_u128 rhs) {
1674 return zig_cmp_u128(lhs, rhs) < INT32_C(0) ? lhs : rhs;
1675}
1676
1677static inline zig_i128 zig_min_i128(zig_i128 lhs, zig_i128 rhs) {
1678 return zig_cmp_i128(lhs, rhs) < INT32_C(0) ? lhs : rhs;
1679}
1680
1681static inline zig_u128 zig_max_u128(zig_u128 lhs, zig_u128 rhs) {
1682 return zig_cmp_u128(lhs, rhs) > INT32_C(0) ? lhs : rhs;
1683}
1684
1685static inline zig_i128 zig_max_i128(zig_i128 lhs, zig_i128 rhs) {
1686 return zig_cmp_i128(lhs, rhs) > INT32_C(0) ? lhs : rhs;
1687}
1688
1689static inline zig_u128 zig_wrap_u128(zig_u128 val, uint8_t bits) {
1690 return zig_and_u128(val, zig_maxInt_u(128, bits));
1691}
1692
1693static inline zig_i128 zig_wrap_i128(zig_i128 val, uint8_t bits) {
1694 if (bits > UINT8_C(64)) return zig_make_i128(zig_wrap_i64(zig_hi_i128(val), bits - UINT8_C(64)), zig_lo_i128(val));
1695 int64_t lo = zig_wrap_i64((int64_t)zig_lo_i128(val), bits);
1696 return zig_make_i128(zig_shr_i64(lo, 63), (uint64_t)lo);
1697}
1698
1699static inline zig_u128 zig_shlw_u128(zig_u128 lhs, uint8_t rhs, uint8_t bits) {
1700 return zig_wrap_u128(zig_shl_u128(lhs, rhs), bits);
1701}
1702
1703static inline zig_i128 zig_shlw_i128(zig_i128 lhs, uint8_t rhs, uint8_t bits) {
1704 return zig_wrap_i128(zig_bitCast_i128(zig_shl_u128(zig_bitCast_u128(lhs), rhs)), bits);
1705}
1706
1707static inline zig_u128 zig_addw_u128(zig_u128 lhs, zig_u128 rhs, uint8_t bits) {
1708 return zig_wrap_u128(zig_add_u128(lhs, rhs), bits);
1709}
1710
1711static inline zig_i128 zig_addw_i128(zig_i128 lhs, zig_i128 rhs, uint8_t bits) {
1712 return zig_wrap_i128(zig_bitCast_i128(zig_add_u128(zig_bitCast_u128(lhs), zig_bitCast_u128(rhs))), bits);
1713}
1714
1715static inline zig_u128 zig_subw_u128(zig_u128 lhs, zig_u128 rhs, uint8_t bits) {
1716 return zig_wrap_u128(zig_sub_u128(lhs, rhs), bits);
1717}
1718
1719static inline zig_i128 zig_subw_i128(zig_i128 lhs, zig_i128 rhs, uint8_t bits) {
1720 return zig_wrap_i128(zig_bitCast_i128(zig_sub_u128(zig_bitCast_u128(lhs), zig_bitCast_u128(rhs))), bits);
1721}
1722
1723static inline zig_u128 zig_mulw_u128(zig_u128 lhs, zig_u128 rhs, uint8_t bits) {
1724 return zig_wrap_u128(zig_mul_u128(lhs, rhs), bits);
1725}
1726
1727static inline zig_i128 zig_mulw_i128(zig_i128 lhs, zig_i128 rhs, uint8_t bits) {
1728 return zig_wrap_i128(zig_bitCast_i128(zig_mul_u128(zig_bitCast_u128(lhs), zig_bitCast_u128(rhs))), bits);
1729}
1730
1731static inline zig_u128 zig_abs_i128(zig_i128 val) {
1732 zig_i128 tmp = zig_shr_i128(val, 127);
1733 return zig_bitCast_u128(zig_sub_i128(zig_xor_i128(val, tmp), tmp));
1734}
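
/* The xor/subtract pair above computes |val| without a branch: for val == -5,
 * tmp == -1, so (val ^ tmp) - tmp == 4 - (-1) == 5; for non-negative val,
 * tmp == 0 and the value passes through unchanged. */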
1735
1736#if zig_has_int128
1737
1738static inline bool zig_addo_u128(zig_u128 *res, zig_u128 lhs, zig_u128 rhs, uint8_t bits) {
1739#if zig_has_builtin(add_overflow)
1740 zig_u128 full_res;
1741 bool overflow = __builtin_add_overflow(lhs, rhs, &full_res);
1742 *res = zig_wrap_u128(full_res, bits);
1743 return overflow || full_res < zig_minInt_u(128, bits) || full_res > zig_maxInt_u(128, bits);
1744#else
1745 *res = zig_addw_u128(lhs, rhs, bits);
1746 return *res < lhs;
1747#endif
1748}
1749
1750static inline bool zig_addo_i128(zig_i128 *res, zig_i128 lhs, zig_i128 rhs, uint8_t bits) {
1751#if zig_has_builtin(add_overflow)
1752 zig_i128 full_res;
1753 bool overflow = __builtin_add_overflow(lhs, rhs, &full_res);
1754#else
1755 zig_i128 full_res = (zig_i128)((zig_u128)lhs + (zig_u128)rhs);
1756 bool overflow = ((full_res ^ lhs) & (full_res ^ rhs)) < 0;
1757#endif
1758 *res = zig_wrap_i128(full_res, bits);
1759 return overflow || full_res < zig_minInt_i(128, bits) || full_res > zig_maxInt_i(128, bits);
1760}
1761
1762static inline bool zig_subo_u128(zig_u128 *res, zig_u128 lhs, zig_u128 rhs, uint8_t bits) {
1763#if zig_has_builtin(sub_overflow)
1764 zig_u128 full_res;
1765 bool overflow = __builtin_sub_overflow(lhs, rhs, &full_res);
1766 *res = zig_wrap_u128(full_res, bits);
1767 return overflow || full_res < zig_minInt_u(128, bits) || full_res > zig_maxInt_u(128, bits);
1768#else
1769 *res = zig_subw_u128(lhs, rhs, bits);
1770 return *res > lhs;
1771#endif
1772}
1773
1774static inline bool zig_subo_i128(zig_i128 *res, zig_i128 lhs, zig_i128 rhs, uint8_t bits) {
1775#if zig_has_builtin(sub_overflow)
1776 zig_i128 full_res;
1777 bool overflow = __builtin_sub_overflow(lhs, rhs, &full_res);
1778#else
1779 zig_i128 full_res = (zig_i128)((zig_u128)lhs - (zig_u128)rhs);
1780 bool overflow = ((lhs ^ rhs) & (full_res ^ lhs)) < 0;
1781#endif
1782 *res = zig_wrap_i128(full_res, bits);
1783 return overflow || full_res < zig_minInt_i(128, bits) || full_res > zig_maxInt_i(128, bits);
1784}
1785
1786static inline bool zig_mulo_u128(zig_u128 *res, zig_u128 lhs, zig_u128 rhs, uint8_t bits) {
1787#if zig_has_builtin(mul_overflow)
1788 zig_u128 full_res;
1789 bool overflow = __builtin_mul_overflow(lhs, rhs, &full_res);
1790 *res = zig_wrap_u128(full_res, bits);
1791 return overflow || full_res < zig_minInt_u(128, bits) || full_res > zig_maxInt_u(128, bits);
1792#else
1793 *res = zig_mulw_u128(lhs, rhs, bits);
1794 return rhs != zig_make_u128(0, 0) && lhs > zig_maxInt_u(128, bits) / rhs;
1795#endif
1796}
1797
1798zig_extern zig_i128 __muloti4(zig_i128 lhs, zig_i128 rhs, int *overflow);
1799static inline bool zig_mulo_i128(zig_i128 *res, zig_i128 lhs, zig_i128 rhs, uint8_t bits) {
1800#if zig_has_builtin(mul_overflow)
1801 zig_i128 full_res;
1802 bool overflow = __builtin_mul_overflow(lhs, rhs, &full_res);
1803#else
1804 int overflow_int;
1805 zig_i128 full_res = __muloti4(lhs, rhs, &overflow_int);
1806 bool overflow = overflow_int != 0;
1807#endif
1808 *res = zig_wrap_i128(full_res, bits);
1809 return overflow || full_res < zig_minInt_i(128, bits) || full_res > zig_maxInt_i(128, bits);
1810}
1811
1812#else /* zig_has_int128 */
1813
1814static inline bool zig_addo_u128(zig_u128 *res, zig_u128 lhs, zig_u128 rhs, uint8_t bits) {
1815 uint64_t hi;
1816 bool overflow = zig_addo_u64(&hi, lhs.hi, rhs.hi, bits - 64);
1817 return overflow ^ zig_addo_u64(&res->hi, hi, zig_addo_u64(&res->lo, lhs.lo, rhs.lo, 64), bits - 64);
1818}
1819
1820static inline bool zig_addo_i128(zig_i128 *res, zig_i128 lhs, zig_i128 rhs, uint8_t bits) {
1821 int64_t hi;
1822 bool overflow = zig_addo_i64(&hi, lhs.hi, rhs.hi, bits - 64);
1823 return overflow ^ zig_addo_i64(&res->hi, hi, zig_addo_u64(&res->lo, lhs.lo, rhs.lo, 64), bits - 64);
1824}
1825
1826static inline bool zig_subo_u128(zig_u128 *res, zig_u128 lhs, zig_u128 rhs, uint8_t bits) {
1827 uint64_t hi;
1828 bool overflow = zig_subo_u64(&hi, lhs.hi, rhs.hi, bits - 64);
1829 return overflow ^ zig_subo_u64(&res->hi, hi, zig_subo_u64(&res->lo, lhs.lo, rhs.lo, 64), bits - 64);
1830}
1831
1832static inline bool zig_subo_i128(zig_i128 *res, zig_i128 lhs, zig_i128 rhs, uint8_t bits) {
1833 int64_t hi;
1834 bool overflow = zig_subo_i64(&hi, lhs.hi, rhs.hi, bits - 64);
1835 return overflow ^ zig_subo_i64(&res->hi, hi, zig_subo_u64(&res->lo, lhs.lo, rhs.lo, 64), bits - 64);
1836}
1837
1838static inline bool zig_mulo_u128(zig_u128 *res, zig_u128 lhs, zig_u128 rhs, uint8_t bits) {
1839 *res = zig_mulw_u128(lhs, rhs, bits);
    return zig_cmp_u128(rhs, zig_make_u128(0, 0)) != INT32_C(0) &&
1841 zig_cmp_u128(lhs, zig_div_trunc_u128(zig_maxInt_u(128, bits), rhs)) > INT32_C(0);
1842}
1843
1844zig_extern zig_i128 __muloti4(zig_i128 lhs, zig_i128 rhs, int *overflow);
1845static inline bool zig_mulo_i128(zig_i128 *res, zig_i128 lhs, zig_i128 rhs, uint8_t bits) {
1846 int overflow_int;
1847 zig_i128 full_res = __muloti4(lhs, rhs, &overflow_int);
1848 bool overflow = overflow_int != 0 ||
1849 zig_cmp_i128(full_res, zig_minInt_i(128, bits)) < INT32_C(0) ||
1850 zig_cmp_i128(full_res, zig_maxInt_i(128, bits)) > INT32_C(0);
1851 *res = zig_wrap_i128(full_res, bits);
1852 return overflow;
1853}
1854
1855#endif /* zig_has_int128 */
1856
1857static inline bool zig_shlo_u128(zig_u128 *res, zig_u128 lhs, uint8_t rhs, uint8_t bits) {
1858 *res = zig_shlw_u128(lhs, rhs, bits);
1859 return zig_cmp_u128(lhs, zig_shr_u128(zig_maxInt_u(128, bits), rhs)) > INT32_C(0);
1860}
1861
1862static inline bool zig_shlo_i128(zig_i128 *res, zig_i128 lhs, uint8_t rhs, uint8_t bits) {
1863 *res = zig_shlw_i128(lhs, rhs, bits);
1864 zig_i128 mask = zig_bitCast_i128(zig_shl_u128(zig_maxInt_u128, bits - rhs - UINT8_C(1)));
1865 return zig_cmp_i128(zig_and_i128(lhs, mask), zig_make_i128(0, 0)) != INT32_C(0) &&
1866 zig_cmp_i128(zig_and_i128(lhs, mask), mask) != INT32_C(0);
1867}
1868
1869static inline zig_u128 zig_shls_u128(zig_u128 lhs, zig_u128 rhs, uint8_t bits) {
1870 zig_u128 res;
1871 if (zig_cmp_u128(rhs, zig_make_u128(0, bits)) < INT32_C(0) && !zig_shlo_u128(&res, lhs, (uint8_t)zig_lo_u128(rhs), bits)) return res;
1872 switch (zig_cmp_u128(lhs, zig_make_u128(0, 0))) {
1873 case 0: return zig_make_u128(0, 0);
1874 case 1: return zig_maxInt_u(128, bits);
1875 default: zig_unreachable();
1876 }
1877}
1878
1879static inline zig_i128 zig_shls_i128(zig_i128 lhs, zig_u128 rhs, uint8_t bits) {
1880 zig_i128 res;
1881 if (zig_cmp_u128(rhs, zig_make_u128(0, bits)) < INT32_C(0) && !zig_shlo_i128(&res, lhs, (uint8_t)zig_lo_u128(rhs), bits)) return res;
1882 switch (zig_cmp_i128(lhs, zig_make_i128(0, 0))) {
1883 case -1: return zig_minInt_i(128, bits);
1884 case 0: return zig_make_i128(0, 0);
1885 case 1: return zig_maxInt_i(128, bits);
1886 default: zig_unreachable();
1887 }
1888}
1889
1890static inline zig_u128 zig_adds_u128(zig_u128 lhs, zig_u128 rhs, uint8_t bits) {
1891 zig_u128 res;
1892 return zig_addo_u128(&res, lhs, rhs, bits) ? zig_maxInt_u(128, bits) : res;
1893}
1894
1895static inline zig_i128 zig_adds_i128(zig_i128 lhs, zig_i128 rhs, uint8_t bits) {
1896 zig_i128 res;
1897 if (!zig_addo_i128(&res, lhs, rhs, bits)) return res;
1898 return zig_cmp_i128(res, zig_make_i128(0, 0)) >= INT32_C(0) ? zig_minInt_i(128, bits) : zig_maxInt_i(128, bits);
1899}
1900
1901static inline zig_u128 zig_subs_u128(zig_u128 lhs, zig_u128 rhs, uint8_t bits) {
1902 zig_u128 res;
1903 return zig_subo_u128(&res, lhs, rhs, bits) ? zig_minInt_u(128, bits) : res;
1904}
1905
1906static inline zig_i128 zig_subs_i128(zig_i128 lhs, zig_i128 rhs, uint8_t bits) {
1907 zig_i128 res;
1908 if (!zig_subo_i128(&res, lhs, rhs, bits)) return res;
1909 return zig_cmp_i128(res, zig_make_i128(0, 0)) >= INT32_C(0) ? zig_minInt_i(128, bits) : zig_maxInt_i(128, bits);
1910}
1911
1912static inline zig_u128 zig_muls_u128(zig_u128 lhs, zig_u128 rhs, uint8_t bits) {
1913 zig_u128 res;
1914 return zig_mulo_u128(&res, lhs, rhs, bits) ? zig_maxInt_u(128, bits) : res;
1915}
1916
1917static inline zig_i128 zig_muls_i128(zig_i128 lhs, zig_i128 rhs, uint8_t bits) {
1918 zig_i128 res;
1919 if (!zig_mulo_i128(&res, lhs, rhs, bits)) return res;
1920 return zig_cmp_i128(zig_xor_i128(lhs, rhs), zig_make_i128(0, 0)) < INT32_C(0) ? zig_minInt_i(128, bits) : zig_maxInt_i(128, bits);
1921}
1922
1923static inline uint8_t zig_clz_u128(zig_u128 val, uint8_t bits) {
1924 if (bits <= UINT8_C(64)) return zig_clz_u64(zig_lo_u128(val), bits);
1925 if (zig_hi_u128(val) != 0) return zig_clz_u64(zig_hi_u128(val), bits - UINT8_C(64));
1926 return zig_clz_u64(zig_lo_u128(val), UINT8_C(64)) + (bits - UINT8_C(64));
1927}
1928
1929static inline uint8_t zig_clz_i128(zig_i128 val, uint8_t bits) {
1930 return zig_clz_u128(zig_bitCast_u128(val), bits);
1931}
1932
1933static inline uint8_t zig_ctz_u128(zig_u128 val, uint8_t bits) {
1934 if (zig_lo_u128(val) != 0) return zig_ctz_u64(zig_lo_u128(val), UINT8_C(64));
1935 return zig_ctz_u64(zig_hi_u128(val), bits - UINT8_C(64)) + UINT8_C(64);
1936}
1937
1938static inline uint8_t zig_ctz_i128(zig_i128 val, uint8_t bits) {
1939 return zig_ctz_u128(zig_bitCast_u128(val), bits);
1940}
1941
1942static inline uint8_t zig_popcount_u128(zig_u128 val, uint8_t bits) {
1943 return zig_popcount_u64(zig_hi_u128(val), bits - UINT8_C(64)) +
1944 zig_popcount_u64(zig_lo_u128(val), UINT8_C(64));
1945}
1946
1947static inline uint8_t zig_popcount_i128(zig_i128 val, uint8_t bits) {
1948 return zig_popcount_u128(zig_bitCast_u128(val), bits);
1949}
1950
1951static inline zig_u128 zig_byte_swap_u128(zig_u128 val, uint8_t bits) {
1952 zig_u128 full_res;
1953#if zig_has_builtin(bswap128)
1954 full_res = __builtin_bswap128(val);
1955#else
1956 full_res = zig_make_u128(zig_byte_swap_u64(zig_lo_u128(val), UINT8_C(64)),
1957 zig_byte_swap_u64(zig_hi_u128(val), UINT8_C(64)));
1958#endif
1959 return zig_shr_u128(full_res, UINT8_C(128) - bits);
1960}
1961
1962static inline zig_i128 zig_byte_swap_i128(zig_i128 val, uint8_t bits) {
1963 return zig_bitCast_i128(zig_byte_swap_u128(zig_bitCast_u128(val), bits));
1964}
1965
1966static inline zig_u128 zig_bit_reverse_u128(zig_u128 val, uint8_t bits) {
1967 return zig_shr_u128(zig_make_u128(zig_bit_reverse_u64(zig_lo_u128(val), UINT8_C(64)),
1968 zig_bit_reverse_u64(zig_hi_u128(val), UINT8_C(64))),
1969 UINT8_C(128) - bits);
1970}
1971
1972static inline zig_i128 zig_bit_reverse_i128(zig_i128 val, uint8_t bits) {
1973 return zig_bitCast_i128(zig_bit_reverse_u128(zig_bitCast_u128(val), bits));
1974}
1975
1976/* ========================== Big Integer Support =========================== */
1977
1978static inline uint16_t zig_int_bytes(uint16_t bits) {
1979 uint16_t bytes = (bits + CHAR_BIT - 1) / CHAR_BIT;
1980 uint16_t alignment = ZIG_TARGET_MAX_INT_ALIGNMENT;
1981 while (alignment / 2 >= bytes) alignment /= 2;
1982 return (bytes + alignment - 1) / alignment * alignment;
1983}
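
/* Worked example, assuming a ZIG_TARGET_MAX_INT_ALIGNMENT of 16: a 72-bit
 * integer needs (72 + 7) / 8 == 9 bytes, the loop leaves the alignment at 16
 * because 16 / 2 == 8 < 9, and the value is stored in 16 bytes. A 24-bit
 * integer needs 3 bytes, the alignment shrinks to 4, and the result is 4. */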
1984
1985static inline int32_t zig_cmp_big(const void *lhs, const void *rhs, bool is_signed, uint16_t bits) {
1986 const uint8_t *lhs_bytes = lhs;
1987 const uint8_t *rhs_bytes = rhs;
1988 uint16_t byte_offset = 0;
1989 bool do_signed = is_signed;
1990 uint16_t remaining_bytes = zig_int_bytes(bits);
1991
1992#if zig_little_endian
1993 byte_offset = remaining_bytes;
1994#endif
1995
1996 while (remaining_bytes >= 128 / CHAR_BIT) {
1997 int32_t limb_cmp;
1998
1999#if zig_little_endian
2000 byte_offset -= 128 / CHAR_BIT;
2001#endif
2002
2003 if (do_signed) {
2004 zig_i128 lhs_limb;
2005 zig_i128 rhs_limb;
2006
2007 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2008 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2009 limb_cmp = zig_cmp_i128(lhs_limb, rhs_limb);
2010 do_signed = false;
2011 } else {
2012 zig_u128 lhs_limb;
2013 zig_u128 rhs_limb;
2014
2015 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2016 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2017 limb_cmp = zig_cmp_u128(lhs_limb, rhs_limb);
2018 }
2019
2020 if (limb_cmp != 0) return limb_cmp;
2021 remaining_bytes -= 128 / CHAR_BIT;
2022
2023#if zig_big_endian
2024 byte_offset += 128 / CHAR_BIT;
2025#endif
2026 }
2027
2028 while (remaining_bytes >= 64 / CHAR_BIT) {
2029#if zig_little_endian
2030 byte_offset -= 64 / CHAR_BIT;
2031#endif
2032
2033 if (do_signed) {
2034 int64_t lhs_limb;
2035 int64_t rhs_limb;
2036
2037 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2038 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2039 if (lhs_limb != rhs_limb) return (lhs_limb > rhs_limb) - (lhs_limb < rhs_limb);
2040 do_signed = false;
2041 } else {
2042 uint64_t lhs_limb;
2043 uint64_t rhs_limb;
2044
2045 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2046 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2047 if (lhs_limb != rhs_limb) return (lhs_limb > rhs_limb) - (lhs_limb < rhs_limb);
2048 }
2049
2050 remaining_bytes -= 64 / CHAR_BIT;
2051
2052#if zig_big_endian
2053 byte_offset += 64 / CHAR_BIT;
2054#endif
2055 }
2056
2057 while (remaining_bytes >= 32 / CHAR_BIT) {
2058#if zig_little_endian
2059 byte_offset -= 32 / CHAR_BIT;
2060#endif
2061
2062 if (do_signed) {
2063 int32_t lhs_limb;
2064 int32_t rhs_limb;
2065
2066 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2067 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2068 if (lhs_limb != rhs_limb) return (lhs_limb > rhs_limb) - (lhs_limb < rhs_limb);
2069 do_signed = false;
2070 } else {
2071 uint32_t lhs_limb;
2072 uint32_t rhs_limb;
2073
2074 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2075 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2076 if (lhs_limb != rhs_limb) return (lhs_limb > rhs_limb) - (lhs_limb < rhs_limb);
2077 }
2078
2079 remaining_bytes -= 32 / CHAR_BIT;
2080
2081#if zig_big_endian
2082 byte_offset += 32 / CHAR_BIT;
2083#endif
2084 }
2085
2086 while (remaining_bytes >= 16 / CHAR_BIT) {
2087#if zig_little_endian
2088 byte_offset -= 16 / CHAR_BIT;
2089#endif
2090
2091 if (do_signed) {
2092 int16_t lhs_limb;
2093 int16_t rhs_limb;
2094
2095 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2096 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2097 if (lhs_limb != rhs_limb) return (lhs_limb > rhs_limb) - (lhs_limb < rhs_limb);
2098 do_signed = false;
2099 } else {
2100 uint16_t lhs_limb;
2101 uint16_t rhs_limb;
2102
2103 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2104 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2105 if (lhs_limb != rhs_limb) return (lhs_limb > rhs_limb) - (lhs_limb < rhs_limb);
2106 }
2107
2108 remaining_bytes -= 16 / CHAR_BIT;
2109
2110#if zig_big_endian
2111 byte_offset += 16 / CHAR_BIT;
2112#endif
2113 }
2114
2115 while (remaining_bytes >= 8 / CHAR_BIT) {
2116#if zig_little_endian
2117 byte_offset -= 8 / CHAR_BIT;
2118#endif
2119
2120 if (do_signed) {
2121 int8_t lhs_limb;
2122 int8_t rhs_limb;
2123
2124 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2125 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2126 if (lhs_limb != rhs_limb) return (lhs_limb > rhs_limb) - (lhs_limb < rhs_limb);
2127 do_signed = false;
2128 } else {
2129 uint8_t lhs_limb;
2130 uint8_t rhs_limb;
2131
2132 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2133 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2134 if (lhs_limb != rhs_limb) return (lhs_limb > rhs_limb) - (lhs_limb < rhs_limb);
2135 }
2136
2137 remaining_bytes -= 8 / CHAR_BIT;
2138
2139#if zig_big_endian
2140 byte_offset += 8 / CHAR_BIT;
2141#endif
2142 }
2143
2144 return 0;
2145}
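
/* Only the first (most significant) limb is compared as signed when is_signed
 * is set; after that do_signed is cleared and the remaining limbs are compared
 * as unsigned magnitude, since in two's complement the sign only affects the
 * ordering of the topmost limb. */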
2146
2147static inline void zig_and_big(void *res, const void *lhs, const void *rhs, bool is_signed, uint16_t bits) {
2148 uint8_t *res_bytes = res;
2149 const uint8_t *lhs_bytes = lhs;
2150 const uint8_t *rhs_bytes = rhs;
2151 uint16_t byte_offset = 0;
2152 uint16_t remaining_bytes = zig_int_bytes(bits);
2153 (void)is_signed;
2154
2155 while (remaining_bytes >= 128 / CHAR_BIT) {
2156 zig_u128 res_limb;
2157 zig_u128 lhs_limb;
2158 zig_u128 rhs_limb;
2159
2160 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2161 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2162 res_limb = zig_and_u128(lhs_limb, rhs_limb);
2163 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2164
2165 remaining_bytes -= 128 / CHAR_BIT;
2166 byte_offset += 128 / CHAR_BIT;
2167 }
2168
2169 while (remaining_bytes >= 64 / CHAR_BIT) {
2170 uint64_t res_limb;
2171 uint64_t lhs_limb;
2172 uint64_t rhs_limb;
2173
2174 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2175 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2176 res_limb = zig_and_u64(lhs_limb, rhs_limb);
2177 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2178
2179 remaining_bytes -= 64 / CHAR_BIT;
2180 byte_offset += 64 / CHAR_BIT;
2181 }
2182
2183 while (remaining_bytes >= 32 / CHAR_BIT) {
2184 uint32_t res_limb;
2185 uint32_t lhs_limb;
2186 uint32_t rhs_limb;
2187
2188 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2189 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2190 res_limb = zig_and_u32(lhs_limb, rhs_limb);
2191 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2192
2193 remaining_bytes -= 32 / CHAR_BIT;
2194 byte_offset += 32 / CHAR_BIT;
2195 }
2196
2197 while (remaining_bytes >= 16 / CHAR_BIT) {
2198 uint16_t res_limb;
2199 uint16_t lhs_limb;
2200 uint16_t rhs_limb;
2201
2202 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2203 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2204 res_limb = zig_and_u16(lhs_limb, rhs_limb);
2205 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2206
2207 remaining_bytes -= 16 / CHAR_BIT;
2208 byte_offset += 16 / CHAR_BIT;
2209 }
2210
2211 while (remaining_bytes >= 8 / CHAR_BIT) {
2212 uint8_t res_limb;
2213 uint8_t lhs_limb;
2214 uint8_t rhs_limb;
2215
2216 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2217 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2218 res_limb = zig_and_u8(lhs_limb, rhs_limb);
2219 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2220
2221 remaining_bytes -= 8 / CHAR_BIT;
2222 byte_offset += 8 / CHAR_BIT;
2223 }
2224}
2225
2226static inline void zig_or_big(void *res, const void *lhs, const void *rhs, bool is_signed, uint16_t bits) {
2227 uint8_t *res_bytes = res;
2228 const uint8_t *lhs_bytes = lhs;
2229 const uint8_t *rhs_bytes = rhs;
2230 uint16_t byte_offset = 0;
2231 uint16_t remaining_bytes = zig_int_bytes(bits);
2232 (void)is_signed;
2233
2234 while (remaining_bytes >= 128 / CHAR_BIT) {
2235 zig_u128 res_limb;
2236 zig_u128 lhs_limb;
2237 zig_u128 rhs_limb;
2238
2239 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2240 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2241 res_limb = zig_or_u128(lhs_limb, rhs_limb);
2242 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2243
2244 remaining_bytes -= 128 / CHAR_BIT;
2245 byte_offset += 128 / CHAR_BIT;
2246 }
2247
2248 while (remaining_bytes >= 64 / CHAR_BIT) {
2249 uint64_t res_limb;
2250 uint64_t lhs_limb;
2251 uint64_t rhs_limb;
2252
2253 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2254 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2255 res_limb = zig_or_u64(lhs_limb, rhs_limb);
2256 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2257
2258 remaining_bytes -= 64 / CHAR_BIT;
2259 byte_offset += 64 / CHAR_BIT;
2260 }
2261
2262 while (remaining_bytes >= 32 / CHAR_BIT) {
2263 uint32_t res_limb;
2264 uint32_t lhs_limb;
2265 uint32_t rhs_limb;
2266
2267 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2268 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2269 res_limb = zig_or_u32(lhs_limb, rhs_limb);
2270 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2271
2272 remaining_bytes -= 32 / CHAR_BIT;
2273 byte_offset += 32 / CHAR_BIT;
2274 }
2275
2276 while (remaining_bytes >= 16 / CHAR_BIT) {
2277 uint16_t res_limb;
2278 uint16_t lhs_limb;
2279 uint16_t rhs_limb;
2280
2281 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2282 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2283 res_limb = zig_or_u16(lhs_limb, rhs_limb);
2284 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2285
2286 remaining_bytes -= 16 / CHAR_BIT;
2287 byte_offset += 16 / CHAR_BIT;
2288 }
2289
2290 while (remaining_bytes >= 8 / CHAR_BIT) {
2291 uint8_t res_limb;
2292 uint8_t lhs_limb;
2293 uint8_t rhs_limb;
2294
2295 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2296 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2297 res_limb = zig_or_u8(lhs_limb, rhs_limb);
2298 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2299
2300 remaining_bytes -= 8 / CHAR_BIT;
2301 byte_offset += 8 / CHAR_BIT;
2302 }
2303}
2304
2305static inline void zig_xor_big(void *res, const void *lhs, const void *rhs, bool is_signed, uint16_t bits) {
2306 uint8_t *res_bytes = res;
2307 const uint8_t *lhs_bytes = lhs;
2308 const uint8_t *rhs_bytes = rhs;
2309 uint16_t byte_offset = 0;
2310 uint16_t remaining_bytes = zig_int_bytes(bits);
2311 (void)is_signed;
2312
2313 while (remaining_bytes >= 128 / CHAR_BIT) {
2314 zig_u128 res_limb;
2315 zig_u128 lhs_limb;
2316 zig_u128 rhs_limb;
2317
2318 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2319 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2320 res_limb = zig_xor_u128(lhs_limb, rhs_limb);
2321 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2322
2323 remaining_bytes -= 128 / CHAR_BIT;
2324 byte_offset += 128 / CHAR_BIT;
2325 }
2326
2327 while (remaining_bytes >= 64 / CHAR_BIT) {
2328 uint64_t res_limb;
2329 uint64_t lhs_limb;
2330 uint64_t rhs_limb;
2331
2332 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2333 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2334 res_limb = zig_xor_u64(lhs_limb, rhs_limb);
2335 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2336
2337 remaining_bytes -= 64 / CHAR_BIT;
2338 byte_offset += 64 / CHAR_BIT;
2339 }
2340
2341 while (remaining_bytes >= 32 / CHAR_BIT) {
2342 uint32_t res_limb;
2343 uint32_t lhs_limb;
2344 uint32_t rhs_limb;
2345
2346 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2347 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2348 res_limb = zig_xor_u32(lhs_limb, rhs_limb);
2349 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2350
2351 remaining_bytes -= 32 / CHAR_BIT;
2352 byte_offset += 32 / CHAR_BIT;
2353 }
2354
2355 while (remaining_bytes >= 16 / CHAR_BIT) {
2356 uint16_t res_limb;
2357 uint16_t lhs_limb;
2358 uint16_t rhs_limb;
2359
2360 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2361 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2362 res_limb = zig_xor_u16(lhs_limb, rhs_limb);
2363 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2364
2365 remaining_bytes -= 16 / CHAR_BIT;
2366 byte_offset += 16 / CHAR_BIT;
2367 }
2368
2369 while (remaining_bytes >= 8 / CHAR_BIT) {
2370 uint8_t res_limb;
2371 uint8_t lhs_limb;
2372 uint8_t rhs_limb;
2373
2374 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2375 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2376 res_limb = zig_xor_u8(lhs_limb, rhs_limb);
2377 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2378
2379 remaining_bytes -= 8 / CHAR_BIT;
2380 byte_offset += 8 / CHAR_BIT;
2381 }
2382}
2383
2384static inline bool zig_addo_big(void *res, const void *lhs, const void *rhs, bool is_signed, uint16_t bits) {
2385 uint8_t *res_bytes = res;
2386 const uint8_t *lhs_bytes = lhs;
2387 const uint8_t *rhs_bytes = rhs;
2388 uint16_t byte_offset = 0;
2389 uint16_t remaining_bytes = zig_int_bytes(bits);
2390 uint8_t top_bits = (uint8_t)(remaining_bytes * 8 - bits);
2391 bool overflow = false;
2392
2393#if zig_big_endian
2394 byte_offset = remaining_bytes;
2395#endif
2396
2397 while (remaining_bytes >= 128 / CHAR_BIT) {
2398 uint8_t limb_bits = 128 - (remaining_bytes == 128 / CHAR_BIT ? top_bits : 0);
2399
2400#if zig_big_endian
2401 byte_offset -= 128 / CHAR_BIT;
2402#endif
2403
2404 if (remaining_bytes == 128 / CHAR_BIT && is_signed) {
2405 zig_i128 res_limb;
2406 zig_i128 tmp_limb;
2407 zig_i128 lhs_limb;
2408 zig_i128 rhs_limb;
2409 bool limb_overflow;
2410
2411 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2412 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2413 limb_overflow = zig_addo_i128(&tmp_limb, lhs_limb, rhs_limb, limb_bits);
2414 overflow = limb_overflow ^ zig_addo_i128(&res_limb, tmp_limb, zig_make_i128(INT64_C(0), overflow ? UINT64_C(1) : UINT64_C(0)), limb_bits);
2415 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2416 } else {
2417 zig_u128 res_limb;
2418 zig_u128 tmp_limb;
2419 zig_u128 lhs_limb;
2420 zig_u128 rhs_limb;
2421 bool limb_overflow;
2422
2423 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2424 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2425 limb_overflow = zig_addo_u128(&tmp_limb, lhs_limb, rhs_limb, limb_bits);
2426 overflow = limb_overflow ^ zig_addo_u128(&res_limb, tmp_limb, zig_make_u128(UINT64_C(0), overflow ? UINT64_C(1) : UINT64_C(0)), limb_bits);
2427 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2428 }
2429
2430 remaining_bytes -= 128 / CHAR_BIT;
2431
2432#if zig_little_endian
2433 byte_offset += 128 / CHAR_BIT;
2434#endif
2435 }
2436
2437 while (remaining_bytes >= 64 / CHAR_BIT) {
2438 uint8_t limb_bits = 64 - (remaining_bytes == 64 / CHAR_BIT ? top_bits : 0);
2439
2440#if zig_big_endian
2441 byte_offset -= 64 / CHAR_BIT;
2442#endif
2443
2444 if (remaining_bytes == 64 / CHAR_BIT && is_signed) {
2445 int64_t res_limb;
2446 int64_t tmp_limb;
2447 int64_t lhs_limb;
2448 int64_t rhs_limb;
2449 bool limb_overflow;
2450
2451 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2452 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2453 limb_overflow = zig_addo_i64(&tmp_limb, lhs_limb, rhs_limb, limb_bits);
2454 overflow = limb_overflow ^ zig_addo_i64(&res_limb, tmp_limb, overflow ? INT64_C(1) : INT64_C(0), limb_bits);
2455 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2456 } else {
2457 uint64_t res_limb;
2458 uint64_t tmp_limb;
2459 uint64_t lhs_limb;
2460 uint64_t rhs_limb;
2461 bool limb_overflow;
2462
2463 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2464 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2465 limb_overflow = zig_addo_u64(&tmp_limb, lhs_limb, rhs_limb, limb_bits);
2466 overflow = limb_overflow ^ zig_addo_u64(&res_limb, tmp_limb, overflow ? UINT64_C(1) : UINT64_C(0), limb_bits);
2467 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2468 }
2469
2470 remaining_bytes -= 64 / CHAR_BIT;
2471
2472#if zig_little_endian
2473 byte_offset += 64 / CHAR_BIT;
2474#endif
2475 }
2476
2477 while (remaining_bytes >= 32 / CHAR_BIT) {
2478 uint8_t limb_bits = 32 - (remaining_bytes == 32 / CHAR_BIT ? top_bits : 0);
2479
2480#if zig_big_endian
2481 byte_offset -= 32 / CHAR_BIT;
2482#endif
2483
2484 if (remaining_bytes == 32 / CHAR_BIT && is_signed) {
2485 int32_t res_limb;
2486 int32_t tmp_limb;
2487 int32_t lhs_limb;
2488 int32_t rhs_limb;
2489 bool limb_overflow;
2490
2491 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2492 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2493 limb_overflow = zig_addo_i32(&tmp_limb, lhs_limb, rhs_limb, limb_bits);
2494 overflow = limb_overflow ^ zig_addo_i32(&res_limb, tmp_limb, overflow ? INT32_C(1) : INT32_C(0), limb_bits);
2495 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2496 } else {
2497 uint32_t res_limb;
2498 uint32_t tmp_limb;
2499 uint32_t lhs_limb;
2500 uint32_t rhs_limb;
2501 bool limb_overflow;
2502
2503 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2504 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2505 limb_overflow = zig_addo_u32(&tmp_limb, lhs_limb, rhs_limb, limb_bits);
2506 overflow = limb_overflow ^ zig_addo_u32(&res_limb, tmp_limb, overflow ? UINT32_C(1) : UINT32_C(0), limb_bits);
2507 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2508 }
2509
2510 remaining_bytes -= 32 / CHAR_BIT;
2511
2512#if zig_little_endian
2513 byte_offset += 32 / CHAR_BIT;
2514#endif
2515 }
2516
2517 while (remaining_bytes >= 16 / CHAR_BIT) {
2518 uint8_t limb_bits = 16 - (remaining_bytes == 16 / CHAR_BIT ? top_bits : 0);
2519
2520#if zig_big_endian
2521 byte_offset -= 16 / CHAR_BIT;
2522#endif
2523
2524 if (remaining_bytes == 16 / CHAR_BIT && is_signed) {
2525 int16_t res_limb;
2526 int16_t tmp_limb;
2527 int16_t lhs_limb;
2528 int16_t rhs_limb;
2529 bool limb_overflow;
2530
2531 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2532 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2533 limb_overflow = zig_addo_i16(&tmp_limb, lhs_limb, rhs_limb, limb_bits);
2534 overflow = limb_overflow ^ zig_addo_i16(&res_limb, tmp_limb, overflow ? INT16_C(1) : INT16_C(0), limb_bits);
2535 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2536 } else {
2537 uint16_t res_limb;
2538 uint16_t tmp_limb;
2539 uint16_t lhs_limb;
2540 uint16_t rhs_limb;
2541 bool limb_overflow;
2542
2543 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2544 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2545 limb_overflow = zig_addo_u16(&tmp_limb, lhs_limb, rhs_limb, limb_bits);
2546 overflow = limb_overflow ^ zig_addo_u16(&res_limb, tmp_limb, overflow ? UINT16_C(1) : UINT16_C(0), limb_bits);
2547 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2548 }
2549
2550 remaining_bytes -= 16 / CHAR_BIT;
2551
2552#if zig_little_endian
2553 byte_offset += 16 / CHAR_BIT;
2554#endif
2555 }
2556
2557 while (remaining_bytes >= 8 / CHAR_BIT) {
2558 uint8_t limb_bits = 8 - (remaining_bytes == 8 / CHAR_BIT ? top_bits : 0);
2559
2560#if zig_big_endian
2561 byte_offset -= 8 / CHAR_BIT;
2562#endif
2563
2564 if (remaining_bytes == 8 / CHAR_BIT && is_signed) {
2565 int8_t res_limb;
2566 int8_t tmp_limb;
2567 int8_t lhs_limb;
2568 int8_t rhs_limb;
2569 bool limb_overflow;
2570
2571 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2572 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2573 limb_overflow = zig_addo_i8(&tmp_limb, lhs_limb, rhs_limb, limb_bits);
2574 overflow = limb_overflow ^ zig_addo_i8(&res_limb, tmp_limb, overflow ? INT8_C(1) : INT8_C(0), limb_bits);
2575 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2576 } else {
2577 uint8_t res_limb;
2578 uint8_t tmp_limb;
2579 uint8_t lhs_limb;
2580 uint8_t rhs_limb;
2581 bool limb_overflow;
2582
2583 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2584 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2585 limb_overflow = zig_addo_u8(&tmp_limb, lhs_limb, rhs_limb, limb_bits);
2586 overflow = limb_overflow ^ zig_addo_u8(&res_limb, tmp_limb, overflow ? UINT8_C(1) : UINT8_C(0), limb_bits);
2587 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2588 }
2589
2590 remaining_bytes -= 8 / CHAR_BIT;
2591
2592#if zig_little_endian
2593 byte_offset += 8 / CHAR_BIT;
2594#endif
2595 }
2596
2597 return overflow;
2598}
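
/* The loops above walk the limbs from least to most significant so that each
 * limb's carry (the overflow flag) folds into the next limb; top_bits trims
 * only the final, most significant limb down to the requested bit width. */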
2599
2600static inline bool zig_subo_big(void *res, const void *lhs, const void *rhs, bool is_signed, uint16_t bits) {
2601 uint8_t *res_bytes = res;
2602 const uint8_t *lhs_bytes = lhs;
2603 const uint8_t *rhs_bytes = rhs;
2604 uint16_t byte_offset = 0;
2605 uint16_t remaining_bytes = zig_int_bytes(bits);
2606 uint8_t top_bits = (uint8_t)(remaining_bytes * 8 - bits);
2607 bool overflow = false;
2608
2609#if zig_big_endian
2610 byte_offset = remaining_bytes;
2611#endif
2612
2613 while (remaining_bytes >= 128 / CHAR_BIT) {
2614 uint8_t limb_bits = 128 - (remaining_bytes == 128 / CHAR_BIT ? top_bits : 0);
2615
2616#if zig_big_endian
2617 byte_offset -= 128 / CHAR_BIT;
2618#endif
2619
2620 if (remaining_bytes == 128 / CHAR_BIT && is_signed) {
2621 zig_i128 res_limb;
2622 zig_i128 tmp_limb;
2623 zig_i128 lhs_limb;
2624 zig_i128 rhs_limb;
2625 bool limb_overflow;
2626
2627 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2628 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2629 limb_overflow = zig_subo_i128(&tmp_limb, lhs_limb, rhs_limb, limb_bits);
2630 overflow = limb_overflow ^ zig_subo_i128(&res_limb, tmp_limb, zig_make_i128(INT64_C(0), overflow ? UINT64_C(1) : UINT64_C(0)), limb_bits);
2631 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2632 } else {
2633 zig_u128 res_limb;
2634 zig_u128 tmp_limb;
2635 zig_u128 lhs_limb;
2636 zig_u128 rhs_limb;
2637 bool limb_overflow;
2638
2639 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2640 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2641 limb_overflow = zig_subo_u128(&tmp_limb, lhs_limb, rhs_limb, limb_bits);
2642 overflow = limb_overflow ^ zig_subo_u128(&res_limb, tmp_limb, zig_make_u128(UINT64_C(0), overflow ? UINT64_C(1) : UINT64_C(0)), limb_bits);
2643 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2644 }
2645
2646 remaining_bytes -= 128 / CHAR_BIT;
2647
2648#if zig_little_endian
2649 byte_offset += 128 / CHAR_BIT;
2650#endif
2651 }
2652
2653 while (remaining_bytes >= 64 / CHAR_BIT) {
2654 uint8_t limb_bits = 64 - (remaining_bytes == 64 / CHAR_BIT ? top_bits : 0);
2655
2656#if zig_big_endian
2657 byte_offset -= 64 / CHAR_BIT;
2658#endif
2659
2660 if (remaining_bytes == 64 / CHAR_BIT && is_signed) {
2661 int64_t res_limb;
2662 int64_t tmp_limb;
2663 int64_t lhs_limb;
2664 int64_t rhs_limb;
2665 bool limb_overflow;
2666
2667 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2668 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2669 limb_overflow = zig_subo_i64(&tmp_limb, lhs_limb, rhs_limb, limb_bits);
2670 overflow = limb_overflow ^ zig_subo_i64(&res_limb, tmp_limb, overflow ? INT64_C(1) : INT64_C(0), limb_bits);
2671 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2672 } else {
2673 uint64_t res_limb;
2674 uint64_t tmp_limb;
2675 uint64_t lhs_limb;
2676 uint64_t rhs_limb;
2677 bool limb_overflow;
2678
2679 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2680 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2681 limb_overflow = zig_subo_u64(&tmp_limb, lhs_limb, rhs_limb, limb_bits);
2682 overflow = limb_overflow ^ zig_subo_u64(&res_limb, tmp_limb, overflow ? UINT64_C(1) : UINT64_C(0), limb_bits);
2683 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2684 }
2685
2686 remaining_bytes -= 64 / CHAR_BIT;
2687
2688#if zig_little_endian
2689 byte_offset += 64 / CHAR_BIT;
2690#endif
2691 }
2692
2693 while (remaining_bytes >= 32 / CHAR_BIT) {
2694 uint8_t limb_bits = 32 - (remaining_bytes == 32 / CHAR_BIT ? top_bits : 0);
2695
2696#if zig_big_endian
2697 byte_offset -= 32 / CHAR_BIT;
2698#endif
2699
2700 if (remaining_bytes == 32 / CHAR_BIT && is_signed) {
2701 int32_t res_limb;
2702 int32_t tmp_limb;
2703 int32_t lhs_limb;
2704 int32_t rhs_limb;
2705 bool limb_overflow;
2706
2707 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2708 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2709 limb_overflow = zig_subo_i32(&tmp_limb, lhs_limb, rhs_limb, limb_bits);
2710 overflow = limb_overflow ^ zig_subo_i32(&res_limb, tmp_limb, overflow ? INT32_C(1) : INT32_C(0), limb_bits);
2711 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2712 } else {
2713 uint32_t res_limb;
2714 uint32_t tmp_limb;
2715 uint32_t lhs_limb;
2716 uint32_t rhs_limb;
2717 bool limb_overflow;
2718
2719 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2720 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2721 limb_overflow = zig_subo_u32(&tmp_limb, lhs_limb, rhs_limb, limb_bits);
2722 overflow = limb_overflow ^ zig_subo_u32(&res_limb, tmp_limb, overflow ? UINT32_C(1) : UINT32_C(0), limb_bits);
2723 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2724 }
2725
2726 remaining_bytes -= 32 / CHAR_BIT;
2727
2728#if zig_little_endian
2729 byte_offset += 32 / CHAR_BIT;
2730#endif
2731 }
2732
2733 while (remaining_bytes >= 16 / CHAR_BIT) {
2734 uint8_t limb_bits = 16 - (remaining_bytes == 16 / CHAR_BIT ? top_bits : 0);
2735
2736#if zig_big_endian
2737 byte_offset -= 16 / CHAR_BIT;
2738#endif
2739
2740 if (remaining_bytes == 16 / CHAR_BIT && is_signed) {
2741 int16_t res_limb;
2742 int16_t tmp_limb;
2743 int16_t lhs_limb;
2744 int16_t rhs_limb;
2745 bool limb_overflow;
2746
2747 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2748 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2749 limb_overflow = zig_subo_i16(&tmp_limb, lhs_limb, rhs_limb, limb_bits);
2750 overflow = limb_overflow ^ zig_subo_i16(&res_limb, tmp_limb, overflow ? INT16_C(1) : INT16_C(0), limb_bits);
2751 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2752 } else {
2753 uint16_t res_limb;
2754 uint16_t tmp_limb;
2755 uint16_t lhs_limb;
2756 uint16_t rhs_limb;
2757 bool limb_overflow;
2758
2759 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2760 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2761 limb_overflow = zig_subo_u16(&tmp_limb, lhs_limb, rhs_limb, limb_bits);
2762 overflow = limb_overflow ^ zig_subo_u16(&res_limb, tmp_limb, overflow ? UINT16_C(1) : UINT16_C(0), limb_bits);
2763 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2764 }
2765
2766 remaining_bytes -= 16 / CHAR_BIT;
2767
2768#if zig_little_endian
2769 byte_offset += 16 / CHAR_BIT;
2770#endif
2771 }
2772
2773 while (remaining_bytes >= 8 / CHAR_BIT) {
2774 uint8_t limb_bits = 8 - (remaining_bytes == 8 / CHAR_BIT ? top_bits : 0);
2775
2776#if zig_big_endian
2777 byte_offset -= 8 / CHAR_BIT;
2778#endif
2779
2780 if (remaining_bytes == 8 / CHAR_BIT && is_signed) {
2781 int8_t res_limb;
2782 int8_t tmp_limb;
2783 int8_t lhs_limb;
2784 int8_t rhs_limb;
2785 bool limb_overflow;
2786
2787 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2788 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2789 limb_overflow = zig_subo_i8(&tmp_limb, lhs_limb, rhs_limb, limb_bits);
2790 overflow = limb_overflow ^ zig_subo_i8(&res_limb, tmp_limb, overflow ? INT8_C(1) : INT8_C(0), limb_bits);
2791 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2792 } else {
2793 uint8_t res_limb;
2794 uint8_t tmp_limb;
2795 uint8_t lhs_limb;
2796 uint8_t rhs_limb;
2797 bool limb_overflow;
2798
2799 memcpy(&lhs_limb, &lhs_bytes[byte_offset], sizeof(lhs_limb));
2800 memcpy(&rhs_limb, &rhs_bytes[byte_offset], sizeof(rhs_limb));
2801 limb_overflow = zig_subo_u8(&tmp_limb, lhs_limb, rhs_limb, limb_bits);
2802 overflow = limb_overflow ^ zig_subo_u8(&res_limb, tmp_limb, overflow ? UINT8_C(1) : UINT8_C(0), limb_bits);
2803 memcpy(&res_bytes[byte_offset], &res_limb, sizeof(res_limb));
2804 }
2805
2806 remaining_bytes -= 8 / CHAR_BIT;
2807
2808#if zig_little_endian
2809 byte_offset += 8 / CHAR_BIT;
2810#endif
2811 }
2812
2813 return overflow;
2814}
2815
2816static inline void zig_addw_big(void *res, const void *lhs, const void *rhs, bool is_signed, uint16_t bits) {
2817 (void)zig_addo_big(res, lhs, rhs, is_signed, bits);
2818}
2819
2820static inline void zig_subw_big(void *res, const void *lhs, const void *rhs, bool is_signed, uint16_t bits) {
2821 (void)zig_subo_big(res, lhs, rhs, is_signed, bits);
2822}
2823
2824zig_extern void __udivei4(uint32_t *res, const uint32_t *lhs, const uint32_t *rhs, uintptr_t bits);
2825static inline void zig_div_trunc_big(void *res, const void *lhs, const void *rhs, bool is_signed, uint16_t bits) {
2826 if (!is_signed) {
2827 __udivei4(res, lhs, rhs, bits);
2828 return;
2829 }
2830
2831 zig_trap();
2832}
2833
2834static inline void zig_div_floor_big(void *res, const void *lhs, const void *rhs, bool is_signed, uint16_t bits) {
2835 if (!is_signed) {
2836 zig_div_trunc_big(res, lhs, rhs, is_signed, bits);
2837 return;
2838 }
2839
2840 zig_trap();
2841}
2842
2843zig_extern void __umodei4(uint32_t *res, const uint32_t *lhs, const uint32_t *rhs, uintptr_t bits);
2844static inline void zig_rem_big(void *res, const void *lhs, const void *rhs, bool is_signed, uint16_t bits) {
2845 if (!is_signed) {
2846 __umodei4(res, lhs, rhs, bits);
2847 return;
2848 }
2849
2850 zig_trap();
2851}
2852
2853static inline void zig_mod_big(void *res, const void *lhs, const void *rhs, bool is_signed, uint16_t bits) {
2854 if (!is_signed) {
2855 zig_rem_big(res, lhs, rhs, is_signed, bits);
2856 return;
2857 }
2858
2859 zig_trap();
2860}
2861
2862static inline uint16_t zig_clz_big(const void *val, bool is_signed, uint16_t bits) {
2863 const uint8_t *val_bytes = val;
2864 uint16_t byte_offset = 0;
2865 uint16_t remaining_bytes = zig_int_bytes(bits);
2866 uint16_t skip_bits = remaining_bytes * 8 - bits;
2867 uint16_t total_lz = 0;
2868 uint16_t limb_lz;
2869 (void)is_signed;
2870
2871#if zig_little_endian
2872 byte_offset = remaining_bytes;
2873#endif
2874
2875 while (remaining_bytes >= 128 / CHAR_BIT) {
2876#if zig_little_endian
2877 byte_offset -= 128 / CHAR_BIT;
2878#endif
2879
2880 {
2881 zig_u128 val_limb;
2882
2883 memcpy(&val_limb, &val_bytes[byte_offset], sizeof(val_limb));
2884 limb_lz = zig_clz_u128(val_limb, 128 - skip_bits);
2885 }
2886
2887 total_lz += limb_lz;
2888 if (limb_lz < 128 - skip_bits) return total_lz;
2889 skip_bits = 0;
2890 remaining_bytes -= 128 / CHAR_BIT;
2891
2892#if zig_big_endian
2893 byte_offset += 128 / CHAR_BIT;
2894#endif
2895 }
2896
2897 while (remaining_bytes >= 64 / CHAR_BIT) {
2898#if zig_little_endian
2899 byte_offset -= 64 / CHAR_BIT;
2900#endif
2901
2902 {
2903 uint64_t val_limb;
2904
2905 memcpy(&val_limb, &val_bytes[byte_offset], sizeof(val_limb));
2906 limb_lz = zig_clz_u64(val_limb, 64 - skip_bits);
2907 }
2908
2909 total_lz += limb_lz;
2910 if (limb_lz < 64 - skip_bits) return total_lz;
2911 skip_bits = 0;
2912 remaining_bytes -= 64 / CHAR_BIT;
2913
2914#if zig_big_endian
2915 byte_offset += 64 / CHAR_BIT;
2916#endif
2917 }
2918
2919 while (remaining_bytes >= 32 / CHAR_BIT) {
2920#if zig_little_endian
2921 byte_offset -= 32 / CHAR_BIT;
2922#endif
2923
2924 {
2925 uint32_t val_limb;
2926
2927 memcpy(&val_limb, &val_bytes[byte_offset], sizeof(val_limb));
2928 limb_lz = zig_clz_u32(val_limb, 32 - skip_bits);
2929 }
2930
2931 total_lz += limb_lz;
2932 if (limb_lz < 32 - skip_bits) return total_lz;
2933 skip_bits = 0;
2934 remaining_bytes -= 32 / CHAR_BIT;
2935
2936#if zig_big_endian
2937 byte_offset += 32 / CHAR_BIT;
2938#endif
2939 }
2940
2941 while (remaining_bytes >= 16 / CHAR_BIT) {
2942#if zig_little_endian
2943 byte_offset -= 16 / CHAR_BIT;
2944#endif
2945
2946 {
2947 uint16_t val_limb;
2948
2949 memcpy(&val_limb, &val_bytes[byte_offset], sizeof(val_limb));
2950 limb_lz = zig_clz_u16(val_limb, 16 - skip_bits);
2951 }
2952
2953 total_lz += limb_lz;
2954 if (limb_lz < 16 - skip_bits) return total_lz;
2955 skip_bits = 0;
2956 remaining_bytes -= 16 / CHAR_BIT;
2957
2958#if zig_big_endian
2959 byte_offset += 16 / CHAR_BIT;
2960#endif
2961 }
2962
2963 while (remaining_bytes >= 8 / CHAR_BIT) {
2964#if zig_little_endian
2965 byte_offset -= 8 / CHAR_BIT;
2966#endif
2967
2968 {
2969 uint8_t val_limb;
2970
2971 memcpy(&val_limb, &val_bytes[byte_offset], sizeof(val_limb));
2972 limb_lz = zig_clz_u8(val_limb, 8 - skip_bits);
2973 }
2974
2975 total_lz += limb_lz;
2976 if (limb_lz < 8 - skip_bits) return total_lz;
2977 skip_bits = 0;
2978 remaining_bytes -= 8 / CHAR_BIT;
2979
2980#if zig_big_endian
2981 byte_offset += 8 / CHAR_BIT;
2982#endif
2983 }
2984
2985 return total_lz;
2986}
2987
2988static inline uint16_t zig_ctz_big(const void *val, bool is_signed, uint16_t bits) {
2989 const uint8_t *val_bytes = val;
2990 uint16_t byte_offset = 0;
2991 uint16_t remaining_bytes = zig_int_bytes(bits);
2992 uint16_t total_tz = 0;
2993 uint16_t limb_tz;
2994 (void)is_signed;
2995
2996#if zig_big_endian
2997 byte_offset = remaining_bytes;
2998#endif
2999
3000 while (remaining_bytes >= 128 / CHAR_BIT) {
3001#if zig_big_endian
3002 byte_offset -= 128 / CHAR_BIT;
3003#endif
3004
3005 {
3006 zig_u128 val_limb;
3007
3008 memcpy(&val_limb, &val_bytes[byte_offset], sizeof(val_limb));
3009 limb_tz = zig_ctz_u128(val_limb, 128);
3010 }
3011
3012 total_tz += limb_tz;
3013 if (limb_tz < 128) return total_tz;
3014 remaining_bytes -= 128 / CHAR_BIT;
3015
3016#if zig_little_endian
3017 byte_offset += 128 / CHAR_BIT;
3018#endif
3019 }
3020
3021 while (remaining_bytes >= 64 / CHAR_BIT) {
3022#if zig_big_endian
3023 byte_offset -= 64 / CHAR_BIT;
3024#endif
3025
3026 {
3027 uint64_t val_limb;
3028
3029 memcpy(&val_limb, &val_bytes[byte_offset], sizeof(val_limb));
3030 limb_tz = zig_ctz_u64(val_limb, 64);
3031 }
3032
3033 total_tz += limb_tz;
3034 if (limb_tz < 64) return total_tz;
3035 remaining_bytes -= 64 / CHAR_BIT;
3036
3037#if zig_little_endian
3038 byte_offset += 64 / CHAR_BIT;
3039#endif
3040 }
3041
3042 while (remaining_bytes >= 32 / CHAR_BIT) {
3043#if zig_big_endian
3044 byte_offset -= 32 / CHAR_BIT;
3045#endif
3046
3047 {
3048 uint32_t val_limb;
3049
3050 memcpy(&val_limb, &val_bytes[byte_offset], sizeof(val_limb));
3051 limb_tz = zig_ctz_u32(val_limb, 32);
3052 }
3053
3054 total_tz += limb_tz;
3055 if (limb_tz < 32) return total_tz;
3056 remaining_bytes -= 32 / CHAR_BIT;
3057
3058#if zig_little_endian
3059 byte_offset += 32 / CHAR_BIT;
3060#endif
3061 }
3062
3063 while (remaining_bytes >= 16 / CHAR_BIT) {
3064#if zig_big_endian
3065 byte_offset -= 16 / CHAR_BIT;
3066#endif
3067
3068 {
3069 uint16_t val_limb;
3070
3071 memcpy(&val_limb, &val_bytes[byte_offset], sizeof(val_limb));
3072 limb_tz = zig_ctz_u16(val_limb, 16);
3073 }
3074
3075 total_tz += limb_tz;
3076 if (limb_tz < 16) return total_tz;
3077 remaining_bytes -= 16 / CHAR_BIT;
3078
3079#if zig_little_endian
3080 byte_offset += 16 / CHAR_BIT;
3081#endif
3082 }
3083
3084 while (remaining_bytes >= 8 / CHAR_BIT) {
3085#if zig_big_endian
3086 byte_offset -= 8 / CHAR_BIT;
3087#endif
3088
3089 {
3090 uint8_t val_limb;
3091
3092 memcpy(&val_limb, &val_bytes[byte_offset], sizeof(val_limb));
3093 limb_tz = zig_ctz_u8(val_limb, 8);
3094 }
3095
3096 total_tz += limb_tz;
3097 if (limb_tz < 8) return total_tz;
3098 remaining_bytes -= 8 / CHAR_BIT;
3099
3100#if zig_little_endian
3101 byte_offset += 8 / CHAR_BIT;
3102#endif
3103 }
3104
3105 return total_tz;
3106}
3107
3108static inline uint16_t zig_popcount_big(const void *val, bool is_signed, uint16_t bits) {
3109 const uint8_t *val_bytes = val;
3110 uint16_t byte_offset = 0;
3111 uint16_t remaining_bytes = zig_int_bytes(bits);
3112 uint16_t total_pc = 0;
3113 (void)is_signed;
3114
3115#if zig_big_endian
3116 byte_offset = remaining_bytes;
3117#endif
3118
3119 while (remaining_bytes >= 128 / CHAR_BIT) {
3120#if zig_big_endian
3121 byte_offset -= 128 / CHAR_BIT;
3122#endif
3123
3124 {
3125 zig_u128 val_limb;
3126
3127 memcpy(&val_limb, &val_bytes[byte_offset], sizeof(val_limb));
3128 total_pc += zig_popcount_u128(val_limb, 128);
3129 }
3130
3131 remaining_bytes -= 128 / CHAR_BIT;
3132
3133#if zig_little_endian
3134 byte_offset += 128 / CHAR_BIT;
3135#endif
3136 }
3137
3138 while (remaining_bytes >= 64 / CHAR_BIT) {
3139#if zig_big_endian
3140 byte_offset -= 64 / CHAR_BIT;
3141#endif
3142
3143 {
3144 uint64_t val_limb;
3145
3146 memcpy(&val_limb, &val_bytes[byte_offset], sizeof(val_limb));
3147 total_pc += zig_popcount_u64(val_limb, 64);
3148 }
3149
3150 remaining_bytes -= 64 / CHAR_BIT;
3151
3152#if zig_little_endian
3153 byte_offset += 64 / CHAR_BIT;
3154#endif
3155 }
3156
3157 while (remaining_bytes >= 32 / CHAR_BIT) {
3158#if zig_big_endian
3159 byte_offset -= 32 / CHAR_BIT;
3160#endif
3161
3162 {
3163 uint32_t val_limb;
3164
3165 memcpy(&val_limb, &val_bytes[byte_offset], sizeof(val_limb));
3166 total_pc += zig_popcount_u32(val_limb, 32);
3167 }
3168
3169 remaining_bytes -= 32 / CHAR_BIT;
3170
3171#if zig_little_endian
3172 byte_offset += 32 / CHAR_BIT;
3173#endif
3174 }
3175
3176 while (remaining_bytes >= 16 / CHAR_BIT) {
3177#if zig_big_endian
3178 byte_offset -= 16 / CHAR_BIT;
3179#endif
3180
3181 {
3182 uint16_t val_limb;
3183
3184 memcpy(&val_limb, &val_bytes[byte_offset], sizeof(val_limb));
            total_pc += zig_popcount_u16(val_limb, 16);
3186 }
3187
3188 remaining_bytes -= 16 / CHAR_BIT;
3189
3190#if zig_little_endian
3191 byte_offset += 16 / CHAR_BIT;
3192#endif
3193 }
3194
3195 while (remaining_bytes >= 8 / CHAR_BIT) {
3196#if zig_big_endian
3197 byte_offset -= 8 / CHAR_BIT;
3198#endif
3199
3200 {
3201 uint8_t val_limb;
3202
3203 memcpy(&val_limb, &val_bytes[byte_offset], sizeof(val_limb));
            total_pc += zig_popcount_u8(val_limb, 8);
3205 }
3206
3207 remaining_bytes -= 8 / CHAR_BIT;
3208
3209#if zig_little_endian
3210 byte_offset += 8 / CHAR_BIT;
3211#endif
3212 }
3213
3214 return total_pc;
3215}
3216
3217/* ========================= Floating Point Support ========================= */
3218
3219#ifndef __STDC_WANT_IEC_60559_TYPES_EXT__
3220#define __STDC_WANT_IEC_60559_TYPES_EXT__
3221#endif
3222
3223#include <float.h>
3224
3225#if defined(zig_msvc)
3226float __cdecl nanf(char const* input);
3227double __cdecl nan(char const* input);
3228long double __cdecl nanl(char const* input);
3229
3230#define zig_msvc_flt_inf ((double)(1e+300 * 1e+300))
3231#define zig_msvc_flt_inff ((float)(1e+300 * 1e+300))
3232#define zig_msvc_flt_infl ((long double)(1e+300 * 1e+300))
3233#define zig_msvc_flt_nan ((double)(zig_msvc_flt_inf * 0.f))
3234#define zig_msvc_flt_nanf ((float)(zig_msvc_flt_inf * 0.f))
3235#define zig_msvc_flt_nanl ((long double)(zig_msvc_flt_inf * 0.f))
3236#define __builtin_nan(str) nan(str)
3237#define __builtin_nanf(str) nanf(str)
3238#define __builtin_nanl(str) nanl(str)
3239#define __builtin_inf() zig_msvc_flt_inf
3240#define __builtin_inff() zig_msvc_flt_inff
3241#define __builtin_infl() zig_msvc_flt_infl
3242#endif
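
/* MSVC provides no __builtin_inf/__builtin_nan, so the values are synthesized
   here: infinity by overflowing a double constant (1e+300 * 1e+300) and NaN by
   multiplying that infinity by zero; the __builtin_* names are then forwarded
   to the C runtime's nan()/nanf()/nanl(). */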
3243
3244#if (zig_has_builtin(nan) && zig_has_builtin(nans) && zig_has_builtin(inf)) || defined(zig_gcc)
3245#define zig_make_special_f16(sign, name, arg, repr) sign zig_make_f16 (__builtin_##name, )(arg)
3246#define zig_make_special_f32(sign, name, arg, repr) sign zig_make_f32 (__builtin_##name, )(arg)
3247#define zig_make_special_f64(sign, name, arg, repr) sign zig_make_f64 (__builtin_##name, )(arg)
3248#define zig_make_special_f80(sign, name, arg, repr) sign zig_make_f80 (__builtin_##name, )(arg)
3249#define zig_make_special_f128(sign, name, arg, repr) sign zig_make_f128(__builtin_##name, )(arg)
3250#else
3251#define zig_make_special_f16(sign, name, arg, repr) zig_bitCast_f16 (repr)
3252#define zig_make_special_f32(sign, name, arg, repr) zig_bitCast_f32 (repr)
3253#define zig_make_special_f64(sign, name, arg, repr) zig_bitCast_f64 (repr)
3254#define zig_make_special_f80(sign, name, arg, repr) zig_bitCast_f80 (repr)
3255#define zig_make_special_f128(sign, name, arg, repr) zig_bitCast_f128(repr)
3256#endif
3257
3258#define zig_has_f16 1
3259#define zig_libc_name_f16(name) __##name##h
3260#define zig_init_special_f16(sign, name, arg, repr) zig_make_special_f16(sign, name, arg, repr)
3261#if FLT_MANT_DIG == 11
3262typedef float zig_f16;
3263#define zig_make_f16(fp, repr) fp##f
3264#elif DBL_MANT_DIG == 11
3265typedef double zig_f16;
3266#define zig_make_f16(fp, repr) fp
3267#elif LDBL_MANT_DIG == 11
3268typedef long double zig_f16;
3269#define zig_make_f16(fp, repr) fp##l
3270#elif FLT16_MANT_DIG == 11 && (zig_has_builtin(inff16) || defined(zig_gcc))
3271typedef _Float16 zig_f16;
3272#define zig_make_f16(fp, repr) fp##f16
3273#elif defined(__SIZEOF_FP16__)
3274typedef __fp16 zig_f16;
3275#define zig_make_f16(fp, repr) fp##f16
3276#else
3277#undef zig_has_f16
3278#define zig_has_f16 0
3279#define zig_repr_f16 u16
3280typedef uint16_t zig_f16;
3281#define zig_make_f16(fp, repr) repr
3282#undef zig_make_special_f16
3283#define zig_make_special_f16(sign, name, arg, repr) repr
3284#undef zig_init_special_f16
3285#define zig_init_special_f16(sign, name, arg, repr) repr
3286#endif
3287#if defined(zig_darwin) && defined(zig_x86)
3288typedef uint16_t zig_compiler_rt_f16;
3289#else
3290typedef zig_f16 zig_compiler_rt_f16;
3291#endif
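
/* f16 selection, in order of preference: reuse a native C type whose mantissa
   is exactly 11 bits; otherwise fall back to a raw uint16_t bit pattern with
   zig_has_f16 = 0, in which case zig_make_f16 and the *_special_* macros
   simply yield the repr operand. zig_compiler_rt_f16 is uint16_t on x86
   Darwin, presumably matching how that platform's compiler-rt passes f16
   arguments and results. */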
3292
3293#define zig_has_f32 1
3294#define zig_libc_name_f32(name) name##f
3295#if defined(zig_msvc)
3296#define zig_init_special_f32(sign, name, arg, repr) sign zig_make_f32(zig_msvc_flt_##name, )
3297#else
3298#define zig_init_special_f32(sign, name, arg, repr) zig_make_special_f32(sign, name, arg, repr)
3299#endif
3300#if FLT_MANT_DIG == 24
3301typedef float zig_f32;
3302#define zig_make_f32(fp, repr) fp##f
3303#elif DBL_MANT_DIG == 24
3304typedef double zig_f32;
3305#define zig_make_f32(fp, repr) fp
3306#elif LDBL_MANT_DIG == 24
3307typedef long double zig_f32;
3308#define zig_make_f32(fp, repr) fp##l
3309#elif FLT32_MANT_DIG == 24
3310typedef _Float32 zig_f32;
3311#define zig_make_f32(fp, repr) fp##f32
3312#else
3313#undef zig_has_f32
3314#define zig_has_f32 0
3315#define zig_repr_f32 u32
3316typedef uint32_t zig_f32;
3317#define zig_make_f32(fp, repr) repr
3318#undef zig_make_special_f32
3319#define zig_make_special_f32(sign, name, arg, repr) repr
3320#undef zig_init_special_f32
3321#define zig_init_special_f32(sign, name, arg, repr) repr
3322#endif
3323
3324#define zig_has_f64 1
3325#define zig_libc_name_f64(name) name
3326
3327#if defined(zig_msvc)
3328#define zig_init_special_f64(sign, name, arg, repr) sign zig_make_f64(zig_msvc_flt_##name, )
3329#else
3330#define zig_init_special_f64(sign, name, arg, repr) zig_make_special_f64(sign, name, arg, repr)
3331#endif
3332#if FLT_MANT_DIG == 53
3333typedef float zig_f64;
3334#define zig_make_f64(fp, repr) fp##f
3335#elif DBL_MANT_DIG == 53
3336typedef double zig_f64;
3337#define zig_make_f64(fp, repr) fp
3338#elif LDBL_MANT_DIG == 53
3339typedef long double zig_f64;
3340#define zig_make_f64(fp, repr) fp##l
3341#elif FLT64_MANT_DIG == 53
3342typedef _Float64 zig_f64;
3343#define zig_make_f64(fp, repr) fp##f64
3344#elif FLT32X_MANT_DIG == 53
3345typedef _Float32x zig_f64;
3346#define zig_make_f64(fp, repr) fp##f32x
3347#else
3348#undef zig_has_f64
3349#define zig_has_f64 0
3350#define zig_repr_f64 u64
3351typedef uint64_t zig_f64;
3352#define zig_make_f64(fp, repr) repr
3353#undef zig_make_special_f64
3354#define zig_make_special_f64(sign, name, arg, repr) repr
3355#undef zig_init_special_f64
3356#define zig_init_special_f64(sign, name, arg, repr) repr
3357#endif
3358
3359#define zig_has_f80 1
3360#define zig_libc_name_f80(name) __##name##x
3361#define zig_init_special_f80(sign, name, arg, repr) zig_make_special_f80(sign, name, arg, repr)
3362#if FLT_MANT_DIG == 64
3363typedef float zig_f80;
3364#define zig_make_f80(fp, repr) fp##f
3365#elif DBL_MANT_DIG == 64
3366typedef double zig_f80;
3367#define zig_make_f80(fp, repr) fp
3368#elif LDBL_MANT_DIG == 64
3369typedef long double zig_f80;
3370#define zig_make_f80(fp, repr) fp##l
3371#elif FLT80_MANT_DIG == 64
3372typedef _Float80 zig_f80;
3373#define zig_make_f80(fp, repr) fp##f80
3374#elif FLT64X_MANT_DIG == 64
3375typedef _Float64x zig_f80;
3376#define zig_make_f80(fp, repr) fp##f64x
3377#elif defined(__SIZEOF_FLOAT80__)
3378typedef __float80 zig_f80;
3379#define zig_make_f80(fp, repr) fp##l
3380#else
3381#undef zig_has_f80
3382#define zig_has_f80 0
3383#define zig_repr_f80 u128
3384typedef zig_u128 zig_f80;
3385#define zig_make_f80(fp, repr) repr
3386#undef zig_make_special_f80
3387#define zig_make_special_f80(sign, name, arg, repr) repr
3388#undef zig_init_special_f80
3389#define zig_init_special_f80(sign, name, arg, repr) repr
3390#endif
3391
3392#if defined(zig_gcc) && defined(zig_x86)
3393#define zig_f128_has_miscompilations 1
3394#else
3395#define zig_f128_has_miscompilations 0
3396#endif
3397
3398#define zig_has_f128 1
3399#define zig_libc_name_f128(name) name##q
3400#define zig_init_special_f128(sign, name, arg, repr) zig_make_special_f128(sign, name, arg, repr)
3401#if !zig_f128_has_miscompilations && FLT_MANT_DIG == 113
3402typedef float zig_f128;
3403#define zig_make_f128(fp, repr) fp##f
3404#elif !zig_f128_has_miscompilations && DBL_MANT_DIG == 113
3405typedef double zig_f128;
3406#define zig_make_f128(fp, repr) fp
3407#elif !zig_f128_has_miscompilations && LDBL_MANT_DIG == 113
3408typedef long double zig_f128;
3409#define zig_make_f128(fp, repr) fp##l
3410#elif !zig_f128_has_miscompilations && FLT128_MANT_DIG == 113
3411typedef _Float128 zig_f128;
3412#define zig_make_f128(fp, repr) fp##f128
3413#elif !zig_f128_has_miscompilations && FLT64X_MANT_DIG == 113
3414typedef _Float64x zig_f128;
3415#define zig_make_f128(fp, repr) fp##f64x
3416#elif !zig_f128_has_miscompilations && defined(__SIZEOF_FLOAT128__)
3417typedef __float128 zig_f128;
3418#define zig_make_f128(fp, repr) fp##q
3419#undef zig_make_special_f128
3420#define zig_make_special_f128(sign, name, arg, repr) sign __builtin_##name##f128(arg)
3421#else
3422#undef zig_has_f128
3423#define zig_has_f128 0
3424#undef zig_make_special_f128
3425#undef zig_init_special_f128
3426#if defined(zig_darwin) || defined(zig_aarch64)
3427typedef __attribute__((__vector_size__(2 * sizeof(uint64_t)))) uint64_t zig_v2u64;
3428zig_basic_operator(zig_v2u64, xor_v2u64, ^)
3429#define zig_repr_f128 v2u64
3430typedef zig_v2u64 zig_f128;
3431#define zig_make_f128_zig_make_u128(hi, lo) (zig_f128){ lo, hi }
3432#define zig_make_f128_zig_init_u128 zig_make_f128_zig_make_u128
3433#define zig_make_f128(fp, repr) zig_make_f128_##repr
3434#define zig_make_special_f128(sign, name, arg, repr) zig_make_f128_##repr
3435#define zig_init_special_f128(sign, name, arg, repr) zig_make_f128_##repr
3436#else
3437#define zig_repr_f128 u128
3438typedef zig_u128 zig_f128;
3439#define zig_make_f128(fp, repr) repr
3440#define zig_make_special_f128(sign, name, arg, repr) repr
3441#define zig_init_special_f128(sign, name, arg, repr) repr
3442#endif
3443#endif
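
/* When no 113-bit-mantissa type is usable, f128 degrades to a 128-bit
   container: a 2 x uint64_t vector on Darwin/AArch64 (presumably so values
   still travel in SIMD registers as those ABIs expect) and plain zig_u128
   elsewhere, with zig_has_f128 = 0 indicating that only bit-level operations
   remain available. */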
3444
3445#if !defined(zig_msvc) && defined(ZIG_TARGET_ABI_MSVC)
3446/* Emulate msvc abi on a gnu compiler */
3447typedef zig_f64 zig_c_longdouble;
3448#elif defined(zig_msvc) && !defined(ZIG_TARGET_ABI_MSVC)
3449/* Emulate gnu abi on an msvc compiler */
3450typedef zig_f128 zig_c_longdouble;
3451#else
3452/* Target and compiler abi match */
3453typedef long double zig_c_longdouble;
3454#endif
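
/* zig_c_longdouble tracks the target ABI rather than the compiling C
   compiler: when the two disagree, long double is replaced by a stand-in type
   (zig_f64 when targeting the MSVC ABI from a GNU compiler, zig_f128 when
   targeting a GNU ABI from MSVC). */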
3455
3456#define zig_bitCast_float(Type, ReprType) \
3457 static inline zig_##Type zig_bitCast_##Type(ReprType repr) { \
3458 zig_##Type result; \
3459 memcpy(&result, &repr, sizeof(result)); \
3460 return result; \
3461 }
3462zig_bitCast_float(f16, uint16_t)
3463zig_bitCast_float(f32, uint32_t)
3464zig_bitCast_float(f64, uint64_t)
3465zig_bitCast_float(f80, zig_u128)
3466zig_bitCast_float(f128, zig_u128)
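
/* Illustrative use of the bit-cast helpers (assuming zig_has_f32 and an
   IEEE-754 binary32 float): zig_bitCast_f32(UINT32_C(0x3F800000)) yields 1.0f.
   The helpers reinterpret the integer representation via memcpy rather than
   performing a value conversion. */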
3467
3468#define zig_convert_builtin(ExternResType, ResType, operation, ExternArgType, ArgType, version) \
3469 zig_extern ExternResType zig_expand_concat(zig_expand_concat(zig_expand_concat(__##operation, \
3470 zig_compiler_rt_abbrev_##ArgType), zig_compiler_rt_abbrev_##ResType), version)(ExternArgType); \
3471 static inline ResType zig_expand_concat(zig_expand_concat(zig_##operation, \
3472 zig_compiler_rt_abbrev_##ArgType), zig_compiler_rt_abbrev_##ResType)(ArgType arg) { \
3473 ResType res; \
3474 ExternResType extern_res; \
3475 ExternArgType extern_arg; \
3476 memcpy(&extern_arg, &arg, sizeof(extern_arg)); \
3477 extern_res = zig_expand_concat(zig_expand_concat(zig_expand_concat(__##operation, \
3478 zig_compiler_rt_abbrev_##ArgType), zig_compiler_rt_abbrev_##ResType), version)(extern_arg); \
3479 memcpy(&res, &extern_res, sizeof(res)); \
3480 return res; \
3481 }
3482zig_convert_builtin(zig_compiler_rt_f16, zig_f16, trunc, zig_f32, zig_f32, 2)
3483zig_convert_builtin(zig_compiler_rt_f16, zig_f16, trunc, zig_f64, zig_f64, 2)
3484zig_convert_builtin(zig_f16, zig_f16, trunc, zig_f80, zig_f80, 2)
3485zig_convert_builtin(zig_f16, zig_f16, trunc, zig_f128, zig_f128, 2)
3486zig_convert_builtin(zig_f32, zig_f32, extend, zig_compiler_rt_f16, zig_f16, 2)
3487zig_convert_builtin(zig_f32, zig_f32, trunc, zig_f80, zig_f80, 2)
3488zig_convert_builtin(zig_f32, zig_f32, trunc, zig_f128, zig_f128, 2)
3489zig_convert_builtin(zig_f64, zig_f64, extend, zig_compiler_rt_f16, zig_f16, 2)
3490zig_convert_builtin(zig_f64, zig_f64, trunc, zig_f80, zig_f80, 2)
3491zig_convert_builtin(zig_f64, zig_f64, trunc, zig_f128, zig_f128, 2)
3492zig_convert_builtin(zig_f80, zig_f80, extend, zig_f16, zig_f16, 2)
3493zig_convert_builtin(zig_f80, zig_f80, extend, zig_f32, zig_f32, 2)
3494zig_convert_builtin(zig_f80, zig_f80, extend, zig_f64, zig_f64, 2)
3495zig_convert_builtin(zig_f80, zig_f80, trunc, zig_f128, zig_f128, 2)
3496zig_convert_builtin(zig_f128, zig_f128, extend, zig_f16, zig_f16, 2)
3497zig_convert_builtin(zig_f128, zig_f128, extend, zig_f32, zig_f32, 2)
3498zig_convert_builtin(zig_f128, zig_f128, extend, zig_f64, zig_f64, 2)
3499zig_convert_builtin(zig_f128, zig_f128, extend, zig_f80, zig_f80, 2)
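
/* Each zig_convert_builtin line declares a compiler-rt conversion symbol and
   wraps it under a zig_* name; e.g. the f64 -> f32 case below declares
   __truncdfsf2 and exposes it as zig_truncdfsf. The trailing macro argument
   ("2" here, empty for the int conversions later on) is just a suffix appended
   to the external symbol name, and the memcpy round-trips absorb
   representation differences such as zig_compiler_rt_f16 being uint16_t on
   some targets. */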
3500
3501#ifdef __ARM_EABI__
3502
3503zig_extern zig_callconv(pcs("aapcs")) zig_f32 __aeabi_d2f(zig_f64);
3504static inline zig_f32 zig_truncdfsf(zig_f64 arg) { return __aeabi_d2f(arg); }
3505
3506zig_extern zig_callconv(pcs("aapcs")) zig_f64 __aeabi_f2d(zig_f32);
3507static inline zig_f64 zig_extendsfdf(zig_f32 arg) { return __aeabi_f2d(arg); }
3508
3509#else /* __ARM_EABI__ */
3510
3511zig_convert_builtin(zig_f32, zig_f32, trunc, zig_f64, zig_f64, 2)
3512zig_convert_builtin(zig_f64, zig_f64, extend, zig_f32, zig_f32, 2)
3513
3514#endif /* __ARM_EABI__ */
3515
3516#define zig_float_negate_builtin_0(w, c, sb) \
3517 zig_expand_concat(zig_xor_, zig_repr_f##w)(arg, zig_make_f##w(-0x0.0p0, c sb))
3518#define zig_float_negate_builtin_1(w, c, sb) -arg
3519#define zig_float_negate_builtin(w, c, sb) \
3520 static inline zig_f##w zig_neg_f##w(zig_f##w arg) { \
3521 return zig_expand_concat(zig_float_negate_builtin_, zig_has_f##w)(w, c, sb); \
3522 }
3523zig_float_negate_builtin(16, , UINT16_C(1) << 15 )
3524zig_float_negate_builtin(32, , UINT32_C(1) << 31 )
3525zig_float_negate_builtin(64, , UINT64_C(1) << 63 )
3526zig_float_negate_builtin(80, zig_make_u128, (UINT64_C(1) << 15, UINT64_C(0)))
3527zig_float_negate_builtin(128, zig_make_u128, (UINT64_C(1) << 63, UINT64_C(0)))
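
/* Negation: with a native float type this is simply -arg; without one, the
   sign bit of the integer representation is flipped by xor-ing in the
   constant listed above (bit 15, 31 or 63 for the small widths, bit 79 for
   f80 and bit 127 for f128, both expressed as the high word of a zig_u128). */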
3528
3529#define zig_float_less_builtin_0(Type, operation) \
3530 zig_extern int32_t zig_expand_concat(zig_expand_concat(__##operation, \
3531 zig_compiler_rt_abbrev_zig_##Type), 2)(zig_##Type, zig_##Type); \
3532 static inline int32_t zig_##operation##_##Type(zig_##Type lhs, zig_##Type rhs) { \
3533 return zig_expand_concat(zig_expand_concat(__##operation, zig_compiler_rt_abbrev_zig_##Type), 2)(lhs, rhs); \
3534 }
3535#define zig_float_less_builtin_1(Type, operation) \
3536 static inline int32_t zig_##operation##_##Type(zig_##Type lhs, zig_##Type rhs) { \
3537 return (!(lhs <= rhs) - (lhs < rhs)); \
3538 }
3539
3540#define zig_float_greater_builtin_0(Type, operation) \
3541 zig_float_less_builtin_0(Type, operation)
3542#define zig_float_greater_builtin_1(Type, operation) \
3543 static inline int32_t zig_##operation##_##Type(zig_##Type lhs, zig_##Type rhs) { \
3544 return ((lhs > rhs) - !(lhs >= rhs)); \
3545 }
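
/* The branchless comparison fallbacks return a three-way result of -1, 0 or
   +1 for less, equal and greater. Unordered operands (NaN) come out as +1 in
   the "less" family and as -1 in the "greater" family, mirroring the
   soft-float comparison helpers they stand in for. */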
3546
3547#define zig_float_binary_builtin_0(Type, operation, operator) \
3548 zig_extern zig_##Type zig_expand_concat(zig_expand_concat(__##operation, \
3549 zig_compiler_rt_abbrev_zig_##Type), 3)(zig_##Type, zig_##Type); \
3550 static inline zig_##Type zig_##operation##_##Type(zig_##Type lhs, zig_##Type rhs) { \
3551 return zig_expand_concat(zig_expand_concat(__##operation, zig_compiler_rt_abbrev_zig_##Type), 3)(lhs, rhs); \
3552 }
3553#define zig_float_binary_builtin_1(Type, operation, operator) \
3554 static inline zig_##Type zig_##operation##_##Type(zig_##Type lhs, zig_##Type rhs) { \
3555 return lhs operator rhs; \
3556 }
3557
3558#define zig_common_float_builtins(w) \
3559 zig_convert_builtin( int64_t, int64_t, fix, zig_f##w, zig_f##w, ) \
3560 zig_convert_builtin(zig_i128, zig_i128, fix, zig_f##w, zig_f##w, ) \
3561 zig_convert_builtin(zig_u128, zig_u128, fixuns, zig_f##w, zig_f##w, ) \
3562 zig_convert_builtin(zig_f##w, zig_f##w, float, int64_t, int64_t, ) \
3563 zig_convert_builtin(zig_f##w, zig_f##w, float, zig_i128, zig_i128, ) \
3564 zig_convert_builtin(zig_f##w, zig_f##w, floatun, zig_u128, zig_u128, ) \
3565 zig_expand_concat(zig_float_less_builtin_, zig_has_f##w)(f##w, cmp) \
3566 zig_expand_concat(zig_float_less_builtin_, zig_has_f##w)(f##w, ne) \
3567 zig_expand_concat(zig_float_less_builtin_, zig_has_f##w)(f##w, eq) \
3568 zig_expand_concat(zig_float_less_builtin_, zig_has_f##w)(f##w, lt) \
3569 zig_expand_concat(zig_float_less_builtin_, zig_has_f##w)(f##w, le) \
3570 zig_expand_concat(zig_float_greater_builtin_, zig_has_f##w)(f##w, gt) \
3571 zig_expand_concat(zig_float_greater_builtin_, zig_has_f##w)(f##w, ge) \
3572 zig_expand_concat(zig_float_binary_builtin_, zig_has_f##w)(f##w, add, +) \
3573 zig_expand_concat(zig_float_binary_builtin_, zig_has_f##w)(f##w, sub, -) \
3574 zig_expand_concat(zig_float_binary_builtin_, zig_has_f##w)(f##w, mul, *) \
3575 zig_expand_concat(zig_float_binary_builtin_, zig_has_f##w)(f##w, div, /) \
3576 zig_expand_concat(zig_expand_import_, zig_expand_has_builtin(zig_libc_name_f##w(sqrt)))(zig_f##w, zig_sqrt_f##w, zig_libc_name_f##w(sqrt), (zig_f##w x), (x)) \
3577 zig_expand_concat(zig_expand_import_, zig_expand_has_builtin(zig_libc_name_f##w(sin)))(zig_f##w, zig_sin_f##w, zig_libc_name_f##w(sin), (zig_f##w x), (x)) \
3578 zig_expand_concat(zig_expand_import_, zig_expand_has_builtin(zig_libc_name_f##w(cos)))(zig_f##w, zig_cos_f##w, zig_libc_name_f##w(cos), (zig_f##w x), (x)) \
3579 zig_expand_concat(zig_expand_import_, zig_expand_has_builtin(zig_libc_name_f##w(tan)))(zig_f##w, zig_tan_f##w, zig_libc_name_f##w(tan), (zig_f##w x), (x)) \
3580 zig_expand_concat(zig_expand_import_, zig_expand_has_builtin(zig_libc_name_f##w(exp)))(zig_f##w, zig_exp_f##w, zig_libc_name_f##w(exp), (zig_f##w x), (x)) \
3581 zig_expand_concat(zig_expand_import_, zig_expand_has_builtin(zig_libc_name_f##w(exp2)))(zig_f##w, zig_exp2_f##w, zig_libc_name_f##w(exp2), (zig_f##w x), (x)) \
3582 zig_expand_concat(zig_expand_import_, zig_expand_has_builtin(zig_libc_name_f##w(log)))(zig_f##w, zig_log_f##w, zig_libc_name_f##w(log), (zig_f##w x), (x)) \
3583 zig_expand_concat(zig_expand_import_, zig_expand_has_builtin(zig_libc_name_f##w(log2)))(zig_f##w, zig_log2_f##w, zig_libc_name_f##w(log2), (zig_f##w x), (x)) \
3584 zig_expand_concat(zig_expand_import_, zig_expand_has_builtin(zig_libc_name_f##w(log10)))(zig_f##w, zig_log10_f##w, zig_libc_name_f##w(log10), (zig_f##w x), (x)) \
3585 zig_expand_concat(zig_expand_import_, zig_expand_has_builtin(zig_libc_name_f##w(fabs)))(zig_f##w, zig_abs_f##w, zig_libc_name_f##w(fabs), (zig_f##w x), (x)) \
3586 zig_expand_concat(zig_expand_import_, zig_expand_has_builtin(zig_libc_name_f##w(floor)))(zig_f##w, zig_floor_f##w, zig_libc_name_f##w(floor), (zig_f##w x), (x)) \
3587 zig_expand_concat(zig_expand_import_, zig_expand_has_builtin(zig_libc_name_f##w(ceil)))(zig_f##w, zig_ceil_f##w, zig_libc_name_f##w(ceil), (zig_f##w x), (x)) \
3588 zig_expand_concat(zig_expand_import_, zig_expand_has_builtin(zig_libc_name_f##w(round)))(zig_f##w, zig_round_f##w, zig_libc_name_f##w(round), (zig_f##w x), (x)) \
3589 zig_expand_concat(zig_expand_import_, zig_expand_has_builtin(zig_libc_name_f##w(trunc)))(zig_f##w, zig_trunc_f##w, zig_libc_name_f##w(trunc), (zig_f##w x), (x)) \
3590 zig_expand_concat(zig_expand_import_, zig_expand_has_builtin(zig_libc_name_f##w(fmod)))(zig_f##w, zig_fmod_f##w, zig_libc_name_f##w(fmod), (zig_f##w x, zig_f##w y), (x, y)) \
3591 zig_expand_concat(zig_expand_import_, zig_expand_has_builtin(zig_libc_name_f##w(fmin)))(zig_f##w, zig_min_f##w, zig_libc_name_f##w(fmin), (zig_f##w x, zig_f##w y), (x, y)) \
3592 zig_expand_concat(zig_expand_import_, zig_expand_has_builtin(zig_libc_name_f##w(fmax)))(zig_f##w, zig_max_f##w, zig_libc_name_f##w(fmax), (zig_f##w x, zig_f##w y), (x, y)) \
3593 zig_expand_concat(zig_expand_import_, zig_expand_has_builtin(zig_libc_name_f##w(fma)))(zig_f##w, zig_fma_f##w, zig_libc_name_f##w(fma), (zig_f##w x, zig_f##w y, zig_f##w z), (x, y, z)) \
3594\
3595 static inline zig_f##w zig_div_trunc_f##w(zig_f##w lhs, zig_f##w rhs) { \
3596 return zig_trunc_f##w(zig_div_f##w(lhs, rhs)); \
3597 } \
3598\
3599 static inline zig_f##w zig_div_floor_f##w(zig_f##w lhs, zig_f##w rhs) { \
3600 return zig_floor_f##w(zig_div_f##w(lhs, rhs)); \
3601 } \
3602\
3603 static inline zig_f##w zig_mod_f##w(zig_f##w lhs, zig_f##w rhs) { \
3604 return zig_sub_f##w(lhs, zig_mul_f##w(zig_div_floor_f##w(lhs, rhs), rhs)); \
3605 }
3606zig_common_float_builtins(16)
3607zig_common_float_builtins(32)
3608zig_common_float_builtins(64)
3609zig_common_float_builtins(80)
3610zig_common_float_builtins(128)
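
/* div_trunc, div_floor and mod are composed from the primitives defined just
   above; mod in particular is the floored remainder
   lhs - floor(lhs / rhs) * rhs, so its sign follows rhs rather than lhs. */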
3611
3612#define zig_float_builtins(w) \
3613 zig_convert_builtin( int32_t, int32_t, fix, zig_f##w, zig_f##w, ) \
3614 zig_convert_builtin(uint32_t, uint32_t, fixuns, zig_f##w, zig_f##w, ) \
3615 zig_convert_builtin(uint64_t, uint64_t, fixuns, zig_f##w, zig_f##w, ) \
3616 zig_convert_builtin(zig_f##w, zig_f##w, float, int32_t, int32_t, ) \
3617 zig_convert_builtin(zig_f##w, zig_f##w, floatun, uint32_t, uint32_t, ) \
3618 zig_convert_builtin(zig_f##w, zig_f##w, floatun, uint64_t, uint64_t, )
3619zig_float_builtins(16)
3620zig_float_builtins(80)
3621zig_float_builtins(128)
3622
3623#ifdef __ARM_EABI__
3624
3625zig_extern zig_callconv(pcs("aapcs")) int32_t __aeabi_f2iz(zig_f32);
3626static inline int32_t zig_fixsfsi(zig_f32 arg) { return __aeabi_f2iz(arg); }
3627
3628zig_extern zig_callconv(pcs("aapcs")) uint32_t __aeabi_f2uiz(zig_f32);
3629static inline uint32_t zig_fixunssfsi(zig_f32 arg) { return __aeabi_f2uiz(arg); }
3630
3631zig_extern zig_callconv(pcs("aapcs")) uint64_t __aeabi_f2ulz(zig_f32);
3632static inline uint64_t zig_fixunssfdi(zig_f32 arg) { return __aeabi_f2ulz(arg); }
3633
3634zig_extern zig_callconv(pcs("aapcs")) zig_f32 __aeabi_i2f(int32_t);
3635static inline zig_f32 zig_floatsisf(int32_t arg) { return __aeabi_i2f(arg); }
3636
3637zig_extern zig_callconv(pcs("aapcs")) zig_f32 __aeabi_ui2f(uint32_t);
3638static inline zig_f32 zig_floatunsisf(uint32_t arg) { return __aeabi_ui2f(arg); }
3639
3640zig_extern zig_callconv(pcs("aapcs")) zig_f32 __aeabi_ul2f(uint64_t);
3641static inline zig_f32 zig_floatundisf(uint64_t arg) { return __aeabi_ul2f(arg); }
3642
3643zig_extern zig_callconv(pcs("aapcs")) int32_t __aeabi_d2iz(zig_f64);
3644static inline int32_t zig_fixdfsi(zig_f64 arg) { return __aeabi_d2iz(arg); }
3645
3646zig_extern zig_callconv(pcs("aapcs")) uint32_t __aeabi_d2uiz(zig_f64);
3647static inline uint32_t zig_fixunsdfsi(zig_f64 arg) { return __aeabi_d2uiz(arg); }
3648
3649zig_extern zig_callconv(pcs("aapcs")) uint64_t __aeabi_d2ulz(zig_f64);
3650static inline uint64_t zig_fixunsdfdi(zig_f64 arg) { return __aeabi_d2ulz(arg); }
3651
3652zig_extern zig_callconv(pcs("aapcs")) zig_f64 __aeabi_i2d(int32_t);
3653static inline zig_f64 zig_floatsidf(int32_t arg) { return __aeabi_i2d(arg); }
3654
3655zig_extern zig_callconv(pcs("aapcs")) zig_f64 __aeabi_ui2d(uint32_t);
3656static inline zig_f64 zig_floatunsidf(uint32_t arg) { return __aeabi_ui2d(arg); }
3657
3658zig_extern zig_callconv(pcs("aapcs")) zig_f64 __aeabi_ul2d(uint64_t);
3659static inline zig_f64 zig_floatundidf(uint64_t arg) { return __aeabi_ul2d(arg); }
3660
3661#else /* __ARM_EABI__ */
3662
3663zig_float_builtins(32)
3664zig_float_builtins(64)
3665
3666#endif /* __ARM_EABI__ */
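
/* On ARM EABI targets the float <-> integer conversions above come from the
   __aeabi_* run-time helpers, which use the AAPCS calling convention; the
   wrappers keep the same zig_* names as the generic compiler-rt path so
   call sites are unaffected. */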
3667
3668/* ============================ Atomics Support ============================= */
3669
3670/* Note that atomics should be implemented as macros because most
3671 compilers silently discard runtime atomic order information. */
3672
3673/* Define fallback implementations first that can later be undef'd on compilers with builtin support. */
3674/* Note that zig_atomicrmw_expected is needed to handle aliasing between res and arg. */
3675#define zig_atomicrmw_xchg_float(res, obj, arg, order, Type, ReprType) do { \
3676 zig_##Type zig_atomicrmw_expected; \
3677 zig_atomic_load(zig_atomicrmw_expected, obj, zig_memory_order_relaxed, Type, ReprType); \
3678 while (!zig_cmpxchg_weak(obj, zig_atomicrmw_expected, arg, order, zig_memory_order_relaxed, Type, ReprType)); \
3679 res = zig_atomicrmw_expected; \
3680} while (0)
3681#define zig_atomicrmw_add_float(res, obj, arg, order, Type, ReprType) do { \
3682 zig_##Type zig_atomicrmw_expected; \
3683 zig_##Type zig_atomicrmw_desired; \
3684 zig_atomic_load(zig_atomicrmw_expected, obj, zig_memory_order_relaxed, Type, ReprType); \
3685 do { \
3686 zig_atomicrmw_desired = zig_add_##Type(zig_atomicrmw_expected, arg); \
3687 } while (!zig_cmpxchg_weak(obj, zig_atomicrmw_expected, zig_atomicrmw_desired, order, zig_memory_order_relaxed, Type, ReprType)); \
3688 res = zig_atomicrmw_expected; \
3689} while (0)
3690#define zig_atomicrmw_sub_float(res, obj, arg, order, Type, ReprType) do { \
3691 zig_##Type zig_atomicrmw_expected; \
3692 zig_##Type zig_atomicrmw_desired; \
3693 zig_atomic_load(zig_atomicrmw_expected, obj, zig_memory_order_relaxed, Type, ReprType); \
3694 do { \
3695 zig_atomicrmw_desired = zig_sub_##Type(zig_atomicrmw_expected, arg); \
3696 } while (!zig_cmpxchg_weak(obj, zig_atomicrmw_expected, zig_atomicrmw_desired, order, zig_memory_order_relaxed, Type, ReprType)); \
3697 res = zig_atomicrmw_expected; \
3698} while (0)
3699#define zig_atomicrmw_min_float(res, obj, arg, order, Type, ReprType) do { \
3700 zig_##Type zig_atomicrmw_expected; \
3701 zig_##Type zig_atomicrmw_desired; \
3702 zig_atomic_load(zig_atomicrmw_expected, obj, zig_memory_order_relaxed, Type, ReprType); \
3703 do { \
3704 zig_atomicrmw_desired = zig_min_##Type(zig_atomicrmw_expected, arg); \
3705 } while (!zig_cmpxchg_weak(obj, zig_atomicrmw_expected, zig_atomicrmw_desired, order, zig_memory_order_relaxed, Type, ReprType)); \
3706 res = zig_atomicrmw_expected; \
3707} while (0)
3708#define zig_atomicrmw_max_float(res, obj, arg, order, Type, ReprType) do { \
3709 zig_##Type zig_atomicrmw_expected; \
3710 zig_##Type zig_atomicrmw_desired; \
3711 zig_atomic_load(zig_atomicrmw_expected, obj, zig_memory_order_relaxed, Type, ReprType); \
3712 do { \
3713 zig_atomicrmw_desired = zig_max_##Type(zig_atomicrmw_expected, arg); \
3714 } while (!zig_cmpxchg_weak(obj, zig_atomicrmw_expected, zig_atomicrmw_desired, order, zig_memory_order_relaxed, Type, ReprType)); \
3715 res = zig_atomicrmw_expected; \
3716} while (0)
3717
3718#define zig_atomicrmw_xchg_int128(res, obj, arg, order, Type, ReprType) do { \
3719 zig_##Type zig_atomicrmw_expected; \
3720 zig_atomic_load(zig_atomicrmw_expected, obj, zig_memory_order_relaxed, Type, ReprType); \
3721 while (!zig_cmpxchg_weak(obj, zig_atomicrmw_expected, arg, order, zig_memory_order_relaxed, Type, ReprType)); \
3722 res = zig_atomicrmw_expected; \
3723} while (0)
3724#define zig_atomicrmw_add_int128(res, obj, arg, order, Type, ReprType) do { \
3725 zig_##Type zig_atomicrmw_expected; \
3726 zig_##Type zig_atomicrmw_desired; \
3727 zig_atomic_load(zig_atomicrmw_expected, obj, zig_memory_order_relaxed, Type, ReprType); \
3728 do { \
3729 zig_atomicrmw_desired = zig_add_##Type(zig_atomicrmw_expected, arg); \
3730 } while (!zig_cmpxchg_weak(obj, zig_atomicrmw_expected, zig_atomicrmw_desired, order, zig_memory_order_relaxed, Type, ReprType)); \
3731 res = zig_atomicrmw_expected; \
3732} while (0)
3733#define zig_atomicrmw_sub_int128(res, obj, arg, order, Type, ReprType) do { \
3734 zig_##Type zig_atomicrmw_expected; \
3735 zig_##Type zig_atomicrmw_desired; \
3736 zig_atomic_load(zig_atomicrmw_expected, obj, zig_memory_order_relaxed, Type, ReprType); \
3737 do { \
3738 zig_atomicrmw_desired = zig_sub_##Type(zig_atomicrmw_expected, arg); \
3739 } while (!zig_cmpxchg_weak(obj, zig_atomicrmw_expected, zig_atomicrmw_desired, order, zig_memory_order_relaxed, Type, ReprType)); \
3740 res = zig_atomicrmw_expected; \
3741} while (0)
3742#define zig_atomicrmw_and_int128(res, obj, arg, order, Type, ReprType) do { \
3743 zig_##Type zig_atomicrmw_expected; \
3744 zig_##Type zig_atomicrmw_desired; \
3745 zig_atomic_load(zig_atomicrmw_expected, obj, zig_memory_order_relaxed, Type, ReprType); \
3746 do { \
3747 zig_atomicrmw_desired = zig_and_##Type(zig_atomicrmw_expected, arg); \
3748 } while (!zig_cmpxchg_weak(obj, zig_atomicrmw_expected, zig_atomicrmw_desired, order, zig_memory_order_relaxed, Type, ReprType)); \
3749 res = zig_atomicrmw_expected; \
3750} while (0)
3751#define zig_atomicrmw_nand_int128(res, obj, arg, order, Type, ReprType) do { \
3752 zig_##Type zig_atomicrmw_expected; \
3753 zig_##Type zig_atomicrmw_desired; \
3754 zig_atomic_load(zig_atomicrmw_expected, obj, zig_memory_order_relaxed, Type, ReprType); \
3755 do { \
3756 zig_atomicrmw_desired = zig_not_##Type(zig_and_##Type(zig_atomicrmw_expected, arg), 128); \
3757 } while (!zig_cmpxchg_weak(obj, zig_atomicrmw_expected, zig_atomicrmw_desired, order, zig_memory_order_relaxed, Type, ReprType)); \
3758 res = zig_atomicrmw_expected; \
3759} while (0)
3760#define zig_atomicrmw_or_int128(res, obj, arg, order, Type, ReprType) do { \
3761 zig_##Type zig_atomicrmw_expected; \
3762 zig_##Type zig_atomicrmw_desired; \
3763 zig_atomic_load(zig_atomicrmw_expected, obj, zig_memory_order_relaxed, Type, ReprType); \
3764 do { \
3765 zig_atomicrmw_desired = zig_or_##Type(zig_atomicrmw_expected, arg); \
3766 } while (!zig_cmpxchg_weak(obj, zig_atomicrmw_expected, zig_atomicrmw_desired, order, zig_memory_order_relaxed, Type, ReprType)); \
3767 res = zig_atomicrmw_expected; \
3768} while (0)
3769#define zig_atomicrmw_xor_int128(res, obj, arg, order, Type, ReprType) do { \
3770 zig_##Type zig_atomicrmw_expected; \
3771 zig_##Type zig_atomicrmw_desired; \
3772 zig_atomic_load(zig_atomicrmw_expected, obj, zig_memory_order_relaxed, Type, ReprType); \
3773 do { \
3774 zig_atomicrmw_desired = zig_xor_##Type(zig_atomicrmw_expected, arg); \
3775 } while (!zig_cmpxchg_weak(obj, zig_atomicrmw_expected, zig_atomicrmw_desired, order, zig_memory_order_relaxed, Type, ReprType)); \
3776 res = zig_atomicrmw_expected; \
3777} while (0)
3778#define zig_atomicrmw_min_int128(res, obj, arg, order, Type, ReprType) do { \
3779 zig_##Type zig_atomicrmw_expected; \
3780 zig_##Type zig_atomicrmw_desired; \
3781 zig_atomic_load(zig_atomicrmw_expected, obj, zig_memory_order_relaxed, Type, ReprType); \
3782 do { \
3783 zig_atomicrmw_desired = zig_min_##Type(zig_atomicrmw_expected, arg); \
3784 } while (!zig_cmpxchg_weak(obj, zig_atomicrmw_expected, zig_atomicrmw_desired, order, zig_memory_order_relaxed, Type, ReprType)); \
3785 res = zig_atomicrmw_expected; \
3786} while (0)
3787#define zig_atomicrmw_max_int128(res, obj, arg, order, Type, ReprType) do { \
3788 zig_##Type zig_atomicrmw_expected; \
3789 zig_##Type zig_atomicrmw_desired; \
3790 zig_atomic_load(zig_atomicrmw_expected, obj, zig_memory_order_relaxed, Type, ReprType); \
3791 do { \
3792 zig_atomicrmw_desired = zig_max_##Type(zig_atomicrmw_expected, arg); \
3793 } while (!zig_cmpxchg_weak(obj, zig_atomicrmw_expected, zig_atomicrmw_desired, order, zig_memory_order_relaxed, Type, ReprType)); \
3794 res = zig_atomicrmw_expected; \
3795} while (0)
3796
3797#if (__STDC_VERSION__ >= 201112L && !defined(__STDC_NO_ATOMICS__)) || (zig_has_include(<stdatomic.h>) && !defined(zig_msvc))
3798#define zig_c11_atomics
3799#endif
3800
3801#if defined(zig_c11_atomics)
3802#include <stdatomic.h>
3803typedef enum memory_order zig_memory_order;
3804#define zig_memory_order_relaxed memory_order_relaxed
3805#define zig_memory_order_acquire memory_order_acquire
3806#define zig_memory_order_release memory_order_release
3807#define zig_memory_order_acq_rel memory_order_acq_rel
3808#define zig_memory_order_seq_cst memory_order_seq_cst
3809#define zig_atomic(Type) _Atomic(Type)
3810#define zig_cmpxchg_strong( obj, expected, desired, succ, fail, Type, ReprType) atomic_compare_exchange_strong_explicit(obj, &(expected), desired, succ, fail)
3811#define zig_cmpxchg_weak( obj, expected, desired, succ, fail, Type, ReprType) atomic_compare_exchange_weak_explicit (obj, &(expected), desired, succ, fail)
3812#define zig_atomicrmw_xchg(res, obj, arg, order, Type, ReprType) res = atomic_exchange_explicit (obj, arg, order)
3813#define zig_atomicrmw_add(res, obj, arg, order, Type, ReprType) res = atomic_fetch_add_explicit (obj, arg, order)
3814#define zig_atomicrmw_sub(res, obj, arg, order, Type, ReprType) res = atomic_fetch_sub_explicit (obj, arg, order)
3815#define zig_atomicrmw_or(res, obj, arg, order, Type, ReprType) res = atomic_fetch_or_explicit (obj, arg, order)
3816#define zig_atomicrmw_xor(res, obj, arg, order, Type, ReprType) res = atomic_fetch_xor_explicit (obj, arg, order)
3817#define zig_atomicrmw_and(res, obj, arg, order, Type, ReprType) res = atomic_fetch_and_explicit (obj, arg, order)
3818#define zig_atomicrmw_nand(res, obj, arg, order, Type, ReprType) res = __atomic_fetch_nand(obj, arg, order)
3819#define zig_atomicrmw_min(res, obj, arg, order, Type, ReprType) res = __atomic_fetch_min (obj, arg, order)
3820#define zig_atomicrmw_max(res, obj, arg, order, Type, ReprType) res = __atomic_fetch_max (obj, arg, order)
3821#define zig_atomic_store( obj, arg, order, Type, ReprType) atomic_store_explicit (obj, arg, order)
3822#define zig_atomic_load(res, obj, order, Type, ReprType) res = atomic_load_explicit (obj, order)
3823#undef zig_atomicrmw_xchg_float
3824#define zig_atomicrmw_xchg_float zig_atomicrmw_xchg
3825#undef zig_atomicrmw_add_float
3826#define zig_atomicrmw_add_float zig_atomicrmw_add
3827#undef zig_atomicrmw_sub_float
3828#define zig_atomicrmw_sub_float zig_atomicrmw_sub
3829#elif defined(zig_gnuc)
3830typedef int zig_memory_order;
3831#define zig_memory_order_relaxed __ATOMIC_RELAXED
3832#define zig_memory_order_acquire __ATOMIC_ACQUIRE
3833#define zig_memory_order_release __ATOMIC_RELEASE
3834#define zig_memory_order_acq_rel __ATOMIC_ACQ_REL
3835#define zig_memory_order_seq_cst __ATOMIC_SEQ_CST
3836#define zig_atomic(Type) Type
3837#define zig_cmpxchg_strong( obj, expected, desired, succ, fail, Type, ReprType) __atomic_compare_exchange(obj, (ReprType *)&(expected), (ReprType *)&(desired), false, succ, fail)
3838#define zig_cmpxchg_weak( obj, expected, desired, succ, fail, Type, ReprType) __atomic_compare_exchange(obj, (ReprType *)&(expected), (ReprType *)&(desired), true, succ, fail)
3839#define zig_atomicrmw_xchg(res, obj, arg, order, Type, ReprType) __atomic_exchange(obj, (ReprType *)&(arg), &(res), order)
3840#define zig_atomicrmw_add(res, obj, arg, order, Type, ReprType) res = __atomic_fetch_add (obj, arg, order)
3841#define zig_atomicrmw_sub(res, obj, arg, order, Type, ReprType) res = __atomic_fetch_sub (obj, arg, order)
3842#define zig_atomicrmw_or(res, obj, arg, order, Type, ReprType) res = __atomic_fetch_or (obj, arg, order)
3843#define zig_atomicrmw_xor(res, obj, arg, order, Type, ReprType) res = __atomic_fetch_xor (obj, arg, order)
3844#define zig_atomicrmw_and(res, obj, arg, order, Type, ReprType) res = __atomic_fetch_and (obj, arg, order)
3845#define zig_atomicrmw_nand(res, obj, arg, order, Type, ReprType) res = __atomic_fetch_nand(obj, arg, order)
3846#define zig_atomicrmw_min(res, obj, arg, order, Type, ReprType) res = __atomic_fetch_min (obj, arg, order)
3847#define zig_atomicrmw_max(res, obj, arg, order, Type, ReprType) res = __atomic_fetch_max (obj, arg, order)
3848#define zig_atomic_store( obj, arg, order, Type, ReprType) __atomic_store (obj, (ReprType *)&(arg), order)
3849#define zig_atomic_load(res, obj, order, Type, ReprType) __atomic_load (obj, &(res), order)
3850#undef zig_atomicrmw_xchg_float
3851#define zig_atomicrmw_xchg_float zig_atomicrmw_xchg
3852#elif defined(zig_msvc) && defined(zig_x86)
3853#define zig_memory_order_relaxed 0
3854#define zig_memory_order_acquire 2
3855#define zig_memory_order_release 3
3856#define zig_memory_order_acq_rel 4
3857#define zig_memory_order_seq_cst 5
3858#define zig_atomic(Type) Type
3859#define zig_cmpxchg_strong( obj, expected, desired, succ, fail, Type, ReprType) zig_msvc_cmpxchg_##Type(obj, &(expected), desired)
3860#define zig_cmpxchg_weak( obj, expected, desired, succ, fail, Type, ReprType) zig_cmpxchg_strong(obj, expected, desired, succ, fail, Type, ReprType)
3861#define zig_atomicrmw_xchg(res, obj, arg, order, Type, ReprType) res = zig_msvc_atomicrmw_xchg_##Type(obj, arg)
3862#define zig_atomicrmw_add(res, obj, arg, order, Type, ReprType) res = zig_msvc_atomicrmw_add_ ##Type(obj, arg)
3863#define zig_atomicrmw_sub(res, obj, arg, order, Type, ReprType) res = zig_msvc_atomicrmw_sub_ ##Type(obj, arg)
3864#define zig_atomicrmw_or(res, obj, arg, order, Type, ReprType) res = zig_msvc_atomicrmw_or_ ##Type(obj, arg)
3865#define zig_atomicrmw_xor(res, obj, arg, order, Type, ReprType) res = zig_msvc_atomicrmw_xor_ ##Type(obj, arg)
3866#define zig_atomicrmw_and(res, obj, arg, order, Type, ReprType) res = zig_msvc_atomicrmw_and_ ##Type(obj, arg)
3867#define zig_atomicrmw_nand(res, obj, arg, order, Type, ReprType) res = zig_msvc_atomicrmw_nand_##Type(obj, arg)
3868#define zig_atomicrmw_min(res, obj, arg, order, Type, ReprType) res = zig_msvc_atomicrmw_min_ ##Type(obj, arg)
3869#define zig_atomicrmw_max(res, obj, arg, order, Type, ReprType) res = zig_msvc_atomicrmw_max_ ##Type(obj, arg)
3870#define zig_atomic_store( obj, arg, order, Type, ReprType) zig_msvc_atomic_store_ ##Type(obj, arg)
3871#define zig_atomic_load(res, obj, order, Type, ReprType) res = zig_msvc_atomic_load_ ##order##_##Type(obj)
3872/* TODO: zig_msvc && (zig_thumb || zig_aarch64) */
3873#else
3874#define zig_memory_order_relaxed 0
3875#define zig_memory_order_acquire 2
3876#define zig_memory_order_release 3
3877#define zig_memory_order_acq_rel 4
3878#define zig_memory_order_seq_cst 5
3879#define zig_atomic(Type) Type
3880#define zig_cmpxchg_strong( obj, expected, desired, succ, fail, Type, ReprType) zig_atomics_unavailable
3881#define zig_cmpxchg_weak( obj, expected, desired, succ, fail, Type, ReprType) zig_atomics_unavailable
3882#define zig_atomicrmw_xchg(res, obj, arg, order, Type, ReprType) zig_atomics_unavailable
3883#define zig_atomicrmw_add(res, obj, arg, order, Type, ReprType) zig_atomics_unavailable
3884#define zig_atomicrmw_sub(res, obj, arg, order, Type, ReprType) zig_atomics_unavailable
3885#define zig_atomicrmw_or(res, obj, arg, order, Type, ReprType) zig_atomics_unavailable
3886#define zig_atomicrmw_xor(res, obj, arg, order, Type, ReprType) zig_atomics_unavailable
3887#define zig_atomicrmw_and(res, obj, arg, order, Type, ReprType) zig_atomics_unavailable
3888#define zig_atomicrmw_nand(res, obj, arg, order, Type, ReprType) zig_atomics_unavailable
3889#define zig_atomicrmw_min(res, obj, arg, order, Type, ReprType) zig_atomics_unavailable
3890#define zig_atomicrmw_max(res, obj, arg, order, Type, ReprType) zig_atomics_unavailable
3891#define zig_atomic_store( obj, arg, order, Type, ReprType) zig_atomics_unavailable
3892#define zig_atomic_load(res, obj, order, Type, ReprType) zig_atomics_unavailable
3893#endif
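
/* Illustrative (hypothetical) call site for the macros above; the caller
   supplies the result lvalue, the atomic object, the operand, the memory
   order, and the Zig/repr type pair:

       static zig_atomic(uint32_t) counter;
       uint32_t prev;
       zig_atomicrmw_add(prev, &counter, UINT32_C(1), zig_memory_order_seq_cst, u32, uint32_t);
*/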
3894
3895#if !defined(zig_c11_atomics) && defined(zig_msvc) && defined(zig_x86)
3896
3897/* TODO: zig_msvc_atomic_load should load 32 bit without interlocked on x86, and load 64 bit without interlocked on x64 */
3898
3899#define zig_msvc_atomics(ZigType, Type, SigType, suffix, iso_suffix) \
3900 static inline bool zig_msvc_cmpxchg_##ZigType(Type volatile* obj, Type* expected, Type desired) { \
3901 Type comparand = *expected; \
3902 Type initial = _InterlockedCompareExchange##suffix((SigType volatile*)obj, (SigType)desired, (SigType)comparand); \
3903 bool exchanged = initial == comparand; \
3904 if (!exchanged) { \
3905 *expected = initial; \
3906 } \
3907 return exchanged; \
3908 } \
3909 static inline Type zig_msvc_atomicrmw_xchg_##ZigType(Type volatile* obj, Type value) { \
3910 return _InterlockedExchange##suffix((SigType volatile*)obj, (SigType)value); \
3911 } \
3912 static inline Type zig_msvc_atomicrmw_add_##ZigType(Type volatile* obj, Type value) { \
3913 return _InterlockedExchangeAdd##suffix((SigType volatile*)obj, (SigType)value); \
3914 } \
3915 static inline Type zig_msvc_atomicrmw_sub_##ZigType(Type volatile* obj, Type value) { \
3916 bool success = false; \
3917 Type new; \
3918 Type prev; \
3919 while (!success) { \
3920 prev = *obj; \
3921 new = prev - value; \
3922 success = zig_msvc_cmpxchg_##ZigType(obj, &prev, new); \
3923 } \
3924 return prev; \
3925 } \
3926 static inline Type zig_msvc_atomicrmw_or_##ZigType(Type volatile* obj, Type value) { \
3927 return _InterlockedOr##suffix((SigType volatile*)obj, (SigType)value); \
3928 } \
3929 static inline Type zig_msvc_atomicrmw_xor_##ZigType(Type volatile* obj, Type value) { \
3930 return _InterlockedXor##suffix((SigType volatile*)obj, (SigType)value); \
3931 } \
3932 static inline Type zig_msvc_atomicrmw_and_##ZigType(Type volatile* obj, Type value) { \
3933 return _InterlockedAnd##suffix((SigType volatile*)obj, (SigType)value); \
3934 } \
3935 static inline Type zig_msvc_atomicrmw_nand_##ZigType(Type volatile* obj, Type value) { \
3936 bool success = false; \
3937 Type new; \
3938 Type prev; \
3939 while (!success) { \
3940 prev = *obj; \
3941 new = ~(prev & value); \
3942 success = zig_msvc_cmpxchg_##ZigType(obj, &prev, new); \
3943 } \
3944 return prev; \
3945 } \
3946 static inline Type zig_msvc_atomicrmw_min_##ZigType(Type volatile* obj, Type value) { \
3947 bool success = false; \
3948 Type new; \
3949 Type prev; \
3950 while (!success) { \
3951 prev = *obj; \
3952 new = value < prev ? value : prev; \
3953 success = zig_msvc_cmpxchg_##ZigType(obj, &prev, new); \
3954 } \
3955 return prev; \
3956 } \
3957 static inline Type zig_msvc_atomicrmw_max_##ZigType(Type volatile* obj, Type value) { \
3958 bool success = false; \
3959 Type new; \
3960 Type prev; \
3961 while (!success) { \
3962 prev = *obj; \
3963 new = value > prev ? value : prev; \
3964 success = zig_msvc_cmpxchg_##ZigType(obj, &prev, new); \
3965 } \
3966 return prev; \
3967 } \
3968 static inline void zig_msvc_atomic_store_##ZigType(Type volatile* obj, Type value) { \
3969 (void)_InterlockedExchange##suffix((SigType volatile*)obj, (SigType)value); \
3970 } \
3971 static inline Type zig_msvc_atomic_load_zig_memory_order_relaxed_##ZigType(Type volatile* obj) { \
3972 return __iso_volatile_load##iso_suffix((SigType volatile*)obj); \
3973 } \
3974 static inline Type zig_msvc_atomic_load_zig_memory_order_acquire_##ZigType(Type volatile* obj) { \
3975 Type val = __iso_volatile_load##iso_suffix((SigType volatile*)obj); \
3976 _ReadWriteBarrier(); \
3977 return val; \
3978 } \
3979 static inline Type zig_msvc_atomic_load_zig_memory_order_seq_cst_##ZigType(Type volatile* obj) { \
3980 Type val = __iso_volatile_load##iso_suffix((SigType volatile*)obj); \
3981 _ReadWriteBarrier(); \
3982 return val; \
3983 }
3984
3985zig_msvc_atomics( u8, uint8_t, char, 8, 8)
3986zig_msvc_atomics( i8, int8_t, char, 8, 8)
3987zig_msvc_atomics(u16, uint16_t, short, 16, 16)
3988zig_msvc_atomics(i16, int16_t, short, 16, 16)
3989zig_msvc_atomics(u32, uint32_t, long, , 32)
3990zig_msvc_atomics(i32, int32_t, long, , 32)
3991
3992#if defined(zig_x86_64)
3993zig_msvc_atomics(u64, uint64_t, __int64, 64, 64)
3994zig_msvc_atomics(i64, int64_t, __int64, 64, 64)
3995#endif
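
/* The instantiations above cover 8/16/32-bit integers on any MSVC x86 target
   and 64-bit integers only on x86_64; sub, nand, min and max have no matching
   _Interlocked intrinsic, so they are emulated with a compare-exchange retry
   loop on top of zig_msvc_cmpxchg_*. */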
3996
3997#define zig_msvc_flt_atomics(Type, SigType, suffix, iso_suffix) \
3998 static inline bool zig_msvc_cmpxchg_##Type(zig_##Type volatile* obj, zig_##Type* expected, zig_##Type desired) { \
3999 SigType exchange; \
4000 SigType comparand; \
4001 SigType initial; \
4002 bool success; \
4003 memcpy(&comparand, expected, sizeof(comparand)); \
4004 memcpy(&exchange, &desired, sizeof(exchange)); \
4005 initial = _InterlockedCompareExchange##suffix((SigType volatile*)obj, exchange, comparand); \
4006 success = initial == comparand; \
4007 if (!success) memcpy(expected, &initial, sizeof(*expected)); \
4008 return success; \
4009 } \
4010 static inline void zig_msvc_atomic_store_##Type(zig_##Type volatile* obj, zig_##Type arg) { \
4011 SigType value; \
4012 memcpy(&value, &arg, sizeof(value)); \
4013 (void)_InterlockedExchange##suffix((SigType volatile*)obj, value); \
4014 } \
4015 static inline zig_##Type zig_msvc_atomic_load_zig_memory_order_relaxed_##Type(zig_##Type volatile* obj) { \
4016 zig_##Type result; \
4017 SigType initial = __iso_volatile_load##iso_suffix((SigType volatile*)obj); \
4018 memcpy(&result, &initial, sizeof(result)); \
4019 return result; \
4020 } \
4021 static inline zig_##Type zig_msvc_atomic_load_zig_memory_order_acquire_##Type(zig_##Type volatile* obj) { \
4022 zig_##Type result; \
4023 SigType initial = __iso_volatile_load##iso_suffix((SigType volatile*)obj); \
4024 _ReadWriteBarrier(); \
4025 memcpy(&result, &initial, sizeof(result)); \
4026 return result; \
4027 } \
4028 static inline zig_##Type zig_msvc_atomic_load_zig_memory_order_seq_cst_##Type(zig_##Type volatile* obj) { \
4029 zig_##Type result; \
4030 SigType initial = __iso_volatile_load##iso_suffix((SigType volatile*)obj); \
4031 _ReadWriteBarrier(); \
4032 memcpy(&result, &initial, sizeof(result)); \
4033 return result; \
4034 }
4035
4036zig_msvc_flt_atomics(f32, long, , 32)
4037#if defined(zig_x86_64)
4038zig_msvc_flt_atomics(f64, int64_t, 64, 64)
4039#endif
4040
4041#if defined(zig_x86_32)
4042static inline void zig_msvc_atomic_barrier(void) {
4043 int32_t barrier;
4044 __asm {
4045 xchg barrier, eax
4046 }
4047}
4048
4049static inline void* zig_msvc_atomicrmw_xchg_p32(void volatile* obj, void* arg) {
4050 return _InterlockedExchangePointer(obj, arg);
4051}
4052
4053static inline void zig_msvc_atomic_store_p32(void volatile* obj, void* arg) {
4054 (void)_InterlockedExchangePointer(obj, arg);
4055}
4056
4057static inline void* zig_msvc_atomic_load_zig_memory_order_relaxed_p32(void volatile* obj) {
4058 return (void*)__iso_volatile_load32(obj);
4059}
4060
4061static inline void* zig_msvc_atomic_load_zig_memory_order_acquire_p32(void volatile* obj) {
4062 void* val = (void*)__iso_volatile_load32(obj);
4063 _ReadWriteBarrier();
4064 return val;
4065}
4066
4067static inline void* zig_msvc_atomic_load_zig_memory_order_seq_cst_p32(void volatile* obj) {
4068 return zig_msvc_atomic_load_zig_memory_order_acquire_p32(obj);
4069}
4070
4071static inline bool zig_msvc_cmpxchg_p32(void volatile* obj, void* expected, void* desired) {
4072 void* comparand = *(void**)expected;
4073 void* initial = _InterlockedCompareExchangePointer(obj, desired, comparand);
4074 bool success = initial == comparand;
4075 if (!success) *(void**)expected = initial;
4076 return success;
4077}
4078#else /* zig_x86_32 */
4079static inline void* zig_msvc_atomicrmw_xchg_p64(void volatile* obj, void* arg) {
4080 return _InterlockedExchangePointer(obj, arg);
4081}
4082
4083static inline void zig_msvc_atomic_store_p64(void volatile* obj, void* arg) {
4084 (void)_InterlockedExchangePointer(obj, arg);
4085}
4086
4087static inline void* zig_msvc_atomic_load_zig_memory_order_relaxed_p64(void volatile* obj) {
4088 return (void*)__iso_volatile_load64(obj);
4089}
4090
4091static inline void* zig_msvc_atomic_load_zig_memory_order_acquire_p64(void volatile* obj) {
4092 void* val = (void*)__iso_volatile_load64(obj);
4093 _ReadWriteBarrier();
4094 return val;
4095}
4096
4097static inline void* zig_msvc_atomic_load_zig_memory_order_seq_cst_p64(void volatile* obj) {
4098 return zig_msvc_atomic_load_zig_memory_order_acquire_p64(obj);
4099}
4100
4101static inline bool zig_msvc_cmpxchg_p64(void volatile* obj, void* expected, void* desired) {
4102 void* comparand = *(void**)expected;
4103 void* initial = _InterlockedCompareExchangePointer(obj, desired, comparand);
4104 bool success = initial == comparand;
4105 if (!success) *(void**)expected = initial;
4106 return success;
4107}
4108
4109static inline bool zig_msvc_cmpxchg_u128(zig_u128 volatile* obj, zig_u128* expected, zig_u128 desired) {
4110 return _InterlockedCompareExchange128((__int64 volatile*)obj, (__int64)zig_hi_u128(desired), (__int64)zig_lo_u128(desired), (__int64*)expected);
4111}
4112
4113static inline zig_u128 zig_msvc_atomic_load_u128(zig_u128 volatile* obj) {
4114 zig_u128 expected = zig_make_u128(UINT64_C(0), UINT64_C(0));
4115 (void)zig_cmpxchg_strong(obj, expected, expected, zig_memory_order_seq_cst, zig_memory_order_seq_cst, u128, zig_u128);
4116 return expected;
4117}
4118
4119static inline void zig_msvc_atomic_store_u128(zig_u128 volatile* obj, zig_u128 arg) {
4120 zig_u128 expected = zig_make_u128(UINT64_C(0), UINT64_C(0));
4121 while (!zig_cmpxchg_weak(obj, expected, arg, zig_memory_order_seq_cst, zig_memory_order_seq_cst, u128, zig_u128));
4122}
4123
4124static inline bool zig_msvc_cmpxchg_i128(zig_i128 volatile* obj, zig_i128* expected, zig_i128 desired) {
4125 return _InterlockedCompareExchange128((__int64 volatile*)obj, (__int64)zig_hi_i128(desired), (__int64)zig_lo_i128(desired), (__int64*)expected);
4126}
4127
4128static inline zig_i128 zig_msvc_atomic_load_i128(zig_i128 volatile* obj) {
4129 zig_i128 expected = zig_make_i128(INT64_C(0), UINT64_C(0));
4130 (void)zig_cmpxchg_strong(obj, expected, expected, zig_memory_order_seq_cst, zig_memory_order_seq_cst, i128, zig_i128);
4131 return expected;
4132}
4133
4134static inline void zig_msvc_atomic_store_i128(zig_i128 volatile* obj, zig_i128 arg) {
4135 zig_i128 expected = zig_make_i128(INT64_C(0), UINT64_C(0));
4136 while (!zig_cmpxchg_weak(obj, expected, arg, zig_memory_order_seq_cst, zig_memory_order_seq_cst, i128, zig_i128));
4137}
4138
4139#endif /* zig_x86_32 */
4140
4141#endif /* !zig_c11_atomics && zig_msvc && zig_x86 */
4142
4143/* ======================== Special Case Intrinsics ========================= */
4144
4145#if defined(zig_msvc)
4146#include <intrin.h>
4147#endif
4148
4149#if defined(zig_thumb)
4150
4151static inline void* zig_thumb_windows_teb(void) {
4152 void* teb = 0;
4153#if defined(zig_msvc)
4154 teb = (void*)_MoveFromCoprocessor(15, 0, 13, 0, 2);
4155#elif defined(zig_gnuc_asm)
4156 __asm__ ("mrc p15, 0, %[ptr], c13, c0, 2" : [ptr] "=r" (teb));
4157#endif
4158 return teb;
4159}
4160
4161#elif defined(zig_aarch64)
4162
4163static inline void* zig_aarch64_windows_teb(void) {
4164 void* teb = 0;
4165#if defined(zig_msvc)
4166 teb = (void*)__readx18qword(0x0);
4167#elif defined(zig_gnuc_asm)
4168 __asm__ ("mov %[ptr], x18" : [ptr] "=r" (teb));
4169#endif
4170 return teb;
4171}
4172
4173#elif defined(zig_x86_32)
4174
4175static inline void* zig_x86_windows_teb(void) {
4176 void* teb = 0;
4177#if defined(zig_msvc)
4178 teb = (void*)__readfsdword(0x18);
4179#elif defined(zig_gnuc_asm)
4180 __asm__ ("movl %%fs:0x18, %[ptr]" : [ptr] "=r" (teb));
4181#endif
4182 return teb;
4183}
4184
4185#elif defined(zig_x86_64)
4186
4187static inline void* zig_x86_64_windows_teb(void) {
4188 void* teb = 0;
4189#if defined(zig_msvc)
4190 teb = (void*)__readgsqword(0x30);
4191#elif defined(zig_gnuc_asm)
4192 __asm__ ("movq %%gs:0x30, %[ptr]" : [ptr] "=r" (teb));
4193#endif
4194 return teb;
4195}
4196
4197#endif
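
/* The zig_*_windows_teb helpers above return the Windows Thread Environment
   Block pointer through the platform-specific mechanism: a CP15 coprocessor
   read on Thumb, the x18 register on AArch64, fs:0x18 on 32-bit x86 and
   gs:0x30 on x86_64. */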
4198
4199#if defined(zig_loongarch)
4200
4201static inline void zig_loongarch_cpucfg(uint32_t word, uint32_t* result) {
4202#if defined(zig_gnuc_asm)
4203 __asm__("cpucfg %[result], %[word]" : [result] "=r" (result) : [word] "r" (word));
4204#else
4205 *result = 0;
4206#endif
4207}
4208
4209#elif defined(zig_x86) && !defined(zig_x86_16)
4210
4211static inline void zig_x86_cpuid(uint32_t leaf_id, uint32_t subid, uint32_t* eax, uint32_t* ebx, uint32_t* ecx, uint32_t* edx) {
4212#if defined(zig_msvc)
4213 int cpu_info[4];
4214 __cpuidex(cpu_info, leaf_id, subid);
4215 *eax = (uint32_t)cpu_info[0];
4216 *ebx = (uint32_t)cpu_info[1];
4217 *ecx = (uint32_t)cpu_info[2];
4218 *edx = (uint32_t)cpu_info[3];
4219#elif defined(zig_gnuc_asm)
4220 __asm__("cpuid" : "=a" (*eax), "=b" (*ebx), "=c" (*ecx), "=d" (*edx) : "a" (leaf_id), "c" (subid));
4221#else
4222 *eax = 0;
4223 *ebx = 0;
4224 *ecx = 0;
4225 *edx = 0;
4226#endif
4227}
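
/* Example (illustrative) use of zig_x86_cpuid: leaf 0 reports the highest
   supported basic leaf in eax and the vendor string across ebx/edx/ecx.

       uint32_t eax, ebx, ecx, edx;
       zig_x86_cpuid(0, 0, &eax, &ebx, &ecx, &edx);
*/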
4228
4229static inline uint32_t zig_x86_get_xcr0(void) {
4230#if defined(zig_msvc)
4231 return (uint32_t)_xgetbv(0);
4232#elif defined(zig_gnuc_asm)
4233 uint32_t eax;
4234 uint32_t edx;
4235 __asm__("xgetbv" : "=a" (eax), "=d" (edx) : "c" (0));
4236 return eax;
4237#else
4238 return 0;
4242#endif
4243}
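
/* XCR0 reports which register state the OS has enabled (via XSETBV); callers
   typically check bit 1 (SSE) and bit 2 (AVX) of the returned value before
   trusting the corresponding CPUID feature flags. */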
4244
4245#endif