//! This file is generated by tools/gen_outline_atomics.zig.
2const builtin = @import("builtin");
3const std = @import("std");
4const common = @import("common.zig");
5const always_has_lse = builtin.cpu.has(.aarch64, .lse);
6
7/// This default is overridden at runtime after inspecting CPU properties.
8/// It is intentionally not exported in order to make the machine code that
9/// uses it a statically predicted direct branch rather than using the PLT,
10/// which ARM is concerned would have too much overhead.
11var __aarch64_have_lse_atomics: u8 = @intFromBool(always_has_lse);
12
13fn __aarch64_cas1_relax() align(16) callconv(.naked) void {
14 @setRuntimeSafety(false);
15 asm volatile (
16 \\ cbz w16, 8f
17 \\ .inst 0x08a07c41 + 0x00000000 + 0x000000
18 \\ ret
19 \\8:
20 \\ uxtb w16, w0
21 \\0:
22 \\ ldxrb w0, [x2]
23 \\ cmp w0, w16
24 \\ bne 1f
25 \\ stxrb w17, w1, [x2]
26 \\ cbnz w17, 0b
27 \\1:
28 \\ ret
29 :
30 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
31 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
32 unreachable;
33}
34fn __aarch64_swp1_relax() align(16) callconv(.naked) void {
35 @setRuntimeSafety(false);
36 asm volatile (
37 \\ cbz w16, 8f
38 \\ .inst 0x38208020 + 0x00000000 + 0x000000
39 \\ ret
40 \\8:
41 \\ mov w16, w0
42 \\0:
43 \\ ldxrb w0, [x1]
44 \\ stxrb w17, w16, [x1]
45 \\ cbnz w17, 0b
46 \\1:
47 \\ ret
48 :
49 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
50 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
51 unreachable;
52}
53fn __aarch64_ldadd1_relax() align(16) callconv(.naked) void {
54 @setRuntimeSafety(false);
55 asm volatile (
56 \\ cbz w16, 8f
57 \\ .inst 0x38200020 + 0x0000 + 0x00000000 + 0x000000
58 \\ ret
59 \\8:
60 \\ mov w16, w0
61 \\0:
62 \\ ldxrb w0, [x1]
63 \\ add w17, w0, w16
64 \\ stxrb w15, w17, [x1]
65 \\ cbnz w15, 0b
66 \\1:
67 \\ ret
68 :
69 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
70 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
71 unreachable;
72}
73fn __aarch64_ldclr1_relax() align(16) callconv(.naked) void {
74 @setRuntimeSafety(false);
75 asm volatile (
76 \\ cbz w16, 8f
77 \\ .inst 0x38200020 + 0x1000 + 0x00000000 + 0x000000
78 \\ ret
79 \\8:
80 \\ mov w16, w0
81 \\0:
82 \\ ldxrb w0, [x1]
83 \\ bic w17, w0, w16
84 \\ stxrb w15, w17, [x1]
85 \\ cbnz w15, 0b
86 \\1:
87 \\ ret
88 :
89 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
90 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
91 unreachable;
92}
93fn __aarch64_ldeor1_relax() align(16) callconv(.naked) void {
94 @setRuntimeSafety(false);
95 asm volatile (
96 \\ cbz w16, 8f
97 \\ .inst 0x38200020 + 0x2000 + 0x00000000 + 0x000000
98 \\ ret
99 \\8:
100 \\ mov w16, w0
101 \\0:
102 \\ ldxrb w0, [x1]
103 \\ eor w17, w0, w16
104 \\ stxrb w15, w17, [x1]
105 \\ cbnz w15, 0b
106 \\1:
107 \\ ret
108 :
109 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
110 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
111 unreachable;
112}
113fn __aarch64_ldset1_relax() align(16) callconv(.naked) void {
114 @setRuntimeSafety(false);
115 asm volatile (
116 \\ cbz w16, 8f
117 \\ .inst 0x38200020 + 0x3000 + 0x00000000 + 0x000000
118 \\ ret
119 \\8:
120 \\ mov w16, w0
121 \\0:
122 \\ ldxrb w0, [x1]
123 \\ orr w17, w0, w16
124 \\ stxrb w15, w17, [x1]
125 \\ cbnz w15, 0b
126 \\1:
127 \\ ret
128 :
129 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
130 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
131 unreachable;
132}
133fn __aarch64_cas1_acq() align(16) callconv(.naked) void {
134 @setRuntimeSafety(false);
135 asm volatile (
136 \\ cbz w16, 8f
137 \\ .inst 0x08a07c41 + 0x00000000 + 0x400000
138 \\ ret
139 \\8:
140 \\ uxtb w16, w0
141 \\0:
142 \\ ldaxrb w0, [x2]
143 \\ cmp w0, w16
144 \\ bne 1f
145 \\ stxrb w17, w1, [x2]
146 \\ cbnz w17, 0b
147 \\1:
148 \\ ret
149 :
150 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
151 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
152 unreachable;
153}
154fn __aarch64_swp1_acq() align(16) callconv(.naked) void {
155 @setRuntimeSafety(false);
156 asm volatile (
157 \\ cbz w16, 8f
158 \\ .inst 0x38208020 + 0x00000000 + 0x800000
159 \\ ret
160 \\8:
161 \\ mov w16, w0
162 \\0:
163 \\ ldaxrb w0, [x1]
164 \\ stxrb w17, w16, [x1]
165 \\ cbnz w17, 0b
166 \\1:
167 \\ ret
168 :
169 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
170 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
171 unreachable;
172}
173fn __aarch64_ldadd1_acq() align(16) callconv(.naked) void {
174 @setRuntimeSafety(false);
175 asm volatile (
176 \\ cbz w16, 8f
177 \\ .inst 0x38200020 + 0x0000 + 0x00000000 + 0x800000
178 \\ ret
179 \\8:
180 \\ mov w16, w0
181 \\0:
182 \\ ldaxrb w0, [x1]
183 \\ add w17, w0, w16
184 \\ stxrb w15, w17, [x1]
185 \\ cbnz w15, 0b
186 \\1:
187 \\ ret
188 :
189 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
190 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
191 unreachable;
192}
193fn __aarch64_ldclr1_acq() align(16) callconv(.naked) void {
194 @setRuntimeSafety(false);
195 asm volatile (
196 \\ cbz w16, 8f
197 \\ .inst 0x38200020 + 0x1000 + 0x00000000 + 0x800000
198 \\ ret
199 \\8:
200 \\ mov w16, w0
201 \\0:
202 \\ ldaxrb w0, [x1]
203 \\ bic w17, w0, w16
204 \\ stxrb w15, w17, [x1]
205 \\ cbnz w15, 0b
206 \\1:
207 \\ ret
208 :
209 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
210 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
211 unreachable;
212}
213fn __aarch64_ldeor1_acq() align(16) callconv(.naked) void {
214 @setRuntimeSafety(false);
215 asm volatile (
216 \\ cbz w16, 8f
217 \\ .inst 0x38200020 + 0x2000 + 0x00000000 + 0x800000
218 \\ ret
219 \\8:
220 \\ mov w16, w0
221 \\0:
222 \\ ldaxrb w0, [x1]
223 \\ eor w17, w0, w16
224 \\ stxrb w15, w17, [x1]
225 \\ cbnz w15, 0b
226 \\1:
227 \\ ret
228 :
229 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
230 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
231 unreachable;
232}
233fn __aarch64_ldset1_acq() align(16) callconv(.naked) void {
234 @setRuntimeSafety(false);
235 asm volatile (
236 \\ cbz w16, 8f
237 \\ .inst 0x38200020 + 0x3000 + 0x00000000 + 0x800000
238 \\ ret
239 \\8:
240 \\ mov w16, w0
241 \\0:
242 \\ ldaxrb w0, [x1]
243 \\ orr w17, w0, w16
244 \\ stxrb w15, w17, [x1]
245 \\ cbnz w15, 0b
246 \\1:
247 \\ ret
248 :
249 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
250 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
251 unreachable;
252}
253fn __aarch64_cas1_rel() align(16) callconv(.naked) void {
254 @setRuntimeSafety(false);
255 asm volatile (
256 \\ cbz w16, 8f
257 \\ .inst 0x08a07c41 + 0x00000000 + 0x008000
258 \\ ret
259 \\8:
260 \\ uxtb w16, w0
261 \\0:
262 \\ ldxrb w0, [x2]
263 \\ cmp w0, w16
264 \\ bne 1f
265 \\ stlxrb w17, w1, [x2]
266 \\ cbnz w17, 0b
267 \\1:
268 \\ ret
269 :
270 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
271 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
272 unreachable;
273}
274fn __aarch64_swp1_rel() align(16) callconv(.naked) void {
275 @setRuntimeSafety(false);
276 asm volatile (
277 \\ cbz w16, 8f
278 \\ .inst 0x38208020 + 0x00000000 + 0x400000
279 \\ ret
280 \\8:
281 \\ mov w16, w0
282 \\0:
283 \\ ldxrb w0, [x1]
284 \\ stlxrb w17, w16, [x1]
285 \\ cbnz w17, 0b
286 \\1:
287 \\ ret
288 :
289 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
290 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
291 unreachable;
292}
293fn __aarch64_ldadd1_rel() align(16) callconv(.naked) void {
294 @setRuntimeSafety(false);
295 asm volatile (
296 \\ cbz w16, 8f
297 \\ .inst 0x38200020 + 0x0000 + 0x00000000 + 0x400000
298 \\ ret
299 \\8:
300 \\ mov w16, w0
301 \\0:
302 \\ ldxrb w0, [x1]
303 \\ add w17, w0, w16
304 \\ stlxrb w15, w17, [x1]
305 \\ cbnz w15, 0b
306 \\1:
307 \\ ret
308 :
309 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
310 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
311 unreachable;
312}
313fn __aarch64_ldclr1_rel() align(16) callconv(.naked) void {
314 @setRuntimeSafety(false);
315 asm volatile (
316 \\ cbz w16, 8f
317 \\ .inst 0x38200020 + 0x1000 + 0x00000000 + 0x400000
318 \\ ret
319 \\8:
320 \\ mov w16, w0
321 \\0:
322 \\ ldxrb w0, [x1]
323 \\ bic w17, w0, w16
324 \\ stlxrb w15, w17, [x1]
325 \\ cbnz w15, 0b
326 \\1:
327 \\ ret
328 :
329 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
330 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
331 unreachable;
332}
333fn __aarch64_ldeor1_rel() align(16) callconv(.naked) void {
334 @setRuntimeSafety(false);
335 asm volatile (
336 \\ cbz w16, 8f
337 \\ .inst 0x38200020 + 0x2000 + 0x00000000 + 0x400000
338 \\ ret
339 \\8:
340 \\ mov w16, w0
341 \\0:
342 \\ ldxrb w0, [x1]
343 \\ eor w17, w0, w16
344 \\ stlxrb w15, w17, [x1]
345 \\ cbnz w15, 0b
346 \\1:
347 \\ ret
348 :
349 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
350 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
351 unreachable;
352}
353fn __aarch64_ldset1_rel() align(16) callconv(.naked) void {
354 @setRuntimeSafety(false);
355 asm volatile (
356 \\ cbz w16, 8f
357 \\ .inst 0x38200020 + 0x3000 + 0x00000000 + 0x400000
358 \\ ret
359 \\8:
360 \\ mov w16, w0
361 \\0:
362 \\ ldxrb w0, [x1]
363 \\ orr w17, w0, w16
364 \\ stlxrb w15, w17, [x1]
365 \\ cbnz w15, 0b
366 \\1:
367 \\ ret
368 :
369 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
370 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
371 unreachable;
372}
373fn __aarch64_cas1_acq_rel() align(16) callconv(.naked) void {
374 @setRuntimeSafety(false);
375 asm volatile (
376 \\ cbz w16, 8f
377 \\ .inst 0x08a07c41 + 0x00000000 + 0x408000
378 \\ ret
379 \\8:
380 \\ uxtb w16, w0
381 \\0:
382 \\ ldaxrb w0, [x2]
383 \\ cmp w0, w16
384 \\ bne 1f
385 \\ stlxrb w17, w1, [x2]
386 \\ cbnz w17, 0b
387 \\1:
388 \\ ret
389 :
390 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
391 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
392 unreachable;
393}
394fn __aarch64_swp1_acq_rel() align(16) callconv(.naked) void {
395 @setRuntimeSafety(false);
396 asm volatile (
397 \\ cbz w16, 8f
398 \\ .inst 0x38208020 + 0x00000000 + 0xc00000
399 \\ ret
400 \\8:
401 \\ mov w16, w0
402 \\0:
403 \\ ldaxrb w0, [x1]
404 \\ stlxrb w17, w16, [x1]
405 \\ cbnz w17, 0b
406 \\1:
407 \\ ret
408 :
409 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
410 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
411 unreachable;
412}
413fn __aarch64_ldadd1_acq_rel() align(16) callconv(.naked) void {
414 @setRuntimeSafety(false);
415 asm volatile (
416 \\ cbz w16, 8f
417 \\ .inst 0x38200020 + 0x0000 + 0x00000000 + 0xc00000
418 \\ ret
419 \\8:
420 \\ mov w16, w0
421 \\0:
422 \\ ldaxrb w0, [x1]
423 \\ add w17, w0, w16
424 \\ stlxrb w15, w17, [x1]
425 \\ cbnz w15, 0b
426 \\1:
427 \\ ret
428 :
429 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
430 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
431 unreachable;
432}
433fn __aarch64_ldclr1_acq_rel() align(16) callconv(.naked) void {
434 @setRuntimeSafety(false);
435 asm volatile (
436 \\ cbz w16, 8f
437 \\ .inst 0x38200020 + 0x1000 + 0x00000000 + 0xc00000
438 \\ ret
439 \\8:
440 \\ mov w16, w0
441 \\0:
442 \\ ldaxrb w0, [x1]
443 \\ bic w17, w0, w16
444 \\ stlxrb w15, w17, [x1]
445 \\ cbnz w15, 0b
446 \\1:
447 \\ ret
448 :
449 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
450 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
451 unreachable;
452}
453fn __aarch64_ldeor1_acq_rel() align(16) callconv(.naked) void {
454 @setRuntimeSafety(false);
455 asm volatile (
456 \\ cbz w16, 8f
457 \\ .inst 0x38200020 + 0x2000 + 0x00000000 + 0xc00000
458 \\ ret
459 \\8:
460 \\ mov w16, w0
461 \\0:
462 \\ ldaxrb w0, [x1]
463 \\ eor w17, w0, w16
464 \\ stlxrb w15, w17, [x1]
465 \\ cbnz w15, 0b
466 \\1:
467 \\ ret
468 :
469 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
470 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
471 unreachable;
472}
473fn __aarch64_ldset1_acq_rel() align(16) callconv(.naked) void {
474 @setRuntimeSafety(false);
475 asm volatile (
476 \\ cbz w16, 8f
477 \\ .inst 0x38200020 + 0x3000 + 0x00000000 + 0xc00000
478 \\ ret
479 \\8:
480 \\ mov w16, w0
481 \\0:
482 \\ ldaxrb w0, [x1]
483 \\ orr w17, w0, w16
484 \\ stlxrb w15, w17, [x1]
485 \\ cbnz w15, 0b
486 \\1:
487 \\ ret
488 :
489 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
490 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
491 unreachable;
492}
493fn __aarch64_cas2_relax() align(16) callconv(.naked) void {
494 @setRuntimeSafety(false);
495 asm volatile (
496 \\ cbz w16, 8f
497 \\ .inst 0x08a07c41 + 0x40000000 + 0x000000
498 \\ ret
499 \\8:
500 \\ uxth w16, w0
501 \\0:
502 \\ ldxrh w0, [x2]
503 \\ cmp w0, w16
504 \\ bne 1f
505 \\ stxrh w17, w1, [x2]
506 \\ cbnz w17, 0b
507 \\1:
508 \\ ret
509 :
510 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
511 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
512 unreachable;
513}
514fn __aarch64_swp2_relax() align(16) callconv(.naked) void {
515 @setRuntimeSafety(false);
516 asm volatile (
517 \\ cbz w16, 8f
518 \\ .inst 0x38208020 + 0x40000000 + 0x000000
519 \\ ret
520 \\8:
521 \\ mov w16, w0
522 \\0:
523 \\ ldxrh w0, [x1]
524 \\ stxrh w17, w16, [x1]
525 \\ cbnz w17, 0b
526 \\1:
527 \\ ret
528 :
529 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
530 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
531 unreachable;
532}
533fn __aarch64_ldadd2_relax() align(16) callconv(.naked) void {
534 @setRuntimeSafety(false);
535 asm volatile (
536 \\ cbz w16, 8f
537 \\ .inst 0x38200020 + 0x0000 + 0x40000000 + 0x000000
538 \\ ret
539 \\8:
540 \\ mov w16, w0
541 \\0:
542 \\ ldxrh w0, [x1]
543 \\ add w17, w0, w16
544 \\ stxrh w15, w17, [x1]
545 \\ cbnz w15, 0b
546 \\1:
547 \\ ret
548 :
549 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
550 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
551 unreachable;
552}
553fn __aarch64_ldclr2_relax() align(16) callconv(.naked) void {
554 @setRuntimeSafety(false);
555 asm volatile (
556 \\ cbz w16, 8f
557 \\ .inst 0x38200020 + 0x1000 + 0x40000000 + 0x000000
558 \\ ret
559 \\8:
560 \\ mov w16, w0
561 \\0:
562 \\ ldxrh w0, [x1]
563 \\ bic w17, w0, w16
564 \\ stxrh w15, w17, [x1]
565 \\ cbnz w15, 0b
566 \\1:
567 \\ ret
568 :
569 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
570 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
571 unreachable;
572}
573fn __aarch64_ldeor2_relax() align(16) callconv(.naked) void {
574 @setRuntimeSafety(false);
575 asm volatile (
576 \\ cbz w16, 8f
577 \\ .inst 0x38200020 + 0x2000 + 0x40000000 + 0x000000
578 \\ ret
579 \\8:
580 \\ mov w16, w0
581 \\0:
582 \\ ldxrh w0, [x1]
583 \\ eor w17, w0, w16
584 \\ stxrh w15, w17, [x1]
585 \\ cbnz w15, 0b
586 \\1:
587 \\ ret
588 :
589 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
590 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
591 unreachable;
592}
593fn __aarch64_ldset2_relax() align(16) callconv(.naked) void {
594 @setRuntimeSafety(false);
595 asm volatile (
596 \\ cbz w16, 8f
597 \\ .inst 0x38200020 + 0x3000 + 0x40000000 + 0x000000
598 \\ ret
599 \\8:
600 \\ mov w16, w0
601 \\0:
602 \\ ldxrh w0, [x1]
603 \\ orr w17, w0, w16
604 \\ stxrh w15, w17, [x1]
605 \\ cbnz w15, 0b
606 \\1:
607 \\ ret
608 :
609 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
610 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
611 unreachable;
612}
613fn __aarch64_cas2_acq() align(16) callconv(.naked) void {
614 @setRuntimeSafety(false);
615 asm volatile (
616 \\ cbz w16, 8f
617 \\ .inst 0x08a07c41 + 0x40000000 + 0x400000
618 \\ ret
619 \\8:
620 \\ uxth w16, w0
621 \\0:
622 \\ ldaxrh w0, [x2]
623 \\ cmp w0, w16
624 \\ bne 1f
625 \\ stxrh w17, w1, [x2]
626 \\ cbnz w17, 0b
627 \\1:
628 \\ ret
629 :
630 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
631 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
632 unreachable;
633}
634fn __aarch64_swp2_acq() align(16) callconv(.naked) void {
635 @setRuntimeSafety(false);
636 asm volatile (
637 \\ cbz w16, 8f
638 \\ .inst 0x38208020 + 0x40000000 + 0x800000
639 \\ ret
640 \\8:
641 \\ mov w16, w0
642 \\0:
643 \\ ldaxrh w0, [x1]
644 \\ stxrh w17, w16, [x1]
645 \\ cbnz w17, 0b
646 \\1:
647 \\ ret
648 :
649 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
650 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
651 unreachable;
652}
653fn __aarch64_ldadd2_acq() align(16) callconv(.naked) void {
654 @setRuntimeSafety(false);
655 asm volatile (
656 \\ cbz w16, 8f
657 \\ .inst 0x38200020 + 0x0000 + 0x40000000 + 0x800000
658 \\ ret
659 \\8:
660 \\ mov w16, w0
661 \\0:
662 \\ ldaxrh w0, [x1]
663 \\ add w17, w0, w16
664 \\ stxrh w15, w17, [x1]
665 \\ cbnz w15, 0b
666 \\1:
667 \\ ret
668 :
669 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
670 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
671 unreachable;
672}
673fn __aarch64_ldclr2_acq() align(16) callconv(.naked) void {
674 @setRuntimeSafety(false);
675 asm volatile (
676 \\ cbz w16, 8f
677 \\ .inst 0x38200020 + 0x1000 + 0x40000000 + 0x800000
678 \\ ret
679 \\8:
680 \\ mov w16, w0
681 \\0:
682 \\ ldaxrh w0, [x1]
683 \\ bic w17, w0, w16
684 \\ stxrh w15, w17, [x1]
685 \\ cbnz w15, 0b
686 \\1:
687 \\ ret
688 :
689 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
690 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
691 unreachable;
692}
693fn __aarch64_ldeor2_acq() align(16) callconv(.naked) void {
694 @setRuntimeSafety(false);
695 asm volatile (
696 \\ cbz w16, 8f
697 \\ .inst 0x38200020 + 0x2000 + 0x40000000 + 0x800000
698 \\ ret
699 \\8:
700 \\ mov w16, w0
701 \\0:
702 \\ ldaxrh w0, [x1]
703 \\ eor w17, w0, w16
704 \\ stxrh w15, w17, [x1]
705 \\ cbnz w15, 0b
706 \\1:
707 \\ ret
708 :
709 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
710 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
711 unreachable;
712}
713fn __aarch64_ldset2_acq() align(16) callconv(.naked) void {
714 @setRuntimeSafety(false);
715 asm volatile (
716 \\ cbz w16, 8f
717 \\ .inst 0x38200020 + 0x3000 + 0x40000000 + 0x800000
718 \\ ret
719 \\8:
720 \\ mov w16, w0
721 \\0:
722 \\ ldaxrh w0, [x1]
723 \\ orr w17, w0, w16
724 \\ stxrh w15, w17, [x1]
725 \\ cbnz w15, 0b
726 \\1:
727 \\ ret
728 :
729 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
730 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
731 unreachable;
732}
733fn __aarch64_cas2_rel() align(16) callconv(.naked) void {
734 @setRuntimeSafety(false);
735 asm volatile (
736 \\ cbz w16, 8f
737 \\ .inst 0x08a07c41 + 0x40000000 + 0x008000
738 \\ ret
739 \\8:
740 \\ uxth w16, w0
741 \\0:
742 \\ ldxrh w0, [x2]
743 \\ cmp w0, w16
744 \\ bne 1f
745 \\ stlxrh w17, w1, [x2]
746 \\ cbnz w17, 0b
747 \\1:
748 \\ ret
749 :
750 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
751 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
752 unreachable;
753}
754fn __aarch64_swp2_rel() align(16) callconv(.naked) void {
755 @setRuntimeSafety(false);
756 asm volatile (
757 \\ cbz w16, 8f
758 \\ .inst 0x38208020 + 0x40000000 + 0x400000
759 \\ ret
760 \\8:
761 \\ mov w16, w0
762 \\0:
763 \\ ldxrh w0, [x1]
764 \\ stlxrh w17, w16, [x1]
765 \\ cbnz w17, 0b
766 \\1:
767 \\ ret
768 :
769 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
770 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
771 unreachable;
772}
773fn __aarch64_ldadd2_rel() align(16) callconv(.naked) void {
774 @setRuntimeSafety(false);
775 asm volatile (
776 \\ cbz w16, 8f
777 \\ .inst 0x38200020 + 0x0000 + 0x40000000 + 0x400000
778 \\ ret
779 \\8:
780 \\ mov w16, w0
781 \\0:
782 \\ ldxrh w0, [x1]
783 \\ add w17, w0, w16
784 \\ stlxrh w15, w17, [x1]
785 \\ cbnz w15, 0b
786 \\1:
787 \\ ret
788 :
789 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
790 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
791 unreachable;
792}
793fn __aarch64_ldclr2_rel() align(16) callconv(.naked) void {
794 @setRuntimeSafety(false);
795 asm volatile (
796 \\ cbz w16, 8f
797 \\ .inst 0x38200020 + 0x1000 + 0x40000000 + 0x400000
798 \\ ret
799 \\8:
800 \\ mov w16, w0
801 \\0:
802 \\ ldxrh w0, [x1]
803 \\ bic w17, w0, w16
804 \\ stlxrh w15, w17, [x1]
805 \\ cbnz w15, 0b
806 \\1:
807 \\ ret
808 :
809 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
810 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
811 unreachable;
812}
813fn __aarch64_ldeor2_rel() align(16) callconv(.naked) void {
814 @setRuntimeSafety(false);
815 asm volatile (
816 \\ cbz w16, 8f
817 \\ .inst 0x38200020 + 0x2000 + 0x40000000 + 0x400000
818 \\ ret
819 \\8:
820 \\ mov w16, w0
821 \\0:
822 \\ ldxrh w0, [x1]
823 \\ eor w17, w0, w16
824 \\ stlxrh w15, w17, [x1]
825 \\ cbnz w15, 0b
826 \\1:
827 \\ ret
828 :
829 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
830 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
831 unreachable;
832}
833fn __aarch64_ldset2_rel() align(16) callconv(.naked) void {
834 @setRuntimeSafety(false);
835 asm volatile (
836 \\ cbz w16, 8f
837 \\ .inst 0x38200020 + 0x3000 + 0x40000000 + 0x400000
838 \\ ret
839 \\8:
840 \\ mov w16, w0
841 \\0:
842 \\ ldxrh w0, [x1]
843 \\ orr w17, w0, w16
844 \\ stlxrh w15, w17, [x1]
845 \\ cbnz w15, 0b
846 \\1:
847 \\ ret
848 :
849 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
850 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
851 unreachable;
852}
853fn __aarch64_cas2_acq_rel() align(16) callconv(.naked) void {
854 @setRuntimeSafety(false);
855 asm volatile (
856 \\ cbz w16, 8f
857 \\ .inst 0x08a07c41 + 0x40000000 + 0x408000
858 \\ ret
859 \\8:
860 \\ uxth w16, w0
861 \\0:
862 \\ ldaxrh w0, [x2]
863 \\ cmp w0, w16
864 \\ bne 1f
865 \\ stlxrh w17, w1, [x2]
866 \\ cbnz w17, 0b
867 \\1:
868 \\ ret
869 :
870 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
871 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
872 unreachable;
873}
874fn __aarch64_swp2_acq_rel() align(16) callconv(.naked) void {
875 @setRuntimeSafety(false);
876 asm volatile (
877 \\ cbz w16, 8f
878 \\ .inst 0x38208020 + 0x40000000 + 0xc00000
879 \\ ret
880 \\8:
881 \\ mov w16, w0
882 \\0:
883 \\ ldaxrh w0, [x1]
884 \\ stlxrh w17, w16, [x1]
885 \\ cbnz w17, 0b
886 \\1:
887 \\ ret
888 :
889 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
890 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
891 unreachable;
892}
893fn __aarch64_ldadd2_acq_rel() align(16) callconv(.naked) void {
894 @setRuntimeSafety(false);
895 asm volatile (
896 \\ cbz w16, 8f
897 \\ .inst 0x38200020 + 0x0000 + 0x40000000 + 0xc00000
898 \\ ret
899 \\8:
900 \\ mov w16, w0
901 \\0:
902 \\ ldaxrh w0, [x1]
903 \\ add w17, w0, w16
904 \\ stlxrh w15, w17, [x1]
905 \\ cbnz w15, 0b
906 \\1:
907 \\ ret
908 :
909 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
910 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
911 unreachable;
912}
913fn __aarch64_ldclr2_acq_rel() align(16) callconv(.naked) void {
914 @setRuntimeSafety(false);
915 asm volatile (
916 \\ cbz w16, 8f
917 \\ .inst 0x38200020 + 0x1000 + 0x40000000 + 0xc00000
918 \\ ret
919 \\8:
920 \\ mov w16, w0
921 \\0:
922 \\ ldaxrh w0, [x1]
923 \\ bic w17, w0, w16
924 \\ stlxrh w15, w17, [x1]
925 \\ cbnz w15, 0b
926 \\1:
927 \\ ret
928 :
929 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
930 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
931 unreachable;
932}
933fn __aarch64_ldeor2_acq_rel() align(16) callconv(.naked) void {
934 @setRuntimeSafety(false);
935 asm volatile (
936 \\ cbz w16, 8f
937 \\ .inst 0x38200020 + 0x2000 + 0x40000000 + 0xc00000
938 \\ ret
939 \\8:
940 \\ mov w16, w0
941 \\0:
942 \\ ldaxrh w0, [x1]
943 \\ eor w17, w0, w16
944 \\ stlxrh w15, w17, [x1]
945 \\ cbnz w15, 0b
946 \\1:
947 \\ ret
948 :
949 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
950 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
951 unreachable;
952}
953fn __aarch64_ldset2_acq_rel() align(16) callconv(.naked) void {
954 @setRuntimeSafety(false);
955 asm volatile (
956 \\ cbz w16, 8f
957 \\ .inst 0x38200020 + 0x3000 + 0x40000000 + 0xc00000
958 \\ ret
959 \\8:
960 \\ mov w16, w0
961 \\0:
962 \\ ldaxrh w0, [x1]
963 \\ orr w17, w0, w16
964 \\ stlxrh w15, w17, [x1]
965 \\ cbnz w15, 0b
966 \\1:
967 \\ ret
968 :
969 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
970 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
971 unreachable;
972}
973fn __aarch64_cas4_relax() align(16) callconv(.naked) void {
974 @setRuntimeSafety(false);
975 asm volatile (
976 \\ cbz w16, 8f
977 \\ .inst 0x08a07c41 + 0x80000000 + 0x000000
978 \\ ret
979 \\8:
980 \\ mov w16, w0
981 \\0:
982 \\ ldxr w0, [x2]
983 \\ cmp w0, w16
984 \\ bne 1f
985 \\ stxr w17, w1, [x2]
986 \\ cbnz w17, 0b
987 \\1:
988 \\ ret
989 :
990 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
991 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
992 unreachable;
993}
994fn __aarch64_swp4_relax() align(16) callconv(.naked) void {
995 @setRuntimeSafety(false);
996 asm volatile (
997 \\ cbz w16, 8f
998 \\ .inst 0x38208020 + 0x80000000 + 0x000000
999 \\ ret
1000 \\8:
1001 \\ mov w16, w0
1002 \\0:
1003 \\ ldxr w0, [x1]
1004 \\ stxr w17, w16, [x1]
1005 \\ cbnz w17, 0b
1006 \\1:
1007 \\ ret
1008 :
1009 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
1010 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
1011 unreachable;
1012}
1013fn __aarch64_ldadd4_relax() align(16) callconv(.naked) void {
1014 @setRuntimeSafety(false);
1015 asm volatile (
1016 \\ cbz w16, 8f
1017 \\ .inst 0x38200020 + 0x0000 + 0x80000000 + 0x000000
1018 \\ ret
1019 \\8:
1020 \\ mov w16, w0
1021 \\0:
1022 \\ ldxr w0, [x1]
1023 \\ add w17, w0, w16
1024 \\ stxr w15, w17, [x1]
1025 \\ cbnz w15, 0b
1026 \\1:
1027 \\ ret
1028 :
1029 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
1030 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
1031 unreachable;
1032}
1033fn __aarch64_ldclr4_relax() align(16) callconv(.naked) void {
1034 @setRuntimeSafety(false);
1035 asm volatile (
1036 \\ cbz w16, 8f
1037 \\ .inst 0x38200020 + 0x1000 + 0x80000000 + 0x000000
1038 \\ ret
1039 \\8:
1040 \\ mov w16, w0
1041 \\0:
1042 \\ ldxr w0, [x1]
1043 \\ bic w17, w0, w16
1044 \\ stxr w15, w17, [x1]
1045 \\ cbnz w15, 0b
1046 \\1:
1047 \\ ret
1048 :
1049 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
1050 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
1051 unreachable;
1052}
1053fn __aarch64_ldeor4_relax() align(16) callconv(.naked) void {
1054 @setRuntimeSafety(false);
1055 asm volatile (
1056 \\ cbz w16, 8f
1057 \\ .inst 0x38200020 + 0x2000 + 0x80000000 + 0x000000
1058 \\ ret
1059 \\8:
1060 \\ mov w16, w0
1061 \\0:
1062 \\ ldxr w0, [x1]
1063 \\ eor w17, w0, w16
1064 \\ stxr w15, w17, [x1]
1065 \\ cbnz w15, 0b
1066 \\1:
1067 \\ ret
1068 :
1069 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
1070 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
1071 unreachable;
1072}
1073fn __aarch64_ldset4_relax() align(16) callconv(.naked) void {
1074 @setRuntimeSafety(false);
1075 asm volatile (
1076 \\ cbz w16, 8f
1077 \\ .inst 0x38200020 + 0x3000 + 0x80000000 + 0x000000
1078 \\ ret
1079 \\8:
1080 \\ mov w16, w0
1081 \\0:
1082 \\ ldxr w0, [x1]
1083 \\ orr w17, w0, w16
1084 \\ stxr w15, w17, [x1]
1085 \\ cbnz w15, 0b
1086 \\1:
1087 \\ ret
1088 :
1089 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
1090 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
1091 unreachable;
1092}
1093fn __aarch64_cas4_acq() align(16) callconv(.naked) void {
1094 @setRuntimeSafety(false);
1095 asm volatile (
1096 \\ cbz w16, 8f
1097 \\ .inst 0x08a07c41 + 0x80000000 + 0x400000
1098 \\ ret
1099 \\8:
1100 \\ mov w16, w0
1101 \\0:
1102 \\ ldaxr w0, [x2]
1103 \\ cmp w0, w16
1104 \\ bne 1f
1105 \\ stxr w17, w1, [x2]
1106 \\ cbnz w17, 0b
1107 \\1:
1108 \\ ret
1109 :
1110 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
1111 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
1112 unreachable;
1113}
1114fn __aarch64_swp4_acq() align(16) callconv(.naked) void {
1115 @setRuntimeSafety(false);
1116 asm volatile (
1117 \\ cbz w16, 8f
1118 \\ .inst 0x38208020 + 0x80000000 + 0x800000
1119 \\ ret
1120 \\8:
1121 \\ mov w16, w0
1122 \\0:
1123 \\ ldaxr w0, [x1]
1124 \\ stxr w17, w16, [x1]
1125 \\ cbnz w17, 0b
1126 \\1:
1127 \\ ret
1128 :
1129 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
1130 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
1131 unreachable;
1132}
1133fn __aarch64_ldadd4_acq() align(16) callconv(.naked) void {
1134 @setRuntimeSafety(false);
1135 asm volatile (
1136 \\ cbz w16, 8f
1137 \\ .inst 0x38200020 + 0x0000 + 0x80000000 + 0x800000
1138 \\ ret
1139 \\8:
1140 \\ mov w16, w0
1141 \\0:
1142 \\ ldaxr w0, [x1]
1143 \\ add w17, w0, w16
1144 \\ stxr w15, w17, [x1]
1145 \\ cbnz w15, 0b
1146 \\1:
1147 \\ ret
1148 :
1149 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
1150 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
1151 unreachable;
1152}
1153fn __aarch64_ldclr4_acq() align(16) callconv(.naked) void {
1154 @setRuntimeSafety(false);
1155 asm volatile (
1156 \\ cbz w16, 8f
1157 \\ .inst 0x38200020 + 0x1000 + 0x80000000 + 0x800000
1158 \\ ret
1159 \\8:
1160 \\ mov w16, w0
1161 \\0:
1162 \\ ldaxr w0, [x1]
1163 \\ bic w17, w0, w16
1164 \\ stxr w15, w17, [x1]
1165 \\ cbnz w15, 0b
1166 \\1:
1167 \\ ret
1168 :
1169 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
1170 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
1171 unreachable;
1172}
1173fn __aarch64_ldeor4_acq() align(16) callconv(.naked) void {
1174 @setRuntimeSafety(false);
1175 asm volatile (
1176 \\ cbz w16, 8f
1177 \\ .inst 0x38200020 + 0x2000 + 0x80000000 + 0x800000
1178 \\ ret
1179 \\8:
1180 \\ mov w16, w0
1181 \\0:
1182 \\ ldaxr w0, [x1]
1183 \\ eor w17, w0, w16
1184 \\ stxr w15, w17, [x1]
1185 \\ cbnz w15, 0b
1186 \\1:
1187 \\ ret
1188 :
1189 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
1190 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
1191 unreachable;
1192}
1193fn __aarch64_ldset4_acq() align(16) callconv(.naked) void {
1194 @setRuntimeSafety(false);
1195 asm volatile (
1196 \\ cbz w16, 8f
1197 \\ .inst 0x38200020 + 0x3000 + 0x80000000 + 0x800000
1198 \\ ret
1199 \\8:
1200 \\ mov w16, w0
1201 \\0:
1202 \\ ldaxr w0, [x1]
1203 \\ orr w17, w0, w16
1204 \\ stxr w15, w17, [x1]
1205 \\ cbnz w15, 0b
1206 \\1:
1207 \\ ret
1208 :
1209 : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
1210 : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
1211 unreachable;
1212}
/// Outline-atomic CAS, 4 bytes, release ordering: compare-and-swap at [x2]
/// with expected value w0 and desired value w1; the observed old value is
/// returned in w0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDXR/STLXR loop provides release semantics.
fn __aarch64_cas4_rel() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x08a07c41 + 0x80000000 + 0x008000
        \\ ret
        \\8:
        \\ mov w16, w0
        \\0:
        \\ ldxr w0, [x2]
        \\ cmp w0, w16
        \\ bne 1f
        \\ stlxr w17, w1, [x2]
        \\ cbnz w17, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic SWP, 4 bytes, release ordering: atomically exchange w0
/// with the word at [x1]; the previous value is returned in w0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDXR/STLXR retry loop provides release semantics.
fn __aarch64_swp4_rel() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38208020 + 0x80000000 + 0x400000
        \\ ret
        \\8:
        \\ mov w16, w0
        \\0:
        \\ ldxr w0, [x1]
        \\ stlxr w17, w16, [x1]
        \\ cbnz w17, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic LDADD, 4 bytes, release ordering: atomically add w0 to the
/// word at [x1]; the previous value is returned in w0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDXR/STLXR retry loop provides release semantics.
fn __aarch64_ldadd4_rel() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38200020 + 0x0000 + 0x80000000 + 0x400000
        \\ ret
        \\8:
        \\ mov w16, w0
        \\0:
        \\ ldxr w0, [x1]
        \\ add w17, w0, w16
        \\ stlxr w15, w17, [x1]
        \\ cbnz w15, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic LDCLR, 4 bytes, release ordering: atomically clear the
/// bits of w0 (BIC) in the word at [x1]; the previous value is returned in w0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDXR/STLXR retry loop provides release semantics.
fn __aarch64_ldclr4_rel() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38200020 + 0x1000 + 0x80000000 + 0x400000
        \\ ret
        \\8:
        \\ mov w16, w0
        \\0:
        \\ ldxr w0, [x1]
        \\ bic w17, w0, w16
        \\ stlxr w15, w17, [x1]
        \\ cbnz w15, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic LDEOR, 4 bytes, release ordering: atomically XOR w0 into
/// the word at [x1]; the previous value is returned in w0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDXR/STLXR retry loop provides release semantics.
fn __aarch64_ldeor4_rel() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38200020 + 0x2000 + 0x80000000 + 0x400000
        \\ ret
        \\8:
        \\ mov w16, w0
        \\0:
        \\ ldxr w0, [x1]
        \\ eor w17, w0, w16
        \\ stlxr w15, w17, [x1]
        \\ cbnz w15, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic LDSET, 4 bytes, release ordering: atomically OR w0 into
/// the word at [x1]; the previous value is returned in w0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDXR/STLXR retry loop provides release semantics.
fn __aarch64_ldset4_rel() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38200020 + 0x3000 + 0x80000000 + 0x400000
        \\ ret
        \\8:
        \\ mov w16, w0
        \\0:
        \\ ldxr w0, [x1]
        \\ orr w17, w0, w16
        \\ stlxr w15, w17, [x1]
        \\ cbnz w15, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic CAS, 4 bytes, acquire-release ordering: compare-and-swap
/// at [x2] with expected value w0 and desired value w1; the observed old
/// value is returned in w0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDAXR/STLXR loop provides both orderings.
fn __aarch64_cas4_acq_rel() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x08a07c41 + 0x80000000 + 0x408000
        \\ ret
        \\8:
        \\ mov w16, w0
        \\0:
        \\ ldaxr w0, [x2]
        \\ cmp w0, w16
        \\ bne 1f
        \\ stlxr w17, w1, [x2]
        \\ cbnz w17, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic SWP, 4 bytes, acquire-release ordering: atomically exchange
/// w0 with the word at [x1]; the previous value is returned in w0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDAXR/STLXR retry loop provides both orderings.
fn __aarch64_swp4_acq_rel() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38208020 + 0x80000000 + 0xc00000
        \\ ret
        \\8:
        \\ mov w16, w0
        \\0:
        \\ ldaxr w0, [x1]
        \\ stlxr w17, w16, [x1]
        \\ cbnz w17, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic LDADD, 4 bytes, acquire-release ordering: atomically add
/// w0 to the word at [x1]; the previous value is returned in w0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDAXR/STLXR retry loop provides both orderings.
fn __aarch64_ldadd4_acq_rel() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38200020 + 0x0000 + 0x80000000 + 0xc00000
        \\ ret
        \\8:
        \\ mov w16, w0
        \\0:
        \\ ldaxr w0, [x1]
        \\ add w17, w0, w16
        \\ stlxr w15, w17, [x1]
        \\ cbnz w15, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic LDCLR, 4 bytes, acquire-release ordering: atomically clear
/// the bits of w0 (BIC) in the word at [x1]; the previous value is returned
/// in w0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDAXR/STLXR retry loop provides both orderings.
fn __aarch64_ldclr4_acq_rel() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38200020 + 0x1000 + 0x80000000 + 0xc00000
        \\ ret
        \\8:
        \\ mov w16, w0
        \\0:
        \\ ldaxr w0, [x1]
        \\ bic w17, w0, w16
        \\ stlxr w15, w17, [x1]
        \\ cbnz w15, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic LDEOR, 4 bytes, acquire-release ordering: atomically XOR
/// w0 into the word at [x1]; the previous value is returned in w0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDAXR/STLXR retry loop provides both orderings.
fn __aarch64_ldeor4_acq_rel() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38200020 + 0x2000 + 0x80000000 + 0xc00000
        \\ ret
        \\8:
        \\ mov w16, w0
        \\0:
        \\ ldaxr w0, [x1]
        \\ eor w17, w0, w16
        \\ stlxr w15, w17, [x1]
        \\ cbnz w15, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic LDSET, 4 bytes, acquire-release ordering: atomically OR
/// w0 into the word at [x1]; the previous value is returned in w0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDAXR/STLXR retry loop provides both orderings.
fn __aarch64_ldset4_acq_rel() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38200020 + 0x3000 + 0x80000000 + 0xc00000
        \\ ret
        \\8:
        \\ mov w16, w0
        \\0:
        \\ ldaxr w0, [x1]
        \\ orr w17, w0, w16
        \\ stlxr w15, w17, [x1]
        \\ cbnz w15, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic CAS, 8 bytes, relaxed ordering: compare-and-swap at [x2]
/// with expected value x0 and desired value x1; the observed old value is
/// returned in x0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDXR/STXR loop (no ordering) is used.
fn __aarch64_cas8_relax() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x08a07c41 + 0xc0000000 + 0x000000
        \\ ret
        \\8:
        \\ mov x16, x0
        \\0:
        \\ ldxr x0, [x2]
        \\ cmp x0, x16
        \\ bne 1f
        \\ stxr w17, x1, [x2]
        \\ cbnz w17, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic SWP, 8 bytes, relaxed ordering: atomically exchange x0 with
/// the doubleword at [x1]; the previous value is returned in x0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDXR/STXR retry loop (no ordering) is used.
fn __aarch64_swp8_relax() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38208020 + 0xc0000000 + 0x000000
        \\ ret
        \\8:
        \\ mov x16, x0
        \\0:
        \\ ldxr x0, [x1]
        \\ stxr w17, x16, [x1]
        \\ cbnz w17, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic LDADD, 8 bytes, relaxed ordering: atomically add x0 to the
/// doubleword at [x1]; the previous value is returned in x0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDXR/STXR retry loop (no ordering) is used.
fn __aarch64_ldadd8_relax() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38200020 + 0x0000 + 0xc0000000 + 0x000000
        \\ ret
        \\8:
        \\ mov x16, x0
        \\0:
        \\ ldxr x0, [x1]
        \\ add x17, x0, x16
        \\ stxr w15, x17, [x1]
        \\ cbnz w15, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic LDCLR, 8 bytes, relaxed ordering: atomically clear the bits
/// of x0 (BIC) in the doubleword at [x1]; the previous value is returned in x0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDXR/STXR retry loop (no ordering) is used.
fn __aarch64_ldclr8_relax() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38200020 + 0x1000 + 0xc0000000 + 0x000000
        \\ ret
        \\8:
        \\ mov x16, x0
        \\0:
        \\ ldxr x0, [x1]
        \\ bic x17, x0, x16
        \\ stxr w15, x17, [x1]
        \\ cbnz w15, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic LDEOR, 8 bytes, relaxed ordering: atomically XOR x0 into
/// the doubleword at [x1]; the previous value is returned in x0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDXR/STXR retry loop (no ordering) is used.
fn __aarch64_ldeor8_relax() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38200020 + 0x2000 + 0xc0000000 + 0x000000
        \\ ret
        \\8:
        \\ mov x16, x0
        \\0:
        \\ ldxr x0, [x1]
        \\ eor x17, x0, x16
        \\ stxr w15, x17, [x1]
        \\ cbnz w15, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic LDSET, 8 bytes, relaxed ordering: atomically OR x0 into
/// the doubleword at [x1]; the previous value is returned in x0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDXR/STXR retry loop (no ordering) is used.
fn __aarch64_ldset8_relax() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38200020 + 0x3000 + 0xc0000000 + 0x000000
        \\ ret
        \\8:
        \\ mov x16, x0
        \\0:
        \\ ldxr x0, [x1]
        \\ orr x17, x0, x16
        \\ stxr w15, x17, [x1]
        \\ cbnz w15, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic CAS, 8 bytes, acquire ordering: compare-and-swap at [x2]
/// with expected value x0 and desired value x1; the observed old value is
/// returned in x0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDAXR/STXR loop provides acquire semantics.
fn __aarch64_cas8_acq() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x08a07c41 + 0xc0000000 + 0x400000
        \\ ret
        \\8:
        \\ mov x16, x0
        \\0:
        \\ ldaxr x0, [x2]
        \\ cmp x0, x16
        \\ bne 1f
        \\ stxr w17, x1, [x2]
        \\ cbnz w17, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic SWP, 8 bytes, acquire ordering: atomically exchange x0 with
/// the doubleword at [x1]; the previous value is returned in x0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDAXR/STXR retry loop provides acquire semantics.
fn __aarch64_swp8_acq() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38208020 + 0xc0000000 + 0x800000
        \\ ret
        \\8:
        \\ mov x16, x0
        \\0:
        \\ ldaxr x0, [x1]
        \\ stxr w17, x16, [x1]
        \\ cbnz w17, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic LDADD, 8 bytes, acquire ordering: atomically add x0 to the
/// doubleword at [x1]; the previous value is returned in x0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDAXR/STXR retry loop provides acquire semantics.
fn __aarch64_ldadd8_acq() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38200020 + 0x0000 + 0xc0000000 + 0x800000
        \\ ret
        \\8:
        \\ mov x16, x0
        \\0:
        \\ ldaxr x0, [x1]
        \\ add x17, x0, x16
        \\ stxr w15, x17, [x1]
        \\ cbnz w15, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic LDCLR, 8 bytes, acquire ordering: atomically clear the bits
/// of x0 (BIC) in the doubleword at [x1]; the previous value is returned in x0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDAXR/STXR retry loop provides acquire semantics.
fn __aarch64_ldclr8_acq() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38200020 + 0x1000 + 0xc0000000 + 0x800000
        \\ ret
        \\8:
        \\ mov x16, x0
        \\0:
        \\ ldaxr x0, [x1]
        \\ bic x17, x0, x16
        \\ stxr w15, x17, [x1]
        \\ cbnz w15, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic LDEOR, 8 bytes, acquire ordering: atomically XOR x0 into
/// the doubleword at [x1]; the previous value is returned in x0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDAXR/STXR retry loop provides acquire semantics.
fn __aarch64_ldeor8_acq() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38200020 + 0x2000 + 0xc0000000 + 0x800000
        \\ ret
        \\8:
        \\ mov x16, x0
        \\0:
        \\ ldaxr x0, [x1]
        \\ eor x17, x0, x16
        \\ stxr w15, x17, [x1]
        \\ cbnz w15, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic LDSET, 8 bytes, acquire ordering: atomically OR x0 into
/// the doubleword at [x1]; the previous value is returned in x0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDAXR/STXR retry loop provides acquire semantics.
fn __aarch64_ldset8_acq() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38200020 + 0x3000 + 0xc0000000 + 0x800000
        \\ ret
        \\8:
        \\ mov x16, x0
        \\0:
        \\ ldaxr x0, [x1]
        \\ orr x17, x0, x16
        \\ stxr w15, x17, [x1]
        \\ cbnz w15, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic CAS, 8 bytes, release ordering: compare-and-swap at [x2]
/// with expected value x0 and desired value x1; the observed old value is
/// returned in x0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDXR/STLXR loop provides release semantics.
fn __aarch64_cas8_rel() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x08a07c41 + 0xc0000000 + 0x008000
        \\ ret
        \\8:
        \\ mov x16, x0
        \\0:
        \\ ldxr x0, [x2]
        \\ cmp x0, x16
        \\ bne 1f
        \\ stlxr w17, x1, [x2]
        \\ cbnz w17, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic SWP, 8 bytes, release ordering: atomically exchange x0 with
/// the doubleword at [x1]; the previous value is returned in x0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDXR/STLXR retry loop provides release semantics.
fn __aarch64_swp8_rel() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38208020 + 0xc0000000 + 0x400000
        \\ ret
        \\8:
        \\ mov x16, x0
        \\0:
        \\ ldxr x0, [x1]
        \\ stlxr w17, x16, [x1]
        \\ cbnz w17, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic LDADD, 8 bytes, release ordering: atomically add x0 to the
/// doubleword at [x1]; the previous value is returned in x0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDXR/STLXR retry loop provides release semantics.
fn __aarch64_ldadd8_rel() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38200020 + 0x0000 + 0xc0000000 + 0x400000
        \\ ret
        \\8:
        \\ mov x16, x0
        \\0:
        \\ ldxr x0, [x1]
        \\ add x17, x0, x16
        \\ stlxr w15, x17, [x1]
        \\ cbnz w15, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic LDCLR, 8 bytes, release ordering: atomically clear the bits
/// of x0 (BIC) in the doubleword at [x1]; the previous value is returned in x0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDXR/STLXR retry loop provides release semantics.
fn __aarch64_ldclr8_rel() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38200020 + 0x1000 + 0xc0000000 + 0x400000
        \\ ret
        \\8:
        \\ mov x16, x0
        \\0:
        \\ ldxr x0, [x1]
        \\ bic x17, x0, x16
        \\ stlxr w15, x17, [x1]
        \\ cbnz w15, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic LDEOR, 8 bytes, release ordering: atomically XOR x0 into
/// the doubleword at [x1]; the previous value is returned in x0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDXR/STLXR retry loop provides release semantics.
fn __aarch64_ldeor8_rel() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38200020 + 0x2000 + 0xc0000000 + 0x400000
        \\ ret
        \\8:
        \\ mov x16, x0
        \\0:
        \\ ldxr x0, [x1]
        \\ eor x17, x0, x16
        \\ stlxr w15, x17, [x1]
        \\ cbnz w15, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic LDSET, 8 bytes, release ordering: atomically OR x0 into
/// the doubleword at [x1]; the previous value is returned in x0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDXR/STLXR retry loop provides release semantics.
fn __aarch64_ldset8_rel() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38200020 + 0x3000 + 0xc0000000 + 0x400000
        \\ ret
        \\8:
        \\ mov x16, x0
        \\0:
        \\ ldxr x0, [x1]
        \\ orr x17, x0, x16
        \\ stlxr w15, x17, [x1]
        \\ cbnz w15, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic CAS, 8 bytes, acquire-release ordering: compare-and-swap
/// at [x2] with expected value x0 and desired value x1; the observed old
/// value is returned in x0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDAXR/STLXR loop provides both orderings.
fn __aarch64_cas8_acq_rel() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x08a07c41 + 0xc0000000 + 0x408000
        \\ ret
        \\8:
        \\ mov x16, x0
        \\0:
        \\ ldaxr x0, [x2]
        \\ cmp x0, x16
        \\ bne 1f
        \\ stlxr w17, x1, [x2]
        \\ cbnz w17, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic SWP, 8 bytes, acquire-release ordering: atomically exchange
/// x0 with the doubleword at [x1]; the previous value is returned in x0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDAXR/STLXR retry loop provides both orderings.
fn __aarch64_swp8_acq_rel() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38208020 + 0xc0000000 + 0xc00000
        \\ ret
        \\8:
        \\ mov x16, x0
        \\0:
        \\ ldaxr x0, [x1]
        \\ stlxr w17, x16, [x1]
        \\ cbnz w17, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic LDADD, 8 bytes, acquire-release ordering: atomically add
/// x0 to the doubleword at [x1]; the previous value is returned in x0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDAXR/STLXR retry loop provides both orderings.
fn __aarch64_ldadd8_acq_rel() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38200020 + 0x0000 + 0xc0000000 + 0xc00000
        \\ ret
        \\8:
        \\ mov x16, x0
        \\0:
        \\ ldaxr x0, [x1]
        \\ add x17, x0, x16
        \\ stlxr w15, x17, [x1]
        \\ cbnz w15, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic LDCLR, 8 bytes, acquire-release ordering: atomically clear
/// the bits of x0 (BIC) in the doubleword at [x1]; the previous value is
/// returned in x0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDAXR/STLXR retry loop provides both orderings.
fn __aarch64_ldclr8_acq_rel() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38200020 + 0x1000 + 0xc0000000 + 0xc00000
        \\ ret
        \\8:
        \\ mov x16, x0
        \\0:
        \\ ldaxr x0, [x1]
        \\ bic x17, x0, x16
        \\ stlxr w15, x17, [x1]
        \\ cbnz w15, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic LDEOR, 8 bytes, acquire-release ordering: atomically XOR
/// x0 into the doubleword at [x1]; the previous value is returned in x0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDAXR/STLXR retry loop provides both orderings.
fn __aarch64_ldeor8_acq_rel() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38200020 + 0x2000 + 0xc0000000 + 0xc00000
        \\ ret
        \\8:
        \\ mov x16, x0
        \\0:
        \\ ldaxr x0, [x1]
        \\ eor x17, x0, x16
        \\ stlxr w15, x17, [x1]
        \\ cbnz w15, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic LDSET, 8 bytes, acquire-release ordering: atomically OR
/// x0 into the doubleword at [x1]; the previous value is returned in x0.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded LSE opcode via
/// `.inst`; otherwise an LDAXR/STLXR retry loop provides both orderings.
fn __aarch64_ldset8_acq_rel() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x38200020 + 0x3000 + 0xc0000000 + 0xc00000
        \\ ret
        \\8:
        \\ mov x16, x0
        \\0:
        \\ ldaxr x0, [x1]
        \\ orr x17, x0, x16
        \\ stlxr w15, x17, [x1]
        \\ cbnz w15, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic CAS, 16 bytes, relaxed ordering: compare-and-swap the pair
/// at [x4]; expected pair in x0:x1 (the observed pair is returned there),
/// desired pair in x2:x3.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded CASP-family opcode
/// via `.inst`; otherwise an LDXP/STXP retry loop (no ordering) is used.
fn __aarch64_cas16_relax() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x48207c82 + 0x000000
        \\ ret
        \\8:
        \\ mov x16, x0
        \\ mov x17, x1
        \\0:
        \\ ldxp x0, x1, [x4]
        \\ cmp x0, x16
        \\ ccmp x1, x17, #0, eq
        \\ bne 1f
        \\ stxp w15, x2, x3, [x4]
        \\ cbnz w15, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic CAS, 16 bytes, acquire ordering: compare-and-swap the pair
/// at [x4]; expected pair in x0:x1 (the observed pair is returned there),
/// desired pair in x2:x3.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded CASP-family opcode
/// via `.inst`; otherwise an LDAXP/STXP loop provides acquire semantics.
fn __aarch64_cas16_acq() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x48207c82 + 0x400000
        \\ ret
        \\8:
        \\ mov x16, x0
        \\ mov x17, x1
        \\0:
        \\ ldaxp x0, x1, [x4]
        \\ cmp x0, x16
        \\ ccmp x1, x17, #0, eq
        \\ bne 1f
        \\ stxp w15, x2, x3, [x4]
        \\ cbnz w15, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic CAS, 16 bytes, release ordering: compare-and-swap the pair
/// at [x4]; expected pair in x0:x1 (the observed pair is returned there),
/// desired pair in x2:x3.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded CASP-family opcode
/// via `.inst`; otherwise an LDXP/STLXP loop provides release semantics.
fn __aarch64_cas16_rel() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x48207c82 + 0x008000
        \\ ret
        \\8:
        \\ mov x16, x0
        \\ mov x17, x1
        \\0:
        \\ ldxp x0, x1, [x4]
        \\ cmp x0, x16
        \\ ccmp x1, x17, #0, eq
        \\ bne 1f
        \\ stlxp w15, x2, x3, [x4]
        \\ cbnz w15, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
/// Outline-atomic CAS, 16 bytes, acquire-release ordering: compare-and-swap
/// the pair at [x4]; expected pair in x0:x1 (the observed pair is returned
/// there), desired pair in x2:x3.
/// Nonzero w16 (runtime LSE flag) takes the pre-encoded CASP-family opcode
/// via `.inst`; otherwise an LDAXP/STLXP loop provides both orderings.
fn __aarch64_cas16_acq_rel() align(16) callconv(.naked) void {
    // Naked body: the asm is the entire function; disable safety codegen.
    @setRuntimeSafety(false);
    asm volatile (
        \\ cbz w16, 8f
        \\ .inst 0x48207c82 + 0x408000
        \\ ret
        \\8:
        \\ mov x16, x0
        \\ mov x17, x1
        \\0:
        \\ ldaxp x0, x1, [x4]
        \\ cmp x0, x16
        \\ ccmp x1, x17, #0, eq
        \\ bne 1f
        \\ stlxp w15, x2, x3, [x4]
        \\ cbnz w15, 0b
        \\1:
        \\ ret
        :
        : [__aarch64_have_lse_atomics] "{w16}" (__aarch64_have_lse_atomics),
        : .{ .w15 = true, .w16 = true, .w17 = true, .memory = true });
    // Both asm paths end in `ret`; control never reaches this point.
    unreachable;
}
2025
2026comptime {
2027 @export(&__aarch64_cas1_relax, .{ .name = "__aarch64_cas1_relax", .linkage = common.linkage, .visibility = common.visibility });
2028 @export(&__aarch64_swp1_relax, .{ .name = "__aarch64_swp1_relax", .linkage = common.linkage, .visibility = common.visibility });
2029 @export(&__aarch64_ldadd1_relax, .{ .name = "__aarch64_ldadd1_relax", .linkage = common.linkage, .visibility = common.visibility });
2030 @export(&__aarch64_ldclr1_relax, .{ .name = "__aarch64_ldclr1_relax", .linkage = common.linkage, .visibility = common.visibility });
2031 @export(&__aarch64_ldeor1_relax, .{ .name = "__aarch64_ldeor1_relax", .linkage = common.linkage, .visibility = common.visibility });
2032 @export(&__aarch64_ldset1_relax, .{ .name = "__aarch64_ldset1_relax", .linkage = common.linkage, .visibility = common.visibility });
2033 @export(&__aarch64_cas1_acq, .{ .name = "__aarch64_cas1_acq", .linkage = common.linkage, .visibility = common.visibility });
2034 @export(&__aarch64_swp1_acq, .{ .name = "__aarch64_swp1_acq", .linkage = common.linkage, .visibility = common.visibility });
2035 @export(&__aarch64_ldadd1_acq, .{ .name = "__aarch64_ldadd1_acq", .linkage = common.linkage, .visibility = common.visibility });
2036 @export(&__aarch64_ldclr1_acq, .{ .name = "__aarch64_ldclr1_acq", .linkage = common.linkage, .visibility = common.visibility });
2037 @export(&__aarch64_ldeor1_acq, .{ .name = "__aarch64_ldeor1_acq", .linkage = common.linkage, .visibility = common.visibility });
2038 @export(&__aarch64_ldset1_acq, .{ .name = "__aarch64_ldset1_acq", .linkage = common.linkage, .visibility = common.visibility });
2039 @export(&__aarch64_cas1_rel, .{ .name = "__aarch64_cas1_rel", .linkage = common.linkage, .visibility = common.visibility });
2040 @export(&__aarch64_swp1_rel, .{ .name = "__aarch64_swp1_rel", .linkage = common.linkage, .visibility = common.visibility });
2041 @export(&__aarch64_ldadd1_rel, .{ .name = "__aarch64_ldadd1_rel", .linkage = common.linkage, .visibility = common.visibility });
2042 @export(&__aarch64_ldclr1_rel, .{ .name = "__aarch64_ldclr1_rel", .linkage = common.linkage, .visibility = common.visibility });
2043 @export(&__aarch64_ldeor1_rel, .{ .name = "__aarch64_ldeor1_rel", .linkage = common.linkage, .visibility = common.visibility });
2044 @export(&__aarch64_ldset1_rel, .{ .name = "__aarch64_ldset1_rel", .linkage = common.linkage, .visibility = common.visibility });
2045 @export(&__aarch64_cas1_acq_rel, .{ .name = "__aarch64_cas1_acq_rel", .linkage = common.linkage, .visibility = common.visibility });
2046 @export(&__aarch64_swp1_acq_rel, .{ .name = "__aarch64_swp1_acq_rel", .linkage = common.linkage, .visibility = common.visibility });
2047 @export(&__aarch64_ldadd1_acq_rel, .{ .name = "__aarch64_ldadd1_acq_rel", .linkage = common.linkage, .visibility = common.visibility });
2048 @export(&__aarch64_ldclr1_acq_rel, .{ .name = "__aarch64_ldclr1_acq_rel", .linkage = common.linkage, .visibility = common.visibility });
2049 @export(&__aarch64_ldeor1_acq_rel, .{ .name = "__aarch64_ldeor1_acq_rel", .linkage = common.linkage, .visibility = common.visibility });
2050 @export(&__aarch64_ldset1_acq_rel, .{ .name = "__aarch64_ldset1_acq_rel", .linkage = common.linkage, .visibility = common.visibility });
2051 @export(&__aarch64_cas2_relax, .{ .name = "__aarch64_cas2_relax", .linkage = common.linkage, .visibility = common.visibility });
2052 @export(&__aarch64_swp2_relax, .{ .name = "__aarch64_swp2_relax", .linkage = common.linkage, .visibility = common.visibility });
2053 @export(&__aarch64_ldadd2_relax, .{ .name = "__aarch64_ldadd2_relax", .linkage = common.linkage, .visibility = common.visibility });
2054 @export(&__aarch64_ldclr2_relax, .{ .name = "__aarch64_ldclr2_relax", .linkage = common.linkage, .visibility = common.visibility });
2055 @export(&__aarch64_ldeor2_relax, .{ .name = "__aarch64_ldeor2_relax", .linkage = common.linkage, .visibility = common.visibility });
2056 @export(&__aarch64_ldset2_relax, .{ .name = "__aarch64_ldset2_relax", .linkage = common.linkage, .visibility = common.visibility });
2057 @export(&__aarch64_cas2_acq, .{ .name = "__aarch64_cas2_acq", .linkage = common.linkage, .visibility = common.visibility });
2058 @export(&__aarch64_swp2_acq, .{ .name = "__aarch64_swp2_acq", .linkage = common.linkage, .visibility = common.visibility });
2059 @export(&__aarch64_ldadd2_acq, .{ .name = "__aarch64_ldadd2_acq", .linkage = common.linkage, .visibility = common.visibility });
2060 @export(&__aarch64_ldclr2_acq, .{ .name = "__aarch64_ldclr2_acq", .linkage = common.linkage, .visibility = common.visibility });
2061 @export(&__aarch64_ldeor2_acq, .{ .name = "__aarch64_ldeor2_acq", .linkage = common.linkage, .visibility = common.visibility });
2062 @export(&__aarch64_ldset2_acq, .{ .name = "__aarch64_ldset2_acq", .linkage = common.linkage, .visibility = common.visibility });
2063 @export(&__aarch64_cas2_rel, .{ .name = "__aarch64_cas2_rel", .linkage = common.linkage, .visibility = common.visibility });
2064 @export(&__aarch64_swp2_rel, .{ .name = "__aarch64_swp2_rel", .linkage = common.linkage, .visibility = common.visibility });
2065 @export(&__aarch64_ldadd2_rel, .{ .name = "__aarch64_ldadd2_rel", .linkage = common.linkage, .visibility = common.visibility });
2066 @export(&__aarch64_ldclr2_rel, .{ .name = "__aarch64_ldclr2_rel", .linkage = common.linkage, .visibility = common.visibility });
2067 @export(&__aarch64_ldeor2_rel, .{ .name = "__aarch64_ldeor2_rel", .linkage = common.linkage, .visibility = common.visibility });
2068 @export(&__aarch64_ldset2_rel, .{ .name = "__aarch64_ldset2_rel", .linkage = common.linkage, .visibility = common.visibility });
2069 @export(&__aarch64_cas2_acq_rel, .{ .name = "__aarch64_cas2_acq_rel", .linkage = common.linkage, .visibility = common.visibility });
2070 @export(&__aarch64_swp2_acq_rel, .{ .name = "__aarch64_swp2_acq_rel", .linkage = common.linkage, .visibility = common.visibility });
2071 @export(&__aarch64_ldadd2_acq_rel, .{ .name = "__aarch64_ldadd2_acq_rel", .linkage = common.linkage, .visibility = common.visibility });
2072 @export(&__aarch64_ldclr2_acq_rel, .{ .name = "__aarch64_ldclr2_acq_rel", .linkage = common.linkage, .visibility = common.visibility });
2073 @export(&__aarch64_ldeor2_acq_rel, .{ .name = "__aarch64_ldeor2_acq_rel", .linkage = common.linkage, .visibility = common.visibility });
2074 @export(&__aarch64_ldset2_acq_rel, .{ .name = "__aarch64_ldset2_acq_rel", .linkage = common.linkage, .visibility = common.visibility });
2075 @export(&__aarch64_cas4_relax, .{ .name = "__aarch64_cas4_relax", .linkage = common.linkage, .visibility = common.visibility });
2076 @export(&__aarch64_swp4_relax, .{ .name = "__aarch64_swp4_relax", .linkage = common.linkage, .visibility = common.visibility });
2077 @export(&__aarch64_ldadd4_relax, .{ .name = "__aarch64_ldadd4_relax", .linkage = common.linkage, .visibility = common.visibility });
2078 @export(&__aarch64_ldclr4_relax, .{ .name = "__aarch64_ldclr4_relax", .linkage = common.linkage, .visibility = common.visibility });
2079 @export(&__aarch64_ldeor4_relax, .{ .name = "__aarch64_ldeor4_relax", .linkage = common.linkage, .visibility = common.visibility });
2080 @export(&__aarch64_ldset4_relax, .{ .name = "__aarch64_ldset4_relax", .linkage = common.linkage, .visibility = common.visibility });
2081 @export(&__aarch64_cas4_acq, .{ .name = "__aarch64_cas4_acq", .linkage = common.linkage, .visibility = common.visibility });
2082 @export(&__aarch64_swp4_acq, .{ .name = "__aarch64_swp4_acq", .linkage = common.linkage, .visibility = common.visibility });
2083 @export(&__aarch64_ldadd4_acq, .{ .name = "__aarch64_ldadd4_acq", .linkage = common.linkage, .visibility = common.visibility });
2084 @export(&__aarch64_ldclr4_acq, .{ .name = "__aarch64_ldclr4_acq", .linkage = common.linkage, .visibility = common.visibility });
2085 @export(&__aarch64_ldeor4_acq, .{ .name = "__aarch64_ldeor4_acq", .linkage = common.linkage, .visibility = common.visibility });
2086 @export(&__aarch64_ldset4_acq, .{ .name = "__aarch64_ldset4_acq", .linkage = common.linkage, .visibility = common.visibility });
2087 @export(&__aarch64_cas4_rel, .{ .name = "__aarch64_cas4_rel", .linkage = common.linkage, .visibility = common.visibility });
2088 @export(&__aarch64_swp4_rel, .{ .name = "__aarch64_swp4_rel", .linkage = common.linkage, .visibility = common.visibility });
2089 @export(&__aarch64_ldadd4_rel, .{ .name = "__aarch64_ldadd4_rel", .linkage = common.linkage, .visibility = common.visibility });
2090 @export(&__aarch64_ldclr4_rel, .{ .name = "__aarch64_ldclr4_rel", .linkage = common.linkage, .visibility = common.visibility });
2091 @export(&__aarch64_ldeor4_rel, .{ .name = "__aarch64_ldeor4_rel", .linkage = common.linkage, .visibility = common.visibility });
2092 @export(&__aarch64_ldset4_rel, .{ .name = "__aarch64_ldset4_rel", .linkage = common.linkage, .visibility = common.visibility });
2093 @export(&__aarch64_cas4_acq_rel, .{ .name = "__aarch64_cas4_acq_rel", .linkage = common.linkage, .visibility = common.visibility });
    // NOTE(review): this is the tail of the generated export table; the
    // enclosing comptime block (and the 1-, 2-, and earlier 4-byte entries)
    // opens before this point in the file.
    // Remaining 4-byte (word) acquire-release entry points.
    @export(&__aarch64_swp4_acq_rel, .{ .name = "__aarch64_swp4_acq_rel", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_ldadd4_acq_rel, .{ .name = "__aarch64_ldadd4_acq_rel", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_ldclr4_acq_rel, .{ .name = "__aarch64_ldclr4_acq_rel", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_ldeor4_acq_rel, .{ .name = "__aarch64_ldeor4_acq_rel", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_ldset4_acq_rel, .{ .name = "__aarch64_ldset4_acq_rel", .linkage = common.linkage, .visibility = common.visibility });
    // 8-byte (doubleword) entry points: cas/swp/ldadd/ldclr/ldeor/ldset for
    // each of the four memory orderings (relax, acq, rel, acq_rel).
    @export(&__aarch64_cas8_relax, .{ .name = "__aarch64_cas8_relax", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_swp8_relax, .{ .name = "__aarch64_swp8_relax", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_ldadd8_relax, .{ .name = "__aarch64_ldadd8_relax", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_ldclr8_relax, .{ .name = "__aarch64_ldclr8_relax", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_ldeor8_relax, .{ .name = "__aarch64_ldeor8_relax", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_ldset8_relax, .{ .name = "__aarch64_ldset8_relax", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_cas8_acq, .{ .name = "__aarch64_cas8_acq", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_swp8_acq, .{ .name = "__aarch64_swp8_acq", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_ldadd8_acq, .{ .name = "__aarch64_ldadd8_acq", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_ldclr8_acq, .{ .name = "__aarch64_ldclr8_acq", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_ldeor8_acq, .{ .name = "__aarch64_ldeor8_acq", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_ldset8_acq, .{ .name = "__aarch64_ldset8_acq", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_cas8_rel, .{ .name = "__aarch64_cas8_rel", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_swp8_rel, .{ .name = "__aarch64_swp8_rel", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_ldadd8_rel, .{ .name = "__aarch64_ldadd8_rel", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_ldclr8_rel, .{ .name = "__aarch64_ldclr8_rel", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_ldeor8_rel, .{ .name = "__aarch64_ldeor8_rel", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_ldset8_rel, .{ .name = "__aarch64_ldset8_rel", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_cas8_acq_rel, .{ .name = "__aarch64_cas8_acq_rel", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_swp8_acq_rel, .{ .name = "__aarch64_swp8_acq_rel", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_ldadd8_acq_rel, .{ .name = "__aarch64_ldadd8_acq_rel", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_ldclr8_acq_rel, .{ .name = "__aarch64_ldclr8_acq_rel", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_ldeor8_acq_rel, .{ .name = "__aarch64_ldeor8_acq_rel", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_ldset8_acq_rel, .{ .name = "__aarch64_ldset8_acq_rel", .linkage = common.linkage, .visibility = common.visibility });
    // 16-byte entry points: only the CAS family is exported at this width
    // (presumably because LSE provides a 16-byte compare-and-swap but no
    // 16-byte swp/ld<op> forms — TODO confirm against the generator).
    @export(&__aarch64_cas16_relax, .{ .name = "__aarch64_cas16_relax", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_cas16_acq, .{ .name = "__aarch64_cas16_acq", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_cas16_rel, .{ .name = "__aarch64_cas16_rel", .linkage = common.linkage, .visibility = common.visibility });
    @export(&__aarch64_cas16_acq_rel, .{ .name = "__aarch64_cas16_acq_rel", .linkage = common.linkage, .visibility = common.visibility });
2127}