  1//===-- sanitizer_syscall_linux_aarch64.inc --------------------*- C++ -*-===//
  2//
  3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4// See https://llvm.org/LICENSE.txt for license information.
  5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6//
  7//===----------------------------------------------------------------------===//
  8//
  9// Implementations of internal_syscall and internal_iserror for Linux/aarch64.
 10//
 11//===----------------------------------------------------------------------===//
 12
// Map a syscall name to its Linux syscall number, e.g. SYSCALL(read) -> __NR_read.
#define SYSCALL(name) __NR_ ## name
 14
 15static uptr __internal_syscall(u64 nr) {
 16  register u64 x8 asm("x8") = nr;
 17  register u64 x0 asm("x0");
 18  asm volatile("svc 0"
 19               : "=r"(x0)
 20               : "r"(x8)
 21               : "memory", "cc");
 22  return x0;
 23}
 24#define __internal_syscall0(n) \
 25  (__internal_syscall)(n)
 26
 27static uptr __internal_syscall(u64 nr, u64 arg1) {
 28  register u64 x8 asm("x8") = nr;
 29  register u64 x0 asm("x0") = arg1;
 30  asm volatile("svc 0"
 31               : "=r"(x0)
 32               : "r"(x8), "0"(x0)
 33               : "memory", "cc");
 34  return x0;
 35}
 36#define __internal_syscall1(n, a1) \
 37  (__internal_syscall)(n, (u64)(a1))
 38
 39static uptr __internal_syscall(u64 nr, u64 arg1, long arg2) {
 40  register u64 x8 asm("x8") = nr;
 41  register u64 x0 asm("x0") = arg1;
 42  register u64 x1 asm("x1") = arg2;
 43  asm volatile("svc 0"
 44               : "=r"(x0)
 45               : "r"(x8), "0"(x0), "r"(x1)
 46               : "memory", "cc");
 47  return x0;
 48}
 49#define __internal_syscall2(n, a1, a2) \
 50  (__internal_syscall)(n, (u64)(a1), (long)(a2))
 51
 52static uptr __internal_syscall(u64 nr, u64 arg1, long arg2, long arg3) {
 53  register u64 x8 asm("x8") = nr;
 54  register u64 x0 asm("x0") = arg1;
 55  register u64 x1 asm("x1") = arg2;
 56  register u64 x2 asm("x2") = arg3;
 57  asm volatile("svc 0"
 58               : "=r"(x0)
 59               : "r"(x8), "0"(x0), "r"(x1), "r"(x2)
 60               : "memory", "cc");
 61  return x0;
 62}
 63#define __internal_syscall3(n, a1, a2, a3) \
 64  (__internal_syscall)(n, (u64)(a1), (long)(a2), (long)(a3))
 65
 66static uptr __internal_syscall(u64 nr, u64 arg1, long arg2, long arg3,
 67                               u64 arg4) {
 68  register u64 x8 asm("x8") = nr;
 69  register u64 x0 asm("x0") = arg1;
 70  register u64 x1 asm("x1") = arg2;
 71  register u64 x2 asm("x2") = arg3;
 72  register u64 x3 asm("x3") = arg4;
 73  asm volatile("svc 0"
 74               : "=r"(x0)
 75               : "r"(x8), "0"(x0), "r"(x1), "r"(x2), "r"(x3)
 76               : "memory", "cc");
 77  return x0;
 78}
 79#define __internal_syscall4(n, a1, a2, a3, a4) \
 80  (__internal_syscall)(n, (u64)(a1), (long)(a2), (long)(a3), (long)(a4))
 81
 82static uptr __internal_syscall(u64 nr, u64 arg1, long arg2, long arg3,
 83                               u64 arg4, long arg5) {
 84  register u64 x8 asm("x8") = nr;
 85  register u64 x0 asm("x0") = arg1;
 86  register u64 x1 asm("x1") = arg2;
 87  register u64 x2 asm("x2") = arg3;
 88  register u64 x3 asm("x3") = arg4;
 89  register u64 x4 asm("x4") = arg5;
 90  asm volatile("svc 0"
 91               : "=r"(x0)
 92               : "r"(x8), "0"(x0), "r"(x1), "r"(x2), "r"(x3), "r"(x4)
 93               : "memory", "cc");
 94  return x0;
 95}
 96#define __internal_syscall5(n, a1, a2, a3, a4, a5) \
 97  (__internal_syscall)(n, (u64)(a1), (long)(a2), (long)(a3), (long)(a4), \
 98                       (u64)(a5))
 99
100static uptr __internal_syscall(u64 nr, u64 arg1, long arg2, long arg3,
101                               u64 arg4, long arg5, long arg6) {
102  register u64 x8 asm("x8") = nr;
103  register u64 x0 asm("x0") = arg1;
104  register u64 x1 asm("x1") = arg2;
105  register u64 x2 asm("x2") = arg3;
106  register u64 x3 asm("x3") = arg4;
107  register u64 x4 asm("x4") = arg5;
108  register u64 x5 asm("x5") = arg6;
109  asm volatile("svc 0"
110               : "=r"(x0)
111               : "r"(x8), "0"(x0), "r"(x1), "r"(x2), "r"(x3), "r"(x4), "r"(x5)
112               : "memory", "cc");
113  return x0;
114}
115#define __internal_syscall6(n, a1, a2, a3, a4, a5, a6) \
116  (__internal_syscall)(n, (u64)(a1), (long)(a2), (long)(a3), (long)(a4), \
117                       (u64)(a5), (long)(a6))
118
// Arity dispatch: internal_syscall(nr, ...) expands to
// __internal_syscallN(nr, ...), where N is the number of syscall arguments
// (i.e. the total macro argument count minus one for the syscall number).
#define __SYSCALL_NARGS_X(a1, a2, a3, a4, a5, a6, a7, a8, n, ...) n
// Count arguments by shifting 7..0 into the fixed 9th slot: with K actual
// arguments, the value selected is K-1.
#define __SYSCALL_NARGS(...) \
  __SYSCALL_NARGS_X(__VA_ARGS__, 7, 6, 5, 4, 3, 2, 1, 0, )
// Two-level concat so the argument-count macro is expanded before pasting.
#define __SYSCALL_CONCAT_X(a, b) a##b
#define __SYSCALL_CONCAT(a, b) __SYSCALL_CONCAT_X(a, b)
#define __SYSCALL_DISP(b, ...) \
  __SYSCALL_CONCAT(b, __SYSCALL_NARGS(__VA_ARGS__))(__VA_ARGS__)

#define internal_syscall(...) __SYSCALL_DISP(__internal_syscall, __VA_ARGS__)
128
// Helper used to check a syscall's raw return value for an error without
// clobbering errno. Linux syscalls report failure by returning -errno, so
// an error is any value in the top 4095 addresses, i.e. retval >= (uptr)-4095.
bool internal_iserror(uptr retval, int *rverrno) {
  if (retval >= (uptr)-4095) {
    if (rverrno)
      *rverrno = -retval;  // recover the positive errno value
    return true;
  }
  return false;
}