#define __SYSCALL_LL_E(x) \
((union { long long ll; long l[2]; }){ .ll = x }).l[0], \
((union { long long ll; long l[2]; }){ .ll = x }).l[1]
#define __SYSCALL_LL_O(x) 0, __SYSCALL_LL_E((x))
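/* __SYSCALL_LL_E passes a 64-bit argument as two consecutive 32-bit
 * register slots; __SYSCALL_LL_O prepends a zero pad as well, so that
 * a 64-bit argument which would otherwise begin in an odd slot lands
 * in the aligned even/odd register pair the EABI requires. */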

#ifdef __thumb__

/* Avoid use of r7 in asm constraints when producing thumb code,
 * since it's reserved as frame pointer and might not be supported. */
#define __ASM____R7__
#define __asm_syscall(...) do { \
	__asm__ __volatile__ ( "mov %1,r7 ; mov r7,%2 ; svc 0 ; mov r7,%1" \
	: "=r"(r0), "=&r"((int){0}) : __VA_ARGS__ : "memory"); \
	return r0; \
	} while (0)
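/* Here r7 is saved into a compiler-chosen scratch register, loaded
 * with the syscall number, and restored after the trap, so r7 never
 * appears in the asm constraints or clobber list. A call expands to
 * roughly:
 *   mov <scratch>,r7 ; mov r7,<n> ; svc 0 ; mov r7,<scratch>
 * with the result left in r0. */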

#else

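/* In ARM (non-thumb) mode, r7 can be bound directly to a register
 * variable and passed as an ordinary input operand, so the asm body
 * is just the trap itself. */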
#define __ASM____R7__ __asm__("r7")
#define __asm_syscall(...) do { \
	__asm__ __volatile__ ( "svc 0" \
	: "=r"(r0) : __VA_ARGS__ : "memory"); \
	return r0; \
	} while (0)
#endif

/* For thumb2, we can allow 8-bit immediate syscall numbers, saving a
 * register in the above dance around r7. Does not work for thumb1 where
 * only movs, not mov, supports immediates, and we can't use movs because
 * it doesn't support high regs. */
#ifdef __thumb2__
#define R7_OPERAND "rI"(r7)
#else
#define R7_OPERAND "r"(r7)
#endif

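/* With the "I" alternative, a constant syscall number can be
 * substituted as an immediate directly into the "mov r7,%2" of the
 * thumb sequence above, instead of first being materialized in a
 * register. */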
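/* The kernel syscall ABI takes the syscall number in r7 and arguments
 * in r0-r5; the raw result (a negative errno value on failure) is
 * returned in r0. The __syscallN variants below differ only in how
 * many argument registers they bind. */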
static inline long __syscall0(long n)
{
	register long r7 __ASM____R7__ = n;
	register long r0 __asm__("r0");
	__asm_syscall(R7_OPERAND);
}

static inline long __syscall1(long n, long a)
{
	register long r7 __ASM____R7__ = n;
	register long r0 __asm__("r0") = a;
	__asm_syscall(R7_OPERAND, "0"(r0));
}

static inline long __syscall2(long n, long a, long b)
{
	register long r7 __ASM____R7__ = n;
	register long r0 __asm__("r0") = a;
	register long r1 __asm__("r1") = b;
	__asm_syscall(R7_OPERAND, "0"(r0), "r"(r1));
}

static inline long __syscall3(long n, long a, long b, long c)
{
	register long r7 __ASM____R7__ = n;
	register long r0 __asm__("r0") = a;
	register long r1 __asm__("r1") = b;
	register long r2 __asm__("r2") = c;
	__asm_syscall(R7_OPERAND, "0"(r0), "r"(r1), "r"(r2));
}

static inline long __syscall4(long n, long a, long b, long c, long d)
{
	register long r7 __ASM____R7__ = n;
	register long r0 __asm__("r0") = a;
	register long r1 __asm__("r1") = b;
	register long r2 __asm__("r2") = c;
	register long r3 __asm__("r3") = d;
	__asm_syscall(R7_OPERAND, "0"(r0), "r"(r1), "r"(r2), "r"(r3));
}

static inline long __syscall5(long n, long a, long b, long c, long d, long e)
{
	register long r7 __ASM____R7__ = n;
	register long r0 __asm__("r0") = a;
	register long r1 __asm__("r1") = b;
	register long r2 __asm__("r2") = c;
	register long r3 __asm__("r3") = d;
	register long r4 __asm__("r4") = e;
	__asm_syscall(R7_OPERAND, "0"(r0), "r"(r1), "r"(r2), "r"(r3), "r"(r4));
}

static inline long __syscall6(long n, long a, long b, long c, long d, long e, long f)
{
	register long r7 __ASM____R7__ = n;
	register long r0 __asm__("r0") = a;
	register long r1 __asm__("r1") = b;
	register long r2 __asm__("r2") = c;
	register long r3 __asm__("r3") = d;
	register long r4 __asm__("r4") = e;
	register long r5 __asm__("r5") = f;
	__asm_syscall(R7_OPERAND, "0"(r0), "r"(r1), "r"(r2), "r"(r3), "r"(r4), "r"(r5));
}

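/* Feature flags consumed by the generic syscall wrappers:
 * SYSCALL_FADVISE_6_ARG selects the fadvise form with the advice
 * argument passed before the 64-bit offset/length pair, and
 * SYSCALL_IPC_BROKEN_MODE requests fixup of the kernel's 16-bit
 * ipc_perm mode field in the SysV IPC wrappers. */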
#define SYSCALL_FADVISE_6_ARG

#define SYSCALL_IPC_BROKEN_MODE

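/* The vDSO is usable on this arch; the macros below name the 32-bit
 * and 64-bit time_t clock_gettime entry points and their symbol
 * versions. VDSO_CGT_WORKAROUND enables an extra check in the generic
 * clock_gettime code, guarding against kernels that shipped a broken
 * clock_gettime64 vDSO. */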
#define VDSO_USEFUL
#define VDSO_CGT32_SYM "__vdso_clock_gettime"
#define VDSO_CGT32_VER "LINUX_2.6"
#define VDSO_CGT_SYM "__vdso_clock_gettime64"
#define VDSO_CGT_VER "LINUX_2.6"
#define VDSO_CGT_WORKAROUND 1