/* NOTE(review): stray "master" line (VCS/paste artifact) commented out to keep the header compilable. */
#define a_ctz_32 a_ctz_32
/* Count trailing zero bits of x.  Hexagon "ct0" counts trailing zeros
 * in place; as with the generic a_ctz_32, callers are expected to pass
 * a nonzero value. */
static inline int a_ctz_32(unsigned long x)
{
	__asm__(
		"%0 = ct0(%0)\n\t"
		: "+r"(x));
	return x;
}
  9
#define a_ctz_64 a_ctz_64
/* Count trailing zero bits of a 64-bit value.  The 64-bit form of
 * "ct0" takes a register pair (%1) and yields a 32-bit count (%0). */
static inline int a_ctz_64(uint64_t x)
{
	int count;
	__asm__(
		"%0 = ct0(%1)\n\t"
		: "=r"(count) : "r"(x));
	return count;
}
#define a_clz_64 a_clz_64
/* Count leading zeros via the identity clz(x) == ctz(bit_reverse(x)).
 * NOTE(review): correctness relies on "brev" reversing the full 64-bit
 * register pair when applied to a 64-bit operand — confirm this form
 * exists on the targeted Hexagon ISA revision. */
static inline int a_clz_64(uint64_t x)
{
        __asm__(
                "%0 = brev(%0)\n\t"
		: "+r"(x));
        return a_ctz_64(x);
}
 27
#define a_cas a_cas
/* Atomic compare-and-swap: if *p == t, atomically store s.  Returns
 * the value observed at *p (equal to t exactly when the swap took
 * place).  Implemented as a Hexagon load-locked/store-conditional
 * loop; p0 holds both the comparison result and the SC outcome. */
static inline int a_cas(volatile int *p, int t, int s)
{
	int dummy;
	__asm__ __volatile__(
		/* 1: load-locked the current value of *p */
		"1:	%0 = memw_locked(%1)\n\t"
		/* compare against expected value t; if unequal, exit at 2: */
		"	{ p0 = cmp.eq(%0, %2)\n\t"
		"	  if (!p0.new) jump:nt 2f }\n\t"
		/* store-conditional s; p0 reports whether the SC succeeded */
		"	memw_locked(%1, p0) = %3\n\t"
		/* SC failed (lost the reservation) -> retry from the load */
		"	if (!p0) jump 1b\n\t"
		"2:	\n\t"
		: "=&r"(dummy)
		: "r"(p), "r"(t), "r"(s)
		: "p0", "memory" );
        return dummy;
}
 44
 45#define a_cas_p a_cas_p
 46static inline void *a_cas_p(volatile void *p, void *t, void *s)
 47{
 48	return (void *)a_cas(p, (int)t, (int)s);
 49}
 50
#define a_swap a_swap
/* Atomically exchange *x with v; returns the previous value of *x.
 * The new value is first copied into a scratch register, then an
 * LL/SC loop retries until the store-conditional succeeds. */
static inline int a_swap(volatile int *x, int v)
{
	int old, dummy;
	__asm__ __volatile__(
		/* scratch = v (value to install) */
		"	%1 = %3\n\t"
		/* 1: load-locked old value, then try to store scratch */
		"1:	%0 = memw_locked(%2)\n\t"
		"	memw_locked(%2, p0) = %1\n\t"
		/* retry if the store-conditional failed */
		"	if (!p0) jump 1b\n\t"
		: "=&r"(old), "=&r"(dummy)
		: "r"(x), "r"(v)
		: "p0", "memory" );
        return old;
}
 65
#define a_fetch_add a_fetch_add
/* Atomically add v to *x; returns the value of *x prior to the add.
 * LL/SC loop: load-locked the old value, compute old+v into a scratch
 * register, store-conditional, and retry on failure. */
static inline int a_fetch_add(volatile int *x, int v)
{
	int old, dummy;
	__asm__ __volatile__(
		"1:	%0 = memw_locked(%2)\n\t"
		"	%1 = add(%0, %3)\n\t"
		"	memw_locked(%2, p0) = %1\n\t"
		"	if (!p0) jump 1b\n\t"
		: "=&r"(old), "=&r"(dummy)
		: "r"(x), "r"(v)
		: "p0", "memory" );
        return old;
}
 80
 81#define a_inc a_inc
 82static inline void a_inc(volatile int *x)
 83{
 84	a_fetch_add(x, 1);
 85}
 86
 87#define a_dec a_dec
 88static inline void a_dec(volatile int *x)
 89{
 90	int dummy;
 91	__asm__ __volatile__(
 92		"1:	%0 = memw_locked(%1)\n\t"
 93		"	%0 = add(%0, #-1)\n\t"
 94		"	memw_locked(%1, p0) = %0\n\t"
 95		"	if (!p0) jump 1b\n\t"
 96		: "=&r"(dummy)
 97		: "r"(x)
 98		: "p0", "memory" );
 99}
100
#define a_store a_store
/* Atomically store x to *p.  Uses an LL/SC loop rather than a plain
 * store, presumably so the write cannot tear through another core's
 * in-flight locked sequence — confirm against the core's memory model.
 * NOTE(review): there is no explicit barrier before or after the
 * store; verify this meets musl's ordering requirements for a_store. */
static inline void a_store(volatile int *p, int x)
{
	int dummy;
	__asm__ __volatile__(
		"1:	%0 = memw_locked(%1)\n\t"
		"	memw_locked(%1, p0) = %2\n\t"
		"	if (!p0) jump 1b\n\t"
		: "=&r"(dummy)
		: "r"(p), "r"(x)
		: "p0", "memory" );
}
113
#define a_barrier a_barrier
/* Full memory barrier: the hardware "barrier" instruction combined
 * with a compiler barrier via the "memory" clobber. */
static inline void a_barrier()
{
	__asm__ __volatile__ ("barrier" ::: "memory");
}
#define a_spin a_spin
/* Spin-loop relaxation hint: "pause(#255)" stalls the hardware thread
 * (up to the immediate cycle count) to reduce contention while
 * busy-waiting. */
static inline void a_spin()
{
	__asm__ __volatile__ ("pause(#255)" :::);
}
124
#define a_crash a_crash
/* Force a fatal fault by writing through a null pointer.  The
 * volatile qualifier keeps the compiler from optimizing away (or
 * "proving unreachable") the deliberate null store. */
static inline void a_crash()
{
	*(volatile char *)0=0;
}
130
#define a_and a_and
/* Atomically perform *p &= v (no return value).  Standard LL/SC loop:
 * load-locked, AND in v, store-conditional, retry until it sticks. */
static inline void a_and(volatile int *p, int v)
{
	int dummy;
	__asm__ __volatile__(
		"1:	%0 = memw_locked(%1)\n\t"
		"	%0 = and(%0, %2)\n\t"
		"	memw_locked(%1, p0) = %0\n\t"
		"	if (!p0) jump 1b\n\t"
		: "=&r"(dummy)
		: "r"(p), "r"(v)
		: "p0", "memory" );
}
144
#define  a_or a_or
/* Atomically perform *p |= v (no return value).  Standard LL/SC loop:
 * load-locked, OR in v, store-conditional, retry until it sticks. */
static inline void a_or(volatile int *p, int v)
{
	int dummy;
	__asm__ __volatile__(
		"1:	%0 = memw_locked(%1)\n\t"
		"	%0 = or(%0, %2)\n\t"
		"	memw_locked(%1, p0) = %0\n\t"
		"	if (!p0) jump 1b\n\t"
		: "=&r"(dummy)
		: "r"(p), "r"(v)
		: "p0", "memory" );
}
158
#define a_or_l a_or_l
/* long/void* flavor of atomic OR.  long and int are the same width on
 * this 32-bit target, so the word-sized a_or does the work; the casts
 * make the (otherwise implicit) conversions explicit. */
static inline void a_or_l(volatile void *p, long v)
{
	a_or((volatile int *)p, (int)v);
}
164
#define a_and_64 a_and_64
/* Atomically perform *p &= v on a 64-bit object.  Same LL/SC pattern
 * as a_and, but using the doubleword forms (memd_locked, 64-bit and)
 * operating on register pairs. */
static inline void a_and_64(volatile uint64_t *p, uint64_t v)
{
	uint64_t dummy;
	__asm__ __volatile__(
		"1:	%0 = memd_locked(%1)\n\t"
		"	%0 = and(%0, %2)\n\t"
		"	memd_locked(%1, p0) = %0\n\t"
		"	if (!p0) jump 1b\n\t"
		: "=&r"(dummy)
		: "r"(p), "r"(v)
		: "p0", "memory" );
}
178
#define  a_or_64 a_or_64
/* Atomically perform *p |= v on a 64-bit object.  Same LL/SC pattern
 * as a_or, but using the doubleword forms (memd_locked, 64-bit or)
 * operating on register pairs. */
static inline void a_or_64(volatile uint64_t *p, uint64_t v)
{
	uint64_t dummy;
	__asm__ __volatile__(
		"1:	%0 = memd_locked(%1)\n\t"
		"	%0 = or(%0, %2)\n\t"
		"	memd_locked(%1, p0) = %0\n\t"
		"	if (!p0) jump 1b\n\t"
		: "=&r"(dummy)
		: "r"(p), "r"(v)
		: "p0", "memory" );
}