/*	$NetBSD: asm.h,v 1.71.4.1 2023/07/31 13:36:30 martin Exp $	*/

/*
 * Copyright (c) 1992, 1993
 *	The Regents of the University of California.  All rights reserved.
 *
 * This code is derived from software contributed to Berkeley by
 * Ralph Campbell.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of the University nor the names of its contributors
 *    may be used to endorse or promote products derived from this software
 *    without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 *	@(#)machAsmDefs.h	8.1 (Berkeley) 6/10/93
 */

/*
 * machAsmDefs.h --
 *
 *	Macros used when writing assembler programs.
 *
 *	Copyright (C) 1989 Digital Equipment Corporation.
 *	Permission to use, copy, modify, and distribute this software and
 *	its documentation for any purpose and without fee is hereby granted,
 *	provided that the above copyright notice appears in all copies.
 *	Digital Equipment Corporation makes no representations about the
 *	suitability of this software for any purpose.  It is provided "as is"
 *	without express or implied warranty.
 *
 * from: Header: /sprite/src/kernel/mach/ds3100.md/RCS/machAsmDefs.h,
 *	v 1.2 89/08/15 18:28:24 rab Exp  SPRITE (DECWRL)
 */

#ifndef _MIPS_ASM_H
#define	_MIPS_ASM_H

#include <sys/cdefs.h>		/* for API selection */
#include <mips/regdef.h>

#if defined(_KERNEL_OPT)
#include "opt_gprof.h"
#endif

#ifdef __ASSEMBLER__
#define	__BIT(n)	(1 << (n))
#define	__BITS(hi,lo)	((~((~0)<<((hi)+1)))&((~0)<<(lo)))

#define	__LOWEST_SET_BIT(__mask) ((((__mask) - 1) & (__mask)) ^ (__mask))
#define	__SHIFTOUT(__x, __mask) (((__x) & (__mask)) / __LOWEST_SET_BIT(__mask))
#define	__SHIFTIN(__x, __mask) ((__x) * __LOWEST_SET_BIT(__mask))
#endif	/* __ASSEMBLER__ */
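
/*
 * Illustrative sketch (not part of the original header): the
 * assembler-only __BITS/__SHIFTIN/__SHIFTOUT helpers above operate on
 * assemble-time constants, not registers.  The field position
 * (bits 7..4) and register choices below are made up for the example.
 *
 *	li	t0, __SHIFTIN(5, __BITS(7,4))		# 5 << 4 == 0x50
 *	li	t1, __SHIFTOUT(0x50, __BITS(7,4))	# back to 5
 *	andi	t2, t3, __BITS(7,4)			# mask out the field
 */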

/*
 * Define -pg profile entry code.
 * Must always be noreorder, must never use a macro instruction.
 */
#if defined(__mips_o32)		/* Old 32-bit ABI */
/*
 * The old-ABI version must also pop two fewer words off the stack
 * than it pushes, and the final addiu to t9 must always equal the
 * size of this _MIPS_ASM_MCOUNT sequence.
 */
#define	_MIPS_ASM_MCOUNT					\
	.set	push;						\
	.set	noreorder;					\
	.set	noat;						\
	subu	sp,16;						\
	sw	t9,12(sp);					\
	move	AT,ra;						\
	lui	t9,%hi(_mcount); 				\
	addiu	t9,t9,%lo(_mcount);				\
	jalr	t9;						\
	 nop;							\
	lw	t9,4(sp);					\
	addiu	sp,8;						\
	addiu	t9,40;						\
	.set	pop;
#elif defined(__mips_o64)	/* Old 64-bit ABI */
# error yeahnah
#else				/* New (n32/n64) ABI */
/*
 * The new ABI version just needs to put the return address in AT and
 * call _mcount().  In the non-abicalls case, the relocation dance is
 * skipped.
 */
#ifdef __mips_abicalls
#define	_MIPS_ASM_MCOUNT					\
	.set	push;						\
	.set	noreorder;					\
	.set	noat;						\
	subu	sp,16;						\
	sw	t9,8(sp);					\
	move	AT,ra;						\
	lui	t9,%hi(_mcount); 				\
	addiu	t9,t9,%lo(_mcount);				\
	jalr	t9;						\
	 nop;							\
	lw	t9,8(sp);					\
	addiu	sp,16;						\
	.set	pop;
#else /* !__mips_abicalls */
#define	_MIPS_ASM_MCOUNT					\
	.set	push;						\
	.set	noreorder;					\
	.set	noat;						\
	move	AT,ra;						\
	jal	_mcount;					\
	 nop;							\
	.set	pop;
#endif /* !__mips_abicalls */
#endif /* n32/n64 */

#ifdef GPROF
#define	MCOUNT _MIPS_ASM_MCOUNT
#else
#define	MCOUNT
#endif

#ifdef USE_AENT
#define	AENT(x)				\
	.aent	x, 0
#else
#define	AENT(x)
#endif

/*
 * WEAK_ALIAS: create a weak alias.
 */
#define	WEAK_ALIAS(alias,sym)						\
	.weak alias;							\
	alias = sym
/*
 * STRONG_ALIAS: create a strong alias.
 */
#define	STRONG_ALIAS(alias,sym)						\
	.globl alias;							\
	alias = sym
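
/*
 * Illustrative sketch (not part of the original header): the alias
 * macros are used at the top level of an assembly source file; the
 * symbol names below are hypothetical.
 *
 *	WEAK_ALIAS(frobnicate, _frobnicate)	# frobnicate may be overridden
 *	STRONG_ALIAS(_frobnicate_compat, _frobnicate)
 */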

/*
 * WARN_REFERENCES: create a warning if the specified symbol is referenced.
 */
#define	WARN_REFERENCES(sym,msg)					\
	.pushsection __CONCAT(.gnu.warning.,sym);			\
	.ascii msg;							\
	.popsection

/*
 * STATIC_LEAF_NOPROFILE
 *	Non-profiled local leaf routine.
 */
#define	STATIC_LEAF_NOPROFILE(x)	\
	.ent	_C_LABEL(x);		\
_C_LABEL(x): ;				\
	.frame sp, 0, ra

/*
 * LEAF_NOPROFILE
 *	Non-profiled leaf routine.
 */
#define	LEAF_NOPROFILE(x)		\
	.globl	_C_LABEL(x);		\
	STATIC_LEAF_NOPROFILE(x)

/*
 * STATIC_LEAF
 *	Declare a local leaf function.
 */
#define	STATIC_LEAF(x)			\
	STATIC_LEAF_NOPROFILE(x);	\
	MCOUNT

/*
 * LEAF
 *	A leaf routine:
 *	- calls no other functions,
 *	- never uses any callee-saved register (S0-S8), and
 *	- uses no local stack storage.
 */
#define	LEAF(x)				\
	LEAF_NOPROFILE(x);		\
	MCOUNT
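
/*
 * Illustrative sketch (not part of the original header): a minimal
 * leaf routine written with these macros.  "mips_nullop" is a
 * hypothetical name, END() is defined further below, and the default
 * ".set reorder" is assumed so the assembler fills the delay slot.
 *
 *	LEAF(mips_nullop)
 *		move	v0, zero
 *		jr	ra
 *	END(mips_nullop)
 */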

/*
 * STATIC_XLEAF
 *	declare alternate entry to a static leaf routine
 */
#define	STATIC_XLEAF(x)			\
	AENT (_C_LABEL(x));		\
_C_LABEL(x):

/*
 * XLEAF
 *	declare alternate entry to leaf routine
 */
#define	XLEAF(x)			\
	.globl	_C_LABEL(x);		\
	STATIC_XLEAF(x)

/*
 * STATIC_NESTED_NOPROFILE
 *	Non-profiled local nested routine.
 */
#define	STATIC_NESTED_NOPROFILE(x, fsize, retpc)	\
	.ent	_C_LABEL(x);				\
	.type	_C_LABEL(x), @function;			\
_C_LABEL(x): ;						\
	.frame	sp, fsize, retpc

/*
 * NESTED_NOPROFILE
 *	Non-profiled nested routine.
 */
#define	NESTED_NOPROFILE(x, fsize, retpc)	\
	.globl	_C_LABEL(x);			\
	STATIC_NESTED_NOPROFILE(x, fsize, retpc)

/*
 * NESTED
 *	A nested routine calls other functions and therefore needs
 *	stack space to save/restore registers.
 */
#define	NESTED(x, fsize, retpc)			\
	NESTED_NOPROFILE(x, fsize, retpc);	\
	MCOUNT

/*
 * STATIC_NESTED
 *	Profilable local nested routine.
 */
#define	STATIC_NESTED(x, fsize, retpc)			\
	STATIC_NESTED_NOPROFILE(x, fsize, retpc);	\
	MCOUNT

/*
 * XNESTED
 *	declare alternate entry point to nested routine.
 */
#define	XNESTED(x)			\
	.globl	_C_LABEL(x);		\
	AENT (_C_LABEL(x));		\
_C_LABEL(x):

/*
 * END
 *	Mark end of a procedure.
 */
#define	END(x)				\
	.end _C_LABEL(x);		\
	.size _C_LABEL(x), . - _C_LABEL(x)

/*
 * IMPORT -- import external symbol
 */
#define	IMPORT(sym, size)		\
	.extern _C_LABEL(sym),size

/*
 * EXPORT -- export definition of symbol
 */
#define	EXPORT(x)			\
	.globl	_C_LABEL(x);		\
_C_LABEL(x):

/*
 * EXPORT_OBJECT -- export definition of a symbol with symbol type
 * @object, visible to ksyms(4) address search.
 */
#define	EXPORT_OBJECT(x)		\
	EXPORT(x);			\
	.type	_C_LABEL(x), @object;

/*
 * VECTOR
 *	exception vector entrypoint
 *	XXX: regmask should be used to generate .mask
 */
#define	VECTOR(x, regmask)		\
	.ent	_C_LABEL(x);		\
	EXPORT(x);

#define	VECTOR_END(x)			\
	EXPORT(__CONCAT(x,_end));	\
	END(x);				\
	.org _C_LABEL(x) + 0x80

/*
 * Macros to panic and printf from assembly language.
 */
#define	PANIC(msg)			\
	PTR_LA	a0, 9f;			\
	jal	_C_LABEL(panic);	\
	nop;				\
	MSG(msg)

#define	PRINTF(msg)			\
	PTR_LA	a0, 9f;			\
	jal	_C_LABEL(printf);	\
	nop;				\
	MSG(msg)

#define	MSG(msg)			\
	.rdata;				\
9:	.asciz	msg;			\
	.text
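
/*
 * Illustrative sketch (not part of the original header): PANIC embeds
 * its string via MSG and the local label 9f, so it is invoked with a
 * string literal; the message text is made up.  Since a0 is
 * overwritten with the string address, any additional printf-style
 * arguments must already be in a1..a3.
 *
 *	PANIC("frobnicator: impossible state")
 */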

#define	ASMSTR(str)			\
	.asciz str;			\
	.align	3

#define	RCSID(x)	.pushsection ".ident","MS",@progbits,1;		\
			.asciz x;					\
			.popsection

/*
 * XXX retain dialects XXX
 */
#define	ALEAF(x)			XLEAF(x)
#define	NLEAF(x)			LEAF_NOPROFILE(x)
#define	NON_LEAF(x, fsize, retpc)	NESTED(x, fsize, retpc)
#define	NNON_LEAF(x, fsize, retpc)	NESTED_NOPROFILE(x, fsize, retpc)

#if defined(__mips_o32)
#define	SZREG	4
#else
#define	SZREG	8
#endif

#if defined(__mips_o32) || defined(__mips_o64)
#define	ALSK	7		/* stack alignment */
#define	ALMASK	-7		/* stack alignment */
#define	SZFPREG	4
#define	FP_L	lwc1
#define	FP_S	swc1
#else
#define	ALSK	15		/* stack alignment */
#define	ALMASK	-15		/* stack alignment */
#define	SZFPREG	8
#define	FP_L	ldc1
#define	FP_S	sdc1
#endif

/*
 *  standard callframe {
 *  	register_t cf_args[4];		arg0 - arg3 (only on o32 and o64)
 *	register_t cf_pad[N];		o32/o64 (N=0), n32 (N=1), n64 (N=1)
 *  	register_t cf_gp;		global pointer (only on n32 and n64)
 *  	register_t cf_sp;		frame pointer
 *  	register_t cf_ra;		return address
 *  };
 */
#if defined(__mips_o32) || defined(__mips_o64)
#define	CALLFRAME_SIZ	(SZREG * (4 + 2))
#define	CALLFRAME_S0	0
#elif defined(__mips_n32) || defined(__mips_n64)
#define	CALLFRAME_SIZ	(SZREG * 4)
#define	CALLFRAME_S0	(CALLFRAME_SIZ - 4 * SZREG)
#endif
#ifndef _KERNEL
#define	CALLFRAME_GP	(CALLFRAME_SIZ - 3 * SZREG)
#endif
#define	CALLFRAME_SP	(CALLFRAME_SIZ - 2 * SZREG)
#define	CALLFRAME_RA	(CALLFRAME_SIZ - 1 * SZREG)
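
/*
 * Illustrative sketch (not part of the original header): a nested
 * routine that allocates a standard call frame and saves/restores its
 * return address.  "frobcall" and "otherfunc" are hypothetical names,
 * and the default ".set reorder" is assumed so the assembler fills
 * the delay slots.
 *
 *	NESTED(frobcall, CALLFRAME_SIZ, ra)
 *		PTR_SUBU sp, CALLFRAME_SIZ
 *		REG_S	ra, CALLFRAME_RA(sp)
 *		jal	_C_LABEL(otherfunc)
 *		REG_L	ra, CALLFRAME_RA(sp)
 *		PTR_ADDU sp, CALLFRAME_SIZ
 *		jr	ra
 *	END(frobcall)
 */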

/*
 * While it would be nice to be compatible with the SGI
 * REG_L and REG_S macros, they take no parameters, which makes them
 * impossible to use with the _MIPS_SIM_ABIX32 model.
 *
 * These macros hide the use of mips3 instructions from the
 * assembler to prevent the assembler from generating 64-bit style
 * ABI calls.
 */
#ifdef __mips_o32
#define	PTR_ADD		add
#define	PTR_ADDI	addi
#define	PTR_ADDU	addu
#define	PTR_ADDIU	addiu
#define	PTR_SUB		subu
#define	PTR_SUBI	subi
#define	PTR_SUBU	subu
#define	PTR_SUBIU	subu
#define	PTR_L		lw
#define	PTR_LA		la
#define	PTR_S		sw
#define	PTR_SLL		sll
#define	PTR_SLLV	sllv
#define	PTR_SRL		srl
#define	PTR_SRLV	srlv
#define	PTR_SRA		sra
#define	PTR_SRAV	srav
#define	PTR_LL		ll
#define	PTR_SC		sc
#define	PTR_WORD	.word
#define	PTR_SCALESHIFT	2
#else /* _MIPS_SZPTR == 64 */
#define	PTR_ADD		dadd
#define	PTR_ADDI	daddi
#define	PTR_ADDU	daddu
#define	PTR_ADDIU	daddiu
#define	PTR_SUB		dsubu
#define	PTR_SUBI	dsubi
#define	PTR_SUBU	dsubu
#define	PTR_SUBIU	dsubu
#ifdef __mips_n32
#define	PTR_L		lw
#define	PTR_LL		ll
#define	PTR_SC		sc
#define	PTR_S		sw
#define	PTR_SCALESHIFT	2
#define	PTR_WORD	.word
#else
#define	PTR_L		ld
#define	PTR_LL		lld
#define	PTR_SC		scd
#define	PTR_S		sd
#define	PTR_SCALESHIFT	3
#define	PTR_WORD	.dword
#endif
#define	PTR_LA		dla
#define	PTR_SLL		dsll
#define	PTR_SLLV	dsllv
#define	PTR_SRL		dsrl
#define	PTR_SRLV	dsrlv
#define	PTR_SRA		dsra
#define	PTR_SRAV	dsrav
#endif /* _MIPS_SZPTR == 64 */
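
/*
 * Illustrative sketch (not part of the original header): the PTR_*
 * macros let the same source load, adjust and store a pointer-sized
 * value regardless of ABI; "some_ptr" is a hypothetical symbol.
 *
 *	PTR_LA	t0, _C_LABEL(some_ptr)
 *	PTR_L	t1, 0(t0)
 *	PTR_ADDU t1, 1 << PTR_SCALESHIFT	# advance by one pointer
 *	PTR_S	t1, 0(t0)
 */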

#if _MIPS_SZINT == 32
#define	INT_ADD		add
#define	INT_ADDI	addi
#define	INT_ADDU	addu
#define	INT_ADDIU	addiu
#define	INT_SUB		subu
#define	INT_SUBI	subi
#define	INT_SUBU	subu
#define	INT_SUBIU	subu
#define	INT_L		lw
#define	INT_LA		la
#define	INT_S		sw
#define	INT_SLL		sll
#define	INT_SLLV	sllv
#define	INT_SRL		srl
#define	INT_SRLV	srlv
#define	INT_SRA		sra
#define	INT_SRAV	srav
#define	INT_LL		ll
#define	INT_SC		sc
#define	INT_WORD	.word
#define	INT_SCALESHIFT	2
#else
#define	INT_ADD		dadd
#define	INT_ADDI	daddi
#define	INT_ADDU	daddu
#define	INT_ADDIU	daddiu
#define	INT_SUB		dsubu
#define	INT_SUBI	dsubi
#define	INT_SUBU	dsubu
#define	INT_SUBIU	dsubu
#define	INT_L		ld
#define	INT_LA		dla
#define	INT_S		sd
#define	INT_SLL		dsll
#define	INT_SLLV	dsllv
#define	INT_SRL		dsrl
#define	INT_SRLV	dsrlv
#define	INT_SRA		dsra
#define	INT_SRAV	dsrav
#define	INT_LL		lld
#define	INT_SC		scd
#define	INT_WORD	.dword
#define	INT_SCALESHIFT	3
#endif

#if _MIPS_SZLONG == 32
#define	LONG_ADD	add
#define	LONG_ADDI	addi
#define	LONG_ADDU	addu
#define	LONG_ADDIU	addiu
#define	LONG_SUB	subu
#define	LONG_SUBI	subi
#define	LONG_SUBU	subu
#define	LONG_SUBIU	subu
#define	LONG_L		lw
#define	LONG_LA		la
#define	LONG_S		sw
#define	LONG_SLL	sll
#define	LONG_SLLV	sllv
#define	LONG_SRL	srl
#define	LONG_SRLV	srlv
#define	LONG_SRA	sra
#define	LONG_SRAV	srav
#define	LONG_LL		ll
#define	LONG_SC		sc
#define	LONG_WORD	.word
#define	LONG_SCALESHIFT	2
#else
#define	LONG_ADD	dadd
#define	LONG_ADDI	daddi
#define	LONG_ADDU	daddu
#define	LONG_ADDIU	daddiu
#define	LONG_SUB	dsubu
#define	LONG_SUBI	dsubi
#define	LONG_SUBU	dsubu
#define	LONG_SUBIU	dsubu
#define	LONG_L		ld
#define	LONG_LA		dla
#define	LONG_S		sd
#define	LONG_SLL	dsll
#define	LONG_SLLV	dsllv
#define	LONG_SRL	dsrl
#define	LONG_SRLV	dsrlv
#define	LONG_SRA	dsra
#define	LONG_SRAV	dsrav
#define	LONG_LL		lld
#define	LONG_SC		scd
#define	LONG_WORD	.dword
#define	LONG_SCALESHIFT	3
#endif

#if SZREG == 4
#define	REG_L		lw
#define	REG_S		sw
#define	REG_LI		li
#define	REG_ADDU	addu
#define	REG_SLL		sll
#define	REG_SLLV	sllv
#define	REG_SRL		srl
#define	REG_SRLV	srlv
#define	REG_SRA		sra
#define	REG_SRAV	srav
#define	REG_LL		ll
#define	REG_SC		sc
#define	REG_SCALESHIFT	2
#else
#define	REG_L		ld
#define	REG_S		sd
#define	REG_LI		dli
#define	REG_ADDU	daddu
#define	REG_SLL		dsll
#define	REG_SLLV	dsllv
#define	REG_SRL		dsrl
#define	REG_SRLV	dsrlv
#define	REG_SRA		dsra
#define	REG_SRAV	dsrav
#define	REG_LL		lld
#define	REG_SC		scd
#define	REG_SCALESHIFT	3
#endif

#if (MIPS1 + MIPS2) > 0
#define	NOP_L		nop
#else
#define	NOP_L		/* nothing */
#endif

/* compiler define */
#if defined(__OCTEON__)
/*
 * See common/lib/libc/arch/mips/atomic/membar_ops.S for notes on
 * Octeon memory ordering guarantees and barriers.
 *
 * cnMIPS also has a quirk where the store buffer can get clogged and
 * we need to apply a plunger to it _after_ releasing a lock or else
 * other CPUs may spin for hundreds of thousands of cycles before they
 * see the lock is released.  So we also have the quirky SYNC_PLUNGER
 * barrier as syncw.
 */
#define	LLSCSYNC	/* nothing */
#define	BDSYNC		sync
#define	BDSYNC_ACQ	nop
#define	SYNC_ACQ	/* nothing */
#define	SYNC_REL	sync 4
#define	BDSYNC_PLUNGER	sync 4
#define	SYNC_PLUNGER	sync 4
#elif __mips >= 3 || !defined(__mips_o32)
#define	LLSCSYNC	/* nothing */
#define	BDSYNC		sync
#define	BDSYNC_ACQ	sync
#define	SYNC_ACQ	sync
#define	SYNC_REL	sync
#define	BDSYNC_PLUNGER	nop
#define	SYNC_PLUNGER	/* nothing */
#else
#define	LLSCSYNC	/* nothing */
#define	BDSYNC		nop
#define	BDSYNC_ACQ	nop
#define	SYNC_ACQ	/* nothing */
#define	SYNC_REL	/* nothing */
#define	BDSYNC_PLUNGER	nop
#define	SYNC_PLUNGER	/* nothing */
#endif

/*
 * Store-before-load barrier.  Do not use this unless you know what
 * you're doing.
 */
#ifdef MULTIPROCESSOR
#define	SYNC_DEKKER	sync
#else
#define	SYNC_DEKKER	/* nothing */
#endif

/*
 * Store-before-store and load-before-load barriers.  These could be
 * made weaker than release (load/store-before-store) and acquire
 * (load-before-load/store) barriers, and newer MIPS does have
 * instruction encodings for finer-grained barriers like this, but it
 * is not clear how to appropriately conditionalize their use or get
 * the assembler to accept them, so we'll use these definitions for
 * now.
 */
#define	SYNC_PRODUCER	SYNC_REL
#define	SYNC_CONSUMER	SYNC_ACQ
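
/*
 * Illustrative sketch (not part of the original header): a simplified
 * lock-release store sequence using the barriers above, along the
 * lines described in the Octeon notes; the lock word at 0(a0) is
 * hypothetical.
 *
 *	SYNC_REL			# order earlier stores before the release
 *	INT_S	zero, 0(a0)		# store that releases the lock
 *	SYNC_PLUNGER			# flush cnMIPS store buffer; no-op elsewhere
 */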

/* CPU dependent hook for cp0 load delays */
#if defined(MIPS1) || defined(MIPS2) || defined(MIPS3)
#define	MFC0_HAZARD	sll $0,$0,1	/* super scalar nop */
#else
#define	MFC0_HAZARD	/* nothing */
#endif
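
/*
 * Illustrative sketch (not part of the original header): a cp0 read
 * followed by the load-delay hook; the MIPS_COP_0_COUNT register name
 * is assumed to come from <mips/cpuregs.h>.
 *
 *	mfc0	v0, MIPS_COP_0_COUNT
 *	MFC0_HAZARD			# covers the cp0 read delay on old CPUs
 */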

#if _MIPS_ISA == _MIPS_ISA_MIPS1 || _MIPS_ISA == _MIPS_ISA_MIPS2 || \
    _MIPS_ISA == _MIPS_ISA_MIPS32
#define	MFC0		mfc0
#define	MTC0		mtc0
#endif
#if _MIPS_ISA == _MIPS_ISA_MIPS3 || _MIPS_ISA == _MIPS_ISA_MIPS4 || \
    _MIPS_ISA == _MIPS_ISA_MIPS64
#define	MFC0		dmfc0
#define	MTC0		dmtc0
#endif

#if defined(__mips_o32) || defined(__mips_o64)

#ifdef __mips_abicalls
#define	CPRESTORE(r)	.cprestore r
#define	CPLOAD(r)	.cpload r
#else
#define	CPRESTORE(r)	/* not needed */
#define	CPLOAD(r)	/* not needed */
#endif

#define	SETUP_GP	\
			.set push;				\
			.set noreorder;				\
			.cpload	t9;				\
			.set pop
#define	SETUP_GPX(r)	\
			.set push;				\
			.set noreorder;				\
			move	r,ra;	/* save old ra */	\
			bal	7f;				\
			nop;					\
		7:	.cpload	ra;				\
			move	ra,r;				\
			.set pop
#define	SETUP_GPX_L(r,lbl)	\
			.set push;				\
			.set noreorder;				\
			move	r,ra;	/* save old ra */	\
			bal	lbl;				\
			nop;					\
		lbl:	.cpload	ra;				\
			move	ra,r;				\
			.set pop
#define	SAVE_GP(x)	.cprestore x

#define	SETUP_GP64(a,b)		/* n32/n64 specific */
#define	SETUP_GP64_R(a,b)	/* n32/n64 specific */
#define	SETUP_GPX64(a,b)	/* n32/n64 specific */
#define	SETUP_GPX64_L(a,b,c)	/* n32/n64 specific */
#define	RESTORE_GP64		/* n32/n64 specific */
#define	USE_ALT_CP(a)		/* n32/n64 specific */
#endif /* __mips_o32 || __mips_o64 */

#if defined(__mips_o32) || defined(__mips_o64)
#define	REG_PROLOGUE	.set push
#define	REG_EPILOGUE	.set pop
#endif
#if defined(__mips_n32) || defined(__mips_n64)
#define	REG_PROLOGUE	.set push ; .set mips3
#define	REG_EPILOGUE	.set pop
#endif

#if defined(__mips_n32) || defined(__mips_n64)
#define	SETUP_GP		/* o32 specific */
#define	SETUP_GPX(r)		/* o32 specific */
#define	SETUP_GPX_L(r,lbl)	/* o32 specific */
#define	SAVE_GP(x)		/* o32 specific */
#define	SETUP_GP64(a,b)		.cpsetup t9, a, b
#define	SETUP_GPX64(a,b)	\
				.set push;			\
				move	b,ra;			\
				.set noreorder;			\
				bal	7f;			\
				nop;				\
			7:	.set pop;			\
				.cpsetup ra, a, 7b;		\
				move	ra,b
#define	SETUP_GPX64_L(a,b,c)	\
				.set push;			\
				move	b,ra;			\
				.set noreorder;			\
				bal	c;			\
				nop;				\
			c:	.set pop;			\
				.cpsetup ra, a, c;		\
				move	ra,b
#define	RESTORE_GP64		.cpreturn
#define	USE_ALT_CP(a)		.cplocal a
#endif	/* __mips_n32 || __mips_n64 */
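
/*
 * Illustrative sketch (not part of the original header): a typical
 * n32/n64 PIC prologue/epilogue built from the macros above.
 * "gpfunc" and "helper" are hypothetical names, CALLFRAME_GP is only
 * defined for userland (!_KERNEL) builds, and the default
 * ".set reorder" is assumed.
 *
 *	NESTED(gpfunc, CALLFRAME_SIZ, ra)
 *		PTR_SUBU sp, CALLFRAME_SIZ
 *		SETUP_GP64(CALLFRAME_GP, gpfunc)
 *		REG_S	ra, CALLFRAME_RA(sp)
 *		jal	_C_LABEL(helper)
 *		REG_L	ra, CALLFRAME_RA(sp)
 *		RESTORE_GP64
 *		PTR_ADDU sp, CALLFRAME_SIZ
 *		jr	ra
 *	END(gpfunc)
 */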

/*
 * The DYNAMIC_STATUS_MASK option adds an additional masking operation
 * when updating the hardware interrupt mask in the status register.
 *
 * This is useful for platforms that need to mask interrupts at run
 * time based on motherboard configuration, or to handle slowly
 * clearing interrupts.
 *
 * XXX this is only currently implemented for mips3.
 */
#ifdef MIPS_DYNAMIC_STATUS_MASK
#define	DYNAMIC_STATUS_MASK(sr,scratch)	\
	lw	scratch, mips_dynamic_status_mask; \
	and	sr, sr, scratch

#define	DYNAMIC_STATUS_MASK_TOUSER(sr,scratch1)		\
	ori	sr, (MIPS_INT_MASK | MIPS_SR_INT_IE);	\
	DYNAMIC_STATUS_MASK(sr,scratch1)
#else
#define	DYNAMIC_STATUS_MASK(sr,scratch)
#define	DYNAMIC_STATUS_MASK_TOUSER(sr,scratch1)
#endif

/* See lock_stubs.S. */
#define	LOG2_MIPS_LOCK_RAS_SIZE	8
#define	MIPS_LOCK_RAS_SIZE	256	/* 16 bytes left over */

#define	CPUVAR(off) _C_LABEL(cpu_info_store)+__CONCAT(CPU_INFO_,off)

#endif /* _MIPS_ASM_H */