/*
 * winmacro.h: Window loading-unloading macros.
 *
 * Copyright (C) 1995 David S. Miller (davem@caip.rutgers.edu)
 */

#ifndef _SPARC_WINMACRO_H
#define _SPARC_WINMACRO_H

#include <asm/ptrace.h>

/* Store the register window onto the 8-byte aligned area starting
 * at %reg.  It might be %sp, it might not, we don't care.
 */
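/* Note: each std/ldd below moves an even/odd register pair in a single
 * 8-byte access, which is why the area must be doubleword aligned.
 */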
#define STORE_WINDOW(reg) \
	std	%l0, [%reg + RW_L0]; \
	std	%l2, [%reg + RW_L2]; \
	std	%l4, [%reg + RW_L4]; \
	std	%l6, [%reg + RW_L6]; \
	std	%i0, [%reg + RW_I0]; \
	std	%i2, [%reg + RW_I2]; \
	std	%i4, [%reg + RW_I4]; \
	std	%i6, [%reg + RW_I6];

/* Load a register window from the area beginning at %reg. */
#define LOAD_WINDOW(reg) \
	ldd	[%reg + RW_L0], %l0; \
	ldd	[%reg + RW_L2], %l2; \
	ldd	[%reg + RW_L4], %l4; \
	ldd	[%reg + RW_L6], %l6; \
	ldd	[%reg + RW_I0], %i0; \
	ldd	[%reg + RW_I2], %i2; \
	ldd	[%reg + RW_I4], %i4; \
	ldd	[%reg + RW_I6], %i6;

/* Loading and storing struct pt_regs trap frames. */
#define LOAD_PT_INS(base_reg) \
	ldd	[%base_reg + STACKFRAME_SZ + PT_I0], %i0; \
	ldd	[%base_reg + STACKFRAME_SZ + PT_I2], %i2; \
	ldd	[%base_reg + STACKFRAME_SZ + PT_I4], %i4; \
	ldd	[%base_reg + STACKFRAME_SZ + PT_I6], %i6;
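
/* %g1 is moved with a single ld/st because ldd/std must name the pair
 * through an even-numbered register; %g2-%g7 go as doubleword pairs.
 */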
#define LOAD_PT_GLOBALS(base_reg) \
	ld	[%base_reg + STACKFRAME_SZ + PT_G1], %g1; \
	ldd	[%base_reg + STACKFRAME_SZ + PT_G2], %g2; \
	ldd	[%base_reg + STACKFRAME_SZ + PT_G4], %g4; \
	ldd	[%base_reg + STACKFRAME_SZ + PT_G6], %g6;

#define LOAD_PT_YREG(base_reg, scratch) \
	ld	[%base_reg + STACKFRAME_SZ + PT_Y], %scratch; \
	wr	%scratch, 0x0, %y;

#define LOAD_PT_PRIV(base_reg, pt_psr, pt_pc, pt_npc) \
	ld	[%base_reg + STACKFRAME_SZ + PT_PSR], %pt_psr; \
	ld	[%base_reg + STACKFRAME_SZ + PT_PC], %pt_pc; \
	ld	[%base_reg + STACKFRAME_SZ + PT_NPC], %pt_npc;

#define LOAD_PT_ALL(base_reg, pt_psr, pt_pc, pt_npc, scratch) \
	LOAD_PT_YREG(base_reg, scratch) \
	LOAD_PT_INS(base_reg) \
	LOAD_PT_GLOBALS(base_reg) \
	LOAD_PT_PRIV(base_reg, pt_psr, pt_pc, pt_npc)

#define STORE_PT_INS(base_reg) \
	std	%i0, [%base_reg + STACKFRAME_SZ + PT_I0]; \
	std	%i2, [%base_reg + STACKFRAME_SZ + PT_I2]; \
	std	%i4, [%base_reg + STACKFRAME_SZ + PT_I4]; \
	std	%i6, [%base_reg + STACKFRAME_SZ + PT_I6];

#define STORE_PT_GLOBALS(base_reg) \
	st	%g1, [%base_reg + STACKFRAME_SZ + PT_G1]; \
	std	%g2, [%base_reg + STACKFRAME_SZ + PT_G2]; \
	std	%g4, [%base_reg + STACKFRAME_SZ + PT_G4]; \
	std	%g6, [%base_reg + STACKFRAME_SZ + PT_G6];

#define STORE_PT_YREG(base_reg, scratch) \
	rd	%y, %scratch; \
	st	%scratch, [%base_reg + STACKFRAME_SZ + PT_Y];

#define STORE_PT_PRIV(base_reg, pt_psr, pt_pc, pt_npc) \
	st	%pt_psr, [%base_reg + STACKFRAME_SZ + PT_PSR]; \
	st	%pt_pc,  [%base_reg + STACKFRAME_SZ + PT_PC]; \
	st	%pt_npc, [%base_reg + STACKFRAME_SZ + PT_NPC];

#define STORE_PT_ALL(base_reg, reg_psr, reg_pc, reg_npc, g_scratch) \
	STORE_PT_PRIV(base_reg, reg_psr, reg_pc, reg_npc) \
	STORE_PT_GLOBALS(base_reg) \
	STORE_PT_YREG(base_reg, g_scratch) \
	STORE_PT_INS(base_reg)
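
/* Spill one user register window into the thread_info save area when it
 * cannot be written out to the user stack: remember %sp in the
 * TI_RWIN_SPTRS array (4-byte slots, hence the shift by 2), dump the
 * window itself into the TI_REG_WINDOW array (64-byte slots, hence the
 * further shift by 4), then increment TI_W_SAVED.
 */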
#define SAVE_BOLIXED_USER_STACK(cur_reg, scratch) \
	ld	[%cur_reg + TI_W_SAVED], %scratch; \
	sll	%scratch, 2, %scratch; \
	add	%scratch, %cur_reg, %scratch; \
	st	%sp, [%scratch + TI_RWIN_SPTRS]; \
	sub	%scratch, %cur_reg, %scratch; \
	sll	%scratch, 4, %scratch; \
	add	%scratch, %cur_reg, %scratch; \
	STORE_WINDOW(scratch + TI_REG_WINDOW); \
	sub	%scratch, %cur_reg, %scratch; \
	srl	%scratch, 6, %scratch; \
	add	%scratch, 1, %scratch; \
	st	%scratch, [%cur_reg + TI_W_SAVED];

#ifdef CONFIG_SMP
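/* sun4m: each CPU runs from its own 4KB trap table, so the CPU number
 * can be recovered from %tbr; the "and 0xc" leaves (cpu << 2), the word
 * offset into the current_set[] pointer array.
 */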
#define LOAD_CURRENT4M(dest_reg, idreg) \
	rd	%tbr, %idreg; \
	sethi	%hi(current_set), %dest_reg; \
	srl	%idreg, 10, %idreg; \
	or	%dest_reg, %lo(current_set), %dest_reg; \
	and	%idreg, 0xc, %idreg; \
	ld	[%idreg + %dest_reg], %dest_reg;
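
/* sun4d: the CPU number is kept in the Viking MMU's TMP1 scratch
 * register; scale it to a pointer offset and index current_set[].
 */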
#define LOAD_CURRENT4D(dest_reg, idreg) \
	lda	[%g0] ASI_M_VIKING_TMP1, %idreg; \
	sethi	%hi(C_LABEL(current_set)), %dest_reg; \
	sll	%idreg, 2, %idreg; \
	or	%dest_reg, %lo(C_LABEL(current_set)), %dest_reg; \
	ld	[%idreg + %dest_reg], %dest_reg;

/* Blackbox - take care with this... - check smp4m and smp4d before changing this. */
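/* The sun4m/sun4d SMP boot code locates this sequence through the
 * ___b_load_current marker and patches it into the matching
 * LOAD_CURRENT4M/LOAD_CURRENT4D form above.
 */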
#define LOAD_CURRENT(dest_reg, idreg) \
	sethi	%hi(___b_load_current), %idreg; \
	sethi	%hi(current_set), %dest_reg; \
	sethi	%hi(boot_cpu_id4), %idreg; \
	or	%dest_reg, %lo(current_set), %dest_reg; \
	ldub	[%idreg + %lo(boot_cpu_id4)], %idreg; \
	ld	[%idreg + %dest_reg], %dest_reg;
#else
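/* UP: there is only one current task pointer, current_set[0]. */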
#define LOAD_CURRENT(dest_reg, idreg) \
	sethi	%hi(current_set), %idreg; \
	ld	[%idreg + %lo(current_set)], %dest_reg;
#endif

#endif /* !(_SPARC_WINMACRO_H) */