#include <linux/jump_label.h>

/*

 x86 function call convention, 64-bit:
 -------------------------------------
  arguments           |  callee-saved      | extra caller-saved | return
 [callee-clobbered]   |                    | [callee-clobbered] |
 ---------------------------------------------------------------------------
 rdi rsi rdx rcx r8-9 | rbx rbp [*] r12-15 | r10-11             | rax, rdx [**]

 ( rsp is obviously invariant across normal function calls - though gcc can
   'merge' functions when it sees tail-call optimization possibilities.
   rflags is clobbered. Leftover arguments are passed on the stack. )

 [*]  In the frame-pointers case rbp is fixed to the stack frame.

 [**] For struct return values wider than 64 bits the return convention is a
      bit more complex: structures up to 128 bits wide are returned straight
      in rax, rdx. For anything larger (3 words or more) the caller puts a
      pointer to an on-stack return struct [allocated in the caller's stack
      frame] into the first argument - i.e. into rdi - and all other
      arguments shift up by one. Fortunately this case is rare in the kernel.

For 32-bit we have the following conventions - the kernel is built with
-mregparm=3 and -freg-struct-return:

 x86 function calling convention, 32-bit:
 ----------------------------------------
  arguments         | callee-saved        | extra caller-saved | return
 [callee-clobbered] |                     | [callee-clobbered] |
 -------------------------------------------------------------------------
 eax edx ecx        | ebx edi esi ebp [*] | <none>             | eax, edx [**]

 ( here too esp is obviously invariant across normal function calls. eflags
   is clobbered. Leftover arguments are passed on the stack. )

 [*]  In the frame-pointers case ebp is fixed to the stack frame.

 [**] We build with -freg-struct-return, which on 32-bit means similar
      semantics as on 64-bit: edx can be used for a second return value
      (i.e. covering integer and structure sizes up to 64 bits) - after that
      it gets more complex and more expensive: 3-word or larger struct returns
      are done via a buffer in the caller's frame, the pointer to that return
      struct goes into regparm0, i.e. eax - the other arguments shift up and
      the function's register parameters degenerate to regparm=2 in essence.

*/
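/*
 * Illustrative mapping (a sketch; the function below is hypothetical, not
 * something defined in this file): for
 *
 *	long copy_thing(void *dst, const void *src, unsigned long len);
 *
 * the 64-bit convention above means the caller passes dst in rdi, src in
 * rsi and len in rdx, and the result comes back in rax; r10, r11 and the
 * argument registers may be clobbered by the callee, while rbx, rbp and
 * r12-r15 survive the call. Under the 32-bit -mregparm=3 convention the
 * same three arguments would go in eax, edx and ecx.
 */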

#ifdef CONFIG_X86_64

/*
 * 64-bit system call stack frame layout defines and helpers,
 * for assembly code:
 */

/* The layout forms the "struct pt_regs" on the stack: */
/*
 * C ABI says these regs are callee-preserved. They aren't saved on kernel entry
 * unless syscall needs a complete, fully filled "struct pt_regs".
 */
#define R15		0*8
#define R14		1*8
#define R13		2*8
#define R12		3*8
#define RBP		4*8
#define RBX		5*8
/* These regs are callee-clobbered. Always saved on kernel entry. */
#define R11		6*8
#define R10		7*8
#define R9		8*8
#define R8		9*8
#define RAX		10*8
#define RCX		11*8
#define RDX		12*8
#define RSI		13*8
#define RDI		14*8
/*
 * On syscall entry, this is syscall#. On CPU exception, this is error code.
 * On hw interrupt, it's IRQ number:
 */
#define ORIG_RAX	15*8
/* Return frame for iretq */
#define RIP		16*8
#define CS		17*8
#define EFLAGS		18*8
#define RSP		19*8
#define SS		20*8

#define SIZEOF_PTREGS	21*8
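/*
 * Usage sketch (illustrative only): with a complete pt_regs frame at the
 * top of the stack, the offsets above index the individual saved
 * registers, e.g.:
 *
 *	movq	ORIG_RAX(%rsp), %rsi	# saved syscall nr / error code
 *	movq	RIP(%rsp), %rdx		# saved user return address
 *
 * SIZEOF_PTREGS is the size of the whole frame, including the iret frame.
 */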

/*
 * Allocate stack room for the 15 general-purpose registers of pt_regs.
 * ORIG_RAX and the iret frame above it are pushed separately by the entry
 * code or the CPU.
 */
	.macro ALLOC_PT_GPREGS_ON_STACK
	addq	$-(15*8), %rsp
	.endm

/*
 * Save the callee-clobbered registers to the pt_regs area; individual
 * registers can be skipped via the arguments.
 */
	.macro SAVE_C_REGS_HELPER offset=0 rax=1 rcx=1 r8910=1 r11=1
	.if \r11
	movq %r11, 6*8+\offset(%rsp)
	.endif
	.if \r8910
	movq %r10, 7*8+\offset(%rsp)
	movq %r9,  8*8+\offset(%rsp)
	movq %r8,  9*8+\offset(%rsp)
	.endif
	.if \rax
	movq %rax, 10*8+\offset(%rsp)
	.endif
	.if \rcx
	movq %rcx, 11*8+\offset(%rsp)
	.endif
	movq %rdx, 12*8+\offset(%rsp)
	movq %rsi, 13*8+\offset(%rsp)
	movq %rdi, 14*8+\offset(%rsp)
	.endm
	.macro SAVE_C_REGS offset=0
	SAVE_C_REGS_HELPER \offset, 1, 1, 1, 1
	.endm
	.macro SAVE_C_REGS_EXCEPT_RAX_RCX offset=0
	SAVE_C_REGS_HELPER \offset, 0, 0, 1, 1
	.endm
	.macro SAVE_C_REGS_EXCEPT_R891011
	SAVE_C_REGS_HELPER 0, 1, 1, 0, 0
	.endm
	.macro SAVE_C_REGS_EXCEPT_RCX_R891011
	SAVE_C_REGS_HELPER 0, 1, 0, 0, 0
	.endm
	.macro SAVE_C_REGS_EXCEPT_RAX_RCX_R11
	SAVE_C_REGS_HELPER 0, 0, 0, 1, 0
	.endm

/* Save the callee-preserved registers (the "extra" part of pt_regs). */
	.macro SAVE_EXTRA_REGS offset=0
	movq %r15, 0*8+\offset(%rsp)
	movq %r14, 1*8+\offset(%rsp)
	movq %r13, 2*8+\offset(%rsp)
	movq %r12, 3*8+\offset(%rsp)
	movq %rbp, 4*8+\offset(%rsp)
	movq %rbx, 5*8+\offset(%rsp)
	.endm
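/*
 * Typical usage sketch (illustrative, not a literal excerpt from the entry
 * code): building a complete pt_regs after ORIG_RAX and the iret frame
 * have already been pushed:
 *
 *	ALLOC_PT_GPREGS_ON_STACK
 *	SAVE_C_REGS
 *	SAVE_EXTRA_REGS
 *
 * afterwards %rsp points at pt_regs->r15, i.e. it can be passed to C code
 * as a 'struct pt_regs *'.
 */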

/* Restore the callee-preserved registers from the pt_regs area. */
	.macro RESTORE_EXTRA_REGS offset=0
	movq 0*8+\offset(%rsp), %r15
	movq 1*8+\offset(%rsp), %r14
	movq 2*8+\offset(%rsp), %r13
	movq 3*8+\offset(%rsp), %r12
	movq 4*8+\offset(%rsp), %rbp
	movq 5*8+\offset(%rsp), %rbx
	.endm

/*
 * Restore the callee-clobbered registers from the pt_regs area; individual
 * registers can be skipped via the arguments.
 */
	.macro RESTORE_C_REGS_HELPER rstor_rax=1, rstor_rcx=1, rstor_r11=1, rstor_r8910=1, rstor_rdx=1
	.if \rstor_r11
	movq 6*8(%rsp), %r11
	.endif
	.if \rstor_r8910
	movq 7*8(%rsp), %r10
	movq 8*8(%rsp), %r9
	movq 9*8(%rsp), %r8
	.endif
	.if \rstor_rax
	movq 10*8(%rsp), %rax
	.endif
	.if \rstor_rcx
	movq 11*8(%rsp), %rcx
	.endif
	.if \rstor_rdx
	movq 12*8(%rsp), %rdx
	.endif
	movq 13*8(%rsp), %rsi
	movq 14*8(%rsp), %rdi
	.endm
	.macro RESTORE_C_REGS
	RESTORE_C_REGS_HELPER 1,1,1,1,1
	.endm
	.macro RESTORE_C_REGS_EXCEPT_RAX
	RESTORE_C_REGS_HELPER 0,1,1,1,1
	.endm
	.macro RESTORE_C_REGS_EXCEPT_RCX
	RESTORE_C_REGS_HELPER 1,0,1,1,1
	.endm
	.macro RESTORE_C_REGS_EXCEPT_R11
	RESTORE_C_REGS_HELPER 1,1,0,1,1
	.endm
	.macro RESTORE_C_REGS_EXCEPT_RCX_R11
	RESTORE_C_REGS_HELPER 1,0,0,1,1
	.endm

/* Pop the register save area (plus \addskip extra bytes) off the stack. */
	.macro REMOVE_PT_GPREGS_FROM_STACK addskip=0
	subq $-(15*8+\addskip), %rsp
	.endm
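/*
 * Matching unwind sketch (illustrative): restoring the saved registers and
 * dropping the register area before returning to user mode:
 *
 *	RESTORE_EXTRA_REGS
 *	RESTORE_C_REGS
 *	REMOVE_PT_GPREGS_FROM_STACK 8	# the extra 8 bytes skip ORIG_RAX
 *
 * which leaves %rsp pointing at the iret frame (RIP/CS/EFLAGS/RSP/SS).
 */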

/*
 * ICEBP/INT1: undocumented one-byte opcode 0xf1, generates a debug
 * exception (#DB).
 */
	.macro icebp
	.byte 0xf1
	.endm

#endif /* CONFIG_X86_64 */

/*
 * This does 'call enter_from_user_mode' unless we can avoid it based on
 * kernel config or using the static jump infrastructure.
 */
.macro CALL_enter_from_user_mode
#ifdef CONFIG_CONTEXT_TRACKING
#ifdef HAVE_JUMP_LABEL
	STATIC_JUMP_IF_FALSE .Lafter_call_\@, context_tracking_enabled, def=0
#endif
	call enter_from_user_mode
.Lafter_call_\@:
#endif
.endm
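/*
 * Usage sketch (illustrative): entry paths coming from user space invoke
 * this after the registers have been saved, e.g.:
 *
 *	SAVE_C_REGS
 *	SAVE_EXTRA_REGS
 *	CALL_enter_from_user_mode
 *
 * With CONFIG_CONTEXT_TRACKING disabled the macro expands to nothing; with
 * jump labels available, the call is skipped via a patched jump while
 * context tracking is disabled at run time.
 */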