/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_S390_STACKTRACE_H
#define _ASM_S390_STACKTRACE_H

#include <linux/uaccess.h>
#include <linux/ptrace.h>
#include <asm/switch_to.h>

enum stack_type {
	STACK_TYPE_UNKNOWN,
	STACK_TYPE_TASK,
	STACK_TYPE_IRQ,
	STACK_TYPE_NODAT,
	STACK_TYPE_RESTART,
	STACK_TYPE_MCCK,
};

struct stack_info {
	enum stack_type type;
	unsigned long begin, end;
};

const char *stack_type_name(enum stack_type type);
int get_stack_info(unsigned long sp, struct task_struct *task,
		   struct stack_info *info, unsigned long *visit_mask);

static inline bool on_stack(struct stack_info *info,
			    unsigned long addr, size_t len)
{
	if (info->type == STACK_TYPE_UNKNOWN)
		return false;
	if (addr + len < addr)
		return false;
	return addr >= info->begin && addr + len <= info->end;
}
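
/*
 * Illustrative sketch (not part of this header): before dereferencing
 * data found while walking a stack, a caller might verify that the
 * whole object lies within the stack described by @info, e.g. for a
 * pt_regs area located at @addr:
 *
 *	if (on_stack(&info, addr, sizeof(struct pt_regs)))
 *		regs = (struct pt_regs *)addr;
 *
 * Note the overflow check above (addr + len < addr) makes ranges that
 * wrap around the address space fail this test as well.
 */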

/*
 * Stack layout of a C stack frame.
 * The kernel uses the packed stack layout (-mpacked-stack).
 */
struct stack_frame {
	union {
		unsigned long empty[9];
		struct {
			unsigned long sie_control_block;
			unsigned long sie_savearea;
			unsigned long sie_reason;
			unsigned long sie_flags;
			unsigned long sie_control_block_phys;
		};
	};
	unsigned long gprs[10];
	unsigned long back_chain;
};
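
/*
 * The back_chain member links frames together: with -mbackchain each
 * frame's back_chain slot holds the stack pointer of the caller's
 * frame, and gprs[] holds the saved registers %r6-%r15. A minimal back
 * chain walk might look like the sketch below (illustrative only; the
 * real unwinder in arch/s390/kernel/ additionally validates every step
 * with get_stack_info()/on_stack()):
 *
 *	unsigned long sp = current_frame_address();
 *
 *	while (sp) {
 *		struct stack_frame *sf = (struct stack_frame *)sp;
 *
 *		pr_info("%pS\n", (void *)sf->gprs[8]);	// saved %r14
 *		sp = sf->back_chain;
 *	}
 */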

/*
 * Unlike current_stack_pointer, which simply contains the current value
 * of %r15, current_frame_address() returns the function's stack frame
 * address, which matches %r15 upon function invocation. It may differ
 * from %r15 later if the function allocates stack space for local
 * variables or a new stack frame to call other functions.
 */
#define current_frame_address()						\
	((unsigned long)__builtin_frame_address(0) -			\
	 offsetof(struct stack_frame, back_chain))

static __always_inline unsigned long get_stack_pointer(struct task_struct *task,
						       struct pt_regs *regs)
{
	if (regs)
		return (unsigned long)kernel_stack_pointer(regs);
	if (task == current)
		return current_frame_address();
	return (unsigned long)task->thread.ksp;
}
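
/*
 * Illustrative sketch (hypothetical helper, not part of this header):
 * pick the starting stack pointer for a backtrace and classify the
 * stack it lives on. get_stack_info() is assumed here to return 0 on
 * success.
 *
 *	static void start_backtrace(struct task_struct *task,
 *				    struct pt_regs *regs)
 *	{
 *		unsigned long sp = get_stack_pointer(task, regs);
 *		unsigned long visit_mask = 0;
 *		struct stack_info info;
 *
 *		if (!get_stack_info(sp, task, &info, &visit_mask))
 *			pr_info("%s stack\n", stack_type_name(info.type));
 *	}
 */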

/*
 * To keep this simple, mark registers 2-6 as being changed (volatile)
 * by the called function, even though register 6 is saved/nonvolatile.
 */
#define CALL_FMT_0 "=&d" (r2)
#define CALL_FMT_1 "+&d" (r2)
#define CALL_FMT_2 CALL_FMT_1, "+&d" (r3)
#define CALL_FMT_3 CALL_FMT_2, "+&d" (r4)
#define CALL_FMT_4 CALL_FMT_3, "+&d" (r5)
#define CALL_FMT_5 CALL_FMT_4, "+&d" (r6)

#define CALL_CLOBBER_5 "0", "1", "14", "cc", "memory"
#define CALL_CLOBBER_4 CALL_CLOBBER_5
#define CALL_CLOBBER_3 CALL_CLOBBER_4, "5"
#define CALL_CLOBBER_2 CALL_CLOBBER_3, "4"
#define CALL_CLOBBER_1 CALL_CLOBBER_2, "3"
#define CALL_CLOBBER_0 CALL_CLOBBER_1
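
/*
 * For example, with nr == 2 the constraint helpers above expand
 * (roughly) to:
 *
 *	outputs:  "+&d" (r2), "+&d" (r3)
 *	clobbers: "0", "1", "14", "cc", "memory", "5", "4"
 *
 * i.e. %r2 and %r3 carry the arguments in (and %r2 the return value
 * back), while the remaining argument registers %r4 and %r5 are listed
 * as clobbers.
 */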

#define CALL_LARGS_0(...)						\
	long dummy = 0
#define CALL_LARGS_1(t1, a1)						\
	long arg1 = (long)(t1)(a1)
#define CALL_LARGS_2(t1, a1, t2, a2)					\
	CALL_LARGS_1(t1, a1);						\
	long arg2 = (long)(t2)(a2)
#define CALL_LARGS_3(t1, a1, t2, a2, t3, a3)				\
	CALL_LARGS_2(t1, a1, t2, a2);					\
	long arg3 = (long)(t3)(a3)
#define CALL_LARGS_4(t1, a1, t2, a2, t3, a3, t4, a4)			\
	CALL_LARGS_3(t1, a1, t2, a2, t3, a3);				\
	long arg4 = (long)(t4)(a4)
#define CALL_LARGS_5(t1, a1, t2, a2, t3, a3, t4, a4, t5, a5)		\
	CALL_LARGS_4(t1, a1, t2, a2, t3, a3, t4, a4);			\
	long arg5 = (long)(t5)(a5)

#define CALL_REGS_0							\
	register long r2 asm("2") = dummy
#define CALL_REGS_1							\
	register long r2 asm("2") = arg1
#define CALL_REGS_2							\
	CALL_REGS_1;							\
	register long r3 asm("3") = arg2
#define CALL_REGS_3							\
	CALL_REGS_2;							\
	register long r4 asm("4") = arg3
#define CALL_REGS_4							\
	CALL_REGS_3;							\
	register long r5 asm("5") = arg4
#define CALL_REGS_5							\
	CALL_REGS_4;							\
	register long r6 asm("6") = arg5
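
/*
 * Continuing the nr == 2 example, the argument helpers above expand to
 * the equivalent of:
 *
 *	long arg1 = (long)(t1)(a1);
 *	long arg2 = (long)(t2)(a2);
 *	register long r2 asm("2") = arg1;
 *	register long r3 asm("3") = arg2;
 *
 * The cast through the declared parameter type followed by the cast to
 * long is what provides the sign and zero extension mentioned in the
 * call_on_stack() description below.
 */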

#define CALL_TYPECHECK_0(...)
#define CALL_TYPECHECK_1(t, a, ...)					\
	typecheck(t, a)
#define CALL_TYPECHECK_2(t, a, ...)					\
	CALL_TYPECHECK_1(__VA_ARGS__);					\
	typecheck(t, a)
#define CALL_TYPECHECK_3(t, a, ...)					\
	CALL_TYPECHECK_2(__VA_ARGS__);					\
	typecheck(t, a)
#define CALL_TYPECHECK_4(t, a, ...)					\
	CALL_TYPECHECK_3(__VA_ARGS__);					\
	typecheck(t, a)
#define CALL_TYPECHECK_5(t, a, ...)					\
	CALL_TYPECHECK_4(__VA_ARGS__);					\
	typecheck(t, a)

#define CALL_PARM_0(...) void
#define CALL_PARM_1(t, a, ...) t
#define CALL_PARM_2(t, a, ...) t, CALL_PARM_1(__VA_ARGS__)
#define CALL_PARM_3(t, a, ...) t, CALL_PARM_2(__VA_ARGS__)
#define CALL_PARM_4(t, a, ...) t, CALL_PARM_3(__VA_ARGS__)
#define CALL_PARM_5(t, a, ...) t, CALL_PARM_4(__VA_ARGS__)
#define CALL_PARM_6(t, a, ...) t, CALL_PARM_5(__VA_ARGS__)

/*
 * Use call_on_stack() to call a function after switching to a specified
 * stack. Function arguments are properly sign and zero extended. Usage:
 *
 * rc = call_on_stack(nr, stack, rettype, fn, t1, a1, t2, a2, ...)
 *
 * - nr specifies the number of function arguments of fn.
 * - stack specifies the stack to be used.
 * - rettype is the return type of fn.
 * - fn is the function to be called.
 * - t1, a1, ... are pairs, where t1 must match the type of the first
 *   argument of fn, t2 the second, etc. a1 is the corresponding
 *   first function argument (not its name), etc.
 */
#define call_on_stack(nr, stack, rettype, fn, ...)			\
({									\
	rettype (*__fn)(CALL_PARM_##nr(__VA_ARGS__)) = fn;		\
	unsigned long frame = current_frame_address();			\
	unsigned long __stack = stack;					\
	unsigned long prev;						\
	CALL_LARGS_##nr(__VA_ARGS__);					\
	CALL_REGS_##nr;							\
									\
	CALL_TYPECHECK_##nr(__VA_ARGS__);				\
	asm volatile(							\
		"	lgr	%[_prev],15\n"				\
		"	lg	15,%[_stack]\n"				\
		"	stg	%[_frame],%[_bc](15)\n"			\
		"	brasl	14,%[_fn]\n"				\
		"	lgr	15,%[_prev]\n"				\
		: [_prev] "=&d" (prev), CALL_FMT_##nr			\
		: [_stack] "R" (__stack),				\
		  [_bc] "i" (offsetof(struct stack_frame, back_chain)),	\
		  [_frame] "d" (frame),					\
		  [_fn] "X" (__fn) : CALL_CLOBBER_##nr);		\
	(rettype)r2;							\
})
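
/*
 * Illustrative example (function and stack names are hypothetical):
 * call a two-argument function on a separate stack and collect its
 * return value. new_stack is the initial stack pointer value to load
 * into %r15.
 *
 *	static int do_work(void *data, unsigned int len);
 *	...
 *	rc = call_on_stack(2, new_stack, int, do_work,
 *			   void *, data, unsigned int, len);
 *
 * The old %r15 is saved and restored around the call, and the current
 * frame address is stored into the new stack's back_chain slot so the
 * unwinder can walk back across the stack switch.
 */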

#define call_on_stack_noreturn(fn, stack)				\
({									\
	void (*__fn)(void) = fn;					\
									\
	asm volatile(							\
		"	la	15,0(%[_stack])\n"			\
		"	xc	%[_bc](8,15),%[_bc](15)\n"		\
		"	brasl	14,%[_fn]\n"				\
		::[_bc] "i" (offsetof(struct stack_frame, back_chain)),	\
		  [_stack] "a" (stack), [_fn] "X" (__fn));		\
	BUG();								\
})
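
/*
 * Illustrative example (function and stack names are hypothetical):
 * continue execution on a freshly set up stack without ever returning.
 * The back_chain slot of the new stack is cleared, so the new stack
 * terminates the back chain.
 *
 *	static void do_final_work(void);
 *	...
 *	call_on_stack_noreturn(do_final_work, new_stack);
 */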

#endif /* _ASM_S390_STACKTRACE_H */