/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright IBM Corp. 2008, 2009
 *
 */

#include <linux/linkage.h>
#include <asm/asm-offsets.h>
#include <asm/ftrace.h>
#include <asm/nospec-insn.h>
#include <asm/ptrace.h>
#include <asm/export.h>


#define STACK_FRAME_SIZE	(STACK_FRAME_OVERHEAD + __PT_SIZE)
#define STACK_PTREGS		(STACK_FRAME_OVERHEAD)
#define STACK_PTREGS_GPRS	(STACK_PTREGS + __PT_GPRS)
#define STACK_PTREGS_PSW	(STACK_PTREGS + __PT_PSW)
#define STACK_PTREGS_ORIG_GPR2	(STACK_PTREGS + __PT_ORIG_GPR2)
#define STACK_PTREGS_FLAGS	(STACK_PTREGS + __PT_FLAGS)
/* packed stack: allocate just enough for r14, r15 and backchain */
#define TRACED_FUNC_FRAME_SIZE	24

#ifdef CONFIG_FUNCTION_TRACER

	GEN_BR_THUNK %r1
	GEN_BR_THUNK %r14

	.section .kprobes.text, "ax"

SYM_FUNC_START(ftrace_stub)
	BR_EX	%r14
SYM_FUNC_END(ftrace_stub)

SYM_CODE_START(ftrace_stub_direct_tramp)
	lgr	%r1, %r0
	BR_EX	%r1
SYM_CODE_END(ftrace_stub_direct_tramp)

	.macro	ftrace_regs_entry, allregs=0
	stg	%r14,(__SF_GPRS+8*8)(%r15)	# save traced function caller

	.if \allregs == 1
	# save psw mask
	# don't put any instructions clobbering CC before this point
	epsw	%r1,%r14
	risbg	%r14,%r1,0,31,32
	.endif

	lgr	%r1,%r15
	# allocate stack frame for ftrace_caller to contain traced function
	aghi	%r15,-TRACED_FUNC_FRAME_SIZE
	stg	%r1,__SF_BACKCHAIN(%r15)
	stg	%r0,(__SF_GPRS+8*8)(%r15)
	stg	%r15,(__SF_GPRS+9*8)(%r15)
	# allocate pt_regs and stack frame for ftrace_trace_function
	aghi	%r15,-STACK_FRAME_SIZE
	stg	%r1,(STACK_PTREGS_GPRS+15*8)(%r15)
	xc	STACK_PTREGS_ORIG_GPR2(8,%r15),STACK_PTREGS_ORIG_GPR2(%r15)

	.if \allregs == 1
	stg	%r14,(STACK_PTREGS_PSW)(%r15)
	mvghi	STACK_PTREGS_FLAGS(%r15),_PIF_FTRACE_FULL_REGS
	.else
	xc	STACK_PTREGS_FLAGS(8,%r15),STACK_PTREGS_FLAGS(%r15)
	.endif

	lg	%r14,(__SF_GPRS+8*8)(%r1)	# restore original return address
	aghi	%r1,-TRACED_FUNC_FRAME_SIZE
	stg	%r1,__SF_BACKCHAIN(%r15)
	stg	%r0,(STACK_PTREGS_PSW+8)(%r15)
	stmg	%r2,%r14,(STACK_PTREGS_GPRS+2*8)(%r15)
	.endm

SYM_CODE_START(ftrace_regs_caller)
	ftrace_regs_entry	1
	j	ftrace_common
SYM_CODE_END(ftrace_regs_caller)

SYM_CODE_START(ftrace_caller)
	ftrace_regs_entry	0
	j	ftrace_common
SYM_CODE_END(ftrace_caller)

SYM_CODE_START(ftrace_common)
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
	aghik	%r2,%r0,-MCOUNT_INSN_SIZE
	lgrl	%r4,function_trace_op
	lgrl	%r1,ftrace_func
#else
	lgr	%r2,%r0
	aghi	%r2,-MCOUNT_INSN_SIZE
	larl	%r4,function_trace_op
	lg	%r4,0(%r4)
	larl	%r1,ftrace_func
	lg	%r1,0(%r1)
#endif
	lgr	%r3,%r14
	la	%r5,STACK_PTREGS(%r15)
	BASR_EX	%r14,%r1
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
# The j instruction gets runtime patched to a nop instruction.
# See ftrace_enable_ftrace_graph_caller.
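# Descriptive note: with the j patched to a nop, the code below loads the
# saved return address (gpr 14) and stack pointer (gpr 15) from pt_regs and
# the traced function's address from the saved psw, calls
# prepare_ftrace_return(), and writes the (possibly redirected) return
# address back into the saved gpr 14 slot, so the traced function may
# return via return_to_handler instead of its original caller.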
SYM_INNER_LABEL(ftrace_graph_caller, SYM_L_GLOBAL)
	j	.Lftrace_graph_caller_end
	lmg	%r2,%r3,(STACK_PTREGS_GPRS+14*8)(%r15)
	lg	%r4,(STACK_PTREGS_PSW+8)(%r15)
	brasl	%r14,prepare_ftrace_return
	stg	%r2,(STACK_PTREGS_GPRS+14*8)(%r15)
.Lftrace_graph_caller_end:
#endif
	lg	%r0,(STACK_PTREGS_PSW+8)(%r15)
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
	ltg	%r1,STACK_PTREGS_ORIG_GPR2(%r15)
	locgrz	%r1,%r0
#else
	lg	%r1,STACK_PTREGS_ORIG_GPR2(%r15)
	ltgr	%r1,%r1
	jnz	0f
	lgr	%r1,%r0
#endif
0:	lmg	%r2,%r15,(STACK_PTREGS_GPRS+2*8)(%r15)
	BR_EX	%r1
SYM_CODE_END(ftrace_common)

#ifdef CONFIG_FUNCTION_GRAPH_TRACER

SYM_FUNC_START(return_to_handler)
	stmg	%r2,%r5,32(%r15)
	lgr	%r1,%r15
	aghi	%r15,-STACK_FRAME_OVERHEAD
	stg	%r1,__SF_BACKCHAIN(%r15)
	brasl	%r14,ftrace_return_to_handler
	aghi	%r15,STACK_FRAME_OVERHEAD
	lgr	%r14,%r2
	lmg	%r2,%r5,32(%r15)
	BR_EX	%r14
SYM_FUNC_END(return_to_handler)

#endif
#endif /* CONFIG_FUNCTION_TRACER */

SYM_CODE_START(ftrace_shared_hotpatch_trampoline_br)
	lmg	%r0,%r1,2(%r1)
	br	%r1
SYM_INNER_LABEL(ftrace_shared_hotpatch_trampoline_br_end, SYM_L_GLOBAL)
SYM_CODE_END(ftrace_shared_hotpatch_trampoline_br)

#ifdef CONFIG_EXPOLINE
SYM_CODE_START(ftrace_shared_hotpatch_trampoline_exrl)
	lmg	%r0,%r1,2(%r1)
	exrl	%r0,0f
	j	.
0:	br	%r1
SYM_INNER_LABEL(ftrace_shared_hotpatch_trampoline_exrl_end, SYM_L_GLOBAL)
SYM_CODE_END(ftrace_shared_hotpatch_trampoline_exrl)
#endif /* CONFIG_EXPOLINE */

#ifdef CONFIG_RETHOOK

SYM_CODE_START(arch_rethook_trampoline)
	stg	%r14,(__SF_GPRS+8*8)(%r15)
	lay	%r15,-STACK_FRAME_SIZE(%r15)
	stmg	%r0,%r14,STACK_PTREGS_GPRS(%r15)

	# store original stack pointer in backchain and pt_regs
	lay	%r7,STACK_FRAME_SIZE(%r15)
	stg	%r7,__SF_BACKCHAIN(%r15)
	stg	%r7,STACK_PTREGS_GPRS+(15*8)(%r15)

	# store full psw
	epsw	%r2,%r3
	risbg	%r3,%r2,0,31,32
	stg	%r3,STACK_PTREGS_PSW(%r15)
	larl	%r1,arch_rethook_trampoline
	stg	%r1,STACK_PTREGS_PSW+8(%r15)

	lay	%r2,STACK_PTREGS(%r15)
	brasl	%r14,arch_rethook_trampoline_callback

	mvc	__SF_EMPTY(16,%r7),STACK_PTREGS_PSW(%r15)
	lmg	%r0,%r15,STACK_PTREGS_GPRS(%r15)
	lpswe	__SF_EMPTY(%r15)
SYM_CODE_END(arch_rethook_trampoline)

#endif /* CONFIG_RETHOOK */