/*
 * arch/sh/lib/mcount.S
 *
 *  Copyright (C) 2008  Paul Mundt
 *  Copyright (C) 2008  Matt Fleming
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 */
#include <asm/ftrace.h>

/*
 * Save the argument registers (r4-r7) and pr across the call into the
 * tracer, then set up its two address arguments: r4 gets the return
 * address that the instrumented function pushed on the stack before
 * branching here (now at @(20,r15), below the five words just saved),
 * and r5 gets the current pr.
 */
#define MCOUNT_ENTER()		\
	mov.l	r4, @-r15;	\
	mov.l	r5, @-r15;	\
	mov.l	r6, @-r15;	\
	mov.l	r7, @-r15;	\
	sts.l	pr, @-r15;	\
				\
	mov.l	@(20,r15),r4;	\
	sts	pr, r5

/*
 * Restore the saved registers and return; the final pop of r4 is
 * executed from the rts delay slot.
 */
#define MCOUNT_LEAVE()		\
	lds.l	@r15+, pr;	\
	mov.l	@r15+, r7;	\
	mov.l	@r15+, r6;	\
	mov.l	@r15+, r5;	\
	rts;			\
	 mov.l	@r15+, r4

	.align 2
	.globl	_mcount
	.type	_mcount,@function
	.globl	mcount
	.type	mcount,@function
_mcount:
mcount:
	MCOUNT_ENTER()

#ifdef CONFIG_DYNAMIC_FTRACE
	.globl	mcount_call
mcount_call:
	mov.l	.Lftrace_stub, r6
#else
	mov.l	.Lftrace_trace_function, r6
	mov.l	ftrace_stub, r7
	cmp/eq	r6, r7
	bt	skip_trace
	mov.l	@r6, r6
#endif

	jsr	@r6
	 nop

skip_trace:
	MCOUNT_LEAVE()

	.align 2
.Lftrace_trace_function:
	.long	ftrace_trace_function

#ifdef CONFIG_DYNAMIC_FTRACE
	.globl	ftrace_caller
ftrace_caller:
	MCOUNT_ENTER()

	.globl	ftrace_call
ftrace_call:
	mov.l	.Lftrace_stub, r6
	jsr	@r6
	 nop

	MCOUNT_LEAVE()
#endif /* CONFIG_DYNAMIC_FTRACE */

/*
 * NOTE: From here on the locations of the .Lftrace_stub label and
 * ftrace_stub itself are fixed. Adding additional data here will skew
 * the displacement for the memory table and break the block replacement.
 * Place new labels either after the ftrace_stub body, or before
 * ftrace_caller. You have been warned.
 */
	.align 2
.Lftrace_stub:
	.long	ftrace_stub

	.globl	ftrace_stub
ftrace_stub:
	rts
	 nop