1 | /* |
2 | * MIPS specific _mcount support |
3 | * |
4 | * This file is subject to the terms and conditions of the GNU General Public |
5 | * License. See the file "COPYING" in the main directory of this archive for |
6 | * more details. |
7 | * |
8 | * Copyright (C) 2009 Lemote Inc. & DSLab, Lanzhou University, China |
9 | * Copyright (C) 2010 DSLab, Lanzhou University, China |
10 | * Author: Wu Zhangjin <wuzhangjin@gmail.com> |
11 | */ |
12 | |
13 | #include <linux/export.h> |
14 | #include <asm/regdef.h> |
15 | #include <asm/stackframe.h> |
16 | #include <asm/ftrace.h> |
17 | |
	.text
	.set noreorder			/* delay slots are scheduled by hand below */
	.set noat			/* $at carries the parent's return address
					 * at _mcount call sites; the assembler
					 * must not clobber it */
21 | |
	/*
	 * Carve a struct pt_regs sized frame out of the stack and save the
	 * registers a tracer call may clobber: the return address, $at
	 * (holding the parent's return address at the _mcount site) and the
	 * ABI argument registers, so the traced function's arguments survive.
	 */
	.macro MCOUNT_SAVE_REGS
	PTR_SUBU sp, PT_SIZE		/* open the pt_regs frame */
	PTR_S	ra, PT_R31(sp)
	PTR_S	AT, PT_R1(sp)		/* AT = parent's return address */
	PTR_S	a0, PT_R4(sp)
	PTR_S	a1, PT_R5(sp)
	PTR_S	a2, PT_R6(sp)
	PTR_S	a3, PT_R7(sp)
#ifdef CONFIG_64BIT
	/* N32/N64 pass up to eight arguments in registers: save a4-a7 too */
	PTR_S	a4, PT_R8(sp)
	PTR_S	a5, PT_R9(sp)
	PTR_S	a6, PT_R10(sp)
	PTR_S	a7, PT_R11(sp)
#endif
	.endm
37 | |
	/*
	 * Mirror of MCOUNT_SAVE_REGS: reload every register saved there and
	 * release the pt_regs frame, leaving the stack as the traced
	 * function's caller left it.
	 */
	.macro MCOUNT_RESTORE_REGS
	PTR_L	ra, PT_R31(sp)
	PTR_L	AT, PT_R1(sp)
	PTR_L	a0, PT_R4(sp)
	PTR_L	a1, PT_R5(sp)
	PTR_L	a2, PT_R6(sp)
	PTR_L	a3, PT_R7(sp)
#ifdef CONFIG_64BIT
	PTR_L	a4, PT_R8(sp)
	PTR_L	a5, PT_R9(sp)
	PTR_L	a6, PT_R10(sp)
	PTR_L	a7, PT_R11(sp)
#endif
	PTR_ADDIU sp, PT_SIZE		/* drop the pt_regs frame */
	.endm
53 | |
	/*
	 * Return into the instrumented function while restoring its real
	 * return address: the compiler's _mcount stub left the parent's ra
	 * in $at, so re-install it in the branch delay slot.
	 */
	.macro RETURN_BACK
	jr	ra
	 move	ra, AT			/* delay slot: ra = parent's ra */
	.endm
58 | |
/*
 * The -mmcount-ra-address option of gcc 4.5 uses register $12 to pass
 * the location of the parent's return address.  A zero value in $12
 * means the call site did not record one (leaf-style: the parent's ra
 * sits in its usual save slot instead) — see ftrace_graph_caller.
 */
#define MCOUNT_RA_ADDRESS_REG	$12
64 | |
65 | #ifdef CONFIG_DYNAMIC_FTRACE |
66 | |
/*
 * Dynamic-ftrace entry.  Compiled-in _mcount call sites initially jump
 * straight to ftrace_stub (a near no-op); when tracing is enabled the
 * patching code redirects them to ftrace_caller+8, skipping the stub
 * branch below.  The ftrace_call / ftrace_graph_call labels are runtime
 * patch sites: their nops are rewritten with real tracer calls.
 */
NESTED(ftrace_caller, PT_SIZE, ra)
	.globl _mcount
_mcount:
EXPORT_SYMBOL(_mcount)
	b	ftrace_stub		/* tracing off: fall through to return */
#ifdef CONFIG_32BIT
	 addiu sp,sp,8			/* delay slot: 32-bit mcount call sites
					 * pre-reserve 8 bytes of stack —
					 * give them back (matches L146) */
#else
	 nop				/* delay slot */
#endif

	/* When tracing is activated, it calls ftrace_caller+8 (aka here) */
	MCOUNT_SAVE_REGS
#ifdef KBUILD_MCOUNT_RA_ADDRESS
	PTR_S	MCOUNT_RA_ADDRESS_REG, PT_R12(sp)	/* keep &parent_ra for
							 * ftrace_graph_caller */
#endif

	PTR_SUBU a0, ra, 8	/* arg1: self address */
	/*
	 * Decide whether the call site lies inside core kernel text
	 * (_stext.._etext) or in a module; module call sequences are longer,
	 * so the recorded call-site address needs a larger back-off below.
	 */
	PTR_LA	t1, _stext
	sltu	t2, a0, t1	/* t2 = (a0 < _stext) */
	PTR_LA	t1, _etext
	sltu	t3, t1, a0	/* t3 = (a0 > _etext) */
	or	t1, t2, t3
	beqz	t1, ftrace_call	/* in-kernel text: a0 already correct */
	 nop			/* delay slot */
#if defined(KBUILD_MCOUNT_RA_ADDRESS) && defined(CONFIG_32BIT)
	PTR_SUBU a0, a0, 16	/* arg1: adjust to module's recorded callsite */
#else
	PTR_SUBU a0, a0, 12	/* NOTE(review): offsets presumably match the
				 * long-call mcount sequence length in modules
				 * — confirm against the patching code */
#endif

	.globl ftrace_call
ftrace_call:
	nop	/* a placeholder for the call to a real tracing function */
	 move	a1, AT		/* arg2: parent's return address
				 * (delay slot of the patched-in call) */

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	.globl ftrace_graph_call
ftrace_graph_call:
	nop			/* patched with a call to ftrace_graph_caller */
	nop			/* ...and its delay slot */
#endif

	MCOUNT_RESTORE_REGS
	.globl ftrace_stub
ftrace_stub:
	RETURN_BACK
	END(ftrace_caller)
115 | |
116 | #else /* ! CONFIG_DYNAMIC_FTRACE */ |
117 | |
/*
 * Static-ftrace entry: every compiled-in call site always reaches this
 * _mcount.  Dispatch is done at run time by comparing the tracer hook
 * pointers against their stub defaults.
 */
NESTED(_mcount, PT_SIZE, ra)
EXPORT_SYMBOL(_mcount)
	PTR_LA	t1, ftrace_stub
	PTR_L	t2, ftrace_trace_function /* Prepare t2 for (1) */
	beq	t1, t2, fgraph_trace	/* no function tracer installed */
	 nop				/* delay slot */

	MCOUNT_SAVE_REGS

	move	a0, ra		/* arg1: self return address */
	jalr	t2		/* (1) call *ftrace_trace_function */
	 move	a1, AT		/* arg2: parent's return address (delay slot) */

	MCOUNT_RESTORE_REGS

fgraph_trace:
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	/* Enter the graph tracer if either of its hooks is non-default. */
	PTR_LA	t1, ftrace_stub
	PTR_L	t3, ftrace_graph_return
	bne	t1, t3, ftrace_graph_caller
	 nop				/* delay slot */
	PTR_LA	t1, ftrace_graph_entry_stub
	PTR_L	t3, ftrace_graph_entry
	bne	t1, t3, ftrace_graph_caller
	 nop				/* delay slot */
#endif

#ifdef CONFIG_32BIT
	addiu sp, sp, 8		/* 32-bit mcount call sites pre-reserve
				 * 8 bytes of stack; release them before
				 * returning (matches the stub path) */
#endif

	.globl ftrace_stub
ftrace_stub:
	RETURN_BACK
	END(_mcount)
153 | |
154 | #endif /* ! CONFIG_DYNAMIC_FTRACE */ |
155 | |
156 | #ifdef CONFIG_FUNCTION_GRAPH_TRACER |
157 | |
/*
 * Hook the traced function's return: hand prepare_ftrace_return()
 *   a0 = address of the slot holding the parent's return address,
 *   a1 = the traced function's own return address,
 *   a2 = the caller's frame pointer,
 * so it can swap the parent ra for return_to_handler.
 * On entry from ftrace_caller (dynamic) the pt_regs frame already
 * exists; on entry from _mcount (static) we must build it here.
 */
NESTED(ftrace_graph_caller, PT_SIZE, ra)
#ifndef CONFIG_DYNAMIC_FTRACE
	MCOUNT_SAVE_REGS
#endif

	/* arg1: Get the location of the parent's return address */
#ifdef KBUILD_MCOUNT_RA_ADDRESS
#ifdef CONFIG_DYNAMIC_FTRACE
	PTR_L	a0, PT_R12(sp)		/* saved $12 from ftrace_caller */
#else
	move	a0, MCOUNT_RA_ADDRESS_REG
#endif
	bnez	a0, 1f	/* non-leaf func: stored in MCOUNT_RA_ADDRESS_REG */
	 nop				/* delay slot */
#endif
	PTR_LA	a0, PT_R1(sp)	/* leaf func: the location in current stack */
1:

	/* arg2: Get self return address */
#ifdef CONFIG_DYNAMIC_FTRACE
	PTR_L	a1, PT_R31(sp)		/* ra as saved by MCOUNT_SAVE_REGS */
#else
	move	a1, ra
#endif

	/*
	 * arg3: Get frame pointer of current stack.
	 * 32-bit adds 8 to skip the bytes the mcount call site reserved
	 * (see the addiu sp,sp,8 adjustments in _mcount/ftrace_caller).
	 */
#ifdef CONFIG_64BIT
	PTR_LA	a2, PT_SIZE(sp)
#else
	PTR_LA	a2, (PT_SIZE+8)(sp)
#endif

	jal	prepare_ftrace_return
	 nop				/* delay slot */
	MCOUNT_RESTORE_REGS
#ifndef CONFIG_DYNAMIC_FTRACE
#ifdef CONFIG_32BIT
	addiu sp, sp, 8		/* static path: release the call site's
				 * reserved 8 bytes, as _mcount would */
#endif
#endif
	RETURN_BACK
	END(ftrace_graph_caller)
200 | |
	/*
	 * Landing pad the graph tracer substitutes for the traced
	 * function's real return address.  Preserve the function's return
	 * values (v0/v1) across ftrace_return_to_handler(), which hands
	 * back the original parent return address in v0.
	 */
	.align 2
	.globl return_to_handler
return_to_handler:
	PTR_SUBU sp, PT_SIZE		/* scratch frame for v0/v1 */
	PTR_S	v0, PT_R2(sp)

	jal	ftrace_return_to_handler
	 PTR_S	v1, PT_R3(sp)		/* delay slot */

	/* restore the real parent address: v0 -> ra */
	move	ra, v0

	PTR_L	v0, PT_R2(sp)		/* reload traced function's retvals */
	PTR_L	v1, PT_R3(sp)
	jr	ra
	 PTR_ADDIU sp, PT_SIZE		/* delay slot: drop scratch frame */
217 | #endif /* CONFIG_FUNCTION_GRAPH_TRACER */ |
218 | |
	.set at				/* re-allow assembler use of $at */
	.set reorder			/* resume assembler delay-slot filling */
221 | |