/*
 * Copyright IBM Corp. 2008,2009
 *
 * Author(s): Heiko Carstens <heiko.carstens@de.ibm.com>,
 */
#include <asm/asm-offsets.h>

#ifdef CONFIG_64BIT

#ifdef CONFIG_DYNAMIC_FTRACE
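# Dynamic ftrace call path: tracing is skipped while function_trace_stop is
# set, a new stack frame is chained via __SF_BACKCHAIN, and the current
# tracer is reached through the ftrace_dyn_func pointer.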
	larl	%r1,function_trace_stop
	stg	%r1,__SF_BACKCHAIN(%r15)
	larl	%r14,ftrace_dyn_func
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	.globl ftrace_graph_caller
ftrace_graph_caller:
	# This unconditional branch gets runtime patched. Change only if
	# you know what you are doing. See ftrace_enable_graph_caller().
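	# prepare_ftrace_return() replaces the traced function's return address
	# so that return_to_handler runs when the function returns.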
	brasl	%r14,prepare_ftrace_return
#endif

	.globl ftrace_dyn_func
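# ftrace_dyn_func holds the address of the currently installed tracer
# function; it is read by the caller above and updated by the dynamic
# ftrace core.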
#else /* CONFIG_DYNAMIC_FTRACE */
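# Non-dynamic variant: the compiler-generated _mcount call ends up here and
# the tracer is reached through the ftrace_trace_function pointer instead of
# a patched call site.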
	larl	%r1,function_trace_stop
	stg	%r1,__SF_BACKCHAIN(%r15)
	larl	%r14,ftrace_trace_function
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	brasl	%r14,prepare_ftrace_return
#endif

#endif /* CONFIG_DYNAMIC_FTRACE */
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
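# return_to_handler is installed by prepare_ftrace_return() as a fake return
# address; ftrace_return_to_handler() records the function exit and returns
# the original return address to branch back to.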
	.globl return_to_handler
return_to_handler:
	stg	%r1,__SF_BACKCHAIN(%r15)
	brasl	%r14,ftrace_return_to_handler
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */

#else /* CONFIG_64BIT */
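# 31-bit variants of the same entry points. Symbol addresses are kept in
# literal pool entries (labels 0: and 1:) and loaded relative to a base
# register instead of with larl.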
#ifdef CONFIG_DYNAMIC_FTRACE
0:	.long	ftrace_trace_function
1:	.long	function_trace_stop
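	# %r0 carries the caller's stack pointer; store it as the back chain of
	# the newly allocated stack frame.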
	st	%r0,__SF_BACKCHAIN(%r15)
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	.globl ftrace_graph_caller
ftrace_graph_caller:
	# This unconditional branch gets runtime patched. Change only if
	# you know what you are doing. See ftrace_enable_graph_caller().
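	# Literal pool entry: the address of prepare_ftrace_return is loaded
	# from here and called indirectly.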
	.long	prepare_ftrace_return
#endif
3:	lm	%r2,%r5,16(%r15)

	.globl ftrace_dyn_func
#else /* CONFIG_DYNAMIC_FTRACE */
0:	.long	ftrace_trace_function
1:	.long	function_trace_stop
	st	%r0,__SF_BACKCHAIN(%r15)
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	.long	prepare_ftrace_return
#endif
3:	lm	%r2,%r5,16(%r15)
#endif /* CONFIG_DYNAMIC_FTRACE */
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
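# 31-bit return_to_handler: same role as the 64-bit version above, with
# ftrace_return_to_handler reached through the literal pool entry below.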
	.globl return_to_handler
return_to_handler:
	st	%r0,__SF_BACKCHAIN(%r15)
	.long	ftrace_return_to_handler
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */
#endif /* CONFIG_64BIT */