1 #include <linux/sched.h>
2 #include <linux/sched/debug.h>
3 #include <linux/stacktrace.h>
4 #include <linux/thread_info.h>
5 #include <linux/ftrace.h>
6 #include <linux/export.h>
7 #include <asm/ptrace.h>
8 #include <asm/stacktrace.h>
/*
 * __save_stack_trace - walk this thread's kernel stack and record PCs
 * into @trace->entries.
 *
 * NOTE(review): this chunk is an incomplete extraction — interior lines
 * (the do { } loop header, declarations of pc/regs, else branches and
 * closing #endif/braces) are missing, and each line carries a fused
 * original line number.  Comments below describe only what the visible
 * lines establish.
 *
 * @tp:    thread_info of the task whose stack is walked
 * @trace: output; nr_entries is advanced up to max_entries
 * (a third parameter — presumably the skip_sched flag used at the
 *  "in_sched_functions" test below — is cut off; TODO confirm)
 */
12 static void __save_stack_trace(struct thread_info *tp,
13 struct stack_trace *trace,
/* Frame-walk cursors: ksp = saved stack pointer, fp = biased frame ptr. */
16 unsigned long ksp, fp;
17 #ifdef CONFIG_FUNCTION_GRAPH_TRACER
/* Task handle needed to query the graph tracer's shadow return stack. */
18 struct task_struct *t;
/* For the current thread, read the live frame pointer (%fp) directly
 * instead of the saved ksp. */
22 if (tp == current_thread_info()) {
24 __asm__ __volatile__("mov %%fp, %0" : "=r" (ksp));
/* SPARC64 ABI: stack pointers are stored biased; add STACK_BIAS to get
 * the true frame address. */
29 fp = ksp + STACK_BIAS;
30 #ifdef CONFIG_FUNCTION_GRAPH_TRACER
/* Per-iteration frame view (loop header is missing from this chunk). */
34 struct sparc_stackf *sf;
/* Stop unwinding as soon as fp leaves this task's valid kernel stack. */
38 if (!kstack_valid(tp, fp))
/* A trap frame's pt_regs sits immediately after the register window. */
41 sf = (struct sparc_stackf *) fp;
42 regs = (struct pt_regs *) (sf + 1);
/* Trap frame: only continue through privileged (kernel-mode) traps;
 * resume the walk at the trapping frame's saved %i6. */
44 if (kstack_is_trap_frame(tp, regs)) {
45 if (!(regs->tstate & TSTATE_PRIV))
48 fp = regs->u_regs[UREG_I6] + STACK_BIAS;
/* Ordinary frame: follow the saved frame pointer (re-biased). */
51 fp = (unsigned long)sf->fp + STACK_BIAS;
/* Optionally filter out scheduler-internal PCs (flag from the cut-off
 * parameter list — TODO confirm its name/type). */
56 else if (!skip_sched || !in_sched_functions(pc)) {
57 trace->entries[trace->nr_entries++] = pc;
58 #ifdef CONFIG_FUNCTION_GRAPH_TRACER
/* A return address pointing 8 bytes before return_to_handler means the
 * graph tracer patched this frame; recover the real caller PC from the
 * tracer's shadow return stack and record it as an extra entry. */
59 if ((pc + 8UL) == (unsigned long) &return_to_handler) {
60 struct ftrace_ret_stack *ret_stack;
61 ret_stack = ftrace_graph_get_ret_stack(t,
65 if (trace->nr_entries <
67 trace->entries[trace->nr_entries++] = pc;
/* Walk until the output buffer is full (or kstack_valid broke out). */
73 } while (trace->nr_entries < trace->max_entries);
76 void save_stack_trace(struct stack_trace *trace)
78 __save_stack_trace(current_thread_info(), trace, false);
80 EXPORT_SYMBOL_GPL(save_stack_trace);
82 void save_stack_trace_tsk(struct task_struct *tsk, struct stack_trace *trace)
84 struct thread_info *tp = task_thread_info(tsk);
86 __save_stack_trace(tp, trace, true);
88 EXPORT_SYMBOL_GPL(save_stack_trace_tsk);