/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2012 ARM Ltd.
 */
5#ifndef __ASM_STACKTRACE_H
6#define __ASM_STACKTRACE_H
7
8#include <linux/percpu.h>
9#include <linux/sched.h>
10#include <linux/sched/task_stack.h>
11#include <linux/types.h>
12
13#include <asm/memory.h>
14#include <asm/ptrace.h>
15#include <asm/sdei.h>
16
/*
 * The kinds of stack an unwinder may walk across. Each stack type is
 * tracked separately (see stackframe::stacks_done) so the unwinder can
 * detect and refuse cycles between stacks.
 */
enum stack_type {
	STACK_TYPE_UNKNOWN,
	STACK_TYPE_TASK,		/* the task's own kernel stack */
	STACK_TYPE_IRQ,			/* per-cpu IRQ stack (irq_stack_ptr) */
	STACK_TYPE_OVERFLOW,		/* per-cpu overflow stack (CONFIG_VMAP_STACK only) */
	STACK_TYPE_SDEI_NORMAL,		/* SDEI stack — see on_sdei_stack() in <asm/sdei.h> */
	STACK_TYPE_SDEI_CRITICAL,	/* SDEI critical-event stack */
	__NR_STACK_TYPES		/* number of stack types; sizes the stacks_done bitmap */
};
26
/*
 * Describes the bounds and kind of a single stack. Filled in by
 * on_stack() when @sp falls within [low, high).
 */
struct stack_info {
	unsigned long low;	/* lowest valid address (inclusive) */
	unsigned long high;	/* highest address (exclusive) */
	enum stack_type type;
};
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
/*
 * A snapshot of a frame record or fp/lr register values, along with some
 * accounting information necessary for robust unwinding.
 *
 * @fp:          The fp value in the frame record (or the real fp)
 * @pc:          The lr value in the frame record (or the real lr)
 *
 * @stacks_done: Stacks which have been entirely unwound, for which it is no
 *               longer valid to unwind to.
 *
 * @prev_fp:     The fp that pointed to this frame record, or a synthetic value
 *               of 0. This is used to ensure that within a stack, each
 *               subsequent frame record is at an increasing address.
 * @prev_type:   The type of stack this frame record was on, or a synthetic
 *               value of STACK_TYPE_UNKNOWN. This is used to detect a
 *               transition from one stack to another.
 *
 * @graph:       When FUNCTION_GRAPH_TRACER is selected, holds the index of a
 *               replaced return address on the ftrace shadow stack — TODO
 *               confirm against the unwinder's users of this field.
 */
struct stackframe {
	unsigned long fp;
	unsigned long pc;
	DECLARE_BITMAP(stacks_done, __NR_STACK_TYPES);
	unsigned long prev_fp;
	enum stack_type prev_type;
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	int graph;
#endif
};
63
/* Advance @frame one step up @tsk's call chain; nonzero when unwinding stops. */
extern int unwind_frame(struct task_struct *tsk, struct stackframe *frame);
/* Walk frames from @frame, invoking @fn(@data, pc) until it returns false. */
extern void walk_stackframe(struct task_struct *tsk, struct stackframe *frame,
			    bool (*fn)(void *, unsigned long), void *data);
/* Print a backtrace for @tsk (or @regs when non-NULL) at log level @loglvl. */
extern void dump_backtrace(struct pt_regs *regs, struct task_struct *tsk,
			   const char *loglvl);

/* Base address of this CPU's IRQ stack; defined in the arch IRQ code. */
DECLARE_PER_CPU(unsigned long *, irq_stack_ptr);
71
72static inline bool on_stack(unsigned long sp, unsigned long low,
73 unsigned long high, enum stack_type type,
74 struct stack_info *info)
75{
76 if (!low)
77 return false;
78
79 if (sp < low || sp >= high)
80 return false;
81
82 if (info) {
83 info->low = low;
84 info->high = high;
85 info->type = type;
86 }
87 return true;
88}
89
90static inline bool on_irq_stack(unsigned long sp,
91 struct stack_info *info)
92{
93 unsigned long low = (unsigned long)raw_cpu_read(irq_stack_ptr);
94 unsigned long high = low + IRQ_STACK_SIZE;
95
96 return on_stack(sp, low, high, STACK_TYPE_IRQ, info);
97}
98
99static inline bool on_task_stack(const struct task_struct *tsk,
100 unsigned long sp,
101 struct stack_info *info)
102{
103 unsigned long low = (unsigned long)task_stack_page(tsk);
104 unsigned long high = low + THREAD_SIZE;
105
106 return on_stack(sp, low, high, STACK_TYPE_TASK, info);
107}
108
109#ifdef CONFIG_VMAP_STACK
110DECLARE_PER_CPU(unsigned long [OVERFLOW_STACK_SIZE/sizeof(long)], overflow_stack);
111
112static inline bool on_overflow_stack(unsigned long sp,
113 struct stack_info *info)
114{
115 unsigned long low = (unsigned long)raw_cpu_ptr(overflow_stack);
116 unsigned long high = low + OVERFLOW_STACK_SIZE;
117
118 return on_stack(sp, low, high, STACK_TYPE_OVERFLOW, info);
119}
120#else
121static inline bool on_overflow_stack(unsigned long sp,
122 struct stack_info *info) { return false; }
123#endif
124
125
126
127
128
129
130static inline bool on_accessible_stack(const struct task_struct *tsk,
131 unsigned long sp,
132 struct stack_info *info)
133{
134 if (info)
135 info->type = STACK_TYPE_UNKNOWN;
136
137 if (on_task_stack(tsk, sp, info))
138 return true;
139 if (tsk != current || preemptible())
140 return false;
141 if (on_irq_stack(sp, info))
142 return true;
143 if (on_overflow_stack(sp, info))
144 return true;
145 if (on_sdei_stack(sp, info))
146 return true;
147
148 return false;
149}
150
/* Initialise @frame from an initial @fp/@pc pair before unwinding. */
void start_backtrace(struct stackframe *frame, unsigned long fp,
		     unsigned long pc);
153
154#endif
155