1
2#ifndef __ASM_PREEMPT_H
3#define __ASM_PREEMPT_H
4
5#include <asm/rmwcc.h>
6#include <asm/percpu.h>
7#include <linux/thread_info.h>
8#include <linux/static_call_types.h>
9
/* Per-CPU preempt count; avoids touching thread_info on every count update. */
DECLARE_PER_CPU(int, __preempt_count);

/* We use the MSB mostly because its available */
#define PREEMPT_NEED_RESCHED 0x80000000

/*
 * We use the PREEMPT_NEED_RESCHED bit as an inverted NEED_RESCHED such
 * that a decrement hitting 0 means we can and should reschedule: the
 * "enabled" state therefore has the bit SET (resched not needed).
 */
#define PREEMPT_ENABLED (0 + PREEMPT_NEED_RESCHED)
20
21
22
23
24
/*
 * We mask the PREEMPT_NEED_RESCHED bit so as not to confuse all current
 * users that think a non-zero value indicates we cannot preempt.
 */
static __always_inline int preempt_count(void)
{
	return raw_cpu_read_4(__preempt_count) & ~PREEMPT_NEED_RESCHED;
}
29
/*
 * Set the preempt count to @pc while preserving the current state of the
 * (inverted) PREEMPT_NEED_RESCHED bit.  The cmpxchg loop retries if the
 * resched bit changes under us between the read and the exchange.
 */
static __always_inline void preempt_count_set(int pc)
{
	int old, new;

	do {
		old = raw_cpu_read_4(__preempt_count);
		/* keep the resched bit from 'old', take the count from 'pc' */
		new = (old & PREEMPT_NEED_RESCHED) |
			(pc & ~PREEMPT_NEED_RESCHED);
	} while (raw_cpu_cmpxchg_4(__preempt_count, old, new) != old);
}
40
41
42
43
/*
 * must be macros to avoid header recursion hell
 */
#define init_task_preempt_count(p) do { } while (0)

#define init_idle_preempt_count(p, cpu) do { \
	per_cpu(__preempt_count, (cpu)) = PREEMPT_DISABLED; \
} while (0)
49
50
51
52
53
54
55
56
57
58
/*
 * We fold the NEED_RESCHED bit into the preempt count such that
 * preempt_enable() can decrement and test for needing to reschedule with a
 * single instruction.
 *
 * We invert the actual bit, so that when the decrement hits 0 we know we
 * both need to resched (the bit is cleared) and can resched (no preempt
 * count).  Hence "set need_resched" CLEARS the bit and "clear need_resched"
 * SETS it — the and/or below are intentional, not swapped.
 */
static __always_inline void set_preempt_need_resched(void)
{
	raw_cpu_and_4(__preempt_count, ~PREEMPT_NEED_RESCHED);
}

static __always_inline void clear_preempt_need_resched(void)
{
	raw_cpu_or_4(__preempt_count, PREEMPT_NEED_RESCHED);
}

static __always_inline bool test_preempt_need_resched(void)
{
	/* bit clear (inverted) means a reschedule is needed */
	return !(raw_cpu_read_4(__preempt_count) & PREEMPT_NEED_RESCHED);
}
73
74
75
76
77
/*
 * The various preempt_count add/sub methods.  These touch only the count
 * bits; the inverted PREEMPT_NEED_RESCHED bit in the MSB is unaffected for
 * any sane (small) @val.
 */
static __always_inline void __preempt_count_add(int val)
{
	raw_cpu_add_4(__preempt_count, val);
}

static __always_inline void __preempt_count_sub(int val)
{
	raw_cpu_add_4(__preempt_count, -val);
}
87
88
89
90
91
92
/*
 * Because we keep PREEMPT_NEED_RESCHED set when we do _not_ need to
 * reschedule, a decrement hitting 0 means the count is gone AND a resched
 * is pending — i.e. returns true when we need to resched and can
 * (barring IRQ state).
 */
static __always_inline bool __preempt_count_dec_and_test(void)
{
	/* single decl + ZF test on the per-CPU variable */
	return GEN_UNARY_RMWcc("decl", __preempt_count, e, __percpu_arg([var]));
}
97
98
99
100
/*
 * Returns true when we need to resched and can (barring IRQ state).
 *
 * Equality works because the inverted PREEMPT_NEED_RESCHED bit lives in
 * the MSB: when no resched is pending that bit is set, so the raw value
 * can never equal a plain @preempt_offset.
 */
static __always_inline bool should_resched(int preempt_offset)
{
	return unlikely(raw_cpu_read_4(__preempt_count) == preempt_offset);
}
105
#ifdef CONFIG_PREEMPTION

/*
 * The preemption entry points are reached via asm thunks that save/restore
 * the full register set, since these calls can happen at essentially any
 * instruction boundary.
 */
extern asmlinkage void preempt_schedule(void);
extern asmlinkage void preempt_schedule_thunk(void);

#define __preempt_schedule_func preempt_schedule_thunk

extern asmlinkage void preempt_schedule_notrace(void);
extern asmlinkage void preempt_schedule_notrace_thunk(void);

#define __preempt_schedule_notrace_func preempt_schedule_notrace_thunk

#ifdef CONFIG_PREEMPT_DYNAMIC

/*
 * With dynamic preemption the calls go through static-call trampolines so
 * the preemption model can be switched at boot time; the explicit asm call
 * keeps the register clobbering under our control.
 */
DECLARE_STATIC_CALL(preempt_schedule, __preempt_schedule_func);

#define __preempt_schedule() \
do { \
	__STATIC_CALL_MOD_ADDRESSABLE(preempt_schedule); \
	asm volatile ("call " STATIC_CALL_TRAMP_STR(preempt_schedule) : ASM_CALL_CONSTRAINT); \
} while (0)

DECLARE_STATIC_CALL(preempt_schedule_notrace, __preempt_schedule_notrace_func);

#define __preempt_schedule_notrace() \
do { \
	__STATIC_CALL_MOD_ADDRESSABLE(preempt_schedule_notrace); \
	asm volatile ("call " STATIC_CALL_TRAMP_STR(preempt_schedule_notrace) : ASM_CALL_CONSTRAINT); \
} while (0)

#else /* !CONFIG_PREEMPT_DYNAMIC */

#define __preempt_schedule() \
	asm volatile ("call preempt_schedule_thunk" : ASM_CALL_CONSTRAINT);

#define __preempt_schedule_notrace() \
	asm volatile ("call preempt_schedule_notrace_thunk" : ASM_CALL_CONSTRAINT);

#endif /* CONFIG_PREEMPT_DYNAMIC */

#endif /* CONFIG_PREEMPTION */

#endif /* __ASM_PREEMPT_H */
149