1
2#include <linux/jump_label.h>
3#include <asm/unwind_hints.h>
4#include <asm/cpufeatures.h>
5#include <asm/page_types.h>
6#include <asm/percpu.h>
7#include <asm/asm-offsets.h>
8#include <asm/processor-flags.h>
9#include <asm/ptrace-abi.h>
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59#ifdef CONFIG_X86_64
60
61
62
63
64
65
/*
 * Build a pt_regs-style GPR save area on the stack (di at the top of
 * the area, r15 at the bottom — see the UNWIND_HINT_REGS annotation).
 *
 * rdx, rax: which register to push into the dx/ax slots; callers may
 *           substitute another register if the live value is elsewhere.
 * save_ret: set to 1 when this macro is CALLed, i.e. a return address
 *           sits at (%rsp).  The frame is built underneath it and the
 *           return address is re-pushed on top at the end.
 */
.macro PUSH_REGS rdx=%rdx rax=%rax save_ret=0
	.if \save_ret
	pushq	%rsi		/* pt_regs->si */
	movq	8(%rsp), %rsi	/* temporarily park the return address in %rsi */
	movq	%rdi, 8(%rsp)	/* pt_regs->di (overwrites the return-address slot) */
	.else
	pushq	%rdi		/* pt_regs->di */
	pushq	%rsi		/* pt_regs->si */
	.endif
	pushq	\rdx		/* pt_regs->dx */
	pushq	%rcx		/* pt_regs->cx */
	pushq	\rax		/* pt_regs->ax */
	pushq	%r8		/* pt_regs->r8 */
	pushq	%r9		/* pt_regs->r9 */
	pushq	%r10		/* pt_regs->r10 */
	pushq	%r11		/* pt_regs->r11 */
	pushq	%rbx		/* pt_regs->bx */
	pushq	%rbp		/* pt_regs->bp */
	pushq	%r12		/* pt_regs->r12 */
	pushq	%r13		/* pt_regs->r13 */
	pushq	%r14		/* pt_regs->r14 */
	pushq	%r15		/* pt_regs->r15 */
	UNWIND_HINT_REGS

	.if \save_ret
	pushq	%rsi		/* put the return address back on top of the stack */
	.endif
.endm
94
/*
 * Zero the just-saved GPRs so no stale values remain live in registers
 * on entry into C code (NOTE(review): presumably to reduce material
 * available to speculative-execution gadgets — confirm against the
 * commit history).
 *
 * %rdi, %rsi and %rax are deliberately NOT cleared here — presumably
 * because they still carry entry arguments / a return value at the
 * call sites of PUSH_AND_CLEAR_REGS; verify against callers.
 *
 * The 32-bit xorl forms zero the full 64-bit registers via implicit
 * zero-extension, with a shorter encoding than the 64-bit form.
 */
.macro CLEAR_REGS
	xorl	%edx, %edx
	xorl	%ecx, %ecx
	xorl	%r8d, %r8d
	xorl	%r9d, %r9d
	xorl	%r10d, %r10d
	xorl	%r11d, %r11d
	xorl	%ebx, %ebx
	xorl	%ebp, %ebp
	xorl	%r12d, %r12d
	xorl	%r13d, %r13d
	xorl	%r14d, %r14d
	xorl	%r15d, %r15d

.endm
116
/*
 * Convenience wrapper: save the full GPR frame (PUSH_REGS) and then
 * sanitize the registers (CLEAR_REGS).  Parameters are forwarded to
 * PUSH_REGS unchanged.
 */
.macro PUSH_AND_CLEAR_REGS rdx=%rdx rax=%rax save_ret=0
	PUSH_REGS rdx=\rdx, rax=\rax, save_ret=\save_ret
	CLEAR_REGS
.endm
121
/*
 * Undo PUSH_REGS: restore the GPRs from the stack frame, r15 first.
 *
 * pop_rdi:     when 0, leave the rdi slot on the stack for the caller
 *              to consume.
 * skip_r11rcx: when 1, discard the saved r11/rcx values by popping
 *              them into %rsi instead.  %rsi is overwritten again by
 *              its own pop further down, so it still ends up correct.
 *              NOTE(review): presumably for the SYSRET path, where
 *              r11/rcx are reloaded from RFLAGS/RIP by the CPU —
 *              confirm against callers.
 */
.macro POP_REGS pop_rdi=1 skip_r11rcx=0
	popq	%r15
	popq	%r14
	popq	%r13
	popq	%r12
	popq	%rbp
	popq	%rbx
	.if \skip_r11rcx
	popq	%rsi		/* discard saved r11 */
	.else
	popq	%r11
	.endif
	popq	%r10
	popq	%r9
	popq	%r8
	popq	%rax
	.if \skip_r11rcx
	popq	%rsi		/* discard saved rcx */
	.else
	popq	%rcx
	.endif
	popq	%rdx
	popq	%rsi
	.if \pop_rdi
	popq	%rdi
	.endif
.endm
149
150#ifdef CONFIG_PAGE_TABLE_ISOLATION
151
152
153
154
155
156#define PTI_USER_PGTABLE_BIT PAGE_SHIFT
157#define PTI_USER_PGTABLE_MASK (1 << PTI_USER_PGTABLE_BIT)
158#define PTI_USER_PCID_BIT X86_CR3_PTI_PCID_USER_BIT
159#define PTI_USER_PCID_MASK (1 << PTI_USER_PCID_BIT)
160#define PTI_USER_PGTABLE_AND_PCID_MASK (PTI_USER_PCID_MASK | PTI_USER_PGTABLE_MASK)
161
/*
 * Set the CR3 NOFLUSH bit in \reg so that a subsequent write of \reg
 * to %cr3 does not flush the TLB entries of the target PCID.
 */
.macro SET_NOFLUSH_BIT reg:req
	bts $X86_CR3_PCID_NOFLUSH_BIT, \reg
.endm
165
/*
 * Convert the CR3 value in \reg to its kernel variant: clear the PTI
 * user page-table bit and the user PCID bit.  On CPUs with PCID the
 * NOFLUSH bit is also set (patched in via ALTERNATIVE) so the CR3
 * write keeps the TLB contents intact.
 */
.macro ADJUST_KERNEL_CR3 reg:req
	ALTERNATIVE "", "SET_NOFLUSH_BIT \reg", X86_FEATURE_PCID
	/* Clear the PCID-user and page-table bits: point at kernel pagetables */
	andq	$(~PTI_USER_PGTABLE_AND_PCID_MASK), \reg
.endm
171
/*
 * Switch CR3 to the kernel page tables.  The ALTERNATIVE jumps over
 * the whole body unless X86_FEATURE_PTI is set, making this a no-op
 * on non-PTI systems.  Clobbers \scratch_reg.
 */
.macro SWITCH_TO_KERNEL_CR3 scratch_reg:req
	ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_PTI
	mov	%cr3, \scratch_reg
	ADJUST_KERNEL_CR3 \scratch_reg
	mov	\scratch_reg, %cr3
.Lend_\@:
.endm
179
180#define THIS_CPU_user_pcid_flush_mask \
181 PER_CPU_VAR(cpu_tlbstate) + TLB_STATE_user_pcid_flush_mask
182
/*
 * Switch CR3 to the user page tables without touching the stack.
 * Requires two scratch registers; both are clobbered.  No-op unless
 * X86_FEATURE_PTI is set.
 */
.macro SWITCH_TO_USER_CR3_NOSTACK scratch_reg:req scratch_reg2:req
	ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_PTI
	mov	%cr3, \scratch_reg

	/* Without PCID there is no flush bookkeeping: do a plain (flushing) write */
	ALTERNATIVE "jmp .Lwrcr3_\@", "", X86_FEATURE_PCID

	/*
	 * Does this ASID have a TLB flush pending?  Extract the ASID
	 * (low 11 bits of CR3; bit 11 is PTI_USER_PCID_BIT) and test
	 * the corresponding bit in the per-CPU user_pcid_flush_mask.
	 */
	movq	\scratch_reg, \scratch_reg2
	andq	$(0x7FF), \scratch_reg		/* mask ASID */
	bt	\scratch_reg, THIS_CPU_user_pcid_flush_mask
	jnc	.Lnoflush_\@

	/* Flush needed: clear the pending bit and do a flushing CR3 write */
	btr	\scratch_reg, THIS_CPU_user_pcid_flush_mask
	movq	\scratch_reg2, \scratch_reg
	jmp	.Lwrcr3_pcid_\@

.Lnoflush_\@:
	movq	\scratch_reg2, \scratch_reg
	SET_NOFLUSH_BIT \scratch_reg		/* keep this PCID's TLB entries */

.Lwrcr3_pcid_\@:
	/* Flip to the user variant of the PCID */
	orq	$(PTI_USER_PCID_MASK), \scratch_reg

.Lwrcr3_\@:
	/* Flip to the user half of the page tables and install it */
	orq	$(PTI_USER_PGTABLE_MASK), \scratch_reg
	mov	\scratch_reg, %cr3
.Lend_\@:
.endm
216
/*
 * Stack-using variant of SWITCH_TO_USER_CR3_NOSTACK: borrows %rax as
 * the second scratch register and preserves it across the switch.
 * Only \scratch_reg is clobbered.
 */
.macro SWITCH_TO_USER_CR3_STACK	scratch_reg:req
	pushq	%rax
	SWITCH_TO_USER_CR3_NOSTACK scratch_reg=\scratch_reg scratch_reg2=%rax
	popq	%rax
.endm
222
/*
 * Save the current CR3 value in \save_reg and, if it points at the
 * user page tables, switch to the kernel ones.  \save_reg is meant to
 * be handed to RESTORE_CR3 later.  Clobbers \scratch_reg.
 *
 * No-op without X86_FEATURE_PTI — NOTE(review): in that case
 * \save_reg is left untouched, so callers must not rely on it unless
 * PTI is enabled; confirm against users.
 */
.macro SAVE_AND_SWITCH_TO_KERNEL_CR3 scratch_reg:req save_reg:req
	ALTERNATIVE "jmp .Ldone_\@", "", X86_FEATURE_PTI
	movq	%cr3, \scratch_reg
	movq	\scratch_reg, \save_reg

	/*
	 * Already on the kernel page tables?  The PTI user page-table
	 * bit in CR3 is the indicator; if it is clear there is nothing
	 * to switch.
	 */
	bt	$PTI_USER_PGTABLE_BIT, \scratch_reg
	jnc	.Ldone_\@

	ADJUST_KERNEL_CR3 \scratch_reg
	movq	\scratch_reg, %cr3

.Ldone_\@:
.endm
240
/*
 * Restore the CR3 value previously captured by
 * SAVE_AND_SWITCH_TO_KERNEL_CR3.  With PCID, decide whether the write
 * may skip the TLB flush; note that \save_reg may get its NOFLUSH bit
 * set as a side effect.  Clobbers \scratch_reg and flags.  No-op
 * without X86_FEATURE_PTI.
 */
.macro RESTORE_CR3 scratch_reg:req save_reg:req
	ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_PTI

	/* Without PCID, NOFLUSH is unavailable: do a plain (flushing) write */
	ALTERNATIVE "jmp .Lwrcr3_\@", "", X86_FEATURE_PCID

	/*
	 * Restoring a kernel CR3 (user page-table bit clear) goes
	 * straight to the NOFLUSH path — NOTE(review): presumably
	 * kernel-ASID flushes are handled by other means, so no
	 * flush-mask check is needed here; confirm.
	 */
	bt	$PTI_USER_PGTABLE_BIT, \save_reg
	jnc	.Lnoflush_\@

	/*
	 * Restoring a user CR3: check the per-CPU flush mask for this
	 * ASID (low 11 bits of CR3) to see whether a flushing write is
	 * required.
	 */
	movq	\save_reg, \scratch_reg
	andq	$(0x7FF), \scratch_reg		/* mask ASID */
	bt	\scratch_reg, THIS_CPU_user_pcid_flush_mask
	jnc	.Lnoflush_\@

	/* Flush pending: clear the bit and do a flushing CR3 write */
	btr	\scratch_reg, THIS_CPU_user_pcid_flush_mask
	jmp	.Lwrcr3_\@

.Lnoflush_\@:
	SET_NOFLUSH_BIT \save_reg

.Lwrcr3_\@:
	/* Install the saved CR3 value (possibly with NOFLUSH set above) */
	movq	\save_reg, %cr3
.Lend_\@:
.endm
276
277#else
278
/*
 * CONFIG_PAGE_TABLE_ISOLATION disabled: the CR3-switching macros
 * expand to nothing, so call sites can use them unconditionally.
 */
.macro SWITCH_TO_KERNEL_CR3 scratch_reg:req
.endm
.macro SWITCH_TO_USER_CR3_NOSTACK scratch_reg:req scratch_reg2:req
.endm
.macro SWITCH_TO_USER_CR3_STACK scratch_reg:req
.endm
.macro SAVE_AND_SWITCH_TO_KERNEL_CR3 scratch_reg:req save_reg:req
.endm
.macro RESTORE_CR3 scratch_reg:req save_reg:req
.endm
289
290#endif
291
292
293
294
295
296
297
298
299
300
301
/*
 * Serializing barrier on the entry-from-user path: patched to an
 * LFENCE when X86_FEATURE_FENCE_SWAPGS_USER is set, otherwise a
 * no-op.  NOTE(review): per the feature name this fences the SWAPGS
 * sequence when entry came from user space — confirm placement
 * against callers.
 */
.macro FENCE_SWAPGS_USER_ENTRY
	ALTERNATIVE "", "lfence", X86_FEATURE_FENCE_SWAPGS_USER
.endm
/*
 * Counterpart of FENCE_SWAPGS_USER_ENTRY for the entry-from-kernel
 * path: patched to an LFENCE when X86_FEATURE_FENCE_SWAPGS_KERNEL is
 * set, otherwise a no-op.
 */
.macro FENCE_SWAPGS_KERNEL_ENTRY
	ALTERNATIVE "", "lfence", X86_FEATURE_FENCE_SWAPGS_KERNEL
.endm
308
/*
 * Call stackleak_erase() without clobbering any GPRs: the full
 * register frame is saved (and sanitized) around the C call and
 * restored afterwards.  Expands to nothing when the stackleak GCC
 * plugin is not configured.
 */
.macro STACKLEAK_ERASE_NOCLOBBER
#ifdef CONFIG_GCC_PLUGIN_STACKLEAK
	PUSH_AND_CLEAR_REGS
	call stackleak_erase
	POP_REGS
#endif
.endm
316
/*
 * Save the current GS base in \save_reg and switch the GS base to
 * this CPU's per-CPU area (via GET_PERCPU_BASE).
 *
 * NOTE(review): uses the RDGSBASE/WRGSBASE instructions directly, so
 * callers must guarantee FSGSBASE is usable — this is not checked
 * here; confirm against call sites.
 */
.macro SAVE_AND_SET_GSBASE scratch_reg:req save_reg:req
	rdgsbase \save_reg
	GET_PERCPU_BASE \scratch_reg
	wrgsbase \scratch_reg
.endm
322
323#else
324# undef UNWIND_HINT_IRET_REGS
325# define UNWIND_HINT_IRET_REGS
326#endif
327
/*
 * Erase the used portion of the kernel stack via the stackleak GCC
 * plugin's stackleak_erase().  Unlike STACKLEAK_ERASE_NOCLOBBER this
 * does not preserve registers (C-ABI clobbers apply).  Expands to
 * nothing when the plugin is not configured.
 */
.macro STACKLEAK_ERASE
#ifdef CONFIG_GCC_PLUGIN_STACKLEAK
	call stackleak_erase
#endif
.endm
333
334#ifdef CONFIG_SMP
335
336
337
338
339
/*
 * Load the segment limit of the special __CPUNODE_SEG descriptor into
 * \reg using LSL.  The limit field encodes the CPU and node numbers
 * (see VDSO_CPUNODE_MASK in GET_PERCPU_BASE), and LSL reads it from
 * the descriptor without a memory access.
 */
.macro LOAD_CPU_AND_NODE_SEG_LIMIT reg:req
	movq	$__CPUNODE_SEG, \reg
	lsl	\reg, \reg
.endm
344
345
346
347
348
349
350
351
352
353
354
/*
 * SMP: load this CPU's per-CPU base address into \reg.  The CPU
 * number is extracted from the CPUNODE segment limit with
 * VDSO_CPUNODE_MASK, then used to index the __per_cpu_offset array
 * (8 bytes per entry).
 */
.macro GET_PERCPU_BASE reg:req
	LOAD_CPU_AND_NODE_SEG_LIMIT \reg
	andq	$VDSO_CPUNODE_MASK, \reg
	movq	__per_cpu_offset(, \reg, 8), \reg
.endm
360
361#else
362
/*
 * !SMP: there is a single per-CPU unit; its base offset is read
 * (RIP-relative) from pcpu_unit_offsets.
 */
.macro GET_PERCPU_BASE reg:req
	movq	pcpu_unit_offsets(%rip), \reg
.endm
366
367#endif
368