#ifndef _ASM_X86_SYSTEM_H
#define _ASM_X86_SYSTEM_H

#include <asm/asm.h>
#include <asm/segment.h>
#include <asm/cpufeature.h>
#include <asm/cmpxchg.h>
#include <asm/nops.h>

#include <linux/kernel.h>
#include <linux/irqflags.h>

/* entries in ARCH_DLINFO: */
#ifdef CONFIG_IA32_EMULATION
# define AT_VECTOR_SIZE_ARCH 2
#else
# define AT_VECTOR_SIZE_ARCH 1
#endif
19
20struct task_struct;
21struct task_struct *__switch_to(struct task_struct *prev,
22 struct task_struct *next);
23struct tss_struct;
24void __switch_to_xtra(struct task_struct *prev_p, struct task_struct *next_p,
25 struct tss_struct *tss);
26

#ifdef CONFIG_X86_32

#ifdef CONFIG_CC_STACKPROTECTOR
/*
 * Copy the incoming task's stack canary into the per-cpu slot that
 * gcc's -fstack-protector prologue/epilogue code checks.
 */
#define __switch_canary							\
	"movl %P[task_canary](%[next]), %%ebx\n\t"			\
	"movl %%ebx, "__percpu_arg([stack_canary])"\n\t"
#define __switch_canary_oparam						\
	, [stack_canary] "=m" (per_cpu_var(stack_canary.canary))
#define __switch_canary_iparam						\
	, [task_canary] "i" (offsetof(struct task_struct, stack_canary))
#else	/* CC_STACKPROTECTOR */
#define __switch_canary
#define __switch_canary_oparam
#define __switch_canary_iparam
#endif	/* CC_STACKPROTECTOR */

/*
 * Saving eflags is important. It switches not only IRQ level,
 * but also the IOPL.. moreover, gcc even remembers it, so you
 * can be sure we don't change it with just the sti/cli pairs!
 */
#define switch_to(prev, next, last)					\
do {									\
	/*								\
	 * Context-switching clobbers all registers, so we clobber	\
	 * them explicitly, via unused output variables.		\
	 * (EAX and EBP are not listed because EBP is saved/restored	\
	 * explicitly for wchan access and EAX is the return value of	\
	 * __switch_to())						\
	 */								\
	unsigned long ebx, ecx, edx, esi, edi;				\
									\
	asm volatile("pushfl\n\t"		/* save    flags */	\
		     "pushl %%ebp\n\t"		/* save    EBP   */	\
		     "movl %%esp,%[prev_sp]\n\t"   /* save    ESP   */	\
		     "movl %[next_sp],%%esp\n\t"   /* restore ESP   */	\
		     "movl $1f,%[prev_ip]\n\t"	/* save    EIP   */	\
		     "pushl %[next_ip]\n\t"	/* restore EIP   */	\
		     __switch_canary					\
		     "jmp __switch_to\n"	/* regparm call  */	\
		     "1:\t"						\
		     "popl %%ebp\n\t"		/* restore EBP   */	\
		     "popfl\n"			/* restore flags */	\
									\
		     /* output parameters */				\
		     : [prev_sp] "=m" (prev->thread.sp),		\
		       [prev_ip] "=m" (prev->thread.ip),		\
		       "=a" (last),					\
									\
		       /* clobbered output registers: */		\
		       "=b" (ebx), "=c" (ecx), "=d" (edx),		\
		       "=S" (esi), "=D" (edi)				\
									\
		       __switch_canary_oparam				\
									\
		       /* input parameters: */				\
		     : [next_sp]  "m" (next->thread.sp),		\
		       [next_ip]  "m" (next->thread.ip),		\
									\
		       /* regparm parameters for __switch_to(): */	\
		       [prev]     "a" (prev),				\
		       [next]     "d" (next)				\
									\
		       __switch_canary_iparam				\
									\
		     : /* reloaded segment registers */			\
			"memory");					\
} while (0)

/*
 * disable hlt during certain critical i/o operations
 */
#define HAVE_DISABLE_HLT
#else	/* CONFIG_X86_32 */
#define __SAVE(reg, offset) "movq %%" #reg ",(14-" #offset ")*8(%%rsp)\n\t"
#define __RESTORE(reg, offset) "movq (14-" #offset ")*8(%%rsp),%%" #reg "\n\t"

/* frame pointer must be last for get_wchan */
#define SAVE_CONTEXT    "pushf ; pushq %%rbp ; movq %%rsi,%%rbp\n\t"
#define RESTORE_CONTEXT "movq %%rbp,%%rsi ; popq %%rbp ; popf\t"

#define __EXTRA_CLOBBER							\
	, "rcx", "rbx", "rdx", "r8", "r9", "r10", "r11",		\
	  "r12", "r13", "r14", "r15"

#ifdef CONFIG_CC_STACKPROTECTOR
#define __switch_canary							  \
	"movq %P[task_canary](%%rsi),%%r8\n\t"				  \
	"movq %%r8,"__percpu_arg([gs_canary])"\n\t"
#define __switch_canary_oparam						  \
	, [gs_canary] "=m" (per_cpu_var(irq_stack_union.stack_canary))
#define __switch_canary_iparam						  \
	, [task_canary] "i" (offsetof(struct task_struct, stack_canary))
#else	/* CC_STACKPROTECTOR */
#define __switch_canary
#define __switch_canary_oparam
#define __switch_canary_iparam
#endif	/* CC_STACKPROTECTOR */

/* Save restore flags to clear handle leaking NT */
#define switch_to(prev, next, last) \
	asm volatile(SAVE_CONTEXT					  \
	     "movq %%rsp,%P[threadrsp](%[prev])\n\t" /* save RSP */	  \
	     "movq %P[threadrsp](%[next]),%%rsp\n\t" /* restore RSP */	  \
	     "call __switch_to\n\t"					  \
	     ".globl thread_return\n"					  \
	     "thread_return:\n\t"					  \
	     "movq "__percpu_arg([current_task])",%%rsi\n\t"		  \
	     __switch_canary						  \
	     "movq %P[thread_info](%%rsi),%%r8\n\t"			  \
	     "movq %%rax,%%rdi\n\t"					  \
	     "testl %[_tif_fork],%P[ti_flags](%%r8)\n\t"		  \
	     "jnz ret_from_fork\n\t"					  \
	     RESTORE_CONTEXT						  \
	     : "=a" (last)						  \
	       __switch_canary_oparam					  \
	     : [next] "S" (next), [prev] "D" (prev),			  \
	       [threadrsp] "i" (offsetof(struct task_struct, thread.sp)), \
	       [ti_flags] "i" (offsetof(struct thread_info, flags)),	  \
	       [_tif_fork] "i" (_TIF_FORK),				  \
	       [thread_info] "i" (offsetof(struct task_struct, stack)),  \
	       [current_task] "m" (per_cpu_var(current_task))		  \
	       __switch_canary_iparam					  \
	     : "memory", "cc" __EXTRA_CLOBBER)
#endif	/* CONFIG_X86_32 */
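
/*
 * Usage sketch (illustrative, not part of this header): the scheduler's
 * context_switch() invokes this as
 *
 *	switch_to(prev, next, prev);
 *
 * The third argument is reassigned because, by the time "prev" runs
 * again, the task that switched back to it is in general not "next";
 * "last" reports the task we actually came from.
 */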

#ifdef __KERNEL__

extern void native_load_gs_index(unsigned);

/*
 * Load a segment. Fall back on loading the zero
 * segment if something goes wrong..
 */
#define loadsegment(seg, value)			\
	asm volatile("\n"			\
		     "1:\t"			\
		     "movl %k0,%%" #seg "\n"	\
		     "2:\n"			\
		     ".section .fixup,\"ax\"\n"	\
		     "3:\t"			\
		     "movl %k1, %%" #seg "\n\t"	\
		     "jmp 2b\n"			\
		     ".previous\n"		\
		     _ASM_EXTABLE(1b,3b)	\
		     : :"r" (value), "r" (0) : "memory")

/*
 * Save a segment register away
 */
#define savesegment(seg, value)				\
	asm("mov %%" #seg ",%0":"=r" (value) : : "memory")
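
/*
 * For illustration only (not in the original header): typical use is to
 * stash a segment selector across code that may clobber it. Assuming a
 * local "unsigned int gs_sel":
 *
 *	savesegment(gs, gs_sel);	// gs_sel = current %gs selector
 *	...
 *	loadsegment(gs, gs_sel);	// a faulting load falls back to
 *					// the null selector via .fixup
 */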

/*
 * x86_32 user gs accessors.
 */
#ifdef CONFIG_X86_32
#ifdef CONFIG_X86_32_LAZY_GS
#define get_user_gs(regs)	(u16)({unsigned long v; savesegment(gs, v); v;})
#define set_user_gs(regs, v)	loadsegment(gs, (unsigned long)(v))
#define task_user_gs(tsk)	((tsk)->thread.gs)
#define lazy_save_gs(v)		savesegment(gs, (v))
#define lazy_load_gs(v)		loadsegment(gs, (v))
#else	/* X86_32_LAZY_GS */
#define get_user_gs(regs)	(u16)((regs)->gs)
#define set_user_gs(regs, v)	do { (regs)->gs = (v); } while (0)
#define task_user_gs(tsk)	(task_pt_regs(tsk)->gs)
#define lazy_save_gs(v)		do { } while (0)
#define lazy_load_gs(v)		do { } while (0)
#endif	/* X86_32_LAZY_GS */
#endif	/* CONFIG_X86_32 */

/* Read the limit of @segment with lsl; +1 converts the limit to a size. */
static inline unsigned long get_limit(unsigned long segment)
{
	unsigned long __limit;
	asm("lsll %1,%0" : "=r" (__limit) : "r" (segment));
	return __limit + 1;
}
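
/*
 * For illustration only: given a selector, say from a register dump,
 * get_limit() returns the segment's size in bytes:
 *
 *	unsigned long size = get_limit(regs->cs);
 */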

static inline void native_clts(void)
{
	asm volatile("clts");
}

/*
 * Volatile isn't enough to prevent the compiler from reordering the
 * read/write functions for the control registers and messing everything up.
 * A memory clobber would solve the problem, but would prevent reordering of
 * all loads/stores around it, which can hurt performance. The solution is to
 * use a variable and mimic reads and writes to it to enforce serialization.
 */
static unsigned long __force_order;

static inline unsigned long native_read_cr0(void)
{
	unsigned long val;
	asm volatile("mov %%cr0,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr0(unsigned long val)
{
	asm volatile("mov %0,%%cr0": : "r" (val), "m" (__force_order));
}

static inline unsigned long native_read_cr2(void)
{
	unsigned long val;
	asm volatile("mov %%cr2,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr2(unsigned long val)
{
	asm volatile("mov %0,%%cr2": : "r" (val), "m" (__force_order));
}

static inline unsigned long native_read_cr3(void)
{
	unsigned long val;
	asm volatile("mov %%cr3,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr3(unsigned long val)
{
	asm volatile("mov %0,%%cr3": : "r" (val), "m" (__force_order));
}

static inline unsigned long native_read_cr4(void)
{
	unsigned long val;
	asm volatile("mov %%cr4,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline unsigned long native_read_cr4_safe(void)
{
	unsigned long val;
	/*
	 * This could fault if %cr4 does not exist. On x86_64, %cr4
	 * always exists, so it can never fault there.
	 */
#ifdef CONFIG_X86_32
	asm volatile("1: mov %%cr4, %0\n"
		     "2:\n"
		     _ASM_EXTABLE(1b, 2b)
		     : "=r" (val), "=m" (__force_order) : "0" (0));
#else
	val = native_read_cr4();
#endif
	return val;
}

static inline void native_write_cr4(unsigned long val)
{
	asm volatile("mov %0,%%cr4": : "r" (val), "m" (__force_order));
}

#ifdef CONFIG_X86_64
static inline unsigned long native_read_cr8(void)
{
	unsigned long cr8;
	asm volatile("movq %%cr8,%0" : "=r" (cr8));
	return cr8;
}

static inline void native_write_cr8(unsigned long val)
{
	asm volatile("movq %0,%%cr8" :: "r" (val) : "memory");
}
#endif

static inline void native_wbinvd(void)
{
	asm volatile("wbinvd": : :"memory");
}

#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>
#else
#define read_cr0()	(native_read_cr0())
#define write_cr0(x)	(native_write_cr0(x))
#define read_cr2()	(native_read_cr2())
#define write_cr2(x)	(native_write_cr2(x))
#define read_cr3()	(native_read_cr3())
#define write_cr3(x)	(native_write_cr3(x))
#define read_cr4()	(native_read_cr4())
#define read_cr4_safe()	(native_read_cr4_safe())
#define write_cr4(x)	(native_write_cr4(x))
#define wbinvd()	(native_wbinvd())
#ifdef CONFIG_X86_64
#define read_cr8()	(native_read_cr8())
#define write_cr8(x)	(native_write_cr8(x))
#define load_gs_index	native_load_gs_index
#endif

/* Clear the 'TS' bit */
#define clts()		(native_clts())

#endif	/* CONFIG_PARAVIRT */

/* Set the 'TS' bit in CR0 */
#define stts() write_cr0(read_cr0() | X86_CR0_TS)
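
/*
 * Illustrative pairing (not part of this header): code that touches FPU
 * state with CR0.TS temporarily cleared looks roughly like
 *
 *	clts();			// clear TS so FPU insns don't trap with #NM
 *	... use FPU/SSE state ...
 *	stts();			// re-arm the device-not-available trap
 */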

#endif	/* __KERNEL__ */

static inline void clflush(volatile void *__p)
{
	asm volatile("clflush %0" : "+m" (*(volatile char __force *)__p));
}
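
/*
 * For illustration only: clflush is not ordered with respect to other
 * stores, so callers typically fence around a flush loop, e.g.
 *
 *	mb();
 *	for (p = start; p < end; p += boot_cpu_data.x86_clflush_size)
 *		clflush(p);
 *	mb();
 */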

#define nop() asm volatile ("nop")

void disable_hlt(void);
void enable_hlt(void);

void cpu_idle_wait(void);

extern unsigned long arch_align_stack(unsigned long sp);
extern void free_init_pages(char *what, unsigned long begin, unsigned long end);

void default_idle(void);

void stop_this_cpu(void *dummy);

/*
 * Force strict CPU ordering.
 * And yes, this is required on UP too when we're talking
 * to devices.
 */
#ifdef CONFIG_X86_32
/*
 * Some non-Intel clones support out of order store. wmb() ceases to be a
 * nop for these.
 */
#define mb() alternative("lock; addl $0,0(%%esp)", "mfence", X86_FEATURE_XMM2)
#define rmb() alternative("lock; addl $0,0(%%esp)", "lfence", X86_FEATURE_XMM2)
#define wmb() alternative("lock; addl $0,0(%%esp)", "sfence", X86_FEATURE_XMM)
#else
#define mb()	asm volatile("mfence":::"memory")
#define rmb()	asm volatile("lfence":::"memory")
#define wmb()	asm volatile("sfence" ::: "memory")
#endif

/**
 * read_barrier_depends - Flush all pending reads that subsequent reads
 * depend on.
 *
 * No data-dependent reads from memory-like regions are ever reordered
 * over this barrier.  All reads preceding this primitive are guaranteed
 * to access memory (but not necessarily other CPUs' caches) before any
 * reads following this primitive that depend on the data returned by
 * any of the preceding reads.  This primitive is much lighter weight than
 * rmb() on most CPUs, and is never heavier weight than is rmb().
 *
 * These ordering constraints are respected by both the local CPU
 * and the compiler.
 *
 * Ordering is not guaranteed by anything other than these primitives,
 * not even by data dependencies.  For example, the following code would
 * force ordering (the initial value of "a" is zero, "b" is one, and "p"
 * is "&a"):
 *
 *	CPU 0				CPU 1
 *
 *	b = 2;
 *	memory_barrier();
 *	p = &b;				q = p;
 *					read_barrier_depends();
 *					d = *q;
 *
 * because the read of "*q" depends on the read of "p" and these
 * two reads are separated by a read_barrier_depends().  However,
 * the following code, with the same initial values for "a" and "b":
 *
 *	CPU 0				CPU 1
 *
 *	a = 2;
 *	memory_barrier();
 *	b = 3;				y = b;
 *					read_barrier_depends();
 *					x = a;
 *
 * does not enforce ordering, since there is no data dependency between
 * the read of "b" and the read of "a".  On x86, dependent reads are
 * never reordered, so this is a no-op.
 */
#define read_barrier_depends()	do { } while (0)

#ifdef CONFIG_SMP
#define smp_mb()	mb()
#ifdef CONFIG_X86_PPRO_FENCE
# define smp_rmb()	rmb()
#else
# define smp_rmb()	barrier()
#endif
#ifdef CONFIG_X86_OOSTORE
# define smp_wmb()	wmb()
#else
# define smp_wmb()	barrier()
#endif
#define smp_read_barrier_depends()	read_barrier_depends()
#define set_mb(var, value) do { (void)xchg(&var, value); } while (0)
#else	/* CONFIG_SMP */
#define smp_mb()	barrier()
#define smp_rmb()	barrier()
#define smp_wmb()	barrier()
#define smp_read_barrier_depends()	do { } while (0)
#define set_mb(var, value) do { var = value; barrier(); } while (0)
#endif	/* CONFIG_SMP */
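
/*
 * Illustrative use (not from the original header): the classic
 * producer/consumer pairing of smp_wmb()/smp_rmb():
 *
 *	CPU 0 (producer)		CPU 1 (consumer)
 *
 *	data = 42;			while (!flag)
 *	smp_wmb();				cpu_relax();
 *	flag = 1;			smp_rmb();
 *					BUG_ON(data != 42);
 */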

/*
 * Stop RDTSC speculation. This is needed when you need to use RDTSC
 * (or get_cycles or vread that possibly accesses the TSC) in a defined
 * code region.
 *
 * (Could use an alternative three way for this if there was one.)
 */
static inline void rdtsc_barrier(void)
{
	alternative(ASM_NOP3, "mfence", X86_FEATURE_MFENCE_RDTSC);
	alternative(ASM_NOP3, "lfence", X86_FEATURE_LFENCE_RDTSC);
}
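
/*
 * For illustration only: a caller that wants a non-speculated timestamp
 * puts the barrier in front of the read, e.g.
 *
 *	rdtsc_barrier();
 *	cycles = get_cycles();	// rdtsc cannot be hoisted above the barrier
 */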

#endif /* _ASM_X86_SYSTEM_H */