#ifndef _ASM_X86_BARRIER_H
#define _ASM_X86_BARRIER_H

#include <asm/alternative.h>
#include <asm/nops.h>

/*
 * Force strict CPU ordering.
 * And yes, this might be required on UP too when we're talking
 * to devices.
 */
#ifdef CONFIG_X86_32
#define mb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "mfence", \
				      X86_FEATURE_XMM2) ::: "memory", "cc")
#define rmb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "lfence", \
				       X86_FEATURE_XMM2) ::: "memory", "cc")
#define wmb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "sfence", \
				       X86_FEATURE_XMM2) ::: "memory", "cc")
#else
#define mb()	asm volatile("mfence" ::: "memory")
#define rmb()	asm volatile("lfence" ::: "memory")
#define wmb()	asm volatile("sfence" ::: "memory")
#endif
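
/*
 * Illustrative sketch (assumed driver usage, not part of this header):
 * the mandatory barriers above order accesses even for relaxed MMIO,
 * e.g. making a length register visible to a device before the doorbell:
 *
 *	writel_relaxed(len, base + LEN_REG);
 *	wmb();				// length visible before GO
 *	writel_relaxed(GO, base + CTRL_REG);
 *
 * LEN_REG, CTRL_REG and GO are hypothetical device registers/values.
 */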

/**
 * array_index_mask_nospec() - generate a mask that is ~0UL when the
 *	bounds check succeeds and 0 otherwise
 * @index: array element index
 * @size: number of elements in array
 *
 * Returns:
 *	0 - (index < size)
 */
static inline unsigned long array_index_mask_nospec(unsigned long index,
		unsigned long size)
{
	unsigned long mask;

	asm volatile ("cmp %1,%2; sbb %0,%0;"
			: "=r" (mask)
			: "g" (size), "r" (index)
			: "cc");
	return mask;
}

/* Override the default implementation from linux/nospec.h. */
#define array_index_mask_nospec array_index_mask_nospec
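
/*
 * Illustrative sketch (assumed caller, not defined here): linux/nospec.h
 * layers array_index_nospec() on top of this mask so that a bounds-checked
 * index cannot be speculated out of range:
 *
 *	if (idx < ARRAY_SIZE(arr)) {
 *		idx = array_index_nospec(idx, ARRAY_SIZE(arr));
 *		val = arr[idx];	// safe even under misspeculation
 *	}
 */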

/* Prevent speculative execution past this barrier. */
#define barrier_nospec() alternative("", "lfence", X86_FEATURE_LFENCE_RDTSC)

#define dma_rmb()	barrier()
#define dma_wmb()	barrier()
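
/*
 * Illustrative sketch (assumed driver pattern, names are hypothetical):
 * x86 is cache coherent for DMA, so publishing a descriptor to a device
 * only needs a compiler barrier between the payload and the ownership bit:
 *
 *	desc->addr = cpu_to_le64(buf_dma);
 *	dma_wmb();			// payload before OWN bit
 *	desc->status = cpu_to_le32(DESC_OWN);
 */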

/* A locked ADD to the stack is a full memory barrier and is cheaper than MFENCE. */
#define __smp_mb()	asm volatile("lock; addl $0,-4(%%" _ASM_SP ")" ::: "memory", "cc")

#define __smp_rmb()	dma_rmb()
#define __smp_wmb()	barrier()
#define __smp_store_mb(var, value) do { (void)xchg(&var, value); } while (0)
62
63#define __smp_store_release(p, v) \
64do { \
65 compiletime_assert_atomic_type(*p); \
66 barrier(); \
67 WRITE_ONCE(*p, v); \
68} while (0)
69
70#define __smp_load_acquire(p) \
71({ \
72 typeof(*p) ___p1 = READ_ONCE(*p); \
73 compiletime_assert_atomic_type(*p); \
74 barrier(); \
75 ___p1; \
76})
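
/*
 * Illustrative sketch (assumed usage via the smp_store_release() and
 * smp_load_acquire() wrappers that asm-generic/barrier.h builds on the
 * macros above): publish data with a release store, consume it with an
 * acquire load.
 *
 *	// producer			// consumer
 *	data = compute();		while (!smp_load_acquire(&ready))
 *	smp_store_release(&ready, 1);		cpu_relax();
 *					use(data);	// sees the new data
 */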

/* Atomic operations are already serializing on x86. */
#define __smp_mb__before_atomic()	do { } while (0)
#define __smp_mb__after_atomic()	do { } while (0)

#include <asm-generic/barrier.h>

/*
 * Make previous memory operations globally visible before a WRMSR.
 *
 * MFENCE makes prior stores globally visible, but WRMSR is not a
 * load/store instruction and is not ordered by MFENCE alone; the
 * trailing LFENCE keeps the WRMSR from executing until the MFENCE
 * has completed.
 */
static inline void weak_wrmsr_fence(void)
{
	asm volatile("mfence; lfence" : : : "memory");
}
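
/*
 * Illustrative sketch (assumed caller, not defined here): IPI paths that
 * write a non-serializing MSR, such as the x2APIC ICR, fence first:
 *
 *	weak_wrmsr_fence();
 *	wrmsrl(APIC_BASE_MSR + (APIC_ICR >> 4), icr_val);
 */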

#endif /* _ASM_X86_BARRIER_H */