#ifndef _ASM_X86_BARRIER_H
#define _ASM_X86_BARRIER_H

#include <asm/alternative.h>
#include <asm/nops.h>

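/*
 * Force strict CPU ordering.
 * And yes, this might be required on UP too when we're talking
 * to devices.
 *
 * On 32-bit, ALTERNATIVE() patches in the fence instructions when the
 * CPU has SSE2 (X86_FEATURE_XMM2); older CPUs fall back to a
 * LOCK-prefixed ADD to the stack, which acts as a full barrier.
 */
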
#ifdef CONFIG_X86_32
#define mb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "mfence", \
				      X86_FEATURE_XMM2) ::: "memory", "cc")
#define rmb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "lfence", \
				      X86_FEATURE_XMM2) ::: "memory", "cc")
#define wmb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "sfence", \
				      X86_FEATURE_XMM2) ::: "memory", "cc")
#else
#define __mb()	asm volatile("mfence":::"memory")
#define __rmb()	asm volatile("lfence":::"memory")
#define __wmb()	asm volatile("sfence" ::: "memory")
#endif
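
/**
 * array_index_mask_nospec() - generate a mask that is ~0UL when the
 * 	bounds check succeeds and 0 otherwise
 * @index: array element index
 * @size: number of elements in array
 *
 * Returns:
 *     0 - (index < size)
 */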
static inline unsigned long array_index_mask_nospec(unsigned long index,
		unsigned long size)
{
	unsigned long mask;

	asm volatile ("cmp %1,%2; sbb %0,%0;"
			:"=r" (mask)
			:"g"(size),"r" (index)
			:"cc");
	return mask;
}
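
/* Override the default implementation from linux/nospec.h. */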
#define array_index_mask_nospec array_index_mask_nospec
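
/* Prevent speculative execution past this barrier. */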
#define barrier_nospec() alternative("", "lfence", X86_FEATURE_LFENCE_RDTSC)

#define __dma_rmb()	barrier()
#define __dma_wmb()	barrier()

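/*
 * LOCK-prefixed instructions are full memory barriers on x86, and a
 * LOCK ADD to the top of the stack is cheaper than MFENCE, so it is
 * used for the SMP full barrier.
 */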
#define __smp_mb()	asm volatile("lock; addl $0,-4(%%" _ASM_SP ")" ::: "memory", "cc")

#define __smp_rmb()	dma_rmb()
#define __smp_wmb()	barrier()
#define __smp_store_mb(var, value) do { (void)xchg(&var, value); } while (0)

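/*
 * x86 is strongly ordered (TSO): plain loads already have acquire
 * semantics and plain stores already have release semantics, so
 * store-release and load-acquire only need a compiler barrier.
 */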
#define __smp_store_release(p, v)					\
do {									\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	WRITE_ONCE(*p, v);						\
} while (0)

#define __smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	___p1;								\
})
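
/* Atomic operations are already serializing on x86 */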
#define __smp_mb__before_atomic()	do { } while (0)
#define __smp_mb__after_atomic()	do { } while (0)

#include <asm-generic/barrier.h>
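
/*
 * Make previous memory operations globally visible before
 * a WRMSR.
 *
 * MFENCE makes writes visible, but only affects load/store
 * instructions.  WRMSR is unfortunately not a load/store
 * instruction either, so a LFENCE is also required to ensure
 * loads are globally visible.
 */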
static inline void weak_wrmsr_fence(void)
{
	asm volatile("mfence; lfence" : : : "memory");
}

#endif