/* SPDX-License-Identifier: GPL-2.0 */
0002 #ifndef _TOOLS_LINUX_ASM_X86_BARRIER_H
0003 #define _TOOLS_LINUX_ASM_X86_BARRIER_H
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015 #if defined(__i386__)
0016
0017
0018
0019
0020 #define mb() asm volatile("lock; addl $0,0(%%esp)" ::: "memory")
0021 #define rmb() asm volatile("lock; addl $0,0(%%esp)" ::: "memory")
0022 #define wmb() asm volatile("lock; addl $0,0(%%esp)" ::: "memory")
0023 #elif defined(__x86_64__)
0024 #define mb() asm volatile("mfence" ::: "memory")
0025 #define rmb() asm volatile("lfence" ::: "memory")
0026 #define wmb() asm volatile("sfence" ::: "memory")
0027 #define smp_rmb() barrier()
0028 #define smp_wmb() barrier()
0029 #define smp_mb() asm volatile("lock; addl $0,-132(%%rsp)" ::: "memory", "cc")
0030 #endif
0032 #if defined(__x86_64__)
0033 #define smp_store_release(p, v) \
0034 do { \
0035 barrier(); \
0036 WRITE_ONCE(*p, v); \
0037 } while (0)
0038
0039 #define smp_load_acquire(p) \
0040 ({ \
0041 typeof(*p) ___p1 = READ_ONCE(*p); \
0042 barrier(); \
0043 ___p1; \
0044 })
0045 #endif
0046 #endif