0001
0002 #ifndef _ASM_X86_CMPXCHG_32_H
0003 #define _ASM_X86_CMPXCHG_32_H
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015
0016
0017
0018
0019
0020
0021
0022
0023
/*
 * set_64bit() - atomically store a 64-bit value on 32-bit x86.
 *
 * A plain 64-bit store is two instructions on 32-bit x86 and therefore
 * not atomic; cmpxchg8b is used instead.  Read the current contents,
 * then loop on LOCK cmpxchg8b until the store succeeds: on failure
 * cmpxchg8b reloads EDX:EAX ("+A" prev) with the value it actually
 * found, so the next iteration compares against fresh data.
 */
static inline void set_64bit(volatile u64 *ptr, u64 value)
{
	u32 low = value;	/* new value, low 32 bits -> EBX */
	u32 high = value >> 32;	/* new value, high 32 bits -> ECX */
	u64 prev = *ptr;	/* expected value in EDX:EAX */

	asm volatile("\n1:\t"
		     LOCK_PREFIX "cmpxchg8b %0\n\t"
		     "jnz 1b"	/* ZF clear: *ptr changed under us, retry */
		     : "=m" (*ptr), "+A" (prev)
		     : "b" (low), "c" (high)
		     : "memory");
}
0037
#ifdef CONFIG_X86_CMPXCHG64
/*
 * CONFIG_X86_CMPXCHG64 means the minimum supported CPU has CMPXCHG8B,
 * so the arch_* entry points map straight onto the inline-asm helpers
 * defined below.  The casts funnel the operands through unsigned long
 * long and cast the result back to the pointee type, so these work for
 * any 64-bit pointee.
 */
#define arch_cmpxchg64(ptr, o, n) \
	((__typeof__(*(ptr)))__cmpxchg64((ptr), (unsigned long long)(o), \
					 (unsigned long long)(n)))
#define arch_cmpxchg64_local(ptr, o, n) \
	((__typeof__(*(ptr)))__cmpxchg64_local((ptr), (unsigned long long)(o), \
					       (unsigned long long)(n)))
#define arch_try_cmpxchg64(ptr, po, n) \
	__try_cmpxchg64((ptr), (unsigned long long *)(po), \
			(unsigned long long)(n))
#endif
0049
/*
 * __cmpxchg64() - SMP-safe 64-bit compare-and-exchange.
 *
 * Atomically: if (*ptr == old) *ptr = new.  Returns the value that was
 * in *ptr before the operation; the exchange happened iff the return
 * value equals @old.  cmpxchg8b compares *ptr against EDX:EAX (tied to
 * @old via the "0" constraint) and, on match, stores ECX:EBX.
 */
static inline u64 __cmpxchg64(volatile u64 *ptr, u64 old, u64 new)
{
	u64 prev;

	asm volatile(LOCK_PREFIX "cmpxchg8b %1"
		     : "=A" (prev),		/* previous *ptr out in EDX:EAX */
		       "+m" (*ptr)
		     : "b" ((u32)new),		/* new value, low half */
		       "c" ((u32)(new >> 32)),	/* new value, high half */
		       "0" (old)		/* expected value in EDX:EAX */
		     : "memory");
	return prev;
}
0062
/*
 * __cmpxchg64_local() - 64-bit compare-and-exchange without LOCK.
 *
 * Identical to __cmpxchg64() except the bus lock is omitted: cmpxchg8b
 * is still a single instruction, so this is atomic with respect to
 * interrupts on the executing CPU, but it is not safe against
 * concurrent access from other CPUs.
 */
static inline u64 __cmpxchg64_local(volatile u64 *ptr, u64 old, u64 new)
{
	u64 prev;

	asm volatile("cmpxchg8b %1"
		     : "=A" (prev),		/* previous *ptr out in EDX:EAX */
		       "+m" (*ptr)
		     : "b" ((u32)new),		/* new value, low half */
		       "c" ((u32)(new >> 32)),	/* new value, high half */
		       "0" (old)		/* expected value in EDX:EAX */
		     : "memory");
	return prev;
}
0075
/*
 * __try_cmpxchg64() - SMP-safe compare-and-exchange reporting success.
 *
 * Returns true and stores @new into *ptr if *ptr matched *pold.
 * On failure it returns false and writes the value actually observed
 * in *ptr back into *pold (cmpxchg8b leaves that value in EDX:EAX,
 * captured via "+A" (old)), so the caller can retry with an updated
 * expectation.  ZF from cmpxchg8b is extracted directly with
 * CC_SET/CC_OUT instead of a separate sete/test sequence.
 */
static inline bool __try_cmpxchg64(volatile u64 *ptr, u64 *pold, u64 new)
{
	bool success;
	u64 old = *pold;

	asm volatile(LOCK_PREFIX "cmpxchg8b %[ptr]"
		     CC_SET(z)
		     : CC_OUT(z) (success),
		       [ptr] "+m" (*ptr),
		       "+A" (old)		/* in: expected; out: observed */
		     : "b" ((u32)new),
		       "c" ((u32)(new >> 32))
		     : "memory");

	/* Only touch *pold on failure, to avoid a needless store. */
	if (unlikely(!success))
		*pold = old;
	return success;
}
0093
#ifndef CONFIG_X86_CMPXCHG64
/*
 * Built for CPUs that may lack CMPXCHG8B at run time.  alternative_io()
 * emits a call to the out-of-line cmpxchg8b_emu fallback by default and
 * patches in the real cmpxchg8b instruction at boot when the CPU
 * advertises X86_FEATURE_CX8.  Both variants share one calling
 * convention: pointer in %esi, expected value in %edx:%eax, new value
 * in %ecx:%ebx, result returned in %edx:%eax — hence the "S", "0"/"=A",
 * "c" and "b" constraints below.
 */
#define arch_cmpxchg64(ptr, o, n)				\
({								\
	__typeof__(*(ptr)) __ret;				\
	__typeof__(*(ptr)) __old = (o);				\
	__typeof__(*(ptr)) __new = (n);				\
	alternative_io(LOCK_PREFIX_HERE				\
			"call cmpxchg8b_emu",			\
			"lock; cmpxchg8b (%%esi)" ,		\
		       X86_FEATURE_CX8,				\
		       "=A" (__ret),				\
		       "S" ((ptr)), "0" (__old),		\
		       "b" ((unsigned int)__new),		\
		       "c" ((unsigned int)(__new>>32))		\
		       : "memory");				\
	__ret; })

/* Same as above, minus the LOCK prefix: local-CPU atomicity only. */
#define arch_cmpxchg64_local(ptr, o, n)				\
({								\
	__typeof__(*(ptr)) __ret;				\
	__typeof__(*(ptr)) __old = (o);				\
	__typeof__(*(ptr)) __new = (n);				\
	alternative_io("call cmpxchg8b_emu",			\
		       "cmpxchg8b (%%esi)" ,			\
		       X86_FEATURE_CX8,				\
		       "=A" (__ret),				\
		       "S" ((ptr)), "0" (__old),		\
		       "b" ((unsigned int)__new),		\
		       "c" ((unsigned int)(__new>>32))		\
		       : "memory");				\
	__ret; })

#endif
0133
/*
 * On 32-bit x86 the "double-width" cmpxchg is cmpxchg8b, so it is
 * available exactly when the CPU reports the CX8 feature.
 */
#define system_has_cmpxchg_double() boot_cpu_has(X86_FEATURE_CX8)
0135
0136 #endif