#ifndef ASM_X86_CMPXCHG_H
#define ASM_X86_CMPXCHG_H

#include <linux/compiler.h>
#include <asm/cpufeatures.h>
#include <asm/alternative.h> /* Provides LOCK_PREFIX */
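
/*
 * Non-existent functions to indicate usage errors at link time
 * (or compile-time if the compiler implements __compiletime_error()).
 */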
extern void __xchg_wrong_size(void)
        __compiletime_error("Bad argument size for xchg");
extern void __cmpxchg_wrong_size(void)
        __compiletime_error("Bad argument size for cmpxchg");
extern void __xadd_wrong_size(void)
        __compiletime_error("Bad argument size for xadd");
extern void __add_wrong_size(void)
        __compiletime_error("Bad argument size for add");
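
/*
 * Constants for operation sizes. On 32-bit, the 64-bit size is set to -1
 * because sizeof() will never return -1, thereby making that switch case
 * guaranteed dead code which the compiler will eliminate, and allowing the
 * "missing symbol in the default case" to indicate a usage error.
 */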
#define __X86_CASE_B 1
#define __X86_CASE_W 2
#define __X86_CASE_L 4
#ifdef CONFIG_64BIT
#define __X86_CASE_Q 8
#else
#define __X86_CASE_Q -1
#endif
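
/*
 * An exchange-type operation, which takes a value and a pointer, and
 * returns the old value. The "lock" argument is pasted in front of the
 * instruction, so callers can pass LOCK_PREFIX or an empty string.
 */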
#define __xchg_op(ptr, arg, op, lock) \
({ \
        __typeof__ (*(ptr)) __ret = (arg); \
        switch (sizeof(*(ptr))) { \
        case __X86_CASE_B: \
                asm volatile (lock #op "b %b0, %1\n" \
                              : "+q" (__ret), "+m" (*(ptr)) \
                              : : "memory", "cc"); \
                break; \
        case __X86_CASE_W: \
                asm volatile (lock #op "w %w0, %1\n" \
                              : "+r" (__ret), "+m" (*(ptr)) \
                              : : "memory", "cc"); \
                break; \
        case __X86_CASE_L: \
                asm volatile (lock #op "l %0, %1\n" \
                              : "+r" (__ret), "+m" (*(ptr)) \
                              : : "memory", "cc"); \
                break; \
        case __X86_CASE_Q: \
                asm volatile (lock #op "q %q0, %1\n" \
                              : "+r" (__ret), "+m" (*(ptr)) \
                              : : "memory", "cc"); \
                break; \
        default: \
                __ ## op ## _wrong_size(); \
        } \
        __ret; \
})
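
/*
 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway.
 * Since this is generally used to protect other memory information, we
 * use "asm volatile" and "memory" clobbers to prevent gcc from moving
 * information around.
 */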
#define arch_xchg(ptr, v) __xchg_op((ptr), (v), xchg, "")
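
/*
 * Atomic compare and exchange. Compare OLD with MEM, if identical,
 * store NEW in MEM. Return the initial value in MEM. Success is
 * indicated by comparing RETURN with OLD.
 */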
#define __raw_cmpxchg(ptr, old, new, size, lock) \
({ \
        __typeof__(*(ptr)) __ret; \
        __typeof__(*(ptr)) __old = (old); \
        __typeof__(*(ptr)) __new = (new); \
        switch (size) { \
        case __X86_CASE_B: \
        { \
                volatile u8 *__ptr = (volatile u8 *)(ptr); \
                asm volatile(lock "cmpxchgb %2,%1" \
                             : "=a" (__ret), "+m" (*__ptr) \
                             : "q" (__new), "0" (__old) \
                             : "memory"); \
                break; \
        } \
        case __X86_CASE_W: \
        { \
                volatile u16 *__ptr = (volatile u16 *)(ptr); \
                asm volatile(lock "cmpxchgw %2,%1" \
                             : "=a" (__ret), "+m" (*__ptr) \
                             : "r" (__new), "0" (__old) \
                             : "memory"); \
                break; \
        } \
        case __X86_CASE_L: \
        { \
                volatile u32 *__ptr = (volatile u32 *)(ptr); \
                asm volatile(lock "cmpxchgl %2,%1" \
                             : "=a" (__ret), "+m" (*__ptr) \
                             : "r" (__new), "0" (__old) \
                             : "memory"); \
                break; \
        } \
        case __X86_CASE_Q: \
        { \
                volatile u64 *__ptr = (volatile u64 *)(ptr); \
                asm volatile(lock "cmpxchgq %2,%1" \
                             : "=a" (__ret), "+m" (*__ptr) \
                             : "r" (__new), "0" (__old) \
                             : "memory"); \
                break; \
        } \
        default: \
                __cmpxchg_wrong_size(); \
        } \
        __ret; \
})

#define __cmpxchg(ptr, old, new, size) \
        __raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)

#define __sync_cmpxchg(ptr, old, new, size) \
        __raw_cmpxchg((ptr), (old), (new), (size), "lock; ")

#define __cmpxchg_local(ptr, old, new, size) \
        __raw_cmpxchg((ptr), (old), (new), (size), "")

#ifdef CONFIG_X86_32
# include <asm/cmpxchg_32.h>
#else
# include <asm/cmpxchg_64.h>
#endif

#define arch_cmpxchg(ptr, old, new) \
        __cmpxchg(ptr, old, new, sizeof(*(ptr)))

#define arch_sync_cmpxchg(ptr, old, new) \
        __sync_cmpxchg(ptr, old, new, sizeof(*(ptr)))

#define arch_cmpxchg_local(ptr, old, new) \
        __cmpxchg_local(ptr, old, new, sizeof(*(ptr)))
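
/*
 * Illustrative use of arch_cmpxchg() (a sketch only; "counter" is a
 * hypothetical unsigned long *, not something defined in this header):
 * retry an optimistic read-modify-write until no other CPU has changed
 * the value in between.
 *
 *        unsigned long old, seen;
 *
 *        old = READ_ONCE(*counter);
 *        for (;;) {
 *                seen = arch_cmpxchg(counter, old, old + 1);
 *                if (seen == old)
 *                        break;
 *                old = seen;
 *        }
 */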
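
/*
 * try_cmpxchg: like cmpxchg, but returns a bool indicating success and,
 * on failure, writes the value actually found in memory back through the
 * "old" pointer so the caller can retry without re-reading it.
 */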
#define __raw_try_cmpxchg(_ptr, _pold, _new, size, lock) \
({ \
        bool success; \
        __typeof__(_ptr) _old = (__typeof__(_ptr))(_pold); \
        __typeof__(*(_ptr)) __old = *_old; \
        __typeof__(*(_ptr)) __new = (_new); \
        switch (size) { \
        case __X86_CASE_B: \
        { \
                volatile u8 *__ptr = (volatile u8 *)(_ptr); \
                asm volatile(lock "cmpxchgb %[new], %[ptr]" \
                             CC_SET(z) \
                             : CC_OUT(z) (success), \
                               [ptr] "+m" (*__ptr), \
                               [old] "+a" (__old) \
                             : [new] "q" (__new) \
                             : "memory"); \
                break; \
        } \
        case __X86_CASE_W: \
        { \
                volatile u16 *__ptr = (volatile u16 *)(_ptr); \
                asm volatile(lock "cmpxchgw %[new], %[ptr]" \
                             CC_SET(z) \
                             : CC_OUT(z) (success), \
                               [ptr] "+m" (*__ptr), \
                               [old] "+a" (__old) \
                             : [new] "r" (__new) \
                             : "memory"); \
                break; \
        } \
        case __X86_CASE_L: \
        { \
                volatile u32 *__ptr = (volatile u32 *)(_ptr); \
                asm volatile(lock "cmpxchgl %[new], %[ptr]" \
                             CC_SET(z) \
                             : CC_OUT(z) (success), \
                               [ptr] "+m" (*__ptr), \
                               [old] "+a" (__old) \
                             : [new] "r" (__new) \
                             : "memory"); \
                break; \
        } \
        case __X86_CASE_Q: \
        { \
                volatile u64 *__ptr = (volatile u64 *)(_ptr); \
                asm volatile(lock "cmpxchgq %[new], %[ptr]" \
                             CC_SET(z) \
                             : CC_OUT(z) (success), \
                               [ptr] "+m" (*__ptr), \
                               [old] "+a" (__old) \
                             : [new] "r" (__new) \
                             : "memory"); \
                break; \
        } \
        default: \
                __cmpxchg_wrong_size(); \
        } \
        if (unlikely(!success)) \
                *_old = __old; \
        likely(success); \
})

#define __try_cmpxchg(ptr, pold, new, size) \
        __raw_try_cmpxchg((ptr), (pold), (new), (size), LOCK_PREFIX)

#define arch_try_cmpxchg(ptr, pold, new) \
        __try_cmpxchg((ptr), (pold), (new), sizeof(*(ptr)))
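
/*
 * Illustrative use of arch_try_cmpxchg() (a sketch only; "counter" is a
 * hypothetical unsigned long *): on failure the macro refreshes "old"
 * from memory as well as returning false, so the loop does not need to
 * re-read it.
 *
 *        unsigned long new, old = READ_ONCE(*counter);
 *
 *        do {
 *                new = old + 1;
 *        } while (!arch_try_cmpxchg(counter, &old, new));
 */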
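
/*
 * xadd() adds "inc" to "*ptr" and atomically returns the previous
 * value of "*ptr".
 *
 * xadd() is locked when multiple CPUs are using it.
 */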
#define __xadd(ptr, inc, lock) __xchg_op((ptr), (inc), xadd, lock)
#define xadd(ptr, inc) __xadd((ptr), (inc), LOCK_PREFIX)
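
/*
 * Double-word compare and exchange of two adjacent long-sized values,
 * using cmpxchg8b (32-bit) or cmpxchg16b (64-bit); the "%c5" operand
 * expands the 2 * sizeof(long) immediate into the instruction suffix.
 * The pair must be contiguous and aligned to twice the word size, which
 * the BUILD_BUG_ON()/VM_BUG_ON() checks below enforce.  Returns true if
 * the exchange was performed.
 */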
#define __cmpxchg_double(pfx, p1, p2, o1, o2, n1, n2) \
({ \
        bool __ret; \
        __typeof__(*(p1)) __old1 = (o1), __new1 = (n1); \
        __typeof__(*(p2)) __old2 = (o2), __new2 = (n2); \
        BUILD_BUG_ON(sizeof(*(p1)) != sizeof(long)); \
        BUILD_BUG_ON(sizeof(*(p2)) != sizeof(long)); \
        VM_BUG_ON((unsigned long)(p1) % (2 * sizeof(long))); \
        VM_BUG_ON((unsigned long)((p1) + 1) != (unsigned long)(p2)); \
        asm volatile(pfx "cmpxchg%c5b %1" \
                     CC_SET(e) \
                     : CC_OUT(e) (__ret), \
                       "+m" (*(p1)), "+m" (*(p2)), \
                       "+a" (__old1), "+d" (__old2) \
                     : "i" (2 * sizeof(long)), \
                       "b" (__new1), "c" (__new2)); \
        __ret; \
})

#define arch_cmpxchg_double(p1, p2, o1, o2, n1, n2) \
        __cmpxchg_double(LOCK_PREFIX, p1, p2, o1, o2, n1, n2)

#define arch_cmpxchg_double_local(p1, p2, o1, o2, n1, n2) \
        __cmpxchg_double(, p1, p2, o1, o2, n1, n2)

#endif /* ASM_X86_CMPXCHG_H */