/* SPDX-License-Identifier: GPL-2.0 */

#ifndef __ASM_CSKY_CMPXCHG_H
#define __ASM_CSKY_CMPXCHG_H

#ifdef CONFIG_SMP
#include <linux/bug.h>
#include <asm/barrier.h>

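/*
 * Atomic exchange with relaxed ordering, built on the ldex.w/stex.w
 * load-exclusive/store-exclusive pair.  The 2-byte case operates on the
 * naturally aligned 32-bit word containing the halfword: the old halfword
 * is masked out, the shifted new value is merged in, and the
 * store-exclusive is retried until it succeeds.  Other sizes fail at
 * build time.
 */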
#define __xchg_relaxed(new, ptr, size) \
({ \
	__typeof__(ptr) __ptr = (ptr); \
	__typeof__(new) __new = (new); \
	__typeof__(*(ptr)) __ret; \
	unsigned long tmp; \
	switch (size) { \
	case 2: { \
		u32 ret; \
		u32 shif = ((ulong)__ptr & 2) ? 16 : 0; \
		u32 mask = 0xffff << shif; \
		__ptr = (__typeof__(ptr))((ulong)__ptr & ~2); \
		__asm__ __volatile__ ( \
			"1:	ldex.w %0, (%4)\n" \
			"	and    %1, %0, %2\n" \
			"	or     %1, %1, %3\n" \
			"	stex.w %1, (%4)\n" \
			"	bez    %1, 1b\n" \
			: "=&r" (ret), "=&r" (tmp) \
			: "r" (~mask), \
			  "r" ((u32)__new << shif), \
			  "r" (__ptr) \
			: "memory"); \
		__ret = (__typeof__(*(ptr))) \
			((ret & mask) >> shif); \
		break; \
	} \
	case 4: \
		asm volatile ( \
		"1:	ldex.w %0, (%3)\n" \
		"	mov    %1, %2\n" \
		"	stex.w %1, (%3)\n" \
		"	bez    %1, 1b\n" \
			: "=&r" (__ret), "=&r" (tmp) \
			: "r" (__new), "r"(__ptr) \
			:); \
		break; \
	default: \
		BUILD_BUG(); \
	} \
	__ret; \
})

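/*
 * Note the operand order: the API macro takes (ptr, x) while the helper
 * above takes the new value first.  Only the relaxed form is provided;
 * the generic atomic code builds the ordered variants on top of it.
 */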
#define arch_xchg_relaxed(ptr, x) \
	(__xchg_relaxed((x), (ptr), sizeof(*(ptr))))

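/*
 * Compare-and-exchange with relaxed ordering, 32-bit only.  The loaded
 * value is compared with the expected one; on mismatch the loop is left
 * immediately, otherwise the store-exclusive is retried until it
 * succeeds.  The value found in memory is returned in either case.
 */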
#define __cmpxchg_relaxed(ptr, old, new, size) \
({ \
	__typeof__(ptr) __ptr = (ptr); \
	__typeof__(new) __new = (new); \
	__typeof__(new) __tmp; \
	__typeof__(old) __old = (old); \
	__typeof__(*(ptr)) __ret; \
	switch (size) { \
	case 4: \
		asm volatile ( \
		"1:	ldex.w %0, (%3)\n" \
		"	cmpne  %0, %4\n" \
		"	bt     2f\n" \
		"	mov    %1, %2\n" \
		"	stex.w %1, (%3)\n" \
		"	bez    %1, 1b\n" \
		"2:\n" \
			: "=&r" (__ret), "=&r" (__tmp) \
			: "r" (__new), "r"(__ptr), "r"(__old) \
			:); \
		break; \
	default: \
		BUILD_BUG(); \
	} \
	__ret; \
})

#define arch_cmpxchg_relaxed(ptr, o, n) \
	(__cmpxchg_relaxed((ptr), (o), (n), sizeof(*(ptr))))

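/*
 * Same as __cmpxchg_relaxed(), but an ACQUIRE_FENCE (from <asm/barrier.h>)
 * is executed after a successful store-exclusive, giving acquire ordering
 * on the success path.
 */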
#define __cmpxchg_acquire(ptr, old, new, size) \
({ \
	__typeof__(ptr) __ptr = (ptr); \
	__typeof__(new) __new = (new); \
	__typeof__(new) __tmp; \
	__typeof__(old) __old = (old); \
	__typeof__(*(ptr)) __ret; \
	switch (size) { \
	case 4: \
		asm volatile ( \
		"1:	ldex.w %0, (%3)\n" \
		"	cmpne  %0, %4\n" \
		"	bt     2f\n" \
		"	mov    %1, %2\n" \
		"	stex.w %1, (%3)\n" \
		"	bez    %1, 1b\n" \
		ACQUIRE_FENCE \
		"2:\n" \
			: "=&r" (__ret), "=&r" (__tmp) \
			: "r" (__new), "r"(__ptr), "r"(__old) \
			:); \
		break; \
	default: \
		BUILD_BUG(); \
	} \
	__ret; \
})

#define arch_cmpxchg_acquire(ptr, o, n) \
	(__cmpxchg_acquire((ptr), (o), (n), sizeof(*(ptr))))

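/*
 * Fully ordered compare-and-exchange: RELEASE_FENCE before the
 * load-exclusive loop and FULL_FENCE after a successful store-exclusive
 * provide the full-barrier semantics expected of arch_cmpxchg().
 */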
#define __cmpxchg(ptr, old, new, size) \
({ \
	__typeof__(ptr) __ptr = (ptr); \
	__typeof__(new) __new = (new); \
	__typeof__(new) __tmp; \
	__typeof__(old) __old = (old); \
	__typeof__(*(ptr)) __ret; \
	switch (size) { \
	case 4: \
		asm volatile ( \
		RELEASE_FENCE \
		"1:	ldex.w %0, (%3)\n" \
		"	cmpne  %0, %4\n" \
		"	bt     2f\n" \
		"	mov    %1, %2\n" \
		"	stex.w %1, (%3)\n" \
		"	bez    %1, 1b\n" \
		FULL_FENCE \
		"2:\n" \
			: "=&r" (__ret), "=&r" (__tmp) \
			: "r" (__new), "r"(__ptr), "r"(__old) \
			:); \
		break; \
	default: \
		BUILD_BUG(); \
	} \
	__ret; \
})

#define arch_cmpxchg(ptr, o, n) \
	(__cmpxchg((ptr), (o), (n), sizeof(*(ptr))))

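/*
 * cmpxchg_local() only needs to be atomic with respect to the current
 * CPU, so the relaxed variant is sufficient.
 */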
#define arch_cmpxchg_local(ptr, o, n) \
	(__cmpxchg_relaxed((ptr), (o), (n), sizeof(*(ptr))))
#else
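/* !CONFIG_SMP: fall back to the generic UP implementation. */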
#include <asm-generic/cmpxchg.h>
#endif

#endif /* __ASM_CSKY_CMPXCHG_H */