/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 06, 07 by Ralf Baechle (ralf@linux-mips.org)
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/bug.h>
#include <linux/irqflags.h>
#include <asm/asm.h>
#include <asm/compiler.h>
#include <asm/sync.h>

/*
 * These functions don't exist, so if they are called you'll either:
 *
 * - Get an error at compile-time due to __compiletime_error, if supported by
 *   your compiler.
 *
 * or:
 *
 * - Get an error at link-time due to the call to the missing function.
 */
extern unsigned long __cmpxchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for cmpxchg");
extern unsigned long __cmpxchg64_unsupported(void)
	__compiletime_error("cmpxchg64 not available; cpu_has_64bits may be false");
extern unsigned long __xchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for xchg");

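/*
 * Atomically exchange the value at @m for @val: load-linked via @ld, then
 * store-conditional via @st, retrying until the SC succeeds. CPUs without
 * LL/SC fall back to a plain read/write inside an IRQ-disabled critical
 * section, which is sufficient because such CPUs only run UP kernels.
 */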
#define __xchg_asm(ld, st, m, val) \
({ \
	__typeof(*(m)) __ret; \
	\
	if (kernel_uses_llsc) { \
		__asm__ __volatile__( \
		" .set push \n" \
		" .set noat \n" \
		" .set push \n" \
		" .set " MIPS_ISA_ARCH_LEVEL " \n" \
		" " __SYNC(full, loongson3_war) " \n" \
		"1: " ld " %0, %2 # __xchg_asm \n" \
		" .set pop \n" \
		" move $1, %z3 \n" \
		" .set " MIPS_ISA_ARCH_LEVEL " \n" \
		" " st " $1, %1 \n" \
		"\t" __stringify(SC_BEQZ) " $1, 1b \n" \
		" .set pop \n" \
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m) \
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (val) \
		: __LLSC_CLOBBER); \
	} else { \
		unsigned long __flags; \
		\
		raw_local_irq_save(__flags); \
		__ret = *m; \
		*m = val; \
		raw_local_irq_restore(__flags); \
	} \
	\
	__ret; \
})

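/*
 * 8- and 16-bit exchange is implemented out of line by __xchg_small()
 * (arch/mips/kernel/cmpxchg.c), which builds the operation from 32-bit
 * cmpxchg on the naturally aligned word containing the value.
 */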
extern unsigned long __xchg_small(volatile void *ptr, unsigned long val,
				  unsigned int size);

static __always_inline
unsigned long __xchg(volatile void *ptr, unsigned long x, int size)
{
	switch (size) {
	case 1:
	case 2:
		return __xchg_small(ptr, x, size);

	case 4:
		return __xchg_asm("ll", "sc", (volatile u32 *)ptr, x);

	case 8:
		if (!IS_ENABLED(CONFIG_64BIT))
			return __xchg_called_with_bad_pointer();

		return __xchg_asm("lld", "scd", (volatile u64 *)ptr, x);

	default:
		return __xchg_called_with_bad_pointer();
	}
}

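/*
 * arch_xchg() is fully ordered: a full barrier is emitted before the LL
 * (unless the Loongson3 workaround SYNC already provides it) and a
 * completion barrier follows the SC.
 */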
#define arch_xchg(ptr, x) \
({ \
	__typeof__(*(ptr)) __res; \
	\
	/* \
	 * In the Loongson3 workaround case __xchg_asm() already \
	 * contains a completion barrier prior to the LL, so we don't \
	 * need to emit an extra one here. \
	 */ \
	if (__SYNC_loongson3_war == 0) \
		smp_mb__before_llsc(); \
	\
	__res = (__typeof__(*(ptr))) \
		__xchg((ptr), (unsigned long)(x), sizeof(*(ptr))); \
	\
	smp_llsc_mb(); \
	\
	__res; \
})

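/*
 * Atomically compare the value at @m against @old and, only if they are
 * equal, replace it with @new. Returns the value originally loaded from
 * @m; the caller detects failure by comparing that against @old. As with
 * __xchg_asm(), non-LL/SC CPUs fall back to an IRQ-disabled critical
 * section.
 */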
#define __cmpxchg_asm(ld, st, m, old, new) \
({ \
	__typeof(*(m)) __ret; \
	\
	if (kernel_uses_llsc) { \
		__asm__ __volatile__( \
		" .set push \n" \
		" .set noat \n" \
		" .set push \n" \
		" .set " MIPS_ISA_ARCH_LEVEL " \n" \
		" " __SYNC(full, loongson3_war) " \n" \
		"1: " ld " %0, %2 # __cmpxchg_asm \n" \
		" bne %0, %z3, 2f \n" \
		" .set pop \n" \
		" move $1, %z4 \n" \
		" .set " MIPS_ISA_ARCH_LEVEL " \n" \
		" " st " $1, %1 \n" \
		"\t" __stringify(SC_BEQZ) " $1, 1b \n" \
		" .set pop \n" \
		"2: " __SYNC(full, loongson3_war) " \n" \
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m) \
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new) \
		: __LLSC_CLOBBER); \
	} else { \
		unsigned long __flags; \
		\
		raw_local_irq_save(__flags); \
		__ret = *m; \
		if (__ret == old) \
			*m = new; \
		raw_local_irq_restore(__flags); \
	} \
	\
	__ret; \
})

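/*
 * 8- and 16-bit cmpxchg is likewise implemented out of line by
 * __cmpxchg_small() (arch/mips/kernel/cmpxchg.c) in terms of 32-bit
 * cmpxchg on the containing aligned word.
 */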
extern unsigned long __cmpxchg_small(volatile void *ptr, unsigned long old,
				     unsigned long new, unsigned int size);

static __always_inline
unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
			unsigned long new, unsigned int size)
{
	switch (size) {
	case 1:
	case 2:
		return __cmpxchg_small(ptr, old, new, size);

	case 4:
		return __cmpxchg_asm("ll", "sc", (volatile u32 *)ptr,
				     (u32)old, new);

	case 8:
		/* lld/scd are only available for MIPS64 */
		if (!IS_ENABLED(CONFIG_64BIT))
			return __cmpxchg_called_with_bad_pointer();

		return __cmpxchg_asm("lld", "scd", (volatile u64 *)ptr,
				     (u64)old, new);

	default:
		return __cmpxchg_called_with_bad_pointer();
	}
}

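/*
 * arch_cmpxchg_local() provides no SMP ordering guarantees of its own;
 * arch_cmpxchg() below wraps it with the barriers needed for a fully
 * ordered operation.
 */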
#define arch_cmpxchg_local(ptr, old, new) \
	((__typeof__(*(ptr))) \
		__cmpxchg((ptr), \
			  (unsigned long)(__typeof__(*(ptr)))(old), \
			  (unsigned long)(__typeof__(*(ptr)))(new), \
			  sizeof(*(ptr))))

#define arch_cmpxchg(ptr, old, new) \
({ \
	__typeof__(*(ptr)) __res; \
	\
	/* \
	 * In the Loongson3 workaround case __cmpxchg_asm() already \
	 * contains a completion barrier prior to the LL, so we don't \
	 * need to emit an extra one here. \
	 */ \
	if (__SYNC_loongson3_war == 0) \
		smp_mb__before_llsc(); \
	\
	__res = arch_cmpxchg_local((ptr), (old), (new)); \
	\
	/* \
	 * In the Loongson3 workaround case __cmpxchg_asm() already \
	 * contains a completion barrier after the SC, so we don't \
	 * need to emit an extra one here. \
	 */ \
	if (__SYNC_loongson3_war == 0) \
		smp_llsc_mb(); \
	\
	__res; \
})

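/*
 * On 64-bit kernels cmpxchg64 maps straight onto the lld/scd-based
 * __cmpxchg() above; 32-bit kernels need the special handling below.
 */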
#ifdef CONFIG_64BIT
#define arch_cmpxchg64_local(ptr, o, n) \
({ \
	BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
	arch_cmpxchg_local((ptr), (o), (n)); \
})

#define arch_cmpxchg64(ptr, o, n) \
({ \
	BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
	arch_cmpxchg((ptr), (o), (n)); \
})
#else

# include <asm-generic/cmpxchg-local.h>
# define arch_cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))

# ifdef CONFIG_SMP

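/*
 * 64-bit cmpxchg on a 32-bit SMP kernel: provided the CPU itself is
 * 64-bit capable, use lld/scd and shuffle each 64-bit value between one
 * doubleword register and the pair of 32-bit registers that hold it at
 * the C level.
 */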
static inline unsigned long __cmpxchg64(volatile void *ptr,
					unsigned long long old,
					unsigned long long new)
{
	unsigned long long tmp, ret;
	unsigned long flags;

	/*
	 * The assembly below has to combine 32 bit values in a 64 bit
	 * register, and split 64 bit values from one register into two. If we
	 * were to take an interrupt in the middle of this we'd only save the
	 * least significant 32 bits of each register & probably clobber the
	 * most significant 32 bits of the 64 bit values we're using. In order
	 * to avoid this we must disable interrupts.
	 */
	local_irq_save(flags);

	asm volatile(
	" .set push \n"
	" .set " MIPS_ISA_ARCH_LEVEL " \n"
	/* Load 64 bits from ptr */
0249 " " __SYNC(full, loongson3_war) " \n"
0250 "1: lld %L0, %3 # __cmpxchg64 \n"
0251 " .set pop \n"
	/*
	 * Split the 64 bit value we loaded into the 2 registers that hold the
	 * ret variable.
	 */
0256 " dsra %M0, %L0, 32 \n"
0257 " sll %L0, %L0, 0 \n"
	/*
	 * Compare ret against old, breaking out of the loop if they don't
	 * match.
	 */
0262 " bne %M0, %M4, 2f \n"
0263 " bne %L0, %L4, 2f \n"
	/*
	 * Combine the 32 bit halves from the 2 registers that hold the new
	 * variable into a single 64 bit register.
	 */
#  if MIPS_ISA_REV >= 2
	" move %L1, %L5 \n"
	" dins %L1, %M5, 32, 32 \n"
#  else
	" dsll %L1, %L5, 32 \n"
	" dsrl %L1, %L1, 32 \n"
	" .set noat \n"
	" dsll $at, %M5, 32 \n"
	" or %L1, %L1, $at \n"
	" .set at \n"
#  endif
	" .set push \n"
	" .set " MIPS_ISA_ARCH_LEVEL " \n"
	/* Store the combined 64 bit value to memory. */
0282 " scd %L1, %2 \n"
	/* On failure, loop back to try again. */
0284 "\t" __stringify(SC_BEQZ) " %L1, 1b \n"
0285 "2: " __SYNC(full, loongson3_war) " \n"
0286 " .set pop \n"
0287 : "=&r"(ret),
0288 "=&r"(tmp),
0289 "=" GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr)
0290 : GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr),
0291 "r" (old),
0292 "r" (new)
0293 : "memory");
0294
0295 local_irq_restore(flags);
0296 return ret;
0297 }
0298
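/*
 * Fully ordered 64-bit cmpxchg for 32-bit SMP kernels; resolves to a
 * compile- or link-time error via __cmpxchg64_unsupported() when the CPU
 * cannot execute lld/scd.
 */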
# define arch_cmpxchg64(ptr, o, n) ({ \
	unsigned long long __old = (__typeof__(*(ptr)))(o); \
	unsigned long long __new = (__typeof__(*(ptr)))(n); \
	__typeof__(*(ptr)) __res; \
	\
	/* \
	 * We can only use cmpxchg64 if we know that the CPU supports \
	 * 64-bits, ie. lld & scd. Our call to __cmpxchg64_unsupported \
	 * will cause a build error unless cpu_has_64bits is a \
	 * compile-time constant 1. \
	 */ \
	if (cpu_has_64bits && kernel_uses_llsc) { \
		smp_mb__before_llsc(); \
		__res = __cmpxchg64((ptr), __old, __new); \
		smp_llsc_mb(); \
	} else { \
		__res = __cmpxchg64_unsupported(); \
	} \
	\
	__res; \
})

# else
#  define arch_cmpxchg64(ptr, o, n) arch_cmpxchg64_local((ptr), (o), (n))
# endif
#endif

#endif /* __ASM_CMPXCHG_H */