#include <linux/bitops.h>
#include <asm/cmpxchg.h>

0010 unsigned long __xchg_small(volatile void *ptr, unsigned long val, unsigned int size)
0011 {
0012 u32 old32, new32, load32, mask;
0013 volatile u32 *ptr32;
0014 unsigned int shift;
0015
0016
0017 WARN_ON((unsigned long)ptr & (size - 1));
0018
0019
0020 mask = GENMASK((size * BITS_PER_BYTE) - 1, 0);
0021 val &= mask;
0022
0023
0024
0025
0026
0027
0028 shift = (unsigned long)ptr & 0x3;
0029 if (IS_ENABLED(CONFIG_CPU_BIG_ENDIAN))
0030 shift ^= sizeof(u32) - size;
0031 shift *= BITS_PER_BYTE;
0032 mask <<= shift;
0033
0034
0035
0036
0037
0038 ptr32 = (volatile u32 *)((unsigned long)ptr & ~0x3);
0039 load32 = *ptr32;
0040
0041 do {
0042 old32 = load32;
0043 new32 = (load32 & ~mask) | (val << shift);
0044 load32 = arch_cmpxchg(ptr32, old32, new32);
0045 } while (load32 != old32);
0046
0047 return (load32 & mask) >> shift;
0048 }
0049
0050 unsigned long __cmpxchg_small(volatile void *ptr, unsigned long old,
0051 unsigned long new, unsigned int size)
0052 {
0053 u32 mask, old32, new32, load32, load;
0054 volatile u32 *ptr32;
0055 unsigned int shift;
0056
0057
0058 WARN_ON((unsigned long)ptr & (size - 1));
0059
0060
0061 mask = GENMASK((size * BITS_PER_BYTE) - 1, 0);
0062 old &= mask;
0063 new &= mask;
0064
0065
0066
0067
0068
0069
0070 shift = (unsigned long)ptr & 0x3;
0071 if (IS_ENABLED(CONFIG_CPU_BIG_ENDIAN))
0072 shift ^= sizeof(u32) - size;
0073 shift *= BITS_PER_BYTE;
0074 mask <<= shift;
0075
0076
0077
0078
0079
0080 ptr32 = (volatile u32 *)((unsigned long)ptr & ~0x3);
0081 load32 = *ptr32;
0082
0083 while (true) {
0084
0085
0086
0087
0088 load = (load32 & mask) >> shift;
0089 if (load != old)
0090 return load;
0091
0092
0093
0094
0095
0096
0097
0098 old32 = (load32 & ~mask) | (old << shift);
0099 new32 = (load32 & ~mask) | (new << shift);
0100 load32 = arch_cmpxchg(ptr32, old32, new32);
0101 if (load32 == old32)
0102 return old;
0103 }
0104 }