/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright IBM Corp. 1999, 2011
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
 */

#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/mmdebug.h>
#include <linux/types.h>
#include <linux/bug.h>

void __xchg_called_with_bad_pointer(void);

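/*
 * Unconditionally exchange the value at @address with @x and return the
 * old value.  COMPARE AND SWAP (CS/CSG) only operates on word and
 * doubleword granularity, so the 1- and 2-byte cases are emulated with
 * a CS loop on the containing aligned word: everything outside the
 * target byte/halfword is preserved, and the loop retries until the
 * word is swapped without interference.
 */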
static __always_inline unsigned long __xchg(unsigned long x,
					    unsigned long address, int size)
{
	unsigned long old;
	int shift;

	switch (size) {
	case 1:
		/* Bit offset of the byte within its aligned word (big-endian) */
		shift = (3 ^ (address & 3)) << 3;
		/* Round the address down to the containing word */
		address ^= address & 3;
		asm volatile(
			"	l	%0,%1\n"
			"0:	lr	0,%0\n"
			"	nr	0,%3\n"
			"	or	0,%2\n"
			"	cs	%0,0,%1\n"
			"	jl	0b\n"
			: "=&d" (old), "+Q" (*(int *) address)
			: "d" ((x & 0xff) << shift), "d" (~(0xff << shift))
			: "memory", "cc", "0");
		return old >> shift;
	case 2:
		/* Bit offset of the halfword within its aligned word */
		shift = (2 ^ (address & 2)) << 3;
		/* Round the address down to the containing word */
		address ^= address & 2;
		asm volatile(
			"	l	%0,%1\n"
			"0:	lr	0,%0\n"
			"	nr	0,%3\n"
			"	or	0,%2\n"
			"	cs	%0,0,%1\n"
			"	jl	0b\n"
			: "=&d" (old), "+Q" (*(int *) address)
			: "d" ((x & 0xffff) << shift), "d" (~(0xffff << shift))
			: "memory", "cc", "0");
		return old >> shift;
	case 4:
		asm volatile(
			"	l	%0,%1\n"
			"0:	cs	%0,%2,%1\n"
			"	jl	0b\n"
			: "=&d" (old), "+Q" (*(int *) address)
			: "d" (x)
			: "memory", "cc");
		return old;
	case 8:
		asm volatile(
			"	lg	%0,%1\n"
			"0:	csg	%0,%2,%1\n"
			"	jl	0b\n"
			: "=&d" (old), "+QS" (*(long *) address)
			: "d" (x)
			: "memory", "cc");
		return old;
	}
	__xchg_called_with_bad_pointer();
	return x;
}

#define arch_xchg(ptr, x)						\
({									\
	__typeof__(*(ptr)) __ret;					\
									\
	__ret = (__typeof__(*(ptr)))					\
		__xchg((unsigned long)(x), (unsigned long)(ptr),	\
		       sizeof(*(ptr)));					\
	__ret;								\
})
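
/*
 * Example (illustrative only, not part of this header):
 *
 *	unsigned int lock = 1;
 *	unsigned int prev = arch_xchg(&lock, 0);
 *	// prev == 1, lock == 0, performed atomically
 */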

void __cmpxchg_called_with_bad_pointer(void);

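/*
 * Compare @old with the value at @address; if they match, store @new
 * there.  The previous memory contents are returned either way.  For
 * the 1- and 2-byte cases the containing aligned word is operated on
 * with CS: if CS fails only because bits *outside* the target
 * byte/halfword changed concurrently, the loop retries; a mismatch
 * within the target byte/halfword itself is a genuine compare failure
 * and terminates the loop (the xr/nr/jnz sequence below).
 */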
static __always_inline unsigned long __cmpxchg(unsigned long address,
					       unsigned long old,
					       unsigned long new, int size)
{
	unsigned long prev, tmp;
	int shift;

	switch (size) {
	case 1:
		/* Bit offset of the byte within its aligned word (big-endian) */
		shift = (3 ^ (address & 3)) << 3;
		/* Round the address down to the containing word */
		address ^= address & 3;
		asm volatile(
			"	l	%0,%2\n"
			"0:	nr	%0,%5\n"
			"	lr	%1,%0\n"
			"	or	%0,%3\n"
			"	or	%1,%4\n"
			"	cs	%0,%1,%2\n"
			"	jnl	1f\n"
			"	xr	%1,%0\n"
			"	nr	%1,%5\n"
			"	jnz	0b\n"
			"1:"
			: "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) address)
			: "d" ((old & 0xff) << shift),
			  "d" ((new & 0xff) << shift),
			  "d" (~(0xff << shift))
			: "memory", "cc");
		return prev >> shift;
	case 2:
		/* Bit offset of the halfword within its aligned word */
		shift = (2 ^ (address & 2)) << 3;
		/* Round the address down to the containing word */
		address ^= address & 2;
		asm volatile(
			"	l	%0,%2\n"
			"0:	nr	%0,%5\n"
			"	lr	%1,%0\n"
			"	or	%0,%3\n"
			"	or	%1,%4\n"
			"	cs	%0,%1,%2\n"
			"	jnl	1f\n"
			"	xr	%1,%0\n"
			"	nr	%1,%5\n"
			"	jnz	0b\n"
			"1:"
			: "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) address)
			: "d" ((old & 0xffff) << shift),
			  "d" ((new & 0xffff) << shift),
			  "d" (~(0xffff << shift))
			: "memory", "cc");
		return prev >> shift;
	case 4:
		asm volatile(
			"	cs	%0,%3,%1\n"
			: "=&d" (prev), "+Q" (*(int *) address)
			: "0" (old), "d" (new)
			: "memory", "cc");
		return prev;
	case 8:
		asm volatile(
			"	csg	%0,%3,%1\n"
			: "=&d" (prev), "+QS" (*(long *) address)
			: "0" (old), "d" (new)
			: "memory", "cc");
		return prev;
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}

#define arch_cmpxchg(ptr, o, n)						\
({									\
	__typeof__(*(ptr)) __ret;					\
									\
	__ret = (__typeof__(*(ptr)))					\
		__cmpxchg((unsigned long)(ptr), (unsigned long)(o),	\
			  (unsigned long)(n), sizeof(*(ptr)));		\
	__ret;								\
})
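
/*
 * Example (illustrative only, not part of this header):
 *
 *	int v = 5;
 *	int prev = arch_cmpxchg(&v, 5, 7);	// succeeds: prev == 5, v == 7
 *	prev = arch_cmpxchg(&v, 5, 9);		// fails: prev == 7, v stays 7
 */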

/*
 * CS/CSG handle all sizes natively, so the 64-bit and "local" variants
 * can simply map to arch_cmpxchg().
 */
#define arch_cmpxchg64		arch_cmpxchg
#define arch_cmpxchg_local	arch_cmpxchg
#define arch_cmpxchg64_local	arch_cmpxchg

#define system_has_cmpxchg_double()	1

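/*
 * Compare-and-swap a pair of adjacent doublewords with a single CDSG
 * (COMPARE DOUBLE AND SWAP) instruction.  CDSG takes an even/odd
 * register pair for both the old and the new value, modelled here via
 * union register_pair; ipm/srl extract the resulting condition code,
 * where cc 0 means both comparisons matched and the swap took place.
 * Returns 1 if the swap succeeded, 0 otherwise.
 */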
static __always_inline int __cmpxchg_double(unsigned long p1, unsigned long p2,
					    unsigned long o1, unsigned long o2,
					    unsigned long n1, unsigned long n2)
{
	union register_pair old = { .even = o1, .odd = o2, };
	union register_pair new = { .even = n1, .odd = n2, };
	int cc;

	asm volatile(
		"	cdsg	%[old],%[new],%[ptr]\n"
		"	ipm	%[cc]\n"
		"	srl	%[cc],28\n"
		: [cc] "=&d" (cc), [old] "+&d" (old.pair)
		: [new] "d" (new.pair),
		  [ptr] "QS" (*(unsigned long *) p1), "Q" (*(unsigned long *) p2)
		: "memory", "cc");
	/* Condition code 0 means the compare matched and the swap happened */
	return !cc;
}

#define arch_cmpxchg_double(p1, p2, o1, o2, n1, n2)			\
({									\
	typeof(p1) __p1 = (p1);						\
	typeof(p2) __p2 = (p2);						\
									\
	BUILD_BUG_ON(sizeof(*(p1)) != sizeof(long));			\
	BUILD_BUG_ON(sizeof(*(p2)) != sizeof(long));			\
	VM_BUG_ON((unsigned long)((__p1) + 1) != (unsigned long)(__p2));\
	__cmpxchg_double((unsigned long)__p1, (unsigned long)__p2,	\
			 (unsigned long)(o1), (unsigned long)(o2),	\
			 (unsigned long)(n1), (unsigned long)(n2));	\
})
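
/*
 * Example (illustrative only): atomically replace two adjacent longs,
 * e.g. a pair of fields in a 16-byte aligned structure (CDSG requires
 * the memory operand to be aligned on a 16-byte boundary):
 *
 *	struct { unsigned long lo, hi; } __aligned(16) p = { 1, 2 };
 *	int ok = arch_cmpxchg_double(&p.lo, &p.hi, 1, 2, 3, 4);
 *	// ok == 1, p.lo == 3, p.hi == 4
 */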

#endif /* __ASM_CMPXCHG_H */