/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ALPHA_CMPXCHG_H
#error Do not include xchg.h directly!
#else
/*
 * xchg/xchg_local and cmpxchg/cmpxchg_local share the same code,
 * except that the local versions do not have the expensive memory
 * barrier.  So this file is included twice from asm/cmpxchg.h.
 */
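
/*
 * For context, the double inclusion works roughly like this in the
 * companion asm/cmpxchg.h (a sketch, not the verbatim header): the
 * ____xchg/____cmpxchg macros paste a type suffix, and optionally a
 * _local suffix, onto the function names defined below.
 *
 *	#define ____xchg(type, args...)    __xchg ## type ## _local(args)
 *	#define ____cmpxchg(type, args...) __cmpxchg ## type ## _local(args)
 *	#include <asm/xchg.h>              // defines the *_local variants
 *
 *	#undef ____xchg
 *	#undef ____cmpxchg
 *	#define ____xchg(type, args...)    __xchg ## type(args)
 *	#define ____cmpxchg(type, args...) __cmpxchg ## type(args)
 *	#include <asm/xchg.h>              // defines the barrier variants
 */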

/*
 * Atomic exchange.
 * Since it can be used to implement critical sections
 * it must clobber "memory" (also for interrupts in UP).
 */

static inline unsigned long
____xchg(_u8, volatile char *m, unsigned long val)
{
    unsigned long ret, tmp, addr64;

    __asm__ __volatile__(
    "   andnot  %4,7,%3\n"      /* addr64 = addr of containing quadword */
    "   insbl   %1,%4,%1\n"     /* shift the new byte into its lane */
    "1: ldq_l   %2,0(%3)\n"     /* load-locked the whole quadword */
    "   extbl   %2,%4,%0\n"     /* ret = old byte */
    "   mskbl   %2,%4,%2\n"     /* clear the byte lane */
    "   or  %1,%2,%2\n"         /* merge in the new byte */
    "   stq_c   %2,0(%3)\n"     /* store-conditional; %2 = 0 on failure */
    "   beq %2,2f\n"            /* failed: retry (branch is out of line) */
    ".subsection 2\n"
    "2: br  1b\n"
    ".previous"
    : "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
    : "r" ((long)m), "1" (val) : "memory");

    return ret;
}
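
/*
 * In C-like pseudocode (illustrative only, not compiled), the byte
 * variant above amounts to an LL/SC retry loop on the containing
 * aligned quadword:
 *
 *	unsigned long *q = (unsigned long *)((long)m & ~7);
 *	int shift = ((long)m & 7) * 8;			// byte lane
 *	do {
 *		old = load_locked(q);
 *		ret = (old >> shift) & 0xff;		// extbl
 *		new = (old & ~(0xffUL << shift))	// mskbl
 *		      | ((val & 0xff) << shift);	// insbl + or
 *	} while (!store_conditional(q, new));
 *	return ret;
 *
 * load_locked()/store_conditional() are stand-ins for ldq_l/stq_c.
 */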

static inline unsigned long
____xchg(_u16, volatile short *m, unsigned long val)
{
    unsigned long ret, tmp, addr64;

    /* Same technique as the _u8 variant, using the 16-bit (word)
       insert/extract/mask forms inswl/extwl/mskwl.  */
    __asm__ __volatile__(
    "   andnot  %4,7,%3\n"
    "   inswl   %1,%4,%1\n"
    "1: ldq_l   %2,0(%3)\n"
    "   extwl   %2,%4,%0\n"
    "   mskwl   %2,%4,%2\n"
    "   or  %1,%2,%2\n"
    "   stq_c   %2,0(%3)\n"
    "   beq %2,2f\n"
    ".subsection 2\n"
    "2: br  1b\n"
    ".previous"
    : "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
    : "r" ((long)m), "1" (val) : "memory");

    return ret;
}

static inline unsigned long
____xchg(_u32, volatile int *m, unsigned long val)
{
    unsigned long dummy;

    __asm__ __volatile__(
    "1: ldl_l %0,%4\n"      /* load-locked the old value */
    "   bis $31,%3,%1\n"    /* register move: dummy = val ($31 reads as zero) */
    "   stl_c %1,%2\n"      /* store-conditional; %1 = 0 on failure */
    "   beq %1,2f\n"        /* failed: retry */
    ".subsection 2\n"
    "2: br 1b\n"
    ".previous"
    : "=&r" (val), "=&r" (dummy), "=m" (*m)
    : "rI" (val), "m" (*m) : "memory");

    return val;
}

static inline unsigned long
____xchg(_u64, volatile long *m, unsigned long val)
{
    unsigned long dummy;

    /* Identical to the _u32 variant, but on a full quadword.  */
    __asm__ __volatile__(
    "1: ldq_l %0,%4\n"
    "   bis $31,%3,%1\n"
    "   stq_c %1,%2\n"
    "   beq %1,2f\n"
    ".subsection 2\n"
    "2: br 1b\n"
    ".previous"
    : "=&r" (val), "=&r" (dummy), "=m" (*m)
    : "rI" (val), "m" (*m) : "memory");

    return val;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid xchg().  */
extern void __xchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____xchg(, volatile void *ptr, unsigned long x, int size)
{
    switch (size) {
        case 1:
            return ____xchg(_u8, ptr, x);
        case 2:
            return ____xchg(_u16, ptr, x);
        case 4:
            return ____xchg(_u32, ptr, x);
        case 8:
            return ____xchg(_u64, ptr, x);
    }
    __xchg_called_with_bad_pointer();
    return x;
}
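
/*
 * The size dispatch above is resolved at compile time: callers go
 * through a wrapper macro in asm/cmpxchg.h that passes sizeof(*ptr),
 * so the switch collapses to a single case.  A sketch of such a
 * wrapper (illustrative, not the verbatim kernel macro):
 *
 *	#define xchg(ptr, x)						\
 *	({								\
 *		__typeof__(*(ptr)) _x_ = (x);				\
 *		(__typeof__(*(ptr))) __xchg((ptr), (unsigned long)_x_,	\
 *					    sizeof(*(ptr)));		\
 *	})
 */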

/*
 * Atomic compare and exchange.  Compare OLD with MEM; if they are
 * identical, store NEW in MEM.  Return the initial value in MEM.
 * Success is indicated by comparing RETURN with OLD.
 */
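
/*
 * Typical caller-side pattern (a sketch; 'counter' is an illustrative
 * name): retry until the compare succeeds, e.g. to build an atomic
 * increment out of cmpxchg():
 *
 *	unsigned long old;
 *	do {
 *		old = READ_ONCE(*counter);
 *	} while (cmpxchg(counter, old, old + 1) != old);
 */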

static inline unsigned long
____cmpxchg(_u8, volatile char *m, unsigned char old, unsigned char new)
{
    unsigned long prev, tmp, cmp, addr64;

    __asm__ __volatile__(
    "   andnot  %5,7,%4\n"      /* addr64 = addr of containing quadword */
    "   insbl   %1,%5,%1\n"     /* shift the new byte into its lane */
    "1: ldq_l   %2,0(%4)\n"     /* load-locked the whole quadword */
    "   extbl   %2,%5,%0\n"     /* prev = current byte */
    "   cmpeq   %0,%6,%3\n"     /* does it match old? */
    "   beq %3,2f\n"            /* no: bail out, return prev */
    "   mskbl   %2,%5,%2\n"     /* clear the byte lane */
    "   or  %1,%2,%2\n"         /* merge in the new byte */
    "   stq_c   %2,0(%4)\n"     /* store-conditional; %2 = 0 on failure */
    "   beq %2,3f\n"            /* failed: retry */
    "2:\n"
    ".subsection 2\n"
    "3: br  1b\n"
    ".previous"
    : "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
    : "r" ((long)m), "Ir" (old), "1" (new) : "memory");

    return prev;
}

static inline unsigned long
____cmpxchg(_u16, volatile short *m, unsigned short old, unsigned short new)
{
    unsigned long prev, tmp, cmp, addr64;

    /* Same as the _u8 variant, using the word-sized inswl/extwl/mskwl.  */
    __asm__ __volatile__(
    "   andnot  %5,7,%4\n"
    "   inswl   %1,%5,%1\n"
    "1: ldq_l   %2,0(%4)\n"
    "   extwl   %2,%5,%0\n"
    "   cmpeq   %0,%6,%3\n"
    "   beq %3,2f\n"
    "   mskwl   %2,%5,%2\n"
    "   or  %1,%2,%2\n"
    "   stq_c   %2,0(%4)\n"
    "   beq %2,3f\n"
    "2:\n"
    ".subsection 2\n"
    "3: br  1b\n"
    ".previous"
    : "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
    : "r" ((long)m), "Ir" (old), "1" (new) : "memory");

    return prev;
}

static inline unsigned long
____cmpxchg(_u32, volatile int *m, int old, int new)
{
    unsigned long prev, cmp;

    __asm__ __volatile__(
    "1: ldl_l %0,%5\n"      /* load-locked the current value */
    "   cmpeq %0,%3,%1\n"   /* does it match old? */
    "   beq %1,2f\n"        /* no: bail out, return prev */
    "   mov %4,%1\n"        /* cmp = new */
    "   stl_c %1,%2\n"      /* store-conditional; %1 = 0 on failure */
    "   beq %1,3f\n"        /* failed: retry */
    "2:\n"
    ".subsection 2\n"
    "3: br 1b\n"
    ".previous"
    : "=&r"(prev), "=&r"(cmp), "=m"(*m)
    : "r"((long) old), "r"(new), "m"(*m) : "memory");

    return prev;
}

static inline unsigned long
____cmpxchg(_u64, volatile long *m, unsigned long old, unsigned long new)
{
    unsigned long prev, cmp;

    /* Identical to the _u32 variant, but on a full quadword.  */
    __asm__ __volatile__(
    "1: ldq_l %0,%5\n"
    "   cmpeq %0,%3,%1\n"
    "   beq %1,2f\n"
    "   mov %4,%1\n"
    "   stq_c %1,%2\n"
    "   beq %1,3f\n"
    "2:\n"
    ".subsection 2\n"
    "3: br 1b\n"
    ".previous"
    : "=&r"(prev), "=&r"(cmp), "=m"(*m)
    : "r"((long) old), "r"(new), "m"(*m) : "memory");

    return prev;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg().  */
extern void __cmpxchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____cmpxchg(, volatile void *ptr, unsigned long old, unsigned long new,
          int size)
{
    switch (size) {
        case 1:
            return ____cmpxchg(_u8, ptr, old, new);
        case 2:
            return ____cmpxchg(_u16, ptr, old, new);
        case 4:
            return ____cmpxchg(_u32, ptr, old, new);
        case 8:
            return ____cmpxchg(_u64, ptr, old, new);
    }
    __cmpxchg_called_with_bad_pointer();
    return old;
}
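
/*
 * As with xchg(), a cmpxchg() wrapper macro in asm/cmpxchg.h supplies
 * sizeof(*ptr).  A typical lock-free use (a sketch; 'node', 'list' and
 * 'new' are illustrative names) pushes onto a singly linked stack:
 *
 *	struct node *head;
 *	do {
 *		new->next = head = READ_ONCE(list->head);
 *	} while (cmpxchg(&list->head, head, new) != head);
 */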

#endif