0001 /* SPDX-License-Identifier: GPL-2.0-only */
0002 /*
0003  * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
0004  */
0005 
0006 #ifndef __ASM_ARC_CMPXCHG_H
0007 #define __ASM_ARC_CMPXCHG_H
0008 
0009 #include <linux/build_bug.h>
0010 #include <linux/types.h>
0011 
0012 #include <asm/barrier.h>
0013 #include <asm/smp.h>
0014 
0015 #ifdef CONFIG_ARC_HAS_LLSC
0016 
0017 /*
0018  * if (*ptr == @old)
0019  *      *ptr = @new
0020  */
0021 #define __cmpxchg(ptr, old, new)                    \
0022 ({                                  \
0023     __typeof__(*(ptr)) _prev;                   \
0024                                     \
0025     __asm__ __volatile__(                       \
0026     "1: llock  %0, [%1] \n"                 \
0027     "   brne   %0, %2, 2f   \n"             \
0028     "   scond  %3, [%1] \n"                 \
0029     "   bnz     1b      \n"             \
0030     "2:             \n"             \
0031     : "=&r"(_prev)  /* Early clobber prevent reg reuse */       \
0032     : "r"(ptr), /* Not "m": llock only supports reg */      \
0033       "ir"(old),                            \
0034       "r"(new)  /* Not "ir": scond can't take LIMM */       \
0035     : "cc",                             \
0036       "memory");    /* gcc knows memory is clobbered */     \
0037                                     \
0038     _prev;                              \
0039 })
0040 
0041 #define arch_cmpxchg_relaxed(ptr, old, new)             \
0042 ({                                  \
0043     __typeof__(ptr) _p_ = (ptr);                    \
0044     __typeof__(*(ptr)) _o_ = (old);                 \
0045     __typeof__(*(ptr)) _n_ = (new);                 \
0046     __typeof__(*(ptr)) _prev_;                  \
0047                                     \
0048     switch(sizeof((_p_))) {                     \
0049     case 4:                             \
0050         _prev_ = __cmpxchg(_p_, _o_, _n_);          \
0051         break;                          \
0052     default:                            \
0053         BUILD_BUG();                        \
0054     }                               \
0055     _prev_;                             \
0056 })
0057 
0058 #else
0059 
0060 #define arch_cmpxchg(ptr, old, new)                     \
0061 ({                                  \
0062     volatile __typeof__(ptr) _p_ = (ptr);               \
0063     __typeof__(*(ptr)) _o_ = (old);                 \
0064     __typeof__(*(ptr)) _n_ = (new);                 \
0065     __typeof__(*(ptr)) _prev_;                  \
0066     unsigned long __flags;                      \
0067                                     \
0068     BUILD_BUG_ON(sizeof(_p_) != 4);                 \
0069                                     \
0070     /*                              \
0071      * spin lock/unlock provide the needed smp_mb() before/after    \
0072      */                             \
0073     atomic_ops_lock(__flags);                   \
0074     _prev_ = *_p_;                          \
0075     if (_prev_ == _o_)                      \
0076         *_p_ = _n_;                     \
0077     atomic_ops_unlock(__flags);                 \
0078     _prev_;                             \
0079 })
0080 
0081 #endif
0082 
0083 /*
0084  * xchg
0085  */
0086 #ifdef CONFIG_ARC_HAS_LLSC
0087 
0088 #define __xchg(ptr, val)                        \
0089 ({                                  \
0090     __asm__ __volatile__(                       \
0091     "   ex  %0, [%1]    \n" /* set new value */         \
0092     : "+r"(val)                         \
0093     : "r"(ptr)                          \
0094     : "memory");                            \
0095     _val_;      /* get old value */             \
0096 })
0097 
0098 #define arch_xchg_relaxed(ptr, val)                 \
0099 ({                                  \
0100     __typeof__(ptr) _p_ = (ptr);                    \
0101     __typeof__(*(ptr)) _val_ = (val);               \
0102                                     \
0103     switch(sizeof(*(_p_))) {                    \
0104     case 4:                             \
0105         _val_ = __xchg(_p_, _val_);             \
0106         break;                          \
0107     default:                            \
0108         BUILD_BUG();                        \
0109     }                               \
0110     _val_;                              \
0111 })
0112 
0113 #else  /* !CONFIG_ARC_HAS_LLSC */
0114 
0115 /*
0116  * EX instructions is baseline and present in !LLSC too. But in this
0117  * regime it still needs use @atomic_ops_lock spinlock to allow interop
0118  * with cmpxchg() which uses spinlock in !LLSC
0119  * (llist.h use xchg and cmpxchg on sama data)
0120  */
0121 
0122 #define arch_xchg(ptr, val)                         \
0123 ({                                  \
0124     __typeof__(ptr) _p_ = (ptr);                    \
0125     __typeof__(*(ptr)) _val_ = (val);               \
0126                                     \
0127     unsigned long __flags;                      \
0128                                     \
0129     atomic_ops_lock(__flags);                   \
0130                                     \
0131     __asm__ __volatile__(                       \
0132     "   ex  %0, [%1]    \n"                 \
0133     : "+r"(_val_)                           \
0134     : "r"(_p_)                          \
0135     : "memory");                            \
0136                                     \
0137     atomic_ops_unlock(__flags);                 \
0138     _val_;                              \
0139 })
0140 
0141 #endif
0142 
0143 #endif