/* SPDX-License-Identifier: GPL-2.0-only */

#ifndef _ASM_ARC_ATOMIC_LLSC_H
#define _ASM_ARC_ATOMIC_LLSC_H

#define arch_atomic_set(v, i) WRITE_ONCE(((v)->counter), (i))

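/*
 * All ops below are built on the ARC LLOCK/SCOND pair: LLOCK loads the
 * counter and marks it for exclusive access, SCOND stores the new value
 * only if nothing else wrote the location in between, and BNZ retries
 * the whole sequence until the conditional store succeeds.
 */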
#define ATOMIC_OP(op, asm_op)						\
static inline void arch_atomic_##op(int i, atomic_t *v)		\
{									\
	unsigned int val;						\
									\
	__asm__ __volatile__(						\
	"1:	llock   %[val], [%[ctr]]		\n"		\
	"	" #asm_op " %[val], %[val], %[i]	\n"		\
	"	scond   %[val], [%[ctr]]		\n"		\
	"	bnz     1b				\n"		\
	: [val]	"=&r"	(val) /* Early clobber to prevent reg reuse */	\
	: [ctr]	"r"	(&v->counter), /* Not "m": llock only supports reg direct addr mode */	\
	  [i]	"ir"	(i)						\
	: "cc");							\
}									\

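/*
 * Same retry loop, but the freshly computed value is returned. Only the
 * _relaxed form is provided; the generic atomic layer adds the barriers
 * needed to build the fully ordered variants from it.
 */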
#define ATOMIC_OP_RETURN(op, asm_op)					\
static inline int arch_atomic_##op##_return_relaxed(int i, atomic_t *v)\
{									\
	unsigned int val;						\
									\
	__asm__ __volatile__(						\
	"1:	llock   %[val], [%[ctr]]		\n"		\
	"	" #asm_op " %[val], %[val], %[i]	\n"		\
	"	scond   %[val], [%[ctr]]		\n"		\
	"	bnz     1b				\n"		\
	: [val]	"=&r"	(val)						\
	: [ctr]	"r"	(&v->counter),					\
	  [i]	"ir"	(i)						\
	: "cc");							\
									\
	return val;							\
}

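/* Tell the generic atomic code that relaxed variants are provided here. */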
#define arch_atomic_add_return_relaxed		arch_atomic_add_return_relaxed
#define arch_atomic_sub_return_relaxed		arch_atomic_sub_return_relaxed

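/*
 * fetch_##op: the same loop once more, except the value loaded by LLOCK
 * is kept in a second register and returned, so the caller observes the
 * counter as it was before the operation.
 */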
#define ATOMIC_FETCH_OP(op, asm_op)					\
static inline int arch_atomic_fetch_##op##_relaxed(int i, atomic_t *v)	\
{									\
	unsigned int val, orig;						\
									\
	__asm__ __volatile__(						\
	"1:	llock   %[orig], [%[ctr]]		\n"		\
	"	" #asm_op " %[val], %[orig], %[i]	\n"		\
	"	scond   %[val], [%[ctr]]		\n"		\
	"	bnz     1b				\n"		\
	: [val]	"=&r"	(val),						\
	  [orig] "=&r"	(orig)						\
	: [ctr]	"r"	(&v->counter),					\
	  [i]	"ir"	(i)						\
	: "cc");							\
									\
	return orig;							\
}

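/* Advertise the relaxed fetch variants instantiated below. */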
#define arch_atomic_fetch_add_relaxed		arch_atomic_fetch_add_relaxed
#define arch_atomic_fetch_sub_relaxed		arch_atomic_fetch_sub_relaxed

#define arch_atomic_fetch_and_relaxed		arch_atomic_fetch_and_relaxed
#define arch_atomic_fetch_andnot_relaxed	arch_atomic_fetch_andnot_relaxed
#define arch_atomic_fetch_or_relaxed		arch_atomic_fetch_or_relaxed
#define arch_atomic_fetch_xor_relaxed		arch_atomic_fetch_xor_relaxed

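/*
 * Arithmetic ops get all three flavors: plain (void), return-new-value
 * and fetch-old-value.
 */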
#define ATOMIC_OPS(op, asm_op)					\
	ATOMIC_OP(op, asm_op)					\
	ATOMIC_OP_RETURN(op, asm_op)				\
	ATOMIC_FETCH_OP(op, asm_op)

ATOMIC_OPS(add, add)
ATOMIC_OPS(sub, sub)

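/*
 * The bitwise ops have no *_return form in the kernel's atomic API, so
 * redefine ATOMIC_OPS() without ATOMIC_OP_RETURN before instantiating
 * them.
 */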
#undef ATOMIC_OPS
#define ATOMIC_OPS(op, asm_op)					\
	ATOMIC_OP(op, asm_op)					\
	ATOMIC_FETCH_OP(op, asm_op)

ATOMIC_OPS(and, and)
ATOMIC_OPS(andnot, bic)
ATOMIC_OPS(or, or)
ATOMIC_OPS(xor, xor)

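/*
 * ARC has a native and-not instruction (BIC), so defining
 * arch_atomic_andnot stops the generic code from open-coding it.
 */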
#define arch_atomic_andnot		arch_atomic_andnot

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#endif