/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Generic C implementation of atomic counter operations. Do not include in
 * machine independent code.
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 */
#ifndef __ASM_GENERIC_ATOMIC_H
#define __ASM_GENERIC_ATOMIC_H

#include <asm/cmpxchg.h>
#include <asm/barrier.h>

#ifdef CONFIG_SMP
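/*
 * On SMP we can build all of the atomic primitives from arch_cmpxchg():
 * sample the counter, compute the new value, and retry the
 * compare-and-exchange until no concurrent modification is observed.
 * ATOMIC_OP() generates the plain void generic_atomic_<op>() helpers.
 */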
#define ATOMIC_OP(op, c_op) \
static inline void generic_atomic_##op(int i, atomic_t *v) \
{ \
        int c, old; \
\
        c = v->counter; \
        while ((old = arch_cmpxchg(&v->counter, c, c c_op i)) != c) \
                c = old; \
}
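/*
 * ATOMIC_OP_RETURN() generates generic_atomic_<op>_return(), which also
 * returns the new value of the counter after the operation is applied.
 */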
#define ATOMIC_OP_RETURN(op, c_op) \
static inline int generic_atomic_##op##_return(int i, atomic_t *v) \
{ \
        int c, old; \
\
        c = v->counter; \
        while ((old = arch_cmpxchg(&v->counter, c, c c_op i)) != c) \
                c = old; \
\
        return c c_op i; \
}
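/*
 * ATOMIC_FETCH_OP() generates generic_atomic_fetch_<op>(), which returns
 * the value the counter held *before* the operation was applied.
 */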
#define ATOMIC_FETCH_OP(op, c_op) \
static inline int generic_atomic_fetch_##op(int i, atomic_t *v) \
{ \
        int c, old; \
\
        c = v->counter; \
        while ((old = arch_cmpxchg(&v->counter, c, c c_op i)) != c) \
                c = old; \
\
        return c; \
}
#else

#include <linux/irqflags.h>
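/*
 * On UP the counter can only be observed by other contexts on the same
 * CPU (e.g. interrupt handlers), so disabling interrupts around a plain
 * read-modify-write is enough to make each operation atomic.
 */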
#define ATOMIC_OP(op, c_op) \
static inline void generic_atomic_##op(int i, atomic_t *v) \
{ \
        unsigned long flags; \
\
        raw_local_irq_save(flags); \
        v->counter = v->counter c_op i; \
        raw_local_irq_restore(flags); \
}

#define ATOMIC_OP_RETURN(op, c_op) \
static inline int generic_atomic_##op##_return(int i, atomic_t *v) \
{ \
        unsigned long flags; \
        int ret; \
\
        raw_local_irq_save(flags); \
        ret = (v->counter = v->counter c_op i); \
        raw_local_irq_restore(flags); \
\
        return ret; \
}

#define ATOMIC_FETCH_OP(op, c_op) \
static inline int generic_atomic_fetch_##op(int i, atomic_t *v) \
{ \
        unsigned long flags; \
        int ret; \
\
        raw_local_irq_save(flags); \
        ret = v->counter; \
        v->counter = v->counter c_op i; \
        raw_local_irq_restore(flags); \
\
        return ret; \
}
#endif /* CONFIG_SMP */
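/*
 * Instantiate the helpers. Note that add/sub get all three forms, while
 * the bitwise ops have no <op>_return variant in the generic API.
 */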
ATOMIC_OP_RETURN(add, +)
ATOMIC_OP_RETURN(sub, -)

ATOMIC_FETCH_OP(add, +)
ATOMIC_FETCH_OP(sub, -)
ATOMIC_FETCH_OP(and, &)
ATOMIC_FETCH_OP(or, |)
ATOMIC_FETCH_OP(xor, ^)

ATOMIC_OP(add, +)
ATOMIC_OP(sub, -)
ATOMIC_OP(and, &)
ATOMIC_OP(or, |)
ATOMIC_OP(xor, ^)

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
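/*
 * Publish the generic helpers under the arch_atomic_*() names, which
 * <linux/atomic.h> wraps with instrumentation to form the atomic_*() API.
 */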
#define arch_atomic_add_return          generic_atomic_add_return
#define arch_atomic_sub_return          generic_atomic_sub_return

#define arch_atomic_fetch_add           generic_atomic_fetch_add
#define arch_atomic_fetch_sub           generic_atomic_fetch_sub
#define arch_atomic_fetch_and           generic_atomic_fetch_and
#define arch_atomic_fetch_or            generic_atomic_fetch_or
#define arch_atomic_fetch_xor           generic_atomic_fetch_xor

#define arch_atomic_add                 generic_atomic_add
#define arch_atomic_sub                 generic_atomic_sub
#define arch_atomic_and                 generic_atomic_and
#define arch_atomic_or                  generic_atomic_or
#define arch_atomic_xor                 generic_atomic_xor
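/*
 * Plain reads and writes only need READ_ONCE()/WRITE_ONCE() to stop the
 * compiler from tearing, fusing or caching the access.
 */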
#define arch_atomic_read(v)             READ_ONCE((v)->counter)
#define arch_atomic_set(v, i)           WRITE_ONCE(((v)->counter), (i))
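/*
 * xchg/cmpxchg on the counter simply delegate to the arch_xchg() and
 * arch_cmpxchg() primitives this header already relies on.
 */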
#define arch_atomic_xchg(ptr, v)         (arch_xchg(&(ptr)->counter, (v)))
#define arch_atomic_cmpxchg(v, old, new) (arch_cmpxchg(&((v)->counter), (old), (new)))
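/*
 * Minimal usage sketch (hypothetical caller code, not part of this file):
 * with these arch_atomic_*() definitions in place, <linux/atomic.h>
 * provides the usual instrumented API on top of them, e.g.:
 *
 *	static atomic_t refs = ATOMIC_INIT(1);
 *
 *	atomic_inc(&refs);			// falls back to arch_atomic_add()
 *	if (atomic_dec_and_test(&refs))		// uses arch_atomic_sub_return()
 *		do_release();			// do_release() is hypothetical
 */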
#endif /* __ASM_GENERIC_ATOMIC_H */