/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_GENERIC_BITOPS_ATOMIC_H_
#define _ASM_GENERIC_BITOPS_ATOMIC_H_

#include <linux/atomic.h>
#include <linux/compiler.h>
#include <asm/barrier.h>

/*
 * Generic implementation of the atomic bitops, built on top of the
 * atomic_long_t fetch/RMW operations.
 */

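/*
 * arch_set_bit - atomically set bit @nr of the bitmap at @p by OR-ing
 * BIT_MASK(nr) into the word that contains it.
 */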
static __always_inline void
arch_set_bit(unsigned int nr, volatile unsigned long *p)
{
	p += BIT_WORD(nr);
	arch_atomic_long_or(BIT_MASK(nr), (atomic_long_t *)p);
}

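/*
 * arch_clear_bit - atomically clear bit @nr of the bitmap at @p by
 * AND-ing the containing word with ~BIT_MASK(nr).
 */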
static __always_inline void
arch_clear_bit(unsigned int nr, volatile unsigned long *p)
{
	p += BIT_WORD(nr);
	arch_atomic_long_andnot(BIT_MASK(nr), (atomic_long_t *)p);
}

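/*
 * arch_change_bit - atomically toggle bit @nr of the bitmap at @p by
 * XOR-ing BIT_MASK(nr) into the containing word.
 */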
static __always_inline void
arch_change_bit(unsigned int nr, volatile unsigned long *p)
{
	p += BIT_WORD(nr);
	arch_atomic_long_xor(BIT_MASK(nr), (atomic_long_t *)p);
}

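/*
 * arch_test_and_set_bit - atomically set bit @nr of the bitmap at @p and
 * return non-zero if the bit was already set (taken from the old word
 * value returned by the fetch op).
 */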
static __always_inline int
arch_test_and_set_bit(unsigned int nr, volatile unsigned long *p)
{
	long old;
	unsigned long mask = BIT_MASK(nr);

	p += BIT_WORD(nr);
	old = arch_atomic_long_fetch_or(mask, (atomic_long_t *)p);
	return !!(old & mask);
}

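/*
 * arch_test_and_clear_bit - atomically clear bit @nr of the bitmap at @p
 * and return non-zero if the bit was previously set.
 */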
static __always_inline int
arch_test_and_clear_bit(unsigned int nr, volatile unsigned long *p)
{
	long old;
	unsigned long mask = BIT_MASK(nr);

	p += BIT_WORD(nr);
	old = arch_atomic_long_fetch_andnot(mask, (atomic_long_t *)p);
	return !!(old & mask);
}

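/*
 * arch_test_and_change_bit - atomically toggle bit @nr of the bitmap at @p
 * and return non-zero if the bit was previously set.
 */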
static __always_inline int
arch_test_and_change_bit(unsigned int nr, volatile unsigned long *p)
{
	long old;
	unsigned long mask = BIT_MASK(nr);

	p += BIT_WORD(nr);
	old = arch_atomic_long_fetch_xor(mask, (atomic_long_t *)p);
	return !!(old & mask);
}

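/*
 * Usage sketch: kernel code normally calls the instrumented wrappers pulled
 * in below (set_bit(), test_and_set_bit(), ...) rather than these arch_*()
 * helpers directly; 'slot_map' and 'id' here are illustrative names:
 *
 *	static unsigned long slot_map[BITS_TO_LONGS(64)];
 *
 *	if (!test_and_set_bit(id, slot_map))
 *		pr_debug("claimed slot %u\n", id);	// bit was clear before
 */
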
#include <asm-generic/bitops/instrumented-atomic.h>

#endif /* _ASM_GENERIC_BITOPS_ATOMIC_H_ */