/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_GENERIC_BITOPS_LOCK_H_
#define _ASM_GENERIC_BITOPS_LOCK_H_

#include <linux/atomic.h>
#include <linux/compiler.h>
#include <asm/barrier.h>

/**
 * arch_test_and_set_bit_lock - Set a bit and return its old value, for lock
 * @nr: Bit to set
 * @p: Address to count from
 *
 * This operation is atomic and provides acquire barrier semantics if
 * the returned value is 0.
 * It can be used to implement bit locks.
 */
static __always_inline int
arch_test_and_set_bit_lock(unsigned int nr, volatile unsigned long *p)
{
	long old;
	unsigned long mask = BIT_MASK(nr);

	p += BIT_WORD(nr);
	/* Fast path: if the bit is already set, skip the atomic RMW. */
	if (READ_ONCE(*p) & mask)
		return 1;

	old = arch_atomic_long_fetch_or_acquire(mask, (atomic_long_t *)p);
	return !!(old & mask);
}
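
/*
 * A minimal sketch (illustrative only, not part of the upstream header):
 * a trylock wrapper built on the primitive above. The name
 * example_bit_trylock() is hypothetical; it returns true when the lock
 * bit was acquired:
 *
 *	static inline bool example_bit_trylock(unsigned int nr,
 *					       volatile unsigned long *addr)
 *	{
 *		return !arch_test_and_set_bit_lock(nr, addr);
 *	}
 */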

/**
 * arch_clear_bit_unlock - Clear a bit in memory, for unlock
 * @nr: the bit to clear
 * @p: the address to start counting from
 *
 * This operation is atomic and provides release barrier semantics.
 */
static __always_inline void
arch_clear_bit_unlock(unsigned int nr, volatile unsigned long *p)
{
	p += BIT_WORD(nr);
	arch_atomic_long_fetch_andnot_release(BIT_MASK(nr), (atomic_long_t *)p);
}
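
/*
 * A minimal sketch (illustrative only, not upstream code) of a spinning
 * bit lock built on the acquire/release pair above; the names
 * example_bit_lock()/example_bit_unlock() are hypothetical:
 *
 *	static inline void example_bit_lock(unsigned int nr,
 *					    volatile unsigned long *addr)
 *	{
 *		while (arch_test_and_set_bit_lock(nr, addr))
 *			cpu_relax();
 *	}
 *
 *	static inline void example_bit_unlock(unsigned int nr,
 *					      volatile unsigned long *addr)
 *	{
 *		arch_clear_bit_unlock(nr, addr);
 *	}
 *
 * The acquire on the test-and-set and the release on the clear order the
 * critical section against both the lock and unlock operations.
 */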

/**
 * arch___clear_bit_unlock - Clear a bit in memory, for unlock
 * @nr: the bit to clear
 * @p: the address to start counting from
 *
 * A weaker form of clear_bit_unlock() as used by __bit_lock_unlock(). If all
 * the bits in the word are protected by this lock some archs can use weaker
 * ops to safely unlock.
 *
 * See for example x86's implementation.
 */
static inline void
arch___clear_bit_unlock(unsigned int nr, volatile unsigned long *p)
{
	unsigned long old;

	p += BIT_WORD(nr);
	/* Plain load and modify, then a release store: no atomic RMW needed. */
	old = READ_ONCE(*p);
	old &= ~BIT_MASK(nr);
	arch_atomic_long_set_release((atomic_long_t *)p, old);
}
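
/*
 * A usage sketch (illustrative pattern, not upstream code): this weaker
 * unlock is only safe when the lock bit serializes every update to the
 * word, because the plain read-modify-write above would lose a concurrent
 * atomic update to any other bit. LOCK_BIT and DATA_MASK are hypothetical:
 *
 *	example_bit_lock(LOCK_BIT, &word);
 *	word |= DATA_MASK;	(plain RMW, serialized by the lock)
 *	arch___clear_bit_unlock(LOCK_BIT, &word);
 */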

/**
 * arch_clear_bit_unlock_is_negative_byte - Clear a bit in memory and test if
 *                                          the bottom byte is negative, for
 *                                          unlock
 * @nr: the bit to clear
 * @p: the address to start counting from
 *
 * This is a bit of a one-trick-pony for the filemap code, which clears
 * PG_locked and tests PG_waiters.
 */
#ifndef arch_clear_bit_unlock_is_negative_byte
static inline bool arch_clear_bit_unlock_is_negative_byte(unsigned int nr,
							   volatile unsigned long *p)
{
	long old;
	unsigned long mask = BIT_MASK(nr);

	p += BIT_WORD(nr);
	old = arch_atomic_long_fetch_andnot_release(mask, (atomic_long_t *)p);
	/* Bit 7 is the sign bit of the low byte of the old value. */
	return !!(old & BIT(7));
}
#define arch_clear_bit_unlock_is_negative_byte arch_clear_bit_unlock_is_negative_byte
#endif
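
/*
 * A minimal usage sketch (assumption: mirrors how the page-unlock path in
 * mm/filemap.c uses this helper; abbreviated, not verbatim kernel code).
 * PG_waiters is deliberately placed at bit 7 of page->flags, in the same
 * byte as PG_locked, so the sign test above observes it:
 *
 *	if (clear_bit_unlock_is_negative_byte(PG_locked, &page->flags))
 *		wake_up_page_bit(page, PG_locked);
 */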

#include <asm-generic/bitops/instrumented-lock.h>

#endif /* _ASM_GENERIC_BITOPS_LOCK_H_ */