0001
0002
0003 #ifndef __ASM_GENERIC_BITOPS_GENERIC_NON_ATOMIC_H
0004 #define __ASM_GENERIC_BITOPS_GENERIC_NON_ATOMIC_H
0005
0006 #include <linux/bits.h>
0007 #include <asm/barrier.h>
0008
0009 #ifndef _LINUX_BITOPS_H
0010 #error only <linux/bitops.h> can be included directly
0011 #endif
0012
0013
0014
0015
0016
0017
0018
0019
0020
0021
0022
0023
0024
0025
0026
0027 static __always_inline void
0028 generic___set_bit(unsigned long nr, volatile unsigned long *addr)
0029 {
0030 unsigned long mask = BIT_MASK(nr);
0031 unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
0032
0033 *p |= mask;
0034 }
0035
0036 static __always_inline void
0037 generic___clear_bit(unsigned long nr, volatile unsigned long *addr)
0038 {
0039 unsigned long mask = BIT_MASK(nr);
0040 unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
0041
0042 *p &= ~mask;
0043 }
0044
0045
0046
0047
0048
0049
0050
0051
0052
0053
0054 static __always_inline void
0055 generic___change_bit(unsigned long nr, volatile unsigned long *addr)
0056 {
0057 unsigned long mask = BIT_MASK(nr);
0058 unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
0059
0060 *p ^= mask;
0061 }
0062
0063
0064
0065
0066
0067
0068
0069
0070
0071
0072 static __always_inline bool
0073 generic___test_and_set_bit(unsigned long nr, volatile unsigned long *addr)
0074 {
0075 unsigned long mask = BIT_MASK(nr);
0076 unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
0077 unsigned long old = *p;
0078
0079 *p = old | mask;
0080 return (old & mask) != 0;
0081 }
0082
0083
0084
0085
0086
0087
0088
0089
0090
0091
0092 static __always_inline bool
0093 generic___test_and_clear_bit(unsigned long nr, volatile unsigned long *addr)
0094 {
0095 unsigned long mask = BIT_MASK(nr);
0096 unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
0097 unsigned long old = *p;
0098
0099 *p = old & ~mask;
0100 return (old & mask) != 0;
0101 }
0102
0103
0104 static __always_inline bool
0105 generic___test_and_change_bit(unsigned long nr, volatile unsigned long *addr)
0106 {
0107 unsigned long mask = BIT_MASK(nr);
0108 unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
0109 unsigned long old = *p;
0110
0111 *p = old ^ mask;
0112 return (old & mask) != 0;
0113 }
0114
0115
0116
0117
0118
0119
0120 static __always_inline bool
0121 generic_test_bit(unsigned long nr, const volatile unsigned long *addr)
0122 {
0123
0124
0125
0126
0127
0128 return 1UL & (addr[BIT_WORD(nr)] >> (nr & (BITS_PER_LONG-1)));
0129 }
0130
0131
0132
0133
0134
0135
0136 static __always_inline bool
0137 generic_test_bit_acquire(unsigned long nr, const volatile unsigned long *addr)
0138 {
0139 unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
0140 return 1UL & (smp_load_acquire(p) >> (nr & (BITS_PER_LONG-1)));
0141 }
0142
0143
0144
0145
0146
/*
 * const_*() variants, for use on compile-time-constant bitmaps where the
 * compiler can fold the whole operation away.  Apart from const_test_bit()
 * (which has its own implementation), they are plain aliases of the
 * generic_*() helpers above.
 */
#define const___set_bit generic___set_bit
#define const___clear_bit generic___clear_bit
#define const___change_bit generic___change_bit
#define const___test_and_set_bit generic___test_and_set_bit
#define const___test_and_clear_bit generic___test_and_clear_bit
#define const___test_and_change_bit generic___test_and_change_bit
#define const_test_bit_acquire generic_test_bit_acquire
0154
0155
0156
0157
0158
0159
0160
0161
0162
0163
0164
0165 static __always_inline bool
0166 const_test_bit(unsigned long nr, const volatile unsigned long *addr)
0167 {
0168 const unsigned long *p = (const unsigned long *)addr + BIT_WORD(nr);
0169 unsigned long mask = BIT_MASK(nr);
0170 unsigned long val = *p;
0171
0172 return !!(val & mask);
0173 }
0174
0175 #endif