0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015
0016
0017
0018 #ifndef __ASM_ARM_BITOPS_H
0019 #define __ASM_ARM_BITOPS_H
0020
0021 #ifdef __KERNEL__
0022
0023 #ifndef _LINUX_BITOPS_H
0024 #error only <linux/bitops.h> can be included directly
0025 #endif
0026
0027 #include <linux/compiler.h>
0028 #include <linux/irqflags.h>
0029 #include <asm/barrier.h>
0030
0031
0032
0033
0034
0035
/*
 * Set bit @bit in the bitmap at @p.  Atomicity is obtained by masking
 * interrupts around the read-modify-write of the containing word.
 */
static inline void ____atomic_set_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long irqstate;
	unsigned long mask = BIT_MASK(bit);
	volatile unsigned long *word = p + BIT_WORD(bit);

	raw_local_irq_save(irqstate);
	*word |= mask;
	raw_local_irq_restore(irqstate);
}
0047
/*
 * Clear bit @bit in the bitmap at @p, with interrupts masked around
 * the read-modify-write of the containing word.
 */
static inline void ____atomic_clear_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long irqstate;
	unsigned long mask = BIT_MASK(bit);
	volatile unsigned long *word = p + BIT_WORD(bit);

	raw_local_irq_save(irqstate);
	*word &= ~mask;
	raw_local_irq_restore(irqstate);
}
0059
/*
 * Toggle bit @bit in the bitmap at @p, with interrupts masked around
 * the read-modify-write of the containing word.
 */
static inline void ____atomic_change_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long irqstate;
	unsigned long mask = BIT_MASK(bit);
	volatile unsigned long *word = p + BIT_WORD(bit);

	raw_local_irq_save(irqstate);
	*word ^= mask;
	raw_local_irq_restore(irqstate);
}
0071
/*
 * Set bit @bit in the bitmap at @p and return its previous value
 * (non-zero if it was already set).  The read-modify-write is done
 * with interrupts masked.
 */
static inline int
____atomic_test_and_set_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long irqstate;
	unsigned long old;
	unsigned long mask = BIT_MASK(bit);
	volatile unsigned long *word = p + BIT_WORD(bit);

	raw_local_irq_save(irqstate);
	old = *word;
	*word = old | mask;
	raw_local_irq_restore(irqstate);

	return (old & mask) != 0;
}
0088
/*
 * Clear bit @bit in the bitmap at @p and return its previous value
 * (non-zero if it was set).  The read-modify-write is done with
 * interrupts masked.
 */
static inline int
____atomic_test_and_clear_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long irqstate;
	unsigned long old;
	unsigned long mask = BIT_MASK(bit);
	volatile unsigned long *word = p + BIT_WORD(bit);

	raw_local_irq_save(irqstate);
	old = *word;
	*word = old & ~mask;
	raw_local_irq_restore(irqstate);

	return (old & mask) != 0;
}
0105
/*
 * Toggle bit @bit in the bitmap at @p and return its previous value
 * (non-zero if it was set).  The read-modify-write is done with
 * interrupts masked.
 */
static inline int
____atomic_test_and_change_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long irqstate;
	unsigned long old;
	unsigned long mask = BIT_MASK(bit);
	volatile unsigned long *word = p + BIT_WORD(bit);

	raw_local_irq_save(irqstate);
	old = *word;
	*word = old ^ mask;
	raw_local_irq_restore(irqstate);

	return (old & mask) != 0;
}
0122
0123 #include <asm-generic/bitops/non-atomic.h>
0124
0125
0126
0127
0128
0129
0130
0131
0132
0133
0134
0135
0136
0137
0138
0139
0140
0141
0142
0143
0144
0145
0146
0147
0148
0149
0150
0151
0152
/*
 * Out-of-line atomic bitops.  Only the declarations are visible here;
 * NOTE(review): presumably implemented in arch assembly elsewhere in
 * the tree -- confirm.  The test_and_* variants return non-zero if the
 * bit was previously set.
 */
extern void _set_bit(int nr, volatile unsigned long * p);
extern void _clear_bit(int nr, volatile unsigned long * p);
extern void _change_bit(int nr, volatile unsigned long * p);
extern int _test_and_set_bit(int nr, volatile unsigned long * p);
extern int _test_and_clear_bit(int nr, volatile unsigned long * p);
extern int _test_and_change_bit(int nr, volatile unsigned long * p);
0159
0160
0161
0162
/*
 * Little-endian bitmap searches (out of line).  Each returns the bit
 * number found, as used by the find_* wrappers below.
 */
unsigned long _find_first_zero_bit_le(const unsigned long *p, unsigned long size);
unsigned long _find_next_zero_bit_le(const unsigned long *p,
unsigned long size, unsigned long offset);
unsigned long _find_first_bit_le(const unsigned long *p, unsigned long size);
unsigned long _find_next_bit_le(const unsigned long *p, unsigned long size, unsigned long offset);

/*
 * Big-endian counterparts of the searches above; selected by the
 * __ARMEB__ macros further down.
 */
unsigned long _find_first_zero_bit_be(const unsigned long *p, unsigned long size);
unsigned long _find_next_zero_bit_be(const unsigned long *p,
unsigned long size, unsigned long offset);
unsigned long _find_first_bit_be(const unsigned long *p, unsigned long size);
unsigned long _find_next_bit_be(const unsigned long *p, unsigned long size, unsigned long offset);
0177
#ifndef CONFIG_SMP
/*
 * On uniprocessor builds, a compile-time-constant bit number lets the
 * compiler expand the IRQ-masking C inlines above; a variable bit
 * number falls back to the out-of-line _##name helper.
 */
#define ATOMIC_BITOP(name,nr,p) \
(__builtin_constant_p(nr) ? ____atomic_##name(nr, p) : _##name(nr,p))
#else
/*
 * SMP always uses the out-of-line helpers.  NOTE(review): presumably
 * those provide cross-CPU atomicity the IRQ-masking inlines cannot --
 * confirm against their implementation.
 */
#define ATOMIC_BITOP(name,nr,p) _##name(nr,p)
#endif

/*
 * The public atomic bitop API, routed through ATOMIC_BITOP so the
 * right implementation is picked per configuration.
 */
#define set_bit(nr,p) ATOMIC_BITOP(set_bit,nr,p)
#define clear_bit(nr,p) ATOMIC_BITOP(clear_bit,nr,p)
#define change_bit(nr,p) ATOMIC_BITOP(change_bit,nr,p)
#define test_and_set_bit(nr,p) ATOMIC_BITOP(test_and_set_bit,nr,p)
#define test_and_clear_bit(nr,p) ATOMIC_BITOP(test_and_clear_bit,nr,p)
#define test_and_change_bit(nr,p) ATOMIC_BITOP(test_and_change_bit,nr,p)
0197
#ifndef __ARMEB__
/*
 * Little-endian kernel: native bitmap layout matches the _le helpers.
 */
#define find_first_zero_bit(p,sz) _find_first_zero_bit_le(p,sz)
#define find_next_zero_bit(p,sz,off) _find_next_zero_bit_le(p,sz,off)
#define find_first_bit(p,sz) _find_first_bit_le(p,sz)
#define find_next_bit(p,sz,off) _find_next_bit_le(p,sz,off)

#else
/*
 * Big-endian kernel: route the generic names to the _be helpers.
 */
#define find_first_zero_bit(p,sz) _find_first_zero_bit_be(p,sz)
#define find_next_zero_bit(p,sz,off) _find_next_zero_bit_be(p,sz,off)
#define find_first_bit(p,sz) _find_first_bit_be(p,sz)
#define find_next_bit(p,sz,off) _find_next_bit_be(p,sz,off)

#endif
0217
0218 #if __LINUX_ARM_ARCH__ < 5
0219
0220 #include <asm-generic/bitops/__fls.h>
0221 #include <asm-generic/bitops/__ffs.h>
0222 #include <asm-generic/bitops/fls.h>
0223 #include <asm-generic/bitops/ffs.h>
0224
0225 #else
0226
0227
0228
0229
0230
0231
0232 #include <asm-generic/bitops/builtin-__fls.h>
0233 #include <asm-generic/bitops/builtin-__ffs.h>
0234 #include <asm-generic/bitops/builtin-fls.h>
0235 #include <asm-generic/bitops/builtin-ffs.h>
0236
0237 #endif
0238
0239 #include <asm-generic/bitops/ffz.h>
0240
0241 #include <asm-generic/bitops/fls64.h>
0242
0243 #include <asm-generic/bitops/sched.h>
0244 #include <asm-generic/bitops/hweight.h>
0245 #include <asm-generic/bitops/lock.h>
0246
0247 #ifdef __ARMEB__
0248
/*
 * Big-endian kernels still need explicit little-endian searches;
 * forward the untyped bitmap pointer to the out-of-line helper.
 */
static inline int find_first_zero_bit_le(const void *p, unsigned size)
{
	const unsigned long *bitmap = p;

	return _find_first_zero_bit_le(bitmap, size);
}
0253 #define find_first_zero_bit_le find_first_zero_bit_le
0254
/*
 * Little-endian "next zero bit" search starting at @offset; forwards
 * to the out-of-line helper.
 */
static inline int find_next_zero_bit_le(const void *p, int size, int offset)
{
	const unsigned long *bitmap = p;

	return _find_next_zero_bit_le(bitmap, size, offset);
}
0259 #define find_next_zero_bit_le find_next_zero_bit_le
0260
/*
 * Little-endian "next set bit" search starting at @offset; forwards
 * to the out-of-line helper.
 */
static inline int find_next_bit_le(const void *p, int size, int offset)
{
	const unsigned long *bitmap = p;

	return _find_next_bit_le(bitmap, size, offset);
}
0265 #define find_next_bit_le find_next_bit_le
0266
0267 #endif
0268
0269 #include <asm-generic/bitops/le.h>
0270
0271
0272
0273
0274 #include <asm-generic/bitops/ext2-atomic-setbit.h>
0275
0276 #endif
0277
0278 #endif