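/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Bitops functions work on unsigned longs with LSB0 bit numbering,
 * i.e. within a word the bits are numbered LSB first:
 *   |63..............0|127............64|191...........128|255...........192|
 *
 * There are also special functions which work with an MSB0 encoding:
 *   |0..............63|64............127|128...........191|192...........255|
 *
 * An MSB0 bit number within bits 0-63 is converted to the LSB0 encoding
 * by reversing it, i.e. by an XOR with 0x3f (BITS_PER_LONG - 1).
 */
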
#ifndef _S390_BITOPS_H
#define _S390_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/typecheck.h>
#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/atomic_ops.h>
#include <asm/barrier.h>

#define __BITOPS_WORDS(bits) (((bits) + BITS_PER_LONG - 1) / BITS_PER_LONG)

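/*
 * Return the address of the unsigned long that contains bit nr,
 * relative to the bitmap base address ptr.
 */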
static inline unsigned long *
__bitops_word(unsigned long nr, const volatile unsigned long *ptr)
{
	unsigned long addr;

	addr = (unsigned long)ptr + ((nr ^ (nr & (BITS_PER_LONG - 1))) >> 3);
	return (unsigned long *)addr;
}

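/* Return a mask with only the bit for nr set within its word. */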
static inline unsigned long __bitops_mask(unsigned long nr)
{
	return 1UL << (nr & (BITS_PER_LONG - 1));
}

static __always_inline void arch_set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);

	__atomic64_or(mask, (long *)addr);
}

static __always_inline void arch_clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);

	__atomic64_and(~mask, (long *)addr);
}

static __always_inline void arch_change_bit(unsigned long nr,
					    volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);

	__atomic64_xor(mask, (long *)addr);
}

static inline bool arch_test_and_set_bit(unsigned long nr,
					 volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = __atomic64_or_barrier(mask, (long *)addr);
	return old & mask;
}

static inline bool arch_test_and_clear_bit(unsigned long nr,
					   volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = __atomic64_and_barrier(~mask, (long *)addr);
	return old & mask;
}

static inline bool arch_test_and_change_bit(unsigned long nr,
					    volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = __atomic64_xor_barrier(mask, (long *)addr);
	return old & mask;
}

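/*
 * Non-atomic variants: the caller must serialize all accesses to the
 * bitmap, e.g. with a lock.
 */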
static __always_inline void
arch___set_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *p = __bitops_word(nr, addr);
	unsigned long mask = __bitops_mask(nr);

	*p |= mask;
}

static __always_inline void
arch___clear_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *p = __bitops_word(nr, addr);
	unsigned long mask = __bitops_mask(nr);

	*p &= ~mask;
}

static __always_inline void
arch___change_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *p = __bitops_word(nr, addr);
	unsigned long mask = __bitops_mask(nr);

	*p ^= mask;
}

static __always_inline bool
arch___test_and_set_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *p = __bitops_word(nr, addr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = *p;
	*p |= mask;
	return old & mask;
}

static __always_inline bool
arch___test_and_clear_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *p = __bitops_word(nr, addr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = *p;
	*p &= ~mask;
	return old & mask;
}

static __always_inline bool
arch___test_and_change_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *p = __bitops_word(nr, addr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = *p;
	*p ^= mask;
	return old & mask;
}

#define arch_test_bit generic_test_bit
#define arch_test_bit_acquire generic_test_bit_acquire

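/*
 * Test the bit first and return early if it is already set, which avoids
 * the more expensive atomic read-modify-write in the contended case.
 */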
static inline bool arch_test_and_set_bit_lock(unsigned long nr,
					      volatile unsigned long *ptr)
{
	if (arch_test_bit(nr, ptr))
		return true;
	return arch_test_and_set_bit(nr, ptr);
}

static inline void arch_clear_bit_unlock(unsigned long nr,
					 volatile unsigned long *ptr)
{
	smp_mb__before_atomic();
	arch_clear_bit(nr, ptr);
}

static inline void arch___clear_bit_unlock(unsigned long nr,
					   volatile unsigned long *ptr)
{
	smp_mb();
	arch___clear_bit(nr, ptr);
}

#include <asm-generic/bitops/instrumented-atomic.h>
#include <asm-generic/bitops/instrumented-non-atomic.h>
#include <asm-generic/bitops/instrumented-lock.h>

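/*
 * Functions which work only with MSB0 bit numbering.
 * The bits are numbered:
 *   |0..............63|64............127|128...........191|192...........255|
 */
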
unsigned long find_first_bit_inv(const unsigned long *addr, unsigned long size);
unsigned long find_next_bit_inv(const unsigned long *addr, unsigned long size,
				unsigned long offset);

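/*
 * Iterate over all set bits of an MSB0 encoded bitmap; bitmap, nbits and
 * handle_bit() below are placeholders for illustration:
 *
 *	unsigned long bit;
 *
 *	for_each_set_bit_inv(bit, bitmap, nbits)
 *		handle_bit(bit);
 */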
#define for_each_set_bit_inv(bit, addr, size)				\
	for ((bit) = find_first_bit_inv((addr), (size));		\
	     (bit) < (size);						\
	     (bit) = find_next_bit_inv((addr), (size), (bit) + 1))

static inline void set_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return set_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline void clear_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline bool test_and_clear_bit_inv(unsigned long nr,
					  volatile unsigned long *ptr)
{
	return test_and_clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline void __set_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return __set_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline void __clear_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return __clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline bool test_bit_inv(unsigned long nr,
				const volatile unsigned long *ptr)
{
	return test_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

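/**
 * __flogr - find leftmost one
 * @word - The word to search
 *
 * Returns the bit number of the most significant bit set,
 * where the most significant bit has bit number 0.
 * If no bit is set this function returns 64.
 */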
static inline unsigned char __flogr(unsigned long word)
{
	if (__builtin_constant_p(word)) {
		unsigned long bit = 0;

		if (!word)
			return 64;
		/*
		 * Binary search for the number of leading zero bits, so
		 * that the whole expression folds to a constant when word
		 * is known at compile time.
		 */
		if (!(word & 0xffffffff00000000UL)) {
			word <<= 32;
			bit += 32;
		}
		if (!(word & 0xffff000000000000UL)) {
			word <<= 16;
			bit += 16;
		}
		if (!(word & 0xff00000000000000UL)) {
			word <<= 8;
			bit += 8;
		}
		if (!(word & 0xf000000000000000UL)) {
			word <<= 4;
			bit += 4;
		}
		if (!(word & 0xc000000000000000UL)) {
			word <<= 2;
			bit += 2;
		}
		if (!(word & 0x8000000000000000UL)) {
			word <<= 1;
			bit += 1;
		}
		return bit;
	} else {
		union register_pair rp;

		/*
		 * The FIND LEFTMOST ONE (flogr) instruction returns the
		 * bit number in the even register of the register pair.
		 */
		rp.even = word;
		asm volatile(
			"	flogr	%[rp],%[rp]\n"
			: [rp] "+d" (rp.pair) : : "cc");
		return rp.even;
	}
}
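
/**
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Undefined if no bit exists, so code should check against 0 first.
 */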
static inline unsigned long __ffs(unsigned long word)
{
	/* -word & word isolates the least significant set bit */
	return __flogr(-word & word) ^ (BITS_PER_LONG - 1);
}

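/**
 * ffs - find first bit set
 * @word: the word to search
 *
 * This is defined the same way as the libc and
 * compiler builtin ffs routines (man ffs).
 */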
static inline int ffs(int word)
{
	unsigned long mask = 2 * BITS_PER_LONG - 1;
	unsigned int val = (unsigned int)word;

	/* The & mask folds the val == 0 case (__flogr() == 64) to 0 */
	return (1 + (__flogr(-val & val) ^ (BITS_PER_LONG - 1))) & mask;
}

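/**
 * __fls - find last (most-significant) set bit in a long word
 * @word: the word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 */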
static inline unsigned long __fls(unsigned long word)
{
	return __flogr(word) ^ (BITS_PER_LONG - 1);
}

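/**
 * fls64 - find last set bit in a 64-bit word
 * @word: the word to search
 *
 * This is defined in a similar way as the libc and compiler builtin
 * ffsll, but returns the position of the most significant set bit.
 *
 * fls64(value) returns 0 if value is 0 or the position of the last
 * set bit if value is nonzero. The last (most significant) bit is
 * at position 64.
 */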
static inline int fls64(unsigned long word)
{
	unsigned long mask = 2 * BITS_PER_LONG - 1;

	return (1 + (__flogr(word) ^ (BITS_PER_LONG - 1))) & mask;
}

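/**
 * fls - find last (most-significant) bit set
 * @word: the word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */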
static inline int fls(unsigned int word)
{
	return fls64(word);
}

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif /* _S390_BITOPS_H */