/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_BITOPS_H
#define _ASM_X86_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
#include <asm/alternative.h>
#include <asm/rmwcc.h>
#include <asm/barrier.h>

#if BITS_PER_LONG == 32
# define _BITOPS_LONG_SHIFT 5
#elif BITS_PER_LONG == 64
# define _BITOPS_LONG_SHIFT 6
#else
# error "Unexpected BITS_PER_LONG"
#endif

#define BIT_64(n)			(U64_C(1) << (n))
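
/*
 * The bit operations below are implemented with inline assembly so that
 * the read-modify-write is a single instruction; the LOCK_PREFIX variants
 * are atomic with respect to other CPUs.  Bit 0 is the least significant
 * bit of addr[0]; bit BITS_PER_LONG is the least significant bit of
 * addr[1], and so on.
 */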
#define RLONG_ADDR(x)			 "m" (*(volatile long *) (x))
#define WBYTE_ADDR(x)			"+m" (*(volatile char *) (x))

#define ADDR				RLONG_ADDR(addr)
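
/*
 * For a compile-time constant @nr, the locked operations that do not
 * return the old value are done as a byte-wide mask operation
 * (orb/andb/xorb) on the byte containing the bit, rather than a
 * bts/btr/btc on the whole long.
 */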
#define CONST_MASK_ADDR(nr, addr)	WBYTE_ADDR((void *)(addr) + ((nr)>>3))
#define CONST_MASK(nr)			(1 << ((nr) & 7))
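
/*
 * arch_set_bit - atomically set bit @nr in *@addr.  A compile-time
 * constant @nr is handled as a locked byte OR on the containing byte;
 * otherwise a locked BTS on the containing long is used.
 */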
static __always_inline void
arch_set_bit(long nr, volatile unsigned long *addr)
{
	if (__builtin_constant_p(nr)) {
		asm volatile(LOCK_PREFIX "orb %b1,%0"
			: CONST_MASK_ADDR(nr, addr)
			: "iq" (CONST_MASK(nr))
			: "memory");
	} else {
		asm volatile(LOCK_PREFIX __ASM_SIZE(bts) " %1,%0"
			: : RLONG_ADDR(addr), "Ir" (nr) : "memory");
	}
}

/* Non-atomic variant: the caller must serialize access to the word. */
static __always_inline void
arch___set_bit(unsigned long nr, volatile unsigned long *addr)
{
	asm volatile(__ASM_SIZE(bts) " %1,%0" : : ADDR, "Ir" (nr) : "memory");
}
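
/*
 * arch_clear_bit - atomically clear bit @nr in *@addr.  A compile-time
 * constant @nr is handled as a locked byte AND on the containing byte;
 * otherwise a locked BTR on the containing long is used.
 */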
static __always_inline void
arch_clear_bit(long nr, volatile unsigned long *addr)
{
	if (__builtin_constant_p(nr)) {
		asm volatile(LOCK_PREFIX "andb %b1,%0"
			: CONST_MASK_ADDR(nr, addr)
			: "iq" (~CONST_MASK(nr)));
	} else {
		asm volatile(LOCK_PREFIX __ASM_SIZE(btr) " %1,%0"
			: : RLONG_ADDR(addr), "Ir" (nr) : "memory");
	}
}

/*
 * arch_clear_bit_unlock - clear a bit with release (unlock) semantics.
 * The locked operation is already strongly ordered on x86; the barrier()
 * only keeps the compiler from reordering prior accesses past it.
 */
static __always_inline void
arch_clear_bit_unlock(long nr, volatile unsigned long *addr)
{
	barrier();
	arch_clear_bit(nr, addr);
}

static __always_inline void
arch___clear_bit(unsigned long nr, volatile unsigned long *addr)
{
	asm volatile(__ASM_SIZE(btr) " %1,%0" : : ADDR, "Ir" (nr) : "memory");
}
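
/*
 * arch_clear_bit_unlock_is_negative_byte - clear bit @nr and report
 * whether the containing byte is negative (bit 7 set) afterwards, using
 * a single locked "andb".  The operation acts on the first byte of
 * *@addr, so @nr is expected to be less than 8.
 */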
static __always_inline bool
arch_clear_bit_unlock_is_negative_byte(long nr, volatile unsigned long *addr)
{
	bool negative;
	asm volatile(LOCK_PREFIX "andb %2,%1"
		     CC_SET(s)
		     : CC_OUT(s) (negative), WBYTE_ADDR(addr)
		     : "ir" ((char) ~(1 << nr)) : "memory");
	return negative;
}
#define arch_clear_bit_unlock_is_negative_byte \
	arch_clear_bit_unlock_is_negative_byte

static __always_inline void
arch___clear_bit_unlock(long nr, volatile unsigned long *addr)
{
	arch___clear_bit(nr, addr);
}

static __always_inline void
arch___change_bit(unsigned long nr, volatile unsigned long *addr)
{
	asm volatile(__ASM_SIZE(btc) " %1,%0" : : ADDR, "Ir" (nr) : "memory");
}

static __always_inline void
arch_change_bit(long nr, volatile unsigned long *addr)
{
	if (__builtin_constant_p(nr)) {
		asm volatile(LOCK_PREFIX "xorb %b1,%0"
			: CONST_MASK_ADDR(nr, addr)
			: "iq" (CONST_MASK(nr)));
	} else {
		asm volatile(LOCK_PREFIX __ASM_SIZE(btc) " %1,%0"
			: : RLONG_ADDR(addr), "Ir" (nr) : "memory");
	}
}
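
/*
 * The test_and_*() operations below atomically update the bit and return
 * its previous value via the CPU carry flag (GEN_BINARY_RMWcc).
 */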
static __always_inline bool
arch_test_and_set_bit(long nr, volatile unsigned long *addr)
{
	return GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(bts), *addr, c, "Ir", nr);
}

static __always_inline bool
arch_test_and_set_bit_lock(long nr, volatile unsigned long *addr)
{
	return arch_test_and_set_bit(nr, addr);
}

static __always_inline bool
arch___test_and_set_bit(unsigned long nr, volatile unsigned long *addr)
{
	bool oldbit;

	asm(__ASM_SIZE(bts) " %2,%1"
	    CC_SET(c)
	    : CC_OUT(c) (oldbit)
	    : ADDR, "Ir" (nr) : "memory");
	return oldbit;
}

static __always_inline bool
arch_test_and_clear_bit(long nr, volatile unsigned long *addr)
{
	return GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btr), *addr, c, "Ir", nr);
}
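
/*
 * Note: arch___test_and_clear_bit() is non-atomic; if two callers race,
 * one can appear to succeed but actually fail.  Callers must provide
 * their own serialization.
 */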
static __always_inline bool
arch___test_and_clear_bit(unsigned long nr, volatile unsigned long *addr)
{
	bool oldbit;

	asm volatile(__ASM_SIZE(btr) " %2,%1"
		     CC_SET(c)
		     : CC_OUT(c) (oldbit)
		     : ADDR, "Ir" (nr) : "memory");
	return oldbit;
}

static __always_inline bool
arch___test_and_change_bit(unsigned long nr, volatile unsigned long *addr)
{
	bool oldbit;

	asm volatile(__ASM_SIZE(btc) " %2,%1"
		     CC_SET(c)
		     : CC_OUT(c) (oldbit)
		     : ADDR, "Ir" (nr) : "memory");

	return oldbit;
}

static __always_inline bool
arch_test_and_change_bit(long nr, volatile unsigned long *addr)
{
	return GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btc), *addr, c, "Ir", nr);
}
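
/*
 * test_bit() is split into a constant-@nr form, evaluated in plain C,
 * and a variable-@nr form that uses the BT instruction.
 */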
static __always_inline bool constant_test_bit(long nr, const volatile unsigned long *addr)
{
	return ((1UL << (nr & (BITS_PER_LONG-1))) &
		(addr[nr >> _BITOPS_LONG_SHIFT])) != 0;
}

static __always_inline bool constant_test_bit_acquire(long nr, const volatile unsigned long *addr)
{
	bool oldbit;

	asm volatile("testb %2,%1"
		     CC_SET(nz)
		     : CC_OUT(nz) (oldbit)
		     : "m" (((unsigned char *)addr)[nr >> 3]),
		       "i" (1 << (nr & 7))
		     : "memory");

	return oldbit;
}

static __always_inline bool variable_test_bit(long nr, volatile const unsigned long *addr)
{
	bool oldbit;

	asm volatile(__ASM_SIZE(bt) " %2,%1"
		     CC_SET(c)
		     : CC_OUT(c) (oldbit)
		     : "m" (*(unsigned long *)addr), "Ir" (nr) : "memory");

	return oldbit;
}

static __always_inline bool
arch_test_bit(unsigned long nr, const volatile unsigned long *addr)
{
	return __builtin_constant_p(nr) ? constant_test_bit(nr, addr) :
					  variable_test_bit(nr, addr);
}

static __always_inline bool
arch_test_bit_acquire(unsigned long nr, const volatile unsigned long *addr)
{
	return __builtin_constant_p(nr) ? constant_test_bit_acquire(nr, addr) :
					  variable_test_bit(nr, addr);
}
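
/**
 * __ffs - find first set bit in word
 * @word: The word to search
 *
 * Undefined if no bit exists, so code should check against 0 first.
 */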
static __always_inline unsigned long __ffs(unsigned long word)
{
	asm("rep; bsf %1,%0"
		: "=r" (word)
		: "rm" (word));
	return word;
}
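
/**
 * ffz - find first zero bit in word
 * @word: The word to search
 *
 * Undefined if no zero exists, so code should check against ~0UL first.
 */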
static __always_inline unsigned long ffz(unsigned long word)
{
	asm("rep; bsf %1,%0"
		: "=r" (word)
		: "r" (~word));
	return word;
}
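
/**
 * __fls - find last set bit in word
 * @word: The word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 */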
static __always_inline unsigned long __fls(unsigned long word)
{
	asm("bsr %1,%0"
	    : "=r" (word)
	    : "rm" (word));
	return word;
}

#undef ADDR

#ifdef __KERNEL__
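
/**
 * ffs - find first set bit in word
 * @x: the word to search
 *
 * This is defined the same way as the libc and compiler builtin ffs
 * routines, therefore differs in spirit from the other bitops.
 *
 * ffs(value) returns 0 if value is 0 or the position of the first
 * set bit if value is nonzero. The first (least significant) bit
 * is at position 1.
 */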
static __always_inline int ffs(int x)
{
	int r;

#ifdef CONFIG_X86_64
	/*
	 * AMD64 says BSFL won't clobber the dest reg if x==0; Intel64 says the
	 * dest reg is undefined if x==0, but their CPU architect says its
	 * value is written to set it to the same as before, except that the
	 * top 32 bits will be cleared.
	 *
	 * We cannot do this on 32 bits because at the very least some
	 * 486 CPUs did not behave this way.
	 */
	asm("bsfl %1,%0"
	    : "=r" (r)
	    : "rm" (x), "0" (-1));
#elif defined(CONFIG_X86_CMOV)
	asm("bsfl %1,%0\n\t"
	    "cmovzl %2,%0"
	    : "=&r" (r) : "rm" (x), "r" (-1));
#else
	asm("bsfl %1,%0\n\t"
	    "jnz 1f\n\t"
	    "movl $-1,%0\n"
	    "1:" : "=r" (r) : "rm" (x));
#endif
	return r + 1;
}
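
/**
 * fls - find last set bit in word
 * @x: the word to search
 *
 * This is defined in a similar way as the libc and compiler builtin
 * ffs, but returns the position of the most significant set bit.
 *
 * fls(value) returns 0 if value is 0 or the position of the last
 * set bit if value is nonzero. The last (most significant) bit is
 * at position 32.
 */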
static __always_inline int fls(unsigned int x)
{
	int r;

#ifdef CONFIG_X86_64
	/*
	 * AMD64 says BSRL won't clobber the dest reg if x==0; Intel64 says the
	 * dest reg is undefined if x==0, but their CPU architect says its
	 * value is written to set it to the same as before, except that the
	 * top 32 bits will be cleared.
	 *
	 * We cannot do this on 32 bits because at the very least some
	 * 486 CPUs did not behave this way.
	 */
	asm("bsrl %1,%0"
	    : "=r" (r)
	    : "rm" (x), "0" (-1));
#elif defined(CONFIG_X86_CMOV)
	asm("bsrl %1,%0\n\t"
	    "cmovzl %2,%0"
	    : "=&r" (r) : "rm" (x), "rm" (-1));
#else
	asm("bsrl %1,%0\n\t"
	    "jnz 1f\n\t"
	    "movl $-1,%0\n"
	    "1:" : "=r" (r) : "rm" (x));
#endif
	return r + 1;
}
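
/**
 * fls64 - find last set bit in a 64-bit word
 * @x: the word to search
 *
 * This is defined in a similar way as the libc and compiler builtin
 * ffsll, but returns the position of the most significant set bit.
 *
 * fls64(value) returns 0 if value is 0 or the position of the last
 * set bit if value is nonzero. The last (most significant) bit is
 * at position 64.
 */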
#ifdef CONFIG_X86_64
static __always_inline int fls64(__u64 x)
{
	int bitpos = -1;
	/*
	 * AMD64 says BSRQ won't clobber the dest reg if x==0; Intel64 says the
	 * dest reg is undefined if x==0, but their CPU architect says its
	 * value is written to set it to the same as before.
	 */
	asm("bsrq %1,%q0"
	    : "+r" (bitpos)
	    : "rm" (x));
	return bitpos + 1;
}
#else
#include <asm-generic/bitops/fls64.h>
#endif

#include <asm-generic/bitops/sched.h>

#include <asm/arch_hweight.h>

#include <asm-generic/bitops/const_hweight.h>

#include <asm-generic/bitops/instrumented-atomic.h>
#include <asm-generic/bitops/instrumented-non-atomic.h>
#include <asm-generic/bitops/instrumented-lock.h>

#include <asm-generic/bitops/le.h>

#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif /* __KERNEL__ */
#endif /* _ASM_X86_BITOPS_H */