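/*
 * Bit operations for MIPS.  The atomic operations are implemented with
 * LL/SC loops where the kernel can use them, and fall back to the
 * out-of-line __mips_*() helpers otherwise.
 */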
#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/bits.h>
#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/asm.h>
#include <asm/barrier.h>
#include <asm/byteorder.h>		/* sigh ... */
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/sgidefs.h>
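
/*
 * __bit_op() - atomically read-modify-write a word with an LL/SC loop
 *
 * Loads @mem with LL into %0, applies @insn to %0 and stores the result
 * back with SC, retrying until the SC succeeds.  @inputs are passed
 * through as additional asm operands (%2 onwards).  The leading
 * __SYNC(full, loongson3_war) emits a barrier only on cores that need
 * the Loongson3 LL/SC workaround.
 */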
#define __bit_op(mem, insn, inputs...) do {			\
	unsigned long __temp;					\
								\
	asm volatile(						\
	"	.set	push				\n"	\
	"	.set	" MIPS_ISA_LEVEL "		\n"	\
	"	" __SYNC(full, loongson3_war) "		\n"	\
	"1:	" __stringify(LONG_LL) "	%0, %1	\n"	\
	"	" insn "				\n"	\
	"	" __stringify(LONG_SC) "	%0, %1	\n"	\
	"	" __stringify(SC_BEQZ) "	%0, 1b	\n"	\
	"	.set	pop				\n"	\
	: "=&r"(__temp), "+" GCC_OFF_SMALL_ASM()(mem)		\
	: inputs						\
	: __LLSC_CLOBBER);					\
} while (0)
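
/*
 * __test_bit_op() - like __bit_op(), but also returns an old value
 *
 * @ll_dst names the register ("%0" or "%1") that the LL loads into.
 * @insn must leave the value to store back in %1 and the value to
 * return in %0; the macro evaluates to %0 (__orig).
 */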
#define __test_bit_op(mem, ll_dst, insn, inputs...) ({		\
	unsigned long __orig, __temp;				\
								\
	asm volatile(						\
	"	.set	push				\n"	\
	"	.set	" MIPS_ISA_LEVEL "		\n"	\
	"	" __SYNC(full, loongson3_war) "		\n"	\
	"1:	" __stringify(LONG_LL) " " ll_dst ", %2	\n"	\
	"	" insn "				\n"	\
	"	" __stringify(LONG_SC) "	%1, %2	\n"	\
	"	" __stringify(SC_BEQZ) "	%1, 1b	\n"	\
	"	.set	pop				\n"	\
	: "=&r"(__orig), "=&r"(__temp),				\
	  "+" GCC_OFF_SMALL_ASM()(mem)				\
	: inputs						\
	: __LLSC_CLOBBER);					\
								\
	__orig;							\
})
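
/*
 * Out-of-line fallback implementations, used when the kernel cannot
 * rely on LL/SC (!kernel_uses_llsc).
 */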
void __mips_set_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_clear_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_change_bit(unsigned long nr, volatile unsigned long *addr);
int __mips_test_and_set_bit_lock(unsigned long nr,
				 volatile unsigned long *addr);
int __mips_test_and_clear_bit(unsigned long nr,
			      volatile unsigned long *addr);
int __mips_test_and_change_bit(unsigned long nr,
			       volatile unsigned long *addr);
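
/**
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This operation is atomic but does not imply a memory barrier.
 */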
static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
{
	volatile unsigned long *m = &addr[BIT_WORD(nr)];
	int bit = nr % BITS_PER_LONG;

	if (!kernel_uses_llsc) {
		__mips_set_bit(nr, addr);
		return;
	}

	if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit) && (bit >= 16)) {
		/*
		 * Set the bit with a single ins; for bit < 16 the BIT(bit)
		 * constant still fits an ori immediate, so the generic
		 * path below is already a single instruction.
		 */
		__bit_op(*m, __stringify(LONG_INS) " %0, %3, %2, 1", "i"(bit), "r"(~0));
		return;
	}

	__bit_op(*m, "or\t%0, %2", "ir"(BIT(bit)));
}
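
/**
 * clear_bit - Clears a bit in memory
 * @nr: bit to clear
 * @addr: the address to start counting from
 *
 * This operation is atomic but does not imply a memory barrier; use
 * clear_bit_unlock() if release semantics are needed.
 */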
static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
{
	volatile unsigned long *m = &addr[BIT_WORD(nr)];
	int bit = nr % BITS_PER_LONG;

	if (!kernel_uses_llsc) {
		__mips_clear_bit(nr, addr);
		return;
	}

	if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit)) {
		/* Clear the bit with a single ins from $0 */
		__bit_op(*m, __stringify(LONG_INS) " %0, $0, %2, 1", "i"(bit));
		return;
	}

	__bit_op(*m, "and\t%0, %2", "ir"(~BIT(bit)));
}
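
/**
 * clear_bit_unlock - Clears a bit in memory, with release semantics
 * @nr: bit to clear
 * @addr: the address to start counting from
 *
 * This operation is atomic and orders all memory accesses before it
 * ahead of the clearing of the bit, so it can be used to release a bit
 * lock.
 */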
static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
	smp_mb__before_atomic();
	clear_bit(nr, addr);
}
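
/**
 * change_bit - Toggle a bit in memory
 * @nr: bit to change
 * @addr: the address to start counting from
 *
 * change_bit() is atomic but does not imply a memory barrier.
 */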
static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
{
	volatile unsigned long *m = &addr[BIT_WORD(nr)];
	int bit = nr % BITS_PER_LONG;

	if (!kernel_uses_llsc) {
		__mips_change_bit(nr, addr);
		return;
	}

	__bit_op(*m, "xor\t%0, %2", "ir"(BIT(bit)));
}
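
/**
 * test_and_set_bit_lock - Set a bit and return its old value, for lock
 * @nr: bit to set
 * @addr: the address to start counting from
 *
 * This operation is atomic and provides acquire barrier semantics, so
 * it can be used to implement bit locks.
 */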
static inline int test_and_set_bit_lock(unsigned long nr,
	volatile unsigned long *addr)
{
	volatile unsigned long *m = &addr[BIT_WORD(nr)];
	int bit = nr % BITS_PER_LONG;
	unsigned long res, orig;

	if (!kernel_uses_llsc) {
		res = __mips_test_and_set_bit_lock(nr, addr);
	} else {
		orig = __test_bit_op(*m, "%0",
				     "or\t%1, %0, %3",
				     "ir"(BIT(bit)));
		res = (orig & BIT(bit)) != 0;
	}

	smp_llsc_mb();

	return res;
}
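
/**
 * test_and_set_bit - Set a bit and return its old value
 * @nr: bit to set
 * @addr: the address to start counting from
 *
 * This operation is atomic and implies a full memory barrier.
 */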
static inline int test_and_set_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	smp_mb__before_atomic();
	return test_and_set_bit_lock(nr, addr);
}
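
/**
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: bit to clear
 * @addr: the address to start counting from
 *
 * This operation is atomic and implies a full memory barrier.
 */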
static inline int test_and_clear_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	volatile unsigned long *m = &addr[BIT_WORD(nr)];
	int bit = nr % BITS_PER_LONG;
	unsigned long res, orig;

	smp_mb__before_atomic();

	if (!kernel_uses_llsc) {
		res = __mips_test_and_clear_bit(nr, addr);
	} else if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(nr)) {
		/* ext pulls the old bit out into %0, ins clears it in %1 */
		res = __test_bit_op(*m, "%1",
				    __stringify(LONG_EXT) " %0, %1, %3, 1;"
				    __stringify(LONG_INS) " %1, $0, %3, 1",
				    "i"(bit));
	} else {
		/* set then toggle: the bit ends up clear either way */
		orig = __test_bit_op(*m, "%0",
				     "or\t%1, %0, %3;"
				     "xor\t%1, %1, %3",
				     "ir"(BIT(bit)));
		res = (orig & BIT(bit)) != 0;
	}

	smp_llsc_mb();

	return res;
}
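
/**
 * test_and_change_bit - Change a bit and return its old value
 * @nr: bit to change
 * @addr: the address to start counting from
 *
 * This operation is atomic and implies a full memory barrier.
 */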
static inline int test_and_change_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	volatile unsigned long *m = &addr[BIT_WORD(nr)];
	int bit = nr % BITS_PER_LONG;
	unsigned long res, orig;

	smp_mb__before_atomic();

	if (!kernel_uses_llsc) {
		res = __mips_test_and_change_bit(nr, addr);
	} else {
		orig = __test_bit_op(*m, "%0",
				     "xor\t%1, %0, %3",
				     "ir"(BIT(bit)));
		res = (orig & BIT(bit)) != 0;
	}

	smp_llsc_mb();

	return res;
}

#undef __bit_op
#undef __test_bit_op

#include <asm-generic/bitops/non-atomic.h>
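
/**
 * __clear_bit_unlock - Clears a bit in memory, with release semantics
 * @nr: the bit to clear
 * @addr: the address to start counting from
 *
 * A faster, non-atomic variant of clear_bit_unlock(), only usable when
 * no other CPU can concurrently modify other bits in the word.
 */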
static inline void __clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
	smp_mb__before_llsc();
	__clear_bit(nr, addr);
	nudge_writes();
}
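
/**
 * __fls - find last (most-significant) set bit in a long word
 * @word: the word to search
 *
 * Returns the bit position, counting from 0.  Undefined if no set bit
 * exists, so callers should check against 0 first.
 */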
static __always_inline unsigned long __fls(unsigned long word)
{
	int num;

	/* Use clz/dclz when the CPU is known at build time to have it */
	if (BITS_PER_LONG == 32 && !__builtin_constant_p(word) &&
	    __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
		__asm__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	clz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (num)
		: "r" (word));

		return 31 - num;
	}

	if (BITS_PER_LONG == 64 && !__builtin_constant_p(word) &&
	    __builtin_constant_p(cpu_has_mips64) && cpu_has_mips64) {
		__asm__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	dclz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (num)
		: "r" (word));

		return 63 - num;
	}

	/* Generic binary-search fallback */
	num = BITS_PER_LONG - 1;

#if BITS_PER_LONG == 64
	if (!(word & (~0ul << 32))) {
		num -= 32;
		word <<= 32;
	}
#endif
	if (!(word & (~0ul << (BITS_PER_LONG-16)))) {
		num -= 16;
		word <<= 16;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-8)))) {
		num -= 8;
		word <<= 8;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-4)))) {
		num -= 4;
		word <<= 4;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-2)))) {
		num -= 2;
		word <<= 2;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-1))))
		num -= 1;
	return num;
}
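
/**
 * __ffs - find first (least-significant) set bit in a long word
 * @word: the word to search
 *
 * Returns the bit position, counting from 0.  Undefined if no set bit
 * exists, so callers should check against 0 first.
 */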
static __always_inline unsigned long __ffs(unsigned long word)
{
	/* word & -word isolates the least-significant set bit */
	return __fls(word & -word);
}
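
/**
 * fls - find last (most-significant) bit set
 * @x: the word to search
 *
 * This is defined the same way as ffs():
 * fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */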
static inline int fls(unsigned int x)
{
	int r;

	if (!__builtin_constant_p(x) &&
	    __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
		__asm__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	clz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (x)
		: "r" (x));

		return 32 - x;
	}

	r = 32;
	if (!x)
		return 0;
	if (!(x & 0xffff0000u)) {
		x <<= 16;
		r -= 16;
	}
	if (!(x & 0xff000000u)) {
		x <<= 8;
		r -= 8;
	}
	if (!(x & 0xf0000000u)) {
		x <<= 4;
		r -= 4;
	}
	if (!(x & 0xc0000000u)) {
		x <<= 2;
		r -= 2;
	}
	if (!(x & 0x80000000u)) {
		x <<= 1;
		r -= 1;
	}
	return r;
}

#include <asm-generic/bitops/fls64.h>
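
/**
 * ffs - find first bit set
 * @word: the word to search
 *
 * This is defined the same way as the libc and compiler builtin ffs
 * routines: ffs(0) = 0, ffs(1) = 1, ffs(0x80000000) = 32.
 */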
static inline int ffs(int word)
{
	if (!word)
		return 0;

	return fls(word & -word);
}

#include <asm-generic/bitops/ffz.h>

#ifdef __KERNEL__

#include <asm-generic/bitops/sched.h>

#include <asm/arch_hweight.h>
#include <asm-generic/bitops/const_hweight.h>

#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic.h>

#endif /* __KERNEL__ */

#endif /* _ASM_BITOPS_H */