Back to home page

OSCL-LXR

 
 

    


0001 /*
0002  * This file is subject to the terms and conditions of the GNU General Public
0003  * License.  See the file "COPYING" in the main directory of this archive
0004  * for more details.
0005  *
0006  * Copyright (c) 1994 - 1997, 99, 2000, 06, 07  Ralf Baechle (ralf@linux-mips.org)
0007  * Copyright (c) 1999, 2000  Silicon Graphics, Inc.
0008  */
0009 #ifndef _ASM_BITOPS_H
0010 #define _ASM_BITOPS_H
0011 
0012 #ifndef _LINUX_BITOPS_H
0013 #error only <linux/bitops.h> can be included directly
0014 #endif
0015 
0016 #include <linux/bits.h>
0017 #include <linux/compiler.h>
0018 #include <linux/types.h>
0019 #include <asm/asm.h>
0020 #include <asm/barrier.h>
0021 #include <asm/byteorder.h>      /* sigh ... */
0022 #include <asm/compiler.h>
0023 #include <asm/cpu-features.h>
0024 #include <asm/sgidefs.h>
0025 
/*
 * __bit_op() - atomically read-modify-write a whole word via LL/SC.
 * @mem:    the word to update; becomes the "+m" memory operand (%1)
 * @insn:   assembly operating on the loaded value in %0
 * @inputs: extra asm input operands referenced by @insn (%2, %3, ...)
 *
 * Retries the LL/SC sequence from label 1 until the store-conditional
 * succeeds.  __SYNC(full, loongson3_war) emits a leading barrier only
 * on cores that need the Loongson-3 LL/SC errata workaround.
 */
#define __bit_op(mem, insn, inputs...) do {         \
    unsigned long __temp;                   \
                                \
    asm volatile(                       \
    "   .set        push            \n" \
    "   .set        " MIPS_ISA_LEVEL "  \n" \
    "   " __SYNC(full, loongson3_war) "     \n" \
    "1: " __stringify(LONG_LL)  "   %0, %1  \n" \
    "   " insn      "           \n" \
    "   " __stringify(LONG_SC)  "   %0, %1  \n" \
    "   " __stringify(SC_BEQZ)  "   %0, 1b  \n" \
    "   .set        pop         \n" \
    : "=&r"(__temp), "+" GCC_OFF_SMALL_ASM()(mem)       \
    : inputs                        \
    : __LLSC_CLOBBER);                  \
} while (0)
0042 
/*
 * __test_bit_op() - LL/SC read-modify-write that also yields a result.
 * @mem:    the word to update; becomes the "+m" memory operand (%2)
 * @ll_dst: operand string ("%0" or "%1") that LL loads into.  With
 *          "%0" the loaded (original) word is the result; with "%1"
 *          @insn is expected to compute the result into %0 itself.
 * @insn:   assembly producing the value to store back in %1
 * @inputs: extra asm input operands referenced by @insn (%3, ...)
 *
 * Evaluates to __orig (%0) as described above, from the final,
 * successful LL/SC iteration.
 */
#define __test_bit_op(mem, ll_dst, insn, inputs...) ({      \
    unsigned long __orig, __temp;               \
                                \
    asm volatile(                       \
    "   .set        push            \n" \
    "   .set        " MIPS_ISA_LEVEL "  \n" \
    "   " __SYNC(full, loongson3_war) "     \n" \
    "1: " __stringify(LONG_LL) " "  ll_dst ", %2\n" \
    "   " insn      "           \n" \
    "   " __stringify(LONG_SC)  "   %1, %2  \n" \
    "   " __stringify(SC_BEQZ)  "   %1, 1b  \n" \
    "   .set        pop         \n" \
    : "=&r"(__orig), "=&r"(__temp),             \
      "+" GCC_OFF_SMALL_ASM()(mem)              \
    : inputs                        \
    : __LLSC_CLOBBER);                  \
                                \
    __orig;                         \
})
0062 
/*
 * These are the "slower" versions of the functions and are in bitops.c.
 * These functions call raw_local_irq_{save,restore}().
 * They are the fallbacks used below whenever the CPU has no usable
 * LL/SC (!kernel_uses_llsc).
 */
void __mips_set_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_clear_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_change_bit(unsigned long nr, volatile unsigned long *addr);
int __mips_test_and_set_bit_lock(unsigned long nr,
                 volatile unsigned long *addr);
int __mips_test_and_clear_bit(unsigned long nr,
                  volatile unsigned long *addr);
int __mips_test_and_change_bit(unsigned long nr,
                   volatile unsigned long *addr);
0076 
0077 
/*
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered.  See __set_bit()
 * if you do not require the atomic guarantees.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
{
    volatile unsigned long *m = &addr[BIT_WORD(nr)];
    int bit = nr % BITS_PER_LONG;

    /* No LL/SC: fall back to the IRQ-disabling C implementation. */
    if (!kernel_uses_llsc) {
        __mips_set_bit(nr, addr);
        return;
    }

    /*
     * A constant bit >= 16 will not fit ori's 16-bit immediate; on
     * MIPS R2+ deposit a single 1 with INS instead (source ~0, width 1).
     */
    if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit) && (bit >= 16)) {
        __bit_op(*m, __stringify(LONG_INS) " %0, %3, %2, 1", "i"(bit), "r"(~0));
        return;
    }

    __bit_op(*m, "or\t%0, %2", "ir"(BIT(bit)));
}
0105 
/*
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_atomic() and/or smp_mb__after_atomic()
 * in order to ensure changes are visible on other processors.
 */
static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
{
    volatile unsigned long *m = &addr[BIT_WORD(nr)];
    int bit = nr % BITS_PER_LONG;

    /* No LL/SC: fall back to the IRQ-disabling C implementation. */
    if (!kernel_uses_llsc) {
        __mips_clear_bit(nr, addr);
        return;
    }

    /*
     * On MIPS R2+, INS from $0 writes a zero straight into the bit;
     * usable for any constant bit since ~BIT(bit) never fits andi's
     * zero-extended immediate.
     */
    if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit)) {
        __bit_op(*m, __stringify(LONG_INS) " %0, $0, %2, 1", "i"(bit));
        return;
    }

    __bit_op(*m, "and\t%0, %2", "ir"(~BIT(bit)));
}
0133 
/*
 * clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit_unlock() is atomic and implies release semantics before the
 * memory operation. It can be used for an unlock.
 */
static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
    /* Release barrier first, then the atomic clear. */
    smp_mb__before_atomic();
    clear_bit(nr, addr);
}
0147 
/*
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
{
    volatile unsigned long *m = &addr[BIT_WORD(nr)];
    int bit = nr % BITS_PER_LONG;

    /* No LL/SC: fall back to the IRQ-disabling C implementation. */
    if (!kernel_uses_llsc) {
        __mips_change_bit(nr, addr);
        return;
    }

    /* xor with BIT(bit) flips exactly that bit. */
    __bit_op(*m, "xor\t%0, %2", "ir"(BIT(bit)));
}
0169 
/*
 * test_and_set_bit_lock - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and implies acquire ordering semantics
 * after the memory operation.
 */
static inline int test_and_set_bit_lock(unsigned long nr,
    volatile unsigned long *addr)
{
    volatile unsigned long *m = &addr[BIT_WORD(nr)];
    int bit = nr % BITS_PER_LONG;
    unsigned long res, orig;

    if (!kernel_uses_llsc) {
        /* Fallback already returns the old bit value as 0/1. */
        res = __mips_test_and_set_bit_lock(nr, addr);
    } else {
        /* LL into %0 keeps the old word; OR the bit into the store value. */
        orig = __test_bit_op(*m, "%0",
                     "or\t%1, %0, %3",
                     "ir"(BIT(bit)));
        res = (orig & BIT(bit)) != 0;
    }

    /* Trailing barrier supplies the acquire ordering promised above. */
    smp_llsc_mb();

    return res;
}
0198 
/*
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_set_bit(unsigned long nr,
    volatile unsigned long *addr)
{
    /*
     * A leading barrier plus the acquire semantics of
     * test_and_set_bit_lock() make this fully ordered.
     */
    smp_mb__before_atomic();
    return test_and_set_bit_lock(nr, addr);
}
0213 
/*
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_clear_bit(unsigned long nr,
    volatile unsigned long *addr)
{
    volatile unsigned long *m = &addr[BIT_WORD(nr)];
    int bit = nr % BITS_PER_LONG;
    unsigned long res, orig;

    smp_mb__before_atomic();

    if (!kernel_uses_llsc) {
        res = __mips_test_and_clear_bit(nr, addr);
    } else if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(nr)) {
        /*
         * EXT copies the old bit into %0 (so res is already 0/1)
         * while INS from $0 clears it in the word stored back.
         */
        res = __test_bit_op(*m, "%1",
                    __stringify(LONG_EXT) " %0, %1, %3, 1;"
                    __stringify(LONG_INS) " %1, $0, %3, 1",
                    "i"(bit));
    } else {
        /* or sets the bit, xor then clears it; the old word is in %0. */
        orig = __test_bit_op(*m, "%0",
                     "or\t%1, %0, %3;"
                     "xor\t%1, %1, %3",
                     "ir"(BIT(bit)));
        res = (orig & BIT(bit)) != 0;
    }

    /* Trailing barrier completes the full-barrier semantics. */
    smp_llsc_mb();

    return res;
}
0250 
/*
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_change_bit(unsigned long nr,
    volatile unsigned long *addr)
{
    volatile unsigned long *m = &addr[BIT_WORD(nr)];
    int bit = nr % BITS_PER_LONG;
    unsigned long res, orig;

    smp_mb__before_atomic();

    if (!kernel_uses_llsc) {
        res = __mips_test_and_change_bit(nr, addr);
    } else {
        /* xor toggles the bit; the old word is loaded into %0. */
        orig = __test_bit_op(*m, "%0",
                     "xor\t%1, %0, %3",
                     "ir"(BIT(bit)));
        res = (orig & BIT(bit)) != 0;
    }

    /* Trailing barrier completes the full-barrier semantics. */
    smp_llsc_mb();

    return res;
}
0281 
0282 #undef __bit_op
0283 #undef __test_bit_op
0284 
0285 #include <asm-generic/bitops/non-atomic.h>
0286 
/*
 * __clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * __clear_bit() is non-atomic and implies release semantics before the memory
 * operation. It can be used for an unlock if no other CPUs can concurrently
 * modify other bits in the word.
 */
static inline void __clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
    /* Release ordering ahead of the plain (non-LL/SC) store. */
    smp_mb__before_llsc();
    __clear_bit(nr, addr);
    /* Encourage the store to become visible promptly (see asm/barrier.h). */
    nudge_writes();
}
0302 
/*
 * Return the bit position (0..63) of the most significant 1 bit in a word
 * Returns -1 if no 1 bit exists
 */
static __always_inline unsigned long __fls(unsigned long word)
{
    int num;

    /*
     * If CLZ availability is a build-time constant, count leading
     * zeros in hardware: the MSB index is 31 - clz(word).
     */
    if (BITS_PER_LONG == 32 && !__builtin_constant_p(word) &&
        __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
        __asm__(
        "   .set    push                    \n"
        "   .set    "MIPS_ISA_LEVEL"            \n"
        "   clz %0, %1                  \n"
        "   .set    pop                 \n"
        : "=r" (num)
        : "r" (word));

        return 31 - num;
    }

    /* 64-bit variant: DCLZ gives 63 - index of the MSB. */
    if (BITS_PER_LONG == 64 && !__builtin_constant_p(word) &&
        __builtin_constant_p(cpu_has_mips64) && cpu_has_mips64) {
        __asm__(
        "   .set    push                    \n"
        "   .set    "MIPS_ISA_LEVEL"            \n"
        "   dclz    %0, %1                  \n"
        "   .set    pop                 \n"
        : "=r" (num)
        : "r" (word));

        return 63 - num;
    }

    /*
     * Pure C fallback: binary search for the MSB by shifting the word
     * left in halving steps.  (Result for word == 0 here is 0, i.e.
     * undefined as far as callers are concerned.)
     */
    num = BITS_PER_LONG - 1;

#if BITS_PER_LONG == 64
    if (!(word & (~0ul << 32))) {
        num -= 32;
        word <<= 32;
    }
#endif
    if (!(word & (~0ul << (BITS_PER_LONG-16)))) {
        num -= 16;
        word <<= 16;
    }
    if (!(word & (~0ul << (BITS_PER_LONG-8)))) {
        num -= 8;
        word <<= 8;
    }
    if (!(word & (~0ul << (BITS_PER_LONG-4)))) {
        num -= 4;
        word <<= 4;
    }
    if (!(word & (~0ul << (BITS_PER_LONG-2)))) {
        num -= 2;
        word <<= 2;
    }
    if (!(word & (~0ul << (BITS_PER_LONG-1))))
        num -= 1;
    return num;
}
0365 
0366 /*
0367  * __ffs - find first bit in word.
0368  * @word: The word to search
0369  *
0370  * Returns 0..SZLONG-1
0371  * Undefined if no bit exists, so code should check against 0 first.
0372  */
0373 static __always_inline unsigned long __ffs(unsigned long word)
0374 {
0375     return __fls(word & -word);
0376 }
0377 
/*
 * fls - find last bit set.
 * @word: The word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(unsigned int x)
{
    int r;

    /*
     * If CLZ availability is a build-time constant, use it:
     * fls(x) = 32 - clz(x), and clz(0) = 32 keeps fls(0) = 0.
     */
    if (!__builtin_constant_p(x) &&
        __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
        __asm__(
        "   .set    push                    \n"
        "   .set    "MIPS_ISA_LEVEL"            \n"
        "   clz %0, %1                  \n"
        "   .set    pop                 \n"
        : "=r" (x)
        : "r" (x));

        return 32 - x;
    }

    /* Pure C fallback: binary search for the MSB, shifting left. */
    r = 32;
    if (!x)
        return 0;
    if (!(x & 0xffff0000u)) {
        x <<= 16;
        r -= 16;
    }
    if (!(x & 0xff000000u)) {
        x <<= 8;
        r -= 8;
    }
    if (!(x & 0xf0000000u)) {
        x <<= 4;
        r -= 4;
    }
    if (!(x & 0xc0000000u)) {
        x <<= 2;
        r -= 2;
    }
    if (!(x & 0x80000000u)) {
        x <<= 1;
        r -= 1;
    }
    return r;
}
0427 
0428 #include <asm-generic/bitops/fls64.h>
0429 
/*
 * ffs - find first bit set.
 * @word: The word to search
 *
 * This is defined the same way as
 * the libc and compiler builtin ffs routines, therefore
 * differs in spirit from the below ffz (man ffs).
 */
static inline int ffs(int word)
{
    /* ffs(0) is 0; otherwise isolate the lowest set bit and rank it. */
    return word ? fls(word & -word) : 0;
}
0445 
0446 #include <asm-generic/bitops/ffz.h>
0447 
0448 #ifdef __KERNEL__
0449 
0450 #include <asm-generic/bitops/sched.h>
0451 
0452 #include <asm/arch_hweight.h>
0453 #include <asm-generic/bitops/const_hweight.h>
0454 
0455 #include <asm-generic/bitops/le.h>
0456 #include <asm-generic/bitops/ext2-atomic.h>
0457 
0458 #endif /* __KERNEL__ */
0459 
0460 #endif /* _ASM_BITOPS_H */