/* SPDX-License-Identifier: GPL-2.0 */
/*
 * PowerPC atomic bit operations.
 *
 * Within a word, bits are numbered LSB first.  The test_and_*_bit
 * operations are taken to imply a memory barrier before and after
 * the operation.
 */
#ifndef _ASM_POWERPC_BITOPS_H
#define _ASM_POWERPC_BITOPS_H

#ifdef __KERNEL__

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
#include <asm/asm-compat.h>
#include <asm/synch.h>

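/*
 * PPC bit number conversion: unlike the LSB-first numbering used by
 * the generic bitops, these macros use IBM (MSB-0) numbering, where
 * bit 0 is the most significant bit of the word.  For example, on a
 * 64-bit kernel:
 *
 *	PPC_BIT(0)		== 0x8000000000000000UL
 *	PPC_BIT(63)		== 0x0000000000000001UL
 *	PPC_BITMASK(0, 3)	== 0xf000000000000000UL
 */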
#define PPC_BITLSHIFT(be)	(BITS_PER_LONG - 1 - (be))
#define PPC_BIT(bit)		(1UL << PPC_BITLSHIFT(bit))
#define PPC_BITMASK(bs, be)	((PPC_BIT(bs) - PPC_BIT(be)) | PPC_BIT(bs))

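/* Put a PPC (MSB-0 numbered) bit into a normal (LSB-0) bit position */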
#define PPC_BITEXTRACT(bits, ppc_bit, dst_bit)			\
	((((bits) >> PPC_BITLSHIFT(ppc_bit)) & 1) << (dst_bit))

#define PPC_BITLSHIFT32(be)	(32 - 1 - (be))
#define PPC_BIT32(bit)		(1UL << PPC_BITLSHIFT32(bit))
#define PPC_BITMASK32(bs, be)	((PPC_BIT32(bs) - PPC_BIT32(be)) | PPC_BIT32(bs))

#define PPC_BITLSHIFT8(be)	(8 - 1 - (be))
#define PPC_BIT8(bit)		(1UL << PPC_BITLSHIFT8(bit))
#define PPC_BITMASK8(bs, be)	((PPC_BIT8(bs) - PPC_BIT8(be)) | PPC_BIT8(bs))

#include <asm/barrier.h>

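/*
 * DEFINE_BITOP() generates an atomic read-modify-write function built
 * around a larx/stcx. (load-and-reserve / store-conditional) retry
 * loop: the conditional store fails and branches back to the load if
 * the reservation was lost to another writer.  The "%I2"/"rK"
 * constraint pair lets the compiler use the immediate form of the
 * instruction (e.g. ori instead of or) when the mask fits in an
 * unsigned 16-bit constant.
 */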
#define DEFINE_BITOP(fn, op, prefix)			\
static inline void fn(unsigned long mask,		\
		volatile unsigned long *_p)		\
{							\
	unsigned long old;				\
	unsigned long *p = (unsigned long *)_p;		\
	__asm__ __volatile__ (				\
	prefix						\
"1:"	PPC_LLARX "%0,0,%3,0\n"				\
	#op "%I2 %0,%0,%2\n"				\
	PPC_STLCX "%0,0,%3\n"				\
	"bne- 1b\n"					\
	: "=&r" (old), "+m" (*p)			\
	: "rK" (mask), "r" (p)				\
	: "cc", "memory");				\
}

DEFINE_BITOP(set_bits, or, "")
DEFINE_BITOP(change_bits, xor, "")

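/*
 * rlwinm can clear any mask that is a single contiguous run of 1s,
 * including one that wraps around the end of the word.  This helper
 * checks that a (compile-time constant) value is of that form, so the
 * clearing ops below can use a single rlwinm with an immediate mask
 * instead of an andc with a mask held in a register.
 */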
static __always_inline bool is_rlwinm_mask_valid(unsigned long x)
{
	if (!x)
		return false;
	if (x & 1)
		x = ~x;	// make the mask non-wrapping
	x += x & -x;	// adding the low set bit results in at most one bit set

	return !(x & (x - 1));
}

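/*
 * DEFINE_CLROP() generates the clearing variants (*_p &= ~mask).  On
 * 32-bit, a compile-time-constant mask whose complement is
 * rlwinm-encodable is cleared with a single rlwinm; otherwise the
 * generic andc (AND with complement) form is used.
 */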
#define DEFINE_CLROP(fn, prefix)					\
static inline void fn(unsigned long mask, volatile unsigned long *_p)	\
{									\
	unsigned long old;						\
	unsigned long *p = (unsigned long *)_p;				\
									\
	if (IS_ENABLED(CONFIG_PPC32) &&					\
	    __builtin_constant_p(mask) && is_rlwinm_mask_valid(~mask)) {\
		asm volatile (						\
			prefix						\
		"1:"	"lwarx	%0,0,%3\n"				\
			"rlwinm	%0,%0,0,%2\n"				\
			"stwcx.	%0,0,%3\n"				\
			"bne- 1b\n"					\
			: "=&r" (old), "+m" (*p)			\
			: "n" (~mask), "r" (p)				\
			: "cc", "memory");				\
	} else {							\
		asm volatile (						\
			prefix						\
		"1:"	PPC_LLARX "%0,0,%3,0\n"				\
			"andc %0,%0,%2\n"				\
			PPC_STLCX "%0,0,%3\n"				\
			"bne- 1b\n"					\
			: "=&r" (old), "+m" (*p)			\
			: "r" (mask), "r" (p)				\
			: "cc", "memory");				\
	}								\
}

DEFINE_CLROP(clear_bits, "")
DEFINE_CLROP(clear_bits_unlock, PPC_RELEASE_BARRIER)

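/*
 * The arch_*_bit() wrappers turn a bit number into a word pointer
 * (addr + BIT_WORD(nr)) plus an in-word mask (BIT_MASK(nr)) and defer
 * to the *_bits() primitives above.  E.g. on a 32-bit kernel,
 * nr == 40 operates on word addr[1] with mask 1UL << 8.
 */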
static inline void arch_set_bit(int nr, volatile unsigned long *addr)
{
	set_bits(BIT_MASK(nr), addr + BIT_WORD(nr));
}

static inline void arch_clear_bit(int nr, volatile unsigned long *addr)
{
	clear_bits(BIT_MASK(nr), addr + BIT_WORD(nr));
}

static inline void arch_clear_bit_unlock(int nr, volatile unsigned long *addr)
{
	clear_bits_unlock(BIT_MASK(nr), addr + BIT_WORD(nr));
}

static inline void arch_change_bit(int nr, volatile unsigned long *addr)
{
	change_bits(BIT_MASK(nr), addr + BIT_WORD(nr));
}

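/*
 * Like DEFINE_BITOP(), but the old value is kept in a separate
 * register so the function can return (old & mask).  'eh' is the
 * Exclusive access Hint bit of the larx instruction; the _lock
 * variant sets it on 64-bit to tell the core that the reservation is
 * being used to acquire a lock.
 */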
#define DEFINE_TESTOP(fn, op, prefix, postfix, eh)	\
static inline unsigned long fn(				\
		unsigned long mask,			\
		volatile unsigned long *_p)		\
{							\
	unsigned long old, t;				\
	unsigned long *p = (unsigned long *)_p;		\
	__asm__ __volatile__ (				\
	prefix						\
"1:"	PPC_LLARX "%0,0,%3,%4\n"			\
	#op "%I2 %1,%0,%2\n"				\
	PPC_STLCX "%1,0,%3\n"				\
	"bne- 1b\n"					\
	postfix						\
	: "=&r" (old), "=&r" (t)			\
	: "rK" (mask), "r" (p), "n" (eh)		\
	: "cc", "memory");				\
	return (old & mask);				\
}

DEFINE_TESTOP(test_and_set_bits, or, PPC_ATOMIC_ENTRY_BARRIER,
	      PPC_ATOMIC_EXIT_BARRIER, 0)
DEFINE_TESTOP(test_and_set_bits_lock, or, "",
	      PPC_ACQUIRE_BARRIER, IS_ENABLED(CONFIG_PPC64))
DEFINE_TESTOP(test_and_change_bits, xor, PPC_ATOMIC_ENTRY_BARRIER,
	      PPC_ATOMIC_EXIT_BARRIER, 0)

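/*
 * test_and_clear_bits() is open-coded rather than generated by
 * DEFINE_TESTOP() so that the 32-bit constant-mask case can use the
 * same single-instruction rlwinm trick as DEFINE_CLROP().
 */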
static inline unsigned long test_and_clear_bits(unsigned long mask, volatile unsigned long *_p)
{
	unsigned long old, t;
	unsigned long *p = (unsigned long *)_p;

	if (IS_ENABLED(CONFIG_PPC32) &&
	    __builtin_constant_p(mask) && is_rlwinm_mask_valid(~mask)) {
		asm volatile (
			PPC_ATOMIC_ENTRY_BARRIER
		"1:"	"lwarx	%0,0,%3\n"
			"rlwinm	%1,%0,0,%2\n"
			"stwcx.	%1,0,%3\n"
			"bne- 1b\n"
			PPC_ATOMIC_EXIT_BARRIER
			: "=&r" (old), "=&r" (t)
			: "n" (~mask), "r" (p)
			: "cc", "memory");
	} else {
		asm volatile (
			PPC_ATOMIC_ENTRY_BARRIER
		"1:"	PPC_LLARX "%0,0,%3,0\n"
			"andc	%1,%0,%2\n"
			PPC_STLCX "%1,0,%3\n"
			"bne- 1b\n"
			PPC_ATOMIC_EXIT_BARRIER
			: "=&r" (old), "=&r" (t)
			: "r" (mask), "r" (p)
			: "cc", "memory");
	}

	return (old & mask);
}

static inline int arch_test_and_set_bit(unsigned long nr,
					volatile unsigned long *addr)
{
	return test_and_set_bits(BIT_MASK(nr), addr + BIT_WORD(nr)) != 0;
}

static inline int arch_test_and_set_bit_lock(unsigned long nr,
					     volatile unsigned long *addr)
{
	return test_and_set_bits_lock(BIT_MASK(nr),
				      addr + BIT_WORD(nr)) != 0;
}

static inline int arch_test_and_clear_bit(unsigned long nr,
					  volatile unsigned long *addr)
{
	return test_and_clear_bits(BIT_MASK(nr), addr + BIT_WORD(nr)) != 0;
}

static inline int arch_test_and_change_bit(unsigned long nr,
					   volatile unsigned long *addr)
{
	return test_and_change_bits(BIT_MASK(nr), addr + BIT_WORD(nr)) != 0;
}

#ifdef CONFIG_PPC64
static inline unsigned long
clear_bit_unlock_return_word(int nr, volatile unsigned long *addr)
{
	unsigned long old, t;
	unsigned long *p = (unsigned long *)addr + BIT_WORD(nr);
	unsigned long mask = BIT_MASK(nr);

	__asm__ __volatile__ (
	PPC_RELEASE_BARRIER
"1:"	PPC_LLARX "%0,0,%3,0\n"
	"andc %1,%0,%2\n"
	PPC_STLCX "%1,0,%3\n"
	"bne- 1b\n"
	: "=&r" (old), "=&r" (t)
	: "r" (mask), "r" (p)
	: "cc", "memory");

	return old;
}

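/*
 * This is a special function for mm/filemap.c:
 * bit 7 (the sign bit of the low byte) corresponds to PG_waiters.
 */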
#define arch_clear_bit_unlock_is_negative_byte(nr, addr)	\
	(clear_bit_unlock_return_word(nr, addr) & BIT_MASK(7))

#endif /* CONFIG_PPC64 */

#include <asm-generic/bitops/non-atomic.h>

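/*
 * Non-atomic unlock: a release barrier followed by a plain
 * (non-atomic) clear is sufficient when the caller is the only
 * possible concurrent modifier of the word.
 */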
static inline void arch___clear_bit_unlock(int nr, volatile unsigned long *addr)
{
	__asm__ __volatile__(PPC_RELEASE_BARRIER "" ::: "memory");
	__clear_bit(nr, addr);
}

#define __ilog2(x) ilog2(x)

#include <asm-generic/bitops/ffz.h>

#include <asm-generic/bitops/builtin-__ffs.h>

#include <asm-generic/bitops/builtin-ffs.h>

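/*
 * fls: find last (most-significant) bit set.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */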
static __always_inline int fls(unsigned int x)
{
	int lz;

	if (__builtin_constant_p(x))
		return x ? 32 - __builtin_clz(x) : 0;
	asm("cntlzw %0,%1" : "=r" (lz) : "r" (x));
	return 32 - lz;
}

#include <asm-generic/bitops/builtin-__fls.h>

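/*
 * 64-bit can do this with a single cntlzd (count leading zeroes
 * doubleword) instruction; 32-bit falls back to the generic fls64(),
 * which is built from two 32-bit fls() calls.
 */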
#ifdef CONFIG_PPC64
static __always_inline int fls64(__u64 x)
{
	int lz;

	if (__builtin_constant_p(x))
		return x ? 64 - __builtin_clzll(x) : 0;
	asm("cntlzd %0,%1" : "=r" (lz) : "r" (x));
	return 64 - lz;
}
#else
#include <asm-generic/bitops/fls64.h>
#endif

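/*
 * The 64-bit hweight functions are implemented out of line (in
 * arch/powerpc/lib/) so they can use the popcnt* instructions when
 * the CPU provides them; const_hweight.h still folds constant
 * arguments at compile time.
 */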
#ifdef CONFIG_PPC64
unsigned int __arch_hweight8(unsigned int w);
unsigned int __arch_hweight16(unsigned int w);
unsigned int __arch_hweight32(unsigned int w);
unsigned long __arch_hweight64(__u64 w);
#include <asm-generic/bitops/const_hweight.h>
#else
#include <asm-generic/bitops/hweight.h>
#endif

#include <asm-generic/bitops/instrumented-atomic.h>
#include <asm-generic/bitops/instrumented-lock.h>

/* Little-endian versions */
#include <asm-generic/bitops/le.h>

/* Bitmap functions for the ext2 filesystem */
#include <asm-generic/bitops/ext2-atomic-setbit.h>

#include <asm-generic/bitops/sched.h>

#endif /* __KERNEL__ */

#endif /* _ASM_POWERPC_BITOPS_H */