#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

/*
 * Force strict CPU ordering.
 * This is required even on UP when talking to devices.
 */

#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
/* Fast-BCR without checkpoint synchronization */
#define __ASM_BCR_SERIALIZE "bcr 14,0\n"
#else
#define __ASM_BCR_SERIALIZE "bcr 15,0\n"
#endif

/* BCR with a nonzero mask and register zero is a no-op branch that serializes the CPU */
static __always_inline void bcr_serialize(void)
{
	asm volatile(__ASM_BCR_SERIALIZE : : : "memory");
}

/*
 * z/Architecture is strongly ordered: __rmb()/__wmb() only need to stop
 * the compiler, while a full barrier still requires the serializing BCR.
 */
#define __mb()		bcr_serialize()
#define __rmb()		barrier()
#define __wmb()		barrier()
#define __dma_rmb()	__mb()
#define __dma_wmb()	__mb()
#define __smp_mb()	__mb()
#define __smp_rmb()	__rmb()
#define __smp_wmb()	__wmb()

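/*
 * Pairing sketch (illustrative only; writer(), reader() and the data/ready
 * variables are hypothetical; real callers use the smp_wmb()/smp_rmb()
 * wrappers from <asm-generic/barrier.h>): the writer orders its data store
 * before the flag store, the reader orders the flag load before the data
 * load. On s390 both sides compile down to a pure compiler barrier.
 *
 *	static int data, ready;
 *
 *	void writer(void)
 *	{
 *		data = 1;
 *		__smp_wmb();			// order data before ready
 *		WRITE_ONCE(ready, 1);
 *	}
 *
 *	void reader(void)
 *	{
 *		while (!READ_ONCE(ready))
 *			;
 *		__smp_rmb();			// order ready before data
 *		BUG_ON(data != 1);		// cannot fire once paired
 *	}
 */
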
#define __smp_store_release(p, v)					\
do {									\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	WRITE_ONCE(*p, v);						\
} while (0)

#define __smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	___p1;								\
})

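/*
 * Usage sketch (illustrative; msg and msg_ready are hypothetical): the
 * store-release/load-acquire pair is the preferred one-sided publication
 * idiom; real callers go through smp_store_release()/smp_load_acquire()
 * from <asm-generic/barrier.h>, which are built on the macros above.
 *
 *	static unsigned long msg;
 *	static int msg_ready;
 *
 *	void publish(unsigned long m)
 *	{
 *		msg = m;
 *		__smp_store_release(&msg_ready, 1);	// msg visible before flag
 *	}
 *
 *	int consume(unsigned long *m)
 *	{
 *		if (!__smp_load_acquire(&msg_ready))	// flag read before msg
 *			return 0;
 *		*m = msg;
 *		return 1;
 *	}
 */
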
#define __smp_mb__before_atomic()	barrier()
#define __smp_mb__after_atomic()	barrier()

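/*
 * Usage sketch (illustrative; the pending counter and wait queue are
 * hypothetical): non-value-returning atomics such as atomic_inc() are
 * unordered by default, so callers add the ordering explicitly via
 * smp_mb__after_atomic(). On s390 this fixup costs only a compiler barrier.
 *
 *	atomic_inc(&pending);
 *	smp_mb__after_atomic();		// order the increment before the check
 *	if (waitqueue_active(&wq))
 *		wake_up(&wq);
 */
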
/*
 * array_index_mask_nospec - generate a mask that is ~0UL when the
 *	bounds check (index < size) succeeds and 0 otherwise
 * @index: array element index
 * @size: number of elements in array
 */
#define array_index_mask_nospec array_index_mask_nospec
static inline unsigned long array_index_mask_nospec(unsigned long index,
						    unsigned long size)
{
	unsigned long mask;

	if (__builtin_constant_p(size) && size > 0) {
		/*
		 * CLGR leaves a borrow for SLBGR iff index <= size - 1,
		 * so mask becomes ~0UL for an in-bounds index, 0 otherwise.
		 */
		asm("	clgr	%2,%1\n"
		    "	slbgr	%0,%0\n"
		    : "=d" (mask) : "d" (size - 1), "d" (index) : "cc");
		return mask;
	}
	/*
	 * Compare the other way around: the borrow is set iff size <= index
	 * (out of bounds), so the mask has to be inverted.
	 */
	asm("	clgr	%1,%2\n"
	    "	slbgr	%0,%0\n"
	    : "=d" (mask) : "d" (size), "d" (index) : "cc");
	return ~mask;
}

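/*
 * Usage sketch (illustrative; table and user_idx are hypothetical): the
 * generic array_index_nospec() helper from <linux/nospec.h> applies this
 * mask so that a mis-speculated bounds check cannot leak out-of-bounds data:
 *
 *	if (user_idx < ARRAY_SIZE(table)) {
 *		user_idx &= array_index_mask_nospec(user_idx, ARRAY_SIZE(table));
 *		val = table[user_idx];	// index forced to 0 under mis-speculation
 *	}
 */
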
#include <asm-generic/barrier.h>

#endif /* __ASM_BARRIER_H */