0001
0002
0003
0004
0005 #ifndef __ASM_CACHE_H
0006 #define __ASM_CACHE_H
0007
0008 #define L1_CACHE_SHIFT (6)
0009 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)
0010
0011 #define CLIDR_LOUU_SHIFT 27
0012 #define CLIDR_LOC_SHIFT 24
0013 #define CLIDR_LOUIS_SHIFT 21
0014
0015 #define CLIDR_LOUU(clidr) (((clidr) >> CLIDR_LOUU_SHIFT) & 0x7)
0016 #define CLIDR_LOC(clidr) (((clidr) >> CLIDR_LOC_SHIFT) & 0x7)
0017 #define CLIDR_LOUIS(clidr) (((clidr) >> CLIDR_LOUIS_SHIFT) & 0x7)
0018
0019
0020
0021
0022
0023
0024
0025
0026 #define ARCH_DMA_MINALIGN (128)
0027
0028 #ifndef __ASSEMBLY__
0029
0030 #include <linux/bitops.h>
0031 #include <linux/kasan-enabled.h>
0032
0033 #include <asm/cputype.h>
0034 #include <asm/mte-def.h>
0035 #include <asm/sysreg.h>
0036
0037 #ifdef CONFIG_KASAN_SW_TAGS
0038 #define ARCH_SLAB_MINALIGN (1ULL << KASAN_SHADOW_SCALE_SHIFT)
0039 #elif defined(CONFIG_KASAN_HW_TAGS)
0040 static inline unsigned int arch_slab_minalign(void)
0041 {
0042 return kasan_hw_tags_enabled() ? MTE_GRANULE_SIZE :
0043 __alignof__(unsigned long long);
0044 }
0045 #define arch_slab_minalign() arch_slab_minalign()
0046 #endif
0047
0048 #define CTR_CACHE_MINLINE_MASK \
0049 (0xf << CTR_EL0_DMINLINE_SHIFT | \
0050 CTR_EL0_IMINLINE_MASK << CTR_EL0_IMINLINE_SHIFT)
0051
0052 #define CTR_L1IP(ctr) SYS_FIELD_GET(CTR_EL0, L1Ip, ctr)
0053
0054 #define ICACHEF_ALIASING 0
0055 #define ICACHEF_VPIPT 1
0056 extern unsigned long __icache_flags;
0057
0058
0059
0060
0061
/*
 * Return non-zero if the ICACHEF_ALIASING bit is set in __icache_flags,
 * i.e. the instruction cache may contain aliases of the same physical
 * line. NOTE(review): the flag is presumably set during CPU feature
 * probing elsewhere — this header only declares __icache_flags.
 */
static inline int icache_is_aliasing(void)
{
	return test_bit(ICACHEF_ALIASING, &__icache_flags);
}
0066
/*
 * Return non-zero if the ICACHEF_VPIPT bit is set in __icache_flags.
 * NOTE(review): marked __always_inline (unlike icache_is_aliasing above),
 * presumably because some caller must not emit an out-of-line copy —
 * confirm before relaxing.
 */
static __always_inline int icache_is_vpipt(void)
{
	return test_bit(ICACHEF_VPIPT, &__icache_flags);
}
0071
0072 static inline u32 cache_type_cwg(void)
0073 {
0074 return SYS_FIELD_GET(CTR_EL0, CWG, read_cpuid_cachetype());
0075 }
0076
0077 #define __read_mostly __section(".data..read_mostly")
0078
0079 static inline int cache_line_size_of_cpu(void)
0080 {
0081 u32 cwg = cache_type_cwg();
0082
0083 return cwg ? 4 << cwg : ARCH_DMA_MINALIGN;
0084 }
0085
0086 int cache_line_size(void);
0087
0088
0089
0090
0091
0092
0093
0094
0095
0096
0097
0098
0099
0100
0101
0102
0103
0104 static inline u32 __attribute_const__ read_cpuid_effective_cachetype(void)
0105 {
0106 u32 ctr = read_cpuid_cachetype();
0107
0108 if (!(ctr & BIT(CTR_EL0_IDC_SHIFT))) {
0109 u64 clidr = read_sysreg(clidr_el1);
0110
0111 if (CLIDR_LOC(clidr) == 0 ||
0112 (CLIDR_LOUIS(clidr) == 0 && CLIDR_LOUU(clidr) == 0))
0113 ctr |= BIT(CTR_EL0_IDC_SHIFT);
0114 }
0115
0116 return ctr;
0117 }
0118
0119 #endif
0120
0121 #endif