Back to home page

OSCL-LXR

 
 

    


/* SPDX-License-Identifier: GPL-2.0 */
/*
 *  arch/arm/include/asm/cache.h
 *
 *  L1 cache geometry and minimum-alignment constants for ARM.
 */
#ifndef __ASMARM_CACHE_H
#define __ASMARM_CACHE_H

/* L1 cache line size is fixed at build time via Kconfig. */
#define L1_CACHE_SHIFT		CONFIG_ARM_L1_CACHE_SHIFT
#define L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

/*
 * Memory returned by kmalloc() may be used for DMA, so we must make
 * sure that all such allocations are cache aligned. Otherwise,
 * unrelated code may cause parts of the buffer to be read into the
 * cache before the transfer is done, causing old data to be seen by
 * the CPU.
 */
#define ARCH_DMA_MINALIGN	L1_CACHE_BYTES

/*
 * With EABI on ARMv5 and above we must have 64-bit aligned slab pointers.
 */
#if defined(CONFIG_AEABI) && (__LINUX_ARM_ARCH__ >= 5)
#define ARCH_SLAB_MINALIGN	8
#endif

/* Place rarely-written data in its own section to reduce cache-line
 * contention with frequently-written data. */
#define __read_mostly __section(".data..read_mostly")

#endif /* __ASMARM_CACHE_H */