xref: /linux/arch/arm/include/asm/cache.h (revision 93d90ad708b8da6efc0e487b66111aa9db7f70c7)
/*
 *  arch/arm/include/asm/cache.h
 */
#ifndef __ASMARM_CACHE_H
#define __ASMARM_CACHE_H

#define L1_CACHE_SHIFT		CONFIG_ARM_L1_CACHE_SHIFT
#define L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)
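
/*
 * Worked example, for illustration only: with CONFIG_ARM_L1_CACHE_SHIFT=6,
 * the usual value for ARMv7 cores, L1_CACHE_BYTES works out to
 * 1 << 6 = 64 bytes per cache line.
 */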

/*
 * Memory returned by kmalloc() may be used for DMA, so we must make
 * sure that all such allocations are cache aligned. Otherwise,
 * unrelated code may cause parts of the buffer to be read into the
 * cache before the transfer is done, causing old data to be seen by
 * the CPU.
 */
#define ARCH_DMA_MINALIGN	L1_CACHE_BYTES
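
/*
 * Illustrative sketch, not a definition from this header: because
 * kmalloc() returns ARCH_DMA_MINALIGN-aligned memory, a driver can
 * map such a buffer for streaming DMA without it sharing a cache
 * line with unrelated data:
 *
 *	buf = kmalloc(size, GFP_KERNEL);
 *	handle = dma_map_single(dev, buf, size, DMA_FROM_DEVICE);
 *	(device fills buf)
 *	dma_unmap_single(dev, handle, size, DMA_FROM_DEVICE);
 *	kfree(buf);
 *
 * Here buf, handle, dev and size are hypothetical driver variables.
 */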

/*
 * With EABI on ARMv5 and above we must have 64-bit aligned slab pointers.
 */
#if defined(CONFIG_AEABI) && (__LINUX_ARM_ARCH__ >= 5)
#define ARCH_SLAB_MINALIGN 8
#endif
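
/*
 * Background sketch of the usual rationale: EABI gives 64-bit types
 * such as u64 a natural alignment of 8 bytes, and the compiler may
 * then access them with ldrd/strd, which pre-ARMv7 cores require to
 * be 64-bit aligned. Slab objects must therefore start on an 8-byte
 * boundary, e.g.:
 *
 *	struct stats { u64 packets; };
 *	struct stats *s = kmem_cache_alloc(stats_cache, GFP_KERNEL);
 *	s->packets++;
 *
 * where struct stats and stats_cache are hypothetical.
 */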

#define __read_mostly __attribute__((__section__(".data..read_mostly")))
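
/*
 * Usage sketch, illustrative only: annotate globals that are written
 * rarely but read on hot paths, so they are grouped together in
 * .data..read_mostly away from frequently-written data and their
 * cache lines stay clean:
 *
 *	static int sysctl_foo __read_mostly;
 *
 * sysctl_foo is a hypothetical variable name.
 */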

#endif