/* xref: /linux/arch/um/include/asm/cache.h (revision ca55b2fef3a9373fcfc30f82fd26bc7fccbda732) */
#ifndef __UM_CACHE_H
#define __UM_CACHE_H

/*
 * L1 cache geometry for User-Mode Linux.
 *
 * UML runs as a host process, so it has no real notion of its own cache
 * line size; these values only influence structure padding/alignment
 * (e.g. ____cacheline_aligned) rather than any hardware behavior.
 */

#if defined(CONFIG_UML_X86) && !defined(CONFIG_64BIT)
/* 32-bit x86 host: reuse the x86 Kconfig-selected cache line shift. */
# define L1_CACHE_SHIFT		(CONFIG_X86_L1_CACHE_SHIFT)
#elif defined(CONFIG_UML_X86) /* 64-bit */
# define L1_CACHE_SHIFT		6 /* Should be 7 on Intel */
#else
/* XXX: this was taken from x86, now it's completely random. Luckily only
 * affects SMP padding. */
# define L1_CACHE_SHIFT		5
#endif

/* Cache line size in bytes, derived from the shift chosen above. */
#define L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

#endif