xref: /linux/arch/loongarch/include/asm/cacheflush.h (revision c532de5a67a70f8533d495f8f2aaa9a0491c3ad0)
1 /* SPDX-License-Identifier: GPL-2.0 */
2 /*
3  * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
4  */
5 #ifndef _ASM_CACHEFLUSH_H
6 #define _ASM_CACHEFLUSH_H
7 
8 #include <linux/mm.h>
9 #include <asm/cpu-info.h>
10 #include <asm/cacheops.h>
11 
12 static inline bool cache_present(struct cache_desc *cdesc)
13 {
14 	return cdesc->flags & CACHE_PRESENT;
15 }
16 
17 static inline bool cache_private(struct cache_desc *cdesc)
18 {
19 	return cdesc->flags & CACHE_PRIVATE;
20 }
21 
22 static inline bool cache_inclusive(struct cache_desc *cdesc)
23 {
24 	return cdesc->flags & CACHE_INCLUSIVE;
25 }
26 
27 static inline unsigned int cpu_last_level_cache_line_size(void)
28 {
29 	int cache_present = boot_cpu_data.cache_leaves_present;
30 
31 	return boot_cpu_data.cache_leaves[cache_present - 1].linesz;
32 }
33 
34 asmlinkage void __flush_cache_all(void);
35 void local_flush_icache_range(unsigned long start, unsigned long end);
36 
/* Kernel and userspace icache flushes both use the local CPU flush. */
#define flush_icache_range	local_flush_icache_range
#define flush_icache_user_range	local_flush_icache_range

/*
 * The remaining flush operations are defined as no-ops — presumably
 * because the hardware keeps data caches coherent so no software
 * maintenance is required (NOTE(review): inferred from these no-op
 * definitions; confirm against the LoongArch architecture manual).
 */
#define flush_cache_all()				do { } while (0)
#define flush_cache_mm(mm)				do { } while (0)
#define flush_cache_dup_mm(mm)				do { } while (0)
#define flush_cache_range(vma, start, end)		do { } while (0)
#define flush_cache_page(vma, vmaddr, pfn)		do { } while (0)
#define flush_cache_vmap(start, end)			do { } while (0)
#define flush_cache_vunmap(start, end)			do { } while (0)
#define flush_icache_user_page(vma, page, addr, len)	do { } while (0)
#define flush_dcache_mmap_lock(mapping)			do { } while (0)
#define flush_dcache_mmap_unlock(mapping)		do { } while (0)
50 
/*
 * Issue a single CACOP cache-maintenance instruction.
 *
 * @op must be a compile-time constant (the "i" constraint embeds it as
 * an immediate); @addr selects the target line via the "ZC" memory
 * operand constraint. Kept as a macro rather than an inline function
 * precisely because the opcode has to be an immediate.
 */
#define cache_op(op, addr)						\
	__asm__ __volatile__(						\
	"	cacop	%0, %1					\n"	\
	:								\
	: "i" (op), "ZC" (*(unsigned char *)(addr)))
56 
/*
 * Write back and invalidate the cache line addressed by @addr in the
 * given cache leaf, using the Index_Writeback_Inv opcode for that leaf.
 *
 * The switch cannot be collapsed into a single cache_op() call because
 * the opcode must be a compile-time immediate (see cache_op above), so
 * each leaf needs its own literal case. Unknown @leaf values are
 * silently ignored.
 */
static inline void flush_cache_line(int leaf, unsigned long addr)
{
	switch (leaf) {
	case Cache_LEAF0:
		cache_op(Index_Writeback_Inv_LEAF0, addr);
		break;
	case Cache_LEAF1:
		cache_op(Index_Writeback_Inv_LEAF1, addr);
		break;
	case Cache_LEAF2:
		cache_op(Index_Writeback_Inv_LEAF2, addr);
		break;
	case Cache_LEAF3:
		cache_op(Index_Writeback_Inv_LEAF3, addr);
		break;
	case Cache_LEAF4:
		cache_op(Index_Writeback_Inv_LEAF4, addr);
		break;
	case Cache_LEAF5:
		cache_op(Index_Writeback_Inv_LEAF5, addr);
		break;
	default:
		break;
	}
}
82 
83 #include <asm-generic/cacheflush.h>
84 
85 #endif /* _ASM_CACHEFLUSH_H */
86