xref: /linux/arch/riscv/include/asm/errata_list.h (revision 8e07e0e3964ca4e23ce7b68e2096fe660a888942)
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2021 Sifive.
 */
#ifndef ASM_ERRATA_LIST_H
#define ASM_ERRATA_LIST_H

#include <asm/alternative.h>
#include <asm/csr.h>
#include <asm/insn-def.h>
#include <asm/hwcap.h>
#include <asm/vendorid_list.h>

#ifdef CONFIG_ERRATA_ANDES
#define ERRATA_ANDESTECH_NO_IOCP	0
#define ERRATA_ANDESTECH_NUMBER		1
#endif

#ifdef CONFIG_ERRATA_SIFIVE
#define	ERRATA_SIFIVE_CIP_453 0
#define	ERRATA_SIFIVE_CIP_1200 1
#define	ERRATA_SIFIVE_NUMBER 2
#endif

#ifdef CONFIG_ERRATA_THEAD
#define	ERRATA_THEAD_PBMT 0
#define	ERRATA_THEAD_CMO 1
#define	ERRATA_THEAD_PMU 2
#define	ERRATA_THEAD_NUMBER 3
#endif

#ifdef __ASSEMBLY__

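/*
 * SiFive CIP-453 workaround: when the erratum is enabled, these
 * alternatives replace the generic do_trap_insn_fault and do_page_fault
 * handler pointers with the sifive_cip_453_*_trp variants.
 */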
#define ALT_INSN_FAULT(x)						\
ALTERNATIVE(__stringify(RISCV_PTR do_trap_insn_fault),			\
	    __stringify(RISCV_PTR sifive_cip_453_insn_fault_trp),	\
	    SIFIVE_VENDOR_ID, ERRATA_SIFIVE_CIP_453,			\
	    CONFIG_ERRATA_SIFIVE_CIP_453)

#define ALT_PAGE_FAULT(x)						\
ALTERNATIVE(__stringify(RISCV_PTR do_page_fault),			\
	    __stringify(RISCV_PTR sifive_cip_453_page_fault_trp),	\
	    SIFIVE_VENDOR_ID, ERRATA_SIFIVE_CIP_453,			\
	    CONFIG_ERRATA_SIFIVE_CIP_453)
#else /* !__ASSEMBLY__ */

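/*
 * SiFive CIP-1200 workaround: the address-specific "sfence.vma %0" is
 * replaced with a full "sfence.vma". Note that the macro ignores its
 * argument and references the caller's local variable named "addr".
 */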
#define ALT_FLUSH_TLB_PAGE(x)						\
asm(ALTERNATIVE("sfence.vma %0", "sfence.vma", SIFIVE_VENDOR_ID,	\
		ERRATA_SIFIVE_CIP_1200, CONFIG_ERRATA_SIFIVE_CIP_1200)	\
		: : "r" (addr) : "memory")

/*
 * _val is marked as "will be overwritten", so it needs to be set to 0
 * in the default case.
 */
#define ALT_SVPBMT_SHIFT 61
#define ALT_THEAD_PBMT_SHIFT 59
#define ALT_SVPBMT(_val, prot)						\
asm(ALTERNATIVE_2("li %0, 0\t\nnop",					\
		  "li %0, %1\t\nslli %0,%0,%3", 0,			\
			RISCV_ISA_EXT_SVPBMT, CONFIG_RISCV_ISA_SVPBMT,	\
		  "li %0, %2\t\nslli %0,%0,%4", THEAD_VENDOR_ID,	\
			ERRATA_THEAD_PBMT, CONFIG_ERRATA_THEAD_PBMT)	\
		: "=r"(_val)						\
		: "I"(prot##_SVPBMT >> ALT_SVPBMT_SHIFT),		\
		  "I"(prot##_THEAD >> ALT_THEAD_PBMT_SHIFT),		\
		  "I"(ALT_SVPBMT_SHIFT),				\
		  "I"(ALT_THEAD_PBMT_SHIFT))
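
/*
 * Illustrative C equivalent of ALT_SVPBMT() (sketch only):
 *
 *	if (Svpbmt is available)
 *		_val = (prot##_SVPBMT >> ALT_SVPBMT_SHIFT) << ALT_SVPBMT_SHIFT;
 *	else if (the T-Head PBMT erratum applies)
 *		_val = (prot##_THEAD >> ALT_THEAD_PBMT_SHIFT) << ALT_THEAD_PBMT_SHIFT;
 *	else
 *		_val = 0;
 */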

#ifdef CONFIG_ERRATA_THEAD_PBMT
/*
 * IO/NOCACHE memory types are handled together with svpbmt,
 * so on T-Head chips, check if no other memory type is set,
 * and set the non-0 PMA type if applicable.
 */
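/*
 * Illustrative C equivalent (sketch only):
 *
 *	if (!(_val & _PAGE_MTMASK_THEAD))
 *		_val |= _PAGE_PMA_THEAD;
 */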
#define ALT_THEAD_PMA(_val)						\
asm volatile(ALTERNATIVE(						\
	__nops(7),							\
	"li      t3, %1\n\t"						\
	"slli    t3, t3, %3\n\t"					\
	"and     t3, %0, t3\n\t"					\
	"bne     t3, zero, 2f\n\t"					\
	"li      t3, %2\n\t"						\
	"slli    t3, t3, %3\n\t"					\
	"or      %0, %0, t3\n\t"					\
	"2:",  THEAD_VENDOR_ID,						\
		ERRATA_THEAD_PBMT, CONFIG_ERRATA_THEAD_PBMT)		\
	: "+r"(_val)							\
	: "I"(_PAGE_MTMASK_THEAD >> ALT_THEAD_PBMT_SHIFT),		\
	  "I"(_PAGE_PMA_THEAD >> ALT_THEAD_PBMT_SHIFT),			\
	  "I"(ALT_THEAD_PBMT_SHIFT)					\
	: "t3")
#else
#define ALT_THEAD_PMA(_val)
#endif

/*
 * th.dcache.ipa rs1 (invalidate, physical address)
 * | 31 - 25 | 24 - 20 | 19 - 15 | 14 - 12 | 11 - 7 | 6 - 0 |
 *   0000001    01010      rs1       000      00000  0001011
 * th.dcache.iva rs1 (invalidate, virtual address)
 *   0000001    00110      rs1       000      00000  0001011
 *
 * th.dcache.cpa rs1 (clean, physical address)
 * | 31 - 25 | 24 - 20 | 19 - 15 | 14 - 12 | 11 - 7 | 6 - 0 |
 *   0000001    01001      rs1       000      00000  0001011
 * th.dcache.cva rs1 (clean, virtual address)
 *   0000001    00101      rs1       000      00000  0001011
 *
 * th.dcache.cipa rs1 (clean then invalidate, physical address)
 * | 31 - 25 | 24 - 20 | 19 - 15 | 14 - 12 | 11 - 7 | 6 - 0 |
 *   0000001    01011      rs1       000      00000  0001011
 * th.dcache.civa rs1 (clean then invalidate, virtual address)
 *   0000001    00111      rs1       000      00000  0001011
 *
 * th.sync.s (make sure all cache operations finished)
 * | 31 - 25 | 24 - 20 | 19 - 15 | 14 - 12 | 11 - 7 | 6 - 0 |
 *   0000000    11001     00000      000      00000  0001011
 */
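/*
 * For example, THEAD_INVAL_A0 below encodes th.dcache.iva with rs1 = a0
 * (x10): (0b0000001 << 25) | (0b00110 << 20) | (10 << 15) | (0b000 << 12) |
 * (0b00000 << 7) | 0b0001011 = 0x0265000b. The other constants follow the
 * same pattern with the field values from the table above.
 */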
#define THEAD_INVAL_A0	".long 0x0265000b"
#define THEAD_CLEAN_A0	".long 0x0255000b"
#define THEAD_FLUSH_A0	".long 0x0275000b"
#define THEAD_SYNC_S	".long 0x0190000b"

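/*
 * ALT_CMO_OP() walks the cache lines from (_start rounded down to a
 * _cachesize boundary) up to (_start + _size) in _cachesize steps,
 * issuing either the standard Zicbom CBO_##_op instruction or the T-Head
 * vendor instruction for each line; the T-Head variant ends with
 * th.sync.s. When neither is available, the sequence degrades to nops.
 */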
#define ALT_CMO_OP(_op, _start, _size, _cachesize)			\
asm volatile(ALTERNATIVE_2(						\
	__nops(6),							\
	"mv a0, %1\n\t"							\
	"j 2f\n\t"							\
	"3:\n\t"							\
	CBO_##_op(a0)							\
	"add a0, a0, %0\n\t"						\
	"2:\n\t"							\
	"bltu a0, %2, 3b\n\t"						\
	"nop", 0, RISCV_ISA_EXT_ZICBOM, CONFIG_RISCV_ISA_ZICBOM,	\
	"mv a0, %1\n\t"							\
	"j 2f\n\t"							\
	"3:\n\t"							\
	THEAD_##_op##_A0 "\n\t"						\
	"add a0, a0, %0\n\t"						\
	"2:\n\t"							\
	"bltu a0, %2, 3b\n\t"						\
	THEAD_SYNC_S, THEAD_VENDOR_ID,					\
			ERRATA_THEAD_CMO, CONFIG_ERRATA_THEAD_CMO)	\
	: : "r"(_cachesize),						\
	    "r"((unsigned long)(_start) & ~((_cachesize) - 1UL)),	\
	    "r"((unsigned long)(_start) + (_size))			\
	: "a0")

#define THEAD_C9XX_RV_IRQ_PMU			17
#define THEAD_C9XX_CSR_SCOUNTEROF		0x5c5

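/*
 * ALT_SBI_PMU_OVERFLOW() reads the counter-overflow CSR: the standard
 * Sscofpmf SSCOUNTOVF register by default, or the T-Head C9xx vendor
 * CSR SCOUNTEROF (0x5c5) when the PMU erratum applies.
 */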
#define ALT_SBI_PMU_OVERFLOW(__ovl)					\
asm volatile(ALTERNATIVE(						\
	"csrr %0, " __stringify(CSR_SSCOUNTOVF),			\
	"csrr %0, " __stringify(THEAD_C9XX_CSR_SCOUNTEROF),		\
		THEAD_VENDOR_ID, ERRATA_THEAD_PMU,			\
		CONFIG_ERRATA_THEAD_PMU)				\
	: "=r" (__ovl) :						\
	: "memory")

#endif /* __ASSEMBLY__ */

#endif