xref: /linux/arch/riscv/include/asm/errata_list.h (revision cb7e3669c683669d93139184adff68a7d9000536)
1 /* SPDX-License-Identifier: GPL-2.0-only */
2 /*
3  * Copyright (C) 2021 Sifive.
4  */
5 #ifndef ASM_ERRATA_LIST_H
6 #define ASM_ERRATA_LIST_H
7 
8 #include <asm/csr.h>
9 #include <asm/insn-def.h>
10 #include <asm/hwcap.h>
11 #include <asm/vendorid_list.h>
12 #include <asm/errata_list_vendors.h>
13 #include <asm/vendor_extensions/mips.h>
14 
15 #ifdef __ASSEMBLER__
16 
/*
 * Trap-vector entry for instruction faults: emits a pointer to
 * do_trap_insn_fault, patched at boot to the erratum handler
 * sifive_cip_453_insn_fault_trp on SiFive parts when the CIP-453
 * erratum applies (and CONFIG_ERRATA_SIFIVE_CIP_453 is enabled).
 * The (x) argument is unused; it keeps call sites uniform.
 */
17 #define ALT_INSN_FAULT(x)						\
18 ALTERNATIVE(__stringify(RISCV_PTR do_trap_insn_fault),			\
19 	    __stringify(RISCV_PTR sifive_cip_453_insn_fault_trp),	\
20 	    SIFIVE_VENDOR_ID, ERRATA_SIFIVE_CIP_453,			\
21 	    CONFIG_ERRATA_SIFIVE_CIP_453)
22 
/*
 * Trap-vector entry for page faults: emits a pointer to do_page_fault,
 * patched to sifive_cip_453_page_fault_trp on SiFive parts when the
 * CIP-453 erratum applies. The (x) argument is unused; it keeps call
 * sites uniform with the other vector-entry macros.
 */
23 #define ALT_PAGE_FAULT(x)						\
24 ALTERNATIVE(__stringify(RISCV_PTR do_page_fault),			\
25 	    __stringify(RISCV_PTR sifive_cip_453_page_fault_trp),	\
26 	    SIFIVE_VENDOR_ID, ERRATA_SIFIVE_CIP_453,			\
27 	    CONFIG_ERRATA_SIFIVE_CIP_453)
28 #else /* !__ASSEMBLER__ */
29 
/*
 * Flush all TLB entries for @asid ("sfence.vma x0, asid"). On SiFive
 * parts with the CIP-1200 erratum, the ASID-selective form is patched
 * to a full "sfence.vma" (flush everything) instead.
 */
30 #define ALT_SFENCE_VMA_ASID(asid)					\
31 asm(ALTERNATIVE("sfence.vma x0, %0", "sfence.vma", SIFIVE_VENDOR_ID,	\
32 		ERRATA_SIFIVE_CIP_1200, CONFIG_ERRATA_SIFIVE_CIP_1200)	\
33 		: : "r" (asid) : "memory")
34 
/*
 * Flush TLB entries for virtual address @addr ("sfence.vma addr").
 * On SiFive parts with the CIP-1200 erratum, the address-selective
 * form is patched to a full "sfence.vma" instead.
 */
35 #define ALT_SFENCE_VMA_ADDR(addr)					\
36 asm(ALTERNATIVE("sfence.vma %0", "sfence.vma", SIFIVE_VENDOR_ID,	\
37 		ERRATA_SIFIVE_CIP_1200, CONFIG_ERRATA_SIFIVE_CIP_1200)	\
38 		: : "r" (addr) : "memory")
39 
/*
 * Flush TLB entries for virtual address @addr within @asid
 * ("sfence.vma addr, asid"). On SiFive parts with the CIP-1200
 * erratum, the selective form is patched to a full "sfence.vma".
 */
40 #define ALT_SFENCE_VMA_ADDR_ASID(addr, asid)				\
41 asm(ALTERNATIVE("sfence.vma %0, %1", "sfence.vma", SIFIVE_VENDOR_ID,	\
42 		ERRATA_SIFIVE_CIP_1200, CONFIG_ERRATA_SIFIVE_CIP_1200)	\
43 		: : "r" (addr), "r" (asid) : "memory")
44 
/*
 * Emit a spin-wait hint: the standard RISC-V pause encoding by
 * default, patched to the MIPS-specific encoding (MIPS_PAUSE, from
 * <asm/vendor_extensions/mips.h>) on MIPS P8700 parts when
 * CONFIG_ERRATA_MIPS_P8700_PAUSE_OPCODE is enabled.
 */
45 #define ALT_RISCV_PAUSE()					\
46 asm(ALTERNATIVE(	\
47 		RISCV_PAUSE, /* original RISC-V pause insn */	\
48 		MIPS_PAUSE, /* replacement for MIPS P8700 */	\
49 		MIPS_VENDOR_ID, /* vendor ID to match */	\
50 		ERRATA_MIPS_P8700_PAUSE_OPCODE, /* patch_id */	\
51 		CONFIG_ERRATA_MIPS_P8700_PAUSE_OPCODE)	\
52 	: /* no outputs */	\
53 	: /* no inputs */	\
54 	: "memory")
55 
56 /*
57  * _val is marked as "will be overwritten", so need to set it to 0
58  * in the default case.
59  */
/* Bit position of the memory-type field for Svpbmt / T-Head MAE PTEs. */
60 #define ALT_SVPBMT_SHIFT 61
61 #define ALT_THEAD_MAE_SHIFT 59
/*
 * Load the page-based memory-type bits for @prot into @_val:
 *   - default:      0 (li %0, 0; nop)
 *   - Svpbmt CPUs:  prot##_SVPBMT, shifted into bit ALT_SVPBMT_SHIFT
 *   - T-Head MAE:   prot##_THEAD, shifted into bit ALT_THEAD_MAE_SHIFT
 * The constants are passed pre-shifted-down and re-shifted with slli at
 * run time — presumably so they fit the "I" immediate constraint of li;
 * TODO(review): confirm immediate-range rationale.
 */
62 #define ALT_SVPBMT(_val, prot)						\
63 asm(ALTERNATIVE_2("li %0, 0\t\nnop",					\
64 		  "li %0, %1\t\nslli %0,%0,%3", 0,			\
65 			RISCV_ISA_EXT_SVPBMT, CONFIG_RISCV_ISA_SVPBMT,	\
66 		  "li %0, %2\t\nslli %0,%0,%4", THEAD_VENDOR_ID,	\
67 			ERRATA_THEAD_MAE, CONFIG_ERRATA_THEAD_MAE)	\
68 		: "=r"(_val)						\
69 		: "I"(prot##_SVPBMT >> ALT_SVPBMT_SHIFT),		\
70 		  "I"(prot##_THEAD >> ALT_THEAD_MAE_SHIFT),		\
71 		  "I"(ALT_SVPBMT_SHIFT),				\
72 		  "I"(ALT_THEAD_MAE_SHIFT))
73 
74 #ifdef CONFIG_ERRATA_THEAD_MAE
/*
 * IO/NOCACHE memory types are handled together with svpbmt,
 * so on T-Head chips, check if no other memory type is set,
 * and set the non-0 PMA type if applicable.
 *
 * @_val is a PTE value, modified in place ("+r"): if none of the
 * _PAGE_MTMASK_THEAD bits are set, OR in _PAGE_PMA_THEAD. Patched to
 * 7 nops on everything except T-Head MAE parts. Clobbers t3.
 */
80 #define ALT_THEAD_PMA(_val)						\
81 asm volatile(ALTERNATIVE(						\
82 	__nops(7),							\
83 	"li      t3, %1\n\t"						\
84 	"slli    t3, t3, %3\n\t"					\
85 	"and     t3, %0, t3\n\t"					\
86 	"bne     t3, zero, 2f\n\t"					\
87 	"li      t3, %2\n\t"						\
88 	"slli    t3, t3, %3\n\t"					\
89 	"or      %0, %0, t3\n\t"					\
90 	"2:",  THEAD_VENDOR_ID,						\
91 		ERRATA_THEAD_MAE, CONFIG_ERRATA_THEAD_MAE)		\
92 	: "+r"(_val)							\
93 	: "I"(_PAGE_MTMASK_THEAD >> ALT_THEAD_MAE_SHIFT),		\
94 	  "I"(_PAGE_PMA_THEAD >> ALT_THEAD_MAE_SHIFT),			\
95 	  "I"(ALT_THEAD_MAE_SHIFT)					\
96 	: "t3")
97 #else
/* No-op when T-Head MAE errata support is compiled out. */
98 #define ALT_THEAD_PMA(_val)
99 #endif
100 
/*
 * Apply cache-block operation CBO_<_op> to the range [_start,
 * _start + _size): a0 starts at _start aligned down to the
 * @_cachesize boundary and advances by @_cachesize per iteration
 * until it reaches the (unaligned) end address. Patched to 5 nops
 * on CPUs without Zicbom. Clobbers a0; @_cachesize must be a
 * power of two for the alignment mask to be correct.
 */
101 #define ALT_CMO_OP(_op, _start, _size, _cachesize)			\
102 asm volatile(ALTERNATIVE(						\
103 	__nops(5),							\
104 	"mv a0, %1\n\t"							\
105 	"j 2f\n\t"							\
106 	"3:\n\t"							\
107 	CBO_##_op(a0)							\
108 	"add a0, a0, %0\n\t"						\
109 	"2:\n\t"							\
110 	"bltu a0, %2, 3b\n\t",						\
111 	0, RISCV_ISA_EXT_ZICBOM, CONFIG_RISCV_ISA_ZICBOM)		\
112 	: : "r"(_cachesize),						\
113 	    "r"((unsigned long)(_start) & ~((_cachesize) - 1UL)),	\
114 	    "r"((unsigned long)(_start) + (_size))			\
115 	: "a0")
116 
/*
 * T-Head C9xx PMU constants — names suggest the PMU overflow IRQ
 * number and a vendor counter-overflow CSR; TODO(review): confirm
 * against T-Head C9xx documentation.
 */
117 #define THEAD_C9XX_RV_IRQ_PMU			17
118 #define THEAD_C9XX_CSR_SCOUNTEROF		0x5c5
119 
120 #endif /* __ASSEMBLER__ */
121 
122 #endif
123