/*
 * Copyright (C) 2013 Imagination Technologies
 * Author: Paul Burton <paul.burton@imgtec.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation;  either version 2 of the  License, or (at your
 * option) any later version.
 */
#ifndef _ASM_MSA_H
#define _ASM_MSA_H

#include <asm/mipsregs.h>

#ifndef __ASSEMBLY__

extern void _save_msa(struct task_struct *);
extern void _restore_msa(struct task_struct *);
extern void _init_msa_upper(void);

static inline void enable_msa(void)
{
	if (cpu_has_msa) {
		set_c0_config5(MIPS_CONF5_MSAEN);
		enable_fpu_hazard();
	}
}

static inline void disable_msa(void)
{
	if (cpu_has_msa) {
		clear_c0_config5(MIPS_CONF5_MSAEN);
		disable_fpu_hazard();
	}
}

static inline int is_msa_enabled(void)
{
	if (!cpu_has_msa)
		return 0;

	return read_c0_config5() & MIPS_CONF5_MSAEN;
}
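
/*
 * A minimal usage sketch (illustrative, not part of the original header):
 * callers would typically check and enable MSA before touching MSA state,
 * then disable it again afterwards, e.g.
 *
 *	if (!is_msa_enabled())
 *		enable_msa();
 *	... access MSA vector/control registers ...
 *	disable_msa();
 */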

static inline int thread_msa_context_live(void)
{
	/*
	 * Check cpu_has_msa only if it's a constant. This will allow the
	 * compiler to optimise out code for CPUs without MSA without adding
	 * an extra redundant check for CPUs with MSA.
	 */
	if (__builtin_constant_p(cpu_has_msa) && !cpu_has_msa)
		return 0;

	return test_thread_flag(TIF_MSA_CTX_LIVE);
}

static inline void save_msa(struct task_struct *t)
{
	if (cpu_has_msa)
		_save_msa(t);
}

static inline void restore_msa(struct task_struct *t)
{
	if (cpu_has_msa)
		_restore_msa(t);
}

#ifdef TOOLCHAIN_SUPPORTS_MSA

#define __BUILD_MSA_CTL_REG(name, cs)				\
static inline unsigned int read_msa_##name(void)		\
{								\
	unsigned int reg;					\
	__asm__ __volatile__(					\
	"	.set	push\n"					\
	"	.set	msa\n"					\
	"	cfcmsa	%0, $" #cs "\n"				\
	"	.set	pop\n"					\
	: "=r"(reg));						\
	return reg;						\
}								\
								\
static inline void write_msa_##name(unsigned int val)		\
{								\
	__asm__ __volatile__(					\
	"	.set	push\n"					\
	"	.set	msa\n"					\
	"	ctcmsa	$" #cs ", %0\n"				\
	"	.set	pop\n"					\
	: : "r"(val));						\
}

#else /* !TOOLCHAIN_SUPPORTS_MSA */

/*
 * Define functions using .word for the c[ft]cmsa instructions in order to
 * allow compilation with toolchains that do not support MSA. Once all
 * toolchains in use support MSA these can be removed.
 */
#ifdef CONFIG_CPU_MICROMIPS
#define CFC_MSA_INSN	0x587e0056
#define CTC_MSA_INSN	0x583e0816
#else
#define CFC_MSA_INSN	0x787e0059
#define CTC_MSA_INSN	0x783e0816
#endif
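
/*
 * Note (descriptive, not part of the original header): the constants above
 * are base encodings of cfcmsa/ctcmsa that use $at ($1) as the GPR and leave
 * the control register field zero. The macro below ORs (cs << 11) into the
 * cfcmsa word and (cs << 6) into the ctcmsa word to select the MSA control
 * register, and transfers the value via $at, hence the .set noat and the
 * surrounding move instructions.
 */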

#define __BUILD_MSA_CTL_REG(name, cs)				\
static inline unsigned int read_msa_##name(void)		\
{								\
	unsigned int reg;					\
	__asm__ __volatile__(					\
	"	.set	push\n"					\
	"	.set	noat\n"					\
	"	.insn\n"					\
	"	.word	%1 | (" #cs " << 11)\n"			\
	"	move	%0, $1\n"				\
	"	.set	pop\n"					\
	: "=r"(reg) : "i"(CFC_MSA_INSN));			\
	return reg;						\
}								\
								\
static inline void write_msa_##name(unsigned int val)		\
{								\
	__asm__ __volatile__(					\
	"	.set	push\n"					\
	"	.set	noat\n"					\
	"	move	$1, %0\n"				\
	"	.insn\n"					\
	"	.word	%1 | (" #cs " << 6)\n"			\
	"	.set	pop\n"					\
	: : "r"(val), "i"(CTC_MSA_INSN));			\
}

#endif /* !TOOLCHAIN_SUPPORTS_MSA */

__BUILD_MSA_CTL_REG(ir, 0)
__BUILD_MSA_CTL_REG(csr, 1)
__BUILD_MSA_CTL_REG(access, 2)
__BUILD_MSA_CTL_REG(save, 3)
__BUILD_MSA_CTL_REG(modify, 4)
__BUILD_MSA_CTL_REG(request, 5)
__BUILD_MSA_CTL_REG(map, 6)
__BUILD_MSA_CTL_REG(unmap, 7)
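
/*
 * For example, __BUILD_MSA_CTL_REG(csr, 1) above expands to read_msa_csr()
 * and write_msa_csr() for MSA control register $1 (MSACSR). An illustrative
 * sketch (not part of the original header) that clears the MSACSR cause
 * bits, assuming MSA is enabled:
 *
 *	unsigned int csr = read_msa_csr();
 *
 *	write_msa_csr(csr & ~MSA_CSR_CAUSEF);
 */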

#endif /* !__ASSEMBLY__ */

#define MSA_IR		0
#define MSA_CSR		1
#define MSA_ACCESS	2
#define MSA_SAVE	3
#define MSA_MODIFY	4
#define MSA_REQUEST	5
#define MSA_MAP		6
#define MSA_UNMAP	7

/* MSA Implementation Register (MSAIR) */
#define MSA_IR_REVB		0
#define MSA_IR_REVF		(_ULCAST_(0xff) << MSA_IR_REVB)
#define MSA_IR_PROCB		8
#define MSA_IR_PROCF		(_ULCAST_(0xff) << MSA_IR_PROCB)
#define MSA_IR_WRPB		16
#define MSA_IR_WRPF		(_ULCAST_(0x1) << MSA_IR_WRPB)
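
/*
 * Illustrative field extraction (not part of the original header): the *B
 * macros are bit offsets and the *F macros the corresponding masks, so the
 * MSA revision can be read as, assuming MSA is enabled:
 *
 *	unsigned int rev = (read_msa_ir() & MSA_IR_REVF) >> MSA_IR_REVB;
 */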

/* MSA Control & Status Register (MSACSR) */
#define MSA_CSR_RMB		0
#define MSA_CSR_RMF		(_ULCAST_(0x3) << MSA_CSR_RMB)
#define MSA_CSR_RM_NEAREST	0
#define MSA_CSR_RM_TO_ZERO	1
#define MSA_CSR_RM_TO_POS	2
#define MSA_CSR_RM_TO_NEG	3
#define MSA_CSR_FLAGSB		2
#define MSA_CSR_FLAGSF		(_ULCAST_(0x1f) << MSA_CSR_FLAGSB)
#define MSA_CSR_FLAGS_IB	2
#define MSA_CSR_FLAGS_IF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_IB)
#define MSA_CSR_FLAGS_UB	3
#define MSA_CSR_FLAGS_UF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_UB)
#define MSA_CSR_FLAGS_OB	4
#define MSA_CSR_FLAGS_OF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_OB)
#define MSA_CSR_FLAGS_ZB	5
#define MSA_CSR_FLAGS_ZF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_ZB)
#define MSA_CSR_FLAGS_VB	6
#define MSA_CSR_FLAGS_VF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_VB)
#define MSA_CSR_ENABLESB	7
#define MSA_CSR_ENABLESF	(_ULCAST_(0x1f) << MSA_CSR_ENABLESB)
#define MSA_CSR_ENABLES_IB	7
#define MSA_CSR_ENABLES_IF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_IB)
#define MSA_CSR_ENABLES_UB	8
#define MSA_CSR_ENABLES_UF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_UB)
#define MSA_CSR_ENABLES_OB	9
#define MSA_CSR_ENABLES_OF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_OB)
#define MSA_CSR_ENABLES_ZB	10
#define MSA_CSR_ENABLES_ZF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_ZB)
#define MSA_CSR_ENABLES_VB	11
#define MSA_CSR_ENABLES_VF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_VB)
#define MSA_CSR_CAUSEB		12
#define MSA_CSR_CAUSEF		(_ULCAST_(0x3f) << MSA_CSR_CAUSEB)
#define MSA_CSR_CAUSE_IB	12
#define MSA_CSR_CAUSE_IF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_IB)
#define MSA_CSR_CAUSE_UB	13
#define MSA_CSR_CAUSE_UF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_UB)
#define MSA_CSR_CAUSE_OB	14
#define MSA_CSR_CAUSE_OF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_OB)
#define MSA_CSR_CAUSE_ZB	15
#define MSA_CSR_CAUSE_ZF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_ZB)
#define MSA_CSR_CAUSE_VB	16
#define MSA_CSR_CAUSE_VF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_VB)
#define MSA_CSR_CAUSE_EB	17
#define MSA_CSR_CAUSE_EF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_EB)
#define MSA_CSR_NXB		18
#define MSA_CSR_NXF		(_ULCAST_(0x1) << MSA_CSR_NXB)
#define MSA_CSR_FSB		24
#define MSA_CSR_FSF		(_ULCAST_(0x1) << MSA_CSR_FSB)
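
/*
 * Illustrative sketch (not part of the original header): the current
 * rounding mode can be extracted from MSACSR and compared against the
 * MSA_CSR_RM_* values, assuming MSA is enabled:
 *
 *	unsigned int rm = (read_msa_csr() & MSA_CSR_RMF) >> MSA_CSR_RMB;
 *
 *	if (rm == MSA_CSR_RM_TO_ZERO)
 *		...
 */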

#endif /* _ASM_MSA_H */