/* arch/mips/include/asm/cpu-features.h (xref revision feb4eb060c3aecc3c5076bebe699cd09f1133c41) */
1 /*
2  * This file is subject to the terms and conditions of the GNU General Public
3  * License.  See the file "COPYING" in the main directory of this archive
4  * for more details.
5  *
6  * Copyright (C) 2003, 2004 Ralf Baechle
7  * Copyright (C) 2004  Maciej W. Rozycki
8  */
9 #ifndef __ASM_CPU_FEATURES_H
10 #define __ASM_CPU_FEATURES_H
11 
12 #include <asm/cpu.h>
13 #include <asm/cpu-info.h>
14 #include <asm/isa-rev.h>
15 #include <cpu-feature-overrides.h>
16 
/*
 * Low-level accessors: test ASE / ISA-level / option bits that cpu_probe()
 * recorded in the boot CPU's cpu_data[0] (see the SMP superset assumption
 * further below).
 */
#define __ase(ase)			(cpu_data[0].ases & (ase))
#define __isa(isa)			(cpu_data[0].isa_level & (isa))
#define __opt(opt)			(cpu_data[0].options & (opt))

/*
 * Check if MIPS_ISA_REV is >= isa *and* an option or ASE is detected during
 * boot (typically by cpu_probe()).
 *
 * Note that these should only be used in cases where a kernel built for an
 * older ISA *cannot* run on a CPU which supports the feature in question. For
 * example this may be used for features introduced with MIPSr6, since a kernel
 * built for an older ISA cannot run on a MIPSr6 CPU. This should not be used
 * for MIPSr2 features however, since a MIPSr1 or earlier kernel might run on a
 * MIPSr2 CPU.
 */
#define __isa_ge_and_ase(isa, ase)	((MIPS_ISA_REV >= (isa)) && __ase(ase))
#define __isa_ge_and_opt(isa, opt)	((MIPS_ISA_REV >= (isa)) && __opt(opt))

/*
 * Check if MIPS_ISA_REV is >= isa *or* an option or ASE is detected during
 * boot (typically by cpu_probe()).
 *
 * These are for use with features that are optional up until a particular ISA
 * revision & then become required.
 */
#define __isa_ge_or_ase(isa, ase)	((MIPS_ISA_REV >= (isa)) || __ase(ase))
#define __isa_ge_or_opt(isa, opt)	((MIPS_ISA_REV >= (isa)) || __opt(opt))

/*
 * Check if MIPS_ISA_REV is < isa *and* an option or ASE is detected during
 * boot (typically by cpu_probe()).
 *
 * These are for use with features that are optional up until a particular ISA
 * revision & are then removed - ie. no longer present in any CPU implementing
 * the given ISA revision.
 */
#define __isa_lt_and_ase(isa, ase)	((MIPS_ISA_REV < (isa)) && __ase(ase))
#define __isa_lt_and_opt(isa, opt)	((MIPS_ISA_REV < (isa)) && __opt(opt))

/*
 * Similarly allow for ISA level checks that take into account knowledge of the
 * ISA targeted by the kernel build, provided by MIPS_ISA_REV.
 */
#define __isa_ge_and_flag(isa, flag)	((MIPS_ISA_REV >= (isa)) && __isa(flag))
#define __isa_ge_or_flag(isa, flag)	((MIPS_ISA_REV >= (isa)) || __isa(flag))
#define __isa_lt_and_flag(isa, flag)	((MIPS_ISA_REV < (isa)) && __isa(flag))
#define __isa_range(ge, lt) \
	((MIPS_ISA_REV >= (ge)) && (MIPS_ISA_REV < (lt)))
#define __isa_range_or_flag(ge, lt, flag) \
	(__isa_range(ge, lt) || ((MIPS_ISA_REV < (lt)) && __isa(flag)))
67 
/*
 * SMP assumption: Options of CPU 0 are a superset of all processors.
 * This is true for all known MIPS systems.
 *
 * Each macro below may be overridden by a platform's
 * <cpu-feature-overrides.h> (included above), hence the #ifndef guards.
 */
#ifndef cpu_has_tlb
#define cpu_has_tlb		__opt(MIPS_CPU_TLB)
#endif
#ifndef cpu_has_ftlb
#define cpu_has_ftlb		__opt(MIPS_CPU_FTLB)
#endif
#ifndef cpu_has_tlbinv
#define cpu_has_tlbinv		__opt(MIPS_CPU_TLBINV)
#endif
#ifndef cpu_has_segments
#define cpu_has_segments	__opt(MIPS_CPU_SEGMENTS)
#endif
#ifndef cpu_has_eva
#define cpu_has_eva		__opt(MIPS_CPU_EVA)
#endif
#ifndef cpu_has_htw
#define cpu_has_htw		__opt(MIPS_CPU_HTW)
#endif
#ifndef cpu_has_ldpte
#define cpu_has_ldpte		__opt(MIPS_CPU_LDPTE)
#endif
#ifndef cpu_has_rixiex
#define cpu_has_rixiex		__isa_ge_or_opt(6, MIPS_CPU_RIXIEX)
#endif
#ifndef cpu_has_maar
#define cpu_has_maar		__opt(MIPS_CPU_MAAR)
#endif
#ifndef cpu_has_rw_llb
#define cpu_has_rw_llb		__isa_ge_or_opt(6, MIPS_CPU_RW_LLB)
#endif

/*
 * For the moment we don't consider R6000 and R8000 so we can assume that
 * anything that doesn't support R4000-style exceptions and interrupts is
 * R3000-like.  Users should still treat these two macro definitions as
 * opaque.
 */
#ifndef cpu_has_3kex
#define cpu_has_3kex		(!cpu_has_4kex)
#endif
#ifndef cpu_has_4kex
#define cpu_has_4kex		__isa_ge_or_opt(1, MIPS_CPU_4KEX)
#endif
#ifndef cpu_has_3k_cache
#define cpu_has_3k_cache	__isa_lt_and_opt(1, MIPS_CPU_3K_CACHE)
#endif
/* No supported CPU implements R6000/R8000-style caches. */
#define cpu_has_6k_cache	0
#define cpu_has_8k_cache	0
#ifndef cpu_has_4k_cache
#define cpu_has_4k_cache	__isa_ge_or_opt(1, MIPS_CPU_4K_CACHE)
#endif
#ifndef cpu_has_tx39_cache
#define cpu_has_tx39_cache	__opt(MIPS_CPU_TX39_CACHE)
#endif
#ifndef cpu_has_octeon_cache
#define cpu_has_octeon_cache	0
#endif
/* Don't override `cpu_has_fpu' to 1 or the "nofpu" option won't work.  */
#ifndef cpu_has_fpu
# ifdef CONFIG_MIPS_FP_SUPPORT
#  define cpu_has_fpu		(current_cpu_data.options & MIPS_CPU_FPU)
	/*
	 * NOTE(review): raw_cpu_has_fpu reads raw_current_cpu_data —
	 * presumably the variant usable in preemption-unsafe contexts;
	 * confirm against the cpu-info.h definitions.
	 */
#  define raw_cpu_has_fpu	(raw_current_cpu_data.options & MIPS_CPU_FPU)
# else
#  define cpu_has_fpu		0
#  define raw_cpu_has_fpu	0
# endif
#else
# define raw_cpu_has_fpu	cpu_has_fpu
#endif
#ifndef cpu_has_32fpr
#define cpu_has_32fpr		__isa_ge_or_opt(1, MIPS_CPU_32FPR)
#endif
#ifndef cpu_has_counter
#define cpu_has_counter		__opt(MIPS_CPU_COUNTER)
#endif
#ifndef cpu_has_watch
#define cpu_has_watch		__opt(MIPS_CPU_WATCH)
#endif
#ifndef cpu_has_divec
#define cpu_has_divec		__isa_ge_or_opt(1, MIPS_CPU_DIVEC)
#endif
#ifndef cpu_has_vce
#define cpu_has_vce		__opt(MIPS_CPU_VCE)
#endif
#ifndef cpu_has_cache_cdex_p
#define cpu_has_cache_cdex_p	__opt(MIPS_CPU_CACHE_CDEX_P)
#endif
#ifndef cpu_has_cache_cdex_s
#define cpu_has_cache_cdex_s	__opt(MIPS_CPU_CACHE_CDEX_S)
#endif
#ifndef cpu_has_prefetch
#define cpu_has_prefetch	__isa_ge_or_opt(1, MIPS_CPU_PREFETCH)
#endif
#ifndef cpu_has_mcheck
#define cpu_has_mcheck		__isa_ge_or_opt(1, MIPS_CPU_MCHECK)
#endif
#ifndef cpu_has_ejtag
#define cpu_has_ejtag		__opt(MIPS_CPU_EJTAG)
#endif
#ifndef cpu_has_llsc
#define cpu_has_llsc		__isa_ge_or_opt(1, MIPS_CPU_LLSC)
#endif
#ifndef cpu_has_bp_ghist
#define cpu_has_bp_ghist	__opt(MIPS_CPU_BP_GHIST)
#endif
/* Whether the kernel's atomics use LL/SC; defaults to hardware capability. */
#ifndef kernel_uses_llsc
#define kernel_uses_llsc	cpu_has_llsc
#endif
#ifndef cpu_has_guestctl0ext
#define cpu_has_guestctl0ext	__opt(MIPS_CPU_GUESTCTL0EXT)
#endif
#ifndef cpu_has_guestctl1
#define cpu_has_guestctl1	__opt(MIPS_CPU_GUESTCTL1)
#endif
#ifndef cpu_has_guestctl2
#define cpu_has_guestctl2	__opt(MIPS_CPU_GUESTCTL2)
#endif
#ifndef cpu_has_guestid
#define cpu_has_guestid		__opt(MIPS_CPU_GUESTID)
#endif
#ifndef cpu_has_drg
#define cpu_has_drg		__opt(MIPS_CPU_DRG)
#endif
/*
 * The following ASEs were removed in MIPSr6, hence the __isa_lt_and_ase(6,...)
 * form: a kernel built for r6 or later can hard-code them to 0.
 */
#ifndef cpu_has_mips16
#define cpu_has_mips16		__isa_lt_and_ase(6, MIPS_ASE_MIPS16)
#endif
#ifndef cpu_has_mips16e2
#define cpu_has_mips16e2	__isa_lt_and_ase(6, MIPS_ASE_MIPS16E2)
#endif
#ifndef cpu_has_mdmx
#define cpu_has_mdmx		__isa_lt_and_ase(6, MIPS_ASE_MDMX)
#endif
#ifndef cpu_has_mips3d
#define cpu_has_mips3d		__isa_lt_and_ase(6, MIPS_ASE_MIPS3D)
#endif
#ifndef cpu_has_smartmips
#define cpu_has_smartmips	__isa_lt_and_ase(6, MIPS_ASE_SMARTMIPS)
#endif

/* RIXI became mandatory in MIPSr6. */
#ifndef cpu_has_rixi
#define cpu_has_rixi		__isa_ge_or_opt(6, MIPS_CPU_RIXI)
#endif

#ifndef cpu_has_mmips
	/* The compiler predefines __mips_micromips when the kernel itself is
	 * built as microMIPS code, in which case support is certain. */
# if defined(__mips_micromips)
#  define cpu_has_mmips		1
# elif defined(CONFIG_SYS_SUPPORTS_MICROMIPS)
#  define cpu_has_mmips		__opt(MIPS_CPU_MICROMIPS)
# else
#  define cpu_has_mmips		0
# endif
#endif
224 
#ifndef cpu_has_lpa
#define cpu_has_lpa		__opt(MIPS_CPU_LPA)
#endif
#ifndef cpu_has_mvh
#define cpu_has_mvh		__opt(MIPS_CPU_MVH)
#endif
/* XPA (extended physical addressing) needs both LPA and MVH support. */
#ifndef cpu_has_xpa
#define cpu_has_xpa		(cpu_has_lpa && cpu_has_mvh)
#endif
/* Cache properties are probed into cpu_data[0].{i,d}cache.flags at boot. */
#ifndef cpu_has_vtag_icache
#define cpu_has_vtag_icache	(cpu_data[0].icache.flags & MIPS_CACHE_VTAG)
#endif
#ifndef cpu_has_dc_aliases
#define cpu_has_dc_aliases	(cpu_data[0].dcache.flags & MIPS_CACHE_ALIASES)
#endif
#ifndef cpu_has_ic_fills_f_dc
#define cpu_has_ic_fills_f_dc	(cpu_data[0].icache.flags & MIPS_CACHE_IC_F_DC)
#endif
#ifndef cpu_has_pindexed_dcache
#define cpu_has_pindexed_dcache	(cpu_data[0].dcache.flags & MIPS_CACHE_PINDEX)
#endif
#ifndef cpu_has_local_ebase
#define cpu_has_local_ebase	1
#endif

/*
 * I-Cache snoops remote store.	 This only matters on SMP.  Some multiprocessors
 * such as the R10000 have I-Caches that snoop local stores; the embedded ones
 * don't.  For maintaining I-cache coherency this means we need to flush the
 * D-cache all the way back to wherever the I-cache does refills from, so the
 * I-cache has a chance to see the new data at all.  Then we have to flush the
 * I-cache also.
 * Note we may have been rescheduled and may no longer be running on the CPU
 * that did the store so we can't optimize this into only doing the flush on
 * the local CPU.
 */
#ifndef cpu_icache_snoops_remote_store
#ifdef CONFIG_SMP
#define cpu_icache_snoops_remote_store	(cpu_data[0].icache.flags & MIPS_IC_SNOOPS_REMOTE)
#else
#define cpu_icache_snoops_remote_store	1
#endif
#endif
268 
/*
 * ISA level checks.  Pre-r6 levels are cumulative and were dropped by MIPSr6,
 * hence the __isa_lt_and_flag(6, ...) forms; the r6 variants use the ge
 * forms so an r6-targeted build folds them to compile-time constants.
 */
#ifndef cpu_has_mips_1
# define cpu_has_mips_1		(MIPS_ISA_REV < 6)
#endif
#ifndef cpu_has_mips_2
# define cpu_has_mips_2		__isa_lt_and_flag(6, MIPS_CPU_ISA_II)
#endif
#ifndef cpu_has_mips_3
# define cpu_has_mips_3		__isa_lt_and_flag(6, MIPS_CPU_ISA_III)
#endif
#ifndef cpu_has_mips_4
# define cpu_has_mips_4		__isa_lt_and_flag(6, MIPS_CPU_ISA_IV)
#endif
#ifndef cpu_has_mips_5
# define cpu_has_mips_5		__isa_lt_and_flag(6, MIPS_CPU_ISA_V)
#endif
#ifndef cpu_has_mips32r1
# define cpu_has_mips32r1	__isa_range_or_flag(1, 6, MIPS_CPU_ISA_M32R1)
#endif
#ifndef cpu_has_mips32r2
# define cpu_has_mips32r2	__isa_range_or_flag(2, 6, MIPS_CPU_ISA_M32R2)
#endif
#ifndef cpu_has_mips32r6
# define cpu_has_mips32r6	__isa_ge_or_flag(6, MIPS_CPU_ISA_M32R6)
#endif
#ifndef cpu_has_mips64r1
# define cpu_has_mips64r1	__isa_range_or_flag(1, 6, MIPS_CPU_ISA_M64R1)
#endif
#ifndef cpu_has_mips64r2
# define cpu_has_mips64r2	__isa_range_or_flag(2, 6, MIPS_CPU_ISA_M64R2)
#endif
#ifndef cpu_has_mips64r6
# define cpu_has_mips64r6	__isa_ge_and_flag(6, MIPS_CPU_ISA_M64R6)
#endif

/*
 * Shortcuts ...
 *
 * These deliberately use bitwise | rather than ||: every operand expands to a
 * 0/1-valued comparison or && expression, so the result is identical.
 */
#define cpu_has_mips_2_3_4_5	(cpu_has_mips_2 | cpu_has_mips_3_4_5)
#define cpu_has_mips_3_4_5	(cpu_has_mips_3 | cpu_has_mips_4_5)
#define cpu_has_mips_4_5	(cpu_has_mips_4 | cpu_has_mips_5)

#define cpu_has_mips_2_3_4_5_r	(cpu_has_mips_2 | cpu_has_mips_3_4_5_r)
#define cpu_has_mips_3_4_5_r	(cpu_has_mips_3 | cpu_has_mips_4_5_r)
#define cpu_has_mips_4_5_r	(cpu_has_mips_4 | cpu_has_mips_5_r)
#define cpu_has_mips_5_r	(cpu_has_mips_5 | cpu_has_mips_r)

#define cpu_has_mips_3_4_5_64_r2_r6					\
				(cpu_has_mips_3 | cpu_has_mips_4_5_64_r2_r6)
#define cpu_has_mips_4_5_64_r2_r6					\
				(cpu_has_mips_4_5 | cpu_has_mips64r1 |	\
				 cpu_has_mips_r2 | cpu_has_mips_r6)

#define cpu_has_mips32	(cpu_has_mips32r1 | cpu_has_mips32r2 | cpu_has_mips32r6)
#define cpu_has_mips64	(cpu_has_mips64r1 | cpu_has_mips64r2 | cpu_has_mips64r6)
#define cpu_has_mips_r1 (cpu_has_mips32r1 | cpu_has_mips64r1)
#define cpu_has_mips_r2 (cpu_has_mips32r2 | cpu_has_mips64r2)
#define cpu_has_mips_r6	(cpu_has_mips32r6 | cpu_has_mips64r6)
#define cpu_has_mips_r	(cpu_has_mips32r1 | cpu_has_mips32r2 | \
			 cpu_has_mips32r6 | cpu_has_mips64r1 | \
			 cpu_has_mips64r2 | cpu_has_mips64r6)

/* MIPSR2 and MIPSR6 have a lot of similarities */
#define cpu_has_mips_r2_r6	(cpu_has_mips_r2 | cpu_has_mips_r6)
332 
/*
 * cpu_has_mips_r2_exec_hazard - return if IHB is required on current processor
 *
 * Returns non-zero value if the current processor implementation requires
 * an IHB instruction to deal with an instruction hazard as per MIPS R2
 * architecture specification, zero otherwise.
 *
 * The cores listed in the switch below are known not to need IHB; anything
 * else conservatively defaults to requiring it.
 */
#ifndef cpu_has_mips_r2_exec_hazard
#define cpu_has_mips_r2_exec_hazard					\
({									\
	int __res;							\
									\
	switch (current_cpu_type()) {					\
	case CPU_M14KC:							\
	case CPU_74K:							\
	case CPU_1074K:							\
	case CPU_PROAPTIV:						\
	case CPU_P5600:							\
	case CPU_M5150:							\
	case CPU_QEMU_GENERIC:						\
	case CPU_CAVIUM_OCTEON:						\
	case CPU_CAVIUM_OCTEON_PLUS:					\
	case CPU_CAVIUM_OCTEON2:					\
	case CPU_CAVIUM_OCTEON3:					\
		__res = 0;						\
		break;							\
									\
	default:							\
		__res = 1;						\
	}								\
									\
	__res;								\
})
#endif
367 
/*
 * MIPS32, MIPS64, VR5500, IDT32332, IDT32334 and maybe a few other
 * pre-MIPS32/MIPS64 processors have CLO, CLZ.	The IDT RC64574 is 64-bit and
 * has CLO and CLZ but not DCLO nor DCLZ.  For 64-bit kernels
 * cpu_has_clo_clz also indicates the availability of DCLO and DCLZ.
 */
#ifndef cpu_has_clo_clz
#define cpu_has_clo_clz	cpu_has_mips_r
#endif

/*
 * MIPS32 R2, MIPS64 R2, Loongson 3A and Octeon have WSBH.
 * MIPS64 R2, Loongson 3A and Octeon have WSBH, DSBH and DSHD.
 * This indicates the availability of WSBH and in case of 64 bit CPUs also
 * DSBH and DSHD.
 */
#ifndef cpu_has_wsbh
#define cpu_has_wsbh		cpu_has_mips_r2
#endif

#ifndef cpu_has_dsp
#define cpu_has_dsp		__ase(MIPS_ASE_DSP)
#endif

#ifndef cpu_has_dsp2
#define cpu_has_dsp2		__ase(MIPS_ASE_DSP2P)
#endif

#ifndef cpu_has_dsp3
#define cpu_has_dsp3		__ase(MIPS_ASE_DSP3)
#endif

/* Loongson vendor extensions, probed as ASE bits. */
#ifndef cpu_has_loongson_mmi
#define cpu_has_loongson_mmi		__ase(MIPS_ASE_LOONGSON_MMI)
#endif

#ifndef cpu_has_loongson_cam
#define cpu_has_loongson_cam		__ase(MIPS_ASE_LOONGSON_CAM)
#endif

#ifndef cpu_has_loongson_ext
#define cpu_has_loongson_ext		__ase(MIPS_ASE_LOONGSON_EXT)
#endif

#ifndef cpu_has_loongson_ext2
#define cpu_has_loongson_ext2		__ase(MIPS_ASE_LOONGSON_EXT2)
#endif

/* The MT ASE only exists pre-r6; the VP option only exists from r6 on. */
#ifndef cpu_has_mipsmt
#define cpu_has_mipsmt		__isa_lt_and_ase(6, MIPS_ASE_MIPSMT)
#endif

#ifndef cpu_has_vp
#define cpu_has_vp		__isa_ge_and_opt(6, MIPS_CPU_VP)
#endif

/* UserLocal (ULRI) register; mandatory from MIPSr6. */
#ifndef cpu_has_userlocal
#define cpu_has_userlocal	__isa_ge_or_opt(6, MIPS_CPU_ULRI)
#endif
427 
/*
 * Bitness-dependent defaults: a 32-bit kernel may still run on a 64-bit
 * capable CPU (hence the isa_level probes), whereas a 64-bit kernel can
 * hard-code 64-bit capabilities to 1.
 */
#ifdef CONFIG_32BIT
# ifndef cpu_has_nofpuex
# define cpu_has_nofpuex	__isa_lt_and_opt(1, MIPS_CPU_NOFPUEX)
# endif
# ifndef cpu_has_64bits
# define cpu_has_64bits		(cpu_data[0].isa_level & MIPS_CPU_ISA_64BIT)
# endif
# ifndef cpu_has_64bit_zero_reg
# define cpu_has_64bit_zero_reg	(cpu_data[0].isa_level & MIPS_CPU_ISA_64BIT)
# endif
# ifndef cpu_has_64bit_gp_regs
# define cpu_has_64bit_gp_regs		0
# endif
# ifndef cpu_has_64bit_addresses
# define cpu_has_64bit_addresses	0
# endif
# ifndef cpu_vmbits
# define cpu_vmbits 31
# endif
#endif

#ifdef CONFIG_64BIT
# ifndef cpu_has_nofpuex
# define cpu_has_nofpuex		0
# endif
# ifndef cpu_has_64bits
# define cpu_has_64bits			1
# endif
# ifndef cpu_has_64bit_zero_reg
# define cpu_has_64bit_zero_reg		1
# endif
# ifndef cpu_has_64bit_gp_regs
# define cpu_has_64bit_gp_regs		1
# endif
# ifndef cpu_has_64bit_addresses
# define cpu_has_64bit_addresses	1
# endif
# ifndef cpu_vmbits
# define cpu_vmbits cpu_data[0].vmbits
# define __NEED_VMBITS_PROBE
# endif
#endif

/* Vectored interrupts / external interrupt controller: only probed when the
 * corresponding Kconfig support is enabled, otherwise forced to 0. */
#if defined(CONFIG_CPU_MIPSR2_IRQ_VI) && !defined(cpu_has_vint)
# define cpu_has_vint		__opt(MIPS_CPU_VINT)
#elif !defined(cpu_has_vint)
# define cpu_has_vint			0
#endif

#if defined(CONFIG_CPU_MIPSR2_IRQ_EI) && !defined(cpu_has_veic)
# define cpu_has_veic		__opt(MIPS_CPU_VEIC)
#elif !defined(cpu_has_veic)
# define cpu_has_veic			0
#endif
482 
#ifndef cpu_has_inclusive_pcaches
#define cpu_has_inclusive_pcaches	__opt(MIPS_CPU_INCLUSIVE_CACHES)
#endif

/* Cache line sizes in bytes, probed into cpu_data[0] at boot. */
#ifndef cpu_dcache_line_size
#define cpu_dcache_line_size()	cpu_data[0].dcache.linesz
#endif
#ifndef cpu_icache_line_size
#define cpu_icache_line_size()	cpu_data[0].icache.linesz
#endif
#ifndef cpu_scache_line_size
#define cpu_scache_line_size()	cpu_data[0].scache.linesz
#endif
#ifndef cpu_tcache_line_size
#define cpu_tcache_line_size()	cpu_data[0].tcache.linesz
#endif

/* Platform-defined implementation-specific HWREna bits; none by default. */
#ifndef cpu_hwrena_impl_bits
#define cpu_hwrena_impl_bits		0
#endif

#ifndef cpu_has_perf_cntr_intr_bit
#define cpu_has_perf_cntr_intr_bit	__opt(MIPS_CPU_PCI)
#endif

#ifndef cpu_has_vz
#define cpu_has_vz		__ase(MIPS_ASE_VZ)
#endif

/* MSA is only probed when the kernel is built with MSA support. */
#if defined(CONFIG_CPU_HAS_MSA) && !defined(cpu_has_msa)
# define cpu_has_msa		__ase(MIPS_ASE_MSA)
#elif !defined(cpu_has_msa)
# define cpu_has_msa		0
#endif

#ifndef cpu_has_ufr
# define cpu_has_ufr		__opt(MIPS_CPU_UFR)
#endif

#ifndef cpu_has_fre
# define cpu_has_fre		__opt(MIPS_CPU_FRE)
#endif

#ifndef cpu_has_cdmm
# define cpu_has_cdmm		__opt(MIPS_CPU_CDMM)
#endif

#ifndef cpu_has_small_pages
# define cpu_has_small_pages	__opt(MIPS_CPU_SP)
#endif

/* Legacy NaN encoding was removed in r6; 2008 NaN is mandatory from r6. */
#ifndef cpu_has_nan_legacy
#define cpu_has_nan_legacy	__isa_lt_and_opt(6, MIPS_CPU_NAN_LEGACY)
#endif
#ifndef cpu_has_nan_2008
#define cpu_has_nan_2008	__isa_ge_or_opt(6, MIPS_CPU_NAN_2008)
#endif

#ifndef cpu_has_ebase_wg
# define cpu_has_ebase_wg	__opt(MIPS_CPU_EBASE_WG)
#endif

/* BadInstr/BadInstrP registers are mandatory from MIPSr6. */
#ifndef cpu_has_badinstr
# define cpu_has_badinstr	__isa_ge_or_opt(6, MIPS_CPU_BADINSTR)
#endif

#ifndef cpu_has_badinstrp
# define cpu_has_badinstrp	__isa_ge_or_opt(6, MIPS_CPU_BADINSTRP)
#endif

#ifndef cpu_has_contextconfig
# define cpu_has_contextconfig	__opt(MIPS_CPU_CTXTC)
#endif

#ifndef cpu_has_perf
# define cpu_has_perf		__opt(MIPS_CPU_PERF)
#endif
560 
#ifdef CONFIG_SMP
/*
 * Some systems share FTLB RAMs between threads within a core (siblings in
 * kernel parlance). This means that FTLB entries may become invalid at almost
 * any point when an entry is evicted due to a sibling thread writing an entry
 * to the shared FTLB RAM.
 *
 * This is only relevant to SMP systems, and the only systems that exhibit this
 * property implement MIPSr6 or higher so we constrain support for this to
 * kernels that will run on such systems.
 */
# ifndef cpu_has_shared_ftlb_ram
#  define cpu_has_shared_ftlb_ram \
	__isa_ge_and_opt(6, MIPS_CPU_SHARED_FTLB_RAM)
# endif

/*
 * Some systems take this a step further & share FTLB entries between siblings.
 * This is implemented as TLB writes happening as usual, but if an entry
 * written by a sibling exists in the shared FTLB for a translation which would
 * otherwise cause a TLB refill exception then the CPU will use the entry
 * written by its sibling rather than triggering a refill & writing a matching
 * TLB entry for itself.
 *
 * This is naturally only valid if a TLB entry is known to be suitable for use
 * on all siblings in a CPU, and so it only takes effect when MMIDs are in use
 * rather than ASIDs or when a TLB entry is marked global.
 */
# ifndef cpu_has_shared_ftlb_entries
#  define cpu_has_shared_ftlb_entries \
	__isa_ge_and_opt(6, MIPS_CPU_SHARED_FTLB_ENTRIES)
# endif
#endif /* SMP */

/* Fallbacks for !SMP builds (and platforms that didn't override above). */
#ifndef cpu_has_shared_ftlb_ram
# define cpu_has_shared_ftlb_ram 0
#endif
#ifndef cpu_has_shared_ftlb_entries
# define cpu_has_shared_ftlb_entries 0
#endif

#ifdef CONFIG_MIPS_MT_SMP
# define cpu_has_mipsmt_pertccounters \
	__isa_lt_and_opt(6, MIPS_CPU_MT_PER_TC_PERF_COUNTERS)
#else
# define cpu_has_mipsmt_pertccounters 0
#endif /* CONFIG_MIPS_MT_SMP */

/*
 * We only enable MMID support for configurations which natively support 64 bit
 * atomics because getting good performance from the allocator relies upon
 * efficient atomic64_*() functions.
 */
#ifndef cpu_has_mmid
# ifdef CONFIG_GENERIC_ATOMIC64
#  define cpu_has_mmid		0
# else
#  define cpu_has_mmid		__isa_ge_and_opt(6, MIPS_CPU_MMID)
# endif
#endif
621 
/*
 * Guest capabilities
 *
 * NOTE(review): bit n of guest.conf appears to indicate that the guest
 * context implements the ConfigN register — confirm against the VZ guest
 * probing code before relying on this reading.
 */
#ifndef cpu_guest_has_conf1
#define cpu_guest_has_conf1	(cpu_data[0].guest.conf & (1 << 1))
#endif
#ifndef cpu_guest_has_conf2
#define cpu_guest_has_conf2	(cpu_data[0].guest.conf & (1 << 2))
#endif
#ifndef cpu_guest_has_conf3
#define cpu_guest_has_conf3	(cpu_data[0].guest.conf & (1 << 3))
#endif
#ifndef cpu_guest_has_conf4
#define cpu_guest_has_conf4	(cpu_data[0].guest.conf & (1 << 4))
#endif
#ifndef cpu_guest_has_conf5
#define cpu_guest_has_conf5	(cpu_data[0].guest.conf & (1 << 5))
#endif
#ifndef cpu_guest_has_conf6
#define cpu_guest_has_conf6	(cpu_data[0].guest.conf & (1 << 6))
#endif
#ifndef cpu_guest_has_conf7
#define cpu_guest_has_conf7	(cpu_data[0].guest.conf & (1 << 7))
#endif
#ifndef cpu_guest_has_fpu
#define cpu_guest_has_fpu	(cpu_data[0].guest.options & MIPS_CPU_FPU)
#endif
#ifndef cpu_guest_has_watch
#define cpu_guest_has_watch	(cpu_data[0].guest.options & MIPS_CPU_WATCH)
#endif
#ifndef cpu_guest_has_contextconfig
#define cpu_guest_has_contextconfig (cpu_data[0].guest.options & MIPS_CPU_CTXTC)
#endif
#ifndef cpu_guest_has_segments
#define cpu_guest_has_segments	(cpu_data[0].guest.options & MIPS_CPU_SEGMENTS)
#endif
#ifndef cpu_guest_has_badinstr
#define cpu_guest_has_badinstr	(cpu_data[0].guest.options & MIPS_CPU_BADINSTR)
#endif
#ifndef cpu_guest_has_badinstrp
#define cpu_guest_has_badinstrp	(cpu_data[0].guest.options & MIPS_CPU_BADINSTRP)
#endif
#ifndef cpu_guest_has_htw
#define cpu_guest_has_htw	(cpu_data[0].guest.options & MIPS_CPU_HTW)
#endif
#ifndef cpu_guest_has_mvh
#define cpu_guest_has_mvh	(cpu_data[0].guest.options & MIPS_CPU_MVH)
#endif
#ifndef cpu_guest_has_msa
#define cpu_guest_has_msa	(cpu_data[0].guest.ases & MIPS_ASE_MSA)
#endif
/* Test availability of guest KScratch register n via the probed mask. */
#ifndef cpu_guest_has_kscr
#define cpu_guest_has_kscr(n)	(cpu_data[0].guest.kscratch_mask & (1u << (n)))
#endif
#ifndef cpu_guest_has_rw_llb
#define cpu_guest_has_rw_llb	(cpu_has_mips_r6 || (cpu_data[0].guest.options & MIPS_CPU_RW_LLB))
#endif
#ifndef cpu_guest_has_perf
#define cpu_guest_has_perf	(cpu_data[0].guest.options & MIPS_CPU_PERF)
#endif
#ifndef cpu_guest_has_maar
#define cpu_guest_has_maar	(cpu_data[0].guest.options & MIPS_CPU_MAAR)
#endif
#ifndef cpu_guest_has_userlocal
#define cpu_guest_has_userlocal	(cpu_data[0].guest.options & MIPS_CPU_ULRI)
#endif

/*
 * Guest dynamic capabilities: features tracked in guest.options_dyn /
 * guest.ases_dyn rather than guest.options / guest.ases.
 */
#ifndef cpu_guest_has_dyn_fpu
#define cpu_guest_has_dyn_fpu	(cpu_data[0].guest.options_dyn & MIPS_CPU_FPU)
#endif
#ifndef cpu_guest_has_dyn_watch
#define cpu_guest_has_dyn_watch	(cpu_data[0].guest.options_dyn & MIPS_CPU_WATCH)
#endif
#ifndef cpu_guest_has_dyn_contextconfig
#define cpu_guest_has_dyn_contextconfig (cpu_data[0].guest.options_dyn & MIPS_CPU_CTXTC)
#endif
#ifndef cpu_guest_has_dyn_perf
#define cpu_guest_has_dyn_perf	(cpu_data[0].guest.options_dyn & MIPS_CPU_PERF)
#endif
#ifndef cpu_guest_has_dyn_msa
#define cpu_guest_has_dyn_msa	(cpu_data[0].guest.ases_dyn & MIPS_ASE_MSA)
#endif
#ifndef cpu_guest_has_dyn_maar
#define cpu_guest_has_dyn_maar	(cpu_data[0].guest.options_dyn & MIPS_CPU_MAAR)
#endif
710 
711 #endif /* __ASM_CPU_FEATURES_H */
712