xref: /linux/arch/sh/include/asm/mmu_context_32.h (revision 95298d63c67673c654c08952672d016212b26054)
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_MMU_CONTEXT_32_H
#define __ASM_SH_MMU_CONTEXT_32_H

/*
 * Destroy context-related info for an mm_struct that is about
 * to be put to rest.
 */
static inline void destroy_context(struct mm_struct *mm)
{
	/* Do nothing */
}

#ifdef CONFIG_CPU_HAS_PTEAEX
/*
 * CPUs with the extended ASID register (PTEAEX) keep the ASID in its own
 * register, so programming it is a plain register write/read.
 */
static inline void set_asid(unsigned long asid)
{
	__raw_writel(asid, MMU_PTEAEX);
}

static inline unsigned long get_asid(void)
{
	return __raw_readl(MMU_PTEAEX) & MMU_CONTEXT_ASID_MASK;
}
#else
/*
 * Without PTEAEX the ASID shares MMU_PTEH with the VPN field, so updating
 * it is a read-modify-write: clear the low ASID byte and OR in the new
 * value while preserving the remaining bits.
 */
static inline void set_asid(unsigned long asid)
{
	unsigned long __dummy;

	__asm__ __volatile__ ("mov.l	%2, %0\n\t"
			      "and	%3, %0\n\t"
			      "or	%1, %0\n\t"
			      "mov.l	%0, %2"
			      : "=&r" (__dummy)
			      : "r" (asid), "m" (__m(MMU_PTEH)),
			        "r" (0xffffff00));
}

static inline unsigned long get_asid(void)
{
	unsigned long asid;

	/* Read MMU_PTEH and mask off everything but the ASID field. */
	__asm__ __volatile__ ("mov.l	%1, %0"
			      : "=r" (asid)
			      : "m" (__m(MMU_PTEH)));
	asid &= MMU_CONTEXT_ASID_MASK;
	return asid;
}
#endif /* CONFIG_CPU_HAS_PTEAEX */
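
/*
 * Illustrative sketch (not part of the original header): a caller that
 * installs a new ASID would typically mask it to the architectural ASID
 * bits before programming the hardware. The helper name below is
 * hypothetical; the real context-switch path lives in the shared SH
 * mmu_context code.
 */
static inline void example_install_asid(unsigned long next_asid)
{
	/* Only the low ASID bits are meaningful to the MMU. */
	set_asid(next_asid & MMU_CONTEXT_ASID_MASK);
}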

/*
 * MMU_TTB is used for optimizing the fault handling: it caches the pgd
 * base so the TLB-miss/fault path can locate the page tables directly.
 */
static inline void set_TTB(pgd_t *pgd)
{
	__raw_writel((unsigned long)pgd, MMU_TTB);
}

static inline pgd_t *get_TTB(void)
{
	return (pgd_t *)__raw_readl(MMU_TTB);
}
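
/*
 * Illustrative sketch (not part of the original header): switching to a
 * new mm would typically update both the page table base and the ASID so
 * that the fault path and subsequent TLB fills see the new address space.
 * The helper name and the next_asid parameter are hypothetical; the real
 * switch_mm()/activate_context() logic lives in the shared SH code.
 */
static inline void example_switch_mmu(struct mm_struct *next,
				      unsigned long next_asid)
{
	set_TTB(next->pgd);	/* point the fault path at the new pgd */
	set_asid(next_asid);	/* tag new TLB entries with the new ASID */
}
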
#endif /* __ASM_SH_MMU_CONTEXT_32_H */