xref: /linux/arch/sh/include/asm/mmu_context_32.h (revision ca55b2fef3a9373fcfc30f82fd26bc7fccbda732)
#ifndef __ASM_SH_MMU_CONTEXT_32_H
#define __ASM_SH_MMU_CONTEXT_32_H

/*
 * Destroy context-related info for an mm_struct that is about
 * to be put to rest.
 */
static inline void destroy_context(struct mm_struct *mm)
{
	/* Do nothing */
}

#ifdef CONFIG_CPU_HAS_PTEAEX
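/*
 * CPUs with an extended ASID register keep the current ASID in
 * MMU_PTEAEX instead of the low bits of MMU_PTEH, so it can be
 * written and read directly.
 */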
static inline void set_asid(unsigned long asid)
{
	__raw_writel(asid, MMU_PTEAEX);
}

static inline unsigned long get_asid(void)
{
	return __raw_readl(MMU_PTEAEX) & MMU_CONTEXT_ASID_MASK;
}
#else
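/*
 * Without PTEAEX the ASID lives in the low bits of MMU_PTEH, so
 * setting it is a read-modify-write: load PTEH, clear the old ASID
 * with the 0xffffff00 mask, OR in the new value and store it back.
 */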
static inline void set_asid(unsigned long asid)
{
	unsigned long __dummy;

	__asm__ __volatile__ ("mov.l	%2, %0\n\t"
			      "and	%3, %0\n\t"
			      "or	%1, %0\n\t"
			      "mov.l	%0, %2"
			      : "=&r" (__dummy)
			      : "r" (asid), "m" (__m(MMU_PTEH)),
			        "r" (0xffffff00));
}

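/* The current ASID is read back from the low ASID bits of MMU_PTEH. */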
static inline unsigned long get_asid(void)
{
	unsigned long asid;

	__asm__ __volatile__ ("mov.l	%1, %0"
			      : "=r" (asid)
			      : "m" (__m(MMU_PTEH)));
	asid &= MMU_CONTEXT_ASID_MASK;
	return asid;
}
#endif /* CONFIG_CPU_HAS_PTEAEX */

/* MMU_TTB caches the current pgd to speed up the fault handling paths. */
static inline void set_TTB(pgd_t *pgd)
{
	__raw_writel((unsigned long)pgd, MMU_TTB);
}

static inline pgd_t *get_TTB(void)
{
	return (pgd_t *)__raw_readl(MMU_TTB);
}
#endif /* __ASM_SH_MMU_CONTEXT_32_H */