/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 1999 Cort Dougan <cort@cs.nmt.edu>
 */
#ifndef _ASM_POWERPC_SWITCH_TO_H
#define _ASM_POWERPC_SWITCH_TO_H

#include <linux/sched.h>
#include <asm/reg.h>

struct thread_struct;
struct task_struct;
struct pt_regs;

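/*
 * switch_to() switches the CPU from @prev to @next and, once @prev is
 * scheduled back in, leaves a pointer to the task that actually ran last
 * in @last.  __switch_to() performs the register/facility save and restore
 * and hands over to the low-level assembly routine _switch().
 */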
extern struct task_struct *__switch_to(struct task_struct *,
	struct task_struct *);
#define switch_to(prev, next, last)	((last) = __switch_to((prev), (next)))

extern struct task_struct *_switch(struct thread_struct *prev,
				   struct thread_struct *next);

extern void switch_booke_debug_regs(struct debug_reg *new_debug);

extern int emulate_altivec(struct pt_regs *);

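/*
 * restore_math() reloads FP/VEC/VSX register state on the exit-to-user
 * path on Book3S-64; on other platforms it is a no-op stub.
 */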
#ifdef CONFIG_PPC_BOOK3S_64
void restore_math(struct pt_regs *regs);
#else
static inline void restore_math(struct pt_regs *regs)
{
}
#endif

void restore_tm_state(struct pt_regs *regs);

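/*
 * flush_all_to_thread() pushes all of a task's live register state
 * (FP/VMX/VSX/SPE) out to its thread_struct; giveup_all() makes the CPU
 * stop holding any facility state on the task's behalf.
 */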
extern void flush_all_to_thread(struct task_struct *);
extern void giveup_all(struct task_struct *);

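/*
 * Per-facility helpers: flush_*_to_thread() saves a task's live registers
 * into its thread_struct, giveup_*() makes the CPU stop using the facility
 * on the task's behalf, and enable_kernel_*()/disable_kernel_*() bracket
 * kernel-mode use of the unit.  Where a facility is not configured, the
 * enable helpers are BUILD_BUG() stubs (or absent) so misuse fails at
 * build time.
 *
 * Sketch of a typical kernel-mode FP section (callers keep preemption
 * disabled for the duration):
 *
 *	preempt_disable();
 *	enable_kernel_fp();
 *	...use FP/VSX registers...
 *	disable_kernel_fp();
 *	preempt_enable();
 */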
#ifdef CONFIG_PPC_FPU
extern void enable_kernel_fp(void);
extern void flush_fp_to_thread(struct task_struct *);
extern void giveup_fpu(struct task_struct *);
extern void save_fpu(struct task_struct *);
static inline void disable_kernel_fp(void)
{
	msr_check_and_clear(MSR_FP);
}
#else
static inline void save_fpu(struct task_struct *t) { }
static inline void flush_fp_to_thread(struct task_struct *t) { }
static inline void enable_kernel_fp(void)
{
	BUILD_BUG();
}
#endif

#ifdef CONFIG_ALTIVEC
extern void enable_kernel_altivec(void);
extern void flush_altivec_to_thread(struct task_struct *);
extern void giveup_altivec(struct task_struct *);
extern void save_altivec(struct task_struct *);
static inline void disable_kernel_altivec(void)
{
	msr_check_and_clear(MSR_VEC);
}
#else
static inline void save_altivec(struct task_struct *t) { }
static inline void __giveup_altivec(struct task_struct *t) { }
static inline void enable_kernel_altivec(void)
{
	BUILD_BUG();
}

static inline void disable_kernel_altivec(void)
{
	BUILD_BUG();
}
#endif

#ifdef CONFIG_VSX
extern void enable_kernel_vsx(void);
extern void flush_vsx_to_thread(struct task_struct *);
static inline void disable_kernel_vsx(void)
{
	msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX);
}
#else
static inline void enable_kernel_vsx(void)
{
	BUILD_BUG();
}

static inline void disable_kernel_vsx(void)
{
	BUILD_BUG();
}
#endif

#ifdef CONFIG_SPE
extern void enable_kernel_spe(void);
extern void flush_spe_to_thread(struct task_struct *);
extern void giveup_spe(struct task_struct *);
extern void __giveup_spe(struct task_struct *);
static inline void disable_kernel_spe(void)
{
	msr_check_and_clear(MSR_SPE);
}
#else
static inline void __giveup_spe(struct task_struct *t) { }
#endif

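/*
 * EBB (Event-Based Branch) perf state is per-task and is not inherited
 * across fork, so a new task starts with its EBB and PMU registers cleared.
 */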
static inline void clear_task_ebb(struct task_struct *t)
{
#ifdef CONFIG_PPC_BOOK3S_64
	/* EBB perf events are not inherited, so clear all EBB state. */
	t->thread.ebbrr = 0;
	t->thread.ebbhr = 0;
	t->thread.bescr = 0;
	t->thread.mmcr2 = 0;
	t->thread.mmcr0 = 0;
	t->thread.siar = 0;
	t->thread.sdar = 0;
	t->thread.sier = 0;
	t->thread.used_ebb = 0;
#endif
}

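/*
 * Used by KVM: save the current (host) task's user-visible register state
 * and SPRs before switching to a guest.
 */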
void kvmppc_save_user_regs(void);
void kvmppc_save_current_sprs(void);

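/* Assign the task a TIDR (thread ID SPR) value on CPUs that implement it. */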
extern int set_thread_tidr(struct task_struct *t);

#endif /* _ASM_POWERPC_SWITCH_TO_H */