--- vector.S (bf61c8840efe60fd8f91446860b63338fb424158)
+++ vector.S (de79f7b9f6f92ec1bd6f61fa1f20de60728a5b5e)
 #include <asm/processor.h>
 #include <asm/ppc_asm.h>
 #include <asm/reg.h>
 #include <asm/asm-offsets.h>
 #include <asm/cputable.h>
 #include <asm/thread_info.h>
 #include <asm/page.h>
 #include <asm/ptrace.h>
 
 #ifdef CONFIG_PPC_TRANSACTIONAL_MEM
-/*
- * Wrapper to call load_up_altivec from C.
- * void do_load_up_altivec(struct pt_regs *regs);
- */
-_GLOBAL(do_load_up_altivec)
-	mflr	r0
-	std	r0, 16(r1)
-	stdu	r1, -112(r1)
-
-	subi	r6, r3, STACK_FRAME_OVERHEAD
-	/* load_up_altivec expects r12=MSR, r13=PACA, and returns
-	 * with r12 = new MSR.
-	 */
-	ld	r12,_MSR(r6)
-	GET_PACA(r13)
-	bl	load_up_altivec
-	std	r12,_MSR(r6)
-
-	ld	r0, 112+16(r1)
-	addi	r1, r1, 112
-	mtlr	r0
-	blr
-
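The deleted wrapper existed so C code could call load_up_altivec, which uses a bare asm calling convention: MSR in r12, PACA in r13, updated MSR handed back in r12. The subi converts the pt_regs pointer in r3 into a stack-frame base so the _MSR() offset applies, and the LR slot stored at 16(r1) before the 112-byte stdu is reloaded from 112+16(r1) afterwards. A rough C-level equivalent, where the load_up_altivec_msr helper name is purely illustrative:

	/* Illustrative sketch only -- not the kernel's actual C API. */
	void do_load_up_altivec(struct pt_regs *regs)
	{
		/* the asm routine enables MSR_VEC and returns the updated MSR;
		 * the wrapper stores it back into the trap frame */
		regs->msr = load_up_altivec_msr(regs->msr);
	}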
 /* void do_load_up_transact_altivec(struct thread_struct *thread)
  *
  * This is similar to load_up_altivec but for the transactional version of the
  * vector regs.  It doesn't mess with the task MSR or valid flags.
  * Furthermore, VEC laziness is not supported with TM currently.
  */
 _GLOBAL(do_load_up_transact_altivec)
 	mfmsr	r6
 	oris	r5,r6,MSR_VEC@h
 	MTMSRD(r5)
 	isync
 
 	li	r4,1
 	stw	r4,THREAD_USED_VR(r3)
 
-	li	r10,THREAD_TRANSACT_VSCR
+	li	r10,THREAD_TRANSACT_VRSTATE+VRSTATE_VSCR
 	lvx	vr0,r10,r3
 	mtvscr	vr0
-	REST_32VRS_TRANSACT(0,r4,r3)
+	addi	r10,r3,THREAD_TRANSACT_VRSTATE
+	REST_32VRS(0,r4,r10)
 
 	/* Disable VEC again. */
 	MTMSRD(r6)
 	isync
 
 	blr
 #endif
 
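The offset rewrite in this hunk follows from the vector state moving out of thread_struct into a structure of its own. A minimal sketch of the layout the new symbols imply, assuming the field names behind THREAD_VRSTATE and THREAD_TRANSACT_VRSTATE (the exact kernel definitions may differ in detail):

	/* Assumed container for one copy of the AltiVec register state: */
	typedef struct {
		unsigned int u[4];
	} __attribute__((aligned(16))) vector128;

	struct thread_vr_state {
		vector128 vr[32];	/* 32 regs x 16 bytes = 512 bytes */
		vector128 vscr;		/* VRSTATE_VSCR == offsetof(..., vscr) == 512 */
	};

	/*
	 * thread_struct is assumed to embed two copies:
	 *   vr_state     at offset THREAD_VRSTATE           (live state)
	 *   transact_vr  at offset THREAD_TRANSACT_VRSTATE  (TM checkpoint)
	 * so li r10,THREAD_TRANSACT_VRSTATE+VRSTATE_VSCR addresses the
	 * checkpointed VSCR relative to the thread_struct pointer in r3.
	 */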
 /*
- * load_up_altivec(unused, unused, tsk)
  * Disable VMX for the task which had it previously,
  * and save its vector registers in its thread_struct.
  * Enables the VMX for use in the kernel on return.
  * On SMP we know the VMX is free, since we give it up every
  * switch (ie, no lazy save of the vector registers).
  */
 _GLOBAL(load_up_altivec)
 	mfmsr	r5	/* grab the current MSR */
--- 14 unchanged lines hidden ---
 	toreal(r3)
 	PPC_LL	r4,ADDROFF(last_task_used_altivec)(r3)
 	PPC_LCMPI	0,r4,0
 	beq	1f
 
 	/* Save VMX state to last_task_used_altivec's THREAD struct */
 	toreal(r4)
 	addi	r4,r4,THREAD
-	SAVE_32VRS(0,r5,r4)
+	addi	r7,r4,THREAD_VRSTATE
+	SAVE_32VRS(0,r5,r7)
 	mfvscr	vr0
-	li	r10,THREAD_VSCR
-	stvx	vr0,r10,r4
+	li	r10,VRSTATE_VSCR
+	stvx	vr0,r10,r7
 	/* Disable VMX for last_task_used_altivec */
 	PPC_LL	r5,PT_REGS(r4)
 	toreal(r5)
 	PPC_LL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
 	lis	r10,MSR_VEC@h
 	andc	r4,r4,r10
 	PPC_STL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
 1:
--- 15 unchanged lines hidden ---
 	mfspr	r5,SPRN_SPRG_THREAD	/* current task's THREAD (phys) */
 	oris	r9,r9,MSR_VEC@h
 #else
 	ld	r4,PACACURRENT(r13)
 	addi	r5,r4,THREAD		/* Get THREAD */
 	oris	r12,r12,MSR_VEC@h
 	std	r12,_MSR(r1)
 #endif
+	addi	r7,r5,THREAD_VRSTATE
 	li	r4,1
-	li	r10,THREAD_VSCR
+	li	r10,VRSTATE_VSCR
 	stw	r4,THREAD_USED_VR(r5)
-	lvx	vr0,r10,r5
+	lvx	vr0,r10,r7
 	mtvscr	vr0
-	REST_32VRS(0,r4,r5)
+	REST_32VRS(0,r4,r7)
 #ifndef CONFIG_SMP
 	/* Update last_task_used_altivec to 'current' */
 	subi	r4,r5,THREAD		/* Back to 'current' */
 	fromreal(r4)
 	PPC_STL	r4,ADDROFF(last_task_used_altivec)(r3)
 #endif /* CONFIG_SMP */
 	/* restore registers and return */
 	blr
--- 18 unchanged lines hidden ---
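THREAD_VRSTATE, THREAD_TRANSACT_VRSTATE and VRSTATE_VSCR are build-time constants emitted by asm-offsets. A hedged sketch of the asm-offsets.c entries this diff appears to rely on (field names as assumed in the struct sketch above):

	/* Assumed asm-offsets.c entries backing the new symbols: */
	DEFINE(THREAD_VRSTATE, offsetof(struct thread_struct, vr_state));
	DEFINE(THREAD_TRANSACT_VRSTATE, offsetof(struct thread_struct, transact_vr));
	DEFINE(VRSTATE_VSCR, offsetof(struct thread_vr_state, vscr));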
 	mfmsr	r5
 	oris	r5,r5,MSR_VEC@h
 	SYNC
 	MTMSRD(r5)			/* enable use of VMX now */
 	isync
 	PPC_LCMPI	0,r3,0
 	beqlr				/* if no previous owner, done */
 	addi	r3,r3,THREAD		/* want THREAD of task */
+	addi	r7,r3,THREAD_VRSTATE
 	PPC_LL	r5,PT_REGS(r3)
 	PPC_LCMPI	0,r5,0
-	SAVE_32VRS(0,r4,r3)
+	SAVE_32VRS(0,r4,r7)
 	mfvscr	vr0
-	li	r4,THREAD_VSCR
-	stvx	vr0,r4,r3
+	li	r4,VRSTATE_VSCR
+	stvx	vr0,r4,r7
 	beq	1f
 	PPC_LL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
 #ifdef CONFIG_VSX
 BEGIN_FTR_SECTION
 	lis	r3,(MSR_VEC|MSR_VSX)@h
 FTR_SECTION_ELSE
 	lis	r3,MSR_VEC@h
 ALT_FTR_SECTION_END_IFSET(CPU_FTR_VSX)
--- 287 unchanged lines hidden ---
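The disappearance of REST_32VRS_TRANSACT also falls out of this scheme: once the base register (r7 or r10 above) carries the full address of a thread_vr_state, one macro family serves both the live and the transactional copy. A sketch of the assumed macro shape in asm/ppc_asm.h after this change (from memory, not verbatim):

	/* base = address of a thread_vr_state; b = scratch register */
	#define SAVE_VR(n,b,base)	li b,16*(n); stvx n,base,b
	#define REST_VR(n,b,base)	li b,16*(n); lvx n,base,b
	/* SAVE_32VRS/REST_32VRS expand these over vr0..vr31. Previously the
	 * offset baked in THREAD_VR0 or THREAD_TRANSACT_VR0, which is why a
	 * separate _TRANSACT macro family was needed at all. */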