/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 1995, 1996, 1998, 1999, 2002, 2003 Ralf Baechle
 * Copyright (C) 1996 David S. Miller (dm@engr.sgi.com)
 * Copyright (C) 1994, 1995, 1996, by Andreas Busse
 * Copyright (C) 1999 Silicon Graphics, Inc.
 * Copyright (C) 2000 MIPS Technologies, Inc.
 *    written by Carsten Langgaard, carstenl@mips.com
 */
#include <linux/config.h>
#include <asm/asm.h>
#include <asm/cachectl.h>
#include <asm/fpregdef.h>
#include <asm/mipsregs.h>
#include <asm/asm-offsets.h>
#include <asm/page.h>
#include <asm/pgtable-bits.h>
#include <asm/regdef.h>
#include <asm/stackframe.h>
#include <asm/thread_info.h>

#include <asm/asmmacro.h>

/*
 * Offset to the current process's saved status word; the first 32 bytes
 * of the kernel stack are not used.
 */
#define ST_OFF (_THREAD_SIZE - 32 - PT_SIZE + PT_STATUS)

/*
 * FPU context is saved iff the process has used its FPU in the current
 * time slice, as indicated by _TIF_USEDFPU.  In any case, the CU1 bit in the
 * saved user-space STATUS register should be 0, so that a process *always*
 * starts its userland with the FPU disabled after each context switch.
 *
 * The FPU will be re-enabled as soon as the process accesses it again,
 * through the do_cpu() trap.
 */

/*
 * task_struct *resume(task_struct *prev, task_struct *next,
 *                     struct thread_info *next_ti)
 */
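/*
 * For orientation, resume() is reached from the architecture's switch_to()
 * macro.  The sketch below is only a simplified illustration of the calling
 * convention implied by the prototype above; the real macro's contents are
 * configuration dependent and contain additional steps:
 *
 *	#define switch_to(prev, next, last)				\
 *	do {								\
 *		(last) = resume(prev, next, task_thread_info(next));	\
 *	} while (0)
 *
 * resume() hands back the previous task in v0, which switch_to() assigns
 * to "last".
 */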
	.align	5
	LEAF(resume)
#ifndef CONFIG_CPU_HAS_LLSC
	sw	zero, ll_bit
#endif
	mfc0	t1, CP0_STATUS
	LONG_S	t1, THREAD_STATUS(a0)
	cpu_save_nonscratch a0
	LONG_S	ra, THREAD_REG31(a0)

	/*
	 * Check if we need to save FPU registers.
	 */
	PTR_L	t3, TASK_THREAD_INFO(a0)
	LONG_L	t0, TI_FLAGS(t3)
	li	t1, _TIF_USEDFPU
	and	t2, t0, t1
	beqz	t2, 1f
	nor	t1, zero, t1

	and	t0, t0, t1
	LONG_S	t0, TI_FLAGS(t3)

	/*
	 * Clear the CU1 bit in the status word saved on the user stack.
	 */
	LONG_L	t0, ST_OFF(t3)
	li	t1, ~ST0_CU1
	and	t0, t0, t1
	LONG_S	t0, ST_OFF(t3)

	fpu_save_double a0 t0 t1		# c0_status passed in t0
						# clobbers t1
1:

	/*
	 * The order in which the registers are restored takes care of the
	 * race when updating $28, $29 and kernelsp without disabling
	 * interrupts.
	 */
	move	$28, a2
	cpu_restore_nonscratch a1

	PTR_ADDIU	t0, $28, _THREAD_SIZE - 32
	set_saved_sp	t0, t1, t2
#ifdef CONFIG_MIPS_MT_SMTC
	/* Read-modify-writes of Status must be atomic on a VPE */
	mfc0	t2, CP0_TCSTATUS
	ori	t1, t2, TCSTATUS_IXMT
	mtc0	t1, CP0_TCSTATUS
	andi	t2, t2, TCSTATUS_IXMT
	_ehb
	DMT	8				# dmt	t0
	move	t1, ra
	jal	mips_ihb
	move	ra, t1
#endif /* CONFIG_MIPS_MT_SMTC */
	mfc0	t1, CP0_STATUS		/* Do we really need this? */
	li	a3, 0xff01
	and	t1, a3
	LONG_L	a2, THREAD_STATUS(a1)
	nor	a3, $0, a3
	and	a2, a3
	or	a2, t1
	mtc0	a2, CP0_STATUS
#ifdef CONFIG_MIPS_MT_SMTC
	_ehb
	andi	t0, t0, VPECONTROL_TE
	beqz	t0, 1f
	emt
1:
	mfc0	t1, CP0_TCSTATUS
	xori	t1, t1, TCSTATUS_IXMT
	or	t1, t1, t2
	mtc0	t1, CP0_TCSTATUS
	_ehb
#endif /* CONFIG_MIPS_MT_SMTC */
	move	v0, a0
	jr	ra
	END(resume)

/*
 * Save a thread's fp context.
 */
LEAF(_save_fp)
#ifdef CONFIG_64BIT
	mfc0	t0, CP0_STATUS
#endif
	fpu_save_double a0 t0 t1		# clobbers t1
	jr	ra
	END(_save_fp)

/*
 * Restore a thread's fp context.
 */
LEAF(_restore_fp)
#ifdef CONFIG_64BIT
	mfc0	t0, CP0_STATUS
#endif
	fpu_restore_double a0 t0 t1		# clobbers t1
	jr	ra
	END(_restore_fp)

/*
 * Load the FPU with signalling NaNs.  The bit pattern we use has the
 * property that it represents a signalling NaN regardless of whether it
 * is interpreted as single or as double precision.
 *
 * We initialize fcr31 to rounding to nearest, no exceptions.
 */

#define FPU_DEFAULT  0x00000000

LEAF(_init_fpu)
#ifdef CONFIG_MIPS_MT_SMTC
	/* Rather than manipulate per-VPE Status, set per-TC bit in TCStatus */
	mfc0	t0, CP0_TCSTATUS
	/* Bit position is the same for Status, TCStatus */
	li	t1, ST0_CU1
	or	t0, t1
	mtc0	t0, CP0_TCSTATUS
#else /* Normal MIPS CU1 enable */
	mfc0	t0, CP0_STATUS
	li	t1, ST0_CU1
	or	t0, t1
	mtc0	t0, CP0_STATUS
#endif /* CONFIG_MIPS_MT_SMTC */
	fpu_enable_hazard

	li	t1, FPU_DEFAULT
	ctc1	t1, fcr31

	li	t1, -1				# SNaN

#ifdef CONFIG_64BIT
	sll	t0, t0, 5
	bgez	t0, 1f				# 16 / 32 register mode?

	dmtc1	t1, $f1
	dmtc1	t1, $f3
	dmtc1	t1, $f5
	dmtc1	t1, $f7
	dmtc1	t1, $f9
	dmtc1	t1, $f11
	dmtc1	t1, $f13
	dmtc1	t1, $f15
	dmtc1	t1, $f17
	dmtc1	t1, $f19
	dmtc1	t1, $f21
	dmtc1	t1, $f23
	dmtc1	t1, $f25
	dmtc1	t1, $f27
	dmtc1	t1, $f29
	dmtc1	t1, $f31
1:
#endif

#ifdef CONFIG_CPU_MIPS32
	mtc1	t1, $f0
	mtc1	t1, $f1
	mtc1	t1, $f2
	mtc1	t1, $f3
	mtc1	t1, $f4
	mtc1	t1, $f5
	mtc1	t1, $f6
	mtc1	t1, $f7
	mtc1	t1, $f8
	mtc1	t1, $f9
	mtc1	t1, $f10
	mtc1	t1, $f11
	mtc1	t1, $f12
	mtc1	t1, $f13
	mtc1	t1, $f14
	mtc1	t1, $f15
	mtc1	t1, $f16
	mtc1	t1, $f17
	mtc1	t1, $f18
	mtc1	t1, $f19
	mtc1	t1, $f20
	mtc1	t1, $f21
	mtc1	t1, $f22
	mtc1	t1, $f23
	mtc1	t1, $f24
	mtc1	t1, $f25
	mtc1	t1, $f26
	mtc1	t1, $f27
	mtc1	t1, $f28
	mtc1	t1, $f29
	mtc1	t1, $f30
	mtc1	t1, $f31
#else
	.set	mips3
	dmtc1	t1, $f0
	dmtc1	t1, $f2
	dmtc1	t1, $f4
	dmtc1	t1, $f6
	dmtc1	t1, $f8
	dmtc1	t1, $f10
	dmtc1	t1, $f12
	dmtc1	t1, $f14
	dmtc1	t1, $f16
	dmtc1	t1, $f18
	dmtc1	t1, $f20
	dmtc1	t1, $f22
	dmtc1	t1, $f24
	dmtc1	t1, $f26
	dmtc1	t1, $f28
	dmtc1	t1, $f30
#endif
	jr	ra
	END(_init_fpu)
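/*
 * Note on the "16 / 32 register mode?" test in _init_fpu: Status.FR is
 * bit 26 of the CP0 Status register, and "sll t0, t0, 5" moves it into
 * the sign bit so that bgez skips the odd-numbered registers when FR == 0,
 * i.e. when only the 16 even-numbered FP registers are usable.  The
 * fpu_save_double / fpu_restore_double macros from <asm/asmmacro.h> are
 * passed CP0_STATUS in t0 (see the "c0_status passed in t0" comment in
 * resume) so that the FPU register mode can likewise be taken into
 * account when a thread's FPU context is saved or restored.
 */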