/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 98, 99, 2000, 01 Ralf Baechle
 *
 * Multi-arch abstraction and asm macros for easier reading:
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 *
 * Carsten Langgaard, carstenl@mips.com
 * Copyright (C) 2000 MIPS Technologies, Inc.
 * Copyright (C) 1999, 2001 Silicon Graphics, Inc.
 */
#include <asm/asm.h>
#include <asm/asmmacro.h>
#include <asm/errno.h>
#include <asm/fpregdef.h>
#include <asm/mipsregs.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>

/* preprocessor replaces the fp in ".set fp=64" with $30 otherwise */
#undef fp

	.macro	EX insn, reg, src
	.set	push
	SET_HARDFLOAT
	.set	nomacro
.ex\@:	\insn	\reg, \src
	.set	pop
	.section __ex_table,"a"
	PTR	.ex\@, fault
	.previous
	.endm

	.set	noreorder

/**
 * _save_fp_context() - save FP context from the FPU
 * @a0 - pointer to fpregs field of sigcontext
 * @a1 - pointer to fpc_csr field of sigcontext
 *
 * Save FP context, including the 32 FP data registers and the FP
 * control & status register, from the FPU to signal context.
 */
LEAF(_save_fp_context)
	.set	push
	SET_HARDFLOAT
	cfc1	t1, fcr31
	.set	pop

#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2) || \
		defined(CONFIG_CPU_MIPS32_R6)
	.set	push
	SET_HARDFLOAT
#ifdef CONFIG_CPU_MIPS32_R2
	.set	mips32r2
	.set	fp=64
	mfc0	t0, CP0_STATUS
	sll	t0, t0, 5
	bgez	t0, 1f			# skip storing odd if FR=0
	 nop
#endif
	/* Store the 16 odd double precision registers */
	EX	sdc1 $f1, 8(a0)
	EX	sdc1 $f3, 24(a0)
	EX	sdc1 $f5, 40(a0)
	EX	sdc1 $f7, 56(a0)
	EX	sdc1 $f9, 72(a0)
	EX	sdc1 $f11, 88(a0)
	EX	sdc1 $f13, 104(a0)
	EX	sdc1 $f15, 120(a0)
	EX	sdc1 $f17, 136(a0)
	EX	sdc1 $f19, 152(a0)
	EX	sdc1 $f21, 168(a0)
	EX	sdc1 $f23, 184(a0)
	EX	sdc1 $f25, 200(a0)
	EX	sdc1 $f27, 216(a0)
	EX	sdc1 $f29, 232(a0)
	EX	sdc1 $f31, 248(a0)
1:	.set	pop
#endif

	.set	push
	SET_HARDFLOAT
	/* Store the 16 even double precision registers */
	EX	sdc1 $f0, 0(a0)
	EX	sdc1 $f2, 16(a0)
	EX	sdc1 $f4, 32(a0)
	EX	sdc1 $f6, 48(a0)
	EX	sdc1 $f8, 64(a0)
	EX	sdc1 $f10, 80(a0)
	EX	sdc1 $f12, 96(a0)
	EX	sdc1 $f14, 112(a0)
	EX	sdc1 $f16, 128(a0)
	EX	sdc1 $f18, 144(a0)
	EX	sdc1 $f20, 160(a0)
	EX	sdc1 $f22, 176(a0)
	EX	sdc1 $f24, 192(a0)
	EX	sdc1 $f26, 208(a0)
	EX	sdc1 $f28, 224(a0)
	EX	sdc1 $f30, 240(a0)
	EX	sw t1, 0(a1)
	jr	ra
	 li	v0, 0					# success
	.set	pop
	END(_save_fp_context)

#ifdef CONFIG_MIPS32_COMPAT
	/* Save 32-bit process floating point context */
LEAF(_save_fp_context32)
	.set	push
	.set	MIPS_ISA_ARCH_LEVEL_RAW
	SET_HARDFLOAT
	cfc1	t1, fcr31

#ifndef CONFIG_CPU_MIPS64_R6
	mfc0	t0, CP0_STATUS
	sll	t0, t0, 5
	bgez	t0, 1f			# skip storing odd if FR=0
	 nop
#endif

	/* Store the 16 odd double precision registers */
	EX	sdc1 $f1, SC32_FPREGS+8(a0)
	EX	sdc1 $f3, SC32_FPREGS+24(a0)
	EX	sdc1 $f5, SC32_FPREGS+40(a0)
	EX	sdc1 $f7, SC32_FPREGS+56(a0)
	EX	sdc1 $f9, SC32_FPREGS+72(a0)
	EX	sdc1 $f11, SC32_FPREGS+88(a0)
	EX	sdc1 $f13, SC32_FPREGS+104(a0)
	EX	sdc1 $f15, SC32_FPREGS+120(a0)
	EX	sdc1 $f17, SC32_FPREGS+136(a0)
	EX	sdc1 $f19, SC32_FPREGS+152(a0)
	EX	sdc1 $f21, SC32_FPREGS+168(a0)
	EX	sdc1 $f23, SC32_FPREGS+184(a0)
	EX	sdc1 $f25, SC32_FPREGS+200(a0)
	EX	sdc1 $f27, SC32_FPREGS+216(a0)
	EX	sdc1 $f29, SC32_FPREGS+232(a0)
	EX	sdc1 $f31, SC32_FPREGS+248(a0)

	/* Store the 16 even double precision registers */
1:	EX	sdc1 $f0, SC32_FPREGS+0(a0)
	EX	sdc1 $f2, SC32_FPREGS+16(a0)
	EX	sdc1 $f4, SC32_FPREGS+32(a0)
	EX	sdc1 $f6, SC32_FPREGS+48(a0)
	EX	sdc1 $f8, SC32_FPREGS+64(a0)
	EX	sdc1 $f10, SC32_FPREGS+80(a0)
	EX	sdc1 $f12, SC32_FPREGS+96(a0)
	EX	sdc1 $f14, SC32_FPREGS+112(a0)
	EX	sdc1 $f16, SC32_FPREGS+128(a0)
	EX	sdc1 $f18, SC32_FPREGS+144(a0)
	EX	sdc1 $f20, SC32_FPREGS+160(a0)
	EX	sdc1 $f22, SC32_FPREGS+176(a0)
	EX	sdc1 $f24, SC32_FPREGS+192(a0)
	EX	sdc1 $f26, SC32_FPREGS+208(a0)
	EX	sdc1 $f28, SC32_FPREGS+224(a0)
	EX	sdc1 $f30, SC32_FPREGS+240(a0)
	EX	sw t1, SC32_FPC_CSR(a0)
	cfc1	t0, $0				# implementation/version
	EX	sw t0, SC32_FPC_EIR(a0)
	.set	pop

	jr	ra
	 li	v0, 0					# success
	END(_save_fp_context32)
#endif

/**
 * _restore_fp_context() - restore FP context to the FPU
 * @a0 - pointer to fpregs field of sigcontext
 * @a1 - pointer to fpc_csr field of sigcontext
 *
 * Restore FP context, including the 32 FP data registers and the FP
 * control & status register, from signal context to the FPU.
 */
LEAF(_restore_fp_context)
	EX	lw t1, 0(a1)

#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2) || \
		defined(CONFIG_CPU_MIPS32_R6)
	.set	push
	SET_HARDFLOAT
#ifdef CONFIG_CPU_MIPS32_R2
	.set	mips32r2
	.set	fp=64
	mfc0	t0, CP0_STATUS
	sll	t0, t0, 5
	bgez	t0, 1f			# skip loading odd if FR=0
	 nop
#endif
	EX	ldc1 $f1, 8(a0)
	EX	ldc1 $f3, 24(a0)
	EX	ldc1 $f5, 40(a0)
	EX	ldc1 $f7, 56(a0)
	EX	ldc1 $f9, 72(a0)
	EX	ldc1 $f11, 88(a0)
	EX	ldc1 $f13, 104(a0)
	EX	ldc1 $f15, 120(a0)
	EX	ldc1 $f17, 136(a0)
	EX	ldc1 $f19, 152(a0)
	EX	ldc1 $f21, 168(a0)
	EX	ldc1 $f23, 184(a0)
	EX	ldc1 $f25, 200(a0)
	EX	ldc1 $f27, 216(a0)
	EX	ldc1 $f29, 232(a0)
	EX	ldc1 $f31, 248(a0)
1:	.set	pop
#endif
	.set	push
	SET_HARDFLOAT
	EX	ldc1 $f0, 0(a0)
	EX	ldc1 $f2, 16(a0)
	EX	ldc1 $f4, 32(a0)
	EX	ldc1 $f6, 48(a0)
	EX	ldc1 $f8, 64(a0)
	EX	ldc1 $f10, 80(a0)
	EX	ldc1 $f12, 96(a0)
	EX	ldc1 $f14, 112(a0)
	EX	ldc1 $f16, 128(a0)
	EX	ldc1 $f18, 144(a0)
	EX	ldc1 $f20, 160(a0)
	EX	ldc1 $f22, 176(a0)
	EX	ldc1 $f24, 192(a0)
	EX	ldc1 $f26, 208(a0)
	EX	ldc1 $f28, 224(a0)
	EX	ldc1 $f30, 240(a0)
	ctc1	t1, fcr31
	.set	pop
	jr	ra
	 li	v0, 0					# success
	END(_restore_fp_context)

#ifdef CONFIG_MIPS32_COMPAT
LEAF(_restore_fp_context32)
	/* Restore an o32 sigcontext. */
	.set	push
	SET_HARDFLOAT
	EX	lw t1, SC32_FPC_CSR(a0)

#ifndef CONFIG_CPU_MIPS64_R6
	mfc0	t0, CP0_STATUS
	sll	t0, t0, 5
	bgez	t0, 1f			# skip loading odd if FR=0
	 nop
#endif

	EX	ldc1 $f1, SC32_FPREGS+8(a0)
	EX	ldc1 $f3, SC32_FPREGS+24(a0)
	EX	ldc1 $f5, SC32_FPREGS+40(a0)
	EX	ldc1 $f7, SC32_FPREGS+56(a0)
	EX	ldc1 $f9, SC32_FPREGS+72(a0)
	EX	ldc1 $f11, SC32_FPREGS+88(a0)
	EX	ldc1 $f13, SC32_FPREGS+104(a0)
	EX	ldc1 $f15, SC32_FPREGS+120(a0)
	EX	ldc1 $f17, SC32_FPREGS+136(a0)
	EX	ldc1 $f19, SC32_FPREGS+152(a0)
	EX	ldc1 $f21, SC32_FPREGS+168(a0)
	EX	ldc1 $f23, SC32_FPREGS+184(a0)
	EX	ldc1 $f25, SC32_FPREGS+200(a0)
	EX	ldc1 $f27, SC32_FPREGS+216(a0)
	EX	ldc1 $f29, SC32_FPREGS+232(a0)
	EX	ldc1 $f31, SC32_FPREGS+248(a0)

1:	EX	ldc1 $f0, SC32_FPREGS+0(a0)
	EX	ldc1 $f2, SC32_FPREGS+16(a0)
	EX	ldc1 $f4, SC32_FPREGS+32(a0)
	EX	ldc1 $f6, SC32_FPREGS+48(a0)
	EX	ldc1 $f8, SC32_FPREGS+64(a0)
	EX	ldc1 $f10, SC32_FPREGS+80(a0)
	EX	ldc1 $f12, SC32_FPREGS+96(a0)
	EX	ldc1 $f14, SC32_FPREGS+112(a0)
	EX	ldc1 $f16, SC32_FPREGS+128(a0)
	EX	ldc1 $f18, SC32_FPREGS+144(a0)
	EX	ldc1 $f20, SC32_FPREGS+160(a0)
	EX	ldc1 $f22, SC32_FPREGS+176(a0)
	EX	ldc1 $f24, SC32_FPREGS+192(a0)
	EX	ldc1 $f26, SC32_FPREGS+208(a0)
	EX	ldc1 $f28, SC32_FPREGS+224(a0)
	EX	ldc1 $f30, SC32_FPREGS+240(a0)
	ctc1	t1, fcr31
	jr	ra
	 li	v0, 0					# success
	.set	pop
	END(_restore_fp_context32)
#endif

#ifdef CONFIG_CPU_HAS_MSA

	/*
	 * The read_msa_wr_* and write_msa_wr_* functions generated below
	 * dispatch through a jump table: the MSA vector register index in
	 * a0 selects one of 32 16-byte aligned stubs, each of which
	 * performs a single MSA store/load between that register and the
	 * buffer pointed to by a1.
	 */
	.macro	op_one_wr	op, idx, base
	.align	4
\idx:	\op	\idx, 0, \base
	jr	ra
	 nop
	.endm

	.macro	op_msa_wr	name, op
LEAF(\name)
	.set		push
	.set		noreorder
	sll		t0, a0, 4
	PTR_LA		t1, 0f
	PTR_ADDU	t0, t0, t1
	jr		t0
	 nop
	op_one_wr	\op, 0, a1
	op_one_wr	\op, 1, a1
	op_one_wr	\op, 2, a1
	op_one_wr	\op, 3, a1
	op_one_wr	\op, 4, a1
	op_one_wr	\op, 5, a1
	op_one_wr	\op, 6, a1
	op_one_wr	\op, 7, a1
	op_one_wr	\op, 8, a1
	op_one_wr	\op, 9, a1
	op_one_wr	\op, 10, a1
	op_one_wr	\op, 11, a1
	op_one_wr	\op, 12, a1
	op_one_wr	\op, 13, a1
	op_one_wr	\op, 14, a1
	op_one_wr	\op, 15, a1
	op_one_wr	\op, 16, a1
	op_one_wr	\op, 17, a1
	op_one_wr	\op, 18, a1
	op_one_wr	\op, 19, a1
	op_one_wr	\op, 20, a1
	op_one_wr	\op, 21, a1
	op_one_wr	\op, 22, a1
	op_one_wr	\op, 23, a1
	op_one_wr	\op, 24, a1
	op_one_wr	\op, 25, a1
	op_one_wr	\op, 26, a1
	op_one_wr	\op, 27, a1
	op_one_wr	\op, 28, a1
	op_one_wr	\op, 29, a1
	op_one_wr	\op, 30, a1
	op_one_wr	\op, 31, a1
	.set	pop
	END(\name)
	.endm

	op_msa_wr	read_msa_wr_b, st_b
	op_msa_wr	read_msa_wr_h, st_h
	op_msa_wr	read_msa_wr_w, st_w
	op_msa_wr	read_msa_wr_d, st_d

	op_msa_wr	write_msa_wr_b, ld_b
	op_msa_wr	write_msa_wr_h, ld_h
	op_msa_wr	write_msa_wr_w, ld_w
	op_msa_wr	write_msa_wr_d, ld_d

#endif /* CONFIG_CPU_HAS_MSA */

#ifdef CONFIG_CPU_HAS_MSA

	.macro	save_msa_upper	wr, off, base
	.set	push
	.set	noat
#ifdef CONFIG_64BIT
	copy_u_d \wr, 1
	EX sd	$1, \off(\base)
#elif defined(CONFIG_CPU_LITTLE_ENDIAN)
	copy_u_w \wr, 2
	EX sw	$1, \off(\base)
	copy_u_w \wr, 3
	EX sw	$1, (\off+4)(\base)
#else /* CONFIG_CPU_BIG_ENDIAN */
	copy_u_w \wr, 2
	EX sw	$1, (\off+4)(\base)
	copy_u_w \wr, 3
	EX sw	$1, \off(\base)
#endif
	.set	pop
	.endm

	/*
	 * _save_msa_all_upper() - save the upper 64 bits of each of the 32
	 * MSA vector registers to the buffer pointed to by a0.
	 */
LEAF(_save_msa_all_upper)
	save_msa_upper	0, 0x00, a0
	save_msa_upper	1, 0x08, a0
	save_msa_upper	2, 0x10, a0
	save_msa_upper	3, 0x18, a0
	save_msa_upper	4, 0x20, a0
	save_msa_upper	5, 0x28, a0
	save_msa_upper	6, 0x30, a0
	save_msa_upper	7, 0x38, a0
	save_msa_upper	8, 0x40, a0
	save_msa_upper	9, 0x48, a0
	save_msa_upper	10, 0x50, a0
	save_msa_upper	11, 0x58, a0
	save_msa_upper	12, 0x60, a0
	save_msa_upper	13, 0x68, a0
	save_msa_upper	14, 0x70, a0
	save_msa_upper	15, 0x78, a0
	save_msa_upper	16, 0x80, a0
	save_msa_upper	17, 0x88, a0
	save_msa_upper	18, 0x90, a0
	save_msa_upper	19, 0x98, a0
	save_msa_upper	20, 0xa0, a0
	save_msa_upper	21, 0xa8, a0
	save_msa_upper	22, 0xb0, a0
	save_msa_upper	23, 0xb8, a0
	save_msa_upper	24, 0xc0, a0
	save_msa_upper	25, 0xc8, a0
	save_msa_upper	26, 0xd0, a0
	save_msa_upper	27, 0xd8, a0
	save_msa_upper	28, 0xe0, a0
	save_msa_upper	29, 0xe8, a0
	save_msa_upper	30, 0xf0, a0
	save_msa_upper	31, 0xf8, a0
	jr	ra
	 li	v0, 0					# success
	END(_save_msa_all_upper)

	.macro	restore_msa_upper	wr, off, base
	.set	push
	.set	noat
#ifdef CONFIG_64BIT
	EX ld	$1, \off(\base)
	insert_d \wr, 1
#elif defined(CONFIG_CPU_LITTLE_ENDIAN)
	EX lw	$1, \off(\base)
	insert_w \wr, 2
	EX lw	$1, (\off+4)(\base)
	insert_w \wr, 3
#else /* CONFIG_CPU_BIG_ENDIAN */
	EX lw	$1, (\off+4)(\base)
	insert_w \wr, 2
	EX lw	$1, \off(\base)
	insert_w \wr, 3
#endif
	.set	pop
	.endm

	/*
	 * _restore_msa_all_upper() - restore the upper 64 bits of each of
	 * the 32 MSA vector registers from the buffer pointed to by a0.
	 */
LEAF(_restore_msa_all_upper)
	restore_msa_upper	0, 0x00, a0
	restore_msa_upper	1, 0x08, a0
	restore_msa_upper	2, 0x10, a0
	restore_msa_upper	3, 0x18, a0
	restore_msa_upper	4, 0x20, a0
	restore_msa_upper	5, 0x28, a0
	restore_msa_upper	6, 0x30, a0
	restore_msa_upper	7, 0x38, a0
	restore_msa_upper	8, 0x40, a0
	restore_msa_upper	9, 0x48, a0
	restore_msa_upper	10, 0x50, a0
	restore_msa_upper	11, 0x58, a0
	restore_msa_upper	12, 0x60, a0
	restore_msa_upper	13, 0x68, a0
	restore_msa_upper	14, 0x70, a0
	restore_msa_upper	15, 0x78, a0
	restore_msa_upper	16, 0x80, a0
	restore_msa_upper	17, 0x88, a0
	restore_msa_upper	18, 0x90, a0
	restore_msa_upper	19, 0x98, a0
	restore_msa_upper	20, 0xa0, a0
	restore_msa_upper	21, 0xa8, a0
	restore_msa_upper	22, 0xb0, a0
	restore_msa_upper	23, 0xb8, a0
	restore_msa_upper	24, 0xc0, a0
	restore_msa_upper	25, 0xc8, a0
	restore_msa_upper	26, 0xd0, a0
	restore_msa_upper	27, 0xd8, a0
	restore_msa_upper	28, 0xe0, a0
	restore_msa_upper	29, 0xe8, a0
	restore_msa_upper	30, 0xf0, a0
	restore_msa_upper	31, 0xf8, a0
	jr	ra
	 li	v0, 0					# success
	END(_restore_msa_all_upper)

#endif /* CONFIG_CPU_HAS_MSA */

	.set	reorder

	/*
	 * Faulting EX accesses above are redirected here via the exception
	 * table, returning -EFAULT to the caller.
	 */
	.type	fault@function
	.ent	fault
fault:	li	v0, -EFAULT				# failure
	jr	ra
	.end	fault