#include <linux/irqchip/arm-gic.h>
#include <asm/assembler.h>

#define VCPU_USR_REG(_reg_nr)	(VCPU_USR_REGS + (_reg_nr * 4))
#define VCPU_USR_SP		(VCPU_USR_REG(13))
#define VCPU_USR_LR		(VCPU_USR_REG(14))
#define CP15_OFFSET(_cp15_reg_idx) (VCPU_CP15 + (_cp15_reg_idx * 4))

/*
 * Many of these macros need to access the VCPU structure, which is always
 * held in r0. These macros should never clobber r1, as it is used to hold the
 * exception code on the return path (except of course the macro that switches
 * all the registers before the final jump to the VM).
 */
vcpu	.req	r0		@ vcpu pointer always in r0

/* Clobbers {r2-r6} */
.macro store_vfp_state vfp_base
	@ The VFPFMRX and VFPFMXR macros are the VMRS and VMSR instructions
	VFPFMRX	r2, FPEXC
	@ Make sure VFP is enabled so we can touch the registers.
	orr	r6, r2, #FPEXC_EN
	VFPFMXR	FPEXC, r6

	VFPFMRX	r3, FPSCR
	tst	r2, #FPEXC_EX		@ Check for VFP Subarchitecture
	beq	1f
	@ If FPEXC_EX is 0, then FPINST/FPINST2 reads are unpredictable, so
	@ we only need to save them if FPEXC_EX is set.
	VFPFMRX	r4, FPINST
	tst	r2, #FPEXC_FP2V
	VFPFMRX	r5, FPINST2, ne		@ vmrsne
	bic	r6, r2, #FPEXC_EX	@ FPEXC_EX disable
	VFPFMXR	FPEXC, r6
1:
	VFPFSTMIA \vfp_base, r6		@ Save VFP registers
	stm	\vfp_base, {r2-r5}	@ Save FPEXC, FPSCR, FPINST, FPINST2
.endm

/* Assume FPEXC_EN is on and FPEXC_EX is off, clobbers {r2-r6} */
.macro restore_vfp_state vfp_base
	VFPFLDMIA \vfp_base, r6		@ Load VFP registers
	ldm	\vfp_base, {r2-r5}	@ Load FPEXC, FPSCR, FPINST, FPINST2

	VFPFMXR	FPSCR, r3
	tst	r2, #FPEXC_EX		@ Check for VFP Subarchitecture
	beq	1f
	VFPFMXR	FPINST, r4
	tst	r2, #FPEXC_FP2V
	VFPFMXR	FPINST2, r5, ne
1:
	VFPFMXR	FPEXC, r2		@ FPEXC	(last, in case !EN)
.endm

/* These are simply for the macros to work - values don't have meaning */
.equ usr, 0
.equ svc, 1
.equ abt, 2
.equ und, 3
.equ irq, 4
.equ fiq, 5

.macro push_host_regs_mode mode
	mrs	r2, SP_\mode
	mrs	r3, LR_\mode
	mrs	r4, SPSR_\mode
	push	{r2, r3, r4}
.endm

/*
 * Store all host persistent registers on the stack.
 * Clobbers all registers, in all modes, except r0 and r1.
 */
.macro save_host_regs
	/* Hyp regs. Only ELR_hyp (SPSR_hyp already saved) */
	mrs	r2, ELR_hyp
	push	{r2}

	/* usr regs */
	push	{r4-r12}		@ r0-r3 are always clobbered
	mrs	r2, SP_usr
	mov	r3, lr
	push	{r2, r3}

	push_host_regs_mode svc
	push_host_regs_mode abt
	push_host_regs_mode und
	push_host_regs_mode irq

	/* fiq regs */
	mrs	r2, r8_fiq
	mrs	r3, r9_fiq
	mrs	r4, r10_fiq
	mrs	r5, r11_fiq
	mrs	r6, r12_fiq
	mrs	r7, SP_fiq
	mrs	r8, LR_fiq
	mrs	r9, SPSR_fiq
	push	{r2-r9}
.endm

.macro pop_host_regs_mode mode
	pop	{r2, r3, r4}
	msr	SP_\mode, r2
	msr	LR_\mode, r3
	msr	SPSR_\mode, r4
.endm

/*
 * Restore all host registers from the stack.
 * Clobbers all registers, in all modes, except r0 and r1.
 */
.macro restore_host_regs
	pop	{r2-r9}
	msr	r8_fiq, r2
	msr	r9_fiq, r3
	msr	r10_fiq, r4
	msr	r11_fiq, r5
	msr	r12_fiq, r6
	msr	SP_fiq, r7
	msr	LR_fiq, r8
	msr	SPSR_fiq, r9

	pop_host_regs_mode irq
	pop_host_regs_mode und
	pop_host_regs_mode abt
	pop_host_regs_mode svc

	pop	{r2, r3}
	msr	SP_usr, r2
	mov	lr, r3
	pop	{r4-r12}

	pop	{r2}
	msr	ELR_hyp, r2
.endm
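
/*
 * Typical pairing in the world-switch path (illustrative sketch; the
 * actual call sites live in the world-switch code in interrupts.S):
 *
 *	save_host_regs		@ on entry, before loading guest state
 *	...			@ run the guest
 *	restore_host_regs	@ on exit, after saving guest state
 *
 * restore_host_regs pops in the exact reverse order of the pushes in
 * save_host_regs, so the two macros must always be kept in sync.
 */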

/*
 * Restore SP, LR and SPSR for a given mode. offset is the offset of
 * this mode's registers from the VCPU base.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r1, r2, r3, r4.
 */
.macro restore_guest_regs_mode mode, offset
	add	r1, vcpu, \offset
	ldm	r1, {r2, r3, r4}
	msr	SP_\mode, r2
	msr	LR_\mode, r3
	msr	SPSR_\mode, r4
.endm

/*
 * Restore all guest registers from the vcpu struct.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers *all* registers.
 */
.macro restore_guest_regs
	restore_guest_regs_mode svc, #VCPU_SVC_REGS
	restore_guest_regs_mode abt, #VCPU_ABT_REGS
	restore_guest_regs_mode und, #VCPU_UND_REGS
	restore_guest_regs_mode irq, #VCPU_IRQ_REGS

	add	r1, vcpu, #VCPU_FIQ_REGS
	ldm	r1, {r2-r9}
	msr	r8_fiq, r2
	msr	r9_fiq, r3
	msr	r10_fiq, r4
	msr	r11_fiq, r5
	msr	r12_fiq, r6
	msr	SP_fiq, r7
	msr	LR_fiq, r8
	msr	SPSR_fiq, r9

	@ Load return state
	ldr	r2, [vcpu, #VCPU_PC]
	ldr	r3, [vcpu, #VCPU_CPSR]
	msr	ELR_hyp, r2
	msr	SPSR_cxsf, r3

	@ Load user registers
	ldr	r2, [vcpu, #VCPU_USR_SP]
	ldr	r3, [vcpu, #VCPU_USR_LR]
	msr	SP_usr, r2
	mov	lr, r3
	add	vcpu, vcpu, #(VCPU_USR_REGS)
	ldm	vcpu, {r0-r12}
.endm

/*
 * Save SP, LR and SPSR for a given mode. offset is the offset of
 * this mode's registers from the VCPU base.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r2, r3, r4, r5.
 */
.macro save_guest_regs_mode mode, offset
	add	r2, vcpu, \offset
	mrs	r3, SP_\mode
	mrs	r4, LR_\mode
	mrs	r5, SPSR_\mode
	stm	r2, {r3, r4, r5}
.endm
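
/*
 * Example expansion (illustrative): "save_guest_regs_mode svc, #VCPU_SVC_REGS"
 * assembles to
 *
 *	add	r2, vcpu, #VCPU_SVC_REGS
 *	mrs	r3, SP_svc
 *	mrs	r4, LR_svc
 *	mrs	r5, SPSR_svc
 *	stm	r2, {r3, r4, r5}
 *
 * i.e. the banked svc-mode SP, LR and SPSR end up contiguously at offset
 * VCPU_SVC_REGS in the vcpu struct, which is exactly the layout that
 * restore_guest_regs_mode expects on the way back into the guest.
 */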

/*
 * Save all guest registers to the vcpu struct
 * Expects guest's r0, r1, r2 on the stack.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r2, r3, r4, r5.
 */
.macro save_guest_regs
	@ Store usr registers
	add	r2, vcpu, #VCPU_USR_REG(3)
	stm	r2, {r3-r12}
	add	r2, vcpu, #VCPU_USR_REG(0)
	pop	{r3, r4, r5}		@ r0, r1, r2
	stm	r2, {r3, r4, r5}
	mrs	r2, SP_usr
	mov	r3, lr
	str	r2, [vcpu, #VCPU_USR_SP]
	str	r3, [vcpu, #VCPU_USR_LR]

	@ Store return state
	mrs	r2, ELR_hyp
	mrs	r3, spsr
	str	r2, [vcpu, #VCPU_PC]
	str	r3, [vcpu, #VCPU_CPSR]

	@ Store other guest registers
	save_guest_regs_mode svc, #VCPU_SVC_REGS
	save_guest_regs_mode abt, #VCPU_ABT_REGS
	save_guest_regs_mode und, #VCPU_UND_REGS
	save_guest_regs_mode irq, #VCPU_IRQ_REGS
.endm

/* Reads cp15 registers from hardware and stores them in memory
 * @store_to_vcpu: If 0, registers are written in-order to the stack,
 *		   otherwise to the VCPU struct pointed to by vcpu
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r2 - r12
 */
.macro read_cp15_state store_to_vcpu
	mrc	p15, 0, r2, c1, c0, 0	@ SCTLR
	mrc	p15, 0, r3, c1, c0, 2	@ CPACR
	mrc	p15, 0, r4, c2, c0, 2	@ TTBCR
	mrc	p15, 0, r5, c3, c0, 0	@ DACR
	mrrc	p15, 0, r6, r7, c2	@ TTBR 0
	mrrc	p15, 1, r8, r9, c2	@ TTBR 1
	mrc	p15, 0, r10, c10, c2, 0	@ PRRR
	mrc	p15, 0, r11, c10, c2, 1	@ NMRR
	mrc	p15, 2, r12, c0, c0, 0	@ CSSELR

	.if \store_to_vcpu == 0
	push	{r2-r12}		@ Push CP15 registers
	.else
	str	r2, [vcpu, #CP15_OFFSET(c1_SCTLR)]
	str	r3, [vcpu, #CP15_OFFSET(c1_CPACR)]
	str	r4, [vcpu, #CP15_OFFSET(c2_TTBCR)]
	str	r5, [vcpu, #CP15_OFFSET(c3_DACR)]
	add	r2, vcpu, #CP15_OFFSET(c2_TTBR0)
	strd	r6, r7, [r2]
	add	r2, vcpu, #CP15_OFFSET(c2_TTBR1)
	strd	r8, r9, [r2]
	str	r10, [vcpu, #CP15_OFFSET(c10_PRRR)]
	str	r11, [vcpu, #CP15_OFFSET(c10_NMRR)]
	str	r12, [vcpu, #CP15_OFFSET(c0_CSSELR)]
	.endif

	mrc	p15, 0, r2, c13, c0, 1	@ CID
	mrc	p15, 0, r3, c13, c0, 2	@ TID_URW
	mrc	p15, 0, r4, c13, c0, 3	@ TID_URO
	mrc	p15, 0, r5, c13, c0, 4	@ TID_PRIV
	mrc	p15, 0, r6, c5, c0, 0	@ DFSR
	mrc	p15, 0, r7, c5, c0, 1	@ IFSR
	mrc	p15, 0, r8, c5, c1, 0	@ ADFSR
	mrc	p15, 0, r9, c5, c1, 1	@ AIFSR
	mrc	p15, 0, r10, c6, c0, 0	@ DFAR
	mrc	p15, 0, r11, c6, c0, 2	@ IFAR
	mrc	p15, 0, r12, c12, c0, 0	@ VBAR

	.if \store_to_vcpu == 0
	push	{r2-r12}		@ Push CP15 registers
	.else
	str	r2, [vcpu, #CP15_OFFSET(c13_CID)]
	str	r3, [vcpu, #CP15_OFFSET(c13_TID_URW)]
	str	r4, [vcpu, #CP15_OFFSET(c13_TID_URO)]
	str	r5, [vcpu, #CP15_OFFSET(c13_TID_PRIV)]
	str	r6, [vcpu, #CP15_OFFSET(c5_DFSR)]
	str	r7, [vcpu, #CP15_OFFSET(c5_IFSR)]
	str	r8, [vcpu, #CP15_OFFSET(c5_ADFSR)]
	str	r9, [vcpu, #CP15_OFFSET(c5_AIFSR)]
	str	r10, [vcpu, #CP15_OFFSET(c6_DFAR)]
	str	r11, [vcpu, #CP15_OFFSET(c6_IFAR)]
	str	r12, [vcpu, #CP15_OFFSET(c12_VBAR)]
	.endif

	mrc	p15, 0, r2, c14, c1, 0	@ CNTKCTL
	mrrc	p15, 0, r4, r5, c7	@ PAR
	mrc	p15, 0, r6, c10, c3, 0	@ AMAIR0
	mrc	p15, 0, r7, c10, c3, 1	@ AMAIR1

	.if \store_to_vcpu == 0
	push	{r2,r4-r7}
	.else
	str	r2, [vcpu, #CP15_OFFSET(c14_CNTKCTL)]
	add	r12, vcpu, #CP15_OFFSET(c7_PAR)
	strd	r4, r5, [r12]
	str	r6, [vcpu, #CP15_OFFSET(c10_AMAIR0)]
	str	r7, [vcpu, #CP15_OFFSET(c10_AMAIR1)]
	.endif
.endm
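
/*
 * Illustrative pairing (sketch only; the real call sites are in the
 * world-switch code in interrupts.S): on guest entry the host copy of
 * the cp15 state goes to the stack and the guest copy comes from the
 * vcpu struct, and on exit the reverse happens, e.g.
 *
 *	read_cp15_state store_to_vcpu = 0	@ host cp15 -> stack
 *	write_cp15_state read_from_vcpu = 1	@ vcpu -> guest cp15
 *	...					@ run the guest
 *	read_cp15_state store_to_vcpu = 1	@ guest cp15 -> vcpu
 *	write_cp15_state read_from_vcpu = 0	@ stack -> host cp15
 *
 * The stack path pushes three blocks here and pops them in reverse block
 * order below, so the two macros must be kept in sync.
 */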

/*
 * Reads cp15 registers from memory and writes them to hardware
 * @read_from_vcpu: If 0, registers are read in-order from the stack,
 *		    otherwise from the VCPU struct pointed to by vcpu
 *
 * Assumes vcpu pointer in vcpu reg
 */
.macro write_cp15_state read_from_vcpu
	.if \read_from_vcpu == 0
	pop	{r2,r4-r7}
	.else
	ldr	r2, [vcpu, #CP15_OFFSET(c14_CNTKCTL)]
	add	r12, vcpu, #CP15_OFFSET(c7_PAR)
	ldrd	r4, r5, [r12]
	ldr	r6, [vcpu, #CP15_OFFSET(c10_AMAIR0)]
	ldr	r7, [vcpu, #CP15_OFFSET(c10_AMAIR1)]
	.endif

	mcr	p15, 0, r2, c14, c1, 0	@ CNTKCTL
	mcrr	p15, 0, r4, r5, c7	@ PAR
	mcr	p15, 0, r6, c10, c3, 0	@ AMAIR0
	mcr	p15, 0, r7, c10, c3, 1	@ AMAIR1

	.if \read_from_vcpu == 0
	pop	{r2-r12}
	.else
	ldr	r2, [vcpu, #CP15_OFFSET(c13_CID)]
	ldr	r3, [vcpu, #CP15_OFFSET(c13_TID_URW)]
	ldr	r4, [vcpu, #CP15_OFFSET(c13_TID_URO)]
	ldr	r5, [vcpu, #CP15_OFFSET(c13_TID_PRIV)]
	ldr	r6, [vcpu, #CP15_OFFSET(c5_DFSR)]
	ldr	r7, [vcpu, #CP15_OFFSET(c5_IFSR)]
	ldr	r8, [vcpu, #CP15_OFFSET(c5_ADFSR)]
	ldr	r9, [vcpu, #CP15_OFFSET(c5_AIFSR)]
	ldr	r10, [vcpu, #CP15_OFFSET(c6_DFAR)]
	ldr	r11, [vcpu, #CP15_OFFSET(c6_IFAR)]
	ldr	r12, [vcpu, #CP15_OFFSET(c12_VBAR)]
	.endif

	mcr	p15, 0, r2, c13, c0, 1	@ CID
	mcr	p15, 0, r3, c13, c0, 2	@ TID_URW
	mcr	p15, 0, r4, c13, c0, 3	@ TID_URO
	mcr	p15, 0, r5, c13, c0, 4	@ TID_PRIV
	mcr	p15, 0, r6, c5, c0, 0	@ DFSR
	mcr	p15, 0, r7, c5, c0, 1	@ IFSR
	mcr	p15, 0, r8, c5, c1, 0	@ ADFSR
	mcr	p15, 0, r9, c5, c1, 1	@ AIFSR
	mcr	p15, 0, r10, c6, c0, 0	@ DFAR
	mcr	p15, 0, r11, c6, c0, 2	@ IFAR
	mcr	p15, 0, r12, c12, c0, 0	@ VBAR

	.if \read_from_vcpu == 0
	pop	{r2-r12}
	.else
	ldr	r2, [vcpu, #CP15_OFFSET(c1_SCTLR)]
	ldr	r3, [vcpu, #CP15_OFFSET(c1_CPACR)]
	ldr	r4, [vcpu, #CP15_OFFSET(c2_TTBCR)]
	ldr	r5, [vcpu, #CP15_OFFSET(c3_DACR)]
	add	r12, vcpu, #CP15_OFFSET(c2_TTBR0)
	ldrd	r6, r7, [r12]
	add	r12, vcpu, #CP15_OFFSET(c2_TTBR1)
	ldrd	r8, r9, [r12]
	ldr	r10, [vcpu, #CP15_OFFSET(c10_PRRR)]
	ldr	r11, [vcpu, #CP15_OFFSET(c10_NMRR)]
	ldr	r12, [vcpu, #CP15_OFFSET(c0_CSSELR)]
	.endif

	mcr	p15, 0, r2, c1, c0, 0	@ SCTLR
	mcr	p15, 0, r3, c1, c0, 2	@ CPACR
	mcr	p15, 0, r4, c2, c0, 2	@ TTBCR
	mcr	p15, 0, r5, c3, c0, 0	@ DACR
	mcrr	p15, 0, r6, r7, c2	@ TTBR 0
	mcrr	p15, 1, r8, r9, c2	@ TTBR 1
	mcr	p15, 0, r10, c10, c2, 0	@ PRRR
	mcr	p15, 0, r11, c10, c2, 1	@ NMRR
	mcr	p15, 2, r12, c0, c0, 0	@ CSSELR
.endm

/*
 * Save the VGIC CPU state into memory
 *
 * Assumes vcpu pointer in vcpu reg
 */
.macro save_vgic_state
	/* Get VGIC VCTRL base into r2 */
	ldr	r2, [vcpu, #VCPU_KVM]
	ldr	r2, [r2, #KVM_VGIC_VCTRL]
	cmp	r2, #0
	beq	2f

	/* Compute the address of struct vgic_cpu */
	add	r11, vcpu, #VCPU_VGIC_CPU

	/* Save all interesting registers */
	ldr	r4, [r2, #GICH_VMCR]
	ldr	r5, [r2, #GICH_MISR]
	ldr	r6, [r2, #GICH_EISR0]
	ldr	r7, [r2, #GICH_EISR1]
	ldr	r8, [r2, #GICH_ELRSR0]
	ldr	r9, [r2, #GICH_ELRSR1]
	ldr	r10, [r2, #GICH_APR]
ARM_BE8(rev	r4, r4	)
ARM_BE8(rev	r5, r5	)
ARM_BE8(rev	r6, r6	)
ARM_BE8(rev	r7, r7	)
ARM_BE8(rev	r8, r8	)
ARM_BE8(rev	r9, r9	)
ARM_BE8(rev	r10, r10	)

	str	r4, [r11, #VGIC_V2_CPU_VMCR]
	str	r5, [r11, #VGIC_V2_CPU_MISR]
#ifdef CONFIG_CPU_ENDIAN_BE8
	str	r6, [r11, #(VGIC_V2_CPU_EISR + 4)]
	str	r7, [r11, #VGIC_V2_CPU_EISR]
	str	r8, [r11, #(VGIC_V2_CPU_ELRSR + 4)]
	str	r9, [r11, #VGIC_V2_CPU_ELRSR]
#else
	str	r6, [r11, #VGIC_V2_CPU_EISR]
	str	r7, [r11, #(VGIC_V2_CPU_EISR + 4)]
	str	r8, [r11, #VGIC_V2_CPU_ELRSR]
	str	r9, [r11, #(VGIC_V2_CPU_ELRSR + 4)]
#endif
	str	r10, [r11, #VGIC_V2_CPU_APR]

	/* Clear GICH_HCR */
	mov	r5, #0
	str	r5, [r2, #GICH_HCR]

	/* Save list registers */
	add	r2, r2, #GICH_LR0
	add	r3, r11, #VGIC_V2_CPU_LR
	ldr	r4, [r11, #VGIC_CPU_NR_LR]
1:	ldr	r6, [r2], #4
ARM_BE8(rev	r6, r6	)
	str	r6, [r3], #4
	subs	r4, r4, #1
	bne	1b
2:
.endm
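
/*
 * Note on the layout above: GICH_EISR and GICH_ELRSR are each a pair of
 * 32-bit registers backing a single 64-bit field in struct vgic_cpu, so
 * on a BE8 kernel the two halves are stored swapped (in addition to the
 * ARM_BE8 byte reversal) to keep the 64-bit view consistent.  The list
 * register loop runs VGIC_CPU_NR_LR times, copying GICH_LR0 onwards into
 * the VGIC_V2_CPU_LR array one word at a time.
 */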

/*
 * Restore the VGIC CPU state from memory
 *
 * Assumes vcpu pointer in vcpu reg
 */
.macro restore_vgic_state
	/* Get VGIC VCTRL base into r2 */
	ldr	r2, [vcpu, #VCPU_KVM]
	ldr	r2, [r2, #KVM_VGIC_VCTRL]
	cmp	r2, #0
	beq	2f

	/* Compute the address of struct vgic_cpu */
	add	r11, vcpu, #VCPU_VGIC_CPU

	/* We only restore a minimal set of registers */
	ldr	r3, [r11, #VGIC_V2_CPU_HCR]
	ldr	r4, [r11, #VGIC_V2_CPU_VMCR]
	ldr	r8, [r11, #VGIC_V2_CPU_APR]
ARM_BE8(rev	r3, r3	)
ARM_BE8(rev	r4, r4	)
ARM_BE8(rev	r8, r8	)

	str	r3, [r2, #GICH_HCR]
	str	r4, [r2, #GICH_VMCR]
	str	r8, [r2, #GICH_APR]

	/* Restore list registers */
	add	r2, r2, #GICH_LR0
	add	r3, r11, #VGIC_V2_CPU_LR
	ldr	r4, [r11, #VGIC_CPU_NR_LR]
1:	ldr	r6, [r3], #4
ARM_BE8(rev	r6, r6	)
	str	r6, [r2], #4
	subs	r4, r4, #1
	bne	1b
2:
.endm

#define CNTHCTL_PL1PCTEN	(1 << 0)
#define CNTHCTL_PL1PCEN		(1 << 1)

/*
 * Save the timer state onto the VCPU and allow physical timer/counter access
 * for the host.
 *
 * Assumes vcpu pointer in vcpu reg
 * Clobbers r2-r5
 */
.macro save_timer_state
	ldr	r4, [vcpu, #VCPU_KVM]
	ldr	r2, [r4, #KVM_TIMER_ENABLED]
	cmp	r2, #0
	beq	1f

	mrc	p15, 0, r2, c14, c3, 1	@ CNTV_CTL
	str	r2, [vcpu, #VCPU_TIMER_CNTV_CTL]

	isb

	mrrc	p15, 3, rr_lo_hi(r2, r3), c14	@ CNTV_CVAL
	ldr	r4, =VCPU_TIMER_CNTV_CVAL
	add	r5, vcpu, r4
	strd	r2, r3, [r5]

	@ Ensure host CNTVCT == CNTPCT
	mov	r2, #0
	mcrr	p15, 4, r2, r2, c14	@ CNTVOFF

1:
	mov	r2, #0			@ Clear ENABLE
	mcr	p15, 0, r2, c14, c3, 1	@ CNTV_CTL

	@ Allow physical timer/counter access for the host
	mrc	p15, 4, r2, c14, c1, 0	@ CNTHCTL
	orr	r2, r2, #(CNTHCTL_PL1PCEN | CNTHCTL_PL1PCTEN)
	mcr	p15, 4, r2, c14, c1, 0	@ CNTHCTL
.endm
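
/*
 * Architecturally CNTVCT = CNTPCT - CNTVOFF, so writing 0 to CNTVOFF in
 * save_timer_state is what makes the host's virtual counter read the
 * same as the physical one; restore_timer_state below re-installs the
 * guest's CNTVOFF from the kvm struct before the guest runs again.  The
 * two CNTHCTL bits gate PL1 access to the physical counter (PL1PCTEN)
 * and the physical timer (PL1PCEN) respectively.
 */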

/*
 * Load the timer state from the VCPU and deny physical timer/counter access
 * for the host.
 *
 * Assumes vcpu pointer in vcpu reg
 * Clobbers r2-r5
 */
.macro restore_timer_state
	@ Disallow physical timer access for the guest
	@ Physical counter access is allowed
	mrc	p15, 4, r2, c14, c1, 0	@ CNTHCTL
	orr	r2, r2, #CNTHCTL_PL1PCTEN
	bic	r2, r2, #CNTHCTL_PL1PCEN
	mcr	p15, 4, r2, c14, c1, 0	@ CNTHCTL

	ldr	r4, [vcpu, #VCPU_KVM]
	ldr	r2, [r4, #KVM_TIMER_ENABLED]
	cmp	r2, #0
	beq	1f

	ldr	r2, [r4, #KVM_TIMER_CNTVOFF]
	ldr	r3, [r4, #(KVM_TIMER_CNTVOFF + 4)]
	mcrr	p15, 4, rr_lo_hi(r2, r3), c14	@ CNTVOFF

	ldr	r4, =VCPU_TIMER_CNTV_CVAL
	add	r5, vcpu, r4
	ldrd	r2, r3, [r5]
	mcrr	p15, 3, rr_lo_hi(r2, r3), c14	@ CNTV_CVAL
	isb

	ldr	r2, [vcpu, #VCPU_TIMER_CNTV_CTL]
	and	r2, r2, #3
	mcr	p15, 0, r2, c14, c3, 1	@ CNTV_CTL
1:
.endm

.equ vmentry,	0
.equ vmexit,	1

/* Configures the HSTR (Hyp System Trap Register) on entry/return
 * (hardware reset value is 0) */
.macro set_hstr operation
	mrc	p15, 4, r2, c1, c1, 3
	ldr	r3, =HSTR_T(15)
	.if \operation == vmentry
	orr	r2, r2, r3		@ Trap CR{15}
	.else
	bic	r2, r2, r3		@ Don't trap any CRx accesses
	.endif
	mcr	p15, 4, r2, c1, c1, 3
.endm

/* Configures the HCPTR (Hyp Coprocessor Trap Register) on entry/return
 * (hardware reset value is 0). Keep previous value in r2.
 * An ISB is emitted on vmexit/vmtrap, but executed on vmexit only if
 * VFP wasn't already enabled (always executed on vmtrap).
 * If a label is specified with vmexit, it is branched to if VFP wasn't
 * enabled.
 */
.macro set_hcptr operation, mask, label = none
	mrc	p15, 4, r2, c1, c1, 2
	ldr	r3, =\mask
	.if \operation == vmentry
	orr	r3, r2, r3		@ Trap coproc-accesses defined in mask
	.else
	bic	r3, r2, r3		@ Don't trap defined coproc-accesses
	.endif
	mcr	p15, 4, r3, c1, c1, 2
	.if \operation != vmentry
	.if \operation == vmexit
	tst	r2, #(HCPTR_TCP(10) | HCPTR_TCP(11))
	beq	1f
	.endif
	isb
	.if \label != none
	b	\label
	.endif
1:
	.endif
.endm

/* Configures the HDCR (Hyp Debug Configuration Register) on entry/return
 * (hardware reset value is 0) */
.macro set_hdcr operation
	mrc	p15, 4, r2, c1, c1, 1
	ldr	r3, =(HDCR_TPM|HDCR_TPMCR)
	.if \operation == vmentry
	orr	r2, r2, r3		@ Trap some perfmon accesses
	.else
	bic	r2, r2, r3		@ Don't trap any perfmon accesses
	.endif
	mcr	p15, 4, r2, c1, c1, 1
.endm

/* Enable/Disable: stage-2 trans., trap interrupts, trap wfi, trap smc */
.macro configure_hyp_role operation
	.if \operation == vmentry
	ldr	r2, [vcpu, #VCPU_HCR]
	ldr	r3, [vcpu, #VCPU_IRQ_LINES]
	orr	r2, r2, r3
	.else
	mov	r2, #0
	.endif
	mcr	p15, 4, r2, c1, c1, 0	@ HCR
.endm

.macro load_vcpu
	mrc	p15, 4, vcpu, c13, c0, 2	@ HTPIDR
.endm
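
/*
 * Note on load_vcpu (illustrative; see the world-switch code in
 * interrupts.S for the actual sequence): HTPIDR, the Hyp software thread
 * ID register, is used as scratch storage for the vcpu pointer.  The
 * world-switch code stashes r0 there before entering the guest, so the
 * Hyp exception handlers can recover the vcpu pointer with load_vcpu
 * after a trap, at a point where every general-purpose register still
 * holds guest state.
 */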