Search results for refs:sameHReg (1 – 10 of 10, sorted by relevance)
All matches are under /external/valgrind/VEX/priv/:
host_generic_regs.c
    173  if (sameHReg(tab->vRegs[i], reg))  in addHRegUse()
    218  if (sameHReg(reg, tab->vRegs[i]))  in HRegUsage__contains()
    255  if (sameHReg(map->orig[i], orig))  in addToHRegRemap()
    275  if (sameHReg(map->orig[i], orig))  in lookupHRegRemap()
host_generic_regs.h
    171  static inline Bool sameHReg ( HReg r1, HReg r2 )  in sameHReg() (definition)
    180  return sameHReg(r, INVALID_HREG);  in hregIsInvalid()
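The host_generic_regs.h entry above is the definition site. For orientation, here is a minimal, self-contained sketch of what sameHReg and hregIsInvalid plausibly look like; the HReg layout, the u32 field, the toBool macro and the INVALID_HREG encoding are simplified stand-ins for this sketch, not copied from VEX.

    #include <stdio.h>

    /* Simplified stand-ins for VEX's basic types -- illustration only. */
    typedef unsigned int  UInt;
    typedef unsigned char Bool;
    #define toBool(x) ((Bool)(x))

    /* Assumption: HReg wraps a single encoded word, so register equality
       reduces to a plain word comparison. */
    typedef struct { UInt u32; } HReg;

    static inline Bool sameHReg ( HReg r1, HReg r2 )
    {
       return toBool(r1.u32 == r2.u32);
    }

    /* Hypothetical "no register" encoding, used only in this sketch. */
    static const HReg INVALID_HREG = { 0xFFFFFFFFu };

    static inline Bool hregIsInvalid ( HReg r )
    {
       return sameHReg(r, INVALID_HREG);
    }

    int main(void)
    {
       HReg a = { 5 }, b = { 5 }, c = { 7 };
       printf("%d %d %d\n", (int)sameHReg(a, b),      /* 1 */
                            (int)sameHReg(a, c),      /* 0 */
                            (int)hregIsInvalid(c));   /* 0 */
       return 0;
    }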
host_x86_defs.c
   1458  && sameHReg(i->Xin.SseReRg.src, i->Xin.SseReRg.dst)) {  in getRegUsage_X86Instr()
   1771  && sameHReg(i->Xin.Alu32R.src->Xrmi.Reg.reg, vreg)) {  in directReload_X86()
   1772  vassert(! sameHReg(i->Xin.Alu32R.dst, vreg));  in directReload_X86()
   1786  && sameHReg(i->Xin.Alu32R.dst, vreg)) {  in directReload_X86()
   1799  && sameHReg(i->Xin.Push.src->Xrmi.Reg.reg, vreg)) {  in directReload_X86()
   1809  && sameHReg(i->Xin.CMov32.src->Xrm.Reg.reg, vreg)) {  in directReload_X86()
   1810  vassert(! sameHReg(i->Xin.CMov32.dst, vreg));  in directReload_X86()
   1821  && sameHReg(i->Xin.Test32.dst->Xrm.Reg.reg, vreg)) {  in directReload_X86()
   1923  && ! sameHReg(am->Xam.IR.reg, hregX86_ESP())  in doAMode_M__wrk()
   1924  && ! sameHReg(am->Xam.IR.reg, hregX86_EBP()) ) {  in doAMode_M__wrk()
   [all …]
host_amd64_defs.c
   1660  && sameHReg(i->Ain.SseReRg.src, i->Ain.SseReRg.dst)) {  in getRegUsage_AMD64Instr()
   2009  && sameHReg(i->Ain.Alu64R.src->Armi.Reg.reg, vreg)) {  in directReload_AMD64()
   2010  vassert(! sameHReg(i->Ain.Alu64R.dst, vreg));  in directReload_AMD64()
   2024  && sameHReg(i->Ain.Alu64R.dst, vreg)) {  in directReload_AMD64()
   2165  && ! sameHReg(am->Aam.IR.reg, hregAMD64_RSP())  in doAMode_M__wrk()
   2166  && ! sameHReg(am->Aam.IR.reg, hregAMD64_RBP())  in doAMode_M__wrk()
   2167  && ! sameHReg(am->Aam.IR.reg, hregAMD64_R12())  in doAMode_M__wrk()
   2168  && ! sameHReg(am->Aam.IR.reg, hregAMD64_R13())  in doAMode_M__wrk()
   2174  && ! sameHReg(am->Aam.IR.reg, hregAMD64_RSP())  in doAMode_M__wrk()
   2175  && ! sameHReg(am->Aam.IR.reg, hregAMD64_R12())  in doAMode_M__wrk()
   [all …]
host_generic_reg_alloc2.c
   1306  vassert(! sameHReg(reg_usage_arr[ii].vRegs[0],  in doRegisterAllocation()
   1443  if (sameHReg(rreg_state[k].vreg, reg_usage_arr[ii].vRegs[m])) {  in doRegisterAllocation()
   1475  vassert(! sameHReg(rreg_state[spillee].vreg, vreg));  in doRegisterAllocation()
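The register-allocator matches above all follow the same pattern: scan a small state table and compare entries with sameHReg. Below is a hypothetical sketch of that lookup; the RRegState struct, its field names, and findBoundRReg are invented for illustration and reuse the simplified stand-in types from the previous sketch rather than VEX's real definitions.

    /* Simplified stand-ins, as in the previous sketch -- not VEX's real types. */
    typedef unsigned int  UInt;
    typedef unsigned char Bool;
    typedef struct { UInt u32; } HReg;

    static inline Bool sameHReg ( HReg r1, HReg r2 )
    {
       return (Bool)(r1.u32 == r2.u32);
    }

    /* Hypothetical slot of a real-register state table (names invented). */
    typedef struct { HReg vreg; Bool in_use; } RRegState;

    /* Return the index of the real register currently bound to 'vreg', or -1
       if it is not resident -- mirroring the sameHReg(rreg_state[k].vreg, ...)
       comparisons seen in host_generic_reg_alloc2.c. */
    static int findBoundRReg ( const RRegState* rreg_state, int n_rregs, HReg vreg )
    {
       int k;
       for (k = 0; k < n_rregs; k++) {
          if (rreg_state[k].in_use && sameHReg(rreg_state[k].vreg, vreg))
             return k;
       }
       return -1;
    }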
host_ppc_defs.c
   1540  if (!sameHReg(dst, src)) {  in ppMovReg()
   1560  sameHReg(rh_srcR->Prh.Reg.reg, r_srcL)) {  in ppPPCInstr()
   2564  && sameHReg(i->Pin.AvBinary.dst, i->Pin.AvBinary.srcL)  in getRegUsage_PPCInstr()
   2565  && sameHReg(i->Pin.AvBinary.dst, i->Pin.AvBinary.srcR)) {  in getRegUsage_PPCInstr()
   3132  if (! sameHReg(i->Pin.Alu.srcR->Prh.Reg.reg, i->Pin.Alu.srcL))  in isMove_PPCInstr()
host_mips_defs.c
   1957  if (!sameHReg(i->Min.Alu.srcR->Mrh.Reg.reg, i->Min.Alu.srcL))  in isMove_MIPSInstr()
host_x86_isel.c
   1537  || sameHReg(am->Xam.IR.reg, hregX86_EBP())) );  in sane_AMode()
host_amd64_isel.c
    268  || sameHReg(am->Aam.IR.reg, hregAMD64_RBP())) );  in sane_AMode()
host_arm_isel.c
    885  || sameHReg(am->ARMam1.RI.reg, hregARM_R8()))  in sane_AMode1()