Lines Matching refs:vassert
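
These are the vassert call sites in the VEX AMD64 backend (host_amd64_defs.c). vassert is VEX's internal assertion macro: on failure it reports the failing expression and its source location, then aborts. As a rough, self-contained sketch of the mechanism (the real macro lives in VEX/priv/main_util.h and calls vex_assert_fail; sketch_assert_fail below is an illustrative stand-in, not the real function):

    #include <stdio.h>
    #include <stdlib.h>

    /* Illustrative stand-in for VEX's vex_assert_fail. */
    static void sketch_assert_fail(const char* expr, const char* file,
                                   int line, const char* fn)
    {
       fprintf(stderr, "vex: assertion failed: %s\n     at %s:%d (%s)\n",
               expr, file, line, fn);
       abort();
    }

    /* Sketch of the macro: evaluate the condition, report and die if false. */
    #define vassert(expr)                                              \
       ((void)((expr) ? 0                                              \
                      : (sketch_assert_fail(#expr, __FILE__,           \
                                            __LINE__, __func__), 0)))
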
119 vassert(r >= 0 && r < 16); in ppHRegAMD64()
124 vassert(r >= 0 && r < 16); in ppHRegAMD64()
148 vassert(r >= 0 && r < 16); in ppHRegAMD64_lo32()
200 vassert(shift >= 0 && shift <= 3); in AMD64AMode_IRRS()
610 vassert(op != Aalu_MUL); in AMD64Instr_Alu64M()
651 default: vassert(0); in AMD64Instr_Alu32R()
668 vassert(sz == 4 || sz == 8); in AMD64Instr_Div()
685 vassert(regparms >= 0 && regparms <= 6); in AMD64Instr_Call()
686 vassert(is_sane_RetLoc(rloc)); in AMD64Instr_Call()
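
The regparms bound checked in AMD64Instr_Call() matches the SysV AMD64 calling convention, which passes at most six integer arguments in registers. Illustrative table only; the name below is not from the source:

    /* The six SysV AMD64 integer argument registers, in order;
       this is why regparms is bounded at 6. */
    static const char* const sysv_int_arg_regs[6] =
       { "%rdi", "%rsi", "%rdx", "%rcx", "%r8", "%r9" };
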
726 vassert(cond != Acc_ALWAYS); in AMD64Instr_CMov64()
737 vassert(cond != Acc_ALWAYS && (szB == 4 || szB == 8)); in AMD64Instr_CLoad()
748 vassert(cond != Acc_ALWAYS && (szB == 4 || szB == 8)); in AMD64Instr_CStore()
767 vassert(szSmall == 1 || szSmall == 2 || szSmall == 4); in AMD64Instr_LoadEX()
776 vassert(sz == 1 || sz == 2 || sz == 4); in AMD64Instr_Store()
804 vassert(sz == 8 || sz == 4 || sz == 2 || sz == 1); in AMD64Instr_ACAS()
812 vassert(sz == 8 || sz == 4); in AMD64Instr_DACAS()
821 vassert(nregs >= 1 && nregs <= 7); in AMD64Instr_A87Free()
831 vassert(szB == 8 || szB == 4); in AMD64Instr_A87PushPop()
868 vassert(sz == 4 || sz == 8); in AMD64Instr_SseUComIS()
878 vassert(szS == 4 || szS == 8); in AMD64Instr_SseSI2SF()
879 vassert(szD == 4 || szD == 8); in AMD64Instr_SseSI2SF()
889 vassert(szS == 4 || szS == 8); in AMD64Instr_SseSF2SI()
890 vassert(szD == 4 || szD == 8); in AMD64Instr_SseSF2SI()
910 vassert(sz == 4 || sz == 8 || sz == 16); in AMD64Instr_SseLdSt()
921 vassert(cond != Acc_ALWAYS); in AMD64Instr_SseCStore()
932 vassert(cond != Acc_ALWAYS); in AMD64Instr_SseCLoad()
942 vassert(sz == 4 || sz == 8); in AMD64Instr_SseLdzLO()
951 vassert(op != Asse_MOV); in AMD64Instr_Sse32Fx4()
960 vassert(op != Asse_MOV); in AMD64Instr_Sse32FLo()
969 vassert(op != Asse_MOV); in AMD64Instr_Sse64Fx2()
978 vassert(op != Asse_MOV); in AMD64Instr_Sse64FLo()
995 vassert(cond != Acc_ALWAYS); in AMD64Instr_SseCMov()
1004 vassert(order >= 0 && order <= 0xFF); in AMD64Instr_SseShuf()
1040 vassert(mode64 == True); in ppAMD64Instr()
1281 default: vassert(0); in ppAMD64Instr()
1397 vassert(mode64 == True); in getRegUsage_AMD64Instr()
1435 vassert(i->Ain.Alu32R.op != Aalu_MOV); in getRegUsage_AMD64Instr()
1622 vassert(i->Ain.Sse32Fx4.op != Asse_MOV); in getRegUsage_AMD64Instr()
1631 vassert(i->Ain.Sse32FLo.op != Asse_MOV); in getRegUsage_AMD64Instr()
1640 vassert(i->Ain.Sse64Fx2.op != Asse_MOV); in getRegUsage_AMD64Instr()
1649 vassert(i->Ain.Sse64FLo.op != Asse_MOV); in getRegUsage_AMD64Instr()
1722 vassert(mode64 == True); in mapRegs_AMD64Instr()
1958 vassert(offsetB >= 0); in genSpill_AMD64()
1959 vassert(!hregIsVirtual(rreg)); in genSpill_AMD64()
1960 vassert(mode64 == True); in genSpill_AMD64()
1980 vassert(offsetB >= 0); in genReload_AMD64()
1981 vassert(!hregIsVirtual(rreg)); in genReload_AMD64()
1982 vassert(mode64 == True); in genReload_AMD64()
2000 vassert(spill_off >= 0 && spill_off < 10000); /* let's say */ in directReload_AMD64()
2010 vassert(! sameHReg(i->Ain.Alu64R.dst, vreg)); in directReload_AMD64()
2042 vassert(hregClass(r) == HRcInt64); in iregEnc210()
2043 vassert(!hregIsVirtual(r)); in iregEnc210()
2045 vassert(n <= 15); in iregEnc210()
2053 vassert(hregClass(r) == HRcInt64); in iregEnc3()
2054 vassert(!hregIsVirtual(r)); in iregEnc3()
2056 vassert(n <= 15); in iregEnc3()
2064 vassert(hregClass(r) == HRcInt64); in iregEnc3210()
2065 vassert(!hregIsVirtual(r)); in iregEnc3210()
2067 vassert(n <= 15); in iregEnc3210()
2075 vassert(hregClass(r) == HRcVec128); in vregEnc3210()
2076 vassert(!hregIsVirtual(r)); in vregEnc3210()
2078 vassert(n <= 15); in vregEnc3210()
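
The n <= 15 checks in iregEnc210(), iregEnc3() and iregEnc3210() (and vregEnc3210() for XMM registers) reflect that AMD64 has sixteen integer and sixteen vector registers, so a register encoding is a 4-bit number: bit 3 goes into the REX prefix, bits 2..0 into the ModRM/SIB byte. A minimal sketch of the split, with hypothetical *_sk names (the real helpers take an HReg and extract its encoding first, as the hregClass/hregIsVirtual checks above show):

    #include <stdint.h>

    static uint32_t iregEnc3210_sk(uint32_t n) { return n; }            /* full 4-bit number, 0..15 */
    static uint32_t iregEnc210_sk (uint32_t n) { return n & 7; }        /* bits 2..0, for ModRM/SIB */
    static uint32_t iregEnc3_sk   (uint32_t n) { return (n >> 3) & 1; } /* bit 3, for the REX prefix */
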
2084 vassert(mod < 4); in mkModRegRM()
2085 vassert((reg|regmem) < 8); in mkModRegRM()
2091 vassert(shift < 4); in mkSIB()
2092 vassert((regindex|regbase) < 8); in mkSIB()
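
mkModRegRM() and mkSIB() each pack three fields into a single byte, 2+3+3 bits, which is exactly what the mod < 4, (reg|regmem) < 8, shift < 4 and (regindex|regbase) < 8 assertions enforce. A sketch of the packing (illustrative *_sk names):

    #include <stdint.h>

    /* ModRM byte: mod in bits 7..6, reg in 5..3, rm in 2..0. */
    static uint8_t mkModRegRM_sk(uint32_t mod, uint32_t reg, uint32_t regmem)
    {
       return (uint8_t)(((mod & 3) << 6) | ((reg & 7) << 3) | (regmem & 7));
    }

    /* SIB byte: scale in bits 7..6, index in 5..3, base in 2..0. */
    static uint8_t mkSIB_sk(uint32_t shift, uint32_t regindex, uint32_t regbase)
    {
       return (uint8_t)(((shift & 3) << 6) | ((regindex & 7) << 3) | (regbase & 7));
    }
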
2239 vassert(gregEnc3210 < 16); in doAMode_M_enc()
2259 vassert(gregEnc3210 < 16); in doAMode_R_enc_reg()
2265 vassert(eregEnc3210 < 16); in doAMode_R_reg_enc()
2271 vassert( (gregEnc3210|eregEnc3210) < 16); in doAMode_R_enc_enc()
2301 vassert(0); in rexAMode_M__wrk()
2312 vassert(gregEnc3210 < 16); in rexAMode_M_enc()
2334 vassert(gregEnc3210 < 16); in rexAMode_R_enc_reg()
2340 vassert(eregEnc3210 < 16); in rexAMode_R_reg_enc()
2346 vassert((gregEnc3210|eregEnc3210) < 16); in rexAMode_R_enc_enc()
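
The rexAMode_* checks guard 4-bit register encodings whose top bit lands in the REX prefix. REX has the form 0100WRXB: W selects 64-bit operand size, and R, X, B extend ModRM.reg, SIB.index and ModRM.rm/SIB.base respectively. A hedged sketch for the reg-reg case, with W fixed at 1 (hypothetical *_sk name; the real helpers also decide when the prefix can be omitted):

    #include <stdint.h>

    static uint8_t rexAMode_R_sk(uint32_t gregEnc3210, uint32_t eregEnc3210)
    {
       uint8_t W = 1;                        /* 64-bit operand size */
       uint8_t R = (gregEnc3210 >> 3) & 1;   /* extends ModRM.reg */
       uint8_t X = 0;                        /* no SIB index in the reg-reg form */
       uint8_t B = (eregEnc3210 >> 3) & 1;   /* extends ModRM.rm */
       return (uint8_t)(0x40 | (W << 3) | (R << 2) | (X << 1) | B);
    }
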
2435 vassert(n >= 0 && n <= 7); in do_ffree_st()
2462 vassert(nbuf >= 64); in emit_AMD64Instr()
2463 vassert(mode64 == True); in emit_AMD64Instr()
2978 vassert(0); // should never get here in emit_AMD64Instr()
2992 vassert(delta >= 0 && delta < 100/*arbitrary*/); in emit_AMD64Instr()
2997 vassert(delta >= 0 && delta < 100/*arbitrary*/); in emit_AMD64Instr()
3009 vassert(disp_cp_chain_me_to_slowEP != NULL); in emit_AMD64Instr()
3010 vassert(disp_cp_chain_me_to_fastEP != NULL); in emit_AMD64Instr()
3067 vassert(delta > 0 && delta < 40); in emit_AMD64Instr()
3080 vassert(disp_cp_xindir != NULL); in emit_AMD64Instr()
3122 vassert(delta > 0 && delta < 40); in emit_AMD64Instr()
3171 vassert(trcval != 0); in emit_AMD64Instr()
3186 vassert(delta > 0 && delta < 40); in emit_AMD64Instr()
3193 vassert(i->Ain.CMov64.cond != Acc_ALWAYS); in emit_AMD64Instr()
3201 vassert(i->Ain.CLoad.cond != Acc_ALWAYS); in emit_AMD64Instr()
3204 vassert(i->Ain.CLoad.szB == 4 || i->Ain.CLoad.szB == 8); in emit_AMD64Instr()
3224 vassert(delta > 0 && delta < 40); in emit_AMD64Instr()
3232 vassert(i->Ain.CStore.cond != Acc_ALWAYS); in emit_AMD64Instr()
3235 vassert(i->Ain.CStore.szB == 4 || i->Ain.CStore.szB == 8); in emit_AMD64Instr()
3253 vassert(delta > 0 && delta < 40); in emit_AMD64Instr()
3314 vassert(reg < 16); in emit_AMD64Instr()
3381 vassert(i->Ain.A87Free.nregs > 0 && i->Ain.A87Free.nregs <= 7); in emit_AMD64Instr()
3388 vassert(i->Ain.A87PushPop.szB == 8 || i->Ain.A87PushPop.szB == 4); in emit_AMD64Instr()
3496 vassert(i->Ain.SseUComIS.sz == 4); in emit_AMD64Instr()
3556 vassert(0); in emit_AMD64Instr()
3568 vassert(i->Ain.SseCStore.cond != Acc_ALWAYS); in emit_AMD64Instr()
3589 vassert(delta > 0 && delta < 40); in emit_AMD64Instr()
3595 vassert(i->Ain.SseCLoad.cond != Acc_ALWAYS); in emit_AMD64Instr()
3616 vassert(delta > 0 && delta < 40); in emit_AMD64Instr()
3622 vassert(i->Ain.SseLdzLO.sz == 4 || i->Ain.SseLdzLO.sz == 8); in emit_AMD64Instr()
3881 vassert(p - p0 == 3); in emit_AMD64Instr()
3885 vassert(p - p0 == 5); in emit_AMD64Instr()
3894 vassert(p - p0 == 8); /* also ensures that 0x03 offset above is ok */ in emit_AMD64Instr()
3896 vassert(evCheckSzB_AMD64() == 8); in emit_AMD64Instr()
3914 vassert(!(*is_profInc)); in emit_AMD64Instr()
3929 vassert(p - &buf[0] <= 64); in emit_AMD64Instr()
3950 vassert(endness_host == VexEndnessLE); in chainXDirect_AMD64()
3960 vassert(p[0] == 0x49); in chainXDirect_AMD64()
3961 vassert(p[1] == 0xBB); in chainXDirect_AMD64()
3962 vassert(read_misaligned_ULong_LE(&p[2]) == (Addr)disp_cp_chain_me_EXPECTED); in chainXDirect_AMD64()
3963 vassert(p[10] == 0x41); in chainXDirect_AMD64()
3964 vassert(p[11] == 0xFF); in chainXDirect_AMD64()
3965 vassert(p[12] == 0xD3); in chainXDirect_AMD64()
4018 vassert(delta == 0LL || delta == -1LL); in chainXDirect_AMD64()
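
The byte-pattern checks in chainXDirect_AMD64() match the unchained exit sequence that emit_AMD64Instr() lays down: 49 BB <imm64> is movabsq $disp_cp_chain_me_EXPECTED, %r11 and 41 FF D3 is callq *%r11. A sketch of the same recognition as one hypothetical helper, little-endian host assumed (as the read_misaligned_ULong_LE call implies):

    #include <string.h>
    #include <stdint.h>

    static int looks_like_unchained_exit(const uint8_t* p, uint64_t expected)
    {
       uint64_t imm;
       memcpy(&imm, &p[2], 8);                /* the movabsq's imm64, read LE */
       return p[0]  == 0x49 && p[1]  == 0xBB  /* movabsq $imm64, %r11 */
           && imm == expected
           && p[10] == 0x41 && p[11] == 0xFF
           && p[12] == 0xD3;                  /* callq *%r11 */
    }
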
4036 vassert(endness_host == VexEndnessLE); in unchainXDirect_AMD64()
4077 vassert(valid); in unchainXDirect_AMD64()
4103 vassert(endness_host == VexEndnessLE); in patchProfInc_AMD64()
4104 vassert(sizeof(ULong*) == 8); in patchProfInc_AMD64()
4106 vassert(p[0] == 0x49); in patchProfInc_AMD64()
4107 vassert(p[1] == 0xBB); in patchProfInc_AMD64()
4108 vassert(p[2] == 0x00); in patchProfInc_AMD64()
4109 vassert(p[3] == 0x00); in patchProfInc_AMD64()
4110 vassert(p[4] == 0x00); in patchProfInc_AMD64()
4111 vassert(p[5] == 0x00); in patchProfInc_AMD64()
4112 vassert(p[6] == 0x00); in patchProfInc_AMD64()
4113 vassert(p[7] == 0x00); in patchProfInc_AMD64()
4114 vassert(p[8] == 0x00); in patchProfInc_AMD64()
4115 vassert(p[9] == 0x00); in patchProfInc_AMD64()
4116 vassert(p[10] == 0x49); in patchProfInc_AMD64()
4117 vassert(p[11] == 0xFF); in patchProfInc_AMD64()
4118 vassert(p[12] == 0x03); in patchProfInc_AMD64()
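
patchProfInc_AMD64() is checking for the stub movabsq $0, %r11 (49 BB followed by eight zero placeholder bytes) then incq (%r11) (49 FF 03); patching consists of dropping the real counter's address into the imm64. A minimal sketch of that final store, under the same little-endian assumption (hypothetical helper name):

    #include <string.h>
    #include <stdint.h>

    static void patch_profinc_sk(uint8_t* p, const uint64_t* counter)
    {
       uint64_t addr = (uint64_t)(uintptr_t)counter;
       memcpy(&p[2], &addr, 8);   /* overwrite the placeholder imm64 with &counter */
    }
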