• Home
  • Raw
  • Download

Lines Matching refs:vassert

62 vassert(r >= 0 && r < 16); in ppHRegAMD64()
67 vassert(r >= 0 && r < 6); in ppHRegAMD64()
72 vassert(r >= 0 && r < 16); in ppHRegAMD64()
96 vassert(r >= 0 && r < 16); in ppHRegAMD64_lo32()
219 vassert(shift >= 0 && shift <= 3); in AMD64AMode_IRRS()
629 vassert(op != Aalu_MUL); in AMD64Instr_Alu64M()
670 default: vassert(0); in AMD64Instr_Alu32R()
687 vassert(sz == 4 || sz == 8); in AMD64Instr_Div()
702 vassert(regparms >= 0 && regparms <= 6); in AMD64Instr_Call()
742 vassert(cond != Acc_ALWAYS); in AMD64Instr_CMov64()
761 vassert(szSmall == 1 || szSmall == 2 || szSmall == 4); in AMD64Instr_LoadEX()
770 vassert(sz == 1 || sz == 2 || sz == 4); in AMD64Instr_Store()
798 vassert(sz == 8 || sz == 4 || sz == 2 || sz == 1); in AMD64Instr_ACAS()
806 vassert(sz == 8 || sz == 4); in AMD64Instr_DACAS()
815 vassert(nregs >= 1 && nregs <= 7); in AMD64Instr_A87Free()
825 vassert(szB == 8 || szB == 4); in AMD64Instr_A87PushPop()
862 vassert(sz == 4 || sz == 8); in AMD64Instr_SseUComIS()
872 vassert(szS == 4 || szS == 8); in AMD64Instr_SseSI2SF()
873 vassert(szD == 4 || szD == 8); in AMD64Instr_SseSI2SF()
883 vassert(szS == 4 || szS == 8); in AMD64Instr_SseSF2SI()
884 vassert(szD == 4 || szD == 8); in AMD64Instr_SseSF2SI()
904 vassert(sz == 4 || sz == 8 || sz == 16); in AMD64Instr_SseLdSt()
914 vassert(sz == 4 || sz == 8); in AMD64Instr_SseLdzLO()
923 vassert(op != Asse_MOV); in AMD64Instr_Sse32Fx4()
932 vassert(op != Asse_MOV); in AMD64Instr_Sse32FLo()
941 vassert(op != Asse_MOV); in AMD64Instr_Sse64Fx2()
950 vassert(op != Asse_MOV); in AMD64Instr_Sse64FLo()
967 vassert(cond != Acc_ALWAYS); in AMD64Instr_SseCMov()
976 vassert(order >= 0 && order <= 0xFF); in AMD64Instr_SseShuf()
1012 vassert(mode64 == True); in ppAMD64Instr()
1231 default: vassert(0); in ppAMD64Instr()
1329 vassert(mode64 == True); in getRegUsage_AMD64Instr()
1367 vassert(i->Ain.Alu32R.op != Aalu_MOV); in getRegUsage_AMD64Instr()
1538 vassert(i->Ain.Sse32Fx4.op != Asse_MOV); in getRegUsage_AMD64Instr()
1547 vassert(i->Ain.Sse32FLo.op != Asse_MOV); in getRegUsage_AMD64Instr()
1556 vassert(i->Ain.Sse64Fx2.op != Asse_MOV); in getRegUsage_AMD64Instr()
1565 vassert(i->Ain.Sse64FLo.op != Asse_MOV); in getRegUsage_AMD64Instr()
1638 vassert(mode64 == True); in mapRegs_AMD64Instr()
1858 vassert(offsetB >= 0); in genSpill_AMD64()
1859 vassert(!hregIsVirtual(rreg)); in genSpill_AMD64()
1860 vassert(mode64 == True); in genSpill_AMD64()
1880 vassert(offsetB >= 0); in genReload_AMD64()
1881 vassert(!hregIsVirtual(rreg)); in genReload_AMD64()
1882 vassert(mode64 == True); in genReload_AMD64()
1905 vassert(hregClass(r) == HRcInt64); in iregBits210()
1906 vassert(!hregIsVirtual(r)); in iregBits210()
1908 vassert(n <= 15); in iregBits210()
1916 vassert(hregClass(r) == HRcInt64); in iregBit3()
1917 vassert(!hregIsVirtual(r)); in iregBit3()
1919 vassert(n <= 15); in iregBit3()
1927 vassert(hregClass(r) == HRcInt64); in iregBits3210()
1928 vassert(!hregIsVirtual(r)); in iregBits3210()
1930 vassert(n <= 15); in iregBits3210()
1942 vassert(hregClass(r) == HRcVec128); in vreg2ireg()
1943 vassert(!hregIsVirtual(r)); in vreg2ireg()
1945 vassert(n <= 15); in vreg2ireg()
2147 vassert(0); in rexAMode_M()
2246 vassert(n >= 0 && n <= 7); in do_ffree_st()
2273 vassert(nbuf >= 32); in emit_AMD64Instr()
2274 vassert(mode64 == True); in emit_AMD64Instr()
2702 vassert(disp_cp_chain_me_to_slowEP != NULL); in emit_AMD64Instr()
2703 vassert(disp_cp_chain_me_to_fastEP != NULL); in emit_AMD64Instr()
2760 vassert(delta > 0 && delta < 40); in emit_AMD64Instr()
2773 vassert(disp_cp_xindir != NULL); in emit_AMD64Instr()
2815 vassert(delta > 0 && delta < 40); in emit_AMD64Instr()
2863 vassert(trcval != 0); in emit_AMD64Instr()
2878 vassert(delta > 0 && delta < 40); in emit_AMD64Instr()
2885 vassert(i->Ain.CMov64.cond != Acc_ALWAYS); in emit_AMD64Instr()
2958 vassert(reg < 16); in emit_AMD64Instr()
3025 vassert(i->Ain.A87Free.nregs > 0 && i->Ain.A87Free.nregs <= 7); in emit_AMD64Instr()
3032 vassert(i->Ain.A87PushPop.szB == 8 || i->Ain.A87PushPop.szB == 4); in emit_AMD64Instr()
3124 vassert(i->Ain.SseUComIS.sz == 4); in emit_AMD64Instr()
3184 vassert(0); in emit_AMD64Instr()
3194 vassert(i->Ain.SseLdzLO.sz == 4 || i->Ain.SseLdzLO.sz == 8); in emit_AMD64Instr()
3453 vassert(p - p0 == 3); in emit_AMD64Instr()
3457 vassert(p - p0 == 5); in emit_AMD64Instr()
3466 vassert(p - p0 == 8); /* also ensures that 0x03 offset above is ok */ in emit_AMD64Instr()
3468 vassert(evCheckSzB_AMD64() == 8); in emit_AMD64Instr()
3486 vassert(!(*is_profInc)); in emit_AMD64Instr()
3501 vassert(p - &buf[0] <= 32); in emit_AMD64Instr()
3531 vassert(p[0] == 0x49); in chainXDirect_AMD64()
3532 vassert(p[1] == 0xBB); in chainXDirect_AMD64()
3533 vassert(*(ULong*)(&p[2]) == Ptr_to_ULong(disp_cp_chain_me_EXPECTED)); in chainXDirect_AMD64()
3534 vassert(p[10] == 0x41); in chainXDirect_AMD64()
3535 vassert(p[11] == 0xFF); in chainXDirect_AMD64()
3536 vassert(p[12] == 0xD3); in chainXDirect_AMD64()
3592 vassert(delta == 0LL || delta == -1LL); in chainXDirect_AMD64()
3647 vassert(valid); in unchainXDirect_AMD64()
3672 vassert(sizeof(ULong*) == 8); in patchProfInc_AMD64()
3674 vassert(p[0] == 0x49); in patchProfInc_AMD64()
3675 vassert(p[1] == 0xBB); in patchProfInc_AMD64()
3676 vassert(p[2] == 0x00); in patchProfInc_AMD64()
3677 vassert(p[3] == 0x00); in patchProfInc_AMD64()
3678 vassert(p[4] == 0x00); in patchProfInc_AMD64()
3679 vassert(p[5] == 0x00); in patchProfInc_AMD64()
3680 vassert(p[6] == 0x00); in patchProfInc_AMD64()
3681 vassert(p[7] == 0x00); in patchProfInc_AMD64()
3682 vassert(p[8] == 0x00); in patchProfInc_AMD64()
3683 vassert(p[9] == 0x00); in patchProfInc_AMD64()
3684 vassert(p[10] == 0x49); in patchProfInc_AMD64()
3685 vassert(p[11] == 0xFF); in patchProfInc_AMD64()
3686 vassert(p[12] == 0x03); in patchProfInc_AMD64()