Lines Matching refs:vassert
150 vassert(n > 1 && n < 64); in sx_to_64()
261 vassert(i < 65536); in mkU16()
267 vassert(i < 256); in mkU8()
358 vassert(isPlausibleIRType(ty)); in newTemp()
372 vassert(t1 && *t1 == IRTemp_INVALID); in newTempsV128_2()
373 vassert(t2 && *t2 == IRTemp_INVALID); in newTempsV128_2()
381 vassert(t1 && *t1 == IRTemp_INVALID); in newTempsV128_3()
382 vassert(t2 && *t2 == IRTemp_INVALID); in newTempsV128_3()
383 vassert(t3 && *t3 == IRTemp_INVALID); in newTempsV128_3()
392 vassert(t1 && *t1 == IRTemp_INVALID); in newTempsV128_4()
393 vassert(t2 && *t2 == IRTemp_INVALID); in newTempsV128_4()
394 vassert(t3 && *t3 == IRTemp_INVALID); in newTempsV128_4()
395 vassert(t4 && *t4 == IRTemp_INVALID); in newTempsV128_4()
406 vassert(t1 && *t1 == IRTemp_INVALID); in newTempsV128_7()
407 vassert(t2 && *t2 == IRTemp_INVALID); in newTempsV128_7()
408 vassert(t3 && *t3 == IRTemp_INVALID); in newTempsV128_7()
409 vassert(t4 && *t4 == IRTemp_INVALID); in newTempsV128_7()
410 vassert(t5 && *t5 == IRTemp_INVALID); in newTempsV128_7()
411 vassert(t6 && *t6 == IRTemp_INVALID); in newTempsV128_7()
412 vassert(t7 && *t7 == IRTemp_INVALID); in newTempsV128_7()
588 vassert(size < 4); in mkVecADD()
595 vassert(size < 4); in mkVecQADDU()
602 vassert(size < 4); in mkVecQADDS()
610 vassert(size < 4); in mkVecQADDEXTSUSATUU()
618 vassert(size < 4); in mkVecQADDEXTUSSATSS()
625 vassert(size < 4); in mkVecSUB()
632 vassert(size < 4); in mkVecQSUBU()
639 vassert(size < 4); in mkVecQSUBS()
646 vassert(size < 4); in mkVecSARN()
653 vassert(size < 4); in mkVecSHRN()
660 vassert(size < 4); in mkVecSHLN()
668 vassert(size < 4); in mkVecCATEVENLANES()
676 vassert(size < 4); in mkVecCATODDLANES()
684 vassert(size < 4); in mkVecINTERLEAVELO()
692 vassert(size < 4); in mkVecINTERLEAVEHI()
699 vassert(size < 4); in mkVecMAXU()
706 vassert(size < 4); in mkVecMAXS()
713 vassert(size < 4); in mkVecMINU()
720 vassert(size < 4); in mkVecMINS()
727 vassert(size < 3); in mkVecMUL()
734 vassert(sizeNarrow < 3); in mkVecMULLU()
741 vassert(sizeNarrow < 3); in mkVecMULLS()
748 vassert(sizeNarrow < 3); in mkVecQDMULLS()
755 vassert(size < 4); in mkVecCMPEQ()
762 vassert(size < 4); in mkVecCMPGTU()
769 vassert(size < 4); in mkVecCMPGTS()
776 vassert(size < 4); in mkVecABS()
784 vassert(size < 4); in mkVecZEROHIxxOFV128()
799 vassert(size < 4); in mkVecQDMULHIS()
806 vassert(size < 4); in mkVecQRDMULHIS()
814 vassert(size < 4); in mkVecQANDUQSH()
822 vassert(size < 4); in mkVecQANDSQSH()
830 vassert(size < 4); in mkVecQANDUQRSH()
838 vassert(size < 4); in mkVecQANDSQRSH()
845 vassert(size < 4); in mkVecSHU()
852 vassert(size < 4); in mkVecSHS()
859 vassert(size < 4); in mkVecRSHU()
866 vassert(size < 4); in mkVecRSHS()
874 vassert(sizeNarrow < 4); in mkVecNARROWUN()
882 vassert(sizeNarrow < 4); in mkVecQNARROWUNSU()
890 vassert(sizeNarrow < 4); in mkVecQNARROWUNSS()
898 vassert(sizeNarrow < 4); in mkVecQNARROWUNUU()
906 vassert(sizeNarrow < 4); in mkVecQANDqshrNNARROWUU()
914 vassert(sizeNarrow < 4); in mkVecQANDqsarNNARROWSS()
922 vassert(sizeNarrow < 4); in mkVecQANDqsarNNARROWSU()
930 vassert(sizeNarrow < 4); in mkVecQANDqrshrNNARROWUU()
938 vassert(sizeNarrow < 4); in mkVecQANDqrsarNNARROWSS()
946 vassert(sizeNarrow < 4); in mkVecQANDqrsarNNARROWSU()
954 vassert(size < 4); in mkVecQSHLNSATUU()
962 vassert(size < 4); in mkVecQSHLNSATSS()
970 vassert(size < 4); in mkVecQSHLNSATSU()
977 vassert(size < 4); in mkVecADDF()
984 vassert(size < 4); in mkVecMAXF()
991 vassert(size < 4); in mkVecMINF()
1003 vassert(ty == Ity_I32); in mathROR()
1006 vassert(w != 0); in mathROR()
1007 vassert(imm < w); in mathROR()
1026 vassert(ty == Ity_I32); in mathREPLICATE()
1029 vassert(w != 0); in mathREPLICATE()
1030 vassert(imm < w); in mathREPLICATE()
1194 default: vassert(0); in offsetIReg64()
1205 vassert(iregNo < 32); in nameIReg64orZR()
1219 vassert(iregNo < 31); in nameIReg64orSP()
1225 vassert(iregNo < 32); in getIReg64orSP()
1234 vassert(iregNo < 31); in getIReg64orZR()
1240 vassert(typeOfIRExpr(irsb->tyenv, e) == Ity_I64); in putIReg64orSP()
1246 vassert(typeOfIRExpr(irsb->tyenv, e) == Ity_I64); in putIReg64orZR()
1250 vassert(iregNo < 31); in putIReg64orZR()
1256 vassert(iregNo < 32); in nameIReg32orZR()
1270 vassert(iregNo < 31); in nameIReg32orSP()
1276 vassert(iregNo < 32); in getIReg32orSP()
1286 vassert(iregNo < 31); in getIReg32orZR()
1293 vassert(typeOfIRExpr(irsb->tyenv, e) == Ity_I32); in putIReg32orSP()
1299 vassert(typeOfIRExpr(irsb->tyenv, e) == Ity_I32); in putIReg32orZR()
1303 vassert(iregNo < 31); in putIReg32orZR()
1309 vassert(is64 == True || is64 == False); in nameIRegOrSP()
1315 vassert(is64 == True || is64 == False); in nameIRegOrZR()
1321 vassert(is64 == True || is64 == False); in getIRegOrZR()
1327 vassert(is64 == True || is64 == False); in putIRegOrZR()
1333 vassert(typeOfIRExpr(irsb->tyenv, e) == Ity_I64); in putPC()
1377 default: vassert(0); in offsetQReg128()
1384 vassert(qregNo < 32); in putQReg128()
1385 vassert(typeOfIRExpr(irsb->tyenv, e) == Ity_V128); in putQReg128()
1392 vassert(qregNo < 32); in getQReg128()
1409 default: vassert(0); in preferredVectorSubTypeFromSize()
1418 vassert(host_endness == VexEndnessLE); in offsetQRegLane()
1433 vassert(laneSzB > 0); in offsetQRegLane()
1436 vassert(maxOff < 16); in offsetQRegLane()
1450 vassert(0); // Other cases are probably invalid in putQRegLO()
1466 vassert(0); // Other cases are ATC in getQRegLO()
1498 vassert(qregNo < 32); in nameQRegLO()
1505 default: vassert(0); in nameQRegLO()
1535 vassert(0); // Other cases are plain wrong in putQRegHI64()
1552 vassert(0); // Other cases are ATC in putQRegLane()
1566 vassert(0); // Other cases are ATC in getQRegLane()
1723 vassert(typeOfIRExpr(irsb->tyenv, cond) == Ity_I64); in mk_arm64g_calculate_condition_dyn()
1767 vassert(cond >= 0 && cond <= 15); in mk_arm64g_calculate_condition()
1848 vassert(typeOfIRTemp(irsb->tyenv, t_dep1) == Ity_I64); in setFlags_D1_D2_ND()
1849 vassert(typeOfIRTemp(irsb->tyenv, t_dep2) == Ity_I64); in setFlags_D1_D2_ND()
1850 vassert(typeOfIRTemp(irsb->tyenv, t_ndep) == Ity_I64); in setFlags_D1_D2_ND()
1851 vassert(cc_op >= ARM64G_CC_OP_COPY && cc_op < ARM64G_CC_OP_NUMBER); in setFlags_D1_D2_ND()
1880 else { vassert(0); } in setFlags_ADD_SUB()
1909 else { vassert(0); } in setFlags_ADC_SBC()
1940 else { vassert(0); } in setFlags_ADD_SUB_conditionally()
2048 vassert(sh >= 1 && sh <= 63); in math_SWAPHELPER()
2135 vassert(0); in math_DUP_TO_64()
2169 vassert(0); in math_DUP_TO_V128()
2179 vassert(0); in math_MAYBE_ZERO_HI64()
2281 vassert(width > 0 && width <= 64); in dbm_ROR()
2282 vassert(rot >= 0 && rot < width); in dbm_ROR()
2328 vassert(x == 0); in dbm_highestSetBit()
2337 vassert(immN < (1ULL << 1)); in dbm_DecodeBitMasks()
2338 vassert(imms < (1ULL << 6)); in dbm_DecodeBitMasks()
2339 vassert(immr < (1ULL << 6)); in dbm_DecodeBitMasks()
2340 vassert(immediate == False || immediate == True); in dbm_DecodeBitMasks()
2341 vassert(M == 32 || M == 64); in dbm_DecodeBitMasks()
2345 vassert(len <= 6); in dbm_DecodeBitMasks()
2346 vassert(M >= (1 << len)); in dbm_DecodeBitMasks()
2348 vassert(len >= 1 && len <= 6); in dbm_DecodeBitMasks()
2351 vassert(levels >= 1 && levels <= 63); in dbm_DecodeBitMasks()
2363 vassert(2 <= esize && esize <= 64); in dbm_DecodeBitMasks()
2368 vassert(S >= 0 && S <= 63); in dbm_DecodeBitMasks()
2369 vassert(esize >= (S+1)); in dbm_DecodeBitMasks()
2376 vassert(esize >= (d+1)); in dbm_DecodeBitMasks()
2377 vassert(d >= 0 && d <= 63); in dbm_DecodeBitMasks()
2383 if (esize != 64) vassert(elem_s < (1ULL << esize)); in dbm_DecodeBitMasks()
2384 if (esize != 64) vassert(elem_d < (1ULL << esize)); in dbm_DecodeBitMasks()
2421 vassert(sh <= 1); in dis_ARM64_data_processing_immediate()
2512 vassert(op < 4); in dis_ARM64_data_processing_immediate()
2566 vassert(imm64 < 0x100000000ULL); in dis_ARM64_data_processing_immediate()
2597 vassert(hw <= 1); in dis_ARM64_data_processing_immediate()
2609 vassert(0); in dis_ARM64_data_processing_immediate()
2652 vassert(0); in dis_ARM64_data_processing_immediate()
2717 vassert(imm6 > 0 && imm6 < szBits); in dis_ARM64_data_processing_immediate()
2746 default: vassert(0); in nameSH()
2763 vassert(sh_how < 4); in getShiftedIRegOrZR()
2764 vassert(sh_amt < (is64 ? 64 : 32)); in getShiftedIRegOrZR()
2783 vassert(0); in getShiftedIRegOrZR()
2942 default: vassert(0); in dis_ARM64_data_processing_register()
2953 vassert(((bN << 2) | INSN(30,29)) < 8); in dis_ARM64_data_processing_register()
3053 vassert(0); in dis_ARM64_data_processing_register()
3143 vassert(shSX >= 32); in dis_ARM64_data_processing_register()
3148 vassert(0); in dis_ARM64_data_processing_register()
3306 default: vassert(0); in dis_ARM64_data_processing_register()
3311 vassert(math); in dis_ARM64_data_processing_register()
3411 default: vassert(0); in dis_ARM64_data_processing_register()
3436 vassert(op < 4); in dis_ARM64_data_processing_register()
3519 vassert(sz >= 0 && sz <= 3); in dis_ARM64_data_processing_register()
3552 vassert(0); in dis_ARM64_data_processing_register()
3555 vassert(ix >= 0 && ix <= 7); in dis_ARM64_data_processing_register()
3687 vassert(0); in math_INTERLEAVE2_128()
3882 vassert(0); in math_INTERLEAVE3_128()
3947 vassert(0); in math_INTERLEAVE4_128()
4002 vassert(0); in math_DEINTERLEAVE2_128()
4172 vassert(0); in math_DEINTERLEAVE3_128()
4240 vassert(0); in math_DEINTERLEAVE4_128()
4268 vassert(0); in math_get_doubler_and_halver()
4293 vassert(laneSzBlg2 >= 0 && laneSzBlg2 <= 2); in math_INTERLEAVE2_64()
4324 vassert(laneSzBlg2 >= 0 && laneSzBlg2 <= 2); in math_INTERLEAVE3_64()
4360 vassert(laneSzBlg2 >= 0 && laneSzBlg2 <= 2); in math_INTERLEAVE4_64()
4406 vassert(laneSzBlg2 >= 0 && laneSzBlg2 <= 2); in math_DEINTERLEAVE2_64()
4438 vassert(laneSzBlg2 >= 0 && laneSzBlg2 <= 2); in math_DEINTERLEAVE3_64()
4474 vassert(laneSzBlg2 >= 0 && laneSzBlg2 <= 2); in math_DEINTERLEAVE4_64()
4589 default: vassert(0); in gen_indexed_EA()
4633 vassert(rhs); in gen_indexed_EA()
4663 vassert(0); in gen_narrowing_store()
4688 vassert(0); in gen_zwidening_load()
4699 vassert(bitQ <= 1 && size <= 3); in nameArr_Q_SZ()
4703 vassert(ix < 8); in nameArr_Q_SZ()
4742 vassert(szLg2 < 4); in dis_ARM64_load_store()
4814 vassert(0); /* NOTREACHED */ in dis_ARM64_load_store()
4858 vassert(0); in dis_ARM64_load_store()
4917 vassert(0); /* NOTREACHED */ in dis_ARM64_load_store()
4957 vassert(bL == 0 && bX == 0); in dis_ARM64_load_store()
4980 vassert(0); in dis_ARM64_load_store()
5035 vassert(0); /* NOTREACHED */ in dis_ARM64_load_store()
5062 vassert(0); in dis_ARM64_load_store()
5154 vassert(0); in dis_ARM64_load_store()
5189 vassert(bitX == 0); in dis_ARM64_load_store()
5215 vassert(0); in dis_ARM64_load_store()
5297 vassert(0); in dis_ARM64_load_store()
5372 vassert(0); in dis_ARM64_load_store()
5436 vassert(0); /* NOTREACHED */ in dis_ARM64_load_store()
5444 default: vassert(0); in dis_ARM64_load_store()
5503 vassert(0); in dis_ARM64_load_store()
5587 vassert(0); in dis_ARM64_load_store()
5873 default: vassert(0); in dis_ARM64_load_store()
5883 default: vassert(0); in dis_ARM64_load_store()
5898 default: vassert(0); in dis_ARM64_load_store()
5916 default: vassert(0); in dis_ARM64_load_store()
5953 vassert(0); in dis_ARM64_load_store()
5969 default: vassert(0); in dis_ARM64_load_store()
5984 default: vassert(0); in dis_ARM64_load_store()
6084 default: vassert(0); in dis_ARM64_load_store()
6094 default: vassert(0); in dis_ARM64_load_store()
6111 default: vassert(0); in dis_ARM64_load_store()
6146 vassert(0); in dis_ARM64_load_store()
6161 default: vassert(0); in dis_ARM64_load_store()
6280 vassert(0); in dis_ARM64_load_store()
6436 vassert(0); in dis_ARM64_load_store()
6502 vassert(szBlg2 < 4); in dis_ARM64_load_store()
6556 vassert(szB == 8 || szB == 4 || szB == 2 || szB == 1); in dis_ARM64_load_store()
6624 vassert(szBlg2 < 4); in dis_ARM64_load_store()
6726 vassert(dres->whatNext == Dis_Continue); in dis_ARM64_branch_etc()
6727 vassert(dres->len == 4); in dis_ARM64_branch_etc()
6728 vassert(dres->continueAt == 0); in dis_ARM64_branch_etc()
6729 vassert(dres->jk_StopHere == Ijk_INVALID); in dis_ARM64_branch_etc()
6998 vassert(archinfo->arm64_dMinLine_lg2_szB >= 2 in dis_ARM64_branch_etc()
7058 vassert(archinfo->arm64_iMinLine_lg2_szB >= 2 in dis_ARM64_branch_etc()
7089 vassert(archinfo->arm64_dMinLine_lg2_szB >= 2 in dis_ARM64_branch_etc()
7124 vassert(opc <= 2 && CRm <= 15); in dis_ARM64_branch_etc()
7297 vassert(bit <= 1 && N >= 1 && N < 64); in Replicate()
7308 vassert(0 == (bits32 & ~0xFFFFFFFFULL)); in Replicate32x2()
7314 vassert(0 == (bits16 & ~0xFFFFULL)); in Replicate16x4()
7320 vassert(0 == (bits8 & ~0xFFULL)); in Replicate8x8()
7330 vassert(imm8 <= 0xFF); in VFPExpandImm()
7331 vassert(N == 32 || N == 64); in VFPExpandImm()
7341 vassert(sign < (1ULL << 1)); in VFPExpandImm()
7342 vassert(exp < (1ULL << E)); in VFPExpandImm()
7343 vassert(frac < (1ULL << F)); in VFPExpandImm()
7344 vassert(1 + E + F == N); in VFPExpandImm()
7355 vassert(op <= 1); in AdvSIMDExpandImm()
7356 vassert(cmode <= 15); in AdvSIMDExpandImm()
7357 vassert(imm8 <= 255); in AdvSIMDExpandImm()
7420 vassert(0); in AdvSIMDExpandImm()
7438 vassert(bitQ == True || bitQ == False); in getLaneInfo_Q_SZ()
7439 vassert(bitSZ == True || bitSZ == False); in getLaneInfo_Q_SZ()
7473 vassert(immh < (1<<4)); in getLaneInfo_IMMH_IMMB()
7474 vassert(immb < (1<<3)); in getLaneInfo_IMMH_IMMB()
7699 vassert(0); in math_FOLDV()
7709 vassert(len >= 0 && len <= 3); in math_TBL_TBX()
7747 vassert(tabent >= 0 && tabent < 4); in math_TBL_TBX()
7816 vassert(size <= 3); in math_ABD()
7876 vassert(0); in math_WIDEN_LO_OR_HI_LANES()
7900 default: vassert(0); in math_WIDEN_EVEN_OR_ODD_LANES()
7930 vassert(size <= 3); in math_DUP_VEC_ELEM()
7937 vassert(ix <= 15); in math_DUP_VEC_ELEM()
7953 vassert(0); in math_DUP_VEC_ELEM()
8029 vassert(imm <= 0xFFFFULL); in math_VEC_DUP_IMM()
8034 vassert(imm <= 0xFFFFFFFFULL); in math_VEC_DUP_IMM()
8042 vassert(0); in math_VEC_DUP_IMM()
8120 vassert(size < 4); in math_ZERO_ALL_EXCEPT_LOWEST_LANE()
8144 vassert(res && *res == IRTemp_INVALID); in math_MULL_ACC()
8145 vassert(size <= 2); in math_MULL_ACC()
8146 vassert(mas == 'm' || mas == 'a' || mas == 's'); in math_MULL_ACC()
8147 if (mas == 'm') vassert(vecD == IRTemp_INVALID); in math_MULL_ACC()
8175 vassert(size <= 2); in math_SQDMULL_ACC()
8176 vassert(mas == 'm' || mas == 'a' || mas == 's'); in math_SQDMULL_ACC()
8184 vassert(sat2q && *sat2q == IRTemp_INVALID); in math_SQDMULL_ACC()
8185 vassert(sat2n && *sat2n == IRTemp_INVALID); in math_SQDMULL_ACC()
8222 vassert(sizeNarrow <= 2); in math_MULLS()
8246 vassert(size == X01 || size == X10); /* s or h only */ in math_SQDMULH()
8290 vassert(size <= 3); in math_QSHL_IMM()
8292 vassert(shift < laneBits); in math_QSHL_IMM()
8310 vassert(rshift >= 1 && rshift < laneBits); in math_QSHL_IMM()
8329 vassert(rshift >= 0 && rshift < laneBits-1); in math_QSHL_IMM()
8359 vassert(rshift >= 1 && rshift < laneBits); in math_QSHL_IMM()
8366 vassert(0); in math_QSHL_IMM()
8377 vassert(size <= 3); in math_RHADD()
8432 vassert(opZHI == Iop_ZeroHI64ofV128 in updateQCFLAGwithDifferenceZHI()
8472 vassert(rearrL && *rearrL == IRTemp_INVALID); in math_REARRANGE_FOR_FLOATING_PAIRWISE()
8473 vassert(rearrR && *rearrR == IRTemp_INVALID); in math_REARRANGE_FOR_FLOATING_PAIRWISE()
8478 vassert(bitQ == 1); in math_REARRANGE_FOR_FLOATING_PAIRWISE()
8488 vassert(!isD && bitQ == 0); in math_REARRANGE_FOR_FLOATING_PAIRWISE()
8505 vassert(n >= 2 && n <= 64); in two_to_the_minus()
8515 vassert(n >= 2 && n <= 64); in two_to_the_plus()
8556 vassert(imm4 >= 1 && imm4 <= 15); in dis_AdvSIMD_EXT()
8567 vassert(imm4 >= 1 && imm4 <= 7); in dis_AdvSIMD_EXT()
8627 vassert(i < 4); in dis_AdvSIMD_TBL_TBX()
8820 vassert(ix >= 1 && ix <= 5); in dis_AdvSIMD_across_lanes()
8833 vassert(size < 3); in dis_AdvSIMD_across_lanes()
8842 default: vassert(0); in dis_AdvSIMD_across_lanes()
8844 vassert(op != Iop_INVALID && nm != NULL); in dis_AdvSIMD_across_lanes()
9023 vassert(laneNo < 16); in dis_AdvSIMD_copy()
9117 vassert(laneNo < 16); in dis_AdvSIMD_copy()
9168 vassert(ix1 < 16); in dis_AdvSIMD_copy()
9169 vassert(ix2 < 16); in dis_AdvSIMD_copy()
9296 vassert(1 == (isMOV ? 1 : 0) + (isMVN ? 1 : 0) in dis_AdvSIMD_modified_immediate()
9389 vassert(laneNo < 16); in dis_AdvSIMD_scalar_copy()
9516 vassert(sh >= 1 && sh <= 64); in dis_AdvSIMD_scalar_shift_by_imm()
9526 vassert(!isU); in dis_AdvSIMD_scalar_shift_by_imm()
9549 vassert(sh >= 1 && sh <= 64); in dis_AdvSIMD_scalar_shift_by_imm()
9551 vassert(sh >= 1 && sh <= 64); in dis_AdvSIMD_scalar_shift_by_imm()
9571 vassert(sh >= 1 && sh <= 64); in dis_AdvSIMD_scalar_shift_by_imm()
9591 vassert(sh >= 0 && sh < 64); in dis_AdvSIMD_scalar_shift_by_imm()
9603 vassert(sh >= 0 && sh < 64); in dis_AdvSIMD_scalar_shift_by_imm()
9629 vassert(size >= 0 && size <= 3); in dis_AdvSIMD_scalar_shift_by_imm()
9634 vassert(shift >= 0 && shift < lanebits); in dis_AdvSIMD_scalar_shift_by_imm()
9639 else vassert(0); in dis_AdvSIMD_scalar_shift_by_imm()
9668 vassert(size >= X00 && size <= X10); in dis_AdvSIMD_scalar_shift_by_imm()
9669 vassert(shift >= 1 && shift <= (8 << size)); in dis_AdvSIMD_scalar_shift_by_imm()
9691 else vassert(0); in dis_AdvSIMD_scalar_shift_by_imm()
9720 vassert(ok); in dis_AdvSIMD_scalar_shift_by_imm()
9722 vassert(size == X10 || size == X11); in dis_AdvSIMD_scalar_shift_by_imm()
9725 vassert(fbits >= 1 && fbits <= (isD ? 64 : 32)); in dis_AdvSIMD_scalar_shift_by_imm()
9758 vassert(ok); in dis_AdvSIMD_scalar_shift_by_imm()
9760 vassert(size == X10 || size == X11); in dis_AdvSIMD_scalar_shift_by_imm()
9763 vassert(fbits >= 1 && fbits <= (isD ? 64 : 32)); in dis_AdvSIMD_scalar_shift_by_imm()
9816 vassert(size < 4); in dis_AdvSIMD_scalar_three_different()
9830 default: vassert(0); in dis_AdvSIMD_scalar_three_different()
9832 vassert(ks >= 0 && ks <= 2); in dis_AdvSIMD_scalar_three_different()
9834 vassert(size <= 2); in dis_AdvSIMD_scalar_three_different()
9846 vassert(sat1q != IRTemp_INVALID && sat1n != IRTemp_INVALID); in dis_AdvSIMD_scalar_three_different()
9885 vassert(size < 4); in dis_AdvSIMD_scalar_three_same()
10191 vassert(size < 4); in dis_AdvSIMD_scalar_two_reg_misc()
10317 default: vassert(0); in dis_AdvSIMD_scalar_two_reg_misc()
10336 vassert(size < 3); in dis_AdvSIMD_scalar_two_reg_misc()
10349 else vassert(0); in dis_AdvSIMD_scalar_two_reg_misc()
10409 default: vassert(0); in dis_AdvSIMD_scalar_two_reg_misc()
10510 vassert(size < 4); in dis_AdvSIMD_scalar_x_indexed_element()
10511 vassert(bitH < 2 && bitM < 2 && bitL < 2); in dis_AdvSIMD_scalar_x_indexed_element()
10523 vassert(index < (isD ? 2 : 4)); in dis_AdvSIMD_scalar_x_indexed_element()
10557 vassert(index < (isD ? 2 : 4)); in dis_AdvSIMD_scalar_x_indexed_element()
10589 default: vassert(0); in dis_AdvSIMD_scalar_x_indexed_element()
10591 vassert(ks >= 0 && ks <= 2); in dis_AdvSIMD_scalar_x_indexed_element()
10604 vassert(0); in dis_AdvSIMD_scalar_x_indexed_element()
10606 vassert(mm < 32 && ix < 16); in dis_AdvSIMD_scalar_x_indexed_element()
10618 vassert(sat1q != IRTemp_INVALID && sat1n != IRTemp_INVALID); in dis_AdvSIMD_scalar_x_indexed_element()
10647 vassert(0); in dis_AdvSIMD_scalar_x_indexed_element()
10649 vassert(mm < 32 && ix < 16); in dis_AdvSIMD_scalar_x_indexed_element()
10710 vassert(size >= 0 && size <= 3); in dis_AdvSIMD_shift_by_immediate()
10712 vassert(shift >= 1 && shift <= lanebits); in dis_AdvSIMD_shift_by_immediate()
10722 vassert(!isU); in dis_AdvSIMD_shift_by_immediate()
10759 vassert(size >= 0 && size <= 3); in dis_AdvSIMD_shift_by_immediate()
10761 vassert(shift >= 1 && shift <= lanebits); in dis_AdvSIMD_shift_by_immediate()
10797 vassert(size >= 0 && size <= 3); in dis_AdvSIMD_shift_by_immediate()
10799 vassert(shift >= 1 && shift <= lanebits); in dis_AdvSIMD_shift_by_immediate()
10839 vassert(size >= 0 && size <= 3); in dis_AdvSIMD_shift_by_immediate()
10844 vassert(shift >= 0 && shift < lanebits); in dis_AdvSIMD_shift_by_immediate()
10882 vassert(size >= 0 && size <= 3); in dis_AdvSIMD_shift_by_immediate()
10887 vassert(shift >= 0 && shift < lanebits); in dis_AdvSIMD_shift_by_immediate()
10892 else vassert(0); in dis_AdvSIMD_shift_by_immediate()
10919 vassert(shift >= 1); in dis_AdvSIMD_shift_by_immediate()
10952 vassert(shift >= 1 && shift <= (8 << size)); in dis_AdvSIMD_shift_by_immediate()
10974 else vassert(0); in dis_AdvSIMD_shift_by_immediate()
11027 vassert(sh < 32); /* so 32-sh is 1..32 */ in dis_AdvSIMD_shift_by_immediate()
11036 vassert(sh < 16); /* so 16-sh is 1..16 */ in dis_AdvSIMD_shift_by_immediate()
11045 vassert(sh < 8); /* so 8-sh is 1..8 */ in dis_AdvSIMD_shift_by_immediate()
11052 vassert(immh == 0); in dis_AdvSIMD_shift_by_immediate()
11075 vassert(ok); in dis_AdvSIMD_shift_by_immediate()
11077 vassert(size == X10 || size == X11); in dis_AdvSIMD_shift_by_immediate()
11082 vassert(fbits >= 1 && fbits <= (isD ? 64 : 32)); in dis_AdvSIMD_shift_by_immediate()
11092 vassert(nLanes == 2 || nLanes == 4); in dis_AdvSIMD_shift_by_immediate()
11121 vassert(ok); in dis_AdvSIMD_shift_by_immediate()
11123 vassert(size == X10 || size == X11); in dis_AdvSIMD_shift_by_immediate()
11128 vassert(fbits >= 1 && fbits <= (isD ? 64 : 32)); in dis_AdvSIMD_shift_by_immediate()
11138 vassert(nLanes == 2 || nLanes == 4); in dis_AdvSIMD_shift_by_immediate()
11186 vassert(size < 4); in dis_AdvSIMD_three_different()
11196 vassert(size <= 2); in dis_AdvSIMD_three_different()
11222 vassert(size <= 2); in dis_AdvSIMD_three_different()
11247 vassert(size <= 2); in dis_AdvSIMD_three_different()
11286 vassert(size <= 2); in dis_AdvSIMD_three_different()
11320 default: vassert(0); in dis_AdvSIMD_three_different()
11322 vassert(ks >= 0 && ks <= 2); in dis_AdvSIMD_three_different()
11324 vassert(size <= 2); in dis_AdvSIMD_three_different()
11357 default: vassert(0); in dis_AdvSIMD_three_different()
11359 vassert(ks >= 0 && ks <= 2); in dis_AdvSIMD_three_different()
11361 vassert(size <= 2); in dis_AdvSIMD_three_different()
11372 vassert(sat1q != IRTemp_INVALID && sat1n != IRTemp_INVALID); in dis_AdvSIMD_three_different()
11404 vassert(size == X11); in dis_AdvSIMD_three_different()
11454 vassert(size < 4); in dis_AdvSIMD_three_same()
11610 vassert(0); in dis_AdvSIMD_three_same()
11745 vassert(size <= 2); in dis_AdvSIMD_three_same()
12133 vassert(size <= 1); in dis_AdvSIMD_three_same()
12190 vassert(size < 4); in dis_AdvSIMD_two_reg_misc()
12198 vassert(size <= 2); in dis_AdvSIMD_two_reg_misc()
12304 vassert(size <= 2); in dis_AdvSIMD_two_reg_misc()
12456 default: vassert(0); in dis_AdvSIMD_two_reg_misc()
12488 vassert(size < 3); in dis_AdvSIMD_two_reg_misc()
12508 vassert(size < 3); in dis_AdvSIMD_two_reg_misc()
12522 else vassert(0); in dis_AdvSIMD_two_reg_misc()
12632 vassert(ix >= 1 && ix <= 8); in dis_AdvSIMD_two_reg_misc()
12671 default: vassert(0); in dis_AdvSIMD_two_reg_misc()
12724 default: vassert(0); in dis_AdvSIMD_two_reg_misc()
12790 vassert(ok); /* the 'if' above should ensure this */ in dis_AdvSIMD_two_reg_misc()
12866 vassert(size < 4); in dis_AdvSIMD_vector_x_indexed_elem()
12867 vassert(bitH < 2 && bitM < 2 && bitL < 2); in dis_AdvSIMD_vector_x_indexed_elem()
12880 vassert(index < (isD ? 2 : 4)); in dis_AdvSIMD_vector_x_indexed_elem()
12914 vassert(index < (isD ? 2 : 4)); in dis_AdvSIMD_vector_x_indexed_elem()
12952 vassert(0); in dis_AdvSIMD_vector_x_indexed_elem()
12954 vassert(mm < 32 && ix < 16); in dis_AdvSIMD_vector_x_indexed_elem()
12994 default: vassert(0); in dis_AdvSIMD_vector_x_indexed_elem()
12996 vassert(ks >= 0 && ks <= 2); in dis_AdvSIMD_vector_x_indexed_elem()
13011 vassert(0); in dis_AdvSIMD_vector_x_indexed_elem()
13013 vassert(mm < 32 && ix < 16); in dis_AdvSIMD_vector_x_indexed_elem()
13046 default: vassert(0); in dis_AdvSIMD_vector_x_indexed_elem()
13048 vassert(ks >= 0 && ks <= 2); in dis_AdvSIMD_vector_x_indexed_elem()
13062 vassert(0); in dis_AdvSIMD_vector_x_indexed_elem()
13064 vassert(mm < 32 && ix < 16); in dis_AdvSIMD_vector_x_indexed_elem()
13075 vassert(sat1q != IRTemp_INVALID && sat1n != IRTemp_INVALID); in dis_AdvSIMD_vector_x_indexed_elem()
13107 vassert(0); in dis_AdvSIMD_vector_x_indexed_elem()
13109 vassert(mm < 32 && ix < 16); in dis_AdvSIMD_vector_x_indexed_elem()
13235 vassert(opc < 7); in dis_AdvSIMD_crypto_three_reg_sha()
13282 vassert(0); in dis_AdvSIMD_crypto_three_reg_sha()
13304 vassert(0); in dis_AdvSIMD_crypto_three_reg_sha()
13334 vassert(opc < 3); in dis_AdvSIMD_crypto_two_reg_sha()
13385 vassert(0); in dis_AdvSIMD_crypto_two_reg_sha()
13397 vassert(0); in dis_AdvSIMD_crypto_two_reg_sha()
13425 vassert(ty < 4); in dis_AdvSIMD_fp_compare()
13501 vassert(ty < 4 && op <= 1); in dis_AdvSIMD_fp_conditional_compare()
13630 vassert(0); in dis_AdvSIMD_fp_data_proc_1_source()
13823 default: vassert(0); in dis_AdvSIMD_fp_data_proc_2_source()
13883 vassert(ty < 4); in dis_AdvSIMD_fp_data_proc_3_source()
13922 default: vassert(0); in dis_AdvSIMD_fp_data_proc_3_source()
13961 vassert(0 == (imm & 0xFFFFFFFF00000000ULL)); in dis_AdvSIMD_fp_immediate()
14016 vassert(fbits >= 1 && fbits <= (isI64 ? 64 : 32)); in dis_AdvSIMD_fp_to_from_fixedp_conv()
14067 vassert(fbits >= 1 && fbits <= (isI64 ? 64 : 32)); in dis_AdvSIMD_fp_to_from_fixedp_conv()
14150 default: vassert(0); in dis_AdvSIMD_fp_to_from_int_conv()
14153 vassert(op == BITS3(1,0,0) || op == BITS3(1,0,1)); in dis_AdvSIMD_fp_to_from_int_conv()
14156 default: vassert(0); in dis_AdvSIMD_fp_to_from_int_conv()
14159 vassert(irrm != 8); in dis_AdvSIMD_fp_to_from_int_conv()
14172 vassert(ix < 8); in dis_AdvSIMD_fp_to_from_int_conv()
14292 vassert(bitSF == 1); in dis_AdvSIMD_fp_to_from_int_conv()
14334 vassert(0); in dis_AdvSIMD_fp_to_from_int_conv()
14467 vassert(0 == (guest_PC_curr_instr & 3ULL)); in disInstr_ARM64_WRK()
14582 vassert(0); /* Can't happen */ in disInstr_ARM64_WRK()
14588 vassert(dres->whatNext == Dis_Continue); in disInstr_ARM64_WRK()
14589 vassert(dres->len == 4); in disInstr_ARM64_WRK()
14590 vassert(dres->continueAt == 0); in disInstr_ARM64_WRK()
14591 vassert(dres->jk_StopHere == Ijk_INVALID); in disInstr_ARM64_WRK()
14624 vassert(guest_arch == VexArchARM64); in disInstr_ARM64()
14632 vassert((archinfo->arm64_dMinLine_lg2_szB - 2) <= 15); in disInstr_ARM64()
14633 vassert((archinfo->arm64_iMinLine_lg2_szB - 2) <= 15); in disInstr_ARM64()
14642 vassert(dres.len == 4 || dres.len == 20); in disInstr_ARM64()
14654 vassert(0); in disInstr_ARM64()
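
Two check shapes account for most of the hits above: a lane-size selector validated with vassert(size < 4) before it indexes a fixed 4-entry table of 128-bit ops (mkVecADD() and the other mkVec* helpers), and a register number validated against 31 or 32 depending on whether encoding 31 has already been special-cased as XZR/WZR or kept as SP (getIReg64orZR() vs getIReg64orSP()). The standalone C sketch below is illustrative only -- hypothetical names, plain assert() standing in for vassert(), not the actual VEX helpers -- and shows both shapes.

    #include <assert.h>
    #include <stdio.h>

    /* Shape 1: a 2-bit lane-size selector guards a 4-entry op table,
       mirroring vassert(size < 4) in mkVecADD() and friends. */
    typedef enum { Add8x16, Add16x8, Add32x4, Add64x2 } VecAddOp;

    static VecAddOp pickVecAddOp(unsigned size)   /* 0=8-bit .. 3=64-bit lanes */
    {
       static const VecAddOp ops[4] = { Add8x16, Add16x8, Add32x4, Add64x2 };
       assert(size < 4);                          /* reject out-of-range selectors */
       return ops[size];
    }

    /* Shape 2: register number 31 reads as zero for the "orZR" accessors,
       so once that case is handled the bound tightens to 31, mirroring
       vassert(iregNo < 31) in getIReg64orZR(). */
    static unsigned long long readXRegOrZR(const unsigned long long xregs[31],
                                           unsigned iregNo)
    {
       if (iregNo == 31)
          return 0;                               /* XZR/WZR reads as zero */
       assert(iregNo < 31);                       /* only X0..X30 are backed by state */
       return xregs[iregNo];
    }

    int main(void)
    {
       unsigned long long regs[31] = { 0 };
       regs[5] = 42;
       printf("%d %llu %llu\n", (int)pickVecAddOp(2),
              readXRegOrZR(regs, 5), readXRegOrZR(regs, 31));
       return 0;
    }

The sizeNarrow < 3 checks on the widening multiplies (mkVecMULLU() and friends) follow the same bound-then-index idea: a 64-bit narrow lane would have to widen into a 128-bit result lane, which a V128 vector cannot represent, so that selector is excluded up front.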