Lines matching refs:mkU8 (the definition of mkU8, followed by the lines that reference it)
543 static IRExpr* mkU8 ( UChar i ) in mkU8() function
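The first hit is the definition itself. In the VEX guest front ends, mkU8 is normally a one-line wrapper that packages an 8-bit immediate as an IR constant expression; a minimal sketch, assuming the standard constructors from libvex_basictypes.h and libvex_ir.h:

    /* Likely body of the helper: wrap the 8-bit immediate in an
       IR constant node (a sketch, not copied from the source). */
    static IRExpr* mkU8 ( UChar i )
    {
       return IRExpr_Const(IRConst_U8(i));
    }

Every other hit below passes such a constant to binop/unop as a shift amount, mask, or small literal while building IR.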
603 assign( ones8x16, unop(Iop_Dup8x16, mkU8(0x1)) ); in expand8Ux16()
606 binop(Iop_ShrV128, vIn, mkU8(8))) ); in expand8Ux16()
621 assign( ones8x16, unop(Iop_Dup8x16, mkU8(0x1)) ); in expand8Sx16()
624 binop(Iop_ShrV128, vIn, mkU8(8))) ); in expand8Sx16()
642 binop(Iop_ShrV128, vIn, mkU8(16))) ); in expand16Ux8()
660 binop(Iop_ShrV128, vIn, mkU8(16))) ); in expand16Sx8()
797 binop( Iop_Sar32, mkexpr(lo32), mkU8(31)))), in mkQNarrow64Sto32()
800 binop(Iop_Shr32, mkexpr(hi32), mkU8(31))), in mkQNarrow64Sto32()
862 binop(Iop_ShrV128, expr_vA, mkU8(8)), \
863 binop(Iop_ShrV128, expr_vB, mkU8(8)))
867 binop(Iop_ShrV128, expr_vA, mkU8(8)), \
868 binop(Iop_ShrV128, expr_vB, mkU8(8)))
872 binop(Iop_ShrV128, expr_vA, mkU8(16)), \
873 binop(Iop_ShrV128, expr_vB, mkU8(16)))
877 binop(Iop_ShrV128, expr_vA, mkU8(16)), \
878 binop(Iop_ShrV128, expr_vB, mkU8(16)))
1324 binop(Iop_Shr32, mkexpr(old), mkU8(shift[i])), in gen_POPCOUNT()
1348 binop( Iop_Shr64, mkexpr( old ), mkU8( shift[i] ) ), in gen_POPCOUNT()
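The gen_POPCOUNT hits shift a running value right by entries of a shift[] table, which is the usual divide-and-conquer bit-count reduction. A plain-C sketch of the 32-bit case (the function name and the mask constants are assumed here; only the shifts are visible in the hits):

    /* Hypothetical plain-C equivalent of the reduction the IR builds:
       at each step, adjacent bit-fields of width 1, 2, 4, 8, 16 are
       summed in place. */
    static unsigned int popcount32 ( unsigned int old )
    {
       static const unsigned int shift[5] = { 1, 2, 4, 8, 16 };
       static const unsigned int mask[5]  = { 0x55555555u, 0x33333333u,
                                              0x0F0F0F0Fu, 0x00FF00FFu,
                                              0x0000FFFFu };
       for (int i = 0; i < 5; i++)
          old = (old & mask[i]) + ((old >> shift[i]) & mask[i]);
       return old;
    }

The second hit is the 64-bit variant of the same scheme (Iop_Shr64 rather than Iop_Shr32).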
1365 mask = binop(Iop_And8, rot_amt, mkU8(63)); in ROTL()
1368 binop(Iop_Shr64, src, binop(Iop_Sub8, mkU8(64), mask))); in ROTL()
1371 mask = binop(Iop_And8, rot_amt, mkU8(31)); in ROTL()
1374 binop(Iop_Shr32, src, binop(Iop_Sub8, mkU8(32), mask))); in ROTL()
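The ROTL hits build a rotate from a masked amount, a left shift, and the complementary right shift, in both a 64-bit (mask 63) and a 32-bit (mask 31) form. A plain-C illustration of the 32-bit form (hypothetical name; the real helper also has to special-case a rotate amount of zero, since a right shift by the full width is undefined):

    /* rot = (src << n) | (src >> (32 - n)), with n masked to 0..31
       and n == 0 handled separately. */
    static unsigned int rotl32 ( unsigned int x, unsigned int n )
    {
       n &= 31;
       return n == 0 ? x : (x << n) | (x >> (32 - n));
    }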
1597 mkU8(toUChar(3-off)) ), in getCRbit()
1627 binop(Iop_Shl32, safe, mkU8(toUChar(off))) in putCRbit()
1700 assign( v1, binop(Iop_ShrV128, result, mkU8(32)) ); in set_AV_CR6()
1701 assign( v2, binop(Iop_ShrV128, result, mkU8(64)) ); in set_AV_CR6()
1702 assign( v3, binop(Iop_ShrV128, result, mkU8(96)) ); in set_AV_CR6()
1722 binop(Iop_Shl8, mkexpr(rOnes), mkU8(3)), in set_AV_CR6()
1723 binop(Iop_Shl8, mkexpr(rZeros), mkU8(1))) ); in set_AV_CR6()
1725 putCR321( 6, binop(Iop_Shl8, mkexpr(rZeros), mkU8(1)) ); in set_AV_CR6()
1727 putCR0( 6, mkU8(0) ); in set_AV_CR6()
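The set_AV_CR6 hits assemble the CR6 field written by the AltiVec compare instructions with the record bit set: one flag shifted into bit 3, another into bit 1. Reading the temporaries rOnes and rZeros as "all lanes compared true" and "all lanes compared false" (an assumption based on their names and the usual vcmp.-dot convention), the value built is:

    /* Hypothetical sketch of the CR6 encoding these hits suggest:
       bit 3 = all lanes compared true, bit 1 = all lanes compared false. */
    static unsigned int mk_cr6 ( unsigned int all_true, unsigned int all_false )
    {
       return (all_true << 3) | (all_false << 1);
    }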
1740 so = binop(Iop_And8, e, mkU8(1)); in putXER_SO()
1748 ov = binop(Iop_And8, e, mkU8(1)); in putXER_OV()
1756 ca = binop(Iop_And8, e, mkU8(1)); in putXER_CA()
1764 bc = binop(Iop_And8, e, mkU8(0x7F)); in putXER_BC()
1844 = binop(Iop_Shr32, xer_ov, mkU8(31) ); in set_XER_OV_32()
1877 mkU8(31)) in set_XER_OV_32()
1900 = binop(Iop_Shr32, xer_ov, mkU8(31) ); in set_XER_OV_32()
1977 = unop(Iop_64to1, binop(Iop_Shr64, xer_ov, mkU8(63))); in set_XER_OV_64()
2006 mkU8(31)) in set_XER_OV_64()
2027 = unop(Iop_64to1, binop(Iop_Shr64, xer_ov, mkU8(63))); in set_XER_OV_64()
2148 binop(Iop_Sar32, argL, mkU8(31)), in set_XER_CA_32()
2164 binop(Iop_Shr32, argL, mkU8(31)) in set_XER_CA_32()
2178 binop(Iop_Sar32, argL, mkU8(31)), in set_XER_CA_32()
2268 binop(Iop_Sar64, argL, mkU8(31)), in set_XER_CA_64()
2284 unop(Iop_64to32, binop(Iop_Shr64, argL, mkU8(63))) in set_XER_CA_64()
2299 binop(Iop_Sar64, argL, mkU8(31)), in set_XER_CA_64()
2322 binop(Iop_Sar64, argL, mkU8(63)), in set_XER_CA_64()
2338 unop(Iop_64to32, binop(Iop_Shr64, argL, mkU8(63))) in set_XER_CA_64()
2354 binop(Iop_Sar64, argL, mkU8(63)), in set_XER_CA_64()
2420 binop(Iop_And8, getCR321(_n), mkU8(7<<1)), \ in getGST()
2421 binop(Iop_And8, getCR0(_n), mkU8(1)) \ in getGST()
2424 mkU8(4 * (7-(_n))) \ in getGST()
2442 binop( Iop_Shl32, getXER_SO32(), mkU8(31)), in getGST()
2443 binop( Iop_Shl32, getXER_OV32(), mkU8(30))), in getGST()
2445 binop( Iop_Shl32, getXER_CA32(), mkU8(29)), in getGST()
2507 binop(Iop_Shl32, getXER_SO32(), mkU8(3)), in getGST_field()
2508 binop(Iop_Shl32, getXER_OV32(), mkU8(2))), in getGST_field()
2509 binop( Iop_Shl32, getXER_CA32(), mkU8(1))); in getGST_field()
2518 mkU8(toUChar( shft ))); in getGST_field()
2556 putXER_SO( unop(Iop_32to8, binop(Iop_Shr32, src, mkU8(31))) ); in putGST()
2557 putXER_OV( unop(Iop_32to8, binop(Iop_Shr32, src, mkU8(30))) ); in putGST()
2558 putXER_CA( unop(Iop_32to8, binop(Iop_Shr32, src, mkU8(29))) ); in putGST()
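Taken together, the putXER_*, getGST, and putGST hits imply a fixed layout for the 32-bit XER image: SO in bit 31, OV in bit 30, CA in bit 29, and the 7-bit byte count in bits 6..0. A hedged illustration (the helper name is hypothetical; the bit positions are read straight from the shifts above):

    /* Assemble an XER word from its component flags, per the layout
       implied by the shifts by 31, 30, 29 and the 0x7F mask above. */
    static unsigned int mk_xer ( unsigned int so, unsigned int ov,
                                 unsigned int ca, unsigned int bc )
    {
       return (so << 31) | (ov << 30) | (ca << 29) | (bc & 0x7F);
    }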
2655 putCR0 (fld, binop(Iop_And8, mkU8(1 ), unop(Iop_32to8, src))); in putGST_field()
2656 putCR321(fld, binop(Iop_And8, mkU8(7<<1), unop(Iop_32to8, src))); in putGST_field()
2664 binop(Iop_Shl32, src, mkU8(toUChar(shft))), in putGST_field()
2686 binop( Iop_Shr32, mkexpr( src ), mkU8( 23 ) ), in fp_exp_part_sp()
2700 mkU8( 20 ) ), mkU32( 0x7ff ) ); in fp_exp_part()
2704 binop( Iop_Shr64, mkexpr( src ), mkU8( 52 ) ), in fp_exp_part()
2793 binop( Iop_Shr32, mkexpr( src ), mkU8( 23 ) ), in is_NaN_32()
2818 mkU8( 31 ) ) ); in getNegatedResult()
2838 mkU8( 31 ) ), in getNegatedResult()
2861 mkU8( 31 ) ) ); in getNegatedResult_32()
2880 mkU8( 31 ) ) ) ); in getNegatedResult_32()
3163 mkU8(32)) ); in dis_int_arith()
3185 mkU8(32)) ); in dis_int_arith()
3973 mkU8( 63 ) ) ) ); in dis_int_logic()
3978 mkU8( i ) ) ); in dis_int_logic()
3979 rS_expr = binop( Iop_Shr64, rS_expr, mkU8( 8 ) ); in dis_int_logic()
4051 assign( rS1, binop(shr_op, mkexpr(rS), mkU8(8)) ); in dis_int_parity()
4055 assign( rS2, binop(shr_op, mkexpr(rS1), mkU8(8)) ); in dis_int_parity()
4059 assign( rS3, binop(shr_op, mkexpr(rS2), mkU8(8)) ); in dis_int_parity()
4064 assign( rS4, binop(shr_op, mkexpr(rS3), mkU8(8)) ); in dis_int_parity()
4068 assign( rS5, binop(shr_op, mkexpr(rS4), mkU8(8)) ); in dis_int_parity()
4072 assign( rS6, binop(shr_op, mkexpr(rS5), mkU8(8)) ); in dis_int_parity()
4076 assign( rS7, binop(shr_op, mkexpr(rS6), mkU8(8)) ); in dis_int_parity()
4088 assign( rS1, binop(shr_op, mkexpr(rS), mkU8(8)) ); in dis_int_parity()
4092 assign( rS2, binop(shr_op, mkexpr(rS1), mkU8(8)) ); in dis_int_parity()
4096 assign( rS3, binop(shr_op, mkexpr(rS2), mkU8(8)) ); in dis_int_parity()
4103 assign( rS4, binop(shr_op, mkexpr(rS3), mkU8(8)) ); in dis_int_parity()
4105 assign( rS5, binop(shr_op, mkexpr(rS4), mkU8(8)) ); in dis_int_parity()
4109 assign( rS6, binop(shr_op, mkexpr(rS5), mkU8(8)) ); in dis_int_parity()
4113 assign( rS7, binop(shr_op, mkexpr(rS6), mkU8(8))); in dis_int_parity()
4172 r = ROTL( unop(Iop_64to32, mkexpr(rS) ), mkU8(sh_imm) ); in dis_int_rot()
4175 binop(Iop_Shl64, r, mkU8(32))) ); in dis_int_rot()
4184 r = ROTL(mkexpr(rS), mkU8(sh_imm)); in dis_int_rot()
4206 r = ROTL( unop(Iop_64to32, mkexpr(rS) ), mkU8(sh_imm) ); in dis_int_rot()
4211 binop(Iop_Shl64, mkexpr(rTmp), mkU8(32))) ); in dis_int_rot()
4220 assign( rA, binop(Iop_Shl32, mkexpr(rS), mkU8(sh_imm)) ); in dis_int_rot()
4227 assign( rA, binop(Iop_Shr32, mkexpr(rS), mkU8(MaskBeg)) ); in dis_int_rot()
4236 ROTL(mkexpr(rS), mkU8(sh_imm)), in dis_int_rot()
4257 assign(rot, binop(Iop_Or64, r, binop(Iop_Shl64, r, mkU8(32)))); in dis_int_rot()
4303 r = ROTL(mkexpr(rS), mkU8(sh_imm)); in dis_int_rot()
4322 assign( rA, binop(Iop_Shr64, mkexpr(rS), mkU8(msk_imm)) ); in dis_int_rot()
4326 r = ROTL(mkexpr(rS), mkU8(sh_imm)); in dis_int_rot()
4339 assign( rA, binop(Iop_Shl64, mkexpr(rS), mkU8(sh_imm)) ); in dis_int_rot()
4343 r = ROTL(mkexpr(rS), mkU8(sh_imm)); in dis_int_rot()
4353 r = ROTL(mkexpr(rS), mkU8(sh_imm)); in dis_int_rot()
4926 mkU8(toUChar(shift)) in generate_lsw_sequence()
4967 mkU8(toUChar(shift)))) in generate_stsw_sequence()
5774 putCR321(0, binop(Iop_Shl8, unop(Iop_1Uto8, mkexpr(resSC)), mkU8(1))); in dis_memsync()
5867 putCR321(0, binop(Iop_Shl8, unop(Iop_1Uto8, mkexpr(resSC)), mkU8(1))); in dis_memsync()
5941 binop(Iop_Shl32, mkexpr(rB_lo32), mkU8(26)), in dis_int_shift()
5942 mkU8(31))) ); in dis_int_shift()
5985 mkU8(32)), in dis_int_shift()
5986 mkU8(32 + sh_imm)) ); in dis_int_shift()
5989 mkU8(sh_imm)) ); in dis_int_shift()
6019 mkU8(26)), in dis_int_shift()
6020 mkU8(31)))); in dis_int_shift()
6044 binop(Iop_Shl64, mkexpr(rB), mkU8(57)), in dis_int_shift()
6045 mkU8(63)))) ); in dis_int_shift()
6080 assign( rA, binop(Iop_Sar64, getIReg(rS_addr), mkU8(sh_imm)) ); in dis_int_shift()
6107 binop(Iop_Shl64, mkexpr(rB), mkU8(57)), in dis_int_shift()
6108 mkU8(63)))) ); in dis_int_shift()
6139 binop(Iop_Shl32, mkexpr(t), mkU8(24)), in gen_byterev32()
6141 binop(Iop_And32, binop(Iop_Shl32, mkexpr(t), mkU8(8)), in gen_byterev32()
6144 binop(Iop_And32, binop(Iop_Shr32, mkexpr(t), mkU8(8)), in gen_byterev32()
6146 binop(Iop_And32, binop(Iop_Shr32, mkexpr(t), mkU8(24)), in gen_byterev32()
6158 binop(Iop_And32, binop(Iop_Shl32, mkexpr(t), mkU8(8)), in gen_byterev16()
6160 binop(Iop_And32, binop(Iop_Shr32, mkexpr(t), mkU8(8)), in gen_byterev16()
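gen_byterev32 composes a byte-swapped word from four shifted-and-masked copies of the input, and gen_byterev16 applies the same idiom to the low halfword. A plain-C equivalent of the 32-bit case (illustrative only; the translator emits IR, and the exact mask constants are not visible in the hits):

    /* Hypothetical plain-C form of the byte reversal the IR expresses. */
    static unsigned int byterev32 ( unsigned int t )
    {
       return ((t << 24) & 0xFF000000u)
            | ((t <<  8) & 0x00FF0000u)
            | ((t >>  8) & 0x0000FF00u)
            | ((t >> 24) & 0x000000FFu);
    }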
6309 putXER_SO( mkU8(0) ); in dis_proc_ctl()
6310 putXER_OV( mkU8(0) ); in dis_proc_ctl()
6311 putXER_CA( mkU8(0) ); in dis_proc_ctl()
6472 mkU8(shft)), cr ); in dis_proc_ctl()
6688 binop(Iop_Shl32, mkexpr(rm_PPC32), mkU8(1)), in get_IR_roundingmode()
6804 binop(Iop_Shr32, mkexpr(iLo), mkU8(31))) ); in dis_fp_load()
7196 putCR321( 1, mkU8(0) ); in dis_fp_arith()
7197 putCR0( 1, mkU8(0) ); in dis_fp_arith()
7345 putCR321( 1, mkU8(0) ); in dis_fp_multadd()
7346 putCR0( 1, mkU8(0) ); in dis_fp_multadd()
7397 mkU8( 31 ) ), in do_fp_tsqrt()
7668 binop( Iop_Shl32, fl_flag, mkU8( 3 ) ), in do_fp_tdiv()
7669 binop( Iop_Shl32, mkexpr(fg_flag), mkU8( 2 ) ) ), in do_fp_tdiv()
7670 binop( Iop_Shl32, mkexpr(fe_flag), mkU8( 1 ) ) ); in do_fp_tdiv()
7723 binop( Iop_Shl32, mkexpr(fg_flag), mkU8( 2 ) ) ), in dis_fp_tests()
7724 binop( Iop_Shl32, mkexpr(fe_flag), mkU8( 1 ) ) ) ); in dis_fp_tests()
7792 binop(Iop_Shr32, mkexpr(ccIR), mkU8(5)) in dis_fp_cmp()
7801 binop(Iop_Shr32, mkexpr(ccIR), mkU8(6)) in dis_fp_cmp()
8012 mkexpr(frB))), mkU8(31))), in dis_fp_round()
8034 putCR321( 1, mkU8(0) ); in dis_fp_round()
8035 putCR0( 1, mkU8(0) ); in dis_fp_round()
8215 putCR321( 1, mkU8(0) ); in dis_fp_move()
8216 putCR0( 1, mkU8(0) ); in dis_fp_move()
8270 binop(Iop_Shr32,fpscr_all,mkU8(4 * (7-crfS))), in dis_fp_scr()
9090 binop( Iop_Shl32, mkexpr(fg_flagHi), mkU8( 2 ) ) ), in dis_vxv_dp_arith()
9091 binop( Iop_Shl32, mkexpr(fe_flagHi), mkU8( 1 ) ) ) ); in dis_vxv_dp_arith()
9095 binop( Iop_Shl32, mkexpr(fg_flagLo), mkU8( 2 ) ) ), in dis_vxv_dp_arith()
9096 binop( Iop_Shl32, mkexpr(fe_flagLo), mkU8( 1 ) ) ) ); in dis_vxv_dp_arith()
9128 binop( Iop_Shl32, mkexpr(fg_flagHi), mkU8( 2 ) ) ), in dis_vxv_dp_arith()
9129 binop( Iop_Shl32, mkexpr(fe_flagHi), mkU8( 1 ) ) ) ); in dis_vxv_dp_arith()
9133 binop( Iop_Shl32, mkexpr(fg_flagLo), mkU8( 2 ) ) ), in dis_vxv_dp_arith()
9134 binop( Iop_Shl32, mkexpr(fe_flagLo), mkU8( 1 ) ) ) ); in dis_vxv_dp_arith()
9376 binop( Iop_Shl32, mkexpr(fg_flag0), mkU8( 2 ) ) ), in dis_vxv_sp_arith()
9377 binop( Iop_Shl32, mkexpr(fe_flag0), mkU8( 1 ) ) ) ); in dis_vxv_sp_arith()
9381 binop( Iop_Shl32, mkexpr(fg_flag1), mkU8( 2 ) ) ), in dis_vxv_sp_arith()
9382 binop( Iop_Shl32, mkexpr(fe_flag1), mkU8( 1 ) ) ) ); in dis_vxv_sp_arith()
9386 binop( Iop_Shl32, mkexpr(fg_flag2), mkU8( 2 ) ) ), in dis_vxv_sp_arith()
9387 binop( Iop_Shl32, mkexpr(fe_flag2), mkU8( 1 ) ) ) ); in dis_vxv_sp_arith()
9391 binop( Iop_Shl32, mkexpr(fg_flag3), mkU8( 2 ) ) ), in dis_vxv_sp_arith()
9392 binop( Iop_Shl32, mkexpr(fe_flag3), mkU8( 1 ) ) ) ); in dis_vxv_sp_arith()
9431 binop( Iop_Shl32, mkexpr(fg_flag0), mkU8( 2 ) ) ), in dis_vxv_sp_arith()
9432 binop( Iop_Shl32, mkexpr(fe_flag0), mkU8( 1 ) ) ) ); in dis_vxv_sp_arith()
9436 binop( Iop_Shl32, mkexpr(fg_flag1), mkU8( 2 ) ) ), in dis_vxv_sp_arith()
9437 binop( Iop_Shl32, mkexpr(fe_flag1), mkU8( 1 ) ) ) ); in dis_vxv_sp_arith()
9441 binop( Iop_Shl32, mkexpr(fg_flag2), mkU8( 2 ) ) ), in dis_vxv_sp_arith()
9442 binop( Iop_Shl32, mkexpr(fe_flag2), mkU8( 1 ) ) ) ); in dis_vxv_sp_arith()
9446 binop( Iop_Shl32, mkexpr(fg_flag3), mkU8( 2 ) ) ), in dis_vxv_sp_arith()
9447 binop( Iop_Shl32, mkexpr(fe_flag3), mkU8( 1 ) ) ) ); in dis_vxv_sp_arith()
9509 mkU8( 5 ) ) ), in get_fp_cmp_CR_val()
9516 mkU8( 6 ) ) ), in get_fp_cmp_CR_val()
9716 mkU8( 31 ) ) ), in _do_vsx_fp_roundToInt()
10384 binop( Iop_Shl32, mkexpr(fg_flag), mkU8( 2 ) ) ), in dis_vxs_arith()
10385 binop( Iop_Shl32, mkexpr(fe_flag), mkU8( 1 ) ) ) ); in dis_vxs_arith()
10515 binop( Iop_Shl32, mkexpr( all_elem_false ), mkU8( 1 ) ), in do_vvec_fp_cmp()
10516 binop( Iop_Shl32, mkexpr( all_elem_true ), mkU8( 3 ) ) ) ); in do_vvec_fp_cmp()
10651 assign(absVal, binop(Iop_ShrV128, binop(Iop_ShlV128, mkexpr(vB), mkU8(1)), mkU8(1))); in dis_vxs_misc()
10665 mkU8( 1 ) ), in dis_vxs_misc()
10666 mkU8( 1 ) ) ); in dis_vxs_misc()
10669 mkU8( 127 ) ), in dis_vxs_misc()
10670 mkU8( 127 ) ) ); in dis_vxs_misc()
10682 mkU8( 1 ) ) ) ); in dis_vxs_misc()
10694 mkU8( 1 ) ), in dis_vxs_misc()
10695 mkU8( 1 ) ) ); in dis_vxs_misc()
10700 mkU8( 127 ) ) ), in dis_vxs_misc()
10701 mkU8( 127 ) ) ); in dis_vxs_misc()
11032 assign( hi, binop(Iop_ShlV128, mkexpr(vA), mkU8(SHW*32)) ); in dis_vx_permute_misc()
11033 assign( lo, binop(Iop_ShrV128, mkexpr(vB), mkU8(128-SHW*32)) ); in dis_vx_permute_misc()
11111 binop( Iop_ShrV128, mkexpr( vB ), mkU8( sh_uim ) ) ) ) ); in dis_vx_permute_misc()
11283 assign( eb, binop(Iop_And8, mkU8(0xF), in dis_av_store()
11287 binop(Iop_Sub8, mkU8(15), mkexpr(eb)), in dis_av_store()
11288 mkU8(3)) ); in dis_av_store()
11297 assign( eb, binop(Iop_And8, mkU8(0xF), in dis_av_store()
11300 binop(Iop_Sub8, mkU8(14), mkexpr(eb)), in dis_av_store()
11301 mkU8(3)) ); in dis_av_store()
11310 assign( eb, binop(Iop_And8, mkU8(0xF), in dis_av_store()
11313 binop(Iop_Sub8, mkU8(12), mkexpr(eb)), in dis_av_store()
11314 mkU8(3)) ); in dis_av_store()
11382 mkU8(31)) ); in dis_av_arith()
11445 mkU8(31)) ); in dis_av_arith()
12006 mkU8(15))) ); in dis_av_multarith()
12012 mkU8(15))) ); in dis_av_multarith()
12033 mkU8(14)) ); in dis_av_multarith()
12040 mkU8(15))) ); in dis_av_multarith()
12047 mkU8(15))) ); in dis_av_multarith()
12266 assign( sh, binop(Iop_And8, mkU8(0x7), in dis_av_shift()
12276 assign( sh, binop(Iop_And8, mkU8(0x78), in dis_av_shift()
12304 assign( sh, binop(Iop_And8, mkU8(0x7), in dis_av_shift()
12329 assign( sh, binop(Iop_And8, mkU8(0x78), in dis_av_shift()
12396 unop(Iop_Dup8x16, mkU8(0xF))) ); in dis_av_permute()
12403 binop(Iop_ShlN8x16, mkexpr(vC), mkU8(3)), in dis_av_permute()
12404 mkU8(7)) ); in dis_av_permute()
12425 binop(Iop_ShlV128, mkexpr(vA), mkU8(SHB_uimm4*8)), in dis_av_permute()
12426 binop(Iop_ShrV128, mkexpr(vB), mkU8((16-SHB_uimm4)*8))) ); in dis_av_permute()
12481 binop(Iop_ShrV128, mkexpr(vB), mkU8(sh_uimm))))) ); in dis_av_permute()
12489 binop(Iop_ShrV128, mkexpr(vB), mkU8(sh_uimm))))) ); in dis_av_permute()
12498 binop(Iop_ShrV128, mkexpr(vB), mkU8(sh_uimm)))) ); in dis_av_permute()
12503 putVReg( vD_addr, unop(Iop_Dup8x16, mkU8(SIMM_8)) ); in dis_av_permute()
12587 mkexpr(vA), mkU8(15)))) ); in dis_av_pack()
12591 mkexpr(vB), mkU8(15)))) ); in dis_av_pack()
12607 mkexpr(vA), mkU8(31)))) ); in dis_av_pack()
12611 mkexpr(vB), mkU8(31)))) ); in dis_av_pack()
12644 binop(Iop_ShrN32x4, mkexpr(vA), mkU8(19)), in dis_av_pack()
12645 mkU8(10)) ); in dis_av_pack()
12647 binop(Iop_ShrN16x8, mkexpr(vA), mkU8(11)), in dis_av_pack()
12648 mkU8(5)) ); in dis_av_pack()
12650 binop(Iop_ShlN16x8, mkexpr(vA), mkU8(8)), in dis_av_pack()
12651 mkU8(11)) ); in dis_av_pack()
12656 binop(Iop_ShrN32x4, mkexpr(vB), mkU8(19)), in dis_av_pack()
12657 mkU8(10)) ); in dis_av_pack()
12659 binop(Iop_ShrN16x8, mkexpr(vB), mkU8(11)), in dis_av_pack()
12660 mkU8(5)) ); in dis_av_pack()
12662 binop(Iop_ShlN16x8, mkexpr(vB), mkU8(8)), in dis_av_pack()
12663 mkU8(11)) ); in dis_av_pack()
12727 binop(Iop_SarN16x8, mkexpr(vB), mkU8(15)), in dis_av_pack()
12728 mkU8(8)) ); in dis_av_pack()
12730 binop(Iop_ShlN16x8, mkexpr(vB), mkU8(1)), in dis_av_pack()
12731 mkU8(11)) ); in dis_av_pack()
12736 binop(Iop_ShrN16x8, mkexpr(vB), mkU8(5)), in dis_av_pack()
12737 mkU8(11)), in dis_av_pack()
12738 mkU8(3)) ); in dis_av_pack()
12740 binop(Iop_ShlN16x8, mkexpr(vB), mkU8(11)), in dis_av_pack()
12741 mkU8(11)) ); in dis_av_pack()
12746 binop(Iop_ShlN32x4, mkexpr(z01), mkU8(16)), in dis_av_pack()
12760 binop(Iop_SarN16x8, mkexpr(vB), mkU8(15)), in dis_av_pack()
12761 mkU8(8)) ); in dis_av_pack()
12763 binop(Iop_ShlN16x8, mkexpr(vB), mkU8(1)), in dis_av_pack()
12764 mkU8(11)) ); in dis_av_pack()
12769 binop(Iop_ShrN16x8, mkexpr(vB), mkU8(5)), in dis_av_pack()
12770 mkU8(11)), in dis_av_pack()
12771 mkU8(3)) ); in dis_av_pack()
12773 binop(Iop_ShlN16x8, mkexpr(vB), mkU8(11)), in dis_av_pack()
12774 mkU8(11)) ); in dis_av_pack()
12779 binop(Iop_ShlN32x4, mkexpr(z01), mkU8(16)), in dis_av_pack()
12976 mkU8(30)) ); in dis_av_fp_cmp()