Lines Matching refs:unop

248 static IRExpr* unop ( IROp op, IRExpr* a ) in unop() function
334 return unop(signd ? Iop_8Sto32 : Iop_8Uto32, src); in doScalarWidening()
337 return unop(signd ? Iop_8Sto16 : Iop_8Uto16, src); in doScalarWidening()
340 return unop(signd ? Iop_16Sto32 : Iop_16Uto32, src); in doScalarWidening()
343 return unop(Iop_8Uto64, src); in doScalarWidening()
346 return unop(Iop_8Sto64, src); in doScalarWidening()
349 return unop(Iop_16Uto64, src); in doScalarWidening()
352 return unop(Iop_16Sto64, src); in doScalarWidening()
1012 case 4: return unop(Iop_64to32, IRExpr_Get( OFFB_RAX, Ity_I64 )); in getIRegRAX()
1027 stmt( IRStmt_Put( OFFB_RAX, unop(Iop_32Uto64,e) )); in putIRegRAX()
1060 case 4: return unop(Iop_64to32, IRExpr_Get( OFFB_RDX, Ity_I64 )); in getIRegRDX()
1073 case 4: stmt( IRStmt_Put( OFFB_RDX, unop(Iop_32Uto64,e) )); in putIRegRDX()
1111 return unop(Iop_64to32, in getIReg32()
1120 unop(Iop_32Uto64,e) ) ); in putIReg32()
1143 unop(Iop_16Uto64,e) ) ); in putIReg16()
1186 return unop(Iop_64to32, in getIRegRexB()
1211 sz==4 ? unop(Iop_32Uto64,e) : e in putIRegRexB()
1267 return unop(Iop_64to32, in getIRegG()
1281 e = unop(Iop_32Uto64,e); in putIRegG()
1314 return unop(Iop_64to32, in getIRegE()
1328 e = unop(Iop_32Uto64,e); in putIRegE()
1558 return unop(Iop_64to1, in mkAnd1()
1560 unop(Iop_1Uto64,x), in mkAnd1()
1561 unop(Iop_1Uto64,y))); in mkAnd1()
1645 return unop(Iop_64to1, call); in mk_amd64g_calculate_condition()
1692 case Ity_I32: return unop(Iop_32Uto64, e); in widenUto64()
1693 case Ity_I16: return unop(Iop_16Uto64, e); in widenUto64()
1694 case Ity_I8: return unop(Iop_8Uto64, e); in widenUto64()
1704 case Ity_I32: return unop(Iop_32Sto64, e); in widenSto64()
1705 case Ity_I16: return unop(Iop_16Sto64, e); in widenSto64()
1706 case Ity_I8: return unop(Iop_8Sto64, e); in widenSto64()
1719 return unop(Iop_32to16, e); in narrowTo()
1721 return unop(Iop_32to8, e); in narrowTo()
1723 return unop(Iop_64to32, e); in narrowTo()
1725 return unop(Iop_64to16, e); in narrowTo()
1727 return unop(Iop_64to8, e); in narrowTo()
2317 virtual = unop(Iop_32Uto64, unop(Iop_64to32, virtual)); in handleAddrOverrides()
3216 putIReg64( R_RAX, unop(Iop_128to64,mkexpr(dst128)) ); in codegen_div()
3217 putIReg64( R_RDX, unop(Iop_128HIto64,mkexpr(dst128)) ); in codegen_div()
3229 putIRegRAX( 4, unop(Iop_64to32,mkexpr(dst64)) ); in codegen_div()
3230 putIRegRDX( 4, unop(Iop_64HIto32,mkexpr(dst64)) ); in codegen_div()
3235 assign( src64, unop(widen3264, in codegen_div()
3239 assign( dst64, binop(op, mkexpr(src64), unop(widen1632,mkexpr(t))) ); in codegen_div()
3240 putIRegRAX( 2, unop(Iop_32to16,unop(Iop_64to32,mkexpr(dst64))) ); in codegen_div()
3241 putIRegRDX( 2, unop(Iop_32to16,unop(Iop_64HIto32,mkexpr(dst64))) ); in codegen_div()
3248 assign( src64, unop(widen3264, in codegen_div()
3249 unop(widen1632, getIRegRAX(2))) ); in codegen_div()
3252 unop(widen1632, unop(widen816, mkexpr(t)))) ); in codegen_div()
3253 putIRegRAX( 1, unop(Iop_16to8, in codegen_div()
3254 unop(Iop_32to16, in codegen_div()
3255 unop(Iop_64to32,mkexpr(dst64)))) ); in codegen_div()
3256 putIRegAH( unop(Iop_16to8, in codegen_div()
3257 unop(Iop_32to16, in codegen_div()
3258 unop(Iop_64HIto32,mkexpr(dst64)))) ); in codegen_div()
3782 assign( resHi, unop(Iop_128HIto64,mkexpr(res128))); in codegen_mulL_A_D()
3783 assign( resLo, unop(Iop_128to64,mkexpr(res128))); in codegen_mulL_A_D()
3796 assign( resHi, unop(Iop_64HIto32,mkexpr(res64))); in codegen_mulL_A_D()
3797 assign( resLo, unop(Iop_64to32,mkexpr(res64))); in codegen_mulL_A_D()
3810 assign( resHi, unop(Iop_32HIto16,mkexpr(res32))); in codegen_mulL_A_D()
3811 assign( resLo, unop(Iop_32to16,mkexpr(res32))); in codegen_mulL_A_D()
3824 assign( resHi, unop(Iop_16HIto8,mkexpr(res16))); in codegen_mulL_A_D()
3825 assign( resLo, unop(Iop_16to8,mkexpr(res16))); in codegen_mulL_A_D()
3874 unop(mkSizedOp(ty,Iop_Not8), in dis_Grp3()
3952 assign(dst1, unop(mkSizedOp(ty,Iop_Not8), mkexpr(t1))); in dis_Grp3()
4269 assign( td, unop(Iop_32Uto64, getIReg32(R_RDI)) ); in dis_MOVS()
4270 assign( ts, unop(Iop_32Uto64, getIReg32(R_RSI)) ); in dis_MOVS()
4281 incd = unop(Iop_32Uto64, unop(Iop_64to32, incd)); in dis_MOVS()
4282 incs = unop(Iop_32Uto64, unop(Iop_64to32, incs)); in dis_MOVS()
4296 assign( ts, unop(Iop_32Uto64, getIReg32(R_RSI)) ); in dis_LODS()
4304 incs = unop(Iop_32Uto64, unop(Iop_64to32, incs)); in dis_LODS()
4319 assign( td, unop(Iop_32Uto64, getIReg32(R_RDI)) ); in dis_STOS()
4327 incd = unop(Iop_32Uto64, unop(Iop_64to32, incd)); in dis_STOS()
4342 assign( td, unop(Iop_32Uto64, getIReg32(R_RDI)) ); in dis_CMPS()
4343 assign( ts, unop(Iop_32Uto64, getIReg32(R_RSI)) ); in dis_CMPS()
4358 incd = unop(Iop_32Uto64, unop(Iop_64to32, incd)); in dis_CMPS()
4359 incs = unop(Iop_32Uto64, unop(Iop_64to32, incs)); in dis_CMPS()
4377 assign( td, unop(Iop_32Uto64, getIReg32(R_RDI)) ); in dis_SCAS()
4387 incd = unop(Iop_32Uto64, unop(Iop_64to32, incd)); in dis_SCAS()
4658 unop(Iop_1Uto8, in gen_LZCNT()
4660 unop(Iop_Clz64, mkexpr(src64x)), in gen_LZCNT()
4726 return unop(Iop_64to32, IRExpr_Get( OFFB_FPROUND, Ity_I64 )); in get_fpround()
4732 stmt( IRStmt_Put( OFFB_FPROUND, unop(Iop_32Uto64,e) ) ); in put_fpround()
4863 unop(Iop_32to16, in get_FPU_sw()
4868 binop(Iop_And32, unop(Iop_64to32, get_C3210()), in get_FPU_sw()
4899 unop(Iop_F32toF64, loadLE(Ity_F32,mkexpr(addr))) in fp_do_op_mem_ST_0()
4924 unop(Iop_F32toF64, loadLE(Ity_F32,mkexpr(addr))), in fp_do_oprev_mem_ST_0()
4984 unop( Iop_32Uto64, in fp_do_ucomi_ST0_STi()
5002 unop(Iop_1Uto8, in x87ishly_qnarrow_32_to_16()
5004 unop(Iop_32Uto64, in x87ishly_qnarrow_32_to_16()
5008 unop(Iop_32to16, mkexpr(t32))); in x87ishly_qnarrow_32_to_16()
5052 unop( Iop_32Uto64, in dis_FPU()
5057 unop(Iop_F32toF64, in dis_FPU()
5070 unop( Iop_32Uto64, in dis_FPU()
5075 unop(Iop_F32toF64, in dis_FPU()
5122 unop(Iop_32Uto64, in dis_FPU()
5137 unop(Iop_32Uto64, in dis_FPU()
5184 put_ST(0, unop(Iop_F32toF64, in dis_FPU()
5245 assign(ew, unop(Iop_64to32,mkexpr(w64)) ); in dis_FPU()
5278 unop( Iop_16Uto64, in dis_FPU()
5284 put_fpround( unop(Iop_64to32, mkexpr(t64)) ); in dis_FPU()
5285 assign( ew, unop(Iop_64HIto32, mkexpr(t64) ) ); in dis_FPU()
5350 unop( Iop_64to16, in dis_FPU()
5354 mkIRExprVec_1( unop(Iop_32Uto64, get_fpround()) ) in dis_FPU()
5392 put_ST_UNCHECKED(0, unop(Iop_NegF64, get_ST(0))); in dis_FPU()
5397 put_ST_UNCHECKED(0, unop(Iop_AbsF64, get_ST(0))); in dis_FPU()
5407 = mkIRExprVec_2( unop(Iop_8Uto64, get_ST_TAG(0)), in dis_FPU()
5408 unop(Iop_ReinterpF64asI64, in dis_FPU()
5517 assign( argI, unop(Iop_ReinterpF64asI64, mkexpr(argF))); in dis_FPU()
5534 assign( sigF, unop(Iop_ReinterpI64asF64, mkexpr(sigI)) ); in dis_FPU()
5535 assign( expF, unop(Iop_ReinterpI64asF64, mkexpr(expI)) ); in dis_FPU()
5558 unop(Iop_32Uto64, in dis_FPU()
5585 unop(Iop_32Uto64, in dis_FPU()
5715 unop(Iop_I32StoF64, in dis_FPU()
5723 unop(Iop_I32StoF64, in dis_FPU()
5744 unop(Iop_1Uto8, in dis_FPU()
5754 unop(Iop_1Uto8, in dis_FPU()
5764 unop(Iop_1Uto8, in dis_FPU()
5774 unop(Iop_1Uto8, in dis_FPU()
5783 unop(Iop_32Uto64, in dis_FPU()
5816 put_ST(0, unop(Iop_I32StoF64, in dis_FPU()
5863 put_ST(0, unop(Iop_ReinterpI64asF64, mkexpr(val))); in dis_FPU()
5875 unop(Iop_ReinterpF64asI64, get_ST(0)) ); in dis_FPU()
5912 unop(Iop_1Uto8, in dis_FPU()
5923 unop(Iop_1Uto8, in dis_FPU()
5937 unop(Iop_1Uto8, in dis_FPU()
5951 unop(Iop_1Uto8, in dis_FPU()
6056 unop(Iop_32Uto64, in dis_FPU()
6228 assign(ew, unop(Iop_64to32,mkexpr(w64)) ); in dis_FPU()
6357 unop(Iop_32Uto64, in dis_FPU()
6371 unop(Iop_32Uto64, in dis_FPU()
6436 unop(Iop_I32StoF64, in dis_FPU()
6437 unop(Iop_16Sto32, in dis_FPU()
6445 unop(Iop_I32StoF64, in dis_FPU()
6446 unop(Iop_16Sto32, in dis_FPU()
6474 unop(Iop_32Uto64, in dis_FPU()
6524 put_ST(0, unop(Iop_I32StoF64, in dis_FPU()
6525 unop(Iop_16Sto32, in dis_FPU()
6592 unop(Iop_32to16, in dis_FPU()
6598 unop(Iop_64to32, get_C3210()), in dis_FPU()
6792 argG = unop(Iop_Not64, argG); in dis_MMXop_regmem_to_reg()
6871 assign( amt8, unop(Iop_64to8, mkexpr(amt)) ); in dis_MMX_shiftG_byE()
6891 unop(Iop_1Uto8,binop(Iop_CmpLT64U,mkexpr(amt),mkU64(size))), in dis_MMX_shiftG_byE()
6901 unop(Iop_1Uto8,binop(Iop_CmpLT64U,mkexpr(amt),mkU64(size))), in dis_MMX_shiftG_byE()
7044 unop(Iop_64to32, getMMXReg(gregLO3ofRM(modrm)) ) ); in dis_MMX()
7052 unop(Iop_64to32, getMMXReg(gregLO3ofRM(modrm)) ) ); in dis_MMX()
7343 unop(Iop_Not64, mkexpr(mask)))) ); in dis_MMX()
7526 binop(Iop_Shl64, unop(Iop_16Uto64, mkexpr(esrc)), in dis_SHLRD_Gv_Ev()
7542 unop(Iop_16Uto64, mkexpr(esrc)), in dis_SHLRD_Gv_Ev()
7676 unop(Iop_64to8, in dis_bt_G_E()
7703 unop(Iop_Not8, mkexpr(t_mask))) ); in dis_bt_G_E()
7725 unop(Iop_8Uto64, mkexpr(t_fetched)), in dis_bt_G_E()
7794 unop(Iop_1Uto8, in dis_bs_E_G()
7848 fwds ? unop(Iop_Ctz64, mkexpr(src64)) in dis_bs_E_G()
7851 unop(Iop_Clz64, mkexpr(src64))) in dis_bs_E_G()
7856 assign( dst, unop(Iop_64to16, mkexpr(dst64)) ); in dis_bs_E_G()
7859 assign( dst, unop(Iop_64to32, mkexpr(dst64)) ); in dis_bs_E_G()
7991 assign( cond8, unop(Iop_1Uto8, mk_amd64g_calculate_condition(AMD64CondZ)) ); in dis_cmpxchg_G_E()
8008 assign( cond8, unop(Iop_1Uto8, mk_amd64g_calculate_condition(AMD64CondZ)) ); in dis_cmpxchg_G_E()
8031 assign( cond8, unop(Iop_1Uto8, mk_amd64g_calculate_condition(AMD64CondZ)) ); in dis_cmpxchg_G_E()
8081 IRExpr_Mux0X( unop(Iop_1Uto8, in dis_cmov_E_G()
8099 IRExpr_Mux0X( unop(Iop_1Uto8, in dis_cmov_E_G()
8308 = invertG ? unop(Iop_NotV128, getXMMReg(gregOfRexRM(pfx,rm))) in dis_SSE_E_to_G_all_wrk()
8376 assign( epart, unop( Iop_32UtoV128, in dis_SSE_E_to_G_lo32()
8412 assign( epart, unop( Iop_64UtoV128, in dis_SSE_E_to_G_lo64()
8438 unop(op, getXMMReg(eregOfRexRM(pfx,rm))) ); in dis_SSE_E_to_G_unary_all()
8446 unop(op, loadLE(Ity_V128, mkexpr(addr))) ); in dis_SSE_E_to_G_unary_all()
8479 putXMMReg( gregOfRexRM(pfx,rm), unop(op, mkexpr(oldG1)) ); in dis_SSE_E_to_G_unary_lo32()
8490 putXMMReg( gregOfRexRM(pfx,rm), unop(op, mkexpr(oldG1)) ); in dis_SSE_E_to_G_unary_lo32()
8523 putXMMReg( gregOfRexRM(pfx,rm), unop(op, mkexpr(oldG1)) ); in dis_SSE_E_to_G_unary_lo64()
8534 putXMMReg( gregOfRexRM(pfx,rm), unop(op, mkexpr(oldG1)) ); in dis_SSE_E_to_G_unary_lo64()
8746 ? unop( Iop_64UtoV128, loadLE(Ity_I64, mkexpr(addr))) in dis_SSE_cmp_E_to_G()
8748 unop( Iop_32UtoV128, loadLE(Ity_I32, mkexpr(addr))) in dis_SSE_cmp_E_to_G()
8760 unop(Iop_NotV128, mkexpr(plain)) ); in dis_SSE_cmp_E_to_G()
8807 assign( amt8, unop(Iop_64to8, mkexpr(amt)) ); in dis_SSE_shiftG_byE()
8827 unop(Iop_1Uto8, in dis_SSE_shiftG_byE()
8838 unop(Iop_1Uto8, in dis_SSE_shiftG_byE()
8913 unop( Iop_64to32, in get_sse_roundingmode()
8923 unop(Iop_32Uto64,sseround) ) ); in put_sse_roundingmode()
8935 assign( hi64, unop(Iop_V128HIto64, mkexpr(t128)) ); in breakupV128to32s()
8936 assign( lo64, unop(Iop_V128to64, mkexpr(t128)) ); in breakupV128to32s()
8947 assign( *t0, unop(Iop_64to32, mkexpr(lo64)) ); in breakupV128to32s()
8948 assign( *t1, unop(Iop_64HIto32, mkexpr(lo64)) ); in breakupV128to32s()
8949 assign( *t2, unop(Iop_64to32, mkexpr(hi64)) ); in breakupV128to32s()
8950 assign( *t3, unop(Iop_64HIto32, mkexpr(hi64)) ); in breakupV128to32s()
8974 assign( hi32, unop(Iop_64HIto32, mkexpr(t64)) ); in breakup64to16s()
8975 assign( lo32, unop(Iop_64to32, mkexpr(t64)) ); in breakup64to16s()
8986 assign( *t0, unop(Iop_32to16, mkexpr(lo32)) ); in breakup64to16s()
8987 assign( *t1, unop(Iop_32HIto16, mkexpr(lo32)) ); in breakup64to16s()
8988 assign( *t2, unop(Iop_32to16, mkexpr(hi32)) ); in breakup64to16s()
8989 assign( *t3, unop(Iop_32HIto16, mkexpr(hi32)) ); in breakup64to16s()
9019 assign( *t0, unop(Iop_V256to64_0, mkexpr(t256)) ); in breakupV256to64s()
9020 assign( *t1, unop(Iop_V256to64_1, mkexpr(t256)) ); in breakupV256to64s()
9021 assign( *t2, unop(Iop_V256to64_2, mkexpr(t256)) ); in breakupV256to64s()
9022 assign( *t3, unop(Iop_V256to64_3, mkexpr(t256)) ); in breakupV256to64s()
9035 assign(*t1, unop(Iop_V256toV128_1, mkexpr(t256))); in breakupV256toV128s()
9036 assign(*t0, unop(Iop_V256toV128_0, mkexpr(t256))); in breakupV256toV128s()
9065 assign( *t0, unop(Iop_V128to64, mkexpr(t128)) ); in breakupV128to64s()
9066 assign( *t1, unop(Iop_V128HIto64, mkexpr(t128)) ); in breakupV128to64s()
9237 assign( posMask, unop(Iop_Not64, mkexpr(negMask)) ); in math_PABS_MMX()
9253 assign(aaHi, unop(Iop_V128HIto64, mkexpr(aa))); in math_PABS_XMM()
9254 assign(aaLo, unop(Iop_V128to64, mkexpr(aa))); in math_PABS_XMM()
9296 assign( dHi, unop(Iop_V128HIto64, mkexpr(dV)) ); in math_PALIGNR_XMM()
9297 assign( dLo, unop(Iop_V128to64, mkexpr(dV)) ); in math_PALIGNR_XMM()
9298 assign( sHi, unop(Iop_V128HIto64, mkexpr(sV)) ); in math_PALIGNR_XMM()
9299 assign( sLo, unop(Iop_V128to64, mkexpr(sV)) ); in math_PALIGNR_XMM()
9543 unop( Iop_32Uto64, in dis_COMISD()
9586 unop( Iop_32Uto64, in dis_COMISS()
9588 unop(Iop_F32toF64,mkexpr(argL)), in dis_COMISS()
9589 unop(Iop_F32toF64,mkexpr(argR)))), in dis_COMISS()
9656 assign( hi64, unop(Iop_V128HIto64, mkexpr(sV)) ); in math_PSRLDQ()
9657 assign( lo64, unop(Iop_V128to64, mkexpr(sV)) ); in math_PSRLDQ()
9703 assign( hi64, unop(Iop_V128HIto64, mkexpr(sV)) ); in math_PSLLDQ()
9704 assign( lo64, unop(Iop_V128to64, mkexpr(sV)) ); in math_PSLLDQ()
9823 unop(Iop_F32toF64, mkexpr(f32lo))) ); in dis_CVTxSS2SI()
9829 unop(Iop_F32toF64, mkexpr(f32lo))) ); in dis_CVTxSS2SI()
9863 putXMMRegLane64F( rG, 1, unop(Iop_F32toF64, mkexpr(f32hi)) ); in dis_CVTPS2PD_128()
9864 putXMMRegLane64F( rG, 0, unop(Iop_F32toF64, mkexpr(f32lo)) ); in dis_CVTPS2PD_128()
9904 putYMMRegLane64F( rG, 3, unop(Iop_F32toF64, mkexpr(f32_3)) ); in dis_CVTPS2PD_256()
9905 putYMMRegLane64F( rG, 2, unop(Iop_F32toF64, mkexpr(f32_2)) ); in dis_CVTPS2PD_256()
9906 putYMMRegLane64F( rG, 1, unop(Iop_F32toF64, mkexpr(f32_1)) ); in dis_CVTPS2PD_256()
9907 putYMMRegLane64F( rG, 0, unop(Iop_F32toF64, mkexpr(f32_0)) ); in dis_CVTPS2PD_256()
9939 assign( t0, unop(Iop_ReinterpI64asF64, in dis_CVTPD2PS_128()
9940 unop(Iop_V128to64, mkexpr(argV))) ); in dis_CVTPD2PS_128()
9941 assign( t1, unop(Iop_ReinterpI64asF64, in dis_CVTPD2PS_128()
9942 unop(Iop_V128HIto64, mkexpr(argV))) ); in dis_CVTPD2PS_128()
9992 unop( Iop_F32toF64, \ in dis_CVTxPS2DQ_128()
9993 unop( Iop_ReinterpI32asF32, mkexpr(_t))) ) in dis_CVTxPS2DQ_128()
10042 unop( Iop_F32toF64, \ in dis_CVTxPS2DQ_256()
10043 unop( Iop_ReinterpI32asF32, mkexpr(_t))) ) in dis_CVTxPS2DQ_256()
10093 assign( t0, unop(Iop_ReinterpI64asF64, in dis_CVTxPD2DQ_128()
10094 unop(Iop_V128to64, mkexpr(argV))) ); in dis_CVTxPD2DQ_128()
10095 assign( t1, unop(Iop_ReinterpI64asF64, in dis_CVTxPD2DQ_128()
10096 unop(Iop_V128HIto64, mkexpr(argV))) ); in dis_CVTxPD2DQ_128()
10154 unop( Iop_ReinterpI64asF64, \ in dis_CVTxPD2DQ_256()
10203 unop(Iop_I32StoF64,mkexpr(_t))) in dis_CVTDQ2PS_128()
10253 unop(Iop_I32StoF64,mkexpr(_t))) in dis_CVTDQ2PS_256()
10286 putIReg32(rG, unop(Iop_64to32,mkexpr(t5))); in dis_PMOVMSKB_128()
10318 assign( d1, unop(Iop_V128HIto64, mkexpr(dV)) ); in math_UNPCKxPD_128()
10319 assign( d0, unop(Iop_V128to64, mkexpr(dV)) ); in math_UNPCKxPD_128()
10320 assign( s1, unop(Iop_V128HIto64, mkexpr(sV)) ); in math_UNPCKxPD_128()
10321 assign( s0, unop(Iop_V128to64, mkexpr(sV)) ); in math_UNPCKxPD_128()
10414 assign( d1, unop(Iop_V128HIto64, mkexpr(dV)) ); in math_SHUFPD_128()
10415 assign( d0, unop(Iop_V128to64, mkexpr(dV)) ); in math_SHUFPD_128()
10416 assign( s1, unop(Iop_V128HIto64, mkexpr(sV)) ); in math_SHUFPD_128()
10417 assign( s0, unop(Iop_V128to64, mkexpr(sV)) ); in math_SHUFPD_128()
10465 unop( Iop_NotV128, mkexpr(imm8_mask) ) ) ) ); in math_BLENDPD_128()
10498 unop( Iop_NotV128, mkexpr(imm8_mask) ) ) ) ); in math_BLENDPS_128()
10535 unop( Iop_NotV128, mkexpr(imm16_mask) ) ) ) ); in math_PBLENDW_128()
10604 assign( a1, unop(Iop_V128HIto64, mkexpr(addV) )); in math_ADDSUBPD_128()
10605 assign( s0, unop(Iop_V128to64, mkexpr(subV) )); in math_ADDSUBPD_128()
10709 assign( sVmut, unop(xIsH ? Iop_V128HIto64 : Iop_V128to64, mkexpr(sV)) ); in dis_PSHUFxW_128()
10710 assign( sVcon, unop(xIsH ? Iop_V128to64 : Iop_V128HIto64, mkexpr(sV)) ); in dis_PSHUFxW_128()
10751 case 0: assign(d16, unop(Iop_32to16, mkexpr(s0))); break; in dis_PEXTRW_128_EregOnly_toG()
10752 case 1: assign(d16, unop(Iop_32HIto16, mkexpr(s0))); break; in dis_PEXTRW_128_EregOnly_toG()
10753 case 2: assign(d16, unop(Iop_32to16, mkexpr(s1))); break; in dis_PEXTRW_128_EregOnly_toG()
10754 case 3: assign(d16, unop(Iop_32HIto16, mkexpr(s1))); break; in dis_PEXTRW_128_EregOnly_toG()
10755 case 4: assign(d16, unop(Iop_32to16, mkexpr(s2))); break; in dis_PEXTRW_128_EregOnly_toG()
10756 case 5: assign(d16, unop(Iop_32HIto16, mkexpr(s2))); break; in dis_PEXTRW_128_EregOnly_toG()
10757 case 6: assign(d16, unop(Iop_32to16, mkexpr(s3))); break; in dis_PEXTRW_128_EregOnly_toG()
10758 case 7: assign(d16, unop(Iop_32HIto16, mkexpr(s3))); break; in dis_PEXTRW_128_EregOnly_toG()
10761 putIReg32(rG, unop(Iop_16Uto32, mkexpr(d16))); in dis_PEXTRW_128_EregOnly_toG()
10789 unop(Iop_I32StoF64, unop(Iop_64to32, mkexpr(arg64))) in dis_CVTDQ2PD_128()
10793 unop(Iop_I32StoF64, unop(Iop_64HIto32, mkexpr(arg64))) in dis_CVTDQ2PD_128()
10821 unop(Iop_64to32, in dis_STMXCSR()
10825 mkIRExprVec_1( unop(Iop_32Uto64,get_sse_roundingmode()) ) in dis_STMXCSR()
10863 unop(Iop_32Uto64, in dis_LDMXCSR()
10870 put_sse_roundingmode( unop(Iop_64to32, mkexpr(t64)) ); in dis_LDMXCSR()
10871 assign( ew, unop(Iop_64HIto32, mkexpr(t64) ) ); in dis_LDMXCSR()
10878 binop(Iop_CmpNE64, unop(Iop_32Uto64,mkexpr(ew)), mkU64(0)), in dis_LDMXCSR()
10897 unop(Iop_16Uto64, mkexpr(u16)), in math_PINSRW_128()
10970 unop(Iop_NotV128, mkexpr(mask)))) ); in dis_MASKMOVDQU()
11666 unop(Iop_I32StoF64, in dis_ESC_0F__SSE2()
11667 unop(Iop_64to32, mkexpr(arg64)) )) ); in dis_ESC_0F__SSE2()
11673 unop(Iop_I32StoF64, in dis_ESC_0F__SSE2()
11674 unop(Iop_64HIto32, mkexpr(arg64)) )) ); in dis_ESC_0F__SSE2()
11703 unop(Iop_I32StoF64, mkexpr(arg32)) ) ); in dis_ESC_0F__SSE2()
11748 unop(Iop_I32StoF64, mkexpr(arg32)) in dis_ESC_0F__SSE2()
11804 unop(Iop_I32StoF64, unop(Iop_64to32, mkexpr(arg64)) ) in dis_ESC_0F__SSE2()
11809 unop(Iop_I32StoF64, unop(Iop_64HIto32, mkexpr(arg64)) ) in dis_ESC_0F__SSE2()
11882 unop( Iop_F32toF64, mkexpr(f32hi) ) ), in dis_ESC_0F__SSE2()
11885 unop( Iop_F32toF64, mkexpr(f32lo) ) ) in dis_ESC_0F__SSE2()
12219 unop( Iop_F32toF64, mkexpr(f32lo) ) ); in dis_ESC_0F__SSE2()
12525 unop( Iop_32UtoV128, getIReg32(eregOfRexRM(pfx,modrm)) ) in dis_ESC_0F__SSE2()
12532 unop( Iop_64UtoV128, getIReg64(eregOfRexRM(pfx,modrm)) ) in dis_ESC_0F__SSE2()
12543 ? unop( Iop_32UtoV128,loadLE(Ity_I32, mkexpr(addr)) ) in dis_ESC_0F__SSE2()
12544 : unop( Iop_64UtoV128,loadLE(Ity_I64, mkexpr(addr)) ) in dis_ESC_0F__SSE2()
13247 putIReg64(gregOfRexRM(pfx,modrm), unop(Iop_16Uto64, mkexpr(t5))); in dis_ESC_0F__SSE2()
13249 putIReg32(gregOfRexRM(pfx,modrm), unop(Iop_16Uto32, mkexpr(t5))); in dis_ESC_0F__SSE2()
13392 unop(Iop_64UtoV128, getMMXReg( eregLO3ofRM(modrm) )) ); in dis_ESC_0F__SSE2()
13461 putIReg32(gregOfRexRM(pfx,modrm), unop(Iop_64to32,mkexpr(t1))); in dis_ESC_0F__SSE2()
13849 assign( t0, unop(Iop_64to32, mkexpr(dV)) ); in dis_ESC_0F__SSE2()
13850 assign( t1, unop(Iop_64to32, mkexpr(sV)) ); in dis_ESC_0F__SSE2()
14039 assign ( d0, unop(Iop_V128to64, mkexpr(sV)) ); in dis_MOVDDUP_128()
14399 assign( dHi, unop(Iop_V128HIto64, mkexpr(dV)) ); in math_PSHUFB_XMM()
14400 assign( dLo, unop(Iop_V128to64, mkexpr(dV)) ); in math_PSHUFB_XMM()
14401 assign( sHi, unop(Iop_V128HIto64, mkexpr(sV)) ); in math_PSHUFB_XMM()
14402 assign( sLo, unop(Iop_V128to64, mkexpr(sV)) ); in math_PSHUFB_XMM()
14415 unop(Iop_Not64, binop(Iop_SarN8x8,mkexpr(sHi),mkU8(7)))); in math_PSHUFB_XMM()
14434 unop(Iop_Not64,mkexpr(maskBit3hi))) )); in math_PSHUFB_XMM()
14442 unop(Iop_Not64, binop(Iop_SarN8x8,mkexpr(sLo),mkU8(7)))); in math_PSHUFB_XMM()
14461 unop(Iop_Not64,mkexpr(maskBit3lo))) )); in math_PSHUFB_XMM()
14520 assign( dHi, unop(Iop_V128HIto64, mkexpr(dV)) ); in dis_PHADD_128()
14521 assign( dLo, unop(Iop_V128to64, mkexpr(dV)) ); in dis_PHADD_128()
14522 assign( sHi, unop(Iop_V128HIto64, mkexpr(sV)) ); in dis_PHADD_128()
14523 assign( sLo, unop(Iop_V128to64, mkexpr(sV)) ); in dis_PHADD_128()
14647 unop(Iop_Not64, binop(Iop_SarN8x8, mkexpr(sV), mkU8(7))) in dis_ESC_0F38__SupSSE3()
14862 assign( dHi, unop(Iop_V128HIto64, mkexpr(dV)) ); in dis_ESC_0F38__SupSSE3()
14863 assign( dLo, unop(Iop_V128to64, mkexpr(dV)) ); in dis_ESC_0F38__SupSSE3()
14864 assign( sHi, unop(Iop_V128HIto64, mkexpr(sV)) ); in dis_ESC_0F38__SupSSE3()
14865 assign( sLo, unop(Iop_V128to64, mkexpr(sV)) ); in dis_ESC_0F38__SupSSE3()
14946 assign( dHi, unop(Iop_V128HIto64, mkexpr(dV)) ); in dis_ESC_0F38__SupSSE3()
14947 assign( dLo, unop(Iop_V128to64, mkexpr(dV)) ); in dis_ESC_0F38__SupSSE3()
14948 assign( sHi, unop(Iop_V128HIto64, mkexpr(sV)) ); in dis_ESC_0F38__SupSSE3()
14949 assign( sLo, unop(Iop_V128to64, mkexpr(sV)) ); in dis_ESC_0F38__SupSSE3()
15266 unop(Iop_1Uto64, in dis_ESC_0F__SSE4()
15318 unop(Iop_1Uto64, in dis_ESC_0F__SSE4()
15322 unop(Iop_1Uto64, in dis_ESC_0F__SSE4()
15372 assign(notmask, unop(Iop_NotV128, mkexpr(mask))); in math_PBLENDVB_128()
15401 assign(notmask, unop(Iop_NotV256, mkexpr(mask))); in math_PBLENDVB_256()
15511 unop(Iop_V128to64, in finish_xTESTy()
15519 unop(Iop_V128to64, in finish_xTESTy()
15532 unop(Iop_Not64, in finish_xTESTy()
15536 unop(Iop_Not64, in finish_xTESTy()
15557 unop(Iop_Not64, in finish_xTESTy()
15564 unop(Iop_Not64, in finish_xTESTy()
15679 mkexpr(vecE), unop(Iop_NotV256, mkexpr(vecG)))); in dis_xTESTy_256()
15718 unop( Iop_64UtoV128, loadLE( Ity_I64, mkexpr(addr) ) ) ); in dis_PMOVxXBW_128()
15761 unop( Iop_64UtoV128, loadLE( Ity_I64, mkexpr(addr) ) ) ); in dis_PMOVxXWD_128()
15805 unop( Iop_16Sto64, in dis_PMOVSXWQ_128()
15806 unop( Iop_32HIto16, mkexpr(srcBytes) ) ), in dis_PMOVSXWQ_128()
15807 unop( Iop_16Sto64, in dis_PMOVSXWQ_128()
15808 unop( Iop_32to16, mkexpr(srcBytes) ) ) ) ); in dis_PMOVSXWQ_128()
15832 unop( Iop_32UtoV128, loadLE( Ity_I32, mkexpr(addr) ) ) ); in dis_PMOVZXWQ_128()
15869 assign( srcI64, unop(Iop_V128to64, mkexpr(srcVec)) ); in dis_PMOVxXDQ_128()
15875 assign( srcVec, unop( Iop_64UtoV128, mkexpr(srcI64)) ); in dis_PMOVxXDQ_128()
15885 unop( Iop_32Sto64, in dis_PMOVxXDQ_128()
15886 unop( Iop_64HIto32, mkexpr(srcI64) ) ), in dis_PMOVxXDQ_128()
15887 unop( Iop_32Sto64, in dis_PMOVxXDQ_128()
15888 unop( Iop_64to32, mkexpr(srcI64) ) ) ); in dis_PMOVxXDQ_128()
15916 unop( Iop_32UtoV128, loadLE( Ity_I32, mkexpr(addr) ) ) ); in dis_PMOVxXBD_128()
15964 unop( Iop_8Sto64, in dis_PMOVSXBQ_128()
15965 unop( Iop_16HIto8, mkexpr(srcBytes) ) ), in dis_PMOVSXBQ_128()
15966 unop( Iop_8Sto64, in dis_PMOVSXBQ_128()
15967 unop( Iop_16to8, mkexpr(srcBytes) ) ) ) ); in dis_PMOVSXBQ_128()
15991 unop( Iop_32UtoV128, in dis_PMOVZXBQ_128()
15992 unop( Iop_16Uto32, loadLE( Ity_I16, mkexpr(addr) )))); in dis_PMOVZXBQ_128()
16037 assign( sHi, unop(Iop_V128HIto64, mkexpr(sV)) ); in dis_PHMINPOSUW_128()
16038 assign( sLo, unop(Iop_V128to64, mkexpr(sV)) ); in dis_PHMINPOSUW_128()
16046 (rG, unop(Iop_64UtoV128, mkexpr(dLo))); in dis_PHMINPOSUW_128()
16696 putIRegG(4, pfx, modrm, unop(Iop_64to32, mkexpr(valG1))); in dis_ESC_0F38__SSE4()
16751 case 0: assign(d16, unop(Iop_32to16, mkexpr(t0))); break; in dis_PEXTRW()
16752 case 1: assign(d16, unop(Iop_32HIto16, mkexpr(t0))); break; in dis_PEXTRW()
16753 case 2: assign(d16, unop(Iop_32to16, mkexpr(t1))); break; in dis_PEXTRW()
16754 case 3: assign(d16, unop(Iop_32HIto16, mkexpr(t1))); break; in dis_PEXTRW()
16755 case 4: assign(d16, unop(Iop_32to16, mkexpr(t2))); break; in dis_PEXTRW()
16756 case 5: assign(d16, unop(Iop_32HIto16, mkexpr(t2))); break; in dis_PEXTRW()
16757 case 6: assign(d16, unop(Iop_32to16, mkexpr(t3))); break; in dis_PEXTRW()
16758 case 7: assign(d16, unop(Iop_32HIto16, mkexpr(t3))); break; in dis_PEXTRW()
16764 putIReg32( rE, unop(Iop_16Uto32, mkexpr(d16)) ); in dis_PEXTRW()
16855 case 0: assign( src_qword, unop(Iop_V128to64, mkexpr(xmm_vec)) ); in dis_PEXTRQ()
16857 case 1: assign( src_qword, unop(Iop_V128HIto64, mkexpr(xmm_vec)) ); in dis_PEXTRQ()
17019 unop(Iop_8Uto64, mkexpr(u8)), in math_PINSRB_128()
17149 unop( Iop_32Uto64, in dis_PEXTRB_128_GtoE()
17156 storeLE( mkexpr(addr), unop(Iop_32to8, mkexpr(shr_lane) ) ); in dis_PEXTRB_128_GtoE()
17252 assign( sHi, unop(Iop_V128HIto64, mkexpr(src_masked)) ); in math_MPSADBW_128()
17253 assign( sLo, unop(Iop_V128to64, mkexpr(src_masked)) ); in math_MPSADBW_128()
17257 assign( dHi, unop(Iop_V128HIto64, mkexpr(dst_masked)) ); in math_MPSADBW_128()
17258 assign( dLo, unop(Iop_V128to64, mkexpr(dst_masked)) ); in math_MPSADBW_128()
17336 assign(t0, unop((imm8&1)? Iop_V128HIto64 : Iop_V128to64, in math_PCLMULQDQ()
17338 assign(t1, unop((imm8&16) ? Iop_V128HIto64 : Iop_V128to64, in math_PCLMULQDQ()
17729 assign( new8, unop(Iop_32to8, getIReg32(rE)) ); in dis_ESC_0F3A__SSE4()
18345 unop(Iop_32Sto64, in dis_ESC_NONE()
18355 unop(Iop_32Sto64, in dis_ESC_NONE()
18588 ? unop(Iop_64to32, mkexpr(addr)) in dis_ESC_NONE()
18669 putIRegRAX( 8, unop(Iop_32Sto64, getIRegRAX(4)) ); in dis_ESC_NONE()
18674 putIRegRAX( 4, unop(Iop_16Sto32, getIRegRAX(2)) ); in dis_ESC_NONE()
18679 putIRegRAX( 2, unop(Iop_8Sto16, getIRegRAX(1)) ); in dis_ESC_NONE()
18752 storeLE( mkexpr(t1), unop(Iop_32to16, in dis_ESC_NONE()
18753 unop(Iop_64to32,mkexpr(t5))) ); in dis_ESC_NONE()
18791 unop(Iop_32to8, in dis_ESC_NONE()
18792 unop(Iop_64to32, in dis_ESC_NONE()
18804 unop(Iop_32to8, in dis_ESC_NONE()
18805 unop(Iop_64to32, in dis_ESC_NONE()
18817 unop(Iop_32to8, in dis_ESC_NONE()
18818 unop(Iop_64to32, in dis_ESC_NONE()
19264 unop(Iop_64to32, getIReg64(R_RCX)), in dis_ESC_NONE()
19307 unop(Iop_32Uto64, getIReg32(R_RCX)), in dis_ESC_NONE()
19344 assign(t1, unop(Iop_16Uto64, getIRegRDX(2))); in dis_ESC_NONE()
19351 assign(t1, unop(Iop_16Uto64, getIRegRDX(2))); in dis_ESC_NONE()
19393 assign( t1, unop(Iop_16Uto64, getIRegRDX(2)) ); in dis_ESC_NONE()
19400 assign( t1, unop(Iop_16Uto64, getIRegRDX(2)) ); in dis_ESC_NONE()
19787 putIRegRDX(4, unop(Iop_64HIto32, mkexpr(val))); in dis_ESC_0F()
19788 putIRegRAX(4, unop(Iop_64to32, mkexpr(val))); in dis_ESC_0F()
19907 assign( t1, unop(Iop_1Uto8,mk_amd64g_calculate_condition(opc-0x90)) ); in dis_ESC_0F()
20175 assign( expdHi, sz==4 ? unop(Iop_64to32, mkexpr(expdHi64)) in dis_ESC_0F()
20177 assign( expdLo, sz==4 ? unop(Iop_64to32, mkexpr(expdLo64)) in dis_ESC_0F()
20217 IRExpr_Mux0X( unop(Iop_1Uto8, mkexpr(success)), in dis_ESC_0F()
20218 sz == 4 ? unop(Iop_32Uto64, mkexpr(oldHi)) in dis_ESC_0F()
20223 IRExpr_Mux0X( unop(Iop_1Uto8, mkexpr(success)), in dis_ESC_0F()
20224 sz == 4 ? unop(Iop_32Uto64, mkexpr(oldLo)) in dis_ESC_0F()
20239 unop(Iop_1Uto64, mkexpr(success)), mkU64(1)), in dis_ESC_0F()
20596 assign(tSL, invertLeftArg ? unop(Iop_NotV128, getXMMReg(rSL)) in dis_VEX_NDS_128_AnySimdPfx_0F_WIG()
20692 assign( amt8, unop(Iop_64to8, mkexpr(amt)) ); in dis_AVX128_shiftV_byE()
20712 unop(Iop_1Uto8, in dis_AVX128_shiftV_byE()
20723 unop(Iop_1Uto8, in dis_AVX128_shiftV_byE()
20825 assign( epart, unop( Iop_64UtoV128, in dis_AVX128_E_V_to_G_lo64()
20879 putYMMRegLoAndZU( rG, unop(op, mkexpr(arg)) ); in dis_AVX128_E_V_to_G_lo64_unary()
20926 putYMMRegLoAndZU( rG, unop(op, mkexpr(arg)) ); in dis_AVX128_E_V_to_G_lo32_unary()
20962 assign( epart, unop( Iop_32UtoV128, in dis_AVX128_E_V_to_G_lo32()
21034 : sz == 8 ? unop( Iop_64UtoV128, loadLE(Ity_I64, mkexpr(addr))) in dis_AVX128_cmp_V_E_to_G()
21035 : /*sz==4*/ unop( Iop_32UtoV128, loadLE(Ity_I32, mkexpr(addr)))); in dis_AVX128_cmp_V_E_to_G()
21048 putYMMRegLoAndZU( rG, unop(Iop_NotV128, mkexpr(plain)) ); in dis_AVX128_cmp_V_E_to_G()
21082 unop(Iop_NotV128, mkexpr(plain)), in dis_AVX128_cmp_V_E_to_G()
21163 putYMMReg( rG, unop(Iop_NotV256, mkexpr(plain)) ); in dis_AVX256_cmp_V_E_to_G()
21230 putYMMRegLoAndZU( rG, unop(op, mkexpr(arg)) ); in dis_AVX128_E_to_G_unary_all()
21258 assign(tSL, invertLeftArg ? unop(Iop_NotV256, getYMMReg(rSL)) in dis_VEX_NDS_256_AnySimdPfx_0F_WIG()
21348 putYMMReg( rG, unop(op, mkexpr(arg)) ); in dis_AVX256_E_to_G_unary_all()
21382 unop(Iop_ReinterpF64asI64, unop(Iop_I32StoF64, mkexpr(s3))), in dis_CVTDQ2PD_256()
21383 unop(Iop_ReinterpF64asI64, unop(Iop_I32StoF64, mkexpr(s2))), in dis_CVTDQ2PD_256()
21384 unop(Iop_ReinterpF64asI64, unop(Iop_I32StoF64, mkexpr(s1))), in dis_CVTDQ2PD_256()
21385 unop(Iop_ReinterpF64asI64, unop(Iop_I32StoF64, mkexpr(s0))) in dis_CVTDQ2PD_256()
21419 unop(Iop_ReinterpI64asF64, mkexpr(_t)) ) in dis_CVTPD2PS_256()
22173 unop(Iop_I32StoF64, mkexpr(arg32))); in dis_ESC_0F__VEX()
22259 unop(Iop_I32StoF64, mkexpr(arg32)) ) ); in dis_ESC_0F__VEX()
22742 unop( Iop_F32toF64, mkexpr(f32lo)) ); in dis_ESC_0F__VEX()
23115 unop( Iop_32UtoV128, getIReg32(eregOfRexRM(pfx,modrm)) ) in dis_ESC_0F__VEX()
23124 unop( Iop_32UtoV128,loadLE(Ity_I32, mkexpr(addr))) in dis_ESC_0F__VEX()
23140 unop( Iop_64UtoV128, getIReg64(eregOfRexRM(pfx,modrm)) ) in dis_ESC_0F__VEX()
23149 unop( Iop_64UtoV128,loadLE(Ity_I64, mkexpr(addr))) in dis_ESC_0F__VEX()
23730 assign( new16, unop(Iop_32to16, in dis_ESC_0F__VEX()
24495 = IRExpr_Mux0X( unop(Iop_64to8, in math_PERMILPD_VAR_128()
24499 = IRExpr_Mux0X( unop(Iop_64to8, in math_PERMILPD_VAR_128()
25370 assign(s1, unop(Iop_V128HIto64, mkexpr(sV))); in dis_ESC_0F3A__VEX()
25371 assign(s0, unop(Iop_V128to64, mkexpr(sV))); in dis_ESC_0F3A__VEX()
25502 unop(Iop_ReinterpI32asF32, mkexpr(s))) in dis_ESC_0F3A__VEX()
25553 unop(Iop_ReinterpI32asF32, mkexpr(s))) in dis_ESC_0F3A__VEX()
25605 unop(Iop_ReinterpI64asF64, mkexpr(s))) in dis_ESC_0F3A__VEX()
25650 unop(Iop_ReinterpI64asF64, mkexpr(s))) in dis_ESC_0F3A__VEX()
26041 assign( src_u8, unop(Iop_32to8, getIReg32( rE )) ); in dis_ESC_0F3A__VEX()