/external/valgrind/VEX/priv/ |
D | guest_arm64_helpers.c |
    1422  binop(Iop_CmpEQ32, unop(Iop_64to32, cc_dep1),  in guest_arm64_spechelper()
    1423  unop(Iop_64to32, cc_dep2)));  in guest_arm64_spechelper()
    1428  binop(Iop_CmpNE32, unop(Iop_64to32, cc_dep1),  in guest_arm64_spechelper()
    1429  unop(Iop_64to32, cc_dep2)));  in guest_arm64_spechelper()
    1437  binop(Iop_CmpLE32U, unop(Iop_64to32, cc_dep2),  in guest_arm64_spechelper()
    1438  unop(Iop_64to32, cc_dep1)));  in guest_arm64_spechelper()
    1443  binop(Iop_CmpLT32U, unop(Iop_64to32, cc_dep1),  in guest_arm64_spechelper()
    1444  unop(Iop_64to32, cc_dep2)));  in guest_arm64_spechelper()
    1451  binop(Iop_CmpLE32U, unop(Iop_64to32, cc_dep1),  in guest_arm64_spechelper()
    1452  unop(Iop_64to32, cc_dep2)));  in guest_arm64_spechelper()
    [all …]
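The guest_arm64_spechelper() hits above specialise calls to the flags helper into direct 32-bit compares on the low halves of the two 64-bit thunk words. Below is a minimal standalone C sketch of the identity being exploited; it is plain C, not VEX IR, the function names are invented, and the exact condition/thunk-op pairing is an illustrative assumption.

    #include <assert.h>
    #include <stdint.h>

    /* Model of Iop_64to32: keep the low half of a 64-bit value. */
    static uint32_t lo32(uint64_t x) { return (uint32_t)x; }

    /* After a 32-bit subtract/compare (assumed thunk op), the EQ condition
       depends only on the low halves of the operands in the thunk, so the
       specialiser can emit CmpEQ32(64to32(cc_dep1), 64to32(cc_dep2)). */
    static int eq_after_sub32(uint64_t cc_dep1, uint64_t cc_dep2) {
        return lo32(cc_dep1) == lo32(cc_dep2);
    }

    /* Likewise, an unsigned lower-or-same test becomes CmpLE32U on the halves. */
    static int ls_after_sub32(uint64_t cc_dep1, uint64_t cc_dep2) {
        return lo32(cc_dep1) <= lo32(cc_dep2);
    }

    int main(void) {
        /* The upper halves are irrelevant to the 32-bit conditions. */
        assert( eq_after_sub32(0xdead00000007ULL, 0xbeef00000007ULL));
        assert(!eq_after_sub32(7, 8));
        assert( ls_after_sub32(7, 8));
        return 0;
    }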
|
D | guest_amd64_helpers.c |
    1239  unop(Iop_64to32, cc_dep1),  in guest_amd64_spechelper()
    1240  unop(Iop_64to32, cc_dep2)));  in guest_amd64_spechelper()
    1248  unop(Iop_64to32, cc_dep2),  in guest_amd64_spechelper()
    1249  unop(Iop_64to32, cc_dep1)));  in guest_amd64_spechelper()
    1257  unop(Iop_64to32, cc_dep1),  in guest_amd64_spechelper()
    1258  unop(Iop_64to32, cc_dep2)));  in guest_amd64_spechelper()
    1264  unop(Iop_64to32, cc_dep1),  in guest_amd64_spechelper()
    1265  unop(Iop_64to32, cc_dep2)));  in guest_amd64_spechelper()
    1274  unop(Iop_64to32, cc_dep1),  in guest_amd64_spechelper()
    1275  unop(Iop_64to32, cc_dep2)));  in guest_amd64_spechelper()
    [all …]
|
D | guest_ppc_toIR.c |
    843  unop( Iop_ReinterpI32asF32, unop( Iop_64to32, mkexpr( hi64 ) ) ) ) );  in breakV128to4xF64()
    850  unop( Iop_ReinterpI32asF32, unop( Iop_64to32, mkexpr( lo64 ) ) ) ) );  in breakV128to4xF64()
    876  assign( *t2, unop(Iop_32Sto64, unop(Iop_64to32, mkexpr(hi64))) );  in breakV128to4x64S()
    878  assign( *t0, unop(Iop_32Sto64, unop(Iop_64to32, mkexpr(lo64))) );  in breakV128to4x64S()
    903  assign( *t2, unop(Iop_32Uto64, unop(Iop_64to32, mkexpr(hi64))) );  in breakV128to4x64U()
    905  assign( *t0, unop(Iop_32Uto64, unop(Iop_64to32, mkexpr(lo64))) );  in breakV128to4x64U()
    929  assign( *t2, unop(Iop_64to32, mkexpr(hi64)) );  in breakV128to4x32()
    931  assign( *t0, unop(Iop_64to32, mkexpr(lo64)) );  in breakV128to4x32()
    968  assign( lo32, unop(Iop_64to32, t64));  in mkQNarrow64Sto32()
    990  assign( lo32, unop(Iop_64to32, t64));  in mkQNarrow64Uto32()
    [all …]
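breakV128to4x32() and its siblings above split a 128-bit vector into two I64 halves and then peel each half into I32 lanes with Iop_64HIto32/Iop_64to32. A small self-contained C model of that lane split follows; the struct and helper names are invented for the sketch, and the t3..t0 numbering follows the assignments visible above.

    #include <assert.h>
    #include <stdint.h>

    /* A V128 modelled as its two 64-bit halves. */
    typedef struct { uint64_t hi64, lo64; } V128;

    /* Models Iop_64to32 (low lane) and Iop_64HIto32 (high lane). */
    static uint32_t lane_lo(uint64_t x) { return (uint32_t)x; }
    static uint32_t lane_hi(uint64_t x) { return (uint32_t)(x >> 32); }

    /* Split a V128 into four 32-bit lanes, t3..t0 from high to low. */
    static void break_v128_to_4x32(V128 v, uint32_t* t3, uint32_t* t2,
                                   uint32_t* t1, uint32_t* t0) {
        *t3 = lane_hi(v.hi64);
        *t2 = lane_lo(v.hi64);
        *t1 = lane_hi(v.lo64);
        *t0 = lane_lo(v.lo64);
    }

    int main(void) {
        V128 v = { 0x3333333322222222ULL, 0x1111111100000000ULL };
        uint32_t t3, t2, t1, t0;
        break_v128_to_4x32(v, &t3, &t2, &t1, &t0);
        assert(t3 == 0x33333333u && t2 == 0x22222222u);
        assert(t1 == 0x11111111u && t0 == 0x00000000u);
        return 0;
    }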
|
D | guest_mips_toIR.c |
    1254  return ty == Ity_I64 ? unop(Iop_64to32, src) : src;  in mkNarrowTo32()
    1287  assign(t_lo, unop(Iop_64to32, getAcc(0)));  in putHI()
    1303  putLO(unop(Iop_64to32, e));  in putAcc()
    1358  assign(t1, unop(Iop_64to32, mkexpr(t0)));  in getLoFromF64()
    1514  assign(t5, unop(Iop_64to32, mkexpr(t6))); /* lo */  in putDReg()
    2826  assign(t1, unop(Iop_64to32, getAcc(ac)));  in disDSPInstr_MIPS_WRK()
    2833  putIReg(rd, unop(Iop_64to32, getAcc(ac_mfhilo)));  in disDSPInstr_MIPS_WRK()
    3475  unop(Iop_64to32,  in disDSPInstr_MIPS_WRK()
    3487  unop(Iop_64to32,  in disDSPInstr_MIPS_WRK()
    3531  unop(Iop_64to32,  in disDSPInstr_MIPS_WRK()
    [all …]
|
D | guest_s390_helpers.c |
    1966  return unop(Iop_64to32, binop(Iop_Shr64, cc_dep1, mkU8(63)));  in guest_s390x_spechelper()
    1979  return unop(Iop_64to32, binop(Iop_Xor64,  in guest_s390x_spechelper()
    2033  word = unop(Iop_64to32, cc_dep1);  in guest_s390x_spechelper()
    2349  unop(Iop_64to32, cc_dep1),  in guest_s390x_spechelper()
    2350  unop(Iop_64to32, cc_dep2)),  in guest_s390x_spechelper()
    2356  unop(Iop_64to32, cc_dep1),  in guest_s390x_spechelper()
    2357  unop(Iop_64to32, cc_dep2)),  in guest_s390x_spechelper()
    2404  return unop(Iop_64to32, cc_dep1);  in guest_s390x_spechelper()
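The hit at line 1966 builds a 32-bit 0/1 from the sign bit of a 64-bit value by shifting right 63 places and then narrowing. A tiny C model of that expression (function name invented):

    #include <assert.h>
    #include <stdint.h>

    /* Models 64to32(Shr64(x, 63)): yields 1 iff the 64-bit operand is
       negative when viewed as a signed value, else 0. */
    static uint32_t sign_bit_as_u32(uint64_t x) {
        return (uint32_t)(x >> 63);
    }

    int main(void) {
        assert(sign_bit_as_u32(0x8000000000000000ULL) == 1);
        assert(sign_bit_as_u32(0x7fffffffffffffffULL) == 0);
        return 0;
    }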
|
D | guest_x86_toIR.c |
    1483  return unop(Iop_64to32, mkexpr(r64));  in handleSegOverride()
    2250  putIReg( 4, R_EAX, unop(Iop_64to32,mkexpr(dst64)) );  in codegen_div()
    2260  putIReg( 2, R_EAX, unop(Iop_32to16,unop(Iop_64to32,mkexpr(dst64))) );  in codegen_div()
    2273  unop(Iop_64to32,mkexpr(dst64)))) );  in codegen_div()
    2452  assign( dst1, narrowTo(ty, unop(Iop_64to32, mkexpr(r64))) );  in dis_Grp2()
    2763  assign( resLo, unop(Iop_64to32,mkexpr(res64)));  in codegen_mulL_A_D()
    4107  put_fpround( unop(Iop_64to32, mkexpr(t64)) );  in dis_FPU()
    5719  assign( amt, unop(Iop_64to32, getMMXReg(eregOfRM(rm))) );  in dis_MMX_shiftG_byE()
    5884  unop(Iop_64to32, getMMXReg(gregOfRM(modrm)) ) );  in dis_MMX()
    5891  unop(Iop_64to32, getMMXReg(gregOfRM(modrm)) ) );  in dis_MMX()
    [all …]
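In codegen_div() above, the 64-bit divide/modulo result is narrowed with Iop_64to32 into EAX; the matching Iop_64HIto32 write of the remainder into EDX is not shown in these hits. The following standalone C sketch assumes the usual VEX packing of quotient in the low half and remainder in the high half; the helper name is invented.

    #include <assert.h>
    #include <stdint.h>

    /* Model of a DivMod-style primop: 64-bit dividend / 32-bit divisor,
       packed as (remainder << 32) | quotient, so the quotient is
       recovered with Iop_64to32 and the remainder with Iop_64HIto32. */
    static uint64_t divmod_u64_to_32(uint64_t dividend, uint32_t divisor) {
        uint64_t q = dividend / divisor;   /* assumed to fit in 32 bits */
        uint64_t r = dividend % divisor;
        return (r << 32) | (uint32_t)q;
    }

    int main(void) {
        uint64_t packed = divmod_u64_to_32(1000000007ULL, 10);
        uint32_t eax = (uint32_t)packed;          /* 64to32   -> quotient  */
        uint32_t edx = (uint32_t)(packed >> 32);  /* 64HIto32 -> remainder */
        assert(eax == 100000000u && edx == 7u);
        return 0;
    }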
|
D | guest_amd64_toIR.c |
    1020  case 4: return unop(Iop_64to32, IRExpr_Get( OFFB_RAX, Ity_I64 ));  in getIRegRAX()
    1068  case 4: return unop(Iop_64to32, IRExpr_Get( OFFB_RDX, Ity_I64 ));  in getIRegRDX()
    1119  return unop(Iop_64to32,  in getIReg32()
    1194  return unop(Iop_64to32,  in getIRegRexB()
    1275  return unop(Iop_64to32,  in getIRegG()
    1307  return unop(Iop_64to32,  in getIRegV()
    1354  return unop(Iop_64to32,  in getIRegE()
    1774  return unop(Iop_64to32, e);  in narrowTo()
    2393  virtual = unop(Iop_32Uto64, unop(Iop_64to32, virtual));  in handleAddrOverrides()
    3473  putIRegRAX( 4, unop(Iop_64to32,mkexpr(dst64)) );  in codegen_div()
    [all …]
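getIReg32() and friends above read a 32-bit sub-register as Iop_64to32 of the 64-bit guest-state slot, and handleAddrOverrides() truncates-then-zero-extends an effective address when a 32-bit address-size prefix is in force. A minimal C model of both patterns (function names invented):

    #include <assert.h>
    #include <stdint.h>

    /* Guest RAX is stored as a full 64-bit slot; EAX is its low half,
       read as 64to32(Get(OFFB_RAX, Ity_I64)). */
    static uint32_t get_eax(uint64_t rax_slot) { return (uint32_t)rax_slot; }

    /* A 32-bit address-size override truncates the address and
       zero-extends it back: 32Uto64(64to32(addr)). */
    static uint64_t addr32_override(uint64_t addr) {
        return (uint64_t)(uint32_t)addr;
    }

    int main(void) {
        assert(get_eax(0x1122334455667788ULL) == 0x55667788u);
        assert(addr32_override(0xffffffff00000010ULL) == 0x10ULL);
        return 0;
    }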
|
D | guest_arm64_toIR.c |
    1056  case Ity_I32: return unop(Iop_64to32, e);  in narrowFrom64()
    1277  return unop(Iop_64to32,  in getIReg32orSP()
    1287  return unop(Iop_64to32,  in getIReg32orZR()
    2871  : unop(Iop_64to32, mk_arm64g_calculate_flag_c()) );  in dis_ARM64_data_processing_register()
    3126  xMw = unop(Iop_32Uto64, unop(Iop_64to32, xMw));  in dis_ARM64_data_processing_register()
    3168  putIReg32orZR(dd, unop(Iop_64to32, mkexpr(res)));  in dis_ARM64_data_processing_register()
    3169  assign(argL32, unop(Iop_64to32, mkexpr(argL)));  in dis_ARM64_data_processing_register()
    3170  assign(argR32, unop(Iop_64to32, mkexpr(argR)));  in dis_ARM64_data_processing_register()
    3173  putIReg32orSP(dd, unop(Iop_64to32, mkexpr(res)));  in dis_ARM64_data_processing_register()
    3318  unop(Iop_32Uto64, unop(Iop_64to32, mkexpr(dst))));  in dis_ARM64_data_processing_register()
    [all …]
|
D | ir_opt.c |
    1516  case Iop_64to32: {  in fold_Expr()
    5327  if (is_Unop(aa, Iop_64to32) && is_Unop(aa->Iex.Unop.arg, Iop_CmpwNEZ64))  in fold_IRExpr_Unop()
    5366  case Iop_64to32:  in fold_IRExpr_Unop()
    5384  if (is_Unop(aa, Iop_64to32)  in fold_IRExpr_Unop()
    5388  Iop_64to32)) {  in fold_IRExpr_Unop()
    5393  if (is_Unop(aa, Iop_64to32)  in fold_IRExpr_Unop()
    5397  Iop_64to32)) {  in fold_IRExpr_Unop()
    5402  Iop_64to32,  in fold_IRExpr_Unop()
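ir_opt.c both constant-folds Iop_64to32 (fold_Expr) and rewrites 64to32 trees (fold_IRExpr_Unop). The C sketch below checks algebraic identities such a folder can rely on; whether ir_opt applies exactly these rewrites is not shown by the hits above. Folds like these matter because the widen/narrow pairs introduced by the 32-bit sub-register accesses listed earlier would otherwise survive into the generated code.

    #include <assert.h>
    #include <stdint.h>

    /* Identities a 64to32 folder can rely on:
         64to32(constant)   -> low 32 bits of the constant
         64to32(32Uto64(x)) -> x
         64to32(32Sto64(x)) -> x                                        */
    static uint32_t iop_64to32(uint64_t x)  { return (uint32_t)x; }
    static uint64_t iop_32Uto64(uint32_t x) { return (uint64_t)x; }
    static uint64_t iop_32Sto64(uint32_t x) { return (uint64_t)(int64_t)(int32_t)x; }

    int main(void) {
        uint32_t x = 0x80000001u;
        assert(iop_64to32(0x0123456789abcdefULL) == 0x89abcdefu);
        assert(iop_64to32(iop_32Uto64(x)) == x);
        assert(iop_64to32(iop_32Sto64(x)) == x);
        return 0;
    }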
|
D | guest_s390_toIR.c |
    6600  put_gpr_w1(r1 + 1, unop(Iop_64to32, mkexpr(result)));  in s390_irgen_MR()
    6616  put_gpr_w1(r1 + 1, unop(Iop_64to32, mkexpr(result)));  in s390_irgen_M()
    6632  put_gpr_w1(r1 + 1, unop(Iop_64to32, mkexpr(result)));  in s390_irgen_MFY()
    6648  put_gpr_w1(r1, unop(Iop_64to32, mkexpr(result)));  in s390_irgen_MH()
    6664  put_gpr_w1(r1, unop(Iop_64to32, mkexpr(result)));  in s390_irgen_MHY()
    6680  put_gpr_w1(r1, unop(Iop_64to32, mkexpr(result)));  in s390_irgen_MHI()
    6712  put_gpr_w1(r1 + 1, unop(Iop_64to32, mkexpr(result)));  in s390_irgen_MLR()
    6744  put_gpr_w1(r1 + 1, unop(Iop_64to32, mkexpr(result)));  in s390_irgen_ML()
    6775  put_gpr_w1(r1, unop(Iop_64to32, mkexpr(result)));  in s390_irgen_MSR()
    6821  put_gpr_w1(r1, unop(Iop_64to32, mkexpr(result)));  in s390_irgen_MS()
    [all …]
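The s390 multiply irgens above write the low word of a 64-bit product into a GPR with Iop_64to32; for the widening forms (MR/M/MFY/MLR/ML) the low word goes to the second register of the pair, with the high word presumably extracted by Iop_64HIto32 (not shown in these hits). A standalone C model of the unsigned widening case follows; the function name is invented and the signed forms split the same way.

    #include <assert.h>
    #include <stdint.h>

    /* Widening 32x32 -> 64 unsigned multiply whose result is spread
       over a register pair. */
    static void mulu_widen_to_pair(uint32_t a, uint32_t b,
                                   uint32_t* even, uint32_t* odd) {
        uint64_t prod = (uint64_t)a * b;
        *even = (uint32_t)(prod >> 32);  /* high word, as 64HIto32 */
        *odd  = (uint32_t)prod;          /* low word,  as 64to32   */
    }

    int main(void) {
        uint32_t hi, lo;
        mulu_widen_to_pair(0x80000000u, 4u, &hi, &lo);
        assert(hi == 2u && lo == 0u);
        return 0;
    }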
|
D | guest_arm_toIR.c |
    9110   unop(Iop_64to32, mkexpr(irt_prod)),  in decode_V6MEDIA_instruction()
    10740  assign( irt_resLo, unop(Iop_64to32, mkexpr(irt_res)) );  in decode_V6MEDIA_instruction()
    10806  binop(Iop_Shr32, unop(Iop_64to32, mkexpr(irt_prod)), mkU8(16))  in decode_V6MEDIA_instruction()
    12527  assign( resLo, unop(Iop_64to32, mkexpr(result)) );  in decode_V6MEDIA_instruction()
    12615  assign( resLo, unop(Iop_64to32, mkexpr(result)) );  in decode_V6MEDIA_instruction()
    12664  assign( *t0, unop(Iop_64to32, mkexpr(lo64)) );  in breakupV128to32s()
    12666  assign( *t2, unop(Iop_64to32, mkexpr(hi64)) );  in breakupV128to32s()
    13132  unop(Iop_64to32, mkexpr(srcN)),  in decode_V8_instruction()
    13134  unop(Iop_64to32, mkexpr(srcM)));  in decode_V8_instruction()
    13428  PUT_IREG(tt, unop(Iop_64to32, mkexpr(res)));  in decode_V8_instruction()
    [all …]
|
D | host_s390_isel.c |
    1560  ((unop == Iop_64to32 &&  in s390_isel_int_expr_wrk()
    1719  case Iop_64to32:  in s390_isel_int_expr_wrk()
|
D | ir_defs.c |
    253   case Iop_64to32: vex_printf("64to32"); return;  in ppIROp()
    2779  case Iop_64HIto32: case Iop_64to32:  in typeOfPrimop()
|
D | host_arm64_isel.c | 1968 case Iop_64to32: in iselIntExpr_R_wrk()
|
D | host_mips_isel.c | 1590 case Iop_64to32: { in iselWordExpr_R_wrk()
|
D | host_x86_isel.c | 1281 case Iop_64to32: { in iselIntExpr_R_wrk()
|
D | host_amd64_isel.c | 1687 case Iop_64to32: in iselIntExpr_R_wrk()
|
D | host_ppc_isel.c | 1990 case Iop_64to32: { in iselWordExpr_R_wrk()
|
D | host_arm_isel.c | 1789 case Iop_64to32: { in iselIntExpr_R_wrk()
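The host_*_isel.c entries above show each backend lowering Iop_64to32 in its integer-expression selector. The C sketch below is a schematic model of the 32-bit-host case, where an I64 IR value is carried in a pair of 32-bit virtual registers and the selector just uses (a copy of) the low register; the type and function names are invented and this is a model of the idea, not the actual backend code.

    #include <assert.h>
    #include <stdint.h>

    /* An I64 value on a 32-bit host, modelled as a (hi, lo) register pair. */
    typedef struct { uint32_t hi, lo; } RegPair64;

    /* Hypothetical selector for Iop_64to32: the low half already holds
       the result, so no narrowing arithmetic is needed. */
    static uint32_t isel_64to32(RegPair64 src) {
        return src.lo;
    }

    int main(void) {
        RegPair64 v = { 0xdeadbeefu, 0x12345678u };
        assert(isel_64to32(v) == 0x12345678u);
        return 0;
    }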
|
/external/valgrind/drd/ |
D | drd_load_store.c | 456 IRExpr_Unop(Iop_64to32, data_expr_lo))); in instr_trace_mem_store()
|
/external/valgrind/VEX/pub/ |
D | libvex_ir.h | 531 Iop_64to32, // :: I64 -> I32, low half enumerator
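The enumerator above is the whole contract: Iop_64to32 :: I64 -> I32, returning the low half. A trivial C model of that semantics (nothing VEX-specific; the helper name is made up):

    #include <assert.h>
    #include <stdint.h>

    /* Iop_64to32 :: I64 -> I32, low half: a plain truncation. */
    static uint32_t iop_64to32(uint64_t x) { return (uint32_t)x; }

    int main(void) {
        assert(iop_64to32(0x0123456789abcdefULL) == 0x89abcdefu);
        /* The high half (here 0x01234567) is simply discarded. */
        return 0;
    }

Every other hit in this listing is ultimately this truncation, applied to guest registers, products, addresses, or shadow values.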
|
/external/valgrind/coregrind/ |
D | m_translate.c | 1007 return IRExpr_Unop(Iop_64to32, e); in narrowTo32()
|
/external/valgrind/memcheck/ |
D | mc_translate.c |
    798   return assignNew('V', mce, Ity_I32, unop(Iop_64to32, tmp));  in mkPCastTo()
    4515  case Iop_64to32:  in expr2vbits_Unop()
    7013  assign( 'B', mce, bTmp32, unop(Iop_64to32, mkexpr(bTmp)) );  in gen_guarded_load_b()
    7126  return assignNew( 'B', mce, Ity_I32, unop(Iop_64to32, e) );  in narrowTo32()
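Memcheck's narrowTo32()/mkPCastTo() above apply Iop_64to32 to shadow (V-bit) values as well as to data, so a truncation's definedness depends only on the low 32 V-bits of the operand, which appears to be what UNDEF_TRUNC in the vbit tester below encodes. A small C sketch of that shadow convention (struct and helper names invented; a V-bit of 1 means "undefined"):

    #include <assert.h>
    #include <stdint.h>

    typedef struct { uint64_t data, vbits; } Shadowed64;
    typedef struct { uint32_t data, vbits; } Shadowed32;

    /* Truncating the data with Iop_64to32 truncates the shadow the same
       way, so only the low 32 V-bits can influence the result. */
    static Shadowed32 narrow_to_32(Shadowed64 x) {
        Shadowed32 r = { (uint32_t)x.data, (uint32_t)x.vbits };
        return r;
    }

    int main(void) {
        /* High half entirely undefined, low half fully defined ...      */
        Shadowed64 x = { 0x1122334455667788ULL, 0xffffffff00000000ULL };
        Shadowed32 r = narrow_to_32(x);
        /* ... so the narrowed result is fully defined.                  */
        assert(r.data == 0x55667788u && r.vbits == 0u);
        return 0;
    }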
|
/external/valgrind/memcheck/tests/vbit-test/ |
D | irops.c | 173 …{ DEFOP(Iop_64to32, UNDEF_TRUNC), .s390x = 1, .amd64 = 1, .x86 = 1, .arm = 1, .ppc64 = 1, .ppc…
|