/external/valgrind/VEX/priv/ |
D | guest_tilegx_toIR.c |
      221   return unop(Iop_64to32, e);  in narrowTo()
      1444  unop(Iop_64to32,  in disInstr_TILEGX_WRK()
      1448  unop(Iop_64to32,  in disInstr_TILEGX_WRK()
      1461  unop(Iop_64to32,  in disInstr_TILEGX_WRK()
      1464  unop(Iop_64to32, mkexpr(t0)),  in disInstr_TILEGX_WRK()
      1465  unop(Iop_64to32, binop(Iop_Shr64, getIReg(rb), mkU8(32)))));  in disInstr_TILEGX_WRK()
      1467  unop(Iop_64to32, binop(Iop_Shr64,  in disInstr_TILEGX_WRK()
      1470  unop(Iop_64to32, binop(Iop_Shr64, getIReg(rb), mkU8(32)))));  in disInstr_TILEGX_WRK()
      1481  unop(Iop_64to32,  in disInstr_TILEGX_WRK()
      1485  unop(Iop_64to32,  in disInstr_TILEGX_WRK()
      [all …]
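The hits at lines 1465/1467/1470 show one recurring shape: the upper 32 bits of a 64-bit register are obtained by shifting right 32 and then narrowing, rather than with Iop_64HIto32. A minimal sketch of that shape using only the public IR constructors; the helper name is illustrative, and mkU8(32) in the hits is just shorthand for a U8 constant.

    #include "libvex_ir.h"

    /* Illustrative only: bits 63:32 of an Ity_I64 expression, built the
       way the tilegx decoder does it above (Shr64 by 32, then 64to32). */
    static IRExpr* highHalfByShift ( IRExpr* e64 )
    {
       return IRExpr_Unop(Iop_64to32,
                          IRExpr_Binop(Iop_Shr64, e64,
                                       IRExpr_Const(IRConst_U8(32))));
    }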
|
D | guest_arm64_helpers.c |
      822  binop(Iop_CmpEQ32, unop(Iop_64to32, cc_dep1),  in guest_arm64_spechelper()
      823  unop(Iop_64to32, cc_dep2)));  in guest_arm64_spechelper()
      828  binop(Iop_CmpNE32, unop(Iop_64to32, cc_dep1),  in guest_arm64_spechelper()
      829  unop(Iop_64to32, cc_dep2)));  in guest_arm64_spechelper()
      837  binop(Iop_CmpLE32U, unop(Iop_64to32, cc_dep2),  in guest_arm64_spechelper()
      838  unop(Iop_64to32, cc_dep1)));  in guest_arm64_spechelper()
      843  binop(Iop_CmpLT32U, unop(Iop_64to32, cc_dep1),  in guest_arm64_spechelper()
      844  unop(Iop_64to32, cc_dep2)));  in guest_arm64_spechelper()
      851  binop(Iop_CmpLE32U, unop(Iop_64to32, cc_dep1),  in guest_arm64_spechelper()
      852  unop(Iop_64to32, cc_dep2)));  in guest_arm64_spechelper()
      [all …]
|
D | guest_amd64_helpers.c |
      1213  unop(Iop_64to32, cc_dep1),  in guest_amd64_spechelper()
      1214  unop(Iop_64to32, cc_dep2)));  in guest_amd64_spechelper()
      1222  unop(Iop_64to32, cc_dep2),  in guest_amd64_spechelper()
      1223  unop(Iop_64to32, cc_dep1)));  in guest_amd64_spechelper()
      1231  unop(Iop_64to32, cc_dep1),  in guest_amd64_spechelper()
      1232  unop(Iop_64to32, cc_dep2)));  in guest_amd64_spechelper()
      1238  unop(Iop_64to32, cc_dep1),  in guest_amd64_spechelper()
      1239  unop(Iop_64to32, cc_dep2)));  in guest_amd64_spechelper()
      1248  unop(Iop_64to32, cc_dep1),  in guest_amd64_spechelper()
      1249  unop(Iop_64to32, cc_dep2)));  in guest_amd64_spechelper()
      [all …]
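The arm64 and amd64 spec-helper hits above all follow one pattern: a 32-bit condition is specialised by narrowing both 64-bit flag-thunk operands and comparing the low halves directly. A hedged sketch of that shape; the function name is made up, and cc_dep1/cc_dep2 stand for the Ity_I64 expressions the real helpers receive.

    #include "libvex_ir.h"

    /* Illustrative sketch of the spec-helper idiom: 32-bit equality of
       two 64-bit thunk operands, tested on their low halves only. */
    static IRExpr* mkEq32OfLowHalves ( IRExpr* cc_dep1, IRExpr* cc_dep2 )
    {
       return IRExpr_Binop(Iop_CmpEQ32,
                           IRExpr_Unop(Iop_64to32, cc_dep1),
                           IRExpr_Unop(Iop_64to32, cc_dep2));
    }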
|
D | guest_mips_toIR.c |
      1219  return ty == Ity_I64 ? unop(Iop_64to32, src) : src;  in mkNarrowTo32()
      1252  assign(t_lo, unop(Iop_64to32, getAcc(0)));  in putHI()
      1268  putLO(unop(Iop_64to32, e));  in putAcc()
      1323  assign(t1, unop(Iop_64to32, mkexpr(t0)));  in getLoFromF64()
      1469  assign(t5, unop(Iop_64to32, mkexpr(t6))); /* lo */  in putDReg()
      2750  assign(t1, unop(Iop_64to32, getAcc(ac)));  in disDSPInstr_MIPS_WRK()
      2757  putIReg(rd, unop(Iop_64to32, getAcc(ac_mfhilo)));  in disDSPInstr_MIPS_WRK()
      3399  unop(Iop_64to32,  in disDSPInstr_MIPS_WRK()
      3411  unop(Iop_64to32,  in disDSPInstr_MIPS_WRK()
      3455  unop(Iop_64to32,  in disDSPInstr_MIPS_WRK()
      [all …]
|
D | guest_ppc_toIR.c |
      790  unop( Iop_ReinterpI32asF32, unop( Iop_64to32, mkexpr( hi64 ) ) ) ) );  in breakV128to4xF64()
      797  unop( Iop_ReinterpI32asF32, unop( Iop_64to32, mkexpr( lo64 ) ) ) ) );  in breakV128to4xF64()
      823  assign( *t2, unop(Iop_32Sto64, unop(Iop_64to32, mkexpr(hi64))) );  in breakV128to4x64S()
      825  assign( *t0, unop(Iop_32Sto64, unop(Iop_64to32, mkexpr(lo64))) );  in breakV128to4x64S()
      850  assign( *t2, unop(Iop_32Uto64, unop(Iop_64to32, mkexpr(hi64))) );  in breakV128to4x64U()
      852  assign( *t0, unop(Iop_32Uto64, unop(Iop_64to32, mkexpr(lo64))) );  in breakV128to4x64U()
      876  assign( *t2, unop(Iop_64to32, mkexpr(hi64)) );  in breakV128to4x32()
      878  assign( *t0, unop(Iop_64to32, mkexpr(lo64)) );  in breakV128to4x32()
      901  assign( lo32, unop(Iop_64to32, t64));  in mkQNarrow64Sto32()
      923  assign( lo32, unop(Iop_64to32, t64));  in mkQNarrow64Uto32()
      [all …]
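Lines 790/797 pair the narrowing with Iop_ReinterpI32asF32: the low 32 bits of each 64-bit half are re-typed, not converted, into an F32 lane. A small sketch of that composition; the helper name is illustrative, and 'half64' stands for the mkexpr(hi64)/mkexpr(lo64) operands in the real code.

    #include "libvex_ir.h"

    /* Illustrative: reinterpret the low 32 bits of an Ity_I64 value as a
       32-bit float lane, as breakV128to4xF64 does above. */
    static IRExpr* lowLaneAsF32 ( IRExpr* half64 )
    {
       return IRExpr_Unop(Iop_ReinterpI32asF32,
                          IRExpr_Unop(Iop_64to32, half64));
    }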
|
D | guest_s390_helpers.c |
      1966  return unop(Iop_64to32, binop(Iop_Shr64, cc_dep1, mkU8(63)));  in guest_s390x_spechelper()
      1979  return unop(Iop_64to32, binop(Iop_Xor64,  in guest_s390x_spechelper()
      2033  word = unop(Iop_64to32, cc_dep1);  in guest_s390x_spechelper()
      2349  unop(Iop_64to32, cc_dep1),  in guest_s390x_spechelper()
      2350  unop(Iop_64to32, cc_dep2)),  in guest_s390x_spechelper()
      2356  unop(Iop_64to32, cc_dep1),  in guest_s390x_spechelper()
      2357  unop(Iop_64to32, cc_dep2)),  in guest_s390x_spechelper()
      2404  return unop(Iop_64to32, cc_dep1);  in guest_s390x_spechelper()
|
D | guest_amd64_toIR.c |
      1020  case 4: return unop(Iop_64to32, IRExpr_Get( OFFB_RAX, Ity_I64 ));  in getIRegRAX()
      1068  case 4: return unop(Iop_64to32, IRExpr_Get( OFFB_RDX, Ity_I64 ));  in getIRegRDX()
      1119  return unop(Iop_64to32,  in getIReg32()
      1194  return unop(Iop_64to32,  in getIRegRexB()
      1275  return unop(Iop_64to32,  in getIRegG()
      1307  return unop(Iop_64to32,  in getIRegV()
      1354  return unop(Iop_64to32,  in getIRegE()
      1764  return unop(Iop_64to32, e);  in narrowTo()
      2362  virtual = unop(Iop_32Uto64, unop(Iop_64to32, virtual));  in handleAddrOverrides()
      3363  putIRegRAX( 4, unop(Iop_64to32,mkexpr(dst64)) );  in codegen_div()
      [all …]
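The getIRegRAX/getIReg32 hits show how the amd64 frontend reads a 32-bit register view: fetch the whole 64-bit guest register with IRExpr_Get and narrow it. A minimal sketch of that access pattern; the function name and the way the offset is passed are illustrative, while OFFB_RAX etc. are the real guest-state offsets used in guest_amd64_toIR.c.

    #include "libvex_ir.h"

    /* Illustrative: a 32-bit view of a 64-bit guest register is the full
       Ity_I64 Get of its state slot, narrowed with Iop_64to32.  'offB' is
       the byte offset of the register in the guest state. */
    static IRExpr* get32LowOfGuestReg64 ( Int offB )
    {
       return IRExpr_Unop(Iop_64to32, IRExpr_Get(offB, Ity_I64));
    }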
|
D | guest_x86_toIR.c |
      1481  return unop(Iop_64to32, mkexpr(r64));  in handleSegOverride()
      2248  putIReg( 4, R_EAX, unop(Iop_64to32,mkexpr(dst64)) );  in codegen_div()
      2258  putIReg( 2, R_EAX, unop(Iop_32to16,unop(Iop_64to32,mkexpr(dst64))) );  in codegen_div()
      2271  unop(Iop_64to32,mkexpr(dst64)))) );  in codegen_div()
      2450  assign( dst1, narrowTo(ty, unop(Iop_64to32, mkexpr(r64))) );  in dis_Grp2()
      2761  assign( resLo, unop(Iop_64to32,mkexpr(res64)));  in codegen_mulL_A_D()
      4105  put_fpround( unop(Iop_64to32, mkexpr(t64)) );  in dis_FPU()
      5717  assign( amt, unop(Iop_64to32, getMMXReg(eregOfRM(rm))) );  in dis_MMX_shiftG_byE()
      5882  unop(Iop_64to32, getMMXReg(gregOfRM(modrm)) ) );  in dis_MMX()
      5889  unop(Iop_64to32, getMMXReg(gregOfRM(modrm)) ) );  in dis_MMX()
      [all …]
|
D | ir_opt.c |
      1515  case Iop_64to32: {  in fold_Expr()
      5326  if (is_Unop(aa, Iop_64to32) && is_Unop(aa->Iex.Unop.arg, Iop_CmpwNEZ64))  in fold_IRExpr_Unop()
      5365  case Iop_64to32:  in fold_IRExpr_Unop()
      5383  if (is_Unop(aa, Iop_64to32)  in fold_IRExpr_Unop()
      5387  Iop_64to32)) {  in fold_IRExpr_Unop()
      5392  if (is_Unop(aa, Iop_64to32)  in fold_IRExpr_Unop()
      5396  Iop_64to32)) {  in fold_IRExpr_Unop()
      5401  Iop_64to32,  in fold_IRExpr_Unop()
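The fold_Expr hit shows the optimiser has a dedicated constant-folding case for this op. A hedged sketch of what that folding amounts to, written against the public IR types only; it is not the actual ir_opt.c code, which works inside its own environment and also handles the 64to32-of-widening patterns visible in fold_IRExpr_Unop.

    #include "libvex_ir.h"

    /* Illustrative: if the argument of Iop_64to32 is already a U64
       constant, the result can be computed now by truncating it;
       otherwise the unop is rebuilt unchanged. */
    static IRExpr* foldConst64to32 ( IRExpr* arg )
    {
       if (arg->tag == Iex_Const && arg->Iex.Const.con->tag == Ico_U64) {
          UInt lo32 = (UInt)(arg->Iex.Const.con->Ico.U64 & 0xFFFFFFFFULL);
          return IRExpr_Const(IRConst_U32(lo32));
       }
       return IRExpr_Unop(Iop_64to32, arg);
    }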
|
D | guest_s390_toIR.c |
      6590  put_gpr_w1(r1 + 1, unop(Iop_64to32, mkexpr(result)));  in s390_irgen_MR()
      6606  put_gpr_w1(r1 + 1, unop(Iop_64to32, mkexpr(result)));  in s390_irgen_M()
      6622  put_gpr_w1(r1 + 1, unop(Iop_64to32, mkexpr(result)));  in s390_irgen_MFY()
      6638  put_gpr_w1(r1, unop(Iop_64to32, mkexpr(result)));  in s390_irgen_MH()
      6654  put_gpr_w1(r1, unop(Iop_64to32, mkexpr(result)));  in s390_irgen_MHY()
      6670  put_gpr_w1(r1, unop(Iop_64to32, mkexpr(result)));  in s390_irgen_MHI()
      6702  put_gpr_w1(r1 + 1, unop(Iop_64to32, mkexpr(result)));  in s390_irgen_MLR()
      6734  put_gpr_w1(r1 + 1, unop(Iop_64to32, mkexpr(result)));  in s390_irgen_ML()
      6765  put_gpr_w1(r1, unop(Iop_64to32, mkexpr(result)));  in s390_irgen_MSR()
      6811  put_gpr_w1(r1, unop(Iop_64to32, mkexpr(result)));  in s390_irgen_MS()
      [all …]
|
D | guest_arm64_toIR.c |
      1055  case Ity_I32: return unop(Iop_64to32, e);  in narrowFrom64()
      1272  return unop(Iop_64to32,  in getIReg32orSP()
      1282  return unop(Iop_64to32,  in getIReg32orZR()
      2866  : unop(Iop_64to32, mk_arm64g_calculate_flag_c()) );  in dis_ARM64_data_processing_register()
      3121  xMw = unop(Iop_32Uto64, unop(Iop_64to32, xMw));  in dis_ARM64_data_processing_register()
      3163  putIReg32orZR(dd, unop(Iop_64to32, mkexpr(res)));  in dis_ARM64_data_processing_register()
      3164  assign(argL32, unop(Iop_64to32, mkexpr(argL)));  in dis_ARM64_data_processing_register()
      3165  assign(argR32, unop(Iop_64to32, mkexpr(argR)));  in dis_ARM64_data_processing_register()
      3168  putIReg32orSP(dd, unop(Iop_64to32, mkexpr(res)));  in dis_ARM64_data_processing_register()
      3313  unop(Iop_32Uto64, unop(Iop_64to32, mkexpr(dst))));  in dis_ARM64_data_processing_register()
      [all …]
|
D | guest_arm_toIR.c |
      9048   unop(Iop_64to32, mkexpr(irt_prod)),  in decode_V6MEDIA_instruction()
      10678  assign( irt_resLo, unop(Iop_64to32, mkexpr(irt_res)) );  in decode_V6MEDIA_instruction()
      10744  binop(Iop_Shr32, unop(Iop_64to32, mkexpr(irt_prod)), mkU8(16))  in decode_V6MEDIA_instruction()
      12465  assign( resLo, unop(Iop_64to32, mkexpr(result)) );  in decode_V6MEDIA_instruction()
      12553  assign( resLo, unop(Iop_64to32, mkexpr(result)) );  in decode_V6MEDIA_instruction()
      13180  IRExpr* lo32 = unop(Iop_64to32, mkexpr(i64));  in decode_CP10_CP11_instruction()
      14373  assign(src32, unop(Iop_64to32, getDRegI64(d)));  in decode_CP10_CP11_instruction()
      15870  assign( resLo, unop(Iop_64to32, mkexpr(res)) );  in disInstr_ARM_WRK()
      15923  assign( resLo, unop(Iop_64to32, mkexpr(res)) );  in disInstr_ARM_WRK()
      15975  assign( resLo, unop(Iop_64to32, mkexpr(res)) );  in disInstr_ARM_WRK()
      [all …]
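The resLo assignments above are the UMULL/SMULL shape: a 32x32->64 product (e.g. Iop_MullU32) is first bound to an Ity_I64 temp, and the two destination words are then carved out of it. A sketch of that split; the function name is illustrative, and 'prod' stands for the temp the real decoder assigns the product to.

    #include "libvex_ir.h"

    /* Illustrative: split a 64-bit product held in an IRTemp into its
       destination words.  Reading the temp twice (IRExpr_RdTmp) is how
       the decoder avoids recomputing the multiply. */
    static void splitProduct64 ( IRTemp prod, IRExpr** resLo, IRExpr** resHi )
    {
       *resLo = IRExpr_Unop(Iop_64to32,   IRExpr_RdTmp(prod));
       *resHi = IRExpr_Unop(Iop_64HIto32, IRExpr_RdTmp(prod));
    }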
|
D | host_tilegx_isel.c | 833 case Iop_64to32: in iselWordExpr_R_wrk()
|
D | host_s390_isel.c |
      1560  ((unop == Iop_64to32 &&  in s390_isel_int_expr_wrk()
      1719  case Iop_64to32:  in s390_isel_int_expr_wrk()
|
D | ir_defs.c |
      253   case Iop_64to32: vex_printf("64to32"); return;  in ppIROp()
      2714  case Iop_64HIto32: case Iop_64to32:  in typeOfPrimop()
|
D | host_arm64_isel.c | 1940 case Iop_64to32: in iselIntExpr_R_wrk()
|
D | host_mips_isel.c | 1596 case Iop_64to32: { in iselWordExpr_R_wrk()
|
D | host_x86_isel.c | 1281 case Iop_64to32: { in iselIntExpr_R_wrk()
|
D | host_amd64_isel.c | 1687 case Iop_64to32: in iselIntExpr_R_wrk()
|
/external/valgrind/drd/ |
D | drd_load_store.c | 458 IRExpr_Unop(Iop_64to32, data_expr_lo))); in instr_trace_mem_store()
|
/external/valgrind/VEX/pub/ |
D | libvex_ir.h | 531 Iop_64to32, // :: I64 -> I32, low half enumerator
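The comment on the enumerator is the whole contract: the op takes an I64 and returns its low 32 bits (Iop_64HIto32 is the companion for the upper half, and Iop_32Uto64/Iop_32Sto64 widen back). A minimal sketch of those shapes using only the public constructors declared in this header; the helper names are illustrative, not libVEX API.

    #include "libvex_ir.h"

    /* Illustrative helpers: the low and high 32-bit halves of an Ity_I64
       expression, and the 32Uto64(64to32(x)) round trip that clears bits
       63:32, as seen throughout the frontends listed above. */
    static IRExpr* lowHalf64 ( IRExpr* e64 )
    {
       return IRExpr_Unop(Iop_64to32, e64);
    }
    static IRExpr* highHalf64 ( IRExpr* e64 )
    {
       return IRExpr_Unop(Iop_64HIto32, e64);
    }
    static IRExpr* zeroUpperHalf64 ( IRExpr* e64 )
    {
       return IRExpr_Unop(Iop_32Uto64, IRExpr_Unop(Iop_64to32, e64));
    }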
|
/external/valgrind/coregrind/ |
D | m_translate.c | 1007 return IRExpr_Unop(Iop_64to32, e); in narrowTo32()
|
/external/valgrind/memcheck/ |
D | mc_translate.c |
      798   return assignNew('V', mce, Ity_I32, unop(Iop_64to32, tmp));  in mkPCastTo()
      4442  case Iop_64to32:  in expr2vbits_Unop()
      6807  assign( 'B', mce, bTmp32, unop(Iop_64to32, mkexpr(bTmp)) );  in gen_guarded_load_b()
      6920  return assignNew( 'B', mce, Ity_I32, unop(Iop_64to32, e) );  in narrowTo32()
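The expr2vbits_Unop and narrowTo32 hits show memcheck's shadow rule for the op: the definedness bits of 64to32(x) are simply the narrowed definedness bits of x (hence the UNDEF_TRUNC classification in the vbit-test table below). A hedged sketch of that rule; the real code additionally routes the result through assignNew() to bind it to a fresh shadow temp.

    #include "libvex_ir.h"

    /* Illustrative: the V-bit (definedness) shadow of 64to32(x) is the
       same truncation applied to the shadow of x.  'vbits64' stands for
       the Ity_I64 shadow expression of the original operand. */
    static IRExpr* vbitsOf64to32 ( IRExpr* vbits64 )
    {
       return IRExpr_Unop(Iop_64to32, vbits64);
    }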
|
/external/valgrind/VEX/useful/ |
D | test_main.c | 2007 case Iop_64to32: in expr2vbits_Unop()
|
/external/valgrind/memcheck/tests/vbit-test/ |
D | irops.c | 172 …{ DEFOP(Iop_64to32, UNDEF_TRUNC), .s390x = 1, .amd64 = 1, .x86 = 1, .arm = 1, .ppc64 = 1, .ppc…
|