
Searched refs:mkU64 (Results 1 – 18 of 18) sorted by relevance
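
All 18 hits are variants of one idiom: each VEX front end, back end and Valgrind tool keeps its own private mkU64 helper, either as a small static function or as a file-local macro, wrapping the public IR constructors. A minimal sketch of both forms and a typical use follows, assuming the VEX public headers libvex_basictypes.h and libvex_ir.h; mkU64_fn and next_qword are names made up for this sketch (the real helpers are all called mkU64).

   #include "libvex_basictypes.h"   /* ULong */
   #include "libvex_ir.h"           /* IRExpr, IRExpr_Const, IRConst_U64, IRExpr_Binop, Iop_Add64 */

   /* Macro form, as in guest_s390_helpers.c, ir_inject.c, dh_main.c,
      mc_translate.c and hg_main.c below. */
   #define mkU64(v)  IRExpr_Const(IRConst_U64(v))

   /* Function form, as in the guest_*_toIR.c files below (renamed here
      only so both forms fit in one sketch). */
   static IRExpr* mkU64_fn ( ULong i )
   {
      return IRExpr_Const(IRConst_U64(i));
   }

   /* Typical use: the IR expression for `addr + 8`. */
   static IRExpr* next_qword ( IRExpr* addr )
   {
      return IRExpr_Binop(Iop_Add64, addr, mkU64(8));
   }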

/external/valgrind/VEX/priv/
guest_tilegx_toIR.c 110 static IRExpr *mkU64 ( ULong i ) in mkU64() function
349 putPC(mkU64(guest_PC_curr_instr + 24)); in disInstr_TILEGX_WRK()
363 putPC(mkU64(guest_PC_curr_instr + 8)); in disInstr_TILEGX_WRK()
371 putIReg(55, mkU64(guest_PC_curr_instr + 24)); in disInstr_TILEGX_WRK()
388 mkU64(guest_PC_curr_instr))); in disInstr_TILEGX_WRK()
390 mkU64(24))); in disInstr_TILEGX_WRK()
393 putPC(mkU64(guest_PC_curr_instr + 24)); in disInstr_TILEGX_WRK()
492 assign(t2, mkU64(extend_s_8to64(imm))); in disInstr_TILEGX_WRK()
497 assign(t2, mkU64(extend_s_16to64(imm))); in disInstr_TILEGX_WRK()
542 mkU64(extend_s_8to64(imm)))); in disInstr_TILEGX_WRK()
[all …]
guest_s390_helpers.c 1810 #define mkU64(v) IRExpr_Const(IRConst_U64(v)) macro
1957 return unop(Iop_1Uto32, binop(Iop_CmpEQ64, cc_dep1, mkU64(0))); in guest_s390x_spechelper()
1960 return unop(Iop_1Uto32, binop(Iop_CmpNE64, cc_dep1, mkU64(0))); in guest_s390x_spechelper()
1969 return unop(Iop_1Uto32, binop(Iop_CmpLE64S, cc_dep1, mkU64(0))); in guest_s390x_spechelper()
1973 return unop(Iop_1Uto32, binop(Iop_CmpLT64S, mkU64(0), cc_dep1)); in guest_s390x_spechelper()
1981 mkU64(1))); in guest_s390x_spechelper()
2010 return unop(Iop_1Uto32, binop(Iop_CmpEQ64, cc_dep1, mkU64(0))); in guest_s390x_spechelper()
2013 return unop(Iop_1Uto32, binop(Iop_CmpNE64, cc_dep1, mkU64(0))); in guest_s390x_spechelper()
2116 mkU64(0))); in guest_s390x_spechelper()
2121 mkU64(0))); in guest_s390x_spechelper()
[all …]
guest_s390_toIR.c 207 mkU64(ULong value) in mkU64() function
364 mkU64(guest_IA_curr_instr))); in system_call()
526 mkIRExprVec_2(mkU64((ULong)text), value));
587 op = mkU64(opc); in s390_cc_thunk_put1()
589 dep2 = mkU64(0); in s390_cc_thunk_put1()
590 ndep = mkU64(0); in s390_cc_thunk_put1()
601 op = mkU64(opc); in s390_cc_thunk_put2()
604 ndep = mkU64(0); in s390_cc_thunk_put2()
626 op = mkU64(opc); in s390_cc_thunk_put3()
649 stmt(IRStmt_Put(dep1_off, mkU64(0))); in s390_cc_thunk_put1f()
[all …]
guest_mips_toIR.c 470 mkU64(extend_s_16to64(imm)))); \
481 assign(t2, binop(Iop_And64, mkexpr(t1), mkU64(0xFFFFFFFFFFFFFFFCULL))); \
484 mkexpr(t1), mkU64(0x3))));
488 assign(t2, binop(Iop_And64, mkexpr(t1), mkU64(0xFFFFFFFFFFFFFFF8ULL))); \
490 assign(t4, binop(Iop_And64, mkexpr(t1), mkU64(0x7)));
532 putIReg(rt, binop(op, getIReg(rs), mkU64(imm)));
869 static IRExpr *mkU64(ULong i) in mkU64() function
1018 stmt(IRStmt_Put(OFFB_PC, mkU64(d64))); in jmp_lit64()
1058 return mode64 ? mkU64(0x0) : mkU32(0x0); in getIReg()
1097 mkU64(0xFF))); in getByteFromReg()
[all …]
guest_amd64_helpers.c 997 # define mkU64(_n) IRExpr_Const(IRConst_U64(_n)) in guest_amd64_spechelper() macro
1032 mkU64(0))); in guest_amd64_spechelper()
1058 mkU64(1)); in guest_amd64_spechelper()
1124 mkU64(1)); in guest_amd64_spechelper()
1144 mkU64(1)); in guest_amd64_spechelper()
1201 mkU64(1)); in guest_amd64_spechelper()
1271 mkU64(1)); in guest_amd64_spechelper()
1283 mkU64(1)), in guest_amd64_spechelper()
1284 mkU64(1)); in guest_amd64_spechelper()
1375 binop(Iop_And64, cc_dep1, mkU64(0xFF)), in guest_amd64_spechelper()
[all …]
ir_inject.c 40 #define mkU64(v) IRExpr_Const(IRConst_U64(v)) macro
90 addr = mkU64(haddr); in load()
91 next_addr = binop(Iop_Add64, addr, mkU64(8)); in load()
151 addr = mkU64(haddr); in store()
152 next_addr = binop(Iop_Add64, addr, mkU64(8)); in store()
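
The two ir_inject.c functions shown above use mkU64 the same way: the host address haddr becomes a 64-bit constant and, for wider accesses, the following 8-byte chunk is addressed at haddr + 8. A sketch of that addressing step (addr_pair is a name invented for illustration, and IRExpr_Binop is assumed to be what the file's binop() shorthand expands to):

   /* Assumes the mkU64 macro from ir_inject.c line 40 is in scope. */
   static void addr_pair ( ULong haddr, IRExpr** addr, IRExpr** next_addr )
   {
      *addr      = mkU64(haddr);                              /* first 8 bytes */
      *next_addr = IRExpr_Binop(Iop_Add64, *addr, mkU64(8));  /* next 8 bytes  */
   }
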
guest_ppc_toIR.c 646 static IRExpr* mkU64 ( ULong i ) in mkU64() function
1039 return ty == Ity_I64 ? mkU64(imm64) : mkU32((UInt)imm64); in mkSzImm()
1056 mkU64(extend_s_16to64(imm16)) : in mkSzExtendS16()
1065 mkU64(extend_s_32to64(imm32)) : in mkSzExtendS32()
1564 assign( mask[0], mkU64( 0x5555555555555555ULL ) ); in gen_POPCOUNT()
1565 assign( mask[1], mkU64( 0x3333333333333333ULL ) ); in gen_POPCOUNT()
1566 assign( mask[2], mkU64( 0x0F0F0F0F0F0F0F0FULL ) ); in gen_POPCOUNT()
1567 assign( mask[3], mkU64( 0x00FF00FF00FF00FFULL ) ); in gen_POPCOUNT()
1568 assign( mask[4], mkU64( 0x0000FFFF0000FFFFULL ) ); in gen_POPCOUNT()
1569 assign( mask[5], mkU64( 0x00000000FFFFFFFFULL ) ); in gen_POPCOUNT()
[all …]
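
The six mask constants assigned in gen_POPCOUNT above are the classic divide-and-conquer population count: each step adds neighbouring bit groups of width 1, 2, 4, 8, 16 and 32. A plain-C sketch of the standard algorithm these masks implement (not the VEX code itself):

   static unsigned long long popcount64 ( unsigned long long x )
   {
      x = (x & 0x5555555555555555ULL) + ((x >>  1) & 0x5555555555555555ULL);
      x = (x & 0x3333333333333333ULL) + ((x >>  2) & 0x3333333333333333ULL);
      x = (x & 0x0F0F0F0F0F0F0F0FULL) + ((x >>  4) & 0x0F0F0F0F0F0F0F0FULL);
      x = (x & 0x00FF00FF00FF00FFULL) + ((x >>  8) & 0x00FF00FF00FF00FFULL);
      x = (x & 0x0000FFFF0000FFFFULL) + ((x >> 16) & 0x0000FFFF0000FFFFULL);
      x = (x & 0x00000000FFFFFFFFULL) + ((x >> 32) & 0x00000000FFFFFFFFULL);
      return x;   /* number of set bits in the original value */
   }
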
guest_amd64_toIR.c 281 static IRExpr* mkU64 ( ULong i ) in mkU64() function
292 case Ity_I64: return mkU64(i); in mkU()
1670 = mkIRExprVec_5( mkU64(cond), in mk_amd64g_calculate_condition()
1799 stmt( IRStmt_Put( OFFB_CC_OP, mkU64(ccOp)) ); in setFlags_DEP1_DEP2()
1825 stmt( IRStmt_Put( OFFB_CC_OP, mkU64(ccOp)) ); in setFlags_DEP1()
1827 stmt( IRStmt_Put( OFFB_CC_DEP2, mkU64(0)) ); in setFlags_DEP1()
1869 mkU64(ccOp), in setFlags_DEP1_DEP2_shift()
1901 stmt( IRStmt_Put( OFFB_CC_OP, mkU64(ccOp)) ); in setFlags_INC_DEC()
1903 stmt( IRStmt_Put( OFFB_CC_DEP2, mkU64(0)) ); in setFlags_INC_DEC()
1915 stmt( IRStmt_Put( OFFB_CC_OP, mkU64(base_op+0) ) ); in setFlags_MUL()
[all …]
guest_arm64_toIR.c 248 static IRExpr* mkU64 ( ULong i ) in mkU64() function
790 case Ity_I64: return mkU64(imm); in mkU()
1227 return mkU64(0); in getIReg64orZR()
1763 return mk_arm64g_calculate_condition_dyn( mkU64(cond << 4) ); in mk_arm64g_calculate_condition()
1847 stmt( IRStmt_Put( OFFB_CC_OP, mkU64(cc_op) )); in setFlags_D1_D2_ND()
1869 assign(z64, mkU64(0)); in setFlags_ADD_SUB()
1925 assign(z64, mkU64(0)); in setFlags_ADD_SUB_conditionally()
1951 assign(f_dep1, mkU64(nzcv << 28)); in setFlags_ADD_SUB_conditionally()
1958 assign(op, IRExpr_ITE(mkexpr(cond), mkU64(t_op), mkU64(f_op))); in setFlags_ADD_SUB_conditionally()
1984 assign(z64, mkU64(0)); in setFlags_LOGIC()
[all …]
guest_arm64_helpers.c 719 # define mkU64(_n) IRExpr_Const(IRConst_U64(_n)) in guest_arm64_spechelper() macro
946 mkU64(1)); in guest_arm64_spechelper()
954 mkU64(1)), in guest_arm64_spechelper()
955 mkU64(1)); in guest_arm64_spechelper()
1128 # undef mkU64 in guest_arm64_spechelper()
guest_arm_toIR.c 277 static IRExpr* mkU64 ( ULong i ) in mkU64() function
405 return binop(Iop_64HLtoV128, mkU64(i), mkU64(i)); in mkU128()
2930 assign(old_mask, mkU64(0)); in dis_neon_vtb()
2931 assign(old_res, mkU64(0)); in dis_neon_vtb()
2945 assign(new_arg, binop(Iop_Sub8x8, mkexpr(old_arg), mkU64(imm))); in dis_neon_vtb()
2946 assign(cur_mask, binop(cmp, mkU64(imm), mkexpr(old_arg))); in dis_neon_vtb()
3089 imm_val = binop(Iop_64HLtoV128, mkU64(imm), mkU64(imm)); in dis_neon_data_3same()
3092 imm_val = mkU64(imm); in dis_neon_data_3same()
3267 mkU64(one), in dis_neon_data_3same()
3268 mkU64(one))), in dis_neon_data_3same()
[all …]
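
Line 405 above also shows the step from 64-bit to 128-bit constants: mkU128 pairs two identical mkU64 values with Iop_64HLtoV128. A sketch of the same idea using the public constructors directly (mkV128dup64 is a made-up name):

   /* Mirrors guest_arm_toIR.c line 405 above: replicate a 64-bit
      immediate into both halves of a V128 constant expression. */
   static IRExpr* mkV128dup64 ( ULong i )
   {
      return IRExpr_Binop(Iop_64HLtoV128,
                          IRExpr_Const(IRConst_U64(i)),
                          IRExpr_Const(IRConst_U64(i)));
   }
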
guest_x86_toIR.c 680 static IRExpr* mkU64 ( ULong i ) in mkU64() function
5753 mkU64(0) in dis_MMX_shiftG_byE()
5814 ? mkU64(0) in dis_MMX_shiftE_imm()
7750 assign(one32x2, mkU64( (1ULL << 32) + 1 )); in dis_PMULHRSW_helper()
7814 assign( zero, mkU64(0) ); in dis_PSIGN_helper()
7855 assign( zero, mkU64(0) ); in dis_PABS_helper()
9309 putXMMRegLane64( gregOfRM(modrm), 1, mkU64(0) ); in disInstr_X86_WRK()
10590 putXMMRegLane64( gregOfRM(modrm), 1, mkU64(0) ); in disInstr_X86_WRK()
10598 putXMMRegLane64( gregOfRM(modrm), 1, mkU64(0) ); in disInstr_X86_WRK()
11412 assign( lo64r, mkU64(0) ); in disInstr_X86_WRK()
[all …]
host_s390_isel.c 185 mkU64(ULong value) in mkU64() function
1971 addr_lo = IRExpr_Binop(Iop_Add64, addr_hi, mkU64(8)); in s390_isel_float128_expr_wrk()
2660 addr_lo = IRExpr_Binop(Iop_Add64, addr_hi, mkU64(8)); in s390_isel_dfp128_expr_wrk()
/external/valgrind/coregrind/
m_translate.c 991 static IRExpr* mkU64 ( ULong n ) { in mkU64() function
1033 IRExpr*(*mkU)(ULong) = mkU64; in gen_PUSH()
1126 IRExpr*(*mkU)(ULong) = mkU64; in gen_POP()
1217 addStmtToIRSB( bb, IRStmt_Put( offB_LR, mkU64( bogus_RA )) ); in gen_push_and_set_LR_R2()
1218 addStmtToIRSB( bb, IRStmt_Put( offB_GPR2, mkU64( new_R2_value )) ); in gen_push_and_set_LR_R2()
1247 bb->next = IRExpr_Binop(Iop_And64, IRExpr_RdTmp(old_LR), mkU64(~(3ULL))); in gen_pop_R2_LR_then_bLR()
1297 addStmtToIRSB( bb, IRStmt_Put( offB_LR, mkU64( bogus_RA )) ); in gen_push_R2_and_set_LR()
1332 nraddr_szB == 8 ? mkU64(0) : mkU32(0) in mk_preamble__set_NRADDR_to_zero()
1344 addStmtToIRSB(bb, IRStmt_Put(offB_GPR25, mkU64(closure->readdr))); in mk_preamble__set_NRADDR_to_zero()
1352 VG_WORDSIZE==8 ? mkU64(0) : mkU32(0) in mk_preamble__set_NRADDR_to_zero()
[all …]
/external/valgrind/exp-dhat/
dh_main.c 768 #define mkU64(_n) IRExpr_Const(IRConst_U64(_n)) macro
790 IRStmt* st2 = assign(t2, binop(Iop_Add64, mkexpr(t1), mkU64(n))); in add_counter_update()
846 : binop(Iop_Sub64, mkexpr(sp), mkU64(rz_szB))) in addMemEvent()
864 : binop(Iop_CmpLT64U, mkU64(THRESH), mkexpr(diff))) in addMemEvent()
1022 #undef mkU64
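
dh_main.c also shows the scoping convention for the macro form (hg_main.c at lines 4396 and 4867 below does the same): mkU64 is #define'd just before the instrumentation helpers that need it and #undef'd once they are done, so the shorthand never leaks further into the file. In outline, not the actual dh_main.c code:

   #define mkU64(_n)  IRExpr_Const(IRConst_U64(_n))

   /* ... instrumentation helpers such as add_counter_update() and
      addMemEvent() build their IR with mkU64 here ... */

   #undef mkU64
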
/external/valgrind/memcheck/
mc_translate.c 444 #define mkU64(_n) IRExpr_Const(IRConst_U64(_n)) macro
907 = assignNew('V', mce, Ity_I64, binop(Iop_And64, pcdTo64, mkU64(1))); in mkPCastXXtoXXlsb()
922 = assignNew('V', mce, Ity_I64, binop(Iop_And64, pcd, mkU64(1))); in mkPCastXXtoXXlsb()
1014 top = mkU64(0xFFFFFFFFFFFFFFFFULL); in expensiveCmpEQorNE()
1130 threeLeft1 = m64 ? mkU64(3<<1) : mkU32(3<<1); in doCmpORD()
1153 sevenLeft1 = m64 ? mkU64(7<<1) : mkU32(7<<1); in doCmpORD()
1993 one = mkU64(1); in expensiveCountTrailingZeroes()
4708 eBias = tyAddr==Ity_I32 ? mkU32(bias) : mkU64(bias); in expr2vbits_Load_WRK()
5137 eBiasQ0 = tyAddr==Ity_I32 ? mkU32(bias+offQ0) : mkU64(bias+offQ0); in do_shadow_Store()
5146 eBiasQ1 = tyAddr==Ity_I32 ? mkU32(bias+offQ1) : mkU64(bias+offQ1); in do_shadow_Store()
[all …]
/external/valgrind/VEX/useful/
test_main.c 741 #define mkU64(_n) IRExpr_Const(IRConst_U64(_n)) macro
1001 tmp1 = assignNew(mce, Ity_I1, binop(Iop_CmpNE64, vbits, mkU64(0))); in mkPCastTo()
2093 eBias = tyAddr==Ity_I32 ? mkU32(bias) : mkU64(bias); in expr2vbits_LDle_WRK()
2309 eBias0 = tyAddr==Ity_I32 ? mkU32(bias) : mkU64(bias); in do_shadow_STle()
2316 eBias8 = tyAddr==Ity_I32 ? mkU32(bias+8) : mkU64(bias+8); in do_shadow_STle()
2335 eBias = tyAddr==Ity_I32 ? mkU32(bias) : mkU64(bias); in do_shadow_STle()
/external/valgrind/helgrind/
hg_main.c 4396 #define mkU64(_n) IRExpr_Const(IRConst_U64(_n)) macro
4558 : binop(Iop_Add64, mkexpr(addr_minus_sp), mkU64(rz_szB))) in instrument_mem_access()
4568 : binop(Iop_CmpLT64U, mkU64(THRESH), mkexpr(diff))) in instrument_mem_access()
4867 #undef mkU64