Lines Matching refs:mkU64

646 static IRExpr* mkU64 ( ULong i )  in mkU64()  function
1039 return ty == Ity_I64 ? mkU64(imm64) : mkU32((UInt)imm64); in mkSzImm()
1056 mkU64(extend_s_16to64(imm16)) : in mkSzExtendS16()
1065 mkU64(extend_s_32to64(imm32)) : in mkSzExtendS32()
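The helpers above follow the usual VEX pattern: mkU64 wraps a 64-bit constant in an IR expression, and the mkSz* wrappers pick the 32- or 64-bit form from the guest word type. A minimal sketch of that shape, assuming the standard libvex_ir.h constructors (not copied from the file):

    static IRExpr* mkU64 ( ULong i )
    {
       return IRExpr_Const(IRConst_U64(i));
    }

    static IRExpr* mkSzImm ( IRType ty, ULong imm64 )
    {
       /* choose the constant width from the guest word type */
       return ty == Ity_I64 ? mkU64(imm64) : mkU32((UInt)imm64);
    }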
1564 assign( mask[0], mkU64( 0x5555555555555555ULL ) ); in gen_POPCOUNT()
1565 assign( mask[1], mkU64( 0x3333333333333333ULL ) ); in gen_POPCOUNT()
1566 assign( mask[2], mkU64( 0x0F0F0F0F0F0F0F0FULL ) ); in gen_POPCOUNT()
1567 assign( mask[3], mkU64( 0x00FF00FF00FF00FFULL ) ); in gen_POPCOUNT()
1568 assign( mask[4], mkU64( 0x0000FFFF0000FFFFULL ) ); in gen_POPCOUNT()
1569 assign( mask[5], mkU64( 0x00000000FFFFFFFFULL ) ); in gen_POPCOUNT()
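The six masks assigned in gen_POPCOUNT are the classic SWAR population-count constants. A standalone C sketch of the computation the generated IR performs on a 64-bit value (an illustration, not the VEX code itself):

    #include <stdint.h>

    static uint64_t popcount64_swar(uint64_t x)
    {
       /* fold pairs, nibbles, bytes, ... until the whole word holds the count */
       x = (x & 0x5555555555555555ULL) + ((x >>  1) & 0x5555555555555555ULL);
       x = (x & 0x3333333333333333ULL) + ((x >>  2) & 0x3333333333333333ULL);
       x = (x & 0x0F0F0F0F0F0F0F0FULL) + ((x >>  4) & 0x0F0F0F0F0F0F0F0FULL);
       x = (x & 0x00FF00FF00FF00FFULL) + ((x >>  8) & 0x00FF00FF00FF00FFULL);
       x = (x & 0x0000FFFF0000FFFFULL) + ((x >> 16) & 0x0000FFFF0000FFFFULL);
       x = (x & 0x00000000FFFFFFFFULL) + ((x >> 32) & 0x00000000FFFFFFFFULL);
       return x;   /* 0..64 */
    }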
1750 binop(Iop_And64, mkexpr(addr), mkU64(align-1)), in gen_SIGBUS_if_misaligned()
1751 mkU64(0)), in gen_SIGBUS_if_misaligned()
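gen_SIGBUS_if_misaligned tests the address with the usual power-of-two trick: the access is aligned exactly when the low bits selected by align-1 are all zero. A standalone sketch of the predicate the IR encodes (assuming align is a power of two):

    #include <stdint.h>

    static int is_misaligned(uint64_t addr, uint64_t align)
    {
       return (addr & (align - 1)) != 0;   /* nonzero => deliver SIGBUS */
    }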
1787 binop(Iop_Sub64, getIReg(1), mkU64(szB)), in make_redzone_AbiHint()
1956 binop(Iop_CmpORD64S, result, mkU64(0))) ); in set_CR0()
2256 = AND3( XOR3(argL,argR,mkU64(-1)), in set_XER_OV_64()
2258 mkU64(INT64_MIN) ); in set_XER_OV_64()
2269 binop(Iop_CmpEQ64, argL, mkU64(INT64_MIN)), in set_XER_OV_64()
2270 binop(Iop_CmpEQ64, argR, mkU64(-1)) in set_XER_OV_64()
2272 binop(Iop_CmpEQ64, argR, mkU64(0) ) in set_XER_OV_64()
2279 = binop(Iop_CmpEQ64, argR, mkU64(0)); in set_XER_OV_64()
2298 = binop(Iop_CmpEQ64, argL, mkU64(INT64_MIN)); in set_XER_OV_64()
2306 = AND3( XOR3(NOT(argL),argR,mkU64(-1)), in set_XER_OV_64()
2308 mkU64(INT64_MIN) ); in set_XER_OV_64()
2322 = mkOR1( binop( Iop_CmpEQ64, argR, mkU64( 0 ) ), in set_XER_OV_64()
2323 mkAND1( binop( Iop_CmpEQ64, res, mkU64( 0 ) ), in set_XER_OV_64()
2324 mkAND1( binop( Iop_CmpNE64, argL, mkU64( 0 ) ), in set_XER_OV_64()
2325 binop( Iop_CmpNE64, argR, mkU64( 0 ) ) ) ) ); in set_XER_OV_64()
2330 xer_ov = mkOR1( binop( Iop_CmpEQ64, argR, mkU64( 0 ) ), in set_XER_OV_64()
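set_XER_OV_64 computes the overflow bit with standard bit tricks. For addition, XOR3(argL,argR,-1) is ~(argL ^ argR); combined with a sign comparison against the result (the elided middle operand of the AND3 at lines 2256-2258) and the INT64_MIN mask, this is the usual same-input-signs/different-result-sign overflow test, while the divide cases at lines 2269-2279 single out INT64_MIN / -1 and division by zero. A plain-C sketch of those two checks (an approximation of, not an excerpt from, the IR):

    #include <stdint.h>

    static int add_overflows_s64(int64_t argL, int64_t argR, int64_t res)
    {
       uint64_t same_sign_in = ~((uint64_t)argL ^ (uint64_t)argR);
       uint64_t sign_flipped =  (uint64_t)argL ^ (uint64_t)res;
       return (same_sign_in & sign_flipped & 0x8000000000000000ULL) != 0;
    }

    static int divd_overflows_s64(int64_t argL, int64_t argR)
    {
       return (argL == INT64_MIN && argR == -1) || argR == 0;
    }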
2529 binop(Iop_CmpEQ64, oldca, mkU64(1)), in set_XER_CA_64()
2543 binop(Iop_CmpEQ64, oldca, mkU64(1)), in set_XER_CA_64()
2572 binop(Iop_Shl64, mkU64(1), in set_XER_CA_64()
2574 mkU64(1) ) in set_XER_CA_64()
2580 binop(Iop_CmpLT64U, mkU64(31), argR), in set_XER_CA_64()
2584 unop(Iop_1Uto32, binop(Iop_CmpNE64, xer_ca, mkU64(0))) in set_XER_CA_64()
2603 binop(Iop_Shl64, mkU64(1), in set_XER_CA_64()
2605 mkU64(1) ) in set_XER_CA_64()
2609 = unop(Iop_1Uto32, binop(Iop_CmpNE64, xer_ca, mkU64(0))); in set_XER_CA_64()
2626 binop(Iop_Shl64, mkU64(1), in set_XER_CA_64()
2628 mkU64(1) ) in set_XER_CA_64()
2634 binop(Iop_CmpLT64U, mkU64(63), argR), in set_XER_CA_64()
2638 unop(Iop_1Uto32, binop(Iop_CmpNE64, xer_ca, mkU64(0))) in set_XER_CA_64()
2658 binop(Iop_Shl64, mkU64(1), in set_XER_CA_64()
2660 mkU64(1) ) in set_XER_CA_64()
2664 = unop(Iop_1Uto32, binop(Iop_CmpNE64, xer_ca, mkU64(0))); in set_XER_CA_64()
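Several of the set_XER_CA_64 fragments build (1 << n) - 1 style tests from Iop_Shl64 of mkU64(1), guard against shift amounts above 31 or 63 with CmpLT64U, and finally normalise xer_ca to 0/1 with CmpNE64 against zero. For the algebraic right shifts the PowerPC rule is that CA is set when the source is negative and any 1-bits were shifted out. A sketch of that rule for the 64-bit case (a summary, not the IR itself):

    #include <stdint.h>

    static int ca_for_srad(int64_t rS, uint64_t sh)
    {
       /* sh > 63 discards the whole value; otherwise keep the low sh bits */
       uint64_t lost = (sh > 63) ? (uint64_t)rS
                                 : ((uint64_t)rS & ((1ULL << sh) - 1));
       return (rS < 0) && (lost != 0);
    }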
2827 unop( Iop_64HIto32, mkU64( mask ) ) ); in getGST_masked_upper()
2988 mkU64( 0x7 ) ) ); in putGST()
2995 mkU64( 0 ) ) ), in putGST()
2998 mkU64( 0x5 ), in putGST()
3005 mkU64( 0 ) ) ), in putGST()
3008 mkU64( 0x5 ), in putGST()
3034 mkU64( 0x1C000000000000) ) ) ); in putGST()
3185 mkU64( NONZERO_FRAC_MASK ) )
3210 mkU64( 0x7ff ) ) ); in fp_exp_part()
3260 assign( sign_less_part, binop( Iop_And64, mkexpr( src ), mkU64( SIGN_MASK ) ) ); in is_Zero()
3352 mkU64( signbit_mask ) ) ), in getNegatedResult()
3369 mkU64( ~signbit_mask ) ), in getNegatedResult()
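fp_exp_part, is_Zero and getNegatedResult all operate on the raw IEEE-754 double layout held in an I64: bit 63 is the sign and bits 62-52 the biased exponent, hence the 0x7ff mask and the sign masks above. Scalar equivalents of the three operations (the macro names below are illustrative, not the file's):

    #include <stdint.h>

    #define DP_SIGN_BIT  0x8000000000000000ULL
    #define DP_NO_SIGN   0x7FFFFFFFFFFFFFFFULL

    static uint64_t dp_exp_part(uint64_t bits) { return (bits >> 52) & 0x7ff; }
    static int      dp_is_zero (uint64_t bits) { return (bits & DP_NO_SIGN) == 0; }
    static uint64_t dp_negate  (uint64_t bits) { return bits ^ DP_SIGN_BIT; }   /* flip the sign bit */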
3459 putGST( PPC_GST_TFIAR, mkU64( err_address ) ); in storeTMfailure()
3460 putGST( PPC_GST_TEXASR, mkU64( tm_reason ) ); in storeTMfailure()
3462 putGST( PPC_GST_TFHAR, mkU64( handler_address ) ); in storeTMfailure()
4181 ? mkU64(0) : mkU32(0); in dis_int_cmp()
4200 ? mkU64(0) : mkU32(0); in dis_int_cmp()
4432 irx = binop(Iop_CmpNE64, mkexpr(rS), mkU64(0)); in dis_int_logic()
4435 mkU64(64) )); in dis_int_logic()
4445 binop( Iop_64HLtoV128, mkU64(0), mkexpr(rS) ), in dis_int_logic()
4446 binop( Iop_64HLtoV128, mkU64(0), mkexpr(rB) ) in dis_int_logic()
4539 IRExpr * res = binop(Iop_And64, mkU64(0), mkU64(0)); in dis_int_logic()
4549 binop( Iop_And64, mkU64( BPERMD_IDX_MASK ), rS_expr ) ); in dis_int_logic()
4551 binop( Iop_CmpLT64U, mkexpr( idx_tmp ), mkU64( 64 ) ) ); in dis_int_logic()
4569 mkU64( BPERMD_BIT_MASK ), in dis_int_logic()
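The dis_int_logic fragment at lines 4432-4435 is the usual guard for count-leading-zeros: the count is taken only for a nonzero source, and a zero source yields 64. A sketch of that behaviour (using a GCC/Clang builtin for the nonzero case):

    #include <stdint.h>

    static uint64_t cntlzd(uint64_t rS)
    {
       /* __builtin_clzll is undefined for 0, which is exactly why the IR guards it */
       return rS != 0 ? (uint64_t)__builtin_clzll(rS) : 64;
    }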
4778 binop(Iop_And64, mkexpr(rot), mkU64(mask64)), in dis_int_rot()
4779 binop(Iop_And64, getIReg(rA_addr), mkU64(~mask64))) ); in dis_int_rot()
4812 assign( rA, binop(Iop_And64, mkexpr(rot), mkU64(mask64)) ); in dis_int_rot()
4858 assign( rA, binop(Iop_And64, mkexpr(rot), mkU64(mask64)) ); in dis_int_rot()
4889 assign( rA, binop(Iop_And64, r, mkU64(mask64)) ); in dis_int_rot()
4895 assign( rA, binop(Iop_And64, r, mkU64(mask64)) ); in dis_int_rot()
4905 assign( rA, binop(Iop_And64, r, mkU64(mask64)) ); in dis_int_rot()
4928 assign( rA, binop(Iop_And64, r, mkU64(mask64)) ); in dis_int_rot()
4945 assign( rA, binop(Iop_And64, r, mkU64(mask64)) ); in dis_int_rot()
4957 binop(Iop_And64, mkU64(mask64), r), in dis_int_rot()
4958 binop(Iop_And64, mkU64(~mask64), in dis_int_rot()
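The dis_int_rot matches show the two rotate-and-mask shapes of the 64-bit rotate instructions: a plain rA = ROTL64(rS, sh) & mask for the clear/extract forms, and the insert form rA = (ROTL64(rS, sh) & mask) | (rA & ~mask) at lines 4778-4779. A standalone sketch of both (illustrative helper names):

    #include <stdint.h>

    static uint64_t rotl64(uint64_t x, unsigned n)
    {
       n &= 63;
       return n ? (x << n) | (x >> (64 - n)) : x;
    }

    static uint64_t rot_then_mask(uint64_t rS, unsigned sh, uint64_t mask)
    {
       return rotl64(rS, sh) & mask;
    }

    static uint64_t rot_mask_insert(uint64_t rS, unsigned sh, uint64_t mask, uint64_t rA_old)
    {
       return (rotl64(rS, sh) & mask) | (rA_old & ~mask);
    }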
5252 mkU64( 8 ) ) ) ); in dis_int_load()
5257 mkU64( 8 ) ) ) ); in dis_int_load()
5536 irx_addr = binop(mkAdd, mkexpr(EA), mode64 ? mkU64(ea_off) : mkU32(ea_off)); in dis_int_ldst_mult()
5546 irx_addr = binop(mkAdd, mkexpr(EA), mode64 ? mkU64(ea_off) : mkU32(ea_off)); in dis_int_ldst_mult()
5879 assign(t_tgt, mode64 ? mkU64(tgt) : mkU32(tgt) ); in dis_branch()
6145 IRExpr* const0 = is32bit ? mkU32(0) : mkU64(0); in do_trap()
6146 IRExpr* const2 = is32bit ? mkU32(2) : mkU64(2); in do_trap()
6147 IRExpr* const4 = is32bit ? mkU32(4) : mkU64(4); in do_trap()
6148 IRExpr* const8 = is32bit ? mkU32(8) : mkU64(8); in do_trap()
6247 uncond = do_trap( TO, getIReg(rA_addr), mkU64( (ULong)simm16 ), cia ); in dis_trapi()
6687 binop(Iop_Add64, mkexpr(EA), mkU64(8) ), in dis_memsync()
6693 binop(Iop_Add64, mkexpr(EA), mkU64(8) ), in dis_memsync()
6733 store( binop( Iop_Add64, mkexpr(EA), mkU64(8) ), in dis_memsync()
6737 store( binop( Iop_Add64, mkexpr(EA), mkU64(8) ), in dis_memsync()
6919 binop(Iop_And64, mkexpr(rB), mkU64(63)))), in dis_int_shift()
6934 assign( sh_amt, binop(Iop_And64, mkU64(0x7F), mkexpr(rB)) ); in dis_int_shift()
6936 binop(Iop_CmpLT64U, mkU64(63), mkexpr(sh_amt)) ); in dis_int_shift()
6942 mkU64(63), in dis_int_shift()
6961 mkU64(sh_imm), in dis_int_shift()
6980 binop(Iop_And64, mkexpr(rB), mkU64(63)))), in dis_int_shift()
7091 ty == Ity_I64 ? mkU64( 4 ) : mkU32( 4 ) ); in dis_int_ldst_rev()
7478 binop( Iop_And64, high64, mkU64( 0xFFFFFFFF ) ) : in dis_proc_ctl()
7480 binop( Iop_And64, high64, mkU64( 0xFFFFFFFF ) ) ) ); in dis_proc_ctl()
7500 putVSReg( XT, binop( Iop_64HLtoV128, mkexpr( rA ), mkU64( 0 ) ) ); in dis_proc_ctl()
7506 mkU64( 0 ) ) ); in dis_proc_ctl()
7530 mkU64( 0 ) ) ); in dis_proc_ctl()
7554 mkU64( 0 ) ) ); in dis_proc_ctl()
7666 mkU64( ~((ULong)clearszB-1) )) ); in dis_cache_manage()
7669 irx_addr = binop( Iop_Add64, mkexpr(addr), mkU64(i*8) ); in dis_cache_manage()
7670 store( irx_addr, mkU64(0) ); in dis_cache_manage()
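dis_cache_manage handles the block-zeroing case by rounding the address down to the block size (the ~(clearszB-1) mask at line 7666) and then storing a 64-bit zero every 8 bytes across the block. A standalone sketch of that loop (clearszB assumed to be a power of two):

    #include <stdint.h>
    #include <string.h>

    static void zero_cache_block(uint8_t* mem, uint64_t addr, uint64_t clearszB)
    {
       uint64_t base = addr & ~(clearszB - 1);   /* block-aligned start */
       uint64_t zero = 0;
       for (uint64_t i = 0; i < clearszB; i += 8)
          memcpy(mem + base + i, &zero, 8);      /* one 64-bit store per step */
    }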
9292 assign( EA_lo, binop(Iop_Add64, mkexpr(EA_hi), mkU64(8)) ); in dis_fp_pair()
9492 putGST_masked( PPC_GST_FPSCR, mkU64( 1 <<( 31 - crbD ) ), in dis_fp_scr()
9529 putGST_masked( PPC_GST_FPSCR, mkU64( 0 ), 1ULL << ( 31 - crbD ) ); in dis_fp_scr()
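The two dis_fp_scr matches set or clear a single FPSCR bit; the mask is 1 << (31 - crbD) because PowerPC numbers bits from the most significant end. A sketch of the effect on a 32-bit FPSCR image (illustrative, not the masked-put machinery itself):

    #include <stdint.h>

    static uint32_t fpscr_write_bit(uint32_t fpscr, unsigned crbD, int set)
    {
       uint32_t mask = 1u << (31 - crbD);        /* big-endian bit numbering */
       return set ? (fpscr | mask) : (fpscr & ~mask);
    }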
10739 unop( Iop_ReinterpI64asD64, mkU64( 1 ) ) ) ); in dis_dfp_quantize_sig_rrnd()
10836 unop( Iop_ReinterpI64asD64, mkU64( 1 ) ) ) ) ); in dis_dfp_quantize_sig_rrndq()
11315 mkU64( 0x7FFFFFFFFFFFFFFFULL ) ) ) ); in dis_dfp_class_test()
11322 assign( frAI64_lo, mkU64( 0 ) ); in dis_dfp_class_test()
11332 mkU64( 0x2234000000000001ULL ) ) ); // dfp 1.0 in dis_dfp_class_test()
11333 assign( exp_min_normal,mkU64( 398 - 383 ) ); in dis_dfp_class_test()
11349 mkU64( 0x2238000000000000ULL ) ) ) ); in dis_dfp_class_test()
11371 assign( exp_min_normal, mkU64( 6176 - 6143 ) ); in dis_dfp_class_test()
11375 mkU64( 0x2234000000000001ULL ) ) ) ); // dfp 1.0 in dis_dfp_class_test()
11389 mkU64( 0x7FFFFFFFFFFFFFFFULL ) ) ), in dis_dfp_class_test()
11399 mkU64( 0x0ULL ) ) ) ) ); in dis_dfp_class_test()
11821 mkU64( DFP_LONG_BIAS ), in dis_dfp_bcd()
11909 mkU64( DFP_LONG_BIAS ), in dis_dfp_bcd()
12300 mkU64( DFP_EXTND_BIAS ), in dis_dfp_bcdq()
12741 mkexpr( xB ) ), mkU64( 0 ) ) ); in dis_vx_conv()
12752 mkU64( 0ULL ) ) ); in dis_vx_conv()
12762 mkU64( 0ULL ) ) ); in dis_vx_conv()
12776 mkU64( 0 ) ) ); in dis_vx_conv()
12786 mkU64( 0 ) ) ); in dis_vx_conv()
12800 mkU64( 0 ) ) ); in dis_vx_conv()
12810 mkU64( 0 ) ) ); in dis_vx_conv()
12893 mkU64( 0ULL ) ) ); in dis_vx_conv()
12905 mkU64( 0ULL ) ) ); in dis_vx_conv()
12917 mkU64( 0ULL ) ) ); in dis_vx_conv()
12926 mkU64( 0ULL ) ) ); in dis_vx_conv()
12935 mkU64( 0ULL ) ) ); in dis_vx_conv()
13993 binop(Iop_Or64, mkexpr(frA_I64), mkU64(SNAN_MASK)), in _get_maxmin_fp_NaN()
13997 binop(Iop_Or64, mkexpr(frB_I64), mkU64(SNAN_MASK)), in _get_maxmin_fp_NaN()
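_get_maxmin_fp_NaN ORs SNAN_MASK into whichever operand is the NaN before propagating it; for IEEE-754 doubles that sets the quiet bit, so a signalling NaN comes out quiet. A sketch, assuming the conventional mask value (mantissa bit 51):

    #include <stdint.h>

    #define QNAN_BIT_DP 0x0008000000000000ULL   /* assumed quiet-NaN bit */

    static uint64_t quieten_nan(uint64_t nan_bits)
    {
       return nan_bits | QNAN_BIT_DP;
    }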
14059 mkU64( isMin ? MINUS_ZERO : 0ULL ), in get_max_min_fp()
14175 mkU64( SNAN_MASK ), in _do_vsx_fp_roundToInt()
14400 mkU64( SIGN_BIT ) ), in dis_vxv_misc()
14403 mkU64( SIGN_MASK ) ) ), in dis_vxv_misc()
14407 mkU64( SIGN_BIT ) ), in dis_vxv_misc()
14410 mkU64( SIGN_MASK ) ) ) ) ); in dis_vxv_misc()
14683 mkU64( 0 ) ) ); in dis_vxs_arith()
14693 mkU64( 0 ) ) ); in dis_vxs_arith()
14701 mkU64( 0 ) ) ); in dis_vxs_arith()
14711 mkU64( 0 ) ) ); in dis_vxs_arith()
14719 mkU64( 0 ) ) ); in dis_vxs_arith()
14738 mkU64( 0 ) ) ); in dis_vxs_arith()
14753 mkU64( 0 ) ) ); in dis_vxs_arith()
14773 mkU64( 0 ) ) ); in dis_vxs_arith()
14788 mkU64( 0 ) ) ); in dis_vxs_arith()
14813 mkU64( 0 ) ) ); in dis_vxs_arith()
14837 mkU64( 0 ) ) ); in dis_vxs_arith()
14861 mkU64( 0 ) ) ); in dis_vxs_arith()
14882 … putVSReg( XT, binop( Iop_64HLtoV128, mkexpr( getNegatedResult(msubResult) ), mkU64( 0 ) ) ); in dis_vxs_arith()
14895 mkU64( 0 ) ) ); in dis_vxs_arith()
14904 mkU64( 0 ) ) ); in dis_vxs_arith()
14912 mkU64( 0 ) ) ); in dis_vxs_arith()
14923 mkU64( 0 ) ) ); in dis_vxs_arith()
14931 mkU64( 0 ) ) ); in dis_vxs_arith()
15236 mkU64(VG_PPC_SIGN_MASK) ), in dis_vxs_misc()
15257 mkU64(~VG_PPC_SIGN_MASK) ) ); in dis_vxs_misc()
15261 mkU64(VG_PPC_SIGN_MASK) ) ); in dis_vxs_misc()
15266 mkU64(0x0ULL))); in dis_vxs_misc()
15278 mkU64(~VG_PPC_SIGN_MASK) ) ); in dis_vxs_misc()
15280 mkexpr(BHi_signed), mkU64(0x0ULL) ) ); in dis_vxs_misc()
15294 mkU64(VG_PPC_SIGN_MASK) ) ); in dis_vxs_misc()
15302 mkU64(~VG_PPC_SIGN_MASK) ) in dis_vxs_misc()
15315 mkU64(0x0ULL))); in dis_vxs_misc()
15329 putVSReg( XT, binop( Iop_64HLtoV128, get_max_min_fp(frA, frB, isMin), mkU64( 0 ) ) ); in dis_vxs_misc()
15349 mkU64( 0 ) ) ); in dis_vxs_misc()
15383 mkU64( 0 ) ) ); in dis_vxs_misc()
15413 mkU64( 0 ) ) ); in dis_vxs_misc()
15431 mkU64( 0 ) ) ); in dis_vxs_misc()
15549 mkU64(0) ) ); in dis_vx_load()
15564 mkU64(0) ) ); in dis_vx_load()
15580 putVSReg( XT, binop( Iop_64HLtoV128, exp, mkU64( 0 ) ) ); in dis_vx_load()
15602 high_addr = binop( addOp, mkexpr( EA ), ty == Ity_I64 ? mkU64( ea_off ) in dis_vx_load()
15630 assign( perm_LE, binop( Iop_64HLtoV128, mkU64(0x0c0d0e0f08090a0bULL), in dis_vx_load()
15631 mkU64(0x0405060700010203ULL))); in dis_vx_load()
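perm_LE at lines 15630-15631 is a 16-byte permute control vector: each control byte names a source byte, and this particular pattern reverses the order of the four 32-bit words for a little-endian load. A sketch of applying such a control vector to a 16-byte value (a generic byte permute, not the VEX primop):

    #include <stdint.h>

    static void perm8x16(const uint8_t src[16], const uint8_t ctrl[16], uint8_t dst[16])
    {
       for (int i = 0; i < 16; i++)
          dst[i] = src[ctrl[i] & 0xF];   /* low 4 bits of each control byte pick a source byte */
    }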
15716 ty == Ity_I64 ? mkU64( 8 ) : mkU32( 8 ) ), low64 ); in dis_vx_store()
15735 ty == Ity_I64 ? mkU64( ea_off ) : mkU32( ea_off ) ); in dis_vx_store()
15739 ty == Ity_I64 ? mkU64( ea_off ) : mkU32( ea_off ) ); in dis_vx_store()
15743 ty == Ity_I64 ? mkU64( ea_off ) : mkU32( ea_off ) ); in dis_vx_store()
16615 putVReg( vD_addr, mkV128from4x64S( mkU64(0), mkexpr(z2), in dis_av_arith()
16616 mkU64(0), mkexpr(z0)) ); in dis_av_arith()
16633 putVReg( vD_addr, mkV128from4x64S( mkU64(0), mkU64(0), in dis_av_arith()
16634 mkU64(0), mkexpr(z0)) ); in dis_av_arith()
18014 mkU64(0), in _get_quad_modulo_or_carry()
18015 mkU64(0) ) : mkexpr(tmp_result), in _get_quad_modulo_or_carry()
18018 mkU64(0), in _get_quad_modulo_or_carry()
18107 mkU64(0), in dis_av_quad()
18108 mkU64(BPERMD_IDX_MASK) ), in dis_av_quad()
18130 mkU64(0), in dis_av_quad()
18139 mkU64( BPERMD_BIT_MASK ), in dis_av_quad()
18140 mkU64(0)), in dis_av_quad()
20108 rA == 0 ? (mode64 ? mkU64(0) : mkU32(0)) in disInstr_PPC_WRK()
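The final match shows the standard PowerPC addressing rule applied throughout the decoder: an rA field of 0 in a load/store or addi-style encoding means the literal value 0, not register r0. A sketch of the resulting effective-address computation (illustrative names):

    #include <stdint.h>

    static uint64_t calc_ea_64(unsigned rA, const uint64_t gpr[32], int64_t disp)
    {
       uint64_t base = (rA == 0) ? 0 : gpr[rA];  /* rA==0 reads as zero, not r0 */
       return base + (uint64_t)disp;
    }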