
Lines Matching refs:assign

301 static void assign ( IRTemp dst, IRExpr* e )  in assign()  function
1011 assign(res, binop(mkOR(ty), in mathROR()
1031 assign(res, binop(mkSAR(ty), in mathREPLICATE()
1660 assign(armEncd, in mk_get_IR_rounding_mode()
1663 assign(swapped, in mk_get_IR_rounding_mode()
1866 assign(argL64, unop(Iop_32Uto64, mkexpr(argL))); in setFlags_ADD_SUB()
1867 assign(argR64, unop(Iop_32Uto64, mkexpr(argR))); in setFlags_ADD_SUB()
1869 assign(z64, mkU64(0)); in setFlags_ADD_SUB()
1895 assign(argL64, unop(Iop_32Uto64, mkexpr(argL))); in setFlags_ADC_SBC()
1896 assign(argR64, unop(Iop_32Uto64, mkexpr(argR))); in setFlags_ADC_SBC()
1897 assign(oldC64, unop(Iop_32Uto64, mkexpr(oldC))); in setFlags_ADC_SBC()
1925 assign(z64, mkU64(0)); in setFlags_ADD_SUB_conditionally()
1943 assign(t_dep1, unop(Iop_32Uto64, mkexpr(argL))); in setFlags_ADD_SUB_conditionally()
1944 assign(t_dep2, unop(Iop_32Uto64, mkexpr(argR))); in setFlags_ADD_SUB_conditionally()
1951 assign(f_dep1, mkU64(nzcv << 28)); in setFlags_ADD_SUB_conditionally()
1958 assign(op, IRExpr_ITE(mkexpr(cond), mkU64(t_op), mkU64(f_op))); in setFlags_ADD_SUB_conditionally()
1959 assign(dep1, IRExpr_ITE(mkexpr(cond), mkexpr(t_dep1), mkexpr(f_dep1))); in setFlags_ADD_SUB_conditionally()
1960 assign(dep2, IRExpr_ITE(mkexpr(cond), mkexpr(t_dep2), mkexpr(f_dep2))); in setFlags_ADD_SUB_conditionally()
1981 assign(res64, unop(Iop_32Uto64, mkexpr(res))); in setFlags_LOGIC()
1984 assign(z64, mkU64(0)); in setFlags_LOGIC()
1994 assign(z64, mkU64(0)); in setFlags_COPY()
2044 assign(maskT, mkU64(mask)); in math_SWAPHELPER()
2045 assign( res, in math_SWAPHELPER()
2102 assign(t16, binop(Iop_Or64, mkexpr(src), in math_DUP_TO_64()
2105 assign(t32, binop(Iop_Or64, mkexpr(t16), in math_DUP_TO_64()
2108 assign(t64, binop(Iop_Or64, mkexpr(t32), in math_DUP_TO_64()
2114 assign(t32, binop(Iop_Or64, mkexpr(src), in math_DUP_TO_64()
2117 assign(t64, binop(Iop_Or64, mkexpr(t32), in math_DUP_TO_64()
2123 assign(t64, binop(Iop_Or64, mkexpr(src), in math_DUP_TO_64()
2140 assign(i64, unop(Iop_ReinterpF64asI64, mkexpr(src))); in math_DUP_TO_V128()
2141 assign(res, binop(Iop_64HLtoV128, mkexpr(i64), mkexpr(i64))); in math_DUP_TO_V128()
2146 assign(i64a, unop(Iop_32Uto64, unop(Iop_ReinterpF32asI32, mkexpr(src)))); in math_DUP_TO_V128()
2148 assign(i64b, binop(Iop_Or64, binop(Iop_Shl64, mkexpr(i64a), mkU8(32)), in math_DUP_TO_V128()
2150 assign(res, binop(Iop_64HLtoV128, mkexpr(i64b), mkexpr(i64b))); in math_DUP_TO_V128()
2154 assign(res, binop(Iop_64HLtoV128, mkexpr(src), mkexpr(src))); in math_DUP_TO_V128()
2159 assign(t1, widenUto64(srcTy, mkexpr(src))); in math_DUP_TO_V128()
2161 assign(res, binop(Iop_64HLtoV128, mkexpr(t2), mkexpr(t2))); in math_DUP_TO_V128()
2181 assign(fullWidthT, fullWidth); in math_MAYBE_ZERO_HI64_fromE()
2233 assign(irRes, unop(Iop_32Uto64, mkexpr(irRes32))); in mk_convert_IRCmpF64Result_to_NZCV()
2235 assign( in mk_convert_IRCmpF64Result_to_NZCV()
2243 assign( in mk_convert_IRCmpF64Result_to_NZCV()
2255 assign( in mk_convert_IRCmpF64Result_to_NZCV()
2263 assign(nzcv, binop(Iop_Sub64, mkexpr(termL), mkexpr(termR))); in mk_convert_IRCmpF64Result_to_NZCV()
2422 assign(argL, getIReg64orSP(nn)); in dis_ARM64_data_processing_immediate()
2423 assign(argR, mkU64(uimm12)); in dis_ARM64_data_processing_immediate()
2424 assign(res, binop(isSub ? Iop_Sub64 : Iop_Add64, in dis_ARM64_data_processing_immediate()
2440 assign(argL, getIReg32orSP(nn)); in dis_ARM64_data_processing_immediate()
2441 assign(argR, mkU32(uimm12)); in dis_ARM64_data_processing_immediate()
2442 assign(res, binop(isSub ? Iop_Sub32 : Iop_Add32, in dis_ARM64_data_processing_immediate()
2512 assign(res, binop(ops64[op], argL, argR)); in dis_ARM64_data_processing_immediate()
2527 assign(res, binop(ops32[op], argL, argR)); in dis_ARM64_data_processing_immediate()
2580 assign(old, getIReg64orZR(dd)); in dis_ARM64_data_processing_immediate()
2591 assign(old, getIReg32orZR(dd)); in dis_ARM64_data_processing_immediate()
2665 assign(dst, inZero ? mkU(ty,0) : getIRegOrZR(is64, dd)); in dis_ARM64_data_processing_immediate()
2666 assign(src, getIRegOrZR(is64, nn)); in dis_ARM64_data_processing_immediate()
2668 assign(bot, binop(mkOR(ty), in dis_ARM64_data_processing_immediate()
2673 assign(top, mkexpr(extend ? mathREPLICATE(ty, src, immS) : dst)); in dis_ARM64_data_processing_immediate()
2675 assign(res, binop(mkOR(ty), in dis_ARM64_data_processing_immediate()
2706 assign(srcHi, getIRegOrZR(is64, nn)); in dis_ARM64_data_processing_immediate()
2707 assign(srcLo, getIRegOrZR(is64, mm)); in dis_ARM64_data_processing_immediate()
2709 assign(res, mkexpr(srcLo)); in dis_ARM64_data_processing_immediate()
2713 assign(res, binop(mkOR(ty), in dis_ARM64_data_processing_immediate()
2762 assign(t0, getIRegOrZR(is64, regNo)); in getShiftedIRegOrZR()
2766 assign(t1, binop(mkSHL(ty), mkexpr(t0), mkU8(sh_amt))); in getShiftedIRegOrZR()
2769 assign(t1, binop(mkSHR(ty), mkexpr(t0), mkU8(sh_amt))); in getShiftedIRegOrZR()
2772 assign(t1, binop(mkSAR(ty), mkexpr(t0), mkU8(sh_amt))); in getShiftedIRegOrZR()
2775 assign(t1, mkexpr(mathROR(ty, t0, sh_amt))); in getShiftedIRegOrZR()
2782 assign(t2, unop(mkNOT(ty), mkexpr(t1))); in getShiftedIRegOrZR()
2823 assign(argL, getIRegOrZR(is64, rN)); in dis_ARM64_data_processing_register()
2827 assign(res, binop(op, mkexpr(argL), mkexpr(argR))); in dis_ARM64_data_processing_register()
2864 assign(oldC, in dis_ARM64_data_processing_register()
2869 assign(argL, getIRegOrZR(is64, rN)); in dis_ARM64_data_processing_register()
2871 assign(argR, getIRegOrZR(is64, rM)); in dis_ARM64_data_processing_register()
2878 assign(res, in dis_ARM64_data_processing_register()
2883 assign(res, in dis_ARM64_data_processing_register()
2930 assign(argL, getIRegOrZR(is64, rN)); in dis_ARM64_data_processing_register()
2940 assign(res, binop(op, mkexpr(argL), mkexpr(argR))); in dis_ARM64_data_processing_register()
3109 assign(xN, getIReg64orSP(nn)); in dis_ARM64_data_processing_register()
3110 assign(xM, getIReg64orZR(mm)); in dis_ARM64_data_processing_register()
3148 assign(argR, binop(Iop_Shl64, xMw, mkU8(imm3))); in dis_ARM64_data_processing_register()
3150 assign(res, binop(isSub ? Iop_Sub64 : Iop_Add64, in dis_ARM64_data_processing_register()
3164 assign(argL32, unop(Iop_64to32, mkexpr(argL))); in dis_ARM64_data_processing_register()
3165 assign(argR32, unop(Iop_64to32, mkexpr(argR))); in dis_ARM64_data_processing_register()
3201 assign(condT, unop(Iop_64to1, mk_arm64g_calculate_condition(cond))); in dis_ARM64_data_processing_register()
3208 assign(argL, getIReg64orZR(nn)); in dis_ARM64_data_processing_register()
3209 assign(argR, mkU64(imm5)); in dis_ARM64_data_processing_register()
3211 assign(argL, getIReg32orZR(nn)); in dis_ARM64_data_processing_register()
3212 assign(argR, mkU32(imm5)); in dis_ARM64_data_processing_register()
3240 assign(condT, unop(Iop_64to1, mk_arm64g_calculate_condition(cond))); in dis_ARM64_data_processing_register()
3247 assign(argL, getIReg64orZR(nn)); in dis_ARM64_data_processing_register()
3248 assign(argR, getIReg64orZR(mm)); in dis_ARM64_data_processing_register()
3250 assign(argL, getIReg32orZR(nn)); in dis_ARM64_data_processing_register()
3251 assign(argR, getIReg32orZR(mm)); in dis_ARM64_data_processing_register()
3310 assign(src, getIReg64orZR(nn)); in dis_ARM64_data_processing_register()
3315 assign(src, getIReg64orZR(nn)); in dis_ARM64_data_processing_register()
3319 assign(src, binop(Iop_Shl64, getIReg64orZR(nn), mkU8(32))); in dis_ARM64_data_processing_register()
3346 assign(src, getIReg64orZR(nn)); in dis_ARM64_data_processing_register()
3348 assign(src, binop(Iop_Shl64, in dis_ARM64_data_processing_register()
3354 assign(srcZ, in dis_ARM64_data_processing_register()
3359 assign(srcZ, mkexpr(src)); in dis_ARM64_data_processing_register()
3363 assign(dst, IRExpr_ITE(binop(Iop_CmpEQ64, mkexpr(srcZ), mkU64(0)), in dis_ARM64_data_processing_register()
3368 assign(dst, IRExpr_ITE(binop(Iop_CmpEQ64, mkexpr(srcZ), mkU64(0)), in dis_ARM64_data_processing_register()
3397 assign(srcL, getIRegOrZR(is64, nn)); in dis_ARM64_data_processing_register()
3398 assign(srcR, binop(Iop_And64, getIReg64orZR(mm), in dis_ARM64_data_processing_register()
3408 assign(res, binop(iop, mkexpr(srcL), in dis_ARM64_data_processing_register()
3416 assign( in dis_ARM64_data_processing_register()
3487 assign(wN, getIReg32orZR(nn)); in dis_ARM64_data_processing_register()
3488 assign(wM, getIReg32orZR(mm)); in dis_ARM64_data_processing_register()
3489 assign(xA, getIReg64orZR(aa)); in dis_ARM64_data_processing_register()
3490 assign(muld, binop(isU ? Iop_MullU32 : Iop_MullS32, in dis_ARM64_data_processing_register()
3492 assign(res, binop(isAdd ? Iop_Add64 : Iop_Sub64, in dis_ARM64_data_processing_register()
3567 assign(*i0, mkexpr(u0)); in math_INTERLEAVE1_128()
3582 assign(*i0, binop(Iop_InterleaveLO64x2, mkexpr(u1), mkexpr(u0))); in math_INTERLEAVE2_128()
3583 assign(*i1, binop(Iop_InterleaveHI64x2, mkexpr(u1), mkexpr(u0))); in math_INTERLEAVE2_128()
3590 assign(*i0, binop(Iop_InterleaveLO32x4, mkexpr(u1), mkexpr(u0))); in math_INTERLEAVE2_128()
3591 assign(*i1, binop(Iop_InterleaveHI32x4, mkexpr(u1), mkexpr(u0))); in math_INTERLEAVE2_128()
3599 assign(*i0, binop(Iop_InterleaveLO16x8, mkexpr(u1), mkexpr(u0))); in math_INTERLEAVE2_128()
3600 assign(*i1, binop(Iop_InterleaveHI16x8, mkexpr(u1), mkexpr(u0))); in math_INTERLEAVE2_128()
3608 assign(*i0, binop(Iop_InterleaveLO8x16, mkexpr(u1), mkexpr(u0))); in math_INTERLEAVE2_128()
3609 assign(*i1, binop(Iop_InterleaveHI8x16, mkexpr(u1), mkexpr(u0))); in math_INTERLEAVE2_128()
3628 assign(*i2, IHI64x2( EX(u2), EX(u1) )); in math_INTERLEAVE3_128()
3629 assign(*i1, ILO64x2( ROR(EX(u0),8), EX(u2) )); in math_INTERLEAVE3_128()
3630 assign(*i0, ILO64x2( EX(u1), EX(u0) )); in math_INTERLEAVE3_128()
3645 assign(c1100, mkV128(0xFF00)); in math_INTERLEAVE3_128()
3646 assign(c0011, mkV128(0x00FF)); in math_INTERLEAVE3_128()
3647 assign(c0110, mkV128(0x0FF0)); in math_INTERLEAVE3_128()
3652 assign(*i2, OR2( AND( IHI32x4(EX(p2), ROL(EX(p2),8)), EX(c1100) ), in math_INTERLEAVE3_128()
3654 assign(*i1, OR3( SHL(EX(p2),12), in math_INTERLEAVE3_128()
3657 assign(*i0, OR2( AND( ILO32x4(EX(p0),ROL(EX(p1),4)), EX(c1100) ), in math_INTERLEAVE3_128()
3682 assign(c1000, mkV128(0xF000)); in math_INTERLEAVE3_128()
3683 assign(c0100, mkV128(0x0F00)); in math_INTERLEAVE3_128()
3684 assign(c0010, mkV128(0x00F0)); in math_INTERLEAVE3_128()
3685 assign(c0001, mkV128(0x000F)); in math_INTERLEAVE3_128()
3690 assign(*i2, in math_INTERLEAVE3_128()
3696 assign(*i1, in math_INTERLEAVE3_128()
3702 assign(*i0, in math_INTERLEAVE3_128()
3735 assign(t_##_tempName, \ in math_INTERLEAVE3_128()
3748 assign(i2_FEDC, ILO16x8(EX(t_CfBf), EX(t_AfCe))); in math_INTERLEAVE3_128()
3752 assign(i2_BA98, ILO16x8(EX(t_BeAe), EX(t_CdBd))); in math_INTERLEAVE3_128()
3753 assign(i2_hi64, ILO32x4(EX(i2_FEDC), EX(i2_BA98))); in math_INTERLEAVE3_128()
3757 assign(i2_7654, ILO16x8(EX(t_AdCc), EX(t_BcAc))); in math_INTERLEAVE3_128()
3761 assign(i2_3210, ILO16x8(EX(t_CbBb), EX(t_AbCa))); in math_INTERLEAVE3_128()
3762 assign(i2_lo64, ILO32x4(EX(i2_7654), EX(i2_3210))); in math_INTERLEAVE3_128()
3763 assign(*i2, ILO64x2(EX(i2_hi64), EX(i2_lo64))); in math_INTERLEAVE3_128()
3767 assign(i1_FEDC, ILO16x8(EX(t_BaAa), EX(t_C9B9))); in math_INTERLEAVE3_128()
3771 assign(i1_BA98, ILO16x8(EX(t_A9C8), EX(t_B8A8))); in math_INTERLEAVE3_128()
3772 assign(i1_hi64, ILO32x4(EX(i1_FEDC), EX(i1_BA98))); in math_INTERLEAVE3_128()
3776 assign(i1_7654, ILO16x8(EX(t_C7B7), EX(t_A7C6))); in math_INTERLEAVE3_128()
3780 assign(i1_3210, ILO16x8(EX(t_B6A6), EX(t_C5B5))); in math_INTERLEAVE3_128()
3781 assign(i1_lo64, ILO32x4(EX(i1_7654), EX(i1_3210))); in math_INTERLEAVE3_128()
3782 assign(*i1, ILO64x2(EX(i1_hi64), EX(i1_lo64))); in math_INTERLEAVE3_128()
3786 assign(i0_FEDC, ILO16x8(EX(t_A5C4), EX(t_B4A4))); in math_INTERLEAVE3_128()
3790 assign(i0_BA98, ILO16x8(EX(t_C3B3), EX(t_A3C2))); in math_INTERLEAVE3_128()
3791 assign(i0_hi64, ILO32x4(EX(i0_FEDC), EX(i0_BA98))); in math_INTERLEAVE3_128()
3795 assign(i0_7654, ILO16x8(EX(t_B2A2), EX(t_C1B1))); in math_INTERLEAVE3_128()
3799 assign(i0_3210, ILO16x8(EX(t_A1C0), EX(t_B0A0))); in math_INTERLEAVE3_128()
3800 assign(i0_lo64, ILO32x4(EX(i0_7654), EX(i0_3210))); in math_INTERLEAVE3_128()
3801 assign(*i0, ILO64x2(EX(i0_hi64), EX(i0_lo64))); in math_INTERLEAVE3_128()
3821 assign(*i0, ILO64x2(EX(u1), EX(u0))); in math_INTERLEAVE4_128()
3822 assign(*i1, ILO64x2(EX(u3), EX(u2))); in math_INTERLEAVE4_128()
3823 assign(*i2, IHI64x2(EX(u1), EX(u0))); in math_INTERLEAVE4_128()
3824 assign(*i3, IHI64x2(EX(u3), EX(u2))); in math_INTERLEAVE4_128()
3836 assign(*i0, CEV32x4(EX(p1), EX(p0))); in math_INTERLEAVE4_128()
3837 assign(*i1, COD32x4(EX(p1), EX(p0))); in math_INTERLEAVE4_128()
3838 assign(*i2, CEV32x4(EX(p3), EX(p2))); in math_INTERLEAVE4_128()
3839 assign(*i3, COD32x4(EX(p3), EX(p2))); in math_INTERLEAVE4_128()
3851 assign(*i0, COD16x8(EX(p0), SHL(EX(p0), 2))); in math_INTERLEAVE4_128()
3852 assign(*i1, COD16x8(EX(p1), SHL(EX(p1), 2))); in math_INTERLEAVE4_128()
3853 assign(*i2, COD16x8(EX(p2), SHL(EX(p2), 2))); in math_INTERLEAVE4_128()
3854 assign(*i3, COD16x8(EX(p3), SHL(EX(p3), 2))); in math_INTERLEAVE4_128()
3866 assign(*i0, IHI32x4(COD8x16(EX(p0),EX(p0)), CEV8x16(EX(p0),EX(p0)))); in math_INTERLEAVE4_128()
3867 assign(*i1, IHI32x4(COD8x16(EX(p1),EX(p1)), CEV8x16(EX(p1),EX(p1)))); in math_INTERLEAVE4_128()
3868 assign(*i2, IHI32x4(COD8x16(EX(p2),EX(p2)), CEV8x16(EX(p2),EX(p2)))); in math_INTERLEAVE4_128()
3869 assign(*i3, IHI32x4(COD8x16(EX(p3),EX(p3)), CEV8x16(EX(p3),EX(p3)))); in math_INTERLEAVE4_128()
3882 assign(*u0, mkexpr(i0)); in math_DEINTERLEAVE1_128()
3897 assign(*u0, binop(Iop_InterleaveLO64x2, mkexpr(i1), mkexpr(i0))); in math_DEINTERLEAVE2_128()
3898 assign(*u1, binop(Iop_InterleaveHI64x2, mkexpr(i1), mkexpr(i0))); in math_DEINTERLEAVE2_128()
3905 assign(*u0, binop(Iop_CatEvenLanes32x4, mkexpr(i1), mkexpr(i0))); in math_DEINTERLEAVE2_128()
3906 assign(*u1, binop(Iop_CatOddLanes32x4, mkexpr(i1), mkexpr(i0))); in math_DEINTERLEAVE2_128()
3914 assign(*u0, binop(Iop_CatEvenLanes16x8, mkexpr(i1), mkexpr(i0))); in math_DEINTERLEAVE2_128()
3915 assign(*u1, binop(Iop_CatOddLanes16x8, mkexpr(i1), mkexpr(i0))); in math_DEINTERLEAVE2_128()
3923 assign(*u0, binop(Iop_CatEvenLanes8x16, mkexpr(i1), mkexpr(i0))); in math_DEINTERLEAVE2_128()
3924 assign(*u1, binop(Iop_CatOddLanes8x16, mkexpr(i1), mkexpr(i0))); in math_DEINTERLEAVE2_128()
3943 assign(*u2, ILO64x2( ROL(EX(i2),8), EX(i1) )); in math_DEINTERLEAVE3_128()
3944 assign(*u1, ILO64x2( EX(i2), ROL(EX(i0),8) )); in math_DEINTERLEAVE3_128()
3945 assign(*u0, ILO64x2( ROL(EX(i1),8), EX(i0) )); in math_DEINTERLEAVE3_128()
3962 assign(t_a1c0b0a0, EX(i0)); in math_DEINTERLEAVE3_128()
3963 assign(t_a2c1b1a1, SL(EX(i1),EX(i0),3*4)); in math_DEINTERLEAVE3_128()
3964 assign(t_a3c2b2a2, SL(EX(i2),EX(i1),2*4)); in math_DEINTERLEAVE3_128()
3965 assign(t_a0c3b3a3, SL(EX(i0),EX(i2),1*4)); in math_DEINTERLEAVE3_128()
3967 assign(p0, ILO32x4(EX(t_a2c1b1a1),EX(t_a1c0b0a0))); in math_DEINTERLEAVE3_128()
3968 assign(p1, ILO64x2(ILO32x4(EX(t_a0c3b3a3), EX(t_a3c2b2a2)), in math_DEINTERLEAVE3_128()
3970 assign(p2, ILO32x4(ROR(EX(t_a0c3b3a3),1*4), ROR(EX(t_a3c2b2a2),1*4))); in math_DEINTERLEAVE3_128()
4001 assign(s0, EX(i0)); in math_DEINTERLEAVE3_128()
4002 assign(s1, SL(EX(i1),EX(i0),6*2)); in math_DEINTERLEAVE3_128()
4003 assign(s2, SL(EX(i2),EX(i1),4*2)); in math_DEINTERLEAVE3_128()
4004 assign(s3, SL(EX(i0),EX(i2),2*2)); in math_DEINTERLEAVE3_128()
4010 assign(c00111111, mkV128(0x0FFF)); in math_DEINTERLEAVE3_128()
4011 assign(t0, AND( ILO16x8( ROR(EX(s0),3*2), EX(s0)), EX(c00111111))); in math_DEINTERLEAVE3_128()
4012 assign(t1, AND( ILO16x8( ROR(EX(s1),3*2), EX(s1)), EX(c00111111))); in math_DEINTERLEAVE3_128()
4013 assign(t2, AND( ILO16x8( ROR(EX(s2),3*2), EX(s2)), EX(c00111111))); in math_DEINTERLEAVE3_128()
4014 assign(t3, AND( ILO16x8( ROR(EX(s3),3*2), EX(s3)), EX(c00111111))); in math_DEINTERLEAVE3_128()
4016 assign(p0, OR2(EX(t0), SHL(EX(t1),6*2))); in math_DEINTERLEAVE3_128()
4017 assign(p1, OR2(SHL(EX(t2),4*2), SHR(EX(t1),2*2))); in math_DEINTERLEAVE3_128()
4018 assign(p2, OR2(SHL(EX(t3),2*2), SHR(EX(t2),4*2))); in math_DEINTERLEAVE3_128()
4060 assign(s0, SL(EX(i1),EX(i0), 0)); in math_DEINTERLEAVE3_128()
4061 assign(s1, SL(EX(i1),EX(i0), 6)); in math_DEINTERLEAVE3_128()
4062 assign(s2, SL(EX(i1),EX(i0),12)); in math_DEINTERLEAVE3_128()
4063 assign(s3, SL(EX(i2),EX(i1), 2)); in math_DEINTERLEAVE3_128()
4064 assign(s4, SL(EX(i2),EX(i1), 8)); in math_DEINTERLEAVE3_128()
4065 assign(s5, SL(EX(i2),EX(i1),14)); in math_DEINTERLEAVE3_128()
4066 assign(s6, SL(EX(i0),EX(i2), 4)); in math_DEINTERLEAVE3_128()
4067 assign(s7, SL(EX(i0),EX(i2),10)); in math_DEINTERLEAVE3_128()
4077 assign(cMASK, mkV128(0x003F)); in math_DEINTERLEAVE3_128()
4078 assign(t0, AND( ILO8x16( ROR(EX(s0),3), EX(s0)), EX(cMASK))); in math_DEINTERLEAVE3_128()
4079 assign(t1, AND( ILO8x16( ROR(EX(s1),3), EX(s1)), EX(cMASK))); in math_DEINTERLEAVE3_128()
4080 assign(t2, AND( ILO8x16( ROR(EX(s2),3), EX(s2)), EX(cMASK))); in math_DEINTERLEAVE3_128()
4081 assign(t3, AND( ILO8x16( ROR(EX(s3),3), EX(s3)), EX(cMASK))); in math_DEINTERLEAVE3_128()
4082 assign(t4, AND( ILO8x16( ROR(EX(s4),3), EX(s4)), EX(cMASK))); in math_DEINTERLEAVE3_128()
4083 assign(t5, AND( ILO8x16( ROR(EX(s5),3), EX(s5)), EX(cMASK))); in math_DEINTERLEAVE3_128()
4084 assign(t6, AND( ILO8x16( ROR(EX(s6),3), EX(s6)), EX(cMASK))); in math_DEINTERLEAVE3_128()
4085 assign(t7, AND( ILO8x16( ROR(EX(s7),3), EX(s7)), EX(cMASK))); in math_DEINTERLEAVE3_128()
4087 assign(p0, OR3( SHL(EX(t2),12), SHL(EX(t1),6), EX(t0) )); in math_DEINTERLEAVE3_128()
4088 assign(p1, OR4( SHL(EX(t5),14), SHL(EX(t4),8), in math_DEINTERLEAVE3_128()
4090 assign(p2, OR3( SHL(EX(t7),10), SHL(EX(t6),4), SHR(EX(t5),2) )); in math_DEINTERLEAVE3_128()
4111 assign(*u0, ILO64x2(EX(i2), EX(i0))); in math_DEINTERLEAVE4_128()
4112 assign(*u1, IHI64x2(EX(i2), EX(i0))); in math_DEINTERLEAVE4_128()
4113 assign(*u2, ILO64x2(EX(i3), EX(i1))); in math_DEINTERLEAVE4_128()
4114 assign(*u3, IHI64x2(EX(i3), EX(i1))); in math_DEINTERLEAVE4_128()
4123 assign(p0, ILO32x4(EX(i1), EX(i0))); in math_DEINTERLEAVE4_128()
4124 assign(p1, IHI32x4(EX(i1), EX(i0))); in math_DEINTERLEAVE4_128()
4125 assign(p2, ILO32x4(EX(i3), EX(i2))); in math_DEINTERLEAVE4_128()
4126 assign(p3, IHI32x4(EX(i3), EX(i2))); in math_DEINTERLEAVE4_128()
4138 assign(p0, IHI16x8(EX(i0), SHL(EX(i0), 8))); in math_DEINTERLEAVE4_128()
4139 assign(p1, IHI16x8(EX(i1), SHL(EX(i1), 8))); in math_DEINTERLEAVE4_128()
4140 assign(p2, IHI16x8(EX(i2), SHL(EX(i2), 8))); in math_DEINTERLEAVE4_128()
4141 assign(p3, IHI16x8(EX(i3), SHL(EX(i3), 8))); in math_DEINTERLEAVE4_128()
4153 assign(p0, IHI64x2( IHI8x16(EX(i0),ROL(EX(i0),4)), in math_DEINTERLEAVE4_128()
4155 assign(p1, IHI64x2( IHI8x16(EX(i1),ROL(EX(i1),4)), in math_DEINTERLEAVE4_128()
4157 assign(p2, IHI64x2( IHI8x16(EX(i2),ROL(EX(i2),4)), in math_DEINTERLEAVE4_128()
4159 assign(p3, IHI64x2( IHI8x16(EX(i3),ROL(EX(i3),4)), in math_DEINTERLEAVE4_128()
4203 assign(*i0, mkexpr(u0)); in math_INTERLEAVE1_64()
4214 assign(*i0, EX(u0)); in math_INTERLEAVE2_64()
4215 assign(*i1, EX(u1)); in math_INTERLEAVE2_64()
4225 assign(du0, binop(doubler, EX(u0), EX(u0))); in math_INTERLEAVE2_64()
4226 assign(du1, binop(doubler, EX(u1), EX(u1))); in math_INTERLEAVE2_64()
4230 assign(*i0, binop(halver, EX(di0), EX(di0))); in math_INTERLEAVE2_64()
4231 assign(*i1, binop(halver, EX(di1), EX(di1))); in math_INTERLEAVE2_64()
4244 assign(*i0, EX(u0)); in math_INTERLEAVE3_64()
4245 assign(*i1, EX(u1)); in math_INTERLEAVE3_64()
4246 assign(*i2, EX(u2)); in math_INTERLEAVE3_64()
4257 assign(du0, binop(doubler, EX(u0), EX(u0))); in math_INTERLEAVE3_64()
4258 assign(du1, binop(doubler, EX(u1), EX(u1))); in math_INTERLEAVE3_64()
4259 assign(du2, binop(doubler, EX(u2), EX(u2))); in math_INTERLEAVE3_64()
4264 assign(*i0, binop(halver, EX(di0), EX(di0))); in math_INTERLEAVE3_64()
4265 assign(*i1, binop(halver, EX(di1), EX(di1))); in math_INTERLEAVE3_64()
4266 assign(*i2, binop(halver, EX(di2), EX(di2))); in math_INTERLEAVE3_64()
4279 assign(*i0, EX(u0)); in math_INTERLEAVE4_64()
4280 assign(*i1, EX(u1)); in math_INTERLEAVE4_64()
4281 assign(*i2, EX(u2)); in math_INTERLEAVE4_64()
4282 assign(*i3, EX(u3)); in math_INTERLEAVE4_64()
4294 assign(du0, binop(doubler, EX(u0), EX(u0))); in math_INTERLEAVE4_64()
4295 assign(du1, binop(doubler, EX(u1), EX(u1))); in math_INTERLEAVE4_64()
4296 assign(du2, binop(doubler, EX(u2), EX(u2))); in math_INTERLEAVE4_64()
4297 assign(du3, binop(doubler, EX(u3), EX(u3))); in math_INTERLEAVE4_64()
4304 assign(*i0, binop(halver, EX(di0), EX(di0))); in math_INTERLEAVE4_64()
4305 assign(*i1, binop(halver, EX(di1), EX(di1))); in math_INTERLEAVE4_64()
4306 assign(*i2, binop(halver, EX(di2), EX(di2))); in math_INTERLEAVE4_64()
4307 assign(*i3, binop(halver, EX(di3), EX(di3))); in math_INTERLEAVE4_64()
4316 assign(*u0, mkexpr(i0)); in math_DEINTERLEAVE1_64()
4327 assign(*u0, EX(i0)); in math_DEINTERLEAVE2_64()
4328 assign(*u1, EX(i1)); in math_DEINTERLEAVE2_64()
4338 assign(di0, binop(doubler, EX(i0), EX(i0))); in math_DEINTERLEAVE2_64()
4339 assign(di1, binop(doubler, EX(i1), EX(i1))); in math_DEINTERLEAVE2_64()
4344 assign(*u0, binop(halver, EX(du0), EX(du0))); in math_DEINTERLEAVE2_64()
4345 assign(*u1, binop(halver, EX(du1), EX(du1))); in math_DEINTERLEAVE2_64()
4358 assign(*u0, EX(i0)); in math_DEINTERLEAVE3_64()
4359 assign(*u1, EX(i1)); in math_DEINTERLEAVE3_64()
4360 assign(*u2, EX(i2)); in math_DEINTERLEAVE3_64()
4371 assign(di0, binop(doubler, EX(i0), EX(i0))); in math_DEINTERLEAVE3_64()
4372 assign(di1, binop(doubler, EX(i1), EX(i1))); in math_DEINTERLEAVE3_64()
4373 assign(di2, binop(doubler, EX(i2), EX(i2))); in math_DEINTERLEAVE3_64()
4378 assign(*u0, binop(halver, EX(du0), EX(du0))); in math_DEINTERLEAVE3_64()
4379 assign(*u1, binop(halver, EX(du1), EX(du1))); in math_DEINTERLEAVE3_64()
4380 assign(*u2, binop(halver, EX(du2), EX(du2))); in math_DEINTERLEAVE3_64()
4393 assign(*u0, EX(i0)); in math_DEINTERLEAVE4_64()
4394 assign(*u1, EX(i1)); in math_DEINTERLEAVE4_64()
4395 assign(*u2, EX(i2)); in math_DEINTERLEAVE4_64()
4396 assign(*u3, EX(i3)); in math_DEINTERLEAVE4_64()
4408 assign(di0, binop(doubler, EX(i0), EX(i0))); in math_DEINTERLEAVE4_64()
4409 assign(di1, binop(doubler, EX(i1), EX(i1))); in math_DEINTERLEAVE4_64()
4410 assign(di2, binop(doubler, EX(i2), EX(i2))); in math_DEINTERLEAVE4_64()
4411 assign(di3, binop(doubler, EX(i3), EX(i3))); in math_DEINTERLEAVE4_64()
4418 assign(*u0, binop(halver, EX(du0), EX(du0))); in math_DEINTERLEAVE4_64()
4419 assign(*u1, binop(halver, EX(du1), EX(du1))); in math_DEINTERLEAVE4_64()
4420 assign(*u2, binop(halver, EX(du2), EX(du2))); in math_DEINTERLEAVE4_64()
4421 assign(*u3, binop(halver, EX(du3), EX(du3))); in math_DEINTERLEAVE4_64()
4561 assign(res, binop(Iop_Add64, getIReg64orSP(nn), rhs)); in gen_indexed_EA()
4602 assign(res, loadLE(Ity_I64,addrE)); in gen_zwidening_load()
4605 assign(res, unop(Iop_32Uto64, loadLE(Ity_I32,addrE))); in gen_zwidening_load()
4608 assign(res, unop(Iop_16Uto64, loadLE(Ity_I16,addrE))); in gen_zwidening_load()
4611 assign(res, unop(Iop_8Uto64, loadLE(Ity_I8,addrE))); in gen_zwidening_load()
4664 assign(ta, binop(Iop_Add64, getIReg64orSP(nn), mkU64(offs))); in dis_ARM64_load_store()
4723 assign(tRN, getIReg64orSP(nn)); in dis_ARM64_load_store()
4726 assign(tEA, binop(Iop_Add64, mkexpr(tRN), mkU64(simm9))); in dis_ARM64_load_store()
4732 assign(tTA, mkexpr(tRN)); assign(tWA, mkexpr(tEA)); break; in dis_ARM64_load_store()
4734 assign(tTA, mkexpr(tEA)); assign(tWA, mkexpr(tEA)); break; in dis_ARM64_load_store()
4736 assign(tTA, mkexpr(tEA)); /* tWA is unused */ break; in dis_ARM64_load_store()
4827 assign(tRN, getIReg64orSP(rN)); in dis_ARM64_load_store()
4830 assign(tEA, binop(Iop_Add64, mkexpr(tRN), mkU64(simm7))); in dis_ARM64_load_store()
4836 assign(tTA, mkexpr(tRN)); assign(tWA, mkexpr(tEA)); break; in dis_ARM64_load_store()
4838 assign(tTA, mkexpr(tEA)); assign(tWA, mkexpr(tEA)); break; in dis_ARM64_load_store()
4840 assign(tTA, mkexpr(tEA)); /* tWA is unused */ break; in dis_ARM64_load_store()
5103 assign(tRN, getIReg64orSP(nn)); in dis_ARM64_load_store()
5104 assign(tEA, binop(Iop_Add64, mkexpr(tRN), mkU64(simm9))); in dis_ARM64_load_store()
5179 assign(tRN, getIReg64orSP(nn)); in dis_ARM64_load_store()
5180 assign(tEA, binop(Iop_Add64, mkexpr(tRN), mkU64(simm9))); in dis_ARM64_load_store()
5264 assign(tRN, getIReg64orSP(nn)); in dis_ARM64_load_store()
5267 assign(tEA, binop(Iop_Add64, mkexpr(tRN), mkU64(simm7))); in dis_ARM64_load_store()
5273 assign(tTA, mkexpr(tRN)); assign(tWA, mkexpr(tEA)); break; in dis_ARM64_load_store()
5275 assign(tTA, mkexpr(tEA)); assign(tWA, mkexpr(tEA)); break; in dis_ARM64_load_store()
5278 assign(tTA, mkexpr(tEA)); /* tWA is unused */ break; in dis_ARM64_load_store()
5515 assign(tEA, binop(Iop_Add64, getIReg64orSP(nn), mkU64(pimm12))); in dis_ARM64_load_store()
5564 assign(tRN, getIReg64orSP(nn)); in dis_ARM64_load_store()
5565 assign(tEA, binop(Iop_Add64, mkexpr(tRN), mkU64(simm9))); in dis_ARM64_load_store()
5607 assign(tEA, binop(Iop_Add64, getIReg64orSP(nn), mkU64(simm9))); in dis_ARM64_load_store()
5697 assign(tTA, getIReg64orSP(nn)); in dis_ARM64_load_store()
5702 assign(tWB, binop(Iop_Add64, in dis_ARM64_load_store()
5723 case 4: assign(u3, getQReg128((tt+3) % 32)); /* fallthru */ in dis_ARM64_load_store()
5724 case 3: assign(u2, getQReg128((tt+2) % 32)); /* fallthru */ in dis_ARM64_load_store()
5725 case 2: assign(u1, getQReg128((tt+1) % 32)); /* fallthru */ in dis_ARM64_load_store()
5726 case 1: assign(u0, getQReg128((tt+0) % 32)); break; in dis_ARM64_load_store()
5773 assign(i3, MAYBE_WIDEN_FROM_64( in dis_ARM64_load_store()
5779 assign(i2, MAYBE_WIDEN_FROM_64( in dis_ARM64_load_store()
5785 assign(i1, MAYBE_WIDEN_FROM_64( in dis_ARM64_load_store()
5791 assign(i0, MAYBE_WIDEN_FROM_64( in dis_ARM64_load_store()
5908 assign(tTA, getIReg64orSP(nn)); in dis_ARM64_load_store()
5913 assign(tWB, binop(Iop_Add64, in dis_ARM64_load_store()
5934 case 4: assign(u3, getQReg128((tt+3) % 32)); /* fallthru */ in dis_ARM64_load_store()
5935 case 3: assign(u2, getQReg128((tt+2) % 32)); /* fallthru */ in dis_ARM64_load_store()
5936 case 2: assign(u1, getQReg128((tt+1) % 32)); in dis_ARM64_load_store()
5937 assign(u0, getQReg128((tt+0) % 32)); break; in dis_ARM64_load_store()
5968 assign(u3, MAYBE_WIDEN_FROM_64( in dis_ARM64_load_store()
5974 assign(u2, MAYBE_WIDEN_FROM_64( in dis_ARM64_load_store()
5980 assign(u1, MAYBE_WIDEN_FROM_64( in dis_ARM64_load_store()
5984 assign(u0, MAYBE_WIDEN_FROM_64( in dis_ARM64_load_store()
6076 assign(tTA, getIReg64orSP(nn)); in dis_ARM64_load_store()
6081 assign(tWB, binop(Iop_Add64, in dis_ARM64_load_store()
6097 assign(e3, loadLE(ty, binop(Iop_Add64, mkexpr(tTA), in dis_ARM64_load_store()
6104 assign(e2, loadLE(ty, binop(Iop_Add64, mkexpr(tTA), in dis_ARM64_load_store()
6111 assign(e1, loadLE(ty, binop(Iop_Add64, mkexpr(tTA), in dis_ARM64_load_store()
6118 assign(e0, loadLE(ty, binop(Iop_Add64, mkexpr(tTA), in dis_ARM64_load_store()
6222 assign(tTA, getIReg64orSP(nn)); in dis_ARM64_load_store()
6227 assign(tWB, binop(Iop_Add64, in dis_ARM64_load_store()
6326 assign(ea, getIReg64orSP(nn)); in dis_ARM64_load_store()
6378 assign(ea, getIReg64orSP(nn)); in dis_ARM64_load_store()
6383 assign(res, loadLE(ty, mkexpr(ea))); in dis_ARM64_load_store()
6409 assign(ea, binop(Iop_Add64, getIReg64orSP(nn), mkU64(imm12 * 8))); in dis_ARM64_load_store()
6508 assign(dst, getIReg64orZR(nn)); in dis_ARM64_branch_etc()
6651 assign(qc64, binop(Iop_And64, in dis_ARM64_branch_etc()
6661 assign(qcV128, IRExpr_Get( OFFB_QCFLAG, Ity_V128 )); in dis_ARM64_branch_etc()
6663 assign(qc64, binop(Iop_Or64, unop(Iop_V128HIto64, mkexpr(qcV128)), in dis_ARM64_branch_etc()
6687 assign(t, binop(Iop_And64, getIReg64orZR(tt), mkU64(0xF0000000ULL))); in dis_ARM64_branch_etc()
6692 assign(res, mk_arm64g_calculate_flags_nzcv()); in dis_ARM64_branch_etc()
6771 assign( addr, binop( Iop_And64, in dis_ARM64_branch_etc()
6802 assign( addr, binop( Iop_And64, in dis_ARM64_branch_etc()
7189 assign(x76547654, mk_CatOddLanes64x2 (x76543210, x76543210)); in math_FOLDV()
7190 assign(x32103210, mk_CatEvenLanes64x2(x76543210, x76543210)); in math_FOLDV()
7195 assign(x76767676, mk_CatOddLanes32x4 (x76547654, x76547654)); in math_FOLDV()
7196 assign(x54545454, mk_CatEvenLanes32x4(x76547654, x76547654)); in math_FOLDV()
7197 assign(x32323232, mk_CatOddLanes32x4 (x32103210, x32103210)); in math_FOLDV()
7198 assign(x10101010, mk_CatEvenLanes32x4(x32103210, x32103210)); in math_FOLDV()
7207 assign(x77777777, mk_CatOddLanes16x8 (x76767676, x76767676)); in math_FOLDV()
7208 assign(x66666666, mk_CatEvenLanes16x8(x76767676, x76767676)); in math_FOLDV()
7209 assign(x55555555, mk_CatOddLanes16x8 (x54545454, x54545454)); in math_FOLDV()
7210 assign(x44444444, mk_CatEvenLanes16x8(x54545454, x54545454)); in math_FOLDV()
7211 assign(x33333333, mk_CatOddLanes16x8 (x32323232, x32323232)); in math_FOLDV()
7212 assign(x22222222, mk_CatEvenLanes16x8(x32323232, x32323232)); in math_FOLDV()
7213 assign(x11111111, mk_CatOddLanes16x8 (x10101010, x10101010)); in math_FOLDV()
7214 assign(x00000000, mk_CatEvenLanes16x8(x10101010, x10101010)); in math_FOLDV()
7232 assign(xAllF, mk_CatOddLanes8x16 (x77777777, x77777777)); in math_FOLDV()
7233 assign(xAllE, mk_CatEvenLanes8x16(x77777777, x77777777)); in math_FOLDV()
7234 assign(xAllD, mk_CatOddLanes8x16 (x66666666, x66666666)); in math_FOLDV()
7235 assign(xAllC, mk_CatEvenLanes8x16(x66666666, x66666666)); in math_FOLDV()
7236 assign(xAllB, mk_CatOddLanes8x16 (x55555555, x55555555)); in math_FOLDV()
7237 assign(xAllA, mk_CatEvenLanes8x16(x55555555, x55555555)); in math_FOLDV()
7238 assign(xAll9, mk_CatOddLanes8x16 (x44444444, x44444444)); in math_FOLDV()
7239 assign(xAll8, mk_CatEvenLanes8x16(x44444444, x44444444)); in math_FOLDV()
7240 assign(xAll7, mk_CatOddLanes8x16 (x33333333, x33333333)); in math_FOLDV()
7241 assign(xAll6, mk_CatEvenLanes8x16(x33333333, x33333333)); in math_FOLDV()
7242 assign(xAll5, mk_CatOddLanes8x16 (x22222222, x22222222)); in math_FOLDV()
7243 assign(xAll4, mk_CatEvenLanes8x16(x22222222, x22222222)); in math_FOLDV()
7244 assign(xAll3, mk_CatOddLanes8x16 (x11111111, x11111111)); in math_FOLDV()
7245 assign(xAll2, mk_CatEvenLanes8x16(x11111111, x11111111)); in math_FOLDV()
7246 assign(xAll1, mk_CatOddLanes8x16 (x00000000, x00000000)); in math_FOLDV()
7247 assign(xAll0, mk_CatEvenLanes8x16(x00000000, x00000000)); in math_FOLDV()
7256 assign(maxFE, binop(op, mkexpr(xAllF), mkexpr(xAllE))); in math_FOLDV()
7257 assign(maxDC, binop(op, mkexpr(xAllD), mkexpr(xAllC))); in math_FOLDV()
7258 assign(maxBA, binop(op, mkexpr(xAllB), mkexpr(xAllA))); in math_FOLDV()
7259 assign(max98, binop(op, mkexpr(xAll9), mkexpr(xAll8))); in math_FOLDV()
7260 assign(max76, binop(op, mkexpr(xAll7), mkexpr(xAll6))); in math_FOLDV()
7261 assign(max54, binop(op, mkexpr(xAll5), mkexpr(xAll4))); in math_FOLDV()
7262 assign(max32, binop(op, mkexpr(xAll3), mkexpr(xAll2))); in math_FOLDV()
7263 assign(max10, binop(op, mkexpr(xAll1), mkexpr(xAll0))); in math_FOLDV()
7268 assign(maxFEDC, binop(op, mkexpr(maxFE), mkexpr(maxDC))); in math_FOLDV()
7269 assign(maxBA98, binop(op, mkexpr(maxBA), mkexpr(max98))); in math_FOLDV()
7270 assign(max7654, binop(op, mkexpr(max76), mkexpr(max54))); in math_FOLDV()
7271 assign(max3210, binop(op, mkexpr(max32), mkexpr(max10))); in math_FOLDV()
7274 assign(maxFEDCBA98, binop(op, mkexpr(maxFEDC), mkexpr(maxBA98))); in math_FOLDV()
7275 assign(max76543210, binop(op, mkexpr(max7654), mkexpr(max3210))); in math_FOLDV()
7277 assign(maxAllLanes, binop(op, mkexpr(maxFEDCBA98), in math_FOLDV()
7280 assign(res, unop(Iop_ZeroHI120ofV128, mkexpr(maxAllLanes))); in math_FOLDV()
7288 assign(x76547654, mk_CatOddLanes64x2 (x76543210, x76543210)); in math_FOLDV()
7289 assign(x32103210, mk_CatEvenLanes64x2(x76543210, x76543210)); in math_FOLDV()
7294 assign(x76767676, mk_CatOddLanes32x4 (x76547654, x76547654)); in math_FOLDV()
7295 assign(x54545454, mk_CatEvenLanes32x4(x76547654, x76547654)); in math_FOLDV()
7296 assign(x32323232, mk_CatOddLanes32x4 (x32103210, x32103210)); in math_FOLDV()
7297 assign(x10101010, mk_CatEvenLanes32x4(x32103210, x32103210)); in math_FOLDV()
7306 assign(x77777777, mk_CatOddLanes16x8 (x76767676, x76767676)); in math_FOLDV()
7307 assign(x66666666, mk_CatEvenLanes16x8(x76767676, x76767676)); in math_FOLDV()
7308 assign(x55555555, mk_CatOddLanes16x8 (x54545454, x54545454)); in math_FOLDV()
7309 assign(x44444444, mk_CatEvenLanes16x8(x54545454, x54545454)); in math_FOLDV()
7310 assign(x33333333, mk_CatOddLanes16x8 (x32323232, x32323232)); in math_FOLDV()
7311 assign(x22222222, mk_CatEvenLanes16x8(x32323232, x32323232)); in math_FOLDV()
7312 assign(x11111111, mk_CatOddLanes16x8 (x10101010, x10101010)); in math_FOLDV()
7313 assign(x00000000, mk_CatEvenLanes16x8(x10101010, x10101010)); in math_FOLDV()
7318 assign(max76, binop(op, mkexpr(x77777777), mkexpr(x66666666))); in math_FOLDV()
7319 assign(max54, binop(op, mkexpr(x55555555), mkexpr(x44444444))); in math_FOLDV()
7320 assign(max32, binop(op, mkexpr(x33333333), mkexpr(x22222222))); in math_FOLDV()
7321 assign(max10, binop(op, mkexpr(x11111111), mkexpr(x00000000))); in math_FOLDV()
7324 assign(max7654, binop(op, mkexpr(max76), mkexpr(max54))); in math_FOLDV()
7325 assign(max3210, binop(op, mkexpr(max32), mkexpr(max10))); in math_FOLDV()
7327 assign(max76543210, binop(op, mkexpr(max7654), mkexpr(max3210))); in math_FOLDV()
7329 assign(res, unop(Iop_ZeroHI112ofV128, mkexpr(max76543210))); in math_FOLDV()
7338 assign(x3232, mk_CatOddLanes64x2 (x3210, x3210)); in math_FOLDV()
7339 assign(x1010, mk_CatEvenLanes64x2(x3210, x3210)); in math_FOLDV()
7344 assign(x3333, mk_CatOddLanes32x4 (x3232, x3232)); in math_FOLDV()
7345 assign(x2222, mk_CatEvenLanes32x4(x3232, x3232)); in math_FOLDV()
7346 assign(x1111, mk_CatOddLanes32x4 (x1010, x1010)); in math_FOLDV()
7347 assign(x0000, mk_CatEvenLanes32x4(x1010, x1010)); in math_FOLDV()
7350 assign(max32, binop(op, mkexpr(x3333), mkexpr(x2222))); in math_FOLDV()
7351 assign(max10, binop(op, mkexpr(x1111), mkexpr(x0000))); in math_FOLDV()
7353 assign(max3210, binop(op, mkexpr(max32), mkexpr(max10))); in math_FOLDV()
7355 assign(res, unop(Iop_ZeroHI96ofV128, mkexpr(max3210))); in math_FOLDV()
7362 assign(x11, binop(Iop_InterleaveHI64x2, mkexpr(x10), mkexpr(x10))); in math_FOLDV()
7363 assign(x00, binop(Iop_InterleaveLO64x2, mkexpr(x10), mkexpr(x10))); in math_FOLDV()
7365 assign(max10, binop(op, mkexpr(x11), mkexpr(x00))); in math_FOLDV()
7367 assign(res, unop(Iop_ZeroHI64ofV128, mkexpr(max10))); in math_FOLDV()
7385 assign(half15, mkU64(0x0F0F0F0F0F0F0F0FULL)); in math_TBL_TBX()
7387 assign(half16, mkU64(0x1010101010101010ULL)); in math_TBL_TBX()
7391 assign(allZero, mkV128(0x0000)); in math_TBL_TBX()
7394 assign(all15, binop(Iop_64HLtoV128, mkexpr(half15), mkexpr(half15))); in math_TBL_TBX()
7397 assign(all16, binop(Iop_64HLtoV128, mkexpr(half16), mkexpr(half16))); in math_TBL_TBX()
7400 assign(all32, binop(Iop_Add8x16, mkexpr(all16), mkexpr(all16))); in math_TBL_TBX()
7403 assign(all48, binop(Iop_Add8x16, mkexpr(all16), mkexpr(all32))); in math_TBL_TBX()
7406 assign(all64, binop(Iop_Add8x16, mkexpr(all32), mkexpr(all32))); in math_TBL_TBX()
7415 assign(running_result, mkV128(0)); in math_TBL_TBX()
7421 assign(bias, in math_TBL_TBX()
7424 assign(biased_indices, in math_TBL_TBX()
7427 assign(valid_mask, in math_TBL_TBX()
7430 assign(safe_biased_indices, in math_TBL_TBX()
7433 assign(results_or_junk, in math_TBL_TBX()
7437 assign(results_or_zero, in math_TBL_TBX()
7441 assign(tmp, binop(Iop_OrV128, mkexpr(results_or_zero), in math_TBL_TBX()
7453 assign(overall_valid_mask, in math_TBL_TBX()
7456 assign(result, in math_TBL_TBX()
7478 assign(res, binop(opI64x2toV128, unop(slice, argL), in math_BINARY_WIDENING_V128()
7493 assign(argL, argLE); in math_ABD()
7494 assign(argR, argRE); in math_ABD()
7495 assign(msk, binop(isU ? mkVecCMPGTU(size) : mkVecCMPGTS(size), in math_ABD()
7497 assign(res, in math_ABD()
7518 assign(src, srcE); in math_WIDEN_LO_OR_HI_LANES()
7521 assign(res, in math_WIDEN_LO_OR_HI_LANES()
7530 assign(res, in math_WIDEN_LO_OR_HI_LANES()
7539 assign(res, in math_WIDEN_LO_OR_HI_LANES()
7574 assign(src, srcE); in math_WIDEN_EVEN_OR_ODD_LANES()
7576 assign(res, binop(opSxR, mkexpr(src), mkU8(amt))); in math_WIDEN_EVEN_OR_ODD_LANES()
7578 assign(res, binop(opSxR, binop(opSHL, mkexpr(src), mkU8(amt)), in math_WIDEN_EVEN_OR_ODD_LANES()
7591 assign(res, binop(mkVecCATEVENLANES(sizeNarrow), in math_NARROW_LANES()
7628 assign(res, src); in math_DUP_VEC_ELEM()
7634 assign(tmp, binop(ops[i], mkexpr(res), mkexpr(res))); in math_DUP_VEC_ELEM()
7703 rcS = newTemp(ty); assign(rcS, mkU16( (UShort)imm )); in math_VEC_DUP_IMM()
7708 rcS = newTemp(ty); assign(rcS, mkU32( (UInt)imm )); in math_VEC_DUP_IMM()
7712 rcS = newTemp(ty); assign(rcS, mkU64(imm)); break; in math_VEC_DUP_IMM()
7736 assign(t_zero_oldLO, unop(Iop_ZeroHI64ofV128, getQReg128(dd))); in putLO64andZUorPutHI64()
7738 assign(t_newHI_zero, binop(Iop_InterleaveLO64x2, mkexpr(new64), in putLO64andZUorPutHI64()
7741 assign(res, binop(Iop_OrV128, mkexpr(t_zero_oldLO), in putLO64andZUorPutHI64()
7760 assign(src, srcE); in math_SQABS()
7761 assign(mask, binop(mkVecCMPGTS(size), mkV128(0x0000), mkexpr(src))); in math_SQABS()
7762 assign(maskn, unop(Iop_NotV128, mkexpr(mask))); in math_SQABS()
7763 assign(nsub, binop(mkVecSUB(size), mkV128(0x0000), mkexpr(src))); in math_SQABS()
7764 assign(qsub, binop(mkVecQSUBS(size), mkV128(0x0000), mkexpr(src))); in math_SQABS()
7765 assign(*nabs, binop(Iop_OrV128, in math_SQABS()
7768 assign(*qabs, binop(Iop_OrV128, in math_SQABS()
7782 assign(src, srcE); in math_SQNEG()
7783 assign(*nneg, binop(mkVecSUB(size), mkV128(0x0000), mkexpr(src))); in math_SQNEG()
7784 assign(*qneg, binop(mkVecQSUBS(size), mkV128(0x0000), mkexpr(src))); in math_SQNEG()
7794 assign(t, unop(mkVecZEROHIxxOFV128(size), srcE)); in math_ZERO_ALL_EXCEPT_LOWEST_LANE()
7827 assign(*res, mas == 'm' ? mkexpr(mul) in math_MULL_ACC()
7863 assign(*sat1q, mkexpr(tq)); in math_SQDMULL_ACC()
7864 assign(*sat1n, binop(mkVecADD(size+1), mkexpr(tn), mkexpr(tn))); in math_SQDMULL_ACC()
7869 assign(*res, mkexpr(*sat1q)); in math_SQDMULL_ACC()
7879 assign(*sat2q, binop(mas == 'a' ? mkVecQADDS(size+1) : mkVecQSUBS(size+1), in math_SQDMULL_ACC()
7881 assign(*sat2n, binop(mas == 'a' ? mkVecADD(size+1) : mkVecSUB(size+1), in math_SQDMULL_ACC()
7883 assign(*res, mkexpr(*sat2q)); in math_SQDMULL_ACC()
7900 assign(argLhi, unop(Iop_V128HIto64, mkexpr(argL))); in math_MULLS()
7901 assign(argLlo, unop(Iop_V128to64, mkexpr(argL))); in math_MULLS()
7902 assign(argRhi, unop(Iop_V128HIto64, mkexpr(argR))); in math_MULLS()
7903 assign(argRlo, unop(Iop_V128to64, mkexpr(argR))); in math_MULLS()
7905 assign(*resHI, binop(opMulls, mkexpr(argLhi), mkexpr(argRhi))); in math_MULLS()
7906 assign(*resLO, binop(opMulls, mkexpr(argLlo), mkexpr(argRlo))); in math_MULLS()
7928 assign(*sat1q, binop(mkVecQRDMULHIS(size), mkexpr(vN), mkexpr(vM))); in math_SQDMULH()
7932 assign(*sat1n, in math_SQDMULH()
7941 assign(*sat1q, binop(mkVecQDMULHIS(size), mkexpr(vN), mkexpr(vM))); in math_SQDMULH()
7943 assign(*sat1n, in math_SQDMULH()
7949 assign(*res, mkexpr(*sat1q)); in math_SQDMULH()
7967 assign(z128, mkV128(0x0000)); in math_QSHL_IMM()
7972 assign(*res, binop(qop, mkexpr(src), mkU8(shift))); in math_QSHL_IMM()
7975 assign(*qDiff1, mkexpr(z128)); in math_QSHL_IMM()
7976 assign(*qDiff2, mkexpr(z128)); in math_QSHL_IMM()
7983 assign(*qDiff1, binop(mkVecSHRN(size), mkexpr(src), mkU8(rshift))); in math_QSHL_IMM()
7984 assign(*qDiff2, mkexpr(z128)); in math_QSHL_IMM()
7992 assign(*res, binop(qop, mkexpr(src), mkU8(shift))); in math_QSHL_IMM()
7995 assign(*qDiff1, mkexpr(z128)); in math_QSHL_IMM()
7996 assign(*qDiff2, mkexpr(z128)); in math_QSHL_IMM()
8004 assign(*qDiff1, binop(mkVecSHRN(size), mkexpr(src), mkU8(rshift))); in math_QSHL_IMM()
8007 assign(*qDiff2, binop(mkVecSHRN(size), in math_QSHL_IMM()
8020 assign(*res, binop(qop, mkexpr(src), mkU8(shift))); in math_QSHL_IMM()
8024 assign(*qDiff1, binop(mkVecSHRN(size), mkexpr(src), mkU8(laneBits-1))); in math_QSHL_IMM()
8025 assign(*qDiff2, mkexpr(z128)); in math_QSHL_IMM()
8032 assign(*qDiff1, binop(mkVecSHRN(size), mkexpr(src), mkU8(rshift))); in math_QSHL_IMM()
8033 assign(*qDiff2, mkexpr(z128)); in math_QSHL_IMM()
8057 assign(imm64, mkU64(ones64[size])); in math_RHADD()
8059 assign(vecOne, binop(Iop_64HLtoV128, mkexpr(imm64), mkexpr(imm64))); in math_RHADD()
8061 assign(scaOne, mkU8(1)); in math_RHADD()
8063 assign(res, in math_RHADD()
8102 assign(diff, binop(Iop_XorV128, mkexpr(qres), mkexpr(nres))); in updateQCFLAGwithDifferenceZHI()
8106 assign(diff, unop(opZHI, binop(Iop_XorV128, mkexpr(qres), mkexpr(nres)))); in updateQCFLAGwithDifferenceZHI()
8108 assign(oldQCFLAG, IRExpr_Get(OFFB_QCFLAG, Ity_V128)); in updateQCFLAGwithDifferenceZHI()
8109 assign(newQCFLAG, binop(Iop_OrV128, mkexpr(oldQCFLAG), mkexpr(diff))); in updateQCFLAGwithDifferenceZHI()
8151 assign(*rearrL, binop(Iop_InterleaveHI64x2, mkexpr(vecM), mkexpr(vecN))); in math_REARRANGE_FOR_FLOATING_PAIRWISE()
8152 assign(*rearrR, binop(Iop_InterleaveLO64x2, mkexpr(vecM), mkexpr(vecN))); in math_REARRANGE_FOR_FLOATING_PAIRWISE()
8156 assign(*rearrL, binop(Iop_CatOddLanes32x4, mkexpr(vecM), mkexpr(vecN))); in math_REARRANGE_FOR_FLOATING_PAIRWISE()
8157 assign(*rearrR, binop(Iop_CatEvenLanes32x4, mkexpr(vecM), mkexpr(vecN))); in math_REARRANGE_FOR_FLOATING_PAIRWISE()
8163 assign(m1n1m0n0, binop(Iop_InterleaveLO32x4, in math_REARRANGE_FOR_FLOATING_PAIRWISE()
8165 assign(m0n0m1n1, triop(Iop_SliceV128, in math_REARRANGE_FOR_FLOATING_PAIRWISE()
8167 assign(*rearrL, unop(Iop_ZeroHI64ofV128, mkexpr(m1n1m0n0))); in math_REARRANGE_FOR_FLOATING_PAIRWISE()
8168 assign(*rearrR, unop(Iop_ZeroHI64ofV128, mkexpr(m0n0m1n1))); in math_REARRANGE_FOR_FLOATING_PAIRWISE()
8222 assign(sHi, getQReg128(mm)); in dis_AdvSIMD_EXT()
8223 assign(sLo, getQReg128(nn)); in dis_AdvSIMD_EXT()
8226 assign(res, mkexpr(sLo)); in dis_AdvSIMD_EXT()
8229 assign(res, triop(Iop_SliceV128, in dis_AdvSIMD_EXT()
8237 assign(res, mkexpr(sLo)); in dis_AdvSIMD_EXT()
8241 assign(hi64lo64, binop(Iop_InterleaveLO64x2, in dis_AdvSIMD_EXT()
8243 assign(res, triop(Iop_SliceV128, in dis_AdvSIMD_EXT()
8291 assign(oor_values, isTBX ? getQReg128(dd) : mkV128(0)); in dis_AdvSIMD_TBL_TBX()
8294 assign(src, getQReg128(mm)); in dis_AdvSIMD_TBL_TBX()
8301 assign(tab[i], getQReg128((nn + i) % 32)); in dis_AdvSIMD_TBL_TBX()
8349 assign(preL, binop(Iop_InterleaveLO64x2, getQReg128(mm), in dis_AdvSIMD_ZIP_UZP_TRN()
8351 assign(preR, mkexpr(preL)); in dis_AdvSIMD_ZIP_UZP_TRN()
8353 assign(preL, getQReg128(mm)); in dis_AdvSIMD_ZIP_UZP_TRN()
8354 assign(preR, getQReg128(nn)); in dis_AdvSIMD_ZIP_UZP_TRN()
8356 assign(res, binop(op, mkexpr(preL), mkexpr(preR))); in dis_AdvSIMD_ZIP_UZP_TRN()
8376 assign(srcM, getQReg128(mm)); in dis_AdvSIMD_ZIP_UZP_TRN()
8377 assign(srcN, getQReg128(nn)); in dis_AdvSIMD_ZIP_UZP_TRN()
8378 assign(res, binop(op2, binop(op1, mkexpr(srcM), mkexpr(srcM)), in dis_AdvSIMD_ZIP_UZP_TRN()
8400 assign(z128, mkV128(0x0000)); in dis_AdvSIMD_ZIP_UZP_TRN()
8403 assign(preL, triop(Iop_SliceV128, in dis_AdvSIMD_ZIP_UZP_TRN()
8405 assign(preR, triop(Iop_SliceV128, in dis_AdvSIMD_ZIP_UZP_TRN()
8409 assign(preL, getQReg128(mm)); in dis_AdvSIMD_ZIP_UZP_TRN()
8410 assign(preR, getQReg128(nn)); in dis_AdvSIMD_ZIP_UZP_TRN()
8412 assign(res, binop(op, mkexpr(preL), mkexpr(preR))); in dis_AdvSIMD_ZIP_UZP_TRN()
8453 assign(src, getQReg128(nn)); in dis_AdvSIMD_across_lanes()
8471 assign(tWi, widened); in dis_AdvSIMD_across_lanes()
8518 assign(tN1, getQReg128(nn)); in dis_AdvSIMD_across_lanes()
8525 assign(tN2, bitQ == 0 in dis_AdvSIMD_across_lanes()
8554 assign(src, getQReg128(nn)); in dis_AdvSIMD_across_lanes()
8628 assign(w0, unop(Iop_8Uto64, unop(Iop_64to8, getIReg64orZR(nn)))); in dis_AdvSIMD_copy()
8633 assign(w0, unop(Iop_16Uto64, unop(Iop_64to16, getIReg64orZR(nn)))); in dis_AdvSIMD_copy()
8638 assign(w0, unop(Iop_32Uto64, unop(Iop_64to32, getIReg64orZR(nn)))); in dis_AdvSIMD_copy()
8643 assign(w0, getIReg64orZR(nn)); in dis_AdvSIMD_copy()
9036 assign(w0, unop(Iop_8Uto64, getQRegLane(nn, laneNo, laneTy))); in dis_AdvSIMD_scalar_copy()
9042 assign(w0, unop(Iop_16Uto64, getQRegLane(nn, laneNo, laneTy))); in dis_AdvSIMD_scalar_copy()
9048 assign(w0, unop(Iop_32Uto64, getQRegLane(nn, laneNo, laneTy))); in dis_AdvSIMD_scalar_copy()
9054 assign(w0, getQRegLane(nn, laneNo, laneTy)); in dis_AdvSIMD_scalar_copy()
9099 assign(xy, getQReg128(nn)); in dis_AdvSIMD_scalar_pairwise()
9100 assign(xx, binop(Iop_InterleaveHI64x2, mkexpr(xy), mkexpr(xy))); in dis_AdvSIMD_scalar_pairwise()
9116 assign(src, getQReg128(nn)); in dis_AdvSIMD_scalar_pairwise()
9117 assign(argL, unop(opZHI, mkexpr(src))); in dis_AdvSIMD_scalar_pairwise()
9118 assign(argR, unop(opZHI, triop(Iop_SliceV128, mkexpr(src), mkexpr(src), in dis_AdvSIMD_scalar_pairwise()
9142 assign(src, getQReg128(nn)); in dis_AdvSIMD_scalar_pairwise()
9143 assign(argL, unop(opZHI, mkexpr(src))); in dis_AdvSIMD_scalar_pairwise()
9144 assign(argR, unop(opZHI, triop(Iop_SliceV128, mkexpr(src), mkexpr(src), in dis_AdvSIMD_scalar_pairwise()
9194 assign(shf, mkV128(0x0000)); in dis_AdvSIMD_scalar_shift_by_imm()
9201 assign(shf, binop(op, src, mkU8(sh - nudge))); in dis_AdvSIMD_scalar_shift_by_imm()
9203 assign(res, isAcc ? binop(Iop_Add64x2, getQReg128(dd), mkexpr(shf)) in dis_AdvSIMD_scalar_shift_by_imm()
9226 assign(imm8, mkU8((UChar)(-sh))); in dis_AdvSIMD_scalar_shift_by_imm()
9230 assign(shf, binop(op, src, amt)); in dis_AdvSIMD_scalar_shift_by_imm()
9231 assign(res, isAcc ? binop(Iop_Add64x2, getQReg128(dd), mkexpr(shf)) in dis_AdvSIMD_scalar_shift_by_imm()
9251 assign(res, binop(Iop_OrV128, in dis_AdvSIMD_scalar_shift_by_imm()
9283 assign(res, binop(Iop_OrV128, in dis_AdvSIMD_scalar_shift_by_imm()
9367 assign(pair, binop(op, mkexpr(src128), mkU8(shift))); in dis_AdvSIMD_scalar_shift_by_imm()
9370 assign(res64in128, unop(Iop_ZeroHI64ofV128, mkexpr(pair))); in dis_AdvSIMD_scalar_shift_by_imm()
9374 assign(q64q64, binop(Iop_InterleaveHI64x2, mkexpr(pair), mkexpr(pair))); in dis_AdvSIMD_scalar_shift_by_imm()
9376 assign(z128, mkV128(0x0000)); in dis_AdvSIMD_scalar_shift_by_imm()
9409 assign(src, getQRegLane(nn, 0, tyI)); in dis_AdvSIMD_scalar_shift_by_imm()
9410 assign(res, triop(opMUL, mkexpr(rm), in dis_AdvSIMD_scalar_shift_by_imm()
9447 assign(src, getQRegLane(nn, 0, tyF)); in dis_AdvSIMD_scalar_shift_by_imm()
9448 assign(rm, mkU32(Irrm_ZERO)); in dis_AdvSIMD_scalar_shift_by_imm()
9449 assign(res, binop(opCVT, mkexpr(rm), in dis_AdvSIMD_scalar_shift_by_imm()
9510 assign(vecN, getQReg128(nn)); in dis_AdvSIMD_scalar_three_different()
9511 assign(vecM, getQReg128(mm)); in dis_AdvSIMD_scalar_three_different()
9512 assign(vecD, getQReg128(dd)); in dis_AdvSIMD_scalar_three_different()
9579 assign(argL, getQReg128(nn)); in dis_AdvSIMD_scalar_three_same()
9580 assign(argR, getQReg128(mm)); in dis_AdvSIMD_scalar_three_same()
9581 assign(qres, mkexpr(math_ZERO_ALL_EXCEPT_LOWEST_LANE( in dis_AdvSIMD_scalar_three_same()
9583 assign(nres, mkexpr(math_ZERO_ALL_EXCEPT_LOWEST_LANE( in dis_AdvSIMD_scalar_three_same()
9601 assign(res, in dis_AdvSIMD_scalar_three_same()
9618 assign(res, in dis_AdvSIMD_scalar_three_same()
9639 assign(res, binop(op, getQReg128(nn), getQReg128(mm))); in dis_AdvSIMD_scalar_three_same()
9667 assign( in dis_AdvSIMD_scalar_three_same()
9672 assign(resSH, unop(Iop_V256toV128_0, mkexpr(res256))); in dis_AdvSIMD_scalar_three_same()
9673 assign(resQ, unop(Iop_V256toV128_1, mkexpr(res256))); in dis_AdvSIMD_scalar_three_same()
9674 assign(zero, mkV128(0x0000)); in dis_AdvSIMD_scalar_three_same()
9689 assign(res, binop(isSUB ? Iop_Sub64 : Iop_Add64, in dis_AdvSIMD_scalar_three_same()
9707 assign(res, in dis_AdvSIMD_scalar_three_same()
9727 assign(vN, getQReg128(nn)); in dis_AdvSIMD_scalar_three_same()
9728 assign(vM, getQReg128(mm)); in dis_AdvSIMD_scalar_three_same()
9745 assign(res, unop(mkABSF(ity), in dis_AdvSIMD_scalar_three_same()
9761 assign(res, triop(mkMULF(ity), in dis_AdvSIMD_scalar_three_same()
9780 assign(res, isGE ? binop(opCMP, getQReg128(mm), getQReg128(nn)) // swapd in dis_AdvSIMD_scalar_three_same()
9795 assign(res, binop(opCMP, getQReg128(mm), getQReg128(nn))); // swapd in dis_AdvSIMD_scalar_three_same()
9813 assign(res, binop(opCMP, unop(opABS, getQReg128(mm)), in dis_AdvSIMD_scalar_three_same()
9830 assign(res, binop(op, getQReg128(nn), getQReg128(mm))); in dis_AdvSIMD_scalar_three_same()
9877 assign(argL, getQReg128(nn)); in dis_AdvSIMD_scalar_two_reg_misc()
9878 assign(argR, getQReg128(dd)); in dis_AdvSIMD_scalar_two_reg_misc()
9913 assign(res, isGT ? binop(Iop_CmpGT64Sx2, argL, argR) in dis_AdvSIMD_scalar_two_reg_misc()
9927 assign(res, isEQ ? binop(Iop_CmpEQ64x2, argL, argR) in dis_AdvSIMD_scalar_two_reg_misc()
9993 assign(res, swap ? binop(opCmp, zero, getQReg128(nn)) in dis_AdvSIMD_scalar_two_reg_misc()
10091 assign(src, getQRegLane(nn, 0, tyF)); in dis_AdvSIMD_scalar_two_reg_misc()
10092 assign(res, binop(cvt, mkU32(irrm), mkexpr(src))); in dis_AdvSIMD_scalar_two_reg_misc()
10131 assign(resV, unop(op, getQReg128(nn))); in dis_AdvSIMD_scalar_two_reg_misc()
10146 assign(res, binop(op, mkexpr(rm), getQRegLane(nn, 0, ty))); in dis_AdvSIMD_scalar_two_reg_misc()
10199 assign(elem, getQRegLane(mm, index, ity)); in dis_AdvSIMD_scalar_x_indexed_element()
10208 assign(t1, triop(opMUL, mkexpr(rm), getQReg128(nn), mkexpr(dupd))); in dis_AdvSIMD_scalar_x_indexed_element()
10209 assign(t2, triop(isSUB ? opSUB : opADD, in dis_AdvSIMD_scalar_x_indexed_element()
10233 assign(elem, getQRegLane(mm, index, ity)); in dis_AdvSIMD_scalar_x_indexed_element()
10239 assign(t1, triop(opMUL, mkexpr(rm), getQReg128(nn), mkexpr(dupd))); in dis_AdvSIMD_scalar_x_indexed_element()
10282 assign(vecN, getQReg128(nn)); in dis_AdvSIMD_scalar_x_indexed_element()
10284 assign(vecD, getQReg128(dd)); in dis_AdvSIMD_scalar_x_indexed_element()
10326 assign(vN, getQReg128(nn)); in dis_AdvSIMD_scalar_x_indexed_element()
10390 assign(shf, mkV128(0x0000)); in dis_AdvSIMD_shift_by_immediate()
10397 assign(shf, binop(op, src, mkU8(shift - nudge))); in dis_AdvSIMD_shift_by_immediate()
10399 assign(res, isAcc ? binop(mkVecADD(size), getQReg128(dd), mkexpr(shf)) in dis_AdvSIMD_shift_by_immediate()
10437 assign(imm8, mkU8((UChar)(-shift))); in dis_AdvSIMD_shift_by_immediate()
10441 assign(shf, binop(op, src, amt)); in dis_AdvSIMD_shift_by_immediate()
10442 assign(res, isAcc ? binop(mkVecADD(size), getQReg128(dd), mkexpr(shf)) in dis_AdvSIMD_shift_by_immediate()
10475 assign(res, getQReg128(dd)); in dis_AdvSIMD_shift_by_immediate()
10477 assign(res, binop(mkVecSHRN(size), src, mkU8(shift))); in dis_AdvSIMD_shift_by_immediate()
10481 assign(tmp, binop(Iop_OrV128, in dis_AdvSIMD_shift_by_immediate()
10521 assign(res, src); in dis_AdvSIMD_shift_by_immediate()
10523 assign(res, binop(op, src, mkU8(shift))); in dis_AdvSIMD_shift_by_immediate()
10528 assign(tmp, binop(Iop_OrV128, in dis_AdvSIMD_shift_by_immediate()
10569 assign(src, getQReg128(nn)); in dis_AdvSIMD_shift_by_immediate()
10595 assign(t1, getQReg128(nn)); in dis_AdvSIMD_shift_by_immediate()
10596 assign(t2, isR ? binop(mkVecADD(size+1), in dis_AdvSIMD_shift_by_immediate()
10600 assign(t3, binop(mkVecSHRN(size+1), mkexpr(t2), mkU8(shift))); in dis_AdvSIMD_shift_by_immediate()
10649 assign(src128, getQReg128(nn)); in dis_AdvSIMD_shift_by_immediate()
10651 assign(pair, binop(op, mkexpr(src128), mkU8(shift))); in dis_AdvSIMD_shift_by_immediate()
10654 assign(res64in128, unop(Iop_ZeroHI64ofV128, mkexpr(pair))); in dis_AdvSIMD_shift_by_immediate()
10658 assign(q64q64, binop(Iop_InterleaveHI64x2, mkexpr(pair), mkexpr(pair))); in dis_AdvSIMD_shift_by_immediate()
10660 assign(z128, mkV128(0x0000)); in dis_AdvSIMD_shift_by_immediate()
10692 assign(src, getQReg128(nn)); in dis_AdvSIMD_shift_by_immediate()
10693 assign(zero, mkV128(0x0000)); in dis_AdvSIMD_shift_by_immediate()
10769 assign(src, getQRegLane(nn, i, tyI)); in dis_AdvSIMD_shift_by_immediate()
10770 assign(res, triop(opMUL, mkexpr(rm), in dis_AdvSIMD_shift_by_immediate()
10815 assign(src, getQRegLane(nn, i, tyF)); in dis_AdvSIMD_shift_by_immediate()
10816 assign(rm, mkU32(Irrm_ZERO)); in dis_AdvSIMD_shift_by_immediate()
10817 assign(res, binop(opCVT, mkexpr(rm), in dis_AdvSIMD_shift_by_immediate()
10874 assign(res, binop(isADD ? mkVecADD(size+1) : mkVecSUB(size+1), in dis_AdvSIMD_three_different()
10899 assign(res, binop(isADD ? mkVecADD(size+1) : mkVecSUB(size+1), in dis_AdvSIMD_three_different()
10933 assign(wide, wideE); in dis_AdvSIMD_three_different()
10936 assign(shrd, binop(mkVecSHRN(size+1), mkexpr(wide), mkU8(shift[size]))); in dis_AdvSIMD_three_different()
10939 assign(new64, binop(mkVecCATEVENLANES(size), mkexpr(shrd), mkexpr(shrd))); in dis_AdvSIMD_three_different()
10965 assign(res, isACC ? binop(mkVecADD(size+1), mkexpr(abd), getQReg128(dd)) in dis_AdvSIMD_three_different()
11001 assign(vecN, getQReg128(nn)); in dis_AdvSIMD_three_different()
11002 assign(vecM, getQReg128(mm)); in dis_AdvSIMD_three_different()
11003 assign(vecD, getQReg128(dd)); in dis_AdvSIMD_three_different()
11037 assign(vecN, getQReg128(nn)); in dis_AdvSIMD_three_different()
11038 assign(vecM, getQReg128(mm)); in dis_AdvSIMD_three_different()
11039 assign(vecD, getQReg128(dd)); in dis_AdvSIMD_three_different()
11121 assign(argL, getQReg128(nn)); in dis_AdvSIMD_three_same()
11124 assign(argR, getQReg128(mm)); in dis_AdvSIMD_three_same()
11129 assign(resHi, binop(opSxR, in dis_AdvSIMD_three_same()
11132 assign(resLo, binop(opSxR, in dis_AdvSIMD_three_same()
11152 assign(argL, getQReg128(nn)); in dis_AdvSIMD_three_same()
11153 assign(argR, getQReg128(mm)); in dis_AdvSIMD_three_same()
11183 assign(argL, getQReg128(nn)); in dis_AdvSIMD_three_same()
11184 assign(argR, getQReg128(mm)); in dis_AdvSIMD_three_same()
11185 assign(qres, math_MAYBE_ZERO_HI64_fromE( in dis_AdvSIMD_three_same()
11187 assign(nres, math_MAYBE_ZERO_HI64_fromE( in dis_AdvSIMD_three_same()
11207 assign(res, binop(isORx ? Iop_OrV128 : Iop_AndV128, in dis_AdvSIMD_three_same()
11227 assign(argD, getQReg128(dd)); in dis_AdvSIMD_three_same()
11228 assign(argN, getQReg128(nn)); in dis_AdvSIMD_three_same()
11229 assign(argM, getQReg128(mm)); in dis_AdvSIMD_three_same()
11236 assign(res, binop(opXOR, mkexpr(argM), mkexpr(argN))); in dis_AdvSIMD_three_same()
11239 assign(res, binop(opXOR, mkexpr(argM), in dis_AdvSIMD_three_same()
11245 assign(res, binop(opXOR, mkexpr(argD), in dis_AdvSIMD_three_same()
11251 assign(res, binop(opXOR, mkexpr(argD), in dis_AdvSIMD_three_same()
11275 assign(res, in dis_AdvSIMD_three_same()
11294 assign(res, in dis_AdvSIMD_three_same()
11316 assign(res, binop(op, getQReg128(nn), getQReg128(mm))); in dis_AdvSIMD_three_same()
11347 assign(res256, binop(op, in dis_AdvSIMD_three_same()
11350 assign(resSH, unop(Iop_V256toV128_0, mkexpr(res256))); in dis_AdvSIMD_three_same()
11351 assign(resQ, unop(Iop_V256toV128_1, mkexpr(res256))); in dis_AdvSIMD_three_same()
11352 assign(zero, mkV128(0x0000)); in dis_AdvSIMD_three_same()
11374 assign(t, binop(op, getQReg128(nn), getQReg128(mm))); in dis_AdvSIMD_three_same()
11395 assign(t2, isACC ? binop(mkVecADD(size), mkexpr(t1), getQReg128(dd)) in dis_AdvSIMD_three_same()
11413 assign(t, binop(op, getQReg128(nn), getQReg128(mm))); in dis_AdvSIMD_three_same()
11430 assign(res, in dis_AdvSIMD_three_same()
11452 assign(res, binop(opADDSUB, in dis_AdvSIMD_three_same()
11474 assign(res, binop(opMUL, getQReg128(nn), getQReg128(mm))); in dis_AdvSIMD_three_same()
11496 assign(vN, getQReg128(nn)); in dis_AdvSIMD_three_same()
11497 assign(vM, getQReg128(mm)); in dis_AdvSIMD_three_same()
11499 assign(res128, in dis_AdvSIMD_three_same()
11527 assign(vN, getQReg128(nn)); in dis_AdvSIMD_three_same()
11528 assign(vM, getQReg128(mm)); in dis_AdvSIMD_three_same()
11545 assign(vN, getQReg128(nn)); in dis_AdvSIMD_three_same()
11546 assign(vM, getQReg128(mm)); in dis_AdvSIMD_three_same()
11548 assign(res128, in dis_AdvSIMD_three_same()
11579 assign(res, binop(opMXX, getQReg128(nn), getQReg128(mm))); in dis_AdvSIMD_three_same()
11601 assign(t1, triop(opMUL, in dis_AdvSIMD_three_same()
11603 assign(t2, triop(isSUB ? opSUB : opADD, in dis_AdvSIMD_three_same()
11624 assign(t1, triop(op, mkexpr(rm), getQReg128(nn), getQReg128(mm))); in dis_AdvSIMD_three_same()
11625 assign(t2, math_MAYBE_ZERO_HI64(bitQ, t1)); in dis_AdvSIMD_three_same()
11643 assign(t1, triop(opSUB, mkexpr(rm), getQReg128(nn), getQReg128(mm))); in dis_AdvSIMD_three_same()
11644 assign(t2, unop(opABS, mkexpr(t1))); in dis_AdvSIMD_three_same()
11661 assign(t1, triop(isD ? Iop_Mul64Fx2 : Iop_Mul32Fx4, in dis_AdvSIMD_three_same()
11679 assign(t1, isGE ? binop(opCMP, getQReg128(mm), getQReg128(nn)) // swapd in dis_AdvSIMD_three_same()
11694 assign(t1, binop(opCMP, getQReg128(mm), getQReg128(nn))); // swapd in dis_AdvSIMD_three_same()
11712 assign(t1, binop(opCMP, unop(opABS, getQReg128(mm)), in dis_AdvSIMD_three_same()
11737 assign(srcN, getQReg128(nn)); in dis_AdvSIMD_three_same()
11738 assign(srcM, getQReg128(mm)); in dis_AdvSIMD_three_same()
11760 assign(srcN, getQReg128(nn)); in dis_AdvSIMD_three_same()
11761 assign(srcM, getQReg128(mm)); in dis_AdvSIMD_three_same()
11786 assign(t1, triop(op, mkexpr(rm), getQReg128(nn), getQReg128(mm))); in dis_AdvSIMD_three_same()
11787 assign(t2, math_MAYBE_ZERO_HI64(bitQ, t1)); in dis_AdvSIMD_three_same()
11804 assign(res, binop(op, getQReg128(nn), getQReg128(mm))); in dis_AdvSIMD_three_same()
11847 assign(res, unop(iops[size], getQReg128(nn))); in dis_AdvSIMD_two_reg_misc()
11861 assign(res, unop(iop, getQReg128(nn))); in dis_AdvSIMD_two_reg_misc()
11872 assign(res, unop(Iop_Reverse8sIn16_x8, getQReg128(nn))); in dis_AdvSIMD_two_reg_misc()
11892 assign(src, getQReg128(nn)); in dis_AdvSIMD_two_reg_misc()
11893 assign(sum, in dis_AdvSIMD_two_reg_misc()
11899 assign(res, isACC ? binop(mkVecADD(size+1), mkexpr(sum), getQReg128(dd)) in dis_AdvSIMD_two_reg_misc()
11929 assign(argL, getQReg128(nn)); in dis_AdvSIMD_two_reg_misc()
11930 assign(argR, getQReg128(dd)); in dis_AdvSIMD_two_reg_misc()
11931 assign(qres, math_MAYBE_ZERO_HI64_fromE( in dis_AdvSIMD_two_reg_misc()
11933 assign(nres, math_MAYBE_ZERO_HI64_fromE( in dis_AdvSIMD_two_reg_misc()
11952 assign(res, unop(isCLZ ? opsCLZ[size] : opsCLS[size], getQReg128(nn))); in dis_AdvSIMD_two_reg_misc()
11964 assign(res, unop(bitU == 0 ? Iop_Cnt8x16 : Iop_NotV128, getQReg128(nn))); in dis_AdvSIMD_two_reg_misc()
11975 assign(res, unop(Iop_Reverse1sIn8_x16, getQReg128(nn))); in dis_AdvSIMD_two_reg_misc()
11992 assign(qres, math_MAYBE_ZERO_HI64(bitQ, qresFW)); in dis_AdvSIMD_two_reg_misc()
11993 assign(nres, math_MAYBE_ZERO_HI64(bitQ, nresFW)); in dis_AdvSIMD_two_reg_misc()
12011 assign(res, isGT ? binop(opGTS, argL, argR) in dis_AdvSIMD_two_reg_misc()
12028 assign(res, isEQ ? binop(mkVecCMPEQ(size), argL, argR) in dis_AdvSIMD_two_reg_misc()
12044 assign(res, binop(mkVecCMPGTS(size), argR, argL)); in dis_AdvSIMD_two_reg_misc()
12056 assign(res, unop(mkVecABS(size), getQReg128(nn))); in dis_AdvSIMD_two_reg_misc()
12067 assign(res, binop(mkVecSUB(size), mkV128(0x0000), getQReg128(nn))); in dis_AdvSIMD_two_reg_misc()
12107 assign(res, swap ? binop(opCmp, zero, getQReg128(nn)) in dis_AdvSIMD_two_reg_misc()
12124 assign(res, unop(op, getQReg128(nn))); in dis_AdvSIMD_two_reg_misc()
12139 assign(resN, unop(Iop_64UtoV128, unop(opN, getQReg128(nn)))); in dis_AdvSIMD_two_reg_misc()
12171 assign(src, getQReg128(nn)); in dis_AdvSIMD_two_reg_misc()
12173 assign(resN, unop(Iop_64UtoV128, unop(opN, mkexpr(src)))); in dis_AdvSIMD_two_reg_misc()
12194 assign(src, getQReg128(nn)); in dis_AdvSIMD_two_reg_misc()
12195 assign(res, binop(opSHL, binop(opINT, mkexpr(src), mkexpr(src)), in dis_AdvSIMD_two_reg_misc()
12214 assign(src[i], getQRegLane(nn, i, srcTy)); in dis_AdvSIMD_two_reg_misc()
12239 assign(src[i], getQRegLane(nn, i, srcTy)); in dis_AdvSIMD_two_reg_misc()
12263 assign(src[i], getQRegLane(nn, nLanes * bitQ + i, srcTy)); in dis_AdvSIMD_two_reg_misc()
12308 case 1: ch = 'n'; assign(irrm, mkU32(Irrm_NEAREST)); break; in dis_AdvSIMD_two_reg_misc()
12309 case 2: ch = 'm'; assign(irrm, mkU32(Irrm_NegINF)); break; in dis_AdvSIMD_two_reg_misc()
12310 case 3: ch = 'p'; assign(irrm, mkU32(Irrm_PosINF)); break; in dis_AdvSIMD_two_reg_misc()
12311 case 4: ch = 'z'; assign(irrm, mkU32(Irrm_ZERO)); break; in dis_AdvSIMD_two_reg_misc()
12313 case 5: ch = 'a'; assign(irrm, mkU32(Irrm_NEAREST)); break; in dis_AdvSIMD_two_reg_misc()
12316 case 6: ch = 'x'; assign(irrm, mkexpr(irrmRM)); break; in dis_AdvSIMD_two_reg_misc()
12317 case 8: ch = 'i'; assign(irrm, mkexpr(irrmRM)); break; in dis_AdvSIMD_two_reg_misc()
12405 assign(res, unop(op, getQReg128(nn))); in dis_AdvSIMD_two_reg_misc()
12461 assign(resV, unop(op, getQReg128(nn))); in dis_AdvSIMD_two_reg_misc()
12475 assign(resV, binop(op, mkexpr(mk_get_IR_rounding_mode()), in dis_AdvSIMD_two_reg_misc()
12531 assign(elem, getQRegLane(mm, index, ity)); in dis_AdvSIMD_vector_x_indexed_elem()
12540 assign(t1, triop(opMUL, mkexpr(rm), getQReg128(nn), mkexpr(dupd))); in dis_AdvSIMD_vector_x_indexed_elem()
12541 assign(t2, triop(isSUB ? opSUB : opADD, in dis_AdvSIMD_vector_x_indexed_elem()
12565 assign(elem, getQRegLane(mm, index, ity)); in dis_AdvSIMD_vector_x_indexed_elem()
12569 assign(res, triop(isD ? Iop_Mul64Fx2 : Iop_Mul32Fx4, in dis_AdvSIMD_vector_x_indexed_elem()
12610 assign(vecD, getQReg128(dd)); in dis_AdvSIMD_vector_x_indexed_elem()
12611 assign(vecN, getQReg128(nn)); in dis_AdvSIMD_vector_x_indexed_elem()
12614 assign(res, binop(isMLA ? opADD : opSUB, mkexpr(vecD), prod)); in dis_AdvSIMD_vector_x_indexed_elem()
12616 assign(res, prod); in dis_AdvSIMD_vector_x_indexed_elem()
12664 assign(vecN, getQReg128(nn)); in dis_AdvSIMD_vector_x_indexed_elem()
12665 assign(vecD, getQReg128(dd)); in dis_AdvSIMD_vector_x_indexed_elem()
12715 assign(vecN, getQReg128(nn)); in dis_AdvSIMD_vector_x_indexed_elem()
12717 assign(vecD, getQReg128(dd)); in dis_AdvSIMD_vector_x_indexed_elem()
12761 assign(vN, getQReg128(nn)); in dis_AdvSIMD_vector_x_indexed_elem()
12859 assign(argL, getQRegLO(nn, ity)); in dis_AdvSIMD_fp_compare()
12860 assign(argR, in dis_AdvSIMD_fp_compare()
12864 assign(irRes, binop(isD ? Iop_CmpF64 : Iop_CmpF32, in dis_AdvSIMD_fp_compare()
12868 assign(nzcv_28x0, binop(Iop_Shl64, mkexpr(nzcv), mkU8(28))); in dis_AdvSIMD_fp_compare()
12920 assign(argL, getQRegLO(nn, ity)); in dis_AdvSIMD_fp_conditional_compare()
12921 assign(argR, getQRegLO(mm, ity)); in dis_AdvSIMD_fp_conditional_compare()
12922 assign(irRes, binop(isD ? Iop_CmpF64 : Iop_CmpF32, in dis_AdvSIMD_fp_conditional_compare()
12925 assign(condT, unop(Iop_64to1, mk_arm64g_calculate_condition(cond))); in dis_AdvSIMD_fp_conditional_compare()
12929 assign(nzcvT_28x0, binop(Iop_Shl64, mkexpr(nzcvT), mkU8(28))); in dis_AdvSIMD_fp_conditional_compare()
12934 assign(nzcv_28x0, IRExpr_ITE(mkexpr(condT), in dis_AdvSIMD_fp_conditional_compare()
12972 assign(srcT, getQRegLO(nn, ity)); in dis_AdvSIMD_fp_conditional_select()
12973 assign(srcF, getQRegLO(mm, ity)); in dis_AdvSIMD_fp_conditional_select()
12974 assign(res, IRExpr_ITE( in dis_AdvSIMD_fp_conditional_select()
13016 assign(src, getQRegLO(nn, ity)); in dis_AdvSIMD_fp_data_proc_1_source()
13019 nm = "fmov"; assign(res, mkexpr(src)); break; in dis_AdvSIMD_fp_data_proc_1_source()
13021 nm = "fabs"; assign(res, unop(mkABSF(ity), mkexpr(src))); break; in dis_AdvSIMD_fp_data_proc_1_source()
13023 nm = "fabs"; assign(res, unop(mkNEGF(ity), mkexpr(src))); break; in dis_AdvSIMD_fp_data_proc_1_source()
13026 assign(res, binop(mkSQRTF(ity), in dis_AdvSIMD_fp_data_proc_1_source()
13067 assign(res, unop(srcIsH ? Iop_F16toF64 : Iop_F32toF64, in dis_AdvSIMD_fp_data_proc_1_source()
13080 assign(res, binop(dstIsH ? Iop_F64toF16 : Iop_F64toF32, in dis_AdvSIMD_fp_data_proc_1_source()
13096 assign(res, binop(Iop_F32toF16, in dis_AdvSIMD_fp_data_proc_1_source()
13101 assign(res, unop(Iop_F16toF32, in dis_AdvSIMD_fp_data_proc_1_source()
13166 assign(src, getQRegLO(nn, ity)); in dis_AdvSIMD_fp_data_proc_1_source()
13167 assign(dst, binop(isD ? Iop_RoundF64toInt : Iop_RoundF32toInt, in dis_AdvSIMD_fp_data_proc_1_source()
13228 assign(res, triop(iop, mkexpr(mk_get_IR_rounding_mode()), in dis_AdvSIMD_fp_data_proc_2_source()
13251 assign(res, resE); in dis_AdvSIMD_fp_data_proc_2_source()
13318 case 0: assign(res, triop(opADD, rm, eA, eNxM)); break; in dis_AdvSIMD_fp_data_proc_3_source()
13319 case 1: assign(res, triop(opSUB, rm, eA, eNxM)); break; in dis_AdvSIMD_fp_data_proc_3_source()
13320 case 2: assign(res, unop(opNEG, triop(opADD, rm, eA, eNxM))); break; in dis_AdvSIMD_fp_data_proc_3_source()
13321 case 3: assign(res, unop(opNEG, triop(opSUB, rm, eA, eNxM))); break; in dis_AdvSIMD_fp_data_proc_3_source()
13427 assign(irrm, mkU32(Irrm_ZERO)); in dis_AdvSIMD_fp_to_from_fixedp_conv()
13627 assign(src, getQRegLO(nn, srcTy)); in dis_AdvSIMD_fp_to_from_int_conv()
13628 assign(dst, binop(iop, mkU32(irrm), mkexpr(src))); in dis_AdvSIMD_fp_to_from_int_conv()
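All of the matches above instantiate the same construction idiom: bind an IRExpr tree to a fresh IRTemp exactly once via assign (which wraps stmt(IRStmt_WrTmp(dst, e))), then read the temporary back with mkexpr. A minimal sketch of that pattern, assuming the usual guest_arm64_toIR.c helpers (newTemp, assign, mkexpr, binop, getQReg128, putQReg128) and hypothetical register numbers dd/nn/mm; this is not the file's exact code:

    /* Sketch only: translate AND Vd.16B, Vn.16B, Vm.16B.
       Each IRTemp is written once by assign() and thereafter
       referenced with mkexpr(), giving SSA-style IR. */
    IRTemp argL = newTemp(Ity_V128);
    IRTemp argR = newTemp(Ity_V128);
    IRTemp res  = newTemp(Ity_V128);
    assign(argL, getQReg128(nn));   /* read Qn */
    assign(argR, getQReg128(mm));   /* read Qm */
    assign(res, binop(Iop_AndV128, mkexpr(argL), mkexpr(argR)));
    putQReg128(dd, mkexpr(res));    /* write Qd */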