Lines Matching full:env

168 static HReg lookupIRTemp ( ISelEnv* env, IRTemp tmp )  in lookupIRTemp()  argument
171 vassert(tmp < env->n_vregmap); in lookupIRTemp()
172 return env->vregmap[tmp]; in lookupIRTemp()
176 ISelEnv* env, IRTemp tmp ) in lookupIRTempPair() argument
179 vassert(tmp < env->n_vregmap); in lookupIRTempPair()
180 vassert(! hregIsInvalid(env->vregmapHI[tmp])); in lookupIRTempPair()
181 *vrLO = env->vregmap[tmp]; in lookupIRTempPair()
182 *vrHI = env->vregmapHI[tmp]; in lookupIRTempPair()
185 static void addInstr ( ISelEnv* env, AMD64Instr* instr ) in addInstr() argument
187 addHInstr(env->code, instr); in addInstr()
194 static HReg newVRegI ( ISelEnv* env ) in newVRegI() argument
196 HReg reg = mkHReg(True/*virtual reg*/, HRcInt64, 0/*enc*/, env->vreg_ctr); in newVRegI()
197 env->vreg_ctr++; in newVRegI()
201 static HReg newVRegV ( ISelEnv* env ) in newVRegV() argument
203 HReg reg = mkHReg(True/*virtual reg*/, HRcVec128, 0/*enc*/, env->vreg_ctr); in newVRegV()
204 env->vreg_ctr++; in newVRegV()
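
These matches appear to come from the VEX AMD64 instruction selector (Valgrind's host_amd64_isel.c). The hits in lookupIRTemp(), lookupIRTempPair(), newVRegI() and newVRegV() all revolve around the selector environment: a per-block table mapping IR temporaries to virtual host registers, plus a counter for minting fresh virtual registers. A minimal standalone sketch of that pattern (the type and helper names below are illustrative, not the VEX declarations):

   #include <assert.h>

   /* Hypothetical, stripped-down model of an instruction-selection
      environment: one virtual host register per IR temporary, and a
      counter used to mint fresh virtual registers on demand. */
   typedef unsigned int HRegNum;        /* stand-in for VEX's HReg      */

   typedef struct {
      HRegNum* vregmap;                 /* indexed by IRTemp number     */
      int      n_vregmap;
      unsigned vreg_ctr;                /* next unused vreg number      */
   } SelEnv;

   static HRegNum lookup_tmp ( SelEnv* env, int tmp )
   {
      assert(tmp >= 0 && tmp < env->n_vregmap);   /* cf. the vassert()s */
      return env->vregmap[tmp];
   }

   static HRegNum new_vreg ( SelEnv* env )        /* cf. newVRegI/V     */
   {
      return env->vreg_ctr++;
   }
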
219 static AMD64RMI* iselIntExpr_RMI_wrk ( ISelEnv* env, const IRExpr* e );
220 static AMD64RMI* iselIntExpr_RMI ( ISelEnv* env, const IRExpr* e );
222 static AMD64RI* iselIntExpr_RI_wrk ( ISelEnv* env, const IRExpr* e );
223 static AMD64RI* iselIntExpr_RI ( ISelEnv* env, const IRExpr* e );
225 static AMD64RM* iselIntExpr_RM_wrk ( ISelEnv* env, const IRExpr* e );
226 static AMD64RM* iselIntExpr_RM ( ISelEnv* env, const IRExpr* e );
228 static HReg iselIntExpr_R_wrk ( ISelEnv* env, const IRExpr* e );
229 static HReg iselIntExpr_R ( ISelEnv* env, const IRExpr* e );
231 static AMD64AMode* iselIntExpr_AMode_wrk ( ISelEnv* env, const IRExpr* e );
232 static AMD64AMode* iselIntExpr_AMode ( ISelEnv* env, const IRExpr* e );
235 ISelEnv* env, const IRExpr* e );
237 ISelEnv* env, const IRExpr* e );
239 static AMD64CondCode iselCondCode_wrk ( ISelEnv* env, const IRExpr* e );
240 static AMD64CondCode iselCondCode ( ISelEnv* env, const IRExpr* e );
242 static HReg iselDblExpr_wrk ( ISelEnv* env, const IRExpr* e );
243 static HReg iselDblExpr ( ISelEnv* env, const IRExpr* e );
245 static HReg iselFltExpr_wrk ( ISelEnv* env, const IRExpr* e );
246 static HReg iselFltExpr ( ISelEnv* env, const IRExpr* e );
248 static HReg iselVecExpr_wrk ( ISelEnv* env, const IRExpr* e );
249 static HReg iselVecExpr ( ISelEnv* env, const IRExpr* e );
252 ISelEnv* env, const IRExpr* e );
254 ISelEnv* env, const IRExpr* e );
328 static void add_to_rsp ( ISelEnv* env, Int n ) in add_to_rsp() argument
331 addInstr(env, in add_to_rsp()
336 static void sub_from_rsp ( ISelEnv* env, Int n ) in sub_from_rsp() argument
339 addInstr(env, in sub_from_rsp()
345 static void push_uimm64( ISelEnv* env, ULong uimm64 ) in push_uimm64() argument
351 addInstr( env, AMD64Instr_Push(AMD64RMI_Imm( (UInt)uimm64 )) ); in push_uimm64()
353 HReg tmp = newVRegI(env); in push_uimm64()
354 addInstr( env, AMD64Instr_Imm64(uimm64, tmp) ); in push_uimm64()
355 addInstr( env, AMD64Instr_Push(AMD64RMI_Reg(tmp)) ); in push_uimm64()
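
push_uimm64() above takes one of two routes: a constant that survives x86-64's PUSH imm32 sign extension is pushed directly, anything else is first materialised in a temporary with Imm64 and the register is pushed instead. A standalone sketch of the predicate behind that split (the helper name is illustrative; the exact test in VEX may be written differently):

   #include <stdio.h>

   /* PUSH imm32 sign-extends its operand to 64 bits, so a 64-bit
      constant can go straight into a push only if bits 63..31 are all
      copies of bit 31. */
   static int fits_as_sext_imm32 ( unsigned long long u )
   {
      unsigned long long top33 = u >> 31;
      return top33 == 0 || top33 == 0x1FFFFFFFFULL;
   }

   int main ( void )
   {
      printf("%d\n", fits_as_sext_imm32(0xFFFFFFFFFFFFFFF0ULL)); /* 1: -16  */
      printf("%d\n", fits_as_sext_imm32(0x0000000080000000ULL)); /* 0: 2^31 */
      return 0;
   }
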
364 static AMD64Instr* iselIntExpr_single_instruction ( ISelEnv* env, in iselIntExpr_single_instruction() argument
377 vassert(typeOfIRExpr(env->type_env, e) == Ity_I64); in iselIntExpr_single_instruction()
393 HReg src = lookupIRTemp(env, e->Iex.RdTmp.tmp); in iselIntExpr_single_instruction()
410 HReg src = lookupIRTemp(env, e->Iex.Unop.arg->Iex.RdTmp.tmp); in iselIntExpr_single_instruction()
430 ISelEnv* env, in doHelperCall() argument
568 vassert(typeOfIRExpr(env->type_env, args[i]) == Ity_I64); in doHelperCall()
571 = iselIntExpr_single_instruction( env, argregs[i], args[i] ); in doHelperCall()
579 addInstr(env, fastinstrs[i]); in doHelperCall()
600 r_vecRetAddr = newVRegI(env); in doHelperCall()
601 sub_from_rsp(env, 16); in doHelperCall()
602 addInstr(env, mk_iMOVsd_RR( hregAMD64_RSP(), r_vecRetAddr )); in doHelperCall()
605 r_vecRetAddr = newVRegI(env); in doHelperCall()
606 sub_from_rsp(env, 32); in doHelperCall()
607 addInstr(env, mk_iMOVsd_RR( hregAMD64_RSP(), r_vecRetAddr )); in doHelperCall()
614 tmpregs[i] = newVRegI(env); in doHelperCall()
615 addInstr(env, mk_iMOVsd_RR( hregAMD64_RBP(), tmpregs[i])); in doHelperCall()
626 vassert(typeOfIRExpr(env->type_env, args[i]) == Ity_I64); in doHelperCall()
627 tmpregs[i] = iselIntExpr_R(env, args[i]); in doHelperCall()
642 cc = iselCondCode( env, guard ); in doHelperCall()
650 addInstr( env, mk_iMOVsd_RR( tmpregs[i], argregs[i] ) ); in doHelperCall()
692 addInstr(env, in doHelperCall()
702 AMD64AMode* genGuestArrayOffset ( ISelEnv* env, IRRegArray* descr, in genGuestArrayOffset() argument
723 tmp = newVRegI(env); in genGuestArrayOffset()
724 roff = iselIntExpr_R(env, off); in genGuestArrayOffset()
725 addInstr(env, mk_iMOVsd_RR(roff, tmp)); in genGuestArrayOffset()
730 addInstr(env, in genGuestArrayOffset()
733 addInstr(env, in genGuestArrayOffset()
744 void set_SSE_rounding_default ( ISelEnv* env ) in set_SSE_rounding_default() argument
751 addInstr(env, AMD64Instr_Push(AMD64RMI_Imm(DEFAULT_MXCSR))); in set_SSE_rounding_default()
752 addInstr(env, AMD64Instr_LdMXCSR(zero_rsp)); in set_SSE_rounding_default()
753 add_to_rsp(env, 8); in set_SSE_rounding_default()
759 void set_FPU_rounding_default ( ISelEnv* env ) in set_FPU_rounding_default() argument
765 addInstr(env, AMD64Instr_Alu64M( in set_FPU_rounding_default()
767 addInstr(env, AMD64Instr_A87LdCW(m8_rsp)); in set_FPU_rounding_default()
777 void set_SSE_rounding_mode ( ISelEnv* env, IRExpr* mode ) in set_SSE_rounding_mode() argument
792 HReg reg = newVRegI(env); in set_SSE_rounding_mode()
794 addInstr(env, AMD64Instr_Alu64R(Aalu_MOV, AMD64RMI_Imm(3), reg)); in set_SSE_rounding_mode()
795 addInstr(env, AMD64Instr_Alu64R(Aalu_AND, in set_SSE_rounding_mode()
796 iselIntExpr_RMI(env, mode), reg)); in set_SSE_rounding_mode()
797 addInstr(env, AMD64Instr_Sh64(Ash_SHL, 13, reg)); in set_SSE_rounding_mode()
798 addInstr(env, AMD64Instr_Alu64R( in set_SSE_rounding_mode()
800 addInstr(env, AMD64Instr_Push(AMD64RMI_Reg(reg))); in set_SSE_rounding_mode()
801 addInstr(env, AMD64Instr_LdMXCSR(zero_rsp)); in set_SSE_rounding_mode()
802 add_to_rsp(env, 8); in set_SSE_rounding_mode()
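
set_SSE_rounding_mode() masks the IR rounding-mode value to two bits, shifts it into the MXCSR rounding-control field (bits 13..14), presumably ORs in the default MXCSR on the truncated line, then pushes the result and reloads it with LDMXCSR. The bit arithmetic on its own (0x1F80 is the conventional all-exceptions-masked MXCSR; the names here are illustrative):

   #include <stdio.h>

   #define DEFAULT_MXCSR 0x1F80u   /* exceptions masked, round to nearest */

   /* Rounding control occupies MXCSR bits 13..14:
      00 = nearest, 01 = toward -inf, 10 = toward +inf, 11 = toward zero. */
   static unsigned int mxcsr_with_rounding ( unsigned int rm )
   {
      return DEFAULT_MXCSR | ((rm & 3u) << 13);
   }

   int main ( void )
   {
      for (unsigned int rm = 0; rm < 4; rm++)
         printf("rm=%u -> MXCSR=0x%04X\n", rm, mxcsr_with_rounding(rm));
      return 0;
   }
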
812 void set_FPU_rounding_mode ( ISelEnv* env, IRExpr* mode ) in set_FPU_rounding_mode() argument
814 HReg rrm = iselIntExpr_R(env, mode); in set_FPU_rounding_mode()
815 HReg rrm2 = newVRegI(env); in set_FPU_rounding_mode()
825 addInstr(env, mk_iMOVsd_RR(rrm, rrm2)); in set_FPU_rounding_mode()
826 addInstr(env, AMD64Instr_Alu64R(Aalu_AND, AMD64RMI_Imm(3), rrm2)); in set_FPU_rounding_mode()
827 addInstr(env, AMD64Instr_Sh64(Ash_SHL, 10, rrm2)); in set_FPU_rounding_mode()
828 addInstr(env, AMD64Instr_Alu64R(Aalu_OR, in set_FPU_rounding_mode()
830 addInstr(env, AMD64Instr_Alu64M(Aalu_MOV, in set_FPU_rounding_mode()
832 addInstr(env, AMD64Instr_A87LdCW(m8_rsp)); in set_FPU_rounding_mode()
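
set_FPU_rounding_mode() does the analogous dance for the x87 unit: mask the mode to two bits, shift it into the control-word rounding field (bits 10..11), OR it into a base control word, store that to -8(%rsp) and load it with FLDCW. A sketch of the value being built (0x027F, all exceptions masked with 53-bit precision, is a plausible base; the constant VEX actually uses is not visible in these matches):

   /* x87 control word: rounding control in bits 10..11, same two-bit
      encoding as the MXCSR field above. The base value is an assumption. */
   static unsigned short fpcw_with_rounding ( unsigned int rm )
   {
      return (unsigned short)(0x027F | ((rm & 3u) << 10));
   }
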
838 static HReg generate_zeroes_V128 ( ISelEnv* env ) in generate_zeroes_V128() argument
840 HReg dst = newVRegV(env); in generate_zeroes_V128()
841 addInstr(env, AMD64Instr_SseReRg(Asse_XOR, dst, dst)); in generate_zeroes_V128()
847 static HReg generate_ones_V128 ( ISelEnv* env ) in generate_ones_V128() argument
849 HReg dst = newVRegV(env); in generate_ones_V128()
850 addInstr(env, AMD64Instr_SseReRg(Asse_CMPEQ32, dst, dst)); in generate_ones_V128()
858 static HReg do_sse_NotV128 ( ISelEnv* env, HReg src ) in do_sse_NotV128() argument
860 HReg dst = generate_ones_V128(env); in do_sse_NotV128()
861 addInstr(env, AMD64Instr_SseReRg(Asse_XOR, src, dst)); in do_sse_NotV128()
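
generate_ones_V128() exploits the fact that PCMPEQD of a register with itself writes all-ones into every lane, and do_sse_NotV128() then gets bitwise NOT by XORing the operand with that all-ones vector. The same two tricks written with SSE2 intrinsics (a sketch, not the VEX code):

   #include <emmintrin.h>

   /* Every lane compares equal to itself, and PCMPEQD writes 0xFFFFFFFF
      for "equal", so this yields an all-ones vector. */
   static __m128i ones_v128 ( void )
   {
      __m128i z = _mm_setzero_si128();
      return _mm_cmpeq_epi32(z, z);
   }

   /* Bitwise NOT as XOR with all-ones, mirroring do_sse_NotV128(). */
   static __m128i not_v128 ( __m128i src )
   {
      return _mm_xor_si128(src, ones_v128());
   }
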
899 static HReg iselIntExpr_R ( ISelEnv* env, const IRExpr* e ) in iselIntExpr_R() argument
901 HReg r = iselIntExpr_R_wrk(env, e); in iselIntExpr_R()
912 static HReg iselIntExpr_R_wrk ( ISelEnv* env, const IRExpr* e ) in iselIntExpr_R_wrk() argument
923 IRType ty = typeOfIRExpr(env->type_env,e); in iselIntExpr_R_wrk()
933 return lookupIRTemp(env, e->Iex.RdTmp.tmp); in iselIntExpr_R_wrk()
938 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
939 AMD64AMode* amode = iselIntExpr_AMode ( env, e->Iex.Load.addr ); in iselIntExpr_R_wrk()
946 addInstr(env, AMD64Instr_Alu64R(Aalu_MOV, in iselIntExpr_R_wrk()
951 addInstr(env, AMD64Instr_LoadEX(4,False,amode,dst)); in iselIntExpr_R_wrk()
955 addInstr(env, AMD64Instr_LoadEX(2,False,amode,dst)); in iselIntExpr_R_wrk()
959 addInstr(env, AMD64Instr_LoadEX(1,False,amode,dst)); in iselIntExpr_R_wrk()
974 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
975 HReg reg = iselIntExpr_R(env, e->Iex.Binop.arg2); in iselIntExpr_R_wrk()
976 addInstr(env, mk_iMOVsd_RR(reg,dst)); in iselIntExpr_R_wrk()
977 addInstr(env, AMD64Instr_Unary64(Aun_NEG,dst)); in iselIntExpr_R_wrk()
1001 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1002 HReg reg = iselIntExpr_R(env, e->Iex.Binop.arg1); in iselIntExpr_R_wrk()
1003 AMD64RMI* rmi = iselIntExpr_RMI(env, e->Iex.Binop.arg2); in iselIntExpr_R_wrk()
1004 addInstr(env, mk_iMOVsd_RR(reg,dst)); in iselIntExpr_R_wrk()
1005 addInstr(env, AMD64Instr_Alu64R(aluOp, rmi, dst)); in iselIntExpr_R_wrk()
1021 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1024 HReg regL = iselIntExpr_R(env, e->Iex.Binop.arg1); in iselIntExpr_R_wrk()
1025 addInstr(env, mk_iMOVsd_RR(regL,dst)); in iselIntExpr_R_wrk()
1034 addInstr(env, AMD64Instr_Alu64R( in iselIntExpr_R_wrk()
1038 addInstr(env, AMD64Instr_Alu64R( in iselIntExpr_R_wrk()
1042 addInstr(env, AMD64Instr_MovxLQ(False, dst, dst)); in iselIntExpr_R_wrk()
1045 addInstr(env, AMD64Instr_Sh64(Ash_SHL, 56, dst)); in iselIntExpr_R_wrk()
1046 addInstr(env, AMD64Instr_Sh64(Ash_SAR, 56, dst)); in iselIntExpr_R_wrk()
1049 addInstr(env, AMD64Instr_Sh64(Ash_SHL, 48, dst)); in iselIntExpr_R_wrk()
1050 addInstr(env, AMD64Instr_Sh64(Ash_SAR, 48, dst)); in iselIntExpr_R_wrk()
1053 addInstr(env, AMD64Instr_MovxLQ(True, dst, dst)); in iselIntExpr_R_wrk()
1070 addInstr(env, AMD64Instr_Sh64(shOp, nshift, dst)); in iselIntExpr_R_wrk()
1073 HReg regR = iselIntExpr_R(env, e->Iex.Binop.arg2); in iselIntExpr_R_wrk()
1074 addInstr(env, mk_iMOVsd_RR(regR,hregAMD64_RCX())); in iselIntExpr_R_wrk()
1075 addInstr(env, AMD64Instr_Sh64(shOp, 0/* %cl */, dst)); in iselIntExpr_R_wrk()
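
The SHL/SAR pairs a few lines up (shift by 56, then by 48) re-extend a narrow result that is being carried in a full 64-bit register: shifting left by 64-N and arithmetic-shifting back sign-extends the low N bits, while MovxLQ covers the 32-bit cases. The same identity in C (this sketch assumes two's-complement wrap-around on the conversion and an arithmetic right shift, which is exactly what the emitted SHL/SAR pair guarantees in hardware):

   /* Sign-extend the low 8 or 16 bits of a 64-bit value in place. */
   static long long sext_low8 ( unsigned long long x )
   {
      return (long long)(x << 56) >> 56;
   }

   static long long sext_low16 ( unsigned long long x )
   {
      return (long long)(x << 48) >> 48;
   }
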
1223 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1224 HReg argL = iselIntExpr_R(env, e->Iex.Binop.arg1); in iselIntExpr_R_wrk()
1225 HReg argR = iselIntExpr_R(env, e->Iex.Binop.arg2); in iselIntExpr_R_wrk()
1227 addInstr(env, AMD64Instr_MovxLQ(False, argR, argR)); in iselIntExpr_R_wrk()
1228 addInstr(env, mk_iMOVsd_RR(argL, hregAMD64_RDI()) ); in iselIntExpr_R_wrk()
1229 addInstr(env, mk_iMOVsd_RR(argR, hregAMD64_RSI()) ); in iselIntExpr_R_wrk()
1230 addInstr(env, AMD64Instr_Call( Acc_ALWAYS, (ULong)fn, 2, in iselIntExpr_R_wrk()
1232 addInstr(env, mk_iMOVsd_RR(hregAMD64_RAX(), dst)); in iselIntExpr_R_wrk()
1239 HReg src1 = iselIntExpr_R(env, e->Iex.Binop.arg1); in iselIntExpr_R_wrk()
1240 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1241 HReg src2 = iselIntExpr_R(env, e->Iex.Binop.arg2); in iselIntExpr_R_wrk()
1242 addInstr(env, mk_iMOVsd_RR(src1, dst)); in iselIntExpr_R_wrk()
1243 addInstr(env, AMD64Instr_Alu32R(Aalu_CMP, AMD64RMI_Reg(src2), dst)); in iselIntExpr_R_wrk()
1244 addInstr(env, AMD64Instr_CMov64(Acc_B, src2, dst)); in iselIntExpr_R_wrk()
1255 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1257 AMD64RM* rmRight = iselIntExpr_RM(env, e->Iex.Binop.arg2); in iselIntExpr_R_wrk()
1260 HReg left64 = iselIntExpr_R(env, e->Iex.Binop.arg1); in iselIntExpr_R_wrk()
1261 addInstr(env, mk_iMOVsd_RR(left64, rdx)); in iselIntExpr_R_wrk()
1262 addInstr(env, mk_iMOVsd_RR(left64, rax)); in iselIntExpr_R_wrk()
1263 addInstr(env, AMD64Instr_Sh64(Ash_SHR, 32, rdx)); in iselIntExpr_R_wrk()
1264 addInstr(env, AMD64Instr_Div(syned, 4, rmRight)); in iselIntExpr_R_wrk()
1265 addInstr(env, AMD64Instr_MovxLQ(False, rdx, rdx)); in iselIntExpr_R_wrk()
1266 addInstr(env, AMD64Instr_MovxLQ(False, rax, rax)); in iselIntExpr_R_wrk()
1267 addInstr(env, AMD64Instr_Sh64(Ash_SHL, 32, rdx)); in iselIntExpr_R_wrk()
1268 addInstr(env, mk_iMOVsd_RR(rax, dst)); in iselIntExpr_R_wrk()
1269 addInstr(env, AMD64Instr_Alu64R(Aalu_OR, AMD64RMI_Reg(rdx), dst)); in iselIntExpr_R_wrk()
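
The SHR/Div/MovxLQ/SHL/OR sequence ending here lowers the divide-64-by-32 IR operation: the dividend is split across %rdx:%rax, a 4-byte DIV (or IDIV, depending on `syned`) is issued, and the 32-bit remainder and quotient are zero-extended and repacked as (remainder << 32) | quotient. A purely arithmetic model for the unsigned case; it ignores the #DE fault a real DIV raises when the quotient does not fit in 32 bits:

   /* Hypothetical C model of the DivMod 64-by-32 lowering shown above. */
   static unsigned long long divmod_u64_by_32 ( unsigned long long n,
                                                unsigned int d )
   {
      unsigned int quot = (unsigned int)(n / d);
      unsigned int rem  = (unsigned int)(n % d);
      return ((unsigned long long)rem << 32) | quot;
   }
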
1274 HReg hi32 = newVRegI(env); in iselIntExpr_R_wrk()
1275 HReg lo32 = newVRegI(env); in iselIntExpr_R_wrk()
1276 HReg hi32s = iselIntExpr_R(env, e->Iex.Binop.arg1); in iselIntExpr_R_wrk()
1277 HReg lo32s = iselIntExpr_R(env, e->Iex.Binop.arg2); in iselIntExpr_R_wrk()
1278 addInstr(env, mk_iMOVsd_RR(hi32s, hi32)); in iselIntExpr_R_wrk()
1279 addInstr(env, mk_iMOVsd_RR(lo32s, lo32)); in iselIntExpr_R_wrk()
1280 addInstr(env, AMD64Instr_Sh64(Ash_SHL, 32, hi32)); in iselIntExpr_R_wrk()
1281 addInstr(env, AMD64Instr_MovxLQ(False, lo32, lo32)); in iselIntExpr_R_wrk()
1282 addInstr(env, AMD64Instr_Alu64R( in iselIntExpr_R_wrk()
1288 HReg hi16 = newVRegI(env); in iselIntExpr_R_wrk()
1289 HReg lo16 = newVRegI(env); in iselIntExpr_R_wrk()
1290 HReg hi16s = iselIntExpr_R(env, e->Iex.Binop.arg1); in iselIntExpr_R_wrk()
1291 HReg lo16s = iselIntExpr_R(env, e->Iex.Binop.arg2); in iselIntExpr_R_wrk()
1292 addInstr(env, mk_iMOVsd_RR(hi16s, hi16)); in iselIntExpr_R_wrk()
1293 addInstr(env, mk_iMOVsd_RR(lo16s, lo16)); in iselIntExpr_R_wrk()
1294 addInstr(env, AMD64Instr_Sh64(Ash_SHL, 16, hi16)); in iselIntExpr_R_wrk()
1295 addInstr(env, AMD64Instr_Alu64R( in iselIntExpr_R_wrk()
1297 addInstr(env, AMD64Instr_Alu64R( in iselIntExpr_R_wrk()
1303 HReg hi8 = newVRegI(env); in iselIntExpr_R_wrk()
1304 HReg lo8 = newVRegI(env); in iselIntExpr_R_wrk()
1305 HReg hi8s = iselIntExpr_R(env, e->Iex.Binop.arg1); in iselIntExpr_R_wrk()
1306 HReg lo8s = iselIntExpr_R(env, e->Iex.Binop.arg2); in iselIntExpr_R_wrk()
1307 addInstr(env, mk_iMOVsd_RR(hi8s, hi8)); in iselIntExpr_R_wrk()
1308 addInstr(env, mk_iMOVsd_RR(lo8s, lo8)); in iselIntExpr_R_wrk()
1309 addInstr(env, AMD64Instr_Sh64(Ash_SHL, 8, hi8)); in iselIntExpr_R_wrk()
1310 addInstr(env, AMD64Instr_Alu64R( in iselIntExpr_R_wrk()
1312 addInstr(env, AMD64Instr_Alu64R( in iselIntExpr_R_wrk()
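
The three near-identical blocks above handle the 32HLto64, 16HLto32 and 8HLto16 concatenation ops: shift the high operand left by the element width, keep only the low bits of the other operand (the truncated Alu64R lines presumably do the masking), and OR the halves together. In plain C:

   /* C models of the HL-concatenation idiom used above. */
   static unsigned long long hl_to_64 ( unsigned int hi, unsigned int lo )
   {
      return ((unsigned long long)hi << 32) | lo;
   }

   static unsigned int hl_to_32 ( unsigned short hi, unsigned short lo )
   {
      return ((unsigned int)hi << 16) | lo;
   }

   static unsigned short hl_to_16 ( unsigned char hi, unsigned char lo )
   {
      return (unsigned short)(((unsigned int)hi << 8) | lo);
   }
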
1323 HReg a32 = newVRegI(env); in iselIntExpr_R_wrk()
1324 HReg b32 = newVRegI(env); in iselIntExpr_R_wrk()
1325 HReg a32s = iselIntExpr_R(env, e->Iex.Binop.arg1); in iselIntExpr_R_wrk()
1326 HReg b32s = iselIntExpr_R(env, e->Iex.Binop.arg2); in iselIntExpr_R_wrk()
1339 addInstr(env, mk_iMOVsd_RR(a32s, a32)); in iselIntExpr_R_wrk()
1340 addInstr(env, mk_iMOVsd_RR(b32s, b32)); in iselIntExpr_R_wrk()
1341 addInstr(env, AMD64Instr_Sh64(Ash_SHL, shift, a32)); in iselIntExpr_R_wrk()
1342 addInstr(env, AMD64Instr_Sh64(Ash_SHL, shift, b32)); in iselIntExpr_R_wrk()
1343 addInstr(env, AMD64Instr_Sh64(shr_op, shift, a32)); in iselIntExpr_R_wrk()
1344 addInstr(env, AMD64Instr_Sh64(shr_op, shift, b32)); in iselIntExpr_R_wrk()
1345 addInstr(env, AMD64Instr_Alu64R(Aalu_MUL, AMD64RMI_Reg(a32), b32)); in iselIntExpr_R_wrk()
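
For the widening multiplies just above, each 32-bit operand is extended in place with a shift-left/shift-right pair (`shr_op` is SAR for the signed variant, SHR for the unsigned one), after which a single 64-bit multiply produces the full double-width product. The equivalent C that the shifts are emulating:

   /* 32 x 32 -> 64 widening multiplies, signed and unsigned. */
   static long long mulls32 ( int a, int b )
   {
      return (long long)a * (long long)b;
   }

   static unsigned long long mullu32 ( unsigned int a, unsigned int b )
   {
      return (unsigned long long)a * (unsigned long long)b;
   }
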
1350 HReg fL = iselDblExpr(env, e->Iex.Binop.arg1); in iselIntExpr_R_wrk()
1351 HReg fR = iselDblExpr(env, e->Iex.Binop.arg2); in iselIntExpr_R_wrk()
1352 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1353 addInstr(env, AMD64Instr_SseUComIS(8,fL,fR,dst)); in iselIntExpr_R_wrk()
1356 addInstr(env, AMD64Instr_Alu64R(Aalu_AND, AMD64RMI_Imm(0x45), dst)); in iselIntExpr_R_wrk()
1363 HReg rf = iselDblExpr(env, e->Iex.Binop.arg2); in iselIntExpr_R_wrk()
1364 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1365 set_SSE_rounding_mode( env, e->Iex.Binop.arg1 ); in iselIntExpr_R_wrk()
1366 addInstr(env, AMD64Instr_SseSF2SI( 8, szD, rf, dst )); in iselIntExpr_R_wrk()
1367 set_SSE_rounding_default(env); in iselIntExpr_R_wrk()
1383 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1384 HReg src = iselIntExpr_R(env, expr64); in iselIntExpr_R_wrk()
1385 addInstr(env, mk_iMOVsd_RR(src,dst) ); in iselIntExpr_R_wrk()
1386 addInstr(env, AMD64Instr_Alu64R(Aalu_AND, in iselIntExpr_R_wrk()
1398 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1399 AMD64AMode* amode = iselIntExpr_AMode ( env, mi.bindee[0] ); in iselIntExpr_R_wrk()
1400 addInstr(env, AMD64Instr_LoadEX(1,False,amode,dst)); in iselIntExpr_R_wrk()
1411 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1412 AMD64AMode* amode = iselIntExpr_AMode ( env, mi.bindee[0] ); in iselIntExpr_R_wrk()
1413 addInstr(env, AMD64Instr_LoadEX(2,False,amode,dst)); in iselIntExpr_R_wrk()
1437 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1438 HReg reg = iselIntExpr_R(env, argL); in iselIntExpr_R_wrk()
1439 AMD64RMI* rmi = iselIntExpr_RMI(env, argR); in iselIntExpr_R_wrk()
1440 addInstr(env, mk_iMOVsd_RR(reg,dst)); in iselIntExpr_R_wrk()
1441 addInstr(env, AMD64Instr_Alu32R(aluOp, rmi, dst)); in iselIntExpr_R_wrk()
1451 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1452 HReg src = iselIntExpr_R(env, e->Iex.Unop.arg); in iselIntExpr_R_wrk()
1453 addInstr(env, AMD64Instr_MovxLQ(e->Iex.Unop.op == Iop_32Sto64, in iselIntExpr_R_wrk()
1459 iselInt128Expr(&rHi,&rLo, env, e->Iex.Unop.arg); in iselIntExpr_R_wrk()
1464 iselInt128Expr(&rHi,&rLo, env, e->Iex.Unop.arg); in iselIntExpr_R_wrk()
1472 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1473 HReg src = iselIntExpr_R(env, e->Iex.Unop.arg); in iselIntExpr_R_wrk()
1477 addInstr(env, mk_iMOVsd_RR(src,dst) ); in iselIntExpr_R_wrk()
1478 addInstr(env, AMD64Instr_Alu64R(Aalu_AND, in iselIntExpr_R_wrk()
1487 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1488 HReg src = iselIntExpr_R(env, e->Iex.Unop.arg); in iselIntExpr_R_wrk()
1492 addInstr(env, mk_iMOVsd_RR(src,dst) ); in iselIntExpr_R_wrk()
1493 addInstr(env, AMD64Instr_Sh64(Ash_SHL, amt, dst)); in iselIntExpr_R_wrk()
1494 addInstr(env, AMD64Instr_Sh64(Ash_SAR, amt, dst)); in iselIntExpr_R_wrk()
1501 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1502 HReg src = iselIntExpr_R(env, e->Iex.Unop.arg); in iselIntExpr_R_wrk()
1503 addInstr(env, mk_iMOVsd_RR(src,dst) ); in iselIntExpr_R_wrk()
1504 addInstr(env, AMD64Instr_Unary64(Aun_NOT,dst)); in iselIntExpr_R_wrk()
1510 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1511 HReg src = iselIntExpr_R(env, e->Iex.Unop.arg); in iselIntExpr_R_wrk()
1519 addInstr(env, mk_iMOVsd_RR(src,dst) ); in iselIntExpr_R_wrk()
1520 addInstr(env, AMD64Instr_Sh64(Ash_SHR, shift, dst)); in iselIntExpr_R_wrk()
1526 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1527 AMD64CondCode cond = iselCondCode(env, e->Iex.Unop.arg); in iselIntExpr_R_wrk()
1528 addInstr(env, AMD64Instr_Set64(cond,dst)); in iselIntExpr_R_wrk()
1536 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1537 AMD64CondCode cond = iselCondCode(env, e->Iex.Unop.arg); in iselIntExpr_R_wrk()
1538 addInstr(env, AMD64Instr_Set64(cond,dst)); in iselIntExpr_R_wrk()
1539 addInstr(env, AMD64Instr_Sh64(Ash_SHL, 63, dst)); in iselIntExpr_R_wrk()
1540 addInstr(env, AMD64Instr_Sh64(Ash_SAR, 63, dst)); in iselIntExpr_R_wrk()
1545 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1546 HReg src = iselIntExpr_R(env, e->Iex.Unop.arg); in iselIntExpr_R_wrk()
1547 addInstr(env, AMD64Instr_Bsfr64(True,src,dst)); in iselIntExpr_R_wrk()
1554 HReg tmp = newVRegI(env); in iselIntExpr_R_wrk()
1555 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1556 HReg src = iselIntExpr_R(env, e->Iex.Unop.arg); in iselIntExpr_R_wrk()
1557 addInstr(env, AMD64Instr_Bsfr64(False,src,tmp)); in iselIntExpr_R_wrk()
1558 addInstr(env, AMD64Instr_Alu64R(Aalu_MOV, in iselIntExpr_R_wrk()
1560 addInstr(env, AMD64Instr_Alu64R(Aalu_SUB, in iselIntExpr_R_wrk()
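
The Bsfr64 hits above cover the count-trailing/leading-zero ops: BSF (the `True` case) returns the index of the lowest set bit, which is the trailing-zero count directly, while BSR returns the index of the highest set bit, so the truncated MOV/SUB lines presumably compute 63 minus that index to get the leading-zero count. BSF/BSR leave their destination undefined when the source is zero, so the reference model below only claims to match for nonzero inputs:

   /* clz for x != 0, matching 63 - BSR(x). */
   static int clz64 ( unsigned long long x )
   {
      int msb = 63;
      while (!((x >> msb) & 1))
         msb--;
      return 63 - msb;
   }
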
1566 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1567 HReg src = iselIntExpr_R(env, e->Iex.Unop.arg); in iselIntExpr_R_wrk()
1568 addInstr(env, mk_iMOVsd_RR(src,dst)); in iselIntExpr_R_wrk()
1569 addInstr(env, AMD64Instr_Unary64(Aun_NEG,dst)); in iselIntExpr_R_wrk()
1570 addInstr(env, AMD64Instr_Alu64R(Aalu_OR, in iselIntExpr_R_wrk()
1572 addInstr(env, AMD64Instr_Sh64(Ash_SAR, 63, dst)); in iselIntExpr_R_wrk()
1577 HReg src = newVRegI(env); in iselIntExpr_R_wrk()
1578 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1579 HReg pre = iselIntExpr_R(env, e->Iex.Unop.arg); in iselIntExpr_R_wrk()
1580 addInstr(env, mk_iMOVsd_RR(pre,src)); in iselIntExpr_R_wrk()
1581 addInstr(env, AMD64Instr_MovxLQ(False, src, src)); in iselIntExpr_R_wrk()
1582 addInstr(env, mk_iMOVsd_RR(src,dst)); in iselIntExpr_R_wrk()
1583 addInstr(env, AMD64Instr_Unary64(Aun_NEG,dst)); in iselIntExpr_R_wrk()
1584 addInstr(env, AMD64Instr_Alu64R(Aalu_OR, in iselIntExpr_R_wrk()
1586 addInstr(env, AMD64Instr_Sh64(Ash_SAR, 63, dst)); in iselIntExpr_R_wrk()
1594 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1595 HReg src = iselIntExpr_R(env, e->Iex.Unop.arg); in iselIntExpr_R_wrk()
1596 addInstr(env, mk_iMOVsd_RR(src, dst)); in iselIntExpr_R_wrk()
1597 addInstr(env, AMD64Instr_Unary64(Aun_NEG, dst)); in iselIntExpr_R_wrk()
1598 addInstr(env, AMD64Instr_Alu64R(Aalu_OR, AMD64RMI_Reg(src), dst)); in iselIntExpr_R_wrk()
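
The NEG/OR/SAR sequences above compute a widening "is nonzero" mask: for any x, (x | -x) has its sign bit set exactly when x is nonzero (the OR-with-negation on its own is the "Left" family of ops), and the arithmetic shift by 63 then smears that bit across the whole register, giving 0 or all-ones. In C (the cast-and-shift assumes an arithmetic right shift, matching the SAR actually emitted):

   /* 0 if x == 0, all-ones otherwise. */
   static unsigned long long cmpw_nez64 ( unsigned long long x )
   {
      unsigned long long left = x | (0ULL - x);         /* x | -x */
      return (unsigned long long)((long long)left >> 63);
   }
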
1603 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1604 HReg vec = iselVecExpr(env, e->Iex.Unop.arg); in iselIntExpr_R_wrk()
1606 addInstr(env, AMD64Instr_SseLdSt(False/*store*/, 16, vec, rsp_m16)); in iselIntExpr_R_wrk()
1607 addInstr(env, AMD64Instr_LoadEX(4, False/*z-widen*/, rsp_m16, dst)); in iselIntExpr_R_wrk()
1614 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1617 HReg vec = iselVecExpr(env, e->Iex.Unop.arg); in iselIntExpr_R_wrk()
1620 addInstr(env, AMD64Instr_SseLdSt(False/*store*/, in iselIntExpr_R_wrk()
1622 addInstr(env, AMD64Instr_Alu64R( Aalu_MOV, in iselIntExpr_R_wrk()
1630 iselDVecExpr(&vHi, &vLo, env, e->Iex.Unop.arg); in iselIntExpr_R_wrk()
1642 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1646 addInstr(env, AMD64Instr_SseLdSt(False/*store*/, in iselIntExpr_R_wrk()
1648 addInstr(env, AMD64Instr_Alu64R( Aalu_MOV, in iselIntExpr_R_wrk()
1658 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1659 HReg src = iselDblExpr(env, e->Iex.Unop.arg); in iselIntExpr_R_wrk()
1661 set_SSE_rounding_default(env); in iselIntExpr_R_wrk()
1662 addInstr(env, AMD64Instr_SseLdSt(False/*store*/, 8, src, m8_rsp)); in iselIntExpr_R_wrk()
1663 addInstr(env, AMD64Instr_Alu64R( in iselIntExpr_R_wrk()
1673 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1674 HReg src = iselFltExpr(env, e->Iex.Unop.arg); in iselIntExpr_R_wrk()
1676 set_SSE_rounding_default(env); in iselIntExpr_R_wrk()
1677 addInstr(env, AMD64Instr_SseLdSt(False/*store*/, 4, src, m8_rsp)); in iselIntExpr_R_wrk()
1678 addInstr(env, AMD64Instr_LoadEX(4, False/*unsigned*/, m8_rsp, dst )); in iselIntExpr_R_wrk()
1689 return iselIntExpr_R(env, e->Iex.Unop.arg); in iselIntExpr_R_wrk()
1696 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1697 HReg arg = iselIntExpr_R(env, e->Iex.Unop.arg); in iselIntExpr_R_wrk()
1699 addInstr(env, mk_iMOVsd_RR(arg, hregAMD64_RDI()) ); in iselIntExpr_R_wrk()
1700 addInstr(env, AMD64Instr_Call( Acc_ALWAYS, (ULong)fn, in iselIntExpr_R_wrk()
1707 addInstr(env, AMD64Instr_MovxLQ(False, hregAMD64_RAX(), dst)); in iselIntExpr_R_wrk()
1715 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1716 HReg vec = iselVecExpr(env, e->Iex.Unop.arg); in iselIntExpr_R_wrk()
1721 addInstr(env, AMD64Instr_SseLdSt(False/*store*/, in iselIntExpr_R_wrk()
1724 addInstr(env, AMD64Instr_Alu64R( Aalu_MOV, in iselIntExpr_R_wrk()
1728 addInstr(env, AMD64Instr_Alu64R( Aalu_MOV, in iselIntExpr_R_wrk()
1731 addInstr(env, AMD64Instr_Call( Acc_ALWAYS, (ULong)fn, in iselIntExpr_R_wrk()
1738 addInstr(env, AMD64Instr_MovxLQ(False, hregAMD64_RAX(), dst)); in iselIntExpr_R_wrk()
1763 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1764 HReg arg = iselIntExpr_R(env, e->Iex.Unop.arg); in iselIntExpr_R_wrk()
1765 addInstr(env, mk_iMOVsd_RR(arg, hregAMD64_RDI()) ); in iselIntExpr_R_wrk()
1766 addInstr(env, AMD64Instr_Call( Acc_ALWAYS, (ULong)fn, 1, in iselIntExpr_R_wrk()
1768 addInstr(env, mk_iMOVsd_RR(hregAMD64_RAX(), dst)); in iselIntExpr_R_wrk()
1778 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1779 addInstr(env, AMD64Instr_Alu64R( in iselIntExpr_R_wrk()
1788 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1789 addInstr(env, AMD64Instr_LoadEX( in iselIntExpr_R_wrk()
1802 env, e->Iex.GetI.descr, in iselIntExpr_R_wrk()
1804 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1806 addInstr(env, AMD64Instr_LoadEX( 1, False, am, dst )); in iselIntExpr_R_wrk()
1810 addInstr(env, AMD64Instr_Alu64R( Aalu_MOV, AMD64RMI_Mem(am), dst )); in iselIntExpr_R_wrk()
1818 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1829 doHelperCall( &addToSp, &rloc, env, NULL/*guard*/, in iselIntExpr_R_wrk()
1838 addInstr(env, mk_iMOVsd_RR(hregAMD64_RAX(), dst)); in iselIntExpr_R_wrk()
1840 addInstr(env, AMD64Instr_MovxLQ(False, hregAMD64_RAX(), dst)); in iselIntExpr_R_wrk()
1849 HReg r = newVRegI(env); in iselIntExpr_R_wrk()
1850 addInstr(env, AMD64Instr_Imm64(e->Iex.Const.con->Ico.U64, r)); in iselIntExpr_R_wrk()
1853 AMD64RMI* rmi = iselIntExpr_RMI ( env, e ); in iselIntExpr_R_wrk()
1854 HReg r = newVRegI(env); in iselIntExpr_R_wrk()
1855 addInstr(env, AMD64Instr_Alu64R(Aalu_MOV, rmi, r)); in iselIntExpr_R_wrk()
1862 && typeOfIRExpr(env->type_env,e->Iex.ITE.cond) == Ity_I1) { in iselIntExpr_R_wrk()
1863 HReg r1 = iselIntExpr_R(env, e->Iex.ITE.iftrue); in iselIntExpr_R_wrk()
1864 HReg r0 = iselIntExpr_R(env, e->Iex.ITE.iffalse); in iselIntExpr_R_wrk()
1865 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1866 addInstr(env, mk_iMOVsd_RR(r1,dst)); in iselIntExpr_R_wrk()
1867 AMD64CondCode cc = iselCondCode(env, e->Iex.ITE.cond); in iselIntExpr_R_wrk()
1868 addInstr(env, AMD64Instr_CMov64(cc ^ 1, r0, dst)); in iselIntExpr_R_wrk()
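
The ITE (if-then-else) case just above is lowered without a branch: the if-true value is copied into the destination and then conditionally overwritten with the if-false value under the negated condition, which is what the `cc ^ 1` in the CMov64 expresses. As straight-line C:

   /* Branch-free select: start with iftrue, replace it when the
      negated condition fires. */
   static long long ite64 ( int cond, long long iftrue, long long iffalse )
   {
      long long dst = iftrue;
      if (!cond)                 /* cc ^ 1 */
         dst = iffalse;
      return dst;
   }
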
1882 HReg arg1 = iselDblExpr(env, triop->arg2); in iselIntExpr_R_wrk()
1883 HReg arg2 = iselDblExpr(env, triop->arg3); in iselIntExpr_R_wrk()
1884 HReg dst = newVRegI(env); in iselIntExpr_R_wrk()
1885 addInstr(env, AMD64Instr_A87Free(2)); in iselIntExpr_R_wrk()
1888 addInstr(env, AMD64Instr_SseLdSt(False/*store*/, 8, arg2, m8_rsp)); in iselIntExpr_R_wrk()
1889 addInstr(env, AMD64Instr_A87PushPop(m8_rsp, True/*push*/, 8)); in iselIntExpr_R_wrk()
1892 addInstr(env, AMD64Instr_SseLdSt(False/*store*/, 8, arg1, m8_rsp)); in iselIntExpr_R_wrk()
1893 addInstr(env, AMD64Instr_A87PushPop(m8_rsp, True/*push*/, 8)); in iselIntExpr_R_wrk()
1897 addInstr(env, AMD64Instr_A87FpOp(Afp_PREM)); in iselIntExpr_R_wrk()
1900 addInstr(env, AMD64Instr_A87FpOp(Afp_PREM1)); in iselIntExpr_R_wrk()
1907 addInstr(env, AMD64Instr_A87StSW(m8_rsp)); in iselIntExpr_R_wrk()
1908 addInstr(env, AMD64Instr_Alu64R(Aalu_MOV,AMD64RMI_Mem(m8_rsp),dst)); in iselIntExpr_R_wrk()
1909 addInstr(env, AMD64Instr_Alu64R(Aalu_AND,AMD64RMI_Imm(0x4700),dst)); in iselIntExpr_R_wrk()
1937 static AMD64AMode* iselIntExpr_AMode ( ISelEnv* env, const IRExpr* e ) in iselIntExpr_AMode() argument
1939 AMD64AMode* am = iselIntExpr_AMode_wrk(env, e); in iselIntExpr_AMode()
1945 static AMD64AMode* iselIntExpr_AMode_wrk ( ISelEnv* env, const IRExpr* e ) in iselIntExpr_AMode_wrk() argument
1949 IRType ty = typeOfIRExpr(env->type_env,e); in iselIntExpr_AMode_wrk()
1977 HReg r1 = iselIntExpr_R(env, expr1); in iselIntExpr_AMode_wrk()
1978 HReg r2 = iselIntExpr_R(env, expr2); in iselIntExpr_AMode_wrk()
1993 HReg r1 = iselIntExpr_R(env, e->Iex.Binop.arg1); in iselIntExpr_AMode_wrk()
1994 HReg r2 = iselIntExpr_R(env, e->Iex.Binop.arg2->Iex.Binop.arg1 ); in iselIntExpr_AMode_wrk()
2005 HReg r1 = iselIntExpr_R(env, e->Iex.Binop.arg1); in iselIntExpr_AMode_wrk()
2015 HReg r1 = iselIntExpr_R(env, e); in iselIntExpr_AMode_wrk()
2026 static AMD64RMI* iselIntExpr_RMI ( ISelEnv* env, const IRExpr* e ) in iselIntExpr_RMI() argument
2028 AMD64RMI* rmi = iselIntExpr_RMI_wrk(env, e); in iselIntExpr_RMI()
2046 static AMD64RMI* iselIntExpr_RMI_wrk ( ISelEnv* env, const IRExpr* e ) in iselIntExpr_RMI_wrk() argument
2048 IRType ty = typeOfIRExpr(env->type_env,e); in iselIntExpr_RMI_wrk()
2080 AMD64AMode* am = iselIntExpr_AMode(env, e->Iex.Load.addr); in iselIntExpr_RMI_wrk()
2086 HReg r = iselIntExpr_R ( env, e ); in iselIntExpr_RMI_wrk()
2098 static AMD64RI* iselIntExpr_RI ( ISelEnv* env, const IRExpr* e ) in iselIntExpr_RI() argument
2100 AMD64RI* ri = iselIntExpr_RI_wrk(env, e); in iselIntExpr_RI()
2115 static AMD64RI* iselIntExpr_RI_wrk ( ISelEnv* env, const IRExpr* e ) in iselIntExpr_RI_wrk() argument
2117 IRType ty = typeOfIRExpr(env->type_env,e); in iselIntExpr_RI_wrk()
2142 HReg r = iselIntExpr_R ( env, e ); in iselIntExpr_RI_wrk()
2154 static AMD64RM* iselIntExpr_RM ( ISelEnv* env, const IRExpr* e ) in iselIntExpr_RM() argument
2156 AMD64RM* rm = iselIntExpr_RM_wrk(env, e); in iselIntExpr_RM()
2172 static AMD64RM* iselIntExpr_RM_wrk ( ISelEnv* env, const IRExpr* e ) in iselIntExpr_RM_wrk() argument
2174 IRType ty = typeOfIRExpr(env->type_env,e); in iselIntExpr_RM_wrk()
2187 HReg r = iselIntExpr_R ( env, e ); in iselIntExpr_RM_wrk()
2199 static AMD64CondCode iselCondCode ( ISelEnv* env, const IRExpr* e ) in iselCondCode() argument
2202 return iselCondCode_wrk(env,e); in iselCondCode()
2206 static AMD64CondCode iselCondCode_wrk ( ISelEnv* env, const IRExpr* e ) in iselCondCode_wrk() argument
2211 vassert(typeOfIRExpr(env->type_env,e) == Ity_I1); in iselCondCode_wrk()
2215 HReg r64 = lookupIRTemp(env, e->Iex.RdTmp.tmp); in iselCondCode_wrk()
2216 HReg dst = newVRegI(env); in iselCondCode_wrk()
2217 addInstr(env, mk_iMOVsd_RR(r64,dst)); in iselCondCode_wrk()
2218 addInstr(env, AMD64Instr_Alu64R(Aalu_AND,AMD64RMI_Imm(1),dst)); in iselCondCode_wrk()
2228 r = newVRegI(env); in iselCondCode_wrk()
2229 addInstr(env, AMD64Instr_Alu64R(Aalu_MOV,AMD64RMI_Imm(0),r)); in iselCondCode_wrk()
2230 addInstr(env, AMD64Instr_Alu64R(Aalu_XOR,AMD64RMI_Reg(r),r)); in iselCondCode_wrk()
2237 return 1 ^ iselCondCode(env, e->Iex.Unop.arg); in iselCondCode_wrk()
2244 HReg reg = iselIntExpr_R(env, e->Iex.Unop.arg); in iselCondCode_wrk()
2245 addInstr(env, AMD64Instr_Test64(1,reg)); in iselCondCode_wrk()
2253 HReg reg = iselIntExpr_R(env, e->Iex.Unop.arg); in iselCondCode_wrk()
2254 addInstr(env, AMD64Instr_Test64(1,reg)); in iselCondCode_wrk()
2263 HReg r = iselIntExpr_R(env, e->Iex.Unop.arg); in iselCondCode_wrk()
2264 addInstr(env, AMD64Instr_Test64(0xFF,r)); in iselCondCode_wrk()
2273 HReg r = iselIntExpr_R(env, e->Iex.Unop.arg); in iselCondCode_wrk()
2274 addInstr(env, AMD64Instr_Test64(0xFFFF,r)); in iselCondCode_wrk()
2283 HReg r1 = iselIntExpr_R(env, e->Iex.Unop.arg); in iselCondCode_wrk()
2285 addInstr(env, AMD64Instr_Alu32R(Aalu_CMP,rmi2,r1)); in iselCondCode_wrk()
2297 HReg r0 = iselIntExpr_R(env, mi.bindee[0]); in iselCondCode_wrk()
2298 AMD64RMI* rmi1 = iselIntExpr_RMI(env, mi.bindee[1]); in iselCondCode_wrk()
2299 HReg tmp = newVRegI(env); in iselCondCode_wrk()
2300 addInstr(env, mk_iMOVsd_RR(r0, tmp)); in iselCondCode_wrk()
2301 addInstr(env, AMD64Instr_Alu64R(Aalu_OR,rmi1,tmp)); in iselCondCode_wrk()
2309 HReg r1 = iselIntExpr_R(env, e->Iex.Unop.arg); in iselCondCode_wrk()
2311 addInstr(env, AMD64Instr_Alu64R(Aalu_CMP,rmi2,r1)); in iselCondCode_wrk()
2324 HReg r1 = iselIntExpr_R(env, e->Iex.Binop.arg1); in iselCondCode_wrk()
2325 addInstr(env, AMD64Instr_Test64(0xFF,r1)); in iselCondCode_wrk()
2332 HReg r1 = iselIntExpr_R(env, e->Iex.Binop.arg1); in iselCondCode_wrk()
2333 AMD64RMI* rmi2 = iselIntExpr_RMI(env, e->Iex.Binop.arg2); in iselCondCode_wrk()
2334 HReg r = newVRegI(env); in iselCondCode_wrk()
2335 addInstr(env, mk_iMOVsd_RR(r1,r)); in iselCondCode_wrk()
2336 addInstr(env, AMD64Instr_Alu64R(Aalu_XOR,rmi2,r)); in iselCondCode_wrk()
2337 addInstr(env, AMD64Instr_Alu64R(Aalu_AND,AMD64RMI_Imm(0xFF),r)); in iselCondCode_wrk()
2352 HReg r1 = iselIntExpr_R(env, e->Iex.Binop.arg1); in iselCondCode_wrk()
2353 AMD64RMI* rmi2 = iselIntExpr_RMI(env, e->Iex.Binop.arg2); in iselCondCode_wrk()
2354 HReg r = newVRegI(env); in iselCondCode_wrk()
2355 addInstr(env, mk_iMOVsd_RR(r1,r)); in iselCondCode_wrk()
2356 addInstr(env, AMD64Instr_Alu64R(Aalu_XOR,rmi2,r)); in iselCondCode_wrk()
2357 addInstr(env, AMD64Instr_Alu64R(Aalu_AND,AMD64RMI_Imm(0xFFFF),r)); in iselCondCode_wrk()
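
The two XOR/AND blocks above implement the narrow equality comparisons: two values have equal low 8 (or 16) bits exactly when the masked XOR is zero, so the selector XORs the operands, ANDs with 0xFF or 0xFFFF, and reads the resulting Z flag as the condition. The same predicate in C:

   static int cmp_eq8 ( unsigned long long a, unsigned long long b )
   {
      return ((a ^ b) & 0xFFULL) == 0;
   }

   static int cmp_eq16 ( unsigned long long a, unsigned long long b )
   {
      return ((a ^ b) & 0xFFFFULL) == 0;
   }
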
2373 HReg tmp = newVRegI(env); in iselCondCode_wrk()
2380 doHelperCall( &addToSp, &rloc, env, NULL/*guard*/, in iselCondCode_wrk()
2387 addInstr(env, AMD64Instr_Imm64(con->Iex.Const.con->Ico.U64, tmp)); in iselCondCode_wrk()
2388 addInstr(env, AMD64Instr_Alu64R(Aalu_CMP, in iselCondCode_wrk()
2404 HReg r1 = iselIntExpr_R(env, e->Iex.Binop.arg1); in iselCondCode_wrk()
2405 AMD64RMI* rmi2 = iselIntExpr_RMI(env, e->Iex.Binop.arg2); in iselCondCode_wrk()
2406 addInstr(env, AMD64Instr_Alu64R(Aalu_CMP,rmi2,r1)); in iselCondCode_wrk()
2430 HReg r1 = iselIntExpr_R(env, e->Iex.Binop.arg1); in iselCondCode_wrk()
2431 AMD64RMI* rmi2 = iselIntExpr_RMI(env, e->Iex.Binop.arg2); in iselCondCode_wrk()
2432 addInstr(env, AMD64Instr_Alu32R(Aalu_CMP,rmi2,r1)); in iselCondCode_wrk()
2460 ISelEnv* env, const IRExpr* e ) in iselInt128Expr() argument
2462 iselInt128Expr_wrk(rHi, rLo, env, e); in iselInt128Expr()
2474 ISelEnv* env, const IRExpr* e ) in iselInt128Expr_wrk() argument
2477 vassert(typeOfIRExpr(env->type_env,e) == Ity_I128); in iselInt128Expr_wrk()
2481 lookupIRTempPair( rHi, rLo, env, e->Iex.RdTmp.tmp); in iselInt128Expr_wrk()
2494 HReg tLo = newVRegI(env); in iselInt128Expr_wrk()
2495 HReg tHi = newVRegI(env); in iselInt128Expr_wrk()
2497 AMD64RM* rmLeft = iselIntExpr_RM(env, e->Iex.Binop.arg1); in iselInt128Expr_wrk()
2498 HReg rRight = iselIntExpr_R(env, e->Iex.Binop.arg2); in iselInt128Expr_wrk()
2499 addInstr(env, mk_iMOVsd_RR(rRight, hregAMD64_RAX())); in iselInt128Expr_wrk()
2500 addInstr(env, AMD64Instr_MulL(syned, rmLeft)); in iselInt128Expr_wrk()
2502 addInstr(env, mk_iMOVsd_RR(hregAMD64_RDX(), tHi)); in iselInt128Expr_wrk()
2503 addInstr(env, mk_iMOVsd_RR(hregAMD64_RAX(), tLo)); in iselInt128Expr_wrk()
2515 HReg tLo = newVRegI(env); in iselInt128Expr_wrk()
2516 HReg tHi = newVRegI(env); in iselInt128Expr_wrk()
2518 AMD64RM* rmRight = iselIntExpr_RM(env, e->Iex.Binop.arg2); in iselInt128Expr_wrk()
2519 iselInt128Expr(&sHi,&sLo, env, e->Iex.Binop.arg1); in iselInt128Expr_wrk()
2520 addInstr(env, mk_iMOVsd_RR(sHi, hregAMD64_RDX())); in iselInt128Expr_wrk()
2521 addInstr(env, mk_iMOVsd_RR(sLo, hregAMD64_RAX())); in iselInt128Expr_wrk()
2522 addInstr(env, AMD64Instr_Div(syned, 8, rmRight)); in iselInt128Expr_wrk()
2523 addInstr(env, mk_iMOVsd_RR(hregAMD64_RDX(), tHi)); in iselInt128Expr_wrk()
2524 addInstr(env, mk_iMOVsd_RR(hregAMD64_RAX(), tLo)); in iselInt128Expr_wrk()
2532 *rHi = iselIntExpr_R(env, e->Iex.Binop.arg1); in iselInt128Expr_wrk()
2533 *rLo = iselIntExpr_R(env, e->Iex.Binop.arg2); in iselInt128Expr_wrk()
2553 static HReg iselFltExpr ( ISelEnv* env, const IRExpr* e ) in iselFltExpr() argument
2555 HReg r = iselFltExpr_wrk( env, e ); in iselFltExpr()
2565 static HReg iselFltExpr_wrk ( ISelEnv* env, const IRExpr* e ) in iselFltExpr_wrk() argument
2567 IRType ty = typeOfIRExpr(env->type_env,e); in iselFltExpr_wrk()
2571 return lookupIRTemp(env, e->Iex.RdTmp.tmp); in iselFltExpr_wrk()
2576 HReg res = newVRegV(env); in iselFltExpr_wrk()
2578 am = iselIntExpr_AMode(env, e->Iex.Load.addr); in iselFltExpr_wrk()
2579 addInstr(env, AMD64Instr_SseLdSt(True/*load*/, 4, res, am)); in iselFltExpr_wrk()
2588 HReg dst = newVRegV(env); in iselFltExpr_wrk()
2589 HReg src = iselDblExpr(env, e->Iex.Binop.arg2); in iselFltExpr_wrk()
2590 set_SSE_rounding_mode( env, e->Iex.Binop.arg1 ); in iselFltExpr_wrk()
2591 addInstr(env, AMD64Instr_SseSDSS(True/*D->S*/,src,dst)); in iselFltExpr_wrk()
2592 set_SSE_rounding_default( env ); in iselFltExpr_wrk()
2599 HReg res = newVRegV(env); in iselFltExpr_wrk()
2600 addInstr(env, AMD64Instr_SseLdSt( True/*load*/, 4, res, am )); in iselFltExpr_wrk()
2608 HReg dst = newVRegV(env); in iselFltExpr_wrk()
2609 HReg src = iselIntExpr_R(env, e->Iex.Unop.arg); in iselFltExpr_wrk()
2611 addInstr(env, AMD64Instr_Store(4, src, m4_rsp)); in iselFltExpr_wrk()
2612 addInstr(env, AMD64Instr_SseLdSt( True/*load*/, 4, dst, m4_rsp )); in iselFltExpr_wrk()
2618 HReg arg = iselFltExpr(env, e->Iex.Binop.arg2); in iselFltExpr_wrk()
2619 HReg dst = newVRegV(env); in iselFltExpr_wrk()
2625 set_FPU_rounding_mode( env, e->Iex.Binop.arg1 ); in iselFltExpr_wrk()
2627 addInstr(env, AMD64Instr_SseLdSt(False/*store*/, 4, arg, m8_rsp)); in iselFltExpr_wrk()
2628 addInstr(env, AMD64Instr_A87Free(1)); in iselFltExpr_wrk()
2629 addInstr(env, AMD64Instr_A87PushPop(m8_rsp, True/*push*/, 4)); in iselFltExpr_wrk()
2630 addInstr(env, AMD64Instr_A87FpOp(Afp_ROUND)); in iselFltExpr_wrk()
2631 addInstr(env, AMD64Instr_A87PushPop(m8_rsp, False/*pop*/, 4)); in iselFltExpr_wrk()
2632 addInstr(env, AMD64Instr_SseLdSt(True/*load*/, 4, dst, m8_rsp)); in iselFltExpr_wrk()
2635 set_FPU_rounding_default( env ); in iselFltExpr_wrk()
2644 HReg r1 = newVRegI(env); in iselFltExpr_wrk()
2645 HReg dst = newVRegV(env); in iselFltExpr_wrk()
2646 HReg tmp = newVRegV(env); in iselFltExpr_wrk()
2647 HReg src = iselFltExpr(env, e->Iex.Unop.arg); in iselFltExpr_wrk()
2649 addInstr(env, mk_vMOVsd_RR(src,tmp)); in iselFltExpr_wrk()
2650 addInstr(env, AMD64Instr_Push(AMD64RMI_Imm(0))); in iselFltExpr_wrk()
2651 addInstr(env, AMD64Instr_Imm64( 1ULL<<31, r1 )); in iselFltExpr_wrk()
2652 addInstr(env, AMD64Instr_Push(AMD64RMI_Reg(r1))); in iselFltExpr_wrk()
2653 addInstr(env, AMD64Instr_SseLdSt(True, 16, dst, rsp0)); in iselFltExpr_wrk()
2654 addInstr(env, AMD64Instr_SseReRg(Asse_XOR, tmp, dst)); in iselFltExpr_wrk()
2655 add_to_rsp(env, 16); in iselFltExpr_wrk()
2661 HReg dst = newVRegV(env); in iselFltExpr_wrk()
2662 HReg argX = iselFltExpr(env, qop->arg2); in iselFltExpr_wrk()
2663 HReg argY = iselFltExpr(env, qop->arg3); in iselFltExpr_wrk()
2664 HReg argZ = iselFltExpr(env, qop->arg4); in iselFltExpr_wrk()
2668 sub_from_rsp(env, 16); in iselFltExpr_wrk()
2675 addInstr(env, AMD64Instr_Lea64(AMD64AMode_IR(0, hregAMD64_RSP()), in iselFltExpr_wrk()
2677 addInstr(env, AMD64Instr_Lea64(AMD64AMode_IR(4, hregAMD64_RSP()), in iselFltExpr_wrk()
2679 addInstr(env, AMD64Instr_Lea64(AMD64AMode_IR(8, hregAMD64_RSP()), in iselFltExpr_wrk()
2681 addInstr(env, AMD64Instr_Lea64(AMD64AMode_IR(12, hregAMD64_RSP()), in iselFltExpr_wrk()
2688 addInstr(env, AMD64Instr_SseLdSt(False/*!isLoad*/, 4, argX, in iselFltExpr_wrk()
2690 addInstr(env, AMD64Instr_SseLdSt(False/*!isLoad*/, 4, argY, in iselFltExpr_wrk()
2692 addInstr(env, AMD64Instr_SseLdSt(False/*!isLoad*/, 4, argZ, in iselFltExpr_wrk()
2695 addInstr(env, AMD64Instr_Call( Acc_ALWAYS, in iselFltExpr_wrk()
2700 addInstr(env, AMD64Instr_SseLdSt(True/*isLoad*/, 4, dst, in iselFltExpr_wrk()
2703 add_to_rsp(env, 16); in iselFltExpr_wrk()
2740 static HReg iselDblExpr ( ISelEnv* env, const IRExpr* e ) in iselDblExpr() argument
2742 HReg r = iselDblExpr_wrk( env, e ); in iselDblExpr()
2752 static HReg iselDblExpr_wrk ( ISelEnv* env, const IRExpr* e ) in iselDblExpr_wrk() argument
2754 IRType ty = typeOfIRExpr(env->type_env,e); in iselDblExpr_wrk()
2759 return lookupIRTemp(env, e->Iex.RdTmp.tmp); in iselDblExpr_wrk()
2764 HReg res = newVRegV(env); in iselDblExpr_wrk()
2765 HReg tmp = newVRegI(env); in iselDblExpr_wrk()
2779 addInstr(env, AMD64Instr_Imm64(u.u64, tmp)); in iselDblExpr_wrk()
2780 addInstr(env, AMD64Instr_Push(AMD64RMI_Reg(tmp))); in iselDblExpr_wrk()
2781 addInstr(env, AMD64Instr_SseLdSt( in iselDblExpr_wrk()
2785 add_to_rsp(env, 8); in iselDblExpr_wrk()
2791 HReg res = newVRegV(env); in iselDblExpr_wrk()
2793 am = iselIntExpr_AMode(env, e->Iex.Load.addr); in iselDblExpr_wrk()
2794 addInstr(env, AMD64Instr_SseLdSt( True/*load*/, 8, res, am )); in iselDblExpr_wrk()
2801 HReg res = newVRegV(env); in iselDblExpr_wrk()
2802 addInstr(env, AMD64Instr_SseLdSt( True/*load*/, 8, res, am )); in iselDblExpr_wrk()
2809 env, e->Iex.GetI.descr, in iselDblExpr_wrk()
2811 HReg res = newVRegV(env); in iselDblExpr_wrk()
2812 addInstr(env, AMD64Instr_SseLdSt( True/*load*/, 8, res, am )); in iselDblExpr_wrk()
2827 HReg dst = newVRegV(env); in iselDblExpr_wrk()
2828 HReg argL = iselDblExpr(env, triop->arg2); in iselDblExpr_wrk()
2829 HReg argR = iselDblExpr(env, triop->arg3); in iselDblExpr_wrk()
2830 addInstr(env, mk_vMOVsd_RR(argL, dst)); in iselDblExpr_wrk()
2833 addInstr(env, AMD64Instr_Sse64FLo(op, argR, dst)); in iselDblExpr_wrk()
2840 HReg dst = newVRegV(env); in iselDblExpr_wrk()
2841 HReg argX = iselDblExpr(env, qop->arg2); in iselDblExpr_wrk()
2842 HReg argY = iselDblExpr(env, qop->arg3); in iselDblExpr_wrk()
2843 HReg argZ = iselDblExpr(env, qop->arg4); in iselDblExpr_wrk()
2847 sub_from_rsp(env, 32); in iselDblExpr_wrk()
2854 addInstr(env, AMD64Instr_Lea64(AMD64AMode_IR(0, hregAMD64_RSP()), in iselDblExpr_wrk()
2856 addInstr(env, AMD64Instr_Lea64(AMD64AMode_IR(8, hregAMD64_RSP()), in iselDblExpr_wrk()
2858 addInstr(env, AMD64Instr_Lea64(AMD64AMode_IR(16, hregAMD64_RSP()), in iselDblExpr_wrk()
2860 addInstr(env, AMD64Instr_Lea64(AMD64AMode_IR(24, hregAMD64_RSP()), in iselDblExpr_wrk()
2867 addInstr(env, AMD64Instr_SseLdSt(False/*!isLoad*/, 8, argX, in iselDblExpr_wrk()
2869 addInstr(env, AMD64Instr_SseLdSt(False/*!isLoad*/, 8, argY, in iselDblExpr_wrk()
2871 addInstr(env, AMD64Instr_SseLdSt(False/*!isLoad*/, 8, argZ, in iselDblExpr_wrk()
2874 addInstr(env, AMD64Instr_Call( Acc_ALWAYS, in iselDblExpr_wrk()
2879 addInstr(env, AMD64Instr_SseLdSt(True/*isLoad*/, 8, dst, in iselDblExpr_wrk()
2882 add_to_rsp(env, 32); in iselDblExpr_wrk()
2888 HReg arg = iselDblExpr(env, e->Iex.Binop.arg2); in iselDblExpr_wrk()
2889 HReg dst = newVRegV(env); in iselDblExpr_wrk()
2895 set_FPU_rounding_mode( env, e->Iex.Binop.arg1 ); in iselDblExpr_wrk()
2897 addInstr(env, AMD64Instr_SseLdSt(False/*store*/, 8, arg, m8_rsp)); in iselDblExpr_wrk()
2898 addInstr(env, AMD64Instr_A87Free(1)); in iselDblExpr_wrk()
2899 addInstr(env, AMD64Instr_A87PushPop(m8_rsp, True/*push*/, 8)); in iselDblExpr_wrk()
2900 addInstr(env, AMD64Instr_A87FpOp(Afp_ROUND)); in iselDblExpr_wrk()
2901 addInstr(env, AMD64Instr_A87PushPop(m8_rsp, False/*pop*/, 8)); in iselDblExpr_wrk()
2902 addInstr(env, AMD64Instr_SseLdSt(True/*load*/, 8, dst, m8_rsp)); in iselDblExpr_wrk()
2905 set_FPU_rounding_default( env ); in iselDblExpr_wrk()
2920 HReg arg1 = iselDblExpr(env, triop->arg2); in iselDblExpr_wrk()
2921 HReg arg2 = iselDblExpr(env, triop->arg3); in iselDblExpr_wrk()
2922 HReg dst = newVRegV(env); in iselDblExpr_wrk()
2926 addInstr(env, AMD64Instr_A87Free(2)); in iselDblExpr_wrk()
2929 addInstr(env, AMD64Instr_SseLdSt( in iselDblExpr_wrk()
2931 addInstr(env, AMD64Instr_A87PushPop(m8_rsp, True/*push*/, 8)); in iselDblExpr_wrk()
2934 addInstr(env, AMD64Instr_SseLdSt( in iselDblExpr_wrk()
2936 addInstr(env, AMD64Instr_A87PushPop(m8_rsp, True/*push*/, 8)); in iselDblExpr_wrk()
2943 addInstr(env, AMD64Instr_A87FpOp(Afp_SCALE)); in iselDblExpr_wrk()
2946 addInstr(env, AMD64Instr_A87FpOp(Afp_ATAN)); in iselDblExpr_wrk()
2949 addInstr(env, AMD64Instr_A87FpOp(Afp_YL2X)); in iselDblExpr_wrk()
2952 addInstr(env, AMD64Instr_A87FpOp(Afp_YL2XP1)); in iselDblExpr_wrk()
2955 addInstr(env, AMD64Instr_A87FpOp(Afp_PREM)); in iselDblExpr_wrk()
2958 addInstr(env, AMD64Instr_A87FpOp(Afp_PREM1)); in iselDblExpr_wrk()
2965 addInstr(env, AMD64Instr_A87PushPop(m8_rsp, False/*pop*/, 8)); in iselDblExpr_wrk()
2966 addInstr(env, AMD64Instr_SseLdSt(True/*load*/, 8, dst, m8_rsp)); in iselDblExpr_wrk()
2971 HReg dst = newVRegV(env); in iselDblExpr_wrk()
2972 HReg src = iselIntExpr_R(env, e->Iex.Binop.arg2); in iselDblExpr_wrk()
2973 set_SSE_rounding_mode( env, e->Iex.Binop.arg1 ); in iselDblExpr_wrk()
2974 addInstr(env, AMD64Instr_SseSI2SF( 8, 8, src, dst )); in iselDblExpr_wrk()
2975 set_SSE_rounding_default( env ); in iselDblExpr_wrk()
2980 HReg dst = newVRegV(env); in iselDblExpr_wrk()
2981 HReg src = iselIntExpr_R(env, e->Iex.Unop.arg); in iselDblExpr_wrk()
2982 set_SSE_rounding_default( env ); in iselDblExpr_wrk()
2983 addInstr(env, AMD64Instr_SseSI2SF( 4, 8, src, dst )); in iselDblExpr_wrk()
2993 HReg r1 = newVRegI(env); in iselDblExpr_wrk()
2994 HReg dst = newVRegV(env); in iselDblExpr_wrk()
2995 HReg tmp = newVRegV(env); in iselDblExpr_wrk()
2996 HReg src = iselDblExpr(env, e->Iex.Unop.arg); in iselDblExpr_wrk()
2998 addInstr(env, mk_vMOVsd_RR(src,tmp)); in iselDblExpr_wrk()
2999 addInstr(env, AMD64Instr_Push(AMD64RMI_Imm(0))); in iselDblExpr_wrk()
3000 addInstr(env, AMD64Instr_Imm64( 1ULL<<63, r1 )); in iselDblExpr_wrk()
3001 addInstr(env, AMD64Instr_Push(AMD64RMI_Reg(r1))); in iselDblExpr_wrk()
3002 addInstr(env, AMD64Instr_SseLdSt(True, 16, dst, rsp0)); in iselDblExpr_wrk()
3005 addInstr(env, AMD64Instr_SseReRg(Asse_XOR, tmp, dst)); in iselDblExpr_wrk()
3007 addInstr(env, AMD64Instr_SseReRg(Asse_ANDN, tmp, dst)); in iselDblExpr_wrk()
3009 add_to_rsp(env, 16); in iselDblExpr_wrk()
3025 HReg arg = iselDblExpr(env, e->Iex.Binop.arg2); in iselDblExpr_wrk()
3026 HReg dst = newVRegV(env); in iselDblExpr_wrk()
3028 addInstr(env, AMD64Instr_SseLdSt(False/*store*/, 8, arg, m8_rsp)); in iselDblExpr_wrk()
3029 addInstr(env, AMD64Instr_A87Free(nNeeded)); in iselDblExpr_wrk()
3030 addInstr(env, AMD64Instr_A87PushPop(m8_rsp, True/*push*/, 8)); in iselDblExpr_wrk()
3038 addInstr(env, AMD64Instr_A87FpOp(fpop)); in iselDblExpr_wrk()
3039 addInstr(env, AMD64Instr_A87PushPop(m8_rsp, False/*pop*/, 8)); in iselDblExpr_wrk()
3040 addInstr(env, AMD64Instr_SseLdSt(True/*load*/, 8, dst, m8_rsp)); in iselDblExpr_wrk()
3048 //.. HReg dst = newVRegF(env); in iselDblExpr_wrk()
3049 //.. HReg ri = iselIntExpr_R(env, e->Iex.Unop.arg); in iselDblExpr_wrk()
3050 //.. addInstr(env, X86Instr_Push(X86RMI_Reg(ri))); in iselDblExpr_wrk()
3051 //.. set_FPU_rounding_default(env); in iselDblExpr_wrk()
3052 //.. addInstr(env, X86Instr_FpLdStI( in iselDblExpr_wrk()
3055 //.. add_to_esp(env, 4); in iselDblExpr_wrk()
3062 HReg dst = newVRegV(env); in iselDblExpr_wrk()
3063 AMD64RI* src = iselIntExpr_RI(env, e->Iex.Unop.arg); in iselDblExpr_wrk()
3065 set_SSE_rounding_default(env); in iselDblExpr_wrk()
3066 addInstr(env, AMD64Instr_Alu64M(Aalu_MOV, src, m8_rsp)); in iselDblExpr_wrk()
3067 addInstr(env, AMD64Instr_SseLdSt(True/*load*/, 8, dst, m8_rsp)); in iselDblExpr_wrk()
3072 HReg f64 = newVRegV(env); in iselDblExpr_wrk()
3074 set_SSE_rounding_default(env); in iselDblExpr_wrk()
3075 f32 = iselFltExpr(env, e->Iex.Unop.arg); in iselDblExpr_wrk()
3076 addInstr(env, AMD64Instr_SseSDSS(False/*S->D*/, f32, f64)); in iselDblExpr_wrk()
3088 vassert(typeOfIRExpr(env->type_env,e->Iex.ITE.cond) == Ity_I1); in iselDblExpr_wrk()
3089 r1 = iselDblExpr(env, e->Iex.ITE.iftrue); in iselDblExpr_wrk()
3090 r0 = iselDblExpr(env, e->Iex.ITE.iffalse); in iselDblExpr_wrk()
3091 dst = newVRegV(env); in iselDblExpr_wrk()
3092 addInstr(env, mk_vMOVsd_RR(r1,dst)); in iselDblExpr_wrk()
3093 AMD64CondCode cc = iselCondCode(env, e->Iex.ITE.cond); in iselDblExpr_wrk()
3094 addInstr(env, AMD64Instr_SseCMov(cc ^ 1, r0, dst)); in iselDblExpr_wrk()
3107 static HReg iselVecExpr ( ISelEnv* env, const IRExpr* e ) in iselVecExpr() argument
3109 HReg r = iselVecExpr_wrk( env, e ); in iselVecExpr()
3120 static HReg iselVecExpr_wrk ( ISelEnv* env, const IRExpr* e ) in iselVecExpr_wrk() argument
3125 IRType ty = typeOfIRExpr(env->type_env,e); in iselVecExpr_wrk()
3130 return lookupIRTemp(env, e->Iex.RdTmp.tmp); in iselVecExpr_wrk()
3134 HReg dst = newVRegV(env); in iselVecExpr_wrk()
3135 addInstr(env, AMD64Instr_SseLdSt( in iselVecExpr_wrk()
3146 HReg dst = newVRegV(env); in iselVecExpr_wrk()
3147 AMD64AMode* am = iselIntExpr_AMode(env, e->Iex.Load.addr); in iselVecExpr_wrk()
3148 addInstr(env, AMD64Instr_SseLdSt( True/*load*/, 16, dst, am )); in iselVecExpr_wrk()
3153 HReg dst = newVRegV(env); in iselVecExpr_wrk()
3157 dst = generate_zeroes_V128(env); in iselVecExpr_wrk()
3160 dst = generate_ones_V128(env); in iselVecExpr_wrk()
3165 push_uimm64(env, bitmask8_to_bytemask64( in iselVecExpr_wrk()
3168 push_uimm64(env, bitmask8_to_bytemask64( in iselVecExpr_wrk()
3171 addInstr(env, AMD64Instr_SseLdSt( True/*load*/, 16, dst, rsp0 )); in iselVecExpr_wrk()
3172 add_to_rsp(env, 16); in iselVecExpr_wrk()
3183 HReg arg = iselVecExpr(env, e->Iex.Unop.arg); in iselVecExpr_wrk()
3184 return do_sse_NotV128(env, arg); in iselVecExpr_wrk()
3203 HReg arg = iselVecExpr(env, e->Iex.Unop.arg); in iselVecExpr_wrk()
3204 HReg tmp = generate_zeroes_V128(env); in iselVecExpr_wrk()
3205 HReg dst = newVRegV(env); in iselVecExpr_wrk()
3206 addInstr(env, AMD64Instr_SseReRg(Asse_CMPEQ32, arg, tmp)); in iselVecExpr_wrk()
3207 tmp = do_sse_NotV128(env, tmp); in iselVecExpr_wrk()
3208 addInstr(env, AMD64Instr_SseShuf(0xB1, tmp, dst)); in iselVecExpr_wrk()
3209 addInstr(env, AMD64Instr_SseReRg(Asse_OR, tmp, dst)); in iselVecExpr_wrk()
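
The CMPEQ32 / NOT / PSHUFD-0xB1 / OR sequence above builds a per-64-bit-lane "is nonzero" mask using only SSE2 32-bit compares (SSE2 has no 64-bit PCMPEQ): compare each 32-bit lane with zero, invert, swap the two halves of every 64-bit lane with shuffle control 0xB1, and OR, so each 64-bit lane ends up all-ones iff either of its halves was nonzero. The same computation with intrinsics (a sketch, not the VEX code):

   #include <emmintrin.h>

   static __m128i cmpnez64x2 ( __m128i v )
   {
      __m128i zero = _mm_setzero_si128();
      __m128i eq0  = _mm_cmpeq_epi32(v, zero);        /* 32-bit lane == 0? */
      __m128i ones = _mm_cmpeq_epi32(zero, zero);     /* all-ones          */
      __m128i ne0  = _mm_xor_si128(eq0, ones);        /* 32-bit lane != 0? */
      __m128i swap = _mm_shuffle_epi32(ne0, 0xB1);    /* swap halves of
                                                         each 64-bit lane  */
      return _mm_or_si128(ne0, swap);
   }
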
3218 HReg arg = iselVecExpr(env, e->Iex.Unop.arg); in iselVecExpr_wrk()
3219 HReg tmp = newVRegV(env); in iselVecExpr_wrk()
3220 HReg zero = generate_zeroes_V128(env); in iselVecExpr_wrk()
3222 addInstr(env, mk_vMOVsd_RR(arg, tmp)); in iselVecExpr_wrk()
3223 addInstr(env, AMD64Instr_SseReRg(op, zero, tmp)); in iselVecExpr_wrk()
3224 dst = do_sse_NotV128(env, tmp); in iselVecExpr_wrk()
3232 HReg arg = iselVecExpr(env, e->Iex.Unop.arg); in iselVecExpr_wrk()
3233 HReg dst = newVRegV(env); in iselVecExpr_wrk()
3234 addInstr(env, AMD64Instr_Sse32Fx4(op, arg, dst)); in iselVecExpr_wrk()
3249 HReg arg = iselVecExpr(env, e->Iex.Unop.arg); in iselVecExpr_wrk()
3250 HReg dst = newVRegV(env); in iselVecExpr_wrk()
3251 addInstr(env, mk_vMOVsd_RR(arg, dst)); in iselVecExpr_wrk()
3252 addInstr(env, AMD64Instr_Sse32FLo(op, arg, dst)); in iselVecExpr_wrk()
3265 HReg arg = iselVecExpr(env, e->Iex.Unop.arg); in iselVecExpr_wrk()
3266 HReg dst = newVRegV(env); in iselVecExpr_wrk()
3267 addInstr(env, mk_vMOVsd_RR(arg, dst)); in iselVecExpr_wrk()
3268 addInstr(env, AMD64Instr_Sse64FLo(op, arg, dst)); in iselVecExpr_wrk()
3273 HReg dst = newVRegV(env); in iselVecExpr_wrk()
3275 AMD64RI* ri = iselIntExpr_RI(env, e->Iex.Unop.arg); in iselVecExpr_wrk()
3276 addInstr(env, AMD64Instr_Alu64M(Aalu_MOV, ri, rsp_m32)); in iselVecExpr_wrk()
3277 addInstr(env, AMD64Instr_SseLdzLO(4, dst, rsp_m32)); in iselVecExpr_wrk()
3282 HReg dst = newVRegV(env); in iselVecExpr_wrk()
3284 AMD64RMI* rmi = iselIntExpr_RMI(env, e->Iex.Unop.arg); in iselVecExpr_wrk()
3285 addInstr(env, AMD64Instr_Push(rmi)); in iselVecExpr_wrk()
3286 addInstr(env, AMD64Instr_SseLdzLO(8, dst, rsp0)); in iselVecExpr_wrk()
3287 add_to_rsp(env, 8); in iselVecExpr_wrk()
3294 iselDVecExpr(&vHi, &vLo, env, e->Iex.Unop.arg); in iselVecExpr_wrk()
3309 HReg arg = iselVecExpr(env, e->Iex.Binop.arg2); in iselVecExpr_wrk()
3310 HReg dst = newVRegV(env); in iselVecExpr_wrk()
3313 addInstr(env, (e->Iex.Binop.op == Iop_Sqrt64Fx2 in iselVecExpr_wrk()
3321 HReg dst = newVRegV(env); in iselVecExpr_wrk()
3322 HReg srcV = iselVecExpr(env, e->Iex.Binop.arg1); in iselVecExpr_wrk()
3323 HReg srcI = iselIntExpr_R(env, e->Iex.Binop.arg2); in iselVecExpr_wrk()
3325 addInstr(env, AMD64Instr_SseLdSt(False/*store*/, 16, srcV, rsp_m16)); in iselVecExpr_wrk()
3326 addInstr(env, AMD64Instr_Alu64M(Aalu_MOV, AMD64RI_Reg(srcI), rsp_m16)); in iselVecExpr_wrk()
3327 addInstr(env, AMD64Instr_SseLdSt(True/*load*/, 16, dst, rsp_m16)); in iselVecExpr_wrk()
3333 HReg dst = newVRegV(env); in iselVecExpr_wrk()
3334 HReg srcV = iselVecExpr(env, e->Iex.Binop.arg1); in iselVecExpr_wrk()
3335 HReg srcI = iselIntExpr_R(env, e->Iex.Binop.arg2); in iselVecExpr_wrk()
3337 addInstr(env, AMD64Instr_SseLdSt(False/*store*/, 16, srcV, rsp_m16)); in iselVecExpr_wrk()
3338 addInstr(env, AMD64Instr_Store(4, srcI, rsp_m16)); in iselVecExpr_wrk()
3339 addInstr(env, AMD64Instr_SseLdSt(True/*load*/, 16, dst, rsp_m16)); in iselVecExpr_wrk()
3347 AMD64RI* qHi = iselIntExpr_RI(env, e->Iex.Binop.arg1); in iselVecExpr_wrk()
3348 AMD64RI* qLo = iselIntExpr_RI(env, e->Iex.Binop.arg2); in iselVecExpr_wrk()
3349 addInstr(env, AMD64Instr_Alu64M(Aalu_MOV, qHi, m8_rsp)); in iselVecExpr_wrk()
3350 addInstr(env, AMD64Instr_Alu64M(Aalu_MOV, qLo, m16_rsp)); in iselVecExpr_wrk()
3351 HReg dst = newVRegV(env); in iselVecExpr_wrk()
3353 addInstr(env, AMD64Instr_SseLdSt(True/*load*/, 16, dst, m16_rsp)); in iselVecExpr_wrk()
3365 HReg argL = iselVecExpr(env, e->Iex.Binop.arg1); in iselVecExpr_wrk()
3366 HReg argR = iselVecExpr(env, e->Iex.Binop.arg2); in iselVecExpr_wrk()
3367 HReg dst = newVRegV(env); in iselVecExpr_wrk()
3368 addInstr(env, mk_vMOVsd_RR(argL, dst)); in iselVecExpr_wrk()
3369 addInstr(env, AMD64Instr_Sse32Fx4(op, argR, dst)); in iselVecExpr_wrk()
3381 HReg argL = iselVecExpr(env, e->Iex.Binop.arg1); in iselVecExpr_wrk()
3382 HReg argR = iselVecExpr(env, e->Iex.Binop.arg2); in iselVecExpr_wrk()
3383 HReg dst = newVRegV(env); in iselVecExpr_wrk()
3384 addInstr(env, mk_vMOVsd_RR(argL, dst)); in iselVecExpr_wrk()
3385 addInstr(env, AMD64Instr_Sse64Fx2(op, argR, dst)); in iselVecExpr_wrk()
3400 HReg argL = iselVecExpr(env, e->Iex.Binop.arg1); in iselVecExpr_wrk()
3401 HReg argR = iselVecExpr(env, e->Iex.Binop.arg2); in iselVecExpr_wrk()
3402 HReg dst = newVRegV(env); in iselVecExpr_wrk()
3403 addInstr(env, mk_vMOVsd_RR(argL, dst)); in iselVecExpr_wrk()
3404 addInstr(env, AMD64Instr_Sse32FLo(op, argR, dst)); in iselVecExpr_wrk()
3419 HReg argL = iselVecExpr(env, e->Iex.Binop.arg1); in iselVecExpr_wrk()
3420 HReg argR = iselVecExpr(env, e->Iex.Binop.arg2); in iselVecExpr_wrk()
3421 HReg dst = newVRegV(env); in iselVecExpr_wrk()
3422 addInstr(env, mk_vMOVsd_RR(argL, dst)); in iselVecExpr_wrk()
3423 addInstr(env, AMD64Instr_Sse64FLo(op, argR, dst)); in iselVecExpr_wrk()
3487 HReg arg1 = iselVecExpr(env, e->Iex.Binop.arg1); in iselVecExpr_wrk()
3488 HReg arg2 = iselVecExpr(env, e->Iex.Binop.arg2); in iselVecExpr_wrk()
3489 HReg dst = newVRegV(env); in iselVecExpr_wrk()
3491 addInstr(env, mk_vMOVsd_RR(arg2, dst)); in iselVecExpr_wrk()
3492 addInstr(env, AMD64Instr_SseReRg(op, arg1, dst)); in iselVecExpr_wrk()
3494 addInstr(env, mk_vMOVsd_RR(arg1, dst)); in iselVecExpr_wrk()
3495 addInstr(env, AMD64Instr_SseReRg(op, arg2, dst)); in iselVecExpr_wrk()
3509 HReg greg = iselVecExpr(env, e->Iex.Binop.arg1); in iselVecExpr_wrk()
3510 AMD64RMI* rmi = iselIntExpr_RMI(env, e->Iex.Binop.arg2); in iselVecExpr_wrk()
3512 HReg ereg = newVRegV(env); in iselVecExpr_wrk()
3513 HReg dst = newVRegV(env); in iselVecExpr_wrk()
3514 addInstr(env, AMD64Instr_Push(AMD64RMI_Imm(0))); in iselVecExpr_wrk()
3515 addInstr(env, AMD64Instr_Push(rmi)); in iselVecExpr_wrk()
3516 addInstr(env, AMD64Instr_SseLdSt(True/*load*/, 16, ereg, rsp0)); in iselVecExpr_wrk()
3517 addInstr(env, mk_vMOVsd_RR(greg, dst)); in iselVecExpr_wrk()
3518 addInstr(env, AMD64Instr_SseReRg(op, ereg, dst)); in iselVecExpr_wrk()
3519 add_to_rsp(env, 16); in iselVecExpr_wrk()
3560 HReg dst = newVRegV(env); in iselVecExpr_wrk()
3561 HReg argL = iselVecExpr(env, e->Iex.Binop.arg1); in iselVecExpr_wrk()
3562 HReg argR = iselVecExpr(env, e->Iex.Binop.arg2); in iselVecExpr_wrk()
3563 HReg argp = newVRegI(env); in iselVecExpr_wrk()
3565 sub_from_rsp(env, 112); in iselVecExpr_wrk()
3567 addInstr(env, AMD64Instr_Lea64(AMD64AMode_IR(48, hregAMD64_RSP()), in iselVecExpr_wrk()
3570 addInstr(env, AMD64Instr_Alu64R(Aalu_AND, in iselVecExpr_wrk()
3578 addInstr(env, AMD64Instr_Lea64(AMD64AMode_IR(0, argp), in iselVecExpr_wrk()
3580 addInstr(env, AMD64Instr_Lea64(AMD64AMode_IR(16, argp), in iselVecExpr_wrk()
3582 addInstr(env, AMD64Instr_Lea64(AMD64AMode_IR(32, argp), in iselVecExpr_wrk()
3588 addInstr(env, AMD64Instr_SseLdSt(False/*!isLoad*/, 16, argL, in iselVecExpr_wrk()
3590 addInstr(env, AMD64Instr_SseLdSt(False/*!isLoad*/, 16, argR, in iselVecExpr_wrk()
3593 addInstr(env, AMD64Instr_Call( Acc_ALWAYS, (ULong)fn, in iselVecExpr_wrk()
3597 addInstr(env, AMD64Instr_SseLdSt(True/*isLoad*/, 16, dst, in iselVecExpr_wrk()
3600 add_to_rsp(env, 112); in iselVecExpr_wrk()
3612 HReg dst = newVRegV(env); in iselVecExpr_wrk()
3613 HReg argL = iselVecExpr(env, e->Iex.Binop.arg1); in iselVecExpr_wrk()
3614 HReg argR = iselIntExpr_R(env, e->Iex.Binop.arg2); in iselVecExpr_wrk()
3615 HReg argp = newVRegI(env); in iselVecExpr_wrk()
3617 sub_from_rsp(env, 112); in iselVecExpr_wrk()
3619 addInstr(env, AMD64Instr_Lea64(AMD64AMode_IR(48, hregAMD64_RSP()), in iselVecExpr_wrk()
3622 addInstr(env, AMD64Instr_Alu64R(Aalu_AND, in iselVecExpr_wrk()
3629 addInstr(env, AMD64Instr_Lea64(AMD64AMode_IR(0, argp), in iselVecExpr_wrk()
3631 addInstr(env, AMD64Instr_Lea64(AMD64AMode_IR(16, argp), in iselVecExpr_wrk()
3636 addInstr(env, AMD64Instr_SseLdSt(False/*!isLoad*/, 16, argL, in iselVecExpr_wrk()
3639 addInstr(env, mk_iMOVsd_RR(argR, hregAMD64_RDX())); in iselVecExpr_wrk()
3642 addInstr(env, AMD64Instr_Call( Acc_ALWAYS, (ULong)fn, in iselVecExpr_wrk()
3646 addInstr(env, AMD64Instr_SseLdSt(True/*isLoad*/, 16, dst, in iselVecExpr_wrk()
3649 add_to_rsp(env, 112); in iselVecExpr_wrk()
3668 HReg argL = iselVecExpr(env, triop->arg2); in iselVecExpr_wrk()
3669 HReg argR = iselVecExpr(env, triop->arg3); in iselVecExpr_wrk()
3670 HReg dst = newVRegV(env); in iselVecExpr_wrk()
3671 addInstr(env, mk_vMOVsd_RR(argL, dst)); in iselVecExpr_wrk()
3674 addInstr(env, AMD64Instr_Sse64Fx2(op, argR, dst)); in iselVecExpr_wrk()
3684 HReg argL = iselVecExpr(env, triop->arg2); in iselVecExpr_wrk()
3685 HReg argR = iselVecExpr(env, triop->arg3); in iselVecExpr_wrk()
3686 HReg dst = newVRegV(env); in iselVecExpr_wrk()
3687 addInstr(env, mk_vMOVsd_RR(argL, dst)); in iselVecExpr_wrk()
3690 addInstr(env, AMD64Instr_Sse32Fx4(op, argR, dst)); in iselVecExpr_wrk()
3700 HReg r1 = iselVecExpr(env, e->Iex.ITE.iftrue); in iselVecExpr_wrk()
3701 HReg r0 = iselVecExpr(env, e->Iex.ITE.iffalse); in iselVecExpr_wrk()
3702 HReg dst = newVRegV(env); in iselVecExpr_wrk()
3703 addInstr(env, mk_vMOVsd_RR(r1,dst)); in iselVecExpr_wrk()
3704 AMD64CondCode cc = iselCondCode(env, e->Iex.ITE.cond); in iselVecExpr_wrk()
3705 addInstr(env, AMD64Instr_SseCMov(cc ^ 1, r0, dst)); in iselVecExpr_wrk()
3711 LibVEX_ppVexHwCaps(VexArchAMD64, env->hwcaps)); in iselVecExpr_wrk()
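The Iex_ITE case that closes iselVecExpr_wrk above is the selector's standard shape for a conditional select on a V128: materialise the if-true value into a fresh destination, evaluate the guard to a condition code, then conditionally overwrite with the if-false value under the inverted condition (cc ^ 1). A minimal sketch of that shape, built only from helpers visible in this listing; the trailing return is assumed, since non-matching lines are omitted here:

   /* Sketch only: conditional select on a V128, mirroring the ITE case above. */
   case Iex_ITE: {
      HReg r1  = iselVecExpr(env, e->Iex.ITE.iftrue);   /* value when cond holds  */
      HReg r0  = iselVecExpr(env, e->Iex.ITE.iffalse);  /* value when it does not */
      HReg dst = newVRegV(env);
      addInstr(env, mk_vMOVsd_RR(r1, dst));             /* dst := r1 */
      AMD64CondCode cc = iselCondCode(env, e->Iex.ITE.cond);
      addInstr(env, AMD64Instr_SseCMov(cc ^ 1, r0, dst)); /* dst := r0 if !cond */
      return dst;                                        /* assumed; not a matched line */
   }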
3722 ISelEnv* env, const IRExpr* e ) in iselDVecExpr() argument
3724 iselDVecExpr_wrk( rHi, rLo, env, e ); in iselDVecExpr()
3737 ISelEnv* env, const IRExpr* e ) in iselDVecExpr_wrk() argument
3741 IRType ty = typeOfIRExpr(env->type_env,e); in iselDVecExpr_wrk()
3748 lookupIRTempPair( rHi, rLo, env, e->Iex.RdTmp.tmp); in iselDVecExpr_wrk()
3753 HReg vHi = newVRegV(env); in iselDVecExpr_wrk()
3754 HReg vLo = newVRegV(env); in iselDVecExpr_wrk()
3758 addInstr(env, AMD64Instr_SseLdSt(True/*load*/, 16, vLo, am0)); in iselDVecExpr_wrk()
3759 addInstr(env, AMD64Instr_SseLdSt(True/*load*/, 16, vHi, am16)); in iselDVecExpr_wrk()
3766 HReg vHi = newVRegV(env); in iselDVecExpr_wrk()
3767 HReg vLo = newVRegV(env); in iselDVecExpr_wrk()
3768 HReg rA = iselIntExpr_R(env, e->Iex.Load.addr); in iselDVecExpr_wrk()
3771 addInstr(env, AMD64Instr_SseLdSt(True/*load*/, 16, vLo, am0)); in iselDVecExpr_wrk()
3772 addInstr(env, AMD64Instr_SseLdSt(True/*load*/, 16, vHi, am16)); in iselDVecExpr_wrk()
3782 HReg vHi = generate_zeroes_V128(env); in iselDVecExpr_wrk()
3783 HReg vLo = newVRegV(env); in iselDVecExpr_wrk()
3784 addInstr(env, mk_vMOVsd_RR(vHi, vLo)); in iselDVecExpr_wrk()
3799 iselDVecExpr(&argHi, &argLo, env, e->Iex.Unop.arg); in iselDVecExpr_wrk()
3800 *rHi = do_sse_NotV128(env, argHi); in iselDVecExpr_wrk()
3801 *rLo = do_sse_NotV128(env, argLo); in iselDVecExpr_wrk()
3811 iselDVecExpr(&argHi, &argLo, env, e->Iex.Unop.arg); in iselDVecExpr_wrk()
3812 HReg dstHi = newVRegV(env); in iselDVecExpr_wrk()
3813 HReg dstLo = newVRegV(env); in iselDVecExpr_wrk()
3814 addInstr(env, AMD64Instr_Sse32Fx4(op, argHi, dstHi)); in iselDVecExpr_wrk()
3815 addInstr(env, AMD64Instr_Sse32Fx4(op, argLo, dstLo)); in iselDVecExpr_wrk()
3825 iselDVecExpr(&argHi, &argLo, env, e->Iex.Unop.arg); in iselDVecExpr_wrk()
3826 HReg dstHi = newVRegV(env); in iselDVecExpr_wrk()
3827 HReg dstLo = newVRegV(env); in iselDVecExpr_wrk()
3828 addInstr(env, AMD64Instr_Sse64Fx2(op, argHi, dstHi)); in iselDVecExpr_wrk()
3829 addInstr(env, AMD64Instr_Sse64Fx2(op, argLo, dstLo)); in iselDVecExpr_wrk()
3841 iselDVecExpr(&argHi, &argLo, env, e->Iex.Unop.arg); in iselDVecExpr_wrk()
3842 HReg tmpHi = generate_zeroes_V128(env); in iselDVecExpr_wrk()
3843 HReg tmpLo = newVRegV(env); in iselDVecExpr_wrk()
3844 addInstr(env, mk_vMOVsd_RR(tmpHi, tmpLo)); in iselDVecExpr_wrk()
3845 HReg dstHi = newVRegV(env); in iselDVecExpr_wrk()
3846 HReg dstLo = newVRegV(env); in iselDVecExpr_wrk()
3847 addInstr(env, AMD64Instr_SseReRg(Asse_CMPEQ32, argHi, tmpHi)); in iselDVecExpr_wrk()
3848 addInstr(env, AMD64Instr_SseReRg(Asse_CMPEQ32, argLo, tmpLo)); in iselDVecExpr_wrk()
3849 tmpHi = do_sse_NotV128(env, tmpHi); in iselDVecExpr_wrk()
3850 tmpLo = do_sse_NotV128(env, tmpLo); in iselDVecExpr_wrk()
3851 addInstr(env, AMD64Instr_SseShuf(0xB1, tmpHi, dstHi)); in iselDVecExpr_wrk()
3852 addInstr(env, AMD64Instr_SseShuf(0xB1, tmpLo, dstLo)); in iselDVecExpr_wrk()
3853 addInstr(env, AMD64Instr_SseReRg(Asse_OR, tmpHi, dstHi)); in iselDVecExpr_wrk()
3854 addInstr(env, AMD64Instr_SseReRg(Asse_OR, tmpLo, dstLo)); in iselDVecExpr_wrk()
3866 iselDVecExpr(&argHi, &argLo, env, e->Iex.Unop.arg); in iselDVecExpr_wrk()
3867 HReg tmpHi = newVRegV(env); in iselDVecExpr_wrk()
3868 HReg tmpLo = newVRegV(env); in iselDVecExpr_wrk()
3869 HReg zero = generate_zeroes_V128(env); in iselDVecExpr_wrk()
3871 addInstr(env, mk_vMOVsd_RR(argHi, tmpHi)); in iselDVecExpr_wrk()
3872 addInstr(env, mk_vMOVsd_RR(argLo, tmpLo)); in iselDVecExpr_wrk()
3873 addInstr(env, AMD64Instr_SseReRg(op, zero, tmpHi)); in iselDVecExpr_wrk()
3874 addInstr(env, AMD64Instr_SseReRg(op, zero, tmpLo)); in iselDVecExpr_wrk()
3875 dstHi = do_sse_NotV128(env, tmpHi); in iselDVecExpr_wrk()
3876 dstLo = do_sse_NotV128(env, tmpLo); in iselDVecExpr_wrk()
3895 iselDVecExpr(&argLhi, &argLlo, env, e->Iex.Binop.arg1); in iselDVecExpr_wrk()
3896 iselDVecExpr(&argRhi, &argRlo, env, e->Iex.Binop.arg2); in iselDVecExpr_wrk()
3897 HReg dstHi = newVRegV(env); in iselDVecExpr_wrk()
3898 HReg dstLo = newVRegV(env); in iselDVecExpr_wrk()
3899 addInstr(env, mk_vMOVsd_RR(argLhi, dstHi)); in iselDVecExpr_wrk()
3900 addInstr(env, mk_vMOVsd_RR(argLlo, dstLo)); in iselDVecExpr_wrk()
3901 addInstr(env, AMD64Instr_Sse64Fx2(op, argRhi, dstHi)); in iselDVecExpr_wrk()
3902 addInstr(env, AMD64Instr_Sse64Fx2(op, argRlo, dstLo)); in iselDVecExpr_wrk()
3913 iselDVecExpr(&argLhi, &argLlo, env, e->Iex.Binop.arg1); in iselDVecExpr_wrk()
3914 iselDVecExpr(&argRhi, &argRlo, env, e->Iex.Binop.arg2); in iselDVecExpr_wrk()
3915 HReg dstHi = newVRegV(env); in iselDVecExpr_wrk()
3916 HReg dstLo = newVRegV(env); in iselDVecExpr_wrk()
3917 addInstr(env, mk_vMOVsd_RR(argLhi, dstHi)); in iselDVecExpr_wrk()
3918 addInstr(env, mk_vMOVsd_RR(argLlo, dstLo)); in iselDVecExpr_wrk()
3919 addInstr(env, AMD64Instr_Sse32Fx4(op, argRhi, dstHi)); in iselDVecExpr_wrk()
3920 addInstr(env, AMD64Instr_Sse32Fx4(op, argRlo, dstLo)); in iselDVecExpr_wrk()
3963 iselDVecExpr(&argLhi, &argLlo, env, e->Iex.Binop.arg1); in iselDVecExpr_wrk()
3964 iselDVecExpr(&argRhi, &argRlo, env, e->Iex.Binop.arg2); in iselDVecExpr_wrk()
3965 HReg dstHi = newVRegV(env); in iselDVecExpr_wrk()
3966 HReg dstLo = newVRegV(env); in iselDVecExpr_wrk()
3967 addInstr(env, mk_vMOVsd_RR(argLhi, dstHi)); in iselDVecExpr_wrk()
3968 addInstr(env, mk_vMOVsd_RR(argLlo, dstLo)); in iselDVecExpr_wrk()
3969 addInstr(env, AMD64Instr_SseReRg(op, argRhi, dstHi)); in iselDVecExpr_wrk()
3970 addInstr(env, AMD64Instr_SseReRg(op, argRlo, dstLo)); in iselDVecExpr_wrk()
3986 iselDVecExpr(&gregHi, &gregLo, env, e->Iex.Binop.arg1); in iselDVecExpr_wrk()
3987 AMD64RMI* rmi = iselIntExpr_RMI(env, e->Iex.Binop.arg2); in iselDVecExpr_wrk()
3989 HReg ereg = newVRegV(env); in iselDVecExpr_wrk()
3990 HReg dstHi = newVRegV(env); in iselDVecExpr_wrk()
3991 HReg dstLo = newVRegV(env); in iselDVecExpr_wrk()
3992 addInstr(env, AMD64Instr_Push(AMD64RMI_Imm(0))); in iselDVecExpr_wrk()
3993 addInstr(env, AMD64Instr_Push(rmi)); in iselDVecExpr_wrk()
3994 addInstr(env, AMD64Instr_SseLdSt(True/*load*/, 16, ereg, rsp0)); in iselDVecExpr_wrk()
3995 addInstr(env, mk_vMOVsd_RR(gregHi, dstHi)); in iselDVecExpr_wrk()
3996 addInstr(env, AMD64Instr_SseReRg(op, ereg, dstHi)); in iselDVecExpr_wrk()
3997 addInstr(env, mk_vMOVsd_RR(gregLo, dstLo)); in iselDVecExpr_wrk()
3998 addInstr(env, AMD64Instr_SseReRg(op, ereg, dstLo)); in iselDVecExpr_wrk()
3999 add_to_rsp(env, 16); in iselDVecExpr_wrk()
4006 *rHi = iselVecExpr(env, e->Iex.Binop.arg1); in iselDVecExpr_wrk()
4007 *rLo = iselVecExpr(env, e->Iex.Binop.arg2); in iselDVecExpr_wrk()
4037 HReg dstHi = newVRegV(env); in iselDVecExpr_wrk()
4038 HReg dstLo = newVRegV(env); in iselDVecExpr_wrk()
4040 iselDVecExpr(&argLhi, &argLlo, env, e->Iex.Binop.arg1); in iselDVecExpr_wrk()
4041 iselDVecExpr(&argRhi, &argRlo, env, e->Iex.Binop.arg2); in iselDVecExpr_wrk()
4042 HReg argp = newVRegI(env); in iselDVecExpr_wrk()
4044 sub_from_rsp(env, 160); in iselDVecExpr_wrk()
4046 addInstr(env, AMD64Instr_Lea64(AMD64AMode_IR(48, hregAMD64_RSP()), in iselDVecExpr_wrk()
4049 addInstr(env, AMD64Instr_Alu64R(Aalu_AND, in iselDVecExpr_wrk()
4057 addInstr(env, AMD64Instr_Lea64(AMD64AMode_IR(0, argp), in iselDVecExpr_wrk()
4059 addInstr(env, AMD64Instr_Lea64(AMD64AMode_IR(16, argp), in iselDVecExpr_wrk()
4061 addInstr(env, AMD64Instr_Lea64(AMD64AMode_IR(32, argp), in iselDVecExpr_wrk()
4067 addInstr(env, AMD64Instr_SseLdSt(False/*!isLoad*/, 16, argLhi, in iselDVecExpr_wrk()
4069 addInstr(env, AMD64Instr_SseLdSt(False/*!isLoad*/, 16, argRhi, in iselDVecExpr_wrk()
4075 addInstr(env, AMD64Instr_SseLdSt(False/*!isLoad*/, 16, argLlo, in iselDVecExpr_wrk()
4077 addInstr(env, AMD64Instr_SseLdSt(False/*!isLoad*/, 16, argRlo, in iselDVecExpr_wrk()
4080 addInstr(env, AMD64Instr_Call( Acc_ALWAYS, (ULong)fn, 3, in iselDVecExpr_wrk()
4087 addInstr(env, AMD64Instr_Lea64(AMD64AMode_IR(48, argp), in iselDVecExpr_wrk()
4089 addInstr(env, AMD64Instr_Lea64(AMD64AMode_IR(64, argp), in iselDVecExpr_wrk()
4091 addInstr(env, AMD64Instr_Lea64(AMD64AMode_IR(80, argp), in iselDVecExpr_wrk()
4094 addInstr(env, AMD64Instr_Call( Acc_ALWAYS, (ULong)fn, 3, in iselDVecExpr_wrk()
4098 addInstr(env, AMD64Instr_SseLdSt(True/*isLoad*/, 16, dstHi, in iselDVecExpr_wrk()
4100 addInstr(env, AMD64Instr_SseLdSt(True/*isLoad*/, 16, dstLo, in iselDVecExpr_wrk()
4103 add_to_rsp(env, 160); in iselDVecExpr_wrk()
4115 HReg dstHi = newVRegV(env); in iselDVecExpr_wrk()
4116 HReg dstLo = newVRegV(env); in iselDVecExpr_wrk()
4118 iselDVecExpr(&argLhi, &argLlo, env, e->Iex.Binop.arg1); in iselDVecExpr_wrk()
4119 iselDVecExpr(&argRhi, &argRlo, env, e->Iex.Binop.arg2); in iselDVecExpr_wrk()
4120 HReg argp = newVRegI(env); in iselDVecExpr_wrk()
4122 sub_from_rsp(env, 160); in iselDVecExpr_wrk()
4124 addInstr(env, AMD64Instr_Lea64(AMD64AMode_IR(48, hregAMD64_RSP()), in iselDVecExpr_wrk()
4127 addInstr(env, AMD64Instr_Alu64R(Aalu_AND, in iselDVecExpr_wrk()
4135 addInstr(env, AMD64Instr_Lea64(AMD64AMode_IR(0, argp), in iselDVecExpr_wrk()
4137 addInstr(env, AMD64Instr_Lea64(AMD64AMode_IR(32, argp), in iselDVecExpr_wrk()
4139 addInstr(env, AMD64Instr_Lea64(AMD64AMode_IR(64, argp), in iselDVecExpr_wrk()
4147 addInstr(env, AMD64Instr_SseLdSt(False/*!isLoad*/, 16, argLlo, in iselDVecExpr_wrk()
4149 addInstr(env, AMD64Instr_SseLdSt(False/*!isLoad*/, 16, argLhi, in iselDVecExpr_wrk()
4151 addInstr(env, AMD64Instr_SseLdSt(False/*!isLoad*/, 16, argRlo, in iselDVecExpr_wrk()
4153 addInstr(env, AMD64Instr_SseLdSt(False/*!isLoad*/, 16, argRhi, in iselDVecExpr_wrk()
4156 addInstr(env, AMD64Instr_Call( Acc_ALWAYS, (ULong)fn, 3, in iselDVecExpr_wrk()
4160 addInstr(env, AMD64Instr_SseLdSt(True/*isLoad*/, 16, dstLo, in iselDVecExpr_wrk()
4162 addInstr(env, AMD64Instr_SseLdSt(True/*isLoad*/, 16, dstHi, in iselDVecExpr_wrk()
4165 add_to_rsp(env, 160); in iselDVecExpr_wrk()
4187 iselDVecExpr(&argLhi, &argLlo, env, triop->arg2); in iselDVecExpr_wrk()
4188 iselDVecExpr(&argRhi, &argRlo, env, triop->arg3); in iselDVecExpr_wrk()
4189 HReg dstHi = newVRegV(env); in iselDVecExpr_wrk()
4190 HReg dstLo = newVRegV(env); in iselDVecExpr_wrk()
4191 addInstr(env, mk_vMOVsd_RR(argLhi, dstHi)); in iselDVecExpr_wrk()
4192 addInstr(env, mk_vMOVsd_RR(argLlo, dstLo)); in iselDVecExpr_wrk()
4195 addInstr(env, AMD64Instr_Sse64Fx2(op, argRhi, dstHi)); in iselDVecExpr_wrk()
4196 addInstr(env, AMD64Instr_Sse64Fx2(op, argRlo, dstLo)); in iselDVecExpr_wrk()
4209 iselDVecExpr(&argLhi, &argLlo, env, triop->arg2); in iselDVecExpr_wrk()
4210 iselDVecExpr(&argRhi, &argRlo, env, triop->arg3); in iselDVecExpr_wrk()
4211 HReg dstHi = newVRegV(env); in iselDVecExpr_wrk()
4212 HReg dstLo = newVRegV(env); in iselDVecExpr_wrk()
4213 addInstr(env, mk_vMOVsd_RR(argLhi, dstHi)); in iselDVecExpr_wrk()
4214 addInstr(env, mk_vMOVsd_RR(argLlo, dstLo)); in iselDVecExpr_wrk()
4217 addInstr(env, AMD64Instr_Sse32Fx4(op, argRhi, dstHi)); in iselDVecExpr_wrk()
4218 addInstr(env, AMD64Instr_Sse32Fx4(op, argRlo, dstLo)); in iselDVecExpr_wrk()
4232 HReg vHi = newVRegV(env); in iselDVecExpr_wrk()
4233 HReg vLo = newVRegV(env); in iselDVecExpr_wrk()
4238 AMD64RI* q3 = iselIntExpr_RI(env, e->Iex.Qop.details->arg1); in iselDVecExpr_wrk()
4239 AMD64RI* q2 = iselIntExpr_RI(env, e->Iex.Qop.details->arg2); in iselDVecExpr_wrk()
4240 AMD64RI* q1 = iselIntExpr_RI(env, e->Iex.Qop.details->arg3); in iselDVecExpr_wrk()
4241 AMD64RI* q0 = iselIntExpr_RI(env, e->Iex.Qop.details->arg4); in iselDVecExpr_wrk()
4243 addInstr(env, AMD64Instr_Alu64M(Aalu_MOV, q3, m8_rsp)); in iselDVecExpr_wrk()
4244 addInstr(env, AMD64Instr_Alu64M(Aalu_MOV, q2, m16_rsp)); in iselDVecExpr_wrk()
4245 addInstr(env, AMD64Instr_SseLdSt(True/*load*/, 16, vHi, m16_rsp)); in iselDVecExpr_wrk()
4247 addInstr(env, AMD64Instr_Alu64M(Aalu_MOV, q1, m8_rsp)); in iselDVecExpr_wrk()
4248 addInstr(env, AMD64Instr_Alu64M(Aalu_MOV, q0, m16_rsp)); in iselDVecExpr_wrk()
4249 addInstr(env, AMD64Instr_SseLdSt(True/*load*/, 16, vLo, m16_rsp)); in iselDVecExpr_wrk()
4257 iselDVecExpr(&r1Hi, &r1Lo, env, e->Iex.ITE.iftrue); in iselDVecExpr_wrk()
4258 iselDVecExpr(&r0Hi, &r0Lo, env, e->Iex.ITE.iffalse); in iselDVecExpr_wrk()
4259 HReg dstHi = newVRegV(env); in iselDVecExpr_wrk()
4260 HReg dstLo = newVRegV(env); in iselDVecExpr_wrk()
4261 addInstr(env, mk_vMOVsd_RR(r1Hi,dstHi)); in iselDVecExpr_wrk()
4262 addInstr(env, mk_vMOVsd_RR(r1Lo,dstLo)); in iselDVecExpr_wrk()
4263 AMD64CondCode cc = iselCondCode(env, e->Iex.ITE.cond); in iselDVecExpr_wrk()
4264 addInstr(env, AMD64Instr_SseCMov(cc ^ 1, r0Hi, dstHi)); in iselDVecExpr_wrk()
4265 addInstr(env, AMD64Instr_SseCMov(cc ^ 1, r0Lo, dstLo)); in iselDVecExpr_wrk()
4273 LibVEX_ppVexHwCaps(VexArchAMD64, env->hwcaps)); in iselDVecExpr_wrk()
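iselDVecExpr_wrk hands a 256-bit value back as two 128-bit halves through its rHi/rLo out-parameters, and most of the binary cases above are just the 128-bit selection applied once per half. A minimal sketch of the lane-wise SSE binop shape, assuming the enclosing case has already picked op from the IROp and that e is the Binop expression:

   /* Sketch only: a lane-wise SSE binop on a V256 held as an (hi,lo) V128 pair. */
   HReg argLhi, argLlo, argRhi, argRlo;
   iselDVecExpr(&argLhi, &argLlo, env, e->Iex.Binop.arg1);
   iselDVecExpr(&argRhi, &argRlo, env, e->Iex.Binop.arg2);
   HReg dstHi = newVRegV(env);
   HReg dstLo = newVRegV(env);
   addInstr(env, mk_vMOVsd_RR(argLhi, dstHi));            /* dstHi := argL.hi            */
   addInstr(env, mk_vMOVsd_RR(argLlo, dstLo));            /* dstLo := argL.lo            */
   addInstr(env, AMD64Instr_SseReRg(op, argRhi, dstHi));  /* dstHi := dstHi `op` argR.hi */
   addInstr(env, AMD64Instr_SseReRg(op, argRlo, dstLo));  /* dstLo := dstLo `op` argR.lo */
   *rHi = dstHi;
   *rLo = dstLo;
   return;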
4283 static void iselStmt ( ISelEnv* env, IRStmt* stmt ) in iselStmt() argument
4310 = iselIntExpr_AMode(env, lg->addr); in iselStmt()
4312 = szB == 16 ? iselVecExpr(env, lg->alt) in iselStmt()
4313 : iselIntExpr_R(env, lg->alt); in iselStmt()
4315 = lookupIRTemp(env, lg->dst); in iselStmt()
4320 addInstr(env, mk_vMOVsd_RR(rAlt, rDst)); in iselStmt()
4322 addInstr(env, mk_iMOVsd_RR(rAlt, rDst)); in iselStmt()
4324 AMD64CondCode cc = iselCondCode(env, lg->guard); in iselStmt()
4326 addInstr(env, AMD64Instr_SseCLoad(cc, amAddr, rDst)); in iselStmt()
4328 addInstr(env, AMD64Instr_CLoad(cc, szB, amAddr, rDst)); in iselStmt()
4340 switch (typeOfIRExpr(env->type_env, sg->data)) { in iselStmt()
4350 = iselIntExpr_AMode(env, sg->addr); in iselStmt()
4352 = szB == 16 ? iselVecExpr(env, sg->data) in iselStmt()
4353 : iselIntExpr_R(env, sg->data); in iselStmt()
4355 = iselCondCode(env, sg->guard); in iselStmt()
4357 addInstr(env, AMD64Instr_SseCStore(cc, rSrc, amAddr)); in iselStmt()
4359 addInstr(env, AMD64Instr_CStore(cc, szB, rSrc, amAddr)); in iselStmt()
4366 IRType tya = typeOfIRExpr(env->type_env, stmt->Ist.Store.addr); in iselStmt()
4367 IRType tyd = typeOfIRExpr(env->type_env, stmt->Ist.Store.data); in iselStmt()
4374 AMD64AMode* am = iselIntExpr_AMode(env, stmt->Ist.Store.addr); in iselStmt()
4375 AMD64RI* ri = iselIntExpr_RI(env, stmt->Ist.Store.data); in iselStmt()
4376 addInstr(env, AMD64Instr_Alu64M(Aalu_MOV,ri,am)); in iselStmt()
4380 AMD64AMode* am = iselIntExpr_AMode(env, stmt->Ist.Store.addr); in iselStmt()
4381 HReg r = iselIntExpr_R(env, stmt->Ist.Store.data); in iselStmt()
4382 addInstr(env, AMD64Instr_Store( in iselStmt()
4388 AMD64AMode* am = iselIntExpr_AMode(env, stmt->Ist.Store.addr); in iselStmt()
4389 HReg r = iselDblExpr(env, stmt->Ist.Store.data); in iselStmt()
4390 addInstr(env, AMD64Instr_SseLdSt(False/*store*/, 8, r, am)); in iselStmt()
4394 AMD64AMode* am = iselIntExpr_AMode(env, stmt->Ist.Store.addr); in iselStmt()
4395 HReg r = iselFltExpr(env, stmt->Ist.Store.data); in iselStmt()
4396 addInstr(env, AMD64Instr_SseLdSt(False/*store*/, 4, r, am)); in iselStmt()
4400 AMD64AMode* am = iselIntExpr_AMode(env, stmt->Ist.Store.addr); in iselStmt()
4401 HReg r = iselVecExpr(env, stmt->Ist.Store.data); in iselStmt()
4402 addInstr(env, AMD64Instr_SseLdSt(False/*store*/, 16, r, am)); in iselStmt()
4406 HReg rA = iselIntExpr_R(env, stmt->Ist.Store.addr); in iselStmt()
4410 iselDVecExpr(&vHi, &vLo, env, stmt->Ist.Store.data); in iselStmt()
4411 addInstr(env, AMD64Instr_SseLdSt(False/*store*/, 16, vLo, am0)); in iselStmt()
4412 addInstr(env, AMD64Instr_SseLdSt(False/*store*/, 16, vHi, am16)); in iselStmt()
4420 IRType ty = typeOfIRExpr(env->type_env, stmt->Ist.Put.data); in iselStmt()
4424 AMD64RI* ri = iselIntExpr_RI(env, stmt->Ist.Put.data); in iselStmt()
4425 addInstr(env, in iselStmt()
4435 HReg r = iselIntExpr_R(env, stmt->Ist.Put.data); in iselStmt()
4436 addInstr(env, AMD64Instr_Store( in iselStmt()
4444 HReg f32 = iselFltExpr(env, stmt->Ist.Put.data); in iselStmt()
4446 set_SSE_rounding_default(env); /* paranoia */ in iselStmt()
4447 addInstr(env, AMD64Instr_SseLdSt( False/*store*/, 4, f32, am )); in iselStmt()
4451 HReg f64 = iselDblExpr(env, stmt->Ist.Put.data); in iselStmt()
4454 addInstr(env, AMD64Instr_SseLdSt( False/*store*/, 8, f64, am )); in iselStmt()
4458 HReg vec = iselVecExpr(env, stmt->Ist.Put.data); in iselStmt()
4461 addInstr(env, AMD64Instr_SseLdSt(False/*store*/, 16, vec, am)); in iselStmt()
4466 iselDVecExpr(&vHi, &vLo, env, stmt->Ist.Put.data); in iselStmt()
4470 addInstr(env, AMD64Instr_SseLdSt(False/*store*/, 16, vLo, am0)); in iselStmt()
4471 addInstr(env, AMD64Instr_SseLdSt(False/*store*/, 16, vHi, am16)); in iselStmt()
4483 env, puti->descr, in iselStmt()
4486 IRType ty = typeOfIRExpr(env->type_env, puti->data); in iselStmt()
4488 HReg val = iselDblExpr(env, puti->data); in iselStmt()
4489 addInstr(env, AMD64Instr_SseLdSt( False/*store*/, 8, val, am )); in iselStmt()
4493 HReg r = iselIntExpr_R(env, puti->data); in iselStmt()
4494 addInstr(env, AMD64Instr_Store( 1, r, am )); in iselStmt()
4498 AMD64RI* ri = iselIntExpr_RI(env, puti->data); in iselStmt()
4499 addInstr(env, AMD64Instr_Alu64M( Aalu_MOV, ri, am )); in iselStmt()
4508 IRType ty = typeOfIRTemp(env->type_env, tmp); in iselStmt()
4519 AMD64AMode* am = iselIntExpr_AMode(env, stmt->Ist.WrTmp.data); in iselStmt()
4520 HReg dst = lookupIRTemp(env, tmp); in iselStmt()
4526 addInstr(env, AMD64Instr_Alu64R(Aalu_MOV, AMD64RMI_Reg(src), dst)); in iselStmt()
4528 addInstr(env, AMD64Instr_Lea64(am,dst)); in iselStmt()
4535 AMD64RMI* rmi = iselIntExpr_RMI(env, stmt->Ist.WrTmp.data); in iselStmt()
4536 HReg dst = lookupIRTemp(env, tmp); in iselStmt()
4537 addInstr(env, AMD64Instr_Alu64R(Aalu_MOV,rmi,dst)); in iselStmt()
4542 iselInt128Expr(&rHi,&rLo, env, stmt->Ist.WrTmp.data); in iselStmt()
4543 lookupIRTempPair( &dstHi, &dstLo, env, tmp); in iselStmt()
4544 addInstr(env, mk_iMOVsd_RR(rHi,dstHi) ); in iselStmt()
4545 addInstr(env, mk_iMOVsd_RR(rLo,dstLo) ); in iselStmt()
4549 AMD64CondCode cond = iselCondCode(env, stmt->Ist.WrTmp.data); in iselStmt()
4550 HReg dst = lookupIRTemp(env, tmp); in iselStmt()
4551 addInstr(env, AMD64Instr_Set64(cond, dst)); in iselStmt()
4555 HReg dst = lookupIRTemp(env, tmp); in iselStmt()
4556 HReg src = iselDblExpr(env, stmt->Ist.WrTmp.data); in iselStmt()
4557 addInstr(env, mk_vMOVsd_RR(src, dst)); in iselStmt()
4561 HReg dst = lookupIRTemp(env, tmp); in iselStmt()
4562 HReg src = iselFltExpr(env, stmt->Ist.WrTmp.data); in iselStmt()
4563 addInstr(env, mk_vMOVsd_RR(src, dst)); in iselStmt()
4567 HReg dst = lookupIRTemp(env, tmp); in iselStmt()
4568 HReg src = iselVecExpr(env, stmt->Ist.WrTmp.data); in iselStmt()
4569 addInstr(env, mk_vMOVsd_RR(src, dst)); in iselStmt()
4574 iselDVecExpr(&rHi,&rLo, env, stmt->Ist.WrTmp.data); in iselStmt()
4575 lookupIRTempPair( &dstHi, &dstLo, env, tmp); in iselStmt()
4576 addInstr(env, mk_vMOVsd_RR(rHi,dstHi) ); in iselStmt()
4577 addInstr(env, mk_vMOVsd_RR(rLo,dstLo) ); in iselStmt()
4590 retty = typeOfIRTemp(env->type_env, d->tmp); in iselStmt()
4610 doHelperCall( &addToSp, &rloc, env, d->guard, d->cee, retty, d->args ); in iselStmt()
4627 HReg dst = lookupIRTemp(env, d->tmp); in iselStmt()
4628 addInstr(env, mk_iMOVsd_RR(hregAMD64_RAX(),dst) ); in iselStmt()
4638 HReg dst = lookupIRTemp(env, d->tmp); in iselStmt()
4640 addInstr(env, AMD64Instr_SseLdSt( True/*load*/, 16, dst, am )); in iselStmt()
4641 add_to_rsp(env, addToSp); in iselStmt()
4649 lookupIRTempPair(&dstHi, &dstLo, env, d->tmp); in iselStmt()
4651 addInstr(env, AMD64Instr_SseLdSt( True/*load*/, 16, dstLo, amLo )); in iselStmt()
4653 addInstr(env, AMD64Instr_SseLdSt( True/*load*/, 16, dstHi, amHi )); in iselStmt()
4654 add_to_rsp(env, addToSp); in iselStmt()
4668 addInstr(env, AMD64Instr_MFence()); in iselStmt()
4681 IRType ty = typeOfIRExpr(env->type_env, cas->dataLo); in iselStmt()
4683 AMD64AMode* am = iselIntExpr_AMode(env, cas->addr); in iselStmt()
4684 HReg rData = iselIntExpr_R(env, cas->dataLo); in iselStmt()
4685 HReg rExpd = iselIntExpr_R(env, cas->expdLo); in iselStmt()
4686 HReg rOld = lookupIRTemp(env, cas->oldLo); in iselStmt()
4689 addInstr(env, mk_iMOVsd_RR(rExpd, rOld)); in iselStmt()
4690 addInstr(env, mk_iMOVsd_RR(rExpd, hregAMD64_RAX())); in iselStmt()
4691 addInstr(env, mk_iMOVsd_RR(rData, hregAMD64_RBX())); in iselStmt()
4699 addInstr(env, AMD64Instr_ACAS(am, sz)); in iselStmt()
4700 addInstr(env, AMD64Instr_CMov64(Acc_NZ, hregAMD64_RAX(), rOld)); in iselStmt()
4706 IRType ty = typeOfIRExpr(env->type_env, cas->dataLo); in iselStmt()
4710 AMD64AMode* am = iselIntExpr_AMode(env, cas->addr); in iselStmt()
4711 HReg rDataHi = iselIntExpr_R(env, cas->dataHi); in iselStmt()
4712 HReg rDataLo = iselIntExpr_R(env, cas->dataLo); in iselStmt()
4713 HReg rExpdHi = iselIntExpr_R(env, cas->expdHi); in iselStmt()
4714 HReg rExpdLo = iselIntExpr_R(env, cas->expdLo); in iselStmt()
4715 HReg rOldHi = lookupIRTemp(env, cas->oldHi); in iselStmt()
4716 HReg rOldLo = lookupIRTemp(env, cas->oldLo); in iselStmt()
4719 if (!(env->hwcaps & VEX_HWCAPS_AMD64_CX16)) in iselStmt()
4731 addInstr(env, mk_iMOVsd_RR(rExpdHi, rOldHi)); in iselStmt()
4732 addInstr(env, mk_iMOVsd_RR(rExpdLo, rOldLo)); in iselStmt()
4733 addInstr(env, mk_iMOVsd_RR(rExpdHi, hregAMD64_RDX())); in iselStmt()
4734 addInstr(env, mk_iMOVsd_RR(rExpdLo, hregAMD64_RAX())); in iselStmt()
4735 addInstr(env, mk_iMOVsd_RR(rDataHi, hregAMD64_RCX())); in iselStmt()
4736 addInstr(env, mk_iMOVsd_RR(rDataLo, hregAMD64_RBX())); in iselStmt()
4737 addInstr(env, AMD64Instr_DACAS(am, sz)); in iselStmt()
4738 addInstr(env, AMD64Instr_CMov64(Acc_NZ, hregAMD64_RDX(), rOldHi)); in iselStmt()
4739 addInstr(env, AMD64Instr_CMov64(Acc_NZ, hregAMD64_RAX(), rOldLo)); in iselStmt()
4765 AMD64CondCode cc = iselCondCode(env, stmt->Ist.Exit.guard); in iselStmt()
4771 if (env->chainingAllowed) { in iselStmt()
4776 = ((Addr64)stmt->Ist.Exit.dst->Ico.U64) > env->max_ga; in iselStmt()
4778 addInstr(env, AMD64Instr_XDirect(stmt->Ist.Exit.dst->Ico.U64, in iselStmt()
4784 HReg r = iselIntExpr_R(env, IRExpr_Const(stmt->Ist.Exit.dst)); in iselStmt()
4785 addInstr(env, AMD64Instr_XAssisted(r, amRIP, cc, Ijk_Boring)); in iselStmt()
4804 HReg r = iselIntExpr_R(env, IRExpr_Const(stmt->Ist.Exit.dst)); in iselStmt()
4805 addInstr(env, AMD64Instr_XAssisted(r, amRIP, cc, stmt->Ist.Exit.jk)); in iselStmt()
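The Ist_Exit matches above show the two lowerings of a guarded side-exit: with chaining allowed and a constant destination, a patchable XDirect is emitted, taking the fast entry point for forwards edges (targets above env->max_ga, which can skip the event check at the destination); otherwise the constant is materialised into a register and an assisted transfer is used. A minimal sketch of the Boring-exit path, assuming amRIP (the guest RIP amode) is already in scope and naming the fast-entry flag toFastEP:

   /* Sketch only: lowering a Boring Ist_Exit, per the matches above. */
   AMD64CondCode cc = iselCondCode(env, stmt->Ist.Exit.guard);
   if (env->chainingAllowed) {
      /* Forwards edges (dst above max_ga) may use the fast entry point. */
      Bool toFastEP = ((Addr64)stmt->Ist.Exit.dst->Ico.U64) > env->max_ga;
      addInstr(env, AMD64Instr_XDirect(stmt->Ist.Exit.dst->Ico.U64,
                                       amRIP, cc, toFastEP));
   } else {
      /* No chaining: load the constant target and take an assisted exit. */
      HReg r = iselIntExpr_R(env, IRExpr_Const(stmt->Ist.Exit.dst));
      addInstr(env, AMD64Instr_XAssisted(r, amRIP, cc, Ijk_Boring));
   }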
4828 static void iselNext ( ISelEnv* env, in iselNext() argument
4846 if (env->chainingAllowed) { in iselNext()
4851 = ((Addr64)cdst->Ico.U64) > env->max_ga; in iselNext()
4853 addInstr(env, AMD64Instr_XDirect(cdst->Ico.U64, in iselNext()
4861 HReg r = iselIntExpr_R(env, next); in iselNext()
4862 addInstr(env, AMD64Instr_XAssisted(r, amRIP, Acc_ALWAYS, in iselNext()
4872 HReg r = iselIntExpr_R(env, next); in iselNext()
4874 if (env->chainingAllowed) { in iselNext()
4875 addInstr(env, AMD64Instr_XIndir(r, amRIP, Acc_ALWAYS)); in iselNext()
4877 addInstr(env, AMD64Instr_XAssisted(r, amRIP, Acc_ALWAYS, in iselNext()
4899 HReg r = iselIntExpr_R(env, next); in iselNext()
4901 addInstr(env, AMD64Instr_XAssisted(r, amRIP, Acc_ALWAYS, jk)); in iselNext()
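iselNext makes the same split for the block's final transfer when the target is computed: evaluate it into a register, then emit a chainable XIndir, or an XAssisted transfer when chaining is disallowed. A minimal sketch of that tail for the ordinary (Boring) case, again assuming amRIP is in scope:

   /* Sketch only: the computed-goto tail of iselNext. */
   HReg r = iselIntExpr_R(env, next);                   /* target address */
   if (env->chainingAllowed) {
      addInstr(env, AMD64Instr_XIndir(r, amRIP, Acc_ALWAYS));
   } else {
      addInstr(env, AMD64Instr_XAssisted(r, amRIP, Acc_ALWAYS, Ijk_Boring));
   }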
4935 ISelEnv* env; in iselSB_AMD64() local
4954 env = LibVEX_Alloc_inline(sizeof(ISelEnv)); in iselSB_AMD64()
4955 env->vreg_ctr = 0; in iselSB_AMD64()
4958 env->code = newHInstrArray(); in iselSB_AMD64()
4960 /* Copy BB's type env. */ in iselSB_AMD64()
4961 env->type_env = bb->tyenv; in iselSB_AMD64()
4965 env->n_vregmap = bb->tyenv->types_used; in iselSB_AMD64()
4966 env->vregmap = LibVEX_Alloc_inline(env->n_vregmap * sizeof(HReg)); in iselSB_AMD64()
4967 env->vregmapHI = LibVEX_Alloc_inline(env->n_vregmap * sizeof(HReg)); in iselSB_AMD64()
4970 env->chainingAllowed = chainingAllowed; in iselSB_AMD64()
4971 env->hwcaps = hwcaps_host; in iselSB_AMD64()
4972 env->max_ga = max_ga; in iselSB_AMD64()
4977 for (i = 0; i < env->n_vregmap; i++) { in iselSB_AMD64()
5001 env->vregmap[i] = hreg; in iselSB_AMD64()
5002 env->vregmapHI[i] = hregHI; in iselSB_AMD64()
5004 env->vreg_ctr = j; in iselSB_AMD64()
5009 addInstr(env, AMD64Instr_EvCheck(amCounter, amFailAddr)); in iselSB_AMD64()
5016 addInstr(env, AMD64Instr_ProfInc()); in iselSB_AMD64()
5022 iselStmt(env, bb->stmts[i]); in iselSB_AMD64()
5024 iselNext(env, bb->next, bb->jumpkind, bb->offsIP); in iselSB_AMD64()
5027 env->code->n_vregs = env->vreg_ctr; in iselSB_AMD64()
5028 return env->code; in iselSB_AMD64()
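Taken together, the iselSB_AMD64 matches sketch the driver: allocate and populate the ISelEnv, map every IRTemp to a fresh virtual register (or a pair for I128/V256 temps), emit the event-check and optional profile-increment prologue, select each statement and then the block's exit, and return the instruction array with its virtual-register count. A compressed sketch of that flow; the per-type vregmap setup and the prologue amodes are elided, and field names not shown above (such as bb->stmts_used) are assumptions:

   /* Sketch only: the overall flow of iselSB_AMD64, per the matches above. */
   Int i;
   ISelEnv* env = LibVEX_Alloc_inline(sizeof(ISelEnv));
   env->vreg_ctr        = 0;
   env->code            = newHInstrArray();
   env->type_env        = bb->tyenv;                    /* copy BB's type env */
   env->n_vregmap       = bb->tyenv->types_used;
   env->vregmap         = LibVEX_Alloc_inline(env->n_vregmap * sizeof(HReg));
   env->vregmapHI       = LibVEX_Alloc_inline(env->n_vregmap * sizeof(HReg));
   env->chainingAllowed = chainingAllowed;
   env->hwcaps          = hwcaps_host;
   env->max_ga          = max_ga;

   /* ... map each IRTemp to fresh vregs in vregmap/vregmapHI by type, then
      emit the AMD64Instr_EvCheck / AMD64Instr_ProfInc prologue ... */

   for (i = 0; i < bb->stmts_used; i++)                 /* one statement at a time */
      iselStmt(env, bb->stmts[i]);
   iselNext(env, bb->next, bb->jumpkind, bb->offsIP);   /* and the block's exit */

   env->code->n_vregs = env->vreg_ctr;                  /* record vregs for regalloc */
   return env->code;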