Searched refs:target64 (Results 1 – 15 of 15) sorted by relevance

/art/compiler/dex/quick/x86/
call_x86.cc
98 DCHECK_EQ(start_of_method_reg.Is64Bit(), cu_->target64); in GenLargePackedSwitch()
117 OpRegReg(kOpAdd, start_of_method_reg, cu_->target64 ? As64BitReg(disp_reg) : disp_reg); in GenLargePackedSwitch()
176 int ex_offset = cu_->target64 ? in GenMoveException()
180 NewLIR2(cu_->target64 ? kX86Mov64RT : kX86Mov32RT, rl_result.reg.GetReg(), ex_offset); in GenMoveException()
181 NewLIR2(cu_->target64 ? kX86Mov64TI : kX86Mov32TI, ex_offset, 0); in GenMoveException()
189 DCHECK_EQ(tgt_addr_reg.Is64Bit(), cu_->target64); in MarkGCCard()
190 DCHECK_EQ(val_reg.Is64Bit(), cu_->target64); in MarkGCCard()
194 int ct_offset = cu_->target64 ? in MarkGCCard()
197 NewLIR2(cu_->target64 ? kX86Mov64RT : kX86Mov32RT, reg_card_base.GetReg(), ct_offset); in MarkGCCard()
221 InstructionSet isa = cu_->target64 ? kX86_64 : kX86; in GenEntrySequence()
[all …]
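
Note: the call_x86.cc hits show the backend's core idiom: cu_->target64 selects between 32- and 64-bit opcode variants (kX86Mov32RT vs. kX86Mov64RT) and between 32- and 64-bit Thread offsets. A minimal sketch of that idiom follows; the opcode names mirror the excerpts, but the surrounding types are simplified stand-ins, not ART's real LIR classes.

#include <cstdio>

// Opcode names taken from the excerpts above; everything else is a stand-in.
enum X86OpCode { kX86Mov32RT, kX86Mov64RT, kX86Mov32TI, kX86Mov64TI };

struct CompilationUnit { bool target64; };

// Hypothetical stand-in for Mir2Lir::NewLIR2(): record one instruction.
void NewLIR2(X86OpCode op, int operand0, int operand1) {
  std::printf("LIR op=%d %d %d\n", op, operand0, operand1);
}

// Shape of GenMoveException() above: one flag, two opcode widths.
void GenMoveException(const CompilationUnit* cu, int result_reg, int ex_offset) {
  NewLIR2(cu->target64 ? kX86Mov64RT : kX86Mov32RT, result_reg, ex_offset);
  NewLIR2(cu->target64 ? kX86Mov64TI : kX86Mov32TI, ex_offset, 0);
}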
target_x86.cc
184 return cu_->target64 ? x86_64_loc_c_return_ref : x86_loc_c_return_ref; in LocCReturnRef()
188 return cu_->target64 ? x86_64_loc_c_return_wide : x86_loc_c_return_wide; in LocCReturnWide()
227 case kHiddenFpArg: DCHECK(!cu_->target64); res_reg = rs_fr0; break; in TargetReg32()
447 if (!cu_->target64) { in AllocateByteRegister()
458 return cu_->target64 || reg.GetRegNum() < rs_rX86_SP.GetRegNum(); in IsByteRegister()
463 if (cu_->target64) { in ClobberCallerSave()
522 if (cu_->target64) { in LockCallTemps()
542 if (cu_->target64) { in FreeCallTemps()
614 if (cu_->target64) { in CompilerInitializeRegAlloc()
627 const ArrayRef<const RegStorage> *xp_regs = cu_->target64 ? &xp_regs_64 : &xp_regs_32; in CompilerInitializeRegAlloc()
[all …]
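
Note: the target_x86.cc hits are target-description plumbing: return-value locations, call-temp sets, and register pools all fork on target64. The IsByteRegister() hit (line 458) encodes a real ISA rule; a sketch of it, with a trimmed register type:

// On 32-bit x86 only EAX..EBX (register numbers below ESP's) have 8-bit
// forms (AL..BL); with a REX prefix, x86-64 gives every GPR a byte form.
// RegStorage is a simplified stand-in; 4 is ESP/RSP in standard x86 encoding.
struct RegStorage {
  int num;
  int GetRegNum() const { return num; }
};

constexpr int kX86RegNumSP = 4;

bool IsByteRegister(bool target64, RegStorage reg) {
  return target64 || reg.GetRegNum() < kX86RegNumSP;
}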
int_x86.cc
36 if (cu_->target64) { in GenCmpLong()
401 if (cu_->target64) { in GenFusedLongCmpBranch()
455 if (cu_->target64) { in GenFusedLongCmpImmBranch()
852 if (!cu_->target64) { in GenInlinedPeek()
874 if (!cu_->target64) { in GenInlinedPoke()
888 if (!cu_->target64 && size == kSignedByte) { in GenInlinedPoke()
928 if (!cu_->target64) { in GenInlinedCas()
936 if (is_long && cu_->target64) { in GenInlinedCas()
1037 if (cu_->target64) { in GenInlinedCas()
1121 if (cu_->target64) { in GenDivZeroCheckWide()
[all …]
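
Note: the int_x86.cc hits all trace back to one fact: a Java long fits a single GPR on x86-64 but needs a register pair on 32-bit x86. A sketch of the comparison the two GenCmpLong() paths must produce, modeling the generated code's semantics rather than the LIR itself:

#include <cstdint>

// 64-bit target: one wide compare.
int CmpLongWide(int64_t lhs, int64_t rhs) {
  return (lhs < rhs) ? -1 : (lhs > rhs) ? 1 : 0;
}

// 32-bit target: signed compare of high words, then unsigned of low words.
int CmpLongPair(int32_t lhs_hi, uint32_t lhs_lo, int32_t rhs_hi, uint32_t rhs_lo) {
  if (lhs_hi != rhs_hi) return (lhs_hi < rhs_hi) ? -1 : 1;
  if (lhs_lo != rhs_lo) return (lhs_lo < rhs_lo) ? -1 : 1;
  return 0;
}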
utility_x86.cc
498 if (op == kOpMul && !cu_->target64) { in OpRegRegImm()
501 } else if (op == kOpAnd && !cu_->target64) { in OpRegRegImm()
656 if (cu_->target64) { in LoadBaseIndexedDisp()
803 if (cu_->target64) { in StoreBaseIndexedDisp()
916 base_of_code_ = mir_graph_->GetNewCompilerTemp(kCompilerTempVR, cu_->target64 == true); in AnalyzeMIR()
1020 if (use.is_const && !cu_->target64) { in AnalyzeDoubleUse()
1055 if (cu_->target64) { in AnalyzeInvokeStatic()
1079 if (cu_->target64) { in InvokeTrampoline()
fp_x86.cc
258 if (cu_->target64) { in GenConversion()
266 if (cu_->target64) { in GenConversion()
274 if (cu_->target64) { in GenConversion()
299 if (cu_->target64) { in GenConversion()
573 if (cu_->target64) { in GenNegDouble()
651 if (cu_->target64) { in GenInlinedAbsDouble()
codegen_x86.h
89 if (cu_->target64) { in TargetReg()
99 } else if (wide_kind == kRef && cu_->target64) { in TargetReg()
106 return TargetReg(symbolic_reg, cu_->target64 ? kWide : kNotWide); in TargetPtrReg()
947 return cu_->target64; // On 64b, we have 64b GPRs. in WideGPRsAreAliases()
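
Note: the codegen_x86.h hits show where the flag meets the register model: references are pointer-sized, so TargetReg() widens a kRef request on 64-bit targets, TargetPtrReg() forwards target64 as the wideness, and WideGPRsAreAliases() is simply target64 because an x86-64 GPR and its 32-bit view are the same physical register. A sketch of TargetPtrReg() with stand-in types:

enum WideKind { kNotWide, kWide };

struct RegStorage {
  int num;
  bool is_64bit;
};

// Hypothetical lookup: return the 32- or 64-bit view of a symbolic register.
RegStorage TargetReg(int symbolic_reg, WideKind wide_kind) {
  return RegStorage{symbolic_reg, wide_kind == kWide};
}

// Mirrors line 106 above: a pointer register is wide only on 64-bit targets.
RegStorage TargetPtrReg(int symbolic_reg, bool target64) {
  return TargetReg(symbolic_reg, target64 ? kWide : kNotWide);
}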
assemble_x86.cc
628 if (cu_->target64 || kIsDebugBuild) { in ComputeSize()
638 DCHECK(cu_->target64) << "Attempt to use a 64-bit only addressable register " in ComputeSize()
658 || (cu_->target64 && entry->skeleton.prefix1 == THREAD_PREFIX)) { in ComputeSize()
835 ComputeSize(&X86Mir2Lir::EncodingMap[cu_->target64 ? kX86Sub64RI : kX86Sub32RI], in GetInsnSize()
872 CHECK(cu_->target64 || !entry->skeleton.r8_form) in CheckValidByteRegister()
916 if (cu_->target64 && entry->skeleton.prefix1 == THREAD_PREFIX) { in EmitPrefix()
921 DCHECK(cu_->target64); in EmitPrefix()
931 DCHECK(cu_->target64); in EmitPrefix()
943 DCHECK(cu_->target64); in EmitPrefix()
984 if (cu_->target64) { in EmitModrmThread()
[all …]
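
Note: the assemble_x86.cc hits are encoding-time checks: REX prefixes and the r8-r15 registers are 64-bit-only, and thread-local accesses switch segment register by target. A sketch of the prefix switch suggested by line 916; the FS-to-GS rewrite is an assumption consistent with 32-bit Linux leaving FS free for the runtime while 64-bit reserves it for libc TLS, and the prefix bytes are the architectural ones:

#include <cstdint>
#include <vector>

constexpr uint8_t kFsPrefix = 0x64;  // segment override used as THREAD_PREFIX on 32-bit x86
constexpr uint8_t kGsPrefix = 0x65;  // segment override used instead on x86-64

void EmitThreadPrefix(std::vector<uint8_t>* code_buffer, bool target64) {
  code_buffer->push_back(target64 ? kGsPrefix : kFsPrefix);
}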
/art/runtime/
vmap_table.h
68 bool target64 = (kRuntimeISA == kArm64) || (kRuntimeISA == kX86_64); in IsInContext() local
69 if (target64 && high_reg) { in IsInContext()
stack.cc
165 bool target64 = Is64BitInstructionSet(kRuntimeISA); in GetVReg() local
166 if (target64) { in GetVReg()
222 bool target64 = Is64BitInstructionSet(kRuntimeISA); in GetVRegPair() local
223 if (target64) { in GetVRegPair()
261 bool target64 = Is64BitInstructionSet(kRuntimeISA); in SetVReg() local
263 if (target64) { in SetVReg()
329 bool target64 = Is64BitInstructionSet(kRuntimeISA); in SetVRegPair() local
331 if (target64) { in SetVRegPair()
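
Note: on the runtime side the flag is recomputed locally from kRuntimeISA. The vmap_table.h hit still spells the test out inline; stack.cc uses the Is64BitInstructionSet() helper. A sketch of that helper, matching the inline form above, with a trimmed enum (later ISAs such as mips64 omitted):

enum InstructionSet { kNone, kArm, kArm64, kThumb2, kX86, kX86_64, kMips };

constexpr bool Is64BitInstructionSet(InstructionSet isa) {
  return isa == kArm64 || isa == kX86_64;
}

static_assert(Is64BitInstructionSet(kX86_64), "x86-64 is a 64-bit ISA");
static_assert(!Is64BitInstructionSet(kX86), "x86 is a 32-bit ISA");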
/art/compiler/dex/
compiler_ir.h
67 bool target64; member
frontend.cc
495 target64(false), in CompilationUnit()
656 cu.target64 = Is64BitInstructionSet(cu.instruction_set); in CompileMethod()
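
Note: these two files are the source of truth for every cu_->target64 read above: a plain member, initialized to false in CompilationUnit's constructor and derived from the instruction set in CompileMethod(). A sketch of that flow, with all other members omitted:

enum InstructionSet { kArm, kArm64, kThumb2, kX86, kX86_64 };

constexpr bool Is64BitInstructionSet(InstructionSet isa) {
  return isa == kArm64 || isa == kX86_64;
}

struct CompilationUnit {
  explicit CompilationUnit(InstructionSet isa)
      : instruction_set(isa), target64(false) {}  // line 495's default
  InstructionSet instruction_set;
  bool target64;
};

void CompileMethod(CompilationUnit* cu) {
  cu->target64 = Is64BitInstructionSet(cu->instruction_set);  // line 656
  // ... the backends then read cu_->target64 with the flag fixed from here on.
}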
/art/compiler/dex/quick/
codegen_util.cc
437 static void PushPointer(std::vector<uint8_t>&buf, const void* pointer, bool target64) { in PushPointer() argument
439 if (target64) { in PushPointer()
477 PushPointer(code_buffer_, &target_method_id, cu_->target64); in InstallLiteralPools()
495 PushPointer(code_buffer_, &target_method_id, cu_->target64); in InstallLiteralPools()
509 PushPointer(code_buffer_, &target_method_id, cu_->target64); in InstallLiteralPools()
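
Note: PushPointer() is small enough that the hits nearly show the whole function: a literal-pool pointer occupies 8 bytes on a 64-bit target and 4 on a 32-bit one. A sketch of a plausible body; the little-endian byte loop is an assumption, and ART's real implementation delegates to its own word-push helpers:

#include <cstdint>
#include <vector>

static void PushPointer(std::vector<uint8_t>& buf, const void* pointer, bool target64) {
  // Widen via uint64_t so the shifts below stay defined even on a 32-bit host.
  uint64_t value = static_cast<uint64_t>(reinterpret_cast<uintptr_t>(pointer));
  int width = target64 ? 8 : 4;
  for (int i = 0; i < width; ++i) {
    buf.push_back(static_cast<uint8_t>(value >> (8 * i)));  // little-endian
  }
}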
ralloc_util.cc
496 if (cu_->target64) { in AllocLiveReg()
504 if (wide && !reg.IsFloat() && !cu_->target64) { in AllocLiveReg()
1319 bool wide = curr->wide || (cu_->target64 && curr->ref); in DoPromotion()
1334 if (wide && !cu_->target64) { in DoPromotion()
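
Note: the ralloc_util.cc hits show the flag shaping register allocation itself: a reference counts as wide on a 64-bit target (line 1319), and a wide value in core registers needs a pair only when the target is 32-bit (line 1334). A sketch of those two conditions with stand-in types:

struct RefOrValue {
  bool wide;  // Java long or double
  bool ref;   // object reference
};

// Line 1319: refs are pointer-sized, so they count as wide on 64-bit targets.
bool NeedsWideReg(const RefOrValue& curr, bool target64) {
  return curr.wide || (target64 && curr.ref);
}

// Line 1334's consequence: only a 32-bit target must pair two core registers.
int CoreRegsNeeded(const RefOrValue& curr, bool target64) {
  return (NeedsWideReg(curr, target64) && !target64) ? 2 : 1;
}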
mir_to_lir.cc
89 if (cu_->target64) { in LoadArg()
190 if (cu_->target64) { in LoadArgDirect()
1269 if (cu_->target64 && !rs.Is64Bit()) { in CheckRegStorageImpl()
gen_invoke.cc
245 DCHECK(!cu_->target64); in CallRuntimeHelperRegLocationRegLocation()
665 if (cu->target64) { in NextInvokeInsnSP()
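
Note: gen_invoke.cc closes the loop: helper calls and invoke trampolines live at Thread offsets whose values depend on pointer size, so NextInvokeInsnSP() forks on target64 to pick the matching offset. A sketch of the pointer-size-keyed offset type this implies; ART's real ThreadOffset is templated on pointer size, but the surrounding helper here is hypothetical:

#include <cstddef>

template <size_t kPointerSize>
struct ThreadOffset {
  explicit constexpr ThreadOffset(size_t value) : value(value) {}
  size_t value;
};

// Hypothetical caller: the same symbolic entrypoint resolves to different
// byte offsets because every pointer field before it doubles in size.
size_t TrampolineOffset(bool target64,
                        ThreadOffset<4> offset32, ThreadOffset<8> offset64) {
  return target64 ? offset64.value : offset32.value;
}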