/art/compiler/optimizing/ |
D | code_generator.cc |
    155  void AddSlowPath(SlowPathCode* slow_path) { in AddSlowPath() argument
    156  slow_paths_.emplace_back(std::unique_ptr<SlowPathCode>(slow_path)); in AddSlowPath()
    358  SlowPathCode* slow_path = slow_path_ptr.get(); in GenerateSlowPaths() local
    359  current_slow_path_ = slow_path; in GenerateSlowPaths()
    364  MaybeRecordNativeDebugInfo(slow_path->GetInstruction(), slow_path->GetDexPc(), slow_path); in GenerateSlowPaths()
    365  slow_path->EmitNativeCode(this); in GenerateSlowPaths()
    367  disasm_info_->AddSlowPathInterval(slow_path, code_start, GetAssembler()->CodeSize()); in GenerateSlowPaths()
    576  HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) { in GenerateInvokeStaticOrDirectRuntimeCall() argument
    604  InvokeRuntime(entrypoint, invoke, invoke->GetDexPc(), slow_path); in GenerateInvokeStaticOrDirectRuntimeCall()
    637  SlowPathCode* slow_path) { in GenerateInvokePolymorphicCall() argument
    [all …]
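The two clusters above show the slow-path ownership and emission protocol: AddSlowPath() takes ownership of a raw SlowPathCode* by wrapping it in a unique_ptr, and GenerateSlowPaths() later walks the vector and emits each path's native code after the main method body. A minimal sketch of that shape, with CodeGeneratorSketch as a hypothetical stand-in for ART's CodeGenerator:

```cpp
#include <memory>
#include <vector>

class CodeGeneratorSketch;

class SlowPathCode {
 public:
  virtual ~SlowPathCode() = default;
  // Emits the out-of-line code for this slow path.
  virtual void EmitNativeCode(CodeGeneratorSketch* codegen) = 0;
};

class CodeGeneratorSketch {
 public:
  // Takes ownership of the raw pointer, matching the emplace_back of a
  // unique_ptr seen at line 156 above.
  void AddSlowPath(SlowPathCode* slow_path) {
    slow_paths_.emplace_back(std::unique_ptr<SlowPathCode>(slow_path));
  }

  // Emits every registered slow path after the fast-path code.
  void GenerateSlowPaths() {
    for (const std::unique_ptr<SlowPathCode>& slow_path_ptr : slow_paths_) {
      SlowPathCode* slow_path = slow_path_ptr.get();
      current_slow_path_ = slow_path;  // visible to code emitted by the path
      slow_path->EmitNativeCode(this);
    }
    current_slow_path_ = nullptr;
  }

 private:
  std::vector<std::unique_ptr<SlowPathCode>> slow_paths_;
  SlowPathCode* current_slow_path_ = nullptr;
};
```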
|
D | graph_visualizer.h |
    46  const SlowPathCode* slow_path; member
    72  void AddSlowPathInterval(SlowPathCode* slow_path, size_t start, size_t end) { in AddSlowPathInterval() argument
    73  slow_path_intervals_.push_back({slow_path, {start, end}}); in AddSlowPathInterval()
|
D | intrinsics_arm64.cc |
    1305  ReadBarrierCasSlowPathARM64* slow_path = in GenUnsafeCas() local
    1319  codegen->AddSlowPath(slow_path); in GenUnsafeCas()
    1320  exit_loop = slow_path->GetExitLabel(); in GenUnsafeCas()
    1321  cmp_failure = slow_path->GetEntryLabel(); in GenUnsafeCas()
    1525  SlowPathCodeARM64* slow_path = nullptr; in VisitStringCompareTo() local
    1528  slow_path = new (codegen_->GetScopedAllocator()) IntrinsicSlowPathARM64(invoke); in VisitStringCompareTo()
    1529  codegen_->AddSlowPath(slow_path); in VisitStringCompareTo()
    1530  __ Cbz(arg, slow_path->GetEntryLabel()); in VisitStringCompareTo()
    1687  __ Bind(slow_path->GetExitLabel()); in VisitStringCompareTo()
    1907  SlowPathCodeARM64* slow_path = nullptr; in GenerateVisitStringIndexOf() local
    [all …]
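The VisitStringCompareTo() hits illustrate the entry/exit-label protocol every intrinsic slow path follows: allocate the slow path on the codegen's scoped allocator, register it with AddSlowPath(), branch to its entry label on the rare condition (here, a null argument), and bind its exit label where the fast path resumes. A sketch under assumed stand-in types (Label, Assembler, and IntrinsicSlowPath are simplified, not ART's classes):

```cpp
// Label, Assembler, and IntrinsicSlowPath are simplified stand-ins.
struct Label {};

class Assembler {
 public:
  // Stand-in for Cbz / CompareAndBranchIfZero: branch if the register is zero.
  void CompareAndBranchIfZero(int reg, Label* target) { (void)reg; (void)target; }
  void Bind(Label* label) { (void)label; }  // resolve label to current position
};

class IntrinsicSlowPath {
 public:
  Label* GetEntryLabel() { return &entry_; }  // jumped to from the fast path
  Label* GetExitLabel() { return &exit_; }    // bound where the fast path resumes
 private:
  Label entry_;
  Label exit_;
};

// Shape of VisitStringCompareTo() above: null argument -> slow path (which
// ends by jumping back to the exit label); otherwise run the fast comparison.
void EmitCompareToSketch(Assembler* assm, int arg_reg) {
  IntrinsicSlowPath slow_path;  // ART placement-news this on a scoped allocator
  assm->CompareAndBranchIfZero(arg_reg, slow_path.GetEntryLabel());
  // ... fast-path string comparison ...
  assm->Bind(slow_path.GetExitLabel());
}
```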
|
D | code_generator.h |
    344  SlowPathCode* slow_path = nullptr,
    354  SlowPathCode* slow_path = nullptr,
    366  SlowPathCode* slow_path = nullptr);
    384  void AddSlowPath(SlowPathCode* slow_path);
    500  SlowPathCode* slow_path);
    504  SlowPathCode* slow_path);
    597  HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path);
    601  void GenerateInvokePolymorphicCall(HInvokePolymorphic* invoke, SlowPathCode* slow_path = nullptr);
    650  SlowPathCode* slow_path = nullptr) = 0;
    690  HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path = nullptr) = 0;
    [all …]
|
D | intrinsics_x86.cc |
    817  SlowPathCode* slow_path, in CheckPosition() argument
    834  __ j(kLess, slow_path->GetEntryLabel()); in CheckPosition()
    840  __ j(kLess, slow_path->GetEntryLabel()); in CheckPosition()
    848  __ j(kLess, slow_path->GetEntryLabel()); in CheckPosition()
    854  __ j(kNotEqual, slow_path->GetEntryLabel()); in CheckPosition()
    859  __ j(kLess, slow_path->GetEntryLabel()); in CheckPosition()
    863  __ j(kLess, slow_path->GetEntryLabel()); in CheckPosition()
    873  __ j(kLess, slow_path->GetEntryLabel()); in CheckPosition()
    895  SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) IntrinsicSlowPathX86(invoke); in VisitSystemArrayCopyChar() local
    896  codegen_->AddSlowPath(slow_path); in VisitSystemArrayCopyChar()
    [all …]
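CheckPosition() funnels every failed range check for System.arraycopy into the same slow path via j(kLess, ...) / j(kNotEqual, ...). A hedged C++ rendering of the guards those jumps appear to encode (my reading of the conditions, not the literal emitted code, which is cmp/jump pairs):

```cpp
#include <cstdint>

// Hypothetical rendering of CheckPosition()'s guards; names are illustrative.
bool PositionIsValid(int32_t pos, int32_t array_length, int32_t copy_length) {
  if (pos < 0) {
    return false;  // j(kLess, slow_path->GetEntryLabel()) on the sign check
  }
  if (array_length < pos) {
    return false;  // position past the end of the array
  }
  if (array_length - pos < copy_length) {
    return false;  // not enough elements left to copy
  }
  return true;  // fast path proceeds; no branch to the slow path taken
}
```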
|
D | intrinsics_arm_vixl.cc |
    631  SlowPathCodeARMVIXL* slow_path = nullptr; in VisitStringCompareTo() local
    634  slow_path = new (codegen_->GetScopedAllocator()) IntrinsicSlowPathARMVIXL(invoke); in VisitStringCompareTo()
    635  codegen_->AddSlowPath(slow_path); in VisitStringCompareTo()
    636  __ CompareAndBranchIfZero(arg, slow_path->GetEntryLabel()); in VisitStringCompareTo()
    696  __ Bind(slow_path->GetExitLabel()); in VisitStringCompareTo()
    1102  SlowPathCodeARMVIXL* slow_path = nullptr; in GenerateVisitStringIndexOf() local
    1109  slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathARMVIXL(invoke); in GenerateVisitStringIndexOf()
    1110  codegen->AddSlowPath(slow_path); in GenerateVisitStringIndexOf()
    1111  __ B(slow_path->GetEntryLabel()); in GenerateVisitStringIndexOf()
    1112  __ Bind(slow_path->GetExitLabel()); in GenerateVisitStringIndexOf()
    [all …]
|
D | intrinsics_x86_64.cc |
    610  SlowPathCode* slow_path, in CheckPosition() argument
    627  __ j(kLess, slow_path->GetEntryLabel()); in CheckPosition()
    633  __ j(kLess, slow_path->GetEntryLabel()); in CheckPosition()
    641  __ j(kLess, slow_path->GetEntryLabel()); in CheckPosition()
    647  __ j(kNotEqual, slow_path->GetEntryLabel()); in CheckPosition()
    652  __ j(kLess, slow_path->GetEntryLabel()); in CheckPosition()
    656  __ j(kLess, slow_path->GetEntryLabel()); in CheckPosition()
    666  __ j(kLess, slow_path->GetEntryLabel()); in CheckPosition()
    688  SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) IntrinsicSlowPathX86_64(invoke); in VisitSystemArrayCopyChar() local
    689  codegen_->AddSlowPath(slow_path); in VisitSystemArrayCopyChar()
    [all …]
|
D | code_generator_x86_64.cc |
    1046  HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) { in GenerateStaticOrDirectCall() argument
    1063  GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path); in GenerateStaticOrDirectCall()
    1081  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path); in GenerateStaticOrDirectCall()
    1097  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path); in GenerateStaticOrDirectCall()
    1132  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path); in GenerateStaticOrDirectCall()
    1140  HInvokeVirtual* invoke, Location temp_in, SlowPathCode* slow_path) { in GenerateVirtualCall() argument
    1172  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path); in GenerateVirtualCall()
    1391  SlowPathCode* slow_path) { in InvokeRuntime() argument
    1392  ValidateInvokeRuntime(entrypoint, instruction, slow_path); in InvokeRuntime()
    1395  RecordPcInfo(instruction, dex_pc, slow_path); in InvokeRuntime()
    [all …]
|
D | code_generator_arm64.cc |
    1808  SlowPathCode* slow_path) { in InvokeRuntime() argument
    1809  ValidateInvokeRuntime(entrypoint, instruction, slow_path); in InvokeRuntime()
    1815  if (slow_path == nullptr || GetCompilerOptions().IsJitCompiler()) { in InvokeRuntime()
    1821  RecordPcInfo(instruction, dex_pc, slow_path); in InvokeRuntime()
    1828  RecordPcInfo(instruction, dex_pc, slow_path); in InvokeRuntime()
    1835  SlowPathCode* slow_path) { in InvokeRuntimeWithoutRecordingPcInfo() argument
    1836  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path); in InvokeRuntimeWithoutRecordingPcInfo()
    1841  void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path, in GenerateClassInitializationCheck() argument
    1858  __ B(lo, slow_path->GetEntryLabel()); in GenerateClassInitializationCheck()
    1859  __ Bind(slow_path->GetExitLabel()); in GenerateClassInitializationCheck()
    [all …]
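The ARM64 InvokeRuntime() hits show the branch at line 1815: calls with no slow path, or JIT-compiled code, load the entrypoint from the thread and call it directly, while AOT code with a slow path can route through a shared entrypoint thunk; in both cases PC info is recorded so stack walking can map the return address back to a dex PC. A structural sketch with stand-in types and emission stubbed out (an interpretation of the control flow, not ART's code):

```cpp
#include <cstdint>

// Stand-ins for ART types; only the branching shape is of interest here.
struct CompilerOptions {
  bool IsJitCompiler() const { return is_jit; }
  bool is_jit = false;
};
struct HInstruction {};
struct SlowPathCode {};

class CodeGenSketch {
 public:
  void InvokeRuntime(int entrypoint, HInstruction* instruction,
                     uint32_t dex_pc, SlowPathCode* slow_path) {
    ValidateInvokeRuntime(entrypoint, instruction, slow_path);
    if (slow_path == nullptr || options_.IsJitCompiler()) {
      // Direct call: load the entrypoint from the thread register, then blr.
      EmitDirectEntrypointCall(entrypoint);
    } else {
      // AOT with a slow path: a shared thunk keeps the emitted code smaller.
      EmitEntrypointThunkCall(entrypoint);
    }
    // Either way, record a stack map at the return PC for this dex_pc.
    RecordPcInfo(instruction, dex_pc, slow_path);
  }

 private:
  void ValidateInvokeRuntime(int, HInstruction*, SlowPathCode*) {}
  void EmitDirectEntrypointCall(int) {}
  void EmitEntrypointThunkCall(int) {}
  void RecordPcInfo(HInstruction*, uint32_t, SlowPathCode*) {}
  CompilerOptions options_;
};
```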
|
D | code_generator_x86.cc |
    1025  SlowPathCode* slow_path) { in InvokeRuntime() argument
    1026  ValidateInvokeRuntime(entrypoint, instruction, slow_path); in InvokeRuntime()
    1029  RecordPcInfo(instruction, dex_pc, slow_path); in InvokeRuntime()
    1035  SlowPathCode* slow_path) { in InvokeRuntimeWithoutRecordingPcInfo() argument
    1036  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path); in InvokeRuntimeWithoutRecordingPcInfo()
    1995  SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86>(deoptimize); in VisitDeoptimize() local
    1998  slow_path->GetEntryLabel(), in VisitDeoptimize()
    4002  SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) DivRemMinusOneSlowPathX86( in GenerateDivRemIntegral() local
    4004  codegen_->AddSlowPath(slow_path); in GenerateDivRemIntegral()
    4012  __ j(kEqual, slow_path->GetEntryLabel()); in GenerateDivRemIntegral()
    [all …]
|
D | code_generator_arm_vixl.cc |
    2590  SlowPathCode* slow_path) { in InvokeRuntime() argument
    2591  ValidateInvokeRuntime(entrypoint, instruction, slow_path); in InvokeRuntime()
    2597  if (slow_path == nullptr || GetCompilerOptions().IsJitCompiler()) { in InvokeRuntime()
    2606  RecordPcInfo(instruction, dex_pc, slow_path); in InvokeRuntime()
    2615  RecordPcInfo(instruction, dex_pc, slow_path); in InvokeRuntime()
    2622  SlowPathCode* slow_path) { in InvokeRuntimeWithoutRecordingPcInfo() argument
    2623  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path); in InvokeRuntimeWithoutRecordingPcInfo()
    2840  SlowPathCodeARMVIXL* slow_path = in VisitDeoptimize() local
    2844  slow_path->GetEntryLabel(), in VisitDeoptimize()
    5005  DivZeroCheckSlowPathARMVIXL* slow_path = in VisitDivZeroCheck() local
    [all …]
|
D | code_generator_x86_64.h |
    239  void GenerateClassInitializationCheck(SlowPathCode* slow_path, CpuRegister class_reg);
    352  SlowPathCode* slow_path = nullptr) override;
    358  SlowPathCode* slow_path);
    457  HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path = nullptr) override;
    459  HInvokeVirtual* invoke, Location temp, SlowPathCode* slow_path = nullptr) override;
|
D | code_generator_x86.h |
    259  void GenerateClassInitializationCheck(SlowPathCode* slow_path, Register class_reg);
    378  SlowPathCode* slow_path = nullptr) override;
    384  SlowPathCode* slow_path);
    478  HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path = nullptr) override;
    481  HInvokeVirtual* invoke, Location temp, SlowPathCode* slow_path = nullptr) override;
|
D | code_generator_arm_vixl.h |
    353  void GenerateClassInitializationCheck(LoadClassSlowPathARMVIXL* slow_path,
    531  SlowPathCode* slow_path = nullptr) override;
    537  SlowPathCode* slow_path);
    587  HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path = nullptr) override;
    589  HInvokeVirtual* invoke, Location temp, SlowPathCode* slow_path = nullptr) override;
|
D | code_generator_arm64.h |
    318  void GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
    688  SlowPathCode* slow_path = nullptr) override;
    694  SlowPathCode* slow_path);
    720  HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path = nullptr) override;
    722  HInvokeVirtual* invoke, Location temp, SlowPathCode* slow_path = nullptr) override;
|
/art/runtime/entrypoints/ |
D | entrypoint_utils-inl.h |
    121  bool* slow_path) in CheckClassInitializedForObjectAlloc() argument
    135  *slow_path = true; in CheckClassInitializedForObjectAlloc()
    149  bool* slow_path) in CheckObjectAlloc() argument
    154  *slow_path = true; in CheckObjectAlloc()
    160  *slow_path = true; in CheckObjectAlloc()
    163  return CheckClassInitializedForObjectAlloc(klass, self, slow_path); in CheckObjectAlloc()
    173  bool slow_path = false; in AllocObjectFromCode() local
    174  klass = CheckObjectAlloc(klass, self, &slow_path); in AllocObjectFromCode()
    175  if (UNLIKELY(slow_path)) { in AllocObjectFromCode()
    195  bool slow_path = false; in AllocObjectFromCodeResolved() local
    [all …]
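These allocation helpers use a bool* slow_path out-parameter rather than diverting control themselves: CheckObjectAlloc() flags the cases a fast-path allocation cannot handle, and AllocObjectFromCode() tests the flag (wrapped in UNLIKELY() in the real code) before falling back. A simplified sketch, with ClassSketch as a hypothetical stand-in for mirror::Class and the actual failure conditions reduced to two flags:

```cpp
// ClassSketch is a hypothetical stand-in for mirror::Class.
struct ClassSketch {
  bool instantiable = true;
  bool initialized = true;
};

// Mirrors CheckObjectAlloc(): instead of handling the failure itself, the
// helper sets *slow_path and lets the caller pick the allocation route.
ClassSketch* CheckObjectAllocSketch(ClassSketch* klass, bool* slow_path) {
  if (!klass->instantiable) {
    *slow_path = true;  // slow path will throw (e.g. InstantiationError)
    return nullptr;
  }
  if (!klass->initialized) {
    *slow_path = true;  // slow path runs the class initializer first
  }
  return klass;
}

// Mirrors AllocObjectFromCode(): check, test the flag, then allocate.
ClassSketch* AllocObjectFromCodeSketch(ClassSketch* klass) {
  bool slow_path = false;
  klass = CheckObjectAllocSketch(klass, &slow_path);
  if (slow_path) {
    return nullptr;  // diverted to the runtime's slower, GC-aware allocator
  }
  // ... fast-path (e.g. TLAB bump-pointer) allocation would follow ...
  return klass;
}
```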
|
D | entrypoint_utils.h |
    84  bool* slow_path)
|
/art/runtime/interpreter/mterp/arm64ng/ |
D | object.S |
    67  % slow_path = add_helper(lambda: op_iget_slow_path(volatile_load, maybe_extend, wide, is_object))
    69  FETCH_FROM_THREAD_CACHE x0, ${slow_path}
    147  % slow_path = "nterp_op_iput_helper_" + store + wide + is_object
    148  % add_helper(lambda: op_iput_slow_path(volatile_store, wide, is_object), slow_path)
    156  FETCH_FROM_THREAD_CACHE x0, ${slow_path}
    222  % slow_path = add_helper(lambda: op_sget_slow_path(volatile_load, maybe_extend, wide, is_object))
    224  FETCH_FROM_THREAD_CACHE x0, ${slow_path}
    313  % slow_path = "nterp_op_sput_helper_" + store + wide + is_object
    314  % add_helper(lambda: op_sput_slow_path(volatile_store, wide, is_object), slow_path)
    322  FETCH_FROM_THREAD_CACHE x0, ${slow_path}
|
/art/runtime/gc/collector/ |
D | semi_space-inl.h |
    63  auto slow_path = [this](const mirror::Object* ref) { in MarkObject()
    68  if (!mark_bitmap_->Set(obj, slow_path)) { in MarkObject()
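Here slow_path is not emitted code at all but a lambda handed to the mark bitmap's Set(). My understanding of the call shape above is that the bitmap invokes the callback for objects it does not cover (large objects, in ART's case) and returns whether the mark bit was already set; a simplified sketch of that contract, with BitmapSketch as a stand-in and the semantics an assumption drawn from the two lines above:

```cpp
#include <cstdint>

// BitmapSketch stands in for ART's heap/mark bitmaps.
class BitmapSketch {
 public:
  BitmapSketch(uintptr_t begin, uintptr_t end) : begin_(begin), end_(end) {}

  // Sets the mark bit for obj and returns whether it was already set.
  // Objects outside the covered range are handed to slow_path instead.
  template <typename SlowPathFn>
  bool Set(const void* obj, SlowPathFn&& slow_path) {
    uintptr_t addr = reinterpret_cast<uintptr_t>(obj);
    if (addr < begin_ || addr >= end_) {
      slow_path(obj);   // uncovered object: run the caller's checks
      return false;     // treated as "not previously marked"
    }
    // ... test-and-set the bit for addr (elided in this sketch) ...
    return false;
  }

 private:
  uintptr_t begin_;
  uintptr_t end_;
};

void MarkObjectSketch(BitmapSketch* mark_bitmap, const void* obj) {
  // Consistency checks for uncovered objects, as in MarkObject() above.
  auto slow_path = [](const void* ref) { (void)ref; /* CHECKs go here */ };
  if (!mark_bitmap->Set(obj, slow_path)) {
    // Newly marked: the collector would push obj on its mark stack here.
  }
}
```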
|
/art/runtime/interpreter/mterp/armng/ |
D | object.S |
    71  % slow_path = add_helper(lambda: op_iget_slow_path(load, wide, is_object))
    73  FETCH_FROM_THREAD_CACHE r0, ${slow_path}
    163  % slow_path = "nterp_op_iput_helper_" + store + wide + is_object
    164  % add_helper(lambda: op_iput_slow_path(store, wide, is_object), slow_path)
    170  FETCH_FROM_THREAD_CACHE r0, ${slow_path}
    254  % slow_path = add_helper(lambda: op_sget_slow_path(load, wide, is_object))
    256  FETCH_FROM_THREAD_CACHE r0, ${slow_path}
    356  % slow_path = "nterp_op_sput_helper_" + store + wide + is_object
    357  % add_helper(lambda: op_sput_slow_path(store, wide, is_object), slow_path)
    363  FETCH_FROM_THREAD_CACHE r0, ${slow_path}
|
D | invoke.S |
    77  % slow_path = add_helper(lambda: op_invoke_interface_slow_path())
    80  FETCH_FROM_THREAD_CACHE r4, ${slow_path}
|
D | main.S |
    1487  .macro FETCH_FROM_THREAD_CACHE dest_reg, slow_path argument
    1495  bne \slow_path
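FETCH_FROM_THREAD_CACHE probes nterp's per-thread interpreter cache and takes the bne \slow_path branch on a miss, at which point the generated slow-path helper resolves the entity and refills the cache. A C++ rendering of that probe under assumed cache geometry (the size, hash, and field names are illustrative, not nterp's actual layout):

```cpp
#include <cstddef>
#include <cstdint>

constexpr size_t kCacheSize = 256;  // assumed: a power of two

struct ThreadCacheEntry {
  const void* key;   // the dex instruction pointer being looked up
  uintptr_t value;   // the resolved payload (field offset, method, ...)
};

struct ThreadSketch {
  ThreadCacheEntry interpreter_cache[kCacheSize];
};

// Returns true and fills *dest on a hit; a miss corresponds to the
// "bne \slow_path" branch in the macro above.
bool FetchFromThreadCache(ThreadSketch* self, const void* dex_pc,
                          uintptr_t* dest) {
  size_t index = (reinterpret_cast<uintptr_t>(dex_pc) >> 2) & (kCacheSize - 1);
  const ThreadCacheEntry& entry = self->interpreter_cache[index];
  if (entry.key != dex_pc) {
    return false;  // miss: the slow-path helper resolves and refills the entry
  }
  *dest = entry.value;
  return true;
}
```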
|
/art/runtime/interpreter/mterp/x86_64ng/ |
D | invoke.S |
    75  % slow_path = add_helper(lambda: op_invoke_interface_slow_path())
    78  FETCH_FROM_THREAD_CACHE %rax, ${slow_path}
|
/art/runtime/arch/arm/ |
D | quick_entrypoints_arm.S |
    1258  .macro COMPUTE_ARRAY_SIZE_UNKNOWN slow_path argument
    1261  bhi \slow_path
    1283  .macro COMPUTE_ARRAY_SIZE_8 slow_path argument
    1288  bhi \slow_path
    1293  .macro COMPUTE_ARRAY_SIZE_16 slow_path argument
    1298  bhi \slow_path
    1304  .macro COMPUTE_ARRAY_SIZE_32 slow_path argument
    1309  bhi \slow_path
    1315  .macro COMPUTE_ARRAY_SIZE_64 slow_path argument
    1320  bhi \slow_path
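The COMPUTE_ARRAY_SIZE_* macros compute a header-plus-elements allocation size for each element width and take the bhi \slow_path branch (ja on x86, below) when the count or resulting size is too large for a fast thread-local allocation. A sketch of the guarded computation with assumed constants (kArrayHeaderSize and kMaxFastAllocBytes are illustrative, not ART's exact values):

```cpp
#include <cstdint>

// Illustrative constants; ART's actual header size and cutoff differ.
constexpr uint64_t kArrayHeaderSize = 12;           // assumed data offset
constexpr uint64_t kMaxFastAllocBytes = 1u << 17;   // assumed fast-path bound

// Returns the rounded allocation size, or 0 to signal "take the slow path".
// The assembly instead compares and branches (bhi / ja) to \slow_path.
uint64_t ComputeArraySize(uint32_t component_count, uint32_t element_shift) {
  uint64_t size = kArrayHeaderSize +
                  (static_cast<uint64_t>(component_count) << element_shift);
  if (size > kMaxFastAllocBytes) {
    return 0;  // too big for the fast path; the runtime allocator handles it
  }
  return (size + 7) & ~UINT64_C(7);  // round up to 8-byte alignment
}
```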
|
/art/runtime/arch/x86/ |
D | quick_entrypoints_x86.S |
    1096  MACRO1(COMPUTE_ARRAY_SIZE_UNKNOWN, slow_path)
    1102  ja RAW_VAR(slow_path)
    1122  MACRO1(COMPUTE_ARRAY_SIZE_8, slow_path)
    1127  ja RAW_VAR(slow_path)
    1132  MACRO1(COMPUTE_ARRAY_SIZE_16, slow_path)
    1137  ja RAW_VAR(slow_path)
    1143  MACRO1(COMPUTE_ARRAY_SIZE_32, slow_path)
    1148  ja RAW_VAR(slow_path)
    1154  MACRO1(COMPUTE_ARRAY_SIZE_64, slow_path)
    1159  ja RAW_VAR(slow_path)
|