Searched refs:slow_path (Results 1 – 25 of 39) sorted by relevance

/art/compiler/optimizing/
code_generator.cc
158 void AddSlowPath(SlowPathCode* slow_path) { in AddSlowPath() argument
159 slow_paths_.emplace_back(std::unique_ptr<SlowPathCode>(slow_path)); in AddSlowPath()
361 SlowPathCode* slow_path = slow_path_ptr.get(); in GenerateSlowPaths() local
362 current_slow_path_ = slow_path; in GenerateSlowPaths()
367 MaybeRecordNativeDebugInfo(slow_path->GetInstruction(), slow_path->GetDexPc(), slow_path); in GenerateSlowPaths()
368 slow_path->EmitNativeCode(this); in GenerateSlowPaths()
370 disasm_info_->AddSlowPathInterval(slow_path, code_start, GetAssembler()->CodeSize()); in GenerateSlowPaths()
586 HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) { in GenerateInvokeStaticOrDirectRuntimeCall() argument
614 InvokeRuntime(entrypoint, invoke, invoke->GetDexPc(), slow_path); in GenerateInvokeStaticOrDirectRuntimeCall()
647 SlowPathCode* slow_path) { in GenerateInvokePolymorphicCall() argument
[all …]
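
The code_generator.cc hits above show the core ownership model: AddSlowPath() stores each heap-allocated SlowPathCode in a vector of unique_ptr, and GenerateSlowPaths() later walks that vector and emits every registered slow path out of line. Below is a minimal self-contained sketch of that register-then-emit loop, using toy stand-in classes rather than ART's real CodeGenerator and SlowPathCode:

#include <cstdio>
#include <memory>
#include <vector>

// Toy stand-ins for ART's SlowPathCode / CodeGenerator; names are illustrative only.
class SlowPathCode {
 public:
  virtual ~SlowPathCode() = default;
  virtual void EmitNativeCode() = 0;  // The real method receives the CodeGenerator.
};

class CodeGenerator {
 public:
  // Mirrors AddSlowPath(): the generator takes ownership of the slow path.
  void AddSlowPath(SlowPathCode* slow_path) {
    slow_paths_.emplace_back(std::unique_ptr<SlowPathCode>(slow_path));
  }

  // Mirrors GenerateSlowPaths(): emit each registered slow path after the main code.
  void GenerateSlowPaths() {
    for (const std::unique_ptr<SlowPathCode>& slow_path_ptr : slow_paths_) {
      slow_path_ptr->EmitNativeCode();
    }
  }

 private:
  std::vector<std::unique_ptr<SlowPathCode>> slow_paths_;
};

class PrintingSlowPath : public SlowPathCode {
 public:
  void EmitNativeCode() override { std::puts("emitting slow path"); }
};

int main() {
  CodeGenerator codegen;
  codegen.AddSlowPath(new PrintingSlowPath());  // ownership transfers to the code generator
  codegen.GenerateSlowPaths();                  // emitted out of line, after the fast paths
}

The real GenerateSlowPaths() additionally records native debug info and a disassembly interval around each emission, as the lines above show; the sketch keeps only the ownership and emission order.
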
intrinsics_x86_64.cc
659 SlowPathCode* slow_path, in CheckPosition() argument
676 __ j(kLess, slow_path->GetEntryLabel()); in CheckPosition()
682 __ j(kLess, slow_path->GetEntryLabel()); in CheckPosition()
690 __ j(kLess, slow_path->GetEntryLabel()); in CheckPosition()
696 __ j(kNotEqual, slow_path->GetEntryLabel()); in CheckPosition()
701 __ j(kLess, slow_path->GetEntryLabel()); in CheckPosition()
705 __ j(kLess, slow_path->GetEntryLabel()); in CheckPosition()
715 __ j(kLess, slow_path->GetEntryLabel()); in CheckPosition()
738 SlowPathCode* slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathX86_64(invoke); in SystemArrayCopyPrimitive() local
739 codegen->AddSlowPath(slow_path); in SystemArrayCopyPrimitive()
[all …]
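
In the intrinsics_x86_64.cc hits, SystemArrayCopyPrimitive() allocates an IntrinsicSlowPathX86_64, registers it with AddSlowPath(), and CheckPosition() emits conditional jumps to its entry label whenever a position or length check fails. The sketch below reproduces only that control-flow shape in plain C++; the goto stands in for the emitted j(kLess, ...) branches, and the function name and message are illustrative, not ART code:

#include <cstdint>
#include <cstdio>

void CopyWithSlowPath(const int32_t* src, int32_t src_len,
                      int32_t* dst, int32_t dst_len,
                      int32_t src_pos, int32_t dst_pos, int32_t length) {
  if (src_pos < 0 || dst_pos < 0 || length < 0) goto slow_path;  // j(kLess, entry label)
  if (src_len - src_pos < length) goto slow_path;                // source range check
  if (dst_len - dst_pos < length) goto slow_path;                // destination range check
  for (int32_t i = 0; i < length; ++i) dst[dst_pos + i] = src[src_pos + i];
  return;                                                        // fast path done

slow_path:                                                       // GetEntryLabel()
  std::puts("falling back to the runtime arraycopy");            // stand-in for the runtime call
}

int main() {
  int32_t src[4] = {1, 2, 3, 4};
  int32_t dst[4] = {};
  CopyWithSlowPath(src, 4, dst, 4, 0, 0, 4);  // all checks pass: fast path
  CopyWithSlowPath(src, 4, dst, 4, 2, 0, 4);  // out of range: slow path
}
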
graph_visualizer.h
47 const SlowPathCode* slow_path; member
73 void AddSlowPathInterval(SlowPathCode* slow_path, size_t start, size_t end) { in AddSlowPathInterval() argument
74 slow_path_intervals_.push_back({slow_path, {start, end}}); in AddSlowPathInterval()
intrinsics_arm64.cc
1457 ReadBarrierCasSlowPathARM64* slow_path = in GenUnsafeCas() local
1471 codegen->AddSlowPath(slow_path); in GenUnsafeCas()
1472 exit_loop = slow_path->GetExitLabel(); in GenUnsafeCas()
1473 cmp_failure = slow_path->GetEntryLabel(); in GenUnsafeCas()
1725 SlowPathCodeARM64* slow_path = nullptr; in VisitStringCompareTo() local
1728 slow_path = new (codegen_->GetScopedAllocator()) IntrinsicSlowPathARM64(invoke); in VisitStringCompareTo()
1729 codegen_->AddSlowPath(slow_path); in VisitStringCompareTo()
1730 __ Cbz(arg, slow_path->GetEntryLabel()); in VisitStringCompareTo()
1887 __ Bind(slow_path->GetExitLabel()); in VisitStringCompareTo()
2107 SlowPathCodeARM64* slow_path = nullptr; in GenerateVisitStringIndexOf() local
[all …]
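
The VisitStringCompareTo() lines above follow the other recurring shape: the slow path is created only when the argument may be null, a Cbz branches to its entry label, and the exit label is bound after the fast path. A rough C++ rendering of that shape, with illustrative names and a printed message standing in for the runtime throw:

#include <cstdio>
#include <string>

int CompareToWithSlowPath(const std::string& receiver, const std::string* arg,
                          bool arg_may_be_null) {
  if (arg_may_be_null && arg == nullptr) {   // __ Cbz(arg, slow_path->GetEntryLabel())
    std::puts("slow path: throw NullPointerException via the runtime");
    return 0;                                // stand-in; the real slow path does not return here
  }
  // Fast path comparison, then __ Bind(slow_path->GetExitLabel()).
  return receiver.compare(*arg);
}
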
intrinsics_x86.cc
829 SlowPathCode* slow_path, in CheckPosition() argument
846 __ j(kLess, slow_path->GetEntryLabel()); in CheckPosition()
852 __ j(kLess, slow_path->GetEntryLabel()); in CheckPosition()
860 __ j(kLess, slow_path->GetEntryLabel()); in CheckPosition()
866 __ j(kNotEqual, slow_path->GetEntryLabel()); in CheckPosition()
871 __ j(kLess, slow_path->GetEntryLabel()); in CheckPosition()
875 __ j(kLess, slow_path->GetEntryLabel()); in CheckPosition()
885 __ j(kLess, slow_path->GetEntryLabel()); in CheckPosition()
908 SlowPathCode* slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathX86(invoke); in SystemArrayCopyPrimitive() local
909 codegen->AddSlowPath(slow_path); in SystemArrayCopyPrimitive()
[all …]
code_generator.h
364 SlowPathCode* slow_path = nullptr,
374 SlowPathCode* slow_path = nullptr,
386 SlowPathCode* slow_path = nullptr);
404 void AddSlowPath(SlowPathCode* slow_path);
528 SlowPathCode* slow_path);
532 SlowPathCode* slow_path);
625 HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path);
629 void GenerateInvokePolymorphicCall(HInvokePolymorphic* invoke, SlowPathCode* slow_path = nullptr);
678 SlowPathCode* slow_path = nullptr) = 0;
718 HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path = nullptr) = 0;
[all …]
intrinsics_arm_vixl.cc
631 SlowPathCodeARMVIXL* slow_path = nullptr; in VisitStringCompareTo() local
634 slow_path = new (codegen_->GetScopedAllocator()) IntrinsicSlowPathARMVIXL(invoke); in VisitStringCompareTo()
635 codegen_->AddSlowPath(slow_path); in VisitStringCompareTo()
636 __ CompareAndBranchIfZero(arg, slow_path->GetEntryLabel()); in VisitStringCompareTo()
696 __ Bind(slow_path->GetExitLabel()); in VisitStringCompareTo()
1102 SlowPathCodeARMVIXL* slow_path = nullptr; in GenerateVisitStringIndexOf() local
1109 slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathARMVIXL(invoke); in GenerateVisitStringIndexOf()
1110 codegen->AddSlowPath(slow_path); in GenerateVisitStringIndexOf()
1111 __ B(slow_path->GetEntryLabel()); in GenerateVisitStringIndexOf()
1112 __ Bind(slow_path->GetExitLabel()); in GenerateVisitStringIndexOf()
[all …]
code_generator_x86_64.cc
1117 HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) { in GenerateStaticOrDirectCall() argument
1134 GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path); in GenerateStaticOrDirectCall()
1153 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path); in GenerateStaticOrDirectCall()
1169 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path); in GenerateStaticOrDirectCall()
1204 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path); in GenerateStaticOrDirectCall()
1212 HInvokeVirtual* invoke, Location temp_in, SlowPathCode* slow_path) { in GenerateVirtualCall() argument
1244 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path); in GenerateVirtualCall()
1478 SlowPathCode* slow_path) { in InvokeRuntime() argument
1479 ValidateInvokeRuntime(entrypoint, instruction, slow_path); in InvokeRuntime()
1482 RecordPcInfo(instruction, dex_pc, slow_path); in InvokeRuntime()
[all …]
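
The x86-64 InvokeRuntime() hit shows the common call-site sequence: validate the entrypoint/slow-path combination, emit the call, then record PC info so a stack map exists at the call site (tagged with the slow path when the call happens inside one). A toy model of that sequence with simplified types; the entrypoint is a plain string here rather than ART's entrypoint enum, and all bodies are stand-ins:

#include <cassert>
#include <cstdio>

struct SlowPathCode {};
struct HInstruction {};

void ValidateInvokeRuntime(const char* entrypoint, const HInstruction* instruction,
                           const SlowPathCode* slow_path) {
  // The real check enforces which entrypoints may be reached with or without a slow path.
  assert(entrypoint != nullptr && instruction != nullptr);
  (void)slow_path;
}

void RecordPcInfo(const HInstruction* instruction, unsigned dex_pc,
                  const SlowPathCode* slow_path) {
  (void)instruction;
  std::printf("stack map at dex pc %u (%s a slow path)\n",
              dex_pc, slow_path != nullptr ? "inside" : "not inside");
}

void InvokeRuntime(const char* entrypoint, HInstruction* instruction,
                   unsigned dex_pc, SlowPathCode* slow_path = nullptr) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  std::printf("call %s\n", entrypoint);        // stand-in for the emitted runtime call
  RecordPcInfo(instruction, dex_pc, slow_path);
}
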
code_generator_arm64.cc
1198 SlowPathCodeARM64* slow_path = in GenerateMethodEntryExitHook() local
1200 codegen_->AddSlowPath(slow_path); in GenerateMethodEntryExitHook()
1209 __ Cbnz(value, slow_path->GetEntryLabel()); in GenerateMethodEntryExitHook()
1218 __ Cbnz(value, slow_path->GetEntryLabel()); in GenerateMethodEntryExitHook()
1219 __ Bind(slow_path->GetExitLabel()); in GenerateMethodEntryExitHook()
1257 SlowPathCodeARM64* slow_path = new (GetScopedAllocator()) CompileOptimizedSlowPathARM64(); in MaybeIncrementHotness() local
1258 AddSlowPath(slow_path); in MaybeIncrementHotness()
1269 __ Cbz(counter, slow_path->GetEntryLabel()); in MaybeIncrementHotness()
1272 __ Bind(slow_path->GetExitLabel()); in MaybeIncrementHotness()
1960 SlowPathCode* slow_path) { in InvokeRuntime() argument
[all …]
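
code_generator_arm64.cc uses the same entry/exit-label shape for instrumentation: GenerateMethodEntryExitHook() branches to the slow path when the instrumentation flag is non-zero, and MaybeIncrementHotness() branches to a CompileOptimized slow path when the counter is zero (the Cbz above), binding the exit label right after. A small sketch of the hotness check as ordinary C++; the countdown direction is inferred from the Cbz and is an assumption:

#include <cstdint>
#include <cstdio>

void MaybeIncrementHotness(uint16_t& counter) {
  if (counter == 0) {   // __ Cbz(counter, slow_path->GetEntryLabel())
    std::puts("slow path: ask the JIT for an optimized compilation");
  } else {
    --counter;          // fast path: keep counting down (direction assumed)
  }
  // __ Bind(slow_path->GetExitLabel()): execution continues here either way.
}
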
code_generator_x86.cc
1090 SlowPathCode* slow_path) { in InvokeRuntime() argument
1091 ValidateInvokeRuntime(entrypoint, instruction, slow_path); in InvokeRuntime()
1094 RecordPcInfo(instruction, dex_pc, slow_path); in InvokeRuntime()
1100 SlowPathCode* slow_path) { in InvokeRuntimeWithoutRecordingPcInfo() argument
1101 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path); in InvokeRuntimeWithoutRecordingPcInfo()
1227 SlowPathCode* slow_path = in GenerateMethodEntryExitHook() local
1229 codegen_->AddSlowPath(slow_path); in GenerateMethodEntryExitHook()
1238 __ j(kNotEqual, slow_path->GetEntryLabel()); in GenerateMethodEntryExitHook()
1246 __ j(kNotEqual, slow_path->GetEntryLabel()); in GenerateMethodEntryExitHook()
1247 __ Bind(slow_path->GetExitLabel()); in GenerateMethodEntryExitHook()
[all …]
code_generator_arm_vixl.cc
2197 SlowPathCodeARMVIXL* slow_path = in GenerateMethodEntryExitHook() local
2199 codegen_->AddSlowPath(slow_path); in GenerateMethodEntryExitHook()
2211 __ CompareAndBranchIfNonZero(temp, slow_path->GetEntryLabel()); in GenerateMethodEntryExitHook()
2220 __ CompareAndBranchIfNonZero(temp, slow_path->GetEntryLabel()); in GenerateMethodEntryExitHook()
2221 __ Bind(slow_path->GetExitLabel()); in GenerateMethodEntryExitHook()
2265 SlowPathCodeARMVIXL* slow_path = new (GetScopedAllocator()) CompileOptimizedSlowPathARMVIXL(); in MaybeIncrementHotness() local
2266 AddSlowPath(slow_path); in MaybeIncrementHotness()
2276 __ B(cc, slow_path->GetEntryLabel()); in MaybeIncrementHotness()
2278 __ Bind(slow_path->GetExitLabel()); in MaybeIncrementHotness()
2742 SlowPathCode* slow_path) { in InvokeRuntime() argument
[all …]
code_generator_x86_64.h
311 void GenerateClassInitializationCheck(SlowPathCode* slow_path, CpuRegister class_reg);
416 SlowPathCode* slow_path = nullptr) override;
422 SlowPathCode* slow_path);
527 HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path = nullptr) override;
529 HInvokeVirtual* invoke, Location temp, SlowPathCode* slow_path = nullptr) override;
code_generator_x86.h
318 void GenerateClassInitializationCheck(SlowPathCode* slow_path, Register class_reg);
440 SlowPathCode* slow_path = nullptr) override;
446 SlowPathCode* slow_path);
541 HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path = nullptr) override;
544 HInvokeVirtual* invoke, Location temp, SlowPathCode* slow_path = nullptr) override;
/art/runtime/entrypoints/
entrypoint_utils-inl.h
187 bool* slow_path) in CheckClassInitializedForObjectAlloc() argument
201 *slow_path = true; in CheckClassInitializedForObjectAlloc()
215 bool* slow_path) in CheckObjectAlloc() argument
220 *slow_path = true; in CheckObjectAlloc()
226 *slow_path = true; in CheckObjectAlloc()
229 return CheckClassInitializedForObjectAlloc(klass, self, slow_path); in CheckObjectAlloc()
239 bool slow_path = false; in AllocObjectFromCode() local
240 klass = CheckObjectAlloc(klass, self, &slow_path); in AllocObjectFromCode()
241 if (UNLIKELY(slow_path)) { in AllocObjectFromCode()
261 bool slow_path = false; in AllocObjectFromCodeResolved() local
[all …]
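
The runtime entrypoints use slow_path differently: it is a bool out-parameter. CheckObjectAlloc() and CheckClassInitializedForObjectAlloc() set *slow_path = true when the class still needs resolution or initialization, and AllocObjectFromCode() checks the flag (wrapped in UNLIKELY) before taking the fast allocation route. A toy version of that contract; the Class type and both allocation branches are stand-ins, not ART's mirror::Class or its allocators:

#include <cstdio>

struct Class { bool resolved_and_initialized; };

Class* CheckObjectAlloc(Class* klass, bool* slow_path) {
  if (!klass->resolved_and_initialized) {
    *slow_path = true;   // tell the caller to go through the runtime first
    return klass;
  }
  *slow_path = false;
  return klass;
}

void* AllocObjectFromCode(Class* klass) {
  bool slow_path = false;
  klass = CheckObjectAlloc(klass, &slow_path);
  if (slow_path) {       // UNLIKELY(slow_path) in the real code
    std::puts("slow path: resolve/initialize the class, then allocate via the runtime");
    return nullptr;      // stand-in for the runtime-allocated object
  }
  std::puts("fast path: allocate directly (e.g. from the TLAB)");
  return klass;          // stand-in for the newly allocated object
}
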
/art/runtime/interpreter/mterp/arm64ng/
object.S
2 % slow_path = add_slow_path(op_check_cast_slow_path)
12 bne ${slow_path}
68 % slow_path = add_slow_path(op_instance_of_slow_path)
79 bne ${slow_path}
151 % slow_path = add_slow_path(op_iget_slow_path, volatile_load, maybe_extend, wide, is_object)
153 % fetch_from_thread_cache("x0", miss_label=slow_path)
229 % slow_path = add_slow_path(op_iput_slow_path, volatile_store, wide, is_object)
237 % fetch_from_thread_cache("x0", miss_label=slow_path)
303 % slow_path = add_slow_path(op_sget_slow_path, volatile_load, maybe_extend, wide, is_object)
305 % fetch_from_thread_cache("x0", miss_label=slow_path)
[all …]
/art/runtime/interpreter/mterp/x86ng/
object.S
2 % slow_path = add_slow_path(op_check_cast_slow_path)
11 jne ${slow_path}
37 % slow_path = add_slow_path(op_instance_of_slow_path)
49 jne ${slow_path}
invoke.S
77 % slow_path = add_slow_path(op_invoke_interface_slow_path)
80 % fetch_from_thread_cache("%eax", miss_label=slow_path)
/art/runtime/interpreter/mterp/armng/
object.S
2 % slow_path = add_slow_path(op_check_cast_slow_path)
13 bne ${slow_path}
68 % slow_path = add_slow_path(op_instance_of_slow_path)
80 bne ${slow_path}
153 % slow_path = add_slow_path(op_iget_slow_path, load, wide, is_object)
155 % fetch_from_thread_cache("r0", miss_label=slow_path)
241 % slow_path = add_slow_path(op_iput_slow_path, store, wide, is_object)
247 % fetch_from_thread_cache("r0", miss_label=slow_path)
331 % slow_path = add_slow_path(op_sget_slow_path, load, wide, is_object)
333 % fetch_from_thread_cache("r0", miss_label=slow_path)
[all …]
invoke.S
77 % slow_path = add_slow_path(op_invoke_interface_slow_path)
80 % fetch_from_thread_cache("r4", miss_label=slow_path)
/art/runtime/interpreter/mterp/common/
gen_setup.py
81 for name, slow_path in sorted(slow_paths.items()):
82 out.write(slow_path)
/art/runtime/gc/collector/
semi_space-inl.h
63 auto slow_path = [this](const mirror::Object* ref) { in MarkObject()
68 if (!mark_bitmap_->Set(obj, slow_path)) { in MarkObject()
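
In the semi-space collector, slow_path is a lambda handed to the mark bitmap's Set(). The sketch below imitates that call shape with toy types; the exact semantics are assumed (the callback fires for objects the bitmap does not cover, and the return value means "was already marked"), since only the call site is visible above:

#include <cstdio>
#include <unordered_set>

struct Object {};

class MarkBitmap {
 public:
  template <typename SlowPath>
  bool Set(const Object* obj, const SlowPath& slow_path) {
    if (!Covers(obj)) {
      slow_path(obj);                    // hand the uncovered case to the caller's lambda
      return false;
    }
    return !marked_.insert(obj).second;  // true if it was already marked
  }

 private:
  bool Covers(const Object* obj) const { return obj != nullptr; }  // toy coverage test
  std::unordered_set<const Object*> marked_;
};

int main() {
  MarkBitmap mark_bitmap;
  Object obj;
  auto slow_path = [](const Object* ref) {
    std::printf("slow path for %p: sanity-check the uncovered reference\n",
                static_cast<const void*>(ref));
  };
  if (!mark_bitmap.Set(&obj, slow_path)) {
    std::puts("object was not previously marked: push it on the mark stack");
  }
}
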
/art/runtime/interpreter/mterp/x86_64ng/
object.S
2 % slow_path = add_slow_path(op_check_cast_slow_path)
11 jne ${slow_path}
81 % slow_path = add_slow_path(op_instance_of_slow_path)
94 jne ${slow_path}
invoke.S
75 % slow_path = add_slow_path(op_invoke_interface_slow_path)
78 % fetch_from_thread_cache("%rax", miss_label=slow_path)
/art/runtime/arch/arm/
quick_entrypoints_arm.S
1280 .macro COMPUTE_ARRAY_SIZE_UNKNOWN slow_path argument
1283 bhi \slow_path
1305 .macro COMPUTE_ARRAY_SIZE_8 slow_path argument
1310 bhi \slow_path
1315 .macro COMPUTE_ARRAY_SIZE_16 slow_path argument
1320 bhi \slow_path
1326 .macro COMPUTE_ARRAY_SIZE_32 slow_path argument
1331 bhi \slow_path
1337 .macro COMPUTE_ARRAY_SIZE_64 slow_path argument
1342 bhi \slow_path
/art/compiler/utils/x86_64/
jni_macro_assembler_x86_64.h
94 JNIMacroLabel* slow_path,
