/art/compiler/optimizing/ |
D | parallel_move_test.cc |
    189  resolver.EmitNativeCode(BuildParallelMove(&allocator, moves, arraysize(moves)));  in TYPED_TEST()
    200  resolver.EmitNativeCode(BuildParallelMove(&allocator, moves, arraysize(moves)));  in TYPED_TEST()
    216  resolver.EmitNativeCode(BuildParallelMove(&allocator, moves, arraysize(moves)));  in TYPED_TEST()
    227  resolver.EmitNativeCode(BuildParallelMove(&allocator, moves, arraysize(moves)));  in TYPED_TEST()
    238  resolver.EmitNativeCode(BuildParallelMove(&allocator, moves, arraysize(moves)));  in TYPED_TEST()
    249  resolver.EmitNativeCode(BuildParallelMove(&allocator, moves, arraysize(moves)));  in TYPED_TEST()
    274  resolver.EmitNativeCode(moves);  in TYPED_TEST()
    295  resolver.EmitNativeCode(moves);  in TYPED_TEST()
    312  resolver.EmitNativeCode(moves);  in TYPED_TEST()
    329  resolver.EmitNativeCode(moves);  in TYPED_TEST()
    [all …]
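The test hits above drive the resolvers directly: BuildParallelMove packs an array of (source, destination) register pairs into an HParallelMove, and the TYPED_TEST runs both resolver flavors over it. A sketch of that usage, assuming the test's TypeParam resolver and a GetMessage() trace accessor as suggested by the rows above (the exact trace format is illustrative):

    // Sketch of the TYPED_TEST usage pattern; TypeParam is one of the two
    // test resolvers, and GetMessage() is assumed to return the recorded
    // trace of EmitMove/EmitSwap calls.
    ArenaPool pool;
    ArenaAllocator allocator(&pool);
    // Each pair is {source_register, destination_register}: a 2-cycle.
    static constexpr size_t moves[][2] = {{0, 1}, {1, 0}};
    TypeParam resolver(&allocator);
    resolver.EmitNativeCode(BuildParallelMove(&allocator, moves, arraysize(moves)));
    // A swap-based resolver breaks the cycle with one swap, e.g. "(1 <-> 0)".
    ASSERT_STREQ("(1 <-> 0)", resolver.GetMessage().c_str());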
|
D | parallel_move_resolver.h |
    38   virtual void EmitNativeCode(HParallelMove* parallel_move) = 0;
    58   void EmitNativeCode(HParallelMove* parallel_move) OVERRIDE;
    128  void EmitNativeCode(HParallelMove* parallel_move) OVERRIDE;
|
D | intrinsics.h | 110 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move); in INTRINSICS_LIST()
|
D | parallel_move_resolver.cc |
    36   void ParallelMoveResolverWithSwap::EmitNativeCode(HParallelMove* parallel_move) {  in EmitNativeCode()  function in art::ParallelMoveResolverWithSwap
    289  void ParallelMoveResolverNoSwap::EmitNativeCode(HParallelMove* parallel_move) {  in EmitNativeCode()  function in art::ParallelMoveResolverNoSwap
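These two definitions hold the actual resolution strategies: the WithSwap variant breaks move cycles with a swap operation, while the NoSwap variant (for backends without a cheap swap) spills through a scratch location instead. The cycle-breaking idea compresses to a few lines; below is a self-contained toy over plain register indices (ints instead of ART Locations, so Blocks()/overlap handling is elided) illustrating the technique, not ART's code:

    // Toy parallel-move resolution with swap-based cycle breaking.
    #include <cstdio>
    #include <vector>

    // One pending move: copy the value in register `src` to register `dst`.
    struct Move {
      int src;
      int dst;
      bool pending = false;  // currently on the recursion path
      bool done = false;     // already emitted (or became a no-op)
    };

    // Emit moves[index], first emitting every move that reads its
    // destination; a blocker still unfinished after that recursion is a
    // cycle, broken with a single swap instead of a scratch register.
    void Perform(std::vector<Move>& moves, size_t index) {
      Move& move = moves[index];
      move.pending = true;
      for (size_t i = 0; i < moves.size(); ++i) {
        if (!moves[i].done && !moves[i].pending && moves[i].src == move.dst) {
          Perform(moves, i);  // resolve blockers first
        }
      }
      move.pending = false;
      for (size_t i = 0; i < moves.size(); ++i) {
        if (i != index && !moves[i].done && moves[i].src == move.dst) {
          // Cycle: one swap satisfies this move, and every other reader of
          // the swapped registers is redirected to the value's new home.
          std::printf("swap r%d <-> r%d\n", move.src, move.dst);
          move.done = true;
          for (size_t j = 0; j < moves.size(); ++j) {
            if (moves[j].done) continue;
            if (moves[j].src == move.src) {
              moves[j].src = move.dst;
            } else if (moves[j].src == move.dst) {
              moves[j].src = move.src;
            }
          }
          return;
        }
      }
      if (move.src != move.dst) {  // a redirected move can become a no-op
        std::printf("move r%d -> r%d\n", move.src, move.dst);
      }
      move.done = true;
    }

    int main() {
      std::vector<Move> moves = {{0, 1}, {1, 2}, {2, 0}};  // a 3-cycle
      for (size_t i = 0; i < moves.size(); ++i) {
        if (!moves[i].done) Perform(moves, i);
      }
      return 0;  // prints: swap r2 <-> r0, then swap r1 <-> r2
    }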
|
D | code_generator_arm64.cc |
    117   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::arm64::BoundsCheckSlowPathARM64
    143   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::arm64::DivZeroCheckSlowPathARM64
    166   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::arm64::LoadClassSlowPathARM64
    217   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::arm64::LoadStringSlowPathARM64
    247   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::arm64::NullCheckSlowPathARM64
    267   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::arm64::SuspendCheckSlowPathARM64
    313   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::arm64::TypeCheckSlowPathARM64
    361   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::arm64::DeoptimizationSlowPathARM64
    2415  codegen_->GetMoveResolver()->EmitNativeCode(instruction);  in VisitParallelMove()
|
D | code_generator_mips64.cc |
    118   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::mips64::BoundsCheckSlowPathMIPS64
    149   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::mips64::DivZeroCheckSlowPathMIPS64
    174   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::mips64::LoadClassSlowPathMIPS64
    225   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::mips64::LoadStringSlowPathMIPS64
    259   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::mips64::NullCheckSlowPathMIPS64
    281   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::mips64::SuspendCheckSlowPathMIPS64
    325   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::mips64::TypeCheckSlowPathMIPS64
    380   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::mips64::DeoptimizationSlowPathMIPS64
    2823  codegen_->GetMoveResolver()->EmitNativeCode(instruction);  in VisitParallelMove()
|
D | code_generator.h | 78 virtual void EmitNativeCode(CodeGenerator* codegen) = 0;
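This pure virtual is the whole contract between the code generator and its out-of-line paths: each per-architecture file above subclasses SlowPathCode once per runtime check and emits the fallback code here. A minimal sketch of the shared shape, modeled on the arm64 null-check rows above; '__' abbreviates the backend assembler as in the real files, and InvokeRuntime's exact parameter list varies across releases, so treat the body as illustrative:

    // Minimal sketch of a slow-path subclass (modeled on the arm64
    // null-check path; details abridged and approximate).
    class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
     public:
      explicit NullCheckSlowPathARM64(HNullCheck* instruction)
          : instruction_(instruction) {}

      void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
        CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
        __ Bind(GetEntryLabel());  // the fast path branches here on failure
        // Hand off to the runtime entrypoint that throws; it never returns,
        // so no jump back to the exit label is emitted.
        arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowNullPointer),
                                     instruction_,
                                     instruction_->GetDexPc(),
                                     this);
      }

     private:
      HNullCheck* const instruction_;
      DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
    };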
|
D | code_generator_x86_64.cc |
    55    void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::x86_64::NullCheckSlowPathX86_64
    71    void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::x86_64::DivZeroCheckSlowPathX86_64
    88    void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::x86_64::DivRemMinusOneSlowPathX86_64
    120   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::x86_64::SuspendCheckSlowPathX86_64
    160   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::x86_64::BoundsCheckSlowPathX86_64
    195   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::x86_64::LoadClassSlowPathX86_64
    241   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::x86_64::LoadStringSlowPathX86_64
    277   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::x86_64::TypeCheckSlowPathX86_64
    329   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::x86_64::DeoptimizationSlowPathX86_64
    3847  codegen_->GetMoveResolver()->EmitNativeCode(instruction);  in VisitParallelMove()
|
D | code_generator_x86.cc |
    52    void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::x86::NullCheckSlowPathX86
    67    void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::x86::DivZeroCheckSlowPathX86
    82    void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::x86::DivRemMinusOneSlowPathX86
    107   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::x86::BoundsCheckSlowPathX86
    137   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::x86::SuspendCheckSlowPathX86
    172   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::x86::LoadStringSlowPathX86
    206   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::x86::LoadClassSlowPathX86
    258   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::x86::TypeCheckSlowPathX86
    309   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::x86::DeoptimizationSlowPathX86
    3980  codegen_->GetMoveResolver()->EmitNativeCode(instruction);  in VisitParallelMove()
|
D | code_generator_arm.cc |
    63    void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::arm::NullCheckSlowPathARM
    79    void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::arm::DivZeroCheckSlowPathARM
    96    void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::arm::SuspendCheckSlowPathARM
    139   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::arm::BoundsCheckSlowPathARM
    174   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::arm::LoadClassSlowPathARM
    219   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::arm::LoadStringSlowPathARM
    254   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::arm::TypeCheckSlowPathARM
    301   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {  in EmitNativeCode()  function in art::arm::DeoptimizationSlowPathARM
    3519  codegen_->GetMoveResolver()->EmitNativeCode(instruction);  in VisitParallelMove()
|
D | code_generator.cc |
    187  slow_paths_.Get(i)->EmitNativeCode(this);  in CompileInternal()
    912  GetMoveResolver()->EmitNativeCode(&parallel_move);  in EmitParallelMoves()
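Both call sites in code_generator.cc tie the pieces together: CompileInternal drains the queued slow paths after the straight-line code, and EmitParallelMoves wraps a pair of simultaneous location moves in an HParallelMove for the resolver. A condensed, lightly paraphrased sketch; member and parameter names follow the rows above, but exact signatures may differ by release:

    // Condensed paraphrase of the two call sites indexed above.
    void CodeGenerator::CompileInternal(CodeAllocator* allocator, bool is_baseline) {
      // ... straight-line code for every block has been emitted; the
      // out-of-line sections queued by the visitors come last, keeping the
      // hot path fall-through in the common case.
      for (size_t i = 0, e = slow_paths_.Size(); i < e; ++i) {
        slow_paths_.Get(i)->EmitNativeCode(this);
      }
      // ...
    }

    void CodeGenerator::EmitParallelMoves(Location from1, Location to1, Primitive::Type type1,
                                          Location from2, Location to2, Primitive::Type type2) {
      // Two simultaneous moves are wrapped in an HParallelMove so the
      // resolver can order (or swap) them without clobbering either source.
      HParallelMove parallel_move(GetGraph()->GetArena());
      parallel_move.AddMove(from1, to1, type1, nullptr);
      parallel_move.AddMove(from2, to2, type2, nullptr);
      GetMoveResolver()->EmitNativeCode(&parallel_move);
    }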
|
D | intrinsics_arm.cc | 95 void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE { in EmitNativeCode() function in art::arm::IntrinsicSlowPathARM
|
D | intrinsics_arm64.cc | 104 void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE { in EmitNativeCode() function in art::arm64::IntrinsicSlowPathARM64
|
D | intrinsics_x86_64.cc | 123 void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE { in EmitNativeCode() function in art::x86_64::IntrinsicSlowPathX86_64
|
D | intrinsics_x86.cc | 132 void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE { in EmitNativeCode() function in art::x86::IntrinsicSlowPathX86
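The four intrinsics_*.cc hits are the same slow-path contract applied to intrinsics: when a fast path bails out, EmitNativeCode saves live registers, re-marshals the arguments (the MoveArguments path that reaches the resolver via the intrinsics.h row above), and falls back to the ordinary invoke. An abridged sketch of the x86-64 flavor; helper names follow the real file where known, but treat the details as approximate:

    // Abridged sketch of the intrinsic fallback (modeled on
    // IntrinsicSlowPathX86_64; only the static/direct case is shown).
    void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
      CodeGeneratorX86_64* codegen = down_cast<CodeGeneratorX86_64*>(codegen_in);
      __ Bind(GetEntryLabel());
      SaveLiveRegisters(codegen, invoke_->GetLocations());
      // Re-marshal operands into calling-convention registers; this is the
      // path that reaches the move resolver via the intrinsics.h row above.
      MoveArguments(invoke_, codegen);
      // Fall back to the ordinary, non-intrinsified call.
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          CpuRegister(RDI));
      // Put the return value where the fast path expects it, restore the
      // saved registers, and rejoin the fast path.
      Location out = invoke_->GetLocations()->Out();
      if (out.IsValid()) {
        MoveFromReturnRegister(out, invoke_->GetType(), codegen);
      }
      RestoreLiveRegisters(codegen, invoke_->GetLocations());
      __ jmp(GetExitLabel());
    }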
|