/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_INTRINSICS_UTILS_H_
#define ART_COMPILER_OPTIMIZING_INTRINSICS_UTILS_H_

#include "base/macros.h"
#include "code_generator.h"
#include "locations.h"
#include "nodes.h"
#include "utils/assembler.h"
#include "utils/label.h"

namespace art {

// Default slow-path for fallback (calling the managed code to handle the intrinsic) in an
// intrinsified call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slow-path call, they must be
//       restored!
//
// Note: If an invoke wasn't sharpened, we will put down an invoke-virtual here. That's potentially
//       sub-optimal (compared to a direct pointer call), but this is a slow-path.

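// Usage sketch (illustrative only, not code from this header): a backend's intrinsic
// implementation typically instantiates this template with its dex calling-convention visitor,
// registers the slow path with the code generator, branches to the entry label on the uncommon
// case, and binds the exit label after the fast path. The visitor type, allocator expression and
// branch instruction below are assumptions for the example:
//
//   SlowPathCode* slow_path =
//       new (allocator) IntrinsicSlowPath<InvokeDexCallingConventionVisitorARM>(invoke);
//   codegen->AddSlowPath(slow_path);
//   __ b(slow_path->GetEntryLabel(), EQ);   // Fall back to the managed call on the rare case.
//   ...                                     // Fast-path code for the intrinsic.
//   __ Bind(slow_path->GetExitLabel());
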
template <typename TDexCallingConvention>
class IntrinsicSlowPath : public SlowPathCode {
 public:
  explicit IntrinsicSlowPath(HInvoke* invoke) : SlowPathCode(invoke), invoke_(invoke) { }

  Location MoveArguments(CodeGenerator* codegen) {
    TDexCallingConvention calling_convention_visitor;
    IntrinsicVisitor::MoveArguments(invoke_, codegen, &calling_convention_visitor);
    return calling_convention_visitor.GetMethodLocation();
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    Assembler* assembler = codegen->GetAssembler();
    assembler->Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    Location method_loc = MoveArguments(codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(), method_loc);
    } else {
      codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(), method_loc);
    }
    codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      codegen->MoveFromReturnRegister(out, invoke_->GetType());
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    assembler->Jump(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPath"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPath);
};

}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_INTRINSICS_UTILS_H_