• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2016 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #ifndef ART_COMPILER_UTILS_ARM64_JNI_MACRO_ASSEMBLER_ARM64_H_
18 #define ART_COMPILER_UTILS_ARM64_JNI_MACRO_ASSEMBLER_ARM64_H_
19 
20 #include <stdint.h>
21 #include <memory>
22 #include <vector>
23 
24 #include "assembler_arm64.h"
25 #include "base/arena_containers.h"
26 #include "base/enums.h"
27 #include "base/logging.h"
28 #include "utils/assembler.h"
29 #include "utils/jni_macro_assembler.h"
30 #include "offsets.h"
31 
32 // TODO(VIXL): Make VIXL compile with -Wshadow.
33 #pragma GCC diagnostic push
34 #pragma GCC diagnostic ignored "-Wshadow"
35 #include "aarch64/macro-assembler-aarch64.h"
36 #pragma GCC diagnostic pop
37 
38 namespace art {
39 namespace arm64 {
40 
41 class Arm64JNIMacroAssembler FINAL : public JNIMacroAssemblerFwd<Arm64Assembler, PointerSize::k64> {
42  public:
Arm64JNIMacroAssembler(ArenaAllocator * arena)43   explicit Arm64JNIMacroAssembler(ArenaAllocator* arena)
44       : JNIMacroAssemblerFwd(arena),
45         exception_blocks_(arena->Adapter(kArenaAllocAssembler)) {}
46 
47   ~Arm64JNIMacroAssembler();
48 
49   // Finalize the code.
50   void FinalizeCode() OVERRIDE;
51 
52   // Emit code that will create an activation on the stack.
53   void BuildFrame(size_t frame_size,
54                   ManagedRegister method_reg,
55                   ArrayRef<const ManagedRegister> callee_save_regs,
56                   const ManagedRegisterEntrySpills& entry_spills) OVERRIDE;
57 
58   // Emit code that will remove an activation from the stack.
59   void RemoveFrame(size_t frame_size, ArrayRef<const ManagedRegister> callee_save_regs)
60       OVERRIDE;
61 
62   void IncreaseFrameSize(size_t adjust) OVERRIDE;
63   void DecreaseFrameSize(size_t adjust) OVERRIDE;
64 
65   // Store routines.
66   void Store(FrameOffset offs, ManagedRegister src, size_t size) OVERRIDE;
67   void StoreRef(FrameOffset dest, ManagedRegister src) OVERRIDE;
68   void StoreRawPtr(FrameOffset dest, ManagedRegister src) OVERRIDE;
69   void StoreImmediateToFrame(FrameOffset dest, uint32_t imm, ManagedRegister scratch) OVERRIDE;
70   void StoreStackOffsetToThread(ThreadOffset64 thr_offs,
71                                 FrameOffset fr_offs,
72                                 ManagedRegister scratch) OVERRIDE;
73   void StoreStackPointerToThread(ThreadOffset64 thr_offs) OVERRIDE;
74   void StoreSpanning(FrameOffset dest,
75                      ManagedRegister src,
76                      FrameOffset in_off,
77                      ManagedRegister scratch) OVERRIDE;
78 
79   // Load routines.
80   void Load(ManagedRegister dest, FrameOffset src, size_t size) OVERRIDE;
81   void LoadFromThread(ManagedRegister dest, ThreadOffset64 src, size_t size) OVERRIDE;
82   void LoadRef(ManagedRegister dest, FrameOffset src) OVERRIDE;
83   void LoadRef(ManagedRegister dest,
84                ManagedRegister base,
85                MemberOffset offs,
86                bool unpoison_reference) OVERRIDE;
87   void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) OVERRIDE;
88   void LoadRawPtrFromThread(ManagedRegister dest, ThreadOffset64 offs) OVERRIDE;
89 
90   // Copying routines.
91   void Move(ManagedRegister dest, ManagedRegister src, size_t size) OVERRIDE;
92   void CopyRawPtrFromThread(FrameOffset fr_offs,
93                             ThreadOffset64 thr_offs,
94                             ManagedRegister scratch) OVERRIDE;
95   void CopyRawPtrToThread(ThreadOffset64 thr_offs, FrameOffset fr_offs, ManagedRegister scratch)
96       OVERRIDE;
97   void CopyRef(FrameOffset dest, FrameOffset src, ManagedRegister scratch) OVERRIDE;
98   void Copy(FrameOffset dest, FrameOffset src, ManagedRegister scratch, size_t size) OVERRIDE;
99   void Copy(FrameOffset dest,
100             ManagedRegister src_base,
101             Offset src_offset,
102             ManagedRegister scratch,
103             size_t size) OVERRIDE;
104   void Copy(ManagedRegister dest_base,
105             Offset dest_offset,
106             FrameOffset src,
107             ManagedRegister scratch,
108             size_t size) OVERRIDE;
109   void Copy(FrameOffset dest,
110             FrameOffset src_base,
111             Offset src_offset,
112             ManagedRegister scratch,
113             size_t size) OVERRIDE;
114   void Copy(ManagedRegister dest,
115             Offset dest_offset,
116             ManagedRegister src,
117             Offset src_offset,
118             ManagedRegister scratch,
119             size_t size) OVERRIDE;
120   void Copy(FrameOffset dest,
121             Offset dest_offset,
122             FrameOffset src,
123             Offset src_offset,
124             ManagedRegister scratch,
125             size_t size) OVERRIDE;
126   void MemoryBarrier(ManagedRegister scratch) OVERRIDE;
127 
128   // Sign extension.
129   void SignExtend(ManagedRegister mreg, size_t size) OVERRIDE;
130 
131   // Zero extension.
132   void ZeroExtend(ManagedRegister mreg, size_t size) OVERRIDE;
133 
134   // Exploit fast access in managed code to Thread::Current().
135   void GetCurrentThread(ManagedRegister tr) OVERRIDE;
136   void GetCurrentThread(FrameOffset dest_offset, ManagedRegister scratch) OVERRIDE;
137 
138   // Set up out_reg to hold a Object** into the handle scope, or to be null if the
139   // value is null and null_allowed. in_reg holds a possibly stale reference
140   // that can be used to avoid loading the handle scope entry to see if the value is
141   // null.
142   void CreateHandleScopeEntry(ManagedRegister out_reg,
143                               FrameOffset handlescope_offset,
144                               ManagedRegister in_reg,
145                               bool null_allowed) OVERRIDE;
146 
147   // Set up out_off to hold a Object** into the handle scope, or to be null if the
148   // value is null and null_allowed.
149   void CreateHandleScopeEntry(FrameOffset out_off,
150                               FrameOffset handlescope_offset,
151                               ManagedRegister scratch,
152                               bool null_allowed) OVERRIDE;
153 
154   // src holds a handle scope entry (Object**) load this into dst.
155   void LoadReferenceFromHandleScope(ManagedRegister dst, ManagedRegister src) OVERRIDE;
156 
157   // Heap::VerifyObject on src. In some cases (such as a reference to this) we
158   // know that src may not be null.
159   void VerifyObject(ManagedRegister src, bool could_be_null) OVERRIDE;
160   void VerifyObject(FrameOffset src, bool could_be_null) OVERRIDE;
161 
162   // Call to address held at [base+offset].
163   void Call(ManagedRegister base, Offset offset, ManagedRegister scratch) OVERRIDE;
164   void Call(FrameOffset base, Offset offset, ManagedRegister scratch) OVERRIDE;
165   void CallFromThread(ThreadOffset64 offset, ManagedRegister scratch) OVERRIDE;
166 
167   // Generate code to check if Thread::Current()->exception_ is non-null
168   // and branch to a ExceptionSlowPath if it is.
169   void ExceptionPoll(ManagedRegister scratch, size_t stack_adjust) OVERRIDE;
170 
171   // Create a new label that can be used with Jump/Bind calls.
172   std::unique_ptr<JNIMacroLabel> CreateLabel() OVERRIDE;
173   // Emit an unconditional jump to the label.
174   void Jump(JNIMacroLabel* label) OVERRIDE;
175   // Emit a conditional jump to the label by applying a unary condition test to the register.
176   void Jump(JNIMacroLabel* label, JNIMacroUnaryCondition cond, ManagedRegister test) OVERRIDE;
177   // Code at this offset will serve as the target for the Jump call.
178   void Bind(JNIMacroLabel* label) OVERRIDE;
179 
180  private:
181   class Arm64Exception {
182    public:
Arm64Exception(Arm64ManagedRegister scratch,size_t stack_adjust)183     Arm64Exception(Arm64ManagedRegister scratch, size_t stack_adjust)
184         : scratch_(scratch), stack_adjust_(stack_adjust) {}
185 
Entry()186     vixl::aarch64::Label* Entry() { return &exception_entry_; }
187 
188     // Register used for passing Thread::Current()->exception_ .
189     const Arm64ManagedRegister scratch_;
190 
191     // Stack adjust for ExceptionPool.
192     const size_t stack_adjust_;
193 
194     vixl::aarch64::Label exception_entry_;
195 
196    private:
197     DISALLOW_COPY_AND_ASSIGN(Arm64Exception);
198   };
199 
200   // Emits Exception block.
201   void EmitExceptionPoll(Arm64Exception *exception);
202 
203   void StoreWToOffset(StoreOperandType type,
204                       WRegister source,
205                       XRegister base,
206                       int32_t offset);
207   void StoreToOffset(XRegister source, XRegister base, int32_t offset);
208   void StoreSToOffset(SRegister source, XRegister base, int32_t offset);
209   void StoreDToOffset(DRegister source, XRegister base, int32_t offset);
210 
211   void LoadImmediate(XRegister dest,
212                      int32_t value,
213                      vixl::aarch64::Condition cond = vixl::aarch64::al);
214   void Load(Arm64ManagedRegister dst, XRegister src, int32_t src_offset, size_t size);
215   void LoadWFromOffset(LoadOperandType type,
216                        WRegister dest,
217                        XRegister base,
218                        int32_t offset);
219   void LoadFromOffset(XRegister dest, XRegister base, int32_t offset);
220   void LoadSFromOffset(SRegister dest, XRegister base, int32_t offset);
221   void LoadDFromOffset(DRegister dest, XRegister base, int32_t offset);
222   void AddConstant(XRegister rd,
223                    int32_t value,
224                    vixl::aarch64::Condition cond = vixl::aarch64::al);
225   void AddConstant(XRegister rd,
226                    XRegister rn,
227                    int32_t value,
228                    vixl::aarch64::Condition cond = vixl::aarch64::al);
229 
230   // List of exception blocks to generate at the end of the code cache.
231   ArenaVector<std::unique_ptr<Arm64Exception>> exception_blocks_;
232 };
233 
234 class Arm64JNIMacroLabel FINAL
235     : public JNIMacroLabelCommon<Arm64JNIMacroLabel,
236                                  vixl::aarch64::Label,
237                                  kArm64> {
238  public:
AsArm64()239   vixl::aarch64::Label* AsArm64() {
240     return AsPlatformLabel();
241   }
242 };
243 
244 }  // namespace arm64
245 }  // namespace art
246 
247 #endif  // ART_COMPILER_UTILS_ARM64_JNI_MACRO_ASSEMBLER_ARM64_H_
248