/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ENTRYPOINTS_PORTABLE_PORTABLE_ARGUMENT_VISITOR_H_
#define ART_RUNTIME_ENTRYPOINTS_PORTABLE_PORTABLE_ARGUMENT_VISITOR_H_

#include "dex_instruction-inl.h"
#include "entrypoints/entrypoint_utils.h"
#include "interpreter/interpreter.h"
#include "mirror/art_method-inl.h"
#include "mirror/object-inl.h"
#include "object_utils.h"
#include "scoped_thread_state_change.h"

namespace art {

// Visits the arguments as saved to the stack by a Runtime::kRefAndArgs callee save frame.
class PortableArgumentVisitor {
 public:
// Offset to first (not the Method*) argument in a Runtime::kRefAndArgs callee save frame.
// Size of Runtime::kRefAndArgs callee save frame.
// Size of Method* and register parameters in out stack arguments.
#if defined(__arm__)
#define PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__R1_OFFSET 8
#define PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__FRAME_SIZE 48
#define PORTABLE_STACK_ARG_SKIP 0
#elif defined(__mips__)
#define PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__R1_OFFSET 4
#define PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__FRAME_SIZE 64
#define PORTABLE_STACK_ARG_SKIP 16
#elif defined(__i386__)
// On x86 there are no register arguments and the stack pointer points directly at the called
// method's Method* argument as passed by the caller.
#define PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__R1_OFFSET 0
#define PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__FRAME_SIZE 0
#define PORTABLE_STACK_ARG_SKIP 4
#else
#error "Unsupported architecture"
#define PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__R1_OFFSET 0
#define PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__FRAME_SIZE 0
#define PORTABLE_STACK_ARG_SKIP 0
#endif
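// As an illustration of the values above (inferred from how the constructor below uses them): on
// ARM, sp on entry points at the frame's Method* slot, so reg_args_ becomes sp + 8 (R1_OFFSET) for
// the spilled register arguments and stack_args_ becomes sp + 48 + 0
// (FRAME_SIZE + PORTABLE_STACK_ARG_SKIP) for the caller's outgoing stack arguments.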

  PortableArgumentVisitor(MethodHelper& caller_mh, mirror::ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) :
    caller_mh_(caller_mh),
    args_in_regs_(ComputeArgsInRegs(caller_mh)),
    num_params_(caller_mh.NumArgs()),
    reg_args_(reinterpret_cast<byte*>(sp) + PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__R1_OFFSET),
    stack_args_(reinterpret_cast<byte*>(sp) + PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__FRAME_SIZE
                + PORTABLE_STACK_ARG_SKIP),
    cur_args_(reg_args_),
    cur_arg_index_(0),
    param_index_(0) {
  }

  virtual ~PortableArgumentVisitor() {}

  virtual void Visit() = 0;

  bool IsParamAReference() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return caller_mh_.IsParamAReference(param_index_);
  }

  bool IsParamALongOrDouble() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return caller_mh_.IsParamALongOrDouble(param_index_);
  }

  Primitive::Type GetParamPrimitiveType() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return caller_mh_.GetParamPrimitiveType(param_index_);
  }

  byte* GetParamAddress() const {
    return cur_args_ + (cur_arg_index_ * kPointerSize);
  }

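  // Walks the arguments in two passes: first those spilled from registers (starting at reg_args_),
  // then, after switching cur_args_ to stack_args_, those passed on the caller's stack. On ARM and
  // MIPS a long/double that would only half-fit in the remaining register slots is deferred to the
  // stack pass, and its stack slot index is bumped so the value stays 8-byte aligned.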
  void VisitArguments() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    for (cur_arg_index_ = 0;  cur_arg_index_ < args_in_regs_ && param_index_ < num_params_; ) {
#if (defined(__arm__) || defined(__mips__))
      if (IsParamALongOrDouble() && cur_arg_index_ == 2) {
        break;
      }
#endif
      Visit();
      cur_arg_index_ += (IsParamALongOrDouble() ? 2 : 1);
      param_index_++;
    }
    cur_args_ = stack_args_;
    cur_arg_index_ = 0;
    while (param_index_ < num_params_) {
#if (defined(__arm__) || defined(__mips__))
      if (IsParamALongOrDouble() && cur_arg_index_ % 2 != 0) {
        cur_arg_index_++;
      }
#endif
      Visit();
      cur_arg_index_ += (IsParamALongOrDouble() ? 2 : 1);
      param_index_++;
    }
  }

 private:
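  // Counts the 32-bit argument slots passed in registers: a long/double takes two slots and the
  // total is capped at 3; on x86 no arguments are passed in registers, so this is always 0 there.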
  static size_t ComputeArgsInRegs(MethodHelper& mh) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if (defined(__i386__))
    return 0;
#else
    size_t args_in_regs = 0;
    size_t num_params = mh.NumArgs();
    for (size_t i = 0; i < num_params; i++) {
      args_in_regs = args_in_regs + (mh.IsParamALongOrDouble(i) ? 2 : 1);
      if (args_in_regs > 3) {
        args_in_regs = 3;
        break;
      }
    }
    return args_in_regs;
#endif
  }
  MethodHelper& caller_mh_;
  const size_t args_in_regs_;
  const size_t num_params_;
  byte* const reg_args_;
  byte* const stack_args_;
  byte* cur_args_;
  size_t cur_arg_index_;
  size_t param_index_;
};

// Visits arguments on the stack placing them into the shadow frame.
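// cur_reg_ starts at the first "in" register of the callee's shadow frame (registers_size_ -
// ins_size_, as computed in artPortableToInterpreterBridge below); a long/double fills two vregs,
// which is why the long/double case in Visit() increments cur_reg_ an extra time.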
class BuildPortableShadowFrameVisitor : public PortableArgumentVisitor {
 public:
  BuildPortableShadowFrameVisitor(MethodHelper& caller_mh, mirror::ArtMethod** sp,
      ShadowFrame& sf, size_t first_arg_reg) :
    PortableArgumentVisitor(caller_mh, sp), sf_(sf), cur_reg_(first_arg_reg) { }
  virtual void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    Primitive::Type type = GetParamPrimitiveType();
    switch (type) {
      case Primitive::kPrimLong:  // Fall-through.
      case Primitive::kPrimDouble:
        sf_.SetVRegLong(cur_reg_, *reinterpret_cast<jlong*>(GetParamAddress()));
        ++cur_reg_;
        break;
      case Primitive::kPrimNot:
        sf_.SetVRegReference(cur_reg_, *reinterpret_cast<mirror::Object**>(GetParamAddress()));
        break;
      case Primitive::kPrimBoolean:  // Fall-through.
      case Primitive::kPrimByte:     // Fall-through.
      case Primitive::kPrimChar:     // Fall-through.
      case Primitive::kPrimShort:    // Fall-through.
      case Primitive::kPrimInt:      // Fall-through.
      case Primitive::kPrimFloat:
        sf_.SetVReg(cur_reg_, *reinterpret_cast<jint*>(GetParamAddress()));
        break;
      case Primitive::kPrimVoid:
        LOG(FATAL) << "UNREACHABLE";
        break;
    }
    ++cur_reg_;
  }

 private:
  ShadowFrame& sf_;
  size_t cur_reg_;

  DISALLOW_COPY_AND_ASSIGN(BuildPortableShadowFrameVisitor);
};

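// Bridge taken when portable compiled code calls a method that must run in the interpreter: it
// copies the arguments from the portable callee-save frame into a shadow frame allocated on the
// native stack, ensures a static method's declaring class is initialized, runs the interpreter,
// and returns the result packed into a uint64_t.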
extern "C" uint64_t artPortableToInterpreterBridge(mirror::ArtMethod* method, Thread* self,
                                                   mirror::ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // Ensure we don't get thread suspension until the object arguments are safely in the shadow
  // frame.
  // FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);

  if (method->IsAbstract()) {
    ThrowAbstractMethodError(method);
    return 0;
  } else {
    const char* old_cause = self->StartAssertNoThreadSuspension("Building interpreter shadow frame");
    MethodHelper mh(method);
    const DexFile::CodeItem* code_item = mh.GetCodeItem();
    uint16_t num_regs = code_item->registers_size_;
    void* memory = alloca(ShadowFrame::ComputeSize(num_regs));
    ShadowFrame* shadow_frame(ShadowFrame::Create(num_regs, NULL,  // No last shadow coming from quick.
                                                  method, 0, memory));
    size_t first_arg_reg = code_item->registers_size_ - code_item->ins_size_;
    BuildPortableShadowFrameVisitor shadow_frame_builder(mh, sp,
                                                         *shadow_frame, first_arg_reg);
    shadow_frame_builder.VisitArguments();
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);
    self->PushShadowFrame(shadow_frame);
    self->EndAssertNoThreadSuspension(old_cause);

    if (method->IsStatic() && !method->GetDeclaringClass()->IsInitializing()) {
      // Ensure static method's class is initialized.
      if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(method->GetDeclaringClass(),
                                                                   true, true)) {
        DCHECK(Thread::Current()->IsExceptionPending());
        self->PopManagedStackFragment(fragment);
        return 0;
      }
    }

    JValue result = interpreter::EnterInterpreterFromStub(self, mh, code_item, *shadow_frame);
    // Pop transition.
    self->PopManagedStackFragment(fragment);
    return result.GetJ();
  }
}

// Visits arguments on the stack, placing them into the args vector; Object* arguments are
// converted to jobjects.
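// In the proxy invoke handler below, the receiver is the first argument visited and so lands at
// args[0]; the handler erases it before passing the remaining arguments on.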
class BuildPortableArgumentVisitor : public PortableArgumentVisitor {
 public:
  BuildPortableArgumentVisitor(MethodHelper& caller_mh, mirror::ArtMethod** sp,
                               ScopedObjectAccessUnchecked& soa, std::vector<jvalue>& args) :
    PortableArgumentVisitor(caller_mh, sp), soa_(soa), args_(args) {}

  virtual void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    jvalue val;
    Primitive::Type type = GetParamPrimitiveType();
    switch (type) {
      case Primitive::kPrimNot: {
        mirror::Object* obj = *reinterpret_cast<mirror::Object**>(GetParamAddress());
        val.l = soa_.AddLocalReference<jobject>(obj);
        break;
      }
      case Primitive::kPrimLong:  // Fall-through.
      case Primitive::kPrimDouble:
        val.j = *reinterpret_cast<jlong*>(GetParamAddress());
        break;
      case Primitive::kPrimBoolean:  // Fall-through.
      case Primitive::kPrimByte:     // Fall-through.
      case Primitive::kPrimChar:     // Fall-through.
      case Primitive::kPrimShort:    // Fall-through.
      case Primitive::kPrimInt:      // Fall-through.
      case Primitive::kPrimFloat:
        val.i = *reinterpret_cast<jint*>(GetParamAddress());
        break;
      case Primitive::kPrimVoid:
        LOG(FATAL) << "UNREACHABLE";
        val.j = 0;
        break;
    }
    args_.push_back(val);
  }

 private:
  ScopedObjectAccessUnchecked& soa_;
  std::vector<jvalue>& args_;

  DISALLOW_COPY_AND_ASSIGN(BuildPortableArgumentVisitor);
};

// Handler for invocation on proxy methods. On entry a frame will exist for the proxy object method
// which is responsible for recording callee save registers. We explicitly place the incoming
// reference arguments into jobjects (so they survive GC), then invoke the invocation handler
// stored in a field of the proxy object, which boxes the primitive arguments and deals with error
// cases.
extern "C" uint64_t artPortableProxyInvokeHandler(mirror::ArtMethod* proxy_method,
                                                  mirror::Object* receiver,
                                                  Thread* self, mirror::ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // Ensure we don't get thread suspension until the object arguments are safely in jobjects.
  const char* old_cause =
      self->StartAssertNoThreadSuspension("Adding to IRT proxy object arguments");
  self->VerifyStack();
  // Start new JNI local reference state.
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  // Create local reference copies of the proxy method and the receiver.
  jobject rcvr_jobj = soa.AddLocalReference<jobject>(receiver);

  // Place the arguments into the args vector and remove the receiver.
  MethodHelper proxy_mh(proxy_method);
  std::vector<jvalue> args;
  BuildPortableArgumentVisitor local_ref_visitor(proxy_mh, sp, soa, args);
  local_ref_visitor.VisitArguments();
  args.erase(args.begin());

  // Convert the proxy method into the expected interface method.
  mirror::ArtMethod* interface_method = proxy_method->FindOverriddenMethod();
  DCHECK(interface_method != NULL);
  DCHECK(!interface_method->IsProxyMethod()) << PrettyMethod(interface_method);
  jobject interface_method_jobj = soa.AddLocalReference<jobject>(interface_method);

  // All naked Object*s should now be in jobjects, so it's safe to go into the main invoke code
  // that performs allocations.
  self->EndAssertNoThreadSuspension(old_cause);
  JValue result = InvokeProxyInvocationHandler(soa, proxy_mh.GetShorty(),
                                               rcvr_jobj, interface_method_jobj, args);
  return result.GetJ();
}

// Lazily resolve a method for portable. Called by stub code.
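// Roughly: recover the caller and its dex pc, decode the invoke instruction to learn the invoke
// type when the called method is the runtime resolution method, resolve the target (and
// devirtualize it against the receiver for virtual/interface calls), ensure its class is
// initialized, then store the resolved method through called_addr and return the code pointer to
// branch to (NULL if resolution or initialization failed).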
extern "C" const void* artPortableResolutionTrampoline(mirror::ArtMethod* called,
                                                       mirror::Object* receiver,
                                                       Thread* thread,
                                                       mirror::ArtMethod** called_addr)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  uint32_t dex_pc;
  mirror::ArtMethod* caller = thread->GetCurrentMethod(&dex_pc);

  ClassLinker* linker = Runtime::Current()->GetClassLinker();
  InvokeType invoke_type;
  bool is_range;
  if (called->IsRuntimeMethod()) {
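    // Reached via the unresolved-method trampoline, so the invoke type and method index must be
    // recovered by decoding the invoke instruction at the caller's current dex pc.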
    const DexFile::CodeItem* code = MethodHelper(caller).GetCodeItem();
    CHECK_LT(dex_pc, code->insns_size_in_code_units_);
    const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
    Instruction::Code instr_code = instr->Opcode();
    switch (instr_code) {
      case Instruction::INVOKE_DIRECT:
        invoke_type = kDirect;
        is_range = false;
        break;
      case Instruction::INVOKE_DIRECT_RANGE:
        invoke_type = kDirect;
        is_range = true;
        break;
      case Instruction::INVOKE_STATIC:
        invoke_type = kStatic;
        is_range = false;
        break;
      case Instruction::INVOKE_STATIC_RANGE:
        invoke_type = kStatic;
        is_range = true;
        break;
      case Instruction::INVOKE_SUPER:
        invoke_type = kSuper;
        is_range = false;
        break;
      case Instruction::INVOKE_SUPER_RANGE:
        invoke_type = kSuper;
        is_range = true;
        break;
      case Instruction::INVOKE_VIRTUAL:
        invoke_type = kVirtual;
        is_range = false;
        break;
      case Instruction::INVOKE_VIRTUAL_RANGE:
        invoke_type = kVirtual;
        is_range = true;
        break;
      case Instruction::INVOKE_INTERFACE:
        invoke_type = kInterface;
        is_range = false;
        break;
      case Instruction::INVOKE_INTERFACE_RANGE:
        invoke_type = kInterface;
        is_range = true;
        break;
      default:
        LOG(FATAL) << "Unexpected call into trampoline: " << instr->DumpString(NULL);
        // Avoid used uninitialized warnings.
        invoke_type = kDirect;
        is_range = true;
    }
    uint32_t dex_method_idx = (is_range) ? instr->VRegB_3rc() : instr->VRegB_35c();
    called = linker->ResolveMethod(dex_method_idx, caller, invoke_type);
    // Incompatible class change should have been handled in resolve method.
    CHECK(!called->CheckIncompatibleClassChange(invoke_type));
    // Refine called method based on receiver.
    if (invoke_type == kVirtual) {
      called = receiver->GetClass()->FindVirtualMethodForVirtual(called);
    } else if (invoke_type == kInterface) {
      called = receiver->GetClass()->FindVirtualMethodForInterface(called);
    }
  } else {
    CHECK(called->IsStatic()) << PrettyMethod(called);
    invoke_type = kStatic;
    // Incompatible class change should have been handled in resolve method.
    CHECK(!called->CheckIncompatibleClassChange(invoke_type));
  }
  const void* code = NULL;
  if (LIKELY(!thread->IsExceptionPending())) {
    // Ensure that the called method's class is initialized.
    mirror::Class* called_class = called->GetDeclaringClass();
    linker->EnsureInitialized(called_class, true, true);
    if (LIKELY(called_class->IsInitialized())) {
      code = called->GetEntryPointFromCompiledCode();
      // TODO: remove this after we solve the link issue.
      {  // for lazy link.
        if (code == NULL) {
          code = linker->GetOatCodeFor(called);
        }
      }
    } else if (called_class->IsInitializing()) {
      if (invoke_type == kStatic) {
        // Class is still initializing, go to oat and grab code (trampoline must be left in place
        // until class is initialized to stop races between threads).
        code = linker->GetOatCodeFor(called);
      } else {
        // No trampoline for non-static methods.
        code = called->GetEntryPointFromCompiledCode();
        // TODO: remove this after we solve the link issue.
        {  // for lazy link.
          if (code == NULL) {
            code = linker->GetOatCodeFor(called);
          }
        }
      }
    } else {
      DCHECK(called_class->IsErroneous());
    }
  }
  if (LIKELY(code != NULL)) {
    // Expect class to at least be initializing.
    DCHECK(called->GetDeclaringClass()->IsInitializing());
    // Don't want infinite recursion.
    DCHECK(code != GetResolutionTrampoline(linker));
    // Set up entry into main method.
    *called_addr = called;
  }
  return code;
}

}  // namespace art

#endif  // ART_RUNTIME_ENTRYPOINTS_PORTABLE_PORTABLE_ARGUMENT_VISITOR_H_