/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "art_method-inl.h"
#include "base/casts.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "indirect_reference_table.h"
#include "mirror/object-inl.h"
#include "thread-inl.h"
#include "verify_object.h"

namespace art {

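// The entrypoints below pass the saved IRT segment state around as a plain uint32_t cookie
// (see the bit_casts in JniMethodStart and PopLocalReferences), so the type must stay 32 bits
// wide and trivial.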
static_assert(sizeof(IRTSegmentState) == sizeof(uint32_t), "IRTSegmentState size unexpected");
static_assert(std::is_trivial<IRTSegmentState>::value, "IRTSegmentState not trivial");

template <bool kDynamicFast>
static inline void GoToRunnableFast(Thread* self) NO_THREAD_SAFETY_ANALYSIS;

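// Read barrier for the declaring class of a static JNI call, which the stub has spilled to a
// stack slot: runs the read barrier on that reference and writes any updated pointer back to
// the slot.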
extern void ReadBarrierJni(mirror::CompressedReference<mirror::Object>* handle_on_stack,
                           Thread* self ATTRIBUTE_UNUSED) {
  DCHECK(kUseReadBarrier);
  if (kUseBakerReadBarrier) {
    DCHECK(handle_on_stack->AsMirrorPtr() != nullptr)
        << "The class of a static jni call must not be null";
    // Check the mark bit and return early if it's already marked.
    if (LIKELY(handle_on_stack->AsMirrorPtr()->GetMarkBit() != 0)) {
      return;
    }
  }
  // Call the read barrier and update the handle.
  mirror::Object* to_ref = ReadBarrier::BarrierForRoot(handle_on_stack);
  handle_on_stack->Assign(to_ref);
}

// Called on entry to fast JNI, push a new local reference table only.
extern uint32_t JniMethodFastStart(Thread* self) {
  JNIEnvExt* env = self->GetJniEnv();
  DCHECK(env != nullptr);
  uint32_t saved_local_ref_cookie = bit_cast<uint32_t>(env->local_ref_cookie);
  env->local_ref_cookie = env->locals.GetSegmentState();

  if (kIsDebugBuild) {
    ArtMethod* native_method = *self->GetManagedStack()->GetTopQuickFrame();
    CHECK(native_method->IsAnnotatedWithFastNative()) << native_method->PrettyMethod();
  }

  return saved_local_ref_cookie;
}

// Called on entry to JNI, transition out of Runnable and release share of mutator_lock_.
extern uint32_t JniMethodStart(Thread* self) {
  JNIEnvExt* env = self->GetJniEnv();
  DCHECK(env != nullptr);
  uint32_t saved_local_ref_cookie = bit_cast<uint32_t>(env->local_ref_cookie);
  env->local_ref_cookie = env->locals.GetSegmentState();
  ArtMethod* native_method = *self->GetManagedStack()->GetTopQuickFrame();
  if (!native_method->IsFastNative()) {
    // When not fast JNI we transition out of runnable.
    self->TransitionFromRunnableToSuspended(kNative);
  }
  return saved_local_ref_cookie;
}

extern uint32_t JniMethodStartSynchronized(jobject to_lock, Thread* self) {
  self->DecodeJObject(to_lock)->MonitorEnter(self);
  return JniMethodStart(self);
}

// TODO: NO_THREAD_SAFETY_ANALYSIS due to different control paths depending on fast JNI.
static void GoToRunnable(Thread* self) NO_THREAD_SAFETY_ANALYSIS {
  ArtMethod* native_method = *self->GetManagedStack()->GetTopQuickFrame();
  bool is_fast = native_method->IsFastNative();
  if (!is_fast) {
    self->TransitionFromSuspendedToRunnable();
  } else {
    GoToRunnableFast</*kDynamicFast*/true>(self);
  }
}

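// kDynamicFast is true when fast-ness was only established dynamically from the method (the
// GoToRunnable() path above) and false when the caller is a dedicated @FastNative entrypoint;
// the debug checks below differ accordingly.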
// TODO: NO_THREAD_SAFETY_ANALYSIS due to different control paths depending on fast JNI.
template <bool kDynamicFast>
ALWAYS_INLINE static inline void GoToRunnableFast(Thread* self) NO_THREAD_SAFETY_ANALYSIS {
  if (kIsDebugBuild) {
    // Should only enter here if the method is !Fast JNI or @FastNative.
    ArtMethod* native_method = *self->GetManagedStack()->GetTopQuickFrame();

    if (kDynamicFast) {
      CHECK(native_method->IsFastNative()) << native_method->PrettyMethod();
    } else {
      CHECK(native_method->IsAnnotatedWithFastNative()) << native_method->PrettyMethod();
    }
  }

  // When we are in "fast" JNI or @FastNative, we are already Runnable.
  // Only do a suspend check on the way out of JNI.
  if (UNLIKELY(self->TestAllFlags())) {
    // In fast JNI mode we never transitioned out of runnable. Perform a suspend check if there
    // is a flag raised.
    DCHECK(Locks::mutator_lock_->IsSharedHeld(self));
    self->CheckSuspend();
  }
}

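// Undo the local reference frame set up by JniMethodStart*/JniMethodFastStart: restore the IRT
// segment state from the saved cookie and pop the top handle scope.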
static void PopLocalReferences(uint32_t saved_local_ref_cookie, Thread* self)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JNIEnvExt* env = self->GetJniEnv();
  if (UNLIKELY(env->check_jni)) {
    env->CheckNoHeldMonitors();
  }
  env->locals.SetSegmentState(env->local_ref_cookie);
  env->local_ref_cookie = bit_cast<IRTSegmentState>(saved_local_ref_cookie);
  self->PopHandleScope();
}

// TODO: These should probably be templatized or macro-ized.
// Otherwise there's just too much repetitive boilerplate.

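// The End entrypoints below mirror the Start entrypoints above: JniMethodEnd,
// JniMethodEndSynchronized and JniMethodEndWithReference(Synchronized) pair with JniMethodStart*,
// while JniMethodFastEnd(WithReference) pairs with JniMethodFastStart. A stub conceptually emits
// (a sketch, not the exact generated code):
//   uint32_t cookie = JniMethodStart(self);   // or JniMethodFastStart / JniMethodStartSynchronized
//   <call the native method>
//   JniMethodEnd(cookie, self);               // or the matching End variant below
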
extern void JniMethodEnd(uint32_t saved_local_ref_cookie, Thread* self) {
  GoToRunnable(self);
  PopLocalReferences(saved_local_ref_cookie, self);
}

extern void JniMethodFastEnd(uint32_t saved_local_ref_cookie, Thread* self) {
  GoToRunnableFast</*kDynamicFast*/false>(self);
  PopLocalReferences(saved_local_ref_cookie, self);
}

extern void JniMethodEndSynchronized(uint32_t saved_local_ref_cookie,
                                     jobject locked,
                                     Thread* self) {
  GoToRunnable(self);
  UnlockJniSynchronizedMethod(locked, self);  // Must decode before pop.
  PopLocalReferences(saved_local_ref_cookie, self);
}

// Common result handling for EndWithReference.
static mirror::Object* JniMethodEndWithReferenceHandleResult(jobject result,
                                                             uint32_t saved_local_ref_cookie,
                                                             Thread* self)
    NO_THREAD_SAFETY_ANALYSIS {
  // Must decode before pop. The 'result' may not be valid in case of an exception, though.
  ObjPtr<mirror::Object> o;
  if (!self->IsExceptionPending()) {
    o = self->DecodeJObject(result);
  }
  PopLocalReferences(saved_local_ref_cookie, self);
  // Process result.
  if (UNLIKELY(self->GetJniEnv()->check_jni)) {
    // CheckReferenceResult can resolve types.
    StackHandleScope<1> hs(self);
    HandleWrapperObjPtr<mirror::Object> h_obj(hs.NewHandleWrapper(&o));
    CheckReferenceResult(h_obj, self);
  }
  VerifyObject(o);
  return o.Ptr();
}

extern mirror::Object* JniMethodFastEndWithReference(jobject result,
                                                     uint32_t saved_local_ref_cookie,
                                                     Thread* self) {
  GoToRunnableFast</*kDynamicFast*/false>(self);
  return JniMethodEndWithReferenceHandleResult(result, saved_local_ref_cookie, self);
}

extern mirror::Object* JniMethodEndWithReference(jobject result,
                                                 uint32_t saved_local_ref_cookie,
                                                 Thread* self) {
  GoToRunnable(self);
  return JniMethodEndWithReferenceHandleResult(result, saved_local_ref_cookie, self);
}

extern mirror::Object* JniMethodEndWithReferenceSynchronized(jobject result,
                                                             uint32_t saved_local_ref_cookie,
                                                             jobject locked,
                                                             Thread* self) {
  GoToRunnable(self);
  UnlockJniSynchronizedMethod(locked, self);
  return JniMethodEndWithReferenceHandleResult(result, saved_local_ref_cookie, self);
}

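// Exit path for generic JNI: does the work of the specialized End entrypoints above (thread
// state transition, unlocking a synchronized method, popping local references) and re-packs the
// native result into a uint64_t according to the method's return shorty.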
extern uint64_t GenericJniMethodEnd(Thread* self,
                                    uint32_t saved_local_ref_cookie,
                                    jvalue result,
                                    uint64_t result_f,
                                    ArtMethod* called,
                                    HandleScope* handle_scope)
    // TODO: NO_THREAD_SAFETY_ANALYSIS as GoToRunnable() is NO_THREAD_SAFETY_ANALYSIS
    NO_THREAD_SAFETY_ANALYSIS {
  bool critical_native = called->IsAnnotatedWithCriticalNative();
  bool fast_native = called->IsAnnotatedWithFastNative();
  bool normal_native = !critical_native && !fast_native;

  // @Fast and @CriticalNative do not do a state transition.
  if (LIKELY(normal_native)) {
    GoToRunnable(self);
  }
  // We need the mutator lock (i.e., calling GoToRunnable()) before accessing the shorty or the
  // locked object.
  jobject locked = called->IsSynchronized() ? handle_scope->GetHandle(0).ToJObject() : nullptr;
  char return_shorty_char = called->GetShorty()[0];
  if (return_shorty_char == 'L') {
    if (locked != nullptr) {
      DCHECK(normal_native) << " @FastNative and synchronized are not supported";
      UnlockJniSynchronizedMethod(locked, self);
    }
    return reinterpret_cast<uint64_t>(JniMethodEndWithReferenceHandleResult(
        result.l, saved_local_ref_cookie, self));
  } else {
    if (locked != nullptr) {
      DCHECK(normal_native) << " @FastNative and synchronized are not supported";
      UnlockJniSynchronizedMethod(locked, self);  // Must decode before pop.
    }
    if (LIKELY(!critical_native)) {
      PopLocalReferences(saved_local_ref_cookie, self);
    }
    switch (return_shorty_char) {
      case 'F': {
        if (kRuntimeISA == kX86) {
          // Convert back the result to float.
          double d = bit_cast<double, uint64_t>(result_f);
          return bit_cast<uint32_t, float>(static_cast<float>(d));
        } else {
          return result_f;
        }
      }
      case 'D':
        return result_f;
      case 'Z':
        return result.z;
      case 'B':
        return result.b;
      case 'C':
        return result.c;
      case 'S':
        return result.s;
      case 'I':
        return result.i;
      case 'J':
        return result.j;
      case 'V':
        return 0;
      default:
        LOG(FATAL) << "Unexpected return shorty character " << return_shorty_char;
        return 0;
    }
  }
}

}  // namespace art