/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "profiling_info.h"

#include "art_method-inl.h"
#include "dex/dex_instruction.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "scoped_thread_state_change-inl.h"
#include "thread.h"

namespace art {

ProfilingInfo::ProfilingInfo(ArtMethod* method, const std::vector<uint32_t>& entries)
      : baseline_hotness_count_(0),
        method_(method),
        number_of_inline_caches_(entries.size()),
        current_inline_uses_(0) {
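  // The `InlineCache` slots live in a trailing array whose storage was allocated
  // together with this object by the JIT code cache (see `Create`). Zero them,
  // then record which dex pc each cache profiles.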
  memset(&cache_, 0, number_of_inline_caches_ * sizeof(InlineCache));
  for (size_t i = 0; i < number_of_inline_caches_; ++i) {
    cache_[i].dex_pc_ = entries[i];
  }
}

ProfilingInfo* ProfilingInfo::Create(Thread* self, ArtMethod* method) {
  // Walk over the dex instructions of the method and keep track of
  // instructions we are interested in profiling.
  DCHECK(!method->IsNative());

  std::vector<uint32_t> entries;
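  // Only virtual and interface invokes get an inline cache: their targets depend
  // on the receiver's runtime type, so recording the observed receiver classes
  // lets the JIT speculatively devirtualize and inline these calls.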
  for (const DexInstructionPcPair& inst : method->DexInstructions()) {
    switch (inst->Opcode()) {
      case Instruction::INVOKE_VIRTUAL:
      case Instruction::INVOKE_VIRTUAL_RANGE:
      case Instruction::INVOKE_INTERFACE:
      case Instruction::INVOKE_INTERFACE_RANGE:
        entries.push_back(inst.DexPc());
        break;

      default:
        break;
    }
  }

  // We always create a `ProfilingInfo` object, even if there is no instruction we are
  // interested in. The JIT code cache internally uses it.

  // Allocate the `ProfilingInfo` object in the JIT's data space.
  jit::JitCodeCache* code_cache = Runtime::Current()->GetJit()->GetCodeCache();
  return code_cache->AddProfilingInfo(self, method, entries);
}

InlineCache* ProfilingInfo::GetInlineCache(uint32_t dex_pc) {
  // TODO: binary search if array is too long.
  for (size_t i = 0; i < number_of_inline_caches_; ++i) {
    if (cache_[i].dex_pc_ == dex_pc) {
      return &cache_[i];
    }
  }
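  // PrettyMethod dereferences managed heap objects, so take the mutator lock
  // via ScopedObjectAccess before building the fatal message.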
  ScopedObjectAccess soa(Thread::Current());
  LOG(FATAL) << "No inline cache found for " << ArtMethod::PrettyMethod(method_) << "@" << dex_pc;
  UNREACHABLE();
}

void ProfilingInfo::AddInvokeInfo(uint32_t dex_pc, mirror::Class* cls) {
  InlineCache* cache = GetInlineCache(dex_pc);
  for (size_t i = 0; i < InlineCache::kIndividualCacheSize; ++i) {
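    // Read the raw root without a read barrier, then let `IsMarked` map it to the
    // class's current (possibly moved) address. A null result means the slot is
    // empty or holds an unmarked reference, which is treated as an empty entry.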
    mirror::Class* existing = cache->classes_[i].Read<kWithoutReadBarrier>();
    mirror::Class* marked = ReadBarrier::IsMarked(existing);
    if (marked == cls) {
      // Receiver type is already in the cache, nothing else to do.
      return;
    } else if (marked == nullptr) {
      // Cache entry is empty, try to put `cls` in it.
      // Note: it's ok to spin on 'existing' here: if 'existing' is not null, that means
      // it is a stale heap address, which will only be cleared during SweepSystemWeaks,
      // *after* this thread hits a suspend point.
      GcRoot<mirror::Class> expected_root(existing);
      GcRoot<mirror::Class> desired_root(cls);
      auto atomic_root = reinterpret_cast<Atomic<GcRoot<mirror::Class>>*>(&cache->classes_[i]);
      if (!atomic_root->CompareAndSetStrongSequentiallyConsistent(expected_root, desired_root)) {
        // Some other thread put a class in the cache, continue iteration starting at this
        // entry in case the entry contains `cls`.
        --i;
      } else {
        // We successfully set `cls`, just return.
        return;
      }
    }
  }
  // Unsuccessful - the cache is full, making it megamorphic. We do not DCHECK it though,
  // as the garbage collector might clear the entries concurrently.
}

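// RAII helper: tells the code cache that the compiler is about to use `method`'s
// ProfilingInfo so it is not collected, and releases that claim on destruction.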
ScopedProfilingInfoUse::ScopedProfilingInfoUse(jit::Jit* jit, ArtMethod* method, Thread* self)
    : jit_(jit),
      method_(method),
      self_(self),
      // Fetch the profiling info ahead of using it. If it's null when fetching,
      // we should not call JitCodeCache::DoneCompilerUse.
      profiling_info_(jit->GetCodeCache()->NotifyCompilerUse(method, self)) {
}

ScopedProfilingInfoUse::~ScopedProfilingInfoUse() {
  if (profiling_info_ != nullptr) {
    jit_->GetCodeCache()->DoneCompilerUse(method_, self_);
  }
}

}  // namespace art