• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2013 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #ifndef ART_RUNTIME_VERIFIER_REGISTER_LINE_INL_H_
18 #define ART_RUNTIME_VERIFIER_REGISTER_LINE_INL_H_
19 
20 #include "register_line.h"
21 
22 #include "base/logging.h"  // For VLOG.
23 #include "method_verifier.h"
24 #include "reg_type_cache-inl.h"
25 
26 namespace art HIDDEN {
27 namespace verifier {
28 
// Should we dump a warning on failures to verify balanced locking? That would be an indication to
// developers that their code will be slow.
static constexpr bool kDumpLockFailures = true;
32 
// Returns the `RegTypeCache` id stored for register `vsrc`.
inline uint16_t RegisterLine::GetRegisterTypeId(uint32_t vsrc) const {
  // The register index was validated during the static pass, so we don't need to check it here.
  DCHECK_LT(vsrc, num_regs_);
  return line_[vsrc];
}
38 
// Resolves the type id stored for register `vsrc` to its `RegType` through the
// verifier's type cache.
inline const RegType& RegisterLine::GetRegisterType(MethodVerifier* verifier, uint32_t vsrc) const {
  return verifier->GetRegTypeCache()->GetFromId(GetRegisterTypeId(vsrc));
}
42 
43 template <LockOp kLockOp>
SetRegisterTypeImpl(uint32_t vdst,uint16_t new_id)44 inline void RegisterLine::SetRegisterTypeImpl(uint32_t vdst, uint16_t new_id) {
45   DCHECK_LT(vdst, num_regs_);
46   // Note: previously we failed when asked to set a conflict. However, conflicts are OK as long
47   //       as they are not accessed, and our backends can handle this nowadays.
48   line_[vdst] = new_id;
49   switch (kLockOp) {
50     case LockOp::kClear:
51       // Clear the monitor entry bits for this register.
52       ClearAllRegToLockDepths(vdst);
53       break;
54     case LockOp::kKeep:
55       break;
56   }
57 }
58 
// Sets register `vdst` to the non-wide kind `new_kind`, clearing its lock info.
inline void RegisterLine::SetRegisterType(uint32_t vdst, RegType::Kind new_kind) {
  // Wide halves must be set together via SetRegisterTypeWide().
  DCHECK(!RegType::IsLowHalf(new_kind));
  DCHECK(!RegType::IsHighHalf(new_kind));
  SetRegisterTypeImpl<LockOp::kClear>(vdst, RegTypeCache::IdForRegKind(new_kind));
}
64 
// Sets register `vdst` to `new_type`. `kLockOp` chooses whether lock info for
// `vdst` is cleared (the common case) or kept (for `move-object` style copies).
template <LockOp kLockOp>
inline void RegisterLine::SetRegisterType(uint32_t vdst, const RegType& new_type) {
  // Wide halves must be set together via SetRegisterTypeWide().
  DCHECK(!new_type.IsLowHalf());
  DCHECK(!new_type.IsHighHalf());
  // Should only keep locks for reference types, or when copying a conflict with `move-object`.
  DCHECK_IMPLIES(kLockOp == LockOp::kKeep, new_type.IsReferenceTypes() || new_type.IsConflict());
  SetRegisterTypeImpl<kLockOp>(vdst, new_type.GetId());
}
73 
// Stores the two halves of a wide (64-bit) value in registers `vdst` and `vdst + 1`.
inline void RegisterLine::SetRegisterTypeWideImpl(uint32_t vdst,
                                                  uint16_t new_id1,
                                                  uint16_t new_id2) {
  // Both halves of the pair must fit within the register line.
  DCHECK_LT(vdst + 1, num_regs_);
  line_[vdst] = new_id1;
  line_[vdst + 1] = new_id2;
  // Clear the monitor entry bits for this register.
  ClearAllRegToLockDepths(vdst);
  ClearAllRegToLockDepths(vdst + 1);
}
84 
// Sets `vdst`/`vdst + 1` to the wide pair described by the two kinds.
inline void RegisterLine::SetRegisterTypeWide(uint32_t vdst,
                                              RegType::Kind new_kind1,
                                              RegType::Kind new_kind2) {
  // The two kinds must form a valid low-half/high-half pair.
  DCHECK(RegType::CheckWidePair(new_kind1, new_kind2));
  SetRegisterTypeWideImpl(
      vdst, RegTypeCache::IdForRegKind(new_kind1), RegTypeCache::IdForRegKind(new_kind2));
}
92 
// Sets `vdst`/`vdst + 1` to the wide pair `new_type1`/`new_type2`.
inline void RegisterLine::SetRegisterTypeWide(uint32_t vdst,
                                              const RegType& new_type1,
                                              const RegType& new_type2) {
  // The two types must form a valid low-half/high-half pair.
  DCHECK(new_type1.CheckWidePair(new_type2));
  SetRegisterTypeWideImpl(vdst, new_type1.GetId(), new_type2.GetId());
}
99 
SetResultTypeToUnknown()100 inline void RegisterLine::SetResultTypeToUnknown() {
101   result_[0] = RegTypeCache::kUndefinedCacheId;
102   result_[1] = RegTypeCache::kUndefinedCacheId;
103 }
104 
// Records a non-wide method result type; the second result slot is invalidated.
inline void RegisterLine::SetResultRegisterType(const RegType& new_type) {
  // Wide results must go through SetResultRegisterTypeWide().
  DCHECK(!new_type.IsLowHalf());
  DCHECK(!new_type.IsHighHalf());
  result_[0] = new_type.GetId();
  result_[1] = RegTypeCache::kUndefinedCacheId;
}
111 
// Records a wide (64-bit) method result as a low-half/high-half pair.
inline void RegisterLine::SetResultRegisterTypeWide(const RegType& new_type1,
                                                    const RegType& new_type2) {
  DCHECK(new_type1.CheckWidePair(new_type2));
  result_[0] = new_type1.GetId();
  result_[1] = new_type2.GetId();
}
118 
// Records that `vdst` holds the uninitialized result of an allocation at `dex_pc`,
// so the allocation site can later be associated with the uninitialized type.
inline void RegisterLine::SetRegisterTypeForNewInstance(uint32_t vdst,
                                                        const RegType& uninit_type,
                                                        uint32_t dex_pc) {
  DCHECK_LT(vdst, num_regs_);
  // Only uninitialized (possibly unresolved) reference types carry an allocation dex pc.
  DCHECK(NeedsAllocationDexPc(uninit_type));
  SetRegisterType<LockOp::kClear>(vdst, uninit_type);
  // Lazily allocate the side table, then remember where this object was allocated.
  EnsureAllocationDexPcsAvailable();
  allocation_dex_pcs_[vdst] = dex_pc;
}
128 
// Copies reference register `vsrc` to `vdst`, preserving lock-depth tracking and,
// when the side table exists, the allocation dex pc.
inline void RegisterLine::CopyReference(uint32_t vdst, uint32_t vsrc, const RegType& type) {
  // `type` must be exactly what the source register currently holds.
  DCHECK_EQ(type.GetId(), GetRegisterTypeId(vsrc));
  DCHECK(type.IsConflict() || type.IsReferenceTypes());
  // Keep lock info: the destination aliases the same object as the source.
  SetRegisterType<LockOp::kKeep>(vdst, type);
  CopyRegToLockDepth(vdst, vsrc);
  if (allocation_dex_pcs_ != nullptr) {
    // Copy allocation dex pc for uninitialized types. (Copy unused value for other types.)
    allocation_dex_pcs_[vdst] = allocation_dex_pcs_[vsrc];
  }
}
139 
// Returns whether `reg_type` is an uninitialized (possibly unresolved) reference,
// i.e. a type whose allocation dex pc must be tracked in `allocation_dex_pcs_`.
inline bool RegisterLine::NeedsAllocationDexPc(const RegType& reg_type) {
  return reg_type.IsUninitializedReference() || reg_type.IsUnresolvedUninitializedReference();
}
143 
// Debug-only check that no register currently tracks an uninitialized type
// allocated at `dex_pc` — i.e. that the `new-instance` at `dex_pc` has not yet
// recorded an uninitialized value in this line.
inline void RegisterLine::DCheckUniqueNewInstanceDexPc(MethodVerifier* verifier, uint32_t dex_pc) {
  if (kIsDebugBuild && allocation_dex_pcs_ != nullptr) {
    // Note: We do not clear the `allocation_dex_pcs_` entries when copying data from
    // a register line without `allocation_dex_pcs_`, or when we merge types and find
    // a conflict, so the same dex pc can remain in the `allocation_dex_pcs_` array
    // but it cannot be recorded for a `new-instance` uninitialized type.
    RegTypeCache* reg_types = verifier->GetRegTypeCache();
    for (uint32_t i = 0; i != num_regs_; ++i) {
      // Only registers holding uninitialized types have meaningful dex pc entries.
      if (NeedsAllocationDexPc(reg_types->GetFromId(line_[i]))) {
        CHECK_NE(allocation_dex_pcs_[i], dex_pc) << i << " " << reg_types->GetFromId(line_[i]);
      }
    }
  }
}
158 
// Lazily allocates the per-register allocation-dex-pc side table from the same
// arena that backs `monitors_`, initializing every entry to `kNoDexPc`.
inline void RegisterLine::EnsureAllocationDexPcsAvailable() {
  DCHECK_NE(num_regs_, 0u);
  if (allocation_dex_pcs_ == nullptr) {
    // Reuse the arena of `monitors_` so the table shares the line's lifetime.
    ArenaAllocatorAdapter<uint32_t> allocator(monitors_.get_allocator());
    allocation_dex_pcs_ = allocator.allocate(num_regs_);
    std::fill_n(allocation_dex_pcs_, num_regs_, kNoDexPc);
  }
}
167 
// Reports a (non-fatal) locking verification failure if any monitors are still
// held — expected at method exit points where the monitor stack must be empty.
inline void RegisterLine::VerifyMonitorStackEmpty(MethodVerifier* verifier) const {
  if (MonitorStackDepth() != 0) {
    // Soft failure: the method runs with structured-locking checks, not a verify error.
    verifier->Fail(VERIFY_ERROR_LOCKING, /*pending_exc=*/ false);
    if (kDumpLockFailures) {
      VLOG(verifier) << "expected empty monitor stack in "
                     << verifier->GetMethodReference().PrettyMethod();
    }
  }
}
177 
// Byte size of a RegisterLine for `num_regs` registers: the fixed header plus
// the trailing flexible `line_` array of 16-bit type ids.
inline size_t RegisterLine::ComputeSize(size_t num_regs) {
  return OFFSETOF_MEMBER(RegisterLine, line_) + num_regs * sizeof(uint16_t);
}
181 
// Allocates a RegisterLine (with its variable-length tail) from `allocator` and
// constructs it in place. Ownership stays with the arena; see RegisterLineArenaDelete.
inline RegisterLine* RegisterLine::Create(size_t num_regs, ArenaAllocator& allocator) {
  void* memory = allocator.Alloc(ComputeSize(num_regs));
  return new (memory) RegisterLine(num_regs, allocator);
}
186 
// Constructs a line of `num_regs` registers. The trailing `line_` array is not
// explicitly initialized here: the arena zero-fills it, which is exactly the
// undefined-type id (asserted below).
inline RegisterLine::RegisterLine(size_t num_regs, ArenaAllocator& allocator)
    : num_regs_(num_regs),
      allocation_dex_pcs_(nullptr),
      monitors_(allocator.Adapter(kArenaAllocVerifier)),
      reg_to_lock_depths_(std::less<uint32_t>(),
                          allocator.Adapter(kArenaAllocVerifier)),
      this_initialized_(false) {
  // `ArenaAllocator` guarantees zero-initialization.
  static_assert(RegTypeCache::kUndefinedCacheId == 0u);
  DCHECK(std::all_of(line_,
                     line_ + num_regs_,
                     [](auto id) { return id == RegTypeCache::kUndefinedCacheId;}));
  SetResultTypeToUnknown();
}
201 
ClearRegToLockDepth(size_t reg,size_t depth)202 inline void RegisterLine::ClearRegToLockDepth(size_t reg, size_t depth) {
203   CHECK_LT(depth, 32u);
204   DCHECK(IsSetLockDepth(reg, depth));
205   auto it = reg_to_lock_depths_.find(reg);
206   DCHECK(it != reg_to_lock_depths_.end());
207   uint32_t depths = it->second ^ (1 << depth);
208   if (depths != 0) {
209     it->second = depths;
210   } else {
211     reg_to_lock_depths_.erase(it);
212   }
213   // Need to unlock every register at the same lock depth. These are aliased locks.
214   uint32_t mask = 1 << depth;
215   for (auto& pair : reg_to_lock_depths_) {
216     if ((pair.second & mask) != 0) {
217       VLOG(verifier) << "Also unlocking " << pair.first;
218       pair.second ^= mask;
219     }
220   }
221 }
222 
// Arena deleter for RegisterLine: runs the destructor, then poisons both the
// line's memory and (if present) the allocation-dex-pc side table so stale
// pointers fault instead of reading freed data.
inline void RegisterLineArenaDelete::operator()(RegisterLine* ptr) const {
  if (ptr != nullptr) {
    // Capture what we need before the destructor invalidates the object.
    uint32_t num_regs = ptr->NumRegs();
    uint32_t* allocation_dex_pcs = ptr->allocation_dex_pcs_;
    ptr->~RegisterLine();
    ProtectMemory(ptr, RegisterLine::ComputeSize(num_regs));
    if (allocation_dex_pcs != nullptr) {
      // The side table was allocated separately (see EnsureAllocationDexPcsAvailable),
      // so it is poisoned separately as well.
      struct AllocationDexPcsDelete : ArenaDelete<uint32_t> {
        void operator()(uint32_t* ptr, size_t size) {
          ProtectMemory(ptr, size);
        }
      };
      AllocationDexPcsDelete()(allocation_dex_pcs, num_regs * sizeof(*allocation_dex_pcs));
    }
  }
}
239 
240 }  // namespace verifier
241 }  // namespace art
242 
243 #endif  // ART_RUNTIME_VERIFIER_REGISTER_LINE_INL_H_
244