• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2012 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include "register_line.h"
18 
19 #include "android-base/stringprintf.h"
20 
21 #include "dex/dex_instruction-inl.h"
22 #include "method_verifier-inl.h"
23 #include "reg_type-inl.h"
24 #include "register_line-inl.h"
25 
26 namespace art HIDDEN {
27 namespace verifier {
28 
29 using android::base::StringPrintf;
30 
CheckConstructorReturn(MethodVerifier * verifier) const31 bool RegisterLine::CheckConstructorReturn(MethodVerifier* verifier) const {
32   if (kIsDebugBuild && this_initialized_) {
33     // Ensure that there is no UninitializedThisReference type anymore if this_initialized_ is true.
34     for (size_t i = 0; i < num_regs_; i++) {
35       const RegType& type = GetRegisterType(verifier, i);
36       CHECK(!type.IsUninitializedThisReference() &&
37             !type.IsUnresolvedUninitializedThisReference())
38           << i << ": " << type.IsUninitializedThisReference() << " in "
39           << verifier->GetMethodReference().PrettyMethod();
40     }
41   }
42   if (!this_initialized_) {
43     verifier->Fail(VERIFY_ERROR_BAD_CLASS_HARD)
44         << "Constructor returning without calling superclass constructor";
45   }
46   return this_initialized_;
47 }
48 
CopyFromLine(const RegisterLine * src)49 void RegisterLine::CopyFromLine(const RegisterLine* src) {
50   DCHECK_EQ(num_regs_, src->num_regs_);
51   memcpy(&line_, &src->line_, num_regs_ * sizeof(uint16_t));
52   // Copy `allocation_dex_pcs_`. Note that if the `src` does not have `allocation_dex_pcs_`
53   // allocated, we retain the array allocated for this register line to avoid wasting
54   // memory by allocating a new array later. This means that the `allocation_dex_pcs_` can
55   // be filled with bogus values not tied to a `new-instance` uninitialized type.
56   if (src->allocation_dex_pcs_ != nullptr) {
57     EnsureAllocationDexPcsAvailable();
58     memcpy(allocation_dex_pcs_, src->allocation_dex_pcs_, num_regs_ * sizeof(uint32_t));
59   }
60   monitors_ = src->monitors_;
61   reg_to_lock_depths_ = src->reg_to_lock_depths_;
62   this_initialized_ = src->this_initialized_;
63 }
64 
MarkRefsAsInitialized(MethodVerifier * verifier,uint32_t vsrc)65 void RegisterLine::MarkRefsAsInitialized(MethodVerifier* verifier, uint32_t vsrc) {
66   const RegType& uninit_type = GetRegisterType(verifier, vsrc);
67   DCHECK(uninit_type.IsUninitializedTypes());
68   const RegType& init_type = verifier->GetRegTypeCache()->FromUninitialized(uninit_type);
69   size_t changed = 0;
70   // Is this initializing "this"?
71   if (uninit_type.IsUninitializedThisReference() ||
72       uninit_type.IsUnresolvedUninitializedThisReference()) {
73     this_initialized_ = true;
74     for (uint32_t i = 0; i < num_regs_; i++) {
75       if (GetRegisterType(verifier, i).Equals(uninit_type)) {
76         line_[i] = init_type.GetId();
77         changed++;
78       }
79     }
80   } else {
81     DCHECK(NeedsAllocationDexPc(uninit_type));
82     DCHECK(allocation_dex_pcs_ != nullptr);
83     uint32_t dex_pc = allocation_dex_pcs_[vsrc];
84     for (uint32_t i = 0; i < num_regs_; i++) {
85       if (GetRegisterType(verifier, i).Equals(uninit_type) && allocation_dex_pcs_[i] == dex_pc) {
86         line_[i] = init_type.GetId();
87         changed++;
88       }
89     }
90   }
91   DCHECK_GT(changed, 0u);
92 }
93 
Dump(MethodVerifier * verifier) const94 std::string RegisterLine::Dump(MethodVerifier* verifier) const {
95   std::string result;
96   for (size_t i = 0; i < num_regs_; i++) {
97     result += StringPrintf("%zd:[", i);
98     result += GetRegisterType(verifier, i).Dump();
99     result += "],";
100   }
101   for (const auto& monitor : monitors_) {
102     result += StringPrintf("{%d},", monitor);
103   }
104   for (auto& pairs : reg_to_lock_depths_) {
105     result += StringPrintf("<%d -> %" PRIx64 ">",
106                            pairs.first,
107                            static_cast<uint64_t>(pairs.second));
108   }
109   return result;
110 }
111 
CopyResultRegister1(MethodVerifier * verifier,uint32_t vdst,bool is_reference)112 void RegisterLine::CopyResultRegister1(MethodVerifier* verifier, uint32_t vdst, bool is_reference) {
113   const RegType& type = verifier->GetRegTypeCache()->GetFromId(result_[0]);
114   if ((!is_reference && !type.IsCategory1Types()) ||
115       (is_reference && !type.IsReferenceTypes())) {
116     verifier->Fail(VERIFY_ERROR_BAD_CLASS_HARD)
117         << "copyRes1 v" << vdst << "<- result0"  << " type=" << type;
118   } else {
119     DCHECK_EQ(result_[1], RegTypeCache::kUndefinedCacheId);
120     SetRegisterType<LockOp::kClear>(vdst, type);
121     result_[0] = RegTypeCache::kUndefinedCacheId;
122   }
123 }
124 
125 /*
126  * Implement "move-result-wide". Copy the category-2 value from the result
127  * register to another register, and reset the result register.
128  */
CopyResultRegister2(MethodVerifier * verifier,uint32_t vdst)129 void RegisterLine::CopyResultRegister2(MethodVerifier* verifier, uint32_t vdst) {
130   const RegType& type_l = verifier->GetRegTypeCache()->GetFromId(result_[0]);
131   const RegType& type_h = verifier->GetRegTypeCache()->GetFromId(result_[1]);
132   if (!type_l.IsCategory2Types()) {
133     verifier->Fail(VERIFY_ERROR_BAD_CLASS_HARD)
134         << "copyRes2 v" << vdst << "<- result0"  << " type=" << type_l;
135   } else {
136     DCHECK(type_l.CheckWidePair(type_h));  // Set should never allow this case
137     SetRegisterTypeWide(vdst, type_l, type_h);  // also sets the high
138     result_[0] = RegTypeCache::kUndefinedCacheId;
139     result_[1] = RegTypeCache::kUndefinedCacheId;
140   }
141 }
142 
// Virtual register index used to track locks taken on null literals. Null can alias in
// ways that are hard to track per-register, so all null locks are recorded under this
// single index, 2^32-1, which cannot appear as a register in dex bytecode.
static constexpr uint32_t kVirtualNullRegister = std::numeric_limits<uint32_t>::max();
144 
PushMonitor(MethodVerifier * verifier,uint32_t vreg,const RegType & reg_type,int32_t insn_idx)145 void RegisterLine::PushMonitor(
146     MethodVerifier* verifier, uint32_t vreg, const RegType& reg_type, int32_t insn_idx) {
147   DCHECK_EQ(reg_type.GetId(), GetRegisterTypeId(vreg));
148   if (monitors_.size() >= kMaxMonitorStackDepth) {
149     verifier->Fail(VERIFY_ERROR_LOCKING);
150     if (kDumpLockFailures) {
151       VLOG(verifier) << "monitor-enter stack overflow while verifying "
152                      << verifier->GetMethodReference().PrettyMethod();
153     }
154   } else {
155     if (SetRegToLockDepth(vreg, monitors_.size())) {
156       // Null literals can establish aliases that we can't easily track. As such, handle the zero
157       // case as the 2^32-1 register (which isn't available in dex bytecode).
158       if (reg_type.IsZero()) {
159         SetRegToLockDepth(kVirtualNullRegister, monitors_.size());
160       }
161 
162       monitors_.push_back(insn_idx);
163     } else {
164       verifier->Fail(VERIFY_ERROR_LOCKING);
165       if (kDumpLockFailures) {
166         VLOG(verifier) << "unexpected monitor-enter on register v" <<  vreg << " in "
167                        << verifier->GetMethodReference().PrettyMethod();
168       }
169     }
170   }
171 }
172 
PopMonitor(MethodVerifier * verifier,uint32_t vreg,const RegType & reg_type)173 void RegisterLine::PopMonitor(MethodVerifier* verifier, uint32_t vreg, const RegType& reg_type) {
174   DCHECK_EQ(reg_type.GetId(), GetRegisterTypeId(vreg));
175   if (monitors_.empty()) {
176     verifier->Fail(VERIFY_ERROR_LOCKING);
177     if (kDumpLockFailures) {
178       VLOG(verifier) << "monitor-exit stack underflow while verifying "
179                      << verifier->GetMethodReference().PrettyMethod();
180     }
181   } else {
182     monitors_.pop_back();
183 
184     bool success = IsSetLockDepth(vreg, monitors_.size());
185 
186     if (!success && reg_type.IsZero()) {
187       // Null literals can establish aliases that we can't easily track. As such, handle the zero
188       // case as the 2^32-1 register (which isn't available in dex bytecode).
189       success = IsSetLockDepth(kVirtualNullRegister, monitors_.size());
190       if (success) {
191         vreg = kVirtualNullRegister;
192       }
193     }
194 
195     if (!success) {
196       verifier->Fail(VERIFY_ERROR_LOCKING);
197       if (kDumpLockFailures) {
198         VLOG(verifier) << "monitor-exit not unlocking the top of the monitor stack while verifying "
199                        << verifier->GetMethodReference().PrettyMethod();
200       }
201     } else {
202       // Record the register was unlocked. This clears all aliases, thus it will also clear the
203       // null lock, if necessary.
204       ClearRegToLockDepth(vreg, monitors_.size());
205     }
206   }
207 }
208 
FindLockAliasedRegister(uint32_t src,const RegisterLine::RegToLockDepthsMap & src_map,const RegisterLine::RegToLockDepthsMap & search_map)209 bool FindLockAliasedRegister(uint32_t src,
210                              const RegisterLine::RegToLockDepthsMap& src_map,
211                              const RegisterLine::RegToLockDepthsMap& search_map) {
212   auto it = src_map.find(src);
213   if (it == src_map.end()) {
214     // "Not locked" is trivially aliased.
215     return true;
216   }
217   uint32_t src_lock_levels = it->second;
218   if (src_lock_levels == 0) {
219     // "Not locked" is trivially aliased.
220     return true;
221   }
222 
223   // Scan the map for the same value.
224   for (const std::pair<const uint32_t, uint32_t>& pair : search_map) {
225     if (pair.first != src && pair.second == src_lock_levels) {
226       return true;
227     }
228   }
229 
230   // Nothing found, no alias.
231   return false;
232 }
233 
// Merge `incoming_line` into this line at a control-flow join: each register type becomes
// the join (Merge) of the two types, monitor state is checked for consistency, and the
// `this`-initialized flag becomes the conjunction of both paths. Returns whether this
// line changed (callers use this to decide whether to re-process successors).
bool RegisterLine::MergeRegisters(MethodVerifier* verifier, const RegisterLine* incoming_line) {
  bool changed = false;
  DCHECK(incoming_line != nullptr);
  for (size_t idx = 0; idx < num_regs_; idx++) {
    if (line_[idx] != incoming_line->line_[idx]) {
      // Different types: replace with the merged type.
      const RegType& incoming_reg_type = incoming_line->GetRegisterType(verifier, idx);
      const RegType& cur_type = GetRegisterType(verifier, idx);
      const RegType& new_type = cur_type.Merge(
          incoming_reg_type, verifier->GetRegTypeCache(), verifier);
      changed = changed || !cur_type.Equals(new_type);
      line_[idx] = new_type.GetId();
    } else {
      // Same type in both lines; but an uninitialized type from `new-instance` is only
      // the same allocation if the allocation dex pcs also match.
      auto needs_allocation_dex_pc = [&]() {
        return NeedsAllocationDexPc(verifier->GetRegTypeCache()->GetFromId(line_[idx]));
      };
      DCHECK_IMPLIES(needs_allocation_dex_pc(), allocation_dex_pcs_ != nullptr);
      DCHECK_IMPLIES(needs_allocation_dex_pc(), incoming_line->allocation_dex_pcs_ != nullptr);
      // Check for allocation dex pc mismatch first to try and avoid costly virtual calls.
      // For methods without any `new-instance` instructions, the `allocation_dex_pcs_` is null.
      if (allocation_dex_pcs_ != nullptr &&
          incoming_line->allocation_dex_pcs_ != nullptr &&
          allocation_dex_pcs_[idx] != incoming_line->allocation_dex_pcs_[idx] &&
          needs_allocation_dex_pc()) {
        // NOTE(review): this rewrites `line_[idx]` without setting `changed` — confirm
        // that merge convergence/re-processing does not depend on flagging this change.
        line_[idx] = RegTypeCache::kConflictCacheId;
      }
    }
  }
  if (monitors_.size() > 0 || incoming_line->monitors_.size() > 0) {
    if (monitors_.size() != incoming_line->monitors_.size()) {
      // The two paths reach this point holding different numbers of locks.
      verifier->Fail(VERIFY_ERROR_LOCKING, /*pending_exc=*/ false);
      if (kDumpLockFailures) {
        VLOG(verifier) << "mismatched stack depths (depth=" << MonitorStackDepth()
                       << ", incoming depth=" << incoming_line->MonitorStackDepth() << ") in "
                       << verifier->GetMethodReference().PrettyMethod();
      }
    } else if (reg_to_lock_depths_ != incoming_line->reg_to_lock_depths_) {
      for (uint32_t idx = 0; idx < num_regs_; idx++) {
        size_t depths = reg_to_lock_depths_.count(idx);
        size_t incoming_depths = incoming_line->reg_to_lock_depths_.count(idx);
        if (depths != incoming_depths) {
          // Stack levels aren't matching. This is potentially bad, as we don't do a
          // flow-sensitive analysis.
          // However, this could be an alias of something locked in one path, and the alias was
          // destroyed in another path. It is fine to drop this as long as there's another alias
          // for the lock around. The last vanishing alias will then report that things would be
          // left unlocked. We need to check for aliases for both lock levels.
          //
          // Example (lock status in curly braces as pair of register and lock levels):
          //
          //                            lock v1 {v1=1}
          //                        |                    |
          //              v0 = v1 {v0=1, v1=1}       v0 = v2 {v1=1}
          //                        |                    |
          //                                 {v1=1}
          //                                         // Dropping v0, as the status can't be merged
          //                                         // but the lock info ("locked at depth 1" and)
          //                                         // "not locked at all") is available.
          if (!FindLockAliasedRegister(idx,
                                       reg_to_lock_depths_,
                                       reg_to_lock_depths_) ||
              !FindLockAliasedRegister(idx,
                                       incoming_line->reg_to_lock_depths_,
                                       reg_to_lock_depths_)) {
            verifier->Fail(VERIFY_ERROR_LOCKING, /*pending_exc=*/ false);
            if (kDumpLockFailures) {
              VLOG(verifier) << "mismatched stack depths for register v" << idx
                             << ": " << depths  << " != " << incoming_depths << " in "
                             << verifier->GetMethodReference().PrettyMethod();
            }
            break;
          }
          // We found aliases, set this to zero.
          reg_to_lock_depths_.erase(idx);
        } else if (depths > 0) {
          // Check whether they're actually the same levels.
          uint32_t locked_levels = reg_to_lock_depths_.find(idx)->second;
          uint32_t incoming_locked_levels = incoming_line->reg_to_lock_depths_.find(idx)->second;
          if (locked_levels != incoming_locked_levels) {
            // Lock levels aren't matching. This is potentially bad, as we don't do a
            // flow-sensitive analysis.
            // However, this could be an alias of something locked in one path, and the alias was
            // destroyed in another path. It is fine to drop this as long as there's another alias
            // for the lock around. The last vanishing alias will then report that things would be
            // left unlocked. We need to check for aliases for both lock levels.
            //
            // Example (lock status in curly braces as pair of register and lock levels):
            //
            //                          lock v1 {v1=1}
            //                          lock v2 {v1=1, v2=2}
            //                        |                      |
            //         v0 = v1 {v0=1, v1=1, v2=2}  v0 = v2 {v0=2, v1=1, v2=2}
            //                        |                      |
            //                             {v1=1, v2=2}
            //                                           // Dropping v0, as the status can't be
            //                                           // merged but the lock info ("locked at
            //                                           // depth 1" and "locked at depth 2") is
            //                                           // available.
            if (!FindLockAliasedRegister(idx,
                                         reg_to_lock_depths_,
                                         reg_to_lock_depths_) ||
                !FindLockAliasedRegister(idx,
                                         incoming_line->reg_to_lock_depths_,
                                         reg_to_lock_depths_)) {
              // No aliases for both current and incoming, we'll lose information.
              verifier->Fail(VERIFY_ERROR_LOCKING, /*pending_exc=*/ false);
              if (kDumpLockFailures) {
                VLOG(verifier) << "mismatched lock levels for register v" << idx << ": "
                               << std::hex << locked_levels << std::dec  << " != "
                               << std::hex << incoming_locked_levels << std::dec << " in "
                               << verifier->GetMethodReference().PrettyMethod();
              }
              break;
            }
            // We found aliases, set this to zero.
            reg_to_lock_depths_.erase(idx);
          }
        }
      }
    }
  }

  // Check whether "this" was initialized in both paths.
  if (this_initialized_ && !incoming_line->this_initialized_) {
    this_initialized_ = false;
    changed = true;
  }
  return changed;
}
362 
363 }  // namespace verifier
364 }  // namespace art
365