1 /*
2 * Copyright (C) 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #ifndef ART_RUNTIME_VERIFIER_REGISTER_LINE_INL_H_
18 #define ART_RUNTIME_VERIFIER_REGISTER_LINE_INL_H_
19
20 #include "register_line.h"
21
22 #include "base/logging.h" // For VLOG.
23 #include "debug_print.h"
24 #include "method_verifier.h"
25 #include "reg_type_cache-inl.h"
26
27 namespace art {
28 namespace verifier {
29
// Should we dump a warning on failures to verify balanced locking? That would be an indication to
// developers that their code will be slow. Only affects VLOG(verifier) output when a
// VERIFY_ERROR_LOCKING soft failure is recorded; it does not change verification results.
static constexpr bool kDumpLockFailures = true;
33
GetRegisterType(MethodVerifier * verifier,uint32_t vsrc)34 inline const RegType& RegisterLine::GetRegisterType(MethodVerifier* verifier, uint32_t vsrc) const {
35 // The register index was validated during the static pass, so we don't need to check it here.
36 DCHECK_LT(vsrc, num_regs_);
37 return verifier->GetRegTypeCache()->GetFromId(line_[vsrc]);
38 }
39
40 template <LockOp kLockOp>
SetRegisterType(uint32_t vdst,const RegType & new_type)41 inline void RegisterLine::SetRegisterType(uint32_t vdst, const RegType& new_type) {
42 DCHECK_LT(vdst, num_regs_);
43 DCHECK(!new_type.IsLowHalf());
44 DCHECK(!new_type.IsHighHalf());
45 // Note: previously we failed when asked to set a conflict. However, conflicts are OK as long
46 // as they are not accessed, and our backends can handle this nowadays.
47 line_[vdst] = new_type.GetId();
48 switch (kLockOp) {
49 case LockOp::kClear:
50 // Clear the monitor entry bits for this register.
51 ClearAllRegToLockDepths(vdst);
52 break;
53 case LockOp::kKeep:
54 // Should only be doing this with reference types.
55 DCHECK(new_type.IsReferenceTypes());
56 break;
57 }
58 }
59
SetRegisterTypeWide(uint32_t vdst,const RegType & new_type1,const RegType & new_type2)60 inline void RegisterLine::SetRegisterTypeWide(uint32_t vdst,
61 const RegType& new_type1,
62 const RegType& new_type2) {
63 DCHECK_LT(vdst + 1, num_regs_);
64 DCHECK(new_type1.CheckWidePair(new_type2));
65 line_[vdst] = new_type1.GetId();
66 line_[vdst + 1] = new_type2.GetId();
67 // Clear the monitor entry bits for this register.
68 ClearAllRegToLockDepths(vdst);
69 ClearAllRegToLockDepths(vdst + 1);
70 }
71
SetResultTypeToUnknown(RegTypeCache * reg_types)72 inline void RegisterLine::SetResultTypeToUnknown(RegTypeCache* reg_types) {
73 result_[0] = reg_types->Undefined().GetId();
74 result_[1] = result_[0];
75 }
76
SetResultRegisterType(MethodVerifier * verifier,const RegType & new_type)77 inline void RegisterLine::SetResultRegisterType(MethodVerifier* verifier, const RegType& new_type) {
78 DCHECK(!new_type.IsLowHalf());
79 DCHECK(!new_type.IsHighHalf());
80 result_[0] = new_type.GetId();
81 result_[1] = verifier->GetRegTypeCache()->Undefined().GetId();
82 }
83
SetResultRegisterTypeWide(const RegType & new_type1,const RegType & new_type2)84 inline void RegisterLine::SetResultRegisterTypeWide(const RegType& new_type1,
85 const RegType& new_type2) {
86 DCHECK(new_type1.CheckWidePair(new_type2));
87 result_[0] = new_type1.GetId();
88 result_[1] = new_type2.GetId();
89 }
90
CopyRegister1(MethodVerifier * verifier,uint32_t vdst,uint32_t vsrc,TypeCategory cat)91 inline void RegisterLine::CopyRegister1(MethodVerifier* verifier, uint32_t vdst, uint32_t vsrc,
92 TypeCategory cat) {
93 DCHECK(cat == kTypeCategory1nr || cat == kTypeCategoryRef);
94 const RegType& type = GetRegisterType(verifier, vsrc);
95 if (type.IsLowHalf() || type.IsHighHalf()) {
96 verifier->Fail(VERIFY_ERROR_BAD_CLASS_HARD) << "Expected category1 register type not '"
97 << type << "'";
98 return;
99 }
100 SetRegisterType<LockOp::kClear>(vdst, type);
101 if (!type.IsConflict() && // Allow conflicts to be copied around.
102 ((cat == kTypeCategory1nr && !type.IsCategory1Types()) ||
103 (cat == kTypeCategoryRef && !type.IsReferenceTypes()))) {
104 verifier->Fail(VERIFY_ERROR_BAD_CLASS_HARD) << "copy1 v" << vdst << "<-v" << vsrc << " type=" << type
105 << " cat=" << static_cast<int>(cat);
106 } else if (cat == kTypeCategoryRef) {
107 CopyRegToLockDepth(vdst, vsrc);
108 }
109 }
110
CopyRegister2(MethodVerifier * verifier,uint32_t vdst,uint32_t vsrc)111 inline void RegisterLine::CopyRegister2(MethodVerifier* verifier, uint32_t vdst, uint32_t vsrc) {
112 const RegType& type_l = GetRegisterType(verifier, vsrc);
113 const RegType& type_h = GetRegisterType(verifier, vsrc + 1);
114
115 if (!type_l.CheckWidePair(type_h)) {
116 verifier->Fail(VERIFY_ERROR_BAD_CLASS_HARD) << "copy2 v" << vdst << "<-v" << vsrc
117 << " type=" << type_l << "/" << type_h;
118 } else {
119 SetRegisterTypeWide(vdst, type_l, type_h);
120 }
121 }
122
// Check that register vsrc holds a value assignable to check_type, recording a
// verification failure (and returning false) when it does not. For wide check
// types the matching high half in vsrc + 1 is validated as well.
inline bool RegisterLine::VerifyRegisterType(MethodVerifier* verifier, uint32_t vsrc,
                                             const RegType& check_type) {
  // Verify the src register type against the check type refining the type of the register
  const RegType& src_type = GetRegisterType(verifier, vsrc);
  if (UNLIKELY(!check_type.IsAssignableFrom(src_type, verifier))) {
    // Classify the failure: most mismatches are hard failures; only a mismatch
    // involving an unresolved (but otherwise plausible) reference type is
    // deferred as VERIFY_ERROR_UNRESOLVED_TYPE_CHECK.
    enum VerifyError fail_type;
    if (!check_type.IsNonZeroReferenceTypes() || !src_type.IsNonZeroReferenceTypes()) {
      // Hard fail if one of the types is primitive, since they are concretely known.
      fail_type = VERIFY_ERROR_BAD_CLASS_HARD;
    } else if (check_type.IsUninitializedTypes() || src_type.IsUninitializedTypes()) {
      // Hard fail for uninitialized types, which don't match anything but themselves.
      fail_type = VERIFY_ERROR_BAD_CLASS_HARD;
    } else if (check_type.IsUnresolvedTypes() || src_type.IsUnresolvedTypes()) {
      fail_type = VERIFY_ERROR_UNRESOLVED_TYPE_CHECK;
    } else {
      // Both resolved, initialized references that are still not assignable.
      fail_type = VERIFY_ERROR_BAD_CLASS_HARD;
    }
    verifier->Fail(fail_type) << "register v" << vsrc << " has type "
                              << src_type << " but expected " << check_type;
    // When both sides are resolved reference types with known classes, dump
    // extra diagnostics (see DumpB77342775DebugData, named after bug 77342775).
    if (check_type.IsNonZeroReferenceTypes() &&
        !check_type.IsUnresolvedTypes() &&
        check_type.HasClass() &&
        src_type.IsNonZeroReferenceTypes() &&
        !src_type.IsUnresolvedTypes() &&
        src_type.HasClass()) {
      DumpB77342775DebugData(check_type.GetClass(), src_type.GetClass());
    }
    return false;
  }
  if (check_type.IsLowHalf()) {
    // A wide check type also requires vsrc + 1 to hold the matching high half.
    const RegType& src_type_h = GetRegisterType(verifier, vsrc + 1);
    if (UNLIKELY(!src_type.CheckWidePair(src_type_h))) {
      verifier->Fail(VERIFY_ERROR_BAD_CLASS_HARD) << "wide register v" << vsrc << " has type "
                                                  << src_type << "/" << src_type_h;
      return false;
    }
  }
  // The register at vsrc has a defined type, we know the lower-upper-bound, but this is less
  // precise than the subtype in vsrc so leave it for reference types. For primitive types
  // if they are a defined type then they are as precise as we can get, however, for constant
  // types we may wish to refine them. Unfortunately constant propagation has rendered this useless.
  return true;
}
166
VerifyMonitorStackEmpty(MethodVerifier * verifier)167 inline void RegisterLine::VerifyMonitorStackEmpty(MethodVerifier* verifier) const {
168 if (MonitorStackDepth() != 0) {
169 verifier->Fail(VERIFY_ERROR_LOCKING, /*pending_exc=*/ false);
170 if (kDumpLockFailures) {
171 VLOG(verifier) << "expected empty monitor stack in "
172 << verifier->GetMethodReference().PrettyMethod();
173 }
174 }
175 }
176
ComputeSize(size_t num_regs)177 inline size_t RegisterLine::ComputeSize(size_t num_regs) {
178 return OFFSETOF_MEMBER(RegisterLine, line_) + num_regs * sizeof(uint16_t);
179 }
180
Create(size_t num_regs,ScopedArenaAllocator & allocator,RegTypeCache * reg_types)181 inline RegisterLine* RegisterLine::Create(size_t num_regs,
182 ScopedArenaAllocator& allocator,
183 RegTypeCache* reg_types) {
184 void* memory = allocator.Alloc(ComputeSize(num_regs));
185 return new (memory) RegisterLine(num_regs, allocator, reg_types);
186 }
187
// Construct a line tracking `num_regs` registers. The trailing `line_` array
// was sized by ComputeSize() in Create(); registers start zero-filled and the
// result slots start as Undefined.
inline RegisterLine::RegisterLine(size_t num_regs,
                                  ScopedArenaAllocator& allocator,
                                  RegTypeCache* reg_types)
    : num_regs_(num_regs),
      monitors_(allocator.Adapter(kArenaAllocVerifier)),
      reg_to_lock_depths_(std::less<uint32_t>(),
                          allocator.Adapter(kArenaAllocVerifier)),
      this_initialized_(false) {
  // The trailing storage is raw arena memory, so use uninitialized_fill_n
  // rather than assignment into not-yet-constructed elements.
  std::uninitialized_fill_n(line_, num_regs_, 0u);
  SetResultTypeToUnknown(reg_types);
}
199
ClearRegToLockDepth(size_t reg,size_t depth)200 inline void RegisterLine::ClearRegToLockDepth(size_t reg, size_t depth) {
201 CHECK_LT(depth, 32u);
202 DCHECK(IsSetLockDepth(reg, depth));
203 auto it = reg_to_lock_depths_.find(reg);
204 DCHECK(it != reg_to_lock_depths_.end());
205 uint32_t depths = it->second ^ (1 << depth);
206 if (depths != 0) {
207 it->second = depths;
208 } else {
209 reg_to_lock_depths_.erase(it);
210 }
211 // Need to unlock every register at the same lock depth. These are aliased locks.
212 uint32_t mask = 1 << depth;
213 for (auto& pair : reg_to_lock_depths_) {
214 if ((pair.second & mask) != 0) {
215 VLOG(verifier) << "Also unlocking " << pair.first;
216 pair.second ^= mask;
217 }
218 }
219 }
220
operator()221 inline void RegisterLineArenaDelete::operator()(RegisterLine* ptr) const {
222 if (ptr != nullptr) {
223 ptr->~RegisterLine();
224 ProtectMemory(ptr, RegisterLine::ComputeSize(ptr->NumRegs()));
225 }
226 }
227
228 } // namespace verifier
229 } // namespace art
230
231 #endif // ART_RUNTIME_VERIFIER_REGISTER_LINE_INL_H_
232