/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_NODES_H_
#define ART_COMPILER_OPTIMIZING_NODES_H_

#include <algorithm>
#include <array>
#include <type_traits>

#include "base/arena_allocator.h"
#include "base/arena_bit_vector.h"
#include "base/arena_containers.h"
#include "base/arena_object.h"
#include "base/array_ref.h"
#include "base/intrusive_forward_list.h"
#include "base/iteration_range.h"
#include "base/mutex.h"
#include "base/quasi_atomic.h"
#include "base/stl_util.h"
#include "base/transform_array_ref.h"
#include "art_method.h"
#include "block_namer.h"
#include "class_root.h"
#include "compilation_kind.h"
#include "data_type.h"
#include "deoptimization_kind.h"
#include "dex/dex_file.h"
#include "dex/dex_file_types.h"
#include "dex/invoke_type.h"
#include "dex/method_reference.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "handle.h"
#include "handle_scope.h"
#include "intrinsics_enum.h"
#include "locations.h"
#include "mirror/class.h"
#include "mirror/method_type.h"
#include "offsets.h"

namespace art {

class ArenaStack;
class GraphChecker;
class HBasicBlock;
class HConstructorFence;
class HCurrentMethod;
class HDoubleConstant;
class HEnvironment;
class HFloatConstant;
class HGraphBuilder;
class HGraphVisitor;
class HInstruction;
class HIntConstant;
class HInvoke;
class HLongConstant;
class HNullConstant;
class HParameterValue;
class HPhi;
class HSuspendCheck;
class HTryBoundary;
class FieldInfo;
class LiveInterval;
class LocationSummary;
class SlowPathCode;
class SsaBuilder;

namespace mirror {
class DexCache;
}  // namespace mirror

static const int kDefaultNumberOfBlocks = 8;
static const int kDefaultNumberOfSuccessors = 2;
static const int kDefaultNumberOfPredecessors = 2;
static const int kDefaultNumberOfExceptionalPredecessors = 0;
static const int kDefaultNumberOfDominatedBlocks = 1;
static const int kDefaultNumberOfBackEdges = 1;

// The maximum (meaningful) distance (31) that can be used in an integer shift/rotate operation.
static constexpr int32_t kMaxIntShiftDistance = 0x1f;
// The maximum (meaningful) distance (63) that can be used in a long shift/rotate operation.
static constexpr int32_t kMaxLongShiftDistance = 0x3f;
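
// Worked example (informative, following Java/dex shift semantics): only the
// low five bits of an int shift distance are meaningful, so a dex-level
// `x << 33` is equivalent to `x << (33 & kMaxIntShiftDistance)`, i.e. `x << 1`.
// Likewise, a long shift masks its distance with kMaxLongShiftDistance (0x3f).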

static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);

static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);

static constexpr uint32_t kNoDexPc = -1;

inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
  // For the purposes of the compiler, the dex files must actually be the same object
  // if we want to safely treat them as the same. This is especially important for JIT
  // as custom class loaders can open the same underlying file (or memory) multiple
  // times and provide different class resolution but no two class loaders should ever
  // use the same DexFile object - doing so is an unsupported hack that can lead to
  // all sorts of weird failures.
  return &lhs == &rhs;
}

enum IfCondition {
  // All types.
  kCondEQ,  // ==
  kCondNE,  // !=
  // Signed integers and floating-point numbers.
  kCondLT,  // <
  kCondLE,  // <=
  kCondGT,  // >
  kCondGE,  // >=
  // Unsigned integers.
  kCondB,   // <
  kCondBE,  // <=
  kCondA,   // >
  kCondAE,  // >=
  // First and last aliases.
  kCondFirst = kCondEQ,
  kCondLast = kCondAE,
};

enum GraphAnalysisResult {
  kAnalysisSkipped,
  kAnalysisInvalidBytecode,
  kAnalysisFailThrowCatchLoop,
  kAnalysisFailAmbiguousArrayOp,
  kAnalysisFailIrreducibleLoopAndStringInit,
  kAnalysisFailPhiEquivalentInOsr,
  kAnalysisSuccess,
};

template <typename T>
static inline typename std::make_unsigned<T>::type MakeUnsigned(T x) {
  return static_cast<typename std::make_unsigned<T>::type>(x);
}
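
// Worked example (informative): MakeUnsigned reinterprets the value in the
// corresponding unsigned type, so MakeUnsigned(int32_t{-1}) yields 0xFFFFFFFFu.
// This gives well-defined wrap-around arithmetic and unsigned comparisons.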

class HInstructionList : public ValueObject {
 public:
  HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}

  void AddInstruction(HInstruction* instruction);
  void RemoveInstruction(HInstruction* instruction);

  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);

  // Return true if this list contains `instruction`.
  bool Contains(HInstruction* instruction) const;

  // Return true if `instruction1` is found before `instruction2` in
  // this instruction list and false otherwise. Abort if neither
  // of these instructions is found.
  bool FoundBefore(const HInstruction* instruction1,
                   const HInstruction* instruction2) const;
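
  // Illustrative sketch for FoundBefore (informative, not part of the API):
  // for a list ordered [i1, i2], FoundBefore(i1, i2) returns true and
  // FoundBefore(i2, i1) returns false; the process aborts only if the walk
  // reaches the end of the list without encountering either instruction.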

  bool IsEmpty() const { return first_instruction_ == nullptr; }
  void Clear() { first_instruction_ = last_instruction_ = nullptr; }

  // Update the block of all instructions to be `block`.
  void SetBlockOfInstructions(HBasicBlock* block) const;

  void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
  void AddBefore(HInstruction* cursor, const HInstructionList& instruction_list);
  void Add(const HInstructionList& instruction_list);

  // Return the number of instructions in the list. This is an expensive operation.
  size_t CountSize() const;

 private:
  HInstruction* first_instruction_;
  HInstruction* last_instruction_;

  friend class HBasicBlock;
  friend class HGraph;
  friend class HInstruction;
  friend class HInstructionIterator;
  friend class HInstructionIteratorHandleChanges;
  friend class HBackwardInstructionIterator;

  DISALLOW_COPY_AND_ASSIGN(HInstructionList);
};

class ReferenceTypeInfo : ValueObject {
 public:
  typedef Handle<mirror::Class> TypeHandle;

  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact);

  static ReferenceTypeInfo Create(TypeHandle type_handle) REQUIRES_SHARED(Locks::mutator_lock_) {
    return Create(type_handle, type_handle->CannotBeAssignedFromOtherTypes());
  }

  static ReferenceTypeInfo CreateUnchecked(TypeHandle type_handle, bool is_exact) {
    return ReferenceTypeInfo(type_handle, is_exact);
  }

  static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }

  static bool IsValidHandle(TypeHandle handle) {
    return handle.GetReference() != nullptr;
  }

  bool IsValid() const {
    return IsValidHandle(type_handle_);
  }

  bool IsExact() const { return is_exact_; }

  bool IsObjectClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsObjectClass();
  }

  bool IsStringClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsStringClass();
  }

  bool IsObjectArray() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
  }

  bool IsInterface() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsInterface();
  }

  bool IsArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass();
  }

  bool IsPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsPrimitiveArray();
  }

  bool IsNonPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
  }

  bool CanArrayHold(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  bool CanArrayHoldValuesOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    if (!rti.IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
        rti.GetTypeHandle()->GetComponentType());
  }

  Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }

  bool IsSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Returns true if the type information provides the same amount of detail.
  // Note that this does not mean that the instructions have the same actual type
  // (because the type can be the result of a merge).
  bool IsEqual(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!IsValid() && !rti.IsValid()) {
      // Invalid types are equal.
      return true;
    }
    if (!IsValid() || !rti.IsValid()) {
      // One is valid, the other not.
      return false;
    }
    return IsExact() == rti.IsExact()
        && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
  }
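
  // Illustrative sketch for IsEqual (informative; assumes `h` is some valid
  // TypeHandle): two infos are equal when they carry the same class handle
  // with the same precision, e.g.
  //
  //   ReferenceTypeInfo a = ReferenceTypeInfo::Create(h, /* is_exact= */ true);
  //   ReferenceTypeInfo b = ReferenceTypeInfo::Create(h, /* is_exact= */ true);
  //   ReferenceTypeInfo c = ReferenceTypeInfo::Create(h, /* is_exact= */ false);
  //   a.IsEqual(b);  // true: same handle, same exactness.
  //   a.IsEqual(c);  // false: same handle, different exactness.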

 private:
  ReferenceTypeInfo() : type_handle_(TypeHandle()), is_exact_(false) {}
  ReferenceTypeInfo(TypeHandle type_handle, bool is_exact)
      : type_handle_(type_handle), is_exact_(is_exact) { }

  // The class of the object.
  TypeHandle type_handle_;
  // Whether or not the type is exact or a superclass of the actual type.
  // Whether or not we have any information about this type.
  bool is_exact_;
};

std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);

class HandleCache {
 public:
  explicit HandleCache(VariableSizedHandleScope* handles) : handles_(handles) { }

  VariableSizedHandleScope* GetHandles() { return handles_; }

  template <typename T>
  MutableHandle<T> NewHandle(T* object) REQUIRES_SHARED(Locks::mutator_lock_) {
    return handles_->NewHandle(object);
  }

  template <typename T>
  MutableHandle<T> NewHandle(ObjPtr<T> object) REQUIRES_SHARED(Locks::mutator_lock_) {
    return handles_->NewHandle(object);
  }

  ReferenceTypeInfo::TypeHandle GetObjectClassHandle() {
    return GetRootHandle(ClassRoot::kJavaLangObject, &object_class_handle_);
  }

  ReferenceTypeInfo::TypeHandle GetClassClassHandle() {
    return GetRootHandle(ClassRoot::kJavaLangClass, &class_class_handle_);
  }

  ReferenceTypeInfo::TypeHandle GetMethodHandleClassHandle() {
    return GetRootHandle(ClassRoot::kJavaLangInvokeMethodHandleImpl, &method_handle_class_handle_);
  }

  ReferenceTypeInfo::TypeHandle GetMethodTypeClassHandle() {
    return GetRootHandle(ClassRoot::kJavaLangInvokeMethodType, &method_type_class_handle_);
  }

  ReferenceTypeInfo::TypeHandle GetStringClassHandle() {
    return GetRootHandle(ClassRoot::kJavaLangString, &string_class_handle_);
  }

  ReferenceTypeInfo::TypeHandle GetThrowableClassHandle() {
    return GetRootHandle(ClassRoot::kJavaLangThrowable, &throwable_class_handle_);
  }

 private:
  inline ReferenceTypeInfo::TypeHandle GetRootHandle(ClassRoot class_root,
                                                     ReferenceTypeInfo::TypeHandle* cache) {
    if (UNLIKELY(!ReferenceTypeInfo::IsValidHandle(*cache))) {
      *cache = CreateRootHandle(handles_, class_root);
    }
    return *cache;
  }

  static ReferenceTypeInfo::TypeHandle CreateRootHandle(VariableSizedHandleScope* handles,
                                                        ClassRoot class_root);

  VariableSizedHandleScope* handles_;

  ReferenceTypeInfo::TypeHandle object_class_handle_;
  ReferenceTypeInfo::TypeHandle class_class_handle_;
  ReferenceTypeInfo::TypeHandle method_handle_class_handle_;
  ReferenceTypeInfo::TypeHandle method_type_class_handle_;
  ReferenceTypeInfo::TypeHandle string_class_handle_;
  ReferenceTypeInfo::TypeHandle throwable_class_handle_;
};
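
// Illustrative sketch (informative; assumes `handles` is an existing
// VariableSizedHandleScope*): the cache lazily creates one handle per class
// root on first use and returns the same handle afterwards:
//
//   HandleCache cache(handles);
//   ReferenceTypeInfo::TypeHandle obj = cache.GetObjectClassHandle();    // creates the handle.
//   ReferenceTypeInfo::TypeHandle again = cache.GetObjectClassHandle();  // returns the cached one.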

// Control-flow graph of a method. Contains a list of basic blocks.
class HGraph : public ArenaObject<kArenaAllocGraph> {
 public:
  HGraph(ArenaAllocator* allocator,
         ArenaStack* arena_stack,
         VariableSizedHandleScope* handles,
         const DexFile& dex_file,
         uint32_t method_idx,
         InstructionSet instruction_set,
         InvokeType invoke_type = kInvalidInvokeType,
         bool dead_reference_safe = false,
         bool debuggable = false,
         CompilationKind compilation_kind = CompilationKind::kOptimized,
         int start_instruction_id = 0)
      : allocator_(allocator),
        arena_stack_(arena_stack),
        handle_cache_(handles),
        blocks_(allocator->Adapter(kArenaAllocBlockList)),
        reverse_post_order_(allocator->Adapter(kArenaAllocReversePostOrder)),
        linear_order_(allocator->Adapter(kArenaAllocLinearOrder)),
        reachability_graph_(allocator, 0, 0, true, kArenaAllocReachabilityGraph),
        entry_block_(nullptr),
        exit_block_(nullptr),
        maximum_number_of_out_vregs_(0),
        number_of_vregs_(0),
        number_of_in_vregs_(0),
        temporaries_vreg_slots_(0),
        has_bounds_checks_(false),
        has_try_catch_(false),
        has_monitor_operations_(false),
        has_simd_(false),
        has_loops_(false),
        has_irreducible_loops_(false),
        has_direct_critical_native_call_(false),
        dead_reference_safe_(dead_reference_safe),
        debuggable_(debuggable),
        current_instruction_id_(start_instruction_id),
        dex_file_(dex_file),
        method_idx_(method_idx),
        invoke_type_(invoke_type),
        in_ssa_form_(false),
        number_of_cha_guards_(0),
        instruction_set_(instruction_set),
        cached_null_constant_(nullptr),
        cached_int_constants_(std::less<int32_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
        cached_float_constants_(std::less<int32_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
        cached_long_constants_(std::less<int64_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
        cached_double_constants_(std::less<int64_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
        cached_current_method_(nullptr),
        art_method_(nullptr),
        compilation_kind_(compilation_kind),
        cha_single_implementation_list_(allocator->Adapter(kArenaAllocCHA)) {
    blocks_.reserve(kDefaultNumberOfBlocks);
  }

  std::ostream& Dump(std::ostream& os,
                     std::optional<std::reference_wrapper<const BlockNamer>> namer = std::nullopt);

  ArenaAllocator* GetAllocator() const { return allocator_; }
  ArenaStack* GetArenaStack() const { return arena_stack_; }

  HandleCache* GetHandleCache() { return &handle_cache_; }

  const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }

  // An iterator over only the blocks that are still actually in the graph (when
  // blocks are removed they are replaced with 'nullptr' in GetBlocks to
  // simplify block-id assignment and avoid memmoves in the block list).
  IterationRange<FilterNull<ArenaVector<HBasicBlock*>::const_iterator>> GetActiveBlocks() const {
    return FilterOutNull(MakeIterationRange(GetBlocks()));
  }
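
  // Illustrative sketch (informative; assumes `graph` is an HGraph*):
  // iterating over the active blocks skips the nullptr slots left behind by
  // block removal, so no null check is needed in the loop body:
  //
  //   for (HBasicBlock* block : graph->GetActiveBlocks()) {
  //     // `block` is never nullptr here.
  //   }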

  bool IsInSsaForm() const { return in_ssa_form_; }
  void SetInSsaForm() { in_ssa_form_ = true; }

  HBasicBlock* GetEntryBlock() const { return entry_block_; }
  HBasicBlock* GetExitBlock() const { return exit_block_; }
  bool HasExitBlock() const { return exit_block_ != nullptr; }

  void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
  void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }

  void AddBlock(HBasicBlock* block);

  void ComputeDominanceInformation();
  void ClearDominanceInformation();
  void ComputeReachabilityInformation();
  void ClearReachabilityInformation();
  void ClearLoopInformation();
  void FindBackEdges(ArenaBitVector* visited);
  GraphAnalysisResult BuildDominatorTree();
  void SimplifyCFG();
  void SimplifyCatchBlocks();

  // Analyze all natural loops in this graph. Returns a code specifying that it
  // was successful or the reason for failure. The method will fail if a loop
  // is a throw-catch loop, i.e. the header is a catch block.
  GraphAnalysisResult AnalyzeLoops() const;

  // Iterate over blocks to compute try block membership. Needs reverse post
  // order and loop information.
  void ComputeTryBlockInformation();

  // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
  // Returns the instruction to replace the invoke expression or null if the
  // invoke is for a void method. Note that the caller is responsible for replacing
  // and removing the invoke instruction.
  HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);

  // Update the loop and try membership of `block`, which was spawned from `reference`.
  // In case `reference` is a back edge, `replace_if_back_edge` notifies whether `block`
  // should be the new back edge.
  void UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
                                             HBasicBlock* reference,
                                             bool replace_if_back_edge);

  // We need to add a couple of blocks to test if the loop body is entered and
  // to put deoptimization instructions, etc.
  void TransformLoopHeaderForBCE(HBasicBlock* header);

  // Adds a new loop directly after the loop with the given header and exit.
  // Returns the new preheader.
  HBasicBlock* TransformLoopForVectorization(HBasicBlock* header,
                                             HBasicBlock* body,
                                             HBasicBlock* exit);

  // Removes `block` from the graph. Assumes `block` has been disconnected from
  // other blocks and has no instructions or phis.
  void DeleteDeadEmptyBlock(HBasicBlock* block);

  // Splits the edge between `block` and `successor` while preserving the
  // indices in the predecessor/successor lists. If there are multiple edges
  // between the blocks, the lowest indices are used.
  // Returns the new block which is empty and has the same dex pc as `successor`.
  HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);

  void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);
  void OrderLoopHeaderPredecessors(HBasicBlock* header);

  // Transform a loop into a format with a single preheader.
  //
  // Each phi in the header should be split: the original one in the header should only hold
  // inputs reachable from the back edges and a single input from the preheader. The newly created
  // phi in the preheader should collate the inputs from the original multiple incoming blocks.
  //
  // Loops in the graph typically have a single preheader, so this method is used to "repair" loops
  // that no longer have this property.
  void TransformLoopToSinglePreheaderFormat(HBasicBlock* header);

  void SimplifyLoop(HBasicBlock* header);

  int32_t GetNextInstructionId() {
    CHECK_NE(current_instruction_id_, INT32_MAX);
    return current_instruction_id_++;
  }

  int32_t GetCurrentInstructionId() const {
    return current_instruction_id_;
  }

  void SetCurrentInstructionId(int32_t id) {
    CHECK_GE(id, current_instruction_id_);
    current_instruction_id_ = id;
  }

  uint16_t GetMaximumNumberOfOutVRegs() const {
    return maximum_number_of_out_vregs_;
  }

  void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
    maximum_number_of_out_vregs_ = new_value;
  }

  void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
    maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
  }

  void UpdateTemporariesVRegSlots(size_t slots) {
    temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
  }

  size_t GetTemporariesVRegSlots() const {
    DCHECK(!in_ssa_form_);
    return temporaries_vreg_slots_;
  }

  void SetNumberOfVRegs(uint16_t number_of_vregs) {
    number_of_vregs_ = number_of_vregs;
  }

  uint16_t GetNumberOfVRegs() const {
    return number_of_vregs_;
  }

  void SetNumberOfInVRegs(uint16_t value) {
    number_of_in_vregs_ = value;
  }

  uint16_t GetNumberOfInVRegs() const {
    return number_of_in_vregs_;
  }

  uint16_t GetNumberOfLocalVRegs() const {
    DCHECK(!in_ssa_form_);
    return number_of_vregs_ - number_of_in_vregs_;
  }

  const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
    return reverse_post_order_;
  }

  ArrayRef<HBasicBlock* const> GetReversePostOrderSkipEntryBlock() const {
    DCHECK(GetReversePostOrder()[0] == entry_block_);
    return ArrayRef<HBasicBlock* const>(GetReversePostOrder()).SubArray(1);
  }

  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetPostOrder() const {
    return ReverseRange(GetReversePostOrder());
  }

  const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
    return linear_order_;
  }

  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetLinearPostOrder() const {
    return ReverseRange(GetLinearOrder());
  }

  bool HasBoundsChecks() const {
    return has_bounds_checks_;
  }

  void SetHasBoundsChecks(bool value) {
    has_bounds_checks_ = value;
  }

  // Returns true if `dest` is reachable from `source`, using either blocks or block ids.
  bool PathBetween(const HBasicBlock* source, const HBasicBlock* dest) const;
  bool PathBetween(uint32_t source_id, uint32_t dest_id) const;

  // Is the code known to be robust against eliminating dead references
  // and the effects of early finalization?
  bool IsDeadReferenceSafe() const { return dead_reference_safe_; }

  void MarkDeadReferenceUnsafe() { dead_reference_safe_ = false; }

  bool IsDebuggable() const { return debuggable_; }

  // Returns a constant of the given type and value. If it does not exist
  // already, it is created and inserted into the graph. This method is only for
  // integral types.
  HConstant* GetConstant(DataType::Type type, int64_t value, uint32_t dex_pc = kNoDexPc);

  // TODO: This is problematic for the consistency of reference type propagation
  // because it can be created anytime after the pass and thus it will be left
  // with an invalid type.
  HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc);

  HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(value, &cached_int_constants_, dex_pc);
  }
  HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(value, &cached_long_constants_, dex_pc);
  }
  HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc);
  }
  HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc);
  }
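
  // Informative note (illustrative, not part of the API): float and double
  // constants are cached by raw bit pattern, so values that compare equal but
  // encode differently get distinct cached instructions:
  //
  //   graph->GetFloatConstant(0.0f);   // keyed by bit pattern 0x00000000.
  //   graph->GetFloatConstant(-0.0f);  // keyed by 0x80000000, a different constant.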

  HCurrentMethod* GetCurrentMethod();

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  uint32_t GetMethodIdx() const {
    return method_idx_;
  }

  // Get the method name (without the signature), e.g. "<init>".
  const char* GetMethodName() const;

  // Get the pretty method name (class + name + optionally signature).
  std::string PrettyMethod(bool with_signature = true) const;

  InvokeType GetInvokeType() const {
    return invoke_type_;
  }

  InstructionSet GetInstructionSet() const {
    return instruction_set_;
  }

  bool IsCompilingOsr() const { return compilation_kind_ == CompilationKind::kOsr; }

  bool IsCompilingBaseline() const { return compilation_kind_ == CompilationKind::kBaseline; }

  CompilationKind GetCompilationKind() const { return compilation_kind_; }

  ArenaSet<ArtMethod*>& GetCHASingleImplementationList() {
    return cha_single_implementation_list_;
  }

  void AddCHASingleImplementationDependency(ArtMethod* method) {
    cha_single_implementation_list_.insert(method);
  }

  bool HasShouldDeoptimizeFlag() const {
    return number_of_cha_guards_ != 0;
  }

  bool HasTryCatch() const { return has_try_catch_; }
  void SetHasTryCatch(bool value) { has_try_catch_ = value; }

  bool HasMonitorOperations() const { return has_monitor_operations_; }
  void SetHasMonitorOperations(bool value) { has_monitor_operations_ = value; }

  bool HasSIMD() const { return has_simd_; }
  void SetHasSIMD(bool value) { has_simd_ = value; }

  bool HasLoops() const { return has_loops_; }
  void SetHasLoops(bool value) { has_loops_ = value; }

  bool HasIrreducibleLoops() const { return has_irreducible_loops_; }
  void SetHasIrreducibleLoops(bool value) { has_irreducible_loops_ = value; }

  bool HasDirectCriticalNativeCall() const { return has_direct_critical_native_call_; }
  void SetHasDirectCriticalNativeCall(bool value) { has_direct_critical_native_call_ = value; }

  ArtMethod* GetArtMethod() const { return art_method_; }
  void SetArtMethod(ArtMethod* method) { art_method_ = method; }

  // Returns an instruction with the opposite Boolean value from `cond`.
  // The instruction has been inserted into the graph, either as a constant, or
  // before `cursor`.
  HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor);

  ReferenceTypeInfo GetInexactObjectRti() {
    return ReferenceTypeInfo::Create(handle_cache_.GetObjectClassHandle(), /* is_exact= */ false);
  }

  uint32_t GetNumberOfCHAGuards() { return number_of_cha_guards_; }
  void SetNumberOfCHAGuards(uint32_t num) { number_of_cha_guards_ = num; }
  void IncrementNumberOfCHAGuards() { number_of_cha_guards_++; }

 private:
  void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const;
  void RemoveDeadBlocks(const ArenaBitVector& visited);

  template <class InstructionType, typename ValueType>
  InstructionType* CreateConstant(ValueType value,
                                  ArenaSafeMap<ValueType, InstructionType*>* cache,
                                  uint32_t dex_pc = kNoDexPc) {
    // Try to find an existing constant of the given value.
    InstructionType* constant = nullptr;
    auto cached_constant = cache->find(value);
    if (cached_constant != cache->end()) {
      constant = cached_constant->second;
    }

    // If not found or previously deleted, create and cache a new instruction.
    // Don't bother reviving a previously deleted instruction, for simplicity.
    if (constant == nullptr || constant->GetBlock() == nullptr) {
      constant = new (allocator_) InstructionType(value, dex_pc);
      cache->Overwrite(value, constant);
      InsertConstant(constant);
    }
    return constant;
  }

  void InsertConstant(HConstant* instruction);

  // Cache a float constant into the graph. This method should only be
  // called by the SsaBuilder when creating "equivalent" instructions.
  void CacheFloatConstant(HFloatConstant* constant);

  // See the CacheFloatConstant comment.
  void CacheDoubleConstant(HDoubleConstant* constant);

  ArenaAllocator* const allocator_;
  ArenaStack* const arena_stack_;

  HandleCache handle_cache_;

  // List of blocks in insertion order.
  ArenaVector<HBasicBlock*> blocks_;

  // List of blocks to perform a reverse post order tree traversal.
  ArenaVector<HBasicBlock*> reverse_post_order_;

  // List of blocks to perform a linear order tree traversal. Unlike the reverse
  // post order, this order is not incrementally kept up-to-date.
  ArenaVector<HBasicBlock*> linear_order_;

  // Reachability graph for checking connectedness between nodes. Acts as a partitioned vector
  // where each slice of RoundUp(blocks_.size(), BitVector::kWordBits) bits holds the
  // reachability of one node.
  ArenaBitVectorArray reachability_graph_;

  HBasicBlock* entry_block_;
  HBasicBlock* exit_block_;

  // The maximum number of virtual register arguments passed to an HInvoke in this graph.
  uint16_t maximum_number_of_out_vregs_;

  // The number of virtual registers in this method. Contains the parameters.
  uint16_t number_of_vregs_;

  // The number of virtual registers used by parameters of this method.
  uint16_t number_of_in_vregs_;

  // Number of vreg size slots that the temporaries use (used in baseline compiler).
  size_t temporaries_vreg_slots_;

  // Flag whether there are bounds checks in the graph. We can skip
  // BCE if it's false. It's only best effort to keep it up to date in
  // the presence of code elimination so there might be false positives.
  bool has_bounds_checks_;

  // Flag whether there are try/catch blocks in the graph. We will skip
  // try/catch-related passes if it's false. It's only best effort to keep
  // it up to date in the presence of code elimination so there might be
  // false positives.
  bool has_try_catch_;

  // Flag whether there are any HMonitorOperation in the graph. If so, this mandates
  // a DexRegisterMap to be present to allow deadlock analysis for non-debuggable code.
  bool has_monitor_operations_;

  // Flag whether SIMD instructions appear in the graph. If true, the
  // code generators may have to be more careful spilling the wider
  // contents of SIMD registers.
  bool has_simd_;

  // Flag whether there are any loops in the graph. We can skip loop
  // optimization if it's false. It's only best effort to keep it up
  // to date in the presence of code elimination so there might be false
  // positives.
  bool has_loops_;

  // Flag whether there are any irreducible loops in the graph. It's only
  // best effort to keep it up to date in the presence of code elimination
  // so there might be false positives.
  bool has_irreducible_loops_;

  // Flag whether there are any direct calls to native code registered
  // for @CriticalNative methods.
  bool has_direct_critical_native_call_;

  // Is the code known to be robust against eliminating dead references
  // and the effects of early finalization? If false, dead reference variables
  // are kept if they might be visible to the garbage collector.
  // Currently this means that the class was declared to be dead-reference-safe,
  // the method accesses no reachability-sensitive fields or data, and the same
  // is true for any methods that were inlined into the current one.
  bool dead_reference_safe_;

  // Indicates whether the graph should be compiled in a way that
  // ensures full debuggability. If false, we can apply more
  // aggressive optimizations that may limit the level of debugging.
  const bool debuggable_;

  // The current id to assign to a newly added instruction. See HInstruction.id_.
  int32_t current_instruction_id_;

  // The dex file the method is from.
  const DexFile& dex_file_;

  // The method index in the dex file.
  const uint32_t method_idx_;

  // If inlined, this encodes how the callee is being invoked.
  const InvokeType invoke_type_;

  // Whether the graph has been transformed to SSA form. Only used
  // in debug mode to ensure we are not using properties only valid
  // for non-SSA form (like the number of temporaries).
  bool in_ssa_form_;

  // Number of CHA guards in the graph. Used to short-circuit the
  // CHA guard optimization pass when there is no CHA guard left.
  uint32_t number_of_cha_guards_;

  const InstructionSet instruction_set_;

  // Cached constants.
  HNullConstant* cached_null_constant_;
  ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
  ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
  ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
  ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;

  HCurrentMethod* cached_current_method_;

  // The ArtMethod this graph is for. Note that for AOT, it may be null,
  // for example for methods whose declaring class could not be resolved
  // (such as when the superclass could not be found).
  ArtMethod* art_method_;

  // How we are compiling the graph: either optimized, osr, or baseline.
  // For osr, we will make all loops seen as irreducible and emit special
  // stack maps to mark compiled code entries which the interpreter can
  // directly jump to.
  const CompilationKind compilation_kind_;

  // List of methods that are assumed to have single implementation.
  ArenaSet<ArtMethod*> cha_single_implementation_list_;

  friend class SsaBuilder;           // For caching constants.
  friend class SsaLivenessAnalysis;  // For the linear order.
  friend class HInliner;             // For the reverse post order.
  ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
  DISALLOW_COPY_AND_ASSIGN(HGraph);
};

inline std::ostream& operator<<(std::ostream& os, HGraph& graph) {
  return graph.Dump(os);
}

class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
 public:
  HLoopInformation(HBasicBlock* header, HGraph* graph)
      : header_(header),
        suspend_check_(nullptr),
        irreducible_(false),
        contains_irreducible_loop_(false),
        back_edges_(graph->GetAllocator()->Adapter(kArenaAllocLoopInfoBackEdges)),
        // Make bit vector growable, as the number of blocks may change.
        blocks_(graph->GetAllocator(),
                graph->GetBlocks().size(),
                true,
                kArenaAllocLoopInfoBackEdges) {
    back_edges_.reserve(kDefaultNumberOfBackEdges);
  }

  bool IsIrreducible() const { return irreducible_; }
  bool ContainsIrreducibleLoop() const { return contains_irreducible_loop_; }

  void Dump(std::ostream& os);

  HBasicBlock* GetHeader() const {
    return header_;
  }

  void SetHeader(HBasicBlock* block) {
    header_ = block;
  }

  HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
  void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
  bool HasSuspendCheck() const { return suspend_check_ != nullptr; }

  void AddBackEdge(HBasicBlock* back_edge) {
    back_edges_.push_back(back_edge);
  }

  void RemoveBackEdge(HBasicBlock* back_edge) {
    RemoveElement(back_edges_, back_edge);
  }

  bool IsBackEdge(const HBasicBlock& block) const {
    return ContainsElement(back_edges_, &block);
  }

  size_t NumberOfBackEdges() const {
    return back_edges_.size();
  }

  HBasicBlock* GetPreHeader() const;

  const ArenaVector<HBasicBlock*>& GetBackEdges() const {
    return back_edges_;
  }

  // Returns the lifetime position of the back edge that has the
  // greatest lifetime position.
  size_t GetLifetimeEnd() const;

  void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
    ReplaceElement(back_edges_, existing, new_back_edge);
  }

  // Finds blocks that are part of this loop.
  void Populate();

  // Updates the block population of the loop and all of its outer loops recursively
  // after the population of the inner loop is updated.
  void PopulateInnerLoopUpwards(HLoopInformation* inner_loop);

  // Returns whether this loop information contains `block`.
  // Note that this loop information *must* be populated before entering this function.
  bool Contains(const HBasicBlock& block) const;

  // Returns whether this loop information is an inner loop of `other`.
  // Note that `other` *must* be populated before entering this function.
  bool IsIn(const HLoopInformation& other) const;

  // Returns true if `instruction` is not defined within this loop.
  bool IsDefinedOutOfTheLoop(HInstruction* instruction) const;

  const ArenaBitVector& GetBlocks() const { return blocks_; }

  void Add(HBasicBlock* block);
  void Remove(HBasicBlock* block);

  void ClearAllBlocks() {
    blocks_.ClearAllBits();
  }

  bool HasBackEdgeNotDominatedByHeader() const;

  bool IsPopulated() const {
    return blocks_.GetHighestBitSet() != -1;
  }

  bool DominatesAllBackEdges(HBasicBlock* block);

  bool HasExitEdge() const;

  // Resets back edge and blocks-in-loop data.
  void ResetBasicBlockData() {
    back_edges_.clear();
    ClearAllBlocks();
  }

 private:
  // Internal recursive implementation of `Populate`.
  void PopulateRecursive(HBasicBlock* block);
  void PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized);

  HBasicBlock* header_;
  HSuspendCheck* suspend_check_;
  bool irreducible_;
  bool contains_irreducible_loop_;
  ArenaVector<HBasicBlock*> back_edges_;
  ArenaBitVector blocks_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
};
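
// Illustrative sketch (informative; assumes `info` is a populated
// HLoopInformation* and `block` an HBasicBlock*): membership queries can be
// combined, e.g. to test whether a block is in the loop body but is not one of
// its back edges:
//
//   bool in_body = info->Contains(*block) && !info->IsBackEdge(*block);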

// Stores try/catch information for basic blocks.
// Note that HGraph is constructed so that catch blocks cannot simultaneously
// be try blocks.
class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
 public:
  // Try block information constructor.
  explicit TryCatchInformation(const HTryBoundary& try_entry)
      : try_entry_(&try_entry),
        catch_dex_file_(nullptr),
        catch_type_index_(dex::TypeIndex::Invalid()) {
    DCHECK(try_entry_ != nullptr);
  }

  // Catch block information constructor.
  TryCatchInformation(dex::TypeIndex catch_type_index, const DexFile& dex_file)
      : try_entry_(nullptr),
        catch_dex_file_(&dex_file),
        catch_type_index_(catch_type_index) {}

  bool IsTryBlock() const { return try_entry_ != nullptr; }

  const HTryBoundary& GetTryEntry() const {
    DCHECK(IsTryBlock());
    return *try_entry_;
  }

  bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }

  bool IsValidTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_.IsValid();
  }

  dex::TypeIndex GetCatchTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_;
  }

  const DexFile& GetCatchDexFile() const {
    DCHECK(IsCatchBlock());
    return *catch_dex_file_;
  }

  void SetInvalidTypeIndex() {
    catch_type_index_ = dex::TypeIndex::Invalid();
  }

 private:
  // One of possibly several TryBoundary instructions entering the block's try.
  // Only set for try blocks.
  const HTryBoundary* try_entry_;

  // Exception type information. Only set for catch blocks.
  const DexFile* catch_dex_file_;
  dex::TypeIndex catch_type_index_;
};

static constexpr size_t kNoLifetime = -1;
static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);

// A block in a method. Contains the list of instructions represented
// as a doubly-linked list. Each block knows its predecessors and
// successors.
class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
 public:
  explicit HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
      : graph_(graph),
        predecessors_(graph->GetAllocator()->Adapter(kArenaAllocPredecessors)),
        successors_(graph->GetAllocator()->Adapter(kArenaAllocSuccessors)),
        loop_information_(nullptr),
        dominator_(nullptr),
        dominated_blocks_(graph->GetAllocator()->Adapter(kArenaAllocDominated)),
        block_id_(kInvalidBlockId),
        dex_pc_(dex_pc),
        lifetime_start_(kNoLifetime),
        lifetime_end_(kNoLifetime),
        try_catch_information_(nullptr) {
    predecessors_.reserve(kDefaultNumberOfPredecessors);
    successors_.reserve(kDefaultNumberOfSuccessors);
    dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks);
  }

  const ArenaVector<HBasicBlock*>& GetPredecessors() const {
    return predecessors_;
  }

  size_t GetNumberOfPredecessors() const {
    return GetPredecessors().size();
  }

  const ArenaVector<HBasicBlock*>& GetSuccessors() const {
    return successors_;
  }

  ArrayRef<HBasicBlock* const> GetNormalSuccessors() const;
  ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const;

  bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
    return ContainsElement(successors_, block, start_from);
  }

  const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const {
    return dominated_blocks_;
  }

  bool IsEntryBlock() const {
    return graph_->GetEntryBlock() == this;
  }

  bool IsExitBlock() const {
    return graph_->GetExitBlock() == this;
  }

  bool IsSingleGoto() const;
  bool IsSingleReturn() const;
  bool IsSingleReturnOrReturnVoidAllowingPhis() const;
  bool IsSingleTryBoundary() const;

  // Returns true if this block emits nothing but a jump.
  bool IsSingleJump() const {
    HLoopInformation* loop_info = GetLoopInformation();
    return (IsSingleGoto() || IsSingleTryBoundary())
           // Back edges generate a suspend check.
           && (loop_info == nullptr || !loop_info->IsBackEdge(*this));
  }

  void AddBackEdge(HBasicBlock* back_edge) {
    if (loop_information_ == nullptr) {
      loop_information_ = new (graph_->GetAllocator()) HLoopInformation(this, graph_);
    }
    DCHECK_EQ(loop_information_->GetHeader(), this);
    loop_information_->AddBackEdge(back_edge);
  }

  // Registers a back edge; if the block was not a loop header before the call,
  // associates a newly created loop info with it.
  //
  // Used in SuperblockCloner to preserve the LoopInformation object instead of
  // resetting loop info for all blocks during back edge recalculation.
  void AddBackEdgeWhileUpdating(HBasicBlock* back_edge) {
    if (loop_information_ == nullptr || loop_information_->GetHeader() != this) {
      loop_information_ = new (graph_->GetAllocator()) HLoopInformation(this, graph_);
    }
    loop_information_->AddBackEdge(back_edge);
  }

  HGraph* GetGraph() const { return graph_; }
  void SetGraph(HGraph* graph) { graph_ = graph; }

  uint32_t GetBlockId() const { return block_id_; }
  void SetBlockId(int id) { block_id_ = id; }
  uint32_t GetDexPc() const { return dex_pc_; }

  HBasicBlock* GetDominator() const { return dominator_; }
  void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; }
  void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); }

  void RemoveDominatedBlock(HBasicBlock* block) {
    RemoveElement(dominated_blocks_, block);
  }

  void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) {
    ReplaceElement(dominated_blocks_, existing, new_block);
  }

  void ClearDominanceInformation();

  int NumberOfBackEdges() const {
    return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0;
  }

  HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
  HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
  const HInstructionList& GetInstructions() const { return instructions_; }
  HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
  HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
  const HInstructionList& GetPhis() const { return phis_; }

  HInstruction* GetFirstInstructionDisregardMoves() const;

  void AddSuccessor(HBasicBlock* block) {
    successors_.push_back(block);
    block->predecessors_.push_back(this);
  }

  void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t successor_index = GetSuccessorIndexOf(existing);
    existing->RemovePredecessor(this);
    new_block->predecessors_.push_back(this);
    successors_[successor_index] = new_block;
  }

  void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t predecessor_index = GetPredecessorIndexOf(existing);
    existing->RemoveSuccessor(this);
    new_block->successors_.push_back(this);
    predecessors_[predecessor_index] = new_block;
  }

  // Insert `this` between `predecessor` and `successor`. This method
  // preserves the indices, and will update the first edge found between
  // `predecessor` and `successor`.
  void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
    size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
    size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
    successor->predecessors_[predecessor_index] = this;
    predecessor->successors_[successor_index] = this;
    successors_.push_back(successor);
    predecessors_.push_back(predecessor);
  }
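
  // Illustrative sketch for InsertBetween (informative): given an edge P -> S,
  // `middle->InsertBetween(P, S)` rewires it to P -> middle -> S while keeping
  // P's successor index and S's predecessor index unchanged, so the mapping
  // between predecessor positions and phi inputs in S is preserved.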

  void RemovePredecessor(HBasicBlock* block) {
    predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
  }

  void RemoveSuccessor(HBasicBlock* block) {
    successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
  }

  void ClearAllPredecessors() {
    predecessors_.clear();
  }

  void AddPredecessor(HBasicBlock* block) {
    predecessors_.push_back(block);
    block->successors_.push_back(this);
  }

  void SwapPredecessors() {
    DCHECK_EQ(predecessors_.size(), 2u);
    std::swap(predecessors_[0], predecessors_[1]);
  }

  void SwapSuccessors() {
    DCHECK_EQ(successors_.size(), 2u);
    std::swap(successors_[0], successors_[1]);
  }

  size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
    return IndexOfElement(predecessors_, predecessor);
  }

  size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
    return IndexOfElement(successors_, successor);
  }

  HBasicBlock* GetSinglePredecessor() const {
    DCHECK_EQ(GetPredecessors().size(), 1u);
    return GetPredecessors()[0];
  }

  HBasicBlock* GetSingleSuccessor() const {
    DCHECK_EQ(GetSuccessors().size(), 1u);
    return GetSuccessors()[0];
  }

  // Returns whether the first occurrence of `predecessor` in the list of
  // predecessors is at index `idx`.
  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
    DCHECK_EQ(GetPredecessors()[idx], predecessor);
    return GetPredecessorIndexOf(predecessor) == idx;
  }
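
  // Illustrative note (informative): a block can appear more than once in the
  // predecessor list, e.g. when both edges of a branch target the same
  // successor. IsFirstIndexOfPredecessor distinguishes such duplicate edges,
  // since GetPredecessorIndexOf only ever reports the first occurrence.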
1277
1278 // Create a new block between this block and its predecessors. The new block
1279 // is added to the graph, all predecessor edges are relinked to it and an edge
1280 // is created to `this`. Returns the new empty block. Reverse post order or
1281 // loop and try/catch information are not updated.
1282 HBasicBlock* CreateImmediateDominator();
1283
1284 // Split the block into two blocks just before `cursor`. Returns the newly
1285 // created, latter block. Note that this method will add the block to the
1286 // graph, create a Goto at the end of the former block and will create an edge
1287 // between the blocks. It will not, however, update the reverse post order or
1288 // loop and try/catch information.
1289 HBasicBlock* SplitBefore(HInstruction* cursor);
1290
1291 // Split the block into two blocks just before `cursor`. Returns the newly
1292 // created block. Note that this method just updates raw block information,
1293 // like predecessors, successors, dominators, and instruction list. It does not
1294 // update the graph, reverse post order, loop information, nor make sure the
1295 // blocks are consistent (for example ending with a control flow instruction).
1296 HBasicBlock* SplitBeforeForInlining(HInstruction* cursor);
1297
1298 // Similar to `SplitBeforeForInlining` but does it after `cursor`.
1299 HBasicBlock* SplitAfterForInlining(HInstruction* cursor);
1300
1301 // Merge `other` at the end of `this`. Successors and dominated blocks of
1302 // `other` are changed to be successors and dominated blocks of `this`. Note
  // that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void MergeWithInlined(HBasicBlock* other);

  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
  // of `this` are moved to `other`.
  // Note that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void ReplaceWith(HBasicBlock* other);

  // Merges the instructions of `other` at the end of `this`.
  void MergeInstructionsWith(HBasicBlock* other);

  // Merge `other` at the end of `this`. This method updates loops, reverse post
  // order, links to predecessors, successors, dominators and deletes the block
  // from the graph. The two blocks must be successive, i.e. `this` must be the
  // only predecessor of `other` and `other` the only successor of `this`.
  void MergeWith(HBasicBlock* other);

  // Disconnects `this` from all its predecessors, successors and dominator,
  // removes it from all loops it is included in and eventually from the graph.
  // The block must not dominate any other block. Predecessors and successors
  // are safely updated.
  void DisconnectAndDelete();

  void AddInstruction(HInstruction* instruction);
  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
  // Replace phi `initial` with `replacement` within this block.
  void ReplaceAndRemovePhiWith(HPhi* initial, HPhi* replacement);
  // Replace instruction `initial` with `replacement` within this block.
  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                       HInstruction* replacement);
  void AddPhi(HPhi* phi);
  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
  // RemoveInstruction and RemovePhi delete a given instruction from the
  // respective instruction list. With `ensure_safety` set to true, they verify
  // that the instruction is not in use and remove it from the use lists of its
  // inputs.
  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
  void RemovePhi(HPhi* phi, bool ensure_safety = true);
  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);

  bool IsLoopHeader() const {
    return IsInLoop() && (loop_information_->GetHeader() == this);
  }

  bool IsLoopPreHeaderFirstPredecessor() const {
    DCHECK(IsLoopHeader());
    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
  }

  bool IsFirstPredecessorBackEdge() const {
    DCHECK(IsLoopHeader());
    return GetLoopInformation()->IsBackEdge(*GetPredecessors()[0]);
  }

  HLoopInformation* GetLoopInformation() const {
    return loop_information_;
  }

  // Set the loop_information_ on this block. Overrides the current
  // loop_information_ if it is an outer loop of the passed loop information.
  // Note that this method is called while creating the loop information.
  void SetInLoop(HLoopInformation* info) {
    if (IsLoopHeader()) {
      // Nothing to do. This just means `info` is an outer loop.
    } else if (!IsInLoop()) {
      loop_information_ = info;
    } else if (loop_information_->Contains(*info->GetHeader())) {
      // Block is currently part of an outer loop. Make it part of this inner loop.
      // Note that a non-header block having loop information means that this
      // loop information has already been populated.
      loop_information_ = info;
    } else {
      // Block is part of an inner loop. Do not update the loop information.
      // Note that we cannot do the check `info->Contains(*loop_information_->GetHeader())`
      // at this point, because this method is being called while populating `info`.
    }
  }

  // Raw update of the loop information.
  void SetLoopInformation(HLoopInformation* info) {
    loop_information_ = info;
  }

  bool IsInLoop() const { return loop_information_ != nullptr; }

  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }

  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
    try_catch_information_ = try_catch_information;
  }

  bool IsTryBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
  }

  bool IsCatchBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
  }

  // Returns the try entry that this block's successors should have. They will
  // be in the same try, unless the block ends in a try boundary. In that case,
  // the appropriate try entry will be returned.
  const HTryBoundary* ComputeTryEntryOfSuccessors() const;

  bool HasThrowingInstructions() const;

  // Returns whether this block dominates the block passed as parameter.
  bool Dominates(HBasicBlock* block) const;

  size_t GetLifetimeStart() const { return lifetime_start_; }
  size_t GetLifetimeEnd() const { return lifetime_end_; }

  void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
  void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }

  bool EndsWithControlFlowInstruction() const;
  bool EndsWithReturn() const;
  bool EndsWithIf() const;
  bool EndsWithTryBoundary() const;
  bool HasSinglePhi() const;

 private:
  HGraph* graph_;
  ArenaVector<HBasicBlock*> predecessors_;
  ArenaVector<HBasicBlock*> successors_;
  HInstructionList instructions_;
  HInstructionList phis_;
  HLoopInformation* loop_information_;
  HBasicBlock* dominator_;
  ArenaVector<HBasicBlock*> dominated_blocks_;
  uint32_t block_id_;
  // The dex program counter of the first instruction of this block.
  const uint32_t dex_pc_;
  size_t lifetime_start_;
  size_t lifetime_end_;
  TryCatchInformation* try_catch_information_;

  friend class HGraph;
  friend class HInstruction;
  // Allow manual control of the ordering of predecessors/successors.
  friend class OptimizingUnitTestHelper;

  DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
};
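
// A minimal sketch of block surgery with the API above (illustrative only:
// `block` is assumed to come from a pass over the graph, and GetSuccessors()/
// GetPredecessors() are the accessors declared earlier on HBasicBlock; the
// preconditions documented on MergeWith() still apply):
//
//   if (block->GetSuccessors().size() == 1u) {
//     HBasicBlock* successor = block->GetSuccessors()[0];
//     if (successor->GetPredecessors().size() == 1u) {
//       block->MergeWith(successor);  // Updates loops, RPO, links, dominators.
//     }
//   }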

// Iterates over the LoopInformation of all loops which contain 'block'
// from the innermost to the outermost.
class HLoopInformationOutwardIterator : public ValueObject {
 public:
  explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
      : current_(block.GetLoopInformation()) {}

  bool Done() const { return current_ == nullptr; }

  void Advance() {
    DCHECK(!Done());
    current_ = current_->GetPreHeader()->GetLoopInformation();
  }

  HLoopInformation* Current() const {
    DCHECK(!Done());
    return current_;
  }

 private:
  HLoopInformation* current_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
};
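
// Example (illustrative): walking all loops that enclose `block`, innermost
// first, using the iterator above:
//
//   for (HLoopInformationOutwardIterator it(*block); !it.Done(); it.Advance()) {
//     HLoopInformation* loop = it.Current();
//     // ... per-loop processing ...
//   }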

#define FOR_EACH_CONCRETE_INSTRUCTION_SCALAR_COMMON(M) \
  M(Above, Condition) \
  M(AboveOrEqual, Condition) \
  M(Abs, UnaryOperation) \
  M(Add, BinaryOperation) \
  M(And, BinaryOperation) \
  M(ArrayGet, Instruction) \
  M(ArrayLength, Instruction) \
  M(ArraySet, Instruction) \
  M(Below, Condition) \
  M(BelowOrEqual, Condition) \
  M(BooleanNot, UnaryOperation) \
  M(BoundsCheck, Instruction) \
  M(BoundType, Instruction) \
  M(CheckCast, Instruction) \
  M(ClassTableGet, Instruction) \
  M(ClearException, Instruction) \
  M(ClinitCheck, Instruction) \
  M(Compare, BinaryOperation) \
  M(ConstructorFence, Instruction) \
  M(CurrentMethod, Instruction) \
  M(ShouldDeoptimizeFlag, Instruction) \
  M(Deoptimize, Instruction) \
  M(Div, BinaryOperation) \
  M(DivZeroCheck, Instruction) \
  M(DoubleConstant, Constant) \
  M(Equal, Condition) \
  M(Exit, Instruction) \
  M(FloatConstant, Constant) \
  M(Goto, Instruction) \
  M(GreaterThan, Condition) \
  M(GreaterThanOrEqual, Condition) \
  M(If, Instruction) \
  M(InstanceFieldGet, Instruction) \
  M(InstanceFieldSet, Instruction) \
  M(PredicatedInstanceFieldGet, Instruction) \
  M(InstanceOf, Instruction) \
  M(IntConstant, Constant) \
  M(IntermediateAddress, Instruction) \
  M(InvokeUnresolved, Invoke) \
  M(InvokeInterface, Invoke) \
  M(InvokeStaticOrDirect, Invoke) \
  M(InvokeVirtual, Invoke) \
  M(InvokePolymorphic, Invoke) \
  M(InvokeCustom, Invoke) \
  M(LessThan, Condition) \
  M(LessThanOrEqual, Condition) \
  M(LoadClass, Instruction) \
  M(LoadException, Instruction) \
  M(LoadMethodHandle, Instruction) \
  M(LoadMethodType, Instruction) \
  M(LoadString, Instruction) \
  M(LongConstant, Constant) \
  M(Max, Instruction) \
  M(MemoryBarrier, Instruction) \
  M(Min, BinaryOperation) \
  M(MonitorOperation, Instruction) \
  M(Mul, BinaryOperation) \
  M(NativeDebugInfo, Instruction) \
  M(Neg, UnaryOperation) \
  M(NewArray, Instruction) \
  M(NewInstance, Instruction) \
  M(Not, UnaryOperation) \
  M(NotEqual, Condition) \
  M(NullConstant, Instruction) \
  M(NullCheck, Instruction) \
  M(Or, BinaryOperation) \
  M(PackedSwitch, Instruction) \
  M(ParallelMove, Instruction) \
  M(ParameterValue, Instruction) \
  M(Phi, Instruction) \
  M(Rem, BinaryOperation) \
  M(Return, Instruction) \
  M(ReturnVoid, Instruction) \
  M(Ror, BinaryOperation) \
  M(Shl, BinaryOperation) \
  M(Shr, BinaryOperation) \
  M(StaticFieldGet, Instruction) \
  M(StaticFieldSet, Instruction) \
  M(StringBuilderAppend, Instruction) \
  M(UnresolvedInstanceFieldGet, Instruction) \
  M(UnresolvedInstanceFieldSet, Instruction) \
  M(UnresolvedStaticFieldGet, Instruction) \
  M(UnresolvedStaticFieldSet, Instruction) \
  M(Select, Instruction) \
  M(Sub, BinaryOperation) \
  M(SuspendCheck, Instruction) \
  M(Throw, Instruction) \
  M(TryBoundary, Instruction) \
  M(TypeConversion, Instruction) \
  M(UShr, BinaryOperation) \
  M(Xor, BinaryOperation)

#define FOR_EACH_CONCRETE_INSTRUCTION_VECTOR_COMMON(M) \
  M(VecReplicateScalar, VecUnaryOperation) \
  M(VecExtractScalar, VecUnaryOperation) \
  M(VecReduce, VecUnaryOperation) \
  M(VecCnv, VecUnaryOperation) \
  M(VecNeg, VecUnaryOperation) \
  M(VecAbs, VecUnaryOperation) \
  M(VecNot, VecUnaryOperation) \
  M(VecAdd, VecBinaryOperation) \
  M(VecHalvingAdd, VecBinaryOperation) \
  M(VecSub, VecBinaryOperation) \
  M(VecMul, VecBinaryOperation) \
  M(VecDiv, VecBinaryOperation) \
  M(VecMin, VecBinaryOperation) \
  M(VecMax, VecBinaryOperation) \
  M(VecAnd, VecBinaryOperation) \
  M(VecAndNot, VecBinaryOperation) \
  M(VecOr, VecBinaryOperation) \
  M(VecXor, VecBinaryOperation) \
  M(VecSaturationAdd, VecBinaryOperation) \
  M(VecSaturationSub, VecBinaryOperation) \
  M(VecShl, VecBinaryOperation) \
  M(VecShr, VecBinaryOperation) \
  M(VecUShr, VecBinaryOperation) \
  M(VecSetScalars, VecOperation) \
  M(VecMultiplyAccumulate, VecOperation) \
  M(VecSADAccumulate, VecOperation) \
  M(VecDotProd, VecOperation) \
  M(VecLoad, VecMemoryOperation) \
  M(VecStore, VecMemoryOperation) \
  M(VecPredSetAll, VecPredSetOperation) \
  M(VecPredWhile, VecPredSetOperation) \
  M(VecPredCondition, VecOperation)

#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M) \
  FOR_EACH_CONCRETE_INSTRUCTION_SCALAR_COMMON(M) \
  FOR_EACH_CONCRETE_INSTRUCTION_VECTOR_COMMON(M)

/*
 * Instructions, shared across several (not all) architectures.
 */
#if !defined(ART_ENABLE_CODEGEN_arm) && !defined(ART_ENABLE_CODEGEN_arm64)
#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M) \
  M(BitwiseNegatedRight, Instruction) \
  M(DataProcWithShifterOp, Instruction) \
  M(MultiplyAccumulate, Instruction) \
  M(IntermediateAddressIndex, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)

#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)

#ifndef ART_ENABLE_CODEGEN_x86
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M) \
  M(X86ComputeBaseMethodAddress, Instruction) \
  M(X86LoadFromConstantTable, Instruction) \
  M(X86FPNeg, Instruction) \
  M(X86PackedSwitch, Instruction)
#endif

#if defined(ART_ENABLE_CODEGEN_x86) || defined(ART_ENABLE_CODEGEN_x86_64)
#define FOR_EACH_CONCRETE_INSTRUCTION_X86_COMMON(M) \
  M(X86AndNot, Instruction) \
  M(X86MaskOrResetLeastSetBit, Instruction)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_X86_COMMON(M)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

#define FOR_EACH_CONCRETE_INSTRUCTION(M) \
  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M) \
  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M) \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM(M) \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M) \
  FOR_EACH_CONCRETE_INSTRUCTION_X86(M) \
  FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M) \
  FOR_EACH_CONCRETE_INSTRUCTION_X86_COMMON(M)

#define FOR_EACH_ABSTRACT_INSTRUCTION(M) \
  M(Condition, BinaryOperation) \
  M(Constant, Instruction) \
  M(UnaryOperation, Instruction) \
  M(BinaryOperation, Instruction) \
  M(Invoke, Instruction) \
  M(VecOperation, Instruction) \
  M(VecUnaryOperation, VecOperation) \
  M(VecBinaryOperation, VecOperation) \
  M(VecMemoryOperation, VecOperation) \
  M(VecPredSetOperation, VecOperation)

#define FOR_EACH_INSTRUCTION(M) \
  FOR_EACH_CONCRETE_INSTRUCTION(M) \
  FOR_EACH_ABSTRACT_INSTRUCTION(M)

#define FORWARD_DECLARATION(type, super) class H##type;
FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
#undef FORWARD_DECLARATION
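
// For reference (illustration only, not part of the expansion machinery):
// applied to the (Add, BinaryOperation) entry, the X-macro above expands to
//
//   class HAdd;
//
// The `super` argument is unused by FORWARD_DECLARATION but consumed by other
// expansions of FOR_EACH_INSTRUCTION below.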

#define DECLARE_INSTRUCTION(type) \
  private: \
  H##type& operator=(const H##type&) = delete; \
  public: \
  const char* DebugName() const override { return #type; } \
  HInstruction* Clone(ArenaAllocator* arena) const override { \
    DCHECK(IsClonable()); \
    return new (arena) H##type(*this->As##type()); \
  } \
  void Accept(HGraphVisitor* visitor) override

#define DECLARE_ABSTRACT_INSTRUCTION(type) \
  private: \
  H##type& operator=(const H##type&) = delete; \
  public:

#define DEFAULT_COPY_CONSTRUCTOR(type) H##type(const H##type& other) = default;
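
// A sketch of how a concrete instruction class combines these macros
// (illustrative only; the real declarations further down in this file have
// more members):
//
//   class HAdd final : public HBinaryOperation {
//    public:
//     ...
//     bool IsClonable() const override { return true; }
//
//     DECLARE_INSTRUCTION(Add);
//
//    protected:
//     DEFAULT_COPY_CONSTRUCTOR(Add);
//   };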

template <typename T>
class HUseListNode : public ArenaObject<kArenaAllocUseListNode>,
                     public IntrusiveForwardListNode<HUseListNode<T>> {
 public:
  // Get the instruction which has this use as one of the inputs.
  T GetUser() const { return user_; }
  // Get the position of the input record that this use corresponds to.
  size_t GetIndex() const { return index_; }
  // Set the position of the input record that this use corresponds to.
  void SetIndex(size_t index) { index_ = index; }

 private:
  HUseListNode(T user, size_t index)
      : user_(user), index_(index) {}

  T const user_;
  size_t index_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
};

template <typename T>
using HUseList = IntrusiveForwardList<HUseListNode<T>>;

// This class is used by HEnvironment and HInstruction classes to record the
// instructions they use and pointers to the corresponding HUseListNodes kept
// by the used instructions.
template <typename T>
class HUserRecord : public ValueObject {
 public:
  HUserRecord() : instruction_(nullptr), before_use_node_() {}
  explicit HUserRecord(HInstruction* instruction)
      : instruction_(instruction), before_use_node_() {}

  HUserRecord(const HUserRecord<T>& old_record, typename HUseList<T>::iterator before_use_node)
      : HUserRecord(old_record.instruction_, before_use_node) {}
  HUserRecord(HInstruction* instruction, typename HUseList<T>::iterator before_use_node)
      : instruction_(instruction), before_use_node_(before_use_node) {
    DCHECK(instruction_ != nullptr);
  }

  HInstruction* GetInstruction() const { return instruction_; }
  typename HUseList<T>::iterator GetBeforeUseNode() const { return before_use_node_; }
  typename HUseList<T>::iterator GetUseNode() const { return ++GetBeforeUseNode(); }

 private:
  // Instruction used by the user.
  HInstruction* instruction_;

  // Iterator before the corresponding entry in the use list kept by 'instruction_'.
  typename HUseList<T>::iterator before_use_node_;
};

// Helper class that extracts the input instruction from HUserRecord<HInstruction*>.
// This is used for HInstruction::GetInputs() to return a container wrapper providing
// HInstruction* values even though the underlying container has HUserRecord<>s.
struct HInputExtractor {
  HInstruction* operator()(HUserRecord<HInstruction*>& record) const {
    return record.GetInstruction();
  }
  const HInstruction* operator()(const HUserRecord<HInstruction*>& record) const {
    return record.GetInstruction();
  }
};

using HInputsRef = TransformArrayRef<HUserRecord<HInstruction*>, HInputExtractor>;
using HConstInputsRef = TransformArrayRef<const HUserRecord<HInstruction*>, HInputExtractor>;
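
// Example (illustrative): thanks to HInputExtractor, input iteration yields
// HInstruction* directly even though the underlying storage holds
// HUserRecord<>s (see HInstruction::GetInputs() below):
//
//   for (HInstruction* input : instruction->GetInputs()) {
//     // ... use `input` ...
//   }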

/**
 * Side-effects representation.
 *
 * For write/read dependences on fields/arrays, the dependence analysis uses
 * type disambiguation (e.g. a float field write cannot modify the value of an
 * integer field read) and the access type (e.g. a reference array write cannot
 * modify the value of a reference field read [although it may modify the
 * reference fetch prior to reading the field, which is represented by its own
 * write/read dependence]). The analysis makes conservative points-to
 * assumptions on reference types (e.g. two arrays of the same type are assumed
 * to be the same object, and any reference read depends on any reference
 * write, without further regard to its type).
 *
 * kDependsOnGCBit is defined in the following way: instructions with kDependsOnGCBit must not be
 * alive across the point where garbage collection might happen.
 *
 * Note: Instructions with kCanTriggerGCBit do not depend on each other.
 *
 * kCanTriggerGCBit must be used for instructions for which GC might happen on the path across
 * those instructions from the compiler perspective (between this instruction and the next one
 * in the IR).
 *
 * Note: Instructions which can cause GC only on a fatal slow path do not need
 * kCanTriggerGCBit as the execution never returns to the instruction next to the exceptional
 * one. However the execution may return to compiled code if there is a catch block in the
 * current method; for this purpose the TryBoundary exit instruction has kCanTriggerGCBit
 * set.
 *
 * The internal representation uses 38 bits and is described in the table below.
 * The first line indicates the side effect, and for field/array accesses the
 * second line indicates the type of the access (in the order of the
 * DataType::Type enum).
 * The two numbered lines below indicate the bit position in the bitfield (read
 * vertically).
 *
 *   |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
 *   +-------------+---------+---------+--------------+---------+---------+
 *   |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
 *   |      3      |333333322|222222221|       1      |111111110|000000000|
 *   |      7      |654321098|765432109|       8      |765432109|876543210|
 *
 * Note that, to ease the implementation, 'changes' bits are least significant
 * bits, while 'dependency' bits are most significant bits.
 */
class SideEffects : public ValueObject {
 public:
  SideEffects() : flags_(0) {}

  static SideEffects None() {
    return SideEffects(0);
  }

  static SideEffects All() {
    return SideEffects(kAllChangeBits | kAllDependOnBits);
  }

  static SideEffects AllChanges() {
    return SideEffects(kAllChangeBits);
  }

  static SideEffects AllDependencies() {
    return SideEffects(kAllDependOnBits);
  }

  static SideEffects AllExceptGCDependency() {
    return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
  }

  static SideEffects AllWritesAndReads() {
    return SideEffects(kAllWrites | kAllReads);
  }

  static SideEffects AllWrites() {
    return SideEffects(kAllWrites);
  }

  static SideEffects AllReads() {
    return SideEffects(kAllReads);
  }

  static SideEffects FieldWriteOfType(DataType::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlag(type, kFieldWriteOffset));
  }

  static SideEffects ArrayWriteOfType(DataType::Type type) {
    return SideEffects(TypeFlag(type, kArrayWriteOffset));
  }

  static SideEffects FieldReadOfType(DataType::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlag(type, kFieldReadOffset));
  }

  static SideEffects ArrayReadOfType(DataType::Type type) {
    return SideEffects(TypeFlag(type, kArrayReadOffset));
  }

  // Returns the GC side effect: GC might happen across this instruction from
  // the compiler perspective, so the next instruction in the IR could observe it.
  //
  // See the SideEffects class comments.
  static SideEffects CanTriggerGC() {
    return SideEffects(1ULL << kCanTriggerGCBit);
  }

  // Returns the GC dependency: the instruction must not be alive across a GC point.
  //
  // See the SideEffects class comments.
  static SideEffects DependsOnGC() {
    return SideEffects(1ULL << kDependsOnGCBit);
  }

  // Combines the side effects of this and the other.
  SideEffects Union(SideEffects other) const {
    return SideEffects(flags_ | other.flags_);
  }

  SideEffects Exclusion(SideEffects other) const {
    return SideEffects(flags_ & ~other.flags_);
  }

  void Add(SideEffects other) {
    flags_ |= other.flags_;
  }

  bool Includes(SideEffects other) const {
    return (other.flags_ & flags_) == other.flags_;
  }

  bool HasSideEffects() const {
    return (flags_ & kAllChangeBits) != 0u;
  }

  bool HasDependencies() const {
    return (flags_ & kAllDependOnBits) != 0u;
  }

  // Returns true if there are no side effects or dependencies.
  bool DoesNothing() const {
    return flags_ == 0;
  }

  // Returns true if something is written.
  bool DoesAnyWrite() const {
    return (flags_ & kAllWrites) != 0u;
  }

  // Returns true if something is read.
  bool DoesAnyRead() const {
    return (flags_ & kAllReads) != 0u;
  }

  // Returns true if potentially everything is written and read
  // (every type and every kind of access).
  bool DoesAllReadWrite() const {
    return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
  }

  bool DoesAll() const {
    return flags_ == (kAllChangeBits | kAllDependOnBits);
  }

  // Returns true if `this` may read something written by `other`.
  bool MayDependOn(SideEffects other) const {
    const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
    return (other.flags_ & depends_on_flags) != 0u;
  }

  // Returns a string representation of the flags (for debugging only).
  // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
  std::string ToString() const {
    std::string flags = "|";
    for (int s = kLastBit; s >= 0; s--) {
      bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
      if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
        // This is a bit for the GC side effect.
        if (current_bit_is_set) {
          flags += "GC";
        }
        flags += "|";
      } else {
        // This is a bit for the array/field analysis.
        // The underscore character stands for the 'can trigger GC' bit.
        static const char* kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
        if (current_bit_is_set) {
          flags += kDebug[s];
        }
        if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
            (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
          flags += "|";
        }
      }
    }
    return flags;
  }

  bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }

 private:
  static constexpr int kFieldArrayAnalysisBits = 9;

  static constexpr int kFieldWriteOffset = 0;
  static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;

  static constexpr int kChangeBits = kCanTriggerGCBit + 1;

  static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
  static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kDependsOnGCBit = kLastBitForReads + 1;

  static constexpr int kLastBit = kDependsOnGCBit;
  static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;

  // Aliases.

  static_assert(kChangeBits == kDependOnBits,
                "the 'change' bits should match the 'depend on' bits.");

  static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
  static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
  static constexpr uint64_t kAllWrites =
      ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
  static constexpr uint64_t kAllReads =
      ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;

  // Translates type to bit flag. The type must correspond to a Java type.
  static uint64_t TypeFlag(DataType::Type type, int offset) {
    int shift;
    switch (type) {
      case DataType::Type::kReference: shift = 0; break;
      case DataType::Type::kBool:      shift = 1; break;
      case DataType::Type::kInt8:      shift = 2; break;
      case DataType::Type::kUint16:    shift = 3; break;
      case DataType::Type::kInt16:     shift = 4; break;
      case DataType::Type::kInt32:     shift = 5; break;
      case DataType::Type::kInt64:     shift = 6; break;
      case DataType::Type::kFloat32:   shift = 7; break;
      case DataType::Type::kFloat64:   shift = 8; break;
      default:
        LOG(FATAL) << "Unexpected data type " << type;
        UNREACHABLE();
    }
    DCHECK_LE(kFieldWriteOffset, shift);
    DCHECK_LT(shift, kArrayWriteOffset);
    return UINT64_C(1) << (shift + offset);
  }

  // Private constructor on direct flags value.
  explicit SideEffects(uint64_t flags) : flags_(flags) {}

  uint64_t flags_;
};
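
// Example (illustrative): composing and querying side effects. A non-volatile
// int field read may depend on an int field write of the same type, while the
// write depends on nothing:
//
//   SideEffects write =
//       SideEffects::FieldWriteOfType(DataType::Type::kInt32, /* is_volatile= */ false);
//   SideEffects read =
//       SideEffects::FieldReadOfType(DataType::Type::kInt32, /* is_volatile= */ false);
//   CHECK(read.MayDependOn(write));   // Same type and kind of access.
//   CHECK(!write.MayDependOn(read));  // Writes carry no 'depends on' bits here.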

// An HEnvironment object contains the values of virtual registers at a given location.
class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
 public:
  ALWAYS_INLINE HEnvironment(ArenaAllocator* allocator,
                             size_t number_of_vregs,
                             ArtMethod* method,
                             uint32_t dex_pc,
                             HInstruction* holder)
      : vregs_(number_of_vregs, allocator->Adapter(kArenaAllocEnvironmentVRegs)),
        locations_(allocator->Adapter(kArenaAllocEnvironmentLocations)),
        parent_(nullptr),
        method_(method),
        dex_pc_(dex_pc),
        holder_(holder) {
  }

  ALWAYS_INLINE HEnvironment(ArenaAllocator* allocator,
                             const HEnvironment& to_copy,
                             HInstruction* holder)
      : HEnvironment(allocator,
                     to_copy.Size(),
                     to_copy.GetMethod(),
                     to_copy.GetDexPc(),
                     holder) {}

  void AllocateLocations() {
    DCHECK(locations_.empty());
    locations_.resize(vregs_.size());
  }

  void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
    if (parent_ != nullptr) {
      parent_->SetAndCopyParentChain(allocator, parent);
    } else {
      parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
      parent_->CopyFrom(parent);
      if (parent->GetParent() != nullptr) {
        parent_->SetAndCopyParentChain(allocator, parent->GetParent());
      }
    }
  }

  void CopyFrom(ArrayRef<HInstruction* const> locals);
  void CopyFrom(HEnvironment* environment);

  // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
  // input of the loop phi instead. This is for inserting instructions that
  // require an environment (like HDeoptimization) in the loop pre-header.
  void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);

  void SetRawEnvAt(size_t index, HInstruction* instruction) {
    vregs_[index] = HUserRecord<HEnvironment*>(instruction);
  }

  HInstruction* GetInstructionAt(size_t index) const {
    return vregs_[index].GetInstruction();
  }

  void RemoveAsUserOfInput(size_t index) const;

  // Replaces the input at the position `index` with the replacement; the replacement and old
  // input instructions' env_uses_ lists are adjusted. The function works similarly to
  // HInstruction::ReplaceInput.
  void ReplaceInput(HInstruction* replacement, size_t index);

  size_t Size() const { return vregs_.size(); }

  HEnvironment* GetParent() const { return parent_; }

  void SetLocationAt(size_t index, Location location) {
    locations_[index] = location;
  }

  Location GetLocationAt(size_t index) const {
    return locations_[index];
  }

  uint32_t GetDexPc() const {
    return dex_pc_;
  }

  ArtMethod* GetMethod() const {
    return method_;
  }

  HInstruction* GetHolder() const {
    return holder_;
  }

  bool IsFromInlinedInvoke() const {
    return GetParent() != nullptr;
  }

  class EnvInputSelector {
   public:
    explicit EnvInputSelector(const HEnvironment* e) : env_(e) {}
    HInstruction* operator()(size_t s) const {
      return env_->GetInstructionAt(s);
    }
   private:
    const HEnvironment* env_;
  };

  using HConstEnvInputRef = TransformIterator<CountIter, EnvInputSelector>;
  IterationRange<HConstEnvInputRef> GetEnvInputs() const {
    IterationRange<CountIter> range(Range(Size()));
    return MakeIterationRange(MakeTransformIterator(range.begin(), EnvInputSelector(this)),
                              MakeTransformIterator(range.end(), EnvInputSelector(this)));
  }

 private:
  ArenaVector<HUserRecord<HEnvironment*>> vregs_;
  ArenaVector<Location> locations_;
  HEnvironment* parent_;
  ArtMethod* method_;
  const uint32_t dex_pc_;

  // The instruction that holds this environment.
  HInstruction* const holder_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
};
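
// Example (illustrative): copying an environment for a new holder, following
// the same pattern as HInstruction::CopyEnvironmentFrom() below (`new_holder`
// is a hypothetical instruction taking over the environment):
//
//   HEnvironment* copy =
//       new (allocator) HEnvironment(allocator, *env, new_holder);
//   copy->CopyFrom(env);  // Also registers `copy` in the vreg values' use lists.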

std::ostream& operator<<(std::ostream& os, const HInstruction& rhs);

// Iterates over the chain of environments, from the innermost one outwards.
class HEnvironmentIterator : public ValueObject,
                             public std::iterator<std::forward_iterator_tag, HEnvironment*> {
 public:
  explicit HEnvironmentIterator(HEnvironment* cur) : cur_(cur) {}

  HEnvironment* operator*() const {
    return cur_;
  }

  HEnvironmentIterator& operator++() {
    DCHECK(cur_ != nullptr);
    cur_ = cur_->GetParent();
    return *this;
  }

  HEnvironmentIterator operator++(int) {
    HEnvironmentIterator prev(*this);
    ++(*this);
    return prev;
  }

  bool operator==(const HEnvironmentIterator& other) const {
    return other.cur_ == cur_;
  }

  bool operator!=(const HEnvironmentIterator& other) const {
    return !(*this == other);
  }

 private:
  HEnvironment* cur_;
};
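
// Example (illustrative): this iterator backs HInstruction::GetAllEnvironments()
// (declared below), so the whole chain can be walked with a range-based loop:
//
//   for (HEnvironment* env : instruction->GetAllEnvironments()) {
//     ArtMethod* method = env->GetMethod();  // Innermost environment first.
//     // ...
//   }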

class HInstruction : public ArenaObject<kArenaAllocInstruction> {
 public:
#define DECLARE_KIND(type, super) k##type,
  enum InstructionKind {  // Private marker to keep generate-operator-out.py from processing this.
    FOR_EACH_CONCRETE_INSTRUCTION(DECLARE_KIND)
    kLastInstructionKind
  };
#undef DECLARE_KIND

  HInstruction(InstructionKind kind, SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(kind, DataType::Type::kVoid, side_effects, dex_pc) {}

  HInstruction(InstructionKind kind, DataType::Type type, SideEffects side_effects, uint32_t dex_pc)
      : previous_(nullptr),
        next_(nullptr),
        block_(nullptr),
        dex_pc_(dex_pc),
        id_(-1),
        ssa_index_(-1),
        packed_fields_(0u),
        environment_(nullptr),
        locations_(nullptr),
        live_interval_(nullptr),
        lifetime_position_(kNoLifetime),
        side_effects_(side_effects),
        reference_type_handle_(ReferenceTypeInfo::CreateInvalid().GetTypeHandle()) {
    SetPackedField<InstructionKindField>(kind);
    SetPackedField<TypeField>(type);
    SetPackedFlag<kFlagReferenceTypeIsExact>(ReferenceTypeInfo::CreateInvalid().IsExact());
  }

  virtual ~HInstruction() {}

  std::ostream& Dump(std::ostream& os, bool dump_args = false);

  // Helper for dumping without argument information using operator<<.
  struct NoArgsDump {
    const HInstruction* ins;
  };
  NoArgsDump DumpWithoutArgs() const {
    return NoArgsDump{this};
  }
  // Helper for dumping with argument information using operator<<.
  struct ArgsDump {
    const HInstruction* ins;
  };
  ArgsDump DumpWithArgs() const {
    return ArgsDump{this};
  }

  HInstruction* GetNext() const { return next_; }
  HInstruction* GetPrevious() const { return previous_; }

  HInstruction* GetNextDisregardingMoves() const;
  HInstruction* GetPreviousDisregardingMoves() const;

  HBasicBlock* GetBlock() const { return block_; }
  ArenaAllocator* GetAllocator() const { return block_->GetGraph()->GetAllocator(); }
  void SetBlock(HBasicBlock* block) { block_ = block; }
  bool IsInBlock() const { return block_ != nullptr; }
  bool IsInLoop() const { return block_->IsInLoop(); }
  bool IsLoopHeaderPhi() const { return IsPhi() && block_->IsLoopHeader(); }
  bool IsIrreducibleLoopHeaderPhi() const {
    return IsLoopHeaderPhi() && GetBlock()->GetLoopInformation()->IsIrreducible();
  }

  virtual ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() = 0;

  ArrayRef<const HUserRecord<HInstruction*>> GetInputRecords() const {
    // One virtual method is enough, just const_cast<> and then re-add the const.
    return ArrayRef<const HUserRecord<HInstruction*>>(
        const_cast<HInstruction*>(this)->GetInputRecords());
  }

  HInputsRef GetInputs() {
    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
  }

  HConstInputsRef GetInputs() const {
    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
  }

  size_t InputCount() const { return GetInputRecords().size(); }
  HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }

  bool HasInput(HInstruction* input) const {
    for (const HInstruction* i : GetInputs()) {
      if (i == input) {
        return true;
      }
    }
    return false;
  }

  void SetRawInputAt(size_t index, HInstruction* input) {
    SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
  }

  virtual void Accept(HGraphVisitor* visitor) = 0;
  virtual const char* DebugName() const = 0;

  DataType::Type GetType() const {
    return TypeField::Decode(GetPackedFields());
  }

  virtual bool NeedsEnvironment() const { return false; }

  uint32_t GetDexPc() const { return dex_pc_; }

  virtual bool IsControlFlow() const { return false; }

  // Can the instruction throw?
  // TODO: We should rename to CanVisiblyThrow, as some instructions (like HNewInstance)
  // could throw OOME but it is still OK to remove them if they are unused.
  virtual bool CanThrow() const { return false; }

  // Does the instruction unconditionally throw an exception?
  virtual bool AlwaysThrows() const { return false; }
  // Will this instruction only cause async exceptions if it causes any at all?
  virtual bool OnlyThrowsAsyncExceptions() const {
    return false;
  }

  bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }

  bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
  bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }

  // Does not apply to all instructions, but having this at the top level greatly
  // simplifies the null check elimination.
  // TODO: Consider merging can_be_null into ReferenceTypeInfo.
  virtual bool CanBeNull() const {
    DCHECK_EQ(GetType(), DataType::Type::kReference) << "CanBeNull only applies to reference types";
    return true;
  }

  virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
    return false;
  }

  // If this instruction will do an implicit null check, return the `HNullCheck` associated
  // with it. Otherwise return null.
  HNullCheck* GetImplicitNullCheck() const {
    // Go over previous non-move instructions that are emitted at use site.
    HInstruction* prev_not_move = GetPreviousDisregardingMoves();
    while (prev_not_move != nullptr && prev_not_move->IsEmittedAtUseSite()) {
      if (prev_not_move->IsNullCheck()) {
        return prev_not_move->AsNullCheck();
      }
      prev_not_move = prev_not_move->GetPreviousDisregardingMoves();
    }
    return nullptr;
  }

  virtual bool IsActualObject() const {
    return GetType() == DataType::Type::kReference;
  }

  void SetReferenceTypeInfo(ReferenceTypeInfo rti);

  ReferenceTypeInfo GetReferenceTypeInfo() const {
    DCHECK_EQ(GetType(), DataType::Type::kReference);
    return ReferenceTypeInfo::CreateUnchecked(reference_type_handle_,
                                              GetPackedFlag<kFlagReferenceTypeIsExact>());
  }

  void AddUseAt(HInstruction* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: fixup_end remains valid across push_front().
    auto fixup_end = uses_.empty() ? uses_.begin() : ++uses_.begin();
    ArenaAllocator* allocator = user->GetBlock()->GetGraph()->GetAllocator();
    HUseListNode<HInstruction*>* new_node =
        new (allocator) HUseListNode<HInstruction*>(user, index);
    uses_.push_front(*new_node);
    FixUpUserRecordsAfterUseInsertion(fixup_end);
  }

  void AddEnvUseAt(HEnvironment* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: env_fixup_end remains valid across push_front().
    auto env_fixup_end = env_uses_.empty() ? env_uses_.begin() : ++env_uses_.begin();
    HUseListNode<HEnvironment*>* new_node =
        new (GetBlock()->GetGraph()->GetAllocator()) HUseListNode<HEnvironment*>(user, index);
    env_uses_.push_front(*new_node);
    FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);
  }

  void RemoveAsUserOfInput(size_t input) {
    HUserRecord<HInstruction*> input_use = InputRecordAt(input);
    HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
    input_use.GetInstruction()->uses_.erase_after(before_use_node);
    input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
  }

  void RemoveAsUserOfAllInputs() {
    for (const HUserRecord<HInstruction*>& input_use : GetInputRecords()) {
      HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
      input_use.GetInstruction()->uses_.erase_after(before_use_node);
      input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
    }
  }

  const HUseList<HInstruction*>& GetUses() const { return uses_; }
  const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }

  bool HasUses() const { return !uses_.empty() || !env_uses_.empty(); }
  bool HasEnvironmentUses() const { return !env_uses_.empty(); }
  bool HasNonEnvironmentUses() const { return !uses_.empty(); }
  bool HasOnlyOneNonEnvironmentUse() const {
    return !HasEnvironmentUses() && GetUses().HasExactlyOneElement();
  }

  bool IsRemovable() const {
    return
        !DoesAnyWrite() &&
        !CanThrow() &&
        !IsSuspendCheck() &&
        !IsControlFlow() &&
        !IsNativeDebugInfo() &&
        !IsParameterValue() &&
        // If we added an explicit barrier then we should keep it.
        !IsMemoryBarrier() &&
        !IsConstructorFence();
  }

  bool IsDeadAndRemovable() const {
    return IsRemovable() && !HasUses();
  }

  // Does this instruction strictly dominate `other_instruction`?
  // Returns false if this instruction and `other_instruction` are the same.
  // Aborts if this instruction and `other_instruction` are both phis.
  bool StrictlyDominates(HInstruction* other_instruction) const;

  int GetId() const { return id_; }
  void SetId(int id) { id_ = id; }

  int GetSsaIndex() const { return ssa_index_; }
  void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
  bool HasSsaIndex() const { return ssa_index_ != -1; }

  bool HasEnvironment() const { return environment_ != nullptr; }
  HEnvironment* GetEnvironment() const { return environment_; }
  IterationRange<HEnvironmentIterator> GetAllEnvironments() const {
    return MakeIterationRange(HEnvironmentIterator(GetEnvironment()),
                              HEnvironmentIterator(nullptr));
  }
  // Set the `environment_` field. Raw because this method does not
  // update the uses lists.
  void SetRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    environment_ = environment;
  }

  void InsertRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ != nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    DCHECK(environment->GetParent() == nullptr);
    environment->parent_ = environment_;
    environment_ = environment;
  }

  void RemoveEnvironment();

  // Set the environment of this instruction, copying it from `environment`. While
  // copying, the uses lists are being updated.
  void CopyEnvironmentFrom(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetAllocator();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFrom(environment);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
                                                HBasicBlock* block) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetAllocator();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFromWithLoopPhiAdjustment(environment, block);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Returns the number of entries in the environment. Typically, that is the
  // number of dex registers in a method. It could be more in case of inlining.
  size_t EnvironmentSize() const;

  LocationSummary* GetLocations() const { return locations_; }
  void SetLocations(LocationSummary* locations) { locations_ = locations; }

  void ReplaceWith(HInstruction* instruction);
  void ReplaceUsesDominatedBy(HInstruction* dominator, HInstruction* replacement);
  void ReplaceEnvUsesDominatedBy(HInstruction* dominator, HInstruction* replacement);
  void ReplaceInput(HInstruction* replacement, size_t index);

  // This is almost the same as doing `ReplaceWith()`, except that in this helper
  // the uses of this instruction by `other` are *not* updated.
  void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
    ReplaceWith(other);
    other->ReplaceInput(this, use_index);
  }

  // Move `this` instruction before `cursor`.
  void MoveBefore(HInstruction* cursor, bool do_checks = true);

  // Move `this` before its first user and out of any loops. If there is no
  // out-of-loop user that dominates all other users, move the instruction
  // to the end of the out-of-loop common dominator of the users' blocks.
  //
  // This can be used only on non-throwing instructions with no side effects that
  // have at least one use but no environment uses.
  void MoveBeforeFirstUserAndOutOfLoops();

#define INSTRUCTION_TYPE_CHECK(type, super) \
  bool Is##type() const;

  FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

#define INSTRUCTION_TYPE_CAST(type, super) \
  const H##type* As##type() const; \
  H##type* As##type();

  FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CAST)
#undef INSTRUCTION_TYPE_CAST

  // Return a clone of the instruction if it is clonable (shallow copy by default, custom copy
  // if a custom copy constructor is provided for a particular type). If IsClonable() is false
  // for the instruction, then the behaviour of this function is undefined.
  //
  // Note: It is semantically valid to create a clone of the instruction only until the
  // prepare_for_register_allocator phase, as lifetime, intervals and codegen info are not
  // copied.
  //
  // Note: HEnvironment and some other fields are not copied and are set to default values; see
  // 'explicit HInstruction(const HInstruction& other)' for details.
  virtual HInstruction* Clone(ArenaAllocator* arena ATTRIBUTE_UNUSED) const {
    LOG(FATAL) << "Cloning is not implemented for the instruction " <<
                  DebugName() << " " << GetId();
    UNREACHABLE();
  }

  virtual bool IsFieldAccess() const {
    return false;
  }

  virtual const FieldInfo& GetFieldInfo() const {
    CHECK(IsFieldAccess()) << "Only callable on field accessors, not " << DebugName() << " "
                           << *this;
    LOG(FATAL) << "Must be overridden by field accessors. Not implemented by " << *this;
    UNREACHABLE();
  }

  // Return whether the instruction can be cloned (copied).
  virtual bool IsClonable() const { return false; }

  // Returns whether the instruction can be moved within the graph.
  // TODO: this method is used by LICM and GVN with possibly different
  // meanings? split and rename?
  virtual bool CanBeMoved() const { return false; }

  // Returns whether any data encoded in the two instructions is equal.
  // This method does not look at the inputs. Both instructions must be
  // of the same type, otherwise the method has undefined behavior.
  virtual bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether two instructions are equal, that is:
  // 1) They have the same type and contain the same data (InstructionDataEquals).
  // 2) Their inputs are identical.
  bool Equals(const HInstruction* other) const;

  InstructionKind GetKind() const { return GetPackedField<InstructionKindField>(); }

  virtual size_t ComputeHashCode() const {
    size_t result = GetKind();
    for (const HInstruction* input : GetInputs()) {
      result = (result * 31) + input->GetId();
    }
    return result;
  }

  SideEffects GetSideEffects() const { return side_effects_; }
  void SetSideEffects(SideEffects other) { side_effects_ = other; }
  void AddSideEffects(SideEffects other) { side_effects_.Add(other); }

  size_t GetLifetimePosition() const { return lifetime_position_; }
  void SetLifetimePosition(size_t position) { lifetime_position_ = position; }
  LiveInterval* GetLiveInterval() const { return live_interval_; }
  void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; }
  bool HasLiveInterval() const { return live_interval_ != nullptr; }

  bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); }

  // Returns whether the code generation of the instruction will require access
  // to the current method. Such instructions are:
  // (1): Instructions that require an environment, as calling the runtime requires
  //      walking the stack and having the current method stored at a specific stack address.
  // (2): HCurrentMethod, potentially used by HInvokeStaticOrDirect, HLoadString, or HLoadClass
  //      to access the dex cache.
  bool NeedsCurrentMethod() const {
    return NeedsEnvironment() || IsCurrentMethod();
  }

  // Does this instruction have any use in an environment before
  // control flow hits 'other'?
  bool HasAnyEnvironmentUseBefore(HInstruction* other);

  // Remove all references to environment uses of this instruction.
  // The caller must ensure that this is safe to do.
  void RemoveEnvironmentUsers();

  bool IsEmittedAtUseSite() const { return GetPackedFlag<kFlagEmittedAtUseSite>(); }
  void MarkEmittedAtUseSite() { SetPackedFlag<kFlagEmittedAtUseSite>(true); }

 protected:
  // If set, the machine code for this instruction is assumed to be generated by
  // its users. Used by liveness analysis to compute use positions accordingly.
  static constexpr size_t kFlagEmittedAtUseSite = 0u;
  static constexpr size_t kFlagReferenceTypeIsExact = kFlagEmittedAtUseSite + 1;
  static constexpr size_t kFieldInstructionKind = kFlagReferenceTypeIsExact + 1;
  static constexpr size_t kFieldInstructionKindSize =
      MinimumBitsToStore(static_cast<size_t>(InstructionKind::kLastInstructionKind - 1));
  static constexpr size_t kFieldType =
      kFieldInstructionKind + kFieldInstructionKindSize;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
  static constexpr size_t kNumberOfGenericPackedBits = kFieldType + kFieldTypeSize;
  static constexpr size_t kMaxNumberOfPackedBits = sizeof(uint32_t) * kBitsPerByte;

  static_assert(kNumberOfGenericPackedBits <= kMaxNumberOfPackedBits,
                "Too many generic packed fields");

  using TypeField = BitField<DataType::Type, kFieldType, kFieldTypeSize>;

  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const {
    return GetInputRecords()[i];
  }

  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) {
    ArrayRef<HUserRecord<HInstruction*>> input_records = GetInputRecords();
    input_records[index] = input;
  }

  uint32_t GetPackedFields() const {
    return packed_fields_;
  }

  template <size_t flag>
  bool GetPackedFlag() const {
    return (packed_fields_ & (1u << flag)) != 0u;
  }

  template <size_t flag>
  void SetPackedFlag(bool value = true) {
    packed_fields_ = (packed_fields_ & ~(1u << flag)) | ((value ? 1u : 0u) << flag);
  }

  template <typename BitFieldType>
  typename BitFieldType::value_type GetPackedField() const {
    return BitFieldType::Decode(packed_fields_);
  }

  template <typename BitFieldType>
  void SetPackedField(typename BitFieldType::value_type value) {
    DCHECK(IsUint<BitFieldType::size>(static_cast<uintptr_t>(value)));
    packed_fields_ = BitFieldType::Update(value, packed_fields_);
  }
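
  // Example (illustrative): subclasses carve their own flags and fields out of
  // the bits left above kNumberOfGenericPackedBits; `kFlagMyFlag` below is a
  // hypothetical name:
  //
  //   static constexpr size_t kFlagMyFlag = kNumberOfGenericPackedBits;
  //   static constexpr size_t kNumberOfMyPackedBits = kFlagMyFlag + 1;
  //   static_assert(kNumberOfMyPackedBits <= kMaxNumberOfPackedBits,
  //                 "Too many packed fields");
  //   ...
  //   SetPackedFlag<kFlagMyFlag>(true);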

  // Copy construction for the instruction (used for the Clone function).
  //
  // Fields (e.g. lifetime, intervals and codegen info) associated with phases starting from
  // prepare_for_register_allocator are not copied (set to default values).
  //
  // Copy constructors must be provided for every HInstruction type; the default copy constructor
  // is fine for most of them. However, for some of the instructions a custom copy constructor
  // must be specified (when the instruction has non-trivially copyable fields and needs special
  // behaviour for copying them).
  explicit HInstruction(const HInstruction& other)
      : previous_(nullptr),
        next_(nullptr),
        block_(nullptr),
        dex_pc_(other.dex_pc_),
        id_(-1),
        ssa_index_(-1),
        packed_fields_(other.packed_fields_),
        environment_(nullptr),
        locations_(nullptr),
        live_interval_(nullptr),
        lifetime_position_(kNoLifetime),
        side_effects_(other.side_effects_),
        reference_type_handle_(other.reference_type_handle_) {
  }

 private:
  using InstructionKindField =
      BitField<InstructionKind, kFieldInstructionKind, kFieldInstructionKindSize>;

  void FixUpUserRecordsAfterUseInsertion(HUseList<HInstruction*>::iterator fixup_end) {
    auto before_use_node = uses_.before_begin();
    for (auto use_node = uses_.begin(); use_node != fixup_end; ++use_node) {
      HInstruction* user = use_node->GetUser();
      size_t input_index = use_node->GetIndex();
      user->SetRawInputRecordAt(input_index, HUserRecord<HInstruction*>(this, before_use_node));
      before_use_node = use_node;
    }
  }

  void FixUpUserRecordsAfterUseRemoval(HUseList<HInstruction*>::iterator before_use_node) {
    auto next = ++HUseList<HInstruction*>::iterator(before_use_node);
    if (next != uses_.end()) {
      HInstruction* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->InputRecordAt(next_index).GetInstruction() == this);
      next_user->SetRawInputRecordAt(next_index, HUserRecord<HInstruction*>(this, before_use_node));
    }
  }

  void FixUpUserRecordsAfterEnvUseInsertion(HUseList<HEnvironment*>::iterator env_fixup_end) {
    auto before_env_use_node = env_uses_.before_begin();
    for (auto env_use_node = env_uses_.begin(); env_use_node != env_fixup_end; ++env_use_node) {
      HEnvironment* user = env_use_node->GetUser();
      size_t input_index = env_use_node->GetIndex();
      user->vregs_[input_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
      before_env_use_node = env_use_node;
    }
  }

  void FixUpUserRecordsAfterEnvUseRemoval(HUseList<HEnvironment*>::iterator before_env_use_node) {
    auto next = ++HUseList<HEnvironment*>::iterator(before_env_use_node);
    if (next != env_uses_.end()) {
      HEnvironment* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->vregs_[next_index].GetInstruction() == this);
      next_user->vregs_[next_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
    }
  }
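
  // Note on the fixup helpers above: the use lists are singly linked, so each
  // user caches an iterator to the node *before* its own entry
  // (HUserRecord<>::GetBeforeUseNode()). After a push_front() or erase_after(),
  // the helpers re-point the cached iterators of the affected users so that
  // GetUseNode() (== ++GetBeforeUseNode()) remains valid.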
2726
2727 HInstruction* previous_;
2728 HInstruction* next_;
2729 HBasicBlock* block_;
2730 const uint32_t dex_pc_;
2731
2732 // An instruction gets an id when it is added to the graph.
2733 // It reflects creation order. A negative id means the instruction
2734 // has not been added to the graph.
2735 int id_;
2736
2737 // When doing liveness analysis, instructions that have uses get an SSA index.
2738 int ssa_index_;
2739
2740 // Packed fields.
2741 uint32_t packed_fields_;
2742
2743 // List of instructions that have this instruction as input.
2744 HUseList<HInstruction*> uses_;
2745
2746 // List of environments that contain this instruction.
2747 HUseList<HEnvironment*> env_uses_;
2748
2749 // The environment associated with this instruction. Not null if the instruction
2750 // might jump out of the method.
2751 HEnvironment* environment_;
2752
2753 // Set by the code generator.
2754 LocationSummary* locations_;
2755
2756 // Set by the liveness analysis.
2757 LiveInterval* live_interval_;
2758
2759 // Set by the liveness analysis, this is the position in a linear
2760   // order of blocks where this instruction's live interval starts.
2761 size_t lifetime_position_;
2762
2763 SideEffects side_effects_;
2764
2765 // The reference handle part of the reference type info.
2766 // The IsExact() flag is stored in packed fields.
2767 // TODO: for primitive types this should be marked as invalid.
2768 ReferenceTypeInfo::TypeHandle reference_type_handle_;
2769
2770 friend class GraphChecker;
2771 friend class HBasicBlock;
2772 friend class HEnvironment;
2773 friend class HGraph;
2774 friend class HInstructionList;
2775 };
2776
2777 std::ostream& operator<<(std::ostream& os, HInstruction::InstructionKind rhs);
2778 std::ostream& operator<<(std::ostream& os, const HInstruction::NoArgsDump rhs);
2779 std::ostream& operator<<(std::ostream& os, const HInstruction::ArgsDump rhs);
2780 std::ostream& operator<<(std::ostream& os, const HUseList<HInstruction*>& lst);
2781 std::ostream& operator<<(std::ostream& os, const HUseList<HEnvironment*>& lst);
2782
2783 // Forward declarations for friends
2784 template <typename InnerIter> struct HSTLInstructionIterator;
2785
2786 // Iterates over the instructions while preserving the next instruction
2787 // in case the current instruction gets removed from the list by the user
2788 // of this iterator.
2789 class HInstructionIterator : public ValueObject {
2790 public:
2791   explicit HInstructionIterator(const HInstructionList& instructions)
2792 : instruction_(instructions.first_instruction_) {
2793 next_ = Done() ? nullptr : instruction_->GetNext();
2794 }
2795
2796   bool Done() const { return instruction_ == nullptr; }
2797   HInstruction* Current() const { return instruction_; }
2798   void Advance() {
2799 instruction_ = next_;
2800 next_ = Done() ? nullptr : instruction_->GetNext();
2801 }
2802
2803 private:
2804   HInstructionIterator() : instruction_(nullptr), next_(nullptr) {}
2805
2806 HInstruction* instruction_;
2807 HInstruction* next_;
2808
2809 friend struct HSTLInstructionIterator<HInstructionIterator>;
2810 };
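
// A typical use of this iterator (a sketch; `block` is assumed to be an HBasicBlock*):
//
//   for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
//     HInstruction* current = it.Current();
//     // `current` may be removed from the list here without breaking the iteration,
//     // since the iterator has already saved its successor.
//   }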
2811
2812 // Iterates over the instructions without saving the next instruction,
2813 // thereby handling changes in the graph potentially made by the user
2814 // of this iterator.
2815 class HInstructionIteratorHandleChanges : public ValueObject {
2816 public:
2817 explicit HInstructionIteratorHandleChanges(const HInstructionList& instructions)
2818 : instruction_(instructions.first_instruction_) {
2819 }
2820
2821 bool Done() const { return instruction_ == nullptr; }
2822 HInstruction* Current() const { return instruction_; }
2823 void Advance() {
2824 instruction_ = instruction_->GetNext();
2825 }
2826
2827 private:
2828 HInstructionIteratorHandleChanges() : instruction_(nullptr) {}
2829
2830 HInstruction* instruction_;
2831
2832 friend struct HSTLInstructionIterator<HInstructionIteratorHandleChanges>;
2833 };
2834
2835
2836 class HBackwardInstructionIterator : public ValueObject {
2837 public:
2838 explicit HBackwardInstructionIterator(const HInstructionList& instructions)
2839 : instruction_(instructions.last_instruction_) {
2840 next_ = Done() ? nullptr : instruction_->GetPrevious();
2841 }
2842
2843 bool Done() const { return instruction_ == nullptr; }
2844 HInstruction* Current() const { return instruction_; }
2845 void Advance() {
2846 instruction_ = next_;
2847 next_ = Done() ? nullptr : instruction_->GetPrevious();
2848 }
2849
2850 private:
2851 HBackwardInstructionIterator() : instruction_(nullptr), next_(nullptr) {}
2852
2853 HInstruction* instruction_;
2854 HInstruction* next_;
2855
2856 friend struct HSTLInstructionIterator<HBackwardInstructionIterator>;
2857 };
2858
2859 template <typename InnerIter>
2860 struct HSTLInstructionIterator : public ValueObject {
2861  public:
2862   // Iterator traits, spelled out directly because std::iterator is deprecated in C++17.
  using iterator_category = std::forward_iterator_tag;
  using value_type = HInstruction*;
  using difference_type = ptrdiff_t;
  using pointer = value_type*;
  using reference = value_type&;
2863 static_assert(std::is_same_v<InnerIter, HBackwardInstructionIterator> ||
2864 std::is_same_v<InnerIter, HInstructionIterator> ||
2865 std::is_same_v<InnerIter, HInstructionIteratorHandleChanges>,
2866 "Unknown wrapped iterator!");
2867
2868 explicit HSTLInstructionIterator(InnerIter inner) : inner_(inner) {}
2869 HInstruction* operator*() const {
2870 DCHECK(inner_.Current() != nullptr);
2871 return inner_.Current();
2872 }
2873
2874 HSTLInstructionIterator<InnerIter>& operator++() {
2875 DCHECK(*this != HSTLInstructionIterator<InnerIter>::EndIter());
2876 inner_.Advance();
2877 return *this;
2878 }
2879
2880 HSTLInstructionIterator<InnerIter> operator++(int) {
2881 HSTLInstructionIterator<InnerIter> prev(*this);
2882 ++(*this);
2883 return prev;
2884 }
2885
2886 bool operator==(const HSTLInstructionIterator<InnerIter>& other) const {
2887 return inner_.Current() == other.inner_.Current();
2888 }
2889
2890 bool operator!=(const HSTLInstructionIterator<InnerIter>& other) const {
2891 return !(*this == other);
2892 }
2893
2894 static HSTLInstructionIterator<InnerIter> EndIter() {
2895 return HSTLInstructionIterator<InnerIter>(InnerIter());
2896 }
2897
2898 private:
2899 InnerIter inner_;
2900 };
2901
2902 template <typename InnerIter>
2903 IterationRange<HSTLInstructionIterator<InnerIter>> MakeSTLInstructionIteratorRange(InnerIter iter) {
2904 return MakeIterationRange(HSTLInstructionIterator<InnerIter>(iter),
2905 HSTLInstructionIterator<InnerIter>::EndIter());
2906 }
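
// This adapter lets the iterators above drive range-based for loops, e.g. (a sketch,
// with the same assumption about `block` as above):
//
//   for (HInstruction* instruction : MakeSTLInstructionIteratorRange(
//            HInstructionIterator(block->GetInstructions()))) {
//     // ...
//   }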
2907
2908 class HVariableInputSizeInstruction : public HInstruction {
2909 public:
2910 using HInstruction::GetInputRecords; // Keep the const version visible.
2911 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() override {
2912 return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
2913 }
2914
2915 void AddInput(HInstruction* input);
2916 void InsertInputAt(size_t index, HInstruction* input);
2917 void RemoveInputAt(size_t index);
2918
2919 // Removes all the inputs.
2920   // Also removes this instruction from each input's use list
2921 // (for non-environment uses only).
2922 void RemoveAllInputs();
2923
2924 protected:
2925 HVariableInputSizeInstruction(InstructionKind inst_kind,
2926 SideEffects side_effects,
2927 uint32_t dex_pc,
2928 ArenaAllocator* allocator,
2929 size_t number_of_inputs,
2930 ArenaAllocKind kind)
2931 : HInstruction(inst_kind, side_effects, dex_pc),
2932 inputs_(number_of_inputs, allocator->Adapter(kind)) {}
2933 HVariableInputSizeInstruction(InstructionKind inst_kind,
2934 DataType::Type type,
2935 SideEffects side_effects,
2936 uint32_t dex_pc,
2937 ArenaAllocator* allocator,
2938 size_t number_of_inputs,
2939 ArenaAllocKind kind)
2940 : HInstruction(inst_kind, type, side_effects, dex_pc),
2941 inputs_(number_of_inputs, allocator->Adapter(kind)) {}
2942
2943 DEFAULT_COPY_CONSTRUCTOR(VariableInputSizeInstruction);
2944
2945 ArenaVector<HUserRecord<HInstruction*>> inputs_;
2946 };
2947
2948 template<size_t N>
2949 class HExpression : public HInstruction {
2950 public:
2951 HExpression<N>(InstructionKind kind, SideEffects side_effects, uint32_t dex_pc)
2952 : HInstruction(kind, side_effects, dex_pc), inputs_() {}
2953 HExpression<N>(InstructionKind kind,
2954 DataType::Type type,
2955 SideEffects side_effects,
2956 uint32_t dex_pc)
2957 : HInstruction(kind, type, side_effects, dex_pc), inputs_() {}
2958 virtual ~HExpression() {}
2959
2960 using HInstruction::GetInputRecords; // Keep the const version visible.
2961 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
2962 return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
2963 }
2964
2965 protected:
2966 DEFAULT_COPY_CONSTRUCTOR(Expression<N>);
2967
2968 private:
2969 std::array<HUserRecord<HInstruction*>, N> inputs_;
2970
2971 friend class SsaBuilder;
2972 };
2973
2974 // HExpression specialization for N=0.
2975 template<>
2976 class HExpression<0> : public HInstruction {
2977 public:
2978 using HInstruction::HInstruction;
2979
2980 virtual ~HExpression() {}
2981
2982 using HInstruction::GetInputRecords; // Keep the const version visible.
2983 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
2984 return ArrayRef<HUserRecord<HInstruction*>>();
2985 }
2986
2987 protected:
2988 DEFAULT_COPY_CONSTRUCTOR(Expression<0>);
2989
2990 private:
2991 friend class SsaBuilder;
2992 };
2993
2994 // Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
2995 // instruction that branches to the exit block.
2996 class HReturnVoid final : public HExpression<0> {
2997 public:
2998 explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
2999 : HExpression(kReturnVoid, SideEffects::None(), dex_pc) {
3000 }
3001
3002 bool IsControlFlow() const override { return true; }
3003
3004 DECLARE_INSTRUCTION(ReturnVoid);
3005
3006 protected:
3007 DEFAULT_COPY_CONSTRUCTOR(ReturnVoid);
3008 };
3009
3010 // Represents dex's RETURN opcodes. A HReturn is a control flow
3011 // instruction that branches to the exit block.
3012 class HReturn final : public HExpression<1> {
3013 public:
3014 explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
3015 : HExpression(kReturn, SideEffects::None(), dex_pc) {
3016 SetRawInputAt(0, value);
3017 }
3018
3019 bool IsControlFlow() const override { return true; }
3020
3021 DECLARE_INSTRUCTION(Return);
3022
3023 protected:
3024 DEFAULT_COPY_CONSTRUCTOR(Return);
3025 };
3026
3027 class HPhi final : public HVariableInputSizeInstruction {
3028 public:
3029 HPhi(ArenaAllocator* allocator,
3030 uint32_t reg_number,
3031 size_t number_of_inputs,
3032 DataType::Type type,
3033 uint32_t dex_pc = kNoDexPc)
3034 : HVariableInputSizeInstruction(
3035 kPhi,
3036 ToPhiType(type),
3037 SideEffects::None(),
3038 dex_pc,
3039 allocator,
3040 number_of_inputs,
3041 kArenaAllocPhiInputs),
3042 reg_number_(reg_number) {
3043 DCHECK_NE(GetType(), DataType::Type::kVoid);
3044 // Phis are constructed live and marked dead if conflicting or unused.
3045 // Individual steps of SsaBuilder should assume that if a phi has been
3046 // marked dead, it can be ignored and will be removed by SsaPhiElimination.
3047 SetPackedFlag<kFlagIsLive>(true);
3048 SetPackedFlag<kFlagCanBeNull>(true);
3049 }
3050
3051 bool IsClonable() const override { return true; }
3052
3053   // Returns a type equivalent to the given `type`, but one that a `HPhi` can hold.
3054 static DataType::Type ToPhiType(DataType::Type type) {
3055 return DataType::Kind(type);
3056 }
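
  // For example, DataType::Kind() (used by ToPhiType() above) folds the narrow
  // integral types (kBool, kInt8, kUint16, ...) into kInt32, since a phi only
  // needs to track the register kind of its value.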
3057
3058 bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }
3059
3060 void SetType(DataType::Type new_type) {
3061 // Make sure that only valid type changes occur. The following are allowed:
3062 // (1) int -> float/ref (primitive type propagation),
3063 // (2) long -> double (primitive type propagation).
3064 DCHECK(GetType() == new_type ||
3065 (GetType() == DataType::Type::kInt32 && new_type == DataType::Type::kFloat32) ||
3066 (GetType() == DataType::Type::kInt32 && new_type == DataType::Type::kReference) ||
3067 (GetType() == DataType::Type::kInt64 && new_type == DataType::Type::kFloat64));
3068 SetPackedField<TypeField>(new_type);
3069 }
3070
3071 bool CanBeNull() const override { return GetPackedFlag<kFlagCanBeNull>(); }
3072 void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }
3073
3074 uint32_t GetRegNumber() const { return reg_number_; }
3075
3076 void SetDead() { SetPackedFlag<kFlagIsLive>(false); }
3077 void SetLive() { SetPackedFlag<kFlagIsLive>(true); }
3078 bool IsDead() const { return !IsLive(); }
3079 bool IsLive() const { return GetPackedFlag<kFlagIsLive>(); }
3080
3081 bool IsVRegEquivalentOf(const HInstruction* other) const {
3082 return other != nullptr
3083 && other->IsPhi()
3084 && other->AsPhi()->GetBlock() == GetBlock()
3085 && other->AsPhi()->GetRegNumber() == GetRegNumber();
3086 }
3087
3088 bool HasEquivalentPhi() const {
3089 if (GetPrevious() != nullptr && GetPrevious()->AsPhi()->GetRegNumber() == GetRegNumber()) {
3090 return true;
3091 }
3092 if (GetNext() != nullptr && GetNext()->AsPhi()->GetRegNumber() == GetRegNumber()) {
3093 return true;
3094 }
3095 return false;
3096 }
3097
3098 // Returns the next equivalent phi (starting from the current one) or null if there is none.
3099 // An equivalent phi is a phi having the same dex register and type.
3100 // It assumes that phis with the same dex register are adjacent.
3101 HPhi* GetNextEquivalentPhiWithSameType() {
3102 HInstruction* next = GetNext();
3103 while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
3104 if (next->GetType() == GetType()) {
3105 return next->AsPhi();
3106 }
3107 next = next->GetNext();
3108 }
3109 return nullptr;
3110 }
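
  // For example, a dex register used both as an int and as a float gets two adjacent
  // phis with the same register number, one of type kInt32 and one of type kFloat32;
  // GetNextEquivalentPhiWithSameType() above selects the equivalent of the wanted type.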
3111
3112 DECLARE_INSTRUCTION(Phi);
3113
3114 protected:
3115 DEFAULT_COPY_CONSTRUCTOR(Phi);
3116
3117 private:
3118 static constexpr size_t kFlagIsLive = HInstruction::kNumberOfGenericPackedBits;
3119 static constexpr size_t kFlagCanBeNull = kFlagIsLive + 1;
3120 static constexpr size_t kNumberOfPhiPackedBits = kFlagCanBeNull + 1;
3121 static_assert(kNumberOfPhiPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
3122
3123 const uint32_t reg_number_;
3124 };
3125
3126 // The exit instruction is the only instruction of the exit block.
3127 // Instructions exiting the method (HThrow and HReturn) must branch to the
3128 // exit block.
3129 class HExit final : public HExpression<0> {
3130 public:
3131 explicit HExit(uint32_t dex_pc = kNoDexPc)
3132 : HExpression(kExit, SideEffects::None(), dex_pc) {
3133 }
3134
3135 bool IsControlFlow() const override { return true; }
3136
3137 DECLARE_INSTRUCTION(Exit);
3138
3139 protected:
3140 DEFAULT_COPY_CONSTRUCTOR(Exit);
3141 };
3142
3143 // Jumps from one block to another.
3144 class HGoto final : public HExpression<0> {
3145 public:
3146 explicit HGoto(uint32_t dex_pc = kNoDexPc)
3147 : HExpression(kGoto, SideEffects::None(), dex_pc) {
3148 }
3149
3150 bool IsClonable() const override { return true; }
3151 bool IsControlFlow() const override { return true; }
3152
3153 HBasicBlock* GetSuccessor() const {
3154 return GetBlock()->GetSingleSuccessor();
3155 }
3156
3157 DECLARE_INSTRUCTION(Goto);
3158
3159 protected:
3160 DEFAULT_COPY_CONSTRUCTOR(Goto);
3161 };
3162
3163 class HConstant : public HExpression<0> {
3164 public:
3165 explicit HConstant(InstructionKind kind, DataType::Type type, uint32_t dex_pc = kNoDexPc)
3166 : HExpression(kind, type, SideEffects::None(), dex_pc) {
3167 }
3168
3169 bool CanBeMoved() const override { return true; }
3170
3171 // Is this constant -1 in the arithmetic sense?
3172 virtual bool IsMinusOne() const { return false; }
3173 // Is this constant 0 in the arithmetic sense?
3174 virtual bool IsArithmeticZero() const { return false; }
3175 // Is this constant a 0-bit pattern?
3176 virtual bool IsZeroBitPattern() const { return false; }
3177 // Is this constant 1 in the arithmetic sense?
3178 virtual bool IsOne() const { return false; }
3179
3180 virtual uint64_t GetValueAsUint64() const = 0;
3181
3182 DECLARE_ABSTRACT_INSTRUCTION(Constant);
3183
3184 protected:
3185 DEFAULT_COPY_CONSTRUCTOR(Constant);
3186 };
3187
3188 class HNullConstant final : public HConstant {
3189 public:
3190 bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
3191 return true;
3192 }
3193
3194 uint64_t GetValueAsUint64() const override { return 0; }
3195
3196 size_t ComputeHashCode() const override { return 0; }
3197
3198 // The null constant representation is a 0-bit pattern.
3199 bool IsZeroBitPattern() const override { return true; }
3200
3201 DECLARE_INSTRUCTION(NullConstant);
3202
3203 protected:
3204 DEFAULT_COPY_CONSTRUCTOR(NullConstant);
3205
3206 private:
3207 explicit HNullConstant(uint32_t dex_pc = kNoDexPc)
3208 : HConstant(kNullConstant, DataType::Type::kReference, dex_pc) {
3209 }
3210
3211 friend class HGraph;
3212 };
3213
3214 // Constants of the type int. These can come from Dex instructions or be
3215 // synthesized (for example, by the if-eqz instruction).
3216 class HIntConstant final : public HConstant {
3217 public:
3218 int32_t GetValue() const { return value_; }
3219
3220 uint64_t GetValueAsUint64() const override {
3221 return static_cast<uint64_t>(static_cast<uint32_t>(value_));
3222 }
3223
3224 bool InstructionDataEquals(const HInstruction* other) const override {
3225 DCHECK(other->IsIntConstant()) << other->DebugName();
3226 return other->AsIntConstant()->value_ == value_;
3227 }
3228
3229 size_t ComputeHashCode() const override { return GetValue(); }
3230
3231 bool IsMinusOne() const override { return GetValue() == -1; }
3232 bool IsArithmeticZero() const override { return GetValue() == 0; }
3233 bool IsZeroBitPattern() const override { return GetValue() == 0; }
3234 bool IsOne() const override { return GetValue() == 1; }
3235
3236 // Integer constants are used to encode Boolean values as well,
3237 // where 1 means true and 0 means false.
3238 bool IsTrue() const { return GetValue() == 1; }
3239 bool IsFalse() const { return GetValue() == 0; }
3240
3241 DECLARE_INSTRUCTION(IntConstant);
3242
3243 protected:
3244 DEFAULT_COPY_CONSTRUCTOR(IntConstant);
3245
3246 private:
3247 explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
3248 : HConstant(kIntConstant, DataType::Type::kInt32, dex_pc), value_(value) {
3249 }
3250 explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
3251 : HConstant(kIntConstant, DataType::Type::kInt32, dex_pc),
3252 value_(value ? 1 : 0) {
3253 }
3254
3255 const int32_t value_;
3256
3257 friend class HGraph;
3258 ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
3259 ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
3260 };
3261
3262 class HLongConstant final : public HConstant {
3263 public:
3264 int64_t GetValue() const { return value_; }
3265
3266 uint64_t GetValueAsUint64() const override { return value_; }
3267
3268 bool InstructionDataEquals(const HInstruction* other) const override {
3269 DCHECK(other->IsLongConstant()) << other->DebugName();
3270 return other->AsLongConstant()->value_ == value_;
3271 }
3272
3273 size_t ComputeHashCode() const override { return static_cast<size_t>(GetValue()); }
3274
3275 bool IsMinusOne() const override { return GetValue() == -1; }
3276 bool IsArithmeticZero() const override { return GetValue() == 0; }
3277 bool IsZeroBitPattern() const override { return GetValue() == 0; }
3278 bool IsOne() const override { return GetValue() == 1; }
3279
3280 DECLARE_INSTRUCTION(LongConstant);
3281
3282 protected:
3283 DEFAULT_COPY_CONSTRUCTOR(LongConstant);
3284
3285 private:
3286 explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
3287 : HConstant(kLongConstant, DataType::Type::kInt64, dex_pc),
3288 value_(value) {
3289 }
3290
3291 const int64_t value_;
3292
3293 friend class HGraph;
3294 };
3295
3296 class HFloatConstant final : public HConstant {
3297 public:
3298 float GetValue() const { return value_; }
3299
3300 uint64_t GetValueAsUint64() const override {
3301 return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_));
3302 }
3303
3304 bool InstructionDataEquals(const HInstruction* other) const override {
3305 DCHECK(other->IsFloatConstant()) << other->DebugName();
3306 return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64();
3307 }
3308
3309 size_t ComputeHashCode() const override { return static_cast<size_t>(GetValue()); }
3310
3311 bool IsMinusOne() const override {
3312 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f));
3313 }
3314 bool IsArithmeticZero() const override {
3315 return std::fpclassify(value_) == FP_ZERO;
3316 }
3317 bool IsArithmeticPositiveZero() const {
3318 return IsArithmeticZero() && !std::signbit(value_);
3319 }
3320 bool IsArithmeticNegativeZero() const {
3321 return IsArithmeticZero() && std::signbit(value_);
3322 }
3323 bool IsZeroBitPattern() const override {
3324 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(0.0f);
3325 }
3326 bool IsOne() const override {
3327 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f);
3328 }
3329 bool IsNaN() const {
3330 return std::isnan(value_);
3331 }
3332
3333 DECLARE_INSTRUCTION(FloatConstant);
3334
3335 protected:
3336 DEFAULT_COPY_CONSTRUCTOR(FloatConstant);
3337
3338 private:
3339 explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc)
3340 : HConstant(kFloatConstant, DataType::Type::kFloat32, dex_pc),
3341 value_(value) {
3342 }
3343 explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
3344 : HConstant(kFloatConstant, DataType::Type::kFloat32, dex_pc),
3345 value_(bit_cast<float, int32_t>(value)) {
3346 }
3347
3348 const float value_;
3349
3350 // Only the SsaBuilder and HGraph can create floating-point constants.
3351 friend class SsaBuilder;
3352 friend class HGraph;
3353 };
3354
3355 class HDoubleConstant final : public HConstant {
3356 public:
3357 double GetValue() const { return value_; }
3358
3359 uint64_t GetValueAsUint64() const override { return bit_cast<uint64_t, double>(value_); }
3360
3361 bool InstructionDataEquals(const HInstruction* other) const override {
3362 DCHECK(other->IsDoubleConstant()) << other->DebugName();
3363 return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64();
3364 }
3365
3366 size_t ComputeHashCode() const override { return static_cast<size_t>(GetValue()); }
3367
3368 bool IsMinusOne() const override {
3369 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0));
3370 }
3371 bool IsArithmeticZero() const override {
3372 return std::fpclassify(value_) == FP_ZERO;
3373 }
3374 bool IsArithmeticPositiveZero() const {
3375 return IsArithmeticZero() && !std::signbit(value_);
3376 }
3377 bool IsArithmeticNegativeZero() const {
3378 return IsArithmeticZero() && std::signbit(value_);
3379 }
3380 bool IsZeroBitPattern() const override {
3381 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((0.0));
3382 }
3383 bool IsOne() const override {
3384 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0);
3385 }
3386 bool IsNaN() const {
3387 return std::isnan(value_);
3388 }
3389
3390 DECLARE_INSTRUCTION(DoubleConstant);
3391
3392 protected:
3393 DEFAULT_COPY_CONSTRUCTOR(DoubleConstant);
3394
3395 private:
3396 explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc)
3397 : HConstant(kDoubleConstant, DataType::Type::kFloat64, dex_pc),
3398 value_(value) {
3399 }
3400 explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
3401 : HConstant(kDoubleConstant, DataType::Type::kFloat64, dex_pc),
3402 value_(bit_cast<double, int64_t>(value)) {
3403 }
3404
3405 const double value_;
3406
3407 // Only the SsaBuilder and HGraph can create floating-point constants.
3408 friend class SsaBuilder;
3409 friend class HGraph;
3410 };
3411
3412 // Conditional branch. A block ending with an HIf instruction must have
3413 // two successors.
3414 class HIf final : public HExpression<1> {
3415 public:
3416 explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
3417 : HExpression(kIf, SideEffects::None(), dex_pc) {
3418 SetRawInputAt(0, input);
3419 }
3420
3421 bool IsClonable() const override { return true; }
3422 bool IsControlFlow() const override { return true; }
3423
3424 HBasicBlock* IfTrueSuccessor() const {
3425 return GetBlock()->GetSuccessors()[0];
3426 }
3427
3428 HBasicBlock* IfFalseSuccessor() const {
3429 return GetBlock()->GetSuccessors()[1];
3430 }
3431
3432 DECLARE_INSTRUCTION(If);
3433
3434 protected:
3435 DEFAULT_COPY_CONSTRUCTOR(If);
3436 };
3437
3438
3439 // Abstract instruction which marks the beginning and/or end of a try block and
3440 // links it to the respective exception handlers. Behaves the same as a Goto in
3441 // non-exceptional control flow.
3442 // The normal-flow successor is stored at index zero; exception handlers are stored
3443 // at higher indices in no particular order.
3444 class HTryBoundary final : public HExpression<0> {
3445 public:
3446 enum class BoundaryKind {
3447 kEntry,
3448 kExit,
3449 kLast = kExit
3450 };
3451
3452   // SideEffects::CanTriggerGC prevents instructions with SideEffects::DependsOnGC from being
3453   // alive across catch block entry edges, as GC might happen while an exception is thrown.
3454   // A TryBoundary with BoundaryKind::kExit is conservatively used for this purpose, as
3455   // there is no HInstruction that a catch block is required to start with.
3456 explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
3457 : HExpression(kTryBoundary,
3458 (kind == BoundaryKind::kExit) ? SideEffects::CanTriggerGC()
3459 : SideEffects::None(),
3460 dex_pc) {
3461 SetPackedField<BoundaryKindField>(kind);
3462 }
3463
3464 bool IsControlFlow() const override { return true; }
3465
3466 // Returns the block's non-exceptional successor (index zero).
3467 HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }
3468
3469 ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
3470 return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
3471 }
3472
3473 // Returns whether `handler` is among its exception handlers (non-zero index
3474 // successors).
3475 bool HasExceptionHandler(const HBasicBlock& handler) const {
3476 DCHECK(handler.IsCatchBlock());
3477 return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
3478 }
3479
3480 // If not present already, adds `handler` to its block's list of exception
3481 // handlers.
3482 void AddExceptionHandler(HBasicBlock* handler) {
3483 if (!HasExceptionHandler(*handler)) {
3484 GetBlock()->AddSuccessor(handler);
3485 }
3486 }
3487
3488 BoundaryKind GetBoundaryKind() const { return GetPackedField<BoundaryKindField>(); }
3489 bool IsEntry() const { return GetBoundaryKind() == BoundaryKind::kEntry; }
3490
3491 bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;
3492
3493 DECLARE_INSTRUCTION(TryBoundary);
3494
3495 protected:
3496 DEFAULT_COPY_CONSTRUCTOR(TryBoundary);
3497
3498 private:
3499 static constexpr size_t kFieldBoundaryKind = kNumberOfGenericPackedBits;
3500 static constexpr size_t kFieldBoundaryKindSize =
3501 MinimumBitsToStore(static_cast<size_t>(BoundaryKind::kLast));
3502 static constexpr size_t kNumberOfTryBoundaryPackedBits =
3503 kFieldBoundaryKind + kFieldBoundaryKindSize;
3504 static_assert(kNumberOfTryBoundaryPackedBits <= kMaxNumberOfPackedBits,
3505 "Too many packed fields.");
3506 using BoundaryKindField = BitField<BoundaryKind, kFieldBoundaryKind, kFieldBoundaryKindSize>;
3507 };
3508
3509 // Deoptimizes to the interpreter upon checking a condition.
3510 class HDeoptimize final : public HVariableInputSizeInstruction {
3511 public:
3512   // Use this constructor when the `HDeoptimize` acts as a barrier across which no code
3513   // can move.
3514 HDeoptimize(ArenaAllocator* allocator,
3515 HInstruction* cond,
3516 DeoptimizationKind kind,
3517 uint32_t dex_pc)
3518 : HVariableInputSizeInstruction(
3519 kDeoptimize,
3520 SideEffects::All(),
3521 dex_pc,
3522 allocator,
3523 /* number_of_inputs= */ 1,
3524 kArenaAllocMisc) {
3525 SetPackedFlag<kFieldCanBeMoved>(false);
3526 SetPackedField<DeoptimizeKindField>(kind);
3527 SetRawInputAt(0, cond);
3528 }
3529
3530 bool IsClonable() const override { return true; }
3531
3532 // Use this constructor when the `HDeoptimize` guards an instruction, and any user
3533 // that relies on the deoptimization to pass should have its input be the `HDeoptimize`
3534 // instead of `guard`.
3535   // We set CanTriggerGC to prevent any intermediate address from being live
3536   // at the point of the `HDeoptimize`.
3537 HDeoptimize(ArenaAllocator* allocator,
3538 HInstruction* cond,
3539 HInstruction* guard,
3540 DeoptimizationKind kind,
3541 uint32_t dex_pc)
3542 : HVariableInputSizeInstruction(
3543 kDeoptimize,
3544 guard->GetType(),
3545 SideEffects::CanTriggerGC(),
3546 dex_pc,
3547 allocator,
3548 /* number_of_inputs= */ 2,
3549 kArenaAllocMisc) {
3550 SetPackedFlag<kFieldCanBeMoved>(true);
3551 SetPackedField<DeoptimizeKindField>(kind);
3552 SetRawInputAt(0, cond);
3553 SetRawInputAt(1, guard);
3554 }
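
  // A sketch of the guarded form: if `guard` is, say, an array length whose bounds
  // checks were eliminated based on `cond`, the users read the length through this
  // HDeoptimize (see GuardedInput()), so they cannot be moved above the runtime check.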
3555
3556 bool CanBeMoved() const override { return GetPackedFlag<kFieldCanBeMoved>(); }
3557
3558 bool InstructionDataEquals(const HInstruction* other) const override {
3559 return (other->CanBeMoved() == CanBeMoved()) && (other->AsDeoptimize()->GetKind() == GetKind());
3560 }
3561
3562 bool NeedsEnvironment() const override { return true; }
3563
3564 bool CanThrow() const override { return true; }
3565
3566 DeoptimizationKind GetDeoptimizationKind() const { return GetPackedField<DeoptimizeKindField>(); }
3567
3568 bool GuardsAnInput() const {
3569 return InputCount() == 2;
3570 }
3571
3572 HInstruction* GuardedInput() const {
3573 DCHECK(GuardsAnInput());
3574 return InputAt(1);
3575 }
3576
3577 void RemoveGuard() {
3578 RemoveInputAt(1);
3579 }
3580
3581 DECLARE_INSTRUCTION(Deoptimize);
3582
3583 protected:
3584 DEFAULT_COPY_CONSTRUCTOR(Deoptimize);
3585
3586 private:
3587 static constexpr size_t kFieldCanBeMoved = kNumberOfGenericPackedBits;
3588 static constexpr size_t kFieldDeoptimizeKind = kNumberOfGenericPackedBits + 1;
3589 static constexpr size_t kFieldDeoptimizeKindSize =
3590 MinimumBitsToStore(static_cast<size_t>(DeoptimizationKind::kLast));
3591 static constexpr size_t kNumberOfDeoptimizePackedBits =
3592 kFieldDeoptimizeKind + kFieldDeoptimizeKindSize;
3593 static_assert(kNumberOfDeoptimizePackedBits <= kMaxNumberOfPackedBits,
3594 "Too many packed fields.");
3595 using DeoptimizeKindField =
3596 BitField<DeoptimizationKind, kFieldDeoptimizeKind, kFieldDeoptimizeKindSize>;
3597 };
3598
3599 // Represents a should_deoptimize flag. Currently used for CHA-based devirtualization.
3600 // The compiled code checks this flag value in a guard before a devirtualized call
3601 // and, if it is true, deoptimizes.
3602 // The flag has a 4-byte slot on the stack.
3603 // TODO: allocate a register for this flag.
3604 class HShouldDeoptimizeFlag final : public HVariableInputSizeInstruction {
3605 public:
3606   // CHA guards are only optimized in a separate pass, and this instruction has no side
3607   // effects with regard to other passes.
3608 HShouldDeoptimizeFlag(ArenaAllocator* allocator, uint32_t dex_pc)
3609 : HVariableInputSizeInstruction(kShouldDeoptimizeFlag,
3610 DataType::Type::kInt32,
3611 SideEffects::None(),
3612 dex_pc,
3613 allocator,
3614 0,
3615 kArenaAllocCHA) {
3616 }
3617
3618   // We do all CHA guard elimination/motion in a single pass, after which there is no
3619   // further guard elimination/motion, since a guard might have been used to justify the
3620   // elimination of another guard. Therefore, we pretend this guard cannot be moved to
3621   // avoid other optimizations trying to move it.
3622 bool CanBeMoved() const override { return false; }
3623
3624 DECLARE_INSTRUCTION(ShouldDeoptimizeFlag);
3625
3626 protected:
3627 DEFAULT_COPY_CONSTRUCTOR(ShouldDeoptimizeFlag);
3628 };
3629
3630 // Represents the ArtMethod that was passed as the first argument to
3631 // the method. It is used by instructions that depend on it, like
3632 // instructions that work with the dex cache.
3633 class HCurrentMethod final : public HExpression<0> {
3634 public:
3635 explicit HCurrentMethod(DataType::Type type, uint32_t dex_pc = kNoDexPc)
3636 : HExpression(kCurrentMethod, type, SideEffects::None(), dex_pc) {
3637 }
3638
3639 DECLARE_INSTRUCTION(CurrentMethod);
3640
3641 protected:
3642 DEFAULT_COPY_CONSTRUCTOR(CurrentMethod);
3643 };
3644
3645 // Fetches an ArtMethod from the virtual table or the interface method table
3646 // of a class.
3647 class HClassTableGet final : public HExpression<1> {
3648 public:
3649 enum class TableKind {
3650 kVTable,
3651 kIMTable,
3652 kLast = kIMTable
3653 };
3654 HClassTableGet(HInstruction* cls,
3655 DataType::Type type,
3656 TableKind kind,
3657 size_t index,
3658 uint32_t dex_pc)
3659 : HExpression(kClassTableGet, type, SideEffects::None(), dex_pc),
3660 index_(index) {
3661 SetPackedField<TableKindField>(kind);
3662 SetRawInputAt(0, cls);
3663 }
3664
3665 bool IsClonable() const override { return true; }
3666 bool CanBeMoved() const override { return true; }
3667 bool InstructionDataEquals(const HInstruction* other) const override {
3668 return other->AsClassTableGet()->GetIndex() == index_ &&
3669 other->AsClassTableGet()->GetPackedFields() == GetPackedFields();
3670 }
3671
3672 TableKind GetTableKind() const { return GetPackedField<TableKindField>(); }
3673 size_t GetIndex() const { return index_; }
3674
3675 DECLARE_INSTRUCTION(ClassTableGet);
3676
3677 protected:
3678 DEFAULT_COPY_CONSTRUCTOR(ClassTableGet);
3679
3680 private:
3681 static constexpr size_t kFieldTableKind = kNumberOfGenericPackedBits;
3682 static constexpr size_t kFieldTableKindSize =
3683 MinimumBitsToStore(static_cast<size_t>(TableKind::kLast));
3684 static constexpr size_t kNumberOfClassTableGetPackedBits = kFieldTableKind + kFieldTableKindSize;
3685 static_assert(kNumberOfClassTableGetPackedBits <= kMaxNumberOfPackedBits,
3686 "Too many packed fields.");
3687   using TableKindField = BitField<TableKind, kFieldTableKind, kFieldTableKindSize>;
3688
3689 // The index of the ArtMethod in the table.
3690 const size_t index_;
3691 };
3692
3693 // PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
3694 // have one successor for each entry in the switch table, and the final successor
3695 // will be the block containing the next Dex opcode.
3696 class HPackedSwitch final : public HExpression<1> {
3697 public:
3698 HPackedSwitch(int32_t start_value,
3699 uint32_t num_entries,
3700 HInstruction* input,
3701 uint32_t dex_pc = kNoDexPc)
3702 : HExpression(kPackedSwitch, SideEffects::None(), dex_pc),
3703 start_value_(start_value),
3704 num_entries_(num_entries) {
3705 SetRawInputAt(0, input);
3706 }
3707
3708 bool IsClonable() const override { return true; }
3709
3710 bool IsControlFlow() const override { return true; }
3711
3712 int32_t GetStartValue() const { return start_value_; }
3713
3714 uint32_t GetNumEntries() const { return num_entries_; }
3715
3716 HBasicBlock* GetDefaultBlock() const {
3717 // Last entry is the default block.
3718 return GetBlock()->GetSuccessors()[num_entries_];
3719 }
3720 DECLARE_INSTRUCTION(PackedSwitch);
3721
3722 protected:
3723 DEFAULT_COPY_CONSTRUCTOR(PackedSwitch);
3724
3725 private:
3726 const int32_t start_value_;
3727 const uint32_t num_entries_;
3728 };
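
// For example, a packed-switch with start_value_ == 10 and num_entries_ == 3 routes
// input values 10, 11 and 12 to successors 0, 1 and 2 respectively; any other value
// goes to the last successor, GetDefaultBlock().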
3729
3730 class HUnaryOperation : public HExpression<1> {
3731 public:
3732 HUnaryOperation(InstructionKind kind,
3733 DataType::Type result_type,
3734 HInstruction* input,
3735 uint32_t dex_pc = kNoDexPc)
3736 : HExpression(kind, result_type, SideEffects::None(), dex_pc) {
3737 SetRawInputAt(0, input);
3738 }
3739
3740 // All of the UnaryOperation instructions are clonable.
3741 bool IsClonable() const override { return true; }
3742
3743 HInstruction* GetInput() const { return InputAt(0); }
3744 DataType::Type GetResultType() const { return GetType(); }
3745
3746 bool CanBeMoved() const override { return true; }
3747 bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
3748 return true;
3749 }
3750
3751 // Try to statically evaluate `this` and return a HConstant
3752 // containing the result of this evaluation. If `this` cannot
3753 // be evaluated as a constant, return null.
3754 HConstant* TryStaticEvaluation() const;
3755
3756 // Apply this operation to `x`.
3757 virtual HConstant* Evaluate(HIntConstant* x) const = 0;
3758 virtual HConstant* Evaluate(HLongConstant* x) const = 0;
3759 virtual HConstant* Evaluate(HFloatConstant* x) const = 0;
3760 virtual HConstant* Evaluate(HDoubleConstant* x) const = 0;
3761
3762 DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation);
3763
3764 protected:
3765 DEFAULT_COPY_CONSTRUCTOR(UnaryOperation);
3766 };
3767
3768 class HBinaryOperation : public HExpression<2> {
3769 public:
3770 HBinaryOperation(InstructionKind kind,
3771 DataType::Type result_type,
3772 HInstruction* left,
3773 HInstruction* right,
3774 SideEffects side_effects = SideEffects::None(),
3775 uint32_t dex_pc = kNoDexPc)
3776 : HExpression(kind, result_type, side_effects, dex_pc) {
3777 SetRawInputAt(0, left);
3778 SetRawInputAt(1, right);
3779 }
3780
3781 // All of the BinaryOperation instructions are clonable.
3782 bool IsClonable() const override { return true; }
3783
3784 HInstruction* GetLeft() const { return InputAt(0); }
3785 HInstruction* GetRight() const { return InputAt(1); }
3786 DataType::Type GetResultType() const { return GetType(); }
3787
3788 virtual bool IsCommutative() const { return false; }
3789
3790   // Puts a constant on the right.
3791   // Returns whether the order was changed.
3792 bool OrderInputsWithConstantOnTheRight() {
3793 HInstruction* left = InputAt(0);
3794 HInstruction* right = InputAt(1);
3795 if (left->IsConstant() && !right->IsConstant()) {
3796 ReplaceInput(right, 0);
3797 ReplaceInput(left, 1);
3798 return true;
3799 }
3800 return false;
3801 }
3802
3803 // Order inputs by instruction id, but favor constant on the right side.
3804 // This helps GVN for commutative ops.
3805 void OrderInputs() {
3806 DCHECK(IsCommutative());
3807 HInstruction* left = InputAt(0);
3808 HInstruction* right = InputAt(1);
3809 if (left == right || (!left->IsConstant() && right->IsConstant())) {
3810 return;
3811 }
3812 if (OrderInputsWithConstantOnTheRight()) {
3813 return;
3814 }
3815 // Order according to instruction id.
3816 if (left->GetId() > right->GetId()) {
3817 ReplaceInput(right, 0);
3818 ReplaceInput(left, 1);
3819 }
3820 }
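
  // For example, for a commutative `HAdd(b, a)` where `a` has the lower id,
  // OrderInputs() normalizes it to `HAdd(a, b)`, and `HAdd(constant, x)` becomes
  // `HAdd(x, constant)`, so GVN can recognize both forms as the same computation.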
3821
3822 bool CanBeMoved() const override { return true; }
3823 bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
3824 return true;
3825 }
3826
3827 // Try to statically evaluate `this` and return a HConstant
3828 // containing the result of this evaluation. If `this` cannot
3829 // be evaluated as a constant, return null.
3830 HConstant* TryStaticEvaluation() const;
3831
3832 // Apply this operation to `x` and `y`.
3833 virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3834 HNullConstant* y ATTRIBUTE_UNUSED) const {
3835 LOG(FATAL) << DebugName() << " is not defined for the (null, null) case.";
3836 UNREACHABLE();
3837 }
3838 virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
3839 virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
3840 virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
3841 HIntConstant* y ATTRIBUTE_UNUSED) const {
3842 LOG(FATAL) << DebugName() << " is not defined for the (long, int) case.";
3843 UNREACHABLE();
3844 }
3845 virtual HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const = 0;
3846 virtual HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const = 0;
3847
3848 // Returns an input that can legally be used as the right input and is
3849 // constant, or null.
3850 HConstant* GetConstantRight() const;
3851
3852   // If `GetConstantRight()` returns one of the inputs, this returns the other
3853   // one. Otherwise it returns null.
3854 HInstruction* GetLeastConstantLeft() const;
3855
3856 DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation);
3857
3858 protected:
3859 DEFAULT_COPY_CONSTRUCTOR(BinaryOperation);
3860 };
3861
3862 // The comparison bias applies to floating-point operations and indicates how NaN
3863 // comparisons are treated:
3864 enum class ComparisonBias { // private marker to avoid generate-operator-out.py from processing.
3865   kNoBias,  // bias is not applicable (i.e. for long operations)
3866 kGtBias, // return 1 for NaN comparisons
3867 kLtBias, // return -1 for NaN comparisons
3868 kLast = kLtBias
3869 };
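
// For example, the dex cmpg-float instruction corresponds to kGtBias (a NaN operand
// makes the comparison yield 1), while cmpl-float corresponds to kLtBias (a NaN
// operand yields -1).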
3870
3871 std::ostream& operator<<(std::ostream& os, ComparisonBias rhs);
3872
3873 class HCondition : public HBinaryOperation {
3874 public:
3875 HCondition(InstructionKind kind,
3876 HInstruction* first,
3877 HInstruction* second,
3878 uint32_t dex_pc = kNoDexPc)
3879 : HBinaryOperation(kind,
3880 DataType::Type::kBool,
3881 first,
3882 second,
3883 SideEffects::None(),
3884 dex_pc) {
3885 SetPackedField<ComparisonBiasField>(ComparisonBias::kNoBias);
3886 }
3887
3888   // For code generation purposes, returns whether this instruction is just before
3889   // `instruction`, disregarding moves in between.
3890 bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;
3891
3892 DECLARE_ABSTRACT_INSTRUCTION(Condition);
3893
3894 virtual IfCondition GetCondition() const = 0;
3895
3896 virtual IfCondition GetOppositeCondition() const = 0;
3897
3898 bool IsGtBias() const { return GetBias() == ComparisonBias::kGtBias; }
3899 bool IsLtBias() const { return GetBias() == ComparisonBias::kLtBias; }
3900
3901 ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
3902 void SetBias(ComparisonBias bias) { SetPackedField<ComparisonBiasField>(bias); }
3903
3904 bool InstructionDataEquals(const HInstruction* other) const override {
3905 return GetPackedFields() == other->AsCondition()->GetPackedFields();
3906 }
3907
3908 bool IsFPConditionTrueIfNaN() const {
3909 DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3910 IfCondition if_cond = GetCondition();
3911 if (if_cond == kCondNE) {
3912 return true;
3913 } else if (if_cond == kCondEQ) {
3914 return false;
3915 }
3916 return ((if_cond == kCondGT) || (if_cond == kCondGE)) && IsGtBias();
3917 }
3918
3919 bool IsFPConditionFalseIfNaN() const {
3920 DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3921 IfCondition if_cond = GetCondition();
3922 if (if_cond == kCondEQ) {
3923 return true;
3924 } else if (if_cond == kCondNE) {
3925 return false;
3926 }
3927 return ((if_cond == kCondLT) || (if_cond == kCondLE)) && IsGtBias();
3928 }
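
  // For example, for a gt-bias `x <= y` (kCondLE), a NaN operand makes the merged
  // compare yield 1, so `1 <= 0` is false and IsFPConditionFalseIfNaN() returns true.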
3929
3930 protected:
3931 // Needed if we merge a HCompare into a HCondition.
3932 static constexpr size_t kFieldComparisonBias = kNumberOfGenericPackedBits;
3933 static constexpr size_t kFieldComparisonBiasSize =
3934 MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
3935 static constexpr size_t kNumberOfConditionPackedBits =
3936 kFieldComparisonBias + kFieldComparisonBiasSize;
3937 static_assert(kNumberOfConditionPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
3938 using ComparisonBiasField =
3939 BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;
3940
3941 template <typename T>
3942 int32_t Compare(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }
3943
3944 template <typename T>
3945 int32_t CompareFP(T x, T y) const {
3946 DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3947 DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
3948 // Handle the bias.
3949 return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compare(x, y);
3950 }
3951
3952 // Return an integer constant containing the result of a condition evaluated at compile time.
3953 HIntConstant* MakeConstantCondition(bool value, uint32_t dex_pc) const {
3954 return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
3955 }
3956
3957 DEFAULT_COPY_CONSTRUCTOR(Condition);
3958 };
3959
3960 // Instruction to check if two inputs are equal to each other.
3961 class HEqual final : public HCondition {
3962 public:
3963 HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3964 : HCondition(kEqual, first, second, dex_pc) {
3965 }
3966
3967 bool IsCommutative() const override { return true; }
3968
3969 HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3970 HNullConstant* y ATTRIBUTE_UNUSED) const override {
3971 return MakeConstantCondition(true, GetDexPc());
3972 }
3973 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
3974 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3975 }
3976 // In the following Evaluate methods, a HCompare instruction has
3977 // been merged into this HEqual instruction; evaluate it as
3978 // `Compare(x, y) == 0`.
3979 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
3980 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0),
3981 GetDexPc());
3982 }
3983 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
3984 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3985 }
3986 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
3987 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3988 }
3989
3990 DECLARE_INSTRUCTION(Equal);
3991
3992 IfCondition GetCondition() const override {
3993 return kCondEQ;
3994 }
3995
3996 IfCondition GetOppositeCondition() const override {
3997 return kCondNE;
3998 }
3999
4000 protected:
4001 DEFAULT_COPY_CONSTRUCTOR(Equal);
4002
4003 private:
4004 template <typename T> static bool Compute(T x, T y) { return x == y; }
4005 };
4006
4007 class HNotEqual final : public HCondition {
4008 public:
4009 HNotEqual(HInstruction* first, HInstruction* second,
4010 uint32_t dex_pc = kNoDexPc)
4011 : HCondition(kNotEqual, first, second, dex_pc) {
4012 }
4013
4014 bool IsCommutative() const override { return true; }
4015
4016 HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
4017 HNullConstant* y ATTRIBUTE_UNUSED) const override {
4018 return MakeConstantCondition(false, GetDexPc());
4019 }
4020 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4021 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4022 }
4023 // In the following Evaluate methods, a HCompare instruction has
4024 // been merged into this HNotEqual instruction; evaluate it as
4025 // `Compare(x, y) != 0`.
4026 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4027 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
4028 }
4029 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
4030 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
4031 }
4032 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
4033 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
4034 }
4035
4036 DECLARE_INSTRUCTION(NotEqual);
4037
4038 IfCondition GetCondition() const override {
4039 return kCondNE;
4040 }
4041
4042 IfCondition GetOppositeCondition() const override {
4043 return kCondEQ;
4044 }
4045
4046 protected:
4047 DEFAULT_COPY_CONSTRUCTOR(NotEqual);
4048
4049 private:
4050 template <typename T> static bool Compute(T x, T y) { return x != y; }
4051 };
4052
4053 class HLessThan final : public HCondition {
4054 public:
4055 HLessThan(HInstruction* first, HInstruction* second,
4056 uint32_t dex_pc = kNoDexPc)
4057 : HCondition(kLessThan, first, second, dex_pc) {
4058 }
4059
4060 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4061 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4062 }
4063 // In the following Evaluate methods, a HCompare instruction has
4064 // been merged into this HLessThan instruction; evaluate it as
4065 // `Compare(x, y) < 0`.
4066 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4067 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
4068 }
4069 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
4070 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
4071 }
4072 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
4073 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
4074 }
4075
4076 DECLARE_INSTRUCTION(LessThan);
4077
4078 IfCondition GetCondition() const override {
4079 return kCondLT;
4080 }
4081
4082 IfCondition GetOppositeCondition() const override {
4083 return kCondGE;
4084 }
4085
4086 protected:
4087 DEFAULT_COPY_CONSTRUCTOR(LessThan);
4088
4089 private:
4090 template <typename T> static bool Compute(T x, T y) { return x < y; }
4091 };
4092
4093 class HLessThanOrEqual final : public HCondition {
4094 public:
4095 HLessThanOrEqual(HInstruction* first, HInstruction* second,
4096 uint32_t dex_pc = kNoDexPc)
4097 : HCondition(kLessThanOrEqual, first, second, dex_pc) {
4098 }
4099
4100 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4101 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4102 }
4103 // In the following Evaluate methods, a HCompare instruction has
4104 // been merged into this HLessThanOrEqual instruction; evaluate it as
4105 // `Compare(x, y) <= 0`.
4106 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4107 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
4108 }
4109 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
4110 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
4111 }
4112 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
4113 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
4114 }
4115
4116 DECLARE_INSTRUCTION(LessThanOrEqual);
4117
4118 IfCondition GetCondition() const override {
4119 return kCondLE;
4120 }
4121
4122 IfCondition GetOppositeCondition() const override {
4123 return kCondGT;
4124 }
4125
4126 protected:
4127 DEFAULT_COPY_CONSTRUCTOR(LessThanOrEqual);
4128
4129 private:
4130 template <typename T> static bool Compute(T x, T y) { return x <= y; }
4131 };
4132
4133 class HGreaterThan final : public HCondition {
4134 public:
4135 HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
4136 : HCondition(kGreaterThan, first, second, dex_pc) {
4137 }
4138
4139 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4140 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4141 }
4142 // In the following Evaluate methods, a HCompare instruction has
4143 // been merged into this HGreaterThan instruction; evaluate it as
4144 // `Compare(x, y) > 0`.
4145 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4146 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
4147 }
4148 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
4149 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
4150 }
4151 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
4152 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
4153 }
4154
4155 DECLARE_INSTRUCTION(GreaterThan);
4156
4157 IfCondition GetCondition() const override {
4158 return kCondGT;
4159 }
4160
4161 IfCondition GetOppositeCondition() const override {
4162 return kCondLE;
4163 }
4164
4165 protected:
4166 DEFAULT_COPY_CONSTRUCTOR(GreaterThan);
4167
4168 private:
4169 template <typename T> static bool Compute(T x, T y) { return x > y; }
4170 };
4171
4172 class HGreaterThanOrEqual final : public HCondition {
4173 public:
4174 HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
4175 : HCondition(kGreaterThanOrEqual, first, second, dex_pc) {
4176 }
4177
4178 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4179 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4180 }
4181 // In the following Evaluate methods, a HCompare instruction has
4182 // been merged into this HGreaterThanOrEqual instruction; evaluate it as
4183 // `Compare(x, y) >= 0`.
4184 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4185 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
4186 }
4187 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
4188 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
4189 }
4190 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
4191 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
4192 }
4193
4194 DECLARE_INSTRUCTION(GreaterThanOrEqual);
4195
4196 IfCondition GetCondition() const override {
4197 return kCondGE;
4198 }
4199
4200 IfCondition GetOppositeCondition() const override {
4201 return kCondLT;
4202 }
4203
4204 protected:
4205 DEFAULT_COPY_CONSTRUCTOR(GreaterThanOrEqual);
4206
4207 private:
4208 template <typename T> static bool Compute(T x, T y) { return x >= y; }
4209 };
4210
4211 class HBelow final : public HCondition {
4212 public:
4213 HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
4214 : HCondition(kBelow, first, second, dex_pc) {
4215 }
4216
4217 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4218 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4219 }
4220 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4221 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4222 }
4223 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4224 HFloatConstant* y ATTRIBUTE_UNUSED) const override {
4225 LOG(FATAL) << DebugName() << " is not defined for float values";
4226 UNREACHABLE();
4227 }
4228 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4229 HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
4230 LOG(FATAL) << DebugName() << " is not defined for double values";
4231 UNREACHABLE();
4232 }
4233
4234 DECLARE_INSTRUCTION(Below);
4235
4236 IfCondition GetCondition() const override {
4237 return kCondB;
4238 }
4239
4240 IfCondition GetOppositeCondition() const override {
4241 return kCondAE;
4242 }
4243
4244 protected:
4245 DEFAULT_COPY_CONSTRUCTOR(Below);
4246
4247 private:
4248 template <typename T> static bool Compute(T x, T y) {
4249 return MakeUnsigned(x) < MakeUnsigned(y);
4250 }
4251 };
4252
4253 class HBelowOrEqual final : public HCondition {
4254 public:
4255 HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
4256 : HCondition(kBelowOrEqual, first, second, dex_pc) {
4257 }
4258
4259 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4260 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4261 }
4262 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4263 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4264 }
4265 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4266 HFloatConstant* y ATTRIBUTE_UNUSED) const override {
4267 LOG(FATAL) << DebugName() << " is not defined for float values";
4268 UNREACHABLE();
4269 }
4270 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4271 HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
4272 LOG(FATAL) << DebugName() << " is not defined for double values";
4273 UNREACHABLE();
4274 }
4275
4276 DECLARE_INSTRUCTION(BelowOrEqual);
4277
4278 IfCondition GetCondition() const override {
4279 return kCondBE;
4280 }
4281
4282 IfCondition GetOppositeCondition() const override {
4283 return kCondA;
4284 }
4285
4286 protected:
4287 DEFAULT_COPY_CONSTRUCTOR(BelowOrEqual);
4288
4289 private:
4290 template <typename T> static bool Compute(T x, T y) {
4291 return MakeUnsigned(x) <= MakeUnsigned(y);
4292 }
4293 };
4294
4295 class HAbove final : public HCondition {
4296 public:
4297 HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
4298 : HCondition(kAbove, first, second, dex_pc) {
4299 }
4300
4301 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4302 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4303 }
4304 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4305 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4306 }
4307 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4308 HFloatConstant* y ATTRIBUTE_UNUSED) const override {
4309 LOG(FATAL) << DebugName() << " is not defined for float values";
4310 UNREACHABLE();
4311 }
4312 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4313 HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
4314 LOG(FATAL) << DebugName() << " is not defined for double values";
4315 UNREACHABLE();
4316 }
4317
4318 DECLARE_INSTRUCTION(Above);
4319
4320 IfCondition GetCondition() const override {
4321 return kCondA;
4322 }
4323
4324 IfCondition GetOppositeCondition() const override {
4325 return kCondBE;
4326 }
4327
4328 protected:
4329 DEFAULT_COPY_CONSTRUCTOR(Above);
4330
4331 private:
4332 template <typename T> static bool Compute(T x, T y) {
4333 return MakeUnsigned(x) > MakeUnsigned(y);
4334 }
4335 };
4336
4337 class HAboveOrEqual final : public HCondition {
4338 public:
4339 HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
4340 : HCondition(kAboveOrEqual, first, second, dex_pc) {
4341 }
4342
4343 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4344 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4345 }
4346 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4347 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4348 }
4349 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4350 HFloatConstant* y ATTRIBUTE_UNUSED) const override {
4351 LOG(FATAL) << DebugName() << " is not defined for float values";
4352 UNREACHABLE();
4353 }
4354 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4355 HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
4356 LOG(FATAL) << DebugName() << " is not defined for double values";
4357 UNREACHABLE();
4358 }
4359
4360 DECLARE_INSTRUCTION(AboveOrEqual);
4361
4362 IfCondition GetCondition() const override {
4363 return kCondAE;
4364 }
4365
4366 IfCondition GetOppositeCondition() const override {
4367 return kCondB;
4368 }
4369
4370 protected:
4371 DEFAULT_COPY_CONSTRUCTOR(AboveOrEqual);
4372
4373 private:
4374 template <typename T> static bool Compute(T x, T y) {
4375 return MakeUnsigned(x) >= MakeUnsigned(y);
4376 }
4377 };
4378
4379 // Instruction to check how two inputs compare to each other.
4380 // Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1.
4381 class HCompare final : public HBinaryOperation {
4382 public:
4383 // Note that `comparison_type` is the type of comparison performed
4384 // between the comparison's inputs, not the type of the instantiated
  // HCompare instruction (which is always DataType::Type::kInt32).
4386 HCompare(DataType::Type comparison_type,
4387 HInstruction* first,
4388 HInstruction* second,
4389 ComparisonBias bias,
4390 uint32_t dex_pc)
4391 : HBinaryOperation(kCompare,
4392 DataType::Type::kInt32,
4393 first,
4394 second,
4395 SideEffectsForArchRuntimeCalls(comparison_type),
4396 dex_pc) {
4397 SetPackedField<ComparisonBiasField>(bias);
4398 }
4399
4400 template <typename T>
4401 int32_t Compute(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }
4402
4403 template <typename T>
4404 int32_t ComputeFP(T x, T y) const {
4405 DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
4406 DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
4407 // Handle the bias.
4408 return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compute(x, y);
4409 }
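
  // Illustrative examples of the bias handling above (comments only, not part
  // of the API): with kGtBias, ComputeFP(NaN, 0.0f) yields 1, matching the Dex
  // `cmpg-float` instruction; with kLtBias it yields -1, matching `cmpl-float`.
  // For ordered inputs the bias is irrelevant and Compute() decides.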
4410
4411 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
    // Note that there is no "cmp-int" Dex instruction, so we shouldn't
    // reach this code path when processing a freshly built HIR
    // graph. However, HCompare integer instructions can be synthesized
    // by the instruction simplifier to implement IntegerCompare and
    // IntegerSignum intrinsics, so we have to handle this case.
4417 return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4418 }
4419 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4420 return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4421 }
4422 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
4423 return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4424 }
4425 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
4426 return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4427 }
4428
4429 bool InstructionDataEquals(const HInstruction* other) const override {
4430 return GetPackedFields() == other->AsCompare()->GetPackedFields();
4431 }
4432
4433 ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
4434
4435 // Does this compare instruction have a "gt bias" (vs an "lt bias")?
4436 // Only meaningful for floating-point comparisons.
4437 bool IsGtBias() const {
4438 DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
4439 return GetBias() == ComparisonBias::kGtBias;
4440 }
4441
4442 static SideEffects SideEffectsForArchRuntimeCalls(DataType::Type type ATTRIBUTE_UNUSED) {
4443 // Comparisons do not require a runtime call in any back end.
4444 return SideEffects::None();
4445 }
4446
4447 DECLARE_INSTRUCTION(Compare);
4448
4449 protected:
4450 static constexpr size_t kFieldComparisonBias = kNumberOfGenericPackedBits;
4451 static constexpr size_t kFieldComparisonBiasSize =
4452 MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
4453 static constexpr size_t kNumberOfComparePackedBits =
4454 kFieldComparisonBias + kFieldComparisonBiasSize;
4455 static_assert(kNumberOfComparePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
4456 using ComparisonBiasField =
4457 BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;
4458
4459 // Return an integer constant containing the result of a comparison evaluated at compile time.
4460 HIntConstant* MakeConstantComparison(int32_t value, uint32_t dex_pc) const {
4461 DCHECK(value == -1 || value == 0 || value == 1) << value;
4462 return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
4463 }
4464
4465 DEFAULT_COPY_CONSTRUCTOR(Compare);
4466 };
4467
4468 class HNewInstance final : public HExpression<1> {
4469 public:
4470 HNewInstance(HInstruction* cls,
4471 uint32_t dex_pc,
4472 dex::TypeIndex type_index,
4473 const DexFile& dex_file,
4474 bool finalizable,
4475 QuickEntrypointEnum entrypoint)
4476 : HExpression(kNewInstance,
4477 DataType::Type::kReference,
4478 SideEffects::CanTriggerGC(),
4479 dex_pc),
4480 type_index_(type_index),
4481 dex_file_(dex_file),
4482 entrypoint_(entrypoint) {
4483 SetPackedFlag<kFlagFinalizable>(finalizable);
4484 SetPackedFlag<kFlagPartialMaterialization>(false);
4485 SetRawInputAt(0, cls);
4486 }
4487
4488 bool IsClonable() const override { return true; }
4489
4490 void SetPartialMaterialization() {
4491 SetPackedFlag<kFlagPartialMaterialization>(true);
4492 }
4493
4494 dex::TypeIndex GetTypeIndex() const { return type_index_; }
4495 const DexFile& GetDexFile() const { return dex_file_; }
4496
4497 // Calls runtime so needs an environment.
4498 bool NeedsEnvironment() const override { return true; }
4499
  // Can throw an error on out-of-memory or if the class is not instantiable/accessible.
4501 bool CanThrow() const override { return true; }
4502 bool OnlyThrowsAsyncExceptions() const override {
4503 return !IsFinalizable() && !NeedsChecks();
4504 }
4505
4506 bool NeedsChecks() const {
4507 return entrypoint_ == kQuickAllocObjectWithChecks;
4508 }
4509
4510 bool IsFinalizable() const { return GetPackedFlag<kFlagFinalizable>(); }
4511
4512 bool CanBeNull() const override { return false; }
4513
4514 bool IsPartialMaterialization() const {
4515 return GetPackedFlag<kFlagPartialMaterialization>();
4516 }
4517
4518 QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }
4519
4520 void SetEntrypoint(QuickEntrypointEnum entrypoint) {
4521 entrypoint_ = entrypoint;
4522 }
4523
4524 HLoadClass* GetLoadClass() const {
4525 HInstruction* input = InputAt(0);
4526 if (input->IsClinitCheck()) {
4527 input = input->InputAt(0);
4528 }
4529 DCHECK(input->IsLoadClass());
4530 return input->AsLoadClass();
4531 }
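
  // Sketch of the two input shapes GetLoadClass() accepts (illustrative):
  //
  //   cls = HLoadClass ...              cls   = HLoadClass ...
  //   HNewInstance [cls]         or     check = HClinitCheck [cls]
  //                                     HNewInstance [check]
  //
  // In the second shape, the HClinitCheck is transparently skipped.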
4532
4533 bool IsStringAlloc() const;
4534
4535 DECLARE_INSTRUCTION(NewInstance);
4536
4537 protected:
4538 DEFAULT_COPY_CONSTRUCTOR(NewInstance);
4539
4540 private:
4541 static constexpr size_t kFlagFinalizable = kNumberOfGenericPackedBits;
4542 static constexpr size_t kFlagPartialMaterialization = kFlagFinalizable + 1;
4543 static constexpr size_t kNumberOfNewInstancePackedBits = kFlagPartialMaterialization + 1;
4544 static_assert(kNumberOfNewInstancePackedBits <= kMaxNumberOfPackedBits,
4545 "Too many packed fields.");
4546
4547 const dex::TypeIndex type_index_;
4548 const DexFile& dex_file_;
4549 QuickEntrypointEnum entrypoint_;
4550 };
4551
4552 enum IntrinsicNeedsEnvironment {
4553 kNoEnvironment, // Intrinsic does not require an environment.
4554 kNeedsEnvironment // Intrinsic requires an environment.
4555 };
4556
4557 enum IntrinsicSideEffects {
4558 kNoSideEffects, // Intrinsic does not have any heap memory side effects.
4559 kReadSideEffects, // Intrinsic may read heap memory.
4560 kWriteSideEffects, // Intrinsic may write heap memory.
4561 kAllSideEffects // Intrinsic may read or write heap memory, or trigger GC.
4562 };
4563
4564 enum IntrinsicExceptions {
4565 kNoThrow, // Intrinsic does not throw any exceptions.
4566 kCanThrow // Intrinsic may throw exceptions.
4567 };
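
// The three enums above are consumed together by HInvoke::SetIntrinsic() when an
// invoke is recognized as an intrinsic. A hedged sketch of a call site (the
// intrinsic enumerator is hypothetical, chosen for illustration only):
//
//   invoke->SetIntrinsic(Intrinsics::kMathAbsInt,  // hypothetical enumerator
//                        kNoEnvironment,           // no environment required
//                        kNoSideEffects,           // touches no heap memory
//                        kNoThrow);                // cannot throw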
4568
4569 // Determines how to load an ArtMethod*.
4570 enum class MethodLoadKind {
4571 // Use a String init ArtMethod* loaded from Thread entrypoints.
4572 kStringInit,
4573
4574 // Use the method's own ArtMethod* loaded by the register allocator.
4575 kRecursive,
4576
4577 // Use PC-relative boot image ArtMethod* address that will be known at link time.
4578 // Used for boot image methods referenced by boot image code.
4579 kBootImageLinkTimePcRelative,
4580
4581 // Load from an entry in the .data.bimg.rel.ro using a PC-relative load.
  // Used for app->boot calls with a relocatable image.
4583 kBootImageRelRo,
4584
4585 // Load from an entry in the .bss section using a PC-relative load.
4586 // Used for methods outside boot image referenced by AOT-compiled app and boot image code.
4587 kBssEntry,
4588
4589 // Use ArtMethod* at a known address, embed the direct address in the code.
  // Used for JIT-compiled calls.
4591 kJitDirectAddress,
4592
  // Make a runtime call to resolve and call the method. This is the last-resort
  // kind, used when other kinds are unimplemented on a particular architecture.
4595 kRuntimeCall,
4596 };
4597
4598 // Determines the location of the code pointer of an invoke.
4599 enum class CodePtrLocation {
4600 // Recursive call, use local PC-relative call instruction.
4601 kCallSelf,
4602
  // Use the native pointer from the ArtMethod*.
4604 // Used for @CriticalNative to avoid going through the compiled stub. This call goes through
4605 // a special resolution stub if the class is not initialized or no native code is registered.
4606 kCallCriticalNative,
4607
4608 // Use code pointer from the ArtMethod*.
  // Used when we don't know the target code. This is also the last-resort kind,
  // used when other kinds are unimplemented or impractical (i.e. slow) on a
  // particular architecture.
4611 kCallArtMethod,
4612 };
4613
4614 static inline bool IsPcRelativeMethodLoadKind(MethodLoadKind load_kind) {
4615 return load_kind == MethodLoadKind::kBootImageLinkTimePcRelative ||
4616 load_kind == MethodLoadKind::kBootImageRelRo ||
4617 load_kind == MethodLoadKind::kBssEntry;
4618 }
4619
4620 class HInvoke : public HVariableInputSizeInstruction {
4621 public:
4622 bool NeedsEnvironment() const override;
4623
4624 void SetArgumentAt(size_t index, HInstruction* argument) {
4625 SetRawInputAt(index, argument);
4626 }
4627
4628 // Return the number of arguments. This number can be lower than
4629 // the number of inputs returned by InputCount(), as some invoke
4630 // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
4631 // inputs at the end of their list of inputs.
4632 uint32_t GetNumberOfArguments() const { return number_of_arguments_; }
4633
4634 InvokeType GetInvokeType() const {
4635 return GetPackedField<InvokeTypeField>();
4636 }
4637
4638 Intrinsics GetIntrinsic() const {
4639 return intrinsic_;
4640 }
4641
4642 void SetIntrinsic(Intrinsics intrinsic,
4643 IntrinsicNeedsEnvironment needs_env,
4644 IntrinsicSideEffects side_effects,
4645 IntrinsicExceptions exceptions);
4646
4647 bool IsFromInlinedInvoke() const {
4648 return GetEnvironment()->IsFromInlinedInvoke();
4649 }
4650
4651 void SetCanThrow(bool can_throw) { SetPackedFlag<kFlagCanThrow>(can_throw); }
4652
4653 bool CanThrow() const override { return GetPackedFlag<kFlagCanThrow>(); }
4654
4655 void SetAlwaysThrows(bool always_throws) { SetPackedFlag<kFlagAlwaysThrows>(always_throws); }
4656
4657 bool AlwaysThrows() const override { return GetPackedFlag<kFlagAlwaysThrows>(); }
4658
4659 bool CanBeMoved() const override { return IsIntrinsic() && !DoesAnyWrite(); }
4660
4661 bool InstructionDataEquals(const HInstruction* other) const override {
4662 return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_;
4663 }
4664
4665 uint32_t* GetIntrinsicOptimizations() {
4666 return &intrinsic_optimizations_;
4667 }
4668
4669 const uint32_t* GetIntrinsicOptimizations() const {
4670 return &intrinsic_optimizations_;
4671 }
4672
4673 bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }
4674
4675 ArtMethod* GetResolvedMethod() const { return resolved_method_; }
4676 void SetResolvedMethod(ArtMethod* method);
4677
4678 MethodReference GetMethodReference() const { return method_reference_; }
4679
4680 const MethodReference GetResolvedMethodReference() const {
4681 return resolved_method_reference_;
4682 }
4683
4684 DECLARE_ABSTRACT_INSTRUCTION(Invoke);
4685
4686 protected:
4687 static constexpr size_t kFieldInvokeType = kNumberOfGenericPackedBits;
4688 static constexpr size_t kFieldInvokeTypeSize =
4689 MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
4690 static constexpr size_t kFlagCanThrow = kFieldInvokeType + kFieldInvokeTypeSize;
4691 static constexpr size_t kFlagAlwaysThrows = kFlagCanThrow + 1;
4692 static constexpr size_t kNumberOfInvokePackedBits = kFlagAlwaysThrows + 1;
4693 static_assert(kNumberOfInvokePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
4694 using InvokeTypeField = BitField<InvokeType, kFieldInvokeType, kFieldInvokeTypeSize>;
4695
4696 HInvoke(InstructionKind kind,
4697 ArenaAllocator* allocator,
4698 uint32_t number_of_arguments,
4699 uint32_t number_of_other_inputs,
4700 DataType::Type return_type,
4701 uint32_t dex_pc,
4702 MethodReference method_reference,
4703 ArtMethod* resolved_method,
4704 MethodReference resolved_method_reference,
4705 InvokeType invoke_type)
4706 : HVariableInputSizeInstruction(
4707 kind,
4708 return_type,
4709 SideEffects::AllExceptGCDependency(), // Assume write/read on all fields/arrays.
4710 dex_pc,
4711 allocator,
4712 number_of_arguments + number_of_other_inputs,
4713 kArenaAllocInvokeInputs),
4714 number_of_arguments_(number_of_arguments),
4715 method_reference_(method_reference),
4716 resolved_method_reference_(resolved_method_reference),
4717 intrinsic_(Intrinsics::kNone),
4718 intrinsic_optimizations_(0) {
4719 SetPackedField<InvokeTypeField>(invoke_type);
4720 SetPackedFlag<kFlagCanThrow>(true);
4721 SetResolvedMethod(resolved_method);
4722 }
4723
4724 DEFAULT_COPY_CONSTRUCTOR(Invoke);
4725
4726 uint32_t number_of_arguments_;
4727 ArtMethod* resolved_method_;
4728 const MethodReference method_reference_;
4729 // Cached values of the resolved method, to avoid needing the mutator lock.
4730 const MethodReference resolved_method_reference_;
4731 Intrinsics intrinsic_;
4732
4733 // A magic word holding optimizations for intrinsics. See intrinsics.h.
4734 uint32_t intrinsic_optimizations_;
4735 };
4736
4737 class HInvokeUnresolved final : public HInvoke {
4738 public:
4739 HInvokeUnresolved(ArenaAllocator* allocator,
4740 uint32_t number_of_arguments,
4741 DataType::Type return_type,
4742 uint32_t dex_pc,
4743 MethodReference method_reference,
4744 InvokeType invoke_type)
4745 : HInvoke(kInvokeUnresolved,
4746 allocator,
4747 number_of_arguments,
4748 /* number_of_other_inputs= */ 0u,
4749 return_type,
4750 dex_pc,
4751 method_reference,
4752 nullptr,
4753 MethodReference(nullptr, 0u),
4754 invoke_type) {
4755 }
4756
4757 bool IsClonable() const override { return true; }
4758
4759 DECLARE_INSTRUCTION(InvokeUnresolved);
4760
4761 protected:
4762 DEFAULT_COPY_CONSTRUCTOR(InvokeUnresolved);
4763 };
4764
4765 class HInvokePolymorphic final : public HInvoke {
4766 public:
4767 HInvokePolymorphic(ArenaAllocator* allocator,
4768 uint32_t number_of_arguments,
4769 DataType::Type return_type,
4770 uint32_t dex_pc,
4771 MethodReference method_reference,
4772 // resolved_method is the ArtMethod object corresponding to the polymorphic
4773 // method (e.g. VarHandle.get), resolved using the class linker. It is needed
4774 // to pass intrinsic information to the HInvokePolymorphic node.
4775 ArtMethod* resolved_method,
4776 MethodReference resolved_method_reference,
4777 dex::ProtoIndex proto_idx)
4778 : HInvoke(kInvokePolymorphic,
4779 allocator,
4780 number_of_arguments,
4781 /* number_of_other_inputs= */ 0u,
4782 return_type,
4783 dex_pc,
4784 method_reference,
4785 resolved_method,
4786 resolved_method_reference,
4787 kPolymorphic),
4788 proto_idx_(proto_idx) {
4789 }
4790
4791 bool IsClonable() const override { return true; }
4792
4793 dex::ProtoIndex GetProtoIndex() { return proto_idx_; }
4794
4795 DECLARE_INSTRUCTION(InvokePolymorphic);
4796
4797 protected:
4798 dex::ProtoIndex proto_idx_;
4799 DEFAULT_COPY_CONSTRUCTOR(InvokePolymorphic);
4800 };
4801
4802 class HInvokeCustom final : public HInvoke {
4803 public:
4804 HInvokeCustom(ArenaAllocator* allocator,
4805 uint32_t number_of_arguments,
4806 uint32_t call_site_index,
4807 DataType::Type return_type,
4808 uint32_t dex_pc,
4809 MethodReference method_reference)
4810 : HInvoke(kInvokeCustom,
4811 allocator,
4812 number_of_arguments,
4813 /* number_of_other_inputs= */ 0u,
4814 return_type,
4815 dex_pc,
4816 method_reference,
4817 /* resolved_method= */ nullptr,
4818 MethodReference(nullptr, 0u),
4819 kStatic),
4820 call_site_index_(call_site_index) {
4821 }
4822
4823 uint32_t GetCallSiteIndex() const { return call_site_index_; }
4824
4825 bool IsClonable() const override { return true; }
4826
4827 DECLARE_INSTRUCTION(InvokeCustom);
4828
4829 protected:
4830 DEFAULT_COPY_CONSTRUCTOR(InvokeCustom);
4831
4832 private:
4833 uint32_t call_site_index_;
4834 };
4835
4836 class HInvokeStaticOrDirect final : public HInvoke {
4837 public:
4838 // Requirements of this method call regarding the class
4839 // initialization (clinit) check of its declaring class.
  enum class ClinitCheckRequirement {  // private marker to keep generate-operator-out.py from processing.
4841 kNone, // Class already initialized.
4842 kExplicit, // Static call having explicit clinit check as last input.
4843 kImplicit, // Static call implicitly requiring a clinit check.
4844 kLast = kImplicit
4845 };
4846
4847 struct DispatchInfo {
4848 MethodLoadKind method_load_kind;
4849 CodePtrLocation code_ptr_location;
    // The method load data holds
    //   - the thread entrypoint offset for the kStringInit method if this is a
    //     string init invoke. Note that there are multiple string init methods,
    //     each having its own offset.
    //   - the method address for kJitDirectAddress.
4854 uint64_t method_load_data;
4855 };
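
  // A hedged example of populating DispatchInfo (values are illustrative, and
  // `art_method` is a hypothetical ArtMethod*): a JIT call to a method at a
  // known address embeds the direct address in `method_load_data`:
  //
  //   DispatchInfo info = { MethodLoadKind::kJitDirectAddress,
  //                         CodePtrLocation::kCallArtMethod,
  //                         reinterpret_cast<uint64_t>(art_method) };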
4856
4857 HInvokeStaticOrDirect(ArenaAllocator* allocator,
4858 uint32_t number_of_arguments,
4859 DataType::Type return_type,
4860 uint32_t dex_pc,
4861 MethodReference method_reference,
4862 ArtMethod* resolved_method,
4863 DispatchInfo dispatch_info,
4864 InvokeType invoke_type,
4865 MethodReference resolved_method_reference,
4866 ClinitCheckRequirement clinit_check_requirement)
4867 : HInvoke(kInvokeStaticOrDirect,
4868 allocator,
4869 number_of_arguments,
4870 // There is potentially one extra argument for the HCurrentMethod input,
4871 // and one other if the clinit check is explicit. These can be removed later.
4872 (NeedsCurrentMethodInput(dispatch_info) ? 1u : 0u) +
4873 (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u),
4874 return_type,
4875 dex_pc,
4876 method_reference,
4877 resolved_method,
4878 resolved_method_reference,
4879 invoke_type),
4880 dispatch_info_(dispatch_info) {
4881 SetPackedField<ClinitCheckRequirementField>(clinit_check_requirement);
4882 }
4883
4884 bool IsClonable() const override { return true; }
4885
4886 void SetDispatchInfo(DispatchInfo dispatch_info) {
4887 bool had_current_method_input = HasCurrentMethodInput();
4888 bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info);
4889
4890 // Using the current method is the default and once we find a better
4891 // method load kind, we should not go back to using the current method.
4892 DCHECK(had_current_method_input || !needs_current_method_input);
4893
4894 if (had_current_method_input && !needs_current_method_input) {
4895 DCHECK_EQ(InputAt(GetCurrentMethodIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
4896 RemoveInputAt(GetCurrentMethodIndex());
4897 }
4898 dispatch_info_ = dispatch_info;
4899 }
4900
4901 DispatchInfo GetDispatchInfo() const {
4902 return dispatch_info_;
4903 }
4904
4905 using HInstruction::GetInputRecords; // Keep the const version visible.
4906 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() override {
4907 ArrayRef<HUserRecord<HInstruction*>> input_records = HInvoke::GetInputRecords();
4908 if (kIsDebugBuild && IsStaticWithExplicitClinitCheck()) {
4909 DCHECK(!input_records.empty());
4910 DCHECK_GT(input_records.size(), GetNumberOfArguments());
4911 HInstruction* last_input = input_records.back().GetInstruction();
4912 // Note: `last_input` may be null during arguments setup.
4913 if (last_input != nullptr) {
4914 // `last_input` is the last input of a static invoke marked as having
4915 // an explicit clinit check. It must either be:
4916 // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
4917 // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
4918 DCHECK(last_input->IsClinitCheck() || last_input->IsLoadClass()) << last_input->DebugName();
4919 }
4920 }
4921 return input_records;
4922 }
4923
4924 bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const override {
4925 // We do not access the method via object reference, so we cannot do an implicit null check.
4926 // TODO: for intrinsics we can generate implicit null checks.
4927 return false;
4928 }
4929
4930 bool CanBeNull() const override {
4931 return GetType() == DataType::Type::kReference && !IsStringInit();
4932 }
4933
4934 MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
4935 CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
4936 bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
4937 bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
4938 bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kJitDirectAddress; }
4939 bool HasPcRelativeMethodLoadKind() const {
4940 return IsPcRelativeMethodLoadKind(GetMethodLoadKind());
4941 }
4942
4943 QuickEntrypointEnum GetStringInitEntryPoint() const {
4944 DCHECK(IsStringInit());
4945 return static_cast<QuickEntrypointEnum>(dispatch_info_.method_load_data);
4946 }
4947
4948 uint64_t GetMethodAddress() const {
4949 DCHECK(HasMethodAddress());
4950 return dispatch_info_.method_load_data;
4951 }
4952
4953 const DexFile& GetDexFileForPcRelativeDexCache() const;
4954
4955 ClinitCheckRequirement GetClinitCheckRequirement() const {
4956 return GetPackedField<ClinitCheckRequirementField>();
4957 }
4958
4959 // Is this instruction a call to a static method?
4960 bool IsStatic() const {
4961 return GetInvokeType() == kStatic;
4962 }
4963
4964 // Does this method load kind need the current method as an input?
4965 static bool NeedsCurrentMethodInput(DispatchInfo dispatch_info) {
4966 return dispatch_info.method_load_kind == MethodLoadKind::kRecursive ||
4967 dispatch_info.method_load_kind == MethodLoadKind::kRuntimeCall ||
4968 dispatch_info.code_ptr_location == CodePtrLocation::kCallCriticalNative;
4969 }
4970
4971 // Get the index of the current method input.
4972 size_t GetCurrentMethodIndex() const {
4973 DCHECK(HasCurrentMethodInput());
4974 return GetCurrentMethodIndexUnchecked();
4975 }
4976 size_t GetCurrentMethodIndexUnchecked() const {
4977 return GetNumberOfArguments();
4978 }
4979
4980 // Check if the method has a current method input.
4981 bool HasCurrentMethodInput() const {
4982 if (NeedsCurrentMethodInput(GetDispatchInfo())) {
4983 DCHECK(InputAt(GetCurrentMethodIndexUnchecked()) == nullptr || // During argument setup.
4984 InputAt(GetCurrentMethodIndexUnchecked())->IsCurrentMethod());
4985 return true;
4986 } else {
4987 DCHECK(InputCount() == GetCurrentMethodIndexUnchecked() ||
4988 InputAt(GetCurrentMethodIndexUnchecked()) == nullptr || // During argument setup.
4989 !InputAt(GetCurrentMethodIndexUnchecked())->IsCurrentMethod());
4990 return false;
4991 }
4992 }
4993
4994 // Get the index of the special input.
4995 size_t GetSpecialInputIndex() const {
4996 DCHECK(HasSpecialInput());
4997 return GetSpecialInputIndexUnchecked();
4998 }
4999 size_t GetSpecialInputIndexUnchecked() const {
5000 return GetNumberOfArguments() + (HasCurrentMethodInput() ? 1u : 0u);
5001 }
5002
5003 // Check if the method has a special input.
5004 bool HasSpecialInput() const {
5005 size_t other_inputs =
5006 GetSpecialInputIndexUnchecked() + (IsStaticWithExplicitClinitCheck() ? 1u : 0u);
5007 size_t input_count = InputCount();
5008 DCHECK_LE(input_count - other_inputs, 1u) << other_inputs << " " << input_count;
5009 return other_inputs != input_count;
5010 }
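
  // Illustrative input layout for an invoke with n arguments, assuming every
  // optional input is present (a sketch of the index arithmetic used above):
  //
  //   [0, n)   the n method arguments
  //   [n]      HCurrentMethod           (GetCurrentMethodIndexUnchecked())
  //   [n + 1]  the special input        (GetSpecialInputIndexUnchecked())
  //   last     HClinitCheck/HLoadClass  (IsStaticWithExplicitClinitCheck())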
5011
5012 void AddSpecialInput(HInstruction* input) {
5013 // We allow only one special input.
5014 DCHECK(!HasSpecialInput());
5015 InsertInputAt(GetSpecialInputIndexUnchecked(), input);
5016 }
5017
5018 // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
5019 // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
5020 // instruction; only relevant for static calls with explicit clinit check.
5021 void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
5022 DCHECK(IsStaticWithExplicitClinitCheck());
5023 size_t last_input_index = inputs_.size() - 1u;
5024 HInstruction* last_input = inputs_.back().GetInstruction();
5025 DCHECK(last_input != nullptr);
5026 DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
5027 RemoveAsUserOfInput(last_input_index);
5028 inputs_.pop_back();
5029 SetPackedField<ClinitCheckRequirementField>(new_requirement);
5030 DCHECK(!IsStaticWithExplicitClinitCheck());
5031 }
5032
5033 // Is this a call to a static method whose declaring class has an
5034 // explicit initialization check in the graph?
5035 bool IsStaticWithExplicitClinitCheck() const {
5036 return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kExplicit);
5037 }
5038
  // Is this a call to a static method whose declaring class has an
  // implicit initialization check requirement?
5041 bool IsStaticWithImplicitClinitCheck() const {
5042 return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kImplicit);
5043 }
5044
5045 DECLARE_INSTRUCTION(InvokeStaticOrDirect);
5046
5047 protected:
5048 DEFAULT_COPY_CONSTRUCTOR(InvokeStaticOrDirect);
5049
5050 private:
5051 static constexpr size_t kFieldClinitCheckRequirement = kNumberOfInvokePackedBits;
5052 static constexpr size_t kFieldClinitCheckRequirementSize =
5053 MinimumBitsToStore(static_cast<size_t>(ClinitCheckRequirement::kLast));
5054 static constexpr size_t kNumberOfInvokeStaticOrDirectPackedBits =
5055 kFieldClinitCheckRequirement + kFieldClinitCheckRequirementSize;
5056 static_assert(kNumberOfInvokeStaticOrDirectPackedBits <= kMaxNumberOfPackedBits,
5057 "Too many packed fields.");
5058 using ClinitCheckRequirementField = BitField<ClinitCheckRequirement,
5059 kFieldClinitCheckRequirement,
5060 kFieldClinitCheckRequirementSize>;
5061
5062 DispatchInfo dispatch_info_;
5063 };
5064 std::ostream& operator<<(std::ostream& os, MethodLoadKind rhs);
5065 std::ostream& operator<<(std::ostream& os, CodePtrLocation rhs);
5066 std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);
5067
5068 class HInvokeVirtual final : public HInvoke {
5069 public:
5070 HInvokeVirtual(ArenaAllocator* allocator,
5071 uint32_t number_of_arguments,
5072 DataType::Type return_type,
5073 uint32_t dex_pc,
5074 MethodReference method_reference,
5075 ArtMethod* resolved_method,
5076 MethodReference resolved_method_reference,
5077 uint32_t vtable_index)
5078 : HInvoke(kInvokeVirtual,
5079 allocator,
5080 number_of_arguments,
5081 0u,
5082 return_type,
5083 dex_pc,
5084 method_reference,
5085 resolved_method,
5086 resolved_method_reference,
5087 kVirtual),
5088 vtable_index_(vtable_index) {
5089 }
5090
5091 bool IsClonable() const override { return true; }
5092
5093 bool CanBeNull() const override {
5094 switch (GetIntrinsic()) {
5095 case Intrinsics::kThreadCurrentThread:
5096 case Intrinsics::kStringBufferAppend:
5097 case Intrinsics::kStringBufferToString:
5098 case Intrinsics::kStringBuilderAppendObject:
5099 case Intrinsics::kStringBuilderAppendString:
5100 case Intrinsics::kStringBuilderAppendCharSequence:
5101 case Intrinsics::kStringBuilderAppendCharArray:
5102 case Intrinsics::kStringBuilderAppendBoolean:
5103 case Intrinsics::kStringBuilderAppendChar:
5104 case Intrinsics::kStringBuilderAppendInt:
5105 case Intrinsics::kStringBuilderAppendLong:
5106 case Intrinsics::kStringBuilderAppendFloat:
5107 case Intrinsics::kStringBuilderAppendDouble:
5108 case Intrinsics::kStringBuilderToString:
5109 return false;
5110 default:
5111 return HInvoke::CanBeNull();
5112 }
5113 }
5114
5115 bool CanDoImplicitNullCheckOn(HInstruction* obj) const override;
5116
5117 uint32_t GetVTableIndex() const { return vtable_index_; }
5118
5119 DECLARE_INSTRUCTION(InvokeVirtual);
5120
5121 protected:
5122 DEFAULT_COPY_CONSTRUCTOR(InvokeVirtual);
5123
5124 private:
5125 // Cached value of the resolved method, to avoid needing the mutator lock.
5126 const uint32_t vtable_index_;
5127 };
5128
5129 class HInvokeInterface final : public HInvoke {
5130 public:
5131 HInvokeInterface(ArenaAllocator* allocator,
5132 uint32_t number_of_arguments,
5133 DataType::Type return_type,
5134 uint32_t dex_pc,
5135 MethodReference method_reference,
5136 ArtMethod* resolved_method,
5137 MethodReference resolved_method_reference,
5138 uint32_t imt_index,
5139 MethodLoadKind load_kind)
5140 : HInvoke(kInvokeInterface,
5141 allocator,
5142 number_of_arguments + (NeedsCurrentMethod(load_kind) ? 1 : 0),
5143 0u,
5144 return_type,
5145 dex_pc,
5146 method_reference,
5147 resolved_method,
5148 resolved_method_reference,
5149 kInterface),
5150 imt_index_(imt_index),
5151 hidden_argument_load_kind_(load_kind) {
5152 }
5153
5154 static bool NeedsCurrentMethod(MethodLoadKind load_kind) {
5155 return load_kind == MethodLoadKind::kRecursive;
5156 }
5157
5158 bool IsClonable() const override { return true; }
5159
5160 bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
5161 // TODO: Add implicit null checks in intrinsics.
5162 return (obj == InputAt(0)) && !IsIntrinsic();
5163 }
5164
5165 size_t GetSpecialInputIndex() const {
5166 return GetNumberOfArguments();
5167 }
5168
5169 void AddSpecialInput(HInstruction* input) {
5170 InsertInputAt(GetSpecialInputIndex(), input);
5171 }
5172
5173 uint32_t GetImtIndex() const { return imt_index_; }
5174 MethodLoadKind GetHiddenArgumentLoadKind() const { return hidden_argument_load_kind_; }
5175
5176 DECLARE_INSTRUCTION(InvokeInterface);
5177
5178 protected:
5179 DEFAULT_COPY_CONSTRUCTOR(InvokeInterface);
5180
5181 private:
5182 // Cached value of the resolved method, to avoid needing the mutator lock.
5183 const uint32_t imt_index_;
5184
5185 // How the hidden argument (the interface method) is being loaded.
5186 const MethodLoadKind hidden_argument_load_kind_;
5187 };
5188
5189 class HNeg final : public HUnaryOperation {
5190 public:
5191 HNeg(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
5192 : HUnaryOperation(kNeg, result_type, input, dex_pc) {
5193 DCHECK_EQ(result_type, DataType::Kind(input->GetType()));
5194 }
5195
5196 template <typename T> static T Compute(T x) { return -x; }
5197
5198 HConstant* Evaluate(HIntConstant* x) const override {
5199 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
5200 }
5201 HConstant* Evaluate(HLongConstant* x) const override {
5202 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
5203 }
5204 HConstant* Evaluate(HFloatConstant* x) const override {
5205 return GetBlock()->GetGraph()->GetFloatConstant(Compute(x->GetValue()), GetDexPc());
5206 }
5207 HConstant* Evaluate(HDoubleConstant* x) const override {
5208 return GetBlock()->GetGraph()->GetDoubleConstant(Compute(x->GetValue()), GetDexPc());
5209 }
5210
5211 DECLARE_INSTRUCTION(Neg);
5212
5213 protected:
5214 DEFAULT_COPY_CONSTRUCTOR(Neg);
5215 };
5216
5217 class HNewArray final : public HExpression<2> {
5218 public:
5219 HNewArray(HInstruction* cls, HInstruction* length, uint32_t dex_pc, size_t component_size_shift)
5220 : HExpression(kNewArray, DataType::Type::kReference, SideEffects::CanTriggerGC(), dex_pc) {
5221 SetRawInputAt(0, cls);
5222 SetRawInputAt(1, length);
5223 SetPackedField<ComponentSizeShiftField>(component_size_shift);
5224 }
5225
5226 bool IsClonable() const override { return true; }
5227
5228 // Calls runtime so needs an environment.
5229 bool NeedsEnvironment() const override { return true; }
5230
5231 // May throw NegativeArraySizeException, OutOfMemoryError, etc.
5232 bool CanThrow() const override { return true; }
5233
5234 bool CanBeNull() const override { return false; }
5235
5236 HLoadClass* GetLoadClass() const {
5237 DCHECK(InputAt(0)->IsLoadClass());
5238 return InputAt(0)->AsLoadClass();
5239 }
5240
5241 HInstruction* GetLength() const {
5242 return InputAt(1);
5243 }
5244
5245 size_t GetComponentSizeShift() {
5246 return GetPackedField<ComponentSizeShiftField>();
5247 }
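
  // The shift is log2 of the component size, e.g. 0 for byte[] (1-byte
  // elements), 1 for short[]/char[], 2 for int[]/float[], and 3 for
  // long[]/double[].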
5248
5249 DECLARE_INSTRUCTION(NewArray);
5250
5251 protected:
5252 DEFAULT_COPY_CONSTRUCTOR(NewArray);
5253
5254 private:
5255 static constexpr size_t kFieldComponentSizeShift = kNumberOfGenericPackedBits;
5256 static constexpr size_t kFieldComponentSizeShiftSize = MinimumBitsToStore(3u);
5257 static constexpr size_t kNumberOfNewArrayPackedBits =
5258 kFieldComponentSizeShift + kFieldComponentSizeShiftSize;
5259 static_assert(kNumberOfNewArrayPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ComponentSizeShiftField =
      BitField<size_t, kFieldComponentSizeShift, kFieldComponentSizeShiftSize>;
5262 };
5263
5264 class HAdd final : public HBinaryOperation {
5265 public:
5266 HAdd(DataType::Type result_type,
5267 HInstruction* left,
5268 HInstruction* right,
5269 uint32_t dex_pc = kNoDexPc)
5270 : HBinaryOperation(kAdd, result_type, left, right, SideEffects::None(), dex_pc) {
5271 }
5272
5273 bool IsCommutative() const override { return true; }
5274
5275 template <typename T> static T Compute(T x, T y) { return x + y; }
5276
5277 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5278 return GetBlock()->GetGraph()->GetIntConstant(
5279 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5280 }
5281 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5282 return GetBlock()->GetGraph()->GetLongConstant(
5283 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5284 }
5285 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
5286 return GetBlock()->GetGraph()->GetFloatConstant(
5287 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5288 }
5289 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
5290 return GetBlock()->GetGraph()->GetDoubleConstant(
5291 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5292 }
5293
5294 DECLARE_INSTRUCTION(Add);
5295
5296 protected:
5297 DEFAULT_COPY_CONSTRUCTOR(Add);
5298 };
5299
5300 class HSub final : public HBinaryOperation {
5301 public:
5302 HSub(DataType::Type result_type,
5303 HInstruction* left,
5304 HInstruction* right,
5305 uint32_t dex_pc = kNoDexPc)
5306 : HBinaryOperation(kSub, result_type, left, right, SideEffects::None(), dex_pc) {
5307 }
5308
5309 template <typename T> static T Compute(T x, T y) { return x - y; }
5310
5311 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5312 return GetBlock()->GetGraph()->GetIntConstant(
5313 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5314 }
5315 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5316 return GetBlock()->GetGraph()->GetLongConstant(
5317 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5318 }
5319 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
5320 return GetBlock()->GetGraph()->GetFloatConstant(
5321 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5322 }
5323 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
5324 return GetBlock()->GetGraph()->GetDoubleConstant(
5325 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5326 }
5327
5328 DECLARE_INSTRUCTION(Sub);
5329
5330 protected:
5331 DEFAULT_COPY_CONSTRUCTOR(Sub);
5332 };
5333
5334 class HMul final : public HBinaryOperation {
5335 public:
5336 HMul(DataType::Type result_type,
5337 HInstruction* left,
5338 HInstruction* right,
5339 uint32_t dex_pc = kNoDexPc)
5340 : HBinaryOperation(kMul, result_type, left, right, SideEffects::None(), dex_pc) {
5341 }
5342
5343 bool IsCommutative() const override { return true; }
5344
5345 template <typename T> static T Compute(T x, T y) { return x * y; }
5346
5347 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5348 return GetBlock()->GetGraph()->GetIntConstant(
5349 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5350 }
5351 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5352 return GetBlock()->GetGraph()->GetLongConstant(
5353 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5354 }
5355 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
5356 return GetBlock()->GetGraph()->GetFloatConstant(
5357 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5358 }
5359 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
5360 return GetBlock()->GetGraph()->GetDoubleConstant(
5361 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5362 }
5363
5364 DECLARE_INSTRUCTION(Mul);
5365
5366 protected:
5367 DEFAULT_COPY_CONSTRUCTOR(Mul);
5368 };
5369
5370 class HDiv final : public HBinaryOperation {
5371 public:
5372 HDiv(DataType::Type result_type,
5373 HInstruction* left,
5374 HInstruction* right,
5375 uint32_t dex_pc)
5376 : HBinaryOperation(kDiv, result_type, left, right, SideEffects::None(), dex_pc) {
5377 }
5378
5379 template <typename T>
5380 T ComputeIntegral(T x, T y) const {
5381 DCHECK(!DataType::IsFloatingPointType(GetType())) << GetType();
5382 // Our graph structure ensures we never have 0 for `y` during
5383 // constant folding.
5384 DCHECK_NE(y, 0);
5385 // Special case -1 to avoid getting a SIGFPE on x86(_64).
5386 return (y == -1) ? -x : x / y;
5387 }
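
  // Worked example for the special case above: Java requires
  // Integer.MIN_VALUE / -1 to wrap to Integer.MIN_VALUE, but the x86 `idiv`
  // instruction traps on that operand pair; returning -x here produces the
  // required wrapped result without reaching the hardware divide.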
5388
5389 template <typename T>
5390 T ComputeFP(T x, T y) const {
5391 DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
5392 return x / y;
5393 }
5394
5395 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5396 return GetBlock()->GetGraph()->GetIntConstant(
5397 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5398 }
5399 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5400 return GetBlock()->GetGraph()->GetLongConstant(
5401 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5402 }
5403 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
5404 return GetBlock()->GetGraph()->GetFloatConstant(
5405 ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
5406 }
5407 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
5408 return GetBlock()->GetGraph()->GetDoubleConstant(
5409 ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
5410 }
5411
5412 DECLARE_INSTRUCTION(Div);
5413
5414 protected:
5415 DEFAULT_COPY_CONSTRUCTOR(Div);
5416 };
5417
5418 class HRem final : public HBinaryOperation {
5419 public:
5420 HRem(DataType::Type result_type,
5421 HInstruction* left,
5422 HInstruction* right,
5423 uint32_t dex_pc)
5424 : HBinaryOperation(kRem, result_type, left, right, SideEffects::None(), dex_pc) {
5425 }
5426
5427 template <typename T>
5428 T ComputeIntegral(T x, T y) const {
5429 DCHECK(!DataType::IsFloatingPointType(GetType())) << GetType();
5430 // Our graph structure ensures we never have 0 for `y` during
5431 // constant folding.
5432 DCHECK_NE(y, 0);
5433 // Special case -1 to avoid getting a SIGFPE on x86(_64).
5434 return (y == -1) ? 0 : x % y;
5435 }
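
  // Worked example for the special case above: Java defines
  // Integer.MIN_VALUE % -1 as 0, while the x86 `idiv` instruction would trap,
  // so the 0 is produced directly.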
5436
5437 template <typename T>
5438 T ComputeFP(T x, T y) const {
5439 DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
5440 return std::fmod(x, y);
5441 }
5442
5443 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5444 return GetBlock()->GetGraph()->GetIntConstant(
5445 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5446 }
5447 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5448 return GetBlock()->GetGraph()->GetLongConstant(
5449 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5450 }
5451 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
5452 return GetBlock()->GetGraph()->GetFloatConstant(
5453 ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
5454 }
5455 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
5456 return GetBlock()->GetGraph()->GetDoubleConstant(
5457 ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
5458 }
5459
5460 DECLARE_INSTRUCTION(Rem);
5461
5462 protected:
5463 DEFAULT_COPY_CONSTRUCTOR(Rem);
5464 };
5465
5466 class HMin final : public HBinaryOperation {
5467 public:
5468 HMin(DataType::Type result_type,
5469 HInstruction* left,
5470 HInstruction* right,
5471 uint32_t dex_pc)
5472 : HBinaryOperation(kMin, result_type, left, right, SideEffects::None(), dex_pc) {}
5473
5474 bool IsCommutative() const override { return true; }
5475
5476 // Evaluation for integral values.
5477 template <typename T> static T ComputeIntegral(T x, T y) {
5478 return (x <= y) ? x : y;
5479 }
5480
5481 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5482 return GetBlock()->GetGraph()->GetIntConstant(
5483 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5484 }
5485 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5486 return GetBlock()->GetGraph()->GetLongConstant(
5487 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5488 }
5489 // TODO: Evaluation for floating-point values.
5490 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
5491 HFloatConstant* y ATTRIBUTE_UNUSED) const override { return nullptr; }
5492 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
5493 HDoubleConstant* y ATTRIBUTE_UNUSED) const override { return nullptr; }
5494
5495 DECLARE_INSTRUCTION(Min);
5496
5497 protected:
5498 DEFAULT_COPY_CONSTRUCTOR(Min);
5499 };
5500
5501 class HMax final : public HBinaryOperation {
5502 public:
5503 HMax(DataType::Type result_type,
5504 HInstruction* left,
5505 HInstruction* right,
5506 uint32_t dex_pc)
5507 : HBinaryOperation(kMax, result_type, left, right, SideEffects::None(), dex_pc) {}
5508
5509 bool IsCommutative() const override { return true; }
5510
5511 // Evaluation for integral values.
5512 template <typename T> static T ComputeIntegral(T x, T y) {
5513 return (x >= y) ? x : y;
5514 }
5515
5516 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5517 return GetBlock()->GetGraph()->GetIntConstant(
5518 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5519 }
5520 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5521 return GetBlock()->GetGraph()->GetLongConstant(
5522 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5523 }
5524 // TODO: Evaluation for floating-point values.
5525 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
5526 HFloatConstant* y ATTRIBUTE_UNUSED) const override { return nullptr; }
5527 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
5528 HDoubleConstant* y ATTRIBUTE_UNUSED) const override { return nullptr; }
5529
5530 DECLARE_INSTRUCTION(Max);
5531
5532 protected:
5533 DEFAULT_COPY_CONSTRUCTOR(Max);
5534 };
5535
5536 class HAbs final : public HUnaryOperation {
5537 public:
5538 HAbs(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
5539 : HUnaryOperation(kAbs, result_type, input, dex_pc) {}
5540
5541 // Evaluation for integral values.
5542 template <typename T> static T ComputeIntegral(T x) {
5543 return x < 0 ? -x : x;
5544 }
5545
  // Evaluation for floating-point values.
  // Note: as a matter of "quality of implementation" rather than pure "spec
  // compliance", we require that Math.abs() clears the sign bit (but changes
  // nothing else) for all floating-point numbers, including NaN (a signaling
  // NaN may become quiet, though).
  // http://b/30758343
5551 template <typename T, typename S> static T ComputeFP(T x) {
5552 S bits = bit_cast<S, T>(x);
5553 return bit_cast<T, S>(bits & std::numeric_limits<S>::max());
5554 }
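
  // Worked examples of the bit manipulation above (float case, so S = int32_t):
  //   ComputeFP<float, int32_t>(-0.0f): 0x80000000 -> 0x00000000 == +0.0f
  //   ComputeFP<float, int32_t>(-2.0f): 0xC0000000 -> 0x40000000 == +2.0f
  // NaN payloads are preserved; only the sign bit is cleared.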
5555
5556 HConstant* Evaluate(HIntConstant* x) const override {
5557 return GetBlock()->GetGraph()->GetIntConstant(ComputeIntegral(x->GetValue()), GetDexPc());
5558 }
5559 HConstant* Evaluate(HLongConstant* x) const override {
5560 return GetBlock()->GetGraph()->GetLongConstant(ComputeIntegral(x->GetValue()), GetDexPc());
5561 }
5562 HConstant* Evaluate(HFloatConstant* x) const override {
5563 return GetBlock()->GetGraph()->GetFloatConstant(
5564 ComputeFP<float, int32_t>(x->GetValue()), GetDexPc());
5565 }
5566 HConstant* Evaluate(HDoubleConstant* x) const override {
5567 return GetBlock()->GetGraph()->GetDoubleConstant(
5568 ComputeFP<double, int64_t>(x->GetValue()), GetDexPc());
5569 }
5570
5571 DECLARE_INSTRUCTION(Abs);
5572
5573 protected:
5574 DEFAULT_COPY_CONSTRUCTOR(Abs);
5575 };
5576
5577 class HDivZeroCheck final : public HExpression<1> {
5578 public:
  // `HDivZeroCheck` can trigger GC, as it may call the `ArithmeticException`
  // constructor. However, it can only do so on a fatal slow path, so execution
  // never returns to the instruction following the current one; thus
  // `SideEffects::None()` is used.
5582 HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
5583 : HExpression(kDivZeroCheck, value->GetType(), SideEffects::None(), dex_pc) {
5584 SetRawInputAt(0, value);
5585 }
5586
5587 bool IsClonable() const override { return true; }
5588 bool CanBeMoved() const override { return true; }
5589
5590 bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
5591 return true;
5592 }
5593
5594 bool NeedsEnvironment() const override { return true; }
5595 bool CanThrow() const override { return true; }
5596
5597 DECLARE_INSTRUCTION(DivZeroCheck);
5598
5599 protected:
5600 DEFAULT_COPY_CONSTRUCTOR(DivZeroCheck);
5601 };
5602
5603 class HShl final : public HBinaryOperation {
5604 public:
5605 HShl(DataType::Type result_type,
5606 HInstruction* value,
5607 HInstruction* distance,
5608 uint32_t dex_pc = kNoDexPc)
5609 : HBinaryOperation(kShl, result_type, value, distance, SideEffects::None(), dex_pc) {
5610 DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
5611 DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
5612 }
5613
5614 template <typename T>
5615 static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
5616 return value << (distance & max_shift_distance);
5617 }
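
  // Worked example of the distance masking above, matching Java's shift
  // semantics:
  //   Compute(1, 33, kMaxIntShiftDistance)  ->  1 << (33 & 31)  ->  2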
5618
5619 HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const override {
5620 return GetBlock()->GetGraph()->GetIntConstant(
5621 Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
5622 }
5623 HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const override {
5624 return GetBlock()->GetGraph()->GetLongConstant(
5625 Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
5626 }
5627 HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
5628 HLongConstant* distance ATTRIBUTE_UNUSED) const override {
5629 LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
5630 UNREACHABLE();
5631 }
5632 HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
5633 HFloatConstant* distance ATTRIBUTE_UNUSED) const override {
5634 LOG(FATAL) << DebugName() << " is not defined for float values";
5635 UNREACHABLE();
5636 }
5637 HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
5638 HDoubleConstant* distance ATTRIBUTE_UNUSED) const override {
5639 LOG(FATAL) << DebugName() << " is not defined for double values";
5640 UNREACHABLE();
5641 }
5642
5643 DECLARE_INSTRUCTION(Shl);
5644
5645 protected:
5646 DEFAULT_COPY_CONSTRUCTOR(Shl);
5647 };
5648
5649 class HShr final : public HBinaryOperation {
5650 public:
5651 HShr(DataType::Type result_type,
5652 HInstruction* value,
5653 HInstruction* distance,
5654 uint32_t dex_pc = kNoDexPc)
5655 : HBinaryOperation(kShr, result_type, value, distance, SideEffects::None(), dex_pc) {
5656 DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
5657 DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
5658 }
5659
5660 template <typename T>
5661 static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
5662 return value >> (distance & max_shift_distance);
5663 }
5664
5665 HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const override {
5666 return GetBlock()->GetGraph()->GetIntConstant(
5667 Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
5668 }
5669 HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const override {
5670 return GetBlock()->GetGraph()->GetLongConstant(
5671 Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
5672 }
5673 HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
5674 HLongConstant* distance ATTRIBUTE_UNUSED) const override {
5675 LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
5676 UNREACHABLE();
5677 }
5678 HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
5679 HFloatConstant* distance ATTRIBUTE_UNUSED) const override {
5680 LOG(FATAL) << DebugName() << " is not defined for float values";
5681 UNREACHABLE();
5682 }
5683 HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
5684 HDoubleConstant* distance ATTRIBUTE_UNUSED) const override {
5685 LOG(FATAL) << DebugName() << " is not defined for double values";
5686 UNREACHABLE();
5687 }
5688
5689 DECLARE_INSTRUCTION(Shr);
5690
5691 protected:
5692 DEFAULT_COPY_CONSTRUCTOR(Shr);
5693 };
5694
5695 class HUShr final : public HBinaryOperation {
5696 public:
5697 HUShr(DataType::Type result_type,
5698 HInstruction* value,
5699 HInstruction* distance,
5700 uint32_t dex_pc = kNoDexPc)
5701 : HBinaryOperation(kUShr, result_type, value, distance, SideEffects::None(), dex_pc) {
5702 DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
5703 DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
5704 }
5705
5706 template <typename T>
5707 static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
5708 typedef typename std::make_unsigned<T>::type V;
5709 V ux = static_cast<V>(value);
5710 return static_cast<T>(ux >> (distance & max_shift_distance));
5711 }
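
  // Worked example of the unsigned shift above (32-bit case):
  //   Compute(-1, 28, kMaxIntShiftDistance)  ->  0xFFFFFFFFu >> 28  ->  0xF
  // whereas the arithmetic HShr would keep the sign and produce -1.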
5712
5713 HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const override {
5714 return GetBlock()->GetGraph()->GetIntConstant(
5715 Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
5716 }
5717 HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const override {
5718 return GetBlock()->GetGraph()->GetLongConstant(
5719 Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
5720 }
5721 HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
5722 HLongConstant* distance ATTRIBUTE_UNUSED) const override {
5723 LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
5724 UNREACHABLE();
5725 }
5726 HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
5727 HFloatConstant* distance ATTRIBUTE_UNUSED) const override {
5728 LOG(FATAL) << DebugName() << " is not defined for float values";
5729 UNREACHABLE();
5730 }
5731 HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
5732 HDoubleConstant* distance ATTRIBUTE_UNUSED) const override {
5733 LOG(FATAL) << DebugName() << " is not defined for double values";
5734 UNREACHABLE();
5735 }
5736
5737 DECLARE_INSTRUCTION(UShr);
5738
5739 protected:
5740 DEFAULT_COPY_CONSTRUCTOR(UShr);
5741 };
5742
5743 class HAnd final : public HBinaryOperation {
5744 public:
5745 HAnd(DataType::Type result_type,
5746 HInstruction* left,
5747 HInstruction* right,
5748 uint32_t dex_pc = kNoDexPc)
5749 : HBinaryOperation(kAnd, result_type, left, right, SideEffects::None(), dex_pc) {
5750 }
5751
5752 bool IsCommutative() const override { return true; }
5753
5754 template <typename T> static T Compute(T x, T y) { return x & y; }
5755
5756 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5757 return GetBlock()->GetGraph()->GetIntConstant(
5758 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5759 }
5760 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5761 return GetBlock()->GetGraph()->GetLongConstant(
5762 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5763 }
5764 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
5765 HFloatConstant* y ATTRIBUTE_UNUSED) const override {
5766 LOG(FATAL) << DebugName() << " is not defined for float values";
5767 UNREACHABLE();
5768 }
5769 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
5770 HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
5771 LOG(FATAL) << DebugName() << " is not defined for double values";
5772 UNREACHABLE();
5773 }
5774
5775 DECLARE_INSTRUCTION(And);
5776
5777 protected:
5778 DEFAULT_COPY_CONSTRUCTOR(And);
5779 };
5780
5781 class HOr final : public HBinaryOperation {
5782 public:
5783 HOr(DataType::Type result_type,
5784 HInstruction* left,
5785 HInstruction* right,
5786 uint32_t dex_pc = kNoDexPc)
5787 : HBinaryOperation(kOr, result_type, left, right, SideEffects::None(), dex_pc) {
5788 }
5789
5790 bool IsCommutative() const override { return true; }
5791
5792 template <typename T> static T Compute(T x, T y) { return x | y; }
5793
5794 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5795 return GetBlock()->GetGraph()->GetIntConstant(
5796 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5797 }
5798 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5799 return GetBlock()->GetGraph()->GetLongConstant(
5800 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5801 }
5802 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
5803 HFloatConstant* y ATTRIBUTE_UNUSED) const override {
5804 LOG(FATAL) << DebugName() << " is not defined for float values";
5805 UNREACHABLE();
5806 }
5807 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
5808 HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
5809 LOG(FATAL) << DebugName() << " is not defined for double values";
5810 UNREACHABLE();
5811 }
5812
5813 DECLARE_INSTRUCTION(Or);
5814
5815 protected:
5816 DEFAULT_COPY_CONSTRUCTOR(Or);
5817 };
5818
5819 class HXor final : public HBinaryOperation {
5820 public:
5821 HXor(DataType::Type result_type,
5822 HInstruction* left,
5823 HInstruction* right,
5824 uint32_t dex_pc = kNoDexPc)
5825 : HBinaryOperation(kXor, result_type, left, right, SideEffects::None(), dex_pc) {
5826 }
5827
5828 bool IsCommutative() const override { return true; }
5829
5830 template <typename T> static T Compute(T x, T y) { return x ^ y; }
5831
5832 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5833 return GetBlock()->GetGraph()->GetIntConstant(
5834 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5835 }
5836 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5837 return GetBlock()->GetGraph()->GetLongConstant(
5838 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5839 }
5840 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
5841 HFloatConstant* y ATTRIBUTE_UNUSED) const override {
5842 LOG(FATAL) << DebugName() << " is not defined for float values";
5843 UNREACHABLE();
5844 }
5845 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
5846 HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
5847 LOG(FATAL) << DebugName() << " is not defined for double values";
5848 UNREACHABLE();
5849 }
5850
5851 DECLARE_INSTRUCTION(Xor);
5852
5853 protected:
5854 DEFAULT_COPY_CONSTRUCTOR(Xor);
5855 };
5856
5857 class HRor final : public HBinaryOperation {
5858 public:
5859 HRor(DataType::Type result_type, HInstruction* value, HInstruction* distance)
5860 : HBinaryOperation(kRor, result_type, value, distance) {
5861 }
5862
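  // Rotate-right sketch: the distance is taken modulo the bit width of T, so e.g.
  // Compute<int32_t>(0x12345678, 8, kMaxIntShiftDistance) yields 0x78123456, and
  // any distance that is 0 mod 32 returns the value unchanged.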
5863 template <typename T>
5864 static T Compute(T value, int32_t distance, int32_t max_shift_value) {
5865 typedef typename std::make_unsigned<T>::type V;
5866 V ux = static_cast<V>(value);
5867 if ((distance & max_shift_value) == 0) {
5868 return static_cast<T>(ux);
5869 } else {
5870       const V reg_bits = sizeof(T) * 8;
5871       return static_cast<T>(ux >> (distance & max_shift_value)) |
5872              static_cast<T>(ux << (reg_bits - (distance & max_shift_value)));
5873 }
5874 }
5875
5876 HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const override {
5877 return GetBlock()->GetGraph()->GetIntConstant(
5878 Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
5879 }
5880 HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const override {
5881 return GetBlock()->GetGraph()->GetLongConstant(
5882 Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
5883 }
5884 HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
5885 HLongConstant* distance ATTRIBUTE_UNUSED) const override {
5886 LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
5887 UNREACHABLE();
5888 }
5889 HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
5890 HFloatConstant* distance ATTRIBUTE_UNUSED) const override {
5891 LOG(FATAL) << DebugName() << " is not defined for float values";
5892 UNREACHABLE();
5893 }
5894 HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
5895 HDoubleConstant* distance ATTRIBUTE_UNUSED) const override {
5896 LOG(FATAL) << DebugName() << " is not defined for double values";
5897 UNREACHABLE();
5898 }
5899
5900 DECLARE_INSTRUCTION(Ror);
5901
5902 protected:
5903 DEFAULT_COPY_CONSTRUCTOR(Ror);
5904 };
5905
5906 // The value of a parameter in this method. Its location depends on
5907 // the calling convention.
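// For instance methods the receiver `this` is, by the builder's convention,
// the parameter at index 0 (see kFlagIsThis below).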
5908 class HParameterValue final : public HExpression<0> {
5909 public:
5910 HParameterValue(const DexFile& dex_file,
5911 dex::TypeIndex type_index,
5912 uint8_t index,
5913 DataType::Type parameter_type,
5914 bool is_this = false)
5915 : HExpression(kParameterValue, parameter_type, SideEffects::None(), kNoDexPc),
5916 dex_file_(dex_file),
5917 type_index_(type_index),
5918 index_(index) {
5919 SetPackedFlag<kFlagIsThis>(is_this);
5920 SetPackedFlag<kFlagCanBeNull>(!is_this);
5921 }
5922
5923 const DexFile& GetDexFile() const { return dex_file_; }
5924 dex::TypeIndex GetTypeIndex() const { return type_index_; }
5925 uint8_t GetIndex() const { return index_; }
5926 bool IsThis() const { return GetPackedFlag<kFlagIsThis>(); }
5927
5928 bool CanBeNull() const override { return GetPackedFlag<kFlagCanBeNull>(); }
5929 void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }
5930
5931 DECLARE_INSTRUCTION(ParameterValue);
5932
5933 protected:
5934 DEFAULT_COPY_CONSTRUCTOR(ParameterValue);
5935
5936 private:
5937   // Whether or not the parameter value corresponds to the 'this' argument.
5938 static constexpr size_t kFlagIsThis = kNumberOfGenericPackedBits;
5939 static constexpr size_t kFlagCanBeNull = kFlagIsThis + 1;
5940 static constexpr size_t kNumberOfParameterValuePackedBits = kFlagCanBeNull + 1;
5941 static_assert(kNumberOfParameterValuePackedBits <= kMaxNumberOfPackedBits,
5942 "Too many packed fields.");
5943
5944 const DexFile& dex_file_;
5945 const dex::TypeIndex type_index_;
5946 // The index of this parameter in the parameters list. Must be less
5947 // than HGraph::number_of_in_vregs_.
5948 const uint8_t index_;
5949 };
5950
5951 class HNot final : public HUnaryOperation {
5952 public:
5953 HNot(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
5954 : HUnaryOperation(kNot, result_type, input, dex_pc) {
5955 }
5956
5957 bool CanBeMoved() const override { return true; }
5958 bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
5959 return true;
5960 }
5961
5962 template <typename T> static T Compute(T x) { return ~x; }
5963
5964 HConstant* Evaluate(HIntConstant* x) const override {
5965 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
5966 }
5967 HConstant* Evaluate(HLongConstant* x) const override {
5968 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
5969 }
5970 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const override {
5971 LOG(FATAL) << DebugName() << " is not defined for float values";
5972 UNREACHABLE();
5973 }
5974 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const override {
5975 LOG(FATAL) << DebugName() << " is not defined for double values";
5976 UNREACHABLE();
5977 }
5978
5979 DECLARE_INSTRUCTION(Not);
5980
5981 protected:
5982 DEFAULT_COPY_CONSTRUCTOR(Not);
5983 };
5984
5985 class HBooleanNot final : public HUnaryOperation {
5986 public:
5987 explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
5988 : HUnaryOperation(kBooleanNot, DataType::Type::kBool, input, dex_pc) {
5989 }
5990
5991 bool CanBeMoved() const override { return true; }
5992 bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
5993 return true;
5994 }
5995
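  // The input must already be a Boolean stored as 0 or 1 (enforced by the DCHECK
  // below), so e.g. Compute(0) folds to true and Compute(1) folds to false.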
5996 template <typename T> static bool Compute(T x) {
5997 DCHECK(IsUint<1>(x)) << x;
5998 return !x;
5999 }
6000
6001 HConstant* Evaluate(HIntConstant* x) const override {
6002 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
6003 }
6004 HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const override {
6005 LOG(FATAL) << DebugName() << " is not defined for long values";
6006 UNREACHABLE();
6007 }
6008 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const override {
6009 LOG(FATAL) << DebugName() << " is not defined for float values";
6010 UNREACHABLE();
6011 }
6012 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const override {
6013 LOG(FATAL) << DebugName() << " is not defined for double values";
6014 UNREACHABLE();
6015 }
6016
6017 DECLARE_INSTRUCTION(BooleanNot);
6018
6019 protected:
6020 DEFAULT_COPY_CONSTRUCTOR(BooleanNot);
6021 };
6022
6023 class HTypeConversion final : public HExpression<1> {
6024 public:
6025 // Instantiate a type conversion of `input` to `result_type`.
6026 HTypeConversion(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
6027 : HExpression(kTypeConversion, result_type, SideEffects::None(), dex_pc) {
6028 SetRawInputAt(0, input);
6029 // Invariant: We should never generate a conversion to a Boolean value.
6030 DCHECK_NE(DataType::Type::kBool, result_type);
6031 }
6032
6033 HInstruction* GetInput() const { return InputAt(0); }
6034 DataType::Type GetInputType() const { return GetInput()->GetType(); }
6035 DataType::Type GetResultType() const { return GetType(); }
6036
6037 bool IsClonable() const override { return true; }
6038 bool CanBeMoved() const override { return true; }
6039 bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
6040 return true;
6041 }
6042 // Return whether the conversion is implicit. This includes conversion to the same type.
6043 bool IsImplicitConversion() const {
6044 return DataType::IsTypeConversionImplicit(GetInputType(), GetResultType());
6045 }
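  // For example (a sketch based on DataType::IsTypeConversionImplicit): an
  // int16 -> int32 conversion is implicit because smaller integral values are
  // already held sign-extended in registers, while int32 -> int16 needs an
  // explicit truncation.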
6046
6047 // Try to statically evaluate the conversion and return a HConstant
6048 // containing the result. If the input cannot be converted, return nullptr.
6049 HConstant* TryStaticEvaluation() const;
6050
6051 DECLARE_INSTRUCTION(TypeConversion);
6052
6053 protected:
6054 DEFAULT_COPY_CONSTRUCTOR(TypeConversion);
6055 };
6056
6057 static constexpr uint32_t kNoRegNumber = -1;
6058
6059 class HNullCheck final : public HExpression<1> {
6060 public:
6061 // `HNullCheck` can trigger GC, as it may call the `NullPointerException`
6062 // constructor. However it can only do it on a fatal slow path so execution never returns to the
6063 // instruction following the current one; thus 'SideEffects::None()' is used.
6064 HNullCheck(HInstruction* value, uint32_t dex_pc)
6065 : HExpression(kNullCheck, value->GetType(), SideEffects::None(), dex_pc) {
6066 SetRawInputAt(0, value);
6067 }
6068
6069 bool IsClonable() const override { return true; }
6070 bool CanBeMoved() const override { return true; }
6071 bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
6072 return true;
6073 }
6074
6075 bool NeedsEnvironment() const override { return true; }
6076
6077 bool CanThrow() const override { return true; }
6078
6079 bool CanBeNull() const override { return false; }
6080
6081 DECLARE_INSTRUCTION(NullCheck);
6082
6083 protected:
6084 DEFAULT_COPY_CONSTRUCTOR(NullCheck);
6085 };
6086
6087 // Embeds an ArtField and all the information required by the compiler. We cache
6088 // that information to avoid requiring the mutator lock every time we need it.
6089 class FieldInfo : public ValueObject {
6090 public:
6091 FieldInfo(ArtField* field,
6092 MemberOffset field_offset,
6093 DataType::Type field_type,
6094 bool is_volatile,
6095 uint32_t index,
6096 uint16_t declaring_class_def_index,
6097 const DexFile& dex_file)
6098 : field_(field),
6099 field_offset_(field_offset),
6100 field_type_(field_type),
6101 is_volatile_(is_volatile),
6102 index_(index),
6103 declaring_class_def_index_(declaring_class_def_index),
6104 dex_file_(dex_file) {}
6105
6106 ArtField* GetField() const { return field_; }
6107 MemberOffset GetFieldOffset() const { return field_offset_; }
6108 DataType::Type GetFieldType() const { return field_type_; }
6109 uint32_t GetFieldIndex() const { return index_; }
6110   uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_; }
6111 const DexFile& GetDexFile() const { return dex_file_; }
6112 bool IsVolatile() const { return is_volatile_; }
6113
6114 bool Equals(const FieldInfo& other) const {
6115 return field_ == other.field_ &&
6116 field_offset_ == other.field_offset_ &&
6117 field_type_ == other.field_type_ &&
6118 is_volatile_ == other.is_volatile_ &&
6119 index_ == other.index_ &&
6120 declaring_class_def_index_ == other.declaring_class_def_index_ &&
6121 &dex_file_ == &other.dex_file_;
6122 }
6123
6124 std::ostream& Dump(std::ostream& os) const {
6125 os << field_ << ", off: " << field_offset_ << ", type: " << field_type_
6126 << ", volatile: " << std::boolalpha << is_volatile_ << ", index_: " << std::dec << index_
6127 << ", declaring_class: " << declaring_class_def_index_ << ", dex: " << dex_file_;
6128 return os;
6129 }
6130
6131 private:
6132 ArtField* const field_;
6133 const MemberOffset field_offset_;
6134 const DataType::Type field_type_;
6135 const bool is_volatile_;
6136 const uint32_t index_;
6137 const uint16_t declaring_class_def_index_;
6138 const DexFile& dex_file_;
6139 };
6140
6141 inline bool operator==(const FieldInfo& a, const FieldInfo& b) {
6142 return a.Equals(b);
6143 }
6144
6145 inline std::ostream& operator<<(std::ostream& os, const FieldInfo& a) {
6146 return a.Dump(os);
6147 }
6148
6149 class HInstanceFieldGet final : public HExpression<1> {
6150 public:
6151 HInstanceFieldGet(HInstruction* value,
6152 ArtField* field,
6153 DataType::Type field_type,
6154 MemberOffset field_offset,
6155 bool is_volatile,
6156 uint32_t field_idx,
6157 uint16_t declaring_class_def_index,
6158 const DexFile& dex_file,
6159 uint32_t dex_pc)
6160 : HExpression(kInstanceFieldGet,
6161 field_type,
6162 SideEffects::FieldReadOfType(field_type, is_volatile),
6163 dex_pc),
6164 field_info_(field,
6165 field_offset,
6166 field_type,
6167 is_volatile,
6168 field_idx,
6169 declaring_class_def_index,
6170 dex_file) {
6171 SetRawInputAt(0, value);
6172 }
6173
6174 bool IsClonable() const override { return true; }
6175 bool CanBeMoved() const override { return !IsVolatile(); }
6176
6177 bool InstructionDataEquals(const HInstruction* other) const override {
6178 const HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
6179 return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
6180 }
6181
6182 bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
6183 return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
6184 }
6185
6186 size_t ComputeHashCode() const override {
6187 return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
6188 }
6189
6190 bool IsFieldAccess() const override { return true; }
6191 const FieldInfo& GetFieldInfo() const override { return field_info_; }
6192 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
6193 DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
6194 bool IsVolatile() const { return field_info_.IsVolatile(); }
6195
6196 void SetType(DataType::Type new_type) {
6197 DCHECK(DataType::IsIntegralType(GetType()));
6198 DCHECK(DataType::IsIntegralType(new_type));
6199 DCHECK_EQ(DataType::Size(GetType()), DataType::Size(new_type));
6200 SetPackedField<TypeField>(new_type);
6201 }
6202
6203 DECLARE_INSTRUCTION(InstanceFieldGet);
6204
6205 protected:
6206 DEFAULT_COPY_CONSTRUCTOR(InstanceFieldGet);
6207
6208 private:
6209 const FieldInfo field_info_;
6210 };
6211
6212 class HPredicatedInstanceFieldGet final : public HExpression<2> {
6213 public:
6214 HPredicatedInstanceFieldGet(HInstanceFieldGet* orig,
6215 HInstruction* target,
6216 HInstruction* default_val)
6217 : HExpression(kPredicatedInstanceFieldGet,
6218 orig->GetFieldType(),
6219 orig->GetSideEffects(),
6220 orig->GetDexPc()),
6221 field_info_(orig->GetFieldInfo()) {
6222     // NB: The default value is at input 0 so that we can avoid doing a move.
6223 SetRawInputAt(1, target);
6224 SetRawInputAt(0, default_val);
6225 }
6226
6227 HPredicatedInstanceFieldGet(HInstruction* value,
6228 ArtField* field,
6229 HInstruction* default_value,
6230 DataType::Type field_type,
6231 MemberOffset field_offset,
6232 bool is_volatile,
6233 uint32_t field_idx,
6234 uint16_t declaring_class_def_index,
6235 const DexFile& dex_file,
6236 uint32_t dex_pc)
6237 : HExpression(kPredicatedInstanceFieldGet,
6238 field_type,
6239 SideEffects::FieldReadOfType(field_type, is_volatile),
6240 dex_pc),
6241 field_info_(field,
6242 field_offset,
6243 field_type,
6244 is_volatile,
6245 field_idx,
6246 declaring_class_def_index,
6247 dex_file) {
6248 SetRawInputAt(1, value);
6249 SetRawInputAt(0, default_value);
6250 }
6251
6252 bool IsClonable() const override {
6253 return true;
6254 }
6255 bool CanBeMoved() const override {
6256 return !IsVolatile();
6257 }
6258
6259 HInstruction* GetDefaultValue() const {
6260 return InputAt(0);
6261 }
6262 HInstruction* GetTarget() const {
6263 return InputAt(1);
6264 }
6265
6266 bool InstructionDataEquals(const HInstruction* other) const override {
6267 const HPredicatedInstanceFieldGet* other_get = other->AsPredicatedInstanceFieldGet();
6268 return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue() &&
6269 GetDefaultValue() == other_get->GetDefaultValue();
6270 }
6271
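  // Note: unlike HInstanceFieldGet, the object actually dereferenced is the
  // target at input 1; input 0 holds the default value.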
6272 bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
6273     return (obj == GetTarget()) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
6274 }
6275
6276 size_t ComputeHashCode() const override {
6277 return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
6278 }
6279
6280 bool IsFieldAccess() const override { return true; }
6281 const FieldInfo& GetFieldInfo() const override { return field_info_; }
6282 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
6283 DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
6284 bool IsVolatile() const { return field_info_.IsVolatile(); }
6285
6286 void SetType(DataType::Type new_type) {
6287 DCHECK(DataType::IsIntegralType(GetType()));
6288 DCHECK(DataType::IsIntegralType(new_type));
6289 DCHECK_EQ(DataType::Size(GetType()), DataType::Size(new_type));
6290 SetPackedField<TypeField>(new_type);
6291 }
6292
6293 DECLARE_INSTRUCTION(PredicatedInstanceFieldGet);
6294
6295 protected:
6296 DEFAULT_COPY_CONSTRUCTOR(PredicatedInstanceFieldGet);
6297
6298 private:
6299 const FieldInfo field_info_;
6300 };
6301
6302 class HInstanceFieldSet final : public HExpression<2> {
6303 public:
6304 HInstanceFieldSet(HInstruction* object,
6305 HInstruction* value,
6306 ArtField* field,
6307 DataType::Type field_type,
6308 MemberOffset field_offset,
6309 bool is_volatile,
6310 uint32_t field_idx,
6311 uint16_t declaring_class_def_index,
6312 const DexFile& dex_file,
6313 uint32_t dex_pc)
6314 : HExpression(kInstanceFieldSet,
6315 SideEffects::FieldWriteOfType(field_type, is_volatile),
6316 dex_pc),
6317 field_info_(field,
6318 field_offset,
6319 field_type,
6320 is_volatile,
6321 field_idx,
6322 declaring_class_def_index,
6323 dex_file) {
6324 SetPackedFlag<kFlagValueCanBeNull>(true);
6325 SetPackedFlag<kFlagIsPredicatedSet>(false);
6326 SetRawInputAt(0, object);
6327 SetRawInputAt(1, value);
6328 }
6329
6330 bool IsClonable() const override { return true; }
6331
6332 bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
6333 return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
6334 }
6335
6336 bool IsFieldAccess() const override { return true; }
6337 const FieldInfo& GetFieldInfo() const override { return field_info_; }
6338 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
6339 DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
6340 bool IsVolatile() const { return field_info_.IsVolatile(); }
6341 HInstruction* GetValue() const { return InputAt(1); }
6342 bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
6343 void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }
6344 bool GetIsPredicatedSet() const { return GetPackedFlag<kFlagIsPredicatedSet>(); }
6345 void SetIsPredicatedSet(bool value = true) { SetPackedFlag<kFlagIsPredicatedSet>(value); }
6346
6347 DECLARE_INSTRUCTION(InstanceFieldSet);
6348
6349 protected:
6350 DEFAULT_COPY_CONSTRUCTOR(InstanceFieldSet);
6351
6352 private:
6353 static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
6354 static constexpr size_t kFlagIsPredicatedSet = kFlagValueCanBeNull + 1;
6355 static constexpr size_t kNumberOfInstanceFieldSetPackedBits = kFlagIsPredicatedSet + 1;
6356 static_assert(kNumberOfInstanceFieldSetPackedBits <= kMaxNumberOfPackedBits,
6357 "Too many packed fields.");
6358
6359 const FieldInfo field_info_;
6360 };
6361
6362 class HArrayGet final : public HExpression<2> {
6363 public:
6364 HArrayGet(HInstruction* array,
6365 HInstruction* index,
6366 DataType::Type type,
6367 uint32_t dex_pc)
6368 : HArrayGet(array,
6369 index,
6370 type,
6371 SideEffects::ArrayReadOfType(type),
6372 dex_pc,
6373 /* is_string_char_at= */ false) {
6374 }
6375
6376 HArrayGet(HInstruction* array,
6377 HInstruction* index,
6378 DataType::Type type,
6379 SideEffects side_effects,
6380 uint32_t dex_pc,
6381 bool is_string_char_at)
6382 : HExpression(kArrayGet, type, side_effects, dex_pc) {
6383 SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
6384 SetRawInputAt(0, array);
6385 SetRawInputAt(1, index);
6386 }
6387
6388 bool IsClonable() const override { return true; }
6389 bool CanBeMoved() const override { return true; }
6390 bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
6391 return true;
6392 }
6393 bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const override {
6394 // TODO: We can be smarter here.
6395 // Currently, unless the array is the result of NewArray, the array access is always
6396     // preceded by some form of null check necessary for the bounds check, usually
6397 // implicit null check on the ArrayLength input to BoundsCheck or Deoptimize for
6398 // dynamic BCE. There are cases when these could be removed to produce better code.
6399 // If we ever add optimizations to do so we should allow an implicit check here
6400 // (as long as the address falls in the first page).
6401 //
6402 // As an example of such fancy optimization, we could eliminate BoundsCheck for
6403 // a = cond ? new int[1] : null;
6404 // a[0]; // The Phi does not need bounds check for either input.
6405 return false;
6406 }
6407
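  // Two HArrayGets are considered equivalent when they were created for the same
  // dex instruction, e.g. an int/float (or long/double) pair of accesses produced
  // for one untyped aget; the debug checks below verify that pairing.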
6408 bool IsEquivalentOf(HArrayGet* other) const {
6409 bool result = (GetDexPc() == other->GetDexPc());
6410 if (kIsDebugBuild && result) {
6411 DCHECK_EQ(GetBlock(), other->GetBlock());
6412 DCHECK_EQ(GetArray(), other->GetArray());
6413 DCHECK_EQ(GetIndex(), other->GetIndex());
6414 if (DataType::IsIntOrLongType(GetType())) {
6415 DCHECK(DataType::IsFloatingPointType(other->GetType())) << other->GetType();
6416 } else {
6417 DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
6418 DCHECK(DataType::IsIntOrLongType(other->GetType())) << other->GetType();
6419 }
6420 }
6421 return result;
6422 }
6423
6424 bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }
6425
6426 HInstruction* GetArray() const { return InputAt(0); }
6427 HInstruction* GetIndex() const { return InputAt(1); }
6428
6429 void SetType(DataType::Type new_type) {
6430 DCHECK(DataType::IsIntegralType(GetType()));
6431 DCHECK(DataType::IsIntegralType(new_type));
6432 DCHECK_EQ(DataType::Size(GetType()), DataType::Size(new_type));
6433 SetPackedField<TypeField>(new_type);
6434 }
6435
6436 DECLARE_INSTRUCTION(ArrayGet);
6437
6438 protected:
6439 DEFAULT_COPY_CONSTRUCTOR(ArrayGet);
6440
6441 private:
6442   // We treat a String as an array, creating the HArrayGet from the String.charAt()
6443   // intrinsic in the instruction simplifier. We could always determine whether
6444   // a particular HArrayGet is actually a String.charAt() by looking at the type
6445   // of the input, but that requires holding the mutator lock, so we prefer to use
6446   // a flag that code generators can check without locking.
6447 static constexpr size_t kFlagIsStringCharAt = kNumberOfGenericPackedBits;
6448 static constexpr size_t kNumberOfArrayGetPackedBits = kFlagIsStringCharAt + 1;
6449 static_assert(kNumberOfArrayGetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6450 "Too many packed fields.");
6451 };
6452
6453 class HArraySet final : public HExpression<3> {
6454 public:
6455 HArraySet(HInstruction* array,
6456 HInstruction* index,
6457 HInstruction* value,
6458 DataType::Type expected_component_type,
6459 uint32_t dex_pc)
6460 : HArraySet(array,
6461 index,
6462 value,
6463 expected_component_type,
6464                 // Make a best guess for side effects now; it may be refined during SSA building.
6465 ComputeSideEffects(GetComponentType(value->GetType(), expected_component_type)),
6466 dex_pc) {
6467 }
6468
6469 HArraySet(HInstruction* array,
6470 HInstruction* index,
6471 HInstruction* value,
6472 DataType::Type expected_component_type,
6473 SideEffects side_effects,
6474 uint32_t dex_pc)
6475 : HExpression(kArraySet, side_effects, dex_pc) {
6476 SetPackedField<ExpectedComponentTypeField>(expected_component_type);
6477 SetPackedFlag<kFlagNeedsTypeCheck>(value->GetType() == DataType::Type::kReference);
6478 SetPackedFlag<kFlagValueCanBeNull>(true);
6479 SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(false);
6480 SetRawInputAt(0, array);
6481 SetRawInputAt(1, index);
6482 SetRawInputAt(2, value);
6483 }
6484
6485 bool IsClonable() const override { return true; }
6486
6487 bool NeedsEnvironment() const override {
6488 // We call a runtime method to throw ArrayStoreException.
6489 return NeedsTypeCheck();
6490 }
6491
6492 // Can throw ArrayStoreException.
6493 bool CanThrow() const override { return NeedsTypeCheck(); }
6494
6495 bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const override {
6496 // TODO: Same as for ArrayGet.
6497 return false;
6498 }
6499
6500 void ClearNeedsTypeCheck() {
6501 SetPackedFlag<kFlagNeedsTypeCheck>(false);
6502 }
6503
6504 void ClearValueCanBeNull() {
6505 SetPackedFlag<kFlagValueCanBeNull>(false);
6506 }
6507
6508 void SetStaticTypeOfArrayIsObjectArray() {
6509 SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(true);
6510 }
6511
6512 bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
6513 bool NeedsTypeCheck() const { return GetPackedFlag<kFlagNeedsTypeCheck>(); }
6514 bool StaticTypeOfArrayIsObjectArray() const {
6515 return GetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>();
6516 }
6517
6518 HInstruction* GetArray() const { return InputAt(0); }
6519 HInstruction* GetIndex() const { return InputAt(1); }
6520 HInstruction* GetValue() const { return InputAt(2); }
6521
6522 DataType::Type GetComponentType() const {
6523 return GetComponentType(GetValue()->GetType(), GetRawExpectedComponentType());
6524 }
6525
6526 static DataType::Type GetComponentType(DataType::Type value_type,
6527 DataType::Type expected_component_type) {
6528     // The Dex format does not type floating point index operations. Since the
6529     // `expected_component_type` comes from SSA building and may therefore be
6530     // incorrect, we also check the value type: if it is a floating point type,
6531     // we must use that type.
6532 return ((value_type == DataType::Type::kFloat32) || (value_type == DataType::Type::kFloat64))
6533 ? value_type
6534 : expected_component_type;
6535 }
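  // Sketch: an aput whose SSA value is kFloat32 but whose expected component type
  // was inferred as kInt32 resolves to kFloat32 here; the typed value wins over
  // the untyped dex encoding.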
6536
6537 DataType::Type GetRawExpectedComponentType() const {
6538 return GetPackedField<ExpectedComponentTypeField>();
6539 }
6540
6541 static SideEffects ComputeSideEffects(DataType::Type type) {
6542 return SideEffects::ArrayWriteOfType(type).Union(SideEffectsForArchRuntimeCalls(type));
6543 }
6544
6545 static SideEffects SideEffectsForArchRuntimeCalls(DataType::Type value_type) {
6546 return (value_type == DataType::Type::kReference) ? SideEffects::CanTriggerGC()
6547 : SideEffects::None();
6548 }
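  // Reference stores may call into the runtime (e.g. to throw ArrayStoreException),
  // which can in turn trigger GC; primitive stores never can.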
6549
6550 DECLARE_INSTRUCTION(ArraySet);
6551
6552 protected:
6553 DEFAULT_COPY_CONSTRUCTOR(ArraySet);
6554
6555 private:
6556 static constexpr size_t kFieldExpectedComponentType = kNumberOfGenericPackedBits;
6557 static constexpr size_t kFieldExpectedComponentTypeSize =
6558 MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
6559 static constexpr size_t kFlagNeedsTypeCheck =
6560 kFieldExpectedComponentType + kFieldExpectedComponentTypeSize;
6561 static constexpr size_t kFlagValueCanBeNull = kFlagNeedsTypeCheck + 1;
6562 // Cached information for the reference_type_info_ so that codegen
6563 // does not need to inspect the static type.
6564 static constexpr size_t kFlagStaticTypeOfArrayIsObjectArray = kFlagValueCanBeNull + 1;
6565 static constexpr size_t kNumberOfArraySetPackedBits =
6566 kFlagStaticTypeOfArrayIsObjectArray + 1;
6567 static_assert(kNumberOfArraySetPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
6568 using ExpectedComponentTypeField =
6569 BitField<DataType::Type, kFieldExpectedComponentType, kFieldExpectedComponentTypeSize>;
6570 };
6571
6572 class HArrayLength final : public HExpression<1> {
6573 public:
6574 HArrayLength(HInstruction* array, uint32_t dex_pc, bool is_string_length = false)
6575 : HExpression(kArrayLength, DataType::Type::kInt32, SideEffects::None(), dex_pc) {
6576 SetPackedFlag<kFlagIsStringLength>(is_string_length);
6577 // Note that arrays do not change length, so the instruction does not
6578 // depend on any write.
6579 SetRawInputAt(0, array);
6580 }
6581
6582 bool IsClonable() const override { return true; }
6583 bool CanBeMoved() const override { return true; }
6584 bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
6585 return true;
6586 }
6587 bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
6588 return obj == InputAt(0);
6589 }
6590
6591 bool IsStringLength() const { return GetPackedFlag<kFlagIsStringLength>(); }
6592
6593 DECLARE_INSTRUCTION(ArrayLength);
6594
6595 protected:
6596 DEFAULT_COPY_CONSTRUCTOR(ArrayLength);
6597
6598 private:
6599   // We treat a String as an array, creating the HArrayLength from the String.length()
6600   // or String.isEmpty() intrinsic in the instruction simplifier. We could always
6601   // determine whether a particular HArrayLength is actually a String.length() by
6602   // looking at the type of the input, but that requires holding the mutator lock,
6603   // so we prefer to use a flag that code generators can check without locking.
6604 static constexpr size_t kFlagIsStringLength = kNumberOfGenericPackedBits;
6605 static constexpr size_t kNumberOfArrayLengthPackedBits = kFlagIsStringLength + 1;
6606 static_assert(kNumberOfArrayLengthPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6607 "Too many packed fields.");
6608 };
6609
6610 class HBoundsCheck final : public HExpression<2> {
6611 public:
6612 // `HBoundsCheck` can trigger GC, as it may call the `IndexOutOfBoundsException`
6613 // constructor. However it can only do it on a fatal slow path so execution never returns to the
6614 // instruction following the current one; thus 'SideEffects::None()' is used.
6615 HBoundsCheck(HInstruction* index,
6616 HInstruction* length,
6617 uint32_t dex_pc,
6618 bool is_string_char_at = false)
6619 : HExpression(kBoundsCheck, index->GetType(), SideEffects::None(), dex_pc) {
6620 DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(index->GetType()));
6621 SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
6622 SetRawInputAt(0, index);
6623 SetRawInputAt(1, length);
6624 }
6625
6626 bool IsClonable() const override { return true; }
6627 bool CanBeMoved() const override { return true; }
6628 bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
6629 return true;
6630 }
6631
6632 bool NeedsEnvironment() const override { return true; }
6633
6634 bool CanThrow() const override { return true; }
6635
6636 bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }
6637
6638 HInstruction* GetIndex() const { return InputAt(0); }
6639
6640 DECLARE_INSTRUCTION(BoundsCheck);
6641
6642 protected:
6643 DEFAULT_COPY_CONSTRUCTOR(BoundsCheck);
6644
6645 private:
6646 static constexpr size_t kFlagIsStringCharAt = kNumberOfGenericPackedBits;
6647 static constexpr size_t kNumberOfBoundsCheckPackedBits = kFlagIsStringCharAt + 1;
6648 static_assert(kNumberOfBoundsCheckPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6649 "Too many packed fields.");
6650 };
6651
6652 class HSuspendCheck final : public HExpression<0> {
6653 public:
6654 explicit HSuspendCheck(uint32_t dex_pc = kNoDexPc)
6655 : HExpression(kSuspendCheck, SideEffects::CanTriggerGC(), dex_pc),
6656 slow_path_(nullptr) {
6657 }
6658
6659 bool IsClonable() const override { return true; }
6660
6661 bool NeedsEnvironment() const override {
6662 return true;
6663 }
6664
6665 void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
6666 SlowPathCode* GetSlowPath() const { return slow_path_; }
6667
6668 DECLARE_INSTRUCTION(SuspendCheck);
6669
6670 protected:
6671 DEFAULT_COPY_CONSTRUCTOR(SuspendCheck);
6672
6673 private:
6674   // Only used for code generation, in order to share the same slow path between
6675   // back edges of the same loop.
6676 SlowPathCode* slow_path_;
6677 };
6678
6679 // Pseudo-instruction which provides the native debugger with mapping information.
6680 // It ensures that we can generate line numbers and local variable information at this point.
6681 class HNativeDebugInfo : public HExpression<0> {
6682 public:
6683 explicit HNativeDebugInfo(uint32_t dex_pc)
6684 : HExpression<0>(kNativeDebugInfo, SideEffects::None(), dex_pc) {
6685 }
6686
6687 bool NeedsEnvironment() const override {
6688 return true;
6689 }
6690
6691 DECLARE_INSTRUCTION(NativeDebugInfo);
6692
6693 protected:
6694 DEFAULT_COPY_CONSTRUCTOR(NativeDebugInfo);
6695 };
6696
6697 /**
6698 * Instruction to load a Class object.
6699 */
6700 class HLoadClass final : public HInstruction {
6701 public:
6702 // Determines how to load the Class.
6703 enum class LoadKind {
6704 // We cannot load this class. See HSharpening::SharpenLoadClass.
6705 kInvalid = -1,
6706
6707 // Use the Class* from the method's own ArtMethod*.
6708 kReferrersClass,
6709
6710 // Use PC-relative boot image Class* address that will be known at link time.
6711 // Used for boot image classes referenced by boot image code.
6712 kBootImageLinkTimePcRelative,
6713
6714 // Load from an entry in the .data.bimg.rel.ro using a PC-relative load.
6715 // Used for boot image classes referenced by apps in AOT-compiled code.
6716 kBootImageRelRo,
6717
6718 // Load from an entry in the .bss section using a PC-relative load.
6719     // Used for classes outside the boot image referenced by AOT-compiled app and boot image code.
6720 kBssEntry,
6721
6722     // Load from an entry for a public class in the .bss section using a PC-relative load.
6723     // Used for classes that were unresolved during AOT-compilation and lie outside the
6724     // literal package of the compiling class. Such classes are accessible only if they are
6725     // public, so the .bss entry shall be filled only if the resolved class is public.
6726 kBssEntryPublic,
6727
6728     // Load from an entry for a package-local class in the .bss section using a
6729     // PC-relative load. Used for classes that were unresolved during AOT-compilation
6730     // but lie within the literal package of the compiling class. Such classes are
6731     // accessible if they are public or in the same package; given the literal package
6732     // match, the latter requires only a matching defining class loader, so the .bss
6733     // entry shall be filled only if at least one of those conditions holds. Note that
6734     // all code in an oat file belongs to classes with the same defining class loader.
6735 kBssEntryPackage,
6736
6737 // Use a known boot image Class* address, embedded in the code by the codegen.
6738 // Used for boot image classes referenced by apps in JIT-compiled code.
6739 kJitBootImageAddress,
6740
6741 // Load from the root table associated with the JIT compiled method.
6742 kJitTableAddress,
6743
6744 // Load using a simple runtime call. This is the fall-back load kind when
6745 // the codegen is unable to use another appropriate kind.
6746 kRuntimeCall,
6747
6748 kLast = kRuntimeCall
6749 };
6750
6751 HLoadClass(HCurrentMethod* current_method,
6752 dex::TypeIndex type_index,
6753 const DexFile& dex_file,
6754 Handle<mirror::Class> klass,
6755 bool is_referrers_class,
6756 uint32_t dex_pc,
6757 bool needs_access_check)
6758 : HInstruction(kLoadClass,
6759 DataType::Type::kReference,
6760 SideEffectsForArchRuntimeCalls(),
6761 dex_pc),
6762 special_input_(HUserRecord<HInstruction*>(current_method)),
6763 type_index_(type_index),
6764 dex_file_(dex_file),
6765 klass_(klass) {
6766     // The referrer's class should not need an access check. We never inline
6767     // unverified methods, so we cannot possibly end up in this situation.
6768 DCHECK(!is_referrers_class || !needs_access_check);
6769
6770 SetPackedField<LoadKindField>(
6771 is_referrers_class ? LoadKind::kReferrersClass : LoadKind::kRuntimeCall);
6772 SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
6773 SetPackedFlag<kFlagIsInBootImage>(false);
6774 SetPackedFlag<kFlagGenerateClInitCheck>(false);
6775 SetPackedFlag<kFlagValidLoadedClassRTI>(false);
6776 }
6777
6778 bool IsClonable() const override { return true; }
6779
6780 void SetLoadKind(LoadKind load_kind);
6781
6782 LoadKind GetLoadKind() const {
6783 return GetPackedField<LoadKindField>();
6784 }
6785
6786 bool HasPcRelativeLoadKind() const {
6787 return GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
6788 GetLoadKind() == LoadKind::kBootImageRelRo ||
6789 GetLoadKind() == LoadKind::kBssEntry ||
6790 GetLoadKind() == LoadKind::kBssEntryPublic ||
6791 GetLoadKind() == LoadKind::kBssEntryPackage;
6792 }
6793
6794 bool CanBeMoved() const override { return true; }
6795
6796 bool InstructionDataEquals(const HInstruction* other) const override;
6797
6798 size_t ComputeHashCode() const override { return type_index_.index_; }
6799
6800 bool CanBeNull() const override { return false; }
6801
6802 bool NeedsEnvironment() const override {
6803 return CanCallRuntime();
6804 }
6805
6806 void SetMustGenerateClinitCheck(bool generate_clinit_check) {
6807 SetPackedFlag<kFlagGenerateClInitCheck>(generate_clinit_check);
6808 }
6809
6810 bool CanCallRuntime() const {
6811 return NeedsAccessCheck() ||
6812 MustGenerateClinitCheck() ||
6813 GetLoadKind() == LoadKind::kRuntimeCall ||
6814 GetLoadKind() == LoadKind::kBssEntry;
6815 }
6816
6817 bool CanThrow() const override {
6818 return NeedsAccessCheck() ||
6819 MustGenerateClinitCheck() ||
6820 // If the class is in the boot image, the lookup in the runtime call cannot throw.
6821 ((GetLoadKind() == LoadKind::kRuntimeCall ||
6822 GetLoadKind() == LoadKind::kBssEntry) &&
6823 !IsInBootImage());
6824 }
6825
6826 ReferenceTypeInfo GetLoadedClassRTI() {
6827 if (GetPackedFlag<kFlagValidLoadedClassRTI>()) {
6828 // Note: The is_exact flag from the return value should not be used.
6829 return ReferenceTypeInfo::CreateUnchecked(klass_, /* is_exact= */ true);
6830 } else {
6831 return ReferenceTypeInfo::CreateInvalid();
6832 }
6833 }
6834
6835 // Loaded class RTI is marked as valid by RTP if the klass_ is admissible.
6836 void SetValidLoadedClassRTI() {
6837 DCHECK(klass_ != nullptr);
6838 SetPackedFlag<kFlagValidLoadedClassRTI>(true);
6839 }
6840
6841 dex::TypeIndex GetTypeIndex() const { return type_index_; }
6842 const DexFile& GetDexFile() const { return dex_file_; }
6843
6844 static SideEffects SideEffectsForArchRuntimeCalls() {
6845 return SideEffects::CanTriggerGC();
6846 }
6847
6848 bool IsReferrersClass() const { return GetLoadKind() == LoadKind::kReferrersClass; }
6849 bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
6850 bool IsInBootImage() const { return GetPackedFlag<kFlagIsInBootImage>(); }
6851 bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); }
6852
6853 bool MustResolveTypeOnSlowPath() const {
6854 // Check that this instruction has a slow path.
6855 LoadKind load_kind = GetLoadKind();
6856     DCHECK(load_kind != LoadKind::kRuntimeCall);  // kRuntimeCall makes its call on the main path.
6857 bool must_resolve_type_on_slow_path =
6858 load_kind == LoadKind::kBssEntry ||
6859 load_kind == LoadKind::kBssEntryPublic ||
6860 load_kind == LoadKind::kBssEntryPackage;
6861 DCHECK(must_resolve_type_on_slow_path || MustGenerateClinitCheck());
6862 return must_resolve_type_on_slow_path;
6863 }
6864
6865 void MarkInBootImage() {
6866 SetPackedFlag<kFlagIsInBootImage>(true);
6867 }
6868
6869 void AddSpecialInput(HInstruction* special_input);
6870
6871 using HInstruction::GetInputRecords; // Keep the const version visible.
6872 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
6873 return ArrayRef<HUserRecord<HInstruction*>>(
6874 &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
6875 }
6876
6877 Handle<mirror::Class> GetClass() const {
6878 return klass_;
6879 }
6880
6881 DECLARE_INSTRUCTION(LoadClass);
6882
6883 protected:
6884 DEFAULT_COPY_CONSTRUCTOR(LoadClass);
6885
6886 private:
6887 static constexpr size_t kFlagNeedsAccessCheck = kNumberOfGenericPackedBits;
6888 static constexpr size_t kFlagIsInBootImage = kFlagNeedsAccessCheck + 1;
6889 // Whether this instruction must generate the initialization check.
6890 // Used for code generation.
6891 static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInBootImage + 1;
6892 static constexpr size_t kFieldLoadKind = kFlagGenerateClInitCheck + 1;
6893 static constexpr size_t kFieldLoadKindSize =
6894 MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
6895 static constexpr size_t kFlagValidLoadedClassRTI = kFieldLoadKind + kFieldLoadKindSize;
6896 static constexpr size_t kNumberOfLoadClassPackedBits = kFlagValidLoadedClassRTI + 1;
6897 static_assert(kNumberOfLoadClassPackedBits < kMaxNumberOfPackedBits, "Too many packed fields.");
6898 using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
6899
6900 static bool HasTypeReference(LoadKind load_kind) {
6901 return load_kind == LoadKind::kReferrersClass ||
6902 load_kind == LoadKind::kBootImageLinkTimePcRelative ||
6903 load_kind == LoadKind::kBssEntry ||
6904 load_kind == LoadKind::kBssEntryPublic ||
6905 load_kind == LoadKind::kBssEntryPackage ||
6906 load_kind == LoadKind::kRuntimeCall;
6907 }
6908
6909 void SetLoadKindInternal(LoadKind load_kind);
6910
6911 // The special input is the HCurrentMethod for kRuntimeCall or kReferrersClass.
6912 // For other load kinds it's empty or possibly some architecture-specific instruction
6913 // for PC-relative loads, i.e. kBssEntry* or kBootImageLinkTimePcRelative.
6914 HUserRecord<HInstruction*> special_input_;
6915
6916 // A type index and dex file where the class can be accessed. The dex file can be:
6917 // - The compiling method's dex file if the class is defined there too.
6918 // - The compiling method's dex file if the class is referenced there.
6919 // - The dex file where the class is defined. When the load kind can only be
6920 // kBssEntry* or kRuntimeCall, we cannot emit code for this `HLoadClass`.
6921 const dex::TypeIndex type_index_;
6922 const DexFile& dex_file_;
6923
6924 Handle<mirror::Class> klass_;
6925 };
6926 std::ostream& operator<<(std::ostream& os, HLoadClass::LoadKind rhs);
6927
6928 // Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
6929 inline void HLoadClass::SetLoadKind(LoadKind load_kind) {
6930   // The load kind should be determined before inserting the instruction into the graph.
6931 DCHECK(GetBlock() == nullptr);
6932 DCHECK(GetEnvironment() == nullptr);
6933 SetPackedField<LoadKindField>(load_kind);
6934 if (load_kind != LoadKind::kRuntimeCall && load_kind != LoadKind::kReferrersClass) {
6935 special_input_ = HUserRecord<HInstruction*>(nullptr);
6936 }
6937 if (!NeedsEnvironment()) {
6938 SetSideEffects(SideEffects::None());
6939 }
6940 }
6941
6942 // Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
6943 inline void HLoadClass::AddSpecialInput(HInstruction* special_input) {
6944 // The special input is used for PC-relative loads on some architectures,
6945 // including literal pool loads, which are PC-relative too.
6946 DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
6947 GetLoadKind() == LoadKind::kBootImageRelRo ||
6948 GetLoadKind() == LoadKind::kBssEntry ||
6949 GetLoadKind() == LoadKind::kBssEntryPublic ||
6950 GetLoadKind() == LoadKind::kBssEntryPackage ||
6951 GetLoadKind() == LoadKind::kJitBootImageAddress) << GetLoadKind();
6952 DCHECK(special_input_.GetInstruction() == nullptr);
6953 special_input_ = HUserRecord<HInstruction*>(special_input);
6954 special_input->AddUseAt(this, 0);
6955 }
6956
6957 class HLoadString final : public HInstruction {
6958 public:
6959 // Determines how to load the String.
6960 enum class LoadKind {
6961 // Use PC-relative boot image String* address that will be known at link time.
6962 // Used for boot image strings referenced by boot image code.
6963 kBootImageLinkTimePcRelative,
6964
6965 // Load from an entry in the .data.bimg.rel.ro using a PC-relative load.
6966 // Used for boot image strings referenced by apps in AOT-compiled code.
6967 kBootImageRelRo,
6968
6969 // Load from an entry in the .bss section using a PC-relative load.
6970     // Used for strings outside the boot image referenced by AOT-compiled app and boot image code.
6971 kBssEntry,
6972
6973 // Use a known boot image String* address, embedded in the code by the codegen.
6974 // Used for boot image strings referenced by apps in JIT-compiled code.
6975 kJitBootImageAddress,
6976
6977 // Load from the root table associated with the JIT compiled method.
6978 kJitTableAddress,
6979
6980 // Load using a simple runtime call. This is the fall-back load kind when
6981 // the codegen is unable to use another appropriate kind.
6982 kRuntimeCall,
6983
6984 kLast = kRuntimeCall,
6985 };
6986
6987 HLoadString(HCurrentMethod* current_method,
6988 dex::StringIndex string_index,
6989 const DexFile& dex_file,
6990 uint32_t dex_pc)
6991 : HInstruction(kLoadString,
6992 DataType::Type::kReference,
6993 SideEffectsForArchRuntimeCalls(),
6994 dex_pc),
6995 special_input_(HUserRecord<HInstruction*>(current_method)),
6996 string_index_(string_index),
6997 dex_file_(dex_file) {
6998 SetPackedField<LoadKindField>(LoadKind::kRuntimeCall);
6999 }
7000
7001 bool IsClonable() const override { return true; }
7002
7003 void SetLoadKind(LoadKind load_kind);
7004
7005 LoadKind GetLoadKind() const {
7006 return GetPackedField<LoadKindField>();
7007 }
7008
7009 bool HasPcRelativeLoadKind() const {
7010 return GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
7011 GetLoadKind() == LoadKind::kBootImageRelRo ||
7012 GetLoadKind() == LoadKind::kBssEntry;
7013 }
7014
7015 const DexFile& GetDexFile() const {
7016 return dex_file_;
7017 }
7018
7019 dex::StringIndex GetStringIndex() const {
7020 return string_index_;
7021 }
7022
7023 Handle<mirror::String> GetString() const {
7024 return string_;
7025 }
7026
7027 void SetString(Handle<mirror::String> str) {
7028 string_ = str;
7029 }
7030
7031 bool CanBeMoved() const override { return true; }
7032
7033 bool InstructionDataEquals(const HInstruction* other) const override;
7034
7035 size_t ComputeHashCode() const override { return string_index_.index_; }
7036
7037 // Will call the runtime if we need to load the string through
7038 // the dex cache and the string is not guaranteed to be there yet.
7039 bool NeedsEnvironment() const override {
7040 LoadKind load_kind = GetLoadKind();
7041 if (load_kind == LoadKind::kBootImageLinkTimePcRelative ||
7042 load_kind == LoadKind::kBootImageRelRo ||
7043 load_kind == LoadKind::kJitBootImageAddress ||
7044 load_kind == LoadKind::kJitTableAddress) {
7045 return false;
7046 }
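    // Only kBssEntry and kRuntimeCall remain; both may call into the runtime.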
7047 return true;
7048 }
7049
7050 bool CanBeNull() const override { return false; }
7051 bool CanThrow() const override { return NeedsEnvironment(); }
7052
7053 static SideEffects SideEffectsForArchRuntimeCalls() {
7054 return SideEffects::CanTriggerGC();
7055 }
7056
7057 void AddSpecialInput(HInstruction* special_input);
7058
7059 using HInstruction::GetInputRecords; // Keep the const version visible.
7060 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
7061 return ArrayRef<HUserRecord<HInstruction*>>(
7062 &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
7063 }
7064
7065 DECLARE_INSTRUCTION(LoadString);
7066
7067 protected:
7068 DEFAULT_COPY_CONSTRUCTOR(LoadString);
7069
7070 private:
7071 static constexpr size_t kFieldLoadKind = kNumberOfGenericPackedBits;
7072 static constexpr size_t kFieldLoadKindSize =
7073 MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
7074 static constexpr size_t kNumberOfLoadStringPackedBits = kFieldLoadKind + kFieldLoadKindSize;
7075 static_assert(kNumberOfLoadStringPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
7076 using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
7077
7078 void SetLoadKindInternal(LoadKind load_kind);
7079
7080 // The special input is the HCurrentMethod for kRuntimeCall.
7081 // For other load kinds it's empty or possibly some architecture-specific instruction
7082 // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
7083 HUserRecord<HInstruction*> special_input_;
7084
7085 dex::StringIndex string_index_;
7086 const DexFile& dex_file_;
7087
7088 Handle<mirror::String> string_;
7089 };
7090 std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs);
7091
7092 // Note: defined outside class to see operator<<(., HLoadString::LoadKind).
7093 inline void HLoadString::SetLoadKind(LoadKind load_kind) {
7094   // The load kind should be determined before inserting the instruction into the graph.
7095 DCHECK(GetBlock() == nullptr);
7096 DCHECK(GetEnvironment() == nullptr);
7097 DCHECK_EQ(GetLoadKind(), LoadKind::kRuntimeCall);
7098 SetPackedField<LoadKindField>(load_kind);
7099 if (load_kind != LoadKind::kRuntimeCall) {
7100 special_input_ = HUserRecord<HInstruction*>(nullptr);
7101 }
7102 if (!NeedsEnvironment()) {
7103 SetSideEffects(SideEffects::None());
7104 }
7105 }
7106
7107 // Note: defined outside class to see operator<<(., HLoadString::LoadKind).
7108 inline void HLoadString::AddSpecialInput(HInstruction* special_input) {
7109 // The special input is used for PC-relative loads on some architectures,
7110 // including literal pool loads, which are PC-relative too.
7111 DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
7112 GetLoadKind() == LoadKind::kBootImageRelRo ||
7113 GetLoadKind() == LoadKind::kBssEntry ||
7114 GetLoadKind() == LoadKind::kJitBootImageAddress) << GetLoadKind();
7115   // HLoadString::GetInputRecords() returns an empty array at this point, so set
7116   // the input record directly through `special_input_` before calling AddUseAt().
7117 DCHECK(special_input_.GetInstruction() == nullptr);
7118 special_input_ = HUserRecord<HInstruction*>(special_input);
7119 special_input->AddUseAt(this, 0);
7120 }
7121
7122 class HLoadMethodHandle final : public HInstruction {
7123 public:
7124 HLoadMethodHandle(HCurrentMethod* current_method,
7125 uint16_t method_handle_idx,
7126 const DexFile& dex_file,
7127 uint32_t dex_pc)
7128 : HInstruction(kLoadMethodHandle,
7129 DataType::Type::kReference,
7130 SideEffectsForArchRuntimeCalls(),
7131 dex_pc),
7132 special_input_(HUserRecord<HInstruction*>(current_method)),
7133 method_handle_idx_(method_handle_idx),
7134 dex_file_(dex_file) {
7135 }
7136
7137 using HInstruction::GetInputRecords; // Keep the const version visible.
7138 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
7139 return ArrayRef<HUserRecord<HInstruction*>>(
7140 &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
7141 }
7142
7143 bool IsClonable() const override { return true; }
7144
7145 uint16_t GetMethodHandleIndex() const { return method_handle_idx_; }
7146
7147 const DexFile& GetDexFile() const { return dex_file_; }
7148
7149 static SideEffects SideEffectsForArchRuntimeCalls() {
7150 return SideEffects::CanTriggerGC();
7151 }
7152
7153 DECLARE_INSTRUCTION(LoadMethodHandle);
7154
7155 protected:
7156 DEFAULT_COPY_CONSTRUCTOR(LoadMethodHandle);
7157
7158 private:
7159 // The special input is the HCurrentMethod for kRuntimeCall.
7160 HUserRecord<HInstruction*> special_input_;
7161
7162 const uint16_t method_handle_idx_;
7163 const DexFile& dex_file_;
7164 };
7165
7166 class HLoadMethodType final : public HInstruction {
7167 public:
7168 HLoadMethodType(HCurrentMethod* current_method,
7169 dex::ProtoIndex proto_index,
7170 const DexFile& dex_file,
7171 uint32_t dex_pc)
7172 : HInstruction(kLoadMethodType,
7173 DataType::Type::kReference,
7174 SideEffectsForArchRuntimeCalls(),
7175 dex_pc),
7176 special_input_(HUserRecord<HInstruction*>(current_method)),
7177 proto_index_(proto_index),
7178 dex_file_(dex_file) {
7179 }
7180
7181 using HInstruction::GetInputRecords; // Keep the const version visible.
7182 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
7183 return ArrayRef<HUserRecord<HInstruction*>>(
7184 &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
7185 }
7186
7187 bool IsClonable() const override { return true; }
7188
7189 dex::ProtoIndex GetProtoIndex() const { return proto_index_; }
7190
7191 const DexFile& GetDexFile() const { return dex_file_; }
7192
7193 static SideEffects SideEffectsForArchRuntimeCalls() {
7194 return SideEffects::CanTriggerGC();
7195 }
7196
7197 DECLARE_INSTRUCTION(LoadMethodType);
7198
7199 protected:
7200 DEFAULT_COPY_CONSTRUCTOR(LoadMethodType);
7201
7202 private:
7203 // The special input is the HCurrentMethod for kRuntimeCall.
7204 HUserRecord<HInstruction*> special_input_;
7205
7206 const dex::ProtoIndex proto_index_;
7207 const DexFile& dex_file_;
7208 };
7209
7210 /**
7211 * Performs an initialization check on its Class object input.
7212 */
7213 class HClinitCheck final : public HExpression<1> {
7214 public:
7215 HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
7216 : HExpression(
7217 kClinitCheck,
7218 DataType::Type::kReference,
7219 SideEffects::AllExceptGCDependency(), // Assume write/read on all fields/arrays.
7220 dex_pc) {
7221 SetRawInputAt(0, constant);
7222 }
7223 // TODO: Make ClinitCheck clonable.
7224 bool CanBeMoved() const override { return true; }
7225 bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
7226 return true;
7227 }
7228
7229 bool NeedsEnvironment() const override {
7230     // May call the runtime to initialize the class.
7231 return true;
7232 }
7233
7234 bool CanThrow() const override { return true; }
7235
7236 HLoadClass* GetLoadClass() const {
7237 DCHECK(InputAt(0)->IsLoadClass());
7238 return InputAt(0)->AsLoadClass();
7239 }
7240
7241 DECLARE_INSTRUCTION(ClinitCheck);
7242
7244  protected:
7245 DEFAULT_COPY_CONSTRUCTOR(ClinitCheck);
7246 };
7247
7248 class HStaticFieldGet final : public HExpression<1> {
7249 public:
7250 HStaticFieldGet(HInstruction* cls,
7251 ArtField* field,
7252 DataType::Type field_type,
7253 MemberOffset field_offset,
7254 bool is_volatile,
7255 uint32_t field_idx,
7256 uint16_t declaring_class_def_index,
7257 const DexFile& dex_file,
7258 uint32_t dex_pc)
7259 : HExpression(kStaticFieldGet,
7260 field_type,
7261 SideEffects::FieldReadOfType(field_type, is_volatile),
7262 dex_pc),
7263 field_info_(field,
7264 field_offset,
7265 field_type,
7266 is_volatile,
7267 field_idx,
7268 declaring_class_def_index,
7269 dex_file) {
7270 SetRawInputAt(0, cls);
7271 }
7272
7274 bool IsClonable() const override { return true; }
7275 bool CanBeMoved() const override { return !IsVolatile(); }
7276
7277 bool InstructionDataEquals(const HInstruction* other) const override {
7278 const HStaticFieldGet* other_get = other->AsStaticFieldGet();
7279 return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
7280 }
7281
7282 size_t ComputeHashCode() const override {
7283 return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
7284 }
7285
7286 bool IsFieldAccess() const override { return true; }
7287 const FieldInfo& GetFieldInfo() const override { return field_info_; }
7288 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
7289 DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
7290 bool IsVolatile() const { return field_info_.IsVolatile(); }
7291
7292 void SetType(DataType::Type new_type) {
7293 DCHECK(DataType::IsIntegralType(GetType()));
7294 DCHECK(DataType::IsIntegralType(new_type));
7295 DCHECK_EQ(DataType::Size(GetType()), DataType::Size(new_type));
7296 SetPackedField<TypeField>(new_type);
7297 }
7298
7299 DECLARE_INSTRUCTION(StaticFieldGet);
7300
7301 protected:
7302 DEFAULT_COPY_CONSTRUCTOR(StaticFieldGet);
7303
7304 private:
7305 const FieldInfo field_info_;
7306 };
7307
7308 class HStaticFieldSet final : public HExpression<2> {
7309 public:
7310 HStaticFieldSet(HInstruction* cls,
7311 HInstruction* value,
7312 ArtField* field,
7313 DataType::Type field_type,
7314 MemberOffset field_offset,
7315 bool is_volatile,
7316 uint32_t field_idx,
7317 uint16_t declaring_class_def_index,
7318 const DexFile& dex_file,
7319 uint32_t dex_pc)
7320 : HExpression(kStaticFieldSet,
7321 SideEffects::FieldWriteOfType(field_type, is_volatile),
7322 dex_pc),
7323 field_info_(field,
7324 field_offset,
7325 field_type,
7326 is_volatile,
7327 field_idx,
7328 declaring_class_def_index,
7329 dex_file) {
7330 SetPackedFlag<kFlagValueCanBeNull>(true);
7331 SetRawInputAt(0, cls);
7332 SetRawInputAt(1, value);
7333 }
7334
7335 bool IsClonable() const override { return true; }
7336 bool IsFieldAccess() const override { return true; }
7337 const FieldInfo& GetFieldInfo() const override { return field_info_; }
7338 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
7339 DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
7340 bool IsVolatile() const { return field_info_.IsVolatile(); }
7341
7342 HInstruction* GetValue() const { return InputAt(1); }
7343 bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
7344 void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }
7345
7346 DECLARE_INSTRUCTION(StaticFieldSet);
7347
7348 protected:
7349 DEFAULT_COPY_CONSTRUCTOR(StaticFieldSet);
7350
7351 private:
7352 static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
7353 static constexpr size_t kNumberOfStaticFieldSetPackedBits = kFlagValueCanBeNull + 1;
7354 static_assert(kNumberOfStaticFieldSetPackedBits <= kMaxNumberOfPackedBits,
7355 "Too many packed fields.");
7356
7357 const FieldInfo field_info_;
7358 };
7359
7360 class HStringBuilderAppend final : public HVariableInputSizeInstruction {
7361 public:
7362 HStringBuilderAppend(HIntConstant* format,
7363 uint32_t number_of_arguments,
7364 ArenaAllocator* allocator,
7365 uint32_t dex_pc)
7366 : HVariableInputSizeInstruction(
7367 kStringBuilderAppend,
7368 DataType::Type::kReference,
7369 // The runtime call may read memory from inputs. It never writes outside
7370 // of the newly allocated result object (or newly allocated helper objects).
7371 SideEffects::AllReads().Union(SideEffects::CanTriggerGC()),
7372 dex_pc,
7373 allocator,
7374 number_of_arguments + /* format */ 1u,
7375 kArenaAllocInvokeInputs) {
7376 DCHECK_GE(number_of_arguments, 1u); // There must be something to append.
7377 SetRawInputAt(FormatIndex(), format);
7378 }
7379
7380 void SetArgumentAt(size_t index, HInstruction* argument) {
7381 DCHECK_LT(index, GetNumberOfArguments());  // Must not overwrite the format input.
7382 SetRawInputAt(index, argument);
7383 }
7384
7385 // Return the number of arguments, excluding the format.
7386 size_t GetNumberOfArguments() const {
7387 DCHECK_GE(InputCount(), 1u);
7388 return InputCount() - 1u;
7389 }
7390
7391 size_t FormatIndex() const {
7392 return GetNumberOfArguments();
7393 }
7394
7395 HIntConstant* GetFormat() {
7396 return InputAt(FormatIndex())->AsIntConstant();
7397 }
7398
7399 bool NeedsEnvironment() const override { return true; }
7400
7401 bool CanThrow() const override { return true; }
7402
7403 bool CanBeNull() const override { return false; }
7404
7405 DECLARE_INSTRUCTION(StringBuilderAppend);
7406
7407 protected:
7408 DEFAULT_COPY_CONSTRUCTOR(StringBuilderAppend);
7409 };
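
// The inputs are laid out as [arg0, ..., argN-1, format]: keeping the format
// constant last lets argument indexes match their positions. Illustrative
// construction (a sketch; the argument names are assumptions):
//
//   append = new (allocator) HStringBuilderAppend(
//       format, /* number_of_arguments= */ 2u, allocator, dex_pc);
//   append->SetArgumentAt(0, first_arg);    // Input 0.
//   append->SetArgumentAt(1, second_arg);   // Input 1; input 2 is `format`.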
7410
7411 class HUnresolvedInstanceFieldGet final : public HExpression<1> {
7412 public:
7413 HUnresolvedInstanceFieldGet(HInstruction* obj,
7414 DataType::Type field_type,
7415 uint32_t field_index,
7416 uint32_t dex_pc)
7417 : HExpression(kUnresolvedInstanceFieldGet,
7418 field_type,
7419 SideEffects::AllExceptGCDependency(),
7420 dex_pc),
7421 field_index_(field_index) {
7422 SetRawInputAt(0, obj);
7423 }
7424
7425 bool IsClonable() const override { return true; }
7426 bool NeedsEnvironment() const override { return true; }
7427 bool CanThrow() const override { return true; }
7428
7429 DataType::Type GetFieldType() const { return GetType(); }
7430 uint32_t GetFieldIndex() const { return field_index_; }
7431
7432 DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);
7433
7434 protected:
7435 DEFAULT_COPY_CONSTRUCTOR(UnresolvedInstanceFieldGet);
7436
7437 private:
7438 const uint32_t field_index_;
7439 };
7440
7441 class HUnresolvedInstanceFieldSet final : public HExpression<2> {
7442 public:
7443 HUnresolvedInstanceFieldSet(HInstruction* obj,
7444 HInstruction* value,
7445 DataType::Type field_type,
7446 uint32_t field_index,
7447 uint32_t dex_pc)
7448 : HExpression(kUnresolvedInstanceFieldSet, SideEffects::AllExceptGCDependency(), dex_pc),
7449 field_index_(field_index) {
7450 SetPackedField<FieldTypeField>(field_type);
7451 DCHECK_EQ(DataType::Kind(field_type), DataType::Kind(value->GetType()));
7452 SetRawInputAt(0, obj);
7453 SetRawInputAt(1, value);
7454 }
7455
7456 bool IsClonable() const override { return true; }
7457 bool NeedsEnvironment() const override { return true; }
7458 bool CanThrow() const override { return true; }
7459
7460 DataType::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
7461 uint32_t GetFieldIndex() const { return field_index_; }
7462
7463 DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);
7464
7465 protected:
7466 DEFAULT_COPY_CONSTRUCTOR(UnresolvedInstanceFieldSet);
7467
7468 private:
7469 static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
7470 static constexpr size_t kFieldFieldTypeSize =
7471 MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
7472 static constexpr size_t kNumberOfUnresolvedInstanceFieldSetPackedBits =
7473 kFieldFieldType + kFieldFieldTypeSize;
7474 static_assert(kNumberOfUnresolvedInstanceFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
7475 "Too many packed fields.");
7476 using FieldTypeField = BitField<DataType::Type, kFieldFieldType, kFieldFieldTypeSize>;
7477
7478 const uint32_t field_index_;
7479 };
7480
7481 class HUnresolvedStaticFieldGet final : public HExpression<0> {
7482 public:
7483 HUnresolvedStaticFieldGet(DataType::Type field_type,
7484 uint32_t field_index,
7485 uint32_t dex_pc)
7486 : HExpression(kUnresolvedStaticFieldGet,
7487 field_type,
7488 SideEffects::AllExceptGCDependency(),
7489 dex_pc),
7490 field_index_(field_index) {
7491 }
7492
7493 bool IsClonable() const override { return true; }
7494 bool NeedsEnvironment() const override { return true; }
7495 bool CanThrow() const override { return true; }
7496
7497 DataType::Type GetFieldType() const { return GetType(); }
7498 uint32_t GetFieldIndex() const { return field_index_; }
7499
7500 DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);
7501
7502 protected:
7503 DEFAULT_COPY_CONSTRUCTOR(UnresolvedStaticFieldGet);
7504
7505 private:
7506 const uint32_t field_index_;
7507 };
7508
7509 class HUnresolvedStaticFieldSet final : public HExpression<1> {
7510 public:
7511 HUnresolvedStaticFieldSet(HInstruction* value,
7512 DataType::Type field_type,
7513 uint32_t field_index,
7514 uint32_t dex_pc)
7515 : HExpression(kUnresolvedStaticFieldSet, SideEffects::AllExceptGCDependency(), dex_pc),
7516 field_index_(field_index) {
7517 SetPackedField<FieldTypeField>(field_type);
7518 DCHECK_EQ(DataType::Kind(field_type), DataType::Kind(value->GetType()));
7519 SetRawInputAt(0, value);
7520 }
7521
7522 bool IsClonable() const override { return true; }
7523 bool NeedsEnvironment() const override { return true; }
7524 bool CanThrow() const override { return true; }
7525
7526 DataType::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
7527 uint32_t GetFieldIndex() const { return field_index_; }
7528
7529 DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);
7530
7531 protected:
7532 DEFAULT_COPY_CONSTRUCTOR(UnresolvedStaticFieldSet);
7533
7534 private:
7535 static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
7536 static constexpr size_t kFieldFieldTypeSize =
7537 MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
7538 static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
7539 kFieldFieldType + kFieldFieldTypeSize;
7540 static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
7541 "Too many packed fields.");
7542 using FieldTypeField = BitField<DataType::Type, kFieldFieldType, kFieldFieldTypeSize>;
7543
7544 const uint32_t field_index_;
7545 };
7546
7547 // Implement the move-exception DEX instruction.
7548 class HLoadException final : public HExpression<0> {
7549 public:
7550 explicit HLoadException(uint32_t dex_pc = kNoDexPc)
7551 : HExpression(kLoadException, DataType::Type::kReference, SideEffects::None(), dex_pc) {
7552 }
7553
7554 bool CanBeNull() const override { return false; }
7555
7556 DECLARE_INSTRUCTION(LoadException);
7557
7558 protected:
7559 DEFAULT_COPY_CONSTRUCTOR(LoadException);
7560 };
7561
7562 // Implicit part of move-exception which clears thread-local exception storage.
7563 // Must not be removed because the runtime expects the TLS to get cleared.
7564 class HClearException final : public HExpression<0> {
7565 public:
7566 explicit HClearException(uint32_t dex_pc = kNoDexPc)
7567 : HExpression(kClearException, SideEffects::AllWrites(), dex_pc) {
7568 }
7569
7570 DECLARE_INSTRUCTION(ClearException);
7571
7572 protected:
7573 DEFAULT_COPY_CONSTRUCTOR(ClearException);
7574 };
7575
7576 class HThrow final : public HExpression<1> {
7577 public:
7578 HThrow(HInstruction* exception, uint32_t dex_pc)
7579 : HExpression(kThrow, SideEffects::CanTriggerGC(), dex_pc) {
7580 SetRawInputAt(0, exception);
7581 }
7582
7583 bool IsControlFlow() const override { return true; }
7584
7585 bool NeedsEnvironment() const override { return true; }
7586
7587 bool CanThrow() const override { return true; }
7588
7589 bool AlwaysThrows() const override { return true; }
7590
7591 DECLARE_INSTRUCTION(Throw);
7592
7593 protected:
7594 DEFAULT_COPY_CONSTRUCTOR(Throw);
7595 };
7596
7597 /**
7598 * Implementation strategies for the code generator of an `HInstanceOf`
7599 * or `HCheckCast`.
7600 */
7601 enum class TypeCheckKind {  // private marker to prevent generate-operator-out.py from processing this enum.
7602 kUnresolvedCheck, // Check against an unresolved type.
7603 kExactCheck, // Can do a single class compare.
7604 kClassHierarchyCheck, // Can just walk the super class chain.
7605 kAbstractClassCheck, // Can just walk the super class chain, starting one up.
7606 kInterfaceCheck, // No optimization yet when checking against an interface.
7607 kArrayObjectCheck, // Can just check if the array is not primitive.
7608 kArrayCheck, // No optimization yet when checking against a generic array.
7609 kBitstringCheck, // Compare the type check bitstring.
7610 kLast = kBitstringCheck
7611 };
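
// Illustrative mapping from Java-level checks to check kinds (a sketch; the
// actual kind is chosen from the resolved class at compile time):
//
//   x instanceof String     -> kExactCheck          // String is final: one compare.
//   x instanceof Number     -> kAbstractClassCheck  // Walk superclass chain, starting one up.
//   x instanceof Runnable   -> kInterfaceCheck
//   x instanceof Object[]   -> kArrayObjectCheck    // Just check it is an array of references.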
7612
7613 std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs);
7614
7615 // Note: HTypeCheckInstruction is just a helper class, not an abstract instruction with an
7616 // `IsTypeCheckInstruction()`. (New virtual methods in the HInstruction class have a high cost.)
7617 class HTypeCheckInstruction : public HVariableInputSizeInstruction {
7618 public:
7619 HTypeCheckInstruction(InstructionKind kind,
7620 DataType::Type type,
7621 HInstruction* object,
7622 HInstruction* target_class_or_null,
7623 TypeCheckKind check_kind,
7624 Handle<mirror::Class> klass,
7625 uint32_t dex_pc,
7626 ArenaAllocator* allocator,
7627 HIntConstant* bitstring_path_to_root,
7628 HIntConstant* bitstring_mask,
7629 SideEffects side_effects)
7630 : HVariableInputSizeInstruction(
7631 kind,
7632 type,
7633 side_effects,
7634 dex_pc,
7635 allocator,
7636 /* number_of_inputs= */ check_kind == TypeCheckKind::kBitstringCheck ? 4u : 2u,
7637 kArenaAllocTypeCheckInputs),
7638 klass_(klass) {
7639 SetPackedField<TypeCheckKindField>(check_kind);
7640 SetPackedFlag<kFlagMustDoNullCheck>(true);
7641 SetPackedFlag<kFlagValidTargetClassRTI>(false);
7642 SetRawInputAt(0, object);
7643 SetRawInputAt(1, target_class_or_null);
7644 DCHECK_EQ(check_kind == TypeCheckKind::kBitstringCheck, bitstring_path_to_root != nullptr);
7645 DCHECK_EQ(check_kind == TypeCheckKind::kBitstringCheck, bitstring_mask != nullptr);
7646 if (check_kind == TypeCheckKind::kBitstringCheck) {
7647 DCHECK(target_class_or_null->IsNullConstant());
7648 SetRawInputAt(2, bitstring_path_to_root);
7649 SetRawInputAt(3, bitstring_mask);
7650 } else {
7651 DCHECK(target_class_or_null->IsLoadClass());
7652 }
7653 }
7654
7655 HLoadClass* GetTargetClass() const {
7656 DCHECK_NE(GetTypeCheckKind(), TypeCheckKind::kBitstringCheck);
7657 HInstruction* load_class = InputAt(1);
7658 DCHECK(load_class->IsLoadClass());
7659 return load_class->AsLoadClass();
7660 }
7661
7662 uint32_t GetBitstringPathToRoot() const {
7663 DCHECK_EQ(GetTypeCheckKind(), TypeCheckKind::kBitstringCheck);
7664 HInstruction* path_to_root = InputAt(2);
7665 DCHECK(path_to_root->IsIntConstant());
7666 return static_cast<uint32_t>(path_to_root->AsIntConstant()->GetValue());
7667 }
7668
7669 uint32_t GetBitstringMask() const {
7670 DCHECK_EQ(GetTypeCheckKind(), TypeCheckKind::kBitstringCheck);
7671 HInstruction* mask = InputAt(3);
7672 DCHECK(mask->IsIntConstant());
7673 return static_cast<uint32_t>(mask->AsIntConstant()->GetValue());
7674 }
7675
7676 bool IsClonable() const override { return true; }
7677 bool CanBeMoved() const override { return true; }
7678
7679 bool InstructionDataEquals(const HInstruction* other) const override {
7680 DCHECK(other->IsInstanceOf() || other->IsCheckCast()) << other->DebugName();
7681 return GetPackedFields() == down_cast<const HTypeCheckInstruction*>(other)->GetPackedFields();
7682 }
7683
7684 bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
7685 void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
7686 TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
7687 bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }
7688
7689 ReferenceTypeInfo GetTargetClassRTI() {
7690 if (GetPackedFlag<kFlagValidTargetClassRTI>()) {
7691 // Note: The is_exact flag from the return value should not be used.
7692 return ReferenceTypeInfo::CreateUnchecked(klass_, /* is_exact= */ true);
7693 } else {
7694 return ReferenceTypeInfo::CreateInvalid();
7695 }
7696 }
7697
7698 // Target class RTI is marked as valid by RTP if the klass_ is admissible.
7699 void SetValidTargetClassRTI() {
7700 DCHECK(klass_ != nullptr);
7701 SetPackedFlag<kFlagValidTargetClassRTI>(true);
7702 }
7703
7704 Handle<mirror::Class> GetClass() const {
7705 return klass_;
7706 }
7707
7708 protected:
7709 DEFAULT_COPY_CONSTRUCTOR(TypeCheckInstruction);
7710
7711 private:
7712 static constexpr size_t kFieldTypeCheckKind = kNumberOfGenericPackedBits;
7713 static constexpr size_t kFieldTypeCheckKindSize =
7714 MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
7715 static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
7716 static constexpr size_t kFlagValidTargetClassRTI = kFlagMustDoNullCheck + 1;
7717 static constexpr size_t kNumberOfInstanceOfPackedBits = kFlagValidTargetClassRTI + 1;
7718 static_assert(kNumberOfInstanceOfPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
7719 using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;
7720
7721 Handle<mirror::Class> klass_;
7722 };
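
// For TypeCheckKind::kBitstringCheck the inputs are laid out as
// [object, null constant, path_to_root, mask] and the generated code is
// conceptually a masked compare (a sketch; where exactly the bitstring lives
// inside mirror::Class is an implementation detail):
//
//   success = (object->klass->type_check_bitstring & mask) == path_to_root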
7723
7724 class HInstanceOf final : public HTypeCheckInstruction {
7725 public:
7726 HInstanceOf(HInstruction* object,
7727 HInstruction* target_class_or_null,
7728 TypeCheckKind check_kind,
7729 Handle<mirror::Class> klass,
7730 uint32_t dex_pc,
7731 ArenaAllocator* allocator,
7732 HIntConstant* bitstring_path_to_root,
7733 HIntConstant* bitstring_mask)
7734 : HTypeCheckInstruction(kInstanceOf,
7735 DataType::Type::kBool,
7736 object,
7737 target_class_or_null,
7738 check_kind,
7739 klass,
7740 dex_pc,
7741 allocator,
7742 bitstring_path_to_root,
7743 bitstring_mask,
7744 SideEffectsForArchRuntimeCalls(check_kind)) {}
7745
7746 bool IsClonable() const override { return true; }
7747
7748 bool NeedsEnvironment() const override {
7749 return CanCallRuntime(GetTypeCheckKind());
7750 }
7751
7752 static bool CanCallRuntime(TypeCheckKind check_kind) {
7753 // TODO: Re-evaluate now that mips codegen has been removed.
7754 return check_kind != TypeCheckKind::kExactCheck;
7755 }
7756
7757 static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
7758 return CanCallRuntime(check_kind) ? SideEffects::CanTriggerGC() : SideEffects::None();
7759 }
7760
7761 DECLARE_INSTRUCTION(InstanceOf);
7762
7763 protected:
7764 DEFAULT_COPY_CONSTRUCTOR(InstanceOf);
7765 };
7766
7767 class HBoundType final : public HExpression<1> {
7768 public:
7769 explicit HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
7770 : HExpression(kBoundType, DataType::Type::kReference, SideEffects::None(), dex_pc),
7771 upper_bound_(ReferenceTypeInfo::CreateInvalid()) {
7772 SetPackedFlag<kFlagUpperCanBeNull>(true);
7773 SetPackedFlag<kFlagCanBeNull>(true);
7774 DCHECK_EQ(input->GetType(), DataType::Type::kReference);
7775 SetRawInputAt(0, input);
7776 }
7777
7778 bool InstructionDataEquals(const HInstruction* other) const override;
7779 bool IsClonable() const override { return true; }
7780
7781 // {Get,Set}Upper* should only be used in reference type propagation.
7782 const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
7783 bool GetUpperCanBeNull() const { return GetPackedFlag<kFlagUpperCanBeNull>(); }
7784 void SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null);
7785
7786 void SetCanBeNull(bool can_be_null) {
7787 DCHECK(GetUpperCanBeNull() || !can_be_null);
7788 SetPackedFlag<kFlagCanBeNull>(can_be_null);
7789 }
7790
7791 bool CanBeNull() const override { return GetPackedFlag<kFlagCanBeNull>(); }
7792
7793 DECLARE_INSTRUCTION(BoundType);
7794
7795 protected:
7796 DEFAULT_COPY_CONSTRUCTOR(BoundType);
7797
7798 private:
7799 // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
7800 // is false then CanBeNull() cannot be true).
7801 static constexpr size_t kFlagUpperCanBeNull = kNumberOfGenericPackedBits;
7802 static constexpr size_t kFlagCanBeNull = kFlagUpperCanBeNull + 1;
7803 static constexpr size_t kNumberOfBoundTypePackedBits = kFlagCanBeNull + 1;
7804 static_assert(kNumberOfBoundTypePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
7805
7806 // Encodes the uppermost class that this instruction can have. In other words,
7807 // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
7808 // It is used to bound the type in cases like:
7809 // if (x instanceof ClassX) {
7810 // // upper_bound_ will be ClassX
7811 // }
7812 ReferenceTypeInfo upper_bound_;
7813 };
7814
7815 class HCheckCast final : public HTypeCheckInstruction {
7816 public:
7817 HCheckCast(HInstruction* object,
7818 HInstruction* target_class_or_null,
7819 TypeCheckKind check_kind,
7820 Handle<mirror::Class> klass,
7821 uint32_t dex_pc,
7822 ArenaAllocator* allocator,
7823 HIntConstant* bitstring_path_to_root,
7824 HIntConstant* bitstring_mask)
7825 : HTypeCheckInstruction(kCheckCast,
7826 DataType::Type::kVoid,
7827 object,
7828 target_class_or_null,
7829 check_kind,
7830 klass,
7831 dex_pc,
7832 allocator,
7833 bitstring_path_to_root,
7834 bitstring_mask,
7835 SideEffects::CanTriggerGC()) {}
7836
7837 bool IsClonable() const override { return true; }
7838 bool NeedsEnvironment() const override {
7839 // Instruction may throw a CheckCastError.
7840 return true;
7841 }
7842
7843 bool CanThrow() const override { return true; }
7844
7845 DECLARE_INSTRUCTION(CheckCast);
7846
7847 protected:
7848 DEFAULT_COPY_CONSTRUCTOR(CheckCast);
7849 };
7850
7851 /**
7852 * @brief Memory barrier types (see "The JSR-133 Cookbook for Compiler Writers").
7853 * @details We define the combined barrier types that are actually required
7854 * by the Java Memory Model, rather than using exactly the terminology from
7855 * the JSR-133 cookbook. These should, in many cases, be replaced by acquire/release
7856 * primitives. Note that the JSR-133 cookbook generally does not deal with
7857 * store atomicity issues, and the recipes there are not always entirely sufficient.
7858 * The current recipe is as follows:
7859 * -# Use AnyStore ~= (LoadStore | StoreStore) ~= release barrier before volatile store.
7860 * -# Use AnyAny barrier after volatile store. (StoreLoad is as expensive.)
7861 * -# Use LoadAny barrier ~= (LoadLoad | LoadStore) ~= acquire barrier after each volatile load.
7862 * -# Use StoreStore barrier after all stores but before return from any constructor whose
7863 * class has final fields.
7864 * -# Use NTStoreStore to order non-temporal stores with respect to all later
7865 * store-to-memory instructions. Only generated together with non-temporal stores.
7866 */
7867 enum MemBarrierKind {
7868 kAnyStore,
7869 kLoadAny,
7870 kStoreStore,
7871 kAnyAny,
7872 kNTStoreStore,
7873 kLastBarrierKind = kNTStoreStore
7874 };
7875 std::ostream& operator<<(std::ostream& os, MemBarrierKind kind);
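
// Illustrative barrier placement for a volatile field `f`, following the recipe
// above (a sketch; codegen may instead use acquire/release instructions):
//
//   kAnyStore;  store obj.f;  kAnyAny;   // Volatile store.
//   load obj.f;  kLoadAny;               // Volatile load.
//   stores...;  kStoreStore;  return;    // Constructor of a class with final fields.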
7876
7877 class HMemoryBarrier final : public HExpression<0> {
7878 public:
7879 explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
7880 : HExpression(kMemoryBarrier,
7881 SideEffects::AllWritesAndReads(), // Assume write/read on all fields/arrays.
7882 dex_pc) {
7883 SetPackedField<BarrierKindField>(barrier_kind);
7884 }
7885
7886 bool IsClonable() const override { return true; }
7887
7888 MemBarrierKind GetBarrierKind() { return GetPackedField<BarrierKindField>(); }
7889
7890 DECLARE_INSTRUCTION(MemoryBarrier);
7891
7892 protected:
7893 DEFAULT_COPY_CONSTRUCTOR(MemoryBarrier);
7894
7895 private:
7896 static constexpr size_t kFieldBarrierKind = HInstruction::kNumberOfGenericPackedBits;
7897 static constexpr size_t kFieldBarrierKindSize =
7898 MinimumBitsToStore(static_cast<size_t>(kLastBarrierKind));
7899 static constexpr size_t kNumberOfMemoryBarrierPackedBits =
7900 kFieldBarrierKind + kFieldBarrierKindSize;
7901 static_assert(kNumberOfMemoryBarrierPackedBits <= kMaxNumberOfPackedBits,
7902 "Too many packed fields.");
7903 using BarrierKindField = BitField<MemBarrierKind, kFieldBarrierKind, kFieldBarrierKindSize>;
7904 };
7905
7906 // A constructor fence orders all prior stores to fields that could be accessed via a final field of
7907 // the specified object(s), with respect to any subsequent store that might "publish"
7908 // (i.e. make visible) the specified object to another thread.
7909 //
7910 // JLS 17.5.1 "Semantics of final fields" states that a freeze action happens
7911 // for all final fields (that were set) at the end of the invoked constructor.
7912 //
7913 // The constructor fence models the freeze actions for the final fields of an object
7914 // being constructed (semantically at the end of the constructor). Constructor fences
7915 // have a per-object affinity; two separate objects being constructed get two separate
7916 // constructor fences.
7917 //
7918 // (Note that if calling a super-constructor or forwarding to another constructor,
7919 // the freezes happen at the end of *that* constructor being invoked.)
7920 //
7921 // The memory model guarantees that when the object being constructed is "published" after
7922 // constructor completion (i.e. escapes the current thread via a store), then any final field
7923 // writes must be observable on other threads (once they observe that publication).
7924 //
7925 // Further, anything written before the freeze, and read by dereferencing through the final field,
7926 // must also be visible (so final object field could itself have an object with non-final fields;
7927 // yet the freeze must also extend to them).
7928 //
7929 // Constructor example:
7930 //
7931 // class HasFinal {
7932 // final int field; Optimizing IR for <init>()V:
7933 // HasFinal() {
7934 // field = 123; HInstanceFieldSet(this, HasFinal.field, 123)
7935 // // freeze(this.field); HConstructorFence(this)
7936 // } HReturn
7937 // }
7938 //
7939 // HConstructorFence can serve double duty as a fence for new-instance/new-array allocations of
7940 // already-initialized classes; in that case the allocation must act as a "default-initializer"
7941 // of the object which effectively writes the class pointer "final field".
7942 //
7943 // For example, we can model default-initialization as roughly the equivalent of the following:
7944 //
7945 // class Object {
7946 // private final Class header;
7947 // }
7948 //
7949 // Java code: Optimizing IR:
7950 //
7951 // T new_instance<T>() {
7952 // Object obj = allocate_memory(T.class.size); obj = HInvoke(art_quick_alloc_object, T)
7953 // obj.header = T.class; // header write is done by above call.
7954 // // freeze(obj.header) HConstructorFence(obj)
7955 // return (T)obj;
7956 // }
7957 //
7958 // See also:
7959 // * DexCompilationUnit::RequiresConstructorBarrier
7960 // * QuasiAtomic::ThreadFenceForConstructor
7961 //
7962 class HConstructorFence final : public HVariableInputSizeInstruction {
7963 // A fence has variable inputs because the inputs can be removed
7964 // after the prepare_for_register_allocation phase.
7965 // (TODO: In the future a fence could freeze multiple objects
7966 // after merging two fences together.)
7967 public:
7968 // `fence_object` is the reference that needs to be protected for correct publication.
7969 //
7970 // It makes sense in the following situations:
7971 // * <init> constructors, it's the "this" parameter (i.e. HParameterValue, s.t. IsThis() == true).
7972 // * new-instance-like instructions, it's the return value (i.e. HNewInstance).
7973 //
7974 // After construction the `fence_object` becomes the 0th input.
7975 // This is not an input in a real sense, but just a convenient place to stash the information
7976 // about the associated object.
7977 HConstructorFence(HInstruction* fence_object,
7978 uint32_t dex_pc,
7979 ArenaAllocator* allocator)
7980 // We strongly suspect there is not a more accurate way to describe the fine-grained reordering
7981 // constraints described in the class header. We claim that these SideEffects constraints
7982 // enforce a superset of the real constraints.
7983 //
7984 // The ordering described above is conservatively modeled with SideEffects as follows:
7985 //
7986 // * To prevent reordering of the publication stores:
7987 // ----> "Reads of objects" is the initial SideEffect.
7988 // * For every primitive final field store in the constructor:
7989 // ----> Union that field's type as a read (e.g. "Read of T") into the SideEffect.
7990 // * If there are any stores to reference final fields in the constructor:
7991 // ----> Use a more conservative "AllReads" SideEffect because any stores to any references
7992 // that are reachable from `fence_object` also need to be prevented for reordering
7993 // (and we do not want to do alias analysis to figure out what those stores are).
7994 //
7995 // In the implementation, this initially starts out as an "all reads" side effect; this is an
7996 // even more conservative approach than the one described above, and prevents all of the
7997 // above reordering without analyzing any of the instructions in the constructor.
7998 //
7999 // If in a later phase we discover that there are no writes to reference final fields,
8000 // we can refine the side effect to a smaller set of type reads (see above constraints).
8001 : HVariableInputSizeInstruction(kConstructorFence,
8002 SideEffects::AllReads(),
8003 dex_pc,
8004 allocator,
8005 /* number_of_inputs= */ 1,
8006 kArenaAllocConstructorFenceInputs) {
8007 DCHECK(fence_object != nullptr);
8008 SetRawInputAt(0, fence_object);
8009 }
8010
8011 // The object associated with this constructor fence.
8012 //
8013 // (Note: This will be null after the prepare_for_register_allocation phase,
8014 // as all constructor fence inputs are removed there).
8015 HInstruction* GetFenceObject() const {
8016 return InputAt(0);
8017 }
8018
8019 // Find all the HConstructorFence uses (`fence_use`) for `this` and:
8020 // - Delete `fence_use` from `this`'s use list.
8021 // - Delete `this` from `fence_use`'s inputs list.
8022 // - If the `fence_use` is dead, remove it from the graph.
8023 //
8024 // A fence is considered dead once it no longer has any uses
8025 // and all of the inputs are dead.
8026 //
8027 // This must *not* be called during/after prepare_for_register_allocation,
8028 // because that removes all the inputs to the fences but the fence is actually
8029 // still considered live.
8030 //
8031 // Returns how many HConstructorFence instructions were removed from graph.
8032 static size_t RemoveConstructorFences(HInstruction* instruction);
8033
8034 // Combine all inputs of `this` and `other` instruction and remove
8035 // `other` from the graph.
8036 //
8037 // Inputs are unique after the merge.
8038 //
8039 // Requirement: `this` must not be the same as `other`.
8040 void Merge(HConstructorFence* other);
8041
8042 // Check if this constructor fence is protecting
8043 // an HNewInstance or HNewArray that is also the immediate
8044 // predecessor of `this`.
8045 //
8046 // If `ignore_inputs` is true, then the immediate predecessor doesn't need
8047 // to be one of the inputs of `this`.
8048 //
8049 // Returns the associated HNewArray or HNewInstance,
8050 // or null otherwise.
8051 HInstruction* GetAssociatedAllocation(bool ignore_inputs = false);
8052
8053 DECLARE_INSTRUCTION(ConstructorFence);
8054
8055 protected:
8056 DEFAULT_COPY_CONSTRUCTOR(ConstructorFence);
8057 };
8058
8059 class HMonitorOperation final : public HExpression<1> {
8060 public:
8061 enum class OperationKind {
8062 kEnter,
8063 kExit,
8064 kLast = kExit
8065 };
8066
8067 HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
8068 : HExpression(kMonitorOperation,
8069 SideEffects::AllExceptGCDependency(), // Assume write/read on all fields/arrays.
8070 dex_pc) {
8071 SetPackedField<OperationKindField>(kind);
8072 SetRawInputAt(0, object);
8073 }
8074
8075 // Instruction may go into runtime, so we need an environment.
8076 bool NeedsEnvironment() const override { return true; }
8077
8078 bool CanThrow() const override {
8079 // Verifier guarantees that monitor-exit cannot throw.
8080 // This is important because it allows the HGraphBuilder to remove
8081 // a dead throw-catch loop generated for `synchronized` blocks/methods.
8082 return IsEnter();
8083 }
8084
8085 OperationKind GetOperationKind() const { return GetPackedField<OperationKindField>(); }
8086 bool IsEnter() const { return GetOperationKind() == OperationKind::kEnter; }
8087
8088 DECLARE_INSTRUCTION(MonitorOperation);
8089
8090 protected:
8091 DEFAULT_COPY_CONSTRUCTOR(MonitorOperation);
8092
8093 private:
8094 static constexpr size_t kFieldOperationKind = HInstruction::kNumberOfGenericPackedBits;
8095 static constexpr size_t kFieldOperationKindSize =
8096 MinimumBitsToStore(static_cast<size_t>(OperationKind::kLast));
8097 static constexpr size_t kNumberOfMonitorOperationPackedBits =
8098 kFieldOperationKind + kFieldOperationKindSize;
8099 static_assert(kNumberOfMonitorOperationPackedBits <= HInstruction::kMaxNumberOfPackedBits,
8100 "Too many packed fields.");
8101 using OperationKindField = BitField<OperationKind, kFieldOperationKind, kFieldOperationKindSize>;
8102 };
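
// Illustrative lowering of a synchronized block (a sketch):
//
//   synchronized (obj) {     HMonitorOperation(obj, OperationKind::kEnter)
//     ...                    ...
//   }                        HMonitorOperation(obj, OperationKind::kExit)
//
// Only the enter operation can throw (e.g. a NullPointerException on a null
// monitor object); see CanThrow() above.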
8103
8104 class HSelect final : public HExpression<3> {
8105 public:
8106 HSelect(HInstruction* condition,
8107 HInstruction* true_value,
8108 HInstruction* false_value,
8109 uint32_t dex_pc)
8110 : HExpression(kSelect, HPhi::ToPhiType(true_value->GetType()), SideEffects::None(), dex_pc) {
8111 DCHECK_EQ(HPhi::ToPhiType(true_value->GetType()), HPhi::ToPhiType(false_value->GetType()));
8112
8113 // First input must be `true_value` or `false_value` to allow codegens to
8114 // use the SameAsFirstInput allocation policy. We make it `false_value`, so
8115 // that architectures which implement HSelect as a conditional move also
8116 // will not need to invert the condition.
8117 SetRawInputAt(0, false_value);
8118 SetRawInputAt(1, true_value);
8119 SetRawInputAt(2, condition);
8120 }
8121
8122 bool IsClonable() const override { return true; }
8123 HInstruction* GetFalseValue() const { return InputAt(0); }
8124 HInstruction* GetTrueValue() const { return InputAt(1); }
8125 HInstruction* GetCondition() const { return InputAt(2); }
8126
8127 bool CanBeMoved() const override { return true; }
8128 bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
8129 return true;
8130 }
8131
8132 bool CanBeNull() const override {
8133 return GetTrueValue()->CanBeNull() || GetFalseValue()->CanBeNull();
8134 }
8135
8136 DECLARE_INSTRUCTION(Select);
8137
8138 protected:
8139 DEFAULT_COPY_CONSTRUCTOR(Select);
8140 };
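
// Illustrative lowering of a ternary expression (a sketch):
//
//   x = cond ? a : b;   =>   HSelect(/* condition= */ cond,
//                                    /* true_value= */ a,
//                                    /* false_value= */ b,
//                                    dex_pc)
//
// Note that the stored input order is [b, a, cond], per the comment in the
// constructor above.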
8141
8142 class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> {
8143 public:
8144 MoveOperands(Location source,
8145 Location destination,
8146 DataType::Type type,
8147 HInstruction* instruction)
8148 : source_(source), destination_(destination), type_(type), instruction_(instruction) {}
8149
8150 Location GetSource() const { return source_; }
8151 Location GetDestination() const { return destination_; }
8152
8153 void SetSource(Location value) { source_ = value; }
8154 void SetDestination(Location value) { destination_ = value; }
8155
8156 // The parallel move resolver marks moves as "in-progress" by clearing the
8157 // destination (but not the source).
8158 Location MarkPending() {
8159 DCHECK(!IsPending());
8160 Location dest = destination_;
8161 destination_ = Location::NoLocation();
8162 return dest;
8163 }
8164
8165 void ClearPending(Location dest) {
8166 DCHECK(IsPending());
8167 destination_ = dest;
8168 }
8169
8170 bool IsPending() const {
8171 DCHECK(source_.IsValid() || destination_.IsInvalid());
8172 return destination_.IsInvalid() && source_.IsValid();
8173 }
8174
8175 // True if this blocks a move from the given location.
8176 bool Blocks(Location loc) const {
8177 return !IsEliminated() && source_.OverlapsWith(loc);
8178 }
8179
8180 // A move is redundant if it's been eliminated, if its source and
8181 // destination are the same, or if its destination is unneeded.
8182 bool IsRedundant() const {
8183 return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
8184 }
8185
8186 // We clear both operands to indicate move that's been eliminated.
8187 void Eliminate() {
8188 source_ = destination_ = Location::NoLocation();
8189 }
8190
8191 bool IsEliminated() const {
8192 DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
8193 return source_.IsInvalid();
8194 }
8195
8196 DataType::Type GetType() const { return type_; }
8197
8198 bool Is64BitMove() const {
8199 return DataType::Is64BitType(type_);
8200 }
8201
8202 HInstruction* GetInstruction() const { return instruction_; }
8203
8204 private:
8205 Location source_;
8206 Location destination_;
8207 // The type this move is for.
8208 DataType::Type type_;
8209 // The instruction this move is associated with. Null when this move is
8210 // for moving an input into the expected location of its user (including a phi user).
8211 // This is only used in debug mode, to ensure we do not connect interval siblings
8212 // in the same parallel move.
8213 HInstruction* instruction_;
8214 };
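
// The resolver encodes the state of a move in the validity of its locations
// (see MarkPending() and Eliminate() above). Summarized:
//
//   source valid,   destination valid    ->  unresolved move.
//   source valid,   destination invalid  ->  pending (in progress).
//   source invalid, destination invalid  ->  eliminated.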
8215
8216 std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs);
8217
8218 static constexpr size_t kDefaultNumberOfMoves = 4;
8219
8220 class HParallelMove final : public HExpression<0> {
8221 public:
8222 explicit HParallelMove(ArenaAllocator* allocator, uint32_t dex_pc = kNoDexPc)
8223 : HExpression(kParallelMove, SideEffects::None(), dex_pc),
8224 moves_(allocator->Adapter(kArenaAllocMoveOperands)) {
8225 moves_.reserve(kDefaultNumberOfMoves);
8226 }
8227
8228 void AddMove(Location source,
8229 Location destination,
8230 DataType::Type type,
8231 HInstruction* instruction) {
8232 DCHECK(source.IsValid());
8233 DCHECK(destination.IsValid());
8234 if (kIsDebugBuild) {
8235 if (instruction != nullptr) {
8236 for (const MoveOperands& move : moves_) {
8237 if (move.GetInstruction() == instruction) {
8238 // Special case the situation where the move is for the spill slot
8239 // of the instruction.
8240 if ((GetPrevious() == instruction)
8241 || ((GetPrevious() == nullptr)
8242 && instruction->IsPhi()
8243 && instruction->GetBlock() == GetBlock())) {
8244 DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
8245 << "Doing parallel moves for the same instruction.";
8246 } else {
8247 DCHECK(false) << "Doing parallel moves for the same instruction.";
8248 }
8249 }
8250 }
8251 }
8252 for (const MoveOperands& move : moves_) {
8253 DCHECK(!destination.OverlapsWith(move.GetDestination()))
8254 << "Overlapped destination for two moves in a parallel move: "
8255 << move.GetSource() << " ==> " << move.GetDestination() << " and "
8256 << source << " ==> " << destination << " for " << SafePrint(instruction);
8257 }
8258 }
8259 moves_.emplace_back(source, destination, type, instruction);
8260 }
8261
8262 MoveOperands* MoveOperandsAt(size_t index) {
8263 return &moves_[index];
8264 }
8265
8266 size_t NumMoves() const { return moves_.size(); }
8267
8268 DECLARE_INSTRUCTION(ParallelMove);
8269
8270 protected:
8271 DEFAULT_COPY_CONSTRUCTOR(ParallelMove);
8272
8273 private:
8274 ArenaVector<MoveOperands> moves_;
8275 };
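
// Illustrative construction (a sketch; the chosen locations are assumptions):
//
//   HParallelMove* moves = new (allocator) HParallelMove(allocator);
//   moves->AddMove(Location::RegisterLocation(0),  // Source.
//                  Location::StackSlot(16),        // Destination.
//                  DataType::Type::kInt32,
//                  /* instruction= */ nullptr);
//
// The parallel move resolver then orders (and, on cycles, breaks) the moves so
// that no destination is clobbered before it has been read as a source.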
8276
8277 // This instruction computes an intermediate address pointing in the 'middle' of an object. The
8278 // result pointer cannot be handled by GC, so extra care is taken to make sure that this value is
8279 // never used across anything that can trigger GC.
8280 // The result of this instruction is not a pointer in the sense of `DataType::Type::kReference`.
8281 // So we represent it by the type `DataType::Type::kInt32`.
8282 class HIntermediateAddress final : public HExpression<2> {
8283 public:
8284 HIntermediateAddress(HInstruction* base_address, HInstruction* offset, uint32_t dex_pc)
8285 : HExpression(kIntermediateAddress,
8286 DataType::Type::kInt32,
8287 SideEffects::DependsOnGC(),
8288 dex_pc) {
8289 DCHECK_EQ(DataType::Size(DataType::Type::kInt32),
8290 DataType::Size(DataType::Type::kReference))
8291 << "kPrimInt and kPrimNot have different sizes.";
8292 SetRawInputAt(0, base_address);
8293 SetRawInputAt(1, offset);
8294 }
8295
8296 bool IsClonable() const override { return true; }
8297 bool CanBeMoved() const override { return true; }
8298 bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
8299 return true;
8300 }
8301 bool IsActualObject() const override { return false; }
8302
8303 HInstruction* GetBaseAddress() const { return InputAt(0); }
8304 HInstruction* GetOffset() const { return InputAt(1); }
8305
8306 DECLARE_INSTRUCTION(IntermediateAddress);
8307
8308 protected:
8309 DEFAULT_COPY_CONSTRUCTOR(IntermediateAddress);
8310 };
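
// Illustrative use for an array access (a sketch): the address of the array's
// data portion is computed once and reused, provided no GC-triggering
// instruction can move the array in between:
//
//   addr = HIntermediateAddress(array, data_offset)   // array + data_offset.
//   x    = HArrayGet(addr, index, ...)                // Reads addr + (index << shift).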
8311
8312
8313 } // namespace art
8314
8315 #include "nodes_vector.h"
8316
8317 #if defined(ART_ENABLE_CODEGEN_arm) || defined(ART_ENABLE_CODEGEN_arm64)
8318 #include "nodes_shared.h"
8319 #endif
8320 #if defined(ART_ENABLE_CODEGEN_x86) || defined(ART_ENABLE_CODEGEN_x86_64)
8321 #include "nodes_x86.h"
8322 #endif
8323
8324 namespace art {
8325
8326 class OptimizingCompilerStats;
8327
8328 class HGraphVisitor : public ValueObject {
8329 public:
8330 explicit HGraphVisitor(HGraph* graph, OptimizingCompilerStats* stats = nullptr)
8331 : stats_(stats),
8332 graph_(graph) {}
8333 virtual ~HGraphVisitor() {}
8334
8335 virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
8336 virtual void VisitBasicBlock(HBasicBlock* block);
8337
8338 // Visit the graph following basic block insertion order.
8339 void VisitInsertionOrder();
8340
8341 // Visit the graph following dominator tree reverse post-order.
8342 void VisitReversePostOrder();
8343
8344 HGraph* GetGraph() const { return graph_; }
8345
8346 // Visit functions for instruction classes.
8347 #define DECLARE_VISIT_INSTRUCTION(name, super) \
8348 virtual void Visit##name(H##name* instr) { VisitInstruction(instr); }
8349
8350 FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)
8351
8352 #undef DECLARE_VISIT_INSTRUCTION
8353
8354 protected:
8355 OptimizingCompilerStats* stats_;
8356
8357 private:
8358 HGraph* const graph_;
8359
8360 DISALLOW_COPY_AND_ASSIGN(HGraphVisitor);
8361 };
8362
8363 class HGraphDelegateVisitor : public HGraphVisitor {
8364 public:
8365 explicit HGraphDelegateVisitor(HGraph* graph, OptimizingCompilerStats* stats = nullptr)
8366 : HGraphVisitor(graph, stats) {}
8367 virtual ~HGraphDelegateVisitor() {}
8368
8369 // Visit functions that delegate to the super class.
8370 #define DECLARE_VISIT_INSTRUCTION(name, super) \
8371 void Visit##name(H##name* instr) override { Visit##super(instr); }
8372
8373 FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)
8374
8375 #undef DECLARE_VISIT_INSTRUCTION
8376
8377 private:
8378 DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor);
8379 };
8380
8381 // Create a clone of the instruction, insert it into the graph, replace the old
8382 // instruction with the clone, and remove the old instruction from the graph.
8383 HInstruction* ReplaceInstrOrPhiByClone(HInstruction* instr);
8384
8385 // Create a clone for each clonable instruction/phi and replace the original with its clone.
8386 //
8387 // Used for testing the individual instruction cloner.
8388 class CloneAndReplaceInstructionVisitor : public HGraphDelegateVisitor {
8389 public:
8390 explicit CloneAndReplaceInstructionVisitor(HGraph* graph)
8391 : HGraphDelegateVisitor(graph), instr_replaced_by_clones_count_(0) {}
8392
8393 void VisitInstruction(HInstruction* instruction) override {
8394 if (instruction->IsClonable()) {
8395 ReplaceInstrOrPhiByClone(instruction);
8396 instr_replaced_by_clones_count_++;
8397 }
8398 }
8399
8400 size_t GetInstrReplacedByClonesCount() const { return instr_replaced_by_clones_count_; }
8401
8402 private:
8403 size_t instr_replaced_by_clones_count_;
8404
8405 DISALLOW_COPY_AND_ASSIGN(CloneAndReplaceInstructionVisitor);
8406 };
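
// Typical use in tests (a sketch, assuming a gtest context):
//
//   CloneAndReplaceInstructionVisitor visitor(graph);
//   visitor.VisitReversePostOrder();
//   EXPECT_GT(visitor.GetInstrReplacedByClonesCount(), 0u);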
8407
8408 // Iterator over the blocks that are part of the loop. Includes blocks that are
8409 // part of an inner loop. The blocks are iterated in increasing order of their
8410 // block id.
8411 class HBlocksInLoopIterator : public ValueObject {
8412 public:
8413 explicit HBlocksInLoopIterator(const HLoopInformation& info)
8414 : blocks_in_loop_(info.GetBlocks()),
8415 blocks_(info.GetHeader()->GetGraph()->GetBlocks()),
8416 index_(0) {
8417 if (!blocks_in_loop_.IsBitSet(index_)) {
8418 Advance();
8419 }
8420 }
8421
8422 bool Done() const { return index_ == blocks_.size(); }
8423 HBasicBlock* Current() const { return blocks_[index_]; }
8424 void Advance() {
8425 ++index_;
8426 for (size_t e = blocks_.size(); index_ < e; ++index_) {
8427 if (blocks_in_loop_.IsBitSet(index_)) {
8428 break;
8429 }
8430 }
8431 }
8432
8433 private:
8434 const BitVector& blocks_in_loop_;
8435 const ArenaVector<HBasicBlock*>& blocks_;
8436 size_t index_;
8437
8438 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator);
8439 };
8440
8441 // Iterator over the blocks that are part of the loop. Includes blocks that are
8442 // part of an inner loop. The blocks are iterated in reverse post order.
8444 class HBlocksInLoopReversePostOrderIterator : public ValueObject {
8445 public:
8446 explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
8447 : blocks_in_loop_(info.GetBlocks()),
8448 blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
8449 index_(0) {
8450 if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
8451 Advance();
8452 }
8453 }
8454
8455 bool Done() const { return index_ == blocks_.size(); }
8456 HBasicBlock* Current() const { return blocks_[index_]; }
8457 void Advance() {
8458 ++index_;
8459 for (size_t e = blocks_.size(); index_ < e; ++index_) {
8460 if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
8461 break;
8462 }
8463 }
8464 }
8465
8466 private:
8467 const BitVector& blocks_in_loop_;
8468 const ArenaVector<HBasicBlock*>& blocks_;
8469 size_t index_;
8470
8471 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
8472 };
8473
8474 // Returns int64_t value of a properly typed constant.
8475 inline int64_t Int64FromConstant(HConstant* constant) {
8476 if (constant->IsIntConstant()) {
8477 return constant->AsIntConstant()->GetValue();
8478 } else if (constant->IsLongConstant()) {
8479 return constant->AsLongConstant()->GetValue();
8480 } else {
8481 DCHECK(constant->IsNullConstant()) << constant->DebugName();
8482 return 0;
8483 }
8484 }
8485
8486 // Returns true iff instruction is an integral constant (and sets value on success).
8487 inline bool IsInt64AndGet(HInstruction* instruction, /*out*/ int64_t* value) {
8488 if (instruction->IsIntConstant()) {
8489 *value = instruction->AsIntConstant()->GetValue();
8490 return true;
8491 } else if (instruction->IsLongConstant()) {
8492 *value = instruction->AsLongConstant()->GetValue();
8493 return true;
8494 } else if (instruction->IsNullConstant()) {
8495 *value = 0;
8496 return true;
8497 }
8498 return false;
8499 }
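
// Typical use of the out-parameter (a sketch):
//
//   int64_t value;
//   if (IsInt64AndGet(instruction, &value)) {
//     // `value` now holds the constant, whether it was an int, long or null constant.
//   }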
8500
8501 // Returns true iff instruction is the given integral constant.
8502 inline bool IsInt64Value(HInstruction* instruction, int64_t value) {
8503 int64_t val = 0;
8504 return IsInt64AndGet(instruction, &val) && val == value;
8505 }
8506
8507 // Returns true iff instruction is a zero bit pattern.
8508 inline bool IsZeroBitPattern(HInstruction* instruction) {
8509 return instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern();
8510 }
8511
8512 // Implement HInstruction::Is##type() for concrete instructions.
8513 #define INSTRUCTION_TYPE_CHECK(type, super) \
8514 inline bool HInstruction::Is##type() const { return GetKind() == k##type; }
8515 FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
8516 #undef INSTRUCTION_TYPE_CHECK
8517
8518 // Implement HInstruction::Is##type() for abstract instructions.
8519 #define INSTRUCTION_TYPE_CHECK_RESULT(type, super) \
8520 std::is_base_of<BaseType, H##type>::value,
8521 #define INSTRUCTION_TYPE_CHECK(type, super) \
8522 inline bool HInstruction::Is##type() const { \
8523 DCHECK_LT(GetKind(), kLastInstructionKind); \
8524 using BaseType = H##type; \
8525 static constexpr bool results[] = { \
8526 FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK_RESULT) \
8527 }; \
8528 return results[static_cast<size_t>(GetKind())]; \
8529 }
8530
8531 FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
8532 #undef INSTRUCTION_TYPE_CHECK
8533 #undef INSTRUCTION_TYPE_CHECK_RESULT
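
// For an abstract type the check expands to a constexpr table indexed by the
// instruction kind. Sketch of the expansion for a hypothetical abstract HFoo:
//
//   inline bool HInstruction::IsFoo() const {
//     static constexpr bool results[] = { /* std::is_base_of<HFoo, HConcrete>::value per kind */ };
//     return results[static_cast<size_t>(GetKind())];
//   }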
8534
8535 #define INSTRUCTION_TYPE_CAST(type, super) \
8536 inline const H##type* HInstruction::As##type() const { \
8537 return Is##type() ? down_cast<const H##type*>(this) : nullptr; \
8538 } \
8539 inline H##type* HInstruction::As##type() { \
8540 return Is##type() ? static_cast<H##type*>(this) : nullptr; \
8541 }
8542
8543 FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CAST)
8544 #undef INSTRUCTION_TYPE_CAST
8545
8546
8547 // Create space in `blocks` for adding `number_of_new_blocks` entries
8548 // right after the entry at index `after`. Blocks following `after` are moved accordingly.
8549 inline void MakeRoomFor(ArenaVector<HBasicBlock*>* blocks,
8550 size_t number_of_new_blocks,
8551 size_t after) {
8552 DCHECK_LT(after, blocks->size());
8553 size_t old_size = blocks->size();
8554 size_t new_size = old_size + number_of_new_blocks;
8555 blocks->resize(new_size);
8556 std::copy_backward(blocks->begin() + after + 1u, blocks->begin() + old_size, blocks->end());
8557 }
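
// Illustrative effect (a sketch): with blocks = [A, B, C], after = 0u and
// number_of_new_blocks = 2u the vector becomes [A, *, *, B, C], where the two
// `*` slots still hold stale pointers and must be overwritten by the caller.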
8558
8559 /*
8560 * Hunt "under the hood" of array lengths (leading to array references),
8561 * null checks (also leading to array references), and new arrays
8562 * (leading to the actual length). This makes it more likely related
8563 * instructions become actually comparable.
8564 */
8565 inline HInstruction* HuntForDeclaration(HInstruction* instruction) {
8566 while (instruction->IsArrayLength() ||
8567 instruction->IsNullCheck() ||
8568 instruction->IsNewArray()) {
8569 instruction = instruction->IsNewArray()
8570 ? instruction->AsNewArray()->GetLength()
8571 : instruction->InputAt(0);
8572 }
8573 return instruction;
8574 }
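
// Illustrative chains (a sketch):
//
//   HArrayLength(HNullCheck(a))  ->  a    // Through the length and the null check.
//   HArrayLength(HNewArray(n))   ->  n    // A new array's length is its `n` input.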
8575
8576 inline bool IsAddOrSub(const HInstruction* instruction) {
8577 return instruction->IsAdd() || instruction->IsSub();
8578 }
8579
8580 void RemoveEnvironmentUses(HInstruction* instruction);
8581 bool HasEnvironmentUsedByOthers(HInstruction* instruction);
8582 void ResetEnvironmentInputRecords(HInstruction* instruction);
8583
8584 // Detects an instruction that is >= 0. As long as the value is carried by
8585 // a single instruction, arithmetic wrap-around cannot occur.
8586 bool IsGEZero(HInstruction* instruction);
8587
8588 } // namespace art
8589
8590 #endif // ART_COMPILER_OPTIMIZING_NODES_H_
8591