1 /*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #ifndef ART_COMPILER_OPTIMIZING_NODES_H_
18 #define ART_COMPILER_OPTIMIZING_NODES_H_
19
20 #include <algorithm>
21 #include <array>
22 #include <type_traits>
23
24 #include "art_method.h"
25 #include "base/arena_allocator.h"
26 #include "base/arena_bit_vector.h"
27 #include "base/arena_containers.h"
28 #include "base/arena_object.h"
29 #include "base/array_ref.h"
30 #include "base/intrusive_forward_list.h"
31 #include "base/iteration_range.h"
32 #include "base/macros.h"
33 #include "base/mutex.h"
34 #include "base/quasi_atomic.h"
35 #include "base/stl_util.h"
36 #include "base/transform_array_ref.h"
37 #include "block_namer.h"
38 #include "class_root.h"
39 #include "compilation_kind.h"
40 #include "data_type.h"
41 #include "deoptimization_kind.h"
42 #include "dex/dex_file.h"
43 #include "dex/dex_file_types.h"
44 #include "dex/invoke_type.h"
45 #include "dex/method_reference.h"
46 #include "entrypoints/quick/quick_entrypoints_enum.h"
47 #include "handle.h"
48 #include "handle_scope.h"
49 #include "intrinsics_enum.h"
50 #include "locations.h"
51 #include "mirror/class.h"
52 #include "mirror/method_type.h"
53 #include "offsets.h"
54
55 namespace art HIDDEN {
56
57 class ArenaStack;
58 class CodeGenerator;
59 class GraphChecker;
60 class HBasicBlock;
61 class HConstructorFence;
62 class HCurrentMethod;
63 class HDoubleConstant;
64 class HEnvironment;
65 class HFloatConstant;
66 class HGraphBuilder;
67 class HGraphVisitor;
68 class HInstruction;
69 class HIntConstant;
70 class HInvoke;
71 class HLongConstant;
72 class HNullConstant;
73 class HParameterValue;
74 class HPhi;
75 class HSuspendCheck;
76 class HTryBoundary;
77 class FieldInfo;
78 class LiveInterval;
79 class LocationSummary;
80 class ProfilingInfo;
81 class SlowPathCode;
82 class SsaBuilder;
83
84 namespace mirror {
85 class DexCache;
86 } // namespace mirror
87
// Default initial capacities for the per-graph and per-block containers.
static const int kDefaultNumberOfBlocks = 8;
static const int kDefaultNumberOfSuccessors = 2;
static const int kDefaultNumberOfPredecessors = 2;
static const int kDefaultNumberOfExceptionalPredecessors = 0;
static const int kDefaultNumberOfDominatedBlocks = 1;
static const int kDefaultNumberOfBackEdges = 1;

// The maximum (meaningful) distance (31) that can be used in an integer shift/rotate operation.
static constexpr int32_t kMaxIntShiftDistance = 0x1f;
// The maximum (meaningful) distance (63) that can be used in a long shift/rotate operation.
static constexpr int32_t kMaxLongShiftDistance = 0x3f;

// Sentinel values meaning "field/class-def index not known".
static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);

// Sentinel invoke type for graphs compiled without a known invoke kind.
static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);

// Sentinel dex pc for instructions with no associated dex pc.
static constexpr uint32_t kNoDexPc = -1;
106
// Tells whether the compiler may treat `lhs` and `rhs` as the same dex file.
// Pointer identity is deliberately the only criterion: under JIT, custom class
// loaders can open the same underlying file (or memory) multiple times with
// different class resolution, and no two class loaders should ever share one
// DexFile object — doing so is an unsupported hack that can lead to all sorts
// of weird failures. Structural comparison would therefore be unsafe here.
inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
  return &lhs == &rhs;
}
116
// Kinds of comparison conditions. The same enum covers equality, signed,
// floating-point and unsigned comparisons; which group applies depends on
// the operand type.
enum IfCondition {
  // All types.
  kCondEQ,  // ==
  kCondNE,  // !=
  // Signed integers and floating-point numbers.
  kCondLT,  // <
  kCondLE,  // <=
  kCondGT,  // >
  kCondGE,  // >=
  // Unsigned integers.
  kCondB,   // <
  kCondBE,  // <=
  kCondA,   // >
  kCondAE,  // >=
  // First and last aliases.
  kCondFirst = kCondEQ,
  kCondLast = kCondAE,
};
135
// Outcome of building/analyzing a graph. Only kAnalysisSuccess indicates a
// usable graph; the kAnalysisFail* values name the specific unsupported
// construct that was encountered.
enum GraphAnalysisResult {
  kAnalysisSkipped,                           // Analysis was not performed.
  kAnalysisInvalidBytecode,                   // Input bytecode could not be handled.
  kAnalysisFailThrowCatchLoop,                // A loop header is a catch block (see AnalyzeLoops()).
  kAnalysisFailAmbiguousArrayOp,              // An array operation's type is ambiguous.
  kAnalysisFailIrreducibleLoopAndStringInit,  // Unsupported feature combination.
  kAnalysisFailPhiEquivalentInOsr,            // Phi equivalent not supported under OSR.
  kAnalysisSuccess,                           // Graph is valid and fully analyzed.
};
145
// Reinterprets `x` as the unsigned type of the same width (e.g. int32_t ->
// uint32_t), preserving the bit pattern. Useful for well-defined wrap-around
// arithmetic and logical (rather than arithmetic) shifts.
// Constrained to the domain of std::make_unsigned (integral and enum types)
// so misuse fails with a clear diagnostic instead of a template error.
template <typename T>
static inline std::make_unsigned_t<T> MakeUnsigned(T x) {
  static_assert(std::is_integral_v<T> || std::is_enum_v<T>,
                "MakeUnsigned requires an integral or enumeration type");
  return static_cast<std::make_unsigned_t<T>>(x);
}
150
// Ordered list of instructions belonging to one basic block. Only the first
// and last instruction are stored here; mutation goes through the member
// functions below and through the friend classes.
class HInstructionList : public ValueObject {
 public:
  HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}

  void AddInstruction(HInstruction* instruction);
  void RemoveInstruction(HInstruction* instruction);

  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);

  // Return true if this list contains `instruction`.
  bool Contains(HInstruction* instruction) const;

  // Return true if `instruction1` is found before `instruction2` in
  // this instruction list and false otherwise. Abort if none
  // of these instructions is found.
  bool FoundBefore(const HInstruction* instruction1,
                   const HInstruction* instruction2) const;

  bool IsEmpty() const { return first_instruction_ == nullptr; }
  // Drops all instructions from the list. Only the boundary pointers are
  // reset; the instructions themselves are not modified here.
  void Clear() { first_instruction_ = last_instruction_ = nullptr; }

  // Update the block of all instructions to be `block`.
  void SetBlockOfInstructions(HBasicBlock* block) const;

  // Merge the given `instruction_list` into this one, at the cursor position
  // or at the end respectively.
  void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
  void AddBefore(HInstruction* cursor, const HInstructionList& instruction_list);
  void Add(const HInstructionList& instruction_list);

  // Return the number of instructions in the list. This is an expensive operation.
  size_t CountSize() const;

 private:
  // Boundaries of the list; both are null when the list is empty.
  HInstruction* first_instruction_;
  HInstruction* last_instruction_;

  friend class HBasicBlock;
  friend class HGraph;
  friend class HInstruction;
  friend class HInstructionIterator;
  friend class HInstructionIteratorHandleChanges;
  friend class HBackwardInstructionIterator;

  DISALLOW_COPY_AND_ASSIGN(HInstructionList);
};
197
198 class ReferenceTypeInfo : ValueObject {
199 public:
200 using TypeHandle = Handle<mirror::Class>;
201
202 static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact);
203
Create(TypeHandle type_handle)204 static ReferenceTypeInfo Create(TypeHandle type_handle) REQUIRES_SHARED(Locks::mutator_lock_) {
205 return Create(type_handle, type_handle->CannotBeAssignedFromOtherTypes());
206 }
207
CreateUnchecked(TypeHandle type_handle,bool is_exact)208 static ReferenceTypeInfo CreateUnchecked(TypeHandle type_handle, bool is_exact) {
209 return ReferenceTypeInfo(type_handle, is_exact);
210 }
211
CreateInvalid()212 static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }
213
IsValidHandle(TypeHandle handle)214 static bool IsValidHandle(TypeHandle handle) {
215 return handle.GetReference() != nullptr;
216 }
217
IsValid()218 bool IsValid() const {
219 return IsValidHandle(type_handle_);
220 }
221
IsExact()222 bool IsExact() const { return is_exact_; }
223
IsObjectClass()224 bool IsObjectClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
225 DCHECK(IsValid());
226 return GetTypeHandle()->IsObjectClass();
227 }
228
IsStringClass()229 bool IsStringClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
230 DCHECK(IsValid());
231 return GetTypeHandle()->IsStringClass();
232 }
233
IsObjectArray()234 bool IsObjectArray() const REQUIRES_SHARED(Locks::mutator_lock_) {
235 DCHECK(IsValid());
236 return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
237 }
238
IsInterface()239 bool IsInterface() const REQUIRES_SHARED(Locks::mutator_lock_) {
240 DCHECK(IsValid());
241 return GetTypeHandle()->IsInterface();
242 }
243
IsArrayClass()244 bool IsArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
245 DCHECK(IsValid());
246 return GetTypeHandle()->IsArrayClass();
247 }
248
IsPrimitiveArrayClass()249 bool IsPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
250 DCHECK(IsValid());
251 return GetTypeHandle()->IsPrimitiveArray();
252 }
253
IsNonPrimitiveArrayClass()254 bool IsNonPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
255 DCHECK(IsValid());
256 return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
257 }
258
CanArrayHold(ReferenceTypeInfo rti)259 bool CanArrayHold(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
260 DCHECK(IsValid());
261 if (!IsExact()) return false;
262 if (!IsArrayClass()) return false;
263 return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
264 }
265
CanArrayHoldValuesOf(ReferenceTypeInfo rti)266 bool CanArrayHoldValuesOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
267 DCHECK(IsValid());
268 if (!IsExact()) return false;
269 if (!IsArrayClass()) return false;
270 if (!rti.IsArrayClass()) return false;
271 return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
272 rti.GetTypeHandle()->GetComponentType());
273 }
274
GetTypeHandle()275 Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }
276
IsSupertypeOf(ReferenceTypeInfo rti)277 bool IsSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
278 DCHECK(IsValid());
279 DCHECK(rti.IsValid());
280 return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
281 }
282
283 // Returns true if the type information provide the same amount of details.
284 // Note that it does not mean that the instructions have the same actual type
285 // (because the type can be the result of a merge).
IsEqual(ReferenceTypeInfo rti)286 bool IsEqual(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
287 if (!IsValid() && !rti.IsValid()) {
288 // Invalid types are equal.
289 return true;
290 }
291 if (!IsValid() || !rti.IsValid()) {
292 // One is valid, the other not.
293 return false;
294 }
295 return IsExact() == rti.IsExact()
296 && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
297 }
298
299 private:
ReferenceTypeInfo()300 ReferenceTypeInfo() : type_handle_(TypeHandle()), is_exact_(false) {}
ReferenceTypeInfo(TypeHandle type_handle,bool is_exact)301 ReferenceTypeInfo(TypeHandle type_handle, bool is_exact)
302 : type_handle_(type_handle), is_exact_(is_exact) { }
303
304 // The class of the object.
305 TypeHandle type_handle_;
306 // Whether or not the type is exact or a superclass of the actual type.
307 // Whether or not we have any information about this type.
308 bool is_exact_;
309 };
310
311 std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);
312
313 class HandleCache {
314 public:
HandleCache(VariableSizedHandleScope * handles)315 explicit HandleCache(VariableSizedHandleScope* handles) : handles_(handles) { }
316
GetHandles()317 VariableSizedHandleScope* GetHandles() { return handles_; }
318
319 template <typename T>
NewHandle(T * object)320 MutableHandle<T> NewHandle(T* object) REQUIRES_SHARED(Locks::mutator_lock_) {
321 return handles_->NewHandle(object);
322 }
323
324 template <typename T>
NewHandle(ObjPtr<T> object)325 MutableHandle<T> NewHandle(ObjPtr<T> object) REQUIRES_SHARED(Locks::mutator_lock_) {
326 return handles_->NewHandle(object);
327 }
328
GetObjectClassHandle()329 ReferenceTypeInfo::TypeHandle GetObjectClassHandle() {
330 return GetRootHandle(ClassRoot::kJavaLangObject, &object_class_handle_);
331 }
332
GetClassClassHandle()333 ReferenceTypeInfo::TypeHandle GetClassClassHandle() {
334 return GetRootHandle(ClassRoot::kJavaLangClass, &class_class_handle_);
335 }
336
GetMethodHandleClassHandle()337 ReferenceTypeInfo::TypeHandle GetMethodHandleClassHandle() {
338 return GetRootHandle(ClassRoot::kJavaLangInvokeMethodHandleImpl, &method_handle_class_handle_);
339 }
340
GetMethodTypeClassHandle()341 ReferenceTypeInfo::TypeHandle GetMethodTypeClassHandle() {
342 return GetRootHandle(ClassRoot::kJavaLangInvokeMethodType, &method_type_class_handle_);
343 }
344
GetStringClassHandle()345 ReferenceTypeInfo::TypeHandle GetStringClassHandle() {
346 return GetRootHandle(ClassRoot::kJavaLangString, &string_class_handle_);
347 }
348
GetThrowableClassHandle()349 ReferenceTypeInfo::TypeHandle GetThrowableClassHandle() {
350 return GetRootHandle(ClassRoot::kJavaLangThrowable, &throwable_class_handle_);
351 }
352
353
354 private:
GetRootHandle(ClassRoot class_root,ReferenceTypeInfo::TypeHandle * cache)355 inline ReferenceTypeInfo::TypeHandle GetRootHandle(ClassRoot class_root,
356 ReferenceTypeInfo::TypeHandle* cache) {
357 if (UNLIKELY(!ReferenceTypeInfo::IsValidHandle(*cache))) {
358 *cache = CreateRootHandle(handles_, class_root);
359 }
360 return *cache;
361 }
362
363 static ReferenceTypeInfo::TypeHandle CreateRootHandle(VariableSizedHandleScope* handles,
364 ClassRoot class_root);
365
366 VariableSizedHandleScope* handles_;
367
368 ReferenceTypeInfo::TypeHandle object_class_handle_;
369 ReferenceTypeInfo::TypeHandle class_class_handle_;
370 ReferenceTypeInfo::TypeHandle method_handle_class_handle_;
371 ReferenceTypeInfo::TypeHandle method_type_class_handle_;
372 ReferenceTypeInfo::TypeHandle string_class_handle_;
373 ReferenceTypeInfo::TypeHandle throwable_class_handle_;
374 };
375
376 // Control-flow graph of a method. Contains a list of basic blocks.
377 class HGraph : public ArenaObject<kArenaAllocGraph> {
378 public:
  // Builds an empty graph for the given method. Blocks, instructions and SSA
  // information are filled in later by the graph builder and the passes.
  HGraph(ArenaAllocator* allocator,
         ArenaStack* arena_stack,
         VariableSizedHandleScope* handles,
         const DexFile& dex_file,
         uint32_t method_idx,
         InstructionSet instruction_set,
         InvokeType invoke_type = kInvalidInvokeType,
         bool dead_reference_safe = false,
         bool debuggable = false,
         CompilationKind compilation_kind = CompilationKind::kOptimized,
         int start_instruction_id = 0)
      : allocator_(allocator),
        arena_stack_(arena_stack),
        handle_cache_(handles),
        blocks_(allocator->Adapter(kArenaAllocBlockList)),
        reverse_post_order_(allocator->Adapter(kArenaAllocReversePostOrder)),
        linear_order_(allocator->Adapter(kArenaAllocLinearOrder)),
        // Starts empty (0 x 0); sized when reachability is computed.
        reachability_graph_(allocator, 0, 0, true, kArenaAllocReachabilityGraph),
        entry_block_(nullptr),
        exit_block_(nullptr),
        maximum_number_of_out_vregs_(0),
        number_of_vregs_(0),
        number_of_in_vregs_(0),
        temporaries_vreg_slots_(0),
        // All "has_*" facts start false and are set as the builder/passes
        // discover the corresponding constructs.
        has_bounds_checks_(false),
        has_try_catch_(false),
        has_monitor_operations_(false),
        has_simd_(false),
        has_loops_(false),
        has_irreducible_loops_(false),
        has_direct_critical_native_call_(false),
        has_always_throwing_invokes_(false),
        dead_reference_safe_(dead_reference_safe),
        debuggable_(debuggable),
        current_instruction_id_(start_instruction_id),
        dex_file_(dex_file),
        method_idx_(method_idx),
        invoke_type_(invoke_type),
        in_ssa_form_(false),
        number_of_cha_guards_(0),
        instruction_set_(instruction_set),
        cached_null_constant_(nullptr),
        // Note: float/double constants are cached by their raw bit pattern,
        // hence the int32_t/int64_t key comparators.
        cached_int_constants_(std::less<int32_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
        cached_float_constants_(std::less<int32_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
        cached_long_constants_(std::less<int64_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
        cached_double_constants_(std::less<int64_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
        cached_current_method_(nullptr),
        art_method_(nullptr),
        compilation_kind_(compilation_kind),
        cha_single_implementation_list_(allocator->Adapter(kArenaAllocCHA)) {
    blocks_.reserve(kDefaultNumberOfBlocks);  // Avoid early reallocations.
  }
431
432 std::ostream& Dump(std::ostream& os,
433 CodeGenerator* codegen,
434 std::optional<std::reference_wrapper<const BlockNamer>> namer = std::nullopt);
435
GetAllocator()436 ArenaAllocator* GetAllocator() const { return allocator_; }
GetArenaStack()437 ArenaStack* GetArenaStack() const { return arena_stack_; }
438
GetHandleCache()439 HandleCache* GetHandleCache() { return &handle_cache_; }
440
GetBlocks()441 const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }
442
443 // An iterator to only blocks that are still actually in the graph (when
444 // blocks are removed they are replaced with 'nullptr' in GetBlocks to
445 // simplify block-id assignment and avoid memmoves in the block-list).
GetActiveBlocks()446 IterationRange<FilterNull<ArenaVector<HBasicBlock*>::const_iterator>> GetActiveBlocks() const {
447 return FilterOutNull(MakeIterationRange(GetBlocks()));
448 }
449
IsInSsaForm()450 bool IsInSsaForm() const { return in_ssa_form_; }
SetInSsaForm()451 void SetInSsaForm() { in_ssa_form_ = true; }
452
GetEntryBlock()453 HBasicBlock* GetEntryBlock() const { return entry_block_; }
GetExitBlock()454 HBasicBlock* GetExitBlock() const { return exit_block_; }
HasExitBlock()455 bool HasExitBlock() const { return exit_block_ != nullptr; }
456
SetEntryBlock(HBasicBlock * block)457 void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
SetExitBlock(HBasicBlock * block)458 void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }
459
460 void AddBlock(HBasicBlock* block);
461
462 void ComputeDominanceInformation();
463 void ClearDominanceInformation();
464 void ComputeReachabilityInformation();
465 void ClearReachabilityInformation();
466 void ClearLoopInformation();
467 void FindBackEdges(ArenaBitVector* visited);
468 GraphAnalysisResult BuildDominatorTree();
469 void SimplifyCFG();
470 void SimplifyCatchBlocks();
471
472 // Analyze all natural loops in this graph. Returns a code specifying that it
473 // was successful or the reason for failure. The method will fail if a loop
474 // is a throw-catch loop, i.e. the header is a catch block.
475 GraphAnalysisResult AnalyzeLoops() const;
476
477 // Iterate over blocks to compute try block membership. Needs reverse post
478 // order and loop information.
479 void ComputeTryBlockInformation();
480
481 // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
482 // Returns the instruction to replace the invoke expression or null if the
483 // invoke is for a void method. Note that the caller is responsible for replacing
484 // and removing the invoke instruction.
485 HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);
486
487 // Update the loop and try membership of `block`, which was spawned from `reference`.
488 // In case `reference` is a back edge, `replace_if_back_edge` notifies whether `block`
489 // should be the new back edge.
490 // `has_more_specific_try_catch_info` will be set to true when inlining a try catch.
491 void UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
492 HBasicBlock* reference,
493 bool replace_if_back_edge,
494 bool has_more_specific_try_catch_info = false);
495
496 // Need to add a couple of blocks to test if the loop body is entered and
497 // put deoptimization instructions, etc.
498 void TransformLoopHeaderForBCE(HBasicBlock* header);
499
500 // Adds a new loop directly after the loop with the given header and exit.
501 // Returns the new preheader.
502 HBasicBlock* TransformLoopForVectorization(HBasicBlock* header,
503 HBasicBlock* body,
504 HBasicBlock* exit);
505
506 // Removes `block` from the graph. Assumes `block` has been disconnected from
507 // other blocks and has no instructions or phis.
508 void DeleteDeadEmptyBlock(HBasicBlock* block);
509
510 // Splits the edge between `block` and `successor` while preserving the
511 // indices in the predecessor/successor lists. If there are multiple edges
512 // between the blocks, the lowest indices are used.
513 // Returns the new block which is empty and has the same dex pc as `successor`.
514 HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);
515
516 void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);
517
518 // Splits the edge between `block` and `successor` and then updates the graph's RPO to keep
519 // consistency without recomputing the whole graph.
520 HBasicBlock* SplitEdgeAndUpdateRPO(HBasicBlock* block, HBasicBlock* successor);
521
522 void OrderLoopHeaderPredecessors(HBasicBlock* header);
523
524 // Transform a loop into a format with a single preheader.
525 //
526 // Each phi in the header should be split: original one in the header should only hold
527 // inputs reachable from the back edges and a single input from the preheader. The newly created
528 // phi in the preheader should collate the inputs from the original multiple incoming blocks.
529 //
530 // Loops in the graph typically have a single preheader, so this method is used to "repair" loops
531 // that no longer have this property.
532 void TransformLoopToSinglePreheaderFormat(HBasicBlock* header);
533
534 void SimplifyLoop(HBasicBlock* header);
535
GetNextInstructionId()536 int32_t GetNextInstructionId() {
537 CHECK_NE(current_instruction_id_, INT32_MAX);
538 return current_instruction_id_++;
539 }
540
GetCurrentInstructionId()541 int32_t GetCurrentInstructionId() const {
542 return current_instruction_id_;
543 }
544
SetCurrentInstructionId(int32_t id)545 void SetCurrentInstructionId(int32_t id) {
546 CHECK_GE(id, current_instruction_id_);
547 current_instruction_id_ = id;
548 }
549
  // Maximum number of out vregs used by any call in this graph.
  uint16_t GetMaximumNumberOfOutVRegs() const {
    return maximum_number_of_out_vregs_;
  }

  void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
    maximum_number_of_out_vregs_ = new_value;
  }

  // Raises the out-vreg maximum if `other_value` exceeds the current one.
  void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
    maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
  }

  // Raises the temporaries slot count if `slots` exceeds the current one.
  void UpdateTemporariesVRegSlots(size_t slots) {
    temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
  }

  size_t GetTemporariesVRegSlots() const {
    DCHECK(!in_ssa_form_);  // Only meaningful before SSA conversion.
    return temporaries_vreg_slots_;
  }

  // Total number of vregs, parameters included.
  void SetNumberOfVRegs(uint16_t number_of_vregs) {
    number_of_vregs_ = number_of_vregs;
  }

  uint16_t GetNumberOfVRegs() const {
    return number_of_vregs_;
  }

  // Number of vregs occupied by the method's parameters.
  void SetNumberOfInVRegs(uint16_t value) {
    number_of_in_vregs_ = value;
  }

  uint16_t GetNumberOfInVRegs() const {
    return number_of_in_vregs_;
  }

  // Vregs used for locals only: all vregs minus the parameter vregs.
  uint16_t GetNumberOfLocalVRegs() const {
    DCHECK(!in_ssa_form_);
    return number_of_vregs_ - number_of_in_vregs_;
  }

  const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
    return reverse_post_order_;
  }

  ArrayRef<HBasicBlock* const> GetReversePostOrderSkipEntryBlock() const {
    DCHECK(GetReversePostOrder()[0] == entry_block_);  // Entry block leads the RPO.
    return ArrayRef<HBasicBlock* const>(GetReversePostOrder()).SubArray(1);
  }

  // Post order, obtained by walking the RPO vector backwards.
  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetPostOrder() const {
    return ReverseRange(GetReversePostOrder());
  }

  const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
    return linear_order_;
  }

  // Linear order walked backwards.
  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetLinearPostOrder() const {
    return ReverseRange(GetLinearOrder());
  }

  bool HasBoundsChecks() const {
    return has_bounds_checks_;
  }

  void SetHasBoundsChecks(bool value) {
    has_bounds_checks_ = value;
  }
620
  // Returns true if dest is reachable from source, using either blocks or block-ids.
  bool PathBetween(const HBasicBlock* source, const HBasicBlock* dest) const;
  bool PathBetween(uint32_t source_id, uint32_t dest_id) const;

  // Is the code known to be robust against eliminating dead references
  // and the effects of early finalization?
  bool IsDeadReferenceSafe() const { return dead_reference_safe_; }

  void MarkDeadReferenceUnsafe() { dead_reference_safe_ = false; }

  bool IsDebuggable() const { return debuggable_; }

  // Returns a constant of the given type and value. If it does not exist
  // already, it is created and inserted into the graph. This method is only for
  // integral types.
  HConstant* GetConstant(DataType::Type type, int64_t value, uint32_t dex_pc = kNoDexPc);

  // TODO: This is problematic for the consistency of reference type propagation
  // because it can be created anytime after the pass and thus it will be left
  // with an invalid type.
  HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc);

  // Typed constant accessors, backed by the per-type caches. Float/double
  // values are keyed by their raw bit pattern.
  HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(value, &cached_int_constants_, dex_pc);
  }
  HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(value, &cached_long_constants_, dex_pc);
  }
  HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc);
  }
  HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc);
  }

  HCurrentMethod* GetCurrentMethod();

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  uint32_t GetMethodIdx() const {
    return method_idx_;
  }

  // Get the method name (without the signature), e.g. "<init>"
  const char* GetMethodName() const;

  // Get the pretty method name (class + name + optionally signature).
  std::string PrettyMethod(bool with_signature = true) const;

  InvokeType GetInvokeType() const {
    return invoke_type_;
  }

  InstructionSet GetInstructionSet() const {
    return instruction_set_;
  }

  bool IsCompilingOsr() const { return compilation_kind_ == CompilationKind::kOsr; }

  bool IsCompilingBaseline() const { return compilation_kind_ == CompilationKind::kBaseline; }

  CompilationKind GetCompilationKind() const { return compilation_kind_; }

  // Methods registered as CHA single-implementation dependencies of this
  // compilation (see AddCHASingleImplementationDependency below).
  ArenaSet<ArtMethod*>& GetCHASingleImplementationList() {
    return cha_single_implementation_list_;
  }

  // In case of OSR we intend to use SuspendChecks as an entry point to the
  // function; for debuggable graphs we might deoptimize to interpreter from
  // SuspendChecks. In these cases we should always generate code for them.
  bool SuspendChecksAreAllowedToNoOp() const {
    return !IsDebuggable() && !IsCompilingOsr();
  }

  void AddCHASingleImplementationDependency(ArtMethod* method) {
    cha_single_implementation_list_.insert(method);
  }

  // Whether the "should deoptimize" flag is needed: true when CHA guards are
  // present or the graph is debuggable.
  bool HasShouldDeoptimizeFlag() const {
    return number_of_cha_guards_ != 0 || debuggable_;
  }

  bool HasTryCatch() const { return has_try_catch_; }
  void SetHasTryCatch(bool value) { has_try_catch_ = value; }

  bool HasMonitorOperations() const { return has_monitor_operations_; }
  void SetHasMonitorOperations(bool value) { has_monitor_operations_ = value; }

  bool HasSIMD() const { return has_simd_; }
  void SetHasSIMD(bool value) { has_simd_ = value; }

  bool HasLoops() const { return has_loops_; }
  void SetHasLoops(bool value) { has_loops_ = value; }

  bool HasIrreducibleLoops() const { return has_irreducible_loops_; }
  void SetHasIrreducibleLoops(bool value) { has_irreducible_loops_ = value; }

  bool HasDirectCriticalNativeCall() const { return has_direct_critical_native_call_; }
  void SetHasDirectCriticalNativeCall(bool value) { has_direct_critical_native_call_ = value; }

  bool HasAlwaysThrowingInvokes() const { return has_always_throwing_invokes_; }
  void SetHasAlwaysThrowingInvokes(bool value) { has_always_throwing_invokes_ = value; }

  ArtMethod* GetArtMethod() const { return art_method_; }
  void SetArtMethod(ArtMethod* method) { art_method_ = method; }

  void SetProfilingInfo(ProfilingInfo* info) { profiling_info_ = info; }
  ProfilingInfo* GetProfilingInfo() const { return profiling_info_; }

  // Returns an instruction with the opposite Boolean value from 'cond'.
  // The instruction has been inserted into the graph, either as a constant, or
  // before cursor.
  HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor);

  // Reference type info for "java.lang.Object, not exact": the most general
  // reference type.
  ReferenceTypeInfo GetInexactObjectRti() {
    return ReferenceTypeInfo::Create(handle_cache_.GetObjectClassHandle(), /* is_exact= */ false);
  }

  uint32_t GetNumberOfCHAGuards() const { return number_of_cha_guards_; }
  void SetNumberOfCHAGuards(uint32_t num) { number_of_cha_guards_ = num; }
  void IncrementNumberOfCHAGuards() { number_of_cha_guards_++; }
744
745 private:
746 void RemoveDeadBlocksInstructionsAsUsersAndDisconnect(const ArenaBitVector& visited) const;
747 void RemoveDeadBlocks(const ArenaBitVector& visited);
748
749 template <class InstructionType, typename ValueType>
750 InstructionType* CreateConstant(ValueType value,
751 ArenaSafeMap<ValueType, InstructionType*>* cache,
752 uint32_t dex_pc = kNoDexPc) {
753 // Try to find an existing constant of the given value.
754 InstructionType* constant = nullptr;
755 auto cached_constant = cache->find(value);
756 if (cached_constant != cache->end()) {
757 constant = cached_constant->second;
758 }
759
760 // If not found or previously deleted, create and cache a new instruction.
761 // Don't bother reviving a previously deleted instruction, for simplicity.
762 if (constant == nullptr || constant->GetBlock() == nullptr) {
763 constant = new (allocator_) InstructionType(value, dex_pc);
764 cache->Overwrite(value, constant);
765 InsertConstant(constant);
766 }
767 return constant;
768 }
769
770 void InsertConstant(HConstant* instruction);
771
772 // Cache a float constant into the graph. This method should only be
773 // called by the SsaBuilder when creating "equivalent" instructions.
774 void CacheFloatConstant(HFloatConstant* constant);
775
776 // See CacheFloatConstant comment.
777 void CacheDoubleConstant(HDoubleConstant* constant);
778
779 ArenaAllocator* const allocator_;
780 ArenaStack* const arena_stack_;
781
782 HandleCache handle_cache_;
783
784 // List of blocks in insertion order.
785 ArenaVector<HBasicBlock*> blocks_;
786
787 // List of blocks to perform a reverse post order tree traversal.
788 ArenaVector<HBasicBlock*> reverse_post_order_;
789
790 // List of blocks to perform a linear order tree traversal. Unlike the reverse
791 // post order, this order is not incrementally kept up-to-date.
792 ArenaVector<HBasicBlock*> linear_order_;
793
794 // Reachability graph for checking connectedness between nodes. Acts as a partitioned vector where
795 // each RoundUp(blocks_.size(), BitVector::kWordBits) is the reachability of each node.
796 ArenaBitVectorArray reachability_graph_;
797
798 HBasicBlock* entry_block_;
799 HBasicBlock* exit_block_;
800
801 // The maximum number of virtual registers arguments passed to a HInvoke in this graph.
802 uint16_t maximum_number_of_out_vregs_;
803
804 // The number of virtual registers in this method. Contains the parameters.
805 uint16_t number_of_vregs_;
806
807 // The number of virtual registers used by parameters of this method.
808 uint16_t number_of_in_vregs_;
809
810 // Number of vreg size slots that the temporaries use (used in baseline compiler).
811 size_t temporaries_vreg_slots_;
812
813 // Flag whether there are bounds checks in the graph. We can skip
814 // BCE if it's false.
815 bool has_bounds_checks_;
816
817 // Flag whether there are try/catch blocks in the graph. We will skip
818 // try/catch-related passes if it's false.
819 bool has_try_catch_;
820
821 // Flag whether there are any HMonitorOperation in the graph. If yes this will mandate
822 // DexRegisterMap to be present to allow deadlock analysis for non-debuggable code.
823 bool has_monitor_operations_;
824
825 // Flag whether SIMD instructions appear in the graph. If true, the
826 // code generators may have to be more careful spilling the wider
827 // contents of SIMD registers.
828 bool has_simd_;
829
830 // Flag whether there are any loops in the graph. We can skip loop
831 // optimization if it's false.
832 bool has_loops_;
833
834 // Flag whether there are any irreducible loops in the graph.
835 bool has_irreducible_loops_;
836
837 // Flag whether there are any direct calls to native code registered
838 // for @CriticalNative methods.
839 bool has_direct_critical_native_call_;
840
841 // Flag whether the graph contains invokes that always throw.
842 bool has_always_throwing_invokes_;
843
844 // Is the code known to be robust against eliminating dead references
845 // and the effects of early finalization? If false, dead reference variables
846 // are kept if they might be visible to the garbage collector.
847 // Currently this means that the class was declared to be dead-reference-safe,
848 // the method accesses no reachability-sensitive fields or data, and the same
849 // is true for any methods that were inlined into the current one.
850 bool dead_reference_safe_;
851
852 // Indicates whether the graph should be compiled in a way that
853 // ensures full debuggability. If false, we can apply more
854 // aggressive optimizations that may limit the level of debugging.
855 const bool debuggable_;
856
857 // The current id to assign to a newly added instruction. See HInstruction.id_.
858 int32_t current_instruction_id_;
859
860 // The dex file from which the method is from.
861 const DexFile& dex_file_;
862
863 // The method index in the dex file.
864 const uint32_t method_idx_;
865
866 // If inlined, this encodes how the callee is being invoked.
867 const InvokeType invoke_type_;
868
869 // Whether the graph has been transformed to SSA form. Only used
870 // in debug mode to ensure we are not using properties only valid
871 // for non-SSA form (like the number of temporaries).
872 bool in_ssa_form_;
873
874 // Number of CHA guards in the graph. Used to short-circuit the
875 // CHA guard optimization pass when there is no CHA guard left.
876 uint32_t number_of_cha_guards_;
877
878 const InstructionSet instruction_set_;
879
880 // Cached constants.
881 HNullConstant* cached_null_constant_;
882 ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
883 ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
884 ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
885 ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;
886
887 HCurrentMethod* cached_current_method_;
888
889 // The ArtMethod this graph is for. Note that for AOT, it may be null,
890 // for example for methods whose declaring class could not be resolved
891 // (such as when the superclass could not be found).
892 ArtMethod* art_method_;
893
894 // The `ProfilingInfo` associated with the method being compiled.
895 ProfilingInfo* profiling_info_;
896
897 // How we are compiling the graph: either optimized, osr, or baseline.
898 // For osr, we will make all loops seen as irreducible and emit special
899 // stack maps to mark compiled code entries which the interpreter can
900 // directly jump to.
901 const CompilationKind compilation_kind_;
902
903 // List of methods that are assumed to have single implementation.
904 ArenaSet<ArtMethod*> cha_single_implementation_list_;
905
906 friend class SsaBuilder; // For caching constants.
907 friend class SsaLivenessAnalysis; // For the linear order.
908 friend class HInliner; // For the reverse post order.
909 ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
910 DISALLOW_COPY_AND_ASSIGN(HGraph);
911 };
912
// Loop metadata attached to a loop-header block: the header, the back edges,
// the set of blocks in the loop, and irreducibility flags. Populated and
// maintained by the graph-building and loop-analysis passes.
class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
 public:
  HLoopInformation(HBasicBlock* header, HGraph* graph)
      : header_(header),
        suspend_check_(nullptr),
        irreducible_(false),
        contains_irreducible_loop_(false),
        back_edges_(graph->GetAllocator()->Adapter(kArenaAllocLoopInfoBackEdges)),
        // Make bit vector growable, as the number of blocks may change.
        blocks_(graph->GetAllocator(),
                graph->GetBlocks().size(),
                true,
                kArenaAllocLoopInfoBackEdges) {
    back_edges_.reserve(kDefaultNumberOfBackEdges);
  }

  // Whether this loop (or a nested loop) cannot be reduced to a single-entry
  // natural loop.
  bool IsIrreducible() const { return irreducible_; }
  bool ContainsIrreducibleLoop() const { return contains_irreducible_loop_; }

  void Dump(std::ostream& os);

  // The single entry block of the loop.
  HBasicBlock* GetHeader() const {
    return header_;
  }

  void SetHeader(HBasicBlock* block) {
    header_ = block;
  }

  // The HSuspendCheck emitted for this loop's back edges, if any.
  HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
  void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
  bool HasSuspendCheck() const { return suspend_check_ != nullptr; }

  void AddBackEdge(HBasicBlock* back_edge) {
    back_edges_.push_back(back_edge);
  }

  void RemoveBackEdge(HBasicBlock* back_edge) {
    RemoveElement(back_edges_, back_edge);
  }

  // Whether `block` is one of this loop's back edges.
  bool IsBackEdge(const HBasicBlock& block) const {
    return ContainsElement(back_edges_, &block);
  }

  size_t NumberOfBackEdges() const {
    return back_edges_.size();
  }

  HBasicBlock* GetPreHeader() const;

  const ArenaVector<HBasicBlock*>& GetBackEdges() const {
    return back_edges_;
  }

  // Returns the lifetime position of the back edge that has the
  // greatest lifetime position.
  size_t GetLifetimeEnd() const;

  void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
    ReplaceElement(back_edges_, existing, new_back_edge);
  }

  // Finds blocks that are part of this loop.
  void Populate();

  // Updates blocks population of the loop and all of its outer' ones recursively after the
  // population of the inner loop is updated.
  void PopulateInnerLoopUpwards(HLoopInformation* inner_loop);

  // Returns whether this loop information contains `block`.
  // Note that this loop information *must* be populated before entering this function.
  bool Contains(const HBasicBlock& block) const;

  // Returns whether this loop information is an inner loop of `other`.
  // Note that `other` *must* be populated before entering this function.
  bool IsIn(const HLoopInformation& other) const;

  // Returns true if instruction is not defined within this loop.
  bool IsDefinedOutOfTheLoop(HInstruction* instruction) const;

  // Bit vector of block ids belonging to this loop.
  const ArenaBitVector& GetBlocks() const { return blocks_; }

  void Add(HBasicBlock* block);
  void Remove(HBasicBlock* block);

  void ClearAllBlocks() {
    blocks_.ClearAllBits();
  }

  bool HasBackEdgeNotDominatedByHeader() const;

  // Whether `Populate` has run: an empty block set means not yet populated.
  bool IsPopulated() const {
    return blocks_.GetHighestBitSet() != -1;
  }

  bool DominatesAllBackEdges(HBasicBlock* block);

  bool HasExitEdge() const;

  // Resets back edge and blocks-in-loop data.
  void ResetBasicBlockData() {
    back_edges_.clear();
    ClearAllBlocks();
  }

 private:
  // Internal recursive implementation of `Populate`.
  void PopulateRecursive(HBasicBlock* block);
  void PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized);

  HBasicBlock* header_;
  HSuspendCheck* suspend_check_;
  bool irreducible_;
  bool contains_irreducible_loop_;
  ArenaVector<HBasicBlock*> back_edges_;
  ArenaBitVector blocks_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
};
1033
1034 // Stores try/catch information for basic blocks.
1035 // Note that HGraph is constructed so that catch blocks cannot simultaneously
1036 // be try blocks.
// Stores try/catch information for basic blocks.
// Note that HGraph is constructed so that catch blocks cannot simultaneously
// be try blocks.
class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
 public:
  // Try block information constructor.
  explicit TryCatchInformation(const HTryBoundary& try_entry)
      : try_entry_(&try_entry),
        catch_dex_file_(nullptr),
        catch_type_index_(dex::TypeIndex::Invalid()) {
    DCHECK(try_entry_ != nullptr);
  }

  // Catch block information constructor.
  TryCatchInformation(dex::TypeIndex catch_type_index, const DexFile& dex_file)
      : try_entry_(nullptr),
        catch_dex_file_(&dex_file),
        catch_type_index_(catch_type_index) {}

  // Which constructor was used: try_entry_ is only set for try blocks.
  bool IsTryBlock() const { return try_entry_ != nullptr; }

  const HTryBoundary& GetTryEntry() const {
    DCHECK(IsTryBlock());
    return *try_entry_;
  }

  // Which constructor was used: catch_dex_file_ is only set for catch blocks.
  bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }

  // Whether the catch type is a concrete type (an invalid index is used for
  // catch-all handlers; see SetInvalidTypeIndex). Only valid on catch blocks.
  bool IsValidTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_.IsValid();
  }

  dex::TypeIndex GetCatchTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_;
  }

  const DexFile& GetCatchDexFile() const {
    DCHECK(IsCatchBlock());
    return *catch_dex_file_;
  }

  void SetInvalidTypeIndex() {
    catch_type_index_ = dex::TypeIndex::Invalid();
  }

 private:
  // One of possibly several TryBoundary instructions entering the block's try.
  // Only set for try blocks.
  const HTryBoundary* try_entry_;

  // Exception type information. Only set for catch blocks.
  const DexFile* catch_dex_file_;
  dex::TypeIndex catch_type_index_;
};
1090
// Sentinel lifetime position meaning "not yet computed"; -1 wraps to the
// maximum size_t value since lifetime positions are unsigned.
static constexpr size_t kNoLifetime = -1;
// Block id of a block not yet added to a graph (see HBasicBlock::block_id_).
static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);
1093
// A block in a method. Contains the list of instructions represented
// as a doubly linked list. Each block knows its predecessors and
// successors.
1097
class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
 public:
  explicit HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
      : graph_(graph),
        predecessors_(graph->GetAllocator()->Adapter(kArenaAllocPredecessors)),
        successors_(graph->GetAllocator()->Adapter(kArenaAllocSuccessors)),
        loop_information_(nullptr),
        dominator_(nullptr),
        dominated_blocks_(graph->GetAllocator()->Adapter(kArenaAllocDominated)),
        block_id_(kInvalidBlockId),
        dex_pc_(dex_pc),
        lifetime_start_(kNoLifetime),
        lifetime_end_(kNoLifetime),
        try_catch_information_(nullptr) {
    predecessors_.reserve(kDefaultNumberOfPredecessors);
    successors_.reserve(kDefaultNumberOfSuccessors);
    dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks);
  }

  const ArenaVector<HBasicBlock*>& GetPredecessors() const {
    return predecessors_;
  }

  size_t GetNumberOfPredecessors() const {
    return GetPredecessors().size();
  }

  const ArenaVector<HBasicBlock*>& GetSuccessors() const {
    return successors_;
  }

  ArrayRef<HBasicBlock* const> GetNormalSuccessors() const;
  ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const;

  // Whether `block` appears in successors_ at or after index `start_from`.
  bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
    return ContainsElement(successors_, block, start_from);
  }

  const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const {
    return dominated_blocks_;
  }

  bool IsEntryBlock() const {
    return graph_->GetEntryBlock() == this;
  }

  bool IsExitBlock() const {
    return graph_->GetExitBlock() == this;
  }

  bool IsSingleGoto() const;
  bool IsSingleReturn() const;
  bool IsSingleReturnOrReturnVoidAllowingPhis() const;
  bool IsSingleTryBoundary() const;

  // Returns true if this block emits nothing but a jump.
  bool IsSingleJump() const {
    HLoopInformation* loop_info = GetLoopInformation();
    return (IsSingleGoto() || IsSingleTryBoundary())
           // Back edges generate a suspend check.
           && (loop_info == nullptr || !loop_info->IsBackEdge(*this));
  }

  // Registers `back_edge` as a back edge to this block, creating loop
  // information on first use. This block must already be the loop header
  // (DCHECKed below).
  void AddBackEdge(HBasicBlock* back_edge) {
    if (loop_information_ == nullptr) {
      loop_information_ = new (graph_->GetAllocator()) HLoopInformation(this, graph_);
    }
    DCHECK_EQ(loop_information_->GetHeader(), this);
    loop_information_->AddBackEdge(back_edge);
  }

  // Registers a back edge; if the block was not a loop header before the call associates a newly
  // created loop info with it.
  //
  // Used in SuperblockCloner to preserve LoopInformation object instead of reseting loop
  // info for all blocks during back edges recalculation.
  void AddBackEdgeWhileUpdating(HBasicBlock* back_edge) {
    if (loop_information_ == nullptr || loop_information_->GetHeader() != this) {
      loop_information_ = new (graph_->GetAllocator()) HLoopInformation(this, graph_);
    }
    loop_information_->AddBackEdge(back_edge);
  }

  HGraph* GetGraph() const { return graph_; }
  void SetGraph(HGraph* graph) { graph_ = graph; }

  uint32_t GetBlockId() const { return block_id_; }
  void SetBlockId(int id) { block_id_ = id; }
  uint32_t GetDexPc() const { return dex_pc_; }

  HBasicBlock* GetDominator() const { return dominator_; }
  void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; }
  void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); }

  void RemoveDominatedBlock(HBasicBlock* block) {
    RemoveElement(dominated_blocks_, block);
  }

  void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) {
    ReplaceElement(dominated_blocks_, existing, new_block);
  }

  void ClearDominanceInformation();

  // Number of back edges if this block is a loop header, 0 otherwise.
  int NumberOfBackEdges() const {
    return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0;
  }

  HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
  HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
  const HInstructionList& GetInstructions() const { return instructions_; }
  HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
  HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
  const HInstructionList& GetPhis() const { return phis_; }

  HInstruction* GetFirstInstructionDisregardMoves() const;

  // Appends `block` as a successor and updates its predecessor list.
  void AddSuccessor(HBasicBlock* block) {
    successors_.push_back(block);
    block->predecessors_.push_back(this);
  }

  // Redirects the edge this->existing to this->new_block, preserving the
  // successor index.
  void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t successor_index = GetSuccessorIndexOf(existing);
    existing->RemovePredecessor(this);
    new_block->predecessors_.push_back(this);
    successors_[successor_index] = new_block;
  }

  // Redirects the edge existing->this to new_block->this, preserving the
  // predecessor index.
  void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t predecessor_index = GetPredecessorIndexOf(existing);
    existing->RemoveSuccessor(this);
    new_block->successors_.push_back(this);
    predecessors_[predecessor_index] = new_block;
  }

  // Insert `this` between `predecessor` and `successor. This method
  // preserves the indices, and will update the first edge found between
  // `predecessor` and `successor`.
  void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
    size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
    size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
    successor->predecessors_[predecessor_index] = this;
    predecessor->successors_[successor_index] = this;
    successors_.push_back(successor);
    predecessors_.push_back(predecessor);
  }

  // Removes `block` from predecessors_ only; does not touch block's successors.
  void RemovePredecessor(HBasicBlock* block) {
    predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
  }

  // Removes `block` from successors_ only; does not touch block's predecessors.
  void RemoveSuccessor(HBasicBlock* block) {
    successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
  }

  void ClearAllPredecessors() {
    predecessors_.clear();
  }

  // Appends `block` as a predecessor and updates its successor list.
  void AddPredecessor(HBasicBlock* block) {
    predecessors_.push_back(block);
    block->successors_.push_back(this);
  }

  // Swaps the two predecessors of a two-predecessor block.
  void SwapPredecessors() {
    DCHECK_EQ(predecessors_.size(), 2u);
    std::swap(predecessors_[0], predecessors_[1]);
  }

  // Swaps the two successors of a two-successor block.
  void SwapSuccessors() {
    DCHECK_EQ(successors_.size(), 2u);
    std::swap(successors_[0], successors_[1]);
  }

  size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
    return IndexOfElement(predecessors_, predecessor);
  }

  size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
    return IndexOfElement(successors_, successor);
  }

  HBasicBlock* GetSinglePredecessor() const {
    DCHECK_EQ(GetPredecessors().size(), 1u);
    return GetPredecessors()[0];
  }

  HBasicBlock* GetSingleSuccessor() const {
    DCHECK_EQ(GetSuccessors().size(), 1u);
    return GetSuccessors()[0];
  }

  // Returns whether the first occurrence of `predecessor` in the list of
  // predecessors is at index `idx`.
  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
    DCHECK_EQ(GetPredecessors()[idx], predecessor);
    return GetPredecessorIndexOf(predecessor) == idx;
  }

  // Create a new block between this block and its predecessors. The new block
  // is added to the graph, all predecessor edges are relinked to it and an edge
  // is created to `this`. Returns the new empty block. Reverse post order or
  // loop and try/catch information are not updated.
  HBasicBlock* CreateImmediateDominator();

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created, latter block. Note that this method will add the block to the
  // graph, create a Goto at the end of the former block and will create an edge
  // between the blocks. It will not, however, update the reverse post order or
  // loop and try/catch information.
  HBasicBlock* SplitBefore(HInstruction* cursor, bool require_graph_not_in_ssa_form = true);

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created block. Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitBeforeForInlining(HInstruction* cursor);

  // Similar to `SplitBeforeForInlining` but does it after `cursor`.
  HBasicBlock* SplitAfterForInlining(HInstruction* cursor);

  // Merge `other` at the end of `this`. Successors and dominated blocks of
  // `other` are changed to be successors and dominated blocks of `this`. Note
  // that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void MergeWithInlined(HBasicBlock* other);

  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
  // of `this` are moved to `other`.
  // Note that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void ReplaceWith(HBasicBlock* other);

  // Merges the instructions of `other` at the end of `this`.
  void MergeInstructionsWith(HBasicBlock* other);

  // Merge `other` at the end of `this`. This method updates loops, reverse post
  // order, links to predecessors, successors, dominators and deletes the block
  // from the graph. The two blocks must be successive, i.e. `this` the only
  // predecessor of `other` and vice versa.
  void MergeWith(HBasicBlock* other);

  // Disconnects `this` from all its predecessors, successors and dominator,
  // removes it from all loops it is included in and eventually from the graph.
  // The block must not dominate any other block. Predecessors and successors
  // are safely updated.
  void DisconnectAndDelete();

  // Disconnects `this` from all its successors and updates their phis, if the successors have them.
  // If `visited` is provided, it will use the information to know if a successor is reachable and
  // skip updating those phis.
  void DisconnectFromSuccessors(const ArenaBitVector* visited = nullptr);

  // Removes the catch phi uses of the instructions in `this`, and then remove the instruction
  // itself. If `building_dominator_tree` is true, it will not remove the instruction as user, since
  // we do it in a previous step. This is a special case for building up the dominator tree: we want
  // to eliminate uses before inputs but we don't have domination information, so we remove all
  // connections from input/uses first before removing any instruction.
  // This method assumes the instructions have been removed from all users with the exception of
  // catch phis because of missing exceptional edges in the graph.
  void RemoveCatchPhiUsesAndInstruction(bool building_dominator_tree);

  void AddInstruction(HInstruction* instruction);
  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
  // Replace phi `initial` with `replacement` within this block.
  void ReplaceAndRemovePhiWith(HPhi* initial, HPhi* replacement);
  // Replace instruction `initial` with `replacement` within this block.
  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                       HInstruction* replacement);
  void AddPhi(HPhi* phi);
  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
  // RemoveInstruction and RemovePhi delete a given instruction from the respective
  // instruction list. With 'ensure_safety' set to true, it verifies that the
  // instruction is not in use and removes it from the use lists of its inputs.
  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
  void RemovePhi(HPhi* phi, bool ensure_safety = true);
  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);

  // Whether this block is the header of the loop it belongs to.
  bool IsLoopHeader() const {
    return IsInLoop() && (loop_information_->GetHeader() == this);
  }

  // Whether predecessor 0 of this loop header is the loop pre-header.
  bool IsLoopPreHeaderFirstPredecessor() const {
    DCHECK(IsLoopHeader());
    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
  }

  // Whether predecessor 0 of this loop header is a back edge.
  bool IsFirstPredecessorBackEdge() const {
    DCHECK(IsLoopHeader());
    return GetLoopInformation()->IsBackEdge(*GetPredecessors()[0]);
  }

  // Loop information of the innermost loop containing this block, or null.
  HLoopInformation* GetLoopInformation() const {
    return loop_information_;
  }

  // Set the loop_information_ on this block. Overrides the current
  // loop_information if it is an outer loop of the passed loop information.
  // Note that this method is called while creating the loop information.
  void SetInLoop(HLoopInformation* info) {
    if (IsLoopHeader()) {
      // Nothing to do. This just means `info` is an outer loop.
    } else if (!IsInLoop()) {
      loop_information_ = info;
    } else if (loop_information_->Contains(*info->GetHeader())) {
      // Block is currently part of an outer loop. Make it part of this inner loop.
      // Note that a non loop header having a loop information means this loop information
      // has already been populated
      loop_information_ = info;
    } else {
      // Block is part of an inner loop. Do not update the loop information.
      // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
      // at this point, because this method is being called while populating `info`.
    }
  }

  // Raw update of the loop information.
  void SetLoopInformation(HLoopInformation* info) {
    loop_information_ = info;
  }

  bool IsInLoop() const { return loop_information_ != nullptr; }

  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }

  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
    try_catch_information_ = try_catch_information;
  }

  bool IsTryBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
  }

  bool IsCatchBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
  }

  // Returns the try entry that this block's successors should have. They will
  // be in the same try, unless the block ends in a try boundary. In that case,
  // the appropriate try entry will be returned.
  const HTryBoundary* ComputeTryEntryOfSuccessors() const;

  bool HasThrowingInstructions() const;

  // Returns whether this block dominates the blocked passed as parameter.
  bool Dominates(const HBasicBlock* block) const;

  // Lifetime positions assigned by liveness analysis (kNoLifetime until then).
  size_t GetLifetimeStart() const { return lifetime_start_; }
  size_t GetLifetimeEnd() const { return lifetime_end_; }

  void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
  void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }

  bool EndsWithControlFlowInstruction() const;
  bool EndsWithReturn() const;
  bool EndsWithIf() const;
  bool EndsWithTryBoundary() const;
  bool HasSinglePhi() const;

 private:
  HGraph* graph_;
  ArenaVector<HBasicBlock*> predecessors_;
  ArenaVector<HBasicBlock*> successors_;
  HInstructionList instructions_;
  HInstructionList phis_;
  HLoopInformation* loop_information_;
  HBasicBlock* dominator_;
  ArenaVector<HBasicBlock*> dominated_blocks_;
  uint32_t block_id_;
  // The dex program counter of the first instruction of this block.
  const uint32_t dex_pc_;
  size_t lifetime_start_;
  size_t lifetime_end_;
  TryCatchInformation* try_catch_information_;

  friend class HGraph;
  friend class HInstruction;
  // Allow manual control of the ordering of predecessors/successors
  friend class OptimizingUnitTestHelper;

  DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
};
1486
1487 // Iterates over the LoopInformation of all loops which contain 'block'
1488 // from the innermost to the outermost.
1489 class HLoopInformationOutwardIterator : public ValueObject {
1490 public:
HLoopInformationOutwardIterator(const HBasicBlock & block)1491 explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
1492 : current_(block.GetLoopInformation()) {}
1493
Done()1494 bool Done() const { return current_ == nullptr; }
1495
Advance()1496 void Advance() {
1497 DCHECK(!Done());
1498 current_ = current_->GetPreHeader()->GetLoopInformation();
1499 }
1500
Current()1501 HLoopInformation* Current() const {
1502 DCHECK(!Done());
1503 return current_;
1504 }
1505
1506 private:
1507 HLoopInformation* current_;
1508
1509 DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
1510 };
1511
// X-macro listing every concrete scalar (non-SIMD) instruction shared by all
// architectures. Each entry is M(InstructionName, SuperClassName). This list
// (via FOR_EACH_CONCRETE_INSTRUCTION below) drives the forward declarations
// and the HInstruction::InstructionKind enum, so the ordering here fixes the
// enum values.
#define FOR_EACH_CONCRETE_INSTRUCTION_SCALAR_COMMON(M)                  \
  M(Above, Condition)                                                   \
  M(AboveOrEqual, Condition)                                            \
  M(Abs, UnaryOperation)                                                \
  M(Add, BinaryOperation)                                               \
  M(And, BinaryOperation)                                               \
  M(ArrayGet, Instruction)                                              \
  M(ArrayLength, Instruction)                                           \
  M(ArraySet, Instruction)                                              \
  M(Below, Condition)                                                   \
  M(BelowOrEqual, Condition)                                            \
  M(BooleanNot, UnaryOperation)                                         \
  M(BoundsCheck, Instruction)                                           \
  M(BoundType, Instruction)                                             \
  M(CheckCast, Instruction)                                             \
  M(ClassTableGet, Instruction)                                         \
  M(ClearException, Instruction)                                        \
  M(ClinitCheck, Instruction)                                           \
  M(Compare, BinaryOperation)                                           \
  M(ConstructorFence, Instruction)                                      \
  M(CurrentMethod, Instruction)                                         \
  M(ShouldDeoptimizeFlag, Instruction)                                  \
  M(Deoptimize, Instruction)                                            \
  M(Div, BinaryOperation)                                               \
  M(DivZeroCheck, Instruction)                                          \
  M(DoubleConstant, Constant)                                           \
  M(Equal, Condition)                                                   \
  M(Exit, Instruction)                                                  \
  M(FloatConstant, Constant)                                            \
  M(Goto, Instruction)                                                  \
  M(GreaterThan, Condition)                                             \
  M(GreaterThanOrEqual, Condition)                                      \
  M(If, Instruction)                                                    \
  M(InstanceFieldGet, Instruction)                                      \
  M(InstanceFieldSet, Instruction)                                      \
  M(PredicatedInstanceFieldGet, Instruction)                            \
  M(InstanceOf, Instruction)                                            \
  M(IntConstant, Constant)                                              \
  M(IntermediateAddress, Instruction)                                   \
  M(InvokeUnresolved, Invoke)                                           \
  M(InvokeInterface, Invoke)                                            \
  M(InvokeStaticOrDirect, Invoke)                                       \
  M(InvokeVirtual, Invoke)                                              \
  M(InvokePolymorphic, Invoke)                                          \
  M(InvokeCustom, Invoke)                                               \
  M(LessThan, Condition)                                                \
  M(LessThanOrEqual, Condition)                                         \
  M(LoadClass, Instruction)                                             \
  M(LoadException, Instruction)                                         \
  M(LoadMethodHandle, Instruction)                                      \
  M(LoadMethodType, Instruction)                                        \
  M(LoadString, Instruction)                                            \
  M(LongConstant, Constant)                                             \
  M(Max, Instruction)                                                   \
  M(MemoryBarrier, Instruction)                                         \
  M(MethodEntryHook, Instruction)                                       \
  M(MethodExitHook, Instruction)                                        \
  M(Min, BinaryOperation)                                               \
  M(MonitorOperation, Instruction)                                      \
  M(Mul, BinaryOperation)                                               \
  M(Neg, UnaryOperation)                                                \
  M(NewArray, Instruction)                                              \
  M(NewInstance, Instruction)                                           \
  M(Nop, Instruction)                                                   \
  M(Not, UnaryOperation)                                                \
  M(NotEqual, Condition)                                                \
  M(NullConstant, Instruction)                                          \
  M(NullCheck, Instruction)                                             \
  M(Or, BinaryOperation)                                                \
  M(PackedSwitch, Instruction)                                          \
  M(ParallelMove, Instruction)                                          \
  M(ParameterValue, Instruction)                                        \
  M(Phi, Instruction)                                                   \
  M(Rem, BinaryOperation)                                               \
  M(Return, Instruction)                                                \
  M(ReturnVoid, Instruction)                                            \
  M(Ror, BinaryOperation)                                               \
  M(Shl, BinaryOperation)                                               \
  M(Shr, BinaryOperation)                                               \
  M(StaticFieldGet, Instruction)                                        \
  M(StaticFieldSet, Instruction)                                        \
  M(StringBuilderAppend, Instruction)                                   \
  M(UnresolvedInstanceFieldGet, Instruction)                            \
  M(UnresolvedInstanceFieldSet, Instruction)                            \
  M(UnresolvedStaticFieldGet, Instruction)                              \
  M(UnresolvedStaticFieldSet, Instruction)                              \
  M(Select, Instruction)                                                \
  M(Sub, BinaryOperation)                                               \
  M(SuspendCheck, Instruction)                                          \
  M(Throw, Instruction)                                                 \
  M(TryBoundary, Instruction)                                           \
  M(TypeConversion, Instruction)                                        \
  M(UShr, BinaryOperation)                                              \
  M(Xor, BinaryOperation)
1606
// X-macro listing every concrete SIMD (vector) instruction shared by all
// architectures. Each entry is M(InstructionName, SuperClassName); together
// with the scalar list above it drives the forward declarations and the
// HInstruction::InstructionKind enum, so the ordering here fixes the enum
// values.
// Note: the last entry intentionally has no trailing line-continuation;
// a stray '\' there would silently splice the following (blank) line into
// the macro and absorb the next line if the blank were ever removed.
#define FOR_EACH_CONCRETE_INSTRUCTION_VECTOR_COMMON(M)                  \
  M(VecReplicateScalar, VecUnaryOperation)                              \
  M(VecExtractScalar, VecUnaryOperation)                                \
  M(VecReduce, VecUnaryOperation)                                       \
  M(VecCnv, VecUnaryOperation)                                          \
  M(VecNeg, VecUnaryOperation)                                          \
  M(VecAbs, VecUnaryOperation)                                          \
  M(VecNot, VecUnaryOperation)                                          \
  M(VecAdd, VecBinaryOperation)                                         \
  M(VecHalvingAdd, VecBinaryOperation)                                  \
  M(VecSub, VecBinaryOperation)                                         \
  M(VecMul, VecBinaryOperation)                                         \
  M(VecDiv, VecBinaryOperation)                                         \
  M(VecMin, VecBinaryOperation)                                         \
  M(VecMax, VecBinaryOperation)                                         \
  M(VecAnd, VecBinaryOperation)                                         \
  M(VecAndNot, VecBinaryOperation)                                      \
  M(VecOr, VecBinaryOperation)                                          \
  M(VecXor, VecBinaryOperation)                                         \
  M(VecSaturationAdd, VecBinaryOperation)                               \
  M(VecSaturationSub, VecBinaryOperation)                               \
  M(VecShl, VecBinaryOperation)                                         \
  M(VecShr, VecBinaryOperation)                                         \
  M(VecUShr, VecBinaryOperation)                                        \
  M(VecSetScalars, VecOperation)                                        \
  M(VecMultiplyAccumulate, VecOperation)                                \
  M(VecSADAccumulate, VecOperation)                                     \
  M(VecDotProd, VecOperation)                                           \
  M(VecLoad, VecMemoryOperation)                                        \
  M(VecStore, VecMemoryOperation)                                       \
  M(VecPredSetAll, VecPredSetOperation)                                 \
  M(VecPredWhile, VecPredSetOperation)                                  \
  M(VecPredCondition, VecOperation)

// All concrete instructions shared by every architecture: the scalar list
// followed by the vector (SIMD) list.
#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                         \
  FOR_EACH_CONCRETE_INSTRUCTION_SCALAR_COMMON(M)                        \
  FOR_EACH_CONCRETE_INSTRUCTION_VECTOR_COMMON(M)
1644
1645 /*
1646 * Instructions, shared across several (not all) architectures.
1647 */
1648 #if !defined(ART_ENABLE_CODEGEN_arm) && !defined(ART_ENABLE_CODEGEN_arm64)
1649 #define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)
1650 #else
1651 #define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M) \
1652 M(BitwiseNegatedRight, Instruction) \
1653 M(DataProcWithShifterOp, Instruction) \
1654 M(MultiplyAccumulate, Instruction) \
1655 M(IntermediateAddressIndex, Instruction)
1656 #endif
1657
// Per-architecture instruction lists. A list is empty when the backend
// currently defines no architecture-specific HIR instructions, or when the
// backend is not compiled in.
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)

#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)

#ifndef ART_ENABLE_CODEGEN_x86
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                            \
  M(X86ComputeBaseMethodAddress, Instruction)                           \
  M(X86LoadFromConstantTable, Instruction)                              \
  M(X86FPNeg, Instruction)                                              \
  M(X86PackedSwitch, Instruction)
#endif

// Instructions shared between the x86 and x86-64 backends.
#if defined(ART_ENABLE_CODEGEN_x86) || defined(ART_ENABLE_CODEGEN_x86_64)
#define FOR_EACH_CONCRETE_INSTRUCTION_X86_COMMON(M)                     \
  M(X86AndNot, Instruction)                                             \
  M(X86MaskOrResetLeastSetBit, Instruction)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_X86_COMMON(M)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)
1681
// Every concrete instruction for the currently enabled set of backends.
// Drives the HInstruction::InstructionKind enum below, so the concatenation
// order here determines the enum values.
#define FOR_EACH_CONCRETE_INSTRUCTION(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_X86_COMMON(M)
1690
// Abstract instruction base classes. These are forward-declared like the
// concrete ones (see FOR_EACH_INSTRUCTION) but do not appear in the
// InstructionKind enum, which is built from the concrete list only.
#define FOR_EACH_ABSTRACT_INSTRUCTION(M)                                \
  M(Condition, BinaryOperation)                                         \
  M(Constant, Instruction)                                              \
  M(UnaryOperation, Instruction)                                        \
  M(BinaryOperation, Instruction)                                       \
  M(Invoke, Instruction)                                                \
  M(VecOperation, Instruction)                                          \
  M(VecUnaryOperation, VecOperation)                                    \
  M(VecBinaryOperation, VecOperation)                                   \
  M(VecMemoryOperation, VecOperation)                                   \
  M(VecPredSetOperation, VecOperation)
1702
// All instruction classes, concrete and abstract.
#define FOR_EACH_INSTRUCTION(M)                                         \
  FOR_EACH_CONCRETE_INSTRUCTION(M)                                      \
  FOR_EACH_ABSTRACT_INSTRUCTION(M)

// Forward-declare class H<type> for every instruction so that the classes
// below can reference each other regardless of definition order.
#define FORWARD_DECLARATION(type, super) class H##type;
FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
#undef FORWARD_DECLARATION
1710
// Boilerplate placed inside every concrete instruction class: deletes copy
// assignment, and provides DebugName(), Clone() (which copy-constructs via
// the DEFAULT_COPY_CONSTRUCTOR below) and the visitor Accept() declaration.
#define DECLARE_INSTRUCTION(type)                                         \
  private:                                                                \
  H##type& operator=(const H##type&) = delete;                            \
  public:                                                                 \
  const char* DebugName() const override { return #type; }                \
  HInstruction* Clone(ArenaAllocator* arena) const override {             \
    DCHECK(IsClonable());                                                 \
    return new (arena) H##type(*this->As##type());                        \
  }                                                                       \
  void Accept(HGraphVisitor* visitor) override

// Boilerplate for abstract instruction classes: only deletes copy
// assignment; DebugName()/Clone()/Accept() come from the concrete subclass.
#define DECLARE_ABSTRACT_INSTRUCTION(type)                                \
  private:                                                                \
  H##type& operator=(const H##type&) = delete;                            \
  public:

// Declares a defaulted copy constructor, as required by Clone() above.
#define DEFAULT_COPY_CONSTRUCTOR(type) H##type(const H##type& other) = default;
1728
// Node in an instruction's intrusive use list. T is the user type
// (e.g. HInstruction*, see HUseList<HInstruction*> usage below); each node
// records which user references the instruction and at which input index.
template <typename T>
class HUseListNode : public ArenaObject<kArenaAllocUseListNode>,
                     public IntrusiveForwardListNode<HUseListNode<T>> {
 public:
  // Get the instruction which has this use as one of the inputs.
  T GetUser() const { return user_; }
  // Get the position of the input record that this use corresponds to.
  size_t GetIndex() const { return index_; }
  // Set the position of the input record that this use corresponds to.
  void SetIndex(size_t index) { index_ = index; }

 private:
  // Private: nodes are created only by HInstruction (a friend).
  HUseListNode(T user, size_t index)
      : user_(user), index_(index) {}

  // The user is fixed for the node's lifetime; the index may be updated
  // via SetIndex() when inputs are reordered.
  T const user_;
  size_t index_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
};
1751
1752 template <typename T>
1753 using HUseList = IntrusiveForwardList<HUseListNode<T>>;
1754
1755 // This class is used by HEnvironment and HInstruction classes to record the
1756 // instructions they use and pointers to the corresponding HUseListNodes kept
1757 // by the used instructions.
1758 template <typename T>
1759 class HUserRecord : public ValueObject {
1760 public:
HUserRecord()1761 HUserRecord() : instruction_(nullptr), before_use_node_() {}
HUserRecord(HInstruction * instruction)1762 explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), before_use_node_() {}
1763
HUserRecord(const HUserRecord<T> & old_record,typename HUseList<T>::iterator before_use_node)1764 HUserRecord(const HUserRecord<T>& old_record, typename HUseList<T>::iterator before_use_node)
1765 : HUserRecord(old_record.instruction_, before_use_node) {}
HUserRecord(HInstruction * instruction,typename HUseList<T>::iterator before_use_node)1766 HUserRecord(HInstruction* instruction, typename HUseList<T>::iterator before_use_node)
1767 : instruction_(instruction), before_use_node_(before_use_node) {
1768 DCHECK(instruction_ != nullptr);
1769 }
1770
GetInstruction()1771 HInstruction* GetInstruction() const { return instruction_; }
GetBeforeUseNode()1772 typename HUseList<T>::iterator GetBeforeUseNode() const { return before_use_node_; }
GetUseNode()1773 typename HUseList<T>::iterator GetUseNode() const { return ++GetBeforeUseNode(); }
1774
1775 private:
1776 // Instruction used by the user.
1777 HInstruction* instruction_;
1778
1779 // Iterator before the corresponding entry in the use list kept by 'instruction_'.
1780 typename HUseList<T>::iterator before_use_node_;
1781 };
1782
// Helper class that extracts the input instruction from HUserRecord<HInstruction*>.
// This is used for HInstruction::GetInputs() to return a container wrapper providing
// HInstruction* values even though the underlying container has HUserRecord<>s.
struct HInputExtractor {
  // Non-const overload, used when transforming a mutable record array.
  HInstruction* operator()(HUserRecord<HInstruction*>& record) const {
    return record.GetInstruction();
  }
  // Const overload, used when transforming a const record array.
  const HInstruction* operator()(const HUserRecord<HInstruction*>& record) const {
    return record.GetInstruction();
  }
};

// Views over an instruction's input records that yield plain
// (const) HInstruction* values.
using HInputsRef = TransformArrayRef<HUserRecord<HInstruction*>, HInputExtractor>;
using HConstInputsRef = TransformArrayRef<const HUserRecord<HInstruction*>, HInputExtractor>;
1797
1798 /**
1799 * Side-effects representation.
1800 *
1801 * For write/read dependences on fields/arrays, the dependence analysis uses
1802 * type disambiguation (e.g. a float field write cannot modify the value of an
1803 * integer field read) and the access type (e.g. a reference array write cannot
1804 * modify the value of a reference field read [although it may modify the
1805 * reference fetch prior to reading the field, which is represented by its own
1806 * write/read dependence]). The analysis makes conservative points-to
1807 * assumptions on reference types (e.g. two same typed arrays are assumed to be
1808 * the same, and any reference read depends on any reference read without
1809 * further regard of its type).
1810 *
1811 * kDependsOnGCBit is defined in the following way: instructions with kDependsOnGCBit must not be
1812 * alive across the point where garbage collection might happen.
1813 *
1814 * Note: Instructions with kCanTriggerGCBit do not depend on each other.
1815 *
1816 * kCanTriggerGCBit must be used for instructions for which GC might happen on the path across
1817 * those instructions from the compiler perspective (between this instruction and the next one
1818 * in the IR).
1819 *
1820 * Note: Instructions which can cause GC only on a fatal slow path do not need
1821 * kCanTriggerGCBit as the execution never returns to the instruction next to the exceptional
1822 * one. However the execution may return to compiled code if there is a catch block in the
1823 * current method; for this purpose the TryBoundary exit instruction has kCanTriggerGCBit
1824 * set.
1825 *
 * The internal representation uses 38 bits and is described in the table below.
1827 * The first line indicates the side effect, and for field/array accesses the
1828 * second line indicates the type of the access (in the order of the
1829 * DataType::Type enum).
1830 * The two numbered lines below indicate the bit position in the bitfield (read
1831 * vertically).
1832 *
1833 * |Depends on GC|ARRAY-R |FIELD-R |Can trigger GC|ARRAY-W |FIELD-W |
1834 * +-------------+---------+---------+--------------+---------+---------+
1835 * | |DFJISCBZL|DFJISCBZL| |DFJISCBZL|DFJISCBZL|
1836 * | 3 |333333322|222222221| 1 |111111110|000000000|
1837 * | 7 |654321098|765432109| 8 |765432109|876543210|
1838 *
1839 * Note that, to ease the implementation, 'changes' bits are least significant
1840 * bits, while 'dependency' bits are most significant bits.
1841 */
1842 class SideEffects : public ValueObject {
1843 public:
SideEffects()1844 SideEffects() : flags_(0) {}
1845
None()1846 static SideEffects None() {
1847 return SideEffects(0);
1848 }
1849
All()1850 static SideEffects All() {
1851 return SideEffects(kAllChangeBits | kAllDependOnBits);
1852 }
1853
AllChanges()1854 static SideEffects AllChanges() {
1855 return SideEffects(kAllChangeBits);
1856 }
1857
AllDependencies()1858 static SideEffects AllDependencies() {
1859 return SideEffects(kAllDependOnBits);
1860 }
1861
AllExceptGCDependency()1862 static SideEffects AllExceptGCDependency() {
1863 return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
1864 }
1865
AllWritesAndReads()1866 static SideEffects AllWritesAndReads() {
1867 return SideEffects(kAllWrites | kAllReads);
1868 }
1869
AllWrites()1870 static SideEffects AllWrites() {
1871 return SideEffects(kAllWrites);
1872 }
1873
AllReads()1874 static SideEffects AllReads() {
1875 return SideEffects(kAllReads);
1876 }
1877
FieldWriteOfType(DataType::Type type,bool is_volatile)1878 static SideEffects FieldWriteOfType(DataType::Type type, bool is_volatile) {
1879 return is_volatile
1880 ? AllWritesAndReads()
1881 : SideEffects(TypeFlag(type, kFieldWriteOffset));
1882 }
1883
ArrayWriteOfType(DataType::Type type)1884 static SideEffects ArrayWriteOfType(DataType::Type type) {
1885 return SideEffects(TypeFlag(type, kArrayWriteOffset));
1886 }
1887
FieldReadOfType(DataType::Type type,bool is_volatile)1888 static SideEffects FieldReadOfType(DataType::Type type, bool is_volatile) {
1889 return is_volatile
1890 ? AllWritesAndReads()
1891 : SideEffects(TypeFlag(type, kFieldReadOffset));
1892 }
1893
ArrayReadOfType(DataType::Type type)1894 static SideEffects ArrayReadOfType(DataType::Type type) {
1895 return SideEffects(TypeFlag(type, kArrayReadOffset));
1896 }
1897
1898 // Returns whether GC might happen across this instruction from the compiler perspective so
1899 // the next instruction in the IR would see that.
1900 //
1901 // See the SideEffect class comments.
CanTriggerGC()1902 static SideEffects CanTriggerGC() {
1903 return SideEffects(1ULL << kCanTriggerGCBit);
1904 }
1905
1906 // Returns whether the instruction must not be alive across a GC point.
1907 //
1908 // See the SideEffect class comments.
DependsOnGC()1909 static SideEffects DependsOnGC() {
1910 return SideEffects(1ULL << kDependsOnGCBit);
1911 }
1912
1913 // Combines the side-effects of this and the other.
Union(SideEffects other)1914 SideEffects Union(SideEffects other) const {
1915 return SideEffects(flags_ | other.flags_);
1916 }
1917
Exclusion(SideEffects other)1918 SideEffects Exclusion(SideEffects other) const {
1919 return SideEffects(flags_ & ~other.flags_);
1920 }
1921
Add(SideEffects other)1922 void Add(SideEffects other) {
1923 flags_ |= other.flags_;
1924 }
1925
Includes(SideEffects other)1926 bool Includes(SideEffects other) const {
1927 return (other.flags_ & flags_) == other.flags_;
1928 }
1929
HasSideEffects()1930 bool HasSideEffects() const {
1931 return (flags_ & kAllChangeBits);
1932 }
1933
HasDependencies()1934 bool HasDependencies() const {
1935 return (flags_ & kAllDependOnBits);
1936 }
1937
1938 // Returns true if there are no side effects or dependencies.
DoesNothing()1939 bool DoesNothing() const {
1940 return flags_ == 0;
1941 }
1942
1943 // Returns true if something is written.
DoesAnyWrite()1944 bool DoesAnyWrite() const {
1945 return (flags_ & kAllWrites);
1946 }
1947
1948 // Returns true if something is read.
DoesAnyRead()1949 bool DoesAnyRead() const {
1950 return (flags_ & kAllReads);
1951 }
1952
1953 // Returns true if potentially everything is written and read
1954 // (every type and every kind of access).
DoesAllReadWrite()1955 bool DoesAllReadWrite() const {
1956 return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
1957 }
1958
DoesAll()1959 bool DoesAll() const {
1960 return flags_ == (kAllChangeBits | kAllDependOnBits);
1961 }
1962
1963 // Returns true if `this` may read something written by `other`.
MayDependOn(SideEffects other)1964 bool MayDependOn(SideEffects other) const {
1965 const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
1966 return (other.flags_ & depends_on_flags);
1967 }
1968
1969 // Returns string representation of flags (for debugging only).
1970 // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
ToString()1971 std::string ToString() const {
1972 std::string flags = "|";
1973 for (int s = kLastBit; s >= 0; s--) {
1974 bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
1975 if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
1976 // This is a bit for the GC side effect.
1977 if (current_bit_is_set) {
1978 flags += "GC";
1979 }
1980 flags += "|";
1981 } else {
1982 // This is a bit for the array/field analysis.
1983 // The underscore character stands for the 'can trigger GC' bit.
1984 static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
1985 if (current_bit_is_set) {
1986 flags += kDebug[s];
1987 }
1988 if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
1989 (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
1990 flags += "|";
1991 }
1992 }
1993 }
1994 return flags;
1995 }
1996
Equals(const SideEffects & other)1997 bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }
1998
1999 private:
2000 static constexpr int kFieldArrayAnalysisBits = 9;
2001
2002 static constexpr int kFieldWriteOffset = 0;
2003 static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
2004 static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
2005 static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;
2006
2007 static constexpr int kChangeBits = kCanTriggerGCBit + 1;
2008
2009 static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
2010 static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
2011 static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
2012 static constexpr int kDependsOnGCBit = kLastBitForReads + 1;
2013
2014 static constexpr int kLastBit = kDependsOnGCBit;
2015 static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;
2016
2017 // Aliases.
2018
2019 static_assert(kChangeBits == kDependOnBits,
2020 "the 'change' bits should match the 'depend on' bits.");
2021
2022 static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
2023 static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
2024 static constexpr uint64_t kAllWrites =
2025 ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
2026 static constexpr uint64_t kAllReads =
2027 ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;
2028
2029 // Translates type to bit flag. The type must correspond to a Java type.
TypeFlag(DataType::Type type,int offset)2030 static uint64_t TypeFlag(DataType::Type type, int offset) {
2031 int shift;
2032 switch (type) {
2033 case DataType::Type::kReference: shift = 0; break;
2034 case DataType::Type::kBool: shift = 1; break;
2035 case DataType::Type::kInt8: shift = 2; break;
2036 case DataType::Type::kUint16: shift = 3; break;
2037 case DataType::Type::kInt16: shift = 4; break;
2038 case DataType::Type::kInt32: shift = 5; break;
2039 case DataType::Type::kInt64: shift = 6; break;
2040 case DataType::Type::kFloat32: shift = 7; break;
2041 case DataType::Type::kFloat64: shift = 8; break;
2042 default:
2043 LOG(FATAL) << "Unexpected data type " << type;
2044 UNREACHABLE();
2045 }
2046 DCHECK_LE(kFieldWriteOffset, shift);
2047 DCHECK_LT(shift, kArrayWriteOffset);
2048 return UINT64_C(1) << (shift + offset);
2049 }
2050
2051 // Private constructor on direct flags value.
SideEffects(uint64_t flags)2052 explicit SideEffects(uint64_t flags) : flags_(flags) {}
2053
2054 uint64_t flags_;
2055 };
2056
// A HEnvironment object contains the values of virtual registers at a given location.
class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
 public:
  // Creates an environment with `number_of_vregs` empty vreg slots and no
  // location array yet (see AllocateLocations()). `holder` is the
  // instruction that owns this environment.
  ALWAYS_INLINE HEnvironment(ArenaAllocator* allocator,
                             size_t number_of_vregs,
                             ArtMethod* method,
                             uint32_t dex_pc,
                             HInstruction* holder)
     : vregs_(number_of_vregs, allocator->Adapter(kArenaAllocEnvironmentVRegs)),
       locations_(allocator->Adapter(kArenaAllocEnvironmentLocations)),
       parent_(nullptr),
       method_(method),
       dex_pc_(dex_pc),
       holder_(holder) {
  }

  // Shallow copy: takes size, method and dex pc from `to_copy`, but the
  // vreg values and locations are not copied (see CopyFrom()).
  ALWAYS_INLINE HEnvironment(ArenaAllocator* allocator,
                             const HEnvironment& to_copy,
                             HInstruction* holder)
      : HEnvironment(allocator,
                     to_copy.Size(),
                     to_copy.GetMethod(),
                     to_copy.GetDexPc(),
                     holder) {}

  // Allocates the location array, one Location per vreg slot. May be
  // called at most once (the array must still be empty).
  void AllocateLocations() {
    DCHECK(locations_.empty());
    locations_.resize(vregs_.size());
  }

  // Copies `parent` (and recursively its whole parent chain) and attaches
  // the copy at the end of this environment's own parent chain.
  void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
    if (parent_ != nullptr) {
      parent_->SetAndCopyParentChain(allocator, parent);
    } else {
      parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
      parent_->CopyFrom(parent);
      if (parent->GetParent() != nullptr) {
        parent_->SetAndCopyParentChain(allocator, parent->GetParent());
      }
    }
  }

  void CopyFrom(ArrayRef<HInstruction* const> locals);
  void CopyFrom(HEnvironment* environment);

  // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
  // input to the loop phi instead. This is for inserting instructions that
  // require an environment (like HDeoptimization) in the loop pre-header.
  void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);

  // Records `instruction` as the value of vreg `index` without updating
  // any use lists ("raw" setter).
  void SetRawEnvAt(size_t index, HInstruction* instruction) {
    vregs_[index] = HUserRecord<HEnvironment*>(instruction);
  }

  // The instruction recorded as the value of vreg `index` (may be null).
  HInstruction* GetInstructionAt(size_t index) const {
    return vregs_[index].GetInstruction();
  }

  void RemoveAsUserOfInput(size_t index) const;

  // Replaces the input at the position 'index' with the replacement; the replacement and old
  // input instructions' env_uses_ lists are adjusted. The function works similar to
  // HInstruction::ReplaceInput.
  void ReplaceInput(HInstruction* replacement, size_t index);

  // Number of virtual register slots in this environment.
  size_t Size() const { return vregs_.size(); }

  // The environment of the caller this code was inlined from, if any.
  HEnvironment* GetParent() const { return parent_; }

  void SetLocationAt(size_t index, Location location) {
    locations_[index] = location;
  }

  Location GetLocationAt(size_t index) const {
    return locations_[index];
  }

  uint32_t GetDexPc() const {
    return dex_pc_;
  }

  ArtMethod* GetMethod() const {
    return method_;
  }

  // The instruction that owns this environment.
  HInstruction* GetHolder() const {
    return holder_;
  }


  // An environment has a parent exactly when its holder was inlined.
  bool IsFromInlinedInvoke() const {
    return GetParent() != nullptr;
  }

  // Functor mapping a vreg index to the instruction recorded at that index.
  class EnvInputSelector {
   public:
    explicit EnvInputSelector(const HEnvironment* e) : env_(e) {}
    HInstruction* operator()(size_t s) const {
      return env_->GetInstructionAt(s);
    }
   private:
    const HEnvironment* env_;
  };

  using HConstEnvInputRef = TransformIterator<CountIter, EnvInputSelector>;
  // Iteration range over the instructions recorded in this environment's vregs.
  IterationRange<HConstEnvInputRef> GetEnvInputs() const {
    IterationRange<CountIter> range(Range(Size()));
    return MakeIterationRange(MakeTransformIterator(range.begin(), EnvInputSelector(this)),
                              MakeTransformIterator(range.end(), EnvInputSelector(this)));
  }

 private:
  ArenaVector<HUserRecord<HEnvironment*>> vregs_;
  ArenaVector<Location> locations_;
  HEnvironment* parent_;
  ArtMethod* method_;
  const uint32_t dex_pc_;

  // The instruction that holds this environment.
  HInstruction* const holder_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
};
2182
2183 std::ostream& operator<<(std::ostream& os, const HInstruction& rhs);
2184
2185 // Iterates over the Environments
2186 class HEnvironmentIterator : public ValueObject,
2187 public std::iterator<std::forward_iterator_tag, HEnvironment*> {
2188 public:
HEnvironmentIterator(HEnvironment * cur)2189 explicit HEnvironmentIterator(HEnvironment* cur) : cur_(cur) {}
2190
2191 HEnvironment* operator*() const {
2192 return cur_;
2193 }
2194
2195 HEnvironmentIterator& operator++() {
2196 DCHECK(cur_ != nullptr);
2197 cur_ = cur_->GetParent();
2198 return *this;
2199 }
2200
2201 HEnvironmentIterator operator++(int) {
2202 HEnvironmentIterator prev(*this);
2203 ++(*this);
2204 return prev;
2205 }
2206
2207 bool operator==(const HEnvironmentIterator& other) const {
2208 return other.cur_ == cur_;
2209 }
2210
2211 bool operator!=(const HEnvironmentIterator& other) const {
2212 return !(*this == other);
2213 }
2214
2215 private:
2216 HEnvironment* cur_;
2217 };
2218
2219 class HInstruction : public ArenaObject<kArenaAllocInstruction> {
2220 public:
2221 #define DECLARE_KIND(type, super) k##type,
2222 enum InstructionKind { // private marker to avoid generate-operator-out.py from processing.
2223 FOR_EACH_CONCRETE_INSTRUCTION(DECLARE_KIND)
2224 kLastInstructionKind
2225 };
2226 #undef DECLARE_KIND
2227
HInstruction(InstructionKind kind,SideEffects side_effects,uint32_t dex_pc)2228 HInstruction(InstructionKind kind, SideEffects side_effects, uint32_t dex_pc)
2229 : HInstruction(kind, DataType::Type::kVoid, side_effects, dex_pc) {}
2230
HInstruction(InstructionKind kind,DataType::Type type,SideEffects side_effects,uint32_t dex_pc)2231 HInstruction(InstructionKind kind, DataType::Type type, SideEffects side_effects, uint32_t dex_pc)
2232 : previous_(nullptr),
2233 next_(nullptr),
2234 block_(nullptr),
2235 dex_pc_(dex_pc),
2236 id_(-1),
2237 ssa_index_(-1),
2238 packed_fields_(0u),
2239 environment_(nullptr),
2240 locations_(nullptr),
2241 live_interval_(nullptr),
2242 lifetime_position_(kNoLifetime),
2243 side_effects_(side_effects),
2244 reference_type_handle_(ReferenceTypeInfo::CreateInvalid().GetTypeHandle()) {
2245 SetPackedField<InstructionKindField>(kind);
2246 SetPackedField<TypeField>(type);
2247 SetPackedFlag<kFlagReferenceTypeIsExact>(ReferenceTypeInfo::CreateInvalid().IsExact());
2248 }
2249
~HInstruction()2250 virtual ~HInstruction() {}
2251
2252 std::ostream& Dump(std::ostream& os, bool dump_args = false);
2253
2254 // Helper for dumping without argument information using operator<<
2255 struct NoArgsDump {
2256 const HInstruction* ins;
2257 };
DumpWithoutArgs()2258 NoArgsDump DumpWithoutArgs() const {
2259 return NoArgsDump{this};
2260 }
2261 // Helper for dumping with argument information using operator<<
2262 struct ArgsDump {
2263 const HInstruction* ins;
2264 };
DumpWithArgs()2265 ArgsDump DumpWithArgs() const {
2266 return ArgsDump{this};
2267 }
2268
GetNext()2269 HInstruction* GetNext() const { return next_; }
GetPrevious()2270 HInstruction* GetPrevious() const { return previous_; }
2271
2272 HInstruction* GetNextDisregardingMoves() const;
2273 HInstruction* GetPreviousDisregardingMoves() const;
2274
GetBlock()2275 HBasicBlock* GetBlock() const { return block_; }
GetAllocator()2276 ArenaAllocator* GetAllocator() const { return block_->GetGraph()->GetAllocator(); }
SetBlock(HBasicBlock * block)2277 void SetBlock(HBasicBlock* block) { block_ = block; }
IsInBlock()2278 bool IsInBlock() const { return block_ != nullptr; }
IsInLoop()2279 bool IsInLoop() const { return block_->IsInLoop(); }
IsLoopHeaderPhi()2280 bool IsLoopHeaderPhi() const { return IsPhi() && block_->IsLoopHeader(); }
IsIrreducibleLoopHeaderPhi()2281 bool IsIrreducibleLoopHeaderPhi() const {
2282 return IsLoopHeaderPhi() && GetBlock()->GetLoopInformation()->IsIrreducible();
2283 }
2284
2285 virtual ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() = 0;
2286
GetInputRecords()2287 ArrayRef<const HUserRecord<HInstruction*>> GetInputRecords() const {
2288 // One virtual method is enough, just const_cast<> and then re-add the const.
2289 return ArrayRef<const HUserRecord<HInstruction*>>(
2290 const_cast<HInstruction*>(this)->GetInputRecords());
2291 }
2292
GetInputs()2293 HInputsRef GetInputs() {
2294 return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
2295 }
2296
GetInputs()2297 HConstInputsRef GetInputs() const {
2298 return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
2299 }
2300
InputCount()2301 size_t InputCount() const { return GetInputRecords().size(); }
InputAt(size_t i)2302 HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }
2303
HasInput(HInstruction * input)2304 bool HasInput(HInstruction* input) const {
2305 for (const HInstruction* i : GetInputs()) {
2306 if (i == input) {
2307 return true;
2308 }
2309 }
2310 return false;
2311 }
2312
  // Sets the input at `index` without updating the input's use list
  // (the "raw" variants leave def-use bookkeeping to the caller).
  void SetRawInputAt(size_t index, HInstruction* input) {
    SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
  }

  // Visitor dispatch and human-readable name; implemented per concrete
  // instruction kind via DECLARE_INSTRUCTION.
  virtual void Accept(HGraphVisitor* visitor) = 0;
  virtual const char* DebugName() const = 0;

  // Result type of this instruction, decoded from the packed fields.
  DataType::Type GetType() const {
    return TypeField::Decode(GetPackedFields());
  }

  // Whether this instruction needs an HEnvironment, e.g. because it may call
  // into the runtime and the stack must be walkable at this point.
  virtual bool NeedsEnvironment() const { return false; }

  // Whether code generation for this instruction reads a .bss entry.
  virtual bool NeedsBss() const {
    return false;
  }

  uint32_t GetDexPc() const { return dex_pc_; }

  virtual bool IsControlFlow() const { return false; }

  // Can the instruction throw?
  // TODO: We should rename to CanVisiblyThrow, as some instructions (like HNewInstance),
  // could throw OOME, but it is still OK to remove them if they are unused.
  virtual bool CanThrow() const { return false; }

  // Does the instruction always throw an exception unconditionally?
  virtual bool AlwaysThrows() const { return false; }
  // Will this instruction only cause async exceptions if it causes any at all?
  virtual bool OnlyThrowsAsyncExceptions() const {
    return false;
  }

  // True when a throw here could be caught locally (block is inside a try).
  bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }

  bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
  bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }

  // Does not apply for all instructions, but having this at top level greatly
  // simplifies the null check elimination.
  // TODO: Consider merging can_be_null into ReferenceTypeInfo.
  virtual bool CanBeNull() const {
    DCHECK_EQ(GetType(), DataType::Type::kReference) << "CanBeNull only applies to reference types";
    return true;
  }

  // Whether codegen may fold a null check of `obj` into this instruction's
  // own memory access (overridden by memory-touching instructions).
  virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
    return false;
  }
2361
2362 // If this instruction will do an implicit null check, return the `HNullCheck` associated
2363 // with it. Otherwise return null.
GetImplicitNullCheck()2364 HNullCheck* GetImplicitNullCheck() const {
2365 // Go over previous non-move instructions that are emitted at use site.
2366 HInstruction* prev_not_move = GetPreviousDisregardingMoves();
2367 while (prev_not_move != nullptr && prev_not_move->IsEmittedAtUseSite()) {
2368 if (prev_not_move->IsNullCheck()) {
2369 return prev_not_move->AsNullCheck();
2370 }
2371 prev_not_move = prev_not_move->GetPreviousDisregardingMoves();
2372 }
2373 return nullptr;
2374 }
2375
  // Whether this value is an actual heap reference; true exactly when the
  // instruction's type is kReference.
  virtual bool IsActualObject() const {
    return GetType() == DataType::Type::kReference;
  }

  // Sets the ReferenceTypeInfo. The RTI must be valid.
  void SetReferenceTypeInfo(ReferenceTypeInfo rti);
  // Same as above, but we only set it if it's valid. Otherwise, we don't change the current RTI.
  void SetReferenceTypeInfoIfValid(ReferenceTypeInfo rti);

  // Reassembles the RTI from the stored handle plus the is-exact packed flag.
  ReferenceTypeInfo GetReferenceTypeInfo() const {
    DCHECK_EQ(GetType(), DataType::Type::kReference);
    return ReferenceTypeInfo::CreateUnchecked(reference_type_handle_,
                                              GetPackedFlag<kFlagReferenceTypeIsExact>());
  }

  // Records that `user` consumes this instruction as its `index`-th input and
  // fixes up the user's cached use-list iterator.
  void AddUseAt(HInstruction* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: fixup_end remains valid across push_front().
    auto fixup_end = uses_.empty() ? uses_.begin() : ++uses_.begin();
    ArenaAllocator* allocator = user->GetBlock()->GetGraph()->GetAllocator();
    HUseListNode<HInstruction*>* new_node =
        new (allocator) HUseListNode<HInstruction*>(user, index);
    uses_.push_front(*new_node);
    FixUpUserRecordsAfterUseInsertion(fixup_end);
  }

  // Records that environment `user` references this instruction at `index`.
  void AddEnvUseAt(HEnvironment* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: env_fixup_end remains valid across push_front().
    auto env_fixup_end = env_uses_.empty() ? env_uses_.begin() : ++env_uses_.begin();
    HUseListNode<HEnvironment*>* new_node =
        new (GetBlock()->GetGraph()->GetAllocator()) HUseListNode<HEnvironment*>(user, index);
    env_uses_.push_front(*new_node);
    FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);
  }

  // Unlinks this instruction from the use list of its `input`-th input.
  void RemoveAsUserOfInput(size_t input) {
    HUserRecord<HInstruction*> input_use = InputRecordAt(input);
    HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
    input_use.GetInstruction()->uses_.erase_after(before_use_node);
    input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
  }

  // Unlinks this instruction from the use lists of all of its inputs.
  void RemoveAsUserOfAllInputs() {
    for (const HUserRecord<HInstruction*>& input_use : GetInputRecords()) {
      HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
      input_use.GetInstruction()->uses_.erase_after(before_use_node);
      input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
    }
  }
2426
  // Instructions that take this instruction as an input.
  const HUseList<HInstruction*>& GetUses() const { return uses_; }
  // Environments that record this instruction as a live dex register value.
  const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }

  bool HasUses() const { return !uses_.empty() || !env_uses_.empty(); }
  bool HasEnvironmentUses() const { return !env_uses_.empty(); }
  bool HasNonEnvironmentUses() const { return !uses_.empty(); }
  bool HasOnlyOneNonEnvironmentUse() const {
    return !HasEnvironmentUses() && GetUses().HasExactlyOneElement();
  }

  // Whether dead code elimination is allowed to drop this instruction
  // (still requires it to be unused, see IsDeadAndRemovable()).
  bool IsRemovable() const {
    return
        !DoesAnyWrite() &&
        !CanThrow() &&
        !IsSuspendCheck() &&
        !IsControlFlow() &&
        !IsNop() &&
        !IsParameterValue() &&
        // If we added an explicit barrier then we should keep it.
        !IsMemoryBarrier() &&
        !IsConstructorFence();
  }

  bool IsDeadAndRemovable() const {
    return IsRemovable() && !HasUses();
  }

  // Does this instruction dominate `other_instruction`?
  // Aborts if this instruction and `other_instruction` are different phis.
  bool Dominates(HInstruction* other_instruction) const;

  // Same but with `strictly dominates` i.e. returns false if this instruction and
  // `other_instruction` are the same.
  bool StrictlyDominates(HInstruction* other_instruction) const;

  int GetId() const { return id_; }
  void SetId(int id) { id_ = id; }

  // SSA index assigned by liveness analysis; -1 when none has been assigned.
  int GetSsaIndex() const { return ssa_index_; }
  void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
  bool HasSsaIndex() const { return ssa_index_ != -1; }

  bool HasEnvironment() const { return environment_ != nullptr; }
  HEnvironment* GetEnvironment() const { return environment_; }
  // Iterates over this environment and its whole parent chain (inlined frames).
  IterationRange<HEnvironmentIterator> GetAllEnvironments() const {
    return MakeIterationRange(HEnvironmentIterator(GetEnvironment()),
                              HEnvironmentIterator(nullptr));
  }
  // Set the `environment_` field. Raw because this method does not
  // update the uses lists.
  void SetRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    environment_ = environment;
  }

  // Prepends `environment` to the existing chain. Raw: no use list update.
  void InsertRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ != nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    DCHECK(environment->GetParent() == nullptr);
    environment->parent_ = environment_;
    environment_ = environment;
  }
2490
  void RemoveEnvironment();

  // Set the environment of this instruction, copying it from `environment`. While
  // copying, the uses lists are being updated.
  void CopyEnvironmentFrom(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetAllocator();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFrom(environment);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Same as CopyEnvironmentFrom(), but values defined in the loop header
  // `block` are replaced by the corresponding loop phis.
  void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
                                                HBasicBlock* block) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetAllocator();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFromWithLoopPhiAdjustment(environment, block);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Returns the number of entries in the environment. Typically, that is the
  // number of dex registers in a method. It could be more in case of inlining.
  size_t EnvironmentSize() const;

  // Register locations, assigned by the code generator.
  LocationSummary* GetLocations() const { return locations_; }
  void SetLocations(LocationSummary* locations) { locations_ = locations; }

  void ReplaceWith(HInstruction* instruction);
  void ReplaceUsesDominatedBy(HInstruction* dominator,
                              HInstruction* replacement,
                              bool strictly_dominated = true);
  void ReplaceEnvUsesDominatedBy(HInstruction* dominator, HInstruction* replacement);
  void ReplaceInput(HInstruction* replacement, size_t index);

  // This is almost the same as doing `ReplaceWith()`. But in this helper, the
  // uses of this instruction by `other` are *not* updated.
  void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
    ReplaceWith(other);
    other->ReplaceInput(this, use_index);
  }

  // Move `this` instruction before `cursor`
  void MoveBefore(HInstruction* cursor, bool do_checks = true);

  // Move `this` before its first user and out of any loops. If there is no
  // out-of-loop user that dominates all other users, move the instruction
  // to the end of the out-of-loop common dominator of the user's blocks.
  //
  // This can be used only on non-throwing instructions with no side effects that
  // have at least one use but no environment uses.
  void MoveBeforeFirstUserAndOutOfLoops();
2547
  // Generates an Is<Type>() predicate for every concrete instruction kind.
#define INSTRUCTION_TYPE_CHECK(type, super) \
  bool Is##type() const;

  FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

  // Generates As<Type>() downcast helpers for every concrete instruction kind.
#define INSTRUCTION_TYPE_CAST(type, super) \
  const H##type* As##type() const; \
  H##type* As##type();

  FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CAST)
#undef INSTRUCTION_TYPE_CAST

  // Return a clone of the instruction if it is clonable (shallow copy by default, custom copy
  // if a custom copy-constructor is provided for a particular type). If IsClonable() is false for
  // the instruction then the behaviour of this function is undefined.
  //
  // Note: It is semantically valid to create a clone of the instruction only until
  // prepare_for_register_allocator phase as lifetime, intervals and codegen info are not
  // copied.
  //
  // Note: HEnvironment and some other fields are not copied and are set to default values, see
  // 'explicit HInstruction(const HInstruction& other)' for details.
  virtual HInstruction* Clone(ArenaAllocator* arena ATTRIBUTE_UNUSED) const {
    LOG(FATAL) << "Cloning is not implemented for the instruction " <<
                  DebugName() << " " << GetId();
    UNREACHABLE();
  }

  // Whether this instruction reads/writes an instance or static field.
  virtual bool IsFieldAccess() const {
    return false;
  }

  // Field metadata; only meaningful when IsFieldAccess() is true and must be
  // overridden by the field accessor instructions.
  virtual const FieldInfo& GetFieldInfo() const {
    CHECK(IsFieldAccess()) << "Only callable on field accessors not " << DebugName() << " "
                           << *this;
    LOG(FATAL) << "Must be overridden by field accessors. Not implemented by " << *this;
    UNREACHABLE();
  }

  // Return whether instruction can be cloned (copied).
  virtual bool IsClonable() const { return false; }

  // Returns whether the instruction can be moved within the graph.
  // TODO: this method is used by LICM and GVN with possibly different
  // meanings? split and rename?
  virtual bool CanBeMoved() const { return false; }

  // Returns whether any data encoded in the two instructions is equal.
  // This method does not look at the inputs. Both instructions must be
  // of the same type, otherwise the method has undefined behavior.
  virtual bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }
2602
  // Returns whether two instructions are equal, that is:
  // 1) They have the same type and contain the same data (InstructionDataEquals).
  // 2) Their inputs are identical.
  bool Equals(const HInstruction* other) const;

  InstructionKind GetKind() const { return GetPackedField<InstructionKindField>(); }

  // Hash combining the instruction kind with the ids of all inputs.
  virtual size_t ComputeHashCode() const {
    size_t result = GetKind();
    for (const HInstruction* input : GetInputs()) {
      result = (result * 31) + input->GetId();
    }
    return result;
  }

  SideEffects GetSideEffects() const { return side_effects_; }
  void SetSideEffects(SideEffects other) { side_effects_ = other; }
  void AddSideEffects(SideEffects other) { side_effects_.Add(other); }

  // Lifetime position / live interval, assigned by liveness analysis.
  size_t GetLifetimePosition() const { return lifetime_position_; }
  void SetLifetimePosition(size_t position) { lifetime_position_ = position; }
  LiveInterval* GetLiveInterval() const { return live_interval_; }
  void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; }
  bool HasLiveInterval() const { return live_interval_ != nullptr; }

  bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); }

  // Returns whether the code generation of the instruction will require to have access
  // to the current method. Such instructions are:
  // (1): Instructions that require an environment, as calling the runtime requires
  //      to walk the stack and have the current method stored at a specific stack address.
  // (2): HCurrentMethod, potentially used by HInvokeStaticOrDirect, HLoadString, or HLoadClass
  //      to access the dex cache.
  bool NeedsCurrentMethod() const {
    return NeedsEnvironment() || IsCurrentMethod();
  }

  // Does this instruction have any use in an environment before
  // control flow hits 'other'?
  bool HasAnyEnvironmentUseBefore(HInstruction* other);

  // Remove all references to environment uses of this instruction.
  // The caller must ensure that this is safe to do.
  void RemoveEnvironmentUsers();

  bool IsEmittedAtUseSite() const { return GetPackedFlag<kFlagEmittedAtUseSite>(); }
  void MarkEmittedAtUseSite() { SetPackedFlag<kFlagEmittedAtUseSite>(true); }
2650
 protected:
  // If set, the machine code for this instruction is assumed to be generated by
  // its users. Used by liveness analysis to compute use positions accordingly.
  static constexpr size_t kFlagEmittedAtUseSite = 0u;
  static constexpr size_t kFlagReferenceTypeIsExact = kFlagEmittedAtUseSite + 1;
  // Layout of the shared 32-bit packed field. Subclasses add their own flags
  // starting at kNumberOfGenericPackedBits.
  static constexpr size_t kFieldInstructionKind = kFlagReferenceTypeIsExact + 1;
  static constexpr size_t kFieldInstructionKindSize =
      MinimumBitsToStore(static_cast<size_t>(InstructionKind::kLastInstructionKind - 1));
  static constexpr size_t kFieldType =
      kFieldInstructionKind + kFieldInstructionKindSize;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
  static constexpr size_t kNumberOfGenericPackedBits = kFieldType + kFieldTypeSize;
  static constexpr size_t kMaxNumberOfPackedBits = sizeof(uint32_t) * kBitsPerByte;

  static_assert(kNumberOfGenericPackedBits <= kMaxNumberOfPackedBits,
                "Too many generic packed fields");

  using TypeField = BitField<DataType::Type, kFieldType, kFieldTypeSize>;

  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const {
    return GetInputRecords()[i];
  }

  // Overwrites the input record without maintaining the input's use list.
  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) {
    ArrayRef<HUserRecord<HInstruction*>> input_records = GetInputRecords();
    input_records[index] = input;
  }

  uint32_t GetPackedFields() const {
    return packed_fields_;
  }

  // Reads a single bit of the packed fields.
  template <size_t flag>
  bool GetPackedFlag() const {
    return (packed_fields_ & (1u << flag)) != 0u;
  }

  // Writes a single bit of the packed fields.
  template <size_t flag>
  void SetPackedFlag(bool value = true) {
    packed_fields_ = (packed_fields_ & ~(1u << flag)) | ((value ? 1u : 0u) << flag);
  }

  template <typename BitFieldType>
  typename BitFieldType::value_type GetPackedField() const {
    return BitFieldType::Decode(packed_fields_);
  }

  template <typename BitFieldType>
  void SetPackedField(typename BitFieldType::value_type value) {
    DCHECK(IsUint<BitFieldType::size>(static_cast<uintptr_t>(value)));
    packed_fields_ = BitFieldType::Update(value, packed_fields_);
  }

  // Copy construction for the instruction (used for Clone function).
  //
  // Fields (e.g. lifetime, intervals and codegen info) associated with phases starting from
  // prepare_for_register_allocator are not copied (set to default values).
  //
  // Copy constructors must be provided for every HInstruction type; default copy constructor is
  // fine for most of them. However for some of the instructions a custom copy constructor must be
  // specified (when instruction has non-trivially copyable fields and must have a special behaviour
  // for copying them).
  explicit HInstruction(const HInstruction& other)
      : previous_(nullptr),
        next_(nullptr),
        block_(nullptr),
        dex_pc_(other.dex_pc_),
        id_(-1),
        ssa_index_(-1),
        packed_fields_(other.packed_fields_),
        environment_(nullptr),
        locations_(nullptr),
        live_interval_(nullptr),
        lifetime_position_(kNoLifetime),
        side_effects_(other.side_effects_),
        reference_type_handle_(other.reference_type_handle_) {
  }
2729
 private:
  using InstructionKindField =
      BitField<InstructionKind, kFieldInstructionKind, kFieldInstructionKindSize>;

  // After new use nodes were pushed at the front of `uses_`, rewrites the
  // affected users' input records so their cached "before" iterators are
  // valid again.
  void FixUpUserRecordsAfterUseInsertion(HUseList<HInstruction*>::iterator fixup_end) {
    auto before_use_node = uses_.before_begin();
    for (auto use_node = uses_.begin(); use_node != fixup_end; ++use_node) {
      HInstruction* user = use_node->GetUser();
      size_t input_index = use_node->GetIndex();
      user->SetRawInputRecordAt(input_index, HUserRecord<HInstruction*>(this, before_use_node));
      before_use_node = use_node;
    }
  }

  // After the node following `before_use_node` was erased, refreshes the next
  // remaining user's cached "before" iterator.
  void FixUpUserRecordsAfterUseRemoval(HUseList<HInstruction*>::iterator before_use_node) {
    auto next = ++HUseList<HInstruction*>::iterator(before_use_node);
    if (next != uses_.end()) {
      HInstruction* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->InputRecordAt(next_index).GetInstruction() == this);
      next_user->SetRawInputRecordAt(next_index, HUserRecord<HInstruction*>(this, before_use_node));
    }
  }

  // Environment-use counterpart of FixUpUserRecordsAfterUseInsertion().
  void FixUpUserRecordsAfterEnvUseInsertion(HUseList<HEnvironment*>::iterator env_fixup_end) {
    auto before_env_use_node = env_uses_.before_begin();
    for (auto env_use_node = env_uses_.begin(); env_use_node != env_fixup_end; ++env_use_node) {
      HEnvironment* user = env_use_node->GetUser();
      size_t input_index = env_use_node->GetIndex();
      user->vregs_[input_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
      before_env_use_node = env_use_node;
    }
  }

  // Environment-use counterpart of FixUpUserRecordsAfterUseRemoval().
  void FixUpUserRecordsAfterEnvUseRemoval(HUseList<HEnvironment*>::iterator before_env_use_node) {
    auto next = ++HUseList<HEnvironment*>::iterator(before_env_use_node);
    if (next != env_uses_.end()) {
      HEnvironment* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->vregs_[next_index].GetInstruction() == this);
      next_user->vregs_[next_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
    }
  }

  // Intrusive links within the owning basic block's instruction list.
  HInstruction* previous_;
  HInstruction* next_;
  HBasicBlock* block_;
  const uint32_t dex_pc_;

  // An instruction gets an id when it is added to the graph.
  // It reflects creation order. A negative id means the instruction
  // has not been added to the graph.
  int id_;

  // When doing liveness analysis, instructions that have uses get an SSA index.
  int ssa_index_;

  // Packed fields.
  uint32_t packed_fields_;

  // List of instructions that have this instruction as input.
  HUseList<HInstruction*> uses_;

  // List of environments that contain this instruction.
  HUseList<HEnvironment*> env_uses_;

  // The environment associated with this instruction. Not null if the instruction
  // might jump out of the method.
  HEnvironment* environment_;

  // Set by the code generator.
  LocationSummary* locations_;

  // Set by the liveness analysis.
  LiveInterval* live_interval_;

  // Set by the liveness analysis, this is the position in a linear
  // order of blocks where this instruction's live interval start.
  size_t lifetime_position_;

  SideEffects side_effects_;

  // The reference handle part of the reference type info.
  // The IsExact() flag is stored in packed fields.
  // TODO: for primitive types this should be marked as invalid.
  ReferenceTypeInfo::TypeHandle reference_type_handle_;

  friend class GraphChecker;
  friend class HBasicBlock;
  friend class HEnvironment;
  friend class HGraph;
  friend class HInstructionList;
};
2823
// Debug-printing helpers for instruction kinds, dumps, and use lists.
std::ostream& operator<<(std::ostream& os, HInstruction::InstructionKind rhs);
std::ostream& operator<<(std::ostream& os, const HInstruction::NoArgsDump rhs);
std::ostream& operator<<(std::ostream& os, const HInstruction::ArgsDump rhs);
std::ostream& operator<<(std::ostream& os, const HUseList<HInstruction*>& lst);
std::ostream& operator<<(std::ostream& os, const HUseList<HEnvironment*>& lst);

// Forward declarations for friends
template <typename InnerIter> struct HSTLInstructionIterator;
2832
2833 // Iterates over the instructions, while preserving the next instruction
2834 // in case the current instruction gets removed from the list by the user
2835 // of this iterator.
2836 class HInstructionIterator : public ValueObject {
2837 public:
HInstructionIterator(const HInstructionList & instructions)2838 explicit HInstructionIterator(const HInstructionList& instructions)
2839 : instruction_(instructions.first_instruction_) {
2840 next_ = Done() ? nullptr : instruction_->GetNext();
2841 }
2842
Done()2843 bool Done() const { return instruction_ == nullptr; }
Current()2844 HInstruction* Current() const { return instruction_; }
Advance()2845 void Advance() {
2846 instruction_ = next_;
2847 next_ = Done() ? nullptr : instruction_->GetNext();
2848 }
2849
2850 private:
HInstructionIterator()2851 HInstructionIterator() : instruction_(nullptr), next_(nullptr) {}
2852
2853 HInstruction* instruction_;
2854 HInstruction* next_;
2855
2856 friend struct HSTLInstructionIterator<HInstructionIterator>;
2857 };
2858
2859 // Iterates over the instructions without saving the next instruction,
2860 // therefore handling changes in the graph potentially made by the user
2861 // of this iterator.
2862 class HInstructionIteratorHandleChanges : public ValueObject {
2863 public:
2864 explicit HInstructionIteratorHandleChanges(const HInstructionList& instructions)
2865 : instruction_(instructions.first_instruction_) {
2866 }
2867
2868 bool Done() const { return instruction_ == nullptr; }
2869 HInstruction* Current() const { return instruction_; }
2870 void Advance() {
2871 instruction_ = instruction_->GetNext();
2872 }
2873
2874 private:
2875 HInstructionIteratorHandleChanges() : instruction_(nullptr) {}
2876
2877 HInstruction* instruction_;
2878
2879 friend struct HSTLInstructionIterator<HInstructionIteratorHandleChanges>;
2880 };
2881
2882
2883 class HBackwardInstructionIterator : public ValueObject {
2884 public:
2885 explicit HBackwardInstructionIterator(const HInstructionList& instructions)
2886 : instruction_(instructions.last_instruction_) {
2887 next_ = Done() ? nullptr : instruction_->GetPrevious();
2888 }
2889
2890 bool Done() const { return instruction_ == nullptr; }
2891 HInstruction* Current() const { return instruction_; }
2892 void Advance() {
2893 instruction_ = next_;
2894 next_ = Done() ? nullptr : instruction_->GetPrevious();
2895 }
2896
2897 private:
2898 HBackwardInstructionIterator() : instruction_(nullptr), next_(nullptr) {}
2899
2900 HInstruction* instruction_;
2901 HInstruction* next_;
2902
2903 friend struct HSTLInstructionIterator<HBackwardInstructionIterator>;
2904 };
2905
2906 template <typename InnerIter>
2907 struct HSTLInstructionIterator : public ValueObject,
2908 public std::iterator<std::forward_iterator_tag, HInstruction*> {
2909 public:
2910 static_assert(std::is_same_v<InnerIter, HBackwardInstructionIterator> ||
2911 std::is_same_v<InnerIter, HInstructionIterator> ||
2912 std::is_same_v<InnerIter, HInstructionIteratorHandleChanges>,
2913 "Unknown wrapped iterator!");
2914
2915 explicit HSTLInstructionIterator(InnerIter inner) : inner_(inner) {}
2916 HInstruction* operator*() const {
2917 DCHECK(inner_.Current() != nullptr);
2918 return inner_.Current();
2919 }
2920
2921 HSTLInstructionIterator<InnerIter>& operator++() {
2922 DCHECK(*this != HSTLInstructionIterator<InnerIter>::EndIter());
2923 inner_.Advance();
2924 return *this;
2925 }
2926
2927 HSTLInstructionIterator<InnerIter> operator++(int) {
2928 HSTLInstructionIterator<InnerIter> prev(*this);
2929 ++(*this);
2930 return prev;
2931 }
2932
2933 bool operator==(const HSTLInstructionIterator<InnerIter>& other) const {
2934 return inner_.Current() == other.inner_.Current();
2935 }
2936
2937 bool operator!=(const HSTLInstructionIterator<InnerIter>& other) const {
2938 return !(*this == other);
2939 }
2940
2941 static HSTLInstructionIterator<InnerIter> EndIter() {
2942 return HSTLInstructionIterator<InnerIter>(InnerIter());
2943 }
2944
2945 private:
2946 InnerIter inner_;
2947 };
2948
2949 template <typename InnerIter>
2950 IterationRange<HSTLInstructionIterator<InnerIter>> MakeSTLInstructionIteratorRange(InnerIter iter) {
2951 return MakeIterationRange(HSTLInstructionIterator<InnerIter>(iter),
2952 HSTLInstructionIterator<InnerIter>::EndIter());
2953 }
2954
// Base class for instructions with a variable number of inputs, stored in an
// arena-allocated vector (e.g. phis and invokes).
class HVariableInputSizeInstruction : public HInstruction {
 public:
  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() override {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

  void AddInput(HInstruction* input);
  void InsertInputAt(size_t index, HInstruction* input);
  void RemoveInputAt(size_t index);

  // Removes all the inputs.
  // Also removes this instructions from each input's use list
  // (for non-environment uses only).
  void RemoveAllInputs();

 protected:
  HVariableInputSizeInstruction(InstructionKind inst_kind,
                                SideEffects side_effects,
                                uint32_t dex_pc,
                                ArenaAllocator* allocator,
                                size_t number_of_inputs,
                                ArenaAllocKind kind)
      : HInstruction(inst_kind, side_effects, dex_pc),
        inputs_(number_of_inputs, allocator->Adapter(kind)) {}
  // Overload additionally carrying an explicit result type.
  HVariableInputSizeInstruction(InstructionKind inst_kind,
                                DataType::Type type,
                                SideEffects side_effects,
                                uint32_t dex_pc,
                                ArenaAllocator* allocator,
                                size_t number_of_inputs,
                                ArenaAllocKind kind)
      : HInstruction(inst_kind, type, side_effects, dex_pc),
        inputs_(number_of_inputs, allocator->Adapter(kind)) {}

  DEFAULT_COPY_CONSTRUCTOR(VariableInputSizeInstruction);

  ArenaVector<HUserRecord<HInstruction*>> inputs_;
};
2994
// Base class for instructions with a fixed number N of inputs, stored inline
// in a std::array (no separate arena allocation for the input records).
template<size_t N>
class HExpression : public HInstruction {
 public:
  HExpression<N>(InstructionKind kind, SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(kind, side_effects, dex_pc), inputs_() {}
  // Overload additionally carrying an explicit result type.
  HExpression<N>(InstructionKind kind,
                 DataType::Type type,
                 SideEffects side_effects,
                 uint32_t dex_pc)
      : HInstruction(kind, type, side_effects, dex_pc), inputs_() {}
  virtual ~HExpression() {}

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Expression<N>);

 private:
  std::array<HUserRecord<HInstruction*>, N> inputs_;

  friend class SsaBuilder;
};
3020
// HExpression specialization for N=0.
template<>
class HExpression<0> : public HInstruction {
 public:
  using HInstruction::HInstruction;

  virtual ~HExpression() {}

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  // No inputs: always returns an empty array view.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
    return ArrayRef<HUserRecord<HInstruction*>>();
  }

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Expression<0>);

 private:
  friend class SsaBuilder;
};
3040
// Instrumentation hook emitted on method entry. It calls into the runtime
// (SideEffects::All()), so it needs an environment and may throw.
class HMethodEntryHook : public HExpression<0> {
 public:
  explicit HMethodEntryHook(uint32_t dex_pc)
      : HExpression(kMethodEntryHook, SideEffects::All(), dex_pc) {}

  bool NeedsEnvironment() const override {
    return true;
  }

  bool CanThrow() const override { return true; }

  DECLARE_INSTRUCTION(MethodEntryHook);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(MethodEntryHook);
};
3057
// Instrumentation hook emitted on method exit; input 0 is the value being
// returned. Calls into the runtime, so it needs an environment and may throw.
class HMethodExitHook : public HExpression<1> {
 public:
  HMethodExitHook(HInstruction* value, uint32_t dex_pc)
      : HExpression(kMethodExitHook, SideEffects::All(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool NeedsEnvironment() const override {
    return true;
  }

  bool CanThrow() const override { return true; }

  DECLARE_INSTRUCTION(MethodExitHook);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(MethodExitHook);
};
3076
// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
// instruction that branches to the exit block.
class HReturnVoid final : public HExpression<0> {
 public:
  explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
      : HExpression(kReturnVoid, SideEffects::None(), dex_pc) {
  }

  bool IsControlFlow() const override { return true; }

  DECLARE_INSTRUCTION(ReturnVoid);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ReturnVoid);
};
3092
// Represents dex's RETURN opcodes. A HReturn is a control flow
// instruction that branches to the exit block. Input 0 is the returned value.
class HReturn final : public HExpression<1> {
 public:
  explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
      : HExpression(kReturn, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool IsControlFlow() const override { return true; }

  DECLARE_INSTRUCTION(Return);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Return);
};
3109
3110 class HPhi final : public HVariableInputSizeInstruction {
3111 public:
  // Creates a live phi for dex register `reg_number` with `number_of_inputs`
  // predecessors. The type is narrowed to one a phi can hold (see ToPhiType()).
  HPhi(ArenaAllocator* allocator,
       uint32_t reg_number,
       size_t number_of_inputs,
       DataType::Type type,
       uint32_t dex_pc = kNoDexPc)
      : HVariableInputSizeInstruction(
            kPhi,
            ToPhiType(type),
            SideEffects::None(),
            dex_pc,
            allocator,
            number_of_inputs,
            kArenaAllocPhiInputs),
        reg_number_(reg_number) {
    DCHECK_NE(GetType(), DataType::Type::kVoid);
    // Phis are constructed live and marked dead if conflicting or unused.
    // Individual steps of SsaBuilder should assume that if a phi has been
    // marked dead, it can be ignored and will be removed by SsaPhiElimination.
    SetPackedFlag<kFlagIsLive>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
  }
3133
3134 bool IsClonable() const override { return true; }
3135
3136 // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
3137 static DataType::Type ToPhiType(DataType::Type type) {
3138 return DataType::Kind(type);
3139 }
3140
3141 bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }
3142
3143 void SetType(DataType::Type new_type) {
3144 // Make sure that only valid type changes occur. The following are allowed:
3145 // (1) int -> float/ref (primitive type propagation),
3146 // (2) long -> double (primitive type propagation).
3147 DCHECK(GetType() == new_type ||
3148 (GetType() == DataType::Type::kInt32 && new_type == DataType::Type::kFloat32) ||
3149 (GetType() == DataType::Type::kInt32 && new_type == DataType::Type::kReference) ||
3150 (GetType() == DataType::Type::kInt64 && new_type == DataType::Type::kFloat64));
3151 SetPackedField<TypeField>(new_type);
3152 }
3153
3154 bool CanBeNull() const override { return GetPackedFlag<kFlagCanBeNull>(); }
3155 void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }
3156
3157 uint32_t GetRegNumber() const { return reg_number_; }
3158
3159 void SetDead() { SetPackedFlag<kFlagIsLive>(false); }
3160 void SetLive() { SetPackedFlag<kFlagIsLive>(true); }
3161 bool IsDead() const { return !IsLive(); }
3162 bool IsLive() const { return GetPackedFlag<kFlagIsLive>(); }
3163
3164 bool IsVRegEquivalentOf(const HInstruction* other) const {
3165 return other != nullptr
3166 && other->IsPhi()
3167 && other->AsPhi()->GetBlock() == GetBlock()
3168 && other->AsPhi()->GetRegNumber() == GetRegNumber();
3169 }
3170
3171 bool HasEquivalentPhi() const {
3172 if (GetPrevious() != nullptr && GetPrevious()->AsPhi()->GetRegNumber() == GetRegNumber()) {
3173 return true;
3174 }
3175 if (GetNext() != nullptr && GetNext()->AsPhi()->GetRegNumber() == GetRegNumber()) {
3176 return true;
3177 }
3178 return false;
3179 }
3180
3181 // Returns the next equivalent phi (starting from the current one) or null if there is none.
3182 // An equivalent phi is a phi having the same dex register and type.
3183 // It assumes that phis with the same dex register are adjacent.
3184 HPhi* GetNextEquivalentPhiWithSameType() {
3185 HInstruction* next = GetNext();
3186 while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
3187 if (next->GetType() == GetType()) {
3188 return next->AsPhi();
3189 }
3190 next = next->GetNext();
3191 }
3192 return nullptr;
3193 }
3194
3195 DECLARE_INSTRUCTION(Phi);
3196
3197 protected:
3198 DEFAULT_COPY_CONSTRUCTOR(Phi);
3199
3200 private:
3201 static constexpr size_t kFlagIsLive = HInstruction::kNumberOfGenericPackedBits;
3202 static constexpr size_t kFlagCanBeNull = kFlagIsLive + 1;
3203 static constexpr size_t kNumberOfPhiPackedBits = kFlagCanBeNull + 1;
3204 static_assert(kNumberOfPhiPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
3205
3206 const uint32_t reg_number_;
3207 };
3208
3209 // The exit instruction is the only instruction of the exit block.
3210 // Instructions aborting the method (HThrow and HReturn) must branch to the
3211 // exit block.
class HExit final : public HExpression<0> {
 public:
  explicit HExit(uint32_t dex_pc = kNoDexPc)
      : HExpression(kExit, SideEffects::None(), dex_pc) {
  }

  // Control-flow instruction: terminates its (exit) block.
  bool IsControlFlow() const override { return true; }

  DECLARE_INSTRUCTION(Exit);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Exit);
};
3225
3226 // Jumps from one block to another.
class HGoto final : public HExpression<0> {
 public:
  explicit HGoto(uint32_t dex_pc = kNoDexPc)
      : HExpression(kGoto, SideEffects::None(), dex_pc) {
  }

  bool IsClonable() const override { return true; }
  bool IsControlFlow() const override { return true; }

  // A block ending with a goto has exactly one successor.
  HBasicBlock* GetSuccessor() const {
    return GetBlock()->GetSingleSuccessor();
  }

  DECLARE_INSTRUCTION(Goto);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Goto);
};
3245
// Base class for compile-time constants. Constants are pure values: no side
// effects, always movable. Each subclass exposes its raw value as a uint64_t
// bit pattern via GetValueAsUint64().
class HConstant : public HExpression<0> {
 public:
  explicit HConstant(InstructionKind kind, DataType::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(kind, type, SideEffects::None(), dex_pc) {
  }

  bool CanBeMoved() const override { return true; }

  // Is this constant -1 in the arithmetic sense?
  virtual bool IsMinusOne() const { return false; }
  // Is this constant 0 in the arithmetic sense?
  virtual bool IsArithmeticZero() const { return false; }
  // Is this constant a 0-bit pattern? (Note: -0.0f is arithmetic zero but
  // not a 0-bit pattern.)
  virtual bool IsZeroBitPattern() const { return false; }
  // Is this constant 1 in the arithmetic sense?
  virtual bool IsOne() const { return false; }

  virtual uint64_t GetValueAsUint64() const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(Constant);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Constant);
};
3270
// The `null` reference constant. The constructor is private: instances are
// created (and cached) only through the friend class HGraph.
class HNullConstant final : public HConstant {
 public:
  // All null constants are interchangeable.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
    return true;
  }

  uint64_t GetValueAsUint64() const override { return 0; }

  size_t ComputeHashCode() const override { return 0; }

  // The null constant representation is a 0-bit pattern.
  bool IsZeroBitPattern() const override { return true; }

  DECLARE_INSTRUCTION(NullConstant);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(NullConstant);

 private:
  explicit HNullConstant(uint32_t dex_pc = kNoDexPc)
      : HConstant(kNullConstant, DataType::Type::kReference, dex_pc) {
  }

  friend class HGraph;
};
3296
3297 // Constants of the type int. Those can be from Dex instructions, or
3298 // synthesized (for example with the if-eqz instruction).
// Constants of the type int. Constructors are private: instances are created
// (and cached) only through the friend class HGraph.
class HIntConstant final : public HConstant {
 public:
  int32_t GetValue() const { return value_; }

  // Zero-extend the 32-bit value so the upper 32 bits are not sign bits.
  uint64_t GetValueAsUint64() const override {
    return static_cast<uint64_t>(static_cast<uint32_t>(value_));
  }

  bool InstructionDataEquals(const HInstruction* other) const override {
    DCHECK(other->IsIntConstant()) << other->DebugName();
    return other->AsIntConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const override { return GetValue(); }

  bool IsMinusOne() const override { return GetValue() == -1; }
  bool IsArithmeticZero() const override { return GetValue() == 0; }
  bool IsZeroBitPattern() const override { return GetValue() == 0; }
  bool IsOne() const override { return GetValue() == 1; }

  // Integer constants are used to encode Boolean values as well,
  // where 1 means true and 0 means false.
  bool IsTrue() const { return GetValue() == 1; }
  bool IsFalse() const { return GetValue() == 0; }

  DECLARE_INSTRUCTION(IntConstant);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(IntConstant);

 private:
  explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(kIntConstant, DataType::Type::kInt32, dex_pc), value_(value) {
  }
  // Booleans are stored as int constants: true -> 1, false -> 0.
  explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
      : HConstant(kIntConstant, DataType::Type::kInt32, dex_pc),
        value_(value ? 1 : 0) {
  }

  const int32_t value_;

  friend class HGraph;
  ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
  ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
};
3344
// Constants of the type long. The constructor is private: instances are
// created (and cached) only through the friend class HGraph.
class HLongConstant final : public HConstant {
 public:
  int64_t GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const override { return value_; }

  bool InstructionDataEquals(const HInstruction* other) const override {
    DCHECK(other->IsLongConstant()) << other->DebugName();
    return other->AsLongConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const override { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const override { return GetValue() == -1; }
  bool IsArithmeticZero() const override { return GetValue() == 0; }
  bool IsZeroBitPattern() const override { return GetValue() == 0; }
  bool IsOne() const override { return GetValue() == 1; }

  DECLARE_INSTRUCTION(LongConstant);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(LongConstant);

 private:
  explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(kLongConstant, DataType::Type::kInt64, dex_pc),
        value_(value) {
  }

  const int64_t value_;

  friend class HGraph;
};
3378
3379 class HFloatConstant final : public HConstant {
3380 public:
3381 float GetValue() const { return value_; }
3382
3383 uint64_t GetValueAsUint64() const override {
3384 return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_));
3385 }
3386
3387 bool InstructionDataEquals(const HInstruction* other) const override {
3388 DCHECK(other->IsFloatConstant()) << other->DebugName();
3389 return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64();
3390 }
3391
3392 size_t ComputeHashCode() const override { return static_cast<size_t>(GetValue()); }
3393
3394 bool IsMinusOne() const override {
3395 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f));
3396 }
3397 bool IsArithmeticZero() const override {
3398 return std::fpclassify(value_) == FP_ZERO;
3399 }
3400 bool IsArithmeticPositiveZero() const {
3401 return IsArithmeticZero() && !std::signbit(value_);
3402 }
3403 bool IsArithmeticNegativeZero() const {
3404 return IsArithmeticZero() && std::signbit(value_);
3405 }
3406 bool IsZeroBitPattern() const override {
3407 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(0.0f);
3408 }
3409 bool IsOne() const override {
3410 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f);
3411 }
3412 bool IsNaN() const {
3413 return std::isnan(value_);
3414 }
3415
3416 DECLARE_INSTRUCTION(FloatConstant);
3417
3418 protected:
3419 DEFAULT_COPY_CONSTRUCTOR(FloatConstant);
3420
3421 private:
3422 explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc)
3423 : HConstant(kFloatConstant, DataType::Type::kFloat32, dex_pc),
3424 value_(value) {
3425 }
3426 explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
3427 : HConstant(kFloatConstant, DataType::Type::kFloat32, dex_pc),
3428 value_(bit_cast<float, int32_t>(value)) {
3429 }
3430
3431 const float value_;
3432
3433 // Only the SsaBuilder and HGraph can create floating-point constants.
3434 friend class SsaBuilder;
3435 friend class HGraph;
3436 };
3437
3438 class HDoubleConstant final : public HConstant {
3439 public:
3440 double GetValue() const { return value_; }
3441
3442 uint64_t GetValueAsUint64() const override { return bit_cast<uint64_t, double>(value_); }
3443
3444 bool InstructionDataEquals(const HInstruction* other) const override {
3445 DCHECK(other->IsDoubleConstant()) << other->DebugName();
3446 return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64();
3447 }
3448
3449 size_t ComputeHashCode() const override { return static_cast<size_t>(GetValue()); }
3450
3451 bool IsMinusOne() const override {
3452 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0));
3453 }
3454 bool IsArithmeticZero() const override {
3455 return std::fpclassify(value_) == FP_ZERO;
3456 }
3457 bool IsArithmeticPositiveZero() const {
3458 return IsArithmeticZero() && !std::signbit(value_);
3459 }
3460 bool IsArithmeticNegativeZero() const {
3461 return IsArithmeticZero() && std::signbit(value_);
3462 }
3463 bool IsZeroBitPattern() const override {
3464 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((0.0));
3465 }
3466 bool IsOne() const override {
3467 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0);
3468 }
3469 bool IsNaN() const {
3470 return std::isnan(value_);
3471 }
3472
3473 DECLARE_INSTRUCTION(DoubleConstant);
3474
3475 protected:
3476 DEFAULT_COPY_CONSTRUCTOR(DoubleConstant);
3477
3478 private:
3479 explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc)
3480 : HConstant(kDoubleConstant, DataType::Type::kFloat64, dex_pc),
3481 value_(value) {
3482 }
3483 explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
3484 : HConstant(kDoubleConstant, DataType::Type::kFloat64, dex_pc),
3485 value_(bit_cast<double, int64_t>(value)) {
3486 }
3487
3488 const double value_;
3489
3490 // Only the SsaBuilder and HGraph can create floating-point constants.
3491 friend class SsaBuilder;
3492 friend class HGraph;
3493 };
3494
3495 // Conditional branch. A block ending with an HIf instruction must have
3496 // two successors.
class HIf final : public HExpression<1> {
 public:
  // `input` is the boolean condition being tested.
  explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(kIf, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  bool IsClonable() const override { return true; }
  bool IsControlFlow() const override { return true; }

  // Successor taken when the condition is true (successor index 0).
  HBasicBlock* IfTrueSuccessor() const {
    return GetBlock()->GetSuccessors()[0];
  }

  // Successor taken when the condition is false (successor index 1).
  HBasicBlock* IfFalseSuccessor() const {
    return GetBlock()->GetSuccessors()[1];
  }

  DECLARE_INSTRUCTION(If);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(If);
};
3520
3521
3522 // Abstract instruction which marks the beginning and/or end of a try block and
3523 // links it to the respective exception handlers. Behaves the same as a Goto in
3524 // non-exceptional control flow.
3525 // Normal-flow successor is stored at index zero, exception handlers under
3526 // higher indices in no particular order.
class HTryBoundary final : public HExpression<0> {
 public:
  // Whether this boundary marks the entry or the exit of a try block.
  enum class BoundaryKind {
    kEntry,
    kExit,
    kLast = kExit
  };

  // SideEffects::CanTriggerGC prevents instructions with SideEffects::DependOnGC to be alive
  // across the catch block entering edges as GC might happen during throwing an exception.
  // TryBoundary with BoundaryKind::kExit is conservatively used for that as there is no
  // HInstruction which a catch block must start from.
  explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
      : HExpression(kTryBoundary,
                    (kind == BoundaryKind::kExit) ? SideEffects::CanTriggerGC()
                                                  : SideEffects::None(),
                    dex_pc) {
    SetPackedField<BoundaryKindField>(kind);
  }

  bool IsControlFlow() const override { return true; }

  // Returns the block's non-exceptional successor (index zero).
  HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }

  // Returns the exception handlers: all successors except index zero.
  ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
    return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
  }

  // Returns whether `handler` is among its exception handlers (non-zero index
  // successors).
  bool HasExceptionHandler(const HBasicBlock& handler) const {
    DCHECK(handler.IsCatchBlock());
    return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
  }

  // If not present already, adds `handler` to its block's list of exception
  // handlers.
  void AddExceptionHandler(HBasicBlock* handler) {
    if (!HasExceptionHandler(*handler)) {
      GetBlock()->AddSuccessor(handler);
    }
  }

  BoundaryKind GetBoundaryKind() const { return GetPackedField<BoundaryKindField>(); }
  bool IsEntry() const { return GetBoundaryKind() == BoundaryKind::kEntry; }

  // Compares handler lists with another boundary; defined in the .cc file.
  bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;

  DECLARE_INSTRUCTION(TryBoundary);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(TryBoundary);

 private:
  // Packed-field layout: the boundary kind is stored in the instruction's
  // packed bits, right after the generic bits.
  static constexpr size_t kFieldBoundaryKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldBoundaryKindSize =
      MinimumBitsToStore(static_cast<size_t>(BoundaryKind::kLast));
  static constexpr size_t kNumberOfTryBoundaryPackedBits =
      kFieldBoundaryKind + kFieldBoundaryKindSize;
  static_assert(kNumberOfTryBoundaryPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using BoundaryKindField = BitField<BoundaryKind, kFieldBoundaryKind, kFieldBoundaryKindSize>;
};
3591
3592 // Deoptimize to interpreter, upon checking a condition.
class HDeoptimize final : public HVariableInputSizeInstruction {
 public:
  // Use this constructor when the `HDeoptimize` acts as a barrier, where no code can move
  // across. The single input is the deoptimization condition `cond`.
  HDeoptimize(ArenaAllocator* allocator,
              HInstruction* cond,
              DeoptimizationKind kind,
              uint32_t dex_pc)
      : HVariableInputSizeInstruction(
            kDeoptimize,
            SideEffects::All(),
            dex_pc,
            allocator,
            /* number_of_inputs= */ 1,
            kArenaAllocMisc) {
    SetPackedFlag<kFieldCanBeMoved>(false);
    SetPackedField<DeoptimizeKindField>(kind);
    SetRawInputAt(0, cond);
  }

  bool IsClonable() const override { return true; }

  // Use this constructor when the `HDeoptimize` guards an instruction, and any user
  // that relies on the deoptimization to pass should have its input be the `HDeoptimize`
  // instead of `guard`.
  // We set CanTriggerGC to prevent any intermediate address to be live
  // at the point of the `HDeoptimize`.
  HDeoptimize(ArenaAllocator* allocator,
              HInstruction* cond,
              HInstruction* guard,
              DeoptimizationKind kind,
              uint32_t dex_pc)
      : HVariableInputSizeInstruction(
            kDeoptimize,
            guard->GetType(),  // This instruction stands in for `guard`, so it has its type.
            SideEffects::CanTriggerGC(),
            dex_pc,
            allocator,
            /* number_of_inputs= */ 2,
            kArenaAllocMisc) {
    SetPackedFlag<kFieldCanBeMoved>(true);
    SetPackedField<DeoptimizeKindField>(kind);
    SetRawInputAt(0, cond);
    SetRawInputAt(1, guard);
  }

  // Movable only in the guarding form (second constructor).
  bool CanBeMoved() const override { return GetPackedFlag<kFieldCanBeMoved>(); }

  bool InstructionDataEquals(const HInstruction* other) const override {
    return (other->CanBeMoved() == CanBeMoved()) && (other->AsDeoptimize()->GetKind() == GetKind());
  }

  // The environment is needed to transfer values to the interpreter.
  bool NeedsEnvironment() const override { return true; }

  bool CanThrow() const override { return true; }

  DeoptimizationKind GetDeoptimizationKind() const { return GetPackedField<DeoptimizeKindField>(); }

  // True when constructed with the guarding constructor (two inputs).
  bool GuardsAnInput() const {
    return InputCount() == 2;
  }

  HInstruction* GuardedInput() const {
    DCHECK(GuardsAnInput());
    return InputAt(1);
  }

  void RemoveGuard() {
    RemoveInputAt(1);
  }

  DECLARE_INSTRUCTION(Deoptimize);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Deoptimize);

 private:
  // Packed-field layout: one "can be moved" flag followed by the
  // deoptimization kind.
  static constexpr size_t kFieldCanBeMoved = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldDeoptimizeKind = kNumberOfGenericPackedBits + 1;
  static constexpr size_t kFieldDeoptimizeKindSize =
      MinimumBitsToStore(static_cast<size_t>(DeoptimizationKind::kLast));
  static constexpr size_t kNumberOfDeoptimizePackedBits =
      kFieldDeoptimizeKind + kFieldDeoptimizeKindSize;
  static_assert(kNumberOfDeoptimizePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using DeoptimizeKindField =
      BitField<DeoptimizationKind, kFieldDeoptimizeKind, kFieldDeoptimizeKindSize>;
};
3681
3682 // Represents a should_deoptimize flag. Currently used for CHA-based devirtualization.
3683 // The compiled code checks this flag value in a guard before devirtualized call and
3684 // if it's true, starts to do deoptimization.
3685 // It has a 4-byte slot on stack.
3686 // TODO: allocate a register for this flag.
class HShouldDeoptimizeFlag final : public HVariableInputSizeInstruction {
 public:
  // CHA guards are only optimized in a separate pass and it has no side effects
  // with regard to other passes.
  HShouldDeoptimizeFlag(ArenaAllocator* allocator, uint32_t dex_pc)
      : HVariableInputSizeInstruction(kShouldDeoptimizeFlag,
                                      DataType::Type::kInt32,
                                      SideEffects::None(),
                                      dex_pc,
                                      allocator,
                                      0,  // No inputs; the flag is read from the stack slot.
                                      kArenaAllocCHA) {
  }

  // We do all CHA guard elimination/motion in a single pass, after which there is no
  // further guard elimination/motion since a guard might have been used for justification
  // of the elimination of another guard. Therefore, we pretend this guard cannot be moved
  // to avoid other optimizations trying to move it.
  bool CanBeMoved() const override { return false; }

  DECLARE_INSTRUCTION(ShouldDeoptimizeFlag);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ShouldDeoptimizeFlag);
};
3712
3713 // Represents the ArtMethod that was passed as a first argument to
3714 // the method. It is used by instructions that depend on it, like
3715 // instructions that work with the dex cache.
class HCurrentMethod final : public HExpression<0> {
 public:
  // `type` is the representation type used for the ArtMethod pointer.
  explicit HCurrentMethod(DataType::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(kCurrentMethod, type, SideEffects::None(), dex_pc) {
  }

  DECLARE_INSTRUCTION(CurrentMethod);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(CurrentMethod);
};
3727
3728 // Fetches an ArtMethod from the virtual table or the interface method table
3729 // of a class.
class HClassTableGet final : public HExpression<1> {
 public:
  // Which table of the class the method is loaded from.
  enum class TableKind {
    kVTable,
    kIMTable,
    kLast = kIMTable
  };
  // `cls` is the class whose table is read; `index` is the slot of the
  // ArtMethod within that table.
  HClassTableGet(HInstruction* cls,
                 DataType::Type type,
                 TableKind kind,
                 size_t index,
                 uint32_t dex_pc)
      : HExpression(kClassTableGet, type, SideEffects::None(), dex_pc),
        index_(index) {
    SetPackedField<TableKindField>(kind);
    SetRawInputAt(0, cls);
  }

  bool IsClonable() const override { return true; }
  bool CanBeMoved() const override { return true; }
  // Equal when both the index and the table kind (packed fields) match.
  bool InstructionDataEquals(const HInstruction* other) const override {
    return other->AsClassTableGet()->GetIndex() == index_ &&
        other->AsClassTableGet()->GetPackedFields() == GetPackedFields();
  }

  TableKind GetTableKind() const { return GetPackedField<TableKindField>(); }
  size_t GetIndex() const { return index_; }

  DECLARE_INSTRUCTION(ClassTableGet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ClassTableGet);

 private:
  // Packed-field layout: the table kind is stored right after the generic bits.
  static constexpr size_t kFieldTableKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTableKindSize =
      MinimumBitsToStore(static_cast<size_t>(TableKind::kLast));
  static constexpr size_t kNumberOfClassTableGetPackedBits = kFieldTableKind + kFieldTableKindSize;
  static_assert(kNumberOfClassTableGetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using TableKindField = BitField<TableKind, kFieldTableKind, kFieldTableKindSize>;

  // The index of the ArtMethod in the table.
  const size_t index_;
};
3775
3776 // PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
3777 // have one successor for each entry in the switch table, and the final successor
3778 // will be the block containing the next Dex opcode.
class HPackedSwitch final : public HExpression<1> {
 public:
  // Case values are the contiguous range [start_value, start_value + num_entries).
  // `input` is the value being switched on.
  HPackedSwitch(int32_t start_value,
                uint32_t num_entries,
                HInstruction* input,
                uint32_t dex_pc = kNoDexPc)
      : HExpression(kPackedSwitch, SideEffects::None(), dex_pc),
        start_value_(start_value),
        num_entries_(num_entries) {
    SetRawInputAt(0, input);
  }

  bool IsClonable() const override { return true; }

  bool IsControlFlow() const override { return true; }

  int32_t GetStartValue() const { return start_value_; }

  uint32_t GetNumEntries() const { return num_entries_; }

  HBasicBlock* GetDefaultBlock() const {
    // Last entry is the default block.
    return GetBlock()->GetSuccessors()[num_entries_];
  }
  DECLARE_INSTRUCTION(PackedSwitch);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(PackedSwitch);

 private:
  const int32_t start_value_;
  const uint32_t num_entries_;
};
3812
// Base class for operations with a single operand (e.g. neg, not). Unary
// operations are pure: no side effects, movable, and equal by kind alone.
class HUnaryOperation : public HExpression<1> {
 public:
  HUnaryOperation(InstructionKind kind,
                  DataType::Type result_type,
                  HInstruction* input,
                  uint32_t dex_pc = kNoDexPc)
      : HExpression(kind, result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  // All of the UnaryOperation instructions are clonable.
  bool IsClonable() const override { return true; }

  HInstruction* GetInput() const { return InputAt(0); }
  DataType::Type GetResultType() const { return GetType(); }

  bool CanBeMoved() const override { return true; }
  // Two unary operations of the same kind carry no extra data, so they
  // compare equal.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation. If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x`.
  virtual HConstant* Evaluate(HIntConstant* x) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x) const = 0;
  virtual HConstant* Evaluate(HFloatConstant* x) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x) const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(UnaryOperation);
};
3850
3851 class HBinaryOperation : public HExpression<2> {
3852 public:
3853 HBinaryOperation(InstructionKind kind,
3854 DataType::Type result_type,
3855 HInstruction* left,
3856 HInstruction* right,
3857 SideEffects side_effects = SideEffects::None(),
3858 uint32_t dex_pc = kNoDexPc)
3859 : HExpression(kind, result_type, side_effects, dex_pc) {
3860 SetRawInputAt(0, left);
3861 SetRawInputAt(1, right);
3862 }
3863
3864 // All of the BinaryOperation instructions are clonable.
3865 bool IsClonable() const override { return true; }
3866
3867 HInstruction* GetLeft() const { return InputAt(0); }
3868 HInstruction* GetRight() const { return InputAt(1); }
3869 DataType::Type GetResultType() const { return GetType(); }
3870
3871 virtual bool IsCommutative() const { return false; }
3872
3873 // Put constant on the right.
3874 // Returns whether order is changed.
3875 bool OrderInputsWithConstantOnTheRight() {
3876 HInstruction* left = InputAt(0);
3877 HInstruction* right = InputAt(1);
3878 if (left->IsConstant() && !right->IsConstant()) {
3879 ReplaceInput(right, 0);
3880 ReplaceInput(left, 1);
3881 return true;
3882 }
3883 return false;
3884 }
3885
3886 // Order inputs by instruction id, but favor constant on the right side.
3887 // This helps GVN for commutative ops.
3888 void OrderInputs() {
3889 DCHECK(IsCommutative());
3890 HInstruction* left = InputAt(0);
3891 HInstruction* right = InputAt(1);
3892 if (left == right || (!left->IsConstant() && right->IsConstant())) {
3893 return;
3894 }
3895 if (OrderInputsWithConstantOnTheRight()) {
3896 return;
3897 }
3898 // Order according to instruction id.
3899 if (left->GetId() > right->GetId()) {
3900 ReplaceInput(right, 0);
3901 ReplaceInput(left, 1);
3902 }
3903 }
3904
3905 bool CanBeMoved() const override { return true; }
3906 bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
3907 return true;
3908 }
3909
3910 // Try to statically evaluate `this` and return a HConstant
3911 // containing the result of this evaluation. If `this` cannot
3912 // be evaluated as a constant, return null.
3913 HConstant* TryStaticEvaluation() const;
3914
3915 // Apply this operation to `x` and `y`.
3916 virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3917 HNullConstant* y ATTRIBUTE_UNUSED) const {
3918 LOG(FATAL) << DebugName() << " is not defined for the (null, null) case.";
3919 UNREACHABLE();
3920 }
3921 virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
3922 virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
3923 virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
3924 HIntConstant* y ATTRIBUTE_UNUSED) const {
3925 LOG(FATAL) << DebugName() << " is not defined for the (long, int) case.";
3926 UNREACHABLE();
3927 }
3928 virtual HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const = 0;
3929 virtual HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const = 0;
3930
3931 // Returns an input that can legally be used as the right input and is
3932 // constant, or null.
3933 HConstant* GetConstantRight() const;
3934
3935 // If `GetConstantRight()` returns one of the input, this returns the other
3936 // one. Otherwise it returns null.
3937 HInstruction* GetLeastConstantLeft() const;
3938
3939 DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation);
3940
3941 protected:
3942 DEFAULT_COPY_CONSTRUCTOR(BinaryOperation);
3943 };
3944
3945 // The comparison bias applies for floating point operations and indicates how NaN
3946 // comparisons are treated:
enum class ComparisonBias {  // private marker to avoid generate-operator-out.py from processing.
  kNoBias,  // bias is not applicable (i.e. for long operation)
  kGtBias,  // return 1 for NaN comparisons
  kLtBias,  // return -1 for NaN comparisons
  kLast = kLtBias
};

// Pretty-printer for ComparisonBias, used in debug output.
std::ostream& operator<<(std::ostream& os, ComparisonBias rhs);
3955
// Base class for boolean comparisons of two inputs (eq, ne, lt, ...). The
// result type is always kBool.
class HCondition : public HBinaryOperation {
 public:
  HCondition(InstructionKind kind,
             HInstruction* first,
             HInstruction* second,
             uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(kind,
                         DataType::Type::kBool,
                         first,
                         second,
                         SideEffects::None(),
                         dex_pc) {
    SetPackedField<ComparisonBiasField>(ComparisonBias::kNoBias);
  }

  // For code generation purposes, returns whether this instruction is just before
  // `instruction`, and disregard moves in between.
  bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;

  DECLARE_ABSTRACT_INSTRUCTION(Condition);

  virtual IfCondition GetCondition() const = 0;

  // The condition that gives the same result with the operands swapped
  // in meaning (i.e. the negation of this condition).
  virtual IfCondition GetOppositeCondition() const = 0;

  bool IsGtBias() const { return GetBias() == ComparisonBias::kGtBias; }
  bool IsLtBias() const { return GetBias() == ComparisonBias::kLtBias; }

  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
  void SetBias(ComparisonBias bias) { SetPackedField<ComparisonBiasField>(bias); }

  // Equal only when all packed fields (including the comparison bias) match.
  bool InstructionDataEquals(const HInstruction* other) const override {
    return GetPackedFields() == other->AsCondition()->GetPackedFields();
  }

  // For a floating-point comparison: does this condition evaluate to true
  // when either operand is NaN? NE is always true for NaN, EQ always false;
  // GT/GE are true for NaN only under gt-bias.
  bool IsFPConditionTrueIfNaN() const {
    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    IfCondition if_cond = GetCondition();
    if (if_cond == kCondNE) {
      return true;
    } else if (if_cond == kCondEQ) {
      return false;
    }
    return ((if_cond == kCondGT) || (if_cond == kCondGE)) && IsGtBias();
  }

  // For a floating-point comparison: does this condition evaluate to false
  // when either operand is NaN? EQ is always false for NaN, NE always true;
  // LT/LE are false for NaN only under gt-bias.
  bool IsFPConditionFalseIfNaN() const {
    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    IfCondition if_cond = GetCondition();
    if (if_cond == kCondEQ) {
      return true;
    } else if (if_cond == kCondNE) {
      return false;
    }
    return ((if_cond == kCondLT) || (if_cond == kCondLE)) && IsGtBias();
  }

 protected:
  // Needed if we merge a HCompare into a HCondition.
  static constexpr size_t kFieldComparisonBias = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldComparisonBiasSize =
      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
  static constexpr size_t kNumberOfConditionPackedBits =
      kFieldComparisonBias + kFieldComparisonBiasSize;
  static_assert(kNumberOfConditionPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ComparisonBiasField =
      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;

  // Three-way compare: 1 if x > y, -1 if x < y, 0 if equal.
  template <typename T>
  int32_t Compare(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }

  // Three-way compare for floating point, resolving unordered (NaN) operands
  // according to the comparison bias.
  template <typename T>
  int32_t CompareFP(T x, T y) const {
    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
    // Handle the bias.
    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compare(x, y);
  }

  // Return an integer constant containing the result of a condition evaluated at compile time.
  HIntConstant* MakeConstantCondition(bool value, uint32_t dex_pc) const {
    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
  }

  DEFAULT_COPY_CONSTRUCTOR(Condition);
};
4042
4043 // Instruction to check if two inputs are equal to each other.
class HEqual final : public HCondition {
 public:
  HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(kEqual, first, second, dex_pc) {
  }

  bool IsCommutative() const override { return true; }

  // Two null constants are always equal.
  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                      HNullConstant* y ATTRIBUTE_UNUSED) const override {
    return MakeConstantCondition(true, GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HEqual instruction; evaluate it as
  // `Compare(x, y) == 0`.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0),
                                 GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }

  DECLARE_INSTRUCTION(Equal);

  IfCondition GetCondition() const override {
    return kCondEQ;
  }

  IfCondition GetOppositeCondition() const override {
    return kCondNE;
  }

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Equal);

 private:
  // Scalar equality used by the constant folders above.
  template <typename T> static bool Compute(T x, T y) { return x == y; }
};
4089
// Instruction to check if two inputs are not equal to each other.
class HNotEqual final : public HCondition {
 public:
  HNotEqual(HInstruction* first, HInstruction* second,
            uint32_t dex_pc = kNoDexPc)
      : HCondition(kNotEqual, first, second, dex_pc) {
  }

  bool IsCommutative() const override { return true; }

  // Two null constants are never not-equal.
  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                      HNullConstant* y ATTRIBUTE_UNUSED) const override {
    return MakeConstantCondition(false, GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HNotEqual instruction; evaluate it as
  // `Compare(x, y) != 0`.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }

  DECLARE_INSTRUCTION(NotEqual);

  IfCondition GetCondition() const override {
    return kCondNE;
  }

  IfCondition GetOppositeCondition() const override {
    return kCondEQ;
  }

 protected:
  DEFAULT_COPY_CONSTRUCTOR(NotEqual);

 private:
  // Scalar inequality used by the constant folders above.
  template <typename T> static bool Compute(T x, T y) { return x != y; }
};
4135
// Instruction to check if the first input is (signed) less than the second.
class HLessThan final : public HCondition {
 public:
  HLessThan(HInstruction* first, HInstruction* second,
            uint32_t dex_pc = kNoDexPc)
      : HCondition(kLessThan, first, second, dex_pc) {
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HLessThan instruction; evaluate it as
  // `Compare(x, y) < 0`.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }

  DECLARE_INSTRUCTION(LessThan);

  IfCondition GetCondition() const override {
    return kCondLT;
  }

  IfCondition GetOppositeCondition() const override {
    return kCondGE;
  }

 protected:
  DEFAULT_COPY_CONSTRUCTOR(LessThan);

 private:
  // Scalar less-than used by the constant folders above.
  template <typename T> static bool Compute(T x, T y) { return x < y; }
};
4175
// Instruction to check if the first input is (signed) less than or equal
// to the second.
class HLessThanOrEqual final : public HCondition {
 public:
  HLessThanOrEqual(HInstruction* first, HInstruction* second,
                   uint32_t dex_pc = kNoDexPc)
      : HCondition(kLessThanOrEqual, first, second, dex_pc) {
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HLessThanOrEqual instruction; evaluate it as
  // `Compare(x, y) <= 0`.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }

  DECLARE_INSTRUCTION(LessThanOrEqual);

  IfCondition GetCondition() const override {
    return kCondLE;
  }

  IfCondition GetOppositeCondition() const override {
    return kCondGT;
  }

 protected:
  DEFAULT_COPY_CONSTRUCTOR(LessThanOrEqual);

 private:
  // Scalar less-than-or-equal used by the constant folders above.
  template <typename T> static bool Compute(T x, T y) { return x <= y; }
};
4215
// Instruction to check if the first input is (signed) greater than the second.
class HGreaterThan final : public HCondition {
 public:
  HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(kGreaterThan, first, second, dex_pc) {
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HGreaterThan instruction; evaluate it as
  // `Compare(x, y) > 0`.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }

  DECLARE_INSTRUCTION(GreaterThan);

  IfCondition GetCondition() const override {
    return kCondGT;
  }

  IfCondition GetOppositeCondition() const override {
    return kCondLE;
  }

 protected:
  DEFAULT_COPY_CONSTRUCTOR(GreaterThan);

 private:
  // Scalar greater-than used by the constant folders above.
  template <typename T> static bool Compute(T x, T y) { return x > y; }
};
4254
// Instruction to check if the first input is (signed) greater than or equal
// to the second.
class HGreaterThanOrEqual final : public HCondition {
 public:
  HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(kGreaterThanOrEqual, first, second, dex_pc) {
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HGreaterThanOrEqual instruction; evaluate it as
  // `Compare(x, y) >= 0`.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }

  DECLARE_INSTRUCTION(GreaterThanOrEqual);

  IfCondition GetCondition() const override {
    return kCondGE;
  }

  IfCondition GetOppositeCondition() const override {
    return kCondLT;
  }

 protected:
  DEFAULT_COPY_CONSTRUCTOR(GreaterThanOrEqual);

 private:
  // Scalar greater-than-or-equal used by the constant folders above.
  template <typename T> static bool Compute(T x, T y) { return x >= y; }
};
4293
// Instruction to check if the first input is below the second, i.e. an
// unsigned less-than. Only defined for integral inputs.
class HBelow final : public HCondition {
 public:
  HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(kBelow, first, second, dex_pc) {
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // Unsigned comparisons have no meaning for floating-point values.
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Below);

  IfCondition GetCondition() const override {
    return kCondB;
  }

  IfCondition GetOppositeCondition() const override {
    return kCondAE;
  }

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Below);

 private:
  // Compare the unsigned reinterpretations of the operands.
  template <typename T> static bool Compute(T x, T y) {
    return MakeUnsigned(x) < MakeUnsigned(y);
  }
};
4335
// Instruction to check if the first input is below or equal to the second,
// i.e. an unsigned less-than-or-equal. Only defined for integral inputs.
class HBelowOrEqual final : public HCondition {
 public:
  HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(kBelowOrEqual, first, second, dex_pc) {
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // Unsigned comparisons have no meaning for floating-point values.
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(BelowOrEqual);

  IfCondition GetCondition() const override {
    return kCondBE;
  }

  IfCondition GetOppositeCondition() const override {
    return kCondA;
  }

 protected:
  DEFAULT_COPY_CONSTRUCTOR(BelowOrEqual);

 private:
  // Compare the unsigned reinterpretations of the operands.
  template <typename T> static bool Compute(T x, T y) {
    return MakeUnsigned(x) <= MakeUnsigned(y);
  }
};
4377
// Instruction to check if the first input is above the second, i.e. an
// unsigned greater-than. Only defined for integral inputs.
class HAbove final : public HCondition {
 public:
  HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(kAbove, first, second, dex_pc) {
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // Unsigned comparisons have no meaning for floating-point values.
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Above);

  IfCondition GetCondition() const override {
    return kCondA;
  }

  IfCondition GetOppositeCondition() const override {
    return kCondBE;
  }

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Above);

 private:
  // Compare the unsigned reinterpretations of the operands.
  template <typename T> static bool Compute(T x, T y) {
    return MakeUnsigned(x) > MakeUnsigned(y);
  }
};
4419
// Instruction to check if the first input is above or equal to the second,
// i.e. an unsigned greater-than-or-equal. Only defined for integral inputs.
class HAboveOrEqual final : public HCondition {
 public:
  HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(kAboveOrEqual, first, second, dex_pc) {
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // Unsigned comparisons have no meaning for floating-point values.
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(AboveOrEqual);

  IfCondition GetCondition() const override {
    return kCondAE;
  }

  IfCondition GetOppositeCondition() const override {
    return kCondB;
  }

 protected:
  DEFAULT_COPY_CONSTRUCTOR(AboveOrEqual);

 private:
  // Compare the unsigned reinterpretations of the operands.
  template <typename T> static bool Compute(T x, T y) {
    return MakeUnsigned(x) >= MakeUnsigned(y);
  }
};
4461
4462 // Instruction to check how two inputs compare to each other.
4463 // Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1.
class HCompare final : public HBinaryOperation {
 public:
  // Note that `comparison_type` is the type of comparison performed
  // between the comparison's inputs, not the type of the instantiated
  // HCompare instruction (which is always DataType::Type::kInt32).
  HCompare(DataType::Type comparison_type,
           HInstruction* first,
           HInstruction* second,
           ComparisonBias bias,
           uint32_t dex_pc)
      : HBinaryOperation(kCompare,
                         DataType::Type::kInt32,
                         first,
                         second,
                         SideEffectsForArchRuntimeCalls(comparison_type),
                         dex_pc) {
    SetPackedField<ComparisonBiasField>(bias);
  }

  // Three-way comparison: 1 if x > y, -1 if x < y, 0 if equal.
  template <typename T>
  int32_t Compute(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }

  // Three-way floating-point comparison; the bias decides the result
  // (1 for gt bias, -1 for lt bias) when either input is NaN.
  template <typename T>
  int32_t ComputeFP(T x, T y) const {
    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
    // Handle the bias.
    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compute(x, y);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
    // Note that there is no "cmp-int" Dex instruction so we shouldn't
    // reach this code path when processing a freshly built HIR
    // graph. However HCompare integer instructions can be synthesized
    // by the instruction simplifier to implement IntegerCompare and
    // IntegerSignum intrinsics, so we have to handle this case.
    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }

  bool InstructionDataEquals(const HInstruction* other) const override {
    return GetPackedFields() == other->AsCompare()->GetPackedFields();
  }

  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }

  // Does this compare instruction have a "gt bias" (vs an "lt bias")?
  // Only meaningful for floating-point comparisons.
  bool IsGtBias() const {
    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    return GetBias() == ComparisonBias::kGtBias;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(DataType::Type type ATTRIBUTE_UNUSED) {
    // Comparisons do not require a runtime call in any back end.
    return SideEffects::None();
  }

  DECLARE_INSTRUCTION(Compare);

 protected:
  static constexpr size_t kFieldComparisonBias = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldComparisonBiasSize =
      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
  static constexpr size_t kNumberOfComparePackedBits =
      kFieldComparisonBias + kFieldComparisonBiasSize;
  static_assert(kNumberOfComparePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ComparisonBiasField =
      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;

  // Return an integer constant containing the result of a comparison evaluated at compile time.
  HIntConstant* MakeConstantComparison(int32_t value, uint32_t dex_pc) const {
    DCHECK(value == -1 || value == 0 || value == 1) << value;
    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
  }

  DEFAULT_COPY_CONSTRUCTOR(Compare);
};
4550
// Allocates a new object of the class given by the single input (an
// HLoadClass, possibly wrapped in an HClinitCheck). May trigger GC and
// may throw (e.g. on OOM), so it needs an environment.
class HNewInstance final : public HExpression<1> {
 public:
  HNewInstance(HInstruction* cls,
               uint32_t dex_pc,
               dex::TypeIndex type_index,
               const DexFile& dex_file,
               bool finalizable,
               QuickEntrypointEnum entrypoint)
      : HExpression(kNewInstance,
                    DataType::Type::kReference,
                    SideEffects::CanTriggerGC(),
                    dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        entrypoint_(entrypoint) {
    SetPackedFlag<kFlagFinalizable>(finalizable);
    SetPackedFlag<kFlagPartialMaterialization>(false);
    SetRawInputAt(0, cls);
  }

  bool IsClonable() const override { return true; }

  // Marks this allocation as materialized only on escape paths.
  void SetPartialMaterialization() {
    SetPackedFlag<kFlagPartialMaterialization>(true);
  }

  dex::TypeIndex GetTypeIndex() const { return type_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }

  // Calls runtime so needs an environment.
  bool NeedsEnvironment() const override { return true; }

  // Can throw errors when out-of-memory or if it's not instantiable/accessible.
  bool CanThrow() const override { return true; }
  bool OnlyThrowsAsyncExceptions() const override {
    return !IsFinalizable() && !NeedsChecks();
  }

  // Whether the runtime entrypoint performs instantiability/access checks.
  bool NeedsChecks() const {
    return entrypoint_ == kQuickAllocObjectWithChecks;
  }

  bool IsFinalizable() const { return GetPackedFlag<kFlagFinalizable>(); }

  // A freshly allocated object is never null.
  bool CanBeNull() const override { return false; }

  bool IsPartialMaterialization() const {
    return GetPackedFlag<kFlagPartialMaterialization>();
  }

  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }

  void SetEntrypoint(QuickEntrypointEnum entrypoint) {
    entrypoint_ = entrypoint;
  }

  // Returns the HLoadClass input, looking through an HClinitCheck if present.
  HLoadClass* GetLoadClass() const {
    HInstruction* input = InputAt(0);
    if (input->IsClinitCheck()) {
      input = input->InputAt(0);
    }
    DCHECK(input->IsLoadClass());
    return input->AsLoadClass();
  }

  bool IsStringAlloc() const;

  DECLARE_INSTRUCTION(NewInstance);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(NewInstance);

 private:
  static constexpr size_t kFlagFinalizable = kNumberOfGenericPackedBits;
  static constexpr size_t kFlagPartialMaterialization = kFlagFinalizable + 1;
  static constexpr size_t kNumberOfNewInstancePackedBits = kFlagPartialMaterialization + 1;
  static_assert(kNumberOfNewInstancePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const dex::TypeIndex type_index_;
  const DexFile& dex_file_;
  QuickEntrypointEnum entrypoint_;
};
4634
// Whether an intrinsic needs an environment; passed to HInvoke::SetIntrinsic().
enum IntrinsicNeedsEnvironment {
  kNoEnvironment,   // Intrinsic does not require an environment.
  kNeedsEnvironment // Intrinsic requires an environment.
};
4639
// Heap side effects of an intrinsic; passed to HInvoke::SetIntrinsic().
enum IntrinsicSideEffects {
  kNoSideEffects,     // Intrinsic does not have any heap memory side effects.
  kReadSideEffects,   // Intrinsic may read heap memory.
  kWriteSideEffects,  // Intrinsic may write heap memory.
  kAllSideEffects     // Intrinsic may read or write heap memory, or trigger GC.
};
4646
// Exception behavior of an intrinsic; passed to HInvoke::SetIntrinsic().
enum IntrinsicExceptions {
  kNoThrow,  // Intrinsic does not throw any exceptions.
  kCanThrow  // Intrinsic may throw exceptions.
};
4651
4652 // Determines how to load an ArtMethod*.
enum class MethodLoadKind {
  // Use a String init ArtMethod* loaded from Thread entrypoints.
  kStringInit,

  // Use the method's own ArtMethod* loaded by the register allocator.
  kRecursive,

  // Use PC-relative boot image ArtMethod* address that will be known at link time.
  // Used for boot image methods referenced by boot image code.
  kBootImageLinkTimePcRelative,

  // Load from an entry in the .data.bimg.rel.ro using a PC-relative load.
  // Used for app->boot calls with relocatable image.
  kBootImageRelRo,

  // Load from an entry in the .bss section using a PC-relative load.
  // Used for methods outside boot image referenced by AOT-compiled app and boot image code.
  kBssEntry,

  // Use ArtMethod* at a known address, embed the direct address in the code.
  // Used for JIT-compiled calls.
  kJitDirectAddress,

  // Make a runtime call to resolve and call the method. This is the last-resort-kind
  // used when other kinds are unimplemented on a particular architecture.
  kRuntimeCall,
};
4680
4681 // Determines the location of the code pointer of an invoke.
enum class CodePtrLocation {
  // Recursive call, use local PC-relative call instruction.
  kCallSelf,

  // Use native pointer from the ArtMethod*.
  // Used for @CriticalNative to avoid going through the compiled stub. This call goes through
  // a special resolution stub if the class is not initialized or no native code is registered.
  kCallCriticalNative,

  // Use code pointer from the ArtMethod*.
  // Used when we don't know the target code. This is also the last-resort-kind used when
  // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
  kCallArtMethod,
};
4696
4697 static inline bool IsPcRelativeMethodLoadKind(MethodLoadKind load_kind) {
4698 return load_kind == MethodLoadKind::kBootImageLinkTimePcRelative ||
4699 load_kind == MethodLoadKind::kBootImageRelRo ||
4700 load_kind == MethodLoadKind::kBssEntry;
4701 }
4702
// Abstract base class for all method-invocation instructions. Holds the
// argument inputs, the (possibly null) resolved ArtMethod*, the method
// references, and optional intrinsic information.
class HInvoke : public HVariableInputSizeInstruction {
 public:
  bool NeedsEnvironment() const override;

  void SetArgumentAt(size_t index, HInstruction* argument) {
    SetRawInputAt(index, argument);
  }

  // Return the number of arguments. This number can be lower than
  // the number of inputs returned by InputCount(), as some invoke
  // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
  // inputs at the end of their list of inputs.
  uint32_t GetNumberOfArguments() const { return number_of_arguments_; }

  InvokeType GetInvokeType() const {
    return GetPackedField<InvokeTypeField>();
  }

  Intrinsics GetIntrinsic() const {
    return intrinsic_;
  }

  void SetIntrinsic(Intrinsics intrinsic,
                    IntrinsicNeedsEnvironment needs_env,
                    IntrinsicSideEffects side_effects,
                    IntrinsicExceptions exceptions);

  bool IsFromInlinedInvoke() const {
    return GetEnvironment()->IsFromInlinedInvoke();
  }

  void SetCanThrow(bool can_throw) { SetPackedFlag<kFlagCanThrow>(can_throw); }

  bool CanThrow() const override { return GetPackedFlag<kFlagCanThrow>(); }

  void SetAlwaysThrows(bool always_throws) { SetPackedFlag<kFlagAlwaysThrows>(always_throws); }

  bool AlwaysThrows() const override final { return GetPackedFlag<kFlagAlwaysThrows>(); }

  // Only recognized intrinsics without writes are safe to move.
  bool CanBeMoved() const override { return IsIntrinsic() && !DoesAnyWrite(); }

  // Two invokes are only considered equal when they target the same
  // (non-kNone) intrinsic.
  bool InstructionDataEquals(const HInstruction* other) const override {
    return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_;
  }

  uint32_t* GetIntrinsicOptimizations() {
    return &intrinsic_optimizations_;
  }

  const uint32_t* GetIntrinsicOptimizations() const {
    return &intrinsic_optimizations_;
  }

  bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }

  ArtMethod* GetResolvedMethod() const { return resolved_method_; }
  void SetResolvedMethod(ArtMethod* method, bool enable_intrinsic_opt);

  MethodReference GetMethodReference() const { return method_reference_; }

  const MethodReference GetResolvedMethodReference() const {
    return resolved_method_reference_;
  }

  DECLARE_ABSTRACT_INSTRUCTION(Invoke);

 protected:
  // Packed-field layout: invoke type, then the can-throw and always-throws flags.
  static constexpr size_t kFieldInvokeType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldInvokeTypeSize =
      MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
  static constexpr size_t kFlagCanThrow = kFieldInvokeType + kFieldInvokeTypeSize;
  static constexpr size_t kFlagAlwaysThrows = kFlagCanThrow + 1;
  static constexpr size_t kNumberOfInvokePackedBits = kFlagAlwaysThrows + 1;
  static_assert(kNumberOfInvokePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using InvokeTypeField = BitField<InvokeType, kFieldInvokeType, kFieldInvokeTypeSize>;

  HInvoke(InstructionKind kind,
          ArenaAllocator* allocator,
          uint32_t number_of_arguments,
          uint32_t number_of_other_inputs,
          DataType::Type return_type,
          uint32_t dex_pc,
          MethodReference method_reference,
          ArtMethod* resolved_method,
          MethodReference resolved_method_reference,
          InvokeType invoke_type,
          bool enable_intrinsic_opt)
      : HVariableInputSizeInstruction(
            kind,
            return_type,
            SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
            dex_pc,
            allocator,
            number_of_arguments + number_of_other_inputs,
            kArenaAllocInvokeInputs),
        number_of_arguments_(number_of_arguments),
        method_reference_(method_reference),
        resolved_method_reference_(resolved_method_reference),
        intrinsic_(Intrinsics::kNone),
        intrinsic_optimizations_(0) {
    SetPackedField<InvokeTypeField>(invoke_type);
    // Conservatively assume the call can throw; subclasses may clear this.
    SetPackedFlag<kFlagCanThrow>(true);
    SetResolvedMethod(resolved_method, enable_intrinsic_opt);
  }

  DEFAULT_COPY_CONSTRUCTOR(Invoke);

  uint32_t number_of_arguments_;
  ArtMethod* resolved_method_;
  const MethodReference method_reference_;
  // Cached values of the resolved method, to avoid needing the mutator lock.
  const MethodReference resolved_method_reference_;
  Intrinsics intrinsic_;

  // A magic word holding optimizations for intrinsics. See intrinsics.h.
  uint32_t intrinsic_optimizations_;
};
4820
// Invocation of a method that could not be resolved at compile time;
// resolution happens at runtime (no resolved ArtMethod*, no intrinsics).
class HInvokeUnresolved final : public HInvoke {
 public:
  HInvokeUnresolved(ArenaAllocator* allocator,
                    uint32_t number_of_arguments,
                    DataType::Type return_type,
                    uint32_t dex_pc,
                    MethodReference method_reference,
                    InvokeType invoke_type)
      : HInvoke(kInvokeUnresolved,
                allocator,
                number_of_arguments,
                /* number_of_other_inputs= */ 0u,
                return_type,
                dex_pc,
                method_reference,
                nullptr,
                MethodReference(nullptr, 0u),
                invoke_type,
                /* enable_intrinsic_opt= */ false) {
  }

  bool IsClonable() const override { return true; }

  DECLARE_INSTRUCTION(InvokeUnresolved);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(InvokeUnresolved);
};
4849
// Signature-polymorphic invocation (invoke-polymorphic), e.g. MethodHandle
// or VarHandle calls; records the proto index of the call site signature.
class HInvokePolymorphic final : public HInvoke {
 public:
  HInvokePolymorphic(ArenaAllocator* allocator,
                     uint32_t number_of_arguments,
                     DataType::Type return_type,
                     uint32_t dex_pc,
                     MethodReference method_reference,
                     // resolved_method is the ArtMethod object corresponding to the polymorphic
                     // method (e.g. VarHandle.get), resolved using the class linker. It is needed
                     // to pass intrinsic information to the HInvokePolymorphic node.
                     ArtMethod* resolved_method,
                     MethodReference resolved_method_reference,
                     dex::ProtoIndex proto_idx,
                     bool enable_intrinsic_opt)
      : HInvoke(kInvokePolymorphic,
                allocator,
                number_of_arguments,
                /* number_of_other_inputs= */ 0u,
                return_type,
                dex_pc,
                method_reference,
                resolved_method,
                resolved_method_reference,
                kPolymorphic,
                enable_intrinsic_opt),
        proto_idx_(proto_idx) {
  }

  bool IsClonable() const override { return true; }

  dex::ProtoIndex GetProtoIndex() { return proto_idx_; }

  DECLARE_INSTRUCTION(InvokePolymorphic);

 protected:
  dex::ProtoIndex proto_idx_;
  DEFAULT_COPY_CONSTRUCTOR(InvokePolymorphic);
};
4888
// Invoke of a call site created via invoke-custom (bootstrap-method based
// linkage). Always a static-style call; the call site is identified by its
// index in the dex file.
class HInvokeCustom final : public HInvoke {
 public:
  HInvokeCustom(ArenaAllocator* allocator,
                uint32_t number_of_arguments,
                uint32_t call_site_index,
                DataType::Type return_type,
                uint32_t dex_pc,
                MethodReference method_reference,
                bool enable_intrinsic_opt)
      : HInvoke(kInvokeCustom,
                allocator,
                number_of_arguments,
                /* number_of_other_inputs= */ 0u,
                return_type,
                dex_pc,
                method_reference,
                /* resolved_method= */ nullptr,
                MethodReference(nullptr, 0u),
                kStatic,
                enable_intrinsic_opt),
        call_site_index_(call_site_index) {
  }

  // Index of the call site in the dex file's call site IDs section.
  uint32_t GetCallSiteIndex() const { return call_site_index_; }

  bool IsClonable() const override { return true; }

  DECLARE_INSTRUCTION(InvokeCustom);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(InvokeCustom);

 private:
  uint32_t call_site_index_;
};
4924
// Invoke of a statically-bound method (static or direct call). Besides the
// regular arguments, the instruction may carry extra inputs:
//   - an HCurrentMethod input, when the dispatch needs the current method;
//   - a "special" input (e.g. for PC-relative loads);
//   - an explicit HClinitCheck/HLoadClass as last input for static calls
//     that require an explicit class initialization check.
class HInvokeStaticOrDirect final : public HInvoke {
 public:
  // Requirements of this method call regarding the class
  // initialization (clinit) check of its declaring class.
  enum class ClinitCheckRequirement { // private marker to avoid generate-operator-out.py from processing.
    kNone,      // Class already initialized.
    kExplicit,  // Static call having explicit clinit check as last input.
    kImplicit,  // Static call implicitly requiring a clinit check.
    kLast = kImplicit
  };

  // How the method is loaded and how the code pointer is obtained for this call.
  struct DispatchInfo {
    MethodLoadKind method_load_kind;
    CodePtrLocation code_ptr_location;
    // The method load data holds
    //   - thread entrypoint offset for kStringInit method if this is a string init invoke.
    //     Note that there are multiple string init methods, each having its own offset.
    //   - the method address for kDirectAddress
    uint64_t method_load_data;
  };

  HInvokeStaticOrDirect(ArenaAllocator* allocator,
                        uint32_t number_of_arguments,
                        DataType::Type return_type,
                        uint32_t dex_pc,
                        MethodReference method_reference,
                        ArtMethod* resolved_method,
                        DispatchInfo dispatch_info,
                        InvokeType invoke_type,
                        MethodReference resolved_method_reference,
                        ClinitCheckRequirement clinit_check_requirement,
                        bool enable_intrinsic_opt)
      : HInvoke(kInvokeStaticOrDirect,
                allocator,
                number_of_arguments,
                // There is potentially one extra argument for the HCurrentMethod input,
                // and one other if the clinit check is explicit. These can be removed later.
                (NeedsCurrentMethodInput(dispatch_info) ? 1u : 0u) +
                    (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u),
                return_type,
                dex_pc,
                method_reference,
                resolved_method,
                resolved_method_reference,
                invoke_type,
                enable_intrinsic_opt),
        dispatch_info_(dispatch_info) {
    SetPackedField<ClinitCheckRequirementField>(clinit_check_requirement);
  }

  bool IsClonable() const override { return true; }
  bool NeedsBss() const override {
    return GetMethodLoadKind() == MethodLoadKind::kBssEntry;
  }

  // Update the dispatch info, removing the now-unneeded HCurrentMethod input
  // if the new dispatch no longer requires it.
  void SetDispatchInfo(DispatchInfo dispatch_info) {
    bool had_current_method_input = HasCurrentMethodInput();
    bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info);

    // Using the current method is the default and once we find a better
    // method load kind, we should not go back to using the current method.
    DCHECK(had_current_method_input || !needs_current_method_input);

    if (had_current_method_input && !needs_current_method_input) {
      DCHECK_EQ(InputAt(GetCurrentMethodIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
      RemoveInputAt(GetCurrentMethodIndex());
    }
    dispatch_info_ = dispatch_info;
  }

  DispatchInfo GetDispatchInfo() const {
    return dispatch_info_;
  }

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() override {
    ArrayRef<HUserRecord<HInstruction*>> input_records = HInvoke::GetInputRecords();
    if (kIsDebugBuild && IsStaticWithExplicitClinitCheck()) {
      DCHECK(!input_records.empty());
      DCHECK_GT(input_records.size(), GetNumberOfArguments());
      HInstruction* last_input = input_records.back().GetInstruction();
      // Note: `last_input` may be null during arguments setup.
      if (last_input != nullptr) {
        // `last_input` is the last input of a static invoke marked as having
        // an explicit clinit check. It must either be:
        // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
        // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
        DCHECK(last_input->IsClinitCheck() || last_input->IsLoadClass()) << last_input->DebugName();
      }
    }
    return input_records;
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const override {
    // We do not access the method via object reference, so we cannot do an implicit null check.
    // TODO: for intrinsics we can generate implicit null checks.
    return false;
  }

  bool CanBeNull() const override {
    return GetType() == DataType::Type::kReference && !IsStringInit();
  }

  MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
  CodePtrLocation GetCodePtrLocation() const {
    // We do CHA analysis after sharpening. When a method has CHA inlining, it
    // cannot call itself, as if the CHA optimization is invalid we want to make
    // sure the method is never executed again. So, while sharpening can return
    // kCallSelf, we bypass it here if there is a CHA optimization.
    if (dispatch_info_.code_ptr_location == CodePtrLocation::kCallSelf &&
        GetBlock()->GetGraph()->HasShouldDeoptimizeFlag()) {
      return CodePtrLocation::kCallArtMethod;
    } else {
      return dispatch_info_.code_ptr_location;
    }
  }
  bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
  bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
  bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kJitDirectAddress; }
  bool HasPcRelativeMethodLoadKind() const {
    return IsPcRelativeMethodLoadKind(GetMethodLoadKind());
  }

  QuickEntrypointEnum GetStringInitEntryPoint() const {
    DCHECK(IsStringInit());
    return static_cast<QuickEntrypointEnum>(dispatch_info_.method_load_data);
  }

  uint64_t GetMethodAddress() const {
    DCHECK(HasMethodAddress());
    return dispatch_info_.method_load_data;
  }

  const DexFile& GetDexFileForPcRelativeDexCache() const;

  ClinitCheckRequirement GetClinitCheckRequirement() const {
    return GetPackedField<ClinitCheckRequirementField>();
  }

  // Is this instruction a call to a static method?
  bool IsStatic() const {
    return GetInvokeType() == kStatic;
  }

  // Does this method load kind need the current method as an input?
  static bool NeedsCurrentMethodInput(DispatchInfo dispatch_info) {
    return dispatch_info.method_load_kind == MethodLoadKind::kRecursive ||
           dispatch_info.method_load_kind == MethodLoadKind::kRuntimeCall ||
           dispatch_info.code_ptr_location == CodePtrLocation::kCallCriticalNative;
  }

  // Get the index of the current method input.
  size_t GetCurrentMethodIndex() const {
    DCHECK(HasCurrentMethodInput());
    return GetCurrentMethodIndexUnchecked();
  }
  // The current method input, when present, is located right after the arguments.
  size_t GetCurrentMethodIndexUnchecked() const {
    return GetNumberOfArguments();
  }

  // Check if the method has a current method input.
  bool HasCurrentMethodInput() const {
    if (NeedsCurrentMethodInput(GetDispatchInfo())) {
      DCHECK(InputAt(GetCurrentMethodIndexUnchecked()) == nullptr ||  // During argument setup.
             InputAt(GetCurrentMethodIndexUnchecked())->IsCurrentMethod());
      return true;
    } else {
      DCHECK(InputCount() == GetCurrentMethodIndexUnchecked() ||
             InputAt(GetCurrentMethodIndexUnchecked()) == nullptr ||  // During argument setup.
             !InputAt(GetCurrentMethodIndexUnchecked())->IsCurrentMethod());
      return false;
    }
  }

  // Get the index of the special input.
  size_t GetSpecialInputIndex() const {
    DCHECK(HasSpecialInput());
    return GetSpecialInputIndexUnchecked();
  }
  // The special input, when present, follows the arguments and the optional
  // current method input.
  size_t GetSpecialInputIndexUnchecked() const {
    return GetNumberOfArguments() + (HasCurrentMethodInput() ? 1u : 0u);
  }

  // Check if the method has a special input.
  bool HasSpecialInput() const {
    size_t other_inputs =
        GetSpecialInputIndexUnchecked() + (IsStaticWithExplicitClinitCheck() ? 1u : 0u);
    size_t input_count = InputCount();
    DCHECK_LE(input_count - other_inputs, 1u) << other_inputs << " " << input_count;
    return other_inputs != input_count;
  }

  void AddSpecialInput(HInstruction* input) {
    // We allow only one special input.
    DCHECK(!HasSpecialInput());
    InsertInputAt(GetSpecialInputIndexUnchecked(), input);
  }

  // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
  // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
  // instruction; only relevant for static calls with explicit clinit check.
  void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
    DCHECK(IsStaticWithExplicitClinitCheck());
    size_t last_input_index = inputs_.size() - 1u;
    HInstruction* last_input = inputs_.back().GetInstruction();
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
    SetPackedField<ClinitCheckRequirementField>(new_requirement);
    DCHECK(!IsStaticWithExplicitClinitCheck());
  }

  // Is this a call to a static method whose declaring class has an
  // explicit initialization check in the graph?
  bool IsStaticWithExplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kExplicit);
  }

  // Is this a call to a static method whose declaring class has an
  // implicit initialization check requirement?
  bool IsStaticWithImplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kImplicit);
  }

  DECLARE_INSTRUCTION(InvokeStaticOrDirect);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(InvokeStaticOrDirect);

 private:
  static constexpr size_t kFieldClinitCheckRequirement = kNumberOfInvokePackedBits;
  static constexpr size_t kFieldClinitCheckRequirementSize =
      MinimumBitsToStore(static_cast<size_t>(ClinitCheckRequirement::kLast));
  static constexpr size_t kNumberOfInvokeStaticOrDirectPackedBits =
      kFieldClinitCheckRequirement + kFieldClinitCheckRequirementSize;
  static_assert(kNumberOfInvokeStaticOrDirectPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using ClinitCheckRequirementField = BitField<ClinitCheckRequirement,
                                               kFieldClinitCheckRequirement,
                                               kFieldClinitCheckRequirementSize>;

  DispatchInfo dispatch_info_;
};
// Debug-printing helpers for the invoke dispatch enums above.
std::ostream& operator<<(std::ostream& os, MethodLoadKind rhs);
std::ostream& operator<<(std::ostream& os, CodePtrLocation rhs);
std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);
5172
// Invoke of a virtual method, dispatched through the vtable of the receiver's
// class at the given vtable index.
class HInvokeVirtual final : public HInvoke {
 public:
  HInvokeVirtual(ArenaAllocator* allocator,
                 uint32_t number_of_arguments,
                 DataType::Type return_type,
                 uint32_t dex_pc,
                 MethodReference method_reference,
                 ArtMethod* resolved_method,
                 MethodReference resolved_method_reference,
                 uint32_t vtable_index,
                 bool enable_intrinsic_opt)
      : HInvoke(kInvokeVirtual,
                allocator,
                number_of_arguments,
                0u,  // number_of_other_inputs
                return_type,
                dex_pc,
                method_reference,
                resolved_method,
                resolved_method_reference,
                kVirtual,
                enable_intrinsic_opt),
        vtable_index_(vtable_index) {
  }

  bool IsClonable() const override { return true; }

  bool CanBeNull() const override {
    // These intrinsics are known to never return null; everything else
    // falls back to the generic HInvoke::CanBeNull().
    switch (GetIntrinsic()) {
      case Intrinsics::kThreadCurrentThread:
      case Intrinsics::kStringBufferAppend:
      case Intrinsics::kStringBufferToString:
      case Intrinsics::kStringBuilderAppendObject:
      case Intrinsics::kStringBuilderAppendString:
      case Intrinsics::kStringBuilderAppendCharSequence:
      case Intrinsics::kStringBuilderAppendCharArray:
      case Intrinsics::kStringBuilderAppendBoolean:
      case Intrinsics::kStringBuilderAppendChar:
      case Intrinsics::kStringBuilderAppendInt:
      case Intrinsics::kStringBuilderAppendLong:
      case Intrinsics::kStringBuilderAppendFloat:
      case Intrinsics::kStringBuilderAppendDouble:
      case Intrinsics::kStringBuilderToString:
        return false;
      default:
        return HInvoke::CanBeNull();
    }
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const override;

  uint32_t GetVTableIndex() const { return vtable_index_; }

  DECLARE_INSTRUCTION(InvokeVirtual);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(InvokeVirtual);

 private:
  // Cached value of the resolved method, to avoid needing the mutator lock.
  const uint32_t vtable_index_;
};
5235
// Invoke of an interface method, dispatched through the IMT at the given
// index. The interface method itself is passed as a hidden argument, loaded
// according to `hidden_argument_load_kind_`.
class HInvokeInterface final : public HInvoke {
 public:
  HInvokeInterface(ArenaAllocator* allocator,
                   uint32_t number_of_arguments,
                   DataType::Type return_type,
                   uint32_t dex_pc,
                   MethodReference method_reference,
                   ArtMethod* resolved_method,
                   MethodReference resolved_method_reference,
                   uint32_t imt_index,
                   MethodLoadKind load_kind,
                   bool enable_intrinsic_opt)
      : HInvoke(kInvokeInterface,
                allocator,
                // One extra argument slot when the hidden-argument load needs
                // the current method (kRecursive).
                number_of_arguments + (NeedsCurrentMethod(load_kind) ? 1 : 0),
                0u,
                return_type,
                dex_pc,
                method_reference,
                resolved_method,
                resolved_method_reference,
                kInterface,
                enable_intrinsic_opt),
        imt_index_(imt_index),
        hidden_argument_load_kind_(load_kind) {
  }

  // Does the given hidden-argument load kind require the current method?
  static bool NeedsCurrentMethod(MethodLoadKind load_kind) {
    return load_kind == MethodLoadKind::kRecursive;
  }

  bool IsClonable() const override { return true; }
  bool NeedsBss() const override {
    return GetHiddenArgumentLoadKind() == MethodLoadKind::kBssEntry;
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
    // TODO: Add implicit null checks in intrinsics.
    return (obj == InputAt(0)) && !IsIntrinsic();
  }

  // The special input (when added) is placed right after the arguments.
  size_t GetSpecialInputIndex() const {
    return GetNumberOfArguments();
  }

  void AddSpecialInput(HInstruction* input) {
    InsertInputAt(GetSpecialInputIndex(), input);
  }

  uint32_t GetImtIndex() const { return imt_index_; }
  MethodLoadKind GetHiddenArgumentLoadKind() const { return hidden_argument_load_kind_; }

  DECLARE_INSTRUCTION(InvokeInterface);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(InvokeInterface);

 private:
  // Cached value of the resolved method, to avoid needing the mutator lock.
  const uint32_t imt_index_;

  // How the hidden argument (the interface method) is being loaded.
  const MethodLoadKind hidden_argument_load_kind_;
};
5300
5301 class HNeg final : public HUnaryOperation {
5302 public:
5303 HNeg(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
5304 : HUnaryOperation(kNeg, result_type, input, dex_pc) {
5305 DCHECK_EQ(result_type, DataType::Kind(input->GetType()));
5306 }
5307
5308 template <typename T> static T Compute(T x) { return -x; }
5309
5310 HConstant* Evaluate(HIntConstant* x) const override {
5311 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
5312 }
5313 HConstant* Evaluate(HLongConstant* x) const override {
5314 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
5315 }
5316 HConstant* Evaluate(HFloatConstant* x) const override {
5317 return GetBlock()->GetGraph()->GetFloatConstant(Compute(x->GetValue()), GetDexPc());
5318 }
5319 HConstant* Evaluate(HDoubleConstant* x) const override {
5320 return GetBlock()->GetGraph()->GetDoubleConstant(Compute(x->GetValue()), GetDexPc());
5321 }
5322
5323 DECLARE_INSTRUCTION(Neg);
5324
5325 protected:
5326 DEFAULT_COPY_CONSTRUCTOR(Neg);
5327 };
5328
5329 class HNewArray final : public HExpression<2> {
5330 public:
5331 HNewArray(HInstruction* cls, HInstruction* length, uint32_t dex_pc, size_t component_size_shift)
5332 : HExpression(kNewArray, DataType::Type::kReference, SideEffects::CanTriggerGC(), dex_pc) {
5333 SetRawInputAt(0, cls);
5334 SetRawInputAt(1, length);
5335 SetPackedField<ComponentSizeShiftField>(component_size_shift);
5336 }
5337
5338 bool IsClonable() const override { return true; }
5339
5340 // Calls runtime so needs an environment.
5341 bool NeedsEnvironment() const override { return true; }
5342
5343 // May throw NegativeArraySizeException, OutOfMemoryError, etc.
5344 bool CanThrow() const override { return true; }
5345
5346 bool CanBeNull() const override { return false; }
5347
5348 HLoadClass* GetLoadClass() const {
5349 DCHECK(InputAt(0)->IsLoadClass());
5350 return InputAt(0)->AsLoadClass();
5351 }
5352
5353 HInstruction* GetLength() const {
5354 return InputAt(1);
5355 }
5356
5357 size_t GetComponentSizeShift() {
5358 return GetPackedField<ComponentSizeShiftField>();
5359 }
5360
5361 DECLARE_INSTRUCTION(NewArray);
5362
5363 protected:
5364 DEFAULT_COPY_CONSTRUCTOR(NewArray);
5365
5366 private:
5367 static constexpr size_t kFieldComponentSizeShift = kNumberOfGenericPackedBits;
5368 static constexpr size_t kFieldComponentSizeShiftSize = MinimumBitsToStore(3u);
5369 static constexpr size_t kNumberOfNewArrayPackedBits =
5370 kFieldComponentSizeShift + kFieldComponentSizeShiftSize;
5371 static_assert(kNumberOfNewArrayPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
5372 using ComponentSizeShiftField =
5373 BitField<size_t, kFieldComponentSizeShift, kFieldComponentSizeShiftSize>;
5374 };
5375
5376 class HAdd final : public HBinaryOperation {
5377 public:
5378 HAdd(DataType::Type result_type,
5379 HInstruction* left,
5380 HInstruction* right,
5381 uint32_t dex_pc = kNoDexPc)
5382 : HBinaryOperation(kAdd, result_type, left, right, SideEffects::None(), dex_pc) {
5383 }
5384
5385 bool IsCommutative() const override { return true; }
5386
5387 template <typename T> static T Compute(T x, T y) { return x + y; }
5388
5389 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5390 return GetBlock()->GetGraph()->GetIntConstant(
5391 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5392 }
5393 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5394 return GetBlock()->GetGraph()->GetLongConstant(
5395 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5396 }
5397 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
5398 return GetBlock()->GetGraph()->GetFloatConstant(
5399 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5400 }
5401 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
5402 return GetBlock()->GetGraph()->GetDoubleConstant(
5403 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5404 }
5405
5406 DECLARE_INSTRUCTION(Add);
5407
5408 protected:
5409 DEFAULT_COPY_CONSTRUCTOR(Add);
5410 };
5411
5412 class HSub final : public HBinaryOperation {
5413 public:
5414 HSub(DataType::Type result_type,
5415 HInstruction* left,
5416 HInstruction* right,
5417 uint32_t dex_pc = kNoDexPc)
5418 : HBinaryOperation(kSub, result_type, left, right, SideEffects::None(), dex_pc) {
5419 }
5420
5421 template <typename T> static T Compute(T x, T y) { return x - y; }
5422
5423 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5424 return GetBlock()->GetGraph()->GetIntConstant(
5425 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5426 }
5427 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5428 return GetBlock()->GetGraph()->GetLongConstant(
5429 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5430 }
5431 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
5432 return GetBlock()->GetGraph()->GetFloatConstant(
5433 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5434 }
5435 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
5436 return GetBlock()->GetGraph()->GetDoubleConstant(
5437 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5438 }
5439
5440 DECLARE_INSTRUCTION(Sub);
5441
5442 protected:
5443 DEFAULT_COPY_CONSTRUCTOR(Sub);
5444 };
5445
5446 class HMul final : public HBinaryOperation {
5447 public:
5448 HMul(DataType::Type result_type,
5449 HInstruction* left,
5450 HInstruction* right,
5451 uint32_t dex_pc = kNoDexPc)
5452 : HBinaryOperation(kMul, result_type, left, right, SideEffects::None(), dex_pc) {
5453 }
5454
5455 bool IsCommutative() const override { return true; }
5456
5457 template <typename T> static T Compute(T x, T y) { return x * y; }
5458
5459 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5460 return GetBlock()->GetGraph()->GetIntConstant(
5461 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5462 }
5463 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5464 return GetBlock()->GetGraph()->GetLongConstant(
5465 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5466 }
5467 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
5468 return GetBlock()->GetGraph()->GetFloatConstant(
5469 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5470 }
5471 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
5472 return GetBlock()->GetGraph()->GetDoubleConstant(
5473 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5474 }
5475
5476 DECLARE_INSTRUCTION(Mul);
5477
5478 protected:
5479 DEFAULT_COPY_CONSTRUCTOR(Mul);
5480 };
5481
5482 class HDiv final : public HBinaryOperation {
5483 public:
5484 HDiv(DataType::Type result_type,
5485 HInstruction* left,
5486 HInstruction* right,
5487 uint32_t dex_pc)
5488 : HBinaryOperation(kDiv, result_type, left, right, SideEffects::None(), dex_pc) {
5489 }
5490
5491 template <typename T>
5492 T ComputeIntegral(T x, T y) const {
5493 DCHECK(!DataType::IsFloatingPointType(GetType())) << GetType();
5494 // Our graph structure ensures we never have 0 for `y` during
5495 // constant folding.
5496 DCHECK_NE(y, 0);
5497 // Special case -1 to avoid getting a SIGFPE on x86(_64).
5498 return (y == -1) ? -x : x / y;
5499 }
5500
5501 template <typename T>
5502 T ComputeFP(T x, T y) const {
5503 DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
5504 return x / y;
5505 }
5506
5507 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5508 return GetBlock()->GetGraph()->GetIntConstant(
5509 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5510 }
5511 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5512 return GetBlock()->GetGraph()->GetLongConstant(
5513 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5514 }
5515 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
5516 return GetBlock()->GetGraph()->GetFloatConstant(
5517 ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
5518 }
5519 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
5520 return GetBlock()->GetGraph()->GetDoubleConstant(
5521 ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
5522 }
5523
5524 DECLARE_INSTRUCTION(Div);
5525
5526 protected:
5527 DEFAULT_COPY_CONSTRUCTOR(Div);
5528 };
5529
5530 class HRem final : public HBinaryOperation {
5531 public:
5532 HRem(DataType::Type result_type,
5533 HInstruction* left,
5534 HInstruction* right,
5535 uint32_t dex_pc)
5536 : HBinaryOperation(kRem, result_type, left, right, SideEffects::None(), dex_pc) {
5537 }
5538
5539 template <typename T>
5540 T ComputeIntegral(T x, T y) const {
5541 DCHECK(!DataType::IsFloatingPointType(GetType())) << GetType();
5542 // Our graph structure ensures we never have 0 for `y` during
5543 // constant folding.
5544 DCHECK_NE(y, 0);
5545 // Special case -1 to avoid getting a SIGFPE on x86(_64).
5546 return (y == -1) ? 0 : x % y;
5547 }
5548
5549 template <typename T>
5550 T ComputeFP(T x, T y) const {
5551 DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
5552 return std::fmod(x, y);
5553 }
5554
5555 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5556 return GetBlock()->GetGraph()->GetIntConstant(
5557 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5558 }
5559 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5560 return GetBlock()->GetGraph()->GetLongConstant(
5561 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5562 }
5563 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
5564 return GetBlock()->GetGraph()->GetFloatConstant(
5565 ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
5566 }
5567 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
5568 return GetBlock()->GetGraph()->GetDoubleConstant(
5569 ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
5570 }
5571
5572 DECLARE_INSTRUCTION(Rem);
5573
5574 protected:
5575 DEFAULT_COPY_CONSTRUCTOR(Rem);
5576 };
5577
5578 class HMin final : public HBinaryOperation {
5579 public:
5580 HMin(DataType::Type result_type,
5581 HInstruction* left,
5582 HInstruction* right,
5583 uint32_t dex_pc)
5584 : HBinaryOperation(kMin, result_type, left, right, SideEffects::None(), dex_pc) {}
5585
5586 bool IsCommutative() const override { return true; }
5587
5588 // Evaluation for integral values.
5589 template <typename T> static T ComputeIntegral(T x, T y) {
5590 return (x <= y) ? x : y;
5591 }
5592
5593 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5594 return GetBlock()->GetGraph()->GetIntConstant(
5595 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5596 }
5597 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5598 return GetBlock()->GetGraph()->GetLongConstant(
5599 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5600 }
5601 // TODO: Evaluation for floating-point values.
5602 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
5603 HFloatConstant* y ATTRIBUTE_UNUSED) const override { return nullptr; }
5604 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
5605 HDoubleConstant* y ATTRIBUTE_UNUSED) const override { return nullptr; }
5606
5607 DECLARE_INSTRUCTION(Min);
5608
5609 protected:
5610 DEFAULT_COPY_CONSTRUCTOR(Min);
5611 };
5612
5613 class HMax final : public HBinaryOperation {
5614 public:
5615 HMax(DataType::Type result_type,
5616 HInstruction* left,
5617 HInstruction* right,
5618 uint32_t dex_pc)
5619 : HBinaryOperation(kMax, result_type, left, right, SideEffects::None(), dex_pc) {}
5620
5621 bool IsCommutative() const override { return true; }
5622
5623 // Evaluation for integral values.
5624 template <typename T> static T ComputeIntegral(T x, T y) {
5625 return (x >= y) ? x : y;
5626 }
5627
5628 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5629 return GetBlock()->GetGraph()->GetIntConstant(
5630 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5631 }
5632 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5633 return GetBlock()->GetGraph()->GetLongConstant(
5634 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5635 }
5636 // TODO: Evaluation for floating-point values.
5637 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
5638 HFloatConstant* y ATTRIBUTE_UNUSED) const override { return nullptr; }
5639 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
5640 HDoubleConstant* y ATTRIBUTE_UNUSED) const override { return nullptr; }
5641
5642 DECLARE_INSTRUCTION(Max);
5643
5644 protected:
5645 DEFAULT_COPY_CONSTRUCTOR(Max);
5646 };
5647
5648 class HAbs final : public HUnaryOperation {
5649 public:
5650 HAbs(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
5651 : HUnaryOperation(kAbs, result_type, input, dex_pc) {}
5652
5653 // Evaluation for integral values.
5654 template <typename T> static T ComputeIntegral(T x) {
5655 return x < 0 ? -x : x;
5656 }
5657
5658 // Evaluation for floating-point values.
5659 // Note, as a "quality of implementation", rather than pure "spec compliance",
5660 // we require that Math.abs() clears the sign bit (but changes nothing else)
5661 // for all floating-point numbers, including NaN (signaling NaN may become quiet though).
5662 // http://b/30758343
5663 template <typename T, typename S> static T ComputeFP(T x) {
5664 S bits = bit_cast<S, T>(x);
5665 return bit_cast<T, S>(bits & std::numeric_limits<S>::max());
5666 }
5667
5668 HConstant* Evaluate(HIntConstant* x) const override {
5669 return GetBlock()->GetGraph()->GetIntConstant(ComputeIntegral(x->GetValue()), GetDexPc());
5670 }
5671 HConstant* Evaluate(HLongConstant* x) const override {
5672 return GetBlock()->GetGraph()->GetLongConstant(ComputeIntegral(x->GetValue()), GetDexPc());
5673 }
5674 HConstant* Evaluate(HFloatConstant* x) const override {
5675 return GetBlock()->GetGraph()->GetFloatConstant(
5676 ComputeFP<float, int32_t>(x->GetValue()), GetDexPc());
5677 }
5678 HConstant* Evaluate(HDoubleConstant* x) const override {
5679 return GetBlock()->GetGraph()->GetDoubleConstant(
5680 ComputeFP<double, int64_t>(x->GetValue()), GetDexPc());
5681 }
5682
5683 DECLARE_INSTRUCTION(Abs);
5684
5685 protected:
5686 DEFAULT_COPY_CONSTRUCTOR(Abs);
5687 };
5688
// Checks that the divisor input is non-zero and throws ArithmeticException
// otherwise; forwards the value when the check passes.
class HDivZeroCheck final : public HExpression<1> {
 public:
  // `HDivZeroCheck` can trigger GC, as it may call the `ArithmeticException`
  // constructor. However it can only do it on a fatal slow path so execution never returns to the
  // instruction following the current one; thus 'SideEffects::None()' is used.
  HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(kDivZeroCheck, value->GetType(), SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool IsClonable() const override { return true; }
  bool CanBeMoved() const override { return true; }

  // Two div-zero checks are interchangeable; equality only depends on the input.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
    return true;
  }

  bool NeedsEnvironment() const override { return true; }
  bool CanThrow() const override { return true; }

  DECLARE_INSTRUCTION(DivZeroCheck);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(DivZeroCheck);
};
5714
// Shift-left. The distance operand is always a 32-bit int and is masked with
// the maximum shift distance for the operand width (see Compute).
class HShl final : public HBinaryOperation {
 public:
  HShl(DataType::Type result_type,
       HInstruction* value,
       HInstruction* distance,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(kShl, result_type, value, distance, SideEffects::None(), dex_pc) {
    // The shifted value must match the result type; the distance is an int
    // even for long shifts.
    DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
  }

  // Constant-folding helper: the distance is masked to the type width first.
  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
    return value << (distance & max_shift_distance);
  }

  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const override {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const override {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  // The remaining operand combinations are invalid for a shift.
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Shl);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Shl);
};
5760
// Arithmetic (sign-extending) shift-right. The distance operand is always a
// 32-bit int and is masked with the maximum shift distance for the type width.
class HShr final : public HBinaryOperation {
 public:
  HShr(DataType::Type result_type,
       HInstruction* value,
       HInstruction* distance,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(kShr, result_type, value, distance, SideEffects::None(), dex_pc) {
    // The shifted value must match the result type; the distance is an int
    // even for long shifts.
    DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
  }

  // Constant-folding helper: `>>` on the signed T gives the arithmetic shift.
  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
    return value >> (distance & max_shift_distance);
  }

  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const override {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const override {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  // The remaining operand combinations are invalid for a shift.
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Shr);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Shr);
};
5806
// Logical (zero-filling) unsigned shift-right. The distance operand is always
// a 32-bit int and is masked with the maximum shift distance for the width.
class HUShr final : public HBinaryOperation {
 public:
  HUShr(DataType::Type result_type,
        HInstruction* value,
        HInstruction* distance,
        uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(kUShr, result_type, value, distance, SideEffects::None(), dex_pc) {
    // The shifted value must match the result type; the distance is an int
    // even for long shifts.
    DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
  }

  // Constant-folding helper: shift on the unsigned representation so the
  // vacated high bits are zero-filled, then cast back to T.
  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
    using V = std::make_unsigned_t<T>;
    V ux = static_cast<V>(value);
    return static_cast<T>(ux >> (distance & max_shift_distance));
  }

  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const override {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const override {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  // The remaining operand combinations are invalid for a shift.
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(UShr);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(UShr);
};
5854
// Bitwise AND. Only defined for integral (int/long) operands.
class HAnd final : public HBinaryOperation {
 public:
  HAnd(DataType::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(kAnd, result_type, left, right, SideEffects::None(), dex_pc) {
  }

  // The operation is commutative: operands may be swapped.
  bool IsCommutative() const override { return true; }

  // Constant-folding helper.
  template <typename T> static T Compute(T x, T y) { return x & y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // Floating-point operands are invalid for a bitwise operation.
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(And);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(And);
};
5892
// Bitwise OR. Only defined for integral (int/long) operands.
class HOr final : public HBinaryOperation {
 public:
  HOr(DataType::Type result_type,
      HInstruction* left,
      HInstruction* right,
      uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(kOr, result_type, left, right, SideEffects::None(), dex_pc) {
  }

  // The operation is commutative: operands may be swapped.
  bool IsCommutative() const override { return true; }

  // Constant-folding helper.
  template <typename T> static T Compute(T x, T y) { return x | y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // Floating-point operands are invalid for a bitwise operation.
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Or);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Or);
};
5930
// Bitwise XOR. Only defined for integral (int/long) operands.
class HXor final : public HBinaryOperation {
 public:
  HXor(DataType::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(kXor, result_type, left, right, SideEffects::None(), dex_pc) {
  }

  // The operation is commutative: operands may be swapped.
  bool IsCommutative() const override { return true; }

  // Constant-folding helper.
  template <typename T> static T Compute(T x, T y) { return x ^ y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // Floating-point operands are invalid for a bitwise operation.
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Xor);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Xor);
};
5968
5969 class HRor final : public HBinaryOperation {
5970 public:
5971 HRor(DataType::Type result_type, HInstruction* value, HInstruction* distance)
5972 : HBinaryOperation(kRor, result_type, value, distance) {
5973 }
5974
5975 template <typename T>
5976 static T Compute(T value, int32_t distance, int32_t max_shift_value) {
5977 using V = std::make_unsigned_t<T>;
5978 V ux = static_cast<V>(value);
5979 if ((distance & max_shift_value) == 0) {
5980 return static_cast<T>(ux);
5981 } else {
5982 const V reg_bits = sizeof(T) * 8;
5983 return static_cast<T>(ux >> (distance & max_shift_value)) |
5984 (value << (reg_bits - (distance & max_shift_value)));
5985 }
5986 }
5987
5988 HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const override {
5989 return GetBlock()->GetGraph()->GetIntConstant(
5990 Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
5991 }
5992 HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const override {
5993 return GetBlock()->GetGraph()->GetLongConstant(
5994 Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
5995 }
5996 HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
5997 HLongConstant* distance ATTRIBUTE_UNUSED) const override {
5998 LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
5999 UNREACHABLE();
6000 }
6001 HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
6002 HFloatConstant* distance ATTRIBUTE_UNUSED) const override {
6003 LOG(FATAL) << DebugName() << " is not defined for float values";
6004 UNREACHABLE();
6005 }
6006 HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
6007 HDoubleConstant* distance ATTRIBUTE_UNUSED) const override {
6008 LOG(FATAL) << DebugName() << " is not defined for double values";
6009 UNREACHABLE();
6010 }
6011
6012 DECLARE_INSTRUCTION(Ror);
6013
6014 protected:
6015 DEFAULT_COPY_CONSTRUCTOR(Ror);
6016 };
6017
// The value of a parameter in this method. Its location depends on
// the calling convention.
class HParameterValue final : public HExpression<0> {
 public:
  HParameterValue(const DexFile& dex_file,
                  dex::TypeIndex type_index,
                  uint8_t index,
                  DataType::Type parameter_type,
                  bool is_this = false)
      : HExpression(kParameterValue, parameter_type, SideEffects::None(), kNoDexPc),
        dex_file_(dex_file),
        type_index_(type_index),
        index_(index) {
    // The implicit `this` argument is initialized as non-nullable; all other
    // parameters may be null until proven otherwise (see SetCanBeNull).
    SetPackedFlag<kFlagIsThis>(is_this);
    SetPackedFlag<kFlagCanBeNull>(!is_this);
  }

  const DexFile& GetDexFile() const { return dex_file_; }
  dex::TypeIndex GetTypeIndex() const { return type_index_; }
  uint8_t GetIndex() const { return index_; }
  bool IsThis() const { return GetPackedFlag<kFlagIsThis>(); }

  bool CanBeNull() const override { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  DECLARE_INSTRUCTION(ParameterValue);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ParameterValue);

 private:
  // Whether or not the parameter value corresponds to 'this' argument.
  static constexpr size_t kFlagIsThis = kNumberOfGenericPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagIsThis + 1;
  static constexpr size_t kNumberOfParameterValuePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfParameterValuePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const DexFile& dex_file_;
  const dex::TypeIndex type_index_;
  // The index of this parameter in the parameters list. Must be less
  // than HGraph::number_of_in_vregs_.
  const uint8_t index_;
};
6062
// Bitwise complement (~x). Only defined for integral (int/long) values.
class HNot final : public HUnaryOperation {
 public:
  HNot(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(kNot, result_type, input, dex_pc) {
  }

  bool CanBeMoved() const override { return true; }
  // Two HNot instructions with equal inputs compute the same value.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
    return true;
  }

  // Constant-folding helper.
  template <typename T> static T Compute(T x) { return ~x; }

  HConstant* Evaluate(HIntConstant* x) const override {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x) const override {
    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
  }
  // Floating-point operands are invalid for a bitwise operation.
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Not);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Not);
};
6096
// Logical negation of a boolean (0/1) input; the result type is kBool.
class HBooleanNot final : public HUnaryOperation {
 public:
  explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(kBooleanNot, DataType::Type::kBool, input, dex_pc) {
  }

  bool CanBeMoved() const override { return true; }
  // Two HBooleanNot instructions with equal inputs compute the same value.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
    return true;
  }

  // Constant-folding helper. The input must already be a 0/1 value.
  template <typename T> static bool Compute(T x) {
    DCHECK(IsUint<1>(x)) << x;
    return !x;
  }

  HConstant* Evaluate(HIntConstant* x) const override {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  // Booleans are represented as ints; other constant kinds are invalid here.
  HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for long values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(BooleanNot);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(BooleanNot);
};
6134
// Converts its single input to `result_type` (primitive conversions only).
class HTypeConversion final : public HExpression<1> {
 public:
  // Instantiate a type conversion of `input` to `result_type`.
  HTypeConversion(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(kTypeConversion, result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
    // Invariant: We should never generate a conversion to a Boolean value.
    DCHECK_NE(DataType::Type::kBool, result_type);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  DataType::Type GetInputType() const { return GetInput()->GetType(); }
  DataType::Type GetResultType() const { return GetType(); }

  bool IsClonable() const override { return true; }
  bool CanBeMoved() const override { return true; }
  // Conversions with the same input and result type are interchangeable; the
  // result type is part of the generic comparison already.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
    return true;
  }
  // Return whether the conversion is implicit. This includes conversion to the same type.
  bool IsImplicitConversion() const {
    return DataType::IsTypeConversionImplicit(GetInputType(), GetResultType());
  }

  // Try to statically evaluate the conversion and return a HConstant
  // containing the result. If the input cannot be converted, return nullptr.
  HConstant* TryStaticEvaluation() const;

  DECLARE_INSTRUCTION(TypeConversion);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(TypeConversion);
};
6168
// Sentinel "no register" value; -1 converts to the maximum uint32_t.
static constexpr uint32_t kNoRegNumber = -1;
6170
// Checks that its single input is non-null; throws NullPointerException
// otherwise. The (non-null) value is passed through as the result.
class HNullCheck final : public HExpression<1> {
 public:
  // `HNullCheck` can trigger GC, as it may call the `NullPointerException`
  // constructor. However it can only do it on a fatal slow path so execution never returns to the
  // instruction following the current one; thus 'SideEffects::None()' is used.
  HNullCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(kNullCheck, value->GetType(), SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool IsClonable() const override { return true; }
  bool CanBeMoved() const override { return true; }
  // Two null checks on the same input are interchangeable.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
    return true;
  }

  // Needs an environment (and can throw) because of the exception slow path.
  bool NeedsEnvironment() const override { return true; }

  bool CanThrow() const override { return true; }

  // After the check succeeds the value is known to be non-null.
  bool CanBeNull() const override { return false; }

  DECLARE_INSTRUCTION(NullCheck);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(NullCheck);
};
6198
// Embeds an ArtField and all the information required by the compiler. We cache
// that information to avoid requiring the mutator lock every time we need it.
class FieldInfo : public ValueObject {
 public:
  FieldInfo(ArtField* field,
            MemberOffset field_offset,
            DataType::Type field_type,
            bool is_volatile,
            uint32_t index,
            uint16_t declaring_class_def_index,
            const DexFile& dex_file)
      : field_(field),
        field_offset_(field_offset),
        field_type_(field_type),
        is_volatile_(is_volatile),
        index_(index),
        declaring_class_def_index_(declaring_class_def_index),
        dex_file_(dex_file) {}

  ArtField* GetField() const { return field_; }
  MemberOffset GetFieldOffset() const { return field_offset_; }
  DataType::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return index_; }
  uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;}
  const DexFile& GetDexFile() const { return dex_file_; }
  bool IsVolatile() const { return is_volatile_; }

  // Structural equality: all cached fields match and the dex file is the
  // same object (compared by address).
  bool Equals(const FieldInfo& other) const {
    return field_ == other.field_ &&
           field_offset_ == other.field_offset_ &&
           field_type_ == other.field_type_ &&
           is_volatile_ == other.is_volatile_ &&
           index_ == other.index_ &&
           declaring_class_def_index_ == other.declaring_class_def_index_ &&
           &dex_file_ == &other.dex_file_;
  }

  // Debug dump of all cached data.
  std::ostream& Dump(std::ostream& os) const {
    os << field_ << ", off: " << field_offset_ << ", type: " << field_type_
       << ", volatile: " << std::boolalpha << is_volatile_ << ", index_: " << std::dec << index_
       << ", declaring_class: " << declaring_class_def_index_ << ", dex: " << dex_file_;
    return os;
  }

 private:
  const ArtField* GetFieldDoNotUse() const;  // (no such member; keep fields below)

 private:
  ArtField* const field_;
  const MemberOffset field_offset_;
  const DataType::Type field_type_;
  const bool is_volatile_;
  // Field index within the dex file.
  const uint32_t index_;
  const uint16_t declaring_class_def_index_;
  const DexFile& dex_file_;
};
6252
// Equality of two FieldInfo objects delegates to FieldInfo::Equals.
inline bool operator==(const FieldInfo& a, const FieldInfo& b) {
  return a.Equals(b);
}

// Stream output delegates to FieldInfo::Dump.
inline std::ostream& operator<<(std::ostream& os, const FieldInfo& a) {
  return a.Dump(os);
}
6260
// Reads an instance field from the object in input 0.
class HInstanceFieldGet final : public HExpression<1> {
 public:
  HInstanceFieldGet(HInstruction* value,
                    ArtField* field,
                    DataType::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HExpression(kInstanceFieldGet,
                    field_type,
                    SideEffects::FieldReadOfType(field_type, is_volatile),
                    dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    SetRawInputAt(0, value);
  }

  bool IsClonable() const override { return true; }
  // Volatile reads must not be moved relative to other memory operations.
  bool CanBeMoved() const override { return !IsVolatile(); }

  // Two gets match when they read at the same offset; the object input is
  // compared by the generic instruction comparison.
  bool InstructionDataEquals(const HInstruction* other) const override {
    const HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  // A null check on the object can be subsumed by this load when the field
  // offset is small enough that a null base faults.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  size_t ComputeHashCode() const override {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  bool IsFieldAccess() const override { return true; }
  const FieldInfo& GetFieldInfo() const override { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  // Retype the get; restricted to integral types of the same size
  // (see the DCHECKs).
  void SetType(DataType::Type new_type) {
    DCHECK(DataType::IsIntegralType(GetType()));
    DCHECK(DataType::IsIntegralType(new_type));
    DCHECK_EQ(DataType::Size(GetType()), DataType::Size(new_type));
    SetPackedField<TypeField>(new_type);
  }

  DECLARE_INSTRUCTION(InstanceFieldGet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(InstanceFieldGet);

 private:
  const FieldInfo field_info_;
};
6323
6324 class HPredicatedInstanceFieldGet final : public HExpression<2> {
6325 public:
6326 HPredicatedInstanceFieldGet(HInstanceFieldGet* orig,
6327 HInstruction* target,
6328 HInstruction* default_val)
6329 : HExpression(kPredicatedInstanceFieldGet,
6330 orig->GetFieldType(),
6331 orig->GetSideEffects(),
6332 orig->GetDexPc()),
6333 field_info_(orig->GetFieldInfo()) {
6334 // NB Default-val is at 0 so we can avoid doing a move.
6335 SetRawInputAt(1, target);
6336 SetRawInputAt(0, default_val);
6337 }
6338
6339 HPredicatedInstanceFieldGet(HInstruction* value,
6340 ArtField* field,
6341 HInstruction* default_value,
6342 DataType::Type field_type,
6343 MemberOffset field_offset,
6344 bool is_volatile,
6345 uint32_t field_idx,
6346 uint16_t declaring_class_def_index,
6347 const DexFile& dex_file,
6348 uint32_t dex_pc)
6349 : HExpression(kPredicatedInstanceFieldGet,
6350 field_type,
6351 SideEffects::FieldReadOfType(field_type, is_volatile),
6352 dex_pc),
6353 field_info_(field,
6354 field_offset,
6355 field_type,
6356 is_volatile,
6357 field_idx,
6358 declaring_class_def_index,
6359 dex_file) {
6360 SetRawInputAt(1, value);
6361 SetRawInputAt(0, default_value);
6362 }
6363
6364 bool IsClonable() const override {
6365 return true;
6366 }
6367 bool CanBeMoved() const override {
6368 return !IsVolatile();
6369 }
6370
6371 HInstruction* GetDefaultValue() const {
6372 return InputAt(0);
6373 }
6374 HInstruction* GetTarget() const {
6375 return InputAt(1);
6376 }
6377
6378 bool InstructionDataEquals(const HInstruction* other) const override {
6379 const HPredicatedInstanceFieldGet* other_get = other->AsPredicatedInstanceFieldGet();
6380 return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue() &&
6381 GetDefaultValue() == other_get->GetDefaultValue();
6382 }
6383
6384 bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
6385 return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
6386 }
6387
6388 size_t ComputeHashCode() const override {
6389 return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
6390 }
6391
6392 bool IsFieldAccess() const override { return true; }
6393 const FieldInfo& GetFieldInfo() const override { return field_info_; }
6394 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
6395 DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
6396 bool IsVolatile() const { return field_info_.IsVolatile(); }
6397
6398 void SetType(DataType::Type new_type) {
6399 DCHECK(DataType::IsIntegralType(GetType()));
6400 DCHECK(DataType::IsIntegralType(new_type));
6401 DCHECK_EQ(DataType::Size(GetType()), DataType::Size(new_type));
6402 SetPackedField<TypeField>(new_type);
6403 }
6404
6405 DECLARE_INSTRUCTION(PredicatedInstanceFieldGet);
6406
6407 protected:
6408 DEFAULT_COPY_CONSTRUCTOR(PredicatedInstanceFieldGet);
6409
6410 private:
6411 const FieldInfo field_info_;
6412 };
6413
enum class WriteBarrierKind {
  // Emit the write barrier, with a runtime optimization which checks if the value that it is being
  // set is null.
  kEmitWithNullCheck,
  // Emit the write barrier, without the runtime null check optimization. This could be set because:
  // A) It is a write barrier for an ArraySet (which does the optimization with the type check, so
  // it never does the optimization at the write barrier stage)
  // B) We know that the input can't be null
  // C) This write barrier is actually several write barriers coalesced into one. Potentially we
  // could ask if every value is null for a runtime optimization at the cost of compile time / code
  // size. At the time of writing it was deemed not worth the effort.
  kEmitNoNullCheck,
  // Skip emitting the write barrier. This could be set because:
  // A) The write barrier is not needed (e.g. it is not a reference, or the value is the null
  // constant)
  // B) This write barrier was coalesced into another one so there's no need to emit it.
  kDontEmit,
  // Alias of the last enumerator, used to size packed bit fields
  // (see MinimumBitsToStore(kLast) below).
  kLast = kDontEmit
};
// Debug/log printing for WriteBarrierKind values.
std::ostream& operator<<(std::ostream& os, WriteBarrierKind rhs);
6434
// Writes `value` (input 1) into an instance field of the object in input 0.
class HInstanceFieldSet final : public HExpression<2> {
 public:
  HInstanceFieldSet(HInstruction* object,
                    HInstruction* value,
                    ArtField* field,
                    DataType::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HExpression(kInstanceFieldSet,
                    SideEffects::FieldWriteOfType(field_type, is_volatile),
                    dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    // Conservative defaults: the value may be null, the set is not
    // predicated, and the write barrier keeps its null-check optimization.
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetPackedFlag<kFlagIsPredicatedSet>(false);
    SetPackedField<WriteBarrierKindField>(WriteBarrierKind::kEmitWithNullCheck);
    SetRawInputAt(0, object);
    SetRawInputAt(1, value);
  }

  bool IsClonable() const override { return true; }

  // A null check on the object can be subsumed by this store when the field
  // offset is small enough that a null base faults.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  bool IsFieldAccess() const override { return true; }
  const FieldInfo& GetFieldInfo() const override { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }
  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }
  bool GetIsPredicatedSet() const { return GetPackedFlag<kFlagIsPredicatedSet>(); }
  void SetIsPredicatedSet(bool value = true) { SetPackedFlag<kFlagIsPredicatedSet>(value); }
  WriteBarrierKind GetWriteBarrierKind() { return GetPackedField<WriteBarrierKindField>(); }
  // The barrier kind may only be weakened from the constructor's default.
  void SetWriteBarrierKind(WriteBarrierKind kind) {
    DCHECK(kind != WriteBarrierKind::kEmitWithNullCheck)
        << "We shouldn't go back to the original value.";
    SetPackedField<WriteBarrierKindField>(kind);
  }

  DECLARE_INSTRUCTION(InstanceFieldSet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(InstanceFieldSet);

 private:
  // Packed-bit layout: value-can-be-null flag, predicated-set flag, then the
  // write barrier kind occupying kWriteBarrierKindSize bits.
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kFlagIsPredicatedSet = kFlagValueCanBeNull + 1;
  static constexpr size_t kWriteBarrierKind = kFlagIsPredicatedSet + 1;
  static constexpr size_t kWriteBarrierKindSize =
      MinimumBitsToStore(static_cast<size_t>(WriteBarrierKind::kLast));
  static constexpr size_t kNumberOfInstanceFieldSetPackedBits =
      kWriteBarrierKind + kWriteBarrierKindSize;
  static_assert(kNumberOfInstanceFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;
  using WriteBarrierKindField =
      BitField<WriteBarrierKind, kWriteBarrierKind, kWriteBarrierKindSize>;
};
6507
// Reads an element of type `type` from the array in input 0 at the index in
// input 1. Also used for String.charAt() (see kFlagIsStringCharAt below).
class HArrayGet final : public HExpression<2> {
 public:
  // Convenience constructor: derives the side effects from the element type.
  HArrayGet(HInstruction* array,
            HInstruction* index,
            DataType::Type type,
            uint32_t dex_pc)
      : HArrayGet(array,
                  index,
                  type,
                  SideEffects::ArrayReadOfType(type),
                  dex_pc,
                  /* is_string_char_at= */ false) {
  }

  HArrayGet(HInstruction* array,
            HInstruction* index,
            DataType::Type type,
            SideEffects side_effects,
            uint32_t dex_pc,
            bool is_string_char_at)
      : HExpression(kArrayGet, type, side_effects, dex_pc) {
    SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
  }

  bool IsClonable() const override { return true; }
  bool CanBeMoved() const override { return true; }
  // Array gets carry no extra data; equal inputs imply equal results.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const override {
    // TODO: We can be smarter here.
    // Currently, unless the array is the result of NewArray, the array access is always
    // preceded by some form of null NullCheck necessary for the bounds check, usually
    // implicit null check on the ArrayLength input to BoundsCheck or Deoptimize for
    // dynamic BCE. There are cases when these could be removed to produce better code.
    // If we ever add optimizations to do so we should allow an implicit check here
    // (as long as the address falls in the first page).
    //
    // As an example of such fancy optimization, we could eliminate BoundsCheck for
    //   a = cond ? new int[1] : null;
    //   a[0];  // The Phi does not need bounds check for either input.
    return false;
  }

  // Whether `other` is the int/float (or long/double) counterpart of this get
  // on the same array/index at the same dex pc. The debug checks verify that
  // the two gets differ exactly in integral vs floating-point type.
  bool IsEquivalentOf(HArrayGet* other) const {
    bool result = (GetDexPc() == other->GetDexPc());
    if (kIsDebugBuild && result) {
      DCHECK_EQ(GetBlock(), other->GetBlock());
      DCHECK_EQ(GetArray(), other->GetArray());
      DCHECK_EQ(GetIndex(), other->GetIndex());
      if (DataType::IsIntOrLongType(GetType())) {
        DCHECK(DataType::IsFloatingPointType(other->GetType())) << other->GetType();
      } else {
        DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
        DCHECK(DataType::IsIntOrLongType(other->GetType())) << other->GetType();
      }
    }
    return result;
  }

  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }

  // Retype the get; restricted to integral types of the same size
  // (see the DCHECKs).
  void SetType(DataType::Type new_type) {
    DCHECK(DataType::IsIntegralType(GetType()));
    DCHECK(DataType::IsIntegralType(new_type));
    DCHECK_EQ(DataType::Size(GetType()), DataType::Size(new_type));
    SetPackedField<TypeField>(new_type);
  }

  DECLARE_INSTRUCTION(ArrayGet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ArrayGet);

 private:
  // We treat a String as an array, creating the HArrayGet from String.charAt()
  // intrinsic in the instruction simplifier. We can always determine whether
  // a particular HArrayGet is actually a String.charAt() by looking at the type
  // of the input but that requires holding the mutator lock, so we prefer to use
  // a flag, so that code generators don't need to do the locking.
  static constexpr size_t kFlagIsStringCharAt = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfArrayGetPackedBits = kFlagIsStringCharAt + 1;
  static_assert(kNumberOfArrayGetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
};
6598
6599 class HArraySet final : public HExpression<3> {
6600 public:
6601 HArraySet(HInstruction* array,
6602 HInstruction* index,
6603 HInstruction* value,
6604 DataType::Type expected_component_type,
6605 uint32_t dex_pc)
6606 : HArraySet(array,
6607 index,
6608 value,
6609 expected_component_type,
6610 // Make a best guess for side effects now, may be refined during SSA building.
6611 ComputeSideEffects(GetComponentType(value->GetType(), expected_component_type)),
6612 dex_pc) {
6613 }
6614
6615 HArraySet(HInstruction* array,
6616 HInstruction* index,
6617 HInstruction* value,
6618 DataType::Type expected_component_type,
6619 SideEffects side_effects,
6620 uint32_t dex_pc)
6621 : HExpression(kArraySet, side_effects, dex_pc) {
6622 SetPackedField<ExpectedComponentTypeField>(expected_component_type);
6623 SetPackedFlag<kFlagNeedsTypeCheck>(value->GetType() == DataType::Type::kReference);
6624 SetPackedFlag<kFlagValueCanBeNull>(true);
6625 SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(false);
6626 // ArraySets never do the null check optimization at the write barrier stage.
6627 SetPackedField<WriteBarrierKindField>(WriteBarrierKind::kEmitNoNullCheck);
6628 SetRawInputAt(0, array);
6629 SetRawInputAt(1, index);
6630 SetRawInputAt(2, value);
6631 }
6632
6633 bool IsClonable() const override { return true; }
6634
6635 bool NeedsEnvironment() const override {
6636 // We call a runtime method to throw ArrayStoreException.
6637 return NeedsTypeCheck();
6638 }
6639
6640 // Can throw ArrayStoreException.
6641 bool CanThrow() const override { return NeedsTypeCheck(); }
6642
6643 bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const override {
6644 // TODO: Same as for ArrayGet.
6645 return false;
6646 }
6647
6648 void ClearTypeCheck() {
6649 SetPackedFlag<kFlagNeedsTypeCheck>(false);
6650 // Clear the `CanTriggerGC` flag too as we can only trigger a GC when doing a type check.
6651 SetSideEffects(GetSideEffects().Exclusion(SideEffects::CanTriggerGC()));
6652 }
6653
6654 void ClearValueCanBeNull() {
6655 SetPackedFlag<kFlagValueCanBeNull>(false);
6656 }
6657
6658 void SetStaticTypeOfArrayIsObjectArray() {
6659 SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(true);
6660 }
6661
6662 bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
6663 bool NeedsTypeCheck() const { return GetPackedFlag<kFlagNeedsTypeCheck>(); }
6664 bool StaticTypeOfArrayIsObjectArray() const {
6665 return GetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>();
6666 }
6667
6668 HInstruction* GetArray() const { return InputAt(0); }
6669 HInstruction* GetIndex() const { return InputAt(1); }
6670 HInstruction* GetValue() const { return InputAt(2); }
6671
6672 DataType::Type GetComponentType() const {
6673 return GetComponentType(GetValue()->GetType(), GetRawExpectedComponentType());
6674 }
6675
6676 static DataType::Type GetComponentType(DataType::Type value_type,
6677 DataType::Type expected_component_type) {
6678 // The Dex format does not type floating point index operations. Since the
6679 // `expected_component_type` comes from SSA building and can therefore not
6680 // be correct, we also check what is the value type. If it is a floating
6681 // point type, we must use that type.
6682 return ((value_type == DataType::Type::kFloat32) || (value_type == DataType::Type::kFloat64))
6683 ? value_type
6684 : expected_component_type;
6685 }
6686
6687 DataType::Type GetRawExpectedComponentType() const {
6688 return GetPackedField<ExpectedComponentTypeField>();
6689 }
6690
6691 static SideEffects ComputeSideEffects(DataType::Type type) {
6692 return SideEffects::ArrayWriteOfType(type).Union(SideEffectsForArchRuntimeCalls(type));
6693 }
6694
6695 static SideEffects SideEffectsForArchRuntimeCalls(DataType::Type value_type) {
6696 return (value_type == DataType::Type::kReference) ? SideEffects::CanTriggerGC()
6697 : SideEffects::None();
6698 }
6699
6700 WriteBarrierKind GetWriteBarrierKind() { return GetPackedField<WriteBarrierKindField>(); }
6701
6702 void SetWriteBarrierKind(WriteBarrierKind kind) {
6703 DCHECK(kind != WriteBarrierKind::kEmitNoNullCheck)
6704 << "We shouldn't go back to the original value.";
6705 DCHECK(kind != WriteBarrierKind::kEmitWithNullCheck)
6706 << "We never do the null check optimization for ArraySets.";
6707 SetPackedField<WriteBarrierKindField>(kind);
6708 }
6709
6710 DECLARE_INSTRUCTION(ArraySet);
6711
6712 protected:
6713 DEFAULT_COPY_CONSTRUCTOR(ArraySet);
6714
6715 private:
6716 static constexpr size_t kFieldExpectedComponentType = kNumberOfGenericPackedBits;
6717 static constexpr size_t kFieldExpectedComponentTypeSize =
6718 MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
6719 static constexpr size_t kFlagNeedsTypeCheck =
6720 kFieldExpectedComponentType + kFieldExpectedComponentTypeSize;
6721 static constexpr size_t kFlagValueCanBeNull = kFlagNeedsTypeCheck + 1;
6722 // Cached information for the reference_type_info_ so that codegen
6723 // does not need to inspect the static type.
6724 static constexpr size_t kFlagStaticTypeOfArrayIsObjectArray = kFlagValueCanBeNull + 1;
6725 static constexpr size_t kWriteBarrierKind = kFlagStaticTypeOfArrayIsObjectArray + 1;
6726 static constexpr size_t kWriteBarrierKindSize =
6727 MinimumBitsToStore(static_cast<size_t>(WriteBarrierKind::kLast));
6728 static constexpr size_t kNumberOfArraySetPackedBits = kWriteBarrierKind + kWriteBarrierKindSize;
6729 static_assert(kNumberOfArraySetPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
6730 using ExpectedComponentTypeField =
6731 BitField<DataType::Type, kFieldExpectedComponentType, kFieldExpectedComponentTypeSize>;
6732
6733 using WriteBarrierKindField =
6734 BitField<WriteBarrierKind, kWriteBarrierKind, kWriteBarrierKindSize>;
6735 };
6736
// Reads the length of an array (or of a String treated as an array, see
// the `is_string_length` flag below). Always of type kInt32.
class HArrayLength final : public HExpression<1> {
 public:
  HArrayLength(HInstruction* array, uint32_t dex_pc, bool is_string_length = false)
      : HExpression(kArrayLength, DataType::Type::kInt32, SideEffects::None(), dex_pc) {
    SetPackedFlag<kFlagIsStringLength>(is_string_length);
    // Note that arrays do not change length, so the instruction does not
    // depend on any write.
    SetRawInputAt(0, array);
  }

  bool IsClonable() const override { return true; }
  bool CanBeMoved() const override { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
    return true;
  }
  // An implicit null check can be folded into this instruction when the
  // dereferenced object is exactly the array input.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
    return obj == InputAt(0);
  }

  bool IsStringLength() const { return GetPackedFlag<kFlagIsStringLength>(); }

  DECLARE_INSTRUCTION(ArrayLength);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ArrayLength);

 private:
  // We treat a String as an array, creating the HArrayLength from String.length()
  // or String.isEmpty() intrinsic in the instruction simplifier. We can always
  // determine whether a particular HArrayLength is actually a String.length() by
  // looking at the type of the input but that requires holding the mutator lock, so
  // we prefer to use a flag, so that code generators don't need to do the locking.
  static constexpr size_t kFlagIsStringLength = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfArrayLengthPackedBits = kFlagIsStringLength + 1;
  static_assert(kNumberOfArrayLengthPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
};
6774
// Checks that `index` is a valid index for an array (or String) of the given
// `length`; throws when the check fails (see CanThrow()). The instruction's
// value is the (checked) index, so users can consume it directly.
class HBoundsCheck final : public HExpression<2> {
 public:
  // `HBoundsCheck` can trigger GC, as it may call the `IndexOutOfBoundsException`
  // constructor. However it can only do it on a fatal slow path so execution never returns to the
  // instruction following the current one; thus 'SideEffects::None()' is used.
  HBoundsCheck(HInstruction* index,
               HInstruction* length,
               uint32_t dex_pc,
               bool is_string_char_at = false)
      : HExpression(kBoundsCheck, index->GetType(), SideEffects::None(), dex_pc) {
    // Only int-kind indices are supported.
    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(index->GetType()));
    SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
    SetRawInputAt(0, index);
    SetRawInputAt(1, length);
  }

  bool IsClonable() const override { return true; }
  bool CanBeMoved() const override { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
    return true;
  }

  bool NeedsEnvironment() const override { return true; }

  bool CanThrow() const override { return true; }

  // Whether this check guards a String.charAt() rather than a plain array access.
  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }

  HInstruction* GetIndex() const { return InputAt(0); }

  DECLARE_INSTRUCTION(BoundsCheck);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(BoundsCheck);

 private:
  static constexpr size_t kFlagIsStringCharAt = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfBoundsCheckPackedBits = kFlagIsStringCharAt + 1;
  static_assert(kNumberOfBoundsCheckPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
};
6816
// Marks a point where the executing thread may be suspended by the runtime
// (hence the CanTriggerGC() side effects). Present in every loop; see the
// note on kFlagIsNoOp below.
class HSuspendCheck final : public HExpression<0> {
 public:
  explicit HSuspendCheck(uint32_t dex_pc = kNoDexPc, bool is_no_op = false)
      : HExpression(kSuspendCheck, SideEffects::CanTriggerGC(), dex_pc),
        slow_path_(nullptr) {
    SetPackedFlag<kFlagIsNoOp>(is_no_op);
  }

  bool IsClonable() const override { return true; }

  bool NeedsEnvironment() const override {
    return true;
  }

  void SetIsNoOp(bool is_no_op) { SetPackedFlag<kFlagIsNoOp>(is_no_op); }
  bool IsNoOp() const { return GetPackedFlag<kFlagIsNoOp>(); }


  void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
  SlowPathCode* GetSlowPath() const { return slow_path_; }

  DECLARE_INSTRUCTION(SuspendCheck);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(SuspendCheck);

  // True if the HSuspendCheck should not emit any code during codegen. It is
  // not possible to simply remove this instruction to disable codegen, as
  // other optimizations (e.g: CHAGuardVisitor::HoistGuard) depend on
  // HSuspendCheck being present in every loop.
  static constexpr size_t kFlagIsNoOp = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfSuspendCheckPackedBits = kFlagIsNoOp + 1;
  static_assert(kNumberOfSuspendCheckPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");

 private:
  // Only used for code generation, in order to share the same slow path between back edges
  // of a same loop.
  SlowPathCode* slow_path_;
};
6857
// Pseudo-instruction which doesn't generate any code.
// If `needs_environment` is true, it can be used to generate an environment. It is used, for
// example, to provide the native debugger with mapping information. It ensures that we can generate
// line number and local variables at this point.
class HNop : public HExpression<0> {
 public:
  explicit HNop(uint32_t dex_pc, bool needs_environment)
      : HExpression<0>(kNop, SideEffects::None(), dex_pc), needs_environment_(needs_environment) {
  }

  bool NeedsEnvironment() const override {
    return needs_environment_;
  }

  DECLARE_INSTRUCTION(Nop);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Nop);

 private:
  // Whether an environment should be kept for this no-op (see class comment).
  bool needs_environment_;
};
6880
6881 /**
6882 * Instruction to load a Class object.
6883 */
class HLoadClass final : public HInstruction {
 public:
  // Determines how to load the Class.
  enum class LoadKind {
    // We cannot load this class. See HSharpening::SharpenLoadClass.
    kInvalid = -1,

    // Use the Class* from the method's own ArtMethod*.
    kReferrersClass,

    // Use PC-relative boot image Class* address that will be known at link time.
    // Used for boot image classes referenced by boot image code.
    kBootImageLinkTimePcRelative,

    // Load from an entry in the .data.bimg.rel.ro using a PC-relative load.
    // Used for boot image classes referenced by apps in AOT-compiled code.
    kBootImageRelRo,

    // Load from an entry in the .bss section using a PC-relative load.
    // Used for classes outside boot image referenced by AOT-compiled app and boot image code.
    kBssEntry,

    // Load from an entry for public class in the .bss section using a PC-relative load.
    // Used for classes that were unresolved during AOT-compilation outside the literal
    // package of the compiling class. Such classes are accessible only if they are public
    // and the .bss entry shall therefore be filled only if the resolved class is public.
    kBssEntryPublic,

    // Load from an entry for package class in the .bss section using a PC-relative load.
    // Used for classes that were unresolved during AOT-compilation but within the literal
    // package of the compiling class. Such classes are accessible if they are public or
    // in the same package which, given the literal package match, requires only matching
    // defining class loader and the .bss entry shall therefore be filled only if at least
    // one of those conditions holds. Note that all code in an oat file belongs to classes
    // with the same defining class loader.
    kBssEntryPackage,

    // Use a known boot image Class* address, embedded in the code by the codegen.
    // Used for boot image classes referenced by apps in JIT-compiled code.
    kJitBootImageAddress,

    // Load from the root table associated with the JIT compiled method.
    kJitTableAddress,

    // Load using a simple runtime call. This is the fall-back load kind when
    // the codegen is unable to use another appropriate kind.
    kRuntimeCall,

    kLast = kRuntimeCall
  };

  HLoadClass(HCurrentMethod* current_method,
             dex::TypeIndex type_index,
             const DexFile& dex_file,
             Handle<mirror::Class> klass,
             bool is_referrers_class,
             uint32_t dex_pc,
             bool needs_access_check)
      : HInstruction(kLoadClass,
                     DataType::Type::kReference,
                     SideEffectsForArchRuntimeCalls(),
                     dex_pc),
        special_input_(HUserRecord<HInstruction*>(current_method)),
        type_index_(type_index),
        dex_file_(dex_file),
        klass_(klass) {
    // Referrers class should not need access check. We never inline unverified
    // methods so we can't possibly end up in this situation.
    DCHECK_IMPLIES(is_referrers_class, !needs_access_check);

    // The load kind starts out conservative; it may be sharpened later via SetLoadKind().
    SetPackedField<LoadKindField>(
        is_referrers_class ? LoadKind::kReferrersClass : LoadKind::kRuntimeCall);
    SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
    SetPackedFlag<kFlagIsInBootImage>(false);
    SetPackedFlag<kFlagGenerateClInitCheck>(false);
    SetPackedFlag<kFlagValidLoadedClassRTI>(false);
  }

  bool IsClonable() const override { return true; }

  void SetLoadKind(LoadKind load_kind);

  LoadKind GetLoadKind() const {
    return GetPackedField<LoadKindField>();
  }

  bool HasPcRelativeLoadKind() const {
    return GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
           GetLoadKind() == LoadKind::kBootImageRelRo ||
           GetLoadKind() == LoadKind::kBssEntry ||
           GetLoadKind() == LoadKind::kBssEntryPublic ||
           GetLoadKind() == LoadKind::kBssEntryPackage;
  }

  bool CanBeMoved() const override { return true; }

  bool InstructionDataEquals(const HInstruction* other) const override;

  size_t ComputeHashCode() const override { return type_index_.index_; }

  bool CanBeNull() const override { return false; }

  bool NeedsEnvironment() const override {
    return CanCallRuntime();
  }
  bool NeedsBss() const override {
    LoadKind load_kind = GetLoadKind();
    return load_kind == LoadKind::kBssEntry ||
           load_kind == LoadKind::kBssEntryPublic ||
           load_kind == LoadKind::kBssEntryPackage;
  }

  void SetMustGenerateClinitCheck(bool generate_clinit_check) {
    SetPackedFlag<kFlagGenerateClInitCheck>(generate_clinit_check);
  }

  // Whether code generation may need a runtime call for this load (and hence
  // an environment): access check, clinit check, or a load kind with a runtime path.
  bool CanCallRuntime() const {
    return NeedsAccessCheck() ||
           MustGenerateClinitCheck() ||
           GetLoadKind() == LoadKind::kRuntimeCall ||
           GetLoadKind() == LoadKind::kBssEntry;
  }

  bool CanThrow() const override {
    return NeedsAccessCheck() ||
           MustGenerateClinitCheck() ||
           // If the class is in the boot image, the lookup in the runtime call cannot throw.
           ((GetLoadKind() == LoadKind::kRuntimeCall ||
             GetLoadKind() == LoadKind::kBssEntry) &&
            !IsInBootImage());
  }

  ReferenceTypeInfo GetLoadedClassRTI() {
    if (GetPackedFlag<kFlagValidLoadedClassRTI>()) {
      // Note: The is_exact flag from the return value should not be used.
      return ReferenceTypeInfo::CreateUnchecked(klass_, /* is_exact= */ true);
    } else {
      return ReferenceTypeInfo::CreateInvalid();
    }
  }

  // Loaded class RTI is marked as valid by RTP if the klass_ is admissible.
  void SetValidLoadedClassRTI() {
    DCHECK(klass_ != nullptr);
    SetPackedFlag<kFlagValidLoadedClassRTI>(true);
  }

  dex::TypeIndex GetTypeIndex() const { return type_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  bool IsReferrersClass() const { return GetLoadKind() == LoadKind::kReferrersClass; }
  bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
  bool IsInBootImage() const { return GetPackedFlag<kFlagIsInBootImage>(); }
  bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); }

  bool MustResolveTypeOnSlowPath() const {
    // Check that this instruction has a slow path.
    LoadKind load_kind = GetLoadKind();
    DCHECK(load_kind != LoadKind::kRuntimeCall);  // kRuntimeCall calls on main path.
    bool must_resolve_type_on_slow_path =
        load_kind == LoadKind::kBssEntry ||
        load_kind == LoadKind::kBssEntryPublic ||
        load_kind == LoadKind::kBssEntryPackage;
    DCHECK(must_resolve_type_on_slow_path || MustGenerateClinitCheck());
    return must_resolve_type_on_slow_path;
  }

  void MarkInBootImage() {
    SetPackedFlag<kFlagIsInBootImage>(true);
  }

  void AddSpecialInput(HInstruction* special_input);

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
    // Zero or one input: the special input, when present (see special_input_ below).
    return ArrayRef<HUserRecord<HInstruction*>>(
        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
  }

  Handle<mirror::Class> GetClass() const {
    return klass_;
  }

  DECLARE_INSTRUCTION(LoadClass);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(LoadClass);

 private:
  static constexpr size_t kFlagNeedsAccessCheck = kNumberOfGenericPackedBits;
  static constexpr size_t kFlagIsInBootImage = kFlagNeedsAccessCheck + 1;
  // Whether this instruction must generate the initialization check.
  // Used for code generation.
  static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInBootImage + 1;
  static constexpr size_t kFieldLoadKind = kFlagGenerateClInitCheck + 1;
  static constexpr size_t kFieldLoadKindSize =
      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
  static constexpr size_t kFlagValidLoadedClassRTI = kFieldLoadKind + kFieldLoadKindSize;
  static constexpr size_t kNumberOfLoadClassPackedBits = kFlagValidLoadedClassRTI + 1;
  // NOTE(review): uses a strict `<` while sibling instructions assert `<=` against
  // kMaxNumberOfPackedBits — confirm the extra spare bit is intentional.
  static_assert(kNumberOfLoadClassPackedBits < kMaxNumberOfPackedBits, "Too many packed fields.");
  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;

  static bool HasTypeReference(LoadKind load_kind) {
    return load_kind == LoadKind::kReferrersClass ||
           load_kind == LoadKind::kBootImageLinkTimePcRelative ||
           load_kind == LoadKind::kBssEntry ||
           load_kind == LoadKind::kBssEntryPublic ||
           load_kind == LoadKind::kBssEntryPackage ||
           load_kind == LoadKind::kRuntimeCall;
  }

  void SetLoadKindInternal(LoadKind load_kind);

  // The special input is the HCurrentMethod for kRuntimeCall or kReferrersClass.
  // For other load kinds it's empty or possibly some architecture-specific instruction
  // for PC-relative loads, i.e. kBssEntry* or kBootImageLinkTimePcRelative.
  HUserRecord<HInstruction*> special_input_;

  // A type index and dex file where the class can be accessed. The dex file can be:
  // - The compiling method's dex file if the class is defined there too.
  // - The compiling method's dex file if the class is referenced there.
  // - The dex file where the class is defined. When the load kind can only be
  //   kBssEntry* or kRuntimeCall, we cannot emit code for this `HLoadClass`.
  const dex::TypeIndex type_index_;
  const DexFile& dex_file_;

  // The resolved class, if known; used e.g. for the loaded-class RTI above.
  Handle<mirror::Class> klass_;
};
7116 std::ostream& operator<<(std::ostream& os, HLoadClass::LoadKind rhs);
7117
7118 // Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
7119 inline void HLoadClass::SetLoadKind(LoadKind load_kind) {
7120 // The load kind should be determined before inserting the instruction to the graph.
7121 DCHECK(GetBlock() == nullptr);
7122 DCHECK(GetEnvironment() == nullptr);
7123 SetPackedField<LoadKindField>(load_kind);
7124 if (load_kind != LoadKind::kRuntimeCall && load_kind != LoadKind::kReferrersClass) {
7125 special_input_ = HUserRecord<HInstruction*>(nullptr);
7126 }
7127 if (!NeedsEnvironment()) {
7128 SetSideEffects(SideEffects::None());
7129 }
7130 }
7131
7132 // Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
7133 inline void HLoadClass::AddSpecialInput(HInstruction* special_input) {
7134 // The special input is used for PC-relative loads on some architectures,
7135 // including literal pool loads, which are PC-relative too.
7136 DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
7137 GetLoadKind() == LoadKind::kBootImageRelRo ||
7138 GetLoadKind() == LoadKind::kBssEntry ||
7139 GetLoadKind() == LoadKind::kBssEntryPublic ||
7140 GetLoadKind() == LoadKind::kBssEntryPackage ||
7141 GetLoadKind() == LoadKind::kJitBootImageAddress) << GetLoadKind();
7142 DCHECK(special_input_.GetInstruction() == nullptr);
7143 special_input_ = HUserRecord<HInstruction*>(special_input);
7144 special_input->AddUseAt(this, 0);
7145 }
7146
// Loads a java.lang.String reference for the given string index.
class HLoadString final : public HInstruction {
 public:
  // Determines how to load the String.
  enum class LoadKind {
    // Use PC-relative boot image String* address that will be known at link time.
    // Used for boot image strings referenced by boot image code.
    kBootImageLinkTimePcRelative,

    // Load from an entry in the .data.bimg.rel.ro using a PC-relative load.
    // Used for boot image strings referenced by apps in AOT-compiled code.
    kBootImageRelRo,

    // Load from an entry in the .bss section using a PC-relative load.
    // Used for strings outside boot image referenced by AOT-compiled app and boot image code.
    kBssEntry,

    // Use a known boot image String* address, embedded in the code by the codegen.
    // Used for boot image strings referenced by apps in JIT-compiled code.
    kJitBootImageAddress,

    // Load from the root table associated with the JIT compiled method.
    kJitTableAddress,

    // Load using a simple runtime call. This is the fall-back load kind when
    // the codegen is unable to use another appropriate kind.
    kRuntimeCall,

    kLast = kRuntimeCall,
  };

  HLoadString(HCurrentMethod* current_method,
              dex::StringIndex string_index,
              const DexFile& dex_file,
              uint32_t dex_pc)
      : HInstruction(kLoadString,
                     DataType::Type::kReference,
                     SideEffectsForArchRuntimeCalls(),
                     dex_pc),
        special_input_(HUserRecord<HInstruction*>(current_method)),
        string_index_(string_index),
        dex_file_(dex_file) {
    // Start with the conservative kRuntimeCall; may be sharpened later via SetLoadKind().
    SetPackedField<LoadKindField>(LoadKind::kRuntimeCall);
  }

  bool IsClonable() const override { return true; }
  bool NeedsBss() const override {
    return GetLoadKind() == LoadKind::kBssEntry;
  }

  void SetLoadKind(LoadKind load_kind);

  LoadKind GetLoadKind() const {
    return GetPackedField<LoadKindField>();
  }

  bool HasPcRelativeLoadKind() const {
    return GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
           GetLoadKind() == LoadKind::kBootImageRelRo ||
           GetLoadKind() == LoadKind::kBssEntry;
  }

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  dex::StringIndex GetStringIndex() const {
    return string_index_;
  }

  Handle<mirror::String> GetString() const {
    return string_;
  }

  void SetString(Handle<mirror::String> str) {
    string_ = str;
  }

  bool CanBeMoved() const override { return true; }

  bool InstructionDataEquals(const HInstruction* other) const override;

  size_t ComputeHashCode() const override { return string_index_.index_; }

  // Will call the runtime if we need to load the string through
  // the dex cache and the string is not guaranteed to be there yet.
  bool NeedsEnvironment() const override {
    LoadKind load_kind = GetLoadKind();
    if (load_kind == LoadKind::kBootImageLinkTimePcRelative ||
        load_kind == LoadKind::kBootImageRelRo ||
        load_kind == LoadKind::kJitBootImageAddress ||
        load_kind == LoadKind::kJitTableAddress) {
      return false;
    }
    return true;
  }

  bool CanBeNull() const override { return false; }
  // Throws exactly when the load may go through the runtime (see NeedsEnvironment()).
  bool CanThrow() const override { return NeedsEnvironment(); }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  void AddSpecialInput(HInstruction* special_input);

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
    // Zero or one input: the special input, when present (see special_input_ below).
    return ArrayRef<HUserRecord<HInstruction*>>(
        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
  }

  DECLARE_INSTRUCTION(LoadString);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(LoadString);

 private:
  static constexpr size_t kFieldLoadKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldLoadKindSize =
      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
  static constexpr size_t kNumberOfLoadStringPackedBits = kFieldLoadKind + kFieldLoadKindSize;
  static_assert(kNumberOfLoadStringPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;

  void SetLoadKindInternal(LoadKind load_kind);

  // The special input is the HCurrentMethod for kRuntimeCall.
  // For other load kinds it's empty or possibly some architecture-specific instruction
  // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
  HUserRecord<HInstruction*> special_input_;

  dex::StringIndex string_index_;
  const DexFile& dex_file_;

  // Cached String object, if known; set via SetString().
  Handle<mirror::String> string_;
};
7283 std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs);
7284
7285 // Note: defined outside class to see operator<<(., HLoadString::LoadKind).
7286 inline void HLoadString::SetLoadKind(LoadKind load_kind) {
7287 // The load kind should be determined before inserting the instruction to the graph.
7288 DCHECK(GetBlock() == nullptr);
7289 DCHECK(GetEnvironment() == nullptr);
7290 DCHECK_EQ(GetLoadKind(), LoadKind::kRuntimeCall);
7291 SetPackedField<LoadKindField>(load_kind);
7292 if (load_kind != LoadKind::kRuntimeCall) {
7293 special_input_ = HUserRecord<HInstruction*>(nullptr);
7294 }
7295 if (!NeedsEnvironment()) {
7296 SetSideEffects(SideEffects::None());
7297 }
7298 }
7299
7300 // Note: defined outside class to see operator<<(., HLoadString::LoadKind).
7301 inline void HLoadString::AddSpecialInput(HInstruction* special_input) {
7302 // The special input is used for PC-relative loads on some architectures,
7303 // including literal pool loads, which are PC-relative too.
7304 DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
7305 GetLoadKind() == LoadKind::kBootImageRelRo ||
7306 GetLoadKind() == LoadKind::kBssEntry ||
7307 GetLoadKind() == LoadKind::kJitBootImageAddress) << GetLoadKind();
7308 // HLoadString::GetInputRecords() returns an empty array at this point,
7309 // so use the GetInputRecords() from the base class to set the input record.
7310 DCHECK(special_input_.GetInstruction() == nullptr);
7311 special_input_ = HUserRecord<HInstruction*>(special_input);
7312 special_input->AddUseAt(this, 0);
7313 }
7314
// Loads a java.lang.invoke.MethodHandle reference for the given method handle
// index. Goes through the runtime, so it needs an environment and can throw.
class HLoadMethodHandle final : public HInstruction {
 public:
  HLoadMethodHandle(HCurrentMethod* current_method,
                    uint16_t method_handle_idx,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HInstruction(kLoadMethodHandle,
                     DataType::Type::kReference,
                     SideEffectsForArchRuntimeCalls(),
                     dex_pc),
        special_input_(HUserRecord<HInstruction*>(current_method)),
        method_handle_idx_(method_handle_idx),
        dex_file_(dex_file) {
  }

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
    // Zero or one input: the special input, when present (see special_input_ below).
    return ArrayRef<HUserRecord<HInstruction*>>(
        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
  }

  bool IsClonable() const override { return true; }

  uint16_t GetMethodHandleIndex() const { return method_handle_idx_; }

  const DexFile& GetDexFile() const { return dex_file_; }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    // The runtime call can trigger GC.
    return SideEffects::CanTriggerGC();
  }

  bool CanThrow() const override { return true; }

  bool NeedsEnvironment() const override { return true; }

  DECLARE_INSTRUCTION(LoadMethodHandle);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(LoadMethodHandle);

 private:
  // The special input is the HCurrentMethod for kRuntimeCall.
  HUserRecord<HInstruction*> special_input_;

  const uint16_t method_handle_idx_;
  const DexFile& dex_file_;
};
7362
// Loads a java.lang.invoke.MethodType described by the proto index in
// `dex_file`. Resolution happens at runtime (the instruction can throw, needs
// an environment and can trigger GC); the only input is the HCurrentMethod.
class HLoadMethodType final : public HInstruction {
 public:
  HLoadMethodType(HCurrentMethod* current_method,
                  dex::ProtoIndex proto_index,
                  const DexFile& dex_file,
                  uint32_t dex_pc)
      : HInstruction(kLoadMethodType,
                     DataType::Type::kReference,
                     SideEffectsForArchRuntimeCalls(),
                     dex_pc),
        special_input_(HUserRecord<HInstruction*>(current_method)),
        proto_index_(proto_index),
        dex_file_(dex_file) {
  }

  using HInstruction::GetInputRecords; // Keep the const version visible.
  // Expose the special input as the instruction's only input record,
  // or no inputs at all if the special input has been cleared.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
    return ArrayRef<HUserRecord<HInstruction*>>(
        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
  }

  bool IsClonable() const override { return true; }

  dex::ProtoIndex GetProtoIndex() const { return proto_index_; }

  const DexFile& GetDexFile() const { return dex_file_; }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  bool CanThrow() const override { return true; }

  bool NeedsEnvironment() const override { return true; }

  DECLARE_INSTRUCTION(LoadMethodType);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(LoadMethodType);

 private:
  // The special input is the HCurrentMethod for kRuntimeCall.
  HUserRecord<HInstruction*> special_input_;

  // Proto index identifying the method type in the dex file.
  const dex::ProtoIndex proto_index_;
  const DexFile& dex_file_;
};
7410
7411 /**
7412 * Performs an initialization check on its Class object input.
7413 */
7414 class HClinitCheck final : public HExpression<1> {
7415 public:
7416 HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
7417 : HExpression(
7418 kClinitCheck,
7419 DataType::Type::kReference,
7420 SideEffects::AllExceptGCDependency(), // Assume write/read on all fields/arrays.
7421 dex_pc) {
7422 SetRawInputAt(0, constant);
7423 }
7424 // TODO: Make ClinitCheck clonable.
7425 bool CanBeMoved() const override { return true; }
7426 bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
7427 return true;
7428 }
7429
7430 bool NeedsEnvironment() const override {
7431 // May call runtime to initialize the class.
7432 return true;
7433 }
7434
7435 bool CanThrow() const override { return true; }
7436
7437 HLoadClass* GetLoadClass() const {
7438 DCHECK(InputAt(0)->IsLoadClass());
7439 return InputAt(0)->AsLoadClass();
7440 }
7441
7442 DECLARE_INSTRUCTION(ClinitCheck);
7443
7444
7445 protected:
7446 DEFAULT_COPY_CONSTRUCTOR(ClinitCheck);
7447 };
7448
// Read of a resolved static field. Input 0 is the declaring class
// (e.g. an HLoadClass); the value read is this instruction's result.
class HStaticFieldGet final : public HExpression<1> {
 public:
  HStaticFieldGet(HInstruction* cls,
                  ArtField* field,
                  DataType::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  uint32_t dex_pc)
      : HExpression(kStaticFieldGet,
                    field_type,
                    SideEffects::FieldReadOfType(field_type, is_volatile),
                    dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    SetRawInputAt(0, cls);
  }


  bool IsClonable() const override { return true; }
  // Volatile reads must not be moved (they carry ordering semantics).
  bool CanBeMoved() const override { return !IsVolatile(); }

  // Two gets are equal if they read the same offset; the holder class is
  // compared via the class input.
  bool InstructionDataEquals(const HInstruction* other) const override {
    const HStaticFieldGet* other_get = other->AsStaticFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  size_t ComputeHashCode() const override {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  bool IsFieldAccess() const override { return true; }
  const FieldInfo& GetFieldInfo() const override { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  // Changes the result type; only integral-to-integral retyping of the
  // same size is allowed.
  void SetType(DataType::Type new_type) {
    DCHECK(DataType::IsIntegralType(GetType()));
    DCHECK(DataType::IsIntegralType(new_type));
    DCHECK_EQ(DataType::Size(GetType()), DataType::Size(new_type));
    SetPackedField<TypeField>(new_type);
  }

  DECLARE_INSTRUCTION(StaticFieldGet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(StaticFieldGet);

 private:
  const FieldInfo field_info_;
};
7508
// Write of a resolved static field. Input 0 is the declaring class,
// input 1 is the value to store.
class HStaticFieldSet final : public HExpression<2> {
 public:
  HStaticFieldSet(HInstruction* cls,
                  HInstruction* value,
                  ArtField* field,
                  DataType::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  uint32_t dex_pc)
      : HExpression(kStaticFieldSet,
                    SideEffects::FieldWriteOfType(field_type, is_volatile),
                    dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    // Start with the most conservative assumptions; later phases may relax them.
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetPackedField<WriteBarrierKindField>(WriteBarrierKind::kEmitWithNullCheck);
    SetRawInputAt(0, cls);
    SetRawInputAt(1, value);
  }

  bool IsClonable() const override { return true; }
  bool IsFieldAccess() const override { return true; }
  const FieldInfo& GetFieldInfo() const override { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  WriteBarrierKind GetWriteBarrierKind() { return GetPackedField<WriteBarrierKindField>(); }
  // Relaxes the write barrier; moving back to the initial (most conservative)
  // kind is not allowed.
  void SetWriteBarrierKind(WriteBarrierKind kind) {
    DCHECK(kind != WriteBarrierKind::kEmitWithNullCheck)
        << "We shouldn't go back to the original value.";
    SetPackedField<WriteBarrierKindField>(kind);
  }

  DECLARE_INSTRUCTION(StaticFieldSet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(StaticFieldSet);

 private:
  // Packed-field layout: value-can-be-null flag followed by the barrier kind.
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kWriteBarrierKind = kFlagValueCanBeNull + 1;
  static constexpr size_t kWriteBarrierKindSize =
      MinimumBitsToStore(static_cast<size_t>(WriteBarrierKind::kLast));
  static constexpr size_t kNumberOfStaticFieldSetPackedBits =
      kWriteBarrierKind + kWriteBarrierKindSize;
  static_assert(kNumberOfStaticFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;
  using WriteBarrierKindField =
      BitField<WriteBarrierKind, kWriteBarrierKind, kWriteBarrierKindSize>;
};
7574
7575 class HStringBuilderAppend final : public HVariableInputSizeInstruction {
7576 public:
7577 HStringBuilderAppend(HIntConstant* format,
7578 uint32_t number_of_arguments,
7579 bool has_fp_args,
7580 ArenaAllocator* allocator,
7581 uint32_t dex_pc)
7582 : HVariableInputSizeInstruction(
7583 kStringBuilderAppend,
7584 DataType::Type::kReference,
7585 SideEffects::CanTriggerGC().Union(
7586 // The runtime call may read memory from inputs. It never writes outside
7587 // of the newly allocated result object or newly allocated helper objects,
7588 // except for float/double arguments where we reuse thread-local helper objects.
7589 has_fp_args ? SideEffects::AllWritesAndReads() : SideEffects::AllReads()),
7590 dex_pc,
7591 allocator,
7592 number_of_arguments + /* format */ 1u,
7593 kArenaAllocInvokeInputs) {
7594 DCHECK_GE(number_of_arguments, 1u); // There must be something to append.
7595 SetRawInputAt(FormatIndex(), format);
7596 }
7597
7598 void SetArgumentAt(size_t index, HInstruction* argument) {
7599 DCHECK_LE(index, GetNumberOfArguments());
7600 SetRawInputAt(index, argument);
7601 }
7602
7603 // Return the number of arguments, excluding the format.
7604 size_t GetNumberOfArguments() const {
7605 DCHECK_GE(InputCount(), 1u);
7606 return InputCount() - 1u;
7607 }
7608
7609 size_t FormatIndex() const {
7610 return GetNumberOfArguments();
7611 }
7612
7613 HIntConstant* GetFormat() {
7614 return InputAt(FormatIndex())->AsIntConstant();
7615 }
7616
7617 bool NeedsEnvironment() const override { return true; }
7618
7619 bool CanThrow() const override { return true; }
7620
7621 bool CanBeNull() const override { return false; }
7622
7623 DECLARE_INSTRUCTION(StringBuilderAppend);
7624
7625 protected:
7626 DEFAULT_COPY_CONSTRUCTOR(StringBuilderAppend);
7627 };
7628
// Read of an instance field that could not be resolved at compile time;
// the field is identified only by its dex field index. The access goes
// through the runtime (needs environment, can throw).
class HUnresolvedInstanceFieldGet final : public HExpression<1> {
 public:
  HUnresolvedInstanceFieldGet(HInstruction* obj,
                              DataType::Type field_type,
                              uint32_t field_index,
                              uint32_t dex_pc)
      : HExpression(kUnresolvedInstanceFieldGet,
                    field_type,
                    SideEffects::AllExceptGCDependency(),
                    dex_pc),
        field_index_(field_index) {
    SetRawInputAt(0, obj);
  }

  bool IsClonable() const override { return true; }
  bool NeedsEnvironment() const override { return true; }
  bool CanThrow() const override { return true; }

  // The field type doubles as the instruction's result type.
  DataType::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(UnresolvedInstanceFieldGet);

 private:
  const uint32_t field_index_;
};
7658
7659 class HUnresolvedInstanceFieldSet final : public HExpression<2> {
7660 public:
7661 HUnresolvedInstanceFieldSet(HInstruction* obj,
7662 HInstruction* value,
7663 DataType::Type field_type,
7664 uint32_t field_index,
7665 uint32_t dex_pc)
7666 : HExpression(kUnresolvedInstanceFieldSet, SideEffects::AllExceptGCDependency(), dex_pc),
7667 field_index_(field_index) {
7668 SetPackedField<FieldTypeField>(field_type);
7669 DCHECK_EQ(DataType::Kind(field_type), DataType::Kind(value->GetType()));
7670 SetRawInputAt(0, obj);
7671 SetRawInputAt(1, value);
7672 }
7673
7674 bool IsClonable() const override { return true; }
7675 bool NeedsEnvironment() const override { return true; }
7676 bool CanThrow() const override { return true; }
7677
7678 DataType::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
7679 uint32_t GetFieldIndex() const { return field_index_; }
7680
7681 DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);
7682
7683 protected:
7684 DEFAULT_COPY_CONSTRUCTOR(UnresolvedInstanceFieldSet);
7685
7686 private:
7687 static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
7688 static constexpr size_t kFieldFieldTypeSize =
7689 MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
7690 static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
7691 kFieldFieldType + kFieldFieldTypeSize;
7692 static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
7693 "Too many packed fields.");
7694 using FieldTypeField = BitField<DataType::Type, kFieldFieldType, kFieldFieldTypeSize>;
7695
7696 const uint32_t field_index_;
7697 };
7698
// Read of a static field that could not be resolved at compile time;
// the field is identified only by its dex field index. The access goes
// through the runtime (needs environment, can throw). No inputs.
class HUnresolvedStaticFieldGet final : public HExpression<0> {
 public:
  HUnresolvedStaticFieldGet(DataType::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HExpression(kUnresolvedStaticFieldGet,
                    field_type,
                    SideEffects::AllExceptGCDependency(),
                    dex_pc),
        field_index_(field_index) {
  }

  bool IsClonable() const override { return true; }
  bool NeedsEnvironment() const override { return true; }
  bool CanThrow() const override { return true; }

  // The field type doubles as the instruction's result type.
  DataType::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(UnresolvedStaticFieldGet);

 private:
  const uint32_t field_index_;
};
7726
// Write to a static field that could not be resolved at compile time;
// the field is identified only by its dex field index. The only input is
// the value to store. The access goes through the runtime (needs
// environment, can throw).
class HUnresolvedStaticFieldSet final : public HExpression<1> {
 public:
  HUnresolvedStaticFieldSet(HInstruction* value,
                            DataType::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HExpression(kUnresolvedStaticFieldSet, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
    SetPackedField<FieldTypeField>(field_type);
    DCHECK_EQ(DataType::Kind(field_type), DataType::Kind(value->GetType()));
    SetRawInputAt(0, value);
  }

  bool IsClonable() const override { return true; }
  bool NeedsEnvironment() const override { return true; }
  bool CanThrow() const override { return true; }

  // The set has no result, so the field type is kept in a packed field
  // rather than in the instruction type.
  DataType::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(UnresolvedStaticFieldSet);

 private:
  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
      kFieldFieldType + kFieldFieldTypeSize;
  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using FieldTypeField = BitField<DataType::Type, kFieldFieldType, kFieldFieldTypeSize>;

  const uint32_t field_index_;
};
7764
// Implement the move-exception DEX instruction.
// Produces the pending exception reference; the result is never null
// (this instruction is only emitted where an exception is known to be set).
class HLoadException final : public HExpression<0> {
 public:
  explicit HLoadException(uint32_t dex_pc = kNoDexPc)
      : HExpression(kLoadException, DataType::Type::kReference, SideEffects::None(), dex_pc) {
  }

  bool CanBeNull() const override { return false; }

  DECLARE_INSTRUCTION(LoadException);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(LoadException);
};
7779
// Implicit part of move-exception which clears thread-local exception storage.
// Must not be removed because the runtime expects the TLS to get cleared.
// Modeled with AllWrites side effects so it is never reordered or eliminated.
class HClearException final : public HExpression<0> {
 public:
  explicit HClearException(uint32_t dex_pc = kNoDexPc)
      : HExpression(kClearException, SideEffects::AllWrites(), dex_pc) {
  }

  DECLARE_INSTRUCTION(ClearException);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ClearException);
};
7793
// Throws the exception object given as its single input. This ends the
// basic block (IsControlFlow) and always throws (AlwaysThrows).
class HThrow final : public HExpression<1> {
 public:
  HThrow(HInstruction* exception, uint32_t dex_pc)
      : HExpression(kThrow, SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, exception);
  }

  bool IsControlFlow() const override { return true; }

  bool NeedsEnvironment() const override { return true; }

  bool CanThrow() const override { return true; }

  bool AlwaysThrows() const override { return true; }

  DECLARE_INSTRUCTION(Throw);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Throw);
};
7814
7815 /**
7816 * Implementation strategies for the code generator of a HInstanceOf
7817 * or `HCheckCast`.
7818 */
7819 enum class TypeCheckKind { // private marker to avoid generate-operator-out.py from processing.
7820 kUnresolvedCheck, // Check against an unresolved type.
7821 kExactCheck, // Can do a single class compare.
7822 kClassHierarchyCheck, // Can just walk the super class chain.
7823 kAbstractClassCheck, // Can just walk the super class chain, starting one up.
7824 kInterfaceCheck, // No optimization yet when checking against an interface.
7825 kArrayObjectCheck, // Can just check if the array is not primitive.
7826 kArrayCheck, // No optimization yet when checking against a generic array.
7827 kBitstringCheck, // Compare the type check bitstring.
7828 kLast = kArrayCheck
7829 };
7830
7831 std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs);
7832
// Note: HTypeCheckInstruction is just a helper class, not an abstract instruction with an
// `IsTypeCheckInstruction()`. (New virtual methods in the HInstruction class have a high cost.)
//
// Shared implementation for HInstanceOf and HCheckCast. Input layout:
//   0: the object being checked,
//   1: the target class (HLoadClass), or a null constant for kBitstringCheck,
//   2, 3: bitstring path-to-root and mask (kBitstringCheck only).
class HTypeCheckInstruction : public HVariableInputSizeInstruction {
 public:
  HTypeCheckInstruction(InstructionKind kind,
                        DataType::Type type,
                        HInstruction* object,
                        HInstruction* target_class_or_null,
                        TypeCheckKind check_kind,
                        Handle<mirror::Class> klass,
                        uint32_t dex_pc,
                        ArenaAllocator* allocator,
                        HIntConstant* bitstring_path_to_root,
                        HIntConstant* bitstring_mask,
                        SideEffects side_effects)
      : HVariableInputSizeInstruction(
          kind,
          type,
          side_effects,
          dex_pc,
          allocator,
          /* number_of_inputs= */ check_kind == TypeCheckKind::kBitstringCheck ? 4u : 2u,
          kArenaAllocTypeCheckInputs),
        klass_(klass) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetPackedFlag<kFlagValidTargetClassRTI>(false);
    SetRawInputAt(0, object);
    SetRawInputAt(1, target_class_or_null);
    // The bitstring inputs are present if and only if this is a bitstring check.
    DCHECK_EQ(check_kind == TypeCheckKind::kBitstringCheck, bitstring_path_to_root != nullptr);
    DCHECK_EQ(check_kind == TypeCheckKind::kBitstringCheck, bitstring_mask != nullptr);
    if (check_kind == TypeCheckKind::kBitstringCheck) {
      DCHECK(target_class_or_null->IsNullConstant());
      SetRawInputAt(2, bitstring_path_to_root);
      SetRawInputAt(3, bitstring_mask);
    } else {
      DCHECK(target_class_or_null->IsLoadClass());
    }
  }

  // Only valid for non-bitstring checks, where input 1 is the HLoadClass.
  HLoadClass* GetTargetClass() const {
    DCHECK_NE(GetTypeCheckKind(), TypeCheckKind::kBitstringCheck);
    HInstruction* load_class = InputAt(1);
    DCHECK(load_class->IsLoadClass());
    return load_class->AsLoadClass();
  }

  // Only valid for bitstring checks (input 2).
  uint32_t GetBitstringPathToRoot() const {
    DCHECK_EQ(GetTypeCheckKind(), TypeCheckKind::kBitstringCheck);
    HInstruction* path_to_root = InputAt(2);
    DCHECK(path_to_root->IsIntConstant());
    return static_cast<uint32_t>(path_to_root->AsIntConstant()->GetValue());
  }

  // Only valid for bitstring checks (input 3).
  uint32_t GetBitstringMask() const {
    DCHECK_EQ(GetTypeCheckKind(), TypeCheckKind::kBitstringCheck);
    HInstruction* mask = InputAt(3);
    DCHECK(mask->IsIntConstant());
    return static_cast<uint32_t>(mask->AsIntConstant()->GetValue());
  }

  bool IsClonable() const override { return true; }
  bool CanBeMoved() const override { return true; }

  // Equality is determined by the packed fields (check kind and flags);
  // the checked object/class are compared through the inputs.
  bool InstructionDataEquals(const HInstruction* other) const override {
    DCHECK(other->IsInstanceOf() || other->IsCheckCast()) << other->DebugName();
    return GetPackedFields() == down_cast<const HTypeCheckInstruction*>(other)->GetPackedFields();
  }

  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  ReferenceTypeInfo GetTargetClassRTI() {
    if (GetPackedFlag<kFlagValidTargetClassRTI>()) {
      // Note: The is_exact flag from the return value should not be used.
      return ReferenceTypeInfo::CreateUnchecked(klass_, /* is_exact= */ true);
    } else {
      return ReferenceTypeInfo::CreateInvalid();
    }
  }

  // Target class RTI is marked as valid by RTP if the klass_ is admissible.
  void SetValidTargetClassRTI() {
    DCHECK(klass_ != nullptr);
    SetPackedFlag<kFlagValidTargetClassRTI>(true);
  }

  Handle<mirror::Class> GetClass() const {
    return klass_;
  }

 protected:
  DEFAULT_COPY_CONSTRUCTOR(TypeCheckInstruction);

 private:
  // Packed-field layout: check kind, then the two boolean flags.
  static constexpr size_t kFieldTypeCheckKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kFlagValidTargetClassRTI = kFlagMustDoNullCheck + 1;
  static constexpr size_t kNumberOfInstanceOfPackedBits = kFlagValidTargetClassRTI + 1;
  static_assert(kNumberOfInstanceOfPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

  // The resolved class the check is performed against (may be null).
  Handle<mirror::Class> klass_;
};
7941
// Implements the instance-of DEX instruction: produces a kBool result
// telling whether the object input is an instance of the target class.
// See HTypeCheckInstruction for the input layout.
class HInstanceOf final : public HTypeCheckInstruction {
 public:
  HInstanceOf(HInstruction* object,
              HInstruction* target_class_or_null,
              TypeCheckKind check_kind,
              Handle<mirror::Class> klass,
              uint32_t dex_pc,
              ArenaAllocator* allocator,
              HIntConstant* bitstring_path_to_root,
              HIntConstant* bitstring_mask)
      : HTypeCheckInstruction(kInstanceOf,
                              DataType::Type::kBool,
                              object,
                              target_class_or_null,
                              check_kind,
                              klass,
                              dex_pc,
                              allocator,
                              bitstring_path_to_root,
                              bitstring_mask,
                              SideEffectsForArchRuntimeCalls(check_kind)) {}

  bool IsClonable() const override { return true; }

  // An environment is only needed when the check may fall back to the runtime.
  bool NeedsEnvironment() const override {
    return CanCallRuntime(GetTypeCheckKind());
  }

  // Only an exact check can always be done inline without a runtime call.
  static bool CanCallRuntime(TypeCheckKind check_kind) {
    // TODO: Re-evaluate now that mips codegen has been removed.
    return check_kind != TypeCheckKind::kExactCheck;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
    return CanCallRuntime(check_kind) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(InstanceOf);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(InstanceOf);
};
7984
7985 class HBoundType final : public HExpression<1> {
7986 public:
7987 explicit HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
7988 : HExpression(kBoundType, DataType::Type::kReference, SideEffects::None(), dex_pc),
7989 upper_bound_(ReferenceTypeInfo::CreateInvalid()) {
7990 SetPackedFlag<kFlagUpperCanBeNull>(true);
7991 SetPackedFlag<kFlagCanBeNull>(true);
7992 DCHECK_EQ(input->GetType(), DataType::Type::kReference);
7993 SetRawInputAt(0, input);
7994 }
7995
7996 bool InstructionDataEquals(const HInstruction* other) const override;
7997 bool IsClonable() const override { return true; }
7998
7999 // {Get,Set}Upper* should only be used in reference type propagation.
8000 const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
8001 bool GetUpperCanBeNull() const { return GetPackedFlag<kFlagUpperCanBeNull>(); }
8002 void SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null);
8003
8004 void SetCanBeNull(bool can_be_null) {
8005 DCHECK(GetUpperCanBeNull() || !can_be_null);
8006 SetPackedFlag<kFlagCanBeNull>(can_be_null);
8007 }
8008
8009 bool CanBeNull() const override { return GetPackedFlag<kFlagCanBeNull>(); }
8010
8011 DECLARE_INSTRUCTION(BoundType);
8012
8013 protected:
8014 DEFAULT_COPY_CONSTRUCTOR(BoundType);
8015
8016 private:
8017 // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
8018 // is false then CanBeNull() cannot be true).
8019 static constexpr size_t kFlagUpperCanBeNull = kNumberOfGenericPackedBits;
8020 static constexpr size_t kFlagCanBeNull = kFlagUpperCanBeNull + 1;
8021 static constexpr size_t kNumberOfBoundTypePackedBits = kFlagCanBeNull + 1;
8022 static_assert(kNumberOfBoundTypePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
8023
8024 // Encodes the most upper class that this instruction can have. In other words
8025 // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
8026 // It is used to bound the type in cases like:
8027 // if (x instanceof ClassX) {
8028 // // uper_bound_ will be ClassX
8029 // }
8030 ReferenceTypeInfo upper_bound_;
8031 };
8032
// Implements the check-cast DEX instruction: throws a ClassCastException at
// runtime if the object input is not an instance of the target class. Has no
// result (kVoid). See HTypeCheckInstruction for the input layout.
class HCheckCast final : public HTypeCheckInstruction {
 public:
  HCheckCast(HInstruction* object,
             HInstruction* target_class_or_null,
             TypeCheckKind check_kind,
             Handle<mirror::Class> klass,
             uint32_t dex_pc,
             ArenaAllocator* allocator,
             HIntConstant* bitstring_path_to_root,
             HIntConstant* bitstring_mask)
      : HTypeCheckInstruction(kCheckCast,
                              DataType::Type::kVoid,
                              object,
                              target_class_or_null,
                              check_kind,
                              klass,
                              dex_pc,
                              allocator,
                              bitstring_path_to_root,
                              bitstring_mask,
                              SideEffects::CanTriggerGC()) {}

  bool IsClonable() const override { return true; }
  bool NeedsEnvironment() const override {
    // Instruction may throw a CheckCastError.
    return true;
  }

  bool CanThrow() const override { return true; }

  DECLARE_INSTRUCTION(CheckCast);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(CheckCast);
};
8068
8069 /**
8070 * @brief Memory barrier types (see "The JSR-133 Cookbook for Compiler Writers").
8071 * @details We define the combined barrier types that are actually required
8072 * by the Java Memory Model, rather than using exactly the terminology from
8073 * the JSR-133 cookbook. These should, in many cases, be replaced by acquire/release
8074 * primitives. Note that the JSR-133 cookbook generally does not deal with
8075 * store atomicity issues, and the recipes there are not always entirely sufficient.
8076 * The current recipe is as follows:
8077 * -# Use AnyStore ~= (LoadStore | StoreStore) ~= release barrier before volatile store.
8078 * -# Use AnyAny barrier after volatile store. (StoreLoad is as expensive.)
8079 * -# Use LoadAny barrier ~= (LoadLoad | LoadStore) ~= acquire barrier after each volatile load.
8080 * -# Use StoreStore barrier after all stores but before return from any constructor whose
8081 * class has final fields.
8082 * -# Use NTStoreStore to order non-temporal stores with respect to all later
8083 * store-to-memory instructions. Only generated together with non-temporal stores.
8084 */
enum MemBarrierKind {
  kAnyStore,      // ~ release barrier (LoadStore | StoreStore) before a volatile store.
  kLoadAny,       // ~ acquire barrier (LoadLoad | LoadStore) after a volatile load.
  kStoreStore,    // After stores, before constructor return when there are final fields.
  kAnyAny,        // Full barrier after a volatile store.
  kNTStoreStore,  // Orders non-temporal stores vs. later store-to-memory instructions.
  kLastBarrierKind = kNTStoreStore
};
std::ostream& operator<<(std::ostream& os, MemBarrierKind kind);
8094
// Explicit memory barrier of the given kind; see MemBarrierKind above.
// Modeled as reading/writing everything so it is never reordered or removed.
class HMemoryBarrier final : public HExpression<0> {
 public:
  explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
      : HExpression(kMemoryBarrier,
                    SideEffects::AllWritesAndReads(), // Assume write/read on all fields/arrays.
                    dex_pc) {
    SetPackedField<BarrierKindField>(barrier_kind);
  }

  bool IsClonable() const override { return true; }

  MemBarrierKind GetBarrierKind() { return GetPackedField<BarrierKindField>(); }

  DECLARE_INSTRUCTION(MemoryBarrier);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(MemoryBarrier);

 private:
  // The barrier kind is stored in the packed fields.
  static constexpr size_t kFieldBarrierKind = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldBarrierKindSize =
      MinimumBitsToStore(static_cast<size_t>(kLastBarrierKind));
  static constexpr size_t kNumberOfMemoryBarrierPackedBits =
      kFieldBarrierKind + kFieldBarrierKindSize;
  static_assert(kNumberOfMemoryBarrierPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using BarrierKindField = BitField<MemBarrierKind, kFieldBarrierKind, kFieldBarrierKindSize>;
};
8123
8124 // A constructor fence orders all prior stores to fields that could be accessed via a final field of
8125 // the specified object(s), with respect to any subsequent store that might "publish"
8126 // (i.e. make visible) the specified object to another thread.
8127 //
8128 // JLS 17.5.1 "Semantics of final fields" states that a freeze action happens
8129 // for all final fields (that were set) at the end of the invoked constructor.
8130 //
8131 // The constructor fence models the freeze actions for the final fields of an object
8132 // being constructed (semantically at the end of the constructor). Constructor fences
8133 // have a per-object affinity; two separate objects being constructed get two separate
8134 // constructor fences.
8135 //
8136 // (Note: that if calling a super-constructor or forwarding to another constructor,
8137 // the freezes would happen at the end of *that* constructor being invoked).
8138 //
8139 // The memory model guarantees that when the object being constructed is "published" after
8140 // constructor completion (i.e. escapes the current thread via a store), then any final field
8141 // writes must be observable on other threads (once they observe that publication).
8142 //
8143 // Further, anything written before the freeze, and read by dereferencing through the final field,
8144 // must also be visible (so final object field could itself have an object with non-final fields;
8145 // yet the freeze must also extend to them).
8146 //
8147 // Constructor example:
8148 //
8149 // class HasFinal {
8150 // final int field; Optimizing IR for <init>()V:
8151 // HasFinal() {
8152 // field = 123; HInstanceFieldSet(this, HasFinal.field, 123)
8153 // // freeze(this.field); HConstructorFence(this)
8154 // } HReturn
8155 // }
8156 //
8157 // HConstructorFence can serve double duty as a fence for new-instance/new-array allocations of
8158 // already-initialized classes; in that case the allocation must act as a "default-initializer"
8159 // of the object which effectively writes the class pointer "final field".
8160 //
// For example, we can model default-initialization as roughly the equivalent of the following:
8162 //
8163 // class Object {
8164 // private final Class header;
8165 // }
8166 //
8167 // Java code: Optimizing IR:
8168 //
8169 // T new_instance<T>() {
8170 // Object obj = allocate_memory(T.class.size); obj = HInvoke(art_quick_alloc_object, T)
8171 // obj.header = T.class; // header write is done by above call.
8172 // // freeze(obj.header) HConstructorFence(obj)
8173 // return (T)obj;
8174 // }
8175 //
8176 // See also:
8177 // * DexCompilationUnit::RequiresConstructorBarrier
8178 // * QuasiAtomic::ThreadFenceForConstructor
8179 //
class HConstructorFence final : public HVariableInputSizeInstruction {
  // A fence has variable inputs because the inputs can be removed
  // after the prepare_for_register_allocation phase.
  // (TODO: In the future a fence could freeze multiple objects
  // after merging two fences together.)
 public:
  // `fence_object` is the reference that needs to be protected for correct publication.
  //
  // It makes sense in the following situations:
  // * <init> constructors, it's the "this" parameter (i.e. HParameterValue, s.t. IsThis() == true).
  // * new-instance-like instructions, it's the return value (i.e. HNewInstance).
  //
  // After construction the `fence_object` becomes the 0th input.
  // This is not an input in a real sense, but just a convenient place to stash the information
  // about the associated object.
  HConstructorFence(HInstruction* fence_object,
                    uint32_t dex_pc,
                    ArenaAllocator* allocator)
      // We strongly suspect there is not a more accurate way to describe the fine-grained reordering
      // constraints described in the class header. We claim that these SideEffects constraints
      // enforce a superset of the real constraints.
      //
      // The ordering described above is conservatively modeled with SideEffects as follows:
      //
      // * To prevent reordering of the publication stores:
      // ----> "Reads of objects" is the initial SideEffect.
      // * For every primitive final field store in the constructor:
      // ----> Union that field's type as a read (e.g. "Read of T") into the SideEffect.
      // * If there are any stores to reference final fields in the constructor:
      // ----> Use a more conservative "AllReads" SideEffect because any stores to any references
      //       that are reachable from `fence_object` also need to be prevented for reordering
      //       (and we do not want to do alias analysis to figure out what those stores are).
      //
      // In the implementation, this initially starts out as an "all reads" side effect; this is an
      // even more conservative approach than the one described above, and prevents all of the
      // above reordering without analyzing any of the instructions in the constructor.
      //
      // If in a later phase we discover that there are no writes to reference final fields,
      // we can refine the side effect to a smaller set of type reads (see above constraints).
      : HVariableInputSizeInstruction(kConstructorFence,
                                      SideEffects::AllReads(),
                                      dex_pc,
                                      allocator,
                                      /* number_of_inputs= */ 1,
                                      kArenaAllocConstructorFenceInputs) {
    DCHECK(fence_object != nullptr);
    SetRawInputAt(0, fence_object);
  }

  // The object associated with this constructor fence.
  //
  // (Note: This will be null after the prepare_for_register_allocation phase,
  // as all constructor fence inputs are removed there).
  HInstruction* GetFenceObject() const {
    return InputAt(0);
  }

  // Find all the HConstructorFence uses (`fence_use`) for `this` and:
  // - Delete `fence_use` from `this`'s use list.
  // - Delete `this` from `fence_use`'s inputs list.
  // - If the `fence_use` is dead, remove it from the graph.
  //
  // A fence is considered dead once it no longer has any uses
  // and all of the inputs are dead.
  //
  // This must *not* be called during/after prepare_for_register_allocation,
  // because that removes all the inputs to the fences but the fence is actually
  // still considered live.
  //
  // Returns how many HConstructorFence instructions were removed from graph.
  static size_t RemoveConstructorFences(HInstruction* instruction);

  // Combine all inputs of `this` and `other` instruction and remove
  // `other` from the graph.
  //
  // Inputs are unique after the merge.
  //
  // Requirement: `this` must not be the same as `other`.
  void Merge(HConstructorFence* other);

  // Check if this constructor fence is protecting
  // an HNewInstance or HNewArray that is also the immediate
  // predecessor of `this`.
  //
  // If `ignore_inputs` is true, then the immediate predecessor doesn't need
  // to be one of the inputs of `this`.
  //
  // Returns the associated HNewArray or HNewInstance,
  // or null otherwise.
  HInstruction* GetAssociatedAllocation(bool ignore_inputs = false);

  DECLARE_INSTRUCTION(ConstructorFence);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ConstructorFence);
};
8276
// Implements the monitor-enter / monitor-exit operations on an object
// (used for `synchronized` blocks and methods).
class HMonitorOperation final : public HExpression<1> {
 public:
  // Which half of the synchronized region this instruction implements.
  enum class OperationKind {
    kEnter,
    kExit,
    kLast = kExit
  };

  HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
      : HExpression(kMonitorOperation,
                    SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
                    dex_pc) {
    SetPackedField<OperationKindField>(kind);
    SetRawInputAt(0, object);
  }

  // Instruction may go into runtime, so we need an environment.
  bool NeedsEnvironment() const override { return true; }

  bool CanThrow() const override {
    // Verifier guarantees that monitor-exit cannot throw.
    // This is important because it allows the HGraphBuilder to remove
    // a dead throw-catch loop generated for `synchronized` blocks/methods.
    return IsEnter();
  }

  OperationKind GetOperationKind() const { return GetPackedField<OperationKindField>(); }
  bool IsEnter() const { return GetOperationKind() == OperationKind::kEnter; }

  DECLARE_INSTRUCTION(MonitorOperation);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(MonitorOperation);

 private:
  // The operation kind is stored in the instruction's packed bit field,
  // in the bits directly after the generic HInstruction bits.
  static constexpr size_t kFieldOperationKind = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldOperationKindSize =
      MinimumBitsToStore(static_cast<size_t>(OperationKind::kLast));
  static constexpr size_t kNumberOfMonitorOperationPackedBits =
      kFieldOperationKind + kFieldOperationKindSize;
  static_assert(kNumberOfMonitorOperationPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using OperationKindField = BitField<OperationKind, kFieldOperationKind, kFieldOperationKindSize>;
};
8321
// Branch-free selection between two values: the result is `true_value` when
// `condition` holds and `false_value` otherwise. Codegens may lower this to a
// conditional move.
class HSelect final : public HExpression<3> {
 public:
  HSelect(HInstruction* condition,
          HInstruction* true_value,
          HInstruction* false_value,
          uint32_t dex_pc)
      : HExpression(kSelect, HPhi::ToPhiType(true_value->GetType()), SideEffects::None(), dex_pc) {
    // Both candidate values must agree on type after phi-type normalization.
    DCHECK_EQ(HPhi::ToPhiType(true_value->GetType()), HPhi::ToPhiType(false_value->GetType()));

    // First input must be `true_value` or `false_value` to allow codegens to
    // use the SameAsFirstInput allocation policy. We make it `false_value`, so
    // that architectures which implement HSelect as a conditional move also
    // will not need to invert the condition.
    SetRawInputAt(0, false_value);
    SetRawInputAt(1, true_value);
    SetRawInputAt(2, condition);
  }

  bool IsClonable() const override { return true; }
  HInstruction* GetFalseValue() const { return InputAt(0); }
  HInstruction* GetTrueValue() const { return InputAt(1); }
  HInstruction* GetCondition() const { return InputAt(2); }

  // No side effects: a select can be freely moved and deduplicated.
  bool CanBeMoved() const override { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
    return true;
  }

  // The result may be null if either candidate value may be null.
  bool CanBeNull() const override {
    return GetTrueValue()->CanBeNull() || GetFalseValue()->CanBeNull();
  }

  DECLARE_INSTRUCTION(Select);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Select);
};
8359
// A single source-to-destination move, as recorded in an HParallelMove and
// processed by the parallel move resolver.
class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> {
 public:
  MoveOperands(Location source,
               Location destination,
               DataType::Type type,
               HInstruction* instruction)
      : source_(source), destination_(destination), type_(type), instruction_(instruction) {}

  Location GetSource() const { return source_; }
  Location GetDestination() const { return destination_; }

  void SetSource(Location value) { source_ = value; }
  void SetDestination(Location value) { destination_ = value; }

  // The parallel move resolver marks moves as "in-progress" by clearing the
  // destination (but not the source). Returns the cleared destination so the
  // caller can restore it later via ClearPending().
  Location MarkPending() {
    DCHECK(!IsPending());
    Location dest = destination_;
    destination_ = Location::NoLocation();
    return dest;
  }

  // Restores the destination previously cleared by MarkPending().
  void ClearPending(Location dest) {
    DCHECK(IsPending());
    destination_ = dest;
  }

  // A move is pending while the resolver is working on it: its destination has
  // been cleared but its source is still set.
  bool IsPending() const {
    DCHECK(source_.IsValid() || destination_.IsInvalid());
    return destination_.IsInvalid() && source_.IsValid();
  }

  // True if this blocks a move from the given location.
  bool Blocks(Location loc) const {
    return !IsEliminated() && source_.OverlapsWith(loc);
  }

  // A move is redundant if it's been eliminated, if its source and
  // destination are the same, or if its destination is unneeded.
  bool IsRedundant() const {
    return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
  }

  // We clear both operands to indicate move that's been eliminated.
  void Eliminate() {
    source_ = destination_ = Location::NoLocation();
  }

  bool IsEliminated() const {
    DCHECK_IMPLIES(source_.IsInvalid(), destination_.IsInvalid());
    return source_.IsInvalid();
  }

  DataType::Type GetType() const { return type_; }

  bool Is64BitMove() const {
    return DataType::Is64BitType(type_);
  }

  HInstruction* GetInstruction() const { return instruction_; }

 private:
  Location source_;
  Location destination_;
  // The type this move is for.
  DataType::Type type_;
  // The instruction this move is associated with. Null when this move is
  // for moving an input in the expected locations of user (including a phi user).
  // This is only used in debug mode, to ensure we do not connect interval siblings
  // in the same parallel move.
  HInstruction* instruction_;
};
8433
// Debug-printing support for MoveOperands.
std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs);

// Initial capacity reserved for the move list of an HParallelMove.
static constexpr size_t kDefaultNumberOfMoves = 4;
8437
// A collection of moves that must behave as if performed simultaneously
// (debug checks below reject a destination that overlaps another move's
// destination); sequenced into actual moves by the parallel move resolver.
class HParallelMove final : public HExpression<0> {
 public:
  explicit HParallelMove(ArenaAllocator* allocator, uint32_t dex_pc = kNoDexPc)
      : HExpression(kParallelMove, SideEffects::None(), dex_pc),
        moves_(allocator->Adapter(kArenaAllocMoveOperands)) {
    moves_.reserve(kDefaultNumberOfMoves);
  }

  // Appends a move to the set. In debug builds, verifies that the new move
  // neither duplicates work for the same instruction nor writes a destination
  // overlapping a destination already present in the set.
  void AddMove(Location source,
               Location destination,
               DataType::Type type,
               HInstruction* instruction) {
    DCHECK(source.IsValid());
    DCHECK(destination.IsValid());
    if (kIsDebugBuild) {
      if (instruction != nullptr) {
        for (const MoveOperands& move : moves_) {
          if (move.GetInstruction() == instruction) {
            // Special case the situation where the move is for the spill slot
            // of the instruction.
            if ((GetPrevious() == instruction)
                || ((GetPrevious() == nullptr)
                    && instruction->IsPhi()
                    && instruction->GetBlock() == GetBlock())) {
              DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
                  << "Doing parallel moves for the same instruction.";
            } else {
              DCHECK(false) << "Doing parallel moves for the same instruction.";
            }
          }
        }
      }
      for (const MoveOperands& move : moves_) {
        DCHECK(!destination.OverlapsWith(move.GetDestination()))
            << "Overlapped destination for two moves in a parallel move: "
            << move.GetSource() << " ==> " << move.GetDestination() << " and "
            << source << " ==> " << destination << " for " << SafePrint(instruction);
      }
    }
    moves_.emplace_back(source, destination, type, instruction);
  }

  MoveOperands* MoveOperandsAt(size_t index) {
    return &moves_[index];
  }

  size_t NumMoves() const { return moves_.size(); }

  DECLARE_INSTRUCTION(ParallelMove);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ParallelMove);

 private:
  ArenaVector<MoveOperands> moves_;
};
8494
// This instruction computes an intermediate address pointing in the 'middle' of an object. The
// result pointer cannot be handled by GC, so extra care is taken to make sure that this value is
// never used across anything that can trigger GC.
// The result of this instruction is not a pointer in the sense of `DataType::Type::kReference`.
// So we represent it by the type `DataType::Type::kInt32`.
class HIntermediateAddress final : public HExpression<2> {
 public:
  HIntermediateAddress(HInstruction* base_address, HInstruction* offset, uint32_t dex_pc)
      : HExpression(kIntermediateAddress,
                    DataType::Type::kInt32,
                    SideEffects::DependsOnGC(),
                    dex_pc) {
    // The int32 representation is only valid if an int and a reference have
    // the same size on the target.
    DCHECK_EQ(DataType::Size(DataType::Type::kInt32),
              DataType::Size(DataType::Type::kReference))
        << "kPrimInt and kPrimNot have different sizes.";
    SetRawInputAt(0, base_address);
    SetRawInputAt(1, offset);
  }

  bool IsClonable() const override { return true; }
  bool CanBeMoved() const override { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
    return true;
  }
  // The computed value points inside an object, not at an object header, so it
  // must not be treated as an object reference.
  bool IsActualObject() const override { return false; }

  HInstruction* GetBaseAddress() const { return InputAt(0); }
  HInstruction* GetOffset() const { return InputAt(1); }

  DECLARE_INSTRUCTION(IntermediateAddress);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(IntermediateAddress);
};
8529
8530
8531 } // namespace art
8532
8533 #include "nodes_vector.h"
8534
8535 #if defined(ART_ENABLE_CODEGEN_arm) || defined(ART_ENABLE_CODEGEN_arm64)
8536 #include "nodes_shared.h"
8537 #endif
8538 #if defined(ART_ENABLE_CODEGEN_x86) || defined(ART_ENABLE_CODEGEN_x86_64)
8539 #include "nodes_x86.h"
8540 #endif
8541
8542 namespace art HIDDEN {
8543
8544 class OptimizingCompilerStats;
8545
// Base class for visitors over an HGraph. The generated Visit##name methods
// all default to VisitInstruction(), so a subclass can either override
// specific instruction visits or intercept every instruction in one place.
class HGraphVisitor : public ValueObject {
 public:
  explicit HGraphVisitor(HGraph* graph, OptimizingCompilerStats* stats = nullptr)
      : stats_(stats),
        graph_(graph) {}
  virtual ~HGraphVisitor() {}

  // Fallback called by the default Visit##name methods; does nothing.
  virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
  virtual void VisitBasicBlock(HBasicBlock* block);

  // Visit the graph following basic block insertion order.
  void VisitInsertionOrder();

  // Visit the graph following dominator tree reverse post-order.
  void VisitReversePostOrder();

  HGraph* GetGraph() const { return graph_; }

  // Visit functions for instruction classes.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                 \
  virtual void Visit##name(H##name* instr) { VisitInstruction(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 protected:
  // Optional statistics sink; may be null.
  OptimizingCompilerStats* stats_;

 private:
  HGraph* const graph_;

  DISALLOW_COPY_AND_ASSIGN(HGraphVisitor);
};
8580
// Visitor whose generated Visit##name methods delegate to the visit method of
// the instruction's super class, so overriding a single abstract-class visit
// covers all of its concrete subclasses.
class HGraphDelegateVisitor : public HGraphVisitor {
 public:
  explicit HGraphDelegateVisitor(HGraph* graph, OptimizingCompilerStats* stats = nullptr)
      : HGraphVisitor(graph, stats) {}
  virtual ~HGraphDelegateVisitor() {}

  // Visit functions that delegate to the super class.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                 \
  void Visit##name(H##name* instr) override { Visit##super(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor);
};
8598
// Create a clone of the instruction, insert the clone into the graph,
// replace the old instruction with the clone, and remove the old instruction.
// Returns the clone. (Defined out of line.)
HInstruction* ReplaceInstrOrPhiByClone(HInstruction* instr);
8602
// Create a clone for each clonable instruction/phi and replace the original with the clone.
//
// Used for testing the individual instruction cloner.
class CloneAndReplaceInstructionVisitor final : public HGraphDelegateVisitor {
 public:
  explicit CloneAndReplaceInstructionVisitor(HGraph* graph)
      : HGraphDelegateVisitor(graph), instr_replaced_by_clones_count_(0) {}

  // Replaces `instruction` by its clone if the instruction supports cloning;
  // otherwise leaves it untouched.
  void VisitInstruction(HInstruction* instruction) override {
    if (instruction->IsClonable()) {
      ReplaceInstrOrPhiByClone(instruction);
      instr_replaced_by_clones_count_++;
    }
  }

  // Number of instructions that were replaced by their clones so far.
  size_t GetInstrReplacedByClonesCount() const { return instr_replaced_by_clones_count_; }

 private:
  size_t instr_replaced_by_clones_count_;

  DISALLOW_COPY_AND_ASSIGN(CloneAndReplaceInstructionVisitor);
};
8625
// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is on their
// block id.
8629 class HBlocksInLoopIterator : public ValueObject {
8630 public:
8631 explicit HBlocksInLoopIterator(const HLoopInformation& info)
8632 : blocks_in_loop_(info.GetBlocks()),
8633 blocks_(info.GetHeader()->GetGraph()->GetBlocks()),
8634 index_(0) {
8635 if (!blocks_in_loop_.IsBitSet(index_)) {
8636 Advance();
8637 }
8638 }
8639
8640 bool Done() const { return index_ == blocks_.size(); }
8641 HBasicBlock* Current() const { return blocks_[index_]; }
8642 void Advance() {
8643 ++index_;
8644 for (size_t e = blocks_.size(); index_ < e; ++index_) {
8645 if (blocks_in_loop_.IsBitSet(index_)) {
8646 break;
8647 }
8648 }
8649 }
8650
8651 private:
8652 const BitVector& blocks_in_loop_;
8653 const ArenaVector<HBasicBlock*>& blocks_;
8654 size_t index_;
8655
8656 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator);
8657 };
8658
// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is reverse
// post order.
8662 class HBlocksInLoopReversePostOrderIterator : public ValueObject {
8663 public:
8664 explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
8665 : blocks_in_loop_(info.GetBlocks()),
8666 blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
8667 index_(0) {
8668 if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
8669 Advance();
8670 }
8671 }
8672
8673 bool Done() const { return index_ == blocks_.size(); }
8674 HBasicBlock* Current() const { return blocks_[index_]; }
8675 void Advance() {
8676 ++index_;
8677 for (size_t e = blocks_.size(); index_ < e; ++index_) {
8678 if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
8679 break;
8680 }
8681 }
8682 }
8683
8684 private:
8685 const BitVector& blocks_in_loop_;
8686 const ArenaVector<HBasicBlock*>& blocks_;
8687 size_t index_;
8688
8689 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
8690 };
8691
8692 // Returns int64_t value of a properly typed constant.
8693 inline int64_t Int64FromConstant(HConstant* constant) {
8694 if (constant->IsIntConstant()) {
8695 return constant->AsIntConstant()->GetValue();
8696 } else if (constant->IsLongConstant()) {
8697 return constant->AsLongConstant()->GetValue();
8698 } else {
8699 DCHECK(constant->IsNullConstant()) << constant->DebugName();
8700 return 0;
8701 }
8702 }
8703
8704 // Returns true iff instruction is an integral constant (and sets value on success).
8705 inline bool IsInt64AndGet(HInstruction* instruction, /*out*/ int64_t* value) {
8706 if (instruction->IsIntConstant()) {
8707 *value = instruction->AsIntConstant()->GetValue();
8708 return true;
8709 } else if (instruction->IsLongConstant()) {
8710 *value = instruction->AsLongConstant()->GetValue();
8711 return true;
8712 } else if (instruction->IsNullConstant()) {
8713 *value = 0;
8714 return true;
8715 }
8716 return false;
8717 }
8718
8719 // Returns true iff instruction is the given integral constant.
8720 inline bool IsInt64Value(HInstruction* instruction, int64_t value) {
8721 int64_t val = 0;
8722 return IsInt64AndGet(instruction, &val) && val == value;
8723 }
8724
8725 // Returns true iff instruction is a zero bit pattern.
8726 inline bool IsZeroBitPattern(HInstruction* instruction) {
8727 return instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern();
8728 }
8729
// Implement HInstruction::Is##type() for concrete instructions.
// For a concrete instruction this is a direct comparison of the packed
// instruction kind against the class's own kind constant.
#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  inline bool HInstruction::Is##type() const { return GetKind() == k##type; }
FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK
8735
// Implement HInstruction::Is##type() for abstract instructions.
// An abstract class has no kind of its own, so the check is table-driven:
// for every concrete kind, a constexpr table records whether that concrete
// class derives from the abstract base, and the table is indexed by GetKind().
#define INSTRUCTION_TYPE_CHECK_RESULT(type, super)                             \
  std::is_base_of<BaseType, H##type>::value,
#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  inline bool HInstruction::Is##type() const {                                 \
    DCHECK_LT(GetKind(), kLastInstructionKind);                                \
    using BaseType = H##type;                                                  \
    static constexpr bool results[] = {                                       \
        FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK_RESULT)           \
    };                                                                         \
    return results[static_cast<size_t>(GetKind())];                           \
  }

FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK
#undef INSTRUCTION_TYPE_CHECK_RESULT
8752
8753 #define INSTRUCTION_TYPE_CAST(type, super) \
8754 inline const H##type* HInstruction::As##type() const { \
8755 return Is##type() ? down_cast<const H##type*>(this) : nullptr; \
8756 } \
8757 inline H##type* HInstruction::As##type() { \
8758 return Is##type() ? static_cast<H##type*>(this) : nullptr; \
8759 }
8760
8761 FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CAST)
8762 #undef INSTRUCTION_TYPE_CAST
8763
8764
// Create space in `blocks` for adding `number_of_new_blocks` entries
// starting just after location `after`. Blocks following `after` are moved
// accordingly. The vacated slots still hold stale duplicates of the moved
// pointers; the caller is expected to overwrite them.
inline void MakeRoomFor(ArenaVector<HBasicBlock*>* blocks,
                        size_t number_of_new_blocks,
                        size_t after) {
  DCHECK_LT(after, blocks->size());
  size_t old_size = blocks->size();
  size_t new_size = old_size + number_of_new_blocks;
  blocks->resize(new_size);
  // Shift [after + 1, old_size) to the end, opening a gap of
  // `number_of_new_blocks` slots right after `after`.
  std::copy_backward(blocks->begin() + after + 1u, blocks->begin() + old_size, blocks->end());
}
8776
8777 /*
8778 * Hunt "under the hood" of array lengths (leading to array references),
8779 * null checks (also leading to array references), and new arrays
8780 * (leading to the actual length). This makes it more likely related
8781 * instructions become actually comparable.
8782 */
8783 inline HInstruction* HuntForDeclaration(HInstruction* instruction) {
8784 while (instruction->IsArrayLength() ||
8785 instruction->IsNullCheck() ||
8786 instruction->IsNewArray()) {
8787 instruction = instruction->IsNewArray()
8788 ? instruction->AsNewArray()->GetLength()
8789 : instruction->InputAt(0);
8790 }
8791 return instruction;
8792 }
8793
8794 inline bool IsAddOrSub(const HInstruction* instruction) {
8795 return instruction->IsAdd() || instruction->IsSub();
8796 }
8797
// Removes the environment-use records of `instruction` (defined out of line).
void RemoveEnvironmentUses(HInstruction* instruction);
// Returns whether `instruction` is recorded as used by some environment;
// presumably environments other than its own — TODO confirm in nodes.cc.
bool HasEnvironmentUsedByOthers(HInstruction* instruction);
// Re-registers the environment input records of `instruction` (defined out of line).
void ResetEnvironmentInputRecords(HInstruction* instruction);

// Detects an instruction that is >= 0. As long as the value is carried by
// a single instruction, arithmetic wrap-around cannot occur.
bool IsGEZero(HInstruction* instruction);
8805
8806 } // namespace art
8807
8808 #endif // ART_COMPILER_OPTIMIZING_NODES_H_
8809