1 /*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #ifndef ART_COMPILER_OPTIMIZING_NODES_H_
18 #define ART_COMPILER_OPTIMIZING_NODES_H_
19
20 #include <algorithm>
21 #include <array>
22 #include <type_traits>
23
24 #include "base/arena_bit_vector.h"
25 #include "base/arena_containers.h"
26 #include "base/arena_object.h"
27 #include "base/array_ref.h"
28 #include "base/iteration_range.h"
29 #include "base/mutex.h"
30 #include "base/quasi_atomic.h"
31 #include "base/stl_util.h"
32 #include "base/transform_array_ref.h"
33 #include "art_method.h"
34 #include "data_type.h"
35 #include "deoptimization_kind.h"
36 #include "dex/dex_file.h"
37 #include "dex/dex_file_types.h"
38 #include "dex/invoke_type.h"
39 #include "dex/method_reference.h"
40 #include "entrypoints/quick/quick_entrypoints_enum.h"
41 #include "handle.h"
42 #include "handle_scope.h"
43 #include "intrinsics_enum.h"
44 #include "locations.h"
45 #include "mirror/class.h"
46 #include "mirror/method_type.h"
47 #include "offsets.h"
48 #include "utils/intrusive_forward_list.h"
49
50 namespace art {
51
// Forward declarations of the HIR classes and compiler infrastructure used
// throughout this header; the definitions follow below or live in other
// compiler headers.
class ArenaStack;
class GraphChecker;
class HBasicBlock;
class HConstructorFence;
class HCurrentMethod;
class HDoubleConstant;
class HEnvironment;
class HFloatConstant;
class HGraphBuilder;
class HGraphVisitor;
class HInstruction;
class HIntConstant;
class HInvoke;
class HLongConstant;
class HNullConstant;
class HParameterValue;
class HPhi;
class HSuspendCheck;
class HTryBoundary;
class LiveInterval;
class LocationSummary;
class SlowPathCode;
class SsaBuilder;

namespace mirror {
class DexCache;
}  // namespace mirror
79
80 static const int kDefaultNumberOfBlocks = 8;
81 static const int kDefaultNumberOfSuccessors = 2;
82 static const int kDefaultNumberOfPredecessors = 2;
83 static const int kDefaultNumberOfExceptionalPredecessors = 0;
84 static const int kDefaultNumberOfDominatedBlocks = 1;
85 static const int kDefaultNumberOfBackEdges = 1;
86
87 // The maximum (meaningful) distance (31) that can be used in an integer shift/rotate operation.
88 static constexpr int32_t kMaxIntShiftDistance = 0x1f;
89 // The maximum (meaningful) distance (63) that can be used in a long shift/rotate operation.
90 static constexpr int32_t kMaxLongShiftDistance = 0x3f;
91
92 static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
93 static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);
94
95 static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);
96
97 static constexpr uint32_t kNoDexPc = -1;
98
IsSameDexFile(const DexFile & lhs,const DexFile & rhs)99 inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
100 // For the purposes of the compiler, the dex files must actually be the same object
101 // if we want to safely treat them as the same. This is especially important for JIT
102 // as custom class loaders can open the same underlying file (or memory) multiple
103 // times and provide different class resolution but no two class loaders should ever
104 // use the same DexFile object - doing so is an unsupported hack that can lead to
105 // all sorts of weird failures.
106 return &lhs == &rhs;
107 }
108
// Comparison kinds used by conditional instructions. The unsigned variants
// follow the x86-style "below"/"above" naming.
enum IfCondition {
  // All types.
  kCondEQ,  // ==
  kCondNE,  // !=
  // Signed integers and floating-point numbers.
  kCondLT,  // <
  kCondLE,  // <=
  kCondGT,  // >
  kCondGE,  // >=
  // Unsigned integers.
  kCondB,   // <  (below)
  kCondBE,  // <= (below or equal)
  kCondA,   // >  (above)
  kCondAE,  // >= (above or equal)
  // First and last aliases.
  kCondFirst = kCondEQ,
  kCondLast = kCondAE,
};
127
// Outcome of graph-level analyses such as HGraph::BuildDominatorTree() and
// HGraph::AnalyzeLoops(). Values other than kAnalysisSuccess describe why
// the analysis did not succeed.
enum GraphAnalysisResult {
  kAnalysisSkipped,                           // The analysis was not performed.
  kAnalysisInvalidBytecode,                   // The input bytecode could not be handled.
  kAnalysisFailThrowCatchLoop,                // A loop header is a catch block (see AnalyzeLoops()).
  kAnalysisFailAmbiguousArrayOp,              // An array operation's type is ambiguous.
  kAnalysisFailIrreducibleLoopAndStringInit,  // Irreducible loop combined with a String.<init>.
  kAnalysisSuccess,
};
136
// Reinterprets `x` as the unsigned integer type of the same width as T.
template <typename T>
static inline typename std::make_unsigned<T>::type MakeUnsigned(T x) {
  using Unsigned = typename std::make_unsigned<T>::type;
  return static_cast<Unsigned>(x);
}
141
// Intrusive list of the instructions of a basic block. Only the head and
// tail pointers are stored here; the per-instruction links are managed
// cooperatively with HInstruction and the iterator classes (hence the
// friend declarations below).
class HInstructionList : public ValueObject {
 public:
  HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}

  // Append `instruction` to / remove `instruction` from this list.
  void AddInstruction(HInstruction* instruction);
  void RemoveInstruction(HInstruction* instruction);

  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);

  // Return true if this list contains `instruction`.
  bool Contains(HInstruction* instruction) const;

  // Return true if `instruction1` is found before `instruction2` in
  // this instruction list and false otherwise. Abort if none
  // of these instructions is found.
  bool FoundBefore(const HInstruction* instruction1,
                   const HInstruction* instruction2) const;

  bool IsEmpty() const { return first_instruction_ == nullptr; }
  // Reset the list to empty; the instructions themselves are left untouched.
  void Clear() { first_instruction_ = last_instruction_ = nullptr; }

  // Update the block of all instructions to be `block`.
  void SetBlockOfInstructions(HBasicBlock* block) const;

  // Splice the given list's instructions into this one (after `cursor`,
  // before `cursor`, or at the end, respectively).
  void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
  void AddBefore(HInstruction* cursor, const HInstructionList& instruction_list);
  void Add(const HInstructionList& instruction_list);

  // Return the number of instructions in the list. This is an expensive operation.
  size_t CountSize() const;

 private:
  HInstruction* first_instruction_;
  HInstruction* last_instruction_;

  friend class HBasicBlock;
  friend class HGraph;
  friend class HInstruction;
  friend class HInstructionIterator;
  friend class HInstructionIteratorHandleChanges;
  friend class HBackwardInstructionIterator;

  DISALLOW_COPY_AND_ASSIGN(HInstructionList);
};
188
189 class ReferenceTypeInfo : ValueObject {
190 public:
191 typedef Handle<mirror::Class> TypeHandle;
192
193 static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact);
194
Create(TypeHandle type_handle)195 static ReferenceTypeInfo Create(TypeHandle type_handle) REQUIRES_SHARED(Locks::mutator_lock_) {
196 return Create(type_handle, type_handle->CannotBeAssignedFromOtherTypes());
197 }
198
CreateUnchecked(TypeHandle type_handle,bool is_exact)199 static ReferenceTypeInfo CreateUnchecked(TypeHandle type_handle, bool is_exact) {
200 return ReferenceTypeInfo(type_handle, is_exact);
201 }
202
CreateInvalid()203 static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }
204
IsValidHandle(TypeHandle handle)205 static bool IsValidHandle(TypeHandle handle) {
206 return handle.GetReference() != nullptr;
207 }
208
IsValid()209 bool IsValid() const {
210 return IsValidHandle(type_handle_);
211 }
212
IsExact()213 bool IsExact() const { return is_exact_; }
214
IsObjectClass()215 bool IsObjectClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
216 DCHECK(IsValid());
217 return GetTypeHandle()->IsObjectClass();
218 }
219
IsStringClass()220 bool IsStringClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
221 DCHECK(IsValid());
222 return GetTypeHandle()->IsStringClass();
223 }
224
IsObjectArray()225 bool IsObjectArray() const REQUIRES_SHARED(Locks::mutator_lock_) {
226 DCHECK(IsValid());
227 return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
228 }
229
IsInterface()230 bool IsInterface() const REQUIRES_SHARED(Locks::mutator_lock_) {
231 DCHECK(IsValid());
232 return GetTypeHandle()->IsInterface();
233 }
234
IsArrayClass()235 bool IsArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
236 DCHECK(IsValid());
237 return GetTypeHandle()->IsArrayClass();
238 }
239
IsPrimitiveArrayClass()240 bool IsPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
241 DCHECK(IsValid());
242 return GetTypeHandle()->IsPrimitiveArray();
243 }
244
IsNonPrimitiveArrayClass()245 bool IsNonPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
246 DCHECK(IsValid());
247 return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
248 }
249
CanArrayHold(ReferenceTypeInfo rti)250 bool CanArrayHold(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
251 DCHECK(IsValid());
252 if (!IsExact()) return false;
253 if (!IsArrayClass()) return false;
254 return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
255 }
256
CanArrayHoldValuesOf(ReferenceTypeInfo rti)257 bool CanArrayHoldValuesOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
258 DCHECK(IsValid());
259 if (!IsExact()) return false;
260 if (!IsArrayClass()) return false;
261 if (!rti.IsArrayClass()) return false;
262 return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
263 rti.GetTypeHandle()->GetComponentType());
264 }
265
GetTypeHandle()266 Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }
267
IsSupertypeOf(ReferenceTypeInfo rti)268 bool IsSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
269 DCHECK(IsValid());
270 DCHECK(rti.IsValid());
271 return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
272 }
273
IsStrictSupertypeOf(ReferenceTypeInfo rti)274 bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
275 DCHECK(IsValid());
276 DCHECK(rti.IsValid());
277 return GetTypeHandle().Get() != rti.GetTypeHandle().Get() &&
278 GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
279 }
280
281 // Returns true if the type information provide the same amount of details.
282 // Note that it does not mean that the instructions have the same actual type
283 // (because the type can be the result of a merge).
IsEqual(ReferenceTypeInfo rti)284 bool IsEqual(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
285 if (!IsValid() && !rti.IsValid()) {
286 // Invalid types are equal.
287 return true;
288 }
289 if (!IsValid() || !rti.IsValid()) {
290 // One is valid, the other not.
291 return false;
292 }
293 return IsExact() == rti.IsExact()
294 && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
295 }
296
297 private:
ReferenceTypeInfo()298 ReferenceTypeInfo() : type_handle_(TypeHandle()), is_exact_(false) {}
ReferenceTypeInfo(TypeHandle type_handle,bool is_exact)299 ReferenceTypeInfo(TypeHandle type_handle, bool is_exact)
300 : type_handle_(type_handle), is_exact_(is_exact) { }
301
302 // The class of the object.
303 TypeHandle type_handle_;
304 // Whether or not the type is exact or a superclass of the actual type.
305 // Whether or not we have any information about this type.
306 bool is_exact_;
307 };
308
309 std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);
310
311 // Control-flow graph of a method. Contains a list of basic blocks.
312 class HGraph : public ArenaObject<kArenaAllocGraph> {
313 public:
314 HGraph(ArenaAllocator* allocator,
315 ArenaStack* arena_stack,
316 const DexFile& dex_file,
317 uint32_t method_idx,
318 InstructionSet instruction_set,
319 InvokeType invoke_type = kInvalidInvokeType,
320 bool dead_reference_safe = false,
321 bool debuggable = false,
322 bool osr = false,
323 int start_instruction_id = 0)
allocator_(allocator)324 : allocator_(allocator),
325 arena_stack_(arena_stack),
326 blocks_(allocator->Adapter(kArenaAllocBlockList)),
327 reverse_post_order_(allocator->Adapter(kArenaAllocReversePostOrder)),
328 linear_order_(allocator->Adapter(kArenaAllocLinearOrder)),
329 entry_block_(nullptr),
330 exit_block_(nullptr),
331 maximum_number_of_out_vregs_(0),
332 number_of_vregs_(0),
333 number_of_in_vregs_(0),
334 temporaries_vreg_slots_(0),
335 has_bounds_checks_(false),
336 has_try_catch_(false),
337 has_simd_(false),
338 has_loops_(false),
339 has_irreducible_loops_(false),
340 dead_reference_safe_(dead_reference_safe),
341 debuggable_(debuggable),
342 current_instruction_id_(start_instruction_id),
343 dex_file_(dex_file),
344 method_idx_(method_idx),
345 invoke_type_(invoke_type),
346 in_ssa_form_(false),
347 number_of_cha_guards_(0),
348 instruction_set_(instruction_set),
349 cached_null_constant_(nullptr),
350 cached_int_constants_(std::less<int32_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
351 cached_float_constants_(std::less<int32_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
352 cached_long_constants_(std::less<int64_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
353 cached_double_constants_(std::less<int64_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
354 cached_current_method_(nullptr),
355 art_method_(nullptr),
356 inexact_object_rti_(ReferenceTypeInfo::CreateInvalid()),
357 osr_(osr),
358 cha_single_implementation_list_(allocator->Adapter(kArenaAllocCHA)) {
359 blocks_.reserve(kDefaultNumberOfBlocks);
360 }
361
362 // Acquires and stores RTI of inexact Object to be used when creating HNullConstant.
363 void InitializeInexactObjectRTI(VariableSizedHandleScope* handles);
364
GetAllocator()365 ArenaAllocator* GetAllocator() const { return allocator_; }
GetArenaStack()366 ArenaStack* GetArenaStack() const { return arena_stack_; }
GetBlocks()367 const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }
368
IsInSsaForm()369 bool IsInSsaForm() const { return in_ssa_form_; }
SetInSsaForm()370 void SetInSsaForm() { in_ssa_form_ = true; }
371
GetEntryBlock()372 HBasicBlock* GetEntryBlock() const { return entry_block_; }
GetExitBlock()373 HBasicBlock* GetExitBlock() const { return exit_block_; }
HasExitBlock()374 bool HasExitBlock() const { return exit_block_ != nullptr; }
375
SetEntryBlock(HBasicBlock * block)376 void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
SetExitBlock(HBasicBlock * block)377 void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }
378
379 void AddBlock(HBasicBlock* block);
380
381 void ComputeDominanceInformation();
382 void ClearDominanceInformation();
383 void ClearLoopInformation();
384 void FindBackEdges(ArenaBitVector* visited);
385 GraphAnalysisResult BuildDominatorTree();
386 void SimplifyCFG();
387 void SimplifyCatchBlocks();
388
389 // Analyze all natural loops in this graph. Returns a code specifying that it
390 // was successful or the reason for failure. The method will fail if a loop
391 // is a throw-catch loop, i.e. the header is a catch block.
392 GraphAnalysisResult AnalyzeLoops() const;
393
394 // Iterate over blocks to compute try block membership. Needs reverse post
395 // order and loop information.
396 void ComputeTryBlockInformation();
397
398 // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
399 // Returns the instruction to replace the invoke expression or null if the
400 // invoke is for a void method. Note that the caller is responsible for replacing
401 // and removing the invoke instruction.
402 HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);
403
404 // Update the loop and try membership of `block`, which was spawned from `reference`.
405 // In case `reference` is a back edge, `replace_if_back_edge` notifies whether `block`
406 // should be the new back edge.
407 void UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
408 HBasicBlock* reference,
409 bool replace_if_back_edge);
410
411 // Need to add a couple of blocks to test if the loop body is entered and
412 // put deoptimization instructions, etc.
413 void TransformLoopHeaderForBCE(HBasicBlock* header);
414
415 // Adds a new loop directly after the loop with the given header and exit.
416 // Returns the new preheader.
417 HBasicBlock* TransformLoopForVectorization(HBasicBlock* header,
418 HBasicBlock* body,
419 HBasicBlock* exit);
420
421 // Removes `block` from the graph. Assumes `block` has been disconnected from
422 // other blocks and has no instructions or phis.
423 void DeleteDeadEmptyBlock(HBasicBlock* block);
424
425 // Splits the edge between `block` and `successor` while preserving the
426 // indices in the predecessor/successor lists. If there are multiple edges
427 // between the blocks, the lowest indices are used.
428 // Returns the new block which is empty and has the same dex pc as `successor`.
429 HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);
430
431 void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);
432 void OrderLoopHeaderPredecessors(HBasicBlock* header);
433
434 // Transform a loop into a format with a single preheader.
435 //
436 // Each phi in the header should be split: original one in the header should only hold
437 // inputs reachable from the back edges and a single input from the preheader. The newly created
438 // phi in the preheader should collate the inputs from the original multiple incoming blocks.
439 //
440 // Loops in the graph typically have a single preheader, so this method is used to "repair" loops
441 // that no longer have this property.
442 void TransformLoopToSinglePreheaderFormat(HBasicBlock* header);
443
444 void SimplifyLoop(HBasicBlock* header);
445
GetNextInstructionId()446 int32_t GetNextInstructionId() {
447 CHECK_NE(current_instruction_id_, INT32_MAX);
448 return current_instruction_id_++;
449 }
450
GetCurrentInstructionId()451 int32_t GetCurrentInstructionId() const {
452 return current_instruction_id_;
453 }
454
SetCurrentInstructionId(int32_t id)455 void SetCurrentInstructionId(int32_t id) {
456 CHECK_GE(id, current_instruction_id_);
457 current_instruction_id_ = id;
458 }
459
GetMaximumNumberOfOutVRegs()460 uint16_t GetMaximumNumberOfOutVRegs() const {
461 return maximum_number_of_out_vregs_;
462 }
463
SetMaximumNumberOfOutVRegs(uint16_t new_value)464 void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
465 maximum_number_of_out_vregs_ = new_value;
466 }
467
UpdateMaximumNumberOfOutVRegs(uint16_t other_value)468 void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
469 maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
470 }
471
UpdateTemporariesVRegSlots(size_t slots)472 void UpdateTemporariesVRegSlots(size_t slots) {
473 temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
474 }
475
GetTemporariesVRegSlots()476 size_t GetTemporariesVRegSlots() const {
477 DCHECK(!in_ssa_form_);
478 return temporaries_vreg_slots_;
479 }
480
SetNumberOfVRegs(uint16_t number_of_vregs)481 void SetNumberOfVRegs(uint16_t number_of_vregs) {
482 number_of_vregs_ = number_of_vregs;
483 }
484
GetNumberOfVRegs()485 uint16_t GetNumberOfVRegs() const {
486 return number_of_vregs_;
487 }
488
SetNumberOfInVRegs(uint16_t value)489 void SetNumberOfInVRegs(uint16_t value) {
490 number_of_in_vregs_ = value;
491 }
492
GetNumberOfInVRegs()493 uint16_t GetNumberOfInVRegs() const {
494 return number_of_in_vregs_;
495 }
496
GetNumberOfLocalVRegs()497 uint16_t GetNumberOfLocalVRegs() const {
498 DCHECK(!in_ssa_form_);
499 return number_of_vregs_ - number_of_in_vregs_;
500 }
501
GetReversePostOrder()502 const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
503 return reverse_post_order_;
504 }
505
GetReversePostOrderSkipEntryBlock()506 ArrayRef<HBasicBlock* const> GetReversePostOrderSkipEntryBlock() {
507 DCHECK(GetReversePostOrder()[0] == entry_block_);
508 return ArrayRef<HBasicBlock* const>(GetReversePostOrder()).SubArray(1);
509 }
510
GetPostOrder()511 IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetPostOrder() const {
512 return ReverseRange(GetReversePostOrder());
513 }
514
GetLinearOrder()515 const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
516 return linear_order_;
517 }
518
GetLinearPostOrder()519 IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetLinearPostOrder() const {
520 return ReverseRange(GetLinearOrder());
521 }
522
HasBoundsChecks()523 bool HasBoundsChecks() const {
524 return has_bounds_checks_;
525 }
526
SetHasBoundsChecks(bool value)527 void SetHasBoundsChecks(bool value) {
528 has_bounds_checks_ = value;
529 }
530
531 // Is the code known to be robust against eliminating dead references
532 // and the effects of early finalization?
IsDeadReferenceSafe()533 bool IsDeadReferenceSafe() const { return dead_reference_safe_; }
534
MarkDeadReferenceUnsafe()535 void MarkDeadReferenceUnsafe() { dead_reference_safe_ = false; }
536
IsDebuggable()537 bool IsDebuggable() const { return debuggable_; }
538
539 // Returns a constant of the given type and value. If it does not exist
540 // already, it is created and inserted into the graph. This method is only for
541 // integral types.
542 HConstant* GetConstant(DataType::Type type, int64_t value, uint32_t dex_pc = kNoDexPc);
543
544 // TODO: This is problematic for the consistency of reference type propagation
545 // because it can be created anytime after the pass and thus it will be left
546 // with an invalid type.
547 HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc);
548
549 HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) {
550 return CreateConstant(value, &cached_int_constants_, dex_pc);
551 }
552 HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) {
553 return CreateConstant(value, &cached_long_constants_, dex_pc);
554 }
555 HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) {
556 return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc);
557 }
558 HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) {
559 return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc);
560 }
561
562 HCurrentMethod* GetCurrentMethod();
563
GetDexFile()564 const DexFile& GetDexFile() const {
565 return dex_file_;
566 }
567
GetMethodIdx()568 uint32_t GetMethodIdx() const {
569 return method_idx_;
570 }
571
572 // Get the method name (without the signature), e.g. "<init>"
573 const char* GetMethodName() const;
574
575 // Get the pretty method name (class + name + optionally signature).
576 std::string PrettyMethod(bool with_signature = true) const;
577
GetInvokeType()578 InvokeType GetInvokeType() const {
579 return invoke_type_;
580 }
581
GetInstructionSet()582 InstructionSet GetInstructionSet() const {
583 return instruction_set_;
584 }
585
IsCompilingOsr()586 bool IsCompilingOsr() const { return osr_; }
587
GetCHASingleImplementationList()588 ArenaSet<ArtMethod*>& GetCHASingleImplementationList() {
589 return cha_single_implementation_list_;
590 }
591
AddCHASingleImplementationDependency(ArtMethod * method)592 void AddCHASingleImplementationDependency(ArtMethod* method) {
593 cha_single_implementation_list_.insert(method);
594 }
595
HasShouldDeoptimizeFlag()596 bool HasShouldDeoptimizeFlag() const {
597 return number_of_cha_guards_ != 0;
598 }
599
HasTryCatch()600 bool HasTryCatch() const { return has_try_catch_; }
SetHasTryCatch(bool value)601 void SetHasTryCatch(bool value) { has_try_catch_ = value; }
602
HasSIMD()603 bool HasSIMD() const { return has_simd_; }
SetHasSIMD(bool value)604 void SetHasSIMD(bool value) { has_simd_ = value; }
605
HasLoops()606 bool HasLoops() const { return has_loops_; }
SetHasLoops(bool value)607 void SetHasLoops(bool value) { has_loops_ = value; }
608
HasIrreducibleLoops()609 bool HasIrreducibleLoops() const { return has_irreducible_loops_; }
SetHasIrreducibleLoops(bool value)610 void SetHasIrreducibleLoops(bool value) { has_irreducible_loops_ = value; }
611
GetArtMethod()612 ArtMethod* GetArtMethod() const { return art_method_; }
SetArtMethod(ArtMethod * method)613 void SetArtMethod(ArtMethod* method) { art_method_ = method; }
614
615 // Returns an instruction with the opposite Boolean value from 'cond'.
616 // The instruction has been inserted into the graph, either as a constant, or
617 // before cursor.
618 HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor);
619
GetInexactObjectRti()620 ReferenceTypeInfo GetInexactObjectRti() const { return inexact_object_rti_; }
621
GetNumberOfCHAGuards()622 uint32_t GetNumberOfCHAGuards() { return number_of_cha_guards_; }
SetNumberOfCHAGuards(uint32_t num)623 void SetNumberOfCHAGuards(uint32_t num) { number_of_cha_guards_ = num; }
IncrementNumberOfCHAGuards()624 void IncrementNumberOfCHAGuards() { number_of_cha_guards_++; }
625
626 private:
627 void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const;
628 void RemoveDeadBlocks(const ArenaBitVector& visited);
629
630 template <class InstructionType, typename ValueType>
631 InstructionType* CreateConstant(ValueType value,
632 ArenaSafeMap<ValueType, InstructionType*>* cache,
633 uint32_t dex_pc = kNoDexPc) {
634 // Try to find an existing constant of the given value.
635 InstructionType* constant = nullptr;
636 auto cached_constant = cache->find(value);
637 if (cached_constant != cache->end()) {
638 constant = cached_constant->second;
639 }
640
641 // If not found or previously deleted, create and cache a new instruction.
642 // Don't bother reviving a previously deleted instruction, for simplicity.
643 if (constant == nullptr || constant->GetBlock() == nullptr) {
644 constant = new (allocator_) InstructionType(value, dex_pc);
645 cache->Overwrite(value, constant);
646 InsertConstant(constant);
647 }
648 return constant;
649 }
650
651 void InsertConstant(HConstant* instruction);
652
653 // Cache a float constant into the graph. This method should only be
654 // called by the SsaBuilder when creating "equivalent" instructions.
655 void CacheFloatConstant(HFloatConstant* constant);
656
657 // See CacheFloatConstant comment.
658 void CacheDoubleConstant(HDoubleConstant* constant);
659
660 ArenaAllocator* const allocator_;
661 ArenaStack* const arena_stack_;
662
663 // List of blocks in insertion order.
664 ArenaVector<HBasicBlock*> blocks_;
665
666 // List of blocks to perform a reverse post order tree traversal.
667 ArenaVector<HBasicBlock*> reverse_post_order_;
668
669 // List of blocks to perform a linear order tree traversal. Unlike the reverse
670 // post order, this order is not incrementally kept up-to-date.
671 ArenaVector<HBasicBlock*> linear_order_;
672
673 HBasicBlock* entry_block_;
674 HBasicBlock* exit_block_;
675
676 // The maximum number of virtual registers arguments passed to a HInvoke in this graph.
677 uint16_t maximum_number_of_out_vregs_;
678
679 // The number of virtual registers in this method. Contains the parameters.
680 uint16_t number_of_vregs_;
681
682 // The number of virtual registers used by parameters of this method.
683 uint16_t number_of_in_vregs_;
684
685 // Number of vreg size slots that the temporaries use (used in baseline compiler).
686 size_t temporaries_vreg_slots_;
687
688 // Flag whether there are bounds checks in the graph. We can skip
689 // BCE if it's false. It's only best effort to keep it up to date in
690 // the presence of code elimination so there might be false positives.
691 bool has_bounds_checks_;
692
693 // Flag whether there are try/catch blocks in the graph. We will skip
694 // try/catch-related passes if it's false. It's only best effort to keep
695 // it up to date in the presence of code elimination so there might be
696 // false positives.
697 bool has_try_catch_;
698
699 // Flag whether SIMD instructions appear in the graph. If true, the
700 // code generators may have to be more careful spilling the wider
701 // contents of SIMD registers.
702 bool has_simd_;
703
704 // Flag whether there are any loops in the graph. We can skip loop
705 // optimization if it's false. It's only best effort to keep it up
706 // to date in the presence of code elimination so there might be false
707 // positives.
708 bool has_loops_;
709
710 // Flag whether there are any irreducible loops in the graph. It's only
711 // best effort to keep it up to date in the presence of code elimination
712 // so there might be false positives.
713 bool has_irreducible_loops_;
714
715 // Is the code known to be robust against eliminating dead references
716 // and the effects of early finalization? If false, dead reference variables
717 // are kept if they might be visible to the garbage collector.
718 // Currently this means that the class was declared to be dead-reference-safe,
719 // the method accesses no reachability-sensitive fields or data, and the same
720 // is true for any methods that were inlined into the current one.
721 bool dead_reference_safe_;
722
723 // Indicates whether the graph should be compiled in a way that
724 // ensures full debuggability. If false, we can apply more
725 // aggressive optimizations that may limit the level of debugging.
726 const bool debuggable_;
727
728 // The current id to assign to a newly added instruction. See HInstruction.id_.
729 int32_t current_instruction_id_;
730
731 // The dex file from which the method is from.
732 const DexFile& dex_file_;
733
734 // The method index in the dex file.
735 const uint32_t method_idx_;
736
737 // If inlined, this encodes how the callee is being invoked.
738 const InvokeType invoke_type_;
739
740 // Whether the graph has been transformed to SSA form. Only used
741 // in debug mode to ensure we are not using properties only valid
742 // for non-SSA form (like the number of temporaries).
743 bool in_ssa_form_;
744
745 // Number of CHA guards in the graph. Used to short-circuit the
746 // CHA guard optimization pass when there is no CHA guard left.
747 uint32_t number_of_cha_guards_;
748
749 const InstructionSet instruction_set_;
750
751 // Cached constants.
752 HNullConstant* cached_null_constant_;
753 ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
754 ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
755 ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
756 ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;
757
758 HCurrentMethod* cached_current_method_;
759
760 // The ArtMethod this graph is for. Note that for AOT, it may be null,
761 // for example for methods whose declaring class could not be resolved
762 // (such as when the superclass could not be found).
763 ArtMethod* art_method_;
764
765 // Keep the RTI of inexact Object to avoid having to pass stack handle
766 // collection pointer to passes which may create NullConstant.
767 ReferenceTypeInfo inexact_object_rti_;
768
769 // Whether we are compiling this graph for on stack replacement: this will
770 // make all loops seen as irreducible and emit special stack maps to mark
771 // compiled code entries which the interpreter can directly jump to.
772 const bool osr_;
773
774 // List of methods that are assumed to have single implementation.
775 ArenaSet<ArtMethod*> cha_single_implementation_list_;
776
777 friend class SsaBuilder; // For caching constants.
778 friend class SsaLivenessAnalysis; // For the linear order.
779 friend class HInliner; // For the reverse post order.
780 ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
781 DISALLOW_COPY_AND_ASSIGN(HGraph);
782 };
783
784 class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
785 public:
HLoopInformation(HBasicBlock * header,HGraph * graph)786 HLoopInformation(HBasicBlock* header, HGraph* graph)
787 : header_(header),
788 suspend_check_(nullptr),
789 irreducible_(false),
790 contains_irreducible_loop_(false),
791 back_edges_(graph->GetAllocator()->Adapter(kArenaAllocLoopInfoBackEdges)),
792 // Make bit vector growable, as the number of blocks may change.
793 blocks_(graph->GetAllocator(),
794 graph->GetBlocks().size(),
795 true,
796 kArenaAllocLoopInfoBackEdges) {
797 back_edges_.reserve(kDefaultNumberOfBackEdges);
798 }
799
IsIrreducible()800 bool IsIrreducible() const { return irreducible_; }
ContainsIrreducibleLoop()801 bool ContainsIrreducibleLoop() const { return contains_irreducible_loop_; }
802
803 void Dump(std::ostream& os);
804
GetHeader()805 HBasicBlock* GetHeader() const {
806 return header_;
807 }
808
SetHeader(HBasicBlock * block)809 void SetHeader(HBasicBlock* block) {
810 header_ = block;
811 }
812
GetSuspendCheck()813 HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
SetSuspendCheck(HSuspendCheck * check)814 void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
HasSuspendCheck()815 bool HasSuspendCheck() const { return suspend_check_ != nullptr; }
816
AddBackEdge(HBasicBlock * back_edge)817 void AddBackEdge(HBasicBlock* back_edge) {
818 back_edges_.push_back(back_edge);
819 }
820
RemoveBackEdge(HBasicBlock * back_edge)821 void RemoveBackEdge(HBasicBlock* back_edge) {
822 RemoveElement(back_edges_, back_edge);
823 }
824
IsBackEdge(const HBasicBlock & block)825 bool IsBackEdge(const HBasicBlock& block) const {
826 return ContainsElement(back_edges_, &block);
827 }
828
NumberOfBackEdges()829 size_t NumberOfBackEdges() const {
830 return back_edges_.size();
831 }
832
833 HBasicBlock* GetPreHeader() const;
834
GetBackEdges()835 const ArenaVector<HBasicBlock*>& GetBackEdges() const {
836 return back_edges_;
837 }
838
839 // Returns the lifetime position of the back edge that has the
840 // greatest lifetime position.
841 size_t GetLifetimeEnd() const;
842
ReplaceBackEdge(HBasicBlock * existing,HBasicBlock * new_back_edge)843 void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
844 ReplaceElement(back_edges_, existing, new_back_edge);
845 }
846
847 // Finds blocks that are part of this loop.
848 void Populate();
849
850 // Updates blocks population of the loop and all of its outer' ones recursively after the
851 // population of the inner loop is updated.
852 void PopulateInnerLoopUpwards(HLoopInformation* inner_loop);
853
854 // Returns whether this loop information contains `block`.
855 // Note that this loop information *must* be populated before entering this function.
856 bool Contains(const HBasicBlock& block) const;
857
858 // Returns whether this loop information is an inner loop of `other`.
859 // Note that `other` *must* be populated before entering this function.
860 bool IsIn(const HLoopInformation& other) const;
861
862 // Returns true if instruction is not defined within this loop.
863 bool IsDefinedOutOfTheLoop(HInstruction* instruction) const;
864
GetBlocks()865 const ArenaBitVector& GetBlocks() const { return blocks_; }
866
867 void Add(HBasicBlock* block);
868 void Remove(HBasicBlock* block);
869
ClearAllBlocks()870 void ClearAllBlocks() {
871 blocks_.ClearAllBits();
872 }
873
874 bool HasBackEdgeNotDominatedByHeader() const;
875
IsPopulated()876 bool IsPopulated() const {
877 return blocks_.GetHighestBitSet() != -1;
878 }
879
880 bool DominatesAllBackEdges(HBasicBlock* block);
881
882 bool HasExitEdge() const;
883
884 // Resets back edge and blocks-in-loop data.
ResetBasicBlockData()885 void ResetBasicBlockData() {
886 back_edges_.clear();
887 ClearAllBlocks();
888 }
889
890 private:
891 // Internal recursive implementation of `Populate`.
892 void PopulateRecursive(HBasicBlock* block);
893 void PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized);
894
895 HBasicBlock* header_;
896 HSuspendCheck* suspend_check_;
897 bool irreducible_;
898 bool contains_irreducible_loop_;
899 ArenaVector<HBasicBlock*> back_edges_;
900 ArenaBitVector blocks_;
901
902 DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
903 };
904
905 // Stores try/catch information for basic blocks.
906 // Note that HGraph is constructed so that catch blocks cannot simultaneously
907 // be try blocks.
908 class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
909 public:
910 // Try block information constructor.
TryCatchInformation(const HTryBoundary & try_entry)911 explicit TryCatchInformation(const HTryBoundary& try_entry)
912 : try_entry_(&try_entry),
913 catch_dex_file_(nullptr),
914 catch_type_index_(dex::TypeIndex::Invalid()) {
915 DCHECK(try_entry_ != nullptr);
916 }
917
918 // Catch block information constructor.
TryCatchInformation(dex::TypeIndex catch_type_index,const DexFile & dex_file)919 TryCatchInformation(dex::TypeIndex catch_type_index, const DexFile& dex_file)
920 : try_entry_(nullptr),
921 catch_dex_file_(&dex_file),
922 catch_type_index_(catch_type_index) {}
923
IsTryBlock()924 bool IsTryBlock() const { return try_entry_ != nullptr; }
925
GetTryEntry()926 const HTryBoundary& GetTryEntry() const {
927 DCHECK(IsTryBlock());
928 return *try_entry_;
929 }
930
IsCatchBlock()931 bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }
932
IsValidTypeIndex()933 bool IsValidTypeIndex() const {
934 DCHECK(IsCatchBlock());
935 return catch_type_index_.IsValid();
936 }
937
GetCatchTypeIndex()938 dex::TypeIndex GetCatchTypeIndex() const {
939 DCHECK(IsCatchBlock());
940 return catch_type_index_;
941 }
942
GetCatchDexFile()943 const DexFile& GetCatchDexFile() const {
944 DCHECK(IsCatchBlock());
945 return *catch_dex_file_;
946 }
947
SetInvalidTypeIndex()948 void SetInvalidTypeIndex() {
949 catch_type_index_ = dex::TypeIndex::Invalid();
950 }
951
952 private:
953 // One of possibly several TryBoundary instructions entering the block's try.
954 // Only set for try blocks.
955 const HTryBoundary* try_entry_;
956
957 // Exception type information. Only set for catch blocks.
958 const DexFile* catch_dex_file_;
959 dex::TypeIndex catch_type_index_;
960 };
961
// Sentinel lifetime position: -1 converts to the maximum size_t value.
static constexpr size_t kNoLifetime = -1;
// Initial block id, replaced when the graph assigns a real id.
static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);
964
965 // A block in a method. Contains the list of instructions represented
966 // as a double linked list. Each block knows its predecessors and
967 // successors.
968
class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
 public:
  explicit HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
      : graph_(graph),
        predecessors_(graph->GetAllocator()->Adapter(kArenaAllocPredecessors)),
        successors_(graph->GetAllocator()->Adapter(kArenaAllocSuccessors)),
        loop_information_(nullptr),
        dominator_(nullptr),
        dominated_blocks_(graph->GetAllocator()->Adapter(kArenaAllocDominated)),
        block_id_(kInvalidBlockId),
        dex_pc_(dex_pc),
        lifetime_start_(kNoLifetime),
        lifetime_end_(kNoLifetime),
        try_catch_information_(nullptr) {
    predecessors_.reserve(kDefaultNumberOfPredecessors);
    successors_.reserve(kDefaultNumberOfSuccessors);
    dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks);
  }

  const ArenaVector<HBasicBlock*>& GetPredecessors() const {
    return predecessors_;
  }

  const ArenaVector<HBasicBlock*>& GetSuccessors() const {
    return successors_;
  }

  ArrayRef<HBasicBlock* const> GetNormalSuccessors() const;
  ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const;

  // Whether `block` appears in the successor list at index `start_from` or later.
  bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
    return ContainsElement(successors_, block, start_from);
  }

  const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const {
    return dominated_blocks_;
  }

  bool IsEntryBlock() const {
    return graph_->GetEntryBlock() == this;
  }

  bool IsExitBlock() const {
    return graph_->GetExitBlock() == this;
  }

  bool IsSingleGoto() const;
  bool IsSingleReturn() const;
  bool IsSingleReturnOrReturnVoidAllowingPhis() const;
  bool IsSingleTryBoundary() const;

  // Returns true if this block emits nothing but a jump.
  bool IsSingleJump() const {
    HLoopInformation* loop_info = GetLoopInformation();
    return (IsSingleGoto() || IsSingleTryBoundary())
           // Back edges generate a suspend check.
           && (loop_info == nullptr || !loop_info->IsBackEdge(*this));
  }

  // Registers `back_edge` as a back edge to this block, creating loop
  // information (with this block as header) if it does not exist yet.
  void AddBackEdge(HBasicBlock* back_edge) {
    if (loop_information_ == nullptr) {
      loop_information_ = new (graph_->GetAllocator()) HLoopInformation(this, graph_);
    }
    DCHECK_EQ(loop_information_->GetHeader(), this);
    loop_information_->AddBackEdge(back_edge);
  }

  // Registers a back edge; if the block was not a loop header before the call associates a newly
  // created loop info with it.
  //
  // Used in SuperblockCloner to preserve LoopInformation object instead of reseting loop
  // info for all blocks during back edges recalculation.
  void AddBackEdgeWhileUpdating(HBasicBlock* back_edge) {
    if (loop_information_ == nullptr || loop_information_->GetHeader() != this) {
      loop_information_ = new (graph_->GetAllocator()) HLoopInformation(this, graph_);
    }
    loop_information_->AddBackEdge(back_edge);
  }

  HGraph* GetGraph() const { return graph_; }
  void SetGraph(HGraph* graph) { graph_ = graph; }

  uint32_t GetBlockId() const { return block_id_; }
  // NOTE(review): takes an int but stores into a uint32_t — callers appear
  // to pass non-negative ids; confirm before passing anything else.
  void SetBlockId(int id) { block_id_ = id; }
  uint32_t GetDexPc() const { return dex_pc_; }

  HBasicBlock* GetDominator() const { return dominator_; }
  void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; }
  void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); }

  void RemoveDominatedBlock(HBasicBlock* block) {
    RemoveElement(dominated_blocks_, block);
  }

  void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) {
    ReplaceElement(dominated_blocks_, existing, new_block);
  }

  void ClearDominanceInformation();

  // Number of back edges if this block is a loop header, 0 otherwise.
  // NOTE(review): narrows the size_t count to int.
  int NumberOfBackEdges() const {
    return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0;
  }

  HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
  HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
  const HInstructionList& GetInstructions() const { return instructions_; }
  HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
  HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
  const HInstructionList& GetPhis() const { return phis_; }

  HInstruction* GetFirstInstructionDisregardMoves() const;

  // Appends `block` as a successor and registers `this` as its predecessor.
  void AddSuccessor(HBasicBlock* block) {
    successors_.push_back(block);
    block->predecessors_.push_back(this);
  }

  // Redirects the edge `this`->`existing` to `this`->`new_block`, keeping the
  // successor index. The index is computed before unlinking `existing`.
  void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t successor_index = GetSuccessorIndexOf(existing);
    existing->RemovePredecessor(this);
    new_block->predecessors_.push_back(this);
    successors_[successor_index] = new_block;
  }

  // Redirects the edge `existing`->`this` to `new_block`->`this`, keeping the
  // predecessor index. The index is computed before unlinking `existing`.
  void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t predecessor_index = GetPredecessorIndexOf(existing);
    existing->RemoveSuccessor(this);
    new_block->successors_.push_back(this);
    predecessors_[predecessor_index] = new_block;
  }

  // Insert `this` between `predecessor` and `successor. This method
  // preserves the indicies, and will update the first edge found between
  // `predecessor` and `successor`.
  void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
    size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
    size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
    successor->predecessors_[predecessor_index] = this;
    predecessor->successors_[successor_index] = this;
    successors_.push_back(successor);
    predecessors_.push_back(predecessor);
  }

  // Removes `block` from the predecessor list only; `block`'s successor list
  // is left untouched.
  void RemovePredecessor(HBasicBlock* block) {
    predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
  }

  // Removes `block` from the successor list only; `block`'s predecessor list
  // is left untouched.
  void RemoveSuccessor(HBasicBlock* block) {
    successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
  }

  void ClearAllPredecessors() {
    predecessors_.clear();
  }

  // Appends `block` as a predecessor and registers `this` as its successor.
  void AddPredecessor(HBasicBlock* block) {
    predecessors_.push_back(block);
    block->successors_.push_back(this);
  }

  // Only valid for blocks with exactly two predecessors.
  void SwapPredecessors() {
    DCHECK_EQ(predecessors_.size(), 2u);
    std::swap(predecessors_[0], predecessors_[1]);
  }

  // Only valid for blocks with exactly two successors.
  void SwapSuccessors() {
    DCHECK_EQ(successors_.size(), 2u);
    std::swap(successors_[0], successors_[1]);
  }

  size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
    return IndexOfElement(predecessors_, predecessor);
  }

  size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
    return IndexOfElement(successors_, successor);
  }

  HBasicBlock* GetSinglePredecessor() const {
    DCHECK_EQ(GetPredecessors().size(), 1u);
    return GetPredecessors()[0];
  }

  HBasicBlock* GetSingleSuccessor() const {
    DCHECK_EQ(GetSuccessors().size(), 1u);
    return GetSuccessors()[0];
  }

  // Returns whether the first occurrence of `predecessor` in the list of
  // predecessors is at index `idx`.
  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
    DCHECK_EQ(GetPredecessors()[idx], predecessor);
    return GetPredecessorIndexOf(predecessor) == idx;
  }

  // Create a new block between this block and its predecessors. The new block
  // is added to the graph, all predecessor edges are relinked to it and an edge
  // is created to `this`. Returns the new empty block. Reverse post order or
  // loop and try/catch information are not updated.
  HBasicBlock* CreateImmediateDominator();

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created, latter block. Note that this method will add the block to the
  // graph, create a Goto at the end of the former block and will create an edge
  // between the blocks. It will not, however, update the reverse post order or
  // loop and try/catch information.
  HBasicBlock* SplitBefore(HInstruction* cursor);

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created block. Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitBeforeForInlining(HInstruction* cursor);

  // Similar to `SplitBeforeForInlining` but does it after `cursor`.
  HBasicBlock* SplitAfterForInlining(HInstruction* cursor);

  // Merge `other` at the end of `this`. Successors and dominated blocks of
  // `other` are changed to be successors and dominated blocks of `this`. Note
  // that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void MergeWithInlined(HBasicBlock* other);

  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
  // of `this` are moved to `other`.
  // Note that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void ReplaceWith(HBasicBlock* other);

  // Merges the instructions of `other` at the end of `this`.
  void MergeInstructionsWith(HBasicBlock* other);

  // Merge `other` at the end of `this`. This method updates loops, reverse post
  // order, links to predecessors, successors, dominators and deletes the block
  // from the graph. The two blocks must be successive, i.e. `this` the only
  // predecessor of `other` and vice versa.
  void MergeWith(HBasicBlock* other);

  // Disconnects `this` from all its predecessors, successors and dominator,
  // removes it from all loops it is included in and eventually from the graph.
  // The block must not dominate any other block. Predecessors and successors
  // are safely updated.
  void DisconnectAndDelete();

  void AddInstruction(HInstruction* instruction);
  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
  // Replace phi `initial` with `replacement` within this block.
  void ReplaceAndRemovePhiWith(HPhi* initial, HPhi* replacement);
  // Replace instruction `initial` with `replacement` within this block.
  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                       HInstruction* replacement);
  void AddPhi(HPhi* phi);
  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
  // RemoveInstruction and RemovePhi delete a given instruction from the respective
  // instruction list. With 'ensure_safety' set to true, it verifies that the
  // instruction is not in use and removes it from the use lists of its inputs.
  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
  void RemovePhi(HPhi* phi, bool ensure_safety = true);
  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);

  // A block is a loop header if it owns loop information whose header is itself.
  bool IsLoopHeader() const {
    return IsInLoop() && (loop_information_->GetHeader() == this);
  }

  bool IsLoopPreHeaderFirstPredecessor() const {
    DCHECK(IsLoopHeader());
    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
  }

  bool IsFirstPredecessorBackEdge() const {
    DCHECK(IsLoopHeader());
    return GetLoopInformation()->IsBackEdge(*GetPredecessors()[0]);
  }

  // Innermost loop containing this block, or null if not in a loop.
  HLoopInformation* GetLoopInformation() const {
    return loop_information_;
  }

  // Set the loop_information_ on this block. Overrides the current
  // loop_information if it is an outer loop of the passed loop information.
  // Note that this method is called while creating the loop information.
  void SetInLoop(HLoopInformation* info) {
    if (IsLoopHeader()) {
      // Nothing to do. This just means `info` is an outer loop.
    } else if (!IsInLoop()) {
      loop_information_ = info;
    } else if (loop_information_->Contains(*info->GetHeader())) {
      // Block is currently part of an outer loop. Make it part of this inner loop.
      // Note that a non loop header having a loop information means this loop information
      // has already been populated
      loop_information_ = info;
    } else {
      // Block is part of an inner loop. Do not update the loop information.
      // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
      // at this point, because this method is being called while populating `info`.
    }
  }

  // Raw update of the loop information.
  void SetLoopInformation(HLoopInformation* info) {
    loop_information_ = info;
  }

  bool IsInLoop() const { return loop_information_ != nullptr; }

  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }

  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
    try_catch_information_ = try_catch_information;
  }

  bool IsTryBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
  }

  bool IsCatchBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
  }

  // Returns the try entry that this block's successors should have. They will
  // be in the same try, unless the block ends in a try boundary. In that case,
  // the appropriate try entry will be returned.
  const HTryBoundary* ComputeTryEntryOfSuccessors() const;

  bool HasThrowingInstructions() const;

  // Returns whether this block dominates the blocked passed as parameter.
  bool Dominates(HBasicBlock* block) const;

  // Lifetime positions, set by liveness analysis (kNoLifetime until then).
  size_t GetLifetimeStart() const { return lifetime_start_; }
  size_t GetLifetimeEnd() const { return lifetime_end_; }

  void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
  void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }

  bool EndsWithControlFlowInstruction() const;
  bool EndsWithReturn() const;
  bool EndsWithIf() const;
  bool EndsWithTryBoundary() const;
  bool HasSinglePhi() const;

 private:
  HGraph* graph_;
  ArenaVector<HBasicBlock*> predecessors_;
  ArenaVector<HBasicBlock*> successors_;
  HInstructionList instructions_;
  HInstructionList phis_;
  HLoopInformation* loop_information_;
  HBasicBlock* dominator_;
  ArenaVector<HBasicBlock*> dominated_blocks_;
  uint32_t block_id_;
  // The dex program counter of the first instruction of this block.
  const uint32_t dex_pc_;
  size_t lifetime_start_;
  size_t lifetime_end_;
  TryCatchInformation* try_catch_information_;

  friend class HGraph;
  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
};
1337
1338 // Iterates over the LoopInformation of all loops which contain 'block'
1339 // from the innermost to the outermost.
1340 class HLoopInformationOutwardIterator : public ValueObject {
1341 public:
HLoopInformationOutwardIterator(const HBasicBlock & block)1342 explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
1343 : current_(block.GetLoopInformation()) {}
1344
Done()1345 bool Done() const { return current_ == nullptr; }
1346
Advance()1347 void Advance() {
1348 DCHECK(!Done());
1349 current_ = current_->GetPreHeader()->GetLoopInformation();
1350 }
1351
Current()1352 HLoopInformation* Current() const {
1353 DCHECK(!Done());
1354 return current_;
1355 }
1356
1357 private:
1358 HLoopInformation* current_;
1359
1360 DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
1361 };
1362
// X-macro listing every concrete instruction kind common to all architectures.
// `M` is expanded as M(InstructionKind, BaseKind); the lists below are combined
// by FOR_EACH_CONCRETE_INSTRUCTION / FOR_EACH_INSTRUCTION.
#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                         \
  M(Above, Condition)                                                   \
  M(AboveOrEqual, Condition)                                            \
  M(Abs, UnaryOperation)                                                \
  M(Add, BinaryOperation)                                               \
  M(And, BinaryOperation)                                               \
  M(ArrayGet, Instruction)                                              \
  M(ArrayLength, Instruction)                                           \
  M(ArraySet, Instruction)                                              \
  M(Below, Condition)                                                   \
  M(BelowOrEqual, Condition)                                            \
  M(BooleanNot, UnaryOperation)                                         \
  M(BoundsCheck, Instruction)                                           \
  M(BoundType, Instruction)                                             \
  M(CheckCast, Instruction)                                             \
  M(ClassTableGet, Instruction)                                         \
  M(ClearException, Instruction)                                        \
  M(ClinitCheck, Instruction)                                           \
  M(Compare, BinaryOperation)                                           \
  M(ConstructorFence, Instruction)                                      \
  M(CurrentMethod, Instruction)                                         \
  M(ShouldDeoptimizeFlag, Instruction)                                  \
  M(Deoptimize, Instruction)                                            \
  M(Div, BinaryOperation)                                               \
  M(DivZeroCheck, Instruction)                                          \
  M(DoubleConstant, Constant)                                           \
  M(Equal, Condition)                                                   \
  M(Exit, Instruction)                                                  \
  M(FloatConstant, Constant)                                            \
  M(Goto, Instruction)                                                  \
  M(GreaterThan, Condition)                                             \
  M(GreaterThanOrEqual, Condition)                                      \
  M(If, Instruction)                                                    \
  M(InstanceFieldGet, Instruction)                                      \
  M(InstanceFieldSet, Instruction)                                      \
  M(InstanceOf, Instruction)                                            \
  M(IntConstant, Constant)                                              \
  M(IntermediateAddress, Instruction)                                   \
  M(InvokeUnresolved, Invoke)                                           \
  M(InvokeInterface, Invoke)                                            \
  M(InvokeStaticOrDirect, Invoke)                                       \
  M(InvokeVirtual, Invoke)                                              \
  M(InvokePolymorphic, Invoke)                                          \
  M(InvokeCustom, Invoke)                                               \
  M(LessThan, Condition)                                                \
  M(LessThanOrEqual, Condition)                                         \
  M(LoadClass, Instruction)                                             \
  M(LoadException, Instruction)                                         \
  M(LoadMethodHandle, Instruction)                                      \
  M(LoadMethodType, Instruction)                                        \
  M(LoadString, Instruction)                                            \
  M(LongConstant, Constant)                                             \
  M(Max, Instruction)                                                   \
  M(MemoryBarrier, Instruction)                                         \
  M(Min, BinaryOperation)                                               \
  M(MonitorOperation, Instruction)                                      \
  M(Mul, BinaryOperation)                                               \
  M(NativeDebugInfo, Instruction)                                       \
  M(Neg, UnaryOperation)                                                \
  M(NewArray, Instruction)                                              \
  M(NewInstance, Instruction)                                           \
  M(Not, UnaryOperation)                                                \
  M(NotEqual, Condition)                                                \
  M(NullConstant, Instruction)                                          \
  M(NullCheck, Instruction)                                             \
  M(Or, BinaryOperation)                                                \
  M(PackedSwitch, Instruction)                                          \
  M(ParallelMove, Instruction)                                          \
  M(ParameterValue, Instruction)                                        \
  M(Phi, Instruction)                                                   \
  M(Rem, BinaryOperation)                                               \
  M(Return, Instruction)                                                \
  M(ReturnVoid, Instruction)                                            \
  M(Ror, BinaryOperation)                                               \
  M(Shl, BinaryOperation)                                               \
  M(Shr, BinaryOperation)                                               \
  M(StaticFieldGet, Instruction)                                        \
  M(StaticFieldSet, Instruction)                                        \
  M(UnresolvedInstanceFieldGet, Instruction)                            \
  M(UnresolvedInstanceFieldSet, Instruction)                            \
  M(UnresolvedStaticFieldGet, Instruction)                              \
  M(UnresolvedStaticFieldSet, Instruction)                              \
  M(Select, Instruction)                                                \
  M(Sub, BinaryOperation)                                               \
  M(SuspendCheck, Instruction)                                          \
  M(Throw, Instruction)                                                 \
  M(TryBoundary, Instruction)                                           \
  M(TypeConversion, Instruction)                                        \
  M(UShr, BinaryOperation)                                              \
  M(Xor, BinaryOperation)                                               \
  M(VecReplicateScalar, VecUnaryOperation)                              \
  M(VecExtractScalar, VecUnaryOperation)                                \
  M(VecReduce, VecUnaryOperation)                                       \
  M(VecCnv, VecUnaryOperation)                                          \
  M(VecNeg, VecUnaryOperation)                                          \
  M(VecAbs, VecUnaryOperation)                                          \
  M(VecNot, VecUnaryOperation)                                          \
  M(VecAdd, VecBinaryOperation)                                         \
  M(VecHalvingAdd, VecBinaryOperation)                                  \
  M(VecSub, VecBinaryOperation)                                         \
  M(VecMul, VecBinaryOperation)                                         \
  M(VecDiv, VecBinaryOperation)                                         \
  M(VecMin, VecBinaryOperation)                                         \
  M(VecMax, VecBinaryOperation)                                         \
  M(VecAnd, VecBinaryOperation)                                         \
  M(VecAndNot, VecBinaryOperation)                                      \
  M(VecOr, VecBinaryOperation)                                          \
  M(VecXor, VecBinaryOperation)                                         \
  M(VecSaturationAdd, VecBinaryOperation)                               \
  M(VecSaturationSub, VecBinaryOperation)                               \
  M(VecShl, VecBinaryOperation)                                         \
  M(VecShr, VecBinaryOperation)                                         \
  M(VecUShr, VecBinaryOperation)                                        \
  M(VecSetScalars, VecOperation)                                        \
  M(VecMultiplyAccumulate, VecOperation)                                \
  M(VecSADAccumulate, VecOperation)                                     \
  M(VecDotProd, VecOperation)                                           \
  M(VecLoad, VecMemoryOperation)                                        \
  M(VecStore, VecMemoryOperation)                                       \

/*
 * Instructions, shared across several (not all) architectures.
 */
#if !defined(ART_ENABLE_CODEGEN_arm) && !defined(ART_ENABLE_CODEGEN_arm64)
#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                         \
  M(BitwiseNegatedRight, Instruction)                                   \
  M(DataProcWithShifterOp, Instruction)                                 \
  M(MultiplyAccumulate, Instruction)                                    \
  M(IntermediateAddressIndex, Instruction)
#endif

// Per-architecture instruction lists; empty for architectures with no
// architecture-specific instructions beyond the shared ones.
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)

#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)

#ifndef ART_ENABLE_CODEGEN_mips
#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                           \
  M(MipsComputeBaseMethodAddress, Instruction)                          \
  M(MipsPackedSwitch, Instruction)                                      \
  M(IntermediateArrayAddressIndex, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)

#ifndef ART_ENABLE_CODEGEN_x86
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                            \
  M(X86ComputeBaseMethodAddress, Instruction)                           \
  M(X86LoadFromConstantTable, Instruction)                              \
  M(X86FPNeg, Instruction)                                              \
  M(X86PackedSwitch, Instruction)
#endif

#if defined(ART_ENABLE_CODEGEN_x86) || defined(ART_ENABLE_CODEGEN_x86_64)
#define FOR_EACH_CONCRETE_INSTRUCTION_X86_COMMON(M)                     \
  M(X86AndNot, Instruction)                                             \
  M(X86MaskOrResetLeastSetBit, Instruction)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_X86_COMMON(M)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

// All concrete instructions for the currently enabled set of architectures.
#define FOR_EACH_CONCRETE_INSTRUCTION(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                                 \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_X86_COMMON(M)

// Abstract base classes of the instruction hierarchy.
#define FOR_EACH_ABSTRACT_INSTRUCTION(M)                                \
  M(Condition, BinaryOperation)                                         \
  M(Constant, Instruction)                                              \
  M(UnaryOperation, Instruction)                                        \
  M(BinaryOperation, Instruction)                                       \
  M(Invoke, Instruction)                                                \
  M(VecOperation, Instruction)                                          \
  M(VecUnaryOperation, VecOperation)                                    \
  M(VecBinaryOperation, VecOperation)                                   \
  M(VecMemoryOperation, VecOperation)

#define FOR_EACH_INSTRUCTION(M)                                         \
  FOR_EACH_CONCRETE_INSTRUCTION(M)                                      \
  FOR_EACH_ABSTRACT_INSTRUCTION(M)

// Forward-declare all H<Instruction> classes in one shot.
#define FORWARD_DECLARATION(type, super) class H##type;
FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
#undef FORWARD_DECLARATION
1560
// Boilerplate expanded inside every concrete instruction class: deleted copy
// assignment, DebugName(), Clone() (which requires IsClonable()), and the
// visitor Accept() hook. Leaves the class in a `public:` section.
#define DECLARE_INSTRUCTION(type)                                         \
  private:                                                                \
  H##type& operator=(const H##type&) = delete;                            \
  public:                                                                 \
  const char* DebugName() const override { return #type; }                \
  HInstruction* Clone(ArenaAllocator* arena) const override {             \
    DCHECK(IsClonable());                                                 \
    return new (arena) H##type(*this->As##type());                        \
  }                                                                       \
  void Accept(HGraphVisitor* visitor) override

// Variant for abstract instruction classes: only deletes copy assignment.
#define DECLARE_ABSTRACT_INSTRUCTION(type)                              \
  private:                                                              \
  H##type& operator=(const H##type&) = delete;                          \
  public:

// Declares the defaulted copy constructor used by Clone().
#define DEFAULT_COPY_CONSTRUCTOR(type)                                  \
  explicit H##type(const H##type& other) = default;
1579
template <typename T>
// A single entry in an instruction's intrusive use list. It records the user
// (`T` is HInstruction* for normal uses, HEnvironment* for environment uses)
// and the index of the user's input record that refers back to this use.
// Nodes are arena-allocated and linked through IntrusiveForwardListNode.
// The constructor is private: only HInstruction (a friend) creates nodes.
class HUseListNode : public ArenaObject<kArenaAllocUseListNode>,
                     public IntrusiveForwardListNode<HUseListNode<T>> {
 public:
  // Get the instruction which has this use as one of the inputs.
  T GetUser() const { return user_; }
  // Get the position of the input record that this use corresponds to.
  size_t GetIndex() const { return index_; }
  // Set the position of the input record that this use corresponds to.
  void SetIndex(size_t index) { index_ = index; }

 private:
  HUseListNode(T user, size_t index)
      : user_(user), index_(index) {}

  // The user is fixed for the lifetime of the node; the index may be
  // updated (via SetIndex) when the user's inputs are renumbered.
  T const user_;
  size_t index_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
};
1602
1603 template <typename T>
1604 using HUseList = IntrusiveForwardList<HUseListNode<T>>;
1605
1606 // This class is used by HEnvironment and HInstruction classes to record the
1607 // instructions they use and pointers to the corresponding HUseListNodes kept
1608 // by the used instructions.
1609 template <typename T>
1610 class HUserRecord : public ValueObject {
1611 public:
HUserRecord()1612 HUserRecord() : instruction_(nullptr), before_use_node_() {}
HUserRecord(HInstruction * instruction)1613 explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), before_use_node_() {}
1614
HUserRecord(const HUserRecord<T> & old_record,typename HUseList<T>::iterator before_use_node)1615 HUserRecord(const HUserRecord<T>& old_record, typename HUseList<T>::iterator before_use_node)
1616 : HUserRecord(old_record.instruction_, before_use_node) {}
HUserRecord(HInstruction * instruction,typename HUseList<T>::iterator before_use_node)1617 HUserRecord(HInstruction* instruction, typename HUseList<T>::iterator before_use_node)
1618 : instruction_(instruction), before_use_node_(before_use_node) {
1619 DCHECK(instruction_ != nullptr);
1620 }
1621
GetInstruction()1622 HInstruction* GetInstruction() const { return instruction_; }
GetBeforeUseNode()1623 typename HUseList<T>::iterator GetBeforeUseNode() const { return before_use_node_; }
GetUseNode()1624 typename HUseList<T>::iterator GetUseNode() const { return ++GetBeforeUseNode(); }
1625
1626 private:
1627 // Instruction used by the user.
1628 HInstruction* instruction_;
1629
1630 // Iterator before the corresponding entry in the use list kept by 'instruction_'.
1631 typename HUseList<T>::iterator before_use_node_;
1632 };
1633
1634 // Helper class that extracts the input instruction from HUserRecord<HInstruction*>.
1635 // This is used for HInstruction::GetInputs() to return a container wrapper providing
1636 // HInstruction* values even though the underlying container has HUserRecord<>s.
1637 struct HInputExtractor {
operatorHInputExtractor1638 HInstruction* operator()(HUserRecord<HInstruction*>& record) const {
1639 return record.GetInstruction();
1640 }
operatorHInputExtractor1641 const HInstruction* operator()(const HUserRecord<HInstruction*>& record) const {
1642 return record.GetInstruction();
1643 }
1644 };
1645
1646 using HInputsRef = TransformArrayRef<HUserRecord<HInstruction*>, HInputExtractor>;
1647 using HConstInputsRef = TransformArrayRef<const HUserRecord<HInstruction*>, HInputExtractor>;
1648
1649 /**
1650 * Side-effects representation.
1651 *
1652 * For write/read dependences on fields/arrays, the dependence analysis uses
1653 * type disambiguation (e.g. a float field write cannot modify the value of an
1654 * integer field read) and the access type (e.g. a reference array write cannot
1655 * modify the value of a reference field read [although it may modify the
1656 * reference fetch prior to reading the field, which is represented by its own
1657 * write/read dependence]). The analysis makes conservative points-to
1658 * assumptions on reference types (e.g. two same typed arrays are assumed to be
1659 * the same, and any reference read depends on any reference read without
1660 * further regard of its type).
1661 *
1662 * kDependsOnGCBit is defined in the following way: instructions with kDependsOnGCBit must not be
1663 * alive across the point where garbage collection might happen.
1664 *
1665 * Note: Instructions with kCanTriggerGCBit do not depend on each other.
1666 *
1667 * kCanTriggerGCBit must be used for instructions for which GC might happen on the path across
1668 * those instructions from the compiler perspective (between this instruction and the next one
1669 * in the IR).
1670 *
1671 * Note: Instructions which can cause GC only on a fatal slow path do not need
1672 * kCanTriggerGCBit as the execution never returns to the instruction next to the exceptional
1673 * one. However the execution may return to compiled code if there is a catch block in the
1674 * current method; for this purpose the TryBoundary exit instruction has kCanTriggerGCBit
1675 * set.
1676 *
1677 * The internal representation uses 38-bit and is described in the table below.
1678 * The first line indicates the side effect, and for field/array accesses the
1679 * second line indicates the type of the access (in the order of the
1680 * DataType::Type enum).
1681 * The two numbered lines below indicate the bit position in the bitfield (read
1682 * vertically).
1683 *
1684 * |Depends on GC|ARRAY-R |FIELD-R |Can trigger GC|ARRAY-W |FIELD-W |
1685 * +-------------+---------+---------+--------------+---------+---------+
1686 * | |DFJISCBZL|DFJISCBZL| |DFJISCBZL|DFJISCBZL|
1687 * | 3 |333333322|222222221| 1 |111111110|000000000|
1688 * | 7 |654321098|765432109| 8 |765432109|876543210|
1689 *
1690 * Note that, to ease the implementation, 'changes' bits are least significant
1691 * bits, while 'dependency' bits are most significant bits.
1692 */
// Value type wrapping a 38-bit flag set (see the layout table in the comment
// above): 'change' flags occupy the low bits, the matching 'depend on' flags
// occupy the high bits.
class SideEffects : public ValueObject {
 public:
  SideEffects() : flags_(0) {}

  // No side effects and no dependencies.
  static SideEffects None() {
    return SideEffects(0);
  }

  // Every 'change' and every 'depend on' bit set.
  static SideEffects All() {
    return SideEffects(kAllChangeBits | kAllDependOnBits);
  }

  static SideEffects AllChanges() {
    return SideEffects(kAllChangeBits);
  }

  static SideEffects AllDependencies() {
    return SideEffects(kAllDependOnBits);
  }

  // All field/array writes and reads plus the 'can trigger GC' bit;
  // excludes only the 'depends on GC' bit.
  static SideEffects AllExceptGCDependency() {
    return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
  }

  static SideEffects AllWritesAndReads() {
    return SideEffects(kAllWrites | kAllReads);
  }

  static SideEffects AllWrites() {
    return SideEffects(kAllWrites);
  }

  static SideEffects AllReads() {
    return SideEffects(kAllReads);
  }

  // Volatile accesses are modeled conservatively as all writes and reads.
  static SideEffects FieldWriteOfType(DataType::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlag(type, kFieldWriteOffset));
  }

  static SideEffects ArrayWriteOfType(DataType::Type type) {
    return SideEffects(TypeFlag(type, kArrayWriteOffset));
  }

  static SideEffects FieldReadOfType(DataType::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlag(type, kFieldReadOffset));
  }

  static SideEffects ArrayReadOfType(DataType::Type type) {
    return SideEffects(TypeFlag(type, kArrayReadOffset));
  }

  // Returns whether GC might happen across this instruction from the compiler perspective so
  // the next instruction in the IR would see that.
  //
  // See the SideEffect class comments.
  static SideEffects CanTriggerGC() {
    return SideEffects(1ULL << kCanTriggerGCBit);
  }

  // Returns whether the instruction must not be alive across a GC point.
  //
  // See the SideEffect class comments.
  static SideEffects DependsOnGC() {
    return SideEffects(1ULL << kDependsOnGCBit);
  }

  // Combines the side-effects of this and the other.
  SideEffects Union(SideEffects other) const {
    return SideEffects(flags_ | other.flags_);
  }

  // Returns this set with all of `other`'s flags removed.
  SideEffects Exclusion(SideEffects other) const {
    return SideEffects(flags_ & ~other.flags_);
  }

  // In-place union with `other`.
  void Add(SideEffects other) {
    flags_ |= other.flags_;
  }

  // Returns true if `other`'s flags are a subset of this set's flags.
  bool Includes(SideEffects other) const {
    return (other.flags_ & flags_) == other.flags_;
  }

  // Returns true if any 'change' bit is set.
  bool HasSideEffects() const {
    return (flags_ & kAllChangeBits);
  }

  // Returns true if any 'depend on' bit is set.
  bool HasDependencies() const {
    return (flags_ & kAllDependOnBits);
  }

  // Returns true if there are no side effects or dependencies.
  bool DoesNothing() const {
    return flags_ == 0;
  }

  // Returns true if something is written.
  bool DoesAnyWrite() const {
    return (flags_ & kAllWrites);
  }

  // Returns true if something is read.
  bool DoesAnyRead() const {
    return (flags_ & kAllReads);
  }

  // Returns true if potentially everything is written and read
  // (every type and every kind of access).
  bool DoesAllReadWrite() const {
    return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
  }

  bool DoesAll() const {
    return flags_ == (kAllChangeBits | kAllDependOnBits);
  }

  // Returns true if `this` may read something written by `other`.
  bool MayDependOn(SideEffects other) const {
    // Shift the 'depend on' bits down so they line up with `other`'s 'change' bits.
    const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
    return (other.flags_ & depends_on_flags);
  }

  // Returns string representation of flags (for debugging only).
  // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
  std::string ToString() const {
    std::string flags = "|";
    // Iterate from the most significant bit (bit 37) down to bit 0.
    for (int s = kLastBit; s >= 0; s--) {
      bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
      if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
        // This is a bit for the GC side effect.
        if (current_bit_is_set) {
          flags += "GC";
        }
        flags += "|";
      } else {
        // This is a bit for the array/field analysis.
        // The underscore character stands for the 'can trigger GC' bit.
        static const char *kDebug = "LZBCSIJFD_LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
        if (current_bit_is_set) {
          flags += kDebug[s];
        }
        if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
            (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
          flags += "|";
        }
      }
    }
    return flags;
  }

  bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }

 private:
  // Number of distinct data types tracked per access kind
  // (reference, bool, int8, uint16, int16, int32, int64, float32, float64).
  static constexpr int kFieldArrayAnalysisBits = 9;

  // 'Change' half: field writes, array writes, then the 'can trigger GC' bit.
  static constexpr int kFieldWriteOffset = 0;
  static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;

  // Total number of 'change' bits; also the shift between a 'depend on'
  // bit and its corresponding 'change' bit.
  static constexpr int kChangeBits = kCanTriggerGCBit + 1;

  // 'Depend on' half: field reads, array reads, then the 'depends on GC' bit.
  static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
  static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kDependsOnGCBit = kLastBitForReads + 1;

  static constexpr int kLastBit = kDependsOnGCBit;
  static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;

  // Aliases.

  static_assert(kChangeBits == kDependOnBits,
                "the 'change' bits should match the 'depend on' bits.");

  static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
  static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
  static constexpr uint64_t kAllWrites =
      ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
  static constexpr uint64_t kAllReads =
      ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;

  // Translates type to bit flag. The type must correspond to a Java type.
  static uint64_t TypeFlag(DataType::Type type, int offset) {
    int shift;
    switch (type) {
      case DataType::Type::kReference: shift = 0; break;
      case DataType::Type::kBool: shift = 1; break;
      case DataType::Type::kInt8: shift = 2; break;
      case DataType::Type::kUint16: shift = 3; break;
      case DataType::Type::kInt16: shift = 4; break;
      case DataType::Type::kInt32: shift = 5; break;
      case DataType::Type::kInt64: shift = 6; break;
      case DataType::Type::kFloat32: shift = 7; break;
      case DataType::Type::kFloat64: shift = 8; break;
      default:
        LOG(FATAL) << "Unexpected data type " << type;
        UNREACHABLE();
    }
    DCHECK_LE(kFieldWriteOffset, shift);
    DCHECK_LT(shift, kArrayWriteOffset);
    return UINT64_C(1) << (shift + offset);
  }

  // Private constructor on direct flags value.
  explicit SideEffects(uint64_t flags) : flags_(flags) {}

  uint64_t flags_;
};
1907
1908 // A HEnvironment object contains the values of virtual registers at a given location.
class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
 public:
  // Creates an environment with `number_of_vregs` empty vreg slots for the
  // given `method`/`dex_pc`, attached to the `holder` instruction.
  // Locations are not allocated until AllocateLocations() is called.
  ALWAYS_INLINE HEnvironment(ArenaAllocator* allocator,
                             size_t number_of_vregs,
                             ArtMethod* method,
                             uint32_t dex_pc,
                             HInstruction* holder)
     : vregs_(number_of_vregs, allocator->Adapter(kArenaAllocEnvironmentVRegs)),
       locations_(allocator->Adapter(kArenaAllocEnvironmentLocations)),
       parent_(nullptr),
       method_(method),
       dex_pc_(dex_pc),
       holder_(holder) {
  }

  // Creates an environment with the same shape (size, method, dex pc) as
  // `to_copy` but with empty vreg slots; the values must be filled in
  // separately with CopyFrom().
  ALWAYS_INLINE HEnvironment(ArenaAllocator* allocator,
                             const HEnvironment& to_copy,
                             HInstruction* holder)
      : HEnvironment(allocator,
                     to_copy.Size(),
                     to_copy.GetMethod(),
                     to_copy.GetDexPc(),
                     holder) {}

  // Resizes `locations_` to match the number of vregs. May only be called once.
  void AllocateLocations() {
    DCHECK(locations_.empty());
    locations_.resize(vregs_.size());
  }

  // Walks to the end of this environment's parent chain and appends there a
  // copy of `parent` (and, recursively, of `parent`'s own parent chain).
  void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
    if (parent_ != nullptr) {
      parent_->SetAndCopyParentChain(allocator, parent);
    } else {
      parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
      parent_->CopyFrom(parent);
      if (parent->GetParent() != nullptr) {
        parent_->SetAndCopyParentChain(allocator, parent->GetParent());
      }
    }
  }

  void CopyFrom(ArrayRef<HInstruction* const> locals);
  void CopyFrom(HEnvironment* environment);

  // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
  // input to the loop phi instead. This is for inserting instructions that
  // require an environment (like HDeoptimization) in the loop pre-header.
  void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);

  // Raw: overwrites the vreg record without updating `instruction`'s use lists.
  void SetRawEnvAt(size_t index, HInstruction* instruction) {
    vregs_[index] = HUserRecord<HEnvironment*>(instruction);
  }

  HInstruction* GetInstructionAt(size_t index) const {
    return vregs_[index].GetInstruction();
  }

  void RemoveAsUserOfInput(size_t index) const;

  // Replaces the input at the position 'index' with the replacement; the replacement and old
  // input instructions' env_uses_ lists are adjusted. The function works similar to
  // HInstruction::ReplaceInput.
  void ReplaceInput(HInstruction* replacement, size_t index);

  // Number of vreg slots in this environment.
  size_t Size() const { return vregs_.size(); }

  HEnvironment* GetParent() const { return parent_; }

  void SetLocationAt(size_t index, Location location) {
    locations_[index] = location;
  }

  Location GetLocationAt(size_t index) const {
    return locations_[index];
  }

  uint32_t GetDexPc() const {
    return dex_pc_;
  }

  ArtMethod* GetMethod() const {
    return method_;
  }

  HInstruction* GetHolder() const {
    return holder_;
  }


  // A non-null parent means this environment belongs to an inlined frame.
  bool IsFromInlinedInvoke() const {
    return GetParent() != nullptr;
  }

 private:
  ArenaVector<HUserRecord<HEnvironment*>> vregs_;
  ArenaVector<Location> locations_;
  HEnvironment* parent_;
  ArtMethod* method_;
  const uint32_t dex_pc_;

  // The instruction that holds this environment.
  HInstruction* const holder_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
};
2016
2017 class HInstruction : public ArenaObject<kArenaAllocInstruction> {
2018 public:
2019 #define DECLARE_KIND(type, super) k##type,
2020 enum InstructionKind {
2021 FOR_EACH_CONCRETE_INSTRUCTION(DECLARE_KIND)
2022 kLastInstructionKind
2023 };
2024 #undef DECLARE_KIND
2025
HInstruction(InstructionKind kind,SideEffects side_effects,uint32_t dex_pc)2026 HInstruction(InstructionKind kind, SideEffects side_effects, uint32_t dex_pc)
2027 : HInstruction(kind, DataType::Type::kVoid, side_effects, dex_pc) {}
2028
  // Main constructor: creates a detached instruction (no block, no id) of the
  // given `kind` and result `type`, with the declared `side_effects`.
  HInstruction(InstructionKind kind, DataType::Type type, SideEffects side_effects, uint32_t dex_pc)
      : previous_(nullptr),
        next_(nullptr),
        block_(nullptr),
        dex_pc_(dex_pc),
        id_(-1),          // Assigned later, via SetId().
        ssa_index_(-1),   // -1 means "no SSA index" (see HasSsaIndex()).
        packed_fields_(0u),
        environment_(nullptr),
        locations_(nullptr),
        live_interval_(nullptr),
        lifetime_position_(kNoLifetime),
        side_effects_(side_effects),
        reference_type_handle_(ReferenceTypeInfo::CreateInvalid().GetTypeHandle()) {
    // Kind, type and the reference-type-is-exact flag live in the packed bit field.
    SetPackedField<InstructionKindField>(kind);
    SetPackedField<TypeField>(type);
    SetPackedFlag<kFlagReferenceTypeIsExact>(ReferenceTypeInfo::CreateInvalid().IsExact());
  }
2047
~HInstruction()2048 virtual ~HInstruction() {}
2049
2050
GetNext()2051 HInstruction* GetNext() const { return next_; }
GetPrevious()2052 HInstruction* GetPrevious() const { return previous_; }
2053
2054 HInstruction* GetNextDisregardingMoves() const;
2055 HInstruction* GetPreviousDisregardingMoves() const;
2056
GetBlock()2057 HBasicBlock* GetBlock() const { return block_; }
GetAllocator()2058 ArenaAllocator* GetAllocator() const { return block_->GetGraph()->GetAllocator(); }
SetBlock(HBasicBlock * block)2059 void SetBlock(HBasicBlock* block) { block_ = block; }
IsInBlock()2060 bool IsInBlock() const { return block_ != nullptr; }
IsInLoop()2061 bool IsInLoop() const { return block_->IsInLoop(); }
IsLoopHeaderPhi()2062 bool IsLoopHeaderPhi() const { return IsPhi() && block_->IsLoopHeader(); }
IsIrreducibleLoopHeaderPhi()2063 bool IsIrreducibleLoopHeaderPhi() const {
2064 return IsLoopHeaderPhi() && GetBlock()->GetLoopInformation()->IsIrreducible();
2065 }
2066
2067 virtual ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() = 0;
2068
GetInputRecords()2069 ArrayRef<const HUserRecord<HInstruction*>> GetInputRecords() const {
2070 // One virtual method is enough, just const_cast<> and then re-add the const.
2071 return ArrayRef<const HUserRecord<HInstruction*>>(
2072 const_cast<HInstruction*>(this)->GetInputRecords());
2073 }
2074
GetInputs()2075 HInputsRef GetInputs() {
2076 return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
2077 }
2078
GetInputs()2079 HConstInputsRef GetInputs() const {
2080 return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
2081 }
2082
InputCount()2083 size_t InputCount() const { return GetInputRecords().size(); }
InputAt(size_t i)2084 HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }
2085
HasInput(HInstruction * input)2086 bool HasInput(HInstruction* input) const {
2087 for (const HInstruction* i : GetInputs()) {
2088 if (i == input) {
2089 return true;
2090 }
2091 }
2092 return false;
2093 }
2094
SetRawInputAt(size_t index,HInstruction * input)2095 void SetRawInputAt(size_t index, HInstruction* input) {
2096 SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
2097 }
2098
2099 virtual void Accept(HGraphVisitor* visitor) = 0;
2100 virtual const char* DebugName() const = 0;
2101
GetType()2102 DataType::Type GetType() const {
2103 return TypeField::Decode(GetPackedFields());
2104 }
2105
NeedsEnvironment()2106 virtual bool NeedsEnvironment() const { return false; }
2107
GetDexPc()2108 uint32_t GetDexPc() const { return dex_pc_; }
2109
IsControlFlow()2110 virtual bool IsControlFlow() const { return false; }
2111
2112 // Can the instruction throw?
2113 // TODO: We should rename to CanVisiblyThrow, as some instructions (like HNewInstance),
2114 // could throw OOME, but it is still OK to remove them if they are unused.
CanThrow()2115 virtual bool CanThrow() const { return false; }
2116
2117 // Does the instruction always throw an exception unconditionally?
AlwaysThrows()2118 virtual bool AlwaysThrows() const { return false; }
2119
CanThrowIntoCatchBlock()2120 bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }
2121
HasSideEffects()2122 bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
DoesAnyWrite()2123 bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }
2124
2125 // Does not apply for all instructions, but having this at top level greatly
2126 // simplifies the null check elimination.
2127 // TODO: Consider merging can_be_null into ReferenceTypeInfo.
CanBeNull()2128 virtual bool CanBeNull() const {
2129 DCHECK_EQ(GetType(), DataType::Type::kReference) << "CanBeNull only applies to reference types";
2130 return true;
2131 }
2132
CanDoImplicitNullCheckOn(HInstruction * obj ATTRIBUTE_UNUSED)2133 virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
2134 return false;
2135 }
2136
2137 // If this instruction will do an implicit null check, return the `HNullCheck` associated
2138 // with it. Otherwise return null.
GetImplicitNullCheck()2139 HNullCheck* GetImplicitNullCheck() const {
2140 // Find the first previous instruction which is not a move.
2141 HInstruction* first_prev_not_move = GetPreviousDisregardingMoves();
2142 if (first_prev_not_move != nullptr &&
2143 first_prev_not_move->IsNullCheck() &&
2144 first_prev_not_move->IsEmittedAtUseSite()) {
2145 return first_prev_not_move->AsNullCheck();
2146 }
2147 return nullptr;
2148 }
2149
IsActualObject()2150 virtual bool IsActualObject() const {
2151 return GetType() == DataType::Type::kReference;
2152 }
2153
2154 void SetReferenceTypeInfo(ReferenceTypeInfo rti);
2155
GetReferenceTypeInfo()2156 ReferenceTypeInfo GetReferenceTypeInfo() const {
2157 DCHECK_EQ(GetType(), DataType::Type::kReference);
2158 return ReferenceTypeInfo::CreateUnchecked(reference_type_handle_,
2159 GetPackedFlag<kFlagReferenceTypeIsExact>());
2160 }
2161
  // Records that `user` references this instruction as its `index`-th input:
  // allocates a use node and pushes it at the front of the use list, then
  // fixes up the affected user records.
  void AddUseAt(HInstruction* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: fixup_end remains valid across push_front().
    auto fixup_end = uses_.empty() ? uses_.begin() : ++uses_.begin();
    HUseListNode<HInstruction*>* new_node =
        new (GetBlock()->GetGraph()->GetAllocator()) HUseListNode<HInstruction*>(user, index);
    uses_.push_front(*new_node);
    FixUpUserRecordsAfterUseInsertion(fixup_end);
  }
2171
  // Environment-use counterpart of AddUseAt(): records that environment
  // `user` references this instruction in its `index`-th vreg slot.
  void AddEnvUseAt(HEnvironment* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: env_fixup_end remains valid across push_front().
    auto env_fixup_end = env_uses_.empty() ? env_uses_.begin() : ++env_uses_.begin();
    HUseListNode<HEnvironment*>* new_node =
        new (GetBlock()->GetGraph()->GetAllocator()) HUseListNode<HEnvironment*>(user, index);
    env_uses_.push_front(*new_node);
    FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);
  }
2181
  // Unlinks this instruction's use node from the use list of its `input`-th
  // input, then fixes up that input's remaining user records.
  void RemoveAsUserOfInput(size_t input) {
    HUserRecord<HInstruction*> input_use = InputRecordAt(input);
    HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
    input_use.GetInstruction()->uses_.erase_after(before_use_node);
    input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
  }
2188
  // Unlinks this instruction from the use lists of all of its inputs
  // (same operation as RemoveAsUserOfInput(), applied to every input).
  void RemoveAsUserOfAllInputs() {
    for (const HUserRecord<HInstruction*>& input_use : GetInputRecords()) {
      HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
      input_use.GetInstruction()->uses_.erase_after(before_use_node);
      input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
    }
  }
2196
GetUses()2197 const HUseList<HInstruction*>& GetUses() const { return uses_; }
GetEnvUses()2198 const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }
2199
HasUses()2200 bool HasUses() const { return !uses_.empty() || !env_uses_.empty(); }
HasEnvironmentUses()2201 bool HasEnvironmentUses() const { return !env_uses_.empty(); }
HasNonEnvironmentUses()2202 bool HasNonEnvironmentUses() const { return !uses_.empty(); }
HasOnlyOneNonEnvironmentUse()2203 bool HasOnlyOneNonEnvironmentUse() const {
2204 return !HasEnvironmentUses() && GetUses().HasExactlyOneElement();
2205 }
2206
  // Returns whether this instruction may be removed from the graph: it must
  // not write, throw, or affect control flow, and must not be one of the
  // instruction kinds that carry meaning even when unused.
  bool IsRemovable() const {
    return
        !DoesAnyWrite() &&
        !CanThrow() &&
        !IsSuspendCheck() &&
        !IsControlFlow() &&
        !IsNativeDebugInfo() &&
        !IsParameterValue() &&
        // If we added an explicit barrier then we should keep it.
        !IsMemoryBarrier() &&
        !IsConstructorFence();
  }
2219
IsDeadAndRemovable()2220 bool IsDeadAndRemovable() const {
2221 return IsRemovable() && !HasUses();
2222 }
2223
2224 // Does this instruction strictly dominate `other_instruction`?
2225 // Returns false if this instruction and `other_instruction` are the same.
2226 // Aborts if this instruction and `other_instruction` are both phis.
2227 bool StrictlyDominates(HInstruction* other_instruction) const;
2228
GetId()2229 int GetId() const { return id_; }
SetId(int id)2230 void SetId(int id) { id_ = id; }
2231
GetSsaIndex()2232 int GetSsaIndex() const { return ssa_index_; }
SetSsaIndex(int ssa_index)2233 void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
HasSsaIndex()2234 bool HasSsaIndex() const { return ssa_index_ != -1; }
2235
HasEnvironment()2236 bool HasEnvironment() const { return environment_ != nullptr; }
GetEnvironment()2237 HEnvironment* GetEnvironment() const { return environment_; }
2238 // Set the `environment_` field. Raw because this method does not
2239 // update the uses lists.
SetRawEnvironment(HEnvironment * environment)2240 void SetRawEnvironment(HEnvironment* environment) {
2241 DCHECK(environment_ == nullptr);
2242 DCHECK_EQ(environment->GetHolder(), this);
2243 environment_ = environment;
2244 }
2245
  // Prepends `environment` to the existing environment chain: the current
  // environment becomes its parent. Raw: does not update any use lists.
  void InsertRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ != nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    DCHECK(environment->GetParent() == nullptr);
    environment->parent_ = environment_;
    environment_ = environment;
  }
2253
2254 void RemoveEnvironment();
2255
2256 // Set the environment of this instruction, copying it from `environment`. While
2257 // copying, the uses lists are being updated.
  void CopyEnvironmentFrom(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetAllocator();
    // Allocate an environment of the same shape held by this instruction,
    // then copy the vreg values (which also updates the env-use lists).
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFrom(environment);
    if (environment->GetParent() != nullptr) {
      // Replicate the inlining parent chain as well.
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }
2267
  // Like CopyEnvironmentFrom(), but loop phis of `block` are replaced by
  // their first input (see HEnvironment::CopyFromWithLoopPhiAdjustment).
  void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
                                                HBasicBlock* block) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetAllocator();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFromWithLoopPhiAdjustment(environment, block);
    if (environment->GetParent() != nullptr) {
      // Replicate the inlining parent chain as well.
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }
2278
2279 // Returns the number of entries in the environment. Typically, that is the
2280 // number of dex registers in a method. It could be more in case of inlining.
2281 size_t EnvironmentSize() const;
2282
GetLocations()2283 LocationSummary* GetLocations() const { return locations_; }
SetLocations(LocationSummary * locations)2284 void SetLocations(LocationSummary* locations) { locations_ = locations; }
2285
2286 void ReplaceWith(HInstruction* instruction);
2287 void ReplaceUsesDominatedBy(HInstruction* dominator, HInstruction* replacement);
2288 void ReplaceEnvUsesDominatedBy(HInstruction* dominator, HInstruction* replacement);
2289 void ReplaceInput(HInstruction* replacement, size_t index);
2290
2291 // This is almost the same as doing `ReplaceWith()`. But in this helper, the
2292 // uses of this instruction by `other` are *not* updated.
ReplaceWithExceptInReplacementAtIndex(HInstruction * other,size_t use_index)2293 void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
2294 ReplaceWith(other);
2295 other->ReplaceInput(this, use_index);
2296 }
2297
2298 // Move `this` instruction before `cursor`
2299 void MoveBefore(HInstruction* cursor, bool do_checks = true);
2300
2301 // Move `this` before its first user and out of any loops. If there is no
2302 // out-of-loop user that dominates all other users, move the instruction
2303 // to the end of the out-of-loop common dominator of the user's blocks.
2304 //
2305 // This can be used only on non-throwing instructions with no side effects that
2306 // have at least one use but no environment uses.
2307 void MoveBeforeFirstUserAndOutOfLoops();
2308
2309 #define INSTRUCTION_TYPE_CHECK(type, super) \
2310 bool Is##type() const;
2311
2312 FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
2313 #undef INSTRUCTION_TYPE_CHECK
2314
2315 #define INSTRUCTION_TYPE_CAST(type, super) \
2316 const H##type* As##type() const; \
2317 H##type* As##type();
2318
FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CAST)2319 FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CAST)
2320 #undef INSTRUCTION_TYPE_CAST
2321
2322 // Return a clone of the instruction if it is clonable (shallow copy by default, custom copy
2323 // if a custom copy-constructor is provided for a particular type). If IsClonable() is false for
2324 // the instruction then the behaviour of this function is undefined.
2325 //
2326 // Note: It is semantically valid to create a clone of the instruction only until
2327 // prepare_for_register_allocator phase as lifetime, intervals and codegen info are not
2328 // copied.
2329 //
2330 // Note: HEnvironment and some other fields are not copied and are set to default values, see
2331 // 'explicit HInstruction(const HInstruction& other)' for details.
2332 virtual HInstruction* Clone(ArenaAllocator* arena ATTRIBUTE_UNUSED) const {
2333 LOG(FATAL) << "Cloning is not implemented for the instruction " <<
2334 DebugName() << " " << GetId();
2335 UNREACHABLE();
2336 }
2337
  // Return whether instruction can be cloned (copied). Subclasses that
  // support Clone() override this to return true.
  virtual bool IsClonable() const { return false; }

  // Returns whether the instruction can be moved within the graph.
  // TODO: this method is used by LICM and GVN with possibly different
  // meanings? split and rename?
  virtual bool CanBeMoved() const { return false; }

  // Returns whether any data encoded in the two instructions is equal.
  // This method does not look at the inputs. Both instructions must be
  // of the same type, otherwise the method has undefined behavior.
  // The conservative default (false) means "never value-equal".
  virtual bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }
2352
2353 // Returns whether two instructions are equal, that is:
2354 // 1) They have the same type and contain the same data (InstructionDataEquals).
2355 // 2) Their inputs are identical.
2356 bool Equals(const HInstruction* other) const;
2357
  // The concrete instruction kind, decoded from the packed fields.
  InstructionKind GetKind() const { return GetPackedField<InstructionKindField>(); }

  // Hash used for value-numbering style lookups: combines the instruction
  // kind with the ids of all inputs (classic 31-multiplier accumulation).
  // Must be consistent with Equals(); subclasses carrying extra data
  // (e.g. constants) override it.
  virtual size_t ComputeHashCode() const {
    size_t result = GetKind();
    for (const HInstruction* input : GetInputs()) {
      result = (result * 31) + input->GetId();
    }
    return result;
  }
2367
  // Side-effect summary of this instruction (reads/writes/exceptions).
  SideEffects GetSideEffects() const { return side_effects_; }
  void SetSideEffects(SideEffects other) { side_effects_ = other; }
  void AddSideEffects(SideEffects other) { side_effects_.Add(other); }

  // Liveness-analysis data: position in the linear order, and the live
  // interval computed for register allocation. Only valid once the
  // corresponding pass has run.
  size_t GetLifetimePosition() const { return lifetime_position_; }
  void SetLifetimePosition(size_t position) { lifetime_position_ = position; }
  LiveInterval* GetLiveInterval() const { return live_interval_; }
  void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; }
  bool HasLiveInterval() const { return live_interval_ != nullptr; }
2377
  // True for the implicit suspend check placed in the graph's entry block.
  bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); }

  // Returns whether the code generation of the instruction will require to have access
  // to the current method. Such instructions are:
  // (1): Instructions that require an environment, as calling the runtime requires
  //      to walk the stack and have the current method stored at a specific stack address.
  // (2): HCurrentMethod, potentially used by HInvokeStaticOrDirect, HLoadString, or HLoadClass
  //      to access the dex cache.
  bool NeedsCurrentMethod() const {
    return NeedsEnvironment() || IsCurrentMethod();
  }

  // Returns whether the code generation of the instruction will require to have access
  // to the dex cache of the current method's declaring class via the current method.
  virtual bool NeedsDexCacheOfDeclaringClass() const { return false; }

  // Does this instruction have any use in an environment before
  // control flow hits 'other'?
  bool HasAnyEnvironmentUseBefore(HInstruction* other);

  // Remove all references to environment uses of this instruction.
  // The caller must ensure that this is safe to do.
  void RemoveEnvironmentUsers();

  // When set, the machine code for this instruction is generated at its
  // use site(s); liveness analysis adjusts use positions accordingly.
  bool IsEmittedAtUseSite() const { return GetPackedFlag<kFlagEmittedAtUseSite>(); }
  void MarkEmittedAtUseSite() { SetPackedFlag<kFlagEmittedAtUseSite>(true); }
2404
2405 protected:
2406 // If set, the machine code for this instruction is assumed to be generated by
2407 // its users. Used by liveness analysis to compute use positions accordingly.
2408 static constexpr size_t kFlagEmittedAtUseSite = 0u;
2409 static constexpr size_t kFlagReferenceTypeIsExact = kFlagEmittedAtUseSite + 1;
2410 static constexpr size_t kFieldInstructionKind = kFlagReferenceTypeIsExact + 1;
2411 static constexpr size_t kFieldInstructionKindSize =
2412 MinimumBitsToStore(static_cast<size_t>(InstructionKind::kLastInstructionKind - 1));
2413 static constexpr size_t kFieldType =
2414 kFieldInstructionKind + kFieldInstructionKindSize;
2415 static constexpr size_t kFieldTypeSize =
2416 MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
2417 static constexpr size_t kNumberOfGenericPackedBits = kFieldType + kFieldTypeSize;
2418 static constexpr size_t kMaxNumberOfPackedBits = sizeof(uint32_t) * kBitsPerByte;
2419
2420 static_assert(kNumberOfGenericPackedBits <= kMaxNumberOfPackedBits,
2421 "Too many generic packed fields");
2422
2423 using TypeField = BitField<DataType::Type, kFieldType, kFieldTypeSize>;
2424
  // Read-only access to the i-th input record (input instruction plus its
  // back-link into that instruction's use list).
  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const {
    return GetInputRecords()[i];
  }

  // Overwrite an input record without updating any use lists. "Raw" because
  // the caller is responsible for keeping use lists consistent.
  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) {
    ArrayRef<HUserRecord<HInstruction*>> input_records = GetInputRecords();
    input_records[index] = input;
  }

  uint32_t GetPackedFields() const {
    return packed_fields_;
  }

  // Single-bit accessors into packed_fields_; `flag` is the bit index.
  template <size_t flag>
  bool GetPackedFlag() const {
    return (packed_fields_ & (1u << flag)) != 0u;
  }

  template <size_t flag>
  void SetPackedFlag(bool value = true) {
    packed_fields_ = (packed_fields_ & ~(1u << flag)) | ((value ? 1u : 0u) << flag);
  }

  // Multi-bit accessors into packed_fields_ via a BitField descriptor type.
  template <typename BitFieldType>
  typename BitFieldType::value_type GetPackedField() const {
    return BitFieldType::Decode(packed_fields_);
  }

  template <typename BitFieldType>
  void SetPackedField(typename BitFieldType::value_type value) {
    DCHECK(IsUint<BitFieldType::size>(static_cast<uintptr_t>(value)));
    packed_fields_ = BitFieldType::Update(value, packed_fields_);
  }
2458
  // Copy construction for the instruction (used for Clone function).
  //
  // Fields (e.g. lifetime, intervals and codegen info) associated with phases starting from
  // prepare_for_register_allocator are not copied (set to default values).
  //
  // Copy constructors must be provided for every HInstruction type; default copy constructor is
  // fine for most of them. However for some of the instructions a custom copy constructor must be
  // specified (when instruction has non-trivially copyable fields and must have a special behaviour
  // for copying them).
  //
  // Note: graph linkage (previous_/next_/block_), id_, environment and use
  // lists are deliberately reset — the clone is not yet part of any graph.
  explicit HInstruction(const HInstruction& other)
      : previous_(nullptr),
        next_(nullptr),
        block_(nullptr),
        dex_pc_(other.dex_pc_),
        id_(-1),
        ssa_index_(-1),
        packed_fields_(other.packed_fields_),
        environment_(nullptr),
        locations_(nullptr),
        live_interval_(nullptr),
        lifetime_position_(kNoLifetime),
        side_effects_(other.side_effects_),
        reference_type_handle_(other.reference_type_handle_) {
  }
2483
2484 private:
2485 using InstructionKindField =
2486 BitField<InstructionKind, kFieldInstructionKind, kFieldInstructionKindSize>;
2487
  // After inserting new uses at the front of uses_, rewrite the affected
  // users' input records so each one stores the correct "node before my
  // use" iterator (needed for O(1) removal from the forward list).
  // `fixup_end` is the first node that was already consistent.
  void FixUpUserRecordsAfterUseInsertion(HUseList<HInstruction*>::iterator fixup_end) {
    auto before_use_node = uses_.before_begin();
    for (auto use_node = uses_.begin(); use_node != fixup_end; ++use_node) {
      HInstruction* user = use_node->GetUser();
      size_t input_index = use_node->GetIndex();
      user->SetRawInputRecordAt(input_index, HUserRecord<HInstruction*>(this, before_use_node));
      before_use_node = use_node;
    }
  }

  // After removing the node that followed `before_use_node`, the new
  // successor's user must be re-pointed at `before_use_node`.
  void FixUpUserRecordsAfterUseRemoval(HUseList<HInstruction*>::iterator before_use_node) {
    auto next = ++HUseList<HInstruction*>::iterator(before_use_node);
    if (next != uses_.end()) {
      HInstruction* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->InputRecordAt(next_index).GetInstruction() == this);
      next_user->SetRawInputRecordAt(next_index, HUserRecord<HInstruction*>(this, before_use_node));
    }
  }

  // Same fixup as FixUpUserRecordsAfterUseInsertion, but for environment
  // uses: the back-links live in the using HEnvironment's vregs_.
  void FixUpUserRecordsAfterEnvUseInsertion(HUseList<HEnvironment*>::iterator env_fixup_end) {
    auto before_env_use_node = env_uses_.before_begin();
    for (auto env_use_node = env_uses_.begin(); env_use_node != env_fixup_end; ++env_use_node) {
      HEnvironment* user = env_use_node->GetUser();
      size_t input_index = env_use_node->GetIndex();
      user->vregs_[input_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
      before_env_use_node = env_use_node;
    }
  }

  // Environment-use counterpart of FixUpUserRecordsAfterUseRemoval.
  void FixUpUserRecordsAfterEnvUseRemoval(HUseList<HEnvironment*>::iterator before_env_use_node) {
    auto next = ++HUseList<HEnvironment*>::iterator(before_env_use_node);
    if (next != env_uses_.end()) {
      HEnvironment* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->vregs_[next_index].GetInstruction() == this);
      next_user->vregs_[next_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
    }
  }
2527
2528 HInstruction* previous_;
2529 HInstruction* next_;
2530 HBasicBlock* block_;
2531 const uint32_t dex_pc_;
2532
2533 // An instruction gets an id when it is added to the graph.
2534 // It reflects creation order. A negative id means the instruction
2535 // has not been added to the graph.
2536 int id_;
2537
2538 // When doing liveness analysis, instructions that have uses get an SSA index.
2539 int ssa_index_;
2540
2541 // Packed fields.
2542 uint32_t packed_fields_;
2543
2544 // List of instructions that have this instruction as input.
2545 HUseList<HInstruction*> uses_;
2546
2547 // List of environments that contain this instruction.
2548 HUseList<HEnvironment*> env_uses_;
2549
2550 // The environment associated with this instruction. Not null if the instruction
2551 // might jump out of the method.
2552 HEnvironment* environment_;
2553
2554 // Set by the code generator.
2555 LocationSummary* locations_;
2556
2557 // Set by the liveness analysis.
2558 LiveInterval* live_interval_;
2559
2560 // Set by the liveness analysis, this is the position in a linear
2561 // order of blocks where this instruction's live interval start.
2562 size_t lifetime_position_;
2563
2564 SideEffects side_effects_;
2565
2566 // The reference handle part of the reference type info.
2567 // The IsExact() flag is stored in packed fields.
2568 // TODO: for primitive types this should be marked as invalid.
2569 ReferenceTypeInfo::TypeHandle reference_type_handle_;
2570
2571 friend class GraphChecker;
2572 friend class HBasicBlock;
2573 friend class HEnvironment;
2574 friend class HGraph;
2575 friend class HInstructionList;
2576 };
2577 std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs);
2578
2579 // Iterates over the instructions, while preserving the next instruction
2580 // in case the current instruction gets removed from the list by the user
2581 // of this iterator.
2582 class HInstructionIterator : public ValueObject {
2583 public:
HInstructionIterator(const HInstructionList & instructions)2584 explicit HInstructionIterator(const HInstructionList& instructions)
2585 : instruction_(instructions.first_instruction_) {
2586 next_ = Done() ? nullptr : instruction_->GetNext();
2587 }
2588
Done()2589 bool Done() const { return instruction_ == nullptr; }
Current()2590 HInstruction* Current() const { return instruction_; }
Advance()2591 void Advance() {
2592 instruction_ = next_;
2593 next_ = Done() ? nullptr : instruction_->GetNext();
2594 }
2595
2596 private:
2597 HInstruction* instruction_;
2598 HInstruction* next_;
2599
2600 DISALLOW_COPY_AND_ASSIGN(HInstructionIterator);
2601 };
2602
2603 // Iterates over the instructions without saving the next instruction,
2604 // therefore handling changes in the graph potentially made by the user
2605 // of this iterator.
2606 class HInstructionIteratorHandleChanges : public ValueObject {
2607 public:
HInstructionIteratorHandleChanges(const HInstructionList & instructions)2608 explicit HInstructionIteratorHandleChanges(const HInstructionList& instructions)
2609 : instruction_(instructions.first_instruction_) {
2610 }
2611
Done()2612 bool Done() const { return instruction_ == nullptr; }
Current()2613 HInstruction* Current() const { return instruction_; }
Advance()2614 void Advance() {
2615 instruction_ = instruction_->GetNext();
2616 }
2617
2618 private:
2619 HInstruction* instruction_;
2620
2621 DISALLOW_COPY_AND_ASSIGN(HInstructionIteratorHandleChanges);
2622 };
2623
2624
2625 class HBackwardInstructionIterator : public ValueObject {
2626 public:
HBackwardInstructionIterator(const HInstructionList & instructions)2627 explicit HBackwardInstructionIterator(const HInstructionList& instructions)
2628 : instruction_(instructions.last_instruction_) {
2629 next_ = Done() ? nullptr : instruction_->GetPrevious();
2630 }
2631
Done()2632 bool Done() const { return instruction_ == nullptr; }
Current()2633 HInstruction* Current() const { return instruction_; }
Advance()2634 void Advance() {
2635 instruction_ = next_;
2636 next_ = Done() ? nullptr : instruction_->GetPrevious();
2637 }
2638
2639 private:
2640 HInstruction* instruction_;
2641 HInstruction* next_;
2642
2643 DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator);
2644 };
2645
// Base class for instructions whose number of inputs is not fixed at
// construction (e.g. phis, invokes). Inputs live in an arena vector and
// can be added/removed after construction.
class HVariableInputSizeInstruction : public HInstruction {
 public:
  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() override {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

  void AddInput(HInstruction* input);
  void InsertInputAt(size_t index, HInstruction* input);
  void RemoveInputAt(size_t index);

  // Removes all the inputs.
  // Also removes this instructions from each input's use list
  // (for non-environment uses only).
  void RemoveAllInputs();

 protected:
  // Constructor for untyped (void) instructions.
  HVariableInputSizeInstruction(InstructionKind inst_kind,
                                SideEffects side_effects,
                                uint32_t dex_pc,
                                ArenaAllocator* allocator,
                                size_t number_of_inputs,
                                ArenaAllocKind kind)
      : HInstruction(inst_kind, side_effects, dex_pc),
        inputs_(number_of_inputs, allocator->Adapter(kind)) {}
  // Constructor for instructions that produce a value of type `type`.
  HVariableInputSizeInstruction(InstructionKind inst_kind,
                                DataType::Type type,
                                SideEffects side_effects,
                                uint32_t dex_pc,
                                ArenaAllocator* allocator,
                                size_t number_of_inputs,
                                ArenaAllocKind kind)
      : HInstruction(inst_kind, type, side_effects, dex_pc),
        inputs_(number_of_inputs, allocator->Adapter(kind)) {}

  DEFAULT_COPY_CONSTRUCTOR(VariableInputSizeInstruction);

  ArenaVector<HUserRecord<HInstruction*>> inputs_;
};
2685
// Base class for instructions with a fixed number N of inputs, stored
// inline in a std::array (no arena allocation needed).
template<size_t N>
class HExpression : public HInstruction {
 public:
  HExpression<N>(InstructionKind kind, SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(kind, side_effects, dex_pc), inputs_() {}
  HExpression<N>(InstructionKind kind,
                 DataType::Type type,
                 SideEffects side_effects,
                 uint32_t dex_pc)
      : HInstruction(kind, type, side_effects, dex_pc), inputs_() {}
  virtual ~HExpression() {}

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  // `final`: subclasses may not change how the fixed inputs are stored.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Expression<N>);

 private:
  std::array<HUserRecord<HInstruction*>, N> inputs_;

  friend class SsaBuilder;
};
2711
// HExpression specialization for N=0: avoids a zero-sized inputs array
// and simply returns an empty input record view.
template<>
class HExpression<0> : public HInstruction {
 public:
  using HInstruction::HInstruction;

  virtual ~HExpression() {}

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
    return ArrayRef<HUserRecord<HInstruction*>>();
  }

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Expression<0>);

 private:
  friend class SsaBuilder;
};
2731
// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
// instruction that branches to the exit block.
class HReturnVoid final : public HExpression<0> {
 public:
  explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
      : HExpression(kReturnVoid, SideEffects::None(), dex_pc) {
  }

  // Must terminate its block: the block's only successor is the exit block.
  bool IsControlFlow() const override { return true; }

  DECLARE_INSTRUCTION(ReturnVoid);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ReturnVoid);
};
2747
// Represents dex's RETURN opcodes. A HReturn is a control flow
// instruction that branches to the exit block. Its single input is the
// value being returned.
class HReturn final : public HExpression<1> {
 public:
  explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
      : HExpression(kReturn, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool IsControlFlow() const override { return true; }

  DECLARE_INSTRUCTION(Return);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Return);
};
2764
// SSA phi node: merges values of the same dex register flowing in from
// the block's predecessors (one input per predecessor).
class HPhi final : public HVariableInputSizeInstruction {
 public:
  HPhi(ArenaAllocator* allocator,
       uint32_t reg_number,
       size_t number_of_inputs,
       DataType::Type type,
       uint32_t dex_pc = kNoDexPc)
      : HVariableInputSizeInstruction(
            kPhi,
            ToPhiType(type),
            SideEffects::None(),
            dex_pc,
            allocator,
            number_of_inputs,
            kArenaAllocPhiInputs),
        reg_number_(reg_number) {
    DCHECK_NE(GetType(), DataType::Type::kVoid);
    // Phis are constructed live and marked dead if conflicting or unused.
    // Individual steps of SsaBuilder should assume that if a phi has been
    // marked dead, it can be ignored and will be removed by SsaPhiElimination.
    SetPackedFlag<kFlagIsLive>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
  }

  bool IsClonable() const override { return true; }

  // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
  static DataType::Type ToPhiType(DataType::Type type) {
    return DataType::Kind(type);
  }

  bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }

  void SetType(DataType::Type new_type) {
    // Make sure that only valid type changes occur. The following are allowed:
    // (1) int  -> float/ref (primitive type propagation),
    // (2) long -> double (primitive type propagation).
    DCHECK(GetType() == new_type ||
           (GetType() == DataType::Type::kInt32 && new_type == DataType::Type::kFloat32) ||
           (GetType() == DataType::Type::kInt32 && new_type == DataType::Type::kReference) ||
           (GetType() == DataType::Type::kInt64 && new_type == DataType::Type::kFloat64));
    SetPackedField<TypeField>(new_type);
  }

  bool CanBeNull() const override { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  // The dex register this phi merges.
  uint32_t GetRegNumber() const { return reg_number_; }

  void SetDead() { SetPackedFlag<kFlagIsLive>(false); }
  void SetLive() { SetPackedFlag<kFlagIsLive>(true); }
  bool IsDead() const { return !IsLive(); }
  bool IsLive() const { return GetPackedFlag<kFlagIsLive>(); }

  // True if `other` is a phi for the same dex register in the same block.
  bool IsVRegEquivalentOf(const HInstruction* other) const {
    return other != nullptr
        && other->IsPhi()
        && other->AsPhi()->GetBlock() == GetBlock()
        && other->AsPhi()->GetRegNumber() == GetRegNumber();
  }

  // Checks the adjacent phis only: equivalents for the same dex register
  // are kept adjacent in the block's phi list.
  bool HasEquivalentPhi() const {
    if (GetPrevious() != nullptr && GetPrevious()->AsPhi()->GetRegNumber() == GetRegNumber()) {
      return true;
    }
    if (GetNext() != nullptr && GetNext()->AsPhi()->GetRegNumber() == GetRegNumber()) {
      return true;
    }
    return false;
  }

  // Returns the next equivalent phi (starting from the current one) or null if there is none.
  // An equivalent phi is a phi having the same dex register and type.
  // It assumes that phis with the same dex register are adjacent.
  HPhi* GetNextEquivalentPhiWithSameType() {
    HInstruction* next = GetNext();
    while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
      if (next->GetType() == GetType()) {
        return next->AsPhi();
      }
      next = next->GetNext();
    }
    return nullptr;
  }

  DECLARE_INSTRUCTION(Phi);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Phi);

 private:
  static constexpr size_t kFlagIsLive = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagIsLive + 1;
  static constexpr size_t kNumberOfPhiPackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfPhiPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");

  const uint32_t reg_number_;
};
2863
// The exit instruction is the only instruction of the exit block.
// Instructions aborting the method (HThrow and HReturn) must branch to the
// exit block.
class HExit final : public HExpression<0> {
 public:
  explicit HExit(uint32_t dex_pc = kNoDexPc)
      : HExpression(kExit, SideEffects::None(), dex_pc) {
  }

  bool IsControlFlow() const override { return true; }

  DECLARE_INSTRUCTION(Exit);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Exit);
};
2880
// Jumps from one block to another. Its block must therefore have exactly
// one successor.
class HGoto final : public HExpression<0> {
 public:
  explicit HGoto(uint32_t dex_pc = kNoDexPc)
      : HExpression(kGoto, SideEffects::None(), dex_pc) {
  }

  bool IsClonable() const override { return true; }
  bool IsControlFlow() const override { return true; }

  // The jump target: the single successor of the enclosing block.
  HBasicBlock* GetSuccessor() const {
    return GetBlock()->GetSingleSuccessor();
  }

  DECLARE_INSTRUCTION(Goto);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Goto);
};
2900
// Abstract base for compile-time constants. Constants have no inputs,
// no side effects, and are always safe to move.
class HConstant : public HExpression<0> {
 public:
  explicit HConstant(InstructionKind kind, DataType::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(kind, type, SideEffects::None(), dex_pc) {
  }

  bool CanBeMoved() const override { return true; }

  // Is this constant -1 in the arithmetic sense?
  virtual bool IsMinusOne() const { return false; }
  // Is this constant 0 in the arithmetic sense?
  virtual bool IsArithmeticZero() const { return false; }
  // Is this constant a 0-bit pattern?
  virtual bool IsZeroBitPattern() const { return false; }
  // Is this constant 1 in the arithmetic sense?
  virtual bool IsOne() const { return false; }

  // Raw bit pattern of the value, widened to 64 bits.
  virtual uint64_t GetValueAsUint64() const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(Constant);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Constant);
};
2925
// The reference constant `null`. Created only via HGraph (private ctor),
// so there is at most one canonical instance per graph.
class HNullConstant final : public HConstant {
 public:
  // All null constants are interchangeable.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
    return true;
  }

  uint64_t GetValueAsUint64() const override { return 0; }

  size_t ComputeHashCode() const override { return 0; }

  // The null constant representation is a 0-bit pattern.
  bool IsZeroBitPattern() const override { return true; }

  DECLARE_INSTRUCTION(NullConstant);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(NullConstant);

 private:
  explicit HNullConstant(uint32_t dex_pc = kNoDexPc)
      : HConstant(kNullConstant, DataType::Type::kReference, dex_pc) {
  }

  friend class HGraph;
};
2951
// Constants of the type int. Those can be from Dex instructions, or
// synthesized (for example with the if-eqz instruction). Created only
// via HGraph (private ctors) so constants can be canonicalized.
class HIntConstant final : public HConstant {
 public:
  int32_t GetValue() const { return value_; }

  // Zero-extended 32-bit pattern (no sign extension into the upper half).
  uint64_t GetValueAsUint64() const override {
    return static_cast<uint64_t>(static_cast<uint32_t>(value_));
  }

  bool InstructionDataEquals(const HInstruction* other) const override {
    DCHECK(other->IsIntConstant()) << other->DebugName();
    return other->AsIntConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const override { return GetValue(); }

  bool IsMinusOne() const override { return GetValue() == -1; }
  bool IsArithmeticZero() const override { return GetValue() == 0; }
  bool IsZeroBitPattern() const override { return GetValue() == 0; }
  bool IsOne() const override { return GetValue() == 1; }

  // Integer constants are used to encode Boolean values as well,
  // where 1 means true and 0 means false.
  bool IsTrue() const { return GetValue() == 1; }
  bool IsFalse() const { return GetValue() == 0; }

  DECLARE_INSTRUCTION(IntConstant);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(IntConstant);

 private:
  explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(kIntConstant, DataType::Type::kInt32, dex_pc), value_(value) {
  }
  explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
      : HConstant(kIntConstant, DataType::Type::kInt32, dex_pc),
        value_(value ? 1 : 0) {
  }

  const int32_t value_;

  friend class HGraph;
  ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
  ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
};
2999
// Constants of the type long. Created only via HGraph (private ctor) so
// constants can be canonicalized.
class HLongConstant final : public HConstant {
 public:
  int64_t GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const override { return value_; }

  bool InstructionDataEquals(const HInstruction* other) const override {
    DCHECK(other->IsLongConstant()) << other->DebugName();
    return other->AsLongConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const override { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const override { return GetValue() == -1; }
  bool IsArithmeticZero() const override { return GetValue() == 0; }
  bool IsZeroBitPattern() const override { return GetValue() == 0; }
  bool IsOne() const override { return GetValue() == 1; }

  DECLARE_INSTRUCTION(LongConstant);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(LongConstant);

 private:
  explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(kLongConstant, DataType::Type::kInt64, dex_pc),
        value_(value) {
  }

  const int64_t value_;

  friend class HGraph;
};
3033
3034 class HFloatConstant final : public HConstant {
3035 public:
GetValue()3036 float GetValue() const { return value_; }
3037
GetValueAsUint64()3038 uint64_t GetValueAsUint64() const override {
3039 return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_));
3040 }
3041
InstructionDataEquals(const HInstruction * other)3042 bool InstructionDataEquals(const HInstruction* other) const override {
3043 DCHECK(other->IsFloatConstant()) << other->DebugName();
3044 return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64();
3045 }
3046
ComputeHashCode()3047 size_t ComputeHashCode() const override { return static_cast<size_t>(GetValue()); }
3048
IsMinusOne()3049 bool IsMinusOne() const override {
3050 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f));
3051 }
IsArithmeticZero()3052 bool IsArithmeticZero() const override {
3053 return std::fpclassify(value_) == FP_ZERO;
3054 }
IsArithmeticPositiveZero()3055 bool IsArithmeticPositiveZero() const {
3056 return IsArithmeticZero() && !std::signbit(value_);
3057 }
IsArithmeticNegativeZero()3058 bool IsArithmeticNegativeZero() const {
3059 return IsArithmeticZero() && std::signbit(value_);
3060 }
IsZeroBitPattern()3061 bool IsZeroBitPattern() const override {
3062 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(0.0f);
3063 }
IsOne()3064 bool IsOne() const override {
3065 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f);
3066 }
IsNaN()3067 bool IsNaN() const {
3068 return std::isnan(value_);
3069 }
3070
3071 DECLARE_INSTRUCTION(FloatConstant);
3072
3073 protected:
3074 DEFAULT_COPY_CONSTRUCTOR(FloatConstant);
3075
3076 private:
3077 explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc)
HConstant(kFloatConstant,DataType::Type::kFloat32,dex_pc)3078 : HConstant(kFloatConstant, DataType::Type::kFloat32, dex_pc),
3079 value_(value) {
3080 }
3081 explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
HConstant(kFloatConstant,DataType::Type::kFloat32,dex_pc)3082 : HConstant(kFloatConstant, DataType::Type::kFloat32, dex_pc),
3083 value_(bit_cast<float, int32_t>(value)) {
3084 }
3085
3086 const float value_;
3087
3088 // Only the SsaBuilder and HGraph can create floating-point constants.
3089 friend class SsaBuilder;
3090 friend class HGraph;
3091 };
3092
3093 class HDoubleConstant final : public HConstant {
3094 public:
GetValue()3095 double GetValue() const { return value_; }
3096
GetValueAsUint64()3097 uint64_t GetValueAsUint64() const override { return bit_cast<uint64_t, double>(value_); }
3098
InstructionDataEquals(const HInstruction * other)3099 bool InstructionDataEquals(const HInstruction* other) const override {
3100 DCHECK(other->IsDoubleConstant()) << other->DebugName();
3101 return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64();
3102 }
3103
ComputeHashCode()3104 size_t ComputeHashCode() const override { return static_cast<size_t>(GetValue()); }
3105
IsMinusOne()3106 bool IsMinusOne() const override {
3107 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0));
3108 }
IsArithmeticZero()3109 bool IsArithmeticZero() const override {
3110 return std::fpclassify(value_) == FP_ZERO;
3111 }
IsArithmeticPositiveZero()3112 bool IsArithmeticPositiveZero() const {
3113 return IsArithmeticZero() && !std::signbit(value_);
3114 }
IsArithmeticNegativeZero()3115 bool IsArithmeticNegativeZero() const {
3116 return IsArithmeticZero() && std::signbit(value_);
3117 }
IsZeroBitPattern()3118 bool IsZeroBitPattern() const override {
3119 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((0.0));
3120 }
IsOne()3121 bool IsOne() const override {
3122 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0);
3123 }
IsNaN()3124 bool IsNaN() const {
3125 return std::isnan(value_);
3126 }
3127
3128 DECLARE_INSTRUCTION(DoubleConstant);
3129
3130 protected:
3131 DEFAULT_COPY_CONSTRUCTOR(DoubleConstant);
3132
3133 private:
3134 explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc)
HConstant(kDoubleConstant,DataType::Type::kFloat64,dex_pc)3135 : HConstant(kDoubleConstant, DataType::Type::kFloat64, dex_pc),
3136 value_(value) {
3137 }
3138 explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
HConstant(kDoubleConstant,DataType::Type::kFloat64,dex_pc)3139 : HConstant(kDoubleConstant, DataType::Type::kFloat64, dex_pc),
3140 value_(bit_cast<double, int64_t>(value)) {
3141 }
3142
3143 const double value_;
3144
3145 // Only the SsaBuilder and HGraph can create floating-point constants.
3146 friend class SsaBuilder;
3147 friend class HGraph;
3148 };
3149
3150 // Conditional branch. A block ending with an HIf instruction must have
3151 // two successors.
class HIf final : public HExpression<1> {
 public:
  // `input` is the boolean condition deciding which successor is taken.
  explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(kIf, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  bool IsClonable() const override { return true; }
  bool IsControlFlow() const override { return true; }

  // Successor taken when the condition is true (successor index 0 by convention).
  HBasicBlock* IfTrueSuccessor() const {
    return GetBlock()->GetSuccessors()[0];
  }

  // Successor taken when the condition is false (successor index 1 by convention).
  HBasicBlock* IfFalseSuccessor() const {
    return GetBlock()->GetSuccessors()[1];
  }

  DECLARE_INSTRUCTION(If);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(If);
};
3175
3176
3177 // Abstract instruction which marks the beginning and/or end of a try block and
3178 // links it to the respective exception handlers. Behaves the same as a Goto in
3179 // non-exceptional control flow.
3180 // Normal-flow successor is stored at index zero, exception handlers under
3181 // higher indices in no particular order.
class HTryBoundary final : public HExpression<0> {
 public:
  // Whether this boundary marks the entry or the exit of the try block.
  enum class BoundaryKind {
    kEntry,
    kExit,
    kLast = kExit
  };

  // SideEffects::CanTriggerGC prevents instructions with SideEffects::DependOnGC to be alive
  // across the catch block entering edges as GC might happen during throwing an exception.
  // TryBoundary with BoundaryKind::kExit is conservatively used for that as there is no
  // HInstruction which a catch block must start from.
  explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
      : HExpression(kTryBoundary,
                    (kind == BoundaryKind::kExit) ? SideEffects::CanTriggerGC()
                                                  : SideEffects::None(),
                    dex_pc) {
    SetPackedField<BoundaryKindField>(kind);
  }

  bool IsControlFlow() const override { return true; }

  // Returns the block's non-exceptional successor (index zero).
  HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }

  // Returns the exception handlers, i.e. all successors except the first.
  ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
    return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
  }

  // Returns whether `handler` is among its exception handlers (non-zero index
  // successors).
  bool HasExceptionHandler(const HBasicBlock& handler) const {
    DCHECK(handler.IsCatchBlock());
    return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
  }

  // If not present already, adds `handler` to its block's list of exception
  // handlers.
  void AddExceptionHandler(HBasicBlock* handler) {
    if (!HasExceptionHandler(*handler)) {
      GetBlock()->AddSuccessor(handler);
    }
  }

  BoundaryKind GetBoundaryKind() const { return GetPackedField<BoundaryKindField>(); }
  bool IsEntry() const { return GetBoundaryKind() == BoundaryKind::kEntry; }

  bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;

  DECLARE_INSTRUCTION(TryBoundary);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(TryBoundary);

 private:
  // The boundary kind is stored in the packed bits right after the generic ones.
  static constexpr size_t kFieldBoundaryKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldBoundaryKindSize =
      MinimumBitsToStore(static_cast<size_t>(BoundaryKind::kLast));
  static constexpr size_t kNumberOfTryBoundaryPackedBits =
      kFieldBoundaryKind + kFieldBoundaryKindSize;
  static_assert(kNumberOfTryBoundaryPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using BoundaryKindField = BitField<BoundaryKind, kFieldBoundaryKind, kFieldBoundaryKindSize>;
};
3246
3247 // Deoptimize to interpreter, upon checking a condition.
3248 class HDeoptimize final : public HVariableInputSizeInstruction {
3249 public:
3250 // Use this constructor when the `HDeoptimize` acts as a barrier, where no code can move
3251 // across.
HDeoptimize(ArenaAllocator * allocator,HInstruction * cond,DeoptimizationKind kind,uint32_t dex_pc)3252 HDeoptimize(ArenaAllocator* allocator,
3253 HInstruction* cond,
3254 DeoptimizationKind kind,
3255 uint32_t dex_pc)
3256 : HVariableInputSizeInstruction(
3257 kDeoptimize,
3258 SideEffects::All(),
3259 dex_pc,
3260 allocator,
3261 /* number_of_inputs= */ 1,
3262 kArenaAllocMisc) {
3263 SetPackedFlag<kFieldCanBeMoved>(false);
3264 SetPackedField<DeoptimizeKindField>(kind);
3265 SetRawInputAt(0, cond);
3266 }
3267
IsClonable()3268 bool IsClonable() const override { return true; }
3269
3270 // Use this constructor when the `HDeoptimize` guards an instruction, and any user
3271 // that relies on the deoptimization to pass should have its input be the `HDeoptimize`
3272 // instead of `guard`.
3273 // We set CanTriggerGC to prevent any intermediate address to be live
3274 // at the point of the `HDeoptimize`.
HDeoptimize(ArenaAllocator * allocator,HInstruction * cond,HInstruction * guard,DeoptimizationKind kind,uint32_t dex_pc)3275 HDeoptimize(ArenaAllocator* allocator,
3276 HInstruction* cond,
3277 HInstruction* guard,
3278 DeoptimizationKind kind,
3279 uint32_t dex_pc)
3280 : HVariableInputSizeInstruction(
3281 kDeoptimize,
3282 guard->GetType(),
3283 SideEffects::CanTriggerGC(),
3284 dex_pc,
3285 allocator,
3286 /* number_of_inputs= */ 2,
3287 kArenaAllocMisc) {
3288 SetPackedFlag<kFieldCanBeMoved>(true);
3289 SetPackedField<DeoptimizeKindField>(kind);
3290 SetRawInputAt(0, cond);
3291 SetRawInputAt(1, guard);
3292 }
3293
CanBeMoved()3294 bool CanBeMoved() const override { return GetPackedFlag<kFieldCanBeMoved>(); }
3295
InstructionDataEquals(const HInstruction * other)3296 bool InstructionDataEquals(const HInstruction* other) const override {
3297 return (other->CanBeMoved() == CanBeMoved()) && (other->AsDeoptimize()->GetKind() == GetKind());
3298 }
3299
NeedsEnvironment()3300 bool NeedsEnvironment() const override { return true; }
3301
CanThrow()3302 bool CanThrow() const override { return true; }
3303
GetDeoptimizationKind()3304 DeoptimizationKind GetDeoptimizationKind() const { return GetPackedField<DeoptimizeKindField>(); }
3305
GuardsAnInput()3306 bool GuardsAnInput() const {
3307 return InputCount() == 2;
3308 }
3309
GuardedInput()3310 HInstruction* GuardedInput() const {
3311 DCHECK(GuardsAnInput());
3312 return InputAt(1);
3313 }
3314
RemoveGuard()3315 void RemoveGuard() {
3316 RemoveInputAt(1);
3317 }
3318
3319 DECLARE_INSTRUCTION(Deoptimize);
3320
3321 protected:
3322 DEFAULT_COPY_CONSTRUCTOR(Deoptimize);
3323
3324 private:
3325 static constexpr size_t kFieldCanBeMoved = kNumberOfGenericPackedBits;
3326 static constexpr size_t kFieldDeoptimizeKind = kNumberOfGenericPackedBits + 1;
3327 static constexpr size_t kFieldDeoptimizeKindSize =
3328 MinimumBitsToStore(static_cast<size_t>(DeoptimizationKind::kLast));
3329 static constexpr size_t kNumberOfDeoptimizePackedBits =
3330 kFieldDeoptimizeKind + kFieldDeoptimizeKindSize;
3331 static_assert(kNumberOfDeoptimizePackedBits <= kMaxNumberOfPackedBits,
3332 "Too many packed fields.");
3333 using DeoptimizeKindField =
3334 BitField<DeoptimizationKind, kFieldDeoptimizeKind, kFieldDeoptimizeKindSize>;
3335 };
3336
3337 // Represents a should_deoptimize flag. Currently used for CHA-based devirtualization.
3338 // The compiled code checks this flag value in a guard before devirtualized call and
3339 // if it's true, starts to do deoptimization.
3340 // It has a 4-byte slot on stack.
3341 // TODO: allocate a register for this flag.
class HShouldDeoptimizeFlag final : public HVariableInputSizeInstruction {
 public:
  // CHA guards are only optimized in a separate pass and it has no side effects
  // with regard to other passes.
  HShouldDeoptimizeFlag(ArenaAllocator* allocator, uint32_t dex_pc)
      : HVariableInputSizeInstruction(kShouldDeoptimizeFlag,
                                      DataType::Type::kInt32,
                                      SideEffects::None(),
                                      dex_pc,
                                      allocator,
                                      0,  // number_of_inputs
                                      kArenaAllocCHA) {
  }

  // We do all CHA guard elimination/motion in a single pass, after which there is no
  // further guard elimination/motion since a guard might have been used for justification
  // of the elimination of another guard. Therefore, we pretend this guard cannot be moved
  // to avoid other optimizations trying to move it.
  bool CanBeMoved() const override { return false; }

  DECLARE_INSTRUCTION(ShouldDeoptimizeFlag);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ShouldDeoptimizeFlag);
};
3367
3368 // Represents the ArtMethod that was passed as a first argument to
3369 // the method. It is used by instructions that depend on it, like
3370 // instructions that work with the dex cache.
class HCurrentMethod final : public HExpression<0> {
 public:
  // `type` is the representation type of the method reference.
  // NOTE(review): presumably a pointer-sized integral type — confirm against callers.
  explicit HCurrentMethod(DataType::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(kCurrentMethod, type, SideEffects::None(), dex_pc) {
  }

  DECLARE_INSTRUCTION(CurrentMethod);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(CurrentMethod);
};
3382
3383 // Fetches an ArtMethod from the virtual table or the interface method table
3384 // of a class.
3385 class HClassTableGet final : public HExpression<1> {
3386 public:
3387 enum class TableKind {
3388 kVTable,
3389 kIMTable,
3390 kLast = kIMTable
3391 };
HClassTableGet(HInstruction * cls,DataType::Type type,TableKind kind,size_t index,uint32_t dex_pc)3392 HClassTableGet(HInstruction* cls,
3393 DataType::Type type,
3394 TableKind kind,
3395 size_t index,
3396 uint32_t dex_pc)
3397 : HExpression(kClassTableGet, type, SideEffects::None(), dex_pc),
3398 index_(index) {
3399 SetPackedField<TableKindField>(kind);
3400 SetRawInputAt(0, cls);
3401 }
3402
IsClonable()3403 bool IsClonable() const override { return true; }
CanBeMoved()3404 bool CanBeMoved() const override { return true; }
InstructionDataEquals(const HInstruction * other)3405 bool InstructionDataEquals(const HInstruction* other) const override {
3406 return other->AsClassTableGet()->GetIndex() == index_ &&
3407 other->AsClassTableGet()->GetPackedFields() == GetPackedFields();
3408 }
3409
GetTableKind()3410 TableKind GetTableKind() const { return GetPackedField<TableKindField>(); }
GetIndex()3411 size_t GetIndex() const { return index_; }
3412
3413 DECLARE_INSTRUCTION(ClassTableGet);
3414
3415 protected:
3416 DEFAULT_COPY_CONSTRUCTOR(ClassTableGet);
3417
3418 private:
3419 static constexpr size_t kFieldTableKind = kNumberOfGenericPackedBits;
3420 static constexpr size_t kFieldTableKindSize =
3421 MinimumBitsToStore(static_cast<size_t>(TableKind::kLast));
3422 static constexpr size_t kNumberOfClassTableGetPackedBits = kFieldTableKind + kFieldTableKindSize;
3423 static_assert(kNumberOfClassTableGetPackedBits <= kMaxNumberOfPackedBits,
3424 "Too many packed fields.");
3425 using TableKindField = BitField<TableKind, kFieldTableKind, kFieldTableKind>;
3426
3427 // The index of the ArtMethod in the table.
3428 const size_t index_;
3429 };
3430
3431 // PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
3432 // have one successor for each entry in the switch table, and the final successor
3433 // will be the block containing the next Dex opcode.
class HPackedSwitch final : public HExpression<1> {
 public:
  // Dispatches on `input`, covering the contiguous case values
  // [start_value, start_value + num_entries).
  HPackedSwitch(int32_t start_value,
                uint32_t num_entries,
                HInstruction* input,
                uint32_t dex_pc = kNoDexPc)
      : HExpression(kPackedSwitch, SideEffects::None(), dex_pc),
        start_value_(start_value),
        num_entries_(num_entries) {
    SetRawInputAt(0, input);
  }

  bool IsClonable() const override { return true; }

  bool IsControlFlow() const override { return true; }

  // Lowest case value handled by the switch.
  int32_t GetStartValue() const { return start_value_; }

  // Number of explicit case entries (excluding the default successor).
  uint32_t GetNumEntries() const { return num_entries_; }

  HBasicBlock* GetDefaultBlock() const {
    // Last entry is the default block.
    return GetBlock()->GetSuccessors()[num_entries_];
  }
  DECLARE_INSTRUCTION(PackedSwitch);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(PackedSwitch);

 private:
  const int32_t start_value_;
  const uint32_t num_entries_;
};
3467
class HUnaryOperation : public HExpression<1> {
 public:
  HUnaryOperation(InstructionKind kind,
                  DataType::Type result_type,
                  HInstruction* input,
                  uint32_t dex_pc = kNoDexPc)
      : HExpression(kind, result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  // All of the UnaryOperation instructions are clonable.
  bool IsClonable() const override { return true; }

  HInstruction* GetInput() const { return InputAt(0); }
  DataType::Type GetResultType() const { return GetType(); }

  // Unary operations are pure: movable, and equal whenever their input is.
  bool CanBeMoved() const override { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation. If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x`. One overload per constant input type.
  virtual HConstant* Evaluate(HIntConstant* x) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x) const = 0;
  virtual HConstant* Evaluate(HFloatConstant* x) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x) const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(UnaryOperation);
};
3505
class HBinaryOperation : public HExpression<2> {
 public:
  HBinaryOperation(InstructionKind kind,
                   DataType::Type result_type,
                   HInstruction* left,
                   HInstruction* right,
                   SideEffects side_effects = SideEffects::None(),
                   uint32_t dex_pc = kNoDexPc)
      : HExpression(kind, result_type, side_effects, dex_pc) {
    SetRawInputAt(0, left);
    SetRawInputAt(1, right);
  }

  // All of the BinaryOperation instructions are clonable.
  bool IsClonable() const override { return true; }

  HInstruction* GetLeft() const { return InputAt(0); }
  HInstruction* GetRight() const { return InputAt(1); }
  DataType::Type GetResultType() const { return GetType(); }

  // Whether the operands may be swapped without changing the result.
  virtual bool IsCommutative() const { return false; }

  // Put constant on the right.
  // Returns whether order is changed.
  bool OrderInputsWithConstantOnTheRight() {
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    if (left->IsConstant() && !right->IsConstant()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
      return true;
    }
    return false;
  }

  // Order inputs by instruction id, but favor constant on the right side.
  // This helps GVN for commutative ops.
  void OrderInputs() {
    DCHECK(IsCommutative());
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    // Already canonical: identical inputs, or the only constant is on the right.
    if (left == right || (!left->IsConstant() && right->IsConstant())) {
      return;
    }
    if (OrderInputsWithConstantOnTheRight()) {
      return;
    }
    // Order according to instruction id.
    if (left->GetId() > right->GetId()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
    }
  }

  // Binary operations are pure: movable, and equal whenever their inputs are.
  bool CanBeMoved() const override { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation. If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x` and `y`. Overloads that make no sense for a
  // given operation keep the fatal default below.
  virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                              HNullConstant* y ATTRIBUTE_UNUSED) const {
    LOG(FATAL) << DebugName() << " is not defined for the (null, null) case.";
    UNREACHABLE();
  }
  virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
  // NOTE(review): the mixed (long, int) overload is presumably for operations
  // taking a long value and an int amount (e.g. shifts) — confirm in subclasses.
  virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
                              HIntConstant* y ATTRIBUTE_UNUSED) const {
    LOG(FATAL) << DebugName() << " is not defined for the (long, int) case.";
    UNREACHABLE();
  }
  virtual HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const = 0;

  // Returns an input that can legally be used as the right input and is
  // constant, or null.
  HConstant* GetConstantRight() const;

  // If `GetConstantRight()` returns one of the input, this returns the other
  // one. Otherwise it returns null.
  HInstruction* GetLeastConstantLeft() const;

  DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(BinaryOperation);
};
3599
3600 // The comparison bias applies for floating point operations and indicates how NaN
3601 // comparisons are treated:
enum class ComparisonBias {
  kNoBias,  // Bias is not applicable (e.g. for a long comparison).
  kGtBias,  // Return 1 for NaN comparisons.
  kLtBias,  // Return -1 for NaN comparisons.
  kLast = kLtBias
};
3608
3609 std::ostream& operator<<(std::ostream& os, const ComparisonBias& rhs);
3610
class HCondition : public HBinaryOperation {
 public:
  HCondition(InstructionKind kind,
             HInstruction* first,
             HInstruction* second,
             uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(kind,
                         DataType::Type::kBool,
                         first,
                         second,
                         SideEffects::None(),
                         dex_pc) {
    SetPackedField<ComparisonBiasField>(ComparisonBias::kNoBias);
  }

  // For code generation purposes, returns whether this instruction is just before
  // `instruction`, and disregard moves in between.
  bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;

  DECLARE_ABSTRACT_INSTRUCTION(Condition);

  // The IfCondition this instruction computes (e.g. kCondEQ for HEqual).
  virtual IfCondition GetCondition() const = 0;

  // The logical negation of GetCondition().
  virtual IfCondition GetOppositeCondition() const = 0;

  bool IsGtBias() const { return GetBias() == ComparisonBias::kGtBias; }
  bool IsLtBias() const { return GetBias() == ComparisonBias::kLtBias; }

  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
  void SetBias(ComparisonBias bias) { SetPackedField<ComparisonBiasField>(bias); }

  // Two conditions are equal when all packed fields (kind, bias, ...) match.
  bool InstructionDataEquals(const HInstruction* other) const override {
    return GetPackedFields() == other->AsCondition()->GetPackedFields();
  }

  // Whether a floating-point comparison evaluates to true when an input is NaN.
  bool IsFPConditionTrueIfNaN() const {
    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    IfCondition if_cond = GetCondition();
    if (if_cond == kCondNE) {
      return true;
    } else if (if_cond == kCondEQ) {
      return false;
    }
    return ((if_cond == kCondGT) || (if_cond == kCondGE)) && IsGtBias();
  }

  // Whether a floating-point comparison evaluates to false when an input is NaN.
  bool IsFPConditionFalseIfNaN() const {
    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    IfCondition if_cond = GetCondition();
    if (if_cond == kCondEQ) {
      return true;
    } else if (if_cond == kCondNE) {
      return false;
    }
    return ((if_cond == kCondLT) || (if_cond == kCondLE)) && IsGtBias();
  }

 protected:
  // Needed if we merge a HCompare into a HCondition.
  static constexpr size_t kFieldComparisonBias = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldComparisonBiasSize =
      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
  static constexpr size_t kNumberOfConditionPackedBits =
      kFieldComparisonBias + kFieldComparisonBiasSize;
  static_assert(kNumberOfConditionPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ComparisonBiasField =
      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;

  // Three-way comparison: 1 if x > y, -1 if x < y, 0 otherwise.
  template <typename T>
  int32_t Compare(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }

  // Three-way comparison for floats/doubles, resolving NaN per the bias.
  template <typename T>
  int32_t CompareFP(T x, T y) const {
    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
    // Handle the bias.
    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compare(x, y);
  }

  // Return an integer constant containing the result of a condition evaluated at compile time.
  HIntConstant* MakeConstantCondition(bool value, uint32_t dex_pc) const {
    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
  }

  DEFAULT_COPY_CONSTRUCTOR(Condition);
};
3697
3698 // Instruction to check if two inputs are equal to each other.
3699 class HEqual final : public HCondition {
3700 public:
3701 HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
HCondition(kEqual,first,second,dex_pc)3702 : HCondition(kEqual, first, second, dex_pc) {
3703 }
3704
IsCommutative()3705 bool IsCommutative() const override { return true; }
3706
Evaluate(HNullConstant * x ATTRIBUTE_UNUSED,HNullConstant * y ATTRIBUTE_UNUSED)3707 HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3708 HNullConstant* y ATTRIBUTE_UNUSED) const override {
3709 return MakeConstantCondition(true, GetDexPc());
3710 }
Evaluate(HIntConstant * x,HIntConstant * y)3711 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
3712 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3713 }
3714 // In the following Evaluate methods, a HCompare instruction has
3715 // been merged into this HEqual instruction; evaluate it as
3716 // `Compare(x, y) == 0`.
Evaluate(HLongConstant * x,HLongConstant * y)3717 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
3718 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0),
3719 GetDexPc());
3720 }
Evaluate(HFloatConstant * x,HFloatConstant * y)3721 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
3722 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3723 }
Evaluate(HDoubleConstant * x,HDoubleConstant * y)3724 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
3725 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3726 }
3727
3728 DECLARE_INSTRUCTION(Equal);
3729
GetCondition()3730 IfCondition GetCondition() const override {
3731 return kCondEQ;
3732 }
3733
GetOppositeCondition()3734 IfCondition GetOppositeCondition() const override {
3735 return kCondNE;
3736 }
3737
3738 protected:
3739 DEFAULT_COPY_CONSTRUCTOR(Equal);
3740
3741 private:
Compute(T x,T y)3742 template <typename T> static bool Compute(T x, T y) { return x == y; }
3743 };
3744
3745 class HNotEqual final : public HCondition {
3746 public:
3747 HNotEqual(HInstruction* first, HInstruction* second,
3748 uint32_t dex_pc = kNoDexPc)
HCondition(kNotEqual,first,second,dex_pc)3749 : HCondition(kNotEqual, first, second, dex_pc) {
3750 }
3751
IsCommutative()3752 bool IsCommutative() const override { return true; }
3753
Evaluate(HNullConstant * x ATTRIBUTE_UNUSED,HNullConstant * y ATTRIBUTE_UNUSED)3754 HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3755 HNullConstant* y ATTRIBUTE_UNUSED) const override {
3756 return MakeConstantCondition(false, GetDexPc());
3757 }
Evaluate(HIntConstant * x,HIntConstant * y)3758 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
3759 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3760 }
3761 // In the following Evaluate methods, a HCompare instruction has
3762 // been merged into this HNotEqual instruction; evaluate it as
3763 // `Compare(x, y) != 0`.
Evaluate(HLongConstant * x,HLongConstant * y)3764 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
3765 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3766 }
Evaluate(HFloatConstant * x,HFloatConstant * y)3767 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
3768 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3769 }
Evaluate(HDoubleConstant * x,HDoubleConstant * y)3770 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
3771 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3772 }
3773
3774 DECLARE_INSTRUCTION(NotEqual);
3775
GetCondition()3776 IfCondition GetCondition() const override {
3777 return kCondNE;
3778 }
3779
GetOppositeCondition()3780 IfCondition GetOppositeCondition() const override {
3781 return kCondEQ;
3782 }
3783
3784 protected:
3785 DEFAULT_COPY_CONSTRUCTOR(NotEqual);
3786
3787 private:
Compute(T x,T y)3788 template <typename T> static bool Compute(T x, T y) { return x != y; }
3789 };
3790
3791 class HLessThan final : public HCondition {
3792 public:
3793 HLessThan(HInstruction* first, HInstruction* second,
3794 uint32_t dex_pc = kNoDexPc)
HCondition(kLessThan,first,second,dex_pc)3795 : HCondition(kLessThan, first, second, dex_pc) {
3796 }
3797
Evaluate(HIntConstant * x,HIntConstant * y)3798 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
3799 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3800 }
3801 // In the following Evaluate methods, a HCompare instruction has
3802 // been merged into this HLessThan instruction; evaluate it as
3803 // `Compare(x, y) < 0`.
Evaluate(HLongConstant * x,HLongConstant * y)3804 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
3805 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3806 }
Evaluate(HFloatConstant * x,HFloatConstant * y)3807 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
3808 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3809 }
Evaluate(HDoubleConstant * x,HDoubleConstant * y)3810 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
3811 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3812 }
3813
3814 DECLARE_INSTRUCTION(LessThan);
3815
GetCondition()3816 IfCondition GetCondition() const override {
3817 return kCondLT;
3818 }
3819
GetOppositeCondition()3820 IfCondition GetOppositeCondition() const override {
3821 return kCondGE;
3822 }
3823
3824 protected:
3825 DEFAULT_COPY_CONSTRUCTOR(LessThan);
3826
3827 private:
Compute(T x,T y)3828 template <typename T> static bool Compute(T x, T y) { return x < y; }
3829 };
3830
// Condition instruction computing `first <= second`.
class HLessThanOrEqual final : public HCondition {
 public:
  HLessThanOrEqual(HInstruction* first, HInstruction* second,
                   uint32_t dex_pc = kNoDexPc)
      : HCondition(kLessThanOrEqual, first, second, dex_pc) {
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HLessThanOrEqual instruction; evaluate it as
  // `Compare(x, y) <= 0`.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }

  DECLARE_INSTRUCTION(LessThanOrEqual);

  IfCondition GetCondition() const override {
    return kCondLE;
  }

  IfCondition GetOppositeCondition() const override {
    return kCondGT;
  }

 protected:
  DEFAULT_COPY_CONSTRUCTOR(LessThanOrEqual);

 private:
  template <typename T> static bool Compute(T x, T y) { return x <= y; }
};
3870
// Condition instruction computing `first > second`.
class HGreaterThan final : public HCondition {
 public:
  HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(kGreaterThan, first, second, dex_pc) {
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HGreaterThan instruction; evaluate it as
  // `Compare(x, y) > 0`.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }

  DECLARE_INSTRUCTION(GreaterThan);

  IfCondition GetCondition() const override {
    return kCondGT;
  }

  IfCondition GetOppositeCondition() const override {
    return kCondLE;
  }

 protected:
  DEFAULT_COPY_CONSTRUCTOR(GreaterThan);

 private:
  template <typename T> static bool Compute(T x, T y) { return x > y; }
};
3909
3910 class HGreaterThanOrEqual final : public HCondition {
3911 public:
3912 HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
HCondition(kGreaterThanOrEqual,first,second,dex_pc)3913 : HCondition(kGreaterThanOrEqual, first, second, dex_pc) {
3914 }
3915
Evaluate(HIntConstant * x,HIntConstant * y)3916 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
3917 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3918 }
3919 // In the following Evaluate methods, a HCompare instruction has
3920 // been merged into this HGreaterThanOrEqual instruction; evaluate it as
3921 // `Compare(x, y) >= 0`.
Evaluate(HLongConstant * x,HLongConstant * y)3922 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
3923 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3924 }
Evaluate(HFloatConstant * x,HFloatConstant * y)3925 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
3926 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3927 }
Evaluate(HDoubleConstant * x,HDoubleConstant * y)3928 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
3929 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3930 }
3931
3932 DECLARE_INSTRUCTION(GreaterThanOrEqual);
3933
GetCondition()3934 IfCondition GetCondition() const override {
3935 return kCondGE;
3936 }
3937
GetOppositeCondition()3938 IfCondition GetOppositeCondition() const override {
3939 return kCondLT;
3940 }
3941
3942 protected:
3943 DEFAULT_COPY_CONSTRUCTOR(GreaterThanOrEqual);
3944
3945 private:
Compute(T x,T y)3946 template <typename T> static bool Compute(T x, T y) { return x >= y; }
3947 };
3948
3949 class HBelow final : public HCondition {
3950 public:
3951 HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
HCondition(kBelow,first,second,dex_pc)3952 : HCondition(kBelow, first, second, dex_pc) {
3953 }
3954
Evaluate(HIntConstant * x,HIntConstant * y)3955 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
3956 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3957 }
Evaluate(HLongConstant * x,HLongConstant * y)3958 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
3959 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3960 }
Evaluate(HFloatConstant * x ATTRIBUTE_UNUSED,HFloatConstant * y ATTRIBUTE_UNUSED)3961 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3962 HFloatConstant* y ATTRIBUTE_UNUSED) const override {
3963 LOG(FATAL) << DebugName() << " is not defined for float values";
3964 UNREACHABLE();
3965 }
Evaluate(HDoubleConstant * x ATTRIBUTE_UNUSED,HDoubleConstant * y ATTRIBUTE_UNUSED)3966 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3967 HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
3968 LOG(FATAL) << DebugName() << " is not defined for double values";
3969 UNREACHABLE();
3970 }
3971
3972 DECLARE_INSTRUCTION(Below);
3973
GetCondition()3974 IfCondition GetCondition() const override {
3975 return kCondB;
3976 }
3977
GetOppositeCondition()3978 IfCondition GetOppositeCondition() const override {
3979 return kCondAE;
3980 }
3981
3982 protected:
3983 DEFAULT_COPY_CONSTRUCTOR(Below);
3984
3985 private:
Compute(T x,T y)3986 template <typename T> static bool Compute(T x, T y) {
3987 return MakeUnsigned(x) < MakeUnsigned(y);
3988 }
3989 };
3990
3991 class HBelowOrEqual final : public HCondition {
3992 public:
3993 HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
HCondition(kBelowOrEqual,first,second,dex_pc)3994 : HCondition(kBelowOrEqual, first, second, dex_pc) {
3995 }
3996
Evaluate(HIntConstant * x,HIntConstant * y)3997 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
3998 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3999 }
Evaluate(HLongConstant * x,HLongConstant * y)4000 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4001 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4002 }
Evaluate(HFloatConstant * x ATTRIBUTE_UNUSED,HFloatConstant * y ATTRIBUTE_UNUSED)4003 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4004 HFloatConstant* y ATTRIBUTE_UNUSED) const override {
4005 LOG(FATAL) << DebugName() << " is not defined for float values";
4006 UNREACHABLE();
4007 }
Evaluate(HDoubleConstant * x ATTRIBUTE_UNUSED,HDoubleConstant * y ATTRIBUTE_UNUSED)4008 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4009 HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
4010 LOG(FATAL) << DebugName() << " is not defined for double values";
4011 UNREACHABLE();
4012 }
4013
4014 DECLARE_INSTRUCTION(BelowOrEqual);
4015
GetCondition()4016 IfCondition GetCondition() const override {
4017 return kCondBE;
4018 }
4019
GetOppositeCondition()4020 IfCondition GetOppositeCondition() const override {
4021 return kCondA;
4022 }
4023
4024 protected:
4025 DEFAULT_COPY_CONSTRUCTOR(BelowOrEqual);
4026
4027 private:
Compute(T x,T y)4028 template <typename T> static bool Compute(T x, T y) {
4029 return MakeUnsigned(x) <= MakeUnsigned(y);
4030 }
4031 };
4032
4033 class HAbove final : public HCondition {
4034 public:
4035 HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
HCondition(kAbove,first,second,dex_pc)4036 : HCondition(kAbove, first, second, dex_pc) {
4037 }
4038
Evaluate(HIntConstant * x,HIntConstant * y)4039 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4040 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4041 }
Evaluate(HLongConstant * x,HLongConstant * y)4042 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4043 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4044 }
Evaluate(HFloatConstant * x ATTRIBUTE_UNUSED,HFloatConstant * y ATTRIBUTE_UNUSED)4045 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4046 HFloatConstant* y ATTRIBUTE_UNUSED) const override {
4047 LOG(FATAL) << DebugName() << " is not defined for float values";
4048 UNREACHABLE();
4049 }
Evaluate(HDoubleConstant * x ATTRIBUTE_UNUSED,HDoubleConstant * y ATTRIBUTE_UNUSED)4050 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4051 HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
4052 LOG(FATAL) << DebugName() << " is not defined for double values";
4053 UNREACHABLE();
4054 }
4055
4056 DECLARE_INSTRUCTION(Above);
4057
GetCondition()4058 IfCondition GetCondition() const override {
4059 return kCondA;
4060 }
4061
GetOppositeCondition()4062 IfCondition GetOppositeCondition() const override {
4063 return kCondBE;
4064 }
4065
4066 protected:
4067 DEFAULT_COPY_CONSTRUCTOR(Above);
4068
4069 private:
Compute(T x,T y)4070 template <typename T> static bool Compute(T x, T y) {
4071 return MakeUnsigned(x) > MakeUnsigned(y);
4072 }
4073 };
4074
4075 class HAboveOrEqual final : public HCondition {
4076 public:
4077 HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
HCondition(kAboveOrEqual,first,second,dex_pc)4078 : HCondition(kAboveOrEqual, first, second, dex_pc) {
4079 }
4080
Evaluate(HIntConstant * x,HIntConstant * y)4081 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4082 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4083 }
Evaluate(HLongConstant * x,HLongConstant * y)4084 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4085 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4086 }
Evaluate(HFloatConstant * x ATTRIBUTE_UNUSED,HFloatConstant * y ATTRIBUTE_UNUSED)4087 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4088 HFloatConstant* y ATTRIBUTE_UNUSED) const override {
4089 LOG(FATAL) << DebugName() << " is not defined for float values";
4090 UNREACHABLE();
4091 }
Evaluate(HDoubleConstant * x ATTRIBUTE_UNUSED,HDoubleConstant * y ATTRIBUTE_UNUSED)4092 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4093 HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
4094 LOG(FATAL) << DebugName() << " is not defined for double values";
4095 UNREACHABLE();
4096 }
4097
4098 DECLARE_INSTRUCTION(AboveOrEqual);
4099
GetCondition()4100 IfCondition GetCondition() const override {
4101 return kCondAE;
4102 }
4103
GetOppositeCondition()4104 IfCondition GetOppositeCondition() const override {
4105 return kCondB;
4106 }
4107
4108 protected:
4109 DEFAULT_COPY_CONSTRUCTOR(AboveOrEqual);
4110
4111 private:
Compute(T x,T y)4112 template <typename T> static bool Compute(T x, T y) {
4113 return MakeUnsigned(x) >= MakeUnsigned(y);
4114 }
4115 };
4116
4117 // Instruction to check how two inputs compare to each other.
4118 // Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1.
class HCompare final : public HBinaryOperation {
 public:
  // Note that `comparison_type` is the type of comparison performed
  // between the comparison's inputs, not the type of the instantiated
  // HCompare instruction (which is always DataType::Type::kInt32).
  HCompare(DataType::Type comparison_type,
           HInstruction* first,
           HInstruction* second,
           ComparisonBias bias,
           uint32_t dex_pc)
      : HBinaryOperation(kCompare,
                         DataType::Type::kInt32,
                         first,
                         second,
                         SideEffectsForArchRuntimeCalls(comparison_type),
                         dex_pc) {
    SetPackedField<ComparisonBiasField>(bias);
    // Both inputs must have the same kind of type as the requested comparison.
    DCHECK_EQ(comparison_type, DataType::Kind(first->GetType()));
    DCHECK_EQ(comparison_type, DataType::Kind(second->GetType()));
  }

  // Three-way comparison: 1 if x > y, -1 if x < y, 0 if equal.
  template <typename T>
  int32_t Compute(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }

  // Three-way comparison for floating-point inputs; an unordered result
  // (NaN operand) yields 1 or -1 depending on the comparison bias.
  template <typename T>
  int32_t ComputeFP(T x, T y) const {
    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
    // Handle the bias.
    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compute(x, y);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
    // Note that there is no "cmp-int" Dex instruction so we shouldn't
    // reach this code path when processing a freshly built HIR
    // graph. However HCompare integer instructions can be synthesized
    // by the instruction simplifier to implement IntegerCompare and
    // IntegerSignum intrinsics, so we have to handle this case.
    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }

  // Two compares are interchangeable only if their packed fields
  // (notably the comparison bias) match.
  bool InstructionDataEquals(const HInstruction* other) const override {
    return GetPackedFields() == other->AsCompare()->GetPackedFields();
  }

  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }

  // Does this compare instruction have a "gt bias" (vs an "lt bias")?
  // Only meaningful for floating-point comparisons.
  bool IsGtBias() const {
    DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    return GetBias() == ComparisonBias::kGtBias;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(DataType::Type type ATTRIBUTE_UNUSED) {
    // Comparisons do not require a runtime call in any back end.
    return SideEffects::None();
  }

  DECLARE_INSTRUCTION(Compare);

 protected:
  // The comparison bias is stored in the instruction's packed fields,
  // right after the generic packed bits.
  static constexpr size_t kFieldComparisonBias = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldComparisonBiasSize =
      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
  static constexpr size_t kNumberOfComparePackedBits =
      kFieldComparisonBias + kFieldComparisonBiasSize;
  static_assert(kNumberOfComparePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ComparisonBiasField =
      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;

  // Return an integer constant containing the result of a comparison evaluated at compile time.
  HIntConstant* MakeConstantComparison(int32_t value, uint32_t dex_pc) const {
    DCHECK(value == -1 || value == 0 || value == 1) << value;
    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
  }

  DEFAULT_COPY_CONSTRUCTOR(Compare);
};
4207
// Allocates a new object instance of the class provided by the `cls` input
// (a HLoadClass, possibly wrapped in a HClinitCheck — see GetLoadClass()).
class HNewInstance final : public HExpression<1> {
 public:
  HNewInstance(HInstruction* cls,
               uint32_t dex_pc,
               dex::TypeIndex type_index,
               const DexFile& dex_file,
               bool finalizable,
               QuickEntrypointEnum entrypoint)
      : HExpression(kNewInstance,
                    DataType::Type::kReference,
                    SideEffects::CanTriggerGC(),
                    dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        entrypoint_(entrypoint) {
    SetPackedFlag<kFlagFinalizable>(finalizable);
    SetRawInputAt(0, cls);
  }

  bool IsClonable() const override { return true; }

  dex::TypeIndex GetTypeIndex() const { return type_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }

  // Calls runtime so needs an environment.
  bool NeedsEnvironment() const override { return true; }

  // Can throw errors when out-of-memory or if it's not instantiable/accessible.
  bool CanThrow() const override { return true; }

  // True if the allocation is routed through the entrypoint that performs
  // instantiability/access checks.
  bool NeedsChecks() const {
    return entrypoint_ == kQuickAllocObjectWithChecks;
  }

  bool IsFinalizable() const { return GetPackedFlag<kFlagFinalizable>(); }

  // A successful allocation never yields null.
  bool CanBeNull() const override { return false; }

  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }

  void SetEntrypoint(QuickEntrypointEnum entrypoint) {
    entrypoint_ = entrypoint;
  }

  // Returns the HLoadClass input, looking through an intervening HClinitCheck.
  HLoadClass* GetLoadClass() const {
    HInstruction* input = InputAt(0);
    if (input->IsClinitCheck()) {
      input = input->InputAt(0);
    }
    DCHECK(input->IsLoadClass());
    return input->AsLoadClass();
  }

  bool IsStringAlloc() const;

  DECLARE_INSTRUCTION(NewInstance);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(NewInstance);

 private:
  // The `finalizable` flag is packed right after the generic packed bits.
  static constexpr size_t kFlagFinalizable = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfNewInstancePackedBits = kFlagFinalizable + 1;
  static_assert(kNumberOfNewInstancePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const dex::TypeIndex type_index_;
  const DexFile& dex_file_;
  QuickEntrypointEnum entrypoint_;
};
4278
// Whether an intrinsic needs an environment or dex cache (see HInvoke::SetIntrinsic()).
enum IntrinsicNeedsEnvironmentOrCache {
  kNoEnvironmentOrCache,    // Intrinsic does not require an environment or dex cache.
  kNeedsEnvironmentOrCache  // Intrinsic requires an environment or requires a dex cache.
};

// Summary of an intrinsic's heap-memory side effects (see HInvoke::SetIntrinsic()).
enum IntrinsicSideEffects {
  kNoSideEffects,     // Intrinsic does not have any heap memory side effects.
  kReadSideEffects,   // Intrinsic may read heap memory.
  kWriteSideEffects,  // Intrinsic may write heap memory.
  kAllSideEffects     // Intrinsic may read or write heap memory, or trigger GC.
};

// Whether an intrinsic may throw (see HInvoke::SetIntrinsic()).
enum IntrinsicExceptions {
  kNoThrow,  // Intrinsic does not throw any exceptions.
  kCanThrow  // Intrinsic may throw exceptions.
};
4295
// Base class for all invoke instructions. The call arguments come first in
// the input list; some subclasses append extra non-argument inputs at the end
// (see GetNumberOfArguments() below).
class HInvoke : public HVariableInputSizeInstruction {
 public:
  bool NeedsEnvironment() const override;

  void SetArgumentAt(size_t index, HInstruction* argument) {
    SetRawInputAt(index, argument);
  }

  // Return the number of arguments. This number can be lower than
  // the number of inputs returned by InputCount(), as some invoke
  // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
  // inputs at the end of their list of inputs.
  uint32_t GetNumberOfArguments() const { return number_of_arguments_; }

  uint32_t GetDexMethodIndex() const { return dex_method_index_; }

  InvokeType GetInvokeType() const {
    return GetPackedField<InvokeTypeField>();
  }

  Intrinsics GetIntrinsic() const {
    return intrinsic_;
  }

  // Mark this invoke as a recognized intrinsic with the given properties.
  void SetIntrinsic(Intrinsics intrinsic,
                    IntrinsicNeedsEnvironmentOrCache needs_env_or_cache,
                    IntrinsicSideEffects side_effects,
                    IntrinsicExceptions exceptions);

  bool IsFromInlinedInvoke() const {
    return GetEnvironment()->IsFromInlinedInvoke();
  }

  void SetCanThrow(bool can_throw) { SetPackedFlag<kFlagCanThrow>(can_throw); }

  bool CanThrow() const override { return GetPackedFlag<kFlagCanThrow>(); }

  void SetAlwaysThrows(bool always_throws) { SetPackedFlag<kFlagAlwaysThrows>(always_throws); }

  bool AlwaysThrows() const override { return GetPackedFlag<kFlagAlwaysThrows>(); }

  // Only intrinsics that do not write heap memory are safe to move.
  bool CanBeMoved() const override { return IsIntrinsic() && !DoesAnyWrite(); }

  // Two invokes are interchangeable only if both are the same known intrinsic.
  bool InstructionDataEquals(const HInstruction* other) const override {
    return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_;
  }

  uint32_t* GetIntrinsicOptimizations() {
    return &intrinsic_optimizations_;
  }

  const uint32_t* GetIntrinsicOptimizations() const {
    return &intrinsic_optimizations_;
  }

  bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }

  ArtMethod* GetResolvedMethod() const { return resolved_method_; }
  void SetResolvedMethod(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_);

  DECLARE_ABSTRACT_INSTRUCTION(Invoke);

 protected:
  // Packed-field layout: invoke type, then the can-throw and always-throws flags.
  static constexpr size_t kFieldInvokeType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldInvokeTypeSize =
      MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
  static constexpr size_t kFlagCanThrow = kFieldInvokeType + kFieldInvokeTypeSize;
  static constexpr size_t kFlagAlwaysThrows = kFlagCanThrow + 1;
  static constexpr size_t kNumberOfInvokePackedBits = kFlagAlwaysThrows + 1;
  static_assert(kNumberOfInvokePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using InvokeTypeField = BitField<InvokeType, kFieldInvokeType, kFieldInvokeTypeSize>;

  HInvoke(InstructionKind kind,
          ArenaAllocator* allocator,
          uint32_t number_of_arguments,
          uint32_t number_of_other_inputs,
          DataType::Type return_type,
          uint32_t dex_pc,
          uint32_t dex_method_index,
          ArtMethod* resolved_method,
          InvokeType invoke_type)
      : HVariableInputSizeInstruction(
            kind,
            return_type,
            SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
            dex_pc,
            allocator,
            number_of_arguments + number_of_other_inputs,
            kArenaAllocInvokeInputs),
        number_of_arguments_(number_of_arguments),
        dex_method_index_(dex_method_index),
        intrinsic_(Intrinsics::kNone),
        intrinsic_optimizations_(0) {
    SetPackedField<InvokeTypeField>(invoke_type);
    // Conservatively assume the call can throw; SetCanThrow() may clear this.
    SetPackedFlag<kFlagCanThrow>(true);
    // Check mutator lock, constructors lack annotalysis support.
    Locks::mutator_lock_->AssertNotExclusiveHeld(Thread::Current());
    SetResolvedMethod(resolved_method);
  }

  DEFAULT_COPY_CONSTRUCTOR(Invoke);

  uint32_t number_of_arguments_;
  ArtMethod* resolved_method_;
  const uint32_t dex_method_index_;
  Intrinsics intrinsic_;

  // A magic word holding optimizations for intrinsics. See intrinsics.h.
  uint32_t intrinsic_optimizations_;
};
4406
// Invoke of a method that was not resolved at compile time: the resolved
// method passed to the base class is null.
class HInvokeUnresolved final : public HInvoke {
 public:
  HInvokeUnresolved(ArenaAllocator* allocator,
                    uint32_t number_of_arguments,
                    DataType::Type return_type,
                    uint32_t dex_pc,
                    uint32_t dex_method_index,
                    InvokeType invoke_type)
      : HInvoke(kInvokeUnresolved,
                allocator,
                number_of_arguments,
                /* number_of_other_inputs= */ 0u,
                return_type,
                dex_pc,
                dex_method_index,
                /* resolved_method= */ nullptr,
                invoke_type) {
  }

  bool IsClonable() const override { return true; }

  DECLARE_INSTRUCTION(InvokeUnresolved);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(InvokeUnresolved);
};
4433
// invoke-polymorphic call: no method is resolved at compile time (null is
// passed to the base class) and the call is classified as kVirtual.
class HInvokePolymorphic final : public HInvoke {
 public:
  HInvokePolymorphic(ArenaAllocator* allocator,
                     uint32_t number_of_arguments,
                     DataType::Type return_type,
                     uint32_t dex_pc,
                     uint32_t dex_method_index)
      : HInvoke(kInvokePolymorphic,
                allocator,
                number_of_arguments,
                /* number_of_other_inputs= */ 0u,
                return_type,
                dex_pc,
                dex_method_index,
                /* resolved_method= */ nullptr,
                kVirtual) {
  }

  bool IsClonable() const override { return true; }

  DECLARE_INSTRUCTION(InvokePolymorphic);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(InvokePolymorphic);
};
4459
// invoke-custom call: the target is identified by a dex call site index
// (see GetCallSiteIndex()) rather than a method index, so there is no dex
// method index or resolved method; the call is classified as kStatic.
class HInvokeCustom final : public HInvoke {
 public:
  HInvokeCustom(ArenaAllocator* allocator,
                uint32_t number_of_arguments,
                uint32_t call_site_index,
                DataType::Type return_type,
                uint32_t dex_pc)
      : HInvoke(kInvokeCustom,
                allocator,
                number_of_arguments,
                /* number_of_other_inputs= */ 0u,
                return_type,
                dex_pc,
                /* dex_method_index= */ dex::kDexNoIndex,
                /* resolved_method= */ nullptr,
                kStatic),
        call_site_index_(call_site_index) {
  }

  uint32_t GetCallSiteIndex() const { return call_site_index_; }

  bool IsClonable() const override { return true; }

  DECLARE_INSTRUCTION(InvokeCustom);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(InvokeCustom);

 private:
  // Index into the dex file's call site table.
  uint32_t call_site_index_;
};
4491
4492 class HInvokeStaticOrDirect final : public HInvoke {
4493 public:
4494 // Requirements of this method call regarding the class
4495 // initialization (clinit) check of its declaring class.
4496 enum class ClinitCheckRequirement {
4497 kNone, // Class already initialized.
4498 kExplicit, // Static call having explicit clinit check as last input.
4499 kImplicit, // Static call implicitly requiring a clinit check.
4500 kLast = kImplicit
4501 };
4502
4503 // Determines how to load the target ArtMethod*.
4504 enum class MethodLoadKind {
4505 // Use a String init ArtMethod* loaded from Thread entrypoints.
4506 kStringInit,
4507
4508 // Use the method's own ArtMethod* loaded by the register allocator.
4509 kRecursive,
4510
4511 // Use PC-relative boot image ArtMethod* address that will be known at link time.
4512 // Used for boot image methods referenced by boot image code.
4513 kBootImageLinkTimePcRelative,
4514
4515 // Load from an entry in the .data.bimg.rel.ro using a PC-relative load.
4516 // Used for app->boot calls with relocatable image.
4517 kBootImageRelRo,
4518
4519 // Load from an entry in the .bss section using a PC-relative load.
4520 // Used for methods outside boot image referenced by AOT-compiled app and boot image code.
4521 kBssEntry,
4522
4523 // Use ArtMethod* at a known address, embed the direct address in the code.
    // Used for JIT-compiled calls.
4525 kJitDirectAddress,
4526
4527 // Make a runtime call to resolve and call the method. This is the last-resort-kind
4528 // used when other kinds are unimplemented on a particular architecture.
4529 kRuntimeCall,
4530 };
4531
4532 // Determines the location of the code pointer.
4533 enum class CodePtrLocation {
4534 // Recursive call, use local PC-relative call instruction.
4535 kCallSelf,
4536
4537 // Use code pointer from the ArtMethod*.
4538 // Used when we don't know the target code. This is also the last-resort-kind used when
4539 // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
4540 kCallArtMethod,
4541 };
4542
4543 struct DispatchInfo {
4544 MethodLoadKind method_load_kind;
4545 CodePtrLocation code_ptr_location;
4546 // The method load data holds
4547 // - thread entrypoint offset for kStringInit method if this is a string init invoke.
4548 // Note that there are multiple string init methods, each having its own offset.
    //   - the method address for kJitDirectAddress.
4550 uint64_t method_load_data;
4551 };
4552
HInvokeStaticOrDirect(ArenaAllocator * allocator,uint32_t number_of_arguments,DataType::Type return_type,uint32_t dex_pc,uint32_t method_index,ArtMethod * resolved_method,DispatchInfo dispatch_info,InvokeType invoke_type,MethodReference target_method,ClinitCheckRequirement clinit_check_requirement)4553 HInvokeStaticOrDirect(ArenaAllocator* allocator,
4554 uint32_t number_of_arguments,
4555 DataType::Type return_type,
4556 uint32_t dex_pc,
4557 uint32_t method_index,
4558 ArtMethod* resolved_method,
4559 DispatchInfo dispatch_info,
4560 InvokeType invoke_type,
4561 MethodReference target_method,
4562 ClinitCheckRequirement clinit_check_requirement)
4563 : HInvoke(kInvokeStaticOrDirect,
4564 allocator,
4565 number_of_arguments,
4566 // There is potentially one extra argument for the HCurrentMethod node, and
4567 // potentially one other if the clinit check is explicit.
4568 (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) +
4569 (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u),
4570 return_type,
4571 dex_pc,
4572 method_index,
4573 resolved_method,
4574 invoke_type),
4575 target_method_(target_method),
4576 dispatch_info_(dispatch_info) {
4577 SetPackedField<ClinitCheckRequirementField>(clinit_check_requirement);
4578 }
4579
IsClonable()4580 bool IsClonable() const override { return true; }
4581
SetDispatchInfo(const DispatchInfo & dispatch_info)4582 void SetDispatchInfo(const DispatchInfo& dispatch_info) {
4583 bool had_current_method_input = HasCurrentMethodInput();
4584 bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind);
4585
4586 // Using the current method is the default and once we find a better
4587 // method load kind, we should not go back to using the current method.
4588 DCHECK(had_current_method_input || !needs_current_method_input);
4589
4590 if (had_current_method_input && !needs_current_method_input) {
4591 DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
4592 RemoveInputAt(GetSpecialInputIndex());
4593 }
4594 dispatch_info_ = dispatch_info;
4595 }
4596
GetDispatchInfo()4597 DispatchInfo GetDispatchInfo() const {
4598 return dispatch_info_;
4599 }
4600
AddSpecialInput(HInstruction * input)4601 void AddSpecialInput(HInstruction* input) {
4602 // We allow only one special input.
4603 DCHECK(!IsStringInit() && !HasCurrentMethodInput());
4604 DCHECK(InputCount() == GetSpecialInputIndex() ||
4605 (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck()));
4606 InsertInputAt(GetSpecialInputIndex(), input);
4607 }
4608
4609 using HInstruction::GetInputRecords; // Keep the const version visible.
GetInputRecords()4610 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() override {
4611 ArrayRef<HUserRecord<HInstruction*>> input_records = HInvoke::GetInputRecords();
4612 if (kIsDebugBuild && IsStaticWithExplicitClinitCheck()) {
4613 DCHECK(!input_records.empty());
4614 DCHECK_GT(input_records.size(), GetNumberOfArguments());
4615 HInstruction* last_input = input_records.back().GetInstruction();
4616 // Note: `last_input` may be null during arguments setup.
4617 if (last_input != nullptr) {
4618 // `last_input` is the last input of a static invoke marked as having
4619 // an explicit clinit check. It must either be:
4620 // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
4621 // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
4622 DCHECK(last_input->IsClinitCheck() || last_input->IsLoadClass()) << last_input->DebugName();
4623 }
4624 }
4625 return input_records;
4626 }
4627
  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const override {
    // We access the method via the dex cache so we can't do an implicit null check.
    // TODO: for intrinsics we can generate implicit null checks.
    return false;
  }
4633
CanBeNull()4634 bool CanBeNull() const override {
4635 return GetType() == DataType::Type::kReference && !IsStringInit();
4636 }
4637
  // Get the index of the special input, if any.
  //
  // If the invoke HasCurrentMethodInput(), the "special input" is the current
  // method pointer; otherwise there may be one platform-specific special input,
  // such as PC-relative addressing base.
  uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); }
  // True when there is at least one input beyond the declared arguments.
  bool HasSpecialInput() const { return GetNumberOfArguments() != InputCount(); }
4645
  // Accessors for the cached dispatch information.
  MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
  CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
  // True when the callee is the calling method itself (self-recursive call).
  bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
  bool NeedsDexCacheOfDeclaringClass() const override;
  // True for calls to a String constructor, dispatched via a string-init entrypoint.
  bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
  // True when the method is loaded from a direct address known to the JIT.
  bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kJitDirectAddress; }
HasPcRelativeMethodLoadKind()4652 bool HasPcRelativeMethodLoadKind() const {
4653 return GetMethodLoadKind() == MethodLoadKind::kBootImageLinkTimePcRelative ||
4654 GetMethodLoadKind() == MethodLoadKind::kBootImageRelRo ||
4655 GetMethodLoadKind() == MethodLoadKind::kBssEntry;
4656 }
  // Whether the current ArtMethod* is an input of this invoke; implied by the
  // method load kind and cross-checked against the actual inputs in debug builds.
  bool HasCurrentMethodInput() const {
    // This function can be called only after the invoke has been fully initialized by the builder.
    if (NeedsCurrentMethodInput(GetMethodLoadKind())) {
      DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return true;
    } else {
      DCHECK(InputCount() == GetSpecialInputIndex() ||
             !InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return false;
    }
  }
4668
  // For string-init calls, the method load data encodes the quick entrypoint
  // used to allocate and initialize the string.
  QuickEntrypointEnum GetStringInitEntryPoint() const {
    DCHECK(IsStringInit());
    return static_cast<QuickEntrypointEnum>(dispatch_info_.method_load_data);
  }
4673
  // For kJitDirectAddress loads, the method load data holds the raw method address.
  uint64_t GetMethodAddress() const {
    DCHECK(HasMethodAddress());
    return dispatch_info_.method_load_data;
  }
4678
  const DexFile& GetDexFileForPcRelativeDexCache() const;

  // Returns the class-initialization check requirement stored in the packed fields.
  ClinitCheckRequirement GetClinitCheckRequirement() const {
    return GetPackedField<ClinitCheckRequirementField>();
  }
4684
  // Is this instruction a call to a static method?
  bool IsStatic() const {
    return GetInvokeType() == kStatic;
  }
4689
  // Returns the (dex file, method index) reference of the call target,
  // cached at construction so the mutator lock is not needed.
  MethodReference GetTargetMethod() const {
    return target_method_;
  }
4693
  // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
  // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
  // instruction; only relevant for static calls with explicit clinit check.
  void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
    DCHECK(IsStaticWithExplicitClinitCheck());
    size_t last_input_index = inputs_.size() - 1u;
    HInstruction* last_input = inputs_.back().GetInstruction();
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
    // Unlink the use record before dropping the input itself.
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
    SetPackedField<ClinitCheckRequirementField>(new_requirement);
    DCHECK(!IsStaticWithExplicitClinitCheck());
  }
4708
  // Is this a call to a static method whose declaring class has an
  // explicit initialization check in the graph?
  bool IsStaticWithExplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kExplicit);
  }
4714
  // Is this a call to a static method whose declaring class has an
  // implicit initialization check requirement?
  bool IsStaticWithImplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kImplicit);
  }
4720
4721 // Does this method load kind need the current method as an input?
NeedsCurrentMethodInput(MethodLoadKind kind)4722 static bool NeedsCurrentMethodInput(MethodLoadKind kind) {
4723 return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kRuntimeCall;
4724 }
4725
  DECLARE_INSTRUCTION(InvokeStaticOrDirect);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(InvokeStaticOrDirect);

 private:
  // Packed-field layout: the clinit-check requirement is stored right after
  // the bits already used by HInvoke.
  static constexpr size_t kFieldClinitCheckRequirement = kNumberOfInvokePackedBits;
  static constexpr size_t kFieldClinitCheckRequirementSize =
      MinimumBitsToStore(static_cast<size_t>(ClinitCheckRequirement::kLast));
  static constexpr size_t kNumberOfInvokeStaticOrDirectPackedBits =
      kFieldClinitCheckRequirement + kFieldClinitCheckRequirementSize;
  static_assert(kNumberOfInvokeStaticOrDirectPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using ClinitCheckRequirementField = BitField<ClinitCheckRequirement,
                                               kFieldClinitCheckRequirement,
                                               kFieldClinitCheckRequirementSize>;

  // Cached values of the resolved method, to avoid needing the mutator lock.
  const MethodReference target_method_;
  DispatchInfo dispatch_info_;
};
std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs);
std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);
4749
// Invocation dispatched through the receiver's vtable at the cached index.
class HInvokeVirtual final : public HInvoke {
 public:
  HInvokeVirtual(ArenaAllocator* allocator,
                 uint32_t number_of_arguments,
                 DataType::Type return_type,
                 uint32_t dex_pc,
                 uint32_t dex_method_index,
                 ArtMethod* resolved_method,
                 uint32_t vtable_index)
      : HInvoke(kInvokeVirtual,
                allocator,
                number_of_arguments,
                0u,
                return_type,
                dex_pc,
                dex_method_index,
                resolved_method,
                kVirtual),
        vtable_index_(vtable_index) {
  }

  bool IsClonable() const override { return true; }

  bool CanBeNull() const override {
    // Some well-known intrinsics are known to never return null; everything
    // else defers to the generic HInvoke answer.
    switch (GetIntrinsic()) {
      case Intrinsics::kThreadCurrentThread:
      case Intrinsics::kStringBufferAppend:
      case Intrinsics::kStringBufferToString:
      case Intrinsics::kStringBuilderAppend:
      case Intrinsics::kStringBuilderToString:
        return false;
      default:
        return HInvoke::CanBeNull();
    }
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
    // Only the receiver (input 0) is dereferenced by the dispatch itself.
    // TODO: Add implicit null checks in intrinsics.
    return (obj == InputAt(0)) && !IsIntrinsic();
  }

  uint32_t GetVTableIndex() const { return vtable_index_; }

  DECLARE_INSTRUCTION(InvokeVirtual);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(InvokeVirtual);

 private:
  // Cached value of the resolved method, to avoid needing the mutator lock.
  const uint32_t vtable_index_;
};
4802
// Invocation dispatched through the interface method table (IMT) at the
// cached conflict-table index.
class HInvokeInterface final : public HInvoke {
 public:
  HInvokeInterface(ArenaAllocator* allocator,
                   uint32_t number_of_arguments,
                   DataType::Type return_type,
                   uint32_t dex_pc,
                   uint32_t dex_method_index,
                   ArtMethod* resolved_method,
                   uint32_t imt_index)
      : HInvoke(kInvokeInterface,
                allocator,
                number_of_arguments,
                0u,
                return_type,
                dex_pc,
                dex_method_index,
                resolved_method,
                kInterface),
        imt_index_(imt_index) {
  }

  bool IsClonable() const override { return true; }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
    // Only the receiver (input 0) is dereferenced by the dispatch itself.
    // TODO: Add implicit null checks in intrinsics.
    return (obj == InputAt(0)) && !IsIntrinsic();
  }

  bool NeedsDexCacheOfDeclaringClass() const override {
    // The assembly stub currently needs it.
    return true;
  }

  uint32_t GetImtIndex() const { return imt_index_; }

  DECLARE_INSTRUCTION(InvokeInterface);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(InvokeInterface);

 private:
  // Cached value of the resolved method, to avoid needing the mutator lock.
  const uint32_t imt_index_;
};
4847
4848 class HNeg final : public HUnaryOperation {
4849 public:
4850 HNeg(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
HUnaryOperation(kNeg,result_type,input,dex_pc)4851 : HUnaryOperation(kNeg, result_type, input, dex_pc) {
4852 DCHECK_EQ(result_type, DataType::Kind(input->GetType()));
4853 }
4854
Compute(T x)4855 template <typename T> static T Compute(T x) { return -x; }
4856
Evaluate(HIntConstant * x)4857 HConstant* Evaluate(HIntConstant* x) const override {
4858 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4859 }
Evaluate(HLongConstant * x)4860 HConstant* Evaluate(HLongConstant* x) const override {
4861 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
4862 }
Evaluate(HFloatConstant * x)4863 HConstant* Evaluate(HFloatConstant* x) const override {
4864 return GetBlock()->GetGraph()->GetFloatConstant(Compute(x->GetValue()), GetDexPc());
4865 }
Evaluate(HDoubleConstant * x)4866 HConstant* Evaluate(HDoubleConstant* x) const override {
4867 return GetBlock()->GetGraph()->GetDoubleConstant(Compute(x->GetValue()), GetDexPc());
4868 }
4869
4870 DECLARE_INSTRUCTION(Neg);
4871
4872 protected:
4873 DEFAULT_COPY_CONSTRUCTOR(Neg);
4874 };
4875
4876 class HNewArray final : public HExpression<2> {
4877 public:
HNewArray(HInstruction * cls,HInstruction * length,uint32_t dex_pc,size_t component_size_shift)4878 HNewArray(HInstruction* cls, HInstruction* length, uint32_t dex_pc, size_t component_size_shift)
4879 : HExpression(kNewArray, DataType::Type::kReference, SideEffects::CanTriggerGC(), dex_pc) {
4880 SetRawInputAt(0, cls);
4881 SetRawInputAt(1, length);
4882 SetPackedField<ComponentSizeShiftField>(component_size_shift);
4883 }
4884
IsClonable()4885 bool IsClonable() const override { return true; }
4886
4887 // Calls runtime so needs an environment.
NeedsEnvironment()4888 bool NeedsEnvironment() const override { return true; }
4889
4890 // May throw NegativeArraySizeException, OutOfMemoryError, etc.
CanThrow()4891 bool CanThrow() const override { return true; }
4892
CanBeNull()4893 bool CanBeNull() const override { return false; }
4894
GetLoadClass()4895 HLoadClass* GetLoadClass() const {
4896 DCHECK(InputAt(0)->IsLoadClass());
4897 return InputAt(0)->AsLoadClass();
4898 }
4899
GetLength()4900 HInstruction* GetLength() const {
4901 return InputAt(1);
4902 }
4903
GetComponentSizeShift()4904 size_t GetComponentSizeShift() {
4905 return GetPackedField<ComponentSizeShiftField>();
4906 }
4907
4908 DECLARE_INSTRUCTION(NewArray);
4909
4910 protected:
4911 DEFAULT_COPY_CONSTRUCTOR(NewArray);
4912
4913 private:
4914 static constexpr size_t kFieldComponentSizeShift = kNumberOfGenericPackedBits;
4915 static constexpr size_t kFieldComponentSizeShiftSize = MinimumBitsToStore(3u);
4916 static constexpr size_t kNumberOfNewArrayPackedBits =
4917 kFieldComponentSizeShift + kFieldComponentSizeShiftSize;
4918 static_assert(kNumberOfNewArrayPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
4919 using ComponentSizeShiftField =
4920 BitField<size_t, kFieldComponentSizeShift, kFieldComponentSizeShift>;
4921 };
4922
4923 class HAdd final : public HBinaryOperation {
4924 public:
4925 HAdd(DataType::Type result_type,
4926 HInstruction* left,
4927 HInstruction* right,
4928 uint32_t dex_pc = kNoDexPc)
HBinaryOperation(kAdd,result_type,left,right,SideEffects::None (),dex_pc)4929 : HBinaryOperation(kAdd, result_type, left, right, SideEffects::None(), dex_pc) {
4930 }
4931
IsCommutative()4932 bool IsCommutative() const override { return true; }
4933
Compute(T x,T y)4934 template <typename T> static T Compute(T x, T y) { return x + y; }
4935
Evaluate(HIntConstant * x,HIntConstant * y)4936 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4937 return GetBlock()->GetGraph()->GetIntConstant(
4938 Compute(x->GetValue(), y->GetValue()), GetDexPc());
4939 }
Evaluate(HLongConstant * x,HLongConstant * y)4940 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4941 return GetBlock()->GetGraph()->GetLongConstant(
4942 Compute(x->GetValue(), y->GetValue()), GetDexPc());
4943 }
Evaluate(HFloatConstant * x,HFloatConstant * y)4944 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
4945 return GetBlock()->GetGraph()->GetFloatConstant(
4946 Compute(x->GetValue(), y->GetValue()), GetDexPc());
4947 }
Evaluate(HDoubleConstant * x,HDoubleConstant * y)4948 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
4949 return GetBlock()->GetGraph()->GetDoubleConstant(
4950 Compute(x->GetValue(), y->GetValue()), GetDexPc());
4951 }
4952
4953 DECLARE_INSTRUCTION(Add);
4954
4955 protected:
4956 DEFAULT_COPY_CONSTRUCTOR(Add);
4957 };
4958
4959 class HSub final : public HBinaryOperation {
4960 public:
4961 HSub(DataType::Type result_type,
4962 HInstruction* left,
4963 HInstruction* right,
4964 uint32_t dex_pc = kNoDexPc)
HBinaryOperation(kSub,result_type,left,right,SideEffects::None (),dex_pc)4965 : HBinaryOperation(kSub, result_type, left, right, SideEffects::None(), dex_pc) {
4966 }
4967
Compute(T x,T y)4968 template <typename T> static T Compute(T x, T y) { return x - y; }
4969
Evaluate(HIntConstant * x,HIntConstant * y)4970 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4971 return GetBlock()->GetGraph()->GetIntConstant(
4972 Compute(x->GetValue(), y->GetValue()), GetDexPc());
4973 }
Evaluate(HLongConstant * x,HLongConstant * y)4974 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4975 return GetBlock()->GetGraph()->GetLongConstant(
4976 Compute(x->GetValue(), y->GetValue()), GetDexPc());
4977 }
Evaluate(HFloatConstant * x,HFloatConstant * y)4978 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
4979 return GetBlock()->GetGraph()->GetFloatConstant(
4980 Compute(x->GetValue(), y->GetValue()), GetDexPc());
4981 }
Evaluate(HDoubleConstant * x,HDoubleConstant * y)4982 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
4983 return GetBlock()->GetGraph()->GetDoubleConstant(
4984 Compute(x->GetValue(), y->GetValue()), GetDexPc());
4985 }
4986
4987 DECLARE_INSTRUCTION(Sub);
4988
4989 protected:
4990 DEFAULT_COPY_CONSTRUCTOR(Sub);
4991 };
4992
4993 class HMul final : public HBinaryOperation {
4994 public:
4995 HMul(DataType::Type result_type,
4996 HInstruction* left,
4997 HInstruction* right,
4998 uint32_t dex_pc = kNoDexPc)
HBinaryOperation(kMul,result_type,left,right,SideEffects::None (),dex_pc)4999 : HBinaryOperation(kMul, result_type, left, right, SideEffects::None(), dex_pc) {
5000 }
5001
IsCommutative()5002 bool IsCommutative() const override { return true; }
5003
Compute(T x,T y)5004 template <typename T> static T Compute(T x, T y) { return x * y; }
5005
Evaluate(HIntConstant * x,HIntConstant * y)5006 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5007 return GetBlock()->GetGraph()->GetIntConstant(
5008 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5009 }
Evaluate(HLongConstant * x,HLongConstant * y)5010 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5011 return GetBlock()->GetGraph()->GetLongConstant(
5012 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5013 }
Evaluate(HFloatConstant * x,HFloatConstant * y)5014 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
5015 return GetBlock()->GetGraph()->GetFloatConstant(
5016 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5017 }
Evaluate(HDoubleConstant * x,HDoubleConstant * y)5018 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
5019 return GetBlock()->GetGraph()->GetDoubleConstant(
5020 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5021 }
5022
5023 DECLARE_INSTRUCTION(Mul);
5024
5025 protected:
5026 DEFAULT_COPY_CONSTRUCTOR(Mul);
5027 };
5028
5029 class HDiv final : public HBinaryOperation {
5030 public:
HDiv(DataType::Type result_type,HInstruction * left,HInstruction * right,uint32_t dex_pc)5031 HDiv(DataType::Type result_type,
5032 HInstruction* left,
5033 HInstruction* right,
5034 uint32_t dex_pc)
5035 : HBinaryOperation(kDiv, result_type, left, right, SideEffects::None(), dex_pc) {
5036 }
5037
5038 template <typename T>
ComputeIntegral(T x,T y)5039 T ComputeIntegral(T x, T y) const {
5040 DCHECK(!DataType::IsFloatingPointType(GetType())) << GetType();
5041 // Our graph structure ensures we never have 0 for `y` during
5042 // constant folding.
5043 DCHECK_NE(y, 0);
5044 // Special case -1 to avoid getting a SIGFPE on x86(_64).
5045 return (y == -1) ? -x : x / y;
5046 }
5047
5048 template <typename T>
ComputeFP(T x,T y)5049 T ComputeFP(T x, T y) const {
5050 DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
5051 return x / y;
5052 }
5053
Evaluate(HIntConstant * x,HIntConstant * y)5054 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5055 return GetBlock()->GetGraph()->GetIntConstant(
5056 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5057 }
Evaluate(HLongConstant * x,HLongConstant * y)5058 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5059 return GetBlock()->GetGraph()->GetLongConstant(
5060 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5061 }
Evaluate(HFloatConstant * x,HFloatConstant * y)5062 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
5063 return GetBlock()->GetGraph()->GetFloatConstant(
5064 ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
5065 }
Evaluate(HDoubleConstant * x,HDoubleConstant * y)5066 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
5067 return GetBlock()->GetGraph()->GetDoubleConstant(
5068 ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
5069 }
5070
5071 DECLARE_INSTRUCTION(Div);
5072
5073 protected:
5074 DEFAULT_COPY_CONSTRUCTOR(Div);
5075 };
5076
5077 class HRem final : public HBinaryOperation {
5078 public:
HRem(DataType::Type result_type,HInstruction * left,HInstruction * right,uint32_t dex_pc)5079 HRem(DataType::Type result_type,
5080 HInstruction* left,
5081 HInstruction* right,
5082 uint32_t dex_pc)
5083 : HBinaryOperation(kRem, result_type, left, right, SideEffects::None(), dex_pc) {
5084 }
5085
5086 template <typename T>
ComputeIntegral(T x,T y)5087 T ComputeIntegral(T x, T y) const {
5088 DCHECK(!DataType::IsFloatingPointType(GetType())) << GetType();
5089 // Our graph structure ensures we never have 0 for `y` during
5090 // constant folding.
5091 DCHECK_NE(y, 0);
5092 // Special case -1 to avoid getting a SIGFPE on x86(_64).
5093 return (y == -1) ? 0 : x % y;
5094 }
5095
5096 template <typename T>
ComputeFP(T x,T y)5097 T ComputeFP(T x, T y) const {
5098 DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
5099 return std::fmod(x, y);
5100 }
5101
Evaluate(HIntConstant * x,HIntConstant * y)5102 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5103 return GetBlock()->GetGraph()->GetIntConstant(
5104 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5105 }
Evaluate(HLongConstant * x,HLongConstant * y)5106 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5107 return GetBlock()->GetGraph()->GetLongConstant(
5108 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5109 }
Evaluate(HFloatConstant * x,HFloatConstant * y)5110 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
5111 return GetBlock()->GetGraph()->GetFloatConstant(
5112 ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
5113 }
Evaluate(HDoubleConstant * x,HDoubleConstant * y)5114 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
5115 return GetBlock()->GetGraph()->GetDoubleConstant(
5116 ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
5117 }
5118
5119 DECLARE_INSTRUCTION(Rem);
5120
5121 protected:
5122 DEFAULT_COPY_CONSTRUCTOR(Rem);
5123 };
5124
5125 class HMin final : public HBinaryOperation {
5126 public:
HMin(DataType::Type result_type,HInstruction * left,HInstruction * right,uint32_t dex_pc)5127 HMin(DataType::Type result_type,
5128 HInstruction* left,
5129 HInstruction* right,
5130 uint32_t dex_pc)
5131 : HBinaryOperation(kMin, result_type, left, right, SideEffects::None(), dex_pc) {}
5132
IsCommutative()5133 bool IsCommutative() const override { return true; }
5134
5135 // Evaluation for integral values.
ComputeIntegral(T x,T y)5136 template <typename T> static T ComputeIntegral(T x, T y) {
5137 return (x <= y) ? x : y;
5138 }
5139
Evaluate(HIntConstant * x,HIntConstant * y)5140 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5141 return GetBlock()->GetGraph()->GetIntConstant(
5142 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5143 }
Evaluate(HLongConstant * x,HLongConstant * y)5144 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5145 return GetBlock()->GetGraph()->GetLongConstant(
5146 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5147 }
5148 // TODO: Evaluation for floating-point values.
Evaluate(HFloatConstant * x ATTRIBUTE_UNUSED,HFloatConstant * y ATTRIBUTE_UNUSED)5149 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
5150 HFloatConstant* y ATTRIBUTE_UNUSED) const override { return nullptr; }
Evaluate(HDoubleConstant * x ATTRIBUTE_UNUSED,HDoubleConstant * y ATTRIBUTE_UNUSED)5151 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
5152 HDoubleConstant* y ATTRIBUTE_UNUSED) const override { return nullptr; }
5153
5154 DECLARE_INSTRUCTION(Min);
5155
5156 protected:
5157 DEFAULT_COPY_CONSTRUCTOR(Min);
5158 };
5159
5160 class HMax final : public HBinaryOperation {
5161 public:
HMax(DataType::Type result_type,HInstruction * left,HInstruction * right,uint32_t dex_pc)5162 HMax(DataType::Type result_type,
5163 HInstruction* left,
5164 HInstruction* right,
5165 uint32_t dex_pc)
5166 : HBinaryOperation(kMax, result_type, left, right, SideEffects::None(), dex_pc) {}
5167
IsCommutative()5168 bool IsCommutative() const override { return true; }
5169
5170 // Evaluation for integral values.
ComputeIntegral(T x,T y)5171 template <typename T> static T ComputeIntegral(T x, T y) {
5172 return (x >= y) ? x : y;
5173 }
5174
Evaluate(HIntConstant * x,HIntConstant * y)5175 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5176 return GetBlock()->GetGraph()->GetIntConstant(
5177 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5178 }
Evaluate(HLongConstant * x,HLongConstant * y)5179 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5180 return GetBlock()->GetGraph()->GetLongConstant(
5181 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5182 }
5183 // TODO: Evaluation for floating-point values.
Evaluate(HFloatConstant * x ATTRIBUTE_UNUSED,HFloatConstant * y ATTRIBUTE_UNUSED)5184 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
5185 HFloatConstant* y ATTRIBUTE_UNUSED) const override { return nullptr; }
Evaluate(HDoubleConstant * x ATTRIBUTE_UNUSED,HDoubleConstant * y ATTRIBUTE_UNUSED)5186 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
5187 HDoubleConstant* y ATTRIBUTE_UNUSED) const override { return nullptr; }
5188
5189 DECLARE_INSTRUCTION(Max);
5190
5191 protected:
5192 DEFAULT_COPY_CONSTRUCTOR(Max);
5193 };
5194
// Absolute value for integral and floating-point operands.
class HAbs final : public HUnaryOperation {
 public:
  HAbs(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(kAbs, result_type, input, dex_pc) {}

  // Evaluation for integral values.
  template <typename T> static T ComputeIntegral(T x) {
    return x < 0 ? -x : x;
  }

  // Evaluation for floating-point values.
  // Note, as a "quality of implementation", rather than pure "spec compliance",
  // we require that Math.abs() clears the sign bit (but changes nothing else)
  // for all floating-point numbers, including NaN (signaling NaN may become quiet though).
  // http://b/30758343
  template <typename T, typename S> static T ComputeFP(T x) {
    // Clear the sign bit via the integer representation; `S` is the
    // same-width signed integer type so max() is all-ones except the sign bit.
    S bits = bit_cast<S, T>(x);
    return bit_cast<T, S>(bits & std::numeric_limits<S>::max());
  }

  HConstant* Evaluate(HIntConstant* x) const override {
    return GetBlock()->GetGraph()->GetIntConstant(ComputeIntegral(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x) const override {
    return GetBlock()->GetGraph()->GetLongConstant(ComputeIntegral(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x) const override {
    return GetBlock()->GetGraph()->GetFloatConstant(
        ComputeFP<float, int32_t>(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x) const override {
    return GetBlock()->GetGraph()->GetDoubleConstant(
        ComputeFP<double, int64_t>(x->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Abs);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Abs);
};
5235
// Throws ArithmeticException when its input is zero; otherwise passes the
// value through unchanged (same type as the input).
class HDivZeroCheck final : public HExpression<1> {
 public:
  // `HDivZeroCheck` can trigger GC, as it may call the `ArithmeticException`
  // constructor. However it can only do it on a fatal slow path so execution never returns to the
  // instruction following the current one; thus 'SideEffects::None()' is used.
  HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(kDivZeroCheck, value->GetType(), SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool IsClonable() const override { return true; }
  bool CanBeMoved() const override { return true; }

  // Two checks are equal regardless of instance data; equality depends only
  // on the input.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
    return true;
  }

  // Needs an environment to be able to throw.
  bool NeedsEnvironment() const override { return true; }
  bool CanThrow() const override { return true; }

  DECLARE_INSTRUCTION(DivZeroCheck);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(DivZeroCheck);
};
5261
5262 class HShl final : public HBinaryOperation {
5263 public:
5264 HShl(DataType::Type result_type,
5265 HInstruction* value,
5266 HInstruction* distance,
5267 uint32_t dex_pc = kNoDexPc)
HBinaryOperation(kShl,result_type,value,distance,SideEffects::None (),dex_pc)5268 : HBinaryOperation(kShl, result_type, value, distance, SideEffects::None(), dex_pc) {
5269 DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
5270 DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
5271 }
5272
5273 template <typename T>
Compute(T value,int32_t distance,int32_t max_shift_distance)5274 static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
5275 return value << (distance & max_shift_distance);
5276 }
5277
Evaluate(HIntConstant * value,HIntConstant * distance)5278 HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const override {
5279 return GetBlock()->GetGraph()->GetIntConstant(
5280 Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
5281 }
Evaluate(HLongConstant * value,HIntConstant * distance)5282 HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const override {
5283 return GetBlock()->GetGraph()->GetLongConstant(
5284 Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
5285 }
Evaluate(HLongConstant * value ATTRIBUTE_UNUSED,HLongConstant * distance ATTRIBUTE_UNUSED)5286 HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
5287 HLongConstant* distance ATTRIBUTE_UNUSED) const override {
5288 LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
5289 UNREACHABLE();
5290 }
Evaluate(HFloatConstant * value ATTRIBUTE_UNUSED,HFloatConstant * distance ATTRIBUTE_UNUSED)5291 HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
5292 HFloatConstant* distance ATTRIBUTE_UNUSED) const override {
5293 LOG(FATAL) << DebugName() << " is not defined for float values";
5294 UNREACHABLE();
5295 }
Evaluate(HDoubleConstant * value ATTRIBUTE_UNUSED,HDoubleConstant * distance ATTRIBUTE_UNUSED)5296 HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
5297 HDoubleConstant* distance ATTRIBUTE_UNUSED) const override {
5298 LOG(FATAL) << DebugName() << " is not defined for double values";
5299 UNREACHABLE();
5300 }
5301
5302 DECLARE_INSTRUCTION(Shl);
5303
5304 protected:
5305 DEFAULT_COPY_CONSTRUCTOR(Shl);
5306 };
5307
5308 class HShr final : public HBinaryOperation {
5309 public:
5310 HShr(DataType::Type result_type,
5311 HInstruction* value,
5312 HInstruction* distance,
5313 uint32_t dex_pc = kNoDexPc)
HBinaryOperation(kShr,result_type,value,distance,SideEffects::None (),dex_pc)5314 : HBinaryOperation(kShr, result_type, value, distance, SideEffects::None(), dex_pc) {
5315 DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
5316 DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
5317 }
5318
5319 template <typename T>
Compute(T value,int32_t distance,int32_t max_shift_distance)5320 static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
5321 return value >> (distance & max_shift_distance);
5322 }
5323
Evaluate(HIntConstant * value,HIntConstant * distance)5324 HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const override {
5325 return GetBlock()->GetGraph()->GetIntConstant(
5326 Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
5327 }
Evaluate(HLongConstant * value,HIntConstant * distance)5328 HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const override {
5329 return GetBlock()->GetGraph()->GetLongConstant(
5330 Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
5331 }
Evaluate(HLongConstant * value ATTRIBUTE_UNUSED,HLongConstant * distance ATTRIBUTE_UNUSED)5332 HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
5333 HLongConstant* distance ATTRIBUTE_UNUSED) const override {
5334 LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
5335 UNREACHABLE();
5336 }
Evaluate(HFloatConstant * value ATTRIBUTE_UNUSED,HFloatConstant * distance ATTRIBUTE_UNUSED)5337 HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
5338 HFloatConstant* distance ATTRIBUTE_UNUSED) const override {
5339 LOG(FATAL) << DebugName() << " is not defined for float values";
5340 UNREACHABLE();
5341 }
Evaluate(HDoubleConstant * value ATTRIBUTE_UNUSED,HDoubleConstant * distance ATTRIBUTE_UNUSED)5342 HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
5343 HDoubleConstant* distance ATTRIBUTE_UNUSED) const override {
5344 LOG(FATAL) << DebugName() << " is not defined for double values";
5345 UNREACHABLE();
5346 }
5347
5348 DECLARE_INSTRUCTION(Shr);
5349
5350 protected:
5351 DEFAULT_COPY_CONSTRUCTOR(Shr);
5352 };
5353
// Unsigned (logical) shift right: vacated high-order bits are zero-filled.
// Defined for integral types only; the float/double Evaluate overloads abort.
class HUShr final : public HBinaryOperation {
 public:
  HUShr(DataType::Type result_type,
        HInstruction* value,
        HInstruction* distance,
        uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(kUShr, result_type, value, distance, SideEffects::None(), dex_pc) {
    // The value operand must match the result kind; the distance is always int32.
    DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
  }

  // Shifts an unsigned copy of `value` so the result is zero-extended
  // regardless of the sign of `value`. Only the low bits of `distance`
  // (masked by `max_shift_distance`) take effect.
  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
    typedef typename std::make_unsigned<T>::type V;
    V ux = static_cast<V>(value);
    return static_cast<T>(ux >> (distance & max_shift_distance));
  }

  // Constant folding for (int, int).
  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const override {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  // Constant folding for (long, int) — the only valid long form.
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const override {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  // A long shift distance is invalid: the distance operand is always int32.
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(UShr);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(UShr);
};
5401
// Bitwise AND. Defined for integral types only; the float/double Evaluate
// overloads abort.
class HAnd final : public HBinaryOperation {
 public:
  HAnd(DataType::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(kAnd, result_type, left, right, SideEffects::None(), dex_pc) {
  }

  // AND is commutative, so optimizations may freely swap the operands.
  bool IsCommutative() const override { return true; }

  template <typename T> static T Compute(T x, T y) { return x & y; }

  // Constant folding for (int, int).
  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // Constant folding for (long, long).
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(And);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(And);
};
5439
// Bitwise OR. Defined for integral types only; the float/double Evaluate
// overloads abort.
class HOr final : public HBinaryOperation {
 public:
  HOr(DataType::Type result_type,
      HInstruction* left,
      HInstruction* right,
      uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(kOr, result_type, left, right, SideEffects::None(), dex_pc) {
  }

  // OR is commutative, so optimizations may freely swap the operands.
  bool IsCommutative() const override { return true; }

  template <typename T> static T Compute(T x, T y) { return x | y; }

  // Constant folding for (int, int).
  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // Constant folding for (long, long).
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Or);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Or);
};
5477
// Bitwise XOR. Defined for integral types only; the float/double Evaluate
// overloads abort.
class HXor final : public HBinaryOperation {
 public:
  HXor(DataType::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(kXor, result_type, left, right, SideEffects::None(), dex_pc) {
  }

  // XOR is commutative, so optimizations may freely swap the operands.
  bool IsCommutative() const override { return true; }

  template <typename T> static T Compute(T x, T y) { return x ^ y; }

  // Constant folding for (int, int).
  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // Constant folding for (long, long).
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Xor);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Xor);
};
5515
5516 class HRor final : public HBinaryOperation {
5517 public:
HRor(DataType::Type result_type,HInstruction * value,HInstruction * distance)5518 HRor(DataType::Type result_type, HInstruction* value, HInstruction* distance)
5519 : HBinaryOperation(kRor, result_type, value, distance) {
5520 DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
5521 DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
5522 }
5523
5524 template <typename T>
Compute(T value,int32_t distance,int32_t max_shift_value)5525 static T Compute(T value, int32_t distance, int32_t max_shift_value) {
5526 typedef typename std::make_unsigned<T>::type V;
5527 V ux = static_cast<V>(value);
5528 if ((distance & max_shift_value) == 0) {
5529 return static_cast<T>(ux);
5530 } else {
5531 const V reg_bits = sizeof(T) * 8;
5532 return static_cast<T>(ux >> (distance & max_shift_value)) |
5533 (value << (reg_bits - (distance & max_shift_value)));
5534 }
5535 }
5536
Evaluate(HIntConstant * value,HIntConstant * distance)5537 HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const override {
5538 return GetBlock()->GetGraph()->GetIntConstant(
5539 Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
5540 }
Evaluate(HLongConstant * value,HIntConstant * distance)5541 HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const override {
5542 return GetBlock()->GetGraph()->GetLongConstant(
5543 Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
5544 }
Evaluate(HLongConstant * value ATTRIBUTE_UNUSED,HLongConstant * distance ATTRIBUTE_UNUSED)5545 HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
5546 HLongConstant* distance ATTRIBUTE_UNUSED) const override {
5547 LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
5548 UNREACHABLE();
5549 }
Evaluate(HFloatConstant * value ATTRIBUTE_UNUSED,HFloatConstant * distance ATTRIBUTE_UNUSED)5550 HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
5551 HFloatConstant* distance ATTRIBUTE_UNUSED) const override {
5552 LOG(FATAL) << DebugName() << " is not defined for float values";
5553 UNREACHABLE();
5554 }
Evaluate(HDoubleConstant * value ATTRIBUTE_UNUSED,HDoubleConstant * distance ATTRIBUTE_UNUSED)5555 HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
5556 HDoubleConstant* distance ATTRIBUTE_UNUSED) const override {
5557 LOG(FATAL) << DebugName() << " is not defined for double values";
5558 UNREACHABLE();
5559 }
5560
5561 DECLARE_INSTRUCTION(Ror);
5562
5563 protected:
5564 DEFAULT_COPY_CONSTRUCTOR(Ror);
5565 };
5566
5567 // The value of a parameter in this method. Its location depends on
5568 // the calling convention.
class HParameterValue final : public HExpression<0> {
 public:
  HParameterValue(const DexFile& dex_file,
                  dex::TypeIndex type_index,
                  uint8_t index,
                  DataType::Type parameter_type,
                  bool is_this = false)
      : HExpression(kParameterValue, parameter_type, SideEffects::None(), kNoDexPc),
        dex_file_(dex_file),
        type_index_(type_index),
        index_(index) {
    SetPackedFlag<kFlagIsThis>(is_this);
    // The implicit 'this' argument can never be null; other parameters may be.
    SetPackedFlag<kFlagCanBeNull>(!is_this);
  }

  const DexFile& GetDexFile() const { return dex_file_; }
  dex::TypeIndex GetTypeIndex() const { return type_index_; }
  uint8_t GetIndex() const { return index_; }
  bool IsThis() const { return GetPackedFlag<kFlagIsThis>(); }

  bool CanBeNull() const override { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  DECLARE_INSTRUCTION(ParameterValue);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ParameterValue);

 private:
  // Whether or not the parameter value corresponds to 'this' argument.
  static constexpr size_t kFlagIsThis = kNumberOfGenericPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagIsThis + 1;
  static constexpr size_t kNumberOfParameterValuePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfParameterValuePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const DexFile& dex_file_;
  const dex::TypeIndex type_index_;
  // The index of this parameter in the parameters list. Must be less
  // than HGraph::number_of_in_vregs_.
  const uint8_t index_;
};
5611
// Bitwise complement (~x). Defined for integral types only; the float/double
// Evaluate overloads abort.
class HNot final : public HUnaryOperation {
 public:
  HNot(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(kNot, result_type, input, dex_pc) {
  }

  // Pure computation with no instruction-specific state, so it can be
  // moved and deduplicated freely.
  bool CanBeMoved() const override { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
    return true;
  }

  template <typename T> static T Compute(T x) { return ~x; }

  // Constant folding for int.
  HConstant* Evaluate(HIntConstant* x) const override {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  // Constant folding for long.
  HConstant* Evaluate(HLongConstant* x) const override {
    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Not);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Not);
};
5645
// Logical negation of a boolean (0/1) value. Only the int Evaluate overload
// is meaningful; long/float/double abort.
class HBooleanNot final : public HUnaryOperation {
 public:
  explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(kBooleanNot, DataType::Type::kBool, input, dex_pc) {
  }

  // Pure computation with no instruction-specific state, so it can be
  // moved and deduplicated freely.
  bool CanBeMoved() const override { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
    return true;
  }

  // The input must already be a canonical boolean (0 or 1), enforced in
  // debug builds by the IsUint<1> check.
  template <typename T> static bool Compute(T x) {
    DCHECK(IsUint<1>(x)) << x;
    return !x;
  }

  // Constant folding for int (the boolean is represented as an int constant).
  HConstant* Evaluate(HIntConstant* x) const override {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for long values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const override {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(BooleanNot);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(BooleanNot);
};
5683
class HTypeConversion final : public HExpression<1> {
 public:
  // Instantiate a type conversion of `input` to `result_type`.
  HTypeConversion(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(kTypeConversion, result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
    // Invariant: We should never generate a conversion to a Boolean value.
    DCHECK_NE(DataType::Type::kBool, result_type);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  DataType::Type GetInputType() const { return GetInput()->GetType(); }
  // The result type is the conversion's own type.
  DataType::Type GetResultType() const { return GetType(); }

  // Side-effect free and stateless, so clonable, movable and deduplicatable.
  bool IsClonable() const override { return true; }
  bool CanBeMoved() const override { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
    return true;
  }
  // Return whether the conversion is implicit. This includes conversion to the same type.
  bool IsImplicitConversion() const {
    return DataType::IsTypeConversionImplicit(GetInputType(), GetResultType());
  }

  // Try to statically evaluate the conversion and return a HConstant
  // containing the result. If the input cannot be converted, return nullptr.
  // (Defined out of line.)
  HConstant* TryStaticEvaluation() const;

  DECLARE_INSTRUCTION(TypeConversion);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(TypeConversion);
};
5717
// Sentinel register number (-1 converted to uint32_t, i.e. all bits set)
// meaning "no register assigned".
static constexpr uint32_t kNoRegNumber = -1;
5719
// Checks that its input is non-null and throws NullPointerException otherwise.
class HNullCheck final : public HExpression<1> {
 public:
  // `HNullCheck` can trigger GC, as it may call the `NullPointerException`
  // constructor. However it can only do it on a fatal slow path so execution never returns to the
  // instruction following the current one; thus 'SideEffects::None()' is used.
  HNullCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(kNullCheck, value->GetType(), SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  // Stateless, so clonable, movable and deduplicatable.
  bool IsClonable() const override { return true; }
  bool CanBeMoved() const override { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
    return true;
  }

  // Needs an environment to deopt/throw from.
  bool NeedsEnvironment() const override { return true; }

  // Can throw NullPointerException.
  bool CanThrow() const override { return true; }

  // After the check, the value is known to be non-null.
  bool CanBeNull() const override { return false; }

  DECLARE_INSTRUCTION(NullCheck);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(NullCheck);
};
5747
5748 // Embeds an ArtField and all the information required by the compiler. We cache
5749 // that information to avoid requiring the mutator lock every time we need it.
class FieldInfo : public ValueObject {
 public:
  // All members are set once at construction and are immutable thereafter.
  FieldInfo(ArtField* field,
            MemberOffset field_offset,
            DataType::Type field_type,
            bool is_volatile,
            uint32_t index,
            uint16_t declaring_class_def_index,
            const DexFile& dex_file)
      : field_(field),
        field_offset_(field_offset),
        field_type_(field_type),
        is_volatile_(is_volatile),
        index_(index),
        declaring_class_def_index_(declaring_class_def_index),
        dex_file_(dex_file) {}

  ArtField* GetField() const { return field_; }
  MemberOffset GetFieldOffset() const { return field_offset_; }
  DataType::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return index_; }
  uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;}
  const DexFile& GetDexFile() const { return dex_file_; }
  bool IsVolatile() const { return is_volatile_; }

 private:
  ArtField* const field_;
  const MemberOffset field_offset_;
  const DataType::Type field_type_;
  const bool is_volatile_;
  // Field index in the dex file.
  const uint32_t index_;
  const uint16_t declaring_class_def_index_;
  const DexFile& dex_file_;
};
5784
// Reads an instance field of the object given as input 0.
class HInstanceFieldGet final : public HExpression<1> {
 public:
  HInstanceFieldGet(HInstruction* value,
                    ArtField* field,
                    DataType::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HExpression(kInstanceFieldGet,
                    field_type,
                    SideEffects::FieldReadOfType(field_type, is_volatile),
                    dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    SetRawInputAt(0, value);
  }

  bool IsClonable() const override { return true; }
  // Volatile reads must stay in place; non-volatile gets can be moved.
  bool CanBeMoved() const override { return !IsVolatile(); }

  // Two gets are equal iff they read the same field offset.
  bool InstructionDataEquals(const HInstruction* other) const override {
    const HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  // An implicit null check is possible only when reading from the checked
  // object itself and the offset is small enough to fault in the guard page.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  // Mixes the field offset into the generic hash for GVN-style deduplication.
  size_t ComputeHashCode() const override {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  // Retype the get; only allowed between integral types of the same size.
  void SetType(DataType::Type new_type) {
    DCHECK(DataType::IsIntegralType(GetType()));
    DCHECK(DataType::IsIntegralType(new_type));
    DCHECK_EQ(DataType::Size(GetType()), DataType::Size(new_type));
    SetPackedField<TypeField>(new_type);
  }

  DECLARE_INSTRUCTION(InstanceFieldGet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(InstanceFieldGet);

 private:
  const FieldInfo field_info_;
};
5846
// Writes `value` (input 1) to an instance field of `object` (input 0).
class HInstanceFieldSet final : public HExpression<2> {
 public:
  HInstanceFieldSet(HInstruction* object,
                    HInstruction* value,
                    ArtField* field,
                    DataType::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HExpression(kInstanceFieldSet,
                    SideEffects::FieldWriteOfType(field_type, is_volatile),
                    dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    // Conservatively assume the stored value may be null until proven otherwise.
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, value);
  }

  bool IsClonable() const override { return true; }

  // An implicit null check is possible only when writing to the checked
  // object itself and the offset is small enough to fault in the guard page.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }
  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(InstanceFieldSet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(InstanceFieldSet);

 private:
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfInstanceFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfInstanceFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;
};
5901
// Reads an element of `array` (input 0) at `index` (input 1).
class HArrayGet final : public HExpression<2> {
 public:
  // Convenience constructor deriving the side effects from the element type.
  HArrayGet(HInstruction* array,
            HInstruction* index,
            DataType::Type type,
            uint32_t dex_pc)
      : HArrayGet(array,
                  index,
                  type,
                  SideEffects::ArrayReadOfType(type),
                  dex_pc,
                  /* is_string_char_at= */ false) {
  }

  HArrayGet(HInstruction* array,
            HInstruction* index,
            DataType::Type type,
            SideEffects side_effects,
            uint32_t dex_pc,
            bool is_string_char_at)
      : HExpression(kArrayGet, type, side_effects, dex_pc) {
    SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
  }

  // Stateless apart from the char-at flag, so clonable/movable/deduplicatable.
  bool IsClonable() const override { return true; }
  bool CanBeMoved() const override { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const override {
    // TODO: We can be smarter here.
    // Currently, unless the array is the result of NewArray, the array access is always
    // preceded by some form of null NullCheck necessary for the bounds check, usually
    // implicit null check on the ArrayLength input to BoundsCheck or Deoptimize for
    // dynamic BCE. There are cases when these could be removed to produce better code.
    // If we ever add optimizations to do so we should allow an implicit check here
    // (as long as the address falls in the first page).
    //
    // As an example of such fancy optimization, we could eliminate BoundsCheck for
    //     a = cond ? new int[1] : null;
    //     a[0];  // The Phi does not need bounds check for either input.
    return false;
  }

  // Two gets are "equivalent" when they are the same access at the same dex_pc
  // but with int/long vs float/double typing (debug-checked below).
  bool IsEquivalentOf(HArrayGet* other) const {
    bool result = (GetDexPc() == other->GetDexPc());
    if (kIsDebugBuild && result) {
      DCHECK_EQ(GetBlock(), other->GetBlock());
      DCHECK_EQ(GetArray(), other->GetArray());
      DCHECK_EQ(GetIndex(), other->GetIndex());
      // Exactly one of the two must be integral, the other floating-point.
      if (DataType::IsIntOrLongType(GetType())) {
        DCHECK(DataType::IsFloatingPointType(other->GetType())) << other->GetType();
      } else {
        DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
        DCHECK(DataType::IsIntOrLongType(other->GetType())) << other->GetType();
      }
    }
    return result;
  }

  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }

  // Retype the get; only allowed between integral types of the same size.
  void SetType(DataType::Type new_type) {
    DCHECK(DataType::IsIntegralType(GetType()));
    DCHECK(DataType::IsIntegralType(new_type));
    DCHECK_EQ(DataType::Size(GetType()), DataType::Size(new_type));
    SetPackedField<TypeField>(new_type);
  }

  DECLARE_INSTRUCTION(ArrayGet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ArrayGet);

 private:
  // We treat a String as an array, creating the HArrayGet from String.charAt()
  // intrinsic in the instruction simplifier. We can always determine whether
  // a particular HArrayGet is actually a String.charAt() by looking at the type
  // of the input but that requires holding the mutator lock, so we prefer to use
  // a flag, so that code generators don't need to do the locking.
  static constexpr size_t kFlagIsStringCharAt = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfArrayGetPackedBits = kFlagIsStringCharAt + 1;
  static_assert(kNumberOfArrayGetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
};
5992
5993 class HArraySet final : public HExpression<3> {
5994 public:
HArraySet(HInstruction * array,HInstruction * index,HInstruction * value,DataType::Type expected_component_type,uint32_t dex_pc)5995 HArraySet(HInstruction* array,
5996 HInstruction* index,
5997 HInstruction* value,
5998 DataType::Type expected_component_type,
5999 uint32_t dex_pc)
6000 : HArraySet(array,
6001 index,
6002 value,
6003 expected_component_type,
6004 // Make a best guess for side effects now, may be refined during SSA building.
6005 ComputeSideEffects(GetComponentType(value->GetType(), expected_component_type)),
6006 dex_pc) {
6007 }
6008
HArraySet(HInstruction * array,HInstruction * index,HInstruction * value,DataType::Type expected_component_type,SideEffects side_effects,uint32_t dex_pc)6009 HArraySet(HInstruction* array,
6010 HInstruction* index,
6011 HInstruction* value,
6012 DataType::Type expected_component_type,
6013 SideEffects side_effects,
6014 uint32_t dex_pc)
6015 : HExpression(kArraySet, side_effects, dex_pc) {
6016 SetPackedField<ExpectedComponentTypeField>(expected_component_type);
6017 SetPackedFlag<kFlagNeedsTypeCheck>(value->GetType() == DataType::Type::kReference);
6018 SetPackedFlag<kFlagValueCanBeNull>(true);
6019 SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(false);
6020 SetRawInputAt(0, array);
6021 SetRawInputAt(1, index);
6022 SetRawInputAt(2, value);
6023 }
6024
IsClonable()6025 bool IsClonable() const override { return true; }
6026
NeedsEnvironment()6027 bool NeedsEnvironment() const override {
6028 // We call a runtime method to throw ArrayStoreException.
6029 return NeedsTypeCheck();
6030 }
6031
6032 // Can throw ArrayStoreException.
CanThrow()6033 bool CanThrow() const override { return NeedsTypeCheck(); }
6034
CanDoImplicitNullCheckOn(HInstruction * obj ATTRIBUTE_UNUSED)6035 bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const override {
6036 // TODO: Same as for ArrayGet.
6037 return false;
6038 }
6039
ClearNeedsTypeCheck()6040 void ClearNeedsTypeCheck() {
6041 SetPackedFlag<kFlagNeedsTypeCheck>(false);
6042 }
6043
ClearValueCanBeNull()6044 void ClearValueCanBeNull() {
6045 SetPackedFlag<kFlagValueCanBeNull>(false);
6046 }
6047
SetStaticTypeOfArrayIsObjectArray()6048 void SetStaticTypeOfArrayIsObjectArray() {
6049 SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(true);
6050 }
6051
GetValueCanBeNull()6052 bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
NeedsTypeCheck()6053 bool NeedsTypeCheck() const { return GetPackedFlag<kFlagNeedsTypeCheck>(); }
StaticTypeOfArrayIsObjectArray()6054 bool StaticTypeOfArrayIsObjectArray() const {
6055 return GetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>();
6056 }
6057
GetArray()6058 HInstruction* GetArray() const { return InputAt(0); }
GetIndex()6059 HInstruction* GetIndex() const { return InputAt(1); }
GetValue()6060 HInstruction* GetValue() const { return InputAt(2); }
6061
GetComponentType()6062 DataType::Type GetComponentType() const {
6063 return GetComponentType(GetValue()->GetType(), GetRawExpectedComponentType());
6064 }
6065
GetComponentType(DataType::Type value_type,DataType::Type expected_component_type)6066 static DataType::Type GetComponentType(DataType::Type value_type,
6067 DataType::Type expected_component_type) {
6068 // The Dex format does not type floating point index operations. Since the
6069 // `expected_component_type` comes from SSA building and can therefore not
6070 // be correct, we also check what is the value type. If it is a floating
6071 // point type, we must use that type.
6072 return ((value_type == DataType::Type::kFloat32) || (value_type == DataType::Type::kFloat64))
6073 ? value_type
6074 : expected_component_type;
6075 }
6076
GetRawExpectedComponentType()6077 DataType::Type GetRawExpectedComponentType() const {
6078 return GetPackedField<ExpectedComponentTypeField>();
6079 }
6080
ComputeSideEffects(DataType::Type type)6081 static SideEffects ComputeSideEffects(DataType::Type type) {
6082 return SideEffects::ArrayWriteOfType(type).Union(SideEffectsForArchRuntimeCalls(type));
6083 }
6084
SideEffectsForArchRuntimeCalls(DataType::Type value_type)6085 static SideEffects SideEffectsForArchRuntimeCalls(DataType::Type value_type) {
6086 return (value_type == DataType::Type::kReference) ? SideEffects::CanTriggerGC()
6087 : SideEffects::None();
6088 }
6089
6090 DECLARE_INSTRUCTION(ArraySet);
6091
6092 protected:
6093 DEFAULT_COPY_CONSTRUCTOR(ArraySet);
6094
6095 private:
6096 static constexpr size_t kFieldExpectedComponentType = kNumberOfGenericPackedBits;
6097 static constexpr size_t kFieldExpectedComponentTypeSize =
6098 MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
6099 static constexpr size_t kFlagNeedsTypeCheck =
6100 kFieldExpectedComponentType + kFieldExpectedComponentTypeSize;
6101 static constexpr size_t kFlagValueCanBeNull = kFlagNeedsTypeCheck + 1;
6102 // Cached information for the reference_type_info_ so that codegen
6103 // does not need to inspect the static type.
6104 static constexpr size_t kFlagStaticTypeOfArrayIsObjectArray = kFlagValueCanBeNull + 1;
6105 static constexpr size_t kNumberOfArraySetPackedBits =
6106 kFlagStaticTypeOfArrayIsObjectArray + 1;
6107 static_assert(kNumberOfArraySetPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
6108 using ExpectedComponentTypeField =
6109 BitField<DataType::Type, kFieldExpectedComponentType, kFieldExpectedComponentTypeSize>;
6110 };
6111
6112 class HArrayLength final : public HExpression<1> {
6113 public:
6114 HArrayLength(HInstruction* array, uint32_t dex_pc, bool is_string_length = false)
HExpression(kArrayLength,DataType::Type::kInt32,SideEffects::None (),dex_pc)6115 : HExpression(kArrayLength, DataType::Type::kInt32, SideEffects::None(), dex_pc) {
6116 SetPackedFlag<kFlagIsStringLength>(is_string_length);
6117 // Note that arrays do not change length, so the instruction does not
6118 // depend on any write.
6119 SetRawInputAt(0, array);
6120 }
6121
IsClonable()6122 bool IsClonable() const override { return true; }
CanBeMoved()6123 bool CanBeMoved() const override { return true; }
InstructionDataEquals(const HInstruction * other ATTRIBUTE_UNUSED)6124 bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
6125 return true;
6126 }
CanDoImplicitNullCheckOn(HInstruction * obj)6127 bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
6128 return obj == InputAt(0);
6129 }
6130
IsStringLength()6131 bool IsStringLength() const { return GetPackedFlag<kFlagIsStringLength>(); }
6132
6133 DECLARE_INSTRUCTION(ArrayLength);
6134
6135 protected:
6136 DEFAULT_COPY_CONSTRUCTOR(ArrayLength);
6137
6138 private:
6139 // We treat a String as an array, creating the HArrayLength from String.length()
6140 // or String.isEmpty() intrinsic in the instruction simplifier. We can always
6141 // determine whether a particular HArrayLength is actually a String.length() by
6142 // looking at the type of the input but that requires holding the mutator lock, so
6143 // we prefer to use a flag, so that code generators don't need to do the locking.
6144 static constexpr size_t kFlagIsStringLength = kNumberOfGenericPackedBits;
6145 static constexpr size_t kNumberOfArrayLengthPackedBits = kFlagIsStringLength + 1;
6146 static_assert(kNumberOfArrayLengthPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6147 "Too many packed fields.");
6148 };
6149
6150 class HBoundsCheck final : public HExpression<2> {
6151 public:
6152 // `HBoundsCheck` can trigger GC, as it may call the `IndexOutOfBoundsException`
6153 // constructor. However it can only do it on a fatal slow path so execution never returns to the
6154 // instruction following the current one; thus 'SideEffects::None()' is used.
6155 HBoundsCheck(HInstruction* index,
6156 HInstruction* length,
6157 uint32_t dex_pc,
6158 bool is_string_char_at = false)
6159 : HExpression(kBoundsCheck, index->GetType(), SideEffects::None(), dex_pc) {
6160 DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(index->GetType()));
6161 SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
6162 SetRawInputAt(0, index);
6163 SetRawInputAt(1, length);
6164 }
6165
IsClonable()6166 bool IsClonable() const override { return true; }
CanBeMoved()6167 bool CanBeMoved() const override { return true; }
InstructionDataEquals(const HInstruction * other ATTRIBUTE_UNUSED)6168 bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
6169 return true;
6170 }
6171
NeedsEnvironment()6172 bool NeedsEnvironment() const override { return true; }
6173
CanThrow()6174 bool CanThrow() const override { return true; }
6175
IsStringCharAt()6176 bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }
6177
GetIndex()6178 HInstruction* GetIndex() const { return InputAt(0); }
6179
6180 DECLARE_INSTRUCTION(BoundsCheck);
6181
6182 protected:
6183 DEFAULT_COPY_CONSTRUCTOR(BoundsCheck);
6184
6185 private:
6186 static constexpr size_t kFlagIsStringCharAt = kNumberOfGenericPackedBits;
6187 static constexpr size_t kNumberOfBoundsCheckPackedBits = kFlagIsStringCharAt + 1;
6188 static_assert(kNumberOfBoundsCheckPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6189 "Too many packed fields.");
6190 };
6191
6192 class HSuspendCheck final : public HExpression<0> {
6193 public:
6194 explicit HSuspendCheck(uint32_t dex_pc = kNoDexPc)
HExpression(kSuspendCheck,SideEffects::CanTriggerGC (),dex_pc)6195 : HExpression(kSuspendCheck, SideEffects::CanTriggerGC(), dex_pc),
6196 slow_path_(nullptr) {
6197 }
6198
IsClonable()6199 bool IsClonable() const override { return true; }
6200
NeedsEnvironment()6201 bool NeedsEnvironment() const override {
6202 return true;
6203 }
6204
SetSlowPath(SlowPathCode * slow_path)6205 void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
GetSlowPath()6206 SlowPathCode* GetSlowPath() const { return slow_path_; }
6207
6208 DECLARE_INSTRUCTION(SuspendCheck);
6209
6210 protected:
6211 DEFAULT_COPY_CONSTRUCTOR(SuspendCheck);
6212
6213 private:
6214 // Only used for code generation, in order to share the same slow path between back edges
6215 // of a same loop.
6216 SlowPathCode* slow_path_;
6217 };
6218
6219 // Pseudo-instruction which provides the native debugger with mapping information.
6220 // It ensures that we can generate line number and local variables at this point.
6221 class HNativeDebugInfo : public HExpression<0> {
6222 public:
HNativeDebugInfo(uint32_t dex_pc)6223 explicit HNativeDebugInfo(uint32_t dex_pc)
6224 : HExpression<0>(kNativeDebugInfo, SideEffects::None(), dex_pc) {
6225 }
6226
NeedsEnvironment()6227 bool NeedsEnvironment() const override {
6228 return true;
6229 }
6230
6231 DECLARE_INSTRUCTION(NativeDebugInfo);
6232
6233 protected:
6234 DEFAULT_COPY_CONSTRUCTOR(NativeDebugInfo);
6235 };
6236
6237 /**
6238 * Instruction to load a Class object.
6239 */
6240 class HLoadClass final : public HInstruction {
6241 public:
6242 // Determines how to load the Class.
6243 enum class LoadKind {
6244 // We cannot load this class. See HSharpening::SharpenLoadClass.
6245 kInvalid = -1,
6246
6247 // Use the Class* from the method's own ArtMethod*.
6248 kReferrersClass,
6249
6250 // Use PC-relative boot image Class* address that will be known at link time.
6251 // Used for boot image classes referenced by boot image code.
6252 kBootImageLinkTimePcRelative,
6253
6254 // Load from an entry in the .data.bimg.rel.ro using a PC-relative load.
6255 // Used for boot image classes referenced by apps in AOT-compiled code.
6256 kBootImageRelRo,
6257
6258 // Load from an entry in the .bss section using a PC-relative load.
6259 // Used for classes outside boot image referenced by AOT-compiled app and boot image code.
6260 kBssEntry,
6261
6262 // Use a known boot image Class* address, embedded in the code by the codegen.
6263 // Used for boot image classes referenced by apps in JIT-compiled code.
6264 kJitBootImageAddress,
6265
6266 // Load from the root table associated with the JIT compiled method.
6267 kJitTableAddress,
6268
6269 // Load using a simple runtime call. This is the fall-back load kind when
6270 // the codegen is unable to use another appropriate kind.
6271 kRuntimeCall,
6272
6273 kLast = kRuntimeCall
6274 };
6275
HLoadClass(HCurrentMethod * current_method,dex::TypeIndex type_index,const DexFile & dex_file,Handle<mirror::Class> klass,bool is_referrers_class,uint32_t dex_pc,bool needs_access_check)6276 HLoadClass(HCurrentMethod* current_method,
6277 dex::TypeIndex type_index,
6278 const DexFile& dex_file,
6279 Handle<mirror::Class> klass,
6280 bool is_referrers_class,
6281 uint32_t dex_pc,
6282 bool needs_access_check)
6283 : HInstruction(kLoadClass,
6284 DataType::Type::kReference,
6285 SideEffectsForArchRuntimeCalls(),
6286 dex_pc),
6287 special_input_(HUserRecord<HInstruction*>(current_method)),
6288 type_index_(type_index),
6289 dex_file_(dex_file),
6290 klass_(klass) {
6291 // Referrers class should not need access check. We never inline unverified
6292 // methods so we can't possibly end up in this situation.
6293 DCHECK(!is_referrers_class || !needs_access_check);
6294
6295 SetPackedField<LoadKindField>(
6296 is_referrers_class ? LoadKind::kReferrersClass : LoadKind::kRuntimeCall);
6297 SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
6298 SetPackedFlag<kFlagIsInBootImage>(false);
6299 SetPackedFlag<kFlagGenerateClInitCheck>(false);
6300 SetPackedFlag<kFlagValidLoadedClassRTI>(false);
6301 }
6302
IsClonable()6303 bool IsClonable() const override { return true; }
6304
6305 void SetLoadKind(LoadKind load_kind);
6306
GetLoadKind()6307 LoadKind GetLoadKind() const {
6308 return GetPackedField<LoadKindField>();
6309 }
6310
HasPcRelativeLoadKind()6311 bool HasPcRelativeLoadKind() const {
6312 return GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
6313 GetLoadKind() == LoadKind::kBootImageRelRo ||
6314 GetLoadKind() == LoadKind::kBssEntry;
6315 }
6316
CanBeMoved()6317 bool CanBeMoved() const override { return true; }
6318
6319 bool InstructionDataEquals(const HInstruction* other) const override;
6320
ComputeHashCode()6321 size_t ComputeHashCode() const override { return type_index_.index_; }
6322
CanBeNull()6323 bool CanBeNull() const override { return false; }
6324
NeedsEnvironment()6325 bool NeedsEnvironment() const override {
6326 return CanCallRuntime();
6327 }
6328
SetMustGenerateClinitCheck(bool generate_clinit_check)6329 void SetMustGenerateClinitCheck(bool generate_clinit_check) {
6330 // The entrypoint the code generator is going to call does not do
6331 // clinit of the class.
6332 DCHECK(!NeedsAccessCheck());
6333 SetPackedFlag<kFlagGenerateClInitCheck>(generate_clinit_check);
6334 }
6335
CanCallRuntime()6336 bool CanCallRuntime() const {
6337 return NeedsAccessCheck() ||
6338 MustGenerateClinitCheck() ||
6339 GetLoadKind() == LoadKind::kRuntimeCall ||
6340 GetLoadKind() == LoadKind::kBssEntry;
6341 }
6342
CanThrow()6343 bool CanThrow() const override {
6344 return NeedsAccessCheck() ||
6345 MustGenerateClinitCheck() ||
6346 // If the class is in the boot image, the lookup in the runtime call cannot throw.
6347 ((GetLoadKind() == LoadKind::kRuntimeCall ||
6348 GetLoadKind() == LoadKind::kBssEntry) &&
6349 !IsInBootImage());
6350 }
6351
GetLoadedClassRTI()6352 ReferenceTypeInfo GetLoadedClassRTI() {
6353 if (GetPackedFlag<kFlagValidLoadedClassRTI>()) {
6354 // Note: The is_exact flag from the return value should not be used.
6355 return ReferenceTypeInfo::CreateUnchecked(klass_, /* is_exact= */ true);
6356 } else {
6357 return ReferenceTypeInfo::CreateInvalid();
6358 }
6359 }
6360
6361 // Loaded class RTI is marked as valid by RTP if the klass_ is admissible.
SetValidLoadedClassRTI()6362 void SetValidLoadedClassRTI() REQUIRES_SHARED(Locks::mutator_lock_) {
6363 DCHECK(klass_ != nullptr);
6364 SetPackedFlag<kFlagValidLoadedClassRTI>(true);
6365 }
6366
GetTypeIndex()6367 dex::TypeIndex GetTypeIndex() const { return type_index_; }
GetDexFile()6368 const DexFile& GetDexFile() const { return dex_file_; }
6369
NeedsDexCacheOfDeclaringClass()6370 bool NeedsDexCacheOfDeclaringClass() const override {
6371 return GetLoadKind() == LoadKind::kRuntimeCall;
6372 }
6373
SideEffectsForArchRuntimeCalls()6374 static SideEffects SideEffectsForArchRuntimeCalls() {
6375 return SideEffects::CanTriggerGC();
6376 }
6377
IsReferrersClass()6378 bool IsReferrersClass() const { return GetLoadKind() == LoadKind::kReferrersClass; }
NeedsAccessCheck()6379 bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
IsInBootImage()6380 bool IsInBootImage() const { return GetPackedFlag<kFlagIsInBootImage>(); }
MustGenerateClinitCheck()6381 bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); }
6382
MustResolveTypeOnSlowPath()6383 bool MustResolveTypeOnSlowPath() const {
6384 // Check that this instruction has a slow path.
6385 DCHECK(GetLoadKind() != LoadKind::kRuntimeCall); // kRuntimeCall calls on main path.
6386 DCHECK(GetLoadKind() == LoadKind::kBssEntry || MustGenerateClinitCheck());
6387 return GetLoadKind() == LoadKind::kBssEntry;
6388 }
6389
MarkInBootImage()6390 void MarkInBootImage() {
6391 SetPackedFlag<kFlagIsInBootImage>(true);
6392 }
6393
6394 void AddSpecialInput(HInstruction* special_input);
6395
6396 using HInstruction::GetInputRecords; // Keep the const version visible.
GetInputRecords()6397 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
6398 return ArrayRef<HUserRecord<HInstruction*>>(
6399 &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
6400 }
6401
GetClass()6402 Handle<mirror::Class> GetClass() const {
6403 return klass_;
6404 }
6405
6406 DECLARE_INSTRUCTION(LoadClass);
6407
6408 protected:
6409 DEFAULT_COPY_CONSTRUCTOR(LoadClass);
6410
6411 private:
6412 static constexpr size_t kFlagNeedsAccessCheck = kNumberOfGenericPackedBits;
6413 static constexpr size_t kFlagIsInBootImage = kFlagNeedsAccessCheck + 1;
6414 // Whether this instruction must generate the initialization check.
6415 // Used for code generation.
6416 static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInBootImage + 1;
6417 static constexpr size_t kFieldLoadKind = kFlagGenerateClInitCheck + 1;
6418 static constexpr size_t kFieldLoadKindSize =
6419 MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
6420 static constexpr size_t kFlagValidLoadedClassRTI = kFieldLoadKind + kFieldLoadKindSize;
6421 static constexpr size_t kNumberOfLoadClassPackedBits = kFlagValidLoadedClassRTI + 1;
6422 static_assert(kNumberOfLoadClassPackedBits < kMaxNumberOfPackedBits, "Too many packed fields.");
6423 using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
6424
HasTypeReference(LoadKind load_kind)6425 static bool HasTypeReference(LoadKind load_kind) {
6426 return load_kind == LoadKind::kReferrersClass ||
6427 load_kind == LoadKind::kBootImageLinkTimePcRelative ||
6428 load_kind == LoadKind::kBssEntry ||
6429 load_kind == LoadKind::kRuntimeCall;
6430 }
6431
6432 void SetLoadKindInternal(LoadKind load_kind);
6433
6434 // The special input is the HCurrentMethod for kRuntimeCall or kReferrersClass.
6435 // For other load kinds it's empty or possibly some architecture-specific instruction
6436 // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
6437 HUserRecord<HInstruction*> special_input_;
6438
6439 // A type index and dex file where the class can be accessed. The dex file can be:
6440 // - The compiling method's dex file if the class is defined there too.
6441 // - The compiling method's dex file if the class is referenced there.
6442 // - The dex file where the class is defined. When the load kind can only be
6443 // kBssEntry or kRuntimeCall, we cannot emit code for this `HLoadClass`.
6444 const dex::TypeIndex type_index_;
6445 const DexFile& dex_file_;
6446
6447 Handle<mirror::Class> klass_;
6448 };
6449 std::ostream& operator<<(std::ostream& os, HLoadClass::LoadKind rhs);
6450
6451 // Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
SetLoadKind(LoadKind load_kind)6452 inline void HLoadClass::SetLoadKind(LoadKind load_kind) {
6453 // The load kind should be determined before inserting the instruction to the graph.
6454 DCHECK(GetBlock() == nullptr);
6455 DCHECK(GetEnvironment() == nullptr);
6456 SetPackedField<LoadKindField>(load_kind);
6457 if (load_kind != LoadKind::kRuntimeCall && load_kind != LoadKind::kReferrersClass) {
6458 special_input_ = HUserRecord<HInstruction*>(nullptr);
6459 }
6460 if (!NeedsEnvironment()) {
6461 SetSideEffects(SideEffects::None());
6462 }
6463 }
6464
6465 // Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
AddSpecialInput(HInstruction * special_input)6466 inline void HLoadClass::AddSpecialInput(HInstruction* special_input) {
6467 // The special input is used for PC-relative loads on some architectures,
6468 // including literal pool loads, which are PC-relative too.
6469 DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
6470 GetLoadKind() == LoadKind::kBootImageRelRo ||
6471 GetLoadKind() == LoadKind::kBssEntry ||
6472 GetLoadKind() == LoadKind::kJitBootImageAddress) << GetLoadKind();
6473 DCHECK(special_input_.GetInstruction() == nullptr);
6474 special_input_ = HUserRecord<HInstruction*>(special_input);
6475 special_input->AddUseAt(this, 0);
6476 }
6477
6478 class HLoadString final : public HInstruction {
6479 public:
6480 // Determines how to load the String.
6481 enum class LoadKind {
6482 // Use PC-relative boot image String* address that will be known at link time.
6483 // Used for boot image strings referenced by boot image code.
6484 kBootImageLinkTimePcRelative,
6485
6486 // Load from an entry in the .data.bimg.rel.ro using a PC-relative load.
6487 // Used for boot image strings referenced by apps in AOT-compiled code.
6488 kBootImageRelRo,
6489
6490 // Load from an entry in the .bss section using a PC-relative load.
6491 // Used for strings outside boot image referenced by AOT-compiled app and boot image code.
6492 kBssEntry,
6493
6494 // Use a known boot image String* address, embedded in the code by the codegen.
6495 // Used for boot image strings referenced by apps in JIT-compiled code.
6496 kJitBootImageAddress,
6497
6498 // Load from the root table associated with the JIT compiled method.
6499 kJitTableAddress,
6500
6501 // Load using a simple runtime call. This is the fall-back load kind when
6502 // the codegen is unable to use another appropriate kind.
6503 kRuntimeCall,
6504
6505 kLast = kRuntimeCall,
6506 };
6507
HLoadString(HCurrentMethod * current_method,dex::StringIndex string_index,const DexFile & dex_file,uint32_t dex_pc)6508 HLoadString(HCurrentMethod* current_method,
6509 dex::StringIndex string_index,
6510 const DexFile& dex_file,
6511 uint32_t dex_pc)
6512 : HInstruction(kLoadString,
6513 DataType::Type::kReference,
6514 SideEffectsForArchRuntimeCalls(),
6515 dex_pc),
6516 special_input_(HUserRecord<HInstruction*>(current_method)),
6517 string_index_(string_index),
6518 dex_file_(dex_file) {
6519 SetPackedField<LoadKindField>(LoadKind::kRuntimeCall);
6520 }
6521
IsClonable()6522 bool IsClonable() const override { return true; }
6523
6524 void SetLoadKind(LoadKind load_kind);
6525
GetLoadKind()6526 LoadKind GetLoadKind() const {
6527 return GetPackedField<LoadKindField>();
6528 }
6529
HasPcRelativeLoadKind()6530 bool HasPcRelativeLoadKind() const {
6531 return GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
6532 GetLoadKind() == LoadKind::kBootImageRelRo ||
6533 GetLoadKind() == LoadKind::kBssEntry;
6534 }
6535
GetDexFile()6536 const DexFile& GetDexFile() const {
6537 return dex_file_;
6538 }
6539
GetStringIndex()6540 dex::StringIndex GetStringIndex() const {
6541 return string_index_;
6542 }
6543
GetString()6544 Handle<mirror::String> GetString() const {
6545 return string_;
6546 }
6547
SetString(Handle<mirror::String> str)6548 void SetString(Handle<mirror::String> str) {
6549 string_ = str;
6550 }
6551
CanBeMoved()6552 bool CanBeMoved() const override { return true; }
6553
6554 bool InstructionDataEquals(const HInstruction* other) const override;
6555
ComputeHashCode()6556 size_t ComputeHashCode() const override { return string_index_.index_; }
6557
6558 // Will call the runtime if we need to load the string through
6559 // the dex cache and the string is not guaranteed to be there yet.
NeedsEnvironment()6560 bool NeedsEnvironment() const override {
6561 LoadKind load_kind = GetLoadKind();
6562 if (load_kind == LoadKind::kBootImageLinkTimePcRelative ||
6563 load_kind == LoadKind::kBootImageRelRo ||
6564 load_kind == LoadKind::kJitBootImageAddress ||
6565 load_kind == LoadKind::kJitTableAddress) {
6566 return false;
6567 }
6568 return true;
6569 }
6570
NeedsDexCacheOfDeclaringClass()6571 bool NeedsDexCacheOfDeclaringClass() const override {
6572 return GetLoadKind() == LoadKind::kRuntimeCall;
6573 }
6574
CanBeNull()6575 bool CanBeNull() const override { return false; }
CanThrow()6576 bool CanThrow() const override { return NeedsEnvironment(); }
6577
SideEffectsForArchRuntimeCalls()6578 static SideEffects SideEffectsForArchRuntimeCalls() {
6579 return SideEffects::CanTriggerGC();
6580 }
6581
6582 void AddSpecialInput(HInstruction* special_input);
6583
6584 using HInstruction::GetInputRecords; // Keep the const version visible.
GetInputRecords()6585 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
6586 return ArrayRef<HUserRecord<HInstruction*>>(
6587 &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
6588 }
6589
6590 DECLARE_INSTRUCTION(LoadString);
6591
6592 protected:
6593 DEFAULT_COPY_CONSTRUCTOR(LoadString);
6594
6595 private:
6596 static constexpr size_t kFieldLoadKind = kNumberOfGenericPackedBits;
6597 static constexpr size_t kFieldLoadKindSize =
6598 MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
6599 static constexpr size_t kNumberOfLoadStringPackedBits = kFieldLoadKind + kFieldLoadKindSize;
6600 static_assert(kNumberOfLoadStringPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
6601 using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
6602
6603 void SetLoadKindInternal(LoadKind load_kind);
6604
6605 // The special input is the HCurrentMethod for kRuntimeCall.
6606 // For other load kinds it's empty or possibly some architecture-specific instruction
6607 // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
6608 HUserRecord<HInstruction*> special_input_;
6609
6610 dex::StringIndex string_index_;
6611 const DexFile& dex_file_;
6612
6613 Handle<mirror::String> string_;
6614 };
6615 std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs);
6616
6617 // Note: defined outside class to see operator<<(., HLoadString::LoadKind).
SetLoadKind(LoadKind load_kind)6618 inline void HLoadString::SetLoadKind(LoadKind load_kind) {
6619 // The load kind should be determined before inserting the instruction to the graph.
6620 DCHECK(GetBlock() == nullptr);
6621 DCHECK(GetEnvironment() == nullptr);
6622 DCHECK_EQ(GetLoadKind(), LoadKind::kRuntimeCall);
6623 SetPackedField<LoadKindField>(load_kind);
6624 if (load_kind != LoadKind::kRuntimeCall) {
6625 special_input_ = HUserRecord<HInstruction*>(nullptr);
6626 }
6627 if (!NeedsEnvironment()) {
6628 SetSideEffects(SideEffects::None());
6629 }
6630 }
6631
6632 // Note: defined outside class to see operator<<(., HLoadString::LoadKind).
AddSpecialInput(HInstruction * special_input)6633 inline void HLoadString::AddSpecialInput(HInstruction* special_input) {
6634 // The special input is used for PC-relative loads on some architectures,
6635 // including literal pool loads, which are PC-relative too.
6636 DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
6637 GetLoadKind() == LoadKind::kBootImageRelRo ||
6638 GetLoadKind() == LoadKind::kBssEntry ||
6639 GetLoadKind() == LoadKind::kJitBootImageAddress) << GetLoadKind();
6640 // HLoadString::GetInputRecords() returns an empty array at this point,
6641 // so use the GetInputRecords() from the base class to set the input record.
6642 DCHECK(special_input_.GetInstruction() == nullptr);
6643 special_input_ = HUserRecord<HInstruction*>(special_input);
6644 special_input->AddUseAt(this, 0);
6645 }
6646
6647 class HLoadMethodHandle final : public HInstruction {
6648 public:
HLoadMethodHandle(HCurrentMethod * current_method,uint16_t method_handle_idx,const DexFile & dex_file,uint32_t dex_pc)6649 HLoadMethodHandle(HCurrentMethod* current_method,
6650 uint16_t method_handle_idx,
6651 const DexFile& dex_file,
6652 uint32_t dex_pc)
6653 : HInstruction(kLoadMethodHandle,
6654 DataType::Type::kReference,
6655 SideEffectsForArchRuntimeCalls(),
6656 dex_pc),
6657 special_input_(HUserRecord<HInstruction*>(current_method)),
6658 method_handle_idx_(method_handle_idx),
6659 dex_file_(dex_file) {
6660 }
6661
6662 using HInstruction::GetInputRecords; // Keep the const version visible.
GetInputRecords()6663 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
6664 return ArrayRef<HUserRecord<HInstruction*>>(
6665 &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
6666 }
6667
IsClonable()6668 bool IsClonable() const override { return true; }
6669
GetMethodHandleIndex()6670 uint16_t GetMethodHandleIndex() const { return method_handle_idx_; }
6671
GetDexFile()6672 const DexFile& GetDexFile() const { return dex_file_; }
6673
SideEffectsForArchRuntimeCalls()6674 static SideEffects SideEffectsForArchRuntimeCalls() {
6675 return SideEffects::CanTriggerGC();
6676 }
6677
6678 DECLARE_INSTRUCTION(LoadMethodHandle);
6679
6680 protected:
6681 DEFAULT_COPY_CONSTRUCTOR(LoadMethodHandle);
6682
6683 private:
6684 // The special input is the HCurrentMethod for kRuntimeCall.
6685 HUserRecord<HInstruction*> special_input_;
6686
6687 const uint16_t method_handle_idx_;
6688 const DexFile& dex_file_;
6689 };
6690
6691 class HLoadMethodType final : public HInstruction {
6692 public:
HLoadMethodType(HCurrentMethod * current_method,dex::ProtoIndex proto_index,const DexFile & dex_file,uint32_t dex_pc)6693 HLoadMethodType(HCurrentMethod* current_method,
6694 dex::ProtoIndex proto_index,
6695 const DexFile& dex_file,
6696 uint32_t dex_pc)
6697 : HInstruction(kLoadMethodType,
6698 DataType::Type::kReference,
6699 SideEffectsForArchRuntimeCalls(),
6700 dex_pc),
6701 special_input_(HUserRecord<HInstruction*>(current_method)),
6702 proto_index_(proto_index),
6703 dex_file_(dex_file) {
6704 }
6705
6706 using HInstruction::GetInputRecords; // Keep the const version visible.
GetInputRecords()6707 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
6708 return ArrayRef<HUserRecord<HInstruction*>>(
6709 &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
6710 }
6711
IsClonable()6712 bool IsClonable() const override { return true; }
6713
GetProtoIndex()6714 dex::ProtoIndex GetProtoIndex() const { return proto_index_; }
6715
GetDexFile()6716 const DexFile& GetDexFile() const { return dex_file_; }
6717
SideEffectsForArchRuntimeCalls()6718 static SideEffects SideEffectsForArchRuntimeCalls() {
6719 return SideEffects::CanTriggerGC();
6720 }
6721
6722 DECLARE_INSTRUCTION(LoadMethodType);
6723
6724 protected:
6725 DEFAULT_COPY_CONSTRUCTOR(LoadMethodType);
6726
6727 private:
6728 // The special input is the HCurrentMethod for kRuntimeCall.
6729 HUserRecord<HInstruction*> special_input_;
6730
6731 const dex::ProtoIndex proto_index_;
6732 const DexFile& dex_file_;
6733 };
6734
6735 /**
6736 * Performs an initialization check on its Class object input.
6737 */
6738 class HClinitCheck final : public HExpression<1> {
6739 public:
HClinitCheck(HLoadClass * constant,uint32_t dex_pc)6740 HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
6741 : HExpression(
6742 kClinitCheck,
6743 DataType::Type::kReference,
6744 SideEffects::AllExceptGCDependency(), // Assume write/read on all fields/arrays.
6745 dex_pc) {
6746 SetRawInputAt(0, constant);
6747 }
6748 // TODO: Make ClinitCheck clonable.
CanBeMoved()6749 bool CanBeMoved() const override { return true; }
InstructionDataEquals(const HInstruction * other ATTRIBUTE_UNUSED)6750 bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
6751 return true;
6752 }
6753
NeedsEnvironment()6754 bool NeedsEnvironment() const override {
6755 // May call runtime to initialize the class.
6756 return true;
6757 }
6758
CanThrow()6759 bool CanThrow() const override { return true; }
6760
GetLoadClass()6761 HLoadClass* GetLoadClass() const {
6762 DCHECK(InputAt(0)->IsLoadClass());
6763 return InputAt(0)->AsLoadClass();
6764 }
6765
6766 DECLARE_INSTRUCTION(ClinitCheck);
6767
6768
6769 protected:
6770 DEFAULT_COPY_CONSTRUCTOR(ClinitCheck);
6771 };
6772
6773 class HStaticFieldGet final : public HExpression<1> {
6774 public:
HStaticFieldGet(HInstruction * cls,ArtField * field,DataType::Type field_type,MemberOffset field_offset,bool is_volatile,uint32_t field_idx,uint16_t declaring_class_def_index,const DexFile & dex_file,uint32_t dex_pc)6775 HStaticFieldGet(HInstruction* cls,
6776 ArtField* field,
6777 DataType::Type field_type,
6778 MemberOffset field_offset,
6779 bool is_volatile,
6780 uint32_t field_idx,
6781 uint16_t declaring_class_def_index,
6782 const DexFile& dex_file,
6783 uint32_t dex_pc)
6784 : HExpression(kStaticFieldGet,
6785 field_type,
6786 SideEffects::FieldReadOfType(field_type, is_volatile),
6787 dex_pc),
6788 field_info_(field,
6789 field_offset,
6790 field_type,
6791 is_volatile,
6792 field_idx,
6793 declaring_class_def_index,
6794 dex_file) {
6795 SetRawInputAt(0, cls);
6796 }
6797
6798
IsClonable()6799 bool IsClonable() const override { return true; }
CanBeMoved()6800 bool CanBeMoved() const override { return !IsVolatile(); }
6801
InstructionDataEquals(const HInstruction * other)6802 bool InstructionDataEquals(const HInstruction* other) const override {
6803 const HStaticFieldGet* other_get = other->AsStaticFieldGet();
6804 return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
6805 }
6806
ComputeHashCode()6807 size_t ComputeHashCode() const override {
6808 return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
6809 }
6810
GetFieldInfo()6811 const FieldInfo& GetFieldInfo() const { return field_info_; }
GetFieldOffset()6812 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
GetFieldType()6813