/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_LOAD_STORE_ANALYSIS_H_
#define ART_COMPILER_OPTIMIZING_LOAD_STORE_ANALYSIS_H_

#include "base/arena_allocator.h"
#include "base/arena_bit_vector.h"
#include "base/bit_vector-inl.h"
#include "base/macros.h"
#include "base/scoped_arena_allocator.h"
#include "base/scoped_arena_containers.h"
#include "base/stl_util.h"
#include "escape.h"
#include "execution_subgraph.h"
#include "nodes.h"
#include "optimizing/optimizing_compiler_stats.h"

namespace art HIDDEN {

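// How much information the analysis collects, as inferred from the uses below
// (see the ReferenceInfo constructor): kBasic does no per-block escape
// tracking (partial LSE is disabled); kNoPredicatedInstructions tracks
// partial escapes but treats writes to partially escaped values as escaping;
// kFull tracks partial escapes for use with predicated instructions.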
enum class LoadStoreAnalysisType {
  kBasic,
  kNoPredicatedInstructions,
  kFull,
};

// A ReferenceInfo contains additional info about a reference such as
// whether it's a singleton, returned, etc.
class ReferenceInfo : public DeletableArenaObject<kArenaAllocLSA> {
 public:
  ReferenceInfo(HInstruction* reference,
                ScopedArenaAllocator* allocator,
                size_t pos,
                LoadStoreAnalysisType elimination_type)
      : reference_(reference),
        position_(pos),
        is_singleton_(true),
        is_singleton_and_not_returned_(true),
        is_singleton_and_not_deopt_visible_(true),
        allocator_(allocator),
        subgraph_(nullptr) {
    // TODO: We can do this in one pass.
    // TODO: NewArray is possible, but we will need to get a handle on how to
    // deal with the dynamic loads; for now, just ignore it.
    bool can_be_partial = elimination_type != LoadStoreAnalysisType::kBasic &&
                          (/* reference_->IsNewArray() || */ reference_->IsNewInstance());
    if (can_be_partial) {
      subgraph_.reset(
          new (allocator) ExecutionSubgraph(reference->GetBlock()->GetGraph(), allocator));
      CollectPartialEscapes(reference_->GetBlock()->GetGraph());
    }
    CalculateEscape(reference_,
                    nullptr,
                    &is_singleton_,
                    &is_singleton_and_not_returned_,
                    &is_singleton_and_not_deopt_visible_);
    if (can_be_partial) {
      if (elimination_type == LoadStoreAnalysisType::kNoPredicatedInstructions) {
        // This marks writes to partially escaped values as also being part of
        // the escaped subset.
        // TODO: We can avoid this if we have a 'ConditionalWrite' instruction. This
        //       will require testing to see if the additional branches are worth it.
        PrunePartialEscapeWrites();
      }
      DCHECK(subgraph_ != nullptr);
      subgraph_->Finalize();
    } else {
      DCHECK(subgraph_ == nullptr);
    }
  }

  const ExecutionSubgraph* GetNoEscapeSubgraph() const {
    DCHECK(IsPartialSingleton());
    return subgraph_.get();
  }

  HInstruction* GetReference() const {
    return reference_;
  }

  size_t GetPosition() const {
    return position_;
  }

  // Returns true if reference_ is the only name that can refer to its value during
  // the lifetime of the method, i.e. it is guaranteed to have no aliases in
  // the method (including its callees).
  bool IsSingleton() const {
    return is_singleton_;
  }

  // Returns true if this is a singleton and there are paths on which the
  // reference does not escape the method.
  bool IsPartialSingleton() const {
    auto ref = GetReference();
    // TODO: NewArray is possible, but we will need to get a handle on how to
    // deal with the dynamic loads; for now, just ignore it.
    return (/* ref->IsNewArray() || */ ref->IsNewInstance()) &&
           subgraph_ != nullptr &&
           subgraph_->IsValid();
  }
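
  // Illustrative sketch of a partial singleton (hypothetical code, not from
  // this file):
  //
  //   Foo o = new Foo();  // 'o' is the only name for the allocation.
  //   if (cond) {
  //     escape(o);        // 'o' escapes only on this path, so the
  //   }                   // no-escape subgraph excludes this block...
  //   o.x = 42;           // ...while accesses here remain analyzable.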

  // Returns true if reference_ is a singleton and not returned to the caller or
  // used as an environment local of an HDeoptimize instruction.
  // The allocation and stores into reference_ may be eliminated for such cases.
  bool IsSingletonAndRemovable() const {
    return is_singleton_and_not_returned_ && is_singleton_and_not_deopt_visible_;
  }

  // Returns true if reference_ is a singleton and returned to the caller or
  // used as an environment local of an HDeoptimize instruction.
  bool IsSingletonAndNonRemovable() const {
    return is_singleton_ &&
           (!is_singleton_and_not_returned_ || !is_singleton_and_not_deopt_visible_);
  }

 private:
  void CollectPartialEscapes(HGraph* graph);
  void HandleEscape(HBasicBlock* escape) {
    DCHECK(subgraph_ != nullptr);
    subgraph_->RemoveBlock(escape);
  }
  void HandleEscape(HInstruction* escape) {
    HandleEscape(escape->GetBlock());
  }

  // Make sure we mark any writes/potential writes to heap locations within
  // partially escaped values as escaping.
  void PrunePartialEscapeWrites();

  HInstruction* const reference_;
  const size_t position_;  // position in HeapLocationCollector's ref_info_array_.

  // Can only be referred to by a single name in the method.
  bool is_singleton_;
  // Is singleton and not returned to caller.
  bool is_singleton_and_not_returned_;
  // Is singleton and not used as an environment local of HDeoptimize.
  bool is_singleton_and_not_deopt_visible_;

  ScopedArenaAllocator* allocator_;

  std::unique_ptr<ExecutionSubgraph> subgraph_;

  DISALLOW_COPY_AND_ASSIGN(ReferenceInfo);
};

// A heap location is a reference-offset/index pair that a value can be loaded from
// or stored to.
class HeapLocation : public ArenaObject<kArenaAllocLSA> {
 public:
  static constexpr size_t kInvalidFieldOffset = -1;
  // Default value for heap locations which are not vector data.
  static constexpr size_t kScalar = 1;
  // TODO: more fine-grained array types.
  static constexpr int16_t kDeclaringClassDefIndexForArrays = -1;

  HeapLocation(ReferenceInfo* ref_info,
               DataType::Type type,
               size_t offset,
               HInstruction* index,
               size_t vector_length,
               int16_t declaring_class_def_index,
               bool is_vec_op)
      : ref_info_(ref_info),
        type_(DataType::ToSigned(type)),
        offset_(offset),
        index_(index),
        vector_length_(vector_length),
        declaring_class_def_index_(declaring_class_def_index),
        has_aliased_locations_(false),
        is_vec_op_(is_vec_op) {
    DCHECK(ref_info != nullptr);
    DCHECK((offset == kInvalidFieldOffset && index != nullptr) ||
           (offset != kInvalidFieldOffset && index == nullptr));
  }
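
  // Two illustrative locations (a sketch, using hypothetical accesses 'o.f'
  // and 'a[i]'):
  //   o.f  -> ref_info for o, offset = f's field offset, index = nullptr, kScalar
  //   a[i] -> ref_info for a, offset = kInvalidFieldOffset, index = i, kScalar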

  ReferenceInfo* GetReferenceInfo() const { return ref_info_; }
  DataType::Type GetType() const { return type_; }
  size_t GetOffset() const { return offset_; }
  HInstruction* GetIndex() const { return index_; }
  size_t GetVectorLength() const { return vector_length_; }
  bool IsVecOp() const { return is_vec_op_; }

  // Returns the dex def index of the declaring class.
  // It's kDeclaringClassDefIndexForArrays for an array element.
  int16_t GetDeclaringClassDefIndex() const {
    return declaring_class_def_index_;
  }

  bool IsArray() const {
    return index_ != nullptr;
  }

  bool HasAliasedLocations() const {
    return has_aliased_locations_;
  }

  void SetHasAliasedLocations(bool val) {
    has_aliased_locations_ = val;
  }

 private:
  // Reference for instance/static field, array element or vector data.
  ReferenceInfo* const ref_info_;
  // Type of data residing at HeapLocation (always signed for integral
  // data since e.g. a[i] and a[i] & 0xff are represented by differently
  // signed types; char vs short are disambiguated through the reference).
  const DataType::Type type_;
  // Offset of static/instance field.
  // Invalid when this HeapLocation is not a field access.
  const size_t offset_;
  // Index of an array element or starting index of vector data.
  // Invalid when this HeapLocation is not an array access.
  HInstruction* const index_;
  // Vector length of vector data.
  // When this HeapLocation is not vector data, its value is kScalar.
  const size_t vector_length_;
  // Declaring class's def's dex index.
  // Invalid when this HeapLocation is not a field access.
  const int16_t declaring_class_def_index_;
  // Whether this heap location has aliased locations in the method, either
  // because the reference is aliased or because the array element is aliased
  // via different index names.
  bool has_aliased_locations_;
  // Whether this HeapLocation represents a vector operation.
  bool is_vec_op_;

  DISALLOW_COPY_AND_ASSIGN(HeapLocation);
};

// A HeapLocationCollector collects all relevant heap locations and keeps
// an aliasing matrix for all locations.
class HeapLocationCollector : public HGraphVisitor {
 public:
  static constexpr size_t kHeapLocationNotFound = -1;
  // Start with a single uint32_t word. That's enough bits for a pair-wise
  // aliasing matrix of 8 heap locations.
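  // (8 locations yield 8 * 7 / 2 = 28 pairs, which fits in one 32-bit word.)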
  static constexpr uint32_t kInitialAliasingMatrixBitVectorSize = 32;

  HeapLocationCollector(HGraph* graph,
                        ScopedArenaAllocator* allocator,
                        LoadStoreAnalysisType lse_type)
      : HGraphVisitor(graph),
        allocator_(allocator),
        ref_info_array_(allocator->Adapter(kArenaAllocLSA)),
        heap_locations_(allocator->Adapter(kArenaAllocLSA)),
        aliasing_matrix_(allocator, kInitialAliasingMatrixBitVectorSize, true, kArenaAllocLSA),
        has_heap_stores_(false),
        lse_type_(lse_type) {
    aliasing_matrix_.ClearAllBits();
  }

  ~HeapLocationCollector() {
    CleanUp();
  }

  void CleanUp() {
    heap_locations_.clear();
    STLDeleteContainerPointers(ref_info_array_.begin(), ref_info_array_.end());
    ref_info_array_.clear();
  }

  size_t CountPartialSingletons() const {
    return std::count_if(ref_info_array_.begin(),
                         ref_info_array_.end(),
                         [](ReferenceInfo* ri) { return ri->IsPartialSingleton(); });
  }

  size_t GetNumberOfHeapLocations() const {
    return heap_locations_.size();
  }

  HeapLocation* GetHeapLocation(size_t index) const {
    return heap_locations_[index];
  }

  size_t GetHeapLocationIndex(const HeapLocation* hl) const {
    auto res = std::find(heap_locations_.cbegin(), heap_locations_.cend(), hl);
    return std::distance(heap_locations_.cbegin(), res);
  }

  HInstruction* HuntForOriginalReference(HInstruction* ref) const {
    // An original reference can be transformed by instructions like:
    //   i0 NewArray
    //   i1 HInstruction(i0)  <-- NullCheck, BoundType, IntermediateAddress.
    //   i2 ArrayGet(i1, index)
    DCHECK(ref != nullptr);
    while (ref->IsNullCheck() || ref->IsBoundType() || ref->IsIntermediateAddress()) {
      ref = ref->InputAt(0);
    }
    return ref;
  }

  ReferenceInfo* FindReferenceInfoOf(HInstruction* ref) const {
    for (size_t i = 0; i < ref_info_array_.size(); i++) {
      ReferenceInfo* ref_info = ref_info_array_[i];
      if (ref_info->GetReference() == ref) {
        DCHECK_EQ(i, ref_info->GetPosition());
        return ref_info;
      }
    }
    return nullptr;
  }

  size_t GetFieldHeapLocation(HInstruction* object, const FieldInfo* field) const {
    DCHECK(object != nullptr);
    DCHECK(field != nullptr);
    return FindHeapLocationIndex(FindReferenceInfoOf(HuntForOriginalReference(object)),
                                 field->GetFieldType(),
                                 field->GetFieldOffset().SizeValue(),
                                 nullptr,
                                 HeapLocation::kScalar,
                                 field->GetDeclaringClassDefIndex(),
                                 /*is_vec_op=*/false);
  }

  size_t GetArrayHeapLocation(HInstruction* instruction) const {
    DCHECK(instruction != nullptr);
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    DataType::Type type = instruction->GetType();
    size_t vector_length = HeapLocation::kScalar;
    const bool is_vec_op = instruction->IsVecStore() || instruction->IsVecLoad();
    if (instruction->IsArraySet()) {
      type = instruction->AsArraySet()->GetComponentType();
    } else if (is_vec_op) {
      HVecOperation* vec_op = instruction->AsVecOperation();
      type = vec_op->GetPackedType();
      vector_length = vec_op->GetVectorLength();
    } else {
      DCHECK(instruction->IsArrayGet());
    }
    return FindHeapLocationIndex(FindReferenceInfoOf(HuntForOriginalReference(array)),
                                 type,
                                 HeapLocation::kInvalidFieldOffset,
                                 index,
                                 vector_length,
                                 HeapLocation::kDeclaringClassDefIndexForArrays,
                                 is_vec_op);
  }

  bool HasHeapStores() const {
    return has_heap_stores_;
  }

  // Find and return the heap location index in heap_locations_.
  // NOTE: When heap locations are created, potentially aliasing/overlapping
  // accesses are given different indexes. This find function also
  // doesn't take aliasing/overlapping into account. For example,
  // this function returns three different indexes for:
  // - ref_info=array, index=i, vector_length=kScalar;
  // - ref_info=array, index=i, vector_length=2;
  // - ref_info=array, index=i, vector_length=4;
  // In later analysis, ComputeMayAlias() and MayAlias() compute and tell whether
  // these indexes alias.
  size_t FindHeapLocationIndex(ReferenceInfo* ref_info,
                               DataType::Type type,
                               size_t offset,
                               HInstruction* index,
                               size_t vector_length,
                               int16_t declaring_class_def_index,
                               bool is_vec_op) const {
    DataType::Type lookup_type = DataType::ToSigned(type);
    for (size_t i = 0; i < heap_locations_.size(); i++) {
      HeapLocation* loc = heap_locations_[i];
      if (loc->GetReferenceInfo() == ref_info &&
          loc->GetType() == lookup_type &&
          loc->GetOffset() == offset &&
          loc->GetIndex() == index &&
          loc->GetVectorLength() == vector_length &&
          loc->GetDeclaringClassDefIndex() == declaring_class_def_index &&
          loc->IsVecOp() == is_vec_op) {
        return i;
      }
    }
    return kHeapLocationNotFound;
  }

  bool InstructionEligibleForLSERemoval(HInstruction* inst) const;

  // Get some estimated statistics based on our analysis.
  void DumpReferenceStats(OptimizingCompilerStats* stats);

  // Returns true if heap_locations_[index1] and heap_locations_[index2] may alias.
  bool MayAlias(size_t index1, size_t index2) const {
    if (index1 < index2) {
      return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index1, index2));
    } else if (index1 > index2) {
      return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index2, index1));
    } else {
      DCHECK(false) << "index1 and index2 are expected to be different";
      return true;
    }
  }

  void BuildAliasingMatrix() {
    const size_t number_of_locations = heap_locations_.size();
    if (number_of_locations == 0) {
      return;
    }
    size_t pos = 0;
    // Compute aliasing info between every pair of different heap locations.
    // Save the result in a matrix represented as a BitVector.
    for (size_t i = 0; i < number_of_locations - 1; i++) {
      for (size_t j = i + 1; j < number_of_locations; j++) {
        if (ComputeMayAlias(i, j)) {
          aliasing_matrix_.SetBit(CheckedAliasingMatrixPosition(i, j, pos));
        }
        pos++;
      }
    }
  }
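
  // For example (sketch), with 4 locations the loops above visit pairs in
  // this order, which is the bit numbering AliasingMatrixPosition() computes:
  //   (0,1)->0, (0,2)->1, (0,3)->2, (1,2)->3, (1,3)->4, (2,3)->5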

  static bool CanReferencesAlias(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) {
    if (ref_info1 == ref_info2) {
      return true;
    } else if (ref_info1->IsSingleton()) {
      return false;
    } else if (ref_info2->IsSingleton()) {
      return false;
    } else if (!MayAliasWithPreexistenceChecking(ref_info1, ref_info2) ||
        !MayAliasWithPreexistenceChecking(ref_info2, ref_info1)) {
      return false;
    }
    return true;
  }

 private:
  // An allocation cannot alias with a name which already exists at the point
  // of the allocation, such as a parameter or a load happening before the allocation.
  static bool MayAliasWithPreexistenceChecking(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) {
    if (ref_info1->GetReference()->IsNewInstance() || ref_info1->GetReference()->IsNewArray()) {
      // Any reference that can alias with the allocation must appear after it in the block/in
      // the block's successors. In reverse post order, those instructions will be visited after
      // the allocation.
      return ref_info2->GetPosition() >= ref_info1->GetPosition();
    }
    return true;
  }
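
  // For example (sketch): a parameter is visited before any 'new Foo()' in
  // reverse post order, so its ReferenceInfo position is smaller than the
  // allocation's and the check above rules out aliasing between the two.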

  bool CanArrayElementsAlias(const HInstruction* idx1,
                             const size_t vector_length1,
                             const HInstruction* idx2,
                             const size_t vector_length2) const;

  // `index1` and `index2` are indices in the array of collected heap locations.
  // Returns the position in the bit vector that tracks whether the two heap
  // locations may alias.
  size_t AliasingMatrixPosition(size_t index1, size_t index2) const {
    DCHECK(index2 > index1);
    const size_t number_of_locations = heap_locations_.size();
    // It's (num_of_locations - 1) + ... + (num_of_locations - index1) + (index2 - index1 - 1).
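    // Worked check: with num_of_locations = 4 and (index1, index2) = (1, 3),
    //   4 * 1 - (1 + 1) * 1 / 2 + (3 - 1 - 1) = 4,
    // matching the pair order (0,1)(0,2)(0,3)(1,2)(1,3)(2,3), where (1,3) is bit 4.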
    return (number_of_locations * index1 - (1 + index1) * index1 / 2 + (index2 - index1 - 1));
  }

  // An additional position is passed in to make sure the calculated position is correct.
  size_t CheckedAliasingMatrixPosition(size_t index1, size_t index2, size_t position) {
    size_t calculated_position = AliasingMatrixPosition(index1, index2);
    DCHECK_EQ(calculated_position, position);
    return calculated_position;
  }

  // Compute if two locations may alias to each other.
  bool ComputeMayAlias(size_t index1, size_t index2) const {
    DCHECK_NE(index1, index2);
    HeapLocation* loc1 = heap_locations_[index1];
    HeapLocation* loc2 = heap_locations_[index2];
    if (loc1->GetOffset() != loc2->GetOffset()) {
      // Either two different instance fields, or one is an instance
      // field and the other is array data.
      return false;
    }
    if (loc1->GetDeclaringClassDefIndex() != loc2->GetDeclaringClassDefIndex()) {
      // Different declaring classes, so different fields.
      return false;
    }
    if (!CanReferencesAlias(loc1->GetReferenceInfo(), loc2->GetReferenceInfo())) {
      return false;
    }
    if (loc1->IsArray() && loc2->IsArray()) {
      HInstruction* idx1 = loc1->GetIndex();
      HInstruction* idx2 = loc2->GetIndex();
      size_t vector_length1 = loc1->GetVectorLength();
      size_t vector_length2 = loc2->GetVectorLength();
      if (!CanArrayElementsAlias(idx1, vector_length1, idx2, vector_length2)) {
        return false;
      }
    }
    loc1->SetHasAliasedLocations(true);
    loc2->SetHasAliasedLocations(true);
    return true;
  }

  ReferenceInfo* GetOrCreateReferenceInfo(HInstruction* instruction) {
    ReferenceInfo* ref_info = FindReferenceInfoOf(instruction);
    if (ref_info == nullptr) {
      size_t pos = ref_info_array_.size();
      ref_info = new (allocator_) ReferenceInfo(instruction, allocator_, pos, lse_type_);
      ref_info_array_.push_back(ref_info);
    }
    return ref_info;
  }

  void CreateReferenceInfoForReferenceType(HInstruction* instruction) {
    if (instruction->GetType() != DataType::Type::kReference) {
      return;
    }
    DCHECK(FindReferenceInfoOf(instruction) == nullptr);
    GetOrCreateReferenceInfo(instruction);
  }

  void MaybeCreateHeapLocation(HInstruction* ref,
                               DataType::Type type,
                               size_t offset,
                               HInstruction* index,
                               size_t vector_length,
                               int16_t declaring_class_def_index,
                               bool is_vec_op) {
    HInstruction* original_ref = HuntForOriginalReference(ref);
    ReferenceInfo* ref_info = GetOrCreateReferenceInfo(original_ref);
    size_t heap_location_idx = FindHeapLocationIndex(
        ref_info, type, offset, index, vector_length, declaring_class_def_index, is_vec_op);
    if (heap_location_idx == kHeapLocationNotFound) {
      HeapLocation* heap_loc = new (allocator_) HeapLocation(
          ref_info, type, offset, index, vector_length, declaring_class_def_index, is_vec_op);
      heap_locations_.push_back(heap_loc);
    }
  }

  void VisitFieldAccess(HInstruction* ref, const FieldInfo& field_info) {
    DataType::Type type = field_info.GetFieldType();
    const uint16_t declaring_class_def_index = field_info.GetDeclaringClassDefIndex();
    const size_t offset = field_info.GetFieldOffset().SizeValue();
    MaybeCreateHeapLocation(ref,
                            type,
                            offset,
                            nullptr,
                            HeapLocation::kScalar,
                            declaring_class_def_index,
                            /*is_vec_op=*/false);
  }

  void VisitArrayAccess(HInstruction* array,
                        HInstruction* index,
                        DataType::Type type,
                        size_t vector_length,
                        bool is_vec_op) {
    MaybeCreateHeapLocation(array,
                            type,
                            HeapLocation::kInvalidFieldOffset,
                            index,
                            vector_length,
                            HeapLocation::kDeclaringClassDefIndexForArrays,
                            is_vec_op);
  }

  void VisitPredicatedInstanceFieldGet(HPredicatedInstanceFieldGet* instruction) override {
    VisitFieldAccess(instruction->GetTarget(), instruction->GetFieldInfo());
    CreateReferenceInfoForReferenceType(instruction);
  }
  void VisitInstanceFieldGet(HInstanceFieldGet* instruction) override {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitInstanceFieldSet(HInstanceFieldSet* instruction) override {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    has_heap_stores_ = true;
  }

  void VisitStaticFieldGet(HStaticFieldGet* instruction) override {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitStaticFieldSet(HStaticFieldSet* instruction) override {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    has_heap_stores_ = true;
  }

  // We intentionally don't collect HUnresolvedInstanceField/HUnresolvedStaticField accesses
  // since we cannot accurately track the fields.

  void VisitArrayGet(HArrayGet* instruction) override {
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    DataType::Type type = instruction->GetType();
    VisitArrayAccess(array, index, type, HeapLocation::kScalar, /*is_vec_op=*/false);
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitArraySet(HArraySet* instruction) override {
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    DataType::Type type = instruction->GetComponentType();
    VisitArrayAccess(array, index, type, HeapLocation::kScalar, /*is_vec_op=*/false);
    has_heap_stores_ = true;
  }

  void VisitVecLoad(HVecLoad* instruction) override {
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    DataType::Type type = instruction->GetPackedType();
    VisitArrayAccess(array, index, type, instruction->GetVectorLength(), /*is_vec_op=*/true);
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitVecStore(HVecStore* instruction) override {
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    DataType::Type type = instruction->GetPackedType();
    VisitArrayAccess(array, index, type, instruction->GetVectorLength(), /*is_vec_op=*/true);
    has_heap_stores_ = true;
  }

  void VisitInstruction(HInstruction* instruction) override {
    // Any new-instance or new-array cannot alias with references that
    // pre-exist the new-instance/new-array. We append entries into
    // ref_info_array_, which keeps track of the order of creation
    // of reference values, since we visit the blocks in reverse post order.
    //
    // By default, VisitXXX() (including VisitPhi()) calls VisitInstruction(),
    // unless VisitXXX() is overridden. VisitInstanceFieldGet() etc. above
    // also call CreateReferenceInfoForReferenceType() explicitly.
    CreateReferenceInfoForReferenceType(instruction);
  }

  ScopedArenaAllocator* allocator_;
  ScopedArenaVector<ReferenceInfo*> ref_info_array_;   // All references used for heap accesses.
  ScopedArenaVector<HeapLocation*> heap_locations_;    // All heap locations.
  ArenaBitVector aliasing_matrix_;    // Aliasing info between each pair of locations.
  bool has_heap_stores_;    // If there are no heap stores, LSE acts as GVN with
                            // better alias analysis and won't be as effective.
  LoadStoreAnalysisType lse_type_;

  DISALLOW_COPY_AND_ASSIGN(HeapLocationCollector);
};
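
// Illustrative query sketch (hypothetical; 'collector' is a HeapLocationCollector
// that has visited the graph and built its aliasing matrix):
//
//   size_t h1 = collector.GetArrayHeapLocation(array_get);
//   size_t h2 = collector.GetArrayHeapLocation(array_set);
//   if (h1 != HeapLocationCollector::kHeapLocationNotFound &&
//       h2 != HeapLocationCollector::kHeapLocationNotFound &&
//       h1 != h2 && collector.MayAlias(h1, h2)) {
//     // The two accesses may touch the same memory; be conservative.
//   }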

class LoadStoreAnalysis {
 public:
  // lse_type controls whether we should keep track of escapes at a per-block
  // level for partial LSE.
  explicit LoadStoreAnalysis(HGraph* graph,
                             OptimizingCompilerStats* stats,
                             ScopedArenaAllocator* local_allocator,
                             LoadStoreAnalysisType lse_type)
      : graph_(graph),
        stats_(stats),
        heap_location_collector_(
            graph,
            local_allocator,
            ExecutionSubgraph::CanAnalyse(graph_) ? lse_type : LoadStoreAnalysisType::kBasic) {}

  const HeapLocationCollector& GetHeapLocationCollector() const {
    return heap_location_collector_;
  }

  bool Run();

 private:
  HGraph* graph_;
  OptimizingCompilerStats* stats_;
  HeapLocationCollector heap_location_collector_;

  DISALLOW_COPY_AND_ASSIGN(LoadStoreAnalysis);
};
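
// Typical usage, as a hedged sketch (actual call sites live in the optimizing
// compiler's passes, not in this header; 'graph' and 'stats' are assumed to
// be in scope):
//
//   ScopedArenaAllocator allocator(graph->GetArenaStack());
//   LoadStoreAnalysis lsa(graph, stats, &allocator, LoadStoreAnalysisType::kFull);
//   if (lsa.Run()) {
//     const HeapLocationCollector& hlc = lsa.GetHeapLocationCollector();
//     // Query hlc.GetNumberOfHeapLocations(), hlc.MayAlias(i, j), etc.
//   }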

}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_LOAD_STORE_ANALYSIS_H_