/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_LOAD_STORE_ANALYSIS_H_
#define ART_COMPILER_OPTIMIZING_LOAD_STORE_ANALYSIS_H_

#include "base/arena_allocator.h"
#include "base/arena_bit_vector.h"
#include "base/bit_vector-inl.h"
#include "base/scoped_arena_allocator.h"
#include "base/scoped_arena_containers.h"
#include "base/stl_util.h"
#include "escape.h"
#include "execution_subgraph.h"
#include "nodes.h"
#include "optimizing/optimizing_compiler_stats.h"

namespace art {

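// How much escape information the analysis gathers. The meanings below are
// inferred from how ReferenceInfo uses this enum: kBasic collects no
// per-block escape subgraphs, kNoPredicatedInstructions collects them but
// additionally treats writes to partially escaped values as escapes (so that
// no predicated instructions are needed later), and kFull tracks partial
// escapes precisely.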
enum class LoadStoreAnalysisType {
  kBasic,
  kNoPredicatedInstructions,
  kFull,
};

// A ReferenceInfo contains additional info about a reference such as
// whether it's a singleton, returned, etc.
class ReferenceInfo : public DeletableArenaObject<kArenaAllocLSA> {
 public:
  ReferenceInfo(HInstruction* reference,
                ScopedArenaAllocator* allocator,
                size_t pos,
                LoadStoreAnalysisType elimination_type)
      : reference_(reference),
        position_(pos),
        is_singleton_(true),
        is_singleton_and_not_returned_(true),
        is_singleton_and_not_deopt_visible_(true),
        allocator_(allocator),
        subgraph_(nullptr) {
    // TODO: We can do this in one pass.
    // TODO: NewArray is also possible, but we will need to get a handle on how
    //       to deal with the dynamic loads; for now just ignore it.
    bool can_be_partial = elimination_type != LoadStoreAnalysisType::kBasic &&
                          (/* reference_->IsNewArray() || */ reference_->IsNewInstance());
    if (can_be_partial) {
      subgraph_.reset(
          new (allocator) ExecutionSubgraph(reference->GetBlock()->GetGraph(), allocator));
      CollectPartialEscapes(reference_->GetBlock()->GetGraph());
    }
    CalculateEscape(reference_,
                    nullptr,
                    &is_singleton_,
                    &is_singleton_and_not_returned_,
                    &is_singleton_and_not_deopt_visible_);
    if (can_be_partial) {
      if (elimination_type == LoadStoreAnalysisType::kNoPredicatedInstructions) {
        // Mark writes to partially escaped values as also part of the escaped subset.
        // TODO: We could avoid this if we had a 'ConditionalWrite' instruction. It will
        //       require testing to see if the additional branches are worth it.
        PrunePartialEscapeWrites();
      }
      DCHECK(subgraph_ != nullptr);
      subgraph_->Finalize();
    } else {
      DCHECK(subgraph_ == nullptr);
    }
  }

  const ExecutionSubgraph* GetNoEscapeSubgraph() const {
    DCHECK(IsPartialSingleton());
    return subgraph_.get();
  }

  HInstruction* GetReference() const {
    return reference_;
  }

  size_t GetPosition() const {
    return position_;
  }

  // Returns true if reference_ is the only name that can refer to its value during
  // the lifetime of the method, i.e. it is guaranteed not to have any alias in
  // the method (including its callees).
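  //
  // For example (illustrative only): in
  //   Object o = new Object();  // never stored, passed to a call, or returned
  // `o` is a singleton, since no other name in the method (or its callees)
  // can refer to that instance.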
  bool IsSingleton() const {
    return is_singleton_;
  }

  // Returns true if this reference is a singleton and there are execution paths
  // along which it does not escape the method.
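  //
  // For example (illustrative only): an object that escapes on one branch
  // (say, by being passed to a call) but not on the other is a partial
  // singleton; the subgraph of blocks where it does not escape is recorded
  // in subgraph_.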
  bool IsPartialSingleton() const {
    auto ref = GetReference();
    // TODO: NewArray is also possible, but we will need to get a handle on how
    //       to deal with the dynamic loads; for now just ignore it.
    return (/* ref->IsNewArray() || */ ref->IsNewInstance()) &&
           subgraph_ != nullptr &&
           subgraph_->IsValid();
  }

  // Returns true if reference_ is a singleton and not returned to the caller or
  // used as an environment local of an HDeoptimize instruction.
  // The allocation and stores into reference_ may be eliminated for such cases.
  bool IsSingletonAndRemovable() const {
    return is_singleton_and_not_returned_ && is_singleton_and_not_deopt_visible_;
  }

  // Returns true if reference_ is a singleton and returned to the caller or
  // used as an environment local of an HDeoptimize instruction.
  bool IsSingletonAndNonRemovable() const {
    return is_singleton_ &&
           (!is_singleton_and_not_returned_ || !is_singleton_and_not_deopt_visible_);
  }

 private:
  void CollectPartialEscapes(HGraph* graph);
  void HandleEscape(HBasicBlock* escape) {
    DCHECK(subgraph_ != nullptr);
    subgraph_->RemoveBlock(escape);
  }
  void HandleEscape(HInstruction* escape) {
    HandleEscape(escape->GetBlock());
  }

  // Make sure we mark any writes/potential writes to heap-locations within partially
  // escaped values as escaping.
  void PrunePartialEscapeWrites();

  HInstruction* const reference_;
  const size_t position_;  // position in HeapLocationCollector's ref_info_array_.

  // Can only be referred to by a single name in the method.
  bool is_singleton_;
  // Is singleton and not returned to caller.
  bool is_singleton_and_not_returned_;
  // Is singleton and not used as an environment local of HDeoptimize.
  bool is_singleton_and_not_deopt_visible_;

  ScopedArenaAllocator* allocator_;

  std::unique_ptr<ExecutionSubgraph> subgraph_;

  DISALLOW_COPY_AND_ASSIGN(ReferenceInfo);
};

// A heap location is a reference-offset/index pair that a value can be loaded from
// or stored to.
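// For example (illustrative only), `obj.f` corresponds to the pair
// (ref_info(obj), offset of f), while `a[i]` corresponds to the pair
// (ref_info(a), index instruction i); accesses with equal pairs denote the
// same heap location.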
class HeapLocation : public ArenaObject<kArenaAllocLSA> {
 public:
  static constexpr size_t kInvalidFieldOffset = -1;
  // Default value for heap locations which are not vector data.
  static constexpr size_t kScalar = 1;
  // TODO: more fine-grained array types.
  static constexpr int16_t kDeclaringClassDefIndexForArrays = -1;

  HeapLocation(ReferenceInfo* ref_info,
               DataType::Type type,
               size_t offset,
               HInstruction* index,
               size_t vector_length,
               int16_t declaring_class_def_index)
      : ref_info_(ref_info),
        type_(DataType::ToSigned(type)),
        offset_(offset),
        index_(index),
        vector_length_(vector_length),
        declaring_class_def_index_(declaring_class_def_index),
        has_aliased_locations_(false) {
    DCHECK(ref_info != nullptr);
    DCHECK((offset == kInvalidFieldOffset && index != nullptr) ||
           (offset != kInvalidFieldOffset && index == nullptr));
  }

  ReferenceInfo* GetReferenceInfo() const { return ref_info_; }
  DataType::Type GetType() const { return type_; }
  size_t GetOffset() const { return offset_; }
  HInstruction* GetIndex() const { return index_; }
  size_t GetVectorLength() const { return vector_length_; }

  // Returns the dex def index of the declaring class.
  // It is kDeclaringClassDefIndexForArrays for an array element.
  int16_t GetDeclaringClassDefIndex() const {
    return declaring_class_def_index_;
  }

  bool IsArray() const {
    return index_ != nullptr;
  }

  bool HasAliasedLocations() const {
    return has_aliased_locations_;
  }

  void SetHasAliasedLocations(bool val) {
    has_aliased_locations_ = val;
  }

 private:
  // Reference for instance/static field, array element or vector data.
  ReferenceInfo* const ref_info_;
  // Type of data residing at HeapLocation (always signed for integral
  // data since e.g. a[i] and a[i] & 0xff are represented by differently
  // signed types; char vs short are disambiguated through the reference).
  const DataType::Type type_;
  // Offset of the static/instance field.
  // Invalid when this HeapLocation is not a field access.
  const size_t offset_;
  // Index of an array element or starting index of vector data.
  // Invalid when this HeapLocation is not an array access.
  HInstruction* const index_;
  // Vector length of vector data.
  // When this HeapLocation is not vector data, its value is kScalar.
  const size_t vector_length_;
  // Dex def index of the declaring class.
  // Invalid when this HeapLocation is not a field access.
  const int16_t declaring_class_def_index_;

  // Whether this location is aliased with another heap location in the method,
  // either because the reference itself is aliased or because the array element
  // is aliased via different index names.
  bool has_aliased_locations_;

  DISALLOW_COPY_AND_ASSIGN(HeapLocation);
};

// A HeapLocationCollector collects all relevant heap locations and keeps
// an aliasing matrix for all locations.
class HeapLocationCollector : public HGraphVisitor {
 public:
  static constexpr size_t kHeapLocationNotFound = -1;
  // Start with a single uint32_t word. That's enough bits for the pair-wise
  // aliasing matrix of 8 heap locations (8 * 7 / 2 = 28 pairs).
  static constexpr uint32_t kInitialAliasingMatrixBitVectorSize = 32;

  HeapLocationCollector(HGraph* graph,
                        ScopedArenaAllocator* allocator,
                        LoadStoreAnalysisType lse_type)
      : HGraphVisitor(graph),
        allocator_(allocator),
        ref_info_array_(allocator->Adapter(kArenaAllocLSA)),
        heap_locations_(allocator->Adapter(kArenaAllocLSA)),
        aliasing_matrix_(allocator, kInitialAliasingMatrixBitVectorSize, true, kArenaAllocLSA),
        has_heap_stores_(false),
        has_volatile_(false),
        has_monitor_operations_(false),
        lse_type_(lse_type) {
    aliasing_matrix_.ClearAllBits();
  }

  ~HeapLocationCollector() {
    CleanUp();
  }

  void CleanUp() {
    heap_locations_.clear();
    STLDeleteContainerPointers(ref_info_array_.begin(), ref_info_array_.end());
    ref_info_array_.clear();
  }

  size_t CountPartialSingletons() const {
    return std::count_if(ref_info_array_.begin(),
                         ref_info_array_.end(),
                         [](ReferenceInfo* ri) { return ri->IsPartialSingleton(); });
  }

  size_t GetNumberOfHeapLocations() const {
    return heap_locations_.size();
  }

  HeapLocation* GetHeapLocation(size_t index) const {
    return heap_locations_[index];
  }

  size_t GetHeapLocationIndex(const HeapLocation* hl) const {
    auto res = std::find(heap_locations_.cbegin(), heap_locations_.cend(), hl);
    return std::distance(heap_locations_.cbegin(), res);
  }

  HInstruction* HuntForOriginalReference(HInstruction* ref) const {
    // An original reference can be transformed by instructions like:
    //   i0 NewArray
    //   i1 HInstruction(i0)  <-- NullCheck, BoundType, IntermediateAddress.
    //   i2 ArrayGet(i1, index)
    DCHECK(ref != nullptr);
    while (ref->IsNullCheck() || ref->IsBoundType() || ref->IsIntermediateAddress()) {
      ref = ref->InputAt(0);
    }
    return ref;
  }

  ReferenceInfo* FindReferenceInfoOf(HInstruction* ref) const {
    for (size_t i = 0; i < ref_info_array_.size(); i++) {
      ReferenceInfo* ref_info = ref_info_array_[i];
      if (ref_info->GetReference() == ref) {
        DCHECK_EQ(i, ref_info->GetPosition());
        return ref_info;
      }
    }
    return nullptr;
  }

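  // Returns the index in heap_locations_ of the location accessed by the given
  // field access, e.g. the location (ref_info(object), field type, field
  // offset) for a load or store of `object.field`.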
  size_t GetFieldHeapLocation(HInstruction* object, const FieldInfo* field) const {
    DCHECK(object != nullptr);
    DCHECK(field != nullptr);
    return FindHeapLocationIndex(FindReferenceInfoOf(HuntForOriginalReference(object)),
                                 field->GetFieldType(),
                                 field->GetFieldOffset().SizeValue(),
                                 nullptr,
                                 HeapLocation::kScalar,
                                 field->GetDeclaringClassDefIndex());
  }

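  // Returns the index in heap_locations_ of the location accessed by the given
  // array instruction, e.g. the location (ref_info(array), component type,
  // index) for an HArrayGet/HArraySet of `array[index]`.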
  size_t GetArrayHeapLocation(HInstruction* instruction) const {
    DCHECK(instruction != nullptr);
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    DataType::Type type = instruction->GetType();
    size_t vector_length = HeapLocation::kScalar;
    if (instruction->IsArraySet()) {
      type = instruction->AsArraySet()->GetComponentType();
    } else if (instruction->IsVecStore() ||
               instruction->IsVecLoad()) {
      HVecOperation* vec_op = instruction->AsVecOperation();
      type = vec_op->GetPackedType();
      vector_length = vec_op->GetVectorLength();
    } else {
      DCHECK(instruction->IsArrayGet());
    }
    return FindHeapLocationIndex(FindReferenceInfoOf(HuntForOriginalReference(array)),
                                 type,
                                 HeapLocation::kInvalidFieldOffset,
                                 index,
                                 vector_length,
                                 HeapLocation::kDeclaringClassDefIndexForArrays);
  }

  bool HasHeapStores() const {
    return has_heap_stores_;
  }

  bool HasVolatile() const {
    return has_volatile_;
  }

  bool HasMonitorOps() const {
    return has_monitor_operations_;
  }

  // Find and return the heap location index in heap_locations_.
  // NOTE: When heap locations are created, potentially aliasing/overlapping
  // accesses are given different indexes. This find function also
  // doesn't take aliasing/overlapping into account. For example,
  // this function returns three different indexes for:
  // - ref_info=array, index=i, vector_length=kScalar;
  // - ref_info=array, index=i, vector_length=2;
  // - ref_info=array, index=i, vector_length=4;
  // In later analysis, ComputeMayAlias() and MayAlias() compute and tell whether
  // these indexes alias.
  size_t FindHeapLocationIndex(ReferenceInfo* ref_info,
                               DataType::Type type,
                               size_t offset,
                               HInstruction* index,
                               size_t vector_length,
                               int16_t declaring_class_def_index) const {
    DataType::Type lookup_type = DataType::ToSigned(type);
    for (size_t i = 0; i < heap_locations_.size(); i++) {
      HeapLocation* loc = heap_locations_[i];
      if (loc->GetReferenceInfo() == ref_info &&
          loc->GetType() == lookup_type &&
          loc->GetOffset() == offset &&
          loc->GetIndex() == index &&
          loc->GetVectorLength() == vector_length &&
          loc->GetDeclaringClassDefIndex() == declaring_class_def_index) {
        return i;
      }
    }
    return kHeapLocationNotFound;
  }

  bool InstructionEligibleForLSERemoval(HInstruction* inst) const;

  // Get some estimated statistics based on our analysis.
  void DumpReferenceStats(OptimizingCompilerStats* stats);

  // Returns true if heap_locations_[index1] and heap_locations_[index2] may alias.
  bool MayAlias(size_t index1, size_t index2) const {
    if (index1 < index2) {
      return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index1, index2));
    } else if (index1 > index2) {
      return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index2, index1));
    } else {
      DCHECK(false) << "index1 and index2 are expected to be different";
      return true;
    }
  }

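  // Computes aliasing information for every unordered pair of heap locations
  // and stores it as one bit per pair. The (i, j) visiting order below matches
  // the linear bit layout computed by AliasingMatrixPosition(), which
  // CheckedAliasingMatrixPosition() verifies in debug builds.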
  void BuildAliasingMatrix() {
    const size_t number_of_locations = heap_locations_.size();
    if (number_of_locations == 0) {
      return;
    }
    size_t pos = 0;
    // Compute aliasing info between every pair of different heap locations.
    // Save the result in a matrix represented as a BitVector.
    for (size_t i = 0; i < number_of_locations - 1; i++) {
      for (size_t j = i + 1; j < number_of_locations; j++) {
        if (ComputeMayAlias(i, j)) {
          aliasing_matrix_.SetBit(CheckedAliasingMatrixPosition(i, j, pos));
        }
        pos++;
      }
    }
  }

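  // Returns whether the two references may point to the same object. For
  // example (illustrative only): two distinct singletons never alias, and a
  // new-instance never aliases a reference that already existed at the point
  // of the allocation.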
  static bool CanReferencesAlias(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) {
    if (ref_info1 == ref_info2) {
      return true;
    } else if (ref_info1->IsSingleton()) {
      return false;
    } else if (ref_info2->IsSingleton()) {
      return false;
    } else if (!MayAliasWithPreexistenceChecking(ref_info1, ref_info2) ||
        !MayAliasWithPreexistenceChecking(ref_info2, ref_info1)) {
      return false;
    }
    return true;
  }

 private:
  // An allocation cannot alias with a name which already exists at the point
  // of the allocation, such as a parameter or a load happening before the allocation.
  static bool MayAliasWithPreexistenceChecking(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) {
    if (ref_info1->GetReference()->IsNewInstance() || ref_info1->GetReference()->IsNewArray()) {
      // Any reference that can alias with the allocation must appear after it in the block/in
      // the block's successors. In reverse post order, those instructions will be visited after
      // the allocation.
      return ref_info2->GetPosition() >= ref_info1->GetPosition();
    }
    return true;
  }

  bool CanArrayElementsAlias(const HInstruction* idx1,
                             const size_t vector_length1,
                             const HInstruction* idx2,
                             const size_t vector_length2) const;

  // `index1` and `index2` are indices in the array of collected heap locations.
  // Returns the position in the bit vector that tracks whether the two heap
  // locations may alias.
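  // For example, with 4 locations the bits are laid out linearly as
  // (0,1) (0,2) (0,3) (1,2) (1,3) (2,3), so (index1, index2) = (1, 3) maps to
  // 4 * 1 - (2 * 1) / 2 + (3 - 1 - 1) = 4, the position of the (1,3) bit.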
  size_t AliasingMatrixPosition(size_t index1, size_t index2) const {
    DCHECK(index2 > index1);
    const size_t number_of_locations = heap_locations_.size();
    // It's (num_of_locations - 1) + ... + (num_of_locations - index1) + (index2 - index1 - 1).
    return (number_of_locations * index1 - (1 + index1) * index1 / 2 + (index2 - index1 - 1));
  }

  // An additional position is passed in to make sure the calculated position is correct.
  size_t CheckedAliasingMatrixPosition(size_t index1, size_t index2, size_t position) {
    size_t calculated_position = AliasingMatrixPosition(index1, index2);
    DCHECK_EQ(calculated_position, position);
    return calculated_position;
  }

  // Compute whether two heap locations may alias each other.
  bool ComputeMayAlias(size_t index1, size_t index2) const {
    DCHECK_NE(index1, index2);
    HeapLocation* loc1 = heap_locations_[index1];
    HeapLocation* loc2 = heap_locations_[index2];
    if (loc1->GetOffset() != loc2->GetOffset()) {
      // Either two different instance fields, or one is an instance
      // field and the other is array data.
      return false;
    }
    if (loc1->GetDeclaringClassDefIndex() != loc2->GetDeclaringClassDefIndex()) {
      // Different types.
      return false;
    }
    if (!CanReferencesAlias(loc1->GetReferenceInfo(), loc2->GetReferenceInfo())) {
      return false;
    }
    if (loc1->IsArray() && loc2->IsArray()) {
      HInstruction* idx1 = loc1->GetIndex();
      HInstruction* idx2 = loc2->GetIndex();
      size_t vector_length1 = loc1->GetVectorLength();
      size_t vector_length2 = loc2->GetVectorLength();
      if (!CanArrayElementsAlias(idx1, vector_length1, idx2, vector_length2)) {
        return false;
      }
    }
    loc1->SetHasAliasedLocations(true);
    loc2->SetHasAliasedLocations(true);
    return true;
  }

  ReferenceInfo* GetOrCreateReferenceInfo(HInstruction* instruction) {
    ReferenceInfo* ref_info = FindReferenceInfoOf(instruction);
    if (ref_info == nullptr) {
      size_t pos = ref_info_array_.size();
      ref_info = new (allocator_) ReferenceInfo(instruction, allocator_, pos, lse_type_);
      ref_info_array_.push_back(ref_info);
    }
    return ref_info;
  }

  void CreateReferenceInfoForReferenceType(HInstruction* instruction) {
    if (instruction->GetType() != DataType::Type::kReference) {
      return;
    }
    DCHECK(FindReferenceInfoOf(instruction) == nullptr);
    GetOrCreateReferenceInfo(instruction);
  }

  void MaybeCreateHeapLocation(HInstruction* ref,
                               DataType::Type type,
                               size_t offset,
                               HInstruction* index,
                               size_t vector_length,
                               int16_t declaring_class_def_index) {
    HInstruction* original_ref = HuntForOriginalReference(ref);
    ReferenceInfo* ref_info = GetOrCreateReferenceInfo(original_ref);
    size_t heap_location_idx = FindHeapLocationIndex(
        ref_info, type, offset, index, vector_length, declaring_class_def_index);
    if (heap_location_idx == kHeapLocationNotFound) {
      HeapLocation* heap_loc = new (allocator_)
          HeapLocation(ref_info, type, offset, index, vector_length, declaring_class_def_index);
      heap_locations_.push_back(heap_loc);
    }
  }

  void VisitFieldAccess(HInstruction* ref, const FieldInfo& field_info) {
    if (field_info.IsVolatile()) {
      has_volatile_ = true;
    }
    DataType::Type type = field_info.GetFieldType();
    const uint16_t declaring_class_def_index = field_info.GetDeclaringClassDefIndex();
    const size_t offset = field_info.GetFieldOffset().SizeValue();
    MaybeCreateHeapLocation(ref,
                            type,
                            offset,
                            nullptr,
                            HeapLocation::kScalar,
                            declaring_class_def_index);
  }

  void VisitArrayAccess(HInstruction* array,
                        HInstruction* index,
                        DataType::Type type,
                        size_t vector_length) {
    MaybeCreateHeapLocation(array,
                            type,
                            HeapLocation::kInvalidFieldOffset,
                            index,
                            vector_length,
                            HeapLocation::kDeclaringClassDefIndexForArrays);
  }

  void VisitPredicatedInstanceFieldGet(HPredicatedInstanceFieldGet* instruction) override {
    VisitFieldAccess(instruction->GetTarget(), instruction->GetFieldInfo());
    CreateReferenceInfoForReferenceType(instruction);
  }
  void VisitInstanceFieldGet(HInstanceFieldGet* instruction) override {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitInstanceFieldSet(HInstanceFieldSet* instruction) override {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    has_heap_stores_ = true;
  }

  void VisitStaticFieldGet(HStaticFieldGet* instruction) override {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitStaticFieldSet(HStaticFieldSet* instruction) override {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    has_heap_stores_ = true;
  }

  // We intentionally don't collect HUnresolvedInstanceField/HUnresolvedStaticField accesses
  // since we cannot accurately track the fields.

  void VisitArrayGet(HArrayGet* instruction) override {
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    DataType::Type type = instruction->GetType();
    VisitArrayAccess(array, index, type, HeapLocation::kScalar);
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitArraySet(HArraySet* instruction) override {
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    DataType::Type type = instruction->GetComponentType();
    VisitArrayAccess(array, index, type, HeapLocation::kScalar);
    has_heap_stores_ = true;
  }

  void VisitVecLoad(HVecLoad* instruction) override {
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    DataType::Type type = instruction->GetPackedType();
    VisitArrayAccess(array, index, type, instruction->GetVectorLength());
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitVecStore(HVecStore* instruction) override {
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    DataType::Type type = instruction->GetPackedType();
    VisitArrayAccess(array, index, type, instruction->GetVectorLength());
    has_heap_stores_ = true;
  }

  void VisitInstruction(HInstruction* instruction) override {
    // Any new-instance or new-array cannot alias with references that
    // pre-exist the new-instance/new-array. We append entries into
    // ref_info_array_, which keeps track of the order of creation of
    // reference values, since we visit the blocks in reverse post order.
    //
    // By default, VisitXXX() (including VisitPhi()) calls VisitInstruction(),
    // unless VisitXXX() is overridden. VisitInstanceFieldGet() etc. above
    // also call CreateReferenceInfoForReferenceType() explicitly.
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitMonitorOperation(HMonitorOperation* monitor ATTRIBUTE_UNUSED) override {
    has_monitor_operations_ = true;
  }

  ScopedArenaAllocator* allocator_;
  ScopedArenaVector<ReferenceInfo*> ref_info_array_;   // All references used for heap accesses.
  ScopedArenaVector<HeapLocation*> heap_locations_;    // All heap locations.
  ArenaBitVector aliasing_matrix_;    // Aliasing info between each pair of locations.
  bool has_heap_stores_;    // If there are no heap stores, LSE acts as GVN with better
                            // alias analysis and won't be as effective.
  bool has_volatile_;       // If there are volatile field accesses.
  bool has_monitor_operations_;    // If there are monitor operations.
  LoadStoreAnalysisType lse_type_;

  DISALLOW_COPY_AND_ASSIGN(HeapLocationCollector);
};

class LoadStoreAnalysis {
 public:
  // lse_type controls whether we should keep track of escapes at a per-block level for
  // partial LSE.
  explicit LoadStoreAnalysis(HGraph* graph,
                             OptimizingCompilerStats* stats,
                             ScopedArenaAllocator* local_allocator,
                             LoadStoreAnalysisType lse_type)
      : graph_(graph),
        stats_(stats),
        heap_location_collector_(
            graph,
            local_allocator,
            ExecutionSubgraph::CanAnalyse(graph_) ? lse_type : LoadStoreAnalysisType::kBasic) {}

  const HeapLocationCollector& GetHeapLocationCollector() const {
    return heap_location_collector_;
  }

  bool Run();

 private:
  HGraph* graph_;
  OptimizingCompilerStats* stats_;
  HeapLocationCollector heap_location_collector_;

  DISALLOW_COPY_AND_ASSIGN(LoadStoreAnalysis);
};
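
// A minimal usage sketch (illustrative only; `graph`, `stats` and `allocator`
// are assumed to come from the surrounding compiler pass):
//
//   LoadStoreAnalysis lsa(graph, stats, allocator, LoadStoreAnalysisType::kFull);
//   if (lsa.Run()) {
//     const HeapLocationCollector& hlc = lsa.GetHeapLocationCollector();
//     for (size_t i = 0; i < hlc.GetNumberOfHeapLocations(); ++i) {
//       HeapLocation* loc = hlc.GetHeapLocation(i);
//       // Query loc->GetReferenceInfo(), hlc.MayAlias(i, j), etc.
//     }
//   }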

}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_LOAD_STORE_ANALYSIS_H_