// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_MEMORY_OPTIMIZER_H_
#define V8_COMPILER_MEMORY_OPTIMIZER_H_

#include "src/compiler/graph-assembler.h"
#include "src/zone/zone-containers.h"

namespace v8 {
namespace internal {
namespace compiler {

// Forward declarations.
class CommonOperatorBuilder;
struct ElementAccess;
class Graph;
class JSGraph;
class MachineOperatorBuilder;
class Node;
class Operator;

// NodeIds are identifying numbers for nodes that can be used to index
// auxiliary out-of-line data associated with each node.
typedef uint32_t NodeId;

// Lowers all simplified memory access and allocation related nodes (i.e.
// Allocate, LoadField, StoreField and friends) to machine operators.
// Performs allocation folding and store write barrier elimination
// implicitly.
class MemoryOptimizer final {
 public:
  enum class AllocationFolding { kDoAllocationFolding, kDontAllocationFolding };

  MemoryOptimizer(JSGraph* jsgraph, Zone* zone,
                  PoisoningMitigationLevel poisoning_level,
                  AllocationFolding allocation_folding);
  ~MemoryOptimizer() {}

  void Optimize();

 private:
  // An allocation group represents a set of allocations that have been folded
  // together.
  class AllocationGroup final : public ZoneObject {
   public:
    AllocationGroup(Node* node, PretenureFlag pretenure, Zone* zone);
    AllocationGroup(Node* node, PretenureFlag pretenure, Node* size,
                    Zone* zone);
    ~AllocationGroup() {}

    void Add(Node* object);
    bool Contains(Node* object) const;
    bool IsNewSpaceAllocation() const { return pretenure() == NOT_TENURED; }

    PretenureFlag pretenure() const { return pretenure_; }
    Node* size() const { return size_; }

   private:
    ZoneSet<NodeId> node_ids_;
    PretenureFlag const pretenure_;
    Node* const size_;

    DISALLOW_IMPLICIT_CONSTRUCTORS(AllocationGroup);
  };

  // An allocation state is propagated on the effect paths through the graph.
  class AllocationState final : public ZoneObject {
   public:
    static AllocationState const* Empty(Zone* zone) {
      return new (zone) AllocationState();
    }
    static AllocationState const* Closed(AllocationGroup* group, Zone* zone) {
      return new (zone) AllocationState(group);
    }
    static AllocationState const* Open(AllocationGroup* group, int size,
                                       Node* top, Zone* zone) {
      return new (zone) AllocationState(group, size, top);
    }

    bool IsNewSpaceAllocation() const;

    AllocationGroup* group() const { return group_; }
    Node* top() const { return top_; }
    int size() const { return size_; }

   private:
    AllocationState();
    explicit AllocationState(AllocationGroup* group);
    AllocationState(AllocationGroup* group, int size, Node* top);

    AllocationGroup* const group_;
    // The upper bound of the combined allocated object size on the current
    // path (max int if allocation folding is impossible on this path).
    int const size_;
    Node* const top_;

    DISALLOW_COPY_AND_ASSIGN(AllocationState);
  };
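
  // Illustrative sketch of how the state evolves along a single effect
  // chain when allocation folding is enabled (the actual rewrites live in
  // memory-optimizer.cc; sizes and node names below are made up):
  //
  //   a = AllocateRaw(16)   // Open {group, size = 16, top}
  //   StoreField(a, ...)    // state unchanged; 'a' is in the current
  //                         // new-space group, so the store needs no
  //                         // write barrier
  //   b = AllocateRaw(24)   // folded into the group: Open {group, 40, top'}
  //   Call(f)               // the callee may allocate, so the path
  //                         // continues with the Empty state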

  // An array of allocation states used to collect states on merges.
  typedef ZoneVector<AllocationState const*> AllocationStates;

  // We thread through tokens to represent the current state on a given effect
  // path through the graph.
  struct Token {
    Node* node;
    AllocationState const* state;
  };

  void VisitNode(Node*, AllocationState const*);
  void VisitAllocateRaw(Node*, AllocationState const*);
  void VisitCall(Node*, AllocationState const*);
  void VisitCallWithCallerSavedRegisters(Node*, AllocationState const*);
  void VisitLoadElement(Node*, AllocationState const*);
  void VisitLoadField(Node*, AllocationState const*);
  void VisitStoreElement(Node*, AllocationState const*);
  void VisitStoreField(Node*, AllocationState const*);
  void VisitOtherEffect(Node*, AllocationState const*);

  Node* ComputeIndex(ElementAccess const&, Node*);
  WriteBarrierKind ComputeWriteBarrierKind(Node* object,
                                           AllocationState const* state,
                                           WriteBarrierKind);

  AllocationState const* MergeStates(AllocationStates const& states);

  void EnqueueMerge(Node*, int, AllocationState const*);
  void EnqueueUses(Node*, AllocationState const*);
  void EnqueueUse(Node*, int, AllocationState const*);

  bool NeedsPoisoning(LoadSensitivity load_sensitivity) const;

  AllocationState const* empty_state() const { return empty_state_; }
  Graph* graph() const;
  Isolate* isolate() const;
  JSGraph* jsgraph() const { return jsgraph_; }
  CommonOperatorBuilder* common() const;
  MachineOperatorBuilder* machine() const;
  Zone* zone() const { return zone_; }
  GraphAssembler* gasm() { return &graph_assembler_; }

  SetOncePointer<const Operator> allocate_operator_;
  JSGraph* const jsgraph_;
  AllocationState const* const empty_state_;
  ZoneMap<NodeId, AllocationStates> pending_;
  ZoneQueue<Token> tokens_;
  Zone* const zone_;
  GraphAssembler graph_assembler_;
  PoisoningMitigationLevel poisoning_level_;
  AllocationFolding allocation_folding_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(MemoryOptimizer);
};

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_MEMORY_OPTIMIZER_H_
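
// Usage sketch (illustrative; variable names are assumptions, not taken
// from this header): the optimizer is typically run as a late pipeline
// phase, along the lines of
//
//   MemoryOptimizer optimizer(
//       jsgraph, temp_zone, poisoning_level,
//       MemoryOptimizer::AllocationFolding::kDoAllocationFolding);
//   optimizer.Optimize();
//
// where Optimize() walks the effect chains of the graph, threading an
// AllocationState to each visited node via the Token worklist above and
// merging states at effect merges.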