// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_MEMORY_OPTIMIZER_H_
#define V8_COMPILER_MEMORY_OPTIMIZER_H_

#include "src/compiler/graph-assembler.h"
#include "src/compiler/memory-lowering.h"
#include "src/zone/zone-containers.h"

namespace v8 {
namespace internal {

class TickCounter;

namespace compiler {

class JSGraph;
class Graph;

// NodeIds are identifying numbers for nodes that can be used to index
// auxiliary out-of-line data associated with each node.
using NodeId = uint32_t;

// Performs allocation folding and store write barrier elimination
// implicitly, while lowering all simplified memory access and allocation
// related nodes (i.e. Allocate, LoadField, StoreField and friends) to machine
// operators.
class MemoryOptimizer final {
 public:
  MemoryOptimizer(JSGraph* jsgraph, Zone* zone,
                  MemoryLowering::AllocationFolding allocation_folding,
                  const char* function_debug_name, TickCounter* tick_counter);
  ~MemoryOptimizer() = default;

  void Optimize();

 private:
  using AllocationState = MemoryLowering::AllocationState;

  // An array of allocation states used to collect states on merges.
  using AllocationStates = ZoneVector<AllocationState const*>;

  // We thread through tokens to represent the current state on a given effect
  // path through the graph.
  struct Token {
    Node* node;
    AllocationState const* state;
  };

  void VisitNode(Node*, AllocationState const*);
  void VisitAllocateRaw(Node*, AllocationState const*);
  void VisitCall(Node*, AllocationState const*);
  void VisitLoadFromObject(Node*, AllocationState const*);
  void VisitLoadElement(Node*, AllocationState const*);
  void VisitLoadField(Node*, AllocationState const*);
  void VisitStoreToObject(Node*, AllocationState const*);
  void VisitStoreElement(Node*, AllocationState const*);
  void VisitStoreField(Node*, AllocationState const*);
  void VisitStore(Node*, AllocationState const*);
  void VisitOtherEffect(Node*, AllocationState const*);

  AllocationState const* MergeStates(AllocationStates const& states);

  void EnqueueMerge(Node*, int, AllocationState const*);
  void EnqueueUses(Node*, AllocationState const*);
  void EnqueueUse(Node*, int, AllocationState const*);

  void ReplaceUsesAndKillNode(Node* node, Node* replacement);

  // Returns true if the AllocationType of the current AllocateRaw node that we
  // are visiting needs to be updated to kOld, due to propagation of tenuring
  // from outer to inner allocations.
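  //
  // Illustrative sketch (node names are placeholders and the node shapes are
  // simplified, not taken from an actual graph): when the allocation we are
  // visiting is stored into an allocation that is already old-space, e.g.
  //
  //   parent = AllocateRaw[kOld](...)
  //   child  = AllocateRaw[kYoung](...)
  //   StoreField(parent, ..., child)
  //
  // the child allocation is retenured to kOld as well, presumably so that the
  // old-space parent does not end up holding a pointer into new space right
  // after the store.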
  bool AllocationTypeNeedsUpdateToOld(Node* const user, const Edge edge);

  AllocationState const* empty_state() const { return empty_state_; }
  MemoryLowering* memory_lowering() { return &memory_lowering_; }
  Graph* graph() const;
  JSGraph* jsgraph() const { return jsgraph_; }
  Zone* zone() const { return zone_; }

  JSGraphAssembler graph_assembler_;
  MemoryLowering memory_lowering_;
  JSGraph* jsgraph_;
  AllocationState const* const empty_state_;
  ZoneMap<NodeId, AllocationStates> pending_;
  ZoneQueue<Token> tokens_;
  Zone* const zone_;
  TickCounter* const tick_counter_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(MemoryOptimizer);
};

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_MEMORY_OPTIMIZER_H_
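
// Usage sketch (illustrative only, not part of the header proper; the
// surrounding variable names and the kDoAllocationFolding enumerator are
// assumptions about MemoryLowering::AllocationFolding, not declarations made
// in this file): the optimizer is constructed once per graph and run via
// Optimize(), roughly as follows:
//
//   MemoryOptimizer optimizer(
//       jsgraph, temp_zone,
//       MemoryLowering::AllocationFolding::kDoAllocationFolding,
//       function_debug_name, tick_counter);
//   optimizer.Optimize();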