// Copyright 2019 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_MEMORY_LOWERING_H_
#define V8_COMPILER_MEMORY_LOWERING_H_

#include "src/compiler/graph-assembler.h"
#include "src/compiler/graph-reducer.h"

namespace v8 {
namespace internal {
namespace compiler {

// Forward declarations.
class CommonOperatorBuilder;
struct ElementAccess;
class Graph;
class JSGraph;
class MachineOperatorBuilder;
class Node;
class Operator;

// Provides operations to lower all simplified memory access and allocation
// related nodes (i.e. Allocate, LoadField, StoreField and friends) to machine
// operators.
class MemoryLowering final : public Reducer {
 public:
  // Controls whether consecutive allocations may be folded into one larger
  // allocation (see AllocationState/AllocationGroup below).
  enum class AllocationFolding { kDoAllocationFolding, kDontAllocationFolding };
  class AllocationGroup;

  // An allocation state is propagated on the effect paths through the graph.
  // Zone-allocated and immutable; constructed only through the static
  // factory methods below (constructors are private, Zone is a friend).
  class AllocationState final : public ZoneObject {
   public:
    AllocationState(const AllocationState&) = delete;
    AllocationState& operator=(const AllocationState&) = delete;

    // State with no allocation group (no allocation seen on this path yet).
    static AllocationState const* Empty(Zone* zone) {
      return zone->New<AllocationState>();
    }
    // State whose allocation group can no longer be extended; only the
    // group and the dominating effect are tracked.
    static AllocationState const* Closed(AllocationGroup* group, Node* effect,
                                         Zone* zone) {
      return zone->New<AllocationState>(group, effect);
    }
    // State whose allocation group is still open for folding: additionally
    // tracks the combined size so far and the current allocation top node.
    static AllocationState const* Open(AllocationGroup* group, intptr_t size,
                                       Node* top, Node* effect, Zone* zone) {
      return zone->New<AllocationState>(group, size, top, effect);
    }

    bool IsYoungGenerationAllocation() const;

    AllocationGroup* group() const { return group_; }
    Node* top() const { return top_; }
    Node* effect() const { return effect_; }
    intptr_t size() const { return size_; }

   private:
    // Zone::New needs access to the private constructors.
    friend Zone;

    AllocationState();
    explicit AllocationState(AllocationGroup* group, Node* effect);
    AllocationState(AllocationGroup* group, intptr_t size, Node* top,
                    Node* effect);

    AllocationGroup* const group_;
    // The upper bound of the combined allocated object size on the current
    // path (max int if allocation folding is impossible on this path).
    intptr_t const size_;
    Node* const top_;
    Node* const effect_;
  };

  // Callback invoked when a write-barrier elimination assertion fails; the
  // default (see constructor below) simply hits UNREACHABLE().
  using WriteBarrierAssertFailedCallback = std::function<void(
      Node* node, Node* object, const char* name, Zone* temp_zone)>;

  MemoryLowering(
      JSGraph* jsgraph, Zone* zone, JSGraphAssembler* graph_assembler,
      AllocationFolding allocation_folding =
          AllocationFolding::kDontAllocationFolding,
      WriteBarrierAssertFailedCallback callback = [](Node*, Node*, const char*,
                                                     Zone*) { UNREACHABLE(); },
      const char* function_debug_name = nullptr);

  // NOTE(review): reports "MemoryReducer" (not "MemoryLowering") — presumably
  // kept for continuity of reducer traces; confirm before renaming.
  const char* reducer_name() const override { return "MemoryReducer"; }

  // Perform memory lowering reduction on the given Node.
  Reduction Reduce(Node* node) override;

  // Specific reducers for each optype to enable keeping track of
  // AllocationState by the MemoryOptimizer.
  Reduction ReduceAllocateRaw(Node* node, AllocationType allocation_type,
                              AllowLargeObjects allow_large_objects,
                              AllocationState const** state);
  Reduction ReduceLoadFromObject(Node* node);
  Reduction ReduceLoadElement(Node* node);
  Reduction ReduceLoadField(Node* node);
  Reduction ReduceStoreToObject(Node* node,
                                AllocationState const* state = nullptr);
  Reduction ReduceStoreElement(Node* node,
                               AllocationState const* state = nullptr);
  Reduction ReduceStoreField(Node* node,
                             AllocationState const* state = nullptr);
  Reduction ReduceStore(Node* node, AllocationState const* state = nullptr);

 private:
  Reduction ReduceAllocateRaw(Node* node);
  WriteBarrierKind ComputeWriteBarrierKind(Node* node, Node* object,
                                           Node* value,
                                           AllocationState const* state,
                                           WriteBarrierKind);
  Node* DecodeExternalPointer(Node* encoded_pointer, ExternalPointerTag tag);
  Reduction ReduceLoadMap(Node* encoded_pointer);
  Node* ComputeIndex(ElementAccess const& access, Node* node);
  void EnsureAllocateOperator();
  Node* GetWasmInstanceNode();

  // Convenience accessors for the cached, non-owning compiler components.
  Graph* graph() const { return graph_; }
  Isolate* isolate() const { return isolate_; }
  Zone* zone() const { return zone_; }
  inline Zone* graph_zone() const;
  CommonOperatorBuilder* common() const { return common_; }
  MachineOperatorBuilder* machine() const { return machine_; }
  JSGraphAssembler* gasm() const { return graph_assembler_; }

  // Lazily initialized (see EnsureAllocateOperator/GetWasmInstanceNode).
  SetOncePointer<const Operator> allocate_operator_;
  SetOncePointer<Node> wasm_instance_node_;
  // Non-owning pointers supplied at construction time.
  Isolate* isolate_;
  Zone* zone_;
  Graph* graph_;
  CommonOperatorBuilder* common_;
  MachineOperatorBuilder* machine_;
  JSGraphAssembler* graph_assembler_;
  AllocationFolding allocation_folding_;
  WriteBarrierAssertFailedCallback write_barrier_assert_failed_;
  const char* function_debug_name_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(MemoryLowering);
};

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_MEMORY_LOWERING_H_