// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/store-store-elimination.h"

#include "src/codegen/tick-counter.h"
#include "src/compiler/all-nodes.h"
#include "src/compiler/common-operator.h"
#include "src/compiler/js-graph.h"
#include "src/compiler/node-properties.h"
#include "src/compiler/persistent-map.h"
#include "src/compiler/simplified-operator.h"
#include "src/zone/zone-containers.h"

namespace v8 {
namespace internal {
namespace compiler {

#define TRACE(fmt, ...)                                         \
  do {                                                          \
    if (FLAG_trace_store_elimination) {                         \
      PrintF("RedundantStoreFinder: " fmt "\n", ##__VA_ARGS__); \
    }                                                           \
  } while (false)
// CHECK_EXTRA is like CHECK, but has two or more arguments: a boolean
// expression, a format string, and any number of extra arguments. The boolean
// expression will be evaluated at runtime. If it evaluates to false, then an
// error message will be shown containing the condition, as well as the extra
// info formatted as with printf.
#define CHECK_EXTRA(condition, fmt, ...)                                      \
  do {                                                                        \
    if (V8_UNLIKELY(!(condition))) {                                          \
      FATAL("Check failed: %s. Extra info: " fmt, #condition, ##__VA_ARGS__); \
    }                                                                         \
  } while (false)
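// For example, a (hypothetical) use that includes the offending value in the
// failure message:
//   CHECK_EXTRA(access.offset >= 0, "negative offset %d", access.offset);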

#ifdef DEBUG
#define DCHECK_EXTRA(condition, fmt, ...) \
  CHECK_EXTRA(condition, fmt, ##__VA_ARGS__)
#else
#define DCHECK_EXTRA(condition, fmt, ...) ((void)0)
#endif

namespace {

using StoreOffset = uint32_t;

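// A (node id, offset) pair, identifying a store to a particular field of a
// particular node. Sets of these represent stores that are currently
// unobservable.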
struct UnobservableStore {
  NodeId id_;
  StoreOffset offset_;

  bool operator==(const UnobservableStore other) const {
    return (id_ == other.id_) && (offset_ == other.offset_);
  }

  bool operator<(const UnobservableStore other) const {
    return (id_ < other.id_) || (id_ == other.id_ && offset_ < other.offset_);
  }
};

size_t hash_value(const UnobservableStore& p) {
  return base::hash_combine(p.id_, p.offset_);
}

// Instances of UnobservablesSet are immutable. They represent either a set of
// UnobservableStores, or the "unvisited empty set".
//
// We apply some sharing to save memory. The class UnobservablesSet is only a
// pointer wide, and a copy does not use any heap (or temp_zone) memory. Most
// changes to an UnobservablesSet might allocate in the temp_zone.
//
// The size of an instance should be the size of a pointer, plus additional
// space in the zone in the case of non-unvisited UnobservablesSets. Copying
// an UnobservablesSet allocates no memory.
class UnobservablesSet final {
 private:
  using KeyT = UnobservableStore;
  using ValueT = bool;  // Emulates set semantics in the map.

  // The PersistentMap uses a special value to signify 'not present'. We use
  // a boolean value to emulate set semantics.
  static constexpr ValueT kNotPresent = false;
  static constexpr ValueT kPresent = true;

 public:
  using SetT = PersistentMap<KeyT, ValueT>;

  // Creates a new UnobservablesSet, with the null set.
  static UnobservablesSet Unvisited() { return UnobservablesSet(); }

  // Create a new empty UnobservablesSet. This allocates in the zone, and
  // can probably be optimized to use a global singleton.
  static UnobservablesSet VisitedEmpty(Zone* zone);
  UnobservablesSet(const UnobservablesSet& other) V8_NOEXCEPT = default;

  // Computes the intersection of two UnobservablesSets. If one of the sets is
  // empty, returns the empty set.
  UnobservablesSet Intersect(const UnobservablesSet& other,
                             const UnobservablesSet& empty, Zone* zone) const;

  // Returns a set that is the current one plus the observation obs passed as
  // a parameter. If obs is already in the set, we don't have to create a new
  // one.
  UnobservablesSet Add(UnobservableStore obs, Zone* zone) const;

  // Returns a set that is the current one minus all of the observations with
  // offset off. This is done by creating a new set and copying all
  // observations with different offsets.
  // This can probably be done better if the observations are stored first by
  // offset and then by node.
  // We remove all nodes with offset off since different nodes may alias one
  // another, and we currently don't have the means to know if two nodes are
  // definitely the same value.
  UnobservablesSet RemoveSameOffset(StoreOffset off, Zone* zone) const;

  const SetT* set() const { return set_; }

  bool IsUnvisited() const { return set_ == nullptr; }
  bool IsEmpty() const {
    return set_ == nullptr || set_->begin() == set_->end();
  }
  bool Contains(UnobservableStore obs) const {
    return set_ != nullptr && set_->Get(obs) != kNotPresent;
  }

  bool operator==(const UnobservablesSet& other) const {
    if (IsUnvisited() || other.IsUnvisited()) {
      return IsEmpty() && other.IsEmpty();
    } else {
      // Both pointers are guaranteed not to be nullptr.
      return *set() == *(other.set());
    }
  }

  bool operator!=(const UnobservablesSet& other) const {
    return !(*this == other);
  }

 private:
  UnobservablesSet() = default;
  explicit UnobservablesSet(const SetT* set) : set_(set) {}

  static SetT* NewSet(Zone* zone) {
    return zone->New<UnobservablesSet::SetT>(zone, kNotPresent);
  }

  static void SetAdd(SetT* set, const KeyT& key) { set->Set(key, kPresent); }
  static void SetErase(SetT* set, const KeyT& key) {
    set->Set(key, kNotPresent);
  }

  const SetT* set_ = nullptr;
};
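
// A brief usage sketch (hypothetical node id 1 and offset 8), assuming a
// Zone* zone is available:
//   UnobservablesSet set = UnobservablesSet::VisitedEmpty(zone);
//   set = set.Add({1, 8}, zone);
//   DCHECK(set.Contains({1, 8}));
//   DCHECK(set.RemoveSameOffset(8, zone).IsEmpty());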

// These definitions are here in order to please the linker, which in debug
// mode sometimes requires static constants to be defined in .cc files.
constexpr UnobservablesSet::ValueT UnobservablesSet::kNotPresent;
constexpr UnobservablesSet::ValueT UnobservablesSet::kPresent;

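// RedundantStoreFinder performs a backwards dataflow analysis over the effect
// and control chains: starting from End, it propagates sets of unobservable
// stores and keeps revisiting effectful nodes via a worklist until the sets
// reach a fixed point.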
class RedundantStoreFinder final {
 public:
  // Note that we initialize unobservable_ with js_graph->graph()->NodeCount()
  // empty (unvisited) sets.
  RedundantStoreFinder(JSGraph* js_graph, TickCounter* tick_counter,
                       Zone* temp_zone)
      : jsgraph_(js_graph),
        tick_counter_(tick_counter),
        temp_zone_(temp_zone),
        revisit_(temp_zone),
        in_revisit_(js_graph->graph()->NodeCount(), temp_zone),
        unobservable_(js_graph->graph()->NodeCount(),
                      UnobservablesSet::Unvisited(), temp_zone),
        to_remove_(temp_zone),
        unobservables_visited_empty_(
            UnobservablesSet::VisitedEmpty(temp_zone)) {}

  // Crawls from the end of the graph to the beginning, with the objective of
  // finding redundant stores.
  void Find();

  // Exposed for const correctness: the final list of redundant stores that
  // are replaced on the graph.
  const ZoneSet<Node*>& to_remove_const() { return to_remove_; }

 private:
  // Assumption: All effectful nodes are reachable from End via a sequence of
  // control edges, then a sequence of effect edges.
  // Visit goes through the control chain, visiting effectful nodes that it
  // encounters.
  void Visit(Node* node);

  // Marks effect inputs for visiting, if we are able to update this path of
  // the graph.
  void VisitEffectfulNode(Node* node);

  // Computes the intersection of the UnobservablesSets of all effect uses and
  // returns it. The resulting UnobservablesSet will never be null.
  UnobservablesSet RecomputeUseIntersection(Node* node);

  // Recomputes the unobservables-set for a node. Also marks superfluous nodes
  // for removal.
  UnobservablesSet RecomputeSet(Node* node, const UnobservablesSet& uses);

  // Returns true if node's opcode cannot observe StoreFields.
  static bool CannotObserveStoreField(Node* node);

  void MarkForRevisit(Node* node);
  bool HasBeenVisited(Node* node);

  // To safely cast an offset from a FieldAccess, which has a potentially
  // wider range (namely int).
  StoreOffset ToOffset(const FieldAccess& access) {
    DCHECK_GE(access.offset, 0);
    return static_cast<StoreOffset>(access.offset);
  }

  JSGraph* jsgraph() const { return jsgraph_; }
  Isolate* isolate() { return jsgraph()->isolate(); }
  Zone* temp_zone() const { return temp_zone_; }
  UnobservablesSet& unobservable_for_id(NodeId id) {
    DCHECK_LT(id, unobservable_.size());
    return unobservable_[id];
  }
  ZoneSet<Node*>& to_remove() { return to_remove_; }

  JSGraph* const jsgraph_;
  TickCounter* const tick_counter_;
  Zone* const temp_zone_;

  ZoneStack<Node*> revisit_;
  ZoneVector<bool> in_revisit_;

  // Maps node IDs to UnobservablesSets.
  ZoneVector<UnobservablesSet> unobservable_;
  ZoneSet<Node*> to_remove_;
  const UnobservablesSet unobservables_visited_empty_;
};

void RedundantStoreFinder::Find() {
  Visit(jsgraph()->graph()->end());

  while (!revisit_.empty()) {
    tick_counter_->TickAndMaybeEnterSafepoint();
    Node* next = revisit_.top();
    revisit_.pop();
    DCHECK_LT(next->id(), in_revisit_.size());
    in_revisit_[next->id()] = false;
    Visit(next);
  }

#ifdef DEBUG
  // Check that we visited all the StoreFields.
  AllNodes all(temp_zone(), jsgraph()->graph());
  for (Node* node : all.reachable) {
    if (node->op()->opcode() == IrOpcode::kStoreField) {
      DCHECK_EXTRA(HasBeenVisited(node), "#%d:%s", node->id(),
                   node->op()->mnemonic());
    }
  }
#endif
}

void RedundantStoreFinder::MarkForRevisit(Node* node) {
  DCHECK_LT(node->id(), in_revisit_.size());
  if (!in_revisit_[node->id()]) {
    revisit_.push(node);
    in_revisit_[node->id()] = true;
  }
}

bool RedundantStoreFinder::HasBeenVisited(Node* node) {
  return !unobservable_for_id(node->id()).IsUnvisited();
}

UnobservablesSet RedundantStoreFinder::RecomputeSet(
    Node* node, const UnobservablesSet& uses) {
  switch (node->op()->opcode()) {
    case IrOpcode::kStoreField: {
      Node* stored_to = node->InputAt(0);
      const FieldAccess& access = FieldAccessOf(node->op());
      StoreOffset offset = ToOffset(access);

      UnobservableStore observation = {stored_to->id(), offset};
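      // If every effect path from this store reaches another StoreField to
      // the same offset of the same node before anything can observe the
      // field, the store is redundant.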
      bool is_not_observable = uses.Contains(observation);

      if (is_not_observable) {
        TRACE(" #%d is StoreField[+%d,%s](#%d), unobservable", node->id(),
              offset,
              MachineReprToString(access.machine_type.representation()),
              stored_to->id());
        to_remove().insert(node);
        return uses;
      } else {
        TRACE(" #%d is StoreField[+%d,%s](#%d), observable, recording in set",
              node->id(), offset,
              MachineReprToString(access.machine_type.representation()),
              stored_to->id());
        return uses.Add(observation, temp_zone());
      }
    }
    case IrOpcode::kLoadField: {
      Node* loaded_from = node->InputAt(0);
      const FieldAccess& access = FieldAccessOf(node->op());
      StoreOffset offset = ToOffset(access);

      TRACE(
          " #%d is LoadField[+%d,%s](#%d), removing all offsets [+%d] from "
          "set",
          node->id(), offset,
          MachineReprToString(access.machine_type.representation()),
          loaded_from->id(), offset);

      return uses.RemoveSameOffset(offset, temp_zone());
    }
    default:
      if (CannotObserveStoreField(node)) {
        TRACE(" #%d:%s can observe nothing, set stays unchanged", node->id(),
              node->op()->mnemonic());
        return uses;
      } else {
        TRACE(" #%d:%s might observe anything, recording empty set",
              node->id(), node->op()->mnemonic());
        return unobservables_visited_empty_;
      }
  }
  UNREACHABLE();
}

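// Opcodes on this list are treated as unable to observe (or alias with) a
// StoreField, so encountering one of them leaves the set of unobservable
// stores unchanged.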
bool RedundantStoreFinder::CannotObserveStoreField(Node* node) {
  IrOpcode::Value opcode = node->opcode();
  return opcode == IrOpcode::kLoadElement || opcode == IrOpcode::kLoad ||
         opcode == IrOpcode::kStore || opcode == IrOpcode::kEffectPhi ||
         opcode == IrOpcode::kStoreElement ||
         opcode == IrOpcode::kUnsafePointerAdd || opcode == IrOpcode::kRetain;
}

void RedundantStoreFinder::Visit(Node* node) {
  if (!HasBeenVisited(node)) {
    for (int i = 0; i < node->op()->ControlInputCount(); i++) {
      Node* control_input = NodeProperties::GetControlInput(node, i);
      if (!HasBeenVisited(control_input)) {
        MarkForRevisit(control_input);
      }
    }
  }

  bool is_effectful = node->op()->EffectInputCount() >= 1;
  if (is_effectful) {
    // Mark all effect inputs for revisiting (if they might have stale state).
    VisitEffectfulNode(node);
    DCHECK(HasBeenVisited(node));
  } else if (!HasBeenVisited(node)) {
    // Mark as visited.
    unobservable_for_id(node->id()) = unobservables_visited_empty_;
  }
}

void RedundantStoreFinder::VisitEffectfulNode(Node* node) {
  if (HasBeenVisited(node)) {
    TRACE("- Revisiting: #%d:%s", node->id(), node->op()->mnemonic());
  }
  UnobservablesSet after_set = RecomputeUseIntersection(node);
  UnobservablesSet before_set = RecomputeSet(node, after_set);
  DCHECK(!before_set.IsUnvisited());

  UnobservablesSet stores_for_node = unobservable_for_id(node->id());
  bool cur_set_changed =
      stores_for_node.IsUnvisited() || stores_for_node != before_set;
  if (!cur_set_changed) {
    // We will not be able to update the part of this chain above any more.
    // Exit.
    TRACE("+ No change: stabilized. Not visiting effect inputs.");
  } else {
    unobservable_for_id(node->id()) = before_set;

    // Mark effect inputs for visiting.
    for (int i = 0; i < node->op()->EffectInputCount(); i++) {
      Node* input = NodeProperties::GetEffectInput(node, i);
      TRACE(" marking #%d:%s for revisit", input->id(),
            input->op()->mnemonic());
      MarkForRevisit(input);
    }
  }
}

UnobservablesSet RedundantStoreFinder::RecomputeUseIntersection(Node* node) {
  // If there are no effect uses, we can bail out early.
  if (node->op()->EffectOutputCount() == 0) {
    IrOpcode::Value opcode = node->opcode();
    // List of opcodes that may end this effect chain. The opcodes are not
    // important to the soundness of this optimization; this serves as a
    // general check. Add opcodes to this list as it suits you.
    //
    // Everything is observable after these opcodes; return the empty set.
    DCHECK_EXTRA(
        opcode == IrOpcode::kReturn || opcode == IrOpcode::kTerminate ||
            opcode == IrOpcode::kDeoptimize || opcode == IrOpcode::kThrow ||
            opcode == IrOpcode::kTailCall,
        "for #%d:%s", node->id(), node->op()->mnemonic());
    USE(opcode);

    return unobservables_visited_empty_;
  }

  // {first} == true indicates that we haven't looked at any elements yet.
  // {first} == false indicates that cur_set is the intersection of the sets
  // seen so far (at least one).
  bool first = true;
  UnobservablesSet cur_set = UnobservablesSet::Unvisited();  // irrelevant
  for (Edge edge : node->use_edges()) {
    if (!NodeProperties::IsEffectEdge(edge)) {
      continue;
    }

    // Intersect with the new use node.
    Node* use = edge.from();
    UnobservablesSet new_set = unobservable_for_id(use->id());
    if (first) {
      first = false;
      cur_set = new_set;
      if (cur_set.IsUnvisited()) {
        cur_set = unobservables_visited_empty_;
      }
    } else {
      cur_set = cur_set.Intersect(new_set, unobservables_visited_empty_,
                                  temp_zone());
    }

    // Break fast for the empty set since the intersection will always be
    // empty.
    if (cur_set.IsEmpty()) {
      break;
    }
  }

  DCHECK(!cur_set.IsUnvisited());
  return cur_set;
}

UnobservablesSet UnobservablesSet::VisitedEmpty(Zone* zone) {
  return UnobservablesSet(NewSet(zone));
}

UnobservablesSet UnobservablesSet::Intersect(const UnobservablesSet& other,
                                             const UnobservablesSet& empty,
                                             Zone* zone) const {
  if (IsEmpty() || other.IsEmpty()) return empty;

  UnobservablesSet::SetT* intersection = NewSet(zone);
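  // Zip yields (key, value-in-this-set, value-in-other-set) triples; a store
  // belongs to the intersection exactly when it is present (true) in both
  // sets.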
  for (const auto& triple : set()->Zip(*other.set())) {
    if (std::get<1>(triple) && std::get<2>(triple)) {
      intersection->Set(std::get<0>(triple), kPresent);
    }
  }

  return UnobservablesSet(intersection);
}

UnobservablesSet UnobservablesSet::Add(UnobservableStore obs,
                                       Zone* zone) const {
  if (set()->Get(obs) != kNotPresent) return *this;

  UnobservablesSet::SetT* new_set = NewSet(zone);
  *new_set = *set();
  SetAdd(new_set, obs);

  return UnobservablesSet(new_set);
}

UnobservablesSet UnobservablesSet::RemoveSameOffset(StoreOffset offset,
                                                    Zone* zone) const {
  UnobservablesSet::SetT* new_set = NewSet(zone);
  *new_set = *set();

  // Remove elements with the given offset.
  for (const auto& entry : *new_set) {
    const UnobservableStore& obs = entry.first;
    if (obs.offset_ == offset) SetErase(new_set, obs);
  }

  return UnobservablesSet(new_set);
}


}  // namespace

// static
void StoreStoreElimination::Run(JSGraph* js_graph, TickCounter* tick_counter,
                                Zone* temp_zone) {
  // Find superfluous nodes.
  RedundantStoreFinder finder(js_graph, tick_counter, temp_zone);
  finder.Find();

  // Remove superfluous nodes.
  for (Node* node : finder.to_remove_const()) {
    if (FLAG_trace_store_elimination) {
      PrintF("StoreStoreElimination::Run: Eliminating node #%d:%s\n",
             node->id(), node->op()->mnemonic());
    }
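    // Splice the dead store out of the effect chain: rewire all of its
    // effect uses to its effect input, then kill the node.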
    Node* previous_effect = NodeProperties::GetEffectInput(node);
    NodeProperties::ReplaceUses(node, nullptr, previous_effect, nullptr,
                                nullptr);
    node->Kill();
  }
}

#undef TRACE
#undef CHECK_EXTRA
#undef DCHECK_EXTRA

}  // namespace compiler
}  // namespace internal
}  // namespace v8