// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/store-store-elimination.h"

#include "src/codegen/tick-counter.h"
#include "src/compiler/all-nodes.h"
#include "src/compiler/common-operator.h"
#include "src/compiler/js-graph.h"
#include "src/compiler/node-properties.h"
#include "src/compiler/persistent-map.h"
#include "src/compiler/simplified-operator.h"
#include "src/zone/zone-containers.h"

namespace v8 {
namespace internal {
namespace compiler {

#define TRACE(fmt, ...)                                         \
  do {                                                          \
    if (FLAG_trace_store_elimination) {                         \
      PrintF("RedundantStoreFinder: " fmt "\n", ##__VA_ARGS__); \
    }                                                           \
  } while (false)

// CHECK_EXTRA is like CHECK, but takes two or more arguments: a boolean
// expression, a format string, and any number of extra arguments. The boolean
// expression is evaluated at runtime. If it evaluates to false, an error
// message is shown containing the condition as well as the extra info,
// formatted printf-style.
#define CHECK_EXTRA(condition, fmt, ...)                                      \
  do {                                                                        \
    if (V8_UNLIKELY(!(condition))) {                                          \
      FATAL("Check failed: %s. Extra info: " fmt, #condition, ##__VA_ARGS__); \
    }                                                                         \
  } while (false)
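
// Illustrative use only (hypothetical condition and message): a failing
//   CHECK_EXTRA(node != nullptr, "while visiting #%d", node_id);
// aborts with "Check failed: node != nullptr. Extra info: while visiting
// #<node_id>".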

#ifdef DEBUG
#define DCHECK_EXTRA(condition, fmt, ...) \
  CHECK_EXTRA(condition, fmt, ##__VA_ARGS__)
#else
#define DCHECK_EXTRA(condition, fmt, ...) ((void)0)
#endif

namespace {

using StoreOffset = uint32_t;

struct UnobservableStore {
  NodeId id_;
  StoreOffset offset_;
  bool maybe_gc_observable_ = false;

  bool operator==(const UnobservableStore other) const {
    return (id_ == other.id_) && (offset_ == other.offset_);
  }

  bool operator<(const UnobservableStore other) const {
    return (id_ < other.id_) || (id_ == other.id_ && offset_ < other.offset_);
  }
};

size_t hash_value(const UnobservableStore& p) {
  return base::hash_combine(p.id_, p.offset_);
}
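
// A store is identified by the id of the node stored to plus the field
// offset. For example (hypothetical ids), a StoreField[+16] into node #5 is
// tracked as {5, 16}; a later StoreField[+16] into the same node can make
// the earlier store redundant.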

// Instances of UnobservablesSet are immutable. They represent either a set of
// UnobservableStores, or the "unvisited empty set".
//
// We apply some sharing to save memory. An UnobservablesSet is only a pointer
// wide, and copying one allocates no heap (or temp_zone) memory; most
// mutating operations, however, allocate in the temp_zone.
//
// The size of an instance is the size of a pointer, plus additional space in
// the zone for non-unvisited UnobservablesSets.
class UnobservablesSet final {
 private:
  enum ObservableState {
    kObservable = 0,    // We haven't seen a store to this offset before.
    kUnobservable = 1,  // Stores to the same offset can be eliminated.
    kGCObservable = 2   // Stores to the same offset can only be eliminated
                        // if they are not initializing or transitioning.
  };

  using KeyT = UnobservableStore;
  using ValueT = ObservableState;

 public:
  using SetT = PersistentMap<KeyT, ValueT>;

  // Creates a new UnobservablesSet representing the unvisited (null) set.
  static UnobservablesSet Unvisited() { return UnobservablesSet(); }

  // Creates a new empty UnobservablesSet. This allocates in the zone, and
  // can probably be optimized to use a global singleton.
  static UnobservablesSet VisitedEmpty(Zone* zone);
  UnobservablesSet(const UnobservablesSet& other) V8_NOEXCEPT = default;
  UnobservablesSet& operator=(const UnobservablesSet& other)
      V8_NOEXCEPT = default;

  // Computes the intersection of two states.
  ObservableState Intersect(const ObservableState state1,
                            const ObservableState state2) const;

  // Computes the intersection of two UnobservablesSets. If either set is
  // empty, returns the empty set.
  UnobservablesSet Intersect(const UnobservablesSet& other,
                             const UnobservablesSet& empty, Zone* zone) const;

  // Returns the current set plus the observation obs. If obs is already
  // unobservable, no new set is created.
  UnobservablesSet Add(UnobservableStore obs, Zone* zone) const;

  // Returns the current set minus all observations with offset off. This is
  // done by creating a new set and copying all observations with different
  // offsets. This could probably be done better if the observations were
  // stored first by offset and then by node.
  // We remove all nodes with offset off since different nodes may alias one
  // another, and currently we don't have the means to know if two nodes are
  // definitely the same value.
  UnobservablesSet RemoveSameOffset(StoreOffset off, Zone* zone) const;

  // Returns a new set where all observations are marked as being observable
  // by GC.
  UnobservablesSet MarkGCObservable(Zone* zone) const;

  const SetT* set() const { return set_; }

  bool IsUnvisited() const { return set_ == nullptr; }
  bool IsEmpty() const {
    return set_ == nullptr || set_->begin() == set_->end();
  }

  // We need to guarantee that objects are fully initialized and fields are in
  // sync with their map when a GC is triggered (potentially by any allocation).
  // Therefore initializing or transitioning stores are observable if they are
  // observable by GC. All other stores are not relevant for correct GC
  // behaviour and can be eliminated even if they are observable by GC.
  bool IsUnobservable(UnobservableStore obs,
                      bool maybe_initializing_or_transitioning) const {
    if (set_ == nullptr) return false;
    ObservableState state = set_->Get(obs);
    switch (state) {
      case kUnobservable:
        return true;
      case kObservable:
        return false;
      case kGCObservable:
        return !maybe_initializing_or_transitioning;
    }
    UNREACHABLE();
  }

  bool IsGCObservable(UnobservableStore obs) const {
    return set_ != nullptr && set_->Get(obs) == kGCObservable;
  }

  bool operator==(const UnobservablesSet& other) const {
    if (IsUnvisited() || other.IsUnvisited()) {
      return IsEmpty() && other.IsEmpty();
    } else {
      // Both pointers are guaranteed not to be null.
      return *set() == *(other.set());
    }
  }

  bool operator!=(const UnobservablesSet& other) const {
    return !(*this == other);
  }

 private:
  UnobservablesSet() = default;
  explicit UnobservablesSet(const SetT* set) : set_(set) {}

  static SetT* NewSet(Zone* zone) {
    return zone->New<UnobservablesSet::SetT>(zone, kObservable);
  }

  static void SetAdd(SetT* set, const KeyT& key) {
    set->Set(key, kUnobservable);
  }
  static void SetErase(SetT* set, const KeyT& key) {
    set->Set(key, kObservable);
  }
  static void SetGCObservable(SetT* set, const KeyT& key) {
    set->Set(key, kGCObservable);
  }

  const SetT* set_ = nullptr;
};
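
// A minimal usage sketch (hypothetical zone and values): UnobservablesSet
// behaves persistently, so earlier copies stay valid after an Add:
//   UnobservablesSet a = UnobservablesSet::VisitedEmpty(zone);
//   UnobservablesSet b = a.Add({node_id, offset}, zone);
//   // `a` is unchanged and still empty; `b` contains the observation.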

class RedundantStoreFinder final {
 public:
  // Note that we initialize unobservable_ with js_graph->graph()->NodeCount()
  // unvisited sets.
  RedundantStoreFinder(JSGraph* js_graph, TickCounter* tick_counter,
                       Zone* temp_zone)
      : jsgraph_(js_graph),
        tick_counter_(tick_counter),
        temp_zone_(temp_zone),
        revisit_(temp_zone),
        in_revisit_(js_graph->graph()->NodeCount(), temp_zone),
        unobservable_(js_graph->graph()->NodeCount(),
                      UnobservablesSet::Unvisited(), temp_zone),
        to_remove_(temp_zone),
        unobservables_visited_empty_(
            UnobservablesSet::VisitedEmpty(temp_zone)) {}

  // Crawls from the end of the graph to the beginning, with the objective of
  // finding redundant stores.
  void Find();

  // This method is used for const correctness to go through the final list of
  // redundant stores that are replaced on the graph.
  const ZoneSet<Node*>& to_remove_const() { return to_remove_; }

 private:
  // Assumption: All effectful nodes are reachable from End via a sequence of
  // control edges, then a sequence of effect edges.
  // Visit goes through the control chain, visiting effectful nodes that it
  // encounters.
  void Visit(Node* node);

  // Marks effect inputs for visiting, if we are able to update this path of
  // the graph.
  void VisitEffectfulNode(Node* node);

  // Computes the intersection of the UnobservablesSets of all effect uses and
  // returns it. The resulting UnobservablesSet will never be the null
  // (unvisited) set.
  UnobservablesSet RecomputeUseIntersection(Node* node);

  // Recomputes the unobservables-set for a node, and marks superfluous nodes
  // for removal.
  UnobservablesSet RecomputeSet(Node* node, const UnobservablesSet& uses);

  // Returns true if node's opcode cannot observe StoreFields.
  static bool CannotObserveStoreField(Node* node);

  void MarkForRevisit(Node* node);
  bool HasBeenVisited(Node* node);

  // To safely cast an offset from a FieldAccess, which has a potentially
  // wider range (namely int).
  StoreOffset ToOffset(const FieldAccess& access) {
    DCHECK_GE(access.offset, 0);
    return static_cast<StoreOffset>(access.offset);
  }

  JSGraph* jsgraph() const { return jsgraph_; }
  Isolate* isolate() { return jsgraph()->isolate(); }
  Zone* temp_zone() const { return temp_zone_; }
  UnobservablesSet& unobservable_for_id(NodeId id) {
    DCHECK_LT(id, unobservable_.size());
    return unobservable_[id];
  }
  ZoneSet<Node*>& to_remove() { return to_remove_; }

  JSGraph* const jsgraph_;
  TickCounter* const tick_counter_;
  Zone* const temp_zone_;

  ZoneStack<Node*> revisit_;
  ZoneVector<bool> in_revisit_;

  // Maps node IDs to UnobservablesSets.
  ZoneVector<UnobservablesSet> unobservable_;
  ZoneSet<Node*> to_remove_;
  const UnobservablesSet unobservables_visited_empty_;
};
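
// Overall algorithm (as implemented below): a backwards dataflow fixpoint.
// Starting from End, each effectful node's set is recomputed from the
// intersection of the sets of its effect uses; whenever a node's set changes,
// its effect inputs are pushed onto revisit_ until everything stabilizes.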

void RedundantStoreFinder::Find() {
  Visit(jsgraph()->graph()->end());

  while (!revisit_.empty()) {
    tick_counter_->TickAndMaybeEnterSafepoint();
    Node* next = revisit_.top();
    revisit_.pop();
    DCHECK_LT(next->id(), in_revisit_.size());
    in_revisit_[next->id()] = false;
    Visit(next);
  }

#ifdef DEBUG
  // Check that we visited all the StoreFields.
  AllNodes all(temp_zone(), jsgraph()->graph());
  for (Node* node : all.reachable) {
    if (node->op()->opcode() == IrOpcode::kStoreField) {
      DCHECK_EXTRA(HasBeenVisited(node), "#%d:%s", node->id(),
                   node->op()->mnemonic());
    }
  }
#endif
}

void RedundantStoreFinder::MarkForRevisit(Node* node) {
  DCHECK_LT(node->id(), in_revisit_.size());
  if (!in_revisit_[node->id()]) {
    revisit_.push(node);
    in_revisit_[node->id()] = true;
  }
}

bool RedundantStoreFinder::HasBeenVisited(Node* node) {
  return !unobservable_for_id(node->id()).IsUnvisited();
}

UnobservablesSet RedundantStoreFinder::RecomputeSet(
    Node* node, const UnobservablesSet& uses) {
  switch (node->op()->opcode()) {
    case IrOpcode::kStoreField: {
      Node* stored_to = node->InputAt(0);
      const FieldAccess& access = FieldAccessOf(node->op());
      StoreOffset offset = ToOffset(access);

      UnobservableStore observation = {stored_to->id(), offset};
      const bool is_not_observable = uses.IsUnobservable(
          observation, access.maybe_initializing_or_transitioning_store);

      if (is_not_observable) {
        TRACE("  #%d is StoreField[+%d,%s](#%d), unobservable", node->id(),
              offset, MachineReprToString(access.machine_type.representation()),
              stored_to->id());
        to_remove().insert(node);
        return uses;
      } else {
        const bool is_gc_observable =
            access.maybe_initializing_or_transitioning_store &&
            uses.IsGCObservable(observation);
        // A GC observable store could have been unobservable in a previous
        // visit. This is possible if the node that previously shadowed the
        // initializing store is now unobservable, due to additional stores
        // added to the unobservables set. Example:
        //            StoreA --> StoreB (init)
        //               ^
        //               |
        // PathX --> Allocate <-- StoreC <-- PathY
        // When traversing PathX, StoreA will shadow StoreB and we will
        // eliminate StoreB. When traversing PathY, StoreA will be shadowed by
        // StoreC and we will eliminate StoreA, but StoreB is now observable by
        // GC and should not be eliminated.
        if (is_gc_observable) {
          to_remove().erase(node);
        }
        TRACE(
            "  #%d is StoreField[+%d,%s](#%d), observable%s, recording in set",
            node->id(), offset,
            MachineReprToString(access.machine_type.representation()),
            stored_to->id(), is_gc_observable ? " by GC" : "");
        return uses.Add(observation, temp_zone());
      }
    }
    case IrOpcode::kLoadField: {
      Node* loaded_from = node->InputAt(0);
      const FieldAccess& access = FieldAccessOf(node->op());
      StoreOffset offset = ToOffset(access);

      TRACE(
          "  #%d is LoadField[+%d,%s](#%d), removing all offsets [+%d] from "
          "set",
          node->id(), offset,
          MachineReprToString(access.machine_type.representation()),
          loaded_from->id(), offset);

      return uses.RemoveSameOffset(offset, temp_zone());
    }
    case IrOpcode::kAllocate:
    case IrOpcode::kAllocateRaw: {
      // Allocations can trigger a GC, therefore stores observable by
      // allocation cannot be eliminated if they are initializing or
      // transitioning stores.
      TRACE(
          "  #%d is Allocate or AllocateRaw, marking recorded offsets as "
          "observable by GC",
          node->id());
      return uses.MarkGCObservable(temp_zone());
    }
    default:
      if (CannotObserveStoreField(node)) {
        TRACE("  #%d:%s can observe nothing, set stays unchanged", node->id(),
              node->op()->mnemonic());
        return uses;
      } else {
        TRACE("  #%d:%s might observe anything, recording empty set",
              node->id(), node->op()->mnemonic());
        return unobservables_visited_empty_;
      }
  }
  UNREACHABLE();
}

bool RedundantStoreFinder::CannotObserveStoreField(Node* node) {
  IrOpcode::Value opcode = node->opcode();
  return opcode == IrOpcode::kLoadElement || opcode == IrOpcode::kLoad ||
         opcode == IrOpcode::kLoadImmutable || opcode == IrOpcode::kStore ||
         opcode == IrOpcode::kEffectPhi || opcode == IrOpcode::kStoreElement ||
         opcode == IrOpcode::kUnsafePointerAdd || opcode == IrOpcode::kRetain;
}

void RedundantStoreFinder::Visit(Node* node) {
  if (!HasBeenVisited(node)) {
    for (int i = 0; i < node->op()->ControlInputCount(); i++) {
      Node* control_input = NodeProperties::GetControlInput(node, i);
      if (!HasBeenVisited(control_input)) {
        MarkForRevisit(control_input);
      }
    }
  }

  bool is_effectful = node->op()->EffectInputCount() >= 1;
  if (is_effectful) {
    // Mark all effect inputs for revisiting (if they might have stale state).
    VisitEffectfulNode(node);
    DCHECK(HasBeenVisited(node));
  } else if (!HasBeenVisited(node)) {
    // Mark as visited.
    unobservable_for_id(node->id()) = unobservables_visited_empty_;
  }
}

void RedundantStoreFinder::VisitEffectfulNode(Node* node) {
  if (HasBeenVisited(node)) {
    TRACE("- Revisiting: #%d:%s", node->id(), node->op()->mnemonic());
  }
  UnobservablesSet after_set = RecomputeUseIntersection(node);
  UnobservablesSet before_set = RecomputeSet(node, after_set);
  DCHECK(!before_set.IsUnvisited());

  UnobservablesSet stores_for_node = unobservable_for_id(node->id());
  bool cur_set_changed =
      stores_for_node.IsUnvisited() || stores_for_node != before_set;
  if (!cur_set_changed) {
    // We will not be able to update the part of this chain above any more.
    // Exit.
    TRACE("+ No change: stabilized. Not visiting effect inputs.");
  } else {
    unobservable_for_id(node->id()) = before_set;

    // Mark effect inputs for visiting.
    for (int i = 0; i < node->op()->EffectInputCount(); i++) {
      Node* input = NodeProperties::GetEffectInput(node, i);
      TRACE("    marking #%d:%s for revisit", input->id(),
            input->op()->mnemonic());
      MarkForRevisit(input);
    }
  }
}

UnobservablesSet RedundantStoreFinder::RecomputeUseIntersection(Node* node) {
  // If there are no effect uses, break early.
  if (node->op()->EffectOutputCount() == 0) {
    IrOpcode::Value opcode = node->opcode();
    // List of opcodes that may end this effect chain. The opcodes are not
    // important to the soundness of this optimization; this serves as a
    // general sanity check. Add opcodes to this list as needed.
    //
    // Everything is observable after these opcodes; return the empty set.
    DCHECK_EXTRA(
        opcode == IrOpcode::kReturn || opcode == IrOpcode::kTerminate ||
            opcode == IrOpcode::kDeoptimize || opcode == IrOpcode::kThrow ||
            opcode == IrOpcode::kTailCall,
        "for #%d:%s", node->id(), node->op()->mnemonic());
    USE(opcode);

    return unobservables_visited_empty_;
  }

  // {first} == true indicates that we haven't looked at any elements yet.
  // {first} == false indicates that cur_set is the intersection of at least
  // one use set.
  bool first = true;
  UnobservablesSet cur_set = UnobservablesSet::Unvisited();  // irrelevant
  for (Edge edge : node->use_edges()) {
    if (!NodeProperties::IsEffectEdge(edge)) {
      continue;
    }

    // Intersect with the new use node.
    Node* use = edge.from();
    UnobservablesSet new_set = unobservable_for_id(use->id());
    if (first) {
      first = false;
      cur_set = new_set;
      if (cur_set.IsUnvisited()) {
        cur_set = unobservables_visited_empty_;
      }
    } else {
      cur_set =
          cur_set.Intersect(new_set, unobservables_visited_empty_, temp_zone());
    }

    // Break early for the empty set, since the intersection will always be
    // empty.
    if (cur_set.IsEmpty()) {
      break;
    }
  }

  DCHECK(!cur_set.IsUnvisited());
  return cur_set;
}

UnobservablesSet UnobservablesSet::VisitedEmpty(Zone* zone) {
  return UnobservablesSet(NewSet(zone));
}

UnobservablesSet::ObservableState UnobservablesSet::Intersect(
    const ObservableState state1, const ObservableState state2) const {
  if (state1 == state2) return state1;
  if (state1 == kObservable || state2 == kObservable) return kObservable;
  if (state1 == kGCObservable || state2 == kGCObservable) {
    return kGCObservable;
  }
  UNREACHABLE();
}
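
// The state intersection above is a meet over the lattice
// kObservable < kGCObservable < kUnobservable:
//   Intersect(kUnobservable, kUnobservable) == kUnobservable
//   Intersect(kUnobservable, kGCObservable) == kGCObservable
//   Intersect(anything,      kObservable)   == kObservable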

UnobservablesSet UnobservablesSet::Intersect(const UnobservablesSet& other,
                                             const UnobservablesSet& empty,
                                             Zone* zone) const {
  if (IsEmpty() || other.IsEmpty()) return empty;

  UnobservablesSet::SetT* intersection = NewSet(zone);
  for (auto triple : set()->Zip(*other.set())) {
    ObservableState new_state =
        Intersect(std::get<1>(triple), std::get<2>(triple));
    intersection->Set(std::get<0>(triple), new_state);
  }

  return UnobservablesSet(intersection);
}

UnobservablesSet UnobservablesSet::Add(UnobservableStore obs,
                                       Zone* zone) const {
  if (set()->Get(obs) == kUnobservable) return *this;

  UnobservablesSet::SetT* new_set = NewSet(zone);
  *new_set = *set();
  SetAdd(new_set, obs);

  return UnobservablesSet(new_set);
}
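
// Note: the `*new_set = *set()` copy above (and in the methods below) is
// cheap; PersistentMap is designed to share structure between versions, so
// the assignment does not duplicate the whole map.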

UnobservablesSet UnobservablesSet::RemoveSameOffset(StoreOffset offset,
                                                    Zone* zone) const {
  UnobservablesSet::SetT* new_set = NewSet(zone);
  *new_set = *set();

  // Remove elements with the given offset.
  for (auto entry : *new_set) {
    const UnobservableStore& obs = entry.first;
    if (obs.offset_ == offset) SetErase(new_set, obs);
  }

  return UnobservablesSet(new_set);
}

UnobservablesSet UnobservablesSet::MarkGCObservable(Zone* zone) const {
  UnobservablesSet::SetT* new_set = NewSet(zone);
  *new_set = *set();

  // Mark all elements as observable by GC.
  for (auto entry : *new_set) {
    const UnobservableStore& obs = entry.first;
    SetGCObservable(new_set, obs);
  }

  return UnobservablesSet(new_set);
}

}  // namespace

// static
void StoreStoreElimination::Run(JSGraph* js_graph, TickCounter* tick_counter,
                                Zone* temp_zone) {
  // Find superfluous nodes.
  RedundantStoreFinder finder(js_graph, tick_counter, temp_zone);
  finder.Find();

  // Remove superfluous nodes.
  for (Node* node : finder.to_remove_const()) {
    if (FLAG_trace_store_elimination) {
      PrintF("StoreStoreElimination::Run: Eliminating node #%d:%s\n",
             node->id(), node->op()->mnemonic());
    }
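    // Splice the store out of the effect chain: effect uses of the node are
    // redirected to the node's own effect input, then the node is killed.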
    Node* previous_effect = NodeProperties::GetEffectInput(node);
    NodeProperties::ReplaceUses(node, nullptr, previous_effect, nullptr,
                                nullptr);
    node->Kill();
  }
}

#undef TRACE
#undef CHECK_EXTRA
#undef DCHECK_EXTRA

}  // namespace compiler
}  // namespace internal
}  // namespace v8