1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "src/compiler/common-operator.h"
6 #include "src/compiler/control-reducer.h"
7 #include "src/compiler/graph.h"
8 #include "src/compiler/js-graph.h"
9 #include "src/compiler/node-matchers.h"
10 #include "src/compiler/node-properties-inl.h"
11 #include "src/zone-containers.h"
// Per-node traversal state for the reduction stack machine: a node moves
// kUnvisited -> kOnStack -> kVisited; kRevisit re-queues a node whose
// inputs changed after it was first visited.
17 enum VisitState { kUnvisited = 0, kOnStack = 1, kRevisit = 2, kVisited = 3 };
// Forward-reachability flag (reached from the graph's start node). The value
// is chosen disjoint from kOnStack so both bits can be OR-ed into a single
// byte per node during RepairAndRemoveLoops().
18 enum Reachability { kFromStart = 8 };
21 if (FLAG_trace_turbo_reduction) PrintF x
// Performs control-flow reduction on a TurboFan graph: folds branches with
// constant conditions, collapses redundant phis/merges/selects, connects
// non-terminating loops to the end node, and trims nodes unreachable from end.
23 class ControlReducerImpl {
// The per-node state table is sized from the graph's current NodeCount();
// all bookkeeping is allocated in {zone}.
25   ControlReducerImpl(Zone* zone, JSGraph* jsgraph,
26                      CommonOperatorBuilder* common)
30         state_(jsgraph->graph()->NodeCount(), kUnvisited, zone_),
37   CommonOperatorBuilder* common_;  // builds replacement operators (Merge, Phi, ...).
38   ZoneVector<VisitState> state_;   // per-node visit state, indexed by node id.
39   ZoneDeque<Node*> stack_;         // explicit DFS stack of nodes being reduced.
40   ZoneDeque<Node*> revisit_;       // nodes queued for another reduction pass.
46       // Process the node on the top of the stack, potentially pushing more
47       // or popping the node off the stack.
49       // If the stack becomes empty, revisit any nodes in the revisit queue.
50       // If no nodes in the revisit queue, try removing dead loops.
51       // If no dead loops, then finish.
      // The || chain below short-circuits: RepairAndRemoveLoops() only runs
      // once both the work stack and the revisit queue have been exhausted.
52     } while (!stack_.empty() || TryRevisit() || RepairAndRemoveLoops());
    // Drain the revisit queue from the back; only nodes still marked
    // kRevisit get pushed back onto the work stack.
56     while (!revisit_.empty()) {
57       Node* n = revisit_.back();
59       if (state_[n->id()] == kRevisit) {  // state can change while in queue.
67   // Repair the graph after the possible creation of non-terminating or dead
68   // loops. Removing dead loops can produce more opportunities for reduction.
69   bool RepairAndRemoveLoops() {
70     // TODO(turbofan): we can skip this if the graph has no loops, but
71     // we have to be careful about proper loop detection during reduction.
73     // Gather all nodes backwards-reachable from end (through inputs).
    // After this pass, state_[id] == kVisited means "reachable from end".
74     state_.assign(graph()->NodeCount(), kUnvisited);
75     NodeVector nodes(zone_);
76     AddNodesReachableFromEnd(nodes);
78     // Walk forward through control nodes, looking for back edges to nodes
79     // that are not connected to end. Those are non-terminating loops (NTLs).
80     Node* start = graph()->start();
    // One byte per node holding the kFromStart and kOnStack bits for this
    // forward DFS.
81     ZoneVector<byte> fw_reachability(graph()->NodeCount(), 0, zone_);
82     fw_reachability[start->id()] = kFromStart | kOnStart;
83     stack_.push_back(start);
85     while (!stack_.empty()) {
86       Node* node = stack_.back();
87       TRACE(("ControlFw: #%d:%s\n", node->id(), node->op()->mnemonic()));
89       for (Node* const succ : node->uses()) {
90         byte reach = fw_reachability[succ->id()];
        // A successor still on the DFS stack means a back edge; if it is
        // additionally not backwards-reachable from end (state != kVisited),
        // it heads a non-terminating loop.
91         if ((reach & kOnStack) != 0 && state_[succ->id()] != kVisited) {
92           // {succ} is on stack and not reachable from end.
93           ConnectNTL(nodes, succ);
          // ConnectNTL may create new nodes; grow the table to cover them.
94           fw_reachability.resize(graph()->NodeCount(), 0);
95           pop = false;  // continue traversing inputs to this node.
98         if ((reach & kFromStart) == 0 &&
99             IrOpcode::IsControlOpcode(succ->opcode())) {
100           // {succ} is a control node and not yet reached from start.
101           fw_reachability[succ->id()] |= kFromStart | kOnStack;
102           stack_.push_back(succ);
103           pop = false;  // "recurse" into successor control node.
      // Done with this node: clear its on-stack bit (kFromStart remains set).
108       fw_reachability[node->id()] &= ~kOnStack;
113     // Trim references from dead nodes to live nodes first.
114     jsgraph_->GetCachedNodes(&nodes);
117     // Any control nodes not reachable from start are dead, even loops.
118     for (size_t i = 0; i < nodes.size(); i++) {
119       Node* node = nodes[i];
120       byte reach = fw_reachability[node->id()];
121       if ((reach & kFromStart) == 0 &&
122           IrOpcode::IsControlOpcode(node->opcode())) {
123         ReplaceNode(node, dead());  // uses will be added to revisit queue.
    // Reduction continues iff the replacements above queued any revisits.
126     return TryRevisit();  // try to push a node onto the stack.
129   // Connect {loop}, the header of a non-terminating loop, to the end node.
130   void ConnectNTL(NodeVector& nodes, Node* loop) {
131     TRACE(("ConnectNTL: #%d:%s\n", loop->id(), loop->op()->mnemonic()));
133     if (loop->opcode() != IrOpcode::kTerminate) {
134       // Insert a {Terminate} node if the loop has effects.
135       ZoneDeque<Node*> effects(zone_);
      // Collect the loop's effect phis; they become the Terminate's effect
      // inputs.
136       for (Node* const use : loop->uses()) {
137         if (use->opcode() == IrOpcode::kEffectPhi) effects.push_back(use);
139       int count = static_cast<int>(effects.size());
      // Input layout: {count} effect inputs followed by {loop} as the
      // control input.
141       Node** inputs = zone_->NewArray<Node*>(1 + count);
142       for (int i = 0; i < count; i++) inputs[i] = effects[i];
143       inputs[count] = loop;
144       loop = graph()->NewNode(common_->Terminate(count), 1 + count, inputs);
145       TRACE(("AddTerminate: #%d:%s[%d]\n", loop->id(), loop->op()->mnemonic(),
    // Wire {loop} (or its freshly created Terminate) into the graph's end.
151     Node* end = graph()->end();
152     CHECK_EQ(IrOpcode::kEnd, end->opcode());
153     Node* merge = end->InputAt(0);
154     if (merge == NULL || merge->opcode() == IrOpcode::kDead) {
155       // The end node died; just connect end to {loop}.
156       end->ReplaceInput(0, loop);
157     } else if (merge->opcode() != IrOpcode::kMerge) {
158       // Introduce a final merge node for {end->InputAt(0)} and {loop}.
159       merge = graph()->NewNode(common_->Merge(2), merge, loop);
160       end->ReplaceInput(0, merge);
163       // Append a new input to the final merge at the end.
164       merge->AppendInput(graph()->zone(), loop);
      // Keep the Merge operator's arity in sync with the node's input count.
165       merge->set_op(common_->Merge(merge->InputCount()));
    // Record the newly wired node as live and pull in everything it reaches
    // backwards, so trimming does not discard it. state_ may be stale after
    // the allocations above, so grow it to the new NodeCount() first.
167     nodes.push_back(to_add);
168     state_.resize(graph()->NodeCount(), kUnvisited);
169     state_[to_add->id()] = kVisited;
170     AddBackwardsReachableNodes(nodes, nodes.size() - 1);
  // Seed the backwards-reachability walk: mark end visited and, unless it is
  // dead, expand {nodes} with everything reachable from it through inputs.
173   void AddNodesReachableFromEnd(NodeVector& nodes) {
174     Node* end = graph()->end();
175     state_[end->id()] = kVisited;
176     if (!end->IsDead()) {
177       nodes.push_back(end);
178       AddBackwardsReachableNodes(nodes, nodes.size() - 1);
  // Worklist expansion: starting at index {cursor}, append every
  // not-yet-visited input of each node to {nodes}, marking it kVisited as it
  // is added. {nodes} doubles as the worklist and the result set.
182   void AddBackwardsReachableNodes(NodeVector& nodes, size_t cursor) {
183     while (cursor < nodes.size()) {
184       Node* node = nodes[cursor++];
185       for (Node* const input : node->inputs()) {
186         if (state_[input->id()] != kVisited) {
187           state_[input->id()] = kVisited;
188           nodes.push_back(input);
195     // Gather all nodes backwards-reachable from end through inputs.
196     state_.assign(graph()->NodeCount(), kUnvisited);
197     NodeVector nodes(zone_);
198     AddNodesReachableFromEnd(nodes);
200     // Process cached nodes in the JSGraph too.
    // Nodes cached by the JSGraph must be treated as live even when end does
    // not reach them through inputs.
201     jsgraph_->GetCachedNodes(&nodes);
  // Null out every use edge pointing from a node NOT reachable from end
  // (state != kVisited) into a live node in {nodes}. The debug section then
  // verifies that live nodes have no NULL inputs and only live users.
205   void TrimNodes(NodeVector& nodes) {
206     // Remove dead->live edges.
207     for (size_t j = 0; j < nodes.size(); j++) {
208       Node* node = nodes[j];
209       for (UseIter i = node->uses().begin(); i != node->uses().end();) {
210         size_t id = static_cast<size_t>((*i)->id());
211         if (state_[id] != kVisited) {
212           TRACE(("DeadLink: #%d:%s(%d) -> #%d:%s\n", (*i)->id(),
213                  (*i)->op()->mnemonic(), i.index(), node->id(),
214                  node->op()->mnemonic()));
          // Clear this use edge (set the dead user's input to NULL) and
          // advance the iterator in a single step.
215           i.UpdateToAndIncrement(NULL);
222     // Verify that no inputs to live nodes are NULL.
223     for (size_t j = 0; j < nodes.size(); j++) {
224       Node* node = nodes[j];
225       for (Node* const input : node->inputs()) {
226         CHECK_NE(NULL, input);
      // Every remaining user of a live node must itself be live.
228       for (Node* const use : node->uses()) {
229         size_t id = static_cast<size_t>(use->id());
230         CHECK_EQ(kVisited, state_[id]);
236   // Reduce the node on the top of the stack.
237   // If an input {i} is not yet visited or needs to be revisited, push {i} onto
238   // the stack and return. Otherwise, all inputs are visited, so apply
239   // reductions for {node} and pop it off the stack.
241     size_t height = stack_.size();
242     Node* node = stack_.back();
244     if (node->IsDead()) return Pop();  // Node was killed while on stack.
246     TRACE(("ControlReduce: #%d:%s\n", node->id(), node->op()->mnemonic()));
248     // Recurse on an input if necessary.
249     for (Node* const input : node->inputs()) {
250       if (Recurse(input)) return;
253     // All inputs should be visited or on stack. Apply reductions to node.
254     Node* replacement = ReduceNode(node);
255     if (replacement != node) ReplaceNode(node, replacement);
257     // After reducing the node, pop it off the stack.
    // Reductions must not have pushed new work: stack height is unchanged.
258     CHECK_EQ(static_cast<int>(height), static_cast<int>(stack_.size()));
261     // If there was a replacement, reduce it after popping {node}.
262     if (replacement != node) Recurse(replacement);
265   // Push a node onto the stack if its state is {kUnvisited} or {kRevisit}.
  // Returns false when the node's current state requires no (re)visit.
266   bool Recurse(Node* node) {
267     size_t id = static_cast<size_t>(node->id());
268     if (id < state_.size()) {
269       if (state_[id] != kRevisit && state_[id] != kUnvisited) return false;
      // Node id beyond the current table: the node was created after state_
      // was sized. Grow with 1.5x headroom to avoid resizing on every new id.
271       state_.resize((3 * id) / 2, kUnvisited);
  // Mark {node} as on-stack and push it onto the work stack.
277   void Push(Node* node) {
278     state_[node->id()] = kOnStack;
279     stack_.push_back(node);
    // Mark the top-of-stack node as fully visited before it is removed; it
    // must currently be in the kOnStack state.
283     int pos = static_cast<int>(stack_.size()) - 1;
285     DCHECK_EQ(kOnStack, state_[stack_[pos]->id()]);
286     state_[stack_[pos]->id()] = kVisited;
290   // Queue a node to be revisited if it has been visited once already.
  // Nodes that are unvisited or still on the stack will be processed anyway,
  // so only fully-visited (kVisited) nodes are re-queued.
291   void Revisit(Node* node) {
292     size_t id = static_cast<size_t>(node->id());
293     if (id < state_.size() && state_[id] == kVisited) {
294       TRACE(("  Revisit #%d:%s\n", node->id(), node->op()->mnemonic()));
295       state_[id] = kRevisit;
296       revisit_.push_back(node);
    // Lazily create and cache the singleton Dead node.
301     if (dead_ == NULL) dead_ = graph()->NewNode(common_->Dead());
305   //===========================================================================
306   // Reducer implementation: perform reductions on a node.
307   //===========================================================================
  // Dispatches {node} to the appropriate reduction routine by opcode; returns
  // the replacement node, or {node} itself when nothing can be reduced.
308   Node* ReduceNode(Node* node) {
309     if (node->op()->ControlInputCount() == 1) {
310       // If a node has only one control input and it is dead, replace with dead.
311       Node* control = NodeProperties::GetControlInput(node);
312       if (control->opcode() == IrOpcode::kDead) {
313         TRACE(("ControlDead: #%d:%s\n", node->id(), node->op()->mnemonic()));
318     // Reduce branches, phis, and merges.
319     switch (node->opcode()) {
320       case IrOpcode::kBranch:
321         return ReduceBranch(node);
      // Loops and merges share the same reduction (dead-input trimming).
322       case IrOpcode::kLoop:
323       case IrOpcode::kMerge:
324         return ReduceMerge(node);
325       case IrOpcode::kSelect:
326         return ReduceSelect(node);
328       case IrOpcode::kEffectPhi:
329         return ReducePhi(node);
335   // Reduce redundant selects.
  // A Select collapses to one of its value inputs when both values are the
  // same node or when the condition is a known constant.
336   Node* ReduceSelect(Node* const node) {
337     Node* const cond = node->InputAt(0);
338     Node* const tvalue = node->InputAt(1);
339     Node* const fvalue = node->InputAt(2);
340     if (tvalue == fvalue) return tvalue;
341     switch (cond->opcode()) {
      // Numeric constants: only zero selects the false value.
342       case IrOpcode::kInt32Constant:
343         return Int32Matcher(cond).Is(0) ? fvalue : tvalue;
344       case IrOpcode::kInt64Constant:
345         return Int64Matcher(cond).Is(0) ? fvalue : tvalue;
346       case IrOpcode::kNumberConstant:
347         return NumberMatcher(cond).Is(0) ? fvalue : tvalue;
348       case IrOpcode::kHeapConstant: {
349         Handle<Object> object =
350             HeapObjectMatcher<Object>(cond).Value().handle();
        // Only the true/false oddballs fold; other heap objects do not.
351         if (object->IsTrue()) return tvalue;
352         if (object->IsFalse()) return fvalue;
361   // Reduce redundant phis.
  // A phi's value inputs come before its control input, so InputCount() == 2
  // means exactly one value input.
362   Node* ReducePhi(Node* node) {
363     int n = node->InputCount();
364     if (n <= 1) return dead();            // No non-control inputs.
365     if (n == 2) return node->InputAt(0);  // Only one non-control input.
    // Scan the value inputs: the phi is redundant if, ignoring dead inputs
    // and self-references, all inputs agree on a single value.
367     Node* replacement = NULL;
368     Node::Inputs inputs = node->inputs();
369     for (InputIter it = inputs.begin(); n > 1; --n, ++it) {
371       if (input->opcode() == IrOpcode::kDead) continue;  // ignore dead inputs.
372       if (input != node && input != replacement) {  // non-redundant input.
        // A second distinct live input: phi is not redundant; keep it.
373         if (replacement != NULL) return node;
    // All inputs were dead or self-references -> the phi itself is dead.
377     return replacement == NULL ? dead() : replacement;
380   // Reduce merges by trimming away dead inputs from the merge and phis.
381   Node* ReduceMerge(Node* node) {
382     // Count the number of live inputs.
386     for (Node* const input : node->inputs()) {
387       if (input->opcode() != IrOpcode::kDead) {
    // Fast path: more than one input and none of them dead -- nothing to do.
394     if (live > 1 && live == node->InputCount()) return node;  // nothing to do.
396     TRACE(("ReduceMerge: #%d:%s (%d live)\n", node->id(),
397            node->op()->mnemonic(), live));
399     if (live == 0) return dead();  // no remaining inputs.
401     // Gather phis and effect phis to be edited.
402     ZoneVector<Node*> phis(zone_);
403     for (Node* const use : node->uses()) {
404       if (use->opcode() == IrOpcode::kPhi ||
405           use->opcode() == IrOpcode::kEffectPhi) {
411       // All phis are redundant. Replace them with their live input.
412       for (Node* const phi : phis) ReplaceNode(phi, phi->InputAt(live_index));
413       // The merge itself is redundant.
414       return node->InputAt(live_index);
417     // Edit phis in place, removing dead inputs and revisiting them.
418     for (Node* const phi : phis) {
419       TRACE(("  PhiInMerge: #%d:%s (%d live)\n", phi->id(),
420              phi->op()->mnemonic(), live));
421       RemoveDeadInputs(node, phi);
424     // Edit the merge in place, removing dead inputs.
425     RemoveDeadInputs(node, node);
429   // Reduce branches if they have constant inputs.
  // When the condition is a known constant, the branch's IfTrue/IfFalse
  // projections are rewired: the taken projection is replaced by the branch's
  // control input, the untaken one by the Dead node.
430   Node* ReduceBranch(Node* node) {
431     Node* cond = node->InputAt(0);
433     switch (cond->opcode()) {
      // Numeric constants: any non-zero value counts as true.
434       case IrOpcode::kInt32Constant:
435         is_true = !Int32Matcher(cond).Is(0);
437       case IrOpcode::kInt64Constant:
438         is_true = !Int64Matcher(cond).Is(0);
440       case IrOpcode::kNumberConstant:
441         is_true = !NumberMatcher(cond).Is(0);
443       case IrOpcode::kHeapConstant: {
444         Handle<Object> object =
445             HeapObjectMatcher<Object>(cond).Value().handle();
446         if (object->IsTrue())
448         else if (object->IsFalse())
          // Non-boolean heap constants are left unreduced for now.
451           return node;  // TODO(turbofan): fold branches on strings, objects.
458     TRACE(("BranchReduce: #%d:%s = %s\n", node->id(), node->op()->mnemonic(),
459            is_true ? "true" : "false"));
461     // Replace IfTrue and IfFalse projections from this branch.
462     Node* control = NodeProperties::GetControlInput(node);
463     for (UseIter i = node->uses().begin(); i != node->uses().end();) {
465       if (to->opcode() == IrOpcode::kIfTrue) {
466         TRACE(("  IfTrue: #%d:%s\n", to->id(), to->op()->mnemonic()));
        // Clear this use edge and advance the iterator in a single step.
467         i.UpdateToAndIncrement(NULL);
468         ReplaceNode(to, is_true ? control : dead());
469       } else if (to->opcode() == IrOpcode::kIfFalse) {
470         TRACE(("  IfFalse: #%d:%s\n", to->id(), to->op()->mnemonic()));
471         i.UpdateToAndIncrement(NULL);
472         ReplaceNode(to, is_true ? dead() : control);
480   // Remove inputs to {node} corresponding to the dead inputs to {merge}
481   // and compact the remaining inputs, updating the operator.
482   void RemoveDeadInputs(Node* merge, Node* node) {
    // {pos} counts surviving inputs as the loop compacts them leftwards.
    // Inputs at indices >= merge->InputCount() (a phi's trailing control
    // input) have no corresponding merge input and are always kept.
484     for (int i = 0; i < node->InputCount(); i++) {
486       if (i < merge->InputCount() &&
487           merge->InputAt(i)->opcode() == IrOpcode::kDead)
489       // compact live inputs.
490       if (pos != i) node->ReplaceInput(pos, node->InputAt(i));
493     node->TrimInputCount(pos);
    // Rebuild the operator with the new arity. For (effect) phis the control
    // input does not count toward the value-input arity, hence {pos - 1}.
494     if (node->opcode() == IrOpcode::kPhi) {
495       node->set_op(common_->Phi(OpParameter<MachineType>(node->op()), pos - 1));
496     } else if (node->opcode() == IrOpcode::kEffectPhi) {
497       node->set_op(common_->EffectPhi(pos - 1));
498     } else if (node->opcode() == IrOpcode::kMerge) {
499       node->set_op(common_->Merge(pos));
500     } else if (node->opcode() == IrOpcode::kLoop) {
501       node->set_op(common_->Loop(pos));
507   // Replace uses of {node} with {replacement} and revisit the uses.
508   void ReplaceNode(Node* node, Node* replacement) {
509     if (node == replacement) return;
510     TRACE(("  Replace: #%d:%s with #%d:%s\n", node->id(),
511            node->op()->mnemonic(), replacement->id(),
512            replacement->op()->mnemonic()));
    // Queue every (distinct) user for another reduction pass, since one of
    // its inputs is about to change.
513     for (Node* const use : node->uses()) {
514       // Don't revisit this node if it refers to itself.
515       if (use != node) Revisit(use);
517     node->ReplaceUses(replacement);
  // Shorthand accessor for the underlying graph.
521   Graph* graph() { return jsgraph_->graph(); }
// Public entry point: run the full control reduction over {jsgraph}.
525 void ControlReducer::ReduceGraph(Zone* zone, JSGraph* jsgraph,
526                                  CommonOperatorBuilder* common) {
527   ControlReducerImpl impl(zone, jsgraph, common);
// Public entry point: only trim nodes unreachable from end. No operator
// builder is needed since no reductions are performed.
533 void ControlReducer::TrimGraph(Zone* zone, JSGraph* jsgraph) {
534   ControlReducerImpl impl(zone, jsgraph, NULL);
// Test-only hook: apply the phi reduction to a single node, using a
// temporary zone for the reducer's bookkeeping.
539 Node* ControlReducer::ReducePhiForTesting(JSGraph* jsgraph,
540                                           CommonOperatorBuilder* common,
542   Zone zone(jsgraph->graph()->zone()->isolate());
543   ControlReducerImpl impl(&zone, jsgraph, common);
544   return impl.ReducePhi(node);
// Test-only hook: apply the merge reduction to a single node, using a
// temporary zone for the reducer's bookkeeping.
548 Node* ControlReducer::ReduceMergeForTesting(JSGraph* jsgraph,
549                                             CommonOperatorBuilder* common,
551   Zone zone(jsgraph->graph()->zone()->isolate());
552   ControlReducerImpl impl(&zone, jsgraph, common);
553   return impl.ReduceMerge(node);
// Test-only hook: apply the branch reduction to a single node, using a
// temporary zone for the reducer's bookkeeping.
557 Node* ControlReducer::ReduceBranchForTesting(JSGraph* jsgraph,
558                                              CommonOperatorBuilder* common,
560   Zone zone(jsgraph->graph()->zone()->isolate());
561   ControlReducerImpl impl(&zone, jsgraph, common);
562   return impl.ReduceBranch(node);
566 } // namespace v8::internal::compiler