// don't emit code for nops.
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
+ case kArchDeoptimize: {
+ int deopt_state_id =
+ BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
+ AssembleDeoptimizerCall(deopt_state_id, Deoptimizer::EAGER);
+ break;
+ }
case kArchRet:
AssembleReturn();
DCHECK_EQ(LeaveCC, i.OutputSBit());
}
-void CodeGenerator::AssembleDeoptimizerCall(int deoptimization_id) {
+void CodeGenerator::AssembleDeoptimizerCall(
+ int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
- isolate(), deoptimization_id, Deoptimizer::LAZY);
+ isolate(), deoptimization_id, bailout_type);
__ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
}
case kArchNop:
// don't emit code for nops.
break;
+ case kArchDeoptimize: {
+ int deopt_state_id =
+ BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
+ AssembleDeoptimizerCall(deopt_state_id, Deoptimizer::EAGER);
+ break;
+ }
case kArchRet:
AssembleReturn();
break;
}
-void CodeGenerator::AssembleDeoptimizerCall(int deoptimization_id) {
+void CodeGenerator::AssembleDeoptimizerCall(
+ int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
- isolate(), deoptimization_id, Deoptimizer::LAZY);
+ isolate(), deoptimization_id, bailout_type);
__ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
}
void AssembleArchLookupSwitch(Instruction* instr);
void AssembleArchTableSwitch(Instruction* instr);
- void AssembleDeoptimizerCall(int deoptimization_id);
+ void AssembleDeoptimizerCall(int deoptimization_id,
+ Deoptimizer::BailoutType bailout_type);
// Generates an architecture-specific, descriptor-specific prologue
// to set up a stack frame.
V(IfException, Operator::kKontrol, 0, 0, 1, 0, 0, 1) \
V(IfDefault, Operator::kKontrol, 0, 0, 1, 0, 0, 1) \
V(Throw, Operator::kFoldable, 1, 1, 1, 0, 0, 1) \
+ V(Deoptimize, Operator::kNoThrow, 1, 1, 1, 0, 0, 1) \
V(Return, Operator::kNoThrow, 1, 1, 1, 0, 0, 1) \
V(OsrNormalEntry, Operator::kFoldable, 0, 1, 1, 0, 1, 1) \
V(OsrLoopEntry, Operator::kFoldable, 0, 1, 1, 0, 1, 1)
const Operator* IfValue(int32_t value);
const Operator* IfDefault();
const Operator* Throw();
+ const Operator* Deoptimize();
const Operator* Return();
const Operator* Start(int num_formal_parameters);
case kArchNop:
// don't emit code for nops.
break;
+ case kArchDeoptimize: {
+ int deopt_state_id =
+ BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
+ AssembleDeoptimizerCall(deopt_state_id, Deoptimizer::EAGER);
+ break;
+ }
case kArchRet:
AssembleReturn();
break;
}
-void CodeGenerator::AssembleDeoptimizerCall(int deoptimization_id) {
+void CodeGenerator::AssembleDeoptimizerCall(
+ int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
- isolate(), deoptimization_id, Deoptimizer::LAZY);
+ isolate(), deoptimization_id, bailout_type);
__ call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
}
V(ArchLookupSwitch) \
V(ArchTableSwitch) \
V(ArchNop) \
+ V(ArchDeoptimize) \
V(ArchRet) \
V(ArchStackPointer) \
V(ArchTruncateDoubleToI) \
}
case BasicBlock::kReturn: {
// If the result itself is a return, return its input.
- Node* value = (input != NULL && input->opcode() == IrOpcode::kReturn)
+ Node* value = (input != nullptr && input->opcode() == IrOpcode::kReturn)
? input->InputAt(0)
: input;
return VisitReturn(value);
}
+ case BasicBlock::kDeoptimize: {
+ // If the result itself is a deoptimize, deoptimize with its input.
+ Node* value =
+ (input != nullptr && input->opcode() == IrOpcode::kDeoptimize)
+ ? input->InputAt(0)
+ : input;
+ return VisitDeoptimize(value);
+ }
case BasicBlock::kThrow:
DCHECK_EQ(IrOpcode::kThrow, input->opcode());
return VisitThrow(input->InputAt(0));
}
+// Lowers a Deoptimize node to a kArchDeoptimize instruction whose inputs are
+// the frame state id followed by the flattened frame state values.
+void InstructionSelector::VisitDeoptimize(Node* value) {
+ DCHECK(FLAG_turbo_deoptimization);
+
+ OperandGenerator g(this);
+
+ FrameStateDescriptor* desc = GetFrameStateDescriptor(value);
+ size_t arg_count = desc->GetTotalSize() + 1;  // Include deopt id.
+
+ InstructionOperandVector args(instruction_zone());
+ args.reserve(arg_count);
+
+ // First input: the id under which the frame state descriptor is registered
+ // with the instruction sequence; the code generator uses it in
+ // BuildTranslation when assembling kArchDeoptimize.
+ InstructionSequence::StateId state_id =
+ sequence()->AddFrameStateDescriptor(desc);
+ args.push_back(g.TempImmediate(state_id.ToInt()));
+
+ // Remaining inputs: the values captured by the frame state.
+ AddFrameStateInputs(value, &args, desc);
+
+ DCHECK_EQ(args.size(), arg_count);
+
+ // No outputs and no temps: control does not return from this instruction.
+ Emit(kArchDeoptimize, 0, nullptr, arg_count, &args.front(), 0, nullptr);
+}
+
+
void InstructionSelector::VisitThrow(Node* value) {
OperandGenerator g(this);
Emit(kArchNop, g.NoOutput()); // TODO(titzer)
void VisitSwitch(Node* node, BasicBlock* default_branch,
BasicBlock** case_branches, int32_t* case_values,
size_t case_count, int32_t min_value, int32_t max_value);
+ void VisitDeoptimize(Node* value);
void VisitReturn(Node* value);
void VisitThrow(Node* value);
- void VisitDeoptimize(Node* deopt);
// ===========================================================================
if (node->opcode() != IrOpcode::kJSCallRuntime) return NoChange();
const Runtime::Function* const f =
Runtime::FunctionForId(CallRuntimeParametersOf(node->op()).id());
- if (f->intrinsic_type != Runtime::IntrinsicType::INLINE) return NoChange();
switch (f->function_id) {
+ case Runtime::kDeoptimizeNow:
+ return ReduceDeoptimizeNow(node);
case Runtime::kInlineIsSmi:
return ReduceInlineIsSmi(node);
case Runtime::kInlineIsNonNegativeSmi:
}
+// Lowers %_DeoptimizeNow() into an eager Deoptimize node wired into the
+// graph's end. The dead continuation is kept structurally alive via the
+// false branch of an always-true Branch.
+Reduction JSIntrinsicLowering::ReduceDeoptimizeNow(Node* node) {
+ if (!FLAG_turbo_deoptimization) return NoChange();
+
+ Node* frame_state = NodeProperties::GetFrameStateInput(node);
+ DCHECK_EQ(frame_state->opcode(), IrOpcode::kFrameState);
+
+ Node* effect = NodeProperties::GetEffectInput(node);
+ Node* control = NodeProperties::GetControlInput(node);
+
+ // We are making the continuation after the call dead. To
+ // model this, we generate if (true) statement with deopt
+ // in the true branch and continuation in the false branch.
+ Node* branch =
+ graph()->NewNode(common()->Branch(), jsgraph()->TrueConstant(), control);
+
+ // False branch - the original continuation.
+ Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
+ NodeProperties::ReplaceWithValue(node, jsgraph()->UndefinedConstant(), effect,
+ if_false);
+
+ // True branch: deopt.
+ Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
+ Node* deopt =
+ graph()->NewNode(common()->Deoptimize(), frame_state, effect, if_true);
+
+ // Connect the deopt to the merge exiting the graph.
+ Node* end_pred = NodeProperties::GetControlInput(graph()->end());
+ if (end_pred->opcode() == IrOpcode::kMerge) {
+ // End is already preceded by a merge: widen it by one input.
+ int inputs = end_pred->op()->ControlInputCount() + 1;
+ end_pred->AppendInput(graph()->zone(), deopt);
+ end_pred->set_op(common()->Merge(inputs));
+ } else {
+ // Otherwise insert a fresh two-way merge in front of end.
+ Node* merge = graph()->NewNode(common()->Merge(2), end_pred, deopt);
+ NodeProperties::ReplaceControlInput(graph()->end(), merge);
+ }
+
+ return Changed(deopt);
+}
+
+
Reduction JSIntrinsicLowering::ReduceInlineIsSmi(Node* node) {
return Change(node, simplified()->ObjectIsSmi());
}
Reduction Reduce(Node* node) FINAL;
private:
+ Reduction ReduceDeoptimizeNow(Node* node);
Reduction ReduceInlineIsSmi(Node* node);
Reduction ReduceInlineIsNonNegativeSmi(Node* node);
Reduction ReduceInlineIsInstanceType(Node* node, InstanceType instance_type);
case kArchNop:
// don't emit code for nops.
break;
+ case kArchDeoptimize: {
+ int deopt_state_id =
+ BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
+ AssembleDeoptimizerCall(deopt_state_id, Deoptimizer::EAGER);
+ break;
+ }
case kArchRet:
AssembleReturn();
break;
}
-void CodeGenerator::AssembleDeoptimizerCall(int deoptimization_id) {
+void CodeGenerator::AssembleDeoptimizerCall(
+ int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
- isolate(), deoptimization_id, Deoptimizer::LAZY);
+ isolate(), deoptimization_id, bailout_type);
__ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
}
case kArchNop:
// don't emit code for nops.
break;
+ case kArchDeoptimize: {
+ int deopt_state_id =
+ BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
+ AssembleDeoptimizerCall(deopt_state_id, Deoptimizer::EAGER);
+ break;
+ }
case kArchRet:
AssembleReturn();
break;
}
-void CodeGenerator::AssembleDeoptimizerCall(int deoptimization_id) {
+void CodeGenerator::AssembleDeoptimizerCall(
+ int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
- isolate(), deoptimization_id, Deoptimizer::LAZY);
+ isolate(), deoptimization_id, bailout_type);
__ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
}
// static
-void NodeProperties::ReplaceWithValue(Node* node, Node* value, Node* effect) {
+void NodeProperties::ReplaceWithValue(Node* node, Node* value, Node* effect,
+ Node* control) {
if (!effect && node->op()->EffectInputCount() > 0) {
effect = NodeProperties::GetEffectInput(node);
}
+ if (control == nullptr && node->op()->ControlInputCount() > 0) {
+ control = NodeProperties::GetControlInput(node);
+ }
// Requires distinguishing between value, effect and control edges.
for (Edge edge : node->use_edges()) {
if (IsControlEdge(edge)) {
DCHECK_EQ(IrOpcode::kIfSuccess, edge.from()->opcode());
- Node* control = NodeProperties::GetControlInput(node);
+ DCHECK_NOT_NULL(control);
edge.from()->ReplaceUses(control);
edge.UpdateTo(NULL);
} else if (IsEffectEdge(edge)) {
// Replace value uses of {node} with {value} and effect uses of {node} with
// {effect}. If {effect == NULL}, then use the effect input to {node}. All
// control uses will be relaxed assuming {node} cannot throw.
- static void ReplaceWithValue(Node* node, Node* value, Node* effect = nullptr);
-
+ static void ReplaceWithValue(Node* node, Node* value, Node* effect = nullptr,
+ Node* control = nullptr);
// ---------------------------------------------------------------------------
// Miscellaneous utilities.
V(IfValue) \
V(IfDefault) \
V(Merge) \
+ V(Deoptimize) \
V(Return) \
V(OsrNormalEntry) \
V(OsrLoopEntry) \
// don't emit code for nops.
DCHECK_EQ(LeaveRC, i.OutputRCBit());
break;
+ case kArchDeoptimize: {
+ int deopt_state_id =
+ BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
+ AssembleDeoptimizerCall(deopt_state_id, Deoptimizer::EAGER);
+ break;
+ }
case kArchRet:
AssembleReturn();
DCHECK_EQ(LeaveRC, i.OutputRCBit());
}
-void CodeGenerator::AssembleDeoptimizerCall(int deoptimization_id) {
+void CodeGenerator::AssembleDeoptimizerCall(
+ int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
- isolate(), deoptimization_id, Deoptimizer::LAZY);
+ isolate(), deoptimization_id, bailout_type);
__ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
}
return os << "branch";
case BasicBlock::kSwitch:
return os << "switch";
+ case BasicBlock::kDeoptimize:
+ return os << "deoptimize";
case BasicBlock::kReturn:
return os << "return";
case BasicBlock::kThrow:
}
+// Terminates {block} with a kDeoptimize control, using {input} as the
+// control input, and routes the block to the end block (mirrors AddThrow
+// below).
+void Schedule::AddDeoptimize(BasicBlock* block, Node* input) {
+ DCHECK_EQ(BasicBlock::kNone, block->control());
+ block->set_control(BasicBlock::kDeoptimize);
+ SetControlInput(block, input);
+ if (block != end()) AddSuccessor(block, end());
+}
+
+
void Schedule::AddThrow(BasicBlock* block, Node* input) {
DCHECK_EQ(BasicBlock::kNone, block->control());
block->set_control(BasicBlock::kThrow);
public:
// Possible control nodes that can end a block.
enum Control {
- kNone, // Control not initialized yet.
- kGoto, // Goto a single successor block.
- kCall, // Call with continuation as first successor, exception second.
- kBranch, // Branch if true to first successor, otherwise second.
- kSwitch, // Table dispatch to one of the successor blocks.
- kReturn, // Return a value from this method.
- kThrow // Throw an exception.
+ kNone, // Control not initialized yet.
+ kGoto, // Goto a single successor block.
+ kCall, // Call with continuation as first successor, exception
+ // second.
+ kBranch, // Branch if true to first successor, otherwise second.
+ kSwitch, // Table dispatch to one of the successor blocks.
+ kDeoptimize, // Deoptimize with a value from this method.
+ kReturn, // Return a value from this method.
+ kThrow // Throw an exception.
};
class Id {
void AddSwitch(BasicBlock* block, Node* sw, BasicBlock** succ_blocks,
size_t succ_count);
+ // BasicBlock building: add a deoptimize at the end of {block}.
+ void AddDeoptimize(BasicBlock* block, Node* input);
+
// BasicBlock building: add a return at the end of {block}.
void AddReturn(BasicBlock* block, Node* input);
scheduler_->UpdatePlacement(node, Scheduler::kFixed);
ConnectSwitch(node);
break;
+ case IrOpcode::kDeoptimize:
+ scheduler_->UpdatePlacement(node, Scheduler::kFixed);
+ ConnectDeoptimize(node);
+ break;
case IrOpcode::kReturn:
scheduler_->UpdatePlacement(node, Scheduler::kFixed);
ConnectReturn(node);
schedule_->AddReturn(return_block, ret);
}
+ // Places a fixed Deoptimize node into the block of its control
+ // predecessor and terminates that block via Schedule::AddDeoptimize.
+ void ConnectDeoptimize(Node* deopt) {
+ Node* deoptimize_control = NodeProperties::GetControlInput(deopt);
+ BasicBlock* deoptimize_block = FindPredecessorBlock(deoptimize_control);
+ TraceConnect(deopt, deoptimize_block, NULL);
+ schedule_->AddDeoptimize(deoptimize_block, deopt);
+ }
+
void ConnectThrow(Node* thr) {
Node* throw_control = NodeProperties::GetControlInput(thr);
BasicBlock* throw_block = FindPredecessorBlock(throw_control);
// Type is empty.
CheckNotTyped(node);
break;
+ case IrOpcode::kDeoptimize:
+ // TODO(rossberg): check successor is End
+ // Type is empty.
+ CheckNotTyped(node);
case IrOpcode::kReturn:
// TODO(rossberg): check successor is End
// Type is empty.
case kArchNop:
// don't emit code for nops.
break;
+ case kArchDeoptimize: {
+ int deopt_state_id =
+ BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
+ AssembleDeoptimizerCall(deopt_state_id, Deoptimizer::EAGER);
+ break;
+ }
case kArchRet:
AssembleReturn();
break;
}
-void CodeGenerator::AssembleDeoptimizerCall(int deoptimization_id) {
+void CodeGenerator::AssembleDeoptimizerCall(
+ int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
- isolate(), deoptimization_id, Deoptimizer::LAZY);
+ isolate(), deoptimization_id, bailout_type);
__ call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
}
}
+// %DeoptimizeNow(): eagerly deoptimizes the topmost optimized JavaScript
+// function on the stack, if any. Takes no arguments; always returns
+// undefined.
+RUNTIME_FUNCTION(Runtime_DeoptimizeNow) {
+ HandleScope scope(isolate);
+ DCHECK(args.length() == 0);
+
+ Handle<JSFunction> function;
+
+ // Find the topmost JavaScript function on the stack.
+ // NOTE(review): the loop never calls it.Advance(); it relies on the first
+ // frame yielded by JavaScriptFrameIterator being a JavaScript frame --
+ // confirm, otherwise this would spin forever.
+ JavaScriptFrameIterator it(isolate);
+ while (!it.done()) {
+ if (it.frame()->is_java_script()) {
+ function = Handle<JSFunction>(it.frame()->function());
+ break;
+ }
+ }
+ if (function.is_null()) return isolate->heap()->undefined_value();
+
+ // Nothing to do unless the function is currently optimized.
+ if (!function->IsOptimized()) return isolate->heap()->undefined_value();
+
+ // TODO(turbofan): Deoptimization is not supported yet.
+ if (function->code()->is_turbofanned() && !FLAG_turbo_deoptimization) {
+ return isolate->heap()->undefined_value();
+ }
+
+ Deoptimizer::DeoptimizeFunction(*function);
+
+ return isolate->heap()->undefined_value();
+}
+
+
RUNTIME_FUNCTION(Runtime_RunningInSimulator) {
SealHandleScope shs(isolate);
DCHECK(args.length() == 0);
F(GetFunctionDelegate, 1, 1) \
F(GetConstructorDelegate, 1, 1) \
F(DeoptimizeFunction, 1, 1) \
+ F(DeoptimizeNow, 0, 1) \
F(ClearFunctionTypeFeedback, 1, 1) \
F(RunningInSimulator, 0, 1) \
F(IsConcurrentRecompilationSupported, 0, 1) \
--- /dev/null
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --allow-natives-syntax
+
+function g(a, b, c) {
+ return a + b + c;
+}
+
+// Deoptimizes the optimized code for f while its argument list is still
+// being evaluated; the call must nevertheless produce the correct result
+// in the unoptimized code.
+function f() {
+ return g(1, (%DeoptimizeNow(), 2), 3);
+}
+
+// Warm up, optimize, then check the deopting call still returns 1+2+3.
+f();
+f();
+%OptimizeFunctionOnNextCall(f);
+assertEquals(6, f());