From a668cd6fc80ea452921c94560dcb6e653976ae47 Mon Sep 17 00:00:00 2001 From: "jarin@chromium.org" Date: Mon, 1 Sep 2014 09:31:14 +0000 Subject: [PATCH] Context deoptimization and removal of the deoptimization block in Turbofan This adds context deoptimization to Turbofan and Crankshaft (also submitted separately as https://codereview.chromium.org/515723004/). The second patchset removes the deoptimization/continuation block from calls. BUG= R=bmeurer@chromium.org Review URL: https://codereview.chromium.org/522873002 git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@23547 ce2b1a6d-e550-0410-aec6-3dcde31c8c00 --- src/arm/lithium-arm.cc | 1 + src/arm64/lithium-arm64.cc | 1 + src/compiler/arm/code-generator-arm.cc | 16 ++- src/compiler/arm/instruction-selector-arm.cc | 4 +- src/compiler/arm64/code-generator-arm64.cc | 15 ++- src/compiler/arm64/instruction-selector-arm64.cc | 3 +- src/compiler/ast-graph-builder.cc | 54 +++----- src/compiler/ast-graph-builder.h | 15 +-- src/compiler/code-generator.cc | 124 ++++++++++--------- src/compiler/code-generator.h | 58 +++++---- src/compiler/common-operator.h | 36 ++++-- src/compiler/graph-builder.h | 3 + src/compiler/ia32/code-generator-ia32.cc | 15 ++- src/compiler/ia32/instruction-selector-ia32.cc | 2 +- src/compiler/instruction-codes.h | 1 - src/compiler/instruction-selector-impl.h | 4 - src/compiler/instruction-selector.cc | 70 +++-------- src/compiler/instruction-selector.h | 3 +- src/compiler/instruction.cc | 12 +- src/compiler/instruction.h | 30 +++-- src/compiler/js-generic-lowering.cc | 3 - src/compiler/linkage-impl.h | 2 +- src/compiler/linkage.cc | 2 +- src/compiler/linkage.h | 8 +- src/compiler/opcodes.h | 5 +- src/compiler/operator-properties-inl.h | 86 +++---------- src/compiler/operator-properties.h | 1 - src/compiler/raw-machine-assembler.cc | 37 ++---- src/compiler/raw-machine-assembler.h | 8 +- src/compiler/schedule.cc | 4 - src/compiler/schedule.h | 35 +----- src/compiler/scheduler.cc | 42 +------ src/compiler/verifier.cc | 9 -- src/compiler/x64/code-generator-x64.cc | 15 ++- src/compiler/x64/instruction-selector-x64.cc | 2 +- src/deoptimizer.cc | 16 +-- src/deoptimizer.h | 4 + src/hydrogen-instructions.h | 34 +++--- src/hydrogen.cc | 5 +- src/ia32/lithium-ia32.cc | 6 +- src/lithium.cc | 22 ++-- src/mips/lithium-mips.cc | 1 + src/mips64/lithium-mips64.cc | 1 + src/runtime.cc | 7 +- src/x64/lithium-x64.cc | 6 +- src/x87/lithium-x87.cc | 1 + test/cctest/compiler/test-codegen-deopt.cc | 60 ++------- test/cctest/compiler/test-schedule.cc | 14 --- test/cctest/compiler/test-scheduler.cc | 136 --------------------- .../instruction-selector-unittest.cc | 100 +++++---------- .../instruction-selector-unittest.h | 6 +- test/mjsunit/debug-clearbreakpointgroup.js | 1 + test/mjsunit/debug-evaluate-closure.js | 1 + test/mjsunit/debug-evaluate-with.js | 2 + test/mjsunit/debug-step-2.js | 1 + test/mjsunit/mjsunit.status | 15 --- test/mjsunit/regress/regress-1170187.js | 2 + test/mjsunit/regress/regress-119609.js | 1 + test/mjsunit/regress/regress-131994.js | 1 + test/mjsunit/regress/regress-325676.js | 1 + test/mjsunit/regress/regress-crbug-107996.js | 1 + test/mjsunit/regress/regress-crbug-171715.js | 1 + test/mjsunit/regress/regress-crbug-222893.js | 1 + .../regress/regress-debug-deopt-while-recompile.js | 1 + .../regress/regress-opt-after-debug-deopt.js | 1 + 65 files changed, 390 insertions(+), 785 deletions(-) diff --git a/src/arm/lithium-arm.cc b/src/arm/lithium-arm.cc index fc3de67..39b3393 100644 --- a/src/arm/lithium-arm.cc +++ 
b/src/arm/lithium-arm.cc @@ -2559,6 +2559,7 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) { if (instr->arguments_var() != NULL && instr->arguments_object()->IsLinked()) { inner->Bind(instr->arguments_var(), instr->arguments_object()); } + inner->BindContext(instr->closure_context()); inner->set_entry(instr); current_block_->UpdateEnvironment(inner); chunk_->AddInlinedClosure(instr->closure()); diff --git a/src/arm64/lithium-arm64.cc b/src/arm64/lithium-arm64.cc index b731d96..1f670cc 100644 --- a/src/arm64/lithium-arm64.cc +++ b/src/arm64/lithium-arm64.cc @@ -1475,6 +1475,7 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) { instr->arguments_object()->IsLinked()) { inner->Bind(instr->arguments_var(), instr->arguments_object()); } + inner->BindContext(instr->closure_context()); inner->set_entry(instr); current_block_->UpdateEnvironment(inner); chunk_->AddInlinedClosure(instr->closure()); diff --git a/src/compiler/arm/code-generator-arm.cc b/src/compiler/arm/code-generator-arm.cc index 762572b..bc11f5a 100644 --- a/src/compiler/arm/code-generator-arm.cc +++ b/src/compiler/arm/code-generator-arm.cc @@ -165,15 +165,6 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) { DCHECK_EQ(LeaveCC, i.OutputSBit()); break; } - case kArchDeoptimize: { - int deoptimization_id = BuildTranslation(instr, 0); - - Address deopt_entry = Deoptimizer::GetDeoptimizationEntry( - isolate(), deoptimization_id, Deoptimizer::LAZY); - __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY); - DCHECK_EQ(LeaveCC, i.OutputSBit()); - break; - } case kArchDrop: { int words = MiscField::decode(instr->opcode()); __ Drop(words); @@ -614,6 +605,13 @@ void CodeGenerator::AssembleArchBoolean(Instruction* instr, } +void CodeGenerator::AssembleDeoptimizerCall(int deoptimization_id) { + Address deopt_entry = Deoptimizer::GetDeoptimizationEntry( + isolate(), deoptimization_id, Deoptimizer::LAZY); + __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY); +} + + void CodeGenerator::AssemblePrologue() { CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); if (descriptor->kind() == CallDescriptor::kCallAddress) { diff --git a/src/compiler/arm/instruction-selector-arm.cc b/src/compiler/arm/instruction-selector-arm.cc index 74442f7..05dae15 100644 --- a/src/compiler/arm/instruction-selector-arm.cc +++ b/src/compiler/arm/instruction-selector-arm.cc @@ -77,7 +77,6 @@ class ArmOperandGenerator V8_FINAL : public OperandGenerator { case kArchCallAddress: case kArchCallCodeObject: case kArchCallJSFunction: - case kArchDeoptimize: case kArchDrop: case kArchJmp: case kArchNop: @@ -794,8 +793,7 @@ void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation, // TODO(turbofan): on ARM64 it's probably better to use the code object in a // register if there are multiple uses of it. Improve constant pool and the // heuristics in the register allocator for where to emit constants. - InitializeCallBuffer(call, &buffer, true, false, continuation, - deoptimization); + InitializeCallBuffer(call, &buffer, true, false); // TODO(dcarney): might be possible to use claim/poke instead // Push any stack arguments. 
diff --git a/src/compiler/arm64/code-generator-arm64.cc b/src/compiler/arm64/code-generator-arm64.cc index 88c2b8d..1fb7c65 100644 --- a/src/compiler/arm64/code-generator-arm64.cc +++ b/src/compiler/arm64/code-generator-arm64.cc @@ -157,14 +157,6 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) { AddSafepointAndDeopt(instr); break; } - case kArchDeoptimize: { - int deoptimization_id = BuildTranslation(instr, 0); - - Address deopt_entry = Deoptimizer::GetDeoptimizationEntry( - isolate(), deoptimization_id, Deoptimizer::LAZY); - __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY); - break; - } case kArchDrop: { int words = MiscField::decode(instr->opcode()); __ Drop(words); @@ -627,6 +619,13 @@ void CodeGenerator::AssembleArchBoolean(Instruction* instr, } +void CodeGenerator::AssembleDeoptimizerCall(int deoptimization_id) { + Address deopt_entry = Deoptimizer::GetDeoptimizationEntry( + isolate(), deoptimization_id, Deoptimizer::LAZY); + __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY); +} + + // TODO(dcarney): increase stack slots in frame once before first use. static int AlignedStackSlots(int stack_slots) { if (stack_slots & 1) stack_slots++; diff --git a/src/compiler/arm64/instruction-selector-arm64.cc b/src/compiler/arm64/instruction-selector-arm64.cc index 03c7aa5..710c495 100644 --- a/src/compiler/arm64/instruction-selector-arm64.cc +++ b/src/compiler/arm64/instruction-selector-arm64.cc @@ -598,8 +598,7 @@ void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation, // TODO(turbofan): on ARM64 it's probably better to use the code object in a // register if there are multiple uses of it. Improve constant pool and the // heuristics in the register allocator for where to emit constants. - InitializeCallBuffer(call, &buffer, true, false, continuation, - deoptimization); + InitializeCallBuffer(call, &buffer, true, false); // Push the arguments to the stack. 
bool is_c_frame = descriptor->kind() == CallDescriptor::kCallAddress; diff --git a/src/compiler/ast-graph-builder.cc b/src/compiler/ast-graph-builder.cc index 9ec91e5..afc58e9 100644 --- a/src/compiler/ast-graph-builder.cc +++ b/src/compiler/ast-graph-builder.cc @@ -217,15 +217,17 @@ void AstGraphBuilder::Environment::UpdateStateValues(Node** state_values, } -Node* AstGraphBuilder::Environment::Checkpoint(BailoutId ast_id) { +Node* AstGraphBuilder::Environment::Checkpoint( + BailoutId ast_id, OutputFrameStateCombine combine) { UpdateStateValues(¶meters_node_, 0, parameters_count()); UpdateStateValues(&locals_node_, parameters_count(), locals_count()); UpdateStateValues(&stack_node_, parameters_count() + locals_count(), stack_height()); - Operator* op = common()->FrameState(ast_id); + Operator* op = common()->FrameState(ast_id, combine); - return graph()->NewNode(op, parameters_node_, locals_node_, stack_node_); + return graph()->NewNode(op, parameters_node_, locals_node_, stack_node_, + GetContext()); } @@ -1084,14 +1086,14 @@ void AstGraphBuilder::VisitAssignment(Assignment* expr) { PrintableUnique name = MakeUnique(property->key()->AsLiteral()->AsPropertyName()); old_value = NewNode(javascript()->LoadNamed(name), object); - PrepareFrameState(old_value, property->LoadId(), PUSH_OUTPUT); + PrepareFrameState(old_value, property->LoadId(), kPushOutput); break; } case KEYED_PROPERTY: { Node* key = environment()->Top(); Node* object = environment()->Peek(1); old_value = NewNode(javascript()->LoadProperty(), object, key); - PrepareFrameState(old_value, property->LoadId(), PUSH_OUTPUT); + PrepareFrameState(old_value, property->LoadId(), kPushOutput); break; } } @@ -1100,7 +1102,7 @@ void AstGraphBuilder::VisitAssignment(Assignment* expr) { Node* right = environment()->Pop(); Node* left = environment()->Pop(); Node* value = BuildBinaryOp(left, right, expr->binary_op()); - PrepareFrameState(value, expr->binary_operation()->id(), PUSH_OUTPUT); + PrepareFrameState(value, expr->binary_operation()->id(), kPushOutput); environment()->Push(value); } else { VisitForValue(expr->value()); @@ -1217,7 +1219,7 @@ void AstGraphBuilder::VisitCall(Call* expr) { Node* key = environment()->Pop(); callee_value = NewNode(javascript()->LoadProperty(), object, key); } - PrepareFrameState(callee_value, property->LoadId(), PUSH_OUTPUT); + PrepareFrameState(callee_value, property->LoadId(), kPushOutput); receiver_value = environment()->Pop(); // Note that a PROPERTY_CALL requires the receiver to be wrapped into an // object for sloppy callees. This could also be modeled explicitly here, @@ -1303,7 +1305,7 @@ void AstGraphBuilder::VisitCallJSRuntime(CallRuntime* expr) { Node* callee_value = NewNode(javascript()->LoadNamed(unique), receiver_value); // TODO(jarin): Find/create a bailout id to deoptimize to (crankshaft // refuses to optimize functions with jsruntime calls). 
- PrepareFrameState(callee_value, BailoutId::None(), PUSH_OUTPUT); + PrepareFrameState(callee_value, BailoutId::None(), kPushOutput); environment()->Push(callee_value); environment()->Push(receiver_value); @@ -1385,7 +1387,7 @@ void AstGraphBuilder::VisitCountOperation(CountOperation* expr) { PrintableUnique name = MakeUnique(property->key()->AsLiteral()->AsPropertyName()); old_value = NewNode(javascript()->LoadNamed(name), object); - PrepareFrameState(old_value, property->LoadId(), PUSH_OUTPUT); + PrepareFrameState(old_value, property->LoadId(), kPushOutput); stack_depth = 1; break; } @@ -1395,7 +1397,7 @@ void AstGraphBuilder::VisitCountOperation(CountOperation* expr) { Node* key = environment()->Top(); Node* object = environment()->Peek(1); old_value = NewNode(javascript()->LoadProperty(), object, key); - PrepareFrameState(old_value, property->LoadId(), PUSH_OUTPUT); + PrepareFrameState(old_value, property->LoadId(), kPushOutput); stack_depth = 2; break; } @@ -1746,7 +1748,7 @@ Node* AstGraphBuilder::BuildVariableLoad(Variable* variable, PrintableUnique name = MakeUnique(variable->name()); Operator* op = javascript()->LoadNamed(name, contextual_mode); Node* node = NewNode(op, global); - PrepareFrameState(node, bailout_id, PUSH_OUTPUT); + PrepareFrameState(node, bailout_id, kPushOutput); return node; } case Variable::PARAMETER: @@ -2009,37 +2011,9 @@ void AstGraphBuilder::PrepareFrameState(Node* node, BailoutId ast_id, DCHECK(node->InputAt(frame_state_index)->op()->opcode() == IrOpcode::kDead); - Node* frame_state_node = environment()->Checkpoint(ast_id); + Node* frame_state_node = environment()->Checkpoint(ast_id, combine); node->ReplaceInput(frame_state_index, frame_state_node); } - - if (OperatorProperties::CanLazilyDeoptimize(node->op())) { - // The deopting node should have an outgoing control dependency. - DCHECK(environment()->GetControlDependency() == node); - - StructuredGraphBuilder::Environment* continuation_env = environment(); - // Create environment for the deoptimization block, and build the block. - StructuredGraphBuilder::Environment* deopt_env = - CopyEnvironment(continuation_env); - set_environment(deopt_env); - - if (combine == PUSH_OUTPUT) { - environment()->Push(node); - } - - NewNode(common()->LazyDeoptimization()); - - // TODO(jarin) If ast_id.IsNone(), perhaps we should generate an empty - // deopt block and make sure there is no patch entry for this (so - // that the deoptimizer dies when trying to deoptimize here). - Node* state_node = environment()->Checkpoint(ast_id); - Node* deoptimize_node = NewNode(common()->Deoptimize(), state_node); - UpdateControlDependencyToLeaveFunction(deoptimize_node); - - // Continue with the original environment. - set_environment(continuation_env); - NewNode(common()->Continuation()); - } } } diff --git a/src/compiler/ast-graph-builder.h b/src/compiler/ast-graph-builder.h index 05bc227..9bb02c5 100644 --- a/src/compiler/ast-graph-builder.h +++ b/src/compiler/ast-graph-builder.h @@ -172,16 +172,9 @@ class AstGraphBuilder : public StructuredGraphBuilder, public AstVisitor { // Dispatched from VisitForInStatement. void VisitForInAssignment(Expression* expr, Node* value); - // Flag that describes how to combine the current environment with - // the output of a node to obtain a framestate for lazy bailout. - enum OutputFrameStateCombine { - PUSH_OUTPUT, // Push the output on the expression stack. - IGNORE_OUTPUT // Use the frame state as-is. - }; - // Builds deoptimization for a given node. 
void PrepareFrameState(Node* node, BailoutId ast_id, - OutputFrameStateCombine combine = IGNORE_OUTPUT); + OutputFrameStateCombine combine = kIgnoreOutput); OutputFrameStateCombine StateCombineFromAstContext(); @@ -265,7 +258,7 @@ class AstGraphBuilder::Environment // Preserve a checkpoint of the environment for the IR graph. Any // further mutation of the environment will not affect checkpoints. - Node* Checkpoint(BailoutId ast_id); + Node* Checkpoint(BailoutId ast_id, OutputFrameStateCombine combine); private: void UpdateStateValues(Node** state_values, int offset, int count); @@ -288,8 +281,8 @@ class AstGraphBuilder::AstContext BASE_EMBEDDED { // Determines how to combine the frame state with the value // that is about to be plugged into this AstContext. - AstGraphBuilder::OutputFrameStateCombine GetStateCombine() { - return IsEffect() ? IGNORE_OUTPUT : PUSH_OUTPUT; + OutputFrameStateCombine GetStateCombine() { + return IsEffect() ? kIgnoreOutput : kPushOutput; } // Plug a node into this expression context. Call this function in tail diff --git a/src/compiler/code-generator.cc b/src/compiler/code-generator.cc index edfdaa7..8fbefe0 100644 --- a/src/compiler/code-generator.cc +++ b/src/compiler/code-generator.cc @@ -19,12 +19,10 @@ CodeGenerator::CodeGenerator(InstructionSequence* code) masm_(code->zone()->isolate(), NULL, 0), resolver_(this), safepoints_(code->zone()), - lazy_deoptimization_entries_(code->zone()), + deoptimization_points_(code->zone()), deoptimization_states_(code->zone()), deoptimization_literals_(code->zone()), - translations_(code->zone()) { - deoptimization_states_.resize(code->GetDeoptimizationEntryCount(), NULL); -} + translations_(code->zone()) {} Handle CodeGenerator::GenerateCode() { @@ -49,9 +47,10 @@ Handle CodeGenerator::GenerateCode() { AssembleInstruction(*i); } + EmitLazyDeoptimizationCallTable(); + FinishCode(masm()); - UpdateSafepointsWithDeoptimizationPc(); safepoints()->Emit(masm(), frame()->GetSpillSlotCount()); // TODO(titzer): what are the right code flags here? @@ -173,23 +172,23 @@ void CodeGenerator::AssembleGap(GapInstruction* instr) { } -void CodeGenerator::UpdateSafepointsWithDeoptimizationPc() { - int patch_count = static_cast(lazy_deoptimization_entries_.size()); - for (int i = 0; i < patch_count; ++i) { - LazyDeoptimizationEntry entry = lazy_deoptimization_entries_[i]; - // TODO(jarin) make sure that there is no code (other than nops) - // between the call position and the continuation position. 
-    safepoints()->SetDeoptimizationPc(entry.safepoint_id(),
-                                      entry.deoptimization()->pos());
+void CodeGenerator::EmitLazyDeoptimizationCallTable() {
+  // ZoneDeque<DeoptimizationPoint*>::iterator iter;
+  int i = 0;
+  for (ZoneDeque<DeoptimizationPoint*>::iterator
+           iter = deoptimization_points_.begin();
+       iter != deoptimization_points_.end(); iter++, i++) {
+    int pc_offset = masm()->pc_offset();
+    AssembleDeoptimizerCall((*iter)->lazy_state_id());
+    safepoints()->SetDeoptimizationPc((*iter)->safepoint(), pc_offset);
   }
 }


 void CodeGenerator::PopulateDeoptimizationData(Handle<Code> code_object) {
   CompilationInfo* info = linkage()->info();
-  int deopt_count = code()->GetDeoptimizationEntryCount();
-  int patch_count = static_cast<int>(lazy_deoptimization_entries_.size());
-  if (patch_count == 0 && deopt_count == 0) return;
+  int deopt_count = static_cast<int>(deoptimization_states_.size());
+  if (deopt_count == 0) return;

   Handle<DeoptimizationInputData> data =
       DeoptimizationInputData::New(isolate(), deopt_count, TENURED);
@@ -226,11 +225,11 @@ void CodeGenerator::PopulateDeoptimizationData(Handle<Code> code_object) {

   // Populate deoptimization entries.
   for (int i = 0; i < deopt_count; i++) {
-    FrameStateDescriptor* descriptor = code()->GetDeoptimizationEntry(i);
-    data->SetAstId(i, descriptor->bailout_id());
+    DeoptimizationState* deoptimization_state = deoptimization_states_[i];
+    data->SetAstId(i, deoptimization_state->bailout_id());
     CHECK_NE(NULL, deoptimization_states_[i]);
     data->SetTranslationIndex(
-        i, Smi::FromInt(deoptimization_states_[i]->translation_id_));
+        i, Smi::FromInt(deoptimization_states_[i]->translation_id()));
     data->SetArgumentsStackHeight(i, Smi::FromInt(0));
     data->SetPc(i, Smi::FromInt(-1));
   }
@@ -248,29 +247,33 @@ void CodeGenerator::AddSafepointAndDeopt(Instruction* instr) {
       instr->pointer_map(), Safepoint::kSimple, 0,
       needs_frame_state ? Safepoint::kLazyDeopt : Safepoint::kNoLazyDeopt);

-  if (flags & CallDescriptor::kLazyDeoptimization) {
-    RecordLazyDeoptimizationEntry(instr, safepoint_id);
-  }
-
   if (needs_frame_state) {
     // If the frame state is present, it starts at argument 1
     // (just after the code address).
     InstructionOperandConverter converter(this, instr);
     // Deoptimization info starts at argument 1
     int frame_state_offset = 1;
-    int deoptimization_id = BuildTranslation(instr, frame_state_offset);
+    FrameStateDescriptor* descriptor =
+        GetFrameStateDescriptor(instr, frame_state_offset);
+    int deopt_state_id =
+        BuildTranslation(instr, frame_state_offset, kIgnoreOutput);
+    int lazy_deopt_state_id = deopt_state_id;
+    if (descriptor->state_combine() != kIgnoreOutput) {
+      lazy_deopt_state_id = BuildTranslation(instr, frame_state_offset,
+                                             descriptor->state_combine());
+    }
+    deoptimization_points_.push_back(new (zone()) DeoptimizationPoint(
+        deopt_state_id, lazy_deopt_state_id, descriptor, safepoint_id));
 #if DEBUG
     // Make sure all the values live in stack slots or they are immediates.
     // (The values should not live in register because registers are clobbered
     // by calls.)
-    FrameStateDescriptor* descriptor =
-        code()->GetDeoptimizationEntry(deoptimization_id);
     for (int i = 0; i < descriptor->size(); i++) {
       InstructionOperand* op = instr->InputAt(frame_state_offset + 1 + i);
       CHECK(op->IsStackSlot() || op->IsImmediate());
     }
 #endif
-    safepoints()->RecordLazyDeoptimizationIndex(deoptimization_id);
+    safepoints()->RecordLazyDeoptimizationIndex(lazy_deopt_state_id);
   }

   if (flags & CallDescriptor::kNeedsNopAfterCall) {
@@ -279,27 +282,6 @@ void CodeGenerator::AddSafepointAndDeopt(Instruction* instr) {
 }


-void CodeGenerator::RecordLazyDeoptimizationEntry(Instruction* instr,
-                                                  Safepoint::Id safepoint_id) {
-  InstructionOperandConverter i(this, instr);
-
-  Label after_call;
-  masm()->bind(&after_call);
-
-  // The continuation and deoptimization are the last two inputs:
-  BasicBlock* cont_block =
-      i.InputBlock(static_cast<int>(instr->InputCount()) - 2);
-  BasicBlock* deopt_block =
-      i.InputBlock(static_cast<int>(instr->InputCount()) - 1);
-
-  Label* cont_label = code_->GetLabel(cont_block);
-  Label* deopt_label = code_->GetLabel(deopt_block);
-
-  lazy_deoptimization_entries_.push_back(LazyDeoptimizationEntry(
-      after_call.pos(), cont_label, deopt_label, safepoint_id));
-}
-
-
 int CodeGenerator::DefineDeoptimizationLiteral(Handle<Object> literal) {
   int result = static_cast<int>(deoptimization_literals_.size());
   for (unsigned i = 0; i < deoptimization_literals_.size(); ++i) {
@@ -310,29 +292,53 @@ int CodeGenerator::DefineDeoptimizationLiteral(Handle<Object> literal) {
 }


-int CodeGenerator::BuildTranslation(Instruction* instr,
-                                    int frame_state_offset) {
+FrameStateDescriptor* CodeGenerator::GetFrameStateDescriptor(
+    Instruction* instr, int frame_state_offset) {
   InstructionOperandConverter i(this, instr);
-  int deoptimization_id = i.InputInt32(frame_state_offset);
-  frame_state_offset++;
+  InstructionSequence::StateId state_id =
+      InstructionSequence::StateId::FromInt(i.InputInt32(frame_state_offset));
+  return code()->GetFrameStateDescriptor(state_id);
+}

-  // We should build translation only once.
-  DCHECK_EQ(NULL, deoptimization_states_[deoptimization_id]);

+int CodeGenerator::BuildTranslation(Instruction* instr, int frame_state_offset,
+                                    OutputFrameStateCombine state_combine) {
   FrameStateDescriptor* descriptor =
-      code()->GetDeoptimizationEntry(deoptimization_id);
+      GetFrameStateDescriptor(instr, frame_state_offset);
+  frame_state_offset++;
+
+  int height = descriptor->size() - descriptor->parameters_count();
+  switch (state_combine) {
+    case kPushOutput:
+      height++;
+      break;
+    case kIgnoreOutput:
+      break;
+  }
+
+  Translation translation(&translations_, 1, 1, zone());
   translation.BeginJSFrame(descriptor->bailout_id(),
-                           Translation::kSelfLiteralId,
-                           descriptor->size() - descriptor->parameters_count());
+                           Translation::kSelfLiteralId, height);
   for (int i = 0; i < descriptor->size(); i++) {
     AddTranslationForOperand(&translation, instr,
                              instr->InputAt(i + frame_state_offset));
   }
-  deoptimization_states_[deoptimization_id] =
-      new (zone()) DeoptimizationState(translation.index());
+  switch (state_combine) {
+    case kPushOutput:
+      DCHECK(instr->OutputCount() == 1);
+      AddTranslationForOperand(&translation, instr, instr->OutputAt(0));
+      break;
+    case kIgnoreOutput:
+      break;
+  }
+
+  int deoptimization_id = static_cast<int>(deoptimization_states_.size());
+
+  deoptimization_states_.push_back(new (zone()) DeoptimizationState(
+      descriptor->bailout_id(), translation.index()));

   return deoptimization_id;
 }
diff --git a/src/compiler/code-generator.h b/src/compiler/code-generator.h
index 469c435..84ec1d6 100644
--- a/src/compiler/code-generator.h
+++ b/src/compiler/code-generator.h
@@ -62,6 +62,8 @@ class CodeGenerator V8_FINAL : public GapResolver::Assembler {
   void AssembleArchBranch(Instruction* instr, FlagsCondition condition);
   void AssembleArchBoolean(Instruction* instr, FlagsCondition condition);

+  void AssembleDeoptimizerCall(int deoptimization_id);
+
   // Generates an architecture-specific, descriptor-specific prologue
   // to set up a stack frame.
   void AssemblePrologue();
@@ -82,43 +84,51 @@ class CodeGenerator V8_FINAL : public GapResolver::Assembler {
   // ===========================================================================
   // Deoptimization table construction
   void AddSafepointAndDeopt(Instruction* instr);
-  void UpdateSafepointsWithDeoptimizationPc();
-  void RecordLazyDeoptimizationEntry(Instruction* instr,
-                                     Safepoint::Id safepoint_id);
+  void EmitLazyDeoptimizationCallTable();
   void PopulateDeoptimizationData(Handle<Code> code);
   int DefineDeoptimizationLiteral(Handle<Object> literal);
-  int BuildTranslation(Instruction* instr, int frame_state_offset);
+  FrameStateDescriptor* GetFrameStateDescriptor(Instruction* instr,
+                                                int frame_state_offset);
+  int BuildTranslation(Instruction* instr, int frame_state_offset,
+                       OutputFrameStateCombine state_combine);
   void AddTranslationForOperand(Translation* translation, Instruction* instr,
                                 InstructionOperand* op);
   void AddNopForSmiCodeInlining();

   // ===========================================================================
-  class LazyDeoptimizationEntry V8_FINAL {
+  class DeoptimizationPoint : public ZoneObject {
    public:
-    LazyDeoptimizationEntry(int position_after_call, Label* continuation,
-                            Label* deoptimization, Safepoint::Id safepoint_id)
-        : position_after_call_(position_after_call),
-          continuation_(continuation),
-          deoptimization_(deoptimization),
-          safepoint_id_(safepoint_id) {}
-
-    int position_after_call() const { return position_after_call_; }
-    Label* continuation() const { return continuation_; }
-    Label* deoptimization() const { return deoptimization_; }
-    Safepoint::Id safepoint_id() const { return safepoint_id_; }
+    int state_id() const { return state_id_; }
+    int lazy_state_id() const { return lazy_state_id_; }
+    FrameStateDescriptor* descriptor() const { return descriptor_; }
+    Safepoint::Id safepoint() const { return safepoint_; }
+
+    DeoptimizationPoint(int state_id, int lazy_state_id,
+                        FrameStateDescriptor* descriptor,
+                        Safepoint::Id safepoint)
+        : state_id_(state_id),
+          lazy_state_id_(lazy_state_id),
+          descriptor_(descriptor),
+          safepoint_(safepoint) {}

    private:
-    int position_after_call_;
-    Label* continuation_;
-    Label* deoptimization_;
-    Safepoint::Id safepoint_id_;
+    int state_id_;
+    int lazy_state_id_;
+    FrameStateDescriptor* descriptor_;
+    Safepoint::Id safepoint_;
   };

   struct DeoptimizationState : ZoneObject {
-    int translation_id_;
+   public:
+    BailoutId bailout_id() const { return bailout_id_; }
+    int translation_id() const { return translation_id_; }

-    explicit DeoptimizationState(int translation_id)
-        : translation_id_(translation_id) {}
+    DeoptimizationState(BailoutId bailout_id, int translation_id)
+        : bailout_id_(bailout_id), translation_id_(translation_id) {}
+
+   private:
+    BailoutId bailout_id_;
+    int translation_id_;
   };

   InstructionSequence* code_;
@@ -127,7 +137,7 @@ class CodeGenerator V8_FINAL : public GapResolver::Assembler {
   MacroAssembler masm_;
   GapResolver resolver_;
   SafepointTableBuilder safepoints_;
-  ZoneDeque<LazyDeoptimizationEntry> lazy_deoptimization_entries_;
+  ZoneDeque<DeoptimizationPoint*> deoptimization_points_;
   ZoneDeque<DeoptimizationState*> deoptimization_states_;
   ZoneDeque<Handle<Object> > deoptimization_literals_;
   TranslationBuffer translations_;
diff --git a/src/compiler/common-operator.h b/src/compiler/common-operator.h
index d39cd3e..65528e1 100644
--- a/src/compiler/common-operator.h
+++ b/src/compiler/common-operator.h
@@ -44,6 +44,28 @@ class CallOperator : public Operator1<CallDescriptor*> {
   }
 };

+// Flag that describes how to combine the current environment with
+// the output of a node to obtain a framestate for lazy bailout.
+enum OutputFrameStateCombine {
+  kPushOutput,   // Push the output on the expression stack.
+  kIgnoreOutput  // Use the frame state as-is.
+};
+
+
+class FrameStateCallInfo {
+ public:
+  FrameStateCallInfo(BailoutId bailout_id,
+                     OutputFrameStateCombine state_combine)
+      : bailout_id_(bailout_id), frame_state_combine_(state_combine) {}
+
+  BailoutId bailout_id() const { return bailout_id_; }
+  OutputFrameStateCombine state_combine() const { return frame_state_combine_; }
+
+ private:
+  BailoutId bailout_id_;
+  OutputFrameStateCombine frame_state_combine_;
+};
+
 // Interface for building common operators that can be used at any level of IR,
 // including JavaScript, mid-level, and low-level.
 // TODO(titzer): Move the mnemonics into SimpleOperator and Operator1 classes.
@@ -67,13 +89,6 @@ class CommonOperatorBuilder {
   Operator* IfTrue() { CONTROL_OP(IfTrue, 0, 1); }
   Operator* IfFalse() { CONTROL_OP(IfFalse, 0, 1); }
   Operator* Throw() { CONTROL_OP(Throw, 1, 1); }
-  Operator* LazyDeoptimization() { CONTROL_OP(LazyDeoptimization, 0, 1); }
-  Operator* Continuation() { CONTROL_OP(Continuation, 0, 1); }
-
-  Operator* Deoptimize() {
-    return new (zone_)
-        ControlOperator(IrOpcode::kDeoptimize, 0, 1, 0, 1, "Deoptimize");
-  }

   Operator* Return() {
     return new (zone_) ControlOperator(IrOpcode::kReturn, 0, 1, 0, 1, "Return");
@@ -149,9 +164,10 @@ class CommonOperatorBuilder {
     return new (zone_) Operator1<int>(IrOpcode::kStateValues, Operator::kPure,
                                       arguments, 1, "StateValues", arguments);
   }
-  Operator* FrameState(BailoutId ast_id) {
-    return new (zone_) Operator1<BailoutId>(
-        IrOpcode::kFrameState, Operator::kPure, 3, 1, "FrameState", ast_id);
+  Operator* FrameState(BailoutId bailout_id, OutputFrameStateCombine combine) {
+    return new (zone_) Operator1<FrameStateCallInfo>(
+        IrOpcode::kFrameState, Operator::kPure, 4, 1, "FrameState",
+        FrameStateCallInfo(bailout_id, combine));
   }
   Operator* Call(CallDescriptor* descriptor) {
     return new (zone_) CallOperator(descriptor, "Call");
   }
diff --git a/src/compiler/graph-builder.h b/src/compiler/graph-builder.h
index dfa79b8..398939b 100644
--- a/src/compiler/graph-builder.h
+++ b/src/compiler/graph-builder.h
@@ -101,6 +101,7 @@ class StructuredGraphBuilder : public GraphBuilder {

  protected:
   class Environment;
+  friend class Environment;
   friend class ControlBuilder;

   // The following method creates a new node having the specified operator and
@@ -202,6 +203,8 @@ class StructuredGraphBuilder::Environment : public ZoneObject {
     return builder()->CopyEnvironment(this);
   }

+  Node* GetContext() { return builder_->current_context(); }
+
  protected:
   // TODO(mstarzinger): Use phase-local zone instead!
Zone* zone() const { return graph()->zone(); } diff --git a/src/compiler/ia32/code-generator-ia32.cc b/src/compiler/ia32/code-generator-ia32.cc index 8aef773..c05c6ea 100644 --- a/src/compiler/ia32/code-generator-ia32.cc +++ b/src/compiler/ia32/code-generator-ia32.cc @@ -139,14 +139,6 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) { AddSafepointAndDeopt(instr); break; } - case kArchDeoptimize: { - int deoptimization_id = BuildTranslation(instr, 0); - - Address deopt_entry = Deoptimizer::GetDeoptimizationEntry( - isolate(), deoptimization_id, Deoptimizer::LAZY); - __ call(deopt_entry, RelocInfo::RUNTIME_ENTRY); - break; - } case kArchDrop: { int words = MiscField::decode(instr->opcode()); __ add(esp, Immediate(kPointerSize * words)); @@ -582,6 +574,13 @@ void CodeGenerator::AssembleArchBoolean(Instruction* instr, } +void CodeGenerator::AssembleDeoptimizerCall(int deoptimization_id) { + Address deopt_entry = Deoptimizer::GetDeoptimizationEntry( + isolate(), deoptimization_id, Deoptimizer::LAZY); + __ call(deopt_entry, RelocInfo::RUNTIME_ENTRY); +} + + // The calling convention for JSFunctions on IA32 passes arguments on the // stack and the JSFunction and context in EDI and ESI, respectively, thus // the steps of the call look as follows: diff --git a/src/compiler/ia32/instruction-selector-ia32.cc b/src/compiler/ia32/instruction-selector-ia32.cc index b00688b..44ae0ea 100644 --- a/src/compiler/ia32/instruction-selector-ia32.cc +++ b/src/compiler/ia32/instruction-selector-ia32.cc @@ -516,7 +516,7 @@ void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation, CallBuffer buffer(zone(), descriptor, frame_state_descriptor); // Compute InstructionOperands for inputs and outputs. - InitializeCallBuffer(call, &buffer, true, true, continuation, deoptimization); + InitializeCallBuffer(call, &buffer, true, true); // Push any stack arguments. for (NodeVectorRIter input = buffer.pushed_nodes.rbegin(); diff --git a/src/compiler/instruction-codes.h b/src/compiler/instruction-codes.h index 35ba436..669316a 100644 --- a/src/compiler/instruction-codes.h +++ b/src/compiler/instruction-codes.h @@ -32,7 +32,6 @@ namespace compiler { V(ArchCallAddress) \ V(ArchCallCodeObject) \ V(ArchCallJSFunction) \ - V(ArchDeoptimize) \ V(ArchDrop) \ V(ArchJmp) \ V(ArchNop) \ diff --git a/src/compiler/instruction-selector-impl.h b/src/compiler/instruction-selector-impl.h index 276bfa9..c9c1571 100644 --- a/src/compiler/instruction-selector-impl.h +++ b/src/compiler/instruction-selector-impl.h @@ -347,10 +347,6 @@ struct CallBuffer { ? 0 : (frame_state_descriptor->size() + 1); } - - int control_count() const { - return descriptor->CanLazilyDeoptimize() ? 2 : 0; - } }; } // namespace compiler diff --git a/src/compiler/instruction-selector.cc b/src/compiler/instruction-selector.cc index 733624e..0e3f341 100644 --- a/src/compiler/instruction-selector.cc +++ b/src/compiler/instruction-selector.cc @@ -271,8 +271,7 @@ CallBuffer::CallBuffer(Zone* zone, CallDescriptor* d, output_nodes.reserve(d->ReturnCount()); outputs.reserve(d->ReturnCount()); pushed_nodes.reserve(input_count()); - instruction_args.reserve(input_count() + control_count() + - frame_state_value_count()); + instruction_args.reserve(input_count() + frame_state_value_count()); } @@ -280,9 +279,7 @@ CallBuffer::CallBuffer(Zone* zone, CallDescriptor* d, // InstructionSelector::VisitCall platform independent instead. 
void InstructionSelector::InitializeCallBuffer(Node* call, CallBuffer* buffer, bool call_code_immediate, - bool call_address_immediate, - BasicBlock* cont_node, - BasicBlock* deopt_node) { + bool call_address_immediate) { OperandGenerator g(this); DCHECK_EQ(call->op()->OutputCount(), buffer->descriptor->ReturnCount()); DCHECK_EQ(OperatorProperties::GetValueInputCount(call->op()), @@ -338,9 +335,9 @@ void InstructionSelector::InitializeCallBuffer(Node* call, CallBuffer* buffer, // arg 1 : deoptimization id. // arg 2 - arg (n + 1) : value inputs to the frame state. if (buffer->frame_state_descriptor != NULL) { - int deoptimization_id = - sequence()->AddDeoptimizationEntry(buffer->frame_state_descriptor); - buffer->instruction_args.push_back(g.TempImmediate(deoptimization_id)); + InstructionSequence::StateId state_id = + sequence()->AddFrameStateDescriptor(buffer->frame_state_descriptor); + buffer->instruction_args.push_back(g.TempImmediate(state_id.ToInt())); Node* frame_state = call->InputAt(buffer->descriptor->InputCount()); AddFrameStateInputs(frame_state, &buffer->instruction_args, @@ -377,22 +374,9 @@ void InstructionSelector::InitializeCallBuffer(Node* call, CallBuffer* buffer, } } CHECK_EQ(pushed_count, static_cast(buffer->pushed_nodes.size())); - - // If the call can deoptimize, we add the continuation and deoptimization - // block labels. - if (buffer->descriptor->CanLazilyDeoptimize()) { - DCHECK(cont_node != NULL); - DCHECK(deopt_node != NULL); - buffer->instruction_args.push_back(g.Label(cont_node)); - buffer->instruction_args.push_back(g.Label(deopt_node)); - } else { - DCHECK(cont_node == NULL); - DCHECK(deopt_node == NULL); - } - DCHECK(static_cast(input_count) == - (buffer->instruction_args.size() - buffer->control_count() + - buffer->pushed_nodes.size() - buffer->frame_state_value_count())); + (buffer->instruction_args.size() + buffer->pushed_nodes.size() - + buffer->frame_state_value_count())); } @@ -465,14 +449,6 @@ void InstructionSelector::VisitControl(BasicBlock* block) { } case BasicBlockData::kThrow: return VisitThrow(input); - case BasicBlockData::kDeoptimize: - return VisitDeoptimize(input); - case BasicBlockData::kCall: { - BasicBlock* deoptimization = block->SuccessorAt(0); - BasicBlock* continuation = block->SuccessorAt(1); - VisitCall(input, continuation, deoptimization); - break; - } case BasicBlockData::kNone: { // TODO(titzer): exit block doesn't have control. DCHECK(input == NULL); @@ -503,8 +479,6 @@ void InstructionSelector::VisitNode(Node* node) { case IrOpcode::kIfFalse: case IrOpcode::kEffectPhi: case IrOpcode::kMerge: - case IrOpcode::kLazyDeoptimization: - case IrOpcode::kContinuation: // No code needed for these graph artifacts. 
return; case IrOpcode::kFinish: @@ -1028,13 +1002,13 @@ void InstructionSelector::VisitThrow(Node* value) { FrameStateDescriptor* InstructionSelector::GetFrameStateDescriptor( Node* state) { DCHECK(state->op()->opcode() == IrOpcode::kFrameState); - BailoutId ast_id = OpParameter(state); + FrameStateCallInfo state_info = OpParameter(state); Node* parameters = state->InputAt(0); Node* locals = state->InputAt(1); Node* stack = state->InputAt(2); return new (instruction_zone()) - FrameStateDescriptor(ast_id, OpParameter(parameters), + FrameStateDescriptor(state_info, OpParameter(parameters), OpParameter(locals), OpParameter(stack)); } @@ -1060,6 +1034,11 @@ void InstructionSelector::AddFrameStateInputs( Node* parameters = state->InputAt(0); Node* locals = state->InputAt(1); Node* stack = state->InputAt(2); + Node* context = state->InputAt(3); + + DCHECK_EQ(IrOpcode::kStateValues, parameters->op()->opcode()); + DCHECK_EQ(IrOpcode::kStateValues, locals->op()->opcode()); + DCHECK_EQ(IrOpcode::kStateValues, stack->op()->opcode()); DCHECK_EQ(descriptor->parameters_count(), parameters->InputCount()); DCHECK_EQ(descriptor->locals_count(), locals->InputCount()); @@ -1069,6 +1048,7 @@ void InstructionSelector::AddFrameStateInputs( for (int i = 0; i < descriptor->parameters_count(); i++) { inputs->push_back(UseOrImmediate(&g, parameters->InputAt(i))); } + inputs->push_back(UseOrImmediate(&g, context)); for (int i = 0; i < descriptor->locals_count(); i++) { inputs->push_back(UseOrImmediate(&g, locals->InputAt(i))); } @@ -1078,26 +1058,6 @@ void InstructionSelector::AddFrameStateInputs( } -void InstructionSelector::VisitDeoptimize(Node* deopt) { - DCHECK(deopt->op()->opcode() == IrOpcode::kDeoptimize); - Node* state = deopt->InputAt(0); - FrameStateDescriptor* descriptor = GetFrameStateDescriptor(state); - int deoptimization_id = sequence()->AddDeoptimizationEntry(descriptor); - - InstructionOperandVector inputs(zone()); - inputs.reserve(descriptor->size() + 1); - - OperandGenerator g(this); - inputs.push_back(g.TempImmediate(deoptimization_id)); - - AddFrameStateInputs(state, &inputs, descriptor); - - DCHECK_EQ(descriptor->size() + 1, inputs.size()); - - Emit(kArchDeoptimize, 0, NULL, inputs.size(), &inputs.front(), 0, NULL); -} - - #if !V8_TURBOFAN_BACKEND #define DECLARE_UNIMPLEMENTED_SELECTOR(x) \ diff --git a/src/compiler/instruction-selector.h b/src/compiler/instruction-selector.h index 365ff0d..8355513 100644 --- a/src/compiler/instruction-selector.h +++ b/src/compiler/instruction-selector.h @@ -139,8 +139,7 @@ class InstructionSelector V8_FINAL { // {call_address_immediate} to generate immediate operands to address calls. 
void InitializeCallBuffer(Node* call, CallBuffer* buffer, bool call_code_immediate, - bool call_address_immediate, BasicBlock* cont_node, - BasicBlock* deopt_node); + bool call_address_immediate); FrameStateDescriptor* GetFrameStateDescriptor(Node* node); void AddFrameStateInputs(Node* state, InstructionOperandVector* inputs, diff --git a/src/compiler/instruction.cc b/src/compiler/instruction.cc index dd80cad..9ab81b6 100644 --- a/src/compiler/instruction.cc +++ b/src/compiler/instruction.cc @@ -394,20 +394,20 @@ void InstructionSequence::AddGapMove(int index, InstructionOperand* from, } -int InstructionSequence::AddDeoptimizationEntry( +InstructionSequence::StateId InstructionSequence::AddFrameStateDescriptor( FrameStateDescriptor* descriptor) { int deoptimization_id = static_cast(deoptimization_entries_.size()); deoptimization_entries_.push_back(descriptor); - return deoptimization_id; + return StateId::FromInt(deoptimization_id); } -FrameStateDescriptor* InstructionSequence::GetDeoptimizationEntry( - int deoptimization_id) { - return deoptimization_entries_[deoptimization_id]; +FrameStateDescriptor* InstructionSequence::GetFrameStateDescriptor( + InstructionSequence::StateId state_id) { + return deoptimization_entries_[state_id.ToInt()]; } -int InstructionSequence::GetDeoptimizationEntryCount() { +int InstructionSequence::GetFrameStateDescriptorCount() { return static_cast(deoptimization_entries_.size()); } diff --git a/src/compiler/instruction.h b/src/compiler/instruction.h index 98c85bc..8cd8ab9 100644 --- a/src/compiler/instruction.h +++ b/src/compiler/instruction.h @@ -701,22 +701,28 @@ class Constant V8_FINAL { class FrameStateDescriptor : public ZoneObject { public: - FrameStateDescriptor(BailoutId bailout_id, int parameters_count, - int locals_count, int stack_count) - : bailout_id_(bailout_id), + FrameStateDescriptor(const FrameStateCallInfo& state_info, + int parameters_count, int locals_count, int stack_count) + : bailout_id_(state_info.bailout_id()), + frame_state_combine_(state_info.state_combine()), parameters_count_(parameters_count), locals_count_(locals_count), stack_count_(stack_count) {} BailoutId bailout_id() const { return bailout_id_; } + OutputFrameStateCombine state_combine() const { return frame_state_combine_; } int parameters_count() { return parameters_count_; } int locals_count() { return locals_count_; } int stack_count() { return stack_count_; } - int size() { return parameters_count_ + locals_count_ + stack_count_; } + int size() { + return parameters_count_ + locals_count_ + stack_count_ + + 1; // Includes context. 
+ } private: BailoutId bailout_id_; + OutputFrameStateCombine frame_state_combine_; int parameters_count_; int locals_count_; int stack_count_; @@ -838,9 +844,19 @@ class InstructionSequence V8_FINAL { return immediates_[index]; } - int AddDeoptimizationEntry(FrameStateDescriptor* descriptor); - FrameStateDescriptor* GetDeoptimizationEntry(int deoptimization_id); - int GetDeoptimizationEntryCount(); + class StateId { + public: + static StateId FromInt(int id) { return StateId(id); } + int ToInt() const { return id_; } + + private: + explicit StateId(int id) : id_(id) {} + int id_; + }; + + StateId AddFrameStateDescriptor(FrameStateDescriptor* descriptor); + FrameStateDescriptor* GetFrameStateDescriptor(StateId deoptimization_id); + int GetFrameStateDescriptorCount(); private: friend OStream& operator<<(OStream& os, const InstructionSequence& code); diff --git a/src/compiler/js-generic-lowering.cc b/src/compiler/js-generic-lowering.cc index f83b244..a2253bf 100644 --- a/src/compiler/js-generic-lowering.cc +++ b/src/compiler/js-generic-lowering.cc @@ -289,9 +289,6 @@ REPLACE_UNIMPLEMENTED(JSDebugger) static CallDescriptor::Flags FlagsForNode(Node* node) { CallDescriptor::Flags result = CallDescriptor::kNoFlags; - if (OperatorProperties::CanLazilyDeoptimize(node->op())) { - result |= CallDescriptor::kLazyDeoptimization; - } if (OperatorProperties::HasFrameStateInput(node->op())) { result |= CallDescriptor::kNeedsFrameState; } diff --git a/src/compiler/linkage-impl.h b/src/compiler/linkage-impl.h index b5103c5..88cf125 100644 --- a/src/compiler/linkage-impl.h +++ b/src/compiler/linkage-impl.h @@ -63,7 +63,7 @@ class LinkageHelper { locations, // locations Operator::kNoProperties, // properties kNoCalleeSaved, // callee-saved registers - CallDescriptor::kLazyDeoptimization); // flags + CallDescriptor::kNeedsFrameState); // flags } diff --git a/src/compiler/linkage.cc b/src/compiler/linkage.cc index 0191cc4..4bee176 100644 --- a/src/compiler/linkage.cc +++ b/src/compiler/linkage.cc @@ -35,7 +35,7 @@ OStream& operator<<(OStream& os, const CallDescriptor& d) { // TODO(svenpanne) Output properties etc. and be less cryptic. return os << d.kind() << ":" << d.debug_name() << ":r" << d.ReturnCount() << "p" << d.ParameterCount() << "i" << d.InputCount() << "f" - << d.FrameStateCount() << (d.CanLazilyDeoptimize() ? "deopt" : ""); + << d.FrameStateCount(); } diff --git a/src/compiler/linkage.h b/src/compiler/linkage.h index 0b74e55..9f3d834 100644 --- a/src/compiler/linkage.h +++ b/src/compiler/linkage.h @@ -45,10 +45,9 @@ class CallDescriptor V8_FINAL : public ZoneObject { enum Flag { // TODO(jarin) kLazyDeoptimization and kNeedsFrameState should be unified. 
kNoFlags = 0u, - kLazyDeoptimization = 1u << 0, - kNeedsFrameState = 1u << 1, - kPatchableCallSite = 1u << 2, - kNeedsNopAfterCall = 1u << 3, + kNeedsFrameState = 1u << 0, + kPatchableCallSite = 1u << 1, + kNeedsNopAfterCall = 1u << 2, kPatchableCallSiteWithNop = kPatchableCallSite | kNeedsNopAfterCall }; DEFINE_FLAGS(Flags, Flag); @@ -85,7 +84,6 @@ class CallDescriptor V8_FINAL : public ZoneObject { Flags flags() const { return flags_; } - bool CanLazilyDeoptimize() const { return flags() & kLazyDeoptimization; } bool NeedsFrameState() const { return flags() & kNeedsFrameState; } LinkageLocation GetReturnLocation(int index) { diff --git a/src/compiler/opcodes.h b/src/compiler/opcodes.h index c89134f..c4b7dbf 100644 --- a/src/compiler/opcodes.h +++ b/src/compiler/opcodes.h @@ -16,10 +16,7 @@ V(IfFalse) \ V(Merge) \ V(Return) \ - V(Throw) \ - V(Continuation) \ - V(LazyDeoptimization) \ - V(Deoptimize) + V(Throw) // Opcodes for common operators. #define LEAF_OP_LIST(V) \ diff --git a/src/compiler/operator-properties-inl.h b/src/compiler/operator-properties-inl.h index 9d8cc04..374d9ba 100644 --- a/src/compiler/operator-properties-inl.h +++ b/src/compiler/operator-properties-inl.h @@ -38,6 +38,7 @@ inline bool OperatorProperties::HasFrameStateInput(Operator* op) { switch (op->opcode()) { case IrOpcode::kJSCallFunction: + case IrOpcode::kJSCallConstruct: return true; case IrOpcode::kJSCallRuntime: { Runtime::FunctionId function = @@ -57,6 +58,24 @@ inline bool OperatorProperties::HasFrameStateInput(Operator* op) { UNREACHABLE(); } + // Binary operations + case IrOpcode::kJSBitwiseOr: + case IrOpcode::kJSBitwiseXor: + case IrOpcode::kJSBitwiseAnd: + case IrOpcode::kJSShiftLeft: + case IrOpcode::kJSShiftRight: + case IrOpcode::kJSShiftRightLogical: + case IrOpcode::kJSAdd: + case IrOpcode::kJSSubtract: + case IrOpcode::kJSMultiply: + case IrOpcode::kJSDivide: + case IrOpcode::kJSModulus: + case IrOpcode::kJSLoadProperty: + case IrOpcode::kJSStoreProperty: + case IrOpcode::kJSLoadNamed: + case IrOpcode::kJSStoreNamed: + return true; + default: return false; } @@ -95,10 +114,6 @@ inline int OperatorProperties::GetControlInputCount(Operator* op) { #undef OPCODE_CASE return static_cast(op)->ControlInputCount(); default: - // If a node can lazily deoptimize, it needs control dependency. - if (CanLazilyDeoptimize(op)) { - return 1; - } // Operators that have write effects must have a control // dependency. Effect dependencies only ensure the correct order of // write/read operations without consideration of control flow. Without an @@ -131,8 +146,7 @@ inline bool OperatorProperties::HasEffectOutput(Operator* op) { inline bool OperatorProperties::HasControlOutput(Operator* op) { IrOpcode::Value opcode = static_cast(op->opcode()); - return (opcode != IrOpcode::kEnd && IrOpcode::IsControlOpcode(opcode)) || - CanLazilyDeoptimize(op); + return (opcode != IrOpcode::kEnd && IrOpcode::IsControlOpcode(opcode)); } @@ -158,66 +172,6 @@ inline bool OperatorProperties::IsBasicBlockBegin(Operator* op) { opcode == IrOpcode::kIfFalse; } -inline bool OperatorProperties::CanLazilyDeoptimize(Operator* op) { - // TODO(jarin) This function allows turning on lazy deoptimization - // incrementally. It will change as we turn on lazy deopt for - // more nodes. 
- - if (!FLAG_turbo_deoptimization) { - return false; - } - - switch (op->opcode()) { - case IrOpcode::kCall: { - CallOperator* call_op = reinterpret_cast(op); - CallDescriptor* descriptor = call_op->parameter(); - return descriptor->CanLazilyDeoptimize(); - } - case IrOpcode::kJSCallRuntime: { - Runtime::FunctionId function = - reinterpret_cast*>(op)->parameter(); - // TODO(jarin) At the moment, we only support lazy deoptimization for - // a few chosen runtime functions. - switch (function) { - case Runtime::kDebugBreak: - case Runtime::kDeoptimizeFunction: - case Runtime::kSetScriptBreakPoint: - case Runtime::kDebugGetLoadedScripts: - case Runtime::kStackGuard: - return true; - default: - return false; - } - UNREACHABLE(); - } - - // JS function calls - case IrOpcode::kJSCallFunction: - case IrOpcode::kJSCallConstruct: - - // Binary operations - case IrOpcode::kJSBitwiseOr: - case IrOpcode::kJSBitwiseXor: - case IrOpcode::kJSBitwiseAnd: - case IrOpcode::kJSShiftLeft: - case IrOpcode::kJSShiftRight: - case IrOpcode::kJSShiftRightLogical: - case IrOpcode::kJSAdd: - case IrOpcode::kJSSubtract: - case IrOpcode::kJSMultiply: - case IrOpcode::kJSDivide: - case IrOpcode::kJSModulus: - case IrOpcode::kJSLoadProperty: - case IrOpcode::kJSStoreProperty: - case IrOpcode::kJSLoadNamed: - case IrOpcode::kJSStoreNamed: - return true; - - default: - return false; - } - return false; -} } } } // namespace v8::internal::compiler diff --git a/src/compiler/operator-properties.h b/src/compiler/operator-properties.h index 19491d5..0187193 100644 --- a/src/compiler/operator-properties.h +++ b/src/compiler/operator-properties.h @@ -37,7 +37,6 @@ class OperatorProperties { static inline int GetControlOutputCount(Operator* op); static inline bool IsBasicBlockBegin(Operator* op); - static inline bool CanLazilyDeoptimize(Operator* op); }; } } diff --git a/src/compiler/raw-machine-assembler.cc b/src/compiler/raw-machine-assembler.cc index 18f86db..3b4a9ac 100644 --- a/src/compiler/raw-machine-assembler.cc +++ b/src/compiler/raw-machine-assembler.cc @@ -76,17 +76,8 @@ void RawMachineAssembler::Return(Node* value) { } -void RawMachineAssembler::Deoptimize(Node* state) { - Node* deopt = graph()->NewNode(common()->Deoptimize(), state); - schedule()->AddDeoptimize(CurrentBlock(), deopt); - current_block_ = NULL; -} - - Node* RawMachineAssembler::CallFunctionStub0(Node* function, Node* receiver, Node* context, Node* frame_state, - Label* continuation, - Label* deoptimization, CallFunctionFlags flags) { CallFunctionStub stub(isolate(), 0, flags); CodeStubInterfaceDescriptor* d = isolate()->code_stub_interface_descriptor( @@ -94,35 +85,29 @@ Node* RawMachineAssembler::CallFunctionStub0(Node* function, Node* receiver, stub.InitializeInterfaceDescriptor(d); CallDescriptor* desc = Linkage::GetStubCallDescriptor( - d, 1, - CallDescriptor::kLazyDeoptimization | CallDescriptor::kNeedsFrameState, - zone()); + d, 1, CallDescriptor::kNeedsFrameState, zone()); Node* stub_code = HeapConstant(stub.GetCode()); Node* call = graph()->NewNode(common()->Call(desc), stub_code, function, receiver, context, frame_state); - schedule()->AddCall(CurrentBlock(), call, Use(continuation), - Use(deoptimization)); - current_block_ = NULL; + schedule()->AddNode(CurrentBlock(), call); return call; } Node* RawMachineAssembler::CallJS0(Node* function, Node* receiver, - Label* continuation, Label* deoptimization) { + Node* frame_state) { CallDescriptor* descriptor = Linkage::GetJSCallDescriptor(1, zone()); - Node* call = 
graph()->NewNode(common()->Call(descriptor), function, receiver); - schedule()->AddCall(CurrentBlock(), call, Use(continuation), - Use(deoptimization)); - current_block_ = NULL; + Node* call = graph()->NewNode(common()->Call(descriptor), function, receiver, + frame_state); + schedule()->AddNode(CurrentBlock(), call); return call; } Node* RawMachineAssembler::CallRuntime1(Runtime::FunctionId function, - Node* arg0, Label* continuation, - Label* deoptimization) { + Node* arg0, Node* frame_state) { CallDescriptor* descriptor = Linkage::GetRuntimeCallDescriptor( - function, 1, Operator::kNoProperties, CallDescriptor::kLazyDeoptimization, + function, 1, Operator::kNoProperties, CallDescriptor::kNeedsFrameState, zone()); Node* centry = HeapConstant(CEntryStub(isolate(), 1).GetCode()); @@ -132,10 +117,8 @@ Node* RawMachineAssembler::CallRuntime1(Runtime::FunctionId function, Node* context = Parameter(1); Node* call = graph()->NewNode(common()->Call(descriptor), centry, arg0, ref, - arity, context); - schedule()->AddCall(CurrentBlock(), call, Use(continuation), - Use(deoptimization)); - current_block_ = NULL; + arity, context, frame_state); + schedule()->AddNode(CurrentBlock(), call); return call; } diff --git a/src/compiler/raw-machine-assembler.h b/src/compiler/raw-machine-assembler.h index 095c892..04e0fee 100644 --- a/src/compiler/raw-machine-assembler.h +++ b/src/compiler/raw-machine-assembler.h @@ -73,14 +73,12 @@ class RawMachineAssembler : public GraphBuilder, void Branch(Node* condition, Label* true_val, Label* false_val); // Call through CallFunctionStub with lazy deopt and frame-state. Node* CallFunctionStub0(Node* function, Node* receiver, Node* context, - Node* frame_state, Label* continuation, - Label* deoptimization, CallFunctionFlags flags); + Node* frame_state, CallFunctionFlags flags); // Call to a JS function with zero parameters. - Node* CallJS0(Node* function, Node* receiver, Label* continuation, - Label* deoptimization); + Node* CallJS0(Node* function, Node* receiver, Node* frame_state); // Call to a runtime function with zero parameters. Node* CallRuntime1(Runtime::FunctionId function, Node* arg0, - Label* continuation, Label* deoptimization); + Node* frame_state); void Return(Node* value); void Bind(Label* label); void Deoptimize(Node* state); diff --git a/src/compiler/schedule.cc b/src/compiler/schedule.cc index 6476676..a3b5ed3 100644 --- a/src/compiler/schedule.cc +++ b/src/compiler/schedule.cc @@ -24,10 +24,6 @@ OStream& operator<<(OStream& os, const BasicBlockData::Control& c) { return os << "return"; case BasicBlockData::kThrow: return os << "throw"; - case BasicBlockData::kCall: - return os << "call"; - case BasicBlockData::kDeoptimize: - return os << "deoptimize"; } UNREACHABLE(); return os; diff --git a/src/compiler/schedule.h b/src/compiler/schedule.h index 13a8ba4..30432d7 100644 --- a/src/compiler/schedule.h +++ b/src/compiler/schedule.h @@ -30,13 +30,11 @@ class BasicBlockData { public: // Possible control nodes that can end a block. enum Control { - kNone, // Control not initialized yet. - kGoto, // Goto a single successor block. - kBranch, // Branch if true to first successor, otherwise second. - kReturn, // Return a value from this method. - kThrow, // Throw an exception. - kCall, // Call to a possibly deoptimizing or throwing function. - kDeoptimize // Deoptimize. + kNone, // Control not initialized yet. + kGoto, // Goto a single successor block. + kBranch, // Branch if true to first successor, otherwise second. 
+ kReturn, // Return a value from this method. + kThrow // Throw an exception. }; int32_t rpo_number_; // special RPO number of the block. @@ -234,20 +232,6 @@ class Schedule : public GenericGraph { AddSuccessor(block, succ); } - // BasicBlock building: add a (branching) call at the end of {block}. - void AddCall(BasicBlock* block, Node* call, BasicBlock* cont_block, - BasicBlock* deopt_block) { - DCHECK(block->control_ == BasicBlock::kNone); - DCHECK(call->opcode() == IrOpcode::kCall); - block->control_ = BasicBlock::kCall; - // Insert the deopt block first so that the RPO order builder picks - // it first (and thus it ends up late in the RPO order). - AddSuccessor(block, deopt_block); - AddSuccessor(block, cont_block); - SetControlInput(block, call); - SetBlockForNode(block, call); - } - // BasicBlock building: add a branch at the end of {block}. void AddBranch(BasicBlock* block, Node* branch, BasicBlock* tblock, BasicBlock* fblock) { @@ -283,15 +267,6 @@ class Schedule : public GenericGraph { if (block != end()) AddSuccessor(block, end()); } - // BasicBlock building: add a deopt at the end of {block}. - void AddDeoptimize(BasicBlock* block, Node* state) { - DCHECK(block->control_ == BasicBlock::kNone); - block->control_ = BasicBlock::kDeoptimize; - SetControlInput(block, state); - block->deferred_ = true; // By default, consider deopts the slow path. - if (block != end()) AddSuccessor(block, end()); - } - friend class Scheduler; friend class CodeGenerator; diff --git a/src/compiler/scheduler.cc b/src/compiler/scheduler.cc index bdae2df..cc3eae1 100644 --- a/src/compiler/scheduler.cc +++ b/src/compiler/scheduler.cc @@ -95,12 +95,6 @@ class CFGBuilder { case IrOpcode::kBranch: BuildBlocksForSuccessors(node, IrOpcode::kIfTrue, IrOpcode::kIfFalse); break; - case IrOpcode::kCall: - if (OperatorProperties::CanLazilyDeoptimize(node->op())) { - BuildBlocksForSuccessors(node, IrOpcode::kContinuation, - IrOpcode::kLazyDeoptimization); - } - break; default: break; } @@ -116,15 +110,6 @@ class CFGBuilder { scheduler_->schedule_root_nodes_.push_back(node); ConnectBranch(node); break; - case IrOpcode::kDeoptimize: - scheduler_->schedule_root_nodes_.push_back(node); - ConnectDeoptimize(node); - case IrOpcode::kCall: - if (OperatorProperties::CanLazilyDeoptimize(node->op())) { - scheduler_->schedule_root_nodes_.push_back(node); - ConnectCall(node); - } - break; case IrOpcode::kReturn: scheduler_->schedule_root_nodes_.push_back(node); ConnectReturn(node); @@ -152,7 +137,7 @@ class CFGBuilder { } // Collect the branch-related projections from a node, such as IfTrue, - // IfFalse, Continuation, and LazyDeoptimization. + // IfFalse. 
// TODO(titzer): consider moving this to node.h void CollectSuccessorProjections(Node* node, Node** buffer, IrOpcode::Value true_opcode, @@ -206,21 +191,13 @@ class CFGBuilder { for (InputIter j = merge->inputs().begin(); j != merge->inputs().end(); ++j) { BasicBlock* predecessor_block = schedule_->block(*j); - if ((*j)->opcode() != IrOpcode::kReturn && - (*j)->opcode() != IrOpcode::kDeoptimize) { + if ((*j)->opcode() != IrOpcode::kReturn) { TraceConnect(merge, predecessor_block, block); schedule_->AddGoto(predecessor_block, block); } } } - void ConnectDeoptimize(Node* deopt) { - Node* deopt_block_node = NodeProperties::GetControlInput(deopt); - BasicBlock* deopt_block = schedule_->block(deopt_block_node); - TraceConnect(deopt, deopt_block, NULL); - schedule_->AddDeoptimize(deopt_block, deopt); - } - void ConnectReturn(Node* ret) { Node* return_block_node = NodeProperties::GetControlInput(ret); BasicBlock* return_block = schedule_->block(return_block_node); @@ -228,21 +205,6 @@ class CFGBuilder { schedule_->AddReturn(return_block, ret); } - void ConnectCall(Node* call) { - Node* call_block_node = NodeProperties::GetControlInput(call); - BasicBlock* call_block = schedule_->block(call_block_node); - - BasicBlock* successor_blocks[2]; - CollectSuccessorBlocks(call, successor_blocks, IrOpcode::kContinuation, - IrOpcode::kLazyDeoptimization); - - TraceConnect(call, call_block, successor_blocks[0]); - TraceConnect(call, call_block, successor_blocks[1]); - - schedule_->AddCall(call_block, call, successor_blocks[0], - successor_blocks[1]); - } - void TraceConnect(Node* node, BasicBlock* block, BasicBlock* succ) { DCHECK_NE(NULL, block); if (succ == NULL) { diff --git a/src/compiler/verifier.cc b/src/compiler/verifier.cc index 1f00773..97ebb8f 100644 --- a/src/compiler/verifier.cc +++ b/src/compiler/verifier.cc @@ -194,21 +194,12 @@ GenericGraphVisit::Control Verifier::Visitor::Pre(Node* node) { OperatorProperties::GetControlInputCount(control->op())); break; } - case IrOpcode::kLazyDeoptimization: - // TODO(jarin): what are the constraints on these? - break; - case IrOpcode::kDeoptimize: - // TODO(jarin): what are the constraints on these? - break; case IrOpcode::kFrameState: // TODO(jarin): what are the constraints on these? break; case IrOpcode::kCall: // TODO(rossberg): what are the constraints on these? break; - case IrOpcode::kContinuation: - // TODO(jarin): what are the constraints on these? - break; case IrOpcode::kProjection: { // Projection has an input that produces enough values. 
int index = static_cast*>(node->op())->parameter(); diff --git a/src/compiler/x64/code-generator-x64.cc b/src/compiler/x64/code-generator-x64.cc index 1312cbe..30c43d6 100644 --- a/src/compiler/x64/code-generator-x64.cc +++ b/src/compiler/x64/code-generator-x64.cc @@ -233,14 +233,6 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) { AddSafepointAndDeopt(instr); break; } - case kArchDeoptimize: { - int deoptimization_id = BuildTranslation(instr, 0); - - Address deopt_entry = Deoptimizer::GetDeoptimizationEntry( - isolate(), deoptimization_id, Deoptimizer::LAZY); - __ call(deopt_entry, RelocInfo::RUNTIME_ENTRY); - break; - } case kArchDrop: { int words = MiscField::decode(instr->opcode()); __ addq(rsp, Immediate(kPointerSize * words)); @@ -782,6 +774,13 @@ void CodeGenerator::AssembleArchBoolean(Instruction* instr, } +void CodeGenerator::AssembleDeoptimizerCall(int deoptimization_id) { + Address deopt_entry = Deoptimizer::GetDeoptimizationEntry( + isolate(), deoptimization_id, Deoptimizer::LAZY); + __ call(deopt_entry, RelocInfo::RUNTIME_ENTRY); +} + + void CodeGenerator::AssemblePrologue() { CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); int stack_slots = frame()->GetSpillSlotCount(); diff --git a/src/compiler/x64/instruction-selector-x64.cc b/src/compiler/x64/instruction-selector-x64.cc index 5475bea..29aae3c 100644 --- a/src/compiler/x64/instruction-selector-x64.cc +++ b/src/compiler/x64/instruction-selector-x64.cc @@ -678,7 +678,7 @@ void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation, CallBuffer buffer(zone(), descriptor, frame_state_descriptor); // Compute InstructionOperands for inputs and outputs. - InitializeCallBuffer(call, &buffer, true, true, continuation, deoptimization); + InitializeCallBuffer(call, &buffer, true, true); // TODO(dcarney): stack alignment for c calls. // TODO(dcarney): shadow space on window for c calls. diff --git a/src/deoptimizer.cc b/src/deoptimizer.cc index e6415f4..6b381da 100644 --- a/src/deoptimizer.cc +++ b/src/deoptimizer.cc @@ -926,7 +926,7 @@ void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator, CHECK_EQ(Translation::kSelfLiteralId, closure_id); function = function_; } - unsigned height = iterator->Next(); + unsigned height = iterator->Next() - 1; // Do not count the context. unsigned height_in_bytes = height * kPointerSize; if (trace_scope_ != NULL) { PrintF(trace_scope_->file(), " translating "); @@ -1061,12 +1061,12 @@ void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator, Register context_reg = JavaScriptFrame::context_register(); output_offset -= kPointerSize; input_offset -= kPointerSize; - if (is_bottommost) { - value = input_->GetFrameSlot(input_offset); - } else { - value = reinterpret_cast(function->context()); - } - output_frame->SetFrameSlot(output_offset, value); + // Read the context from the translations. + DoTranslateCommand(iterator, frame_index, output_offset); + value = output_frame->GetFrameSlot(output_offset); + // The context should not be a placeholder for a materialized object. 
+ CHECK(value != + reinterpret_cast(isolate_->heap()->arguments_marker())); output_frame->SetContext(value); if (is_topmost) output_frame->SetRegister(context_reg.code(), value); if (trace_scope_ != NULL) { @@ -3630,6 +3630,7 @@ DeoptimizedFrameInfo::DeoptimizedFrameInfo(Deoptimizer* deoptimizer, bool has_construct_stub) { FrameDescription* output_frame = deoptimizer->output_[frame_index]; function_ = output_frame->GetFunction(); + context_ = reinterpret_cast(output_frame->GetContext()); has_construct_stub_ = has_construct_stub; expression_count_ = output_frame->GetExpressionCount(); expression_stack_ = new Object*[expression_count_]; @@ -3663,6 +3664,7 @@ DeoptimizedFrameInfo::~DeoptimizedFrameInfo() { void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) { v->VisitPointer(BitCast(&function_)); + v->VisitPointer(&context_); v->VisitPointers(parameters_, parameters_ + parameters_count_); v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); } diff --git a/src/deoptimizer.h b/src/deoptimizer.h index c509483..4aa6a50 100644 --- a/src/deoptimizer.h +++ b/src/deoptimizer.h @@ -922,6 +922,9 @@ class DeoptimizedFrameInfo : public Malloced { return function_; } + // Get the frame context. + Object* GetContext() { return context_; } + // Check if this frame is preceded by construct stub frame. The bottom-most // inlined frame might still be called by an uninlined construct stub. bool HasConstructStub() { @@ -958,6 +961,7 @@ class DeoptimizedFrameInfo : public Malloced { } JSFunction* function_; + Object* context_; bool has_construct_stub_; int parameters_count_; int expression_count_; diff --git a/src/hydrogen-instructions.h b/src/hydrogen-instructions.h index ec50f5b..06d2bb4 100644 --- a/src/hydrogen-instructions.h +++ b/src/hydrogen-instructions.h @@ -1994,22 +1994,20 @@ enum InliningKind { class HArgumentsObject; +class HConstant; class HEnterInlined V8_FINAL : public HTemplateInstruction<0> { public: - static HEnterInlined* New(Zone* zone, - HValue* context, - BailoutId return_id, + static HEnterInlined* New(Zone* zone, HValue* context, BailoutId return_id, Handle closure, - int arguments_count, + HConstant* closure_context, int arguments_count, FunctionLiteral* function, - InliningKind inlining_kind, - Variable* arguments_var, + InliningKind inlining_kind, Variable* arguments_var, HArgumentsObject* arguments_object) { - return new(zone) HEnterInlined(return_id, closure, arguments_count, - function, inlining_kind, arguments_var, - arguments_object, zone); + return new (zone) HEnterInlined(return_id, closure, closure_context, + arguments_count, function, inlining_kind, + arguments_var, arguments_object, zone); } void RegisterReturnTarget(HBasicBlock* return_target, Zone* zone); @@ -2018,6 +2016,7 @@ class HEnterInlined V8_FINAL : public HTemplateInstruction<0> { virtual OStream& PrintDataTo(OStream& os) const V8_OVERRIDE; // NOLINT Handle closure() const { return closure_; } + HConstant* closure_context() const { return closure_context_; } int arguments_count() const { return arguments_count_; } bool arguments_pushed() const { return arguments_pushed_; } void set_arguments_pushed() { arguments_pushed_ = true; } @@ -2035,27 +2034,25 @@ class HEnterInlined V8_FINAL : public HTemplateInstruction<0> { DECLARE_CONCRETE_INSTRUCTION(EnterInlined) private: - HEnterInlined(BailoutId return_id, - Handle closure, - int arguments_count, - FunctionLiteral* function, - InliningKind inlining_kind, - Variable* arguments_var, - HArgumentsObject* arguments_object, + HEnterInlined(BailoutId 
return_id, Handle closure, + HConstant* closure_context, int arguments_count, + FunctionLiteral* function, InliningKind inlining_kind, + Variable* arguments_var, HArgumentsObject* arguments_object, Zone* zone) : return_id_(return_id), closure_(closure), + closure_context_(closure_context), arguments_count_(arguments_count), arguments_pushed_(false), function_(function), inlining_kind_(inlining_kind), arguments_var_(arguments_var), arguments_object_(arguments_object), - return_targets_(2, zone) { - } + return_targets_(2, zone) {} BailoutId return_id_; Handle closure_; + HConstant* closure_context_; int arguments_count_; bool arguments_pushed_; FunctionLiteral* function_; @@ -3054,7 +3051,6 @@ struct InductionVariableLimitUpdate { class HBoundsCheck; class HPhi; -class HConstant; class HBitwise; diff --git a/src/hydrogen.cc b/src/hydrogen.cc index b15de89..c4143c4 100644 --- a/src/hydrogen.cc +++ b/src/hydrogen.cc @@ -7891,10 +7891,9 @@ bool HOptimizedGraphBuilder::TryInline(Handle target, Scope* saved_scope = scope(); set_scope(target_info.scope()); HEnterInlined* enter_inlined = - Add(return_id, target, arguments_count, function, + Add(return_id, target, context, arguments_count, function, function_state()->inlining_kind(), - function->scope()->arguments(), - arguments_object); + function->scope()->arguments(), arguments_object); function_state()->set_entry(enter_inlined); VisitDeclarations(target_info.scope()->declarations()); diff --git a/src/ia32/lithium-ia32.cc b/src/ia32/lithium-ia32.cc index 630e395..4569dc5 100644 --- a/src/ia32/lithium-ia32.cc +++ b/src/ia32/lithium-ia32.cc @@ -627,9 +627,8 @@ LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) { HEnvironment* hydrogen_env = current_block_->last_environment(); int argument_index_accumulator = 0; ZoneList objects_to_materialize(0, zone()); - instr->set_environment(CreateEnvironment(hydrogen_env, - &argument_index_accumulator, - &objects_to_materialize)); + instr->set_environment(CreateEnvironment( + hydrogen_env, &argument_index_accumulator, &objects_to_materialize)); return instr; } @@ -2633,6 +2632,7 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) { if (instr->arguments_var() != NULL && instr->arguments_object()->IsLinked()) { inner->Bind(instr->arguments_var(), instr->arguments_object()); } + inner->BindContext(instr->closure_context()); inner->set_entry(instr); current_block_->UpdateEnvironment(inner); chunk_->AddInlinedClosure(instr->closure()); diff --git a/src/lithium.cc b/src/lithium.cc index 26d07b3..b29a4c5 100644 --- a/src/lithium.cc +++ b/src/lithium.cc @@ -511,18 +511,22 @@ void LChunk::set_allocated_double_registers(BitVector* allocated_registers) { LEnvironment* LChunkBuilderBase::CreateEnvironment( - HEnvironment* hydrogen_env, - int* argument_index_accumulator, + HEnvironment* hydrogen_env, int* argument_index_accumulator, ZoneList* objects_to_materialize) { if (hydrogen_env == NULL) return NULL; - LEnvironment* outer = CreateEnvironment(hydrogen_env->outer(), - argument_index_accumulator, - objects_to_materialize); + LEnvironment* outer = + CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator, + objects_to_materialize); BailoutId ast_id = hydrogen_env->ast_id(); DCHECK(!ast_id.IsNone() || hydrogen_env->frame_type() != JS_FUNCTION); - int value_count = hydrogen_env->length() - hydrogen_env->specials_count(); + + int omitted_count = (hydrogen_env->frame_type() == JS_FUNCTION) + ? 
0 + : hydrogen_env->specials_count(); + + int value_count = hydrogen_env->length() - omitted_count; LEnvironment* result = new(zone()) LEnvironment(hydrogen_env->closure(), hydrogen_env->frame_type(), @@ -538,8 +542,10 @@ LEnvironment* LChunkBuilderBase::CreateEnvironment( // Store the environment description into the environment // (with holes for nested objects) for (int i = 0; i < hydrogen_env->length(); ++i) { - if (hydrogen_env->is_special_index(i)) continue; - + if (hydrogen_env->is_special_index(i) && + hydrogen_env->frame_type() != JS_FUNCTION) { + continue; + } LOperand* op; HValue* value = hydrogen_env->values()->at(i); CHECK(!value->IsPushArguments()); // Do not deopt outgoing arguments diff --git a/src/mips/lithium-mips.cc b/src/mips/lithium-mips.cc index 5b2248d..7b4223d 100644 --- a/src/mips/lithium-mips.cc +++ b/src/mips/lithium-mips.cc @@ -2508,6 +2508,7 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) { if (instr->arguments_var() != NULL && instr->arguments_object()->IsLinked()) { inner->Bind(instr->arguments_var(), instr->arguments_object()); } + inner->BindContext(instr->closure_context()); inner->set_entry(instr); current_block_->UpdateEnvironment(inner); chunk_->AddInlinedClosure(instr->closure()); diff --git a/src/mips64/lithium-mips64.cc b/src/mips64/lithium-mips64.cc index 75ab194..8670cdf 100644 --- a/src/mips64/lithium-mips64.cc +++ b/src/mips64/lithium-mips64.cc @@ -2508,6 +2508,7 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) { if (instr->arguments_var() != NULL && instr->arguments_object()->IsLinked()) { inner->Bind(instr->arguments_var(), instr->arguments_object()); } + inner->BindContext(instr->closure_context()); inner->set_entry(instr); current_block_->UpdateEnvironment(inner); chunk_->AddInlinedClosure(instr->closure()); diff --git a/src/runtime.cc b/src/runtime.cc index 40468c2..e627706 100644 --- a/src/runtime.cc +++ b/src/runtime.cc @@ -11196,6 +11196,9 @@ class FrameInspector { ? deoptimized_frame_->HasConstructStub() : frame_->IsConstructor(); } + Object* GetContext() { + return is_optimized_ ? deoptimized_frame_->GetContext() : frame_->context(); + } // To inspect all the provided arguments the frame might need to be // replaced with the arguments frame. @@ -11344,7 +11347,7 @@ RUNTIME_FUNCTION(Runtime_GetFrameDetails) { if (local < local_count) { // Get the context containing declarations. Handle context( - Context::cast(it.frame()->context())->declaration_context()); + Context::cast(frame_inspector.GetContext())->declaration_context()); for (; i < scope_info->LocalCount(); ++i) { if (scope_info->LocalIsSynthetic(i)) continue; @@ -13029,7 +13032,7 @@ RUNTIME_FUNCTION(Runtime_DebugEvaluate) { isolate->set_context(*(save->context())); // Evaluate on the context of the frame. - Handle context(Context::cast(frame->context())); + Handle context(Context::cast(frame_inspector.GetContext())); DCHECK(!context.is_null()); // Materialize stack locals and the arguments object. 
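For illustration (a minimal sketch, not patch content, mirroring the src/runtime.cc hunks above): debugger code that used to read the context straight off the physical frame now asks the FrameInspector, which returns the context recovered by the deoptimizer when the frame is optimized.

  // FrameInspector::GetContext() dispatches on whether the frame had to be
  // deoptimized on the fly:
  //   return is_optimized_ ? deoptimized_frame_->GetContext()
  //                        : frame_->context();

  // Call sites then go through the inspector instead of it.frame()->context():
  Handle<Context> context(
      Context::cast(frame_inspector.GetContext())->declaration_context());

This is what allows the debug-evaluate tests further down in this patch to be exercised under --turbo-deoptimization.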
diff --git a/src/x64/lithium-x64.cc b/src/x64/lithium-x64.cc index 9be600b..14da898 100644 --- a/src/x64/lithium-x64.cc +++ b/src/x64/lithium-x64.cc @@ -609,9 +609,8 @@ LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) { HEnvironment* hydrogen_env = current_block_->last_environment(); int argument_index_accumulator = 0; ZoneList objects_to_materialize(0, zone()); - instr->set_environment(CreateEnvironment(hydrogen_env, - &argument_index_accumulator, - &objects_to_materialize)); + instr->set_environment(CreateEnvironment( + hydrogen_env, &argument_index_accumulator, &objects_to_materialize)); return instr; } @@ -2608,6 +2607,7 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) { if (instr->arguments_var() != NULL && instr->arguments_object()->IsLinked()) { inner->Bind(instr->arguments_var(), instr->arguments_object()); } + inner->BindContext(instr->closure_context()); inner->set_entry(instr); current_block_->UpdateEnvironment(inner); chunk_->AddInlinedClosure(instr->closure()); diff --git a/src/x87/lithium-x87.cc b/src/x87/lithium-x87.cc index 35d3e3f..5204d9b 100644 --- a/src/x87/lithium-x87.cc +++ b/src/x87/lithium-x87.cc @@ -2609,6 +2609,7 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) { if (instr->arguments_var() != NULL && instr->arguments_object()->IsLinked()) { inner->Bind(instr->arguments_var(), instr->arguments_object()); } + inner->BindContext(instr->closure_context()); inner->set_entry(instr); current_block_->UpdateEnvironment(inner); chunk_->AddInlinedClosure(instr->closure()); diff --git a/test/cctest/compiler/test-codegen-deopt.cc b/test/cctest/compiler/test-codegen-deopt.cc index 4cc0fde..e11fc55 100644 --- a/test/cctest/compiler/test-codegen-deopt.cc +++ b/test/cctest/compiler/test-codegen-deopt.cc @@ -140,31 +140,22 @@ class TrivialDeoptCodegenTester : public DeoptCodegenTester { PrintableUnique::CreateUninitialized(zone(), deopt_function); Node* deopt_fun_node = m.NewNode(common.HeapConstant(deopt_fun_constant)); - MLabel deopt, cont; - Node* call = m.CallJS0(deopt_fun_node, undef_node, &cont, &deopt); - - m.Bind(&cont); - m.NewNode(common.Continuation(), call); - m.Return(undef_node); - - m.Bind(&deopt); - m.NewNode(common.LazyDeoptimization(), call); bailout_id = GetCallBailoutId(); Node* parameters = m.NewNode(common.StateValues(1), undef_node); Node* locals = m.NewNode(common.StateValues(0)); Node* stack = m.NewNode(common.StateValues(0)); - Node* state_node = - m.NewNode(common.FrameState(bailout_id), parameters, locals, stack); - m.Deoptimize(state_node); + Node* state_node = m.NewNode(common.FrameState(bailout_id, kIgnoreOutput), + parameters, locals, stack, undef_node); + + m.CallJS0(deopt_fun_node, undef_node, state_node); + + m.Return(undef_node); // Schedule the graph: Schedule* schedule = m.Export(); - cont_block = cont.block(); - deopt_block = deopt.block(); - return schedule; } @@ -179,9 +170,6 @@ class TrivialDeoptCodegenTester : public DeoptCodegenTester { CHECK(false); return BailoutId(-1); } - - BasicBlock* cont_block; - BasicBlock* deopt_block; }; @@ -198,15 +186,7 @@ TEST(TurboTrivialDeoptCodegen) { DeoptimizationInputData* data = DeoptimizationInputData::cast(t.result_code->deoptimization_data()); - Label* cont_label = t.code->GetLabel(t.cont_block); - Label* deopt_label = t.code->GetLabel(t.deopt_block); - - // Check the safepoint - it should contain an entry for the call - // with the right deoptimization address. 
- SafepointEntry entry = t.result_code->GetSafepointEntry( - t.result_code->instruction_start() + cont_label->pos()); - CHECK(entry.is_valid()); - CHECK_EQ(deopt_label->pos(), entry.deoptimization_pc()); + // TODO(jarin) Find a way to test the safepoint. // Check that we deoptimize to the right AST id. CHECK_EQ(1, data->DeoptCount()); @@ -274,32 +254,21 @@ class TrivialRuntimeDeoptCodegenTester : public DeoptCodegenTester { PrintableUnique::CreateUninitialized(zone(), function); Node* this_fun_node = m.NewNode(common.HeapConstant(this_fun_constant)); - MLabel deopt, cont; - Node* call = m.CallRuntime1(Runtime::kDeoptimizeFunction, this_fun_node, - &cont, &deopt); - - m.Bind(&cont); - m.NewNode(common.Continuation(), call); - m.Return(undef_node); - - m.Bind(&deopt); - m.NewNode(common.LazyDeoptimization(), call); - bailout_id = GetCallBailoutId(); Node* parameters = m.NewNode(common.StateValues(1), undef_node); Node* locals = m.NewNode(common.StateValues(0)); Node* stack = m.NewNode(common.StateValues(0)); - Node* state_node = - m.NewNode(common.FrameState(bailout_id), parameters, locals, stack); - m.Deoptimize(state_node); + Node* state_node = m.NewNode(common.FrameState(bailout_id, kIgnoreOutput), + parameters, locals, stack, undef_node); + + m.CallRuntime1(Runtime::kDeoptimizeFunction, this_fun_node, state_node); + + m.Return(undef_node); // Schedule the graph: Schedule* schedule = m.Export(); - cont_block = cont.block(); - deopt_block = deopt.block(); - return schedule; } @@ -314,9 +283,6 @@ class TrivialRuntimeDeoptCodegenTester : public DeoptCodegenTester { CHECK(false); return BailoutId(-1); } - - BasicBlock* cont_block; - BasicBlock* deopt_block; }; diff --git a/test/cctest/compiler/test-schedule.cc b/test/cctest/compiler/test-schedule.cc index 34f19e8..cad686a 100644 --- a/test/cctest/compiler/test-schedule.cc +++ b/test/cctest/compiler/test-schedule.cc @@ -126,20 +126,6 @@ TEST(TestScheduleAddThrow) { } -TEST(TestScheduleAddDeopt) { - HandleAndZoneScope scope; - Schedule schedule(scope.main_zone()); - Graph graph(scope.main_zone()); - Node* n0 = graph.NewNode(&dummy_operator); - BasicBlock* entry = schedule.start(); - schedule.AddDeoptimize(entry, n0); - - CHECK_EQ(0, entry->PredecessorCount()); - CHECK_EQ(1, entry->SuccessorCount()); - CHECK_EQ(schedule.end(), entry->SuccessorAt(0)); -} - - TEST(BuildMulNodeGraph) { HandleAndZoneScope scope; Schedule schedule(scope.main_zone()); diff --git a/test/cctest/compiler/test-scheduler.cc b/test/cctest/compiler/test-scheduler.cc index 157bdc2..5f72c17 100644 --- a/test/cctest/compiler/test-scheduler.cc +++ b/test/cctest/compiler/test-scheduler.cc @@ -1637,142 +1637,6 @@ TEST(BuildScheduleSimpleLoopWithCodeMotion) { #if V8_TURBOFAN_TARGET -// So we can get a real JS function. 
-static Handle Compile(const char* source) { - Isolate* isolate = CcTest::i_isolate(); - Handle source_code = isolate->factory() - ->NewStringFromUtf8(CStrVector(source)) - .ToHandleChecked(); - Handle shared_function = Compiler::CompileScript( - source_code, Handle(), 0, 0, false, - Handle(isolate->native_context()), NULL, NULL, - v8::ScriptCompiler::kNoCompileOptions, NOT_NATIVES_CODE); - return isolate->factory()->NewFunctionFromSharedFunctionInfo( - shared_function, isolate->native_context()); -} - - -TEST(BuildScheduleTrivialLazyDeoptCall) { - FLAG_turbo_deoptimization = true; - - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); - Graph graph(scope.main_zone()); - CommonOperatorBuilder common(scope.main_zone()); - JSOperatorBuilder js_builder(scope.main_zone()); - - InitializedHandleScope handles; - Handle function = Compile("m()"); - CompilationInfoWithZone info(function); - Linkage linkage(&info); - - // Manually transcribed code for: - // function turbo_fan_test() { - // m(); - // } - // where m can lazy deopt (so it has a deopt block associated with it). - - - // Start // - // ^ // - // | (EC) // - // | // - // /------> Call <--------------\ // - // / ^ ^ \ // - // / | | \ undef // - // / / \ \ ^ // - // (E) | (C) / \ (C) \ (E) | // - // | Continuation LazyDeoptimization | | // - // \___ ^ ^ / | // - // \ | | ______/ Framestate // - // undef \ | (VC) | (C) / ^ // - // \ \ | | / / // - // Return Deoptimization ----------/ // - // ^ ^ // - // \ / // - // (C) \ / (C) // - // \ / // - // Merge // - // ^ // - // | // - // End // - - Handle undef_object = - Handle(isolate->heap()->undefined_value(), isolate); - PrintableUnique undef_constant = - PrintableUnique::CreateUninitialized(scope.main_zone(), - undef_object); - - Node* undef_node = graph.NewNode(common.HeapConstant(undef_constant)); - - Node* start_node = graph.NewNode(common.Start(0)); - - CallDescriptor* descriptor = linkage.GetJSCallDescriptor(0); - Node* call_node = graph.NewNode(common.Call(descriptor), - undef_node, // function - undef_node, // context - start_node, // effect - start_node); // control - - Node* cont_node = graph.NewNode(common.Continuation(), call_node); - Node* lazy_deopt_node = graph.NewNode(common.LazyDeoptimization(), call_node); - - Node* parameters = graph.NewNode(common.StateValues(1), undef_node); - Node* locals = graph.NewNode(common.StateValues(0)); - Node* stack = graph.NewNode(common.StateValues(0)); - - Node* state_node = graph.NewNode(common.FrameState(BailoutId(1234)), - parameters, locals, stack); - - Node* return_node = graph.NewNode(common.Return(), - undef_node, // return value - call_node, // effect - cont_node); // control - Node* deoptimization_node = graph.NewNode(common.Deoptimize(), - state_node, // deopt environment - call_node, // effect - lazy_deopt_node); // control - - Node* merge_node = - graph.NewNode(common.Merge(2), return_node, deoptimization_node); - - Node* end_node = graph.NewNode(common.End(), merge_node); - - graph.SetStart(start_node); - graph.SetEnd(end_node); - - Schedule* schedule = ComputeAndVerifySchedule(12, &graph); - - // Tests: - // Continuation and deopt have basic blocks. - BasicBlock* cont_block = schedule->block(cont_node); - BasicBlock* deopt_block = schedule->block(lazy_deopt_node); - BasicBlock* call_block = schedule->block(call_node); - CHECK_NE(NULL, cont_block); - CHECK_NE(NULL, deopt_block); - CHECK_NE(NULL, call_block); - // The basic blocks are different. 
- CHECK_NE(cont_block, deopt_block); - CHECK_NE(cont_block, call_block); - CHECK_NE(deopt_block, call_block); - // The call node finishes its own basic block. - CHECK_EQ(BasicBlock::kCall, call_block->control_); - CHECK_EQ(call_node, call_block->control_input_); - // The lazy deopt block is deferred. - CHECK(deopt_block->deferred_); - CHECK(!call_block->deferred_); - CHECK(!cont_block->deferred_); - // The lazy deopt block contains framestate + bailout (and nothing else). - CHECK_EQ(deoptimization_node, deopt_block->control_input_); - CHECK_EQ(5, static_cast(deopt_block->nodes_.size())); - CHECK_EQ(lazy_deopt_node, deopt_block->nodes_[0]); - CHECK_EQ(IrOpcode::kStateValues, deopt_block->nodes_[1]->op()->opcode()); - CHECK_EQ(IrOpcode::kStateValues, deopt_block->nodes_[2]->op()->opcode()); - CHECK_EQ(IrOpcode::kStateValues, deopt_block->nodes_[3]->op()->opcode()); - CHECK_EQ(state_node, deopt_block->nodes_[4]); -} - - static Node* CreateDiamond(Graph* graph, CommonOperatorBuilder* common, Node* cond) { Node* tv = graph->NewNode(common->Int32Constant(6)); diff --git a/test/compiler-unittests/instruction-selector-unittest.cc b/test/compiler-unittests/instruction-selector-unittest.cc index 1cf46f8..4536cd0 100644 --- a/test/compiler-unittests/instruction-selector-unittest.cc +++ b/test/compiler-unittests/instruction-selector-unittest.cc @@ -97,8 +97,9 @@ InstructionSelectorTest::Stream InstructionSelectorTest::StreamBuilder::Build( s.references_.insert(virtual_register); } } - for (int i = 0; i < sequence.GetDeoptimizationEntryCount(); i++) { - s.deoptimization_entries_.push_back(sequence.GetDeoptimizationEntry(i)); + for (int i = 0; i < sequence.GetFrameStateDescriptorCount(); i++) { + s.deoptimization_entries_.push_back(sequence.GetFrameStateDescriptor( + InstructionSequence::StateId::FromInt(i))); } return s; } @@ -323,25 +324,16 @@ TARGET_TEST_F(InstructionSelectorTest, CallJSFunctionWithDeopt) { Node* function_node = m.Parameter(0); Node* receiver = m.Parameter(1); - StreamBuilder::Label deopt, cont; - - // TODO(jarin) Add frame state. - Node* call = m.CallJS0(function_node, receiver, &cont, &deopt); - - m.Bind(&cont); - m.NewNode(m.common()->Continuation(), call); - m.Return(call); - - m.Bind(&deopt); - m.NewNode(m.common()->LazyDeoptimization(), call); Node* parameters = m.NewNode(m.common()->StateValues(1), m.Int32Constant(1)); Node* locals = m.NewNode(m.common()->StateValues(0)); Node* stack = m.NewNode(m.common()->StateValues(0)); + Node* context_dummy = m.Int32Constant(0); - Node* state_node = - m.NewNode(m.common()->FrameState(bailout_id), parameters, locals, stack); - m.Deoptimize(state_node); + Node* state_node = m.NewNode(m.common()->FrameState(bailout_id, kPushOutput), + parameters, locals, stack, context_dummy); + Node* call = m.CallJS0(function_node, receiver, state_node); + m.Return(call); Stream s = m.Build(kAllExceptNopInstructions); @@ -350,13 +342,13 @@ TARGET_TEST_F(InstructionSelectorTest, CallJSFunctionWithDeopt) { for (; index < s.size() && s[index]->arch_opcode() != kArchCallJSFunction; index++) { } - // Now we should have three instructions: call, return and deoptimize. - ASSERT_EQ(index + 3, s.size()); + // Now we should have two instructions: call and return. + ASSERT_EQ(index + 2, s.size()); EXPECT_EQ(kArchCallJSFunction, s[index++]->arch_opcode()); EXPECT_EQ(kArchRet, s[index++]->arch_opcode()); - EXPECT_EQ(kArchDeoptimize, s[index++]->arch_opcode()); - EXPECT_EQ(index, s.size()); + + // TODO(jarin) Check deoptimization table. 
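To summarize the updated tests (a sketch using only names that appear in CallJSFunctionWithDeopt above): the frame state gains the context as a fourth value input and is attached directly to the call, and the selector is expected to emit only the call and the return, with the deopt information reachable through the call's safepoint rather than a separate kArchDeoptimize instruction.

  Node* parameters = m.NewNode(m.common()->StateValues(1), m.Int32Constant(1));
  Node* locals = m.NewNode(m.common()->StateValues(0));
  Node* stack = m.NewNode(m.common()->StateValues(0));
  Node* context_dummy = m.Int32Constant(0);  // stand-in for the context value

  Node* state_node = m.NewNode(m.common()->FrameState(bailout_id, kPushOutput),
                               parameters, locals, stack, context_dummy);
  Node* call = m.CallJS0(function_node, receiver, state_node);
  m.Return(call);

  // Expected instruction stream (ignoring preceding moves):
  //   kArchCallJSFunction   <- frame state / deopt id attached here
  //   kArchRet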
} @@ -365,7 +357,6 @@ TARGET_TEST_F(InstructionSelectorTest, CallFunctionStubWithDeopt) { kMachAnyTagged); BailoutId bailout_id_before(42); - BailoutId bailout_id_after(54); // Some arguments for the call node. Node* function_node = m.Parameter(0); @@ -376,31 +367,17 @@ TARGET_TEST_F(InstructionSelectorTest, CallFunctionStubWithDeopt) { Node* parameters = m.NewNode(m.common()->StateValues(1), m.Int32Constant(43)); Node* locals = m.NewNode(m.common()->StateValues(1), m.Int32Constant(44)); Node* stack = m.NewNode(m.common()->StateValues(1), m.Int32Constant(45)); - Node* frame_state_before = m.NewNode( - m.common()->FrameState(bailout_id_before), parameters, locals, stack); + Node* context_sentinel = m.Int32Constant(0); + Node* frame_state_before = + m.NewNode(m.common()->FrameState(bailout_id_before, kPushOutput), + parameters, locals, stack, context_sentinel); - StreamBuilder::Label deopt, cont; // Build the call. - Node* call = - m.CallFunctionStub0(function_node, receiver, context, frame_state_before, - &cont, &deopt, CALL_AS_METHOD); + Node* call = m.CallFunctionStub0(function_node, receiver, context, + frame_state_before, CALL_AS_METHOD); - // Create the continuation branch. - m.Bind(&cont); - m.NewNode(m.common()->Continuation(), call); m.Return(call); - // Create the lazy deoptimization block (with a different frame state). - m.Bind(&deopt); - m.NewNode(m.common()->LazyDeoptimization(), call); - - Node* stack_after = - m.NewNode(m.common()->StateValues(2), m.Int32Constant(55), call); - - Node* frame_state_after = m.NewNode(m.common()->FrameState(bailout_id_after), - parameters, locals, stack_after); - m.Deoptimize(frame_state_after); - Stream s = m.Build(kAllExceptNopInstructions); // Skip until kArchCallJSFunction. @@ -408,8 +385,8 @@ TARGET_TEST_F(InstructionSelectorTest, CallFunctionStubWithDeopt) { for (; index < s.size() && s[index]->arch_opcode() != kArchCallCodeObject; index++) { } - // Now we should have three instructions: call, return and deoptimize. - ASSERT_EQ(index + 3, s.size()); + // Now we should have two instructions: call, return. + ASSERT_EQ(index + 2, s.size()); // Check the call instruction const Instruction* call_instr = s[index++]; @@ -417,10 +394,9 @@ TARGET_TEST_F(InstructionSelectorTest, CallFunctionStubWithDeopt) { size_t num_operands = 1 + // Code object. 1 + - 3 + // Frame state deopt id + one input for each value in frame state. + 4 + // Frame state deopt id + one input for each value in frame state. 1 + // Function. - 1 + // Context. - 2; // Continuation and deoptimization block labels. + 1; // Context. ASSERT_EQ(num_operands, call_instr->InputCount()); // Code object. @@ -428,41 +404,25 @@ TARGET_TEST_F(InstructionSelectorTest, CallFunctionStubWithDeopt) { // Deoptimization id. 
int32_t deopt_id_before = s.ToInt32(call_instr->InputAt(1)); - FrameStateDescriptor* desc_before = s.GetDeoptimizationEntry(deopt_id_before); + FrameStateDescriptor* desc_before = + s.GetFrameStateDescriptor(deopt_id_before); EXPECT_EQ(bailout_id_before, desc_before->bailout_id()); + EXPECT_EQ(kPushOutput, desc_before->state_combine()); EXPECT_EQ(1, desc_before->parameters_count()); EXPECT_EQ(1, desc_before->locals_count()); EXPECT_EQ(1, desc_before->stack_count()); EXPECT_EQ(43, s.ToInt32(call_instr->InputAt(2))); - EXPECT_EQ(44, s.ToInt32(call_instr->InputAt(3))); - EXPECT_EQ(45, s.ToInt32(call_instr->InputAt(4))); + EXPECT_EQ(0, s.ToInt32(call_instr->InputAt(3))); + EXPECT_EQ(44, s.ToInt32(call_instr->InputAt(4))); + EXPECT_EQ(45, s.ToInt32(call_instr->InputAt(5))); // Function. - EXPECT_EQ(function_node->id(), s.ToVreg(call_instr->InputAt(5))); + EXPECT_EQ(function_node->id(), s.ToVreg(call_instr->InputAt(6))); // Context. - EXPECT_EQ(context->id(), s.ToVreg(call_instr->InputAt(6))); - // Continuation. - EXPECT_EQ(cont.block()->id(), s.ToInt32(call_instr->InputAt(7))); - // Deoptimization. - EXPECT_EQ(deopt.block()->id(), s.ToInt32(call_instr->InputAt(8))); + EXPECT_EQ(context->id(), s.ToVreg(call_instr->InputAt(7))); EXPECT_EQ(kArchRet, s[index++]->arch_opcode()); - // Check the deoptimize instruction. - const Instruction* deopt_instr = s[index++]; - EXPECT_EQ(kArchDeoptimize, deopt_instr->arch_opcode()); - ASSERT_EQ(5U, deopt_instr->InputCount()); - int32_t deopt_id_after = s.ToInt32(deopt_instr->InputAt(0)); - FrameStateDescriptor* desc_after = s.GetDeoptimizationEntry(deopt_id_after); - EXPECT_EQ(bailout_id_after, desc_after->bailout_id()); - EXPECT_EQ(1, desc_after->parameters_count()); - EXPECT_EQ(1, desc_after->locals_count()); - EXPECT_EQ(2, desc_after->stack_count()); - // Parameter value from the frame state. - EXPECT_EQ(43, s.ToInt32(deopt_instr->InputAt(1))); - EXPECT_EQ(44, s.ToInt32(deopt_instr->InputAt(2))); - EXPECT_EQ(55, s.ToInt32(deopt_instr->InputAt(3))); - EXPECT_EQ(call->id(), s.ToVreg(deopt_instr->InputAt(4))); EXPECT_EQ(index, s.size()); } diff --git a/test/compiler-unittests/instruction-selector-unittest.h b/test/compiler-unittests/instruction-selector-unittest.h index 80e5344..f264eb0 100644 --- a/test/compiler-unittests/instruction-selector-unittest.h +++ b/test/compiler-unittests/instruction-selector-unittest.h @@ -150,12 +150,12 @@ class InstructionSelectorTest : public CompilerTest { return UnallocatedOperand::cast(operand)->virtual_register(); } - FrameStateDescriptor* GetDeoptimizationEntry(int deoptimization_id) { - EXPECT_LT(deoptimization_id, GetDeoptimizationEntryCount()); + FrameStateDescriptor* GetFrameStateDescriptor(int deoptimization_id) { + EXPECT_LT(deoptimization_id, GetFrameStateDescriptorCount()); return deoptimization_entries_[deoptimization_id]; } - int GetDeoptimizationEntryCount() { + int GetFrameStateDescriptorCount() { return static_cast(deoptimization_entries_.size()); } diff --git a/test/mjsunit/debug-clearbreakpointgroup.js b/test/mjsunit/debug-clearbreakpointgroup.js index 0cfc5c9..137dfec 100644 --- a/test/mjsunit/debug-clearbreakpointgroup.js +++ b/test/mjsunit/debug-clearbreakpointgroup.js @@ -26,6 +26,7 @@ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // Flags: --expose-debug-as debug +// Flags: --turbo-deoptimization // Get the Debug object exposed from the debug context global object. 
var Debug = debug.Debug diff --git a/test/mjsunit/debug-evaluate-closure.js b/test/mjsunit/debug-evaluate-closure.js index 778defd..cf507b5 100644 --- a/test/mjsunit/debug-evaluate-closure.js +++ b/test/mjsunit/debug-evaluate-closure.js @@ -26,6 +26,7 @@ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // Flags: --expose-debug-as debug --allow-natives-syntax +// Flags: --turbo-deoptimization Debug = debug.Debug; var listened = false; diff --git a/test/mjsunit/debug-evaluate-with.js b/test/mjsunit/debug-evaluate-with.js index c19a707..3f3310f 100644 --- a/test/mjsunit/debug-evaluate-with.js +++ b/test/mjsunit/debug-evaluate-with.js @@ -26,6 +26,8 @@ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // Flags: --expose-debug-as debug +// Flags: --turbo-deoptimization + // Get the Debug object exposed from the debug context global object. Debug = debug.Debug diff --git a/test/mjsunit/debug-step-2.js b/test/mjsunit/debug-step-2.js index 502b426..5fe7466 100644 --- a/test/mjsunit/debug-step-2.js +++ b/test/mjsunit/debug-step-2.js @@ -26,6 +26,7 @@ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // Flags: --expose-debug-as debug +// Flags: --turbo-deoptimization // This test tests that full code compiled without debug break slots // is recompiled with debug break slots when debugging is started. diff --git a/test/mjsunit/mjsunit.status b/test/mjsunit/mjsunit.status index b2c4510..ec08a0d 100644 --- a/test/mjsunit/mjsunit.status +++ b/test/mjsunit/mjsunit.status @@ -79,21 +79,13 @@ 'compiler/osr-assert': [PASS, NO_VARIANTS], 'regress/regress-2185-2': [PASS, NO_VARIANTS], - # Support for breakpoints requires special relocation info for DebugBreak. - 'debug-clearbreakpointgroup': [PASS, NO_VARIANTS], - 'debug-step-2': [PASS, NO_VARIANTS], - 'regress/regress-debug-deopt-while-recompile': [PASS, NO_VARIANTS], - 'regress/regress-opt-after-debug-deopt': [PASS, NO_VARIANTS], - # Support for %GetFrameDetails is missing and requires checkpoints. 
'debug-evaluate-bool-constructor': [PASS, NO_VARIANTS], - 'debug-evaluate-closure': [PASS, NO_VARIANTS], 'debug-evaluate-const': [PASS, NO_VARIANTS], 'debug-evaluate-locals-optimized-double': [PASS, NO_VARIANTS], 'debug-evaluate-locals-optimized': [PASS, NO_VARIANTS], 'debug-evaluate-locals': [PASS, NO_VARIANTS], 'debug-evaluate-with-context': [PASS, NO_VARIANTS], - 'debug-evaluate-with': [PASS, NO_VARIANTS], 'debug-liveedit-double-call': [PASS, NO_VARIANTS], 'debug-liveedit-restart-frame': [PASS, NO_VARIANTS], 'debug-return-value': [PASS, NO_VARIANTS], @@ -122,14 +114,7 @@ 'es6/generators-debug-scopes': [PASS, NO_VARIANTS], 'harmony/debug-blockscopes': [PASS, NO_VARIANTS], 'regress/regress-1081309': [PASS, NO_VARIANTS], - 'regress/regress-1170187': [PASS, NO_VARIANTS], - 'regress/regress-119609': [PASS, NO_VARIANTS], - 'regress/regress-131994': [PASS, NO_VARIANTS], 'regress/regress-269': [PASS, NO_VARIANTS], - 'regress/regress-325676': [PASS, NO_VARIANTS], - 'regress/regress-crbug-107996': [PASS, NO_VARIANTS], - 'regress/regress-crbug-171715': [PASS, NO_VARIANTS], - 'regress/regress-crbug-222893': [PASS, NO_VARIANTS], 'regress/regress-crbug-259300': [PASS, NO_VARIANTS], 'regress/regress-frame-details-null-receiver': [PASS, NO_VARIANTS], diff --git a/test/mjsunit/regress/regress-1170187.js b/test/mjsunit/regress/regress-1170187.js index 5e82f8a..3621bc4 100644 --- a/test/mjsunit/regress/regress-1170187.js +++ b/test/mjsunit/regress/regress-1170187.js @@ -26,6 +26,8 @@ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // Flags: --expose-debug-as debug +// Flags: --turbo-deoptimization + // Make sure that the retreival of local variables are performed correctly even // when an adapter frame is present. diff --git a/test/mjsunit/regress/regress-119609.js b/test/mjsunit/regress/regress-119609.js index 99041ad..0c85063 100644 --- a/test/mjsunit/regress/regress-119609.js +++ b/test/mjsunit/regress/regress-119609.js @@ -26,6 +26,7 @@ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // Flags: --expose-debug-as debug +// Flags: --turbo-deoptimization Debug = debug.Debug; diff --git a/test/mjsunit/regress/regress-131994.js b/test/mjsunit/regress/regress-131994.js index 7f60095..3de3813 100644 --- a/test/mjsunit/regress/regress-131994.js +++ b/test/mjsunit/regress/regress-131994.js @@ -26,6 +26,7 @@ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // Flags: --expose-debug-as debug +// Flags: --turbo-deoptimization // Test that a variable in the local scope that shadows a context-allocated // variable is correctly resolved when being evaluated in the debugger. diff --git a/test/mjsunit/regress/regress-325676.js b/test/mjsunit/regress/regress-325676.js index 427bbc3..7450a6d 100644 --- a/test/mjsunit/regress/regress-325676.js +++ b/test/mjsunit/regress/regress-325676.js @@ -26,6 +26,7 @@ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // Flags: --expose-debug-as debug +// Flags: --turbo-deoptimization // If a function parameter is forced to be context allocated, // debug evaluate need to resolve it to a context slot instead of diff --git a/test/mjsunit/regress/regress-crbug-107996.js b/test/mjsunit/regress/regress-crbug-107996.js index dfe07e5..b4907f3 100644 --- a/test/mjsunit/regress/regress-crbug-107996.js +++ b/test/mjsunit/regress/regress-crbug-107996.js @@ -26,6 +26,7 @@ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
// Flags: --expose-debug-as debug +// Flags: --turbo-deoptimization Debug = debug.Debug; diff --git a/test/mjsunit/regress/regress-crbug-171715.js b/test/mjsunit/regress/regress-crbug-171715.js index 040c381..309f50a 100644 --- a/test/mjsunit/regress/regress-crbug-171715.js +++ b/test/mjsunit/regress/regress-crbug-171715.js @@ -26,6 +26,7 @@ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // Flags: --expose-debug-as debug +// Flags: --turbo-deoptimization Debug = debug.Debug diff --git a/test/mjsunit/regress/regress-crbug-222893.js b/test/mjsunit/regress/regress-crbug-222893.js index 39363bc..75e1728 100644 --- a/test/mjsunit/regress/regress-crbug-222893.js +++ b/test/mjsunit/regress/regress-crbug-222893.js @@ -26,6 +26,7 @@ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // Flags: --expose-debug-as debug +// Flags: --turbo-deoptimization Debug = debug.Debug diff --git a/test/mjsunit/regress/regress-debug-deopt-while-recompile.js b/test/mjsunit/regress/regress-debug-deopt-while-recompile.js index 52c32e9..ce5220a 100644 --- a/test/mjsunit/regress/regress-debug-deopt-while-recompile.js +++ b/test/mjsunit/regress/regress-debug-deopt-while-recompile.js @@ -26,6 +26,7 @@ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // Flags: --expose-debug-as debug --allow-natives-syntax +// Flags: --turbo-deoptimization Debug = debug.Debug; diff --git a/test/mjsunit/regress/regress-opt-after-debug-deopt.js b/test/mjsunit/regress/regress-opt-after-debug-deopt.js index c637be5..5cbaabc 100644 --- a/test/mjsunit/regress/regress-opt-after-debug-deopt.js +++ b/test/mjsunit/regress/regress-opt-after-debug-deopt.js @@ -27,6 +27,7 @@ // Flags: --expose-debug-as debug --allow-natives-syntax // Flags: --concurrent-recompilation --block-concurrent-recompilation +// Flags: --turbo-deoptimization if (!%IsConcurrentRecompilationSupported()) { print("Concurrent recompilation is disabled. Skipping this test."); -- 2.7.4
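A closing sketch of the deoptimizer side of the context change (restating the src/deoptimizer.cc hunk above, with the reinterpret_cast template argument that the formatting dropped): DoComputeJSFrame now treats the context as part of the translated environment, so the height read from the translation is decremented by one, and the context slot of the output frame is filled from the translation instead of from function->context().

  Register context_reg = JavaScriptFrame::context_register();
  output_offset -= kPointerSize;
  input_offset -= kPointerSize;
  // Read the context from the translation so the frame is restored with the
  // context that was live at the deopt point, not the function's closure
  // context.
  DoTranslateCommand(iterator, frame_index, output_offset);
  intptr_t value = output_frame->GetFrameSlot(output_offset);
  // The context must never be the arguments-marker placeholder used for
  // materialized objects.
  CHECK(value !=
        reinterpret_cast<intptr_t>(isolate_->heap()->arguments_marker()));
  output_frame->SetContext(value);
  if (is_topmost) output_frame->SetRegister(context_reg.code(), value);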