if (instr->arguments_var() != NULL && instr->arguments_object()->IsLinked()) {
inner->Bind(instr->arguments_var(), instr->arguments_object());
}
+ inner->BindContext(instr->closure_context());
inner->set_entry(instr);
current_block_->UpdateEnvironment(inner);
chunk_->AddInlinedClosure(instr->closure());
instr->arguments_object()->IsLinked()) {
inner->Bind(instr->arguments_var(), instr->arguments_object());
}
+ inner->BindContext(instr->closure_context());
inner->set_entry(instr);
current_block_->UpdateEnvironment(inner);
chunk_->AddInlinedClosure(instr->closure());
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
}
- case kArchDeoptimize: {
- int deoptimization_id = BuildTranslation(instr, 0);
-
- Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
- isolate(), deoptimization_id, Deoptimizer::LAZY);
- __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
- DCHECK_EQ(LeaveCC, i.OutputSBit());
- break;
- }
case kArchDrop: {
int words = MiscField::decode(instr->opcode());
__ Drop(words);
}
+// Emits a call to the Deoptimizer's LAZY entry for {deoptimization_id}.
+// Invoked per deoptimization point from EmitLazyDeoptimizationCallTable.
+void CodeGenerator::AssembleDeoptimizerCall(int deoptimization_id) {
+ Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
+ isolate(), deoptimization_id, Deoptimizer::LAZY);
+ __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
+}
+
+
void CodeGenerator::AssemblePrologue() {
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
if (descriptor->kind() == CallDescriptor::kCallAddress) {
case kArchCallAddress:
case kArchCallCodeObject:
case kArchCallJSFunction:
- case kArchDeoptimize:
case kArchDrop:
case kArchJmp:
case kArchNop:
// TODO(turbofan): on ARM64 it's probably better to use the code object in a
// register if there are multiple uses of it. Improve constant pool and the
// heuristics in the register allocator for where to emit constants.
- InitializeCallBuffer(call, &buffer, true, false, continuation,
- deoptimization);
+ InitializeCallBuffer(call, &buffer, true, false);
// TODO(dcarney): might be possible to use claim/poke instead
// Push any stack arguments.
AddSafepointAndDeopt(instr);
break;
}
- case kArchDeoptimize: {
- int deoptimization_id = BuildTranslation(instr, 0);
-
- Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
- isolate(), deoptimization_id, Deoptimizer::LAZY);
- __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
- break;
- }
case kArchDrop: {
int words = MiscField::decode(instr->opcode());
__ Drop(words);
}
+// Emits a call to the Deoptimizer's LAZY entry for {deoptimization_id}.
+// Invoked per deoptimization point from EmitLazyDeoptimizationCallTable.
+void CodeGenerator::AssembleDeoptimizerCall(int deoptimization_id) {
+ Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
+ isolate(), deoptimization_id, Deoptimizer::LAZY);
+ __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
+}
+
+
// TODO(dcarney): increase stack slots in frame once before first use.
static int AlignedStackSlots(int stack_slots) {
if (stack_slots & 1) stack_slots++;
// TODO(turbofan): on ARM64 it's probably better to use the code object in a
// register if there are multiple uses of it. Improve constant pool and the
// heuristics in the register allocator for where to emit constants.
- InitializeCallBuffer(call, &buffer, true, false, continuation,
- deoptimization);
+ InitializeCallBuffer(call, &buffer, true, false);
// Push the arguments to the stack.
bool is_c_frame = descriptor->kind() == CallDescriptor::kCallAddress;
}
-Node* AstGraphBuilder::Environment::Checkpoint(BailoutId ast_id) {
+// Creates a FrameState node capturing the current environment: parameter,
+// local and operand-stack state values plus the current context. {combine}
+// records how the owning node's output is merged into the frame state for
+// lazy bailout (see OutputFrameStateCombine).
+Node* AstGraphBuilder::Environment::Checkpoint(
+ BailoutId ast_id, OutputFrameStateCombine combine) {
 UpdateStateValues(&parameters_node_, 0, parameters_count());
 UpdateStateValues(&locals_node_, parameters_count(), locals_count());
 UpdateStateValues(&stack_node_, parameters_count() + locals_count(),
 stack_height());
- Operator* op = common()->FrameState(ast_id);
+ Operator* op = common()->FrameState(ast_id, combine);
- return graph()->NewNode(op, parameters_node_, locals_node_, stack_node_);
+ return graph()->NewNode(op, parameters_node_, locals_node_, stack_node_,
+ GetContext());
}
PrintableUnique<Name> name =
MakeUnique(property->key()->AsLiteral()->AsPropertyName());
old_value = NewNode(javascript()->LoadNamed(name), object);
- PrepareFrameState(old_value, property->LoadId(), PUSH_OUTPUT);
+ PrepareFrameState(old_value, property->LoadId(), kPushOutput);
break;
}
case KEYED_PROPERTY: {
Node* key = environment()->Top();
Node* object = environment()->Peek(1);
old_value = NewNode(javascript()->LoadProperty(), object, key);
- PrepareFrameState(old_value, property->LoadId(), PUSH_OUTPUT);
+ PrepareFrameState(old_value, property->LoadId(), kPushOutput);
break;
}
}
Node* right = environment()->Pop();
Node* left = environment()->Pop();
Node* value = BuildBinaryOp(left, right, expr->binary_op());
- PrepareFrameState(value, expr->binary_operation()->id(), PUSH_OUTPUT);
+ PrepareFrameState(value, expr->binary_operation()->id(), kPushOutput);
environment()->Push(value);
} else {
VisitForValue(expr->value());
Node* key = environment()->Pop();
callee_value = NewNode(javascript()->LoadProperty(), object, key);
}
- PrepareFrameState(callee_value, property->LoadId(), PUSH_OUTPUT);
+ PrepareFrameState(callee_value, property->LoadId(), kPushOutput);
receiver_value = environment()->Pop();
// Note that a PROPERTY_CALL requires the receiver to be wrapped into an
// object for sloppy callees. This could also be modeled explicitly here,
Node* callee_value = NewNode(javascript()->LoadNamed(unique), receiver_value);
// TODO(jarin): Find/create a bailout id to deoptimize to (crankshaft
// refuses to optimize functions with jsruntime calls).
- PrepareFrameState(callee_value, BailoutId::None(), PUSH_OUTPUT);
+ PrepareFrameState(callee_value, BailoutId::None(), kPushOutput);
environment()->Push(callee_value);
environment()->Push(receiver_value);
PrintableUnique<Name> name =
MakeUnique(property->key()->AsLiteral()->AsPropertyName());
old_value = NewNode(javascript()->LoadNamed(name), object);
- PrepareFrameState(old_value, property->LoadId(), PUSH_OUTPUT);
+ PrepareFrameState(old_value, property->LoadId(), kPushOutput);
stack_depth = 1;
break;
}
Node* key = environment()->Top();
Node* object = environment()->Peek(1);
old_value = NewNode(javascript()->LoadProperty(), object, key);
- PrepareFrameState(old_value, property->LoadId(), PUSH_OUTPUT);
+ PrepareFrameState(old_value, property->LoadId(), kPushOutput);
stack_depth = 2;
break;
}
PrintableUnique<Name> name = MakeUnique(variable->name());
Operator* op = javascript()->LoadNamed(name, contextual_mode);
Node* node = NewNode(op, global);
- PrepareFrameState(node, bailout_id, PUSH_OUTPUT);
+ PrepareFrameState(node, bailout_id, kPushOutput);
return node;
}
case Variable::PARAMETER:
DCHECK(node->InputAt(frame_state_index)->op()->opcode() == IrOpcode::kDead);
- Node* frame_state_node = environment()->Checkpoint(ast_id);
+ Node* frame_state_node = environment()->Checkpoint(ast_id, combine);
node->ReplaceInput(frame_state_index, frame_state_node);
}
-
- if (OperatorProperties::CanLazilyDeoptimize(node->op())) {
- // The deopting node should have an outgoing control dependency.
- DCHECK(environment()->GetControlDependency() == node);
-
- StructuredGraphBuilder::Environment* continuation_env = environment();
- // Create environment for the deoptimization block, and build the block.
- StructuredGraphBuilder::Environment* deopt_env =
- CopyEnvironment(continuation_env);
- set_environment(deopt_env);
-
- if (combine == PUSH_OUTPUT) {
- environment()->Push(node);
- }
-
- NewNode(common()->LazyDeoptimization());
-
- // TODO(jarin) If ast_id.IsNone(), perhaps we should generate an empty
- // deopt block and make sure there is no patch entry for this (so
- // that the deoptimizer dies when trying to deoptimize here).
- Node* state_node = environment()->Checkpoint(ast_id);
- Node* deoptimize_node = NewNode(common()->Deoptimize(), state_node);
- UpdateControlDependencyToLeaveFunction(deoptimize_node);
-
- // Continue with the original environment.
- set_environment(continuation_env);
- NewNode(common()->Continuation());
- }
}
}
// Dispatched from VisitForInStatement.
void VisitForInAssignment(Expression* expr, Node* value);
- // Flag that describes how to combine the current environment with
- // the output of a node to obtain a framestate for lazy bailout.
- enum OutputFrameStateCombine {
- PUSH_OUTPUT, // Push the output on the expression stack.
- IGNORE_OUTPUT // Use the frame state as-is.
- };
-
// Builds deoptimization for a given node.
void PrepareFrameState(Node* node, BailoutId ast_id,
- OutputFrameStateCombine combine = IGNORE_OUTPUT);
+ OutputFrameStateCombine combine = kIgnoreOutput);
OutputFrameStateCombine StateCombineFromAstContext();
// Preserve a checkpoint of the environment for the IR graph. Any
// further mutation of the environment will not affect checkpoints.
- Node* Checkpoint(BailoutId ast_id);
+ Node* Checkpoint(BailoutId ast_id, OutputFrameStateCombine combine);
private:
void UpdateStateValues(Node** state_values, int offset, int count);
// Determines how to combine the frame state with the value
// that is about to be plugged into this AstContext.
- AstGraphBuilder::OutputFrameStateCombine GetStateCombine() {
- return IsEffect() ? IGNORE_OUTPUT : PUSH_OUTPUT;
+ OutputFrameStateCombine GetStateCombine() {
+ return IsEffect() ? kIgnoreOutput : kPushOutput;
}
// Plug a node into this expression context. Call this function in tail
masm_(code->zone()->isolate(), NULL, 0),
resolver_(this),
safepoints_(code->zone()),
- lazy_deoptimization_entries_(code->zone()),
+ deoptimization_points_(code->zone()),
deoptimization_states_(code->zone()),
deoptimization_literals_(code->zone()),
- translations_(code->zone()) {
- deoptimization_states_.resize(code->GetDeoptimizationEntryCount(), NULL);
-}
+ translations_(code->zone()) {}
Handle<Code> CodeGenerator::GenerateCode() {
AssembleInstruction(*i);
}
+ EmitLazyDeoptimizationCallTable();
+
FinishCode(masm());
- UpdateSafepointsWithDeoptimizationPc();
safepoints()->Emit(masm(), frame()->GetSpillSlotCount());
// TODO(titzer): what are the right code flags here?
}
-void CodeGenerator::UpdateSafepointsWithDeoptimizationPc() {
- int patch_count = static_cast<int>(lazy_deoptimization_entries_.size());
- for (int i = 0; i < patch_count; ++i) {
- LazyDeoptimizationEntry entry = lazy_deoptimization_entries_[i];
- // TODO(jarin) make sure that there is no code (other than nops)
- // between the call position and the continuation position.
- safepoints()->SetDeoptimizationPc(entry.safepoint_id(),
- entry.deoptimization()->pos());
+// Emits the lazy-deoptimization call table: one AssembleDeoptimizerCall
+// stub per recorded deoptimization point. Each stub's pc is registered
+// with the point's safepoint so that a lazy deopt returns into the
+// matching stub.
+void CodeGenerator::EmitLazyDeoptimizationCallTable() {
+ for (ZoneDeque<DeoptimizationPoint*>::iterator iter =
+ deoptimization_points_.begin();
+ iter != deoptimization_points_.end(); ++iter) {
+ int pc_offset = masm()->pc_offset();
+ AssembleDeoptimizerCall((*iter)->lazy_state_id());
+ safepoints()->SetDeoptimizationPc((*iter)->safepoint(), pc_offset);
}
}
void CodeGenerator::PopulateDeoptimizationData(Handle<Code> code_object) {
CompilationInfo* info = linkage()->info();
- int deopt_count = code()->GetDeoptimizationEntryCount();
- int patch_count = static_cast<int>(lazy_deoptimization_entries_.size());
- if (patch_count == 0 && deopt_count == 0) return;
+ int deopt_count = static_cast<int>(deoptimization_states_.size());
+ if (deopt_count == 0) return;
Handle<DeoptimizationInputData> data =
DeoptimizationInputData::New(isolate(), deopt_count, TENURED);
// Populate deoptimization entries.
for (int i = 0; i < deopt_count; i++) {
- FrameStateDescriptor* descriptor = code()->GetDeoptimizationEntry(i);
- data->SetAstId(i, descriptor->bailout_id());
+ DeoptimizationState* deoptimization_state = deoptimization_states_[i];
+ data->SetAstId(i, deoptimization_state->bailout_id());
CHECK_NE(NULL, deoptimization_states_[i]);
data->SetTranslationIndex(
- i, Smi::FromInt(deoptimization_states_[i]->translation_id_));
+ i, Smi::FromInt(deoptimization_states_[i]->translation_id()));
data->SetArgumentsStackHeight(i, Smi::FromInt(0));
data->SetPc(i, Smi::FromInt(-1));
}
instr->pointer_map(), Safepoint::kSimple, 0,
needs_frame_state ? Safepoint::kLazyDeopt : Safepoint::kNoLazyDeopt);
- if (flags & CallDescriptor::kLazyDeoptimization) {
- RecordLazyDeoptimizationEntry(instr, safepoint_id);
- }
-
if (needs_frame_state) {
// If the frame state is present, it starts at argument 1
// (just after the code address).
InstructionOperandConverter converter(this, instr);
// Deoptimization info starts at argument 1
int frame_state_offset = 1;
- int deoptimization_id = BuildTranslation(instr, frame_state_offset);
+ FrameStateDescriptor* descriptor =
+ GetFrameStateDescriptor(instr, frame_state_offset);
+ int deopt_state_id =
+ BuildTranslation(instr, frame_state_offset, kIgnoreOutput);
+ int lazy_deopt_state_id = deopt_state_id;
+ if (descriptor->state_combine() != kIgnoreOutput) {
+ lazy_deopt_state_id = BuildTranslation(instr, frame_state_offset,
+ descriptor->state_combine());
+ }
+ deoptimization_points_.push_back(new (zone()) DeoptimizationPoint(
+ deopt_state_id, lazy_deopt_state_id, descriptor, safepoint_id));
#if DEBUG
// Make sure all the values live in stack slots or they are immediates.
// (The values should not live in register because registers are clobbered
// by calls.)
- FrameStateDescriptor* descriptor =
- code()->GetDeoptimizationEntry(deoptimization_id);
for (int i = 0; i < descriptor->size(); i++) {
InstructionOperand* op = instr->InputAt(frame_state_offset + 1 + i);
CHECK(op->IsStackSlot() || op->IsImmediate());
}
#endif
- safepoints()->RecordLazyDeoptimizationIndex(deoptimization_id);
+ safepoints()->RecordLazyDeoptimizationIndex(lazy_deopt_state_id);
}
if (flags & CallDescriptor::kNeedsNopAfterCall) {
}
-void CodeGenerator::RecordLazyDeoptimizationEntry(Instruction* instr,
- Safepoint::Id safepoint_id) {
- InstructionOperandConverter i(this, instr);
-
- Label after_call;
- masm()->bind(&after_call);
-
- // The continuation and deoptimization are the last two inputs:
- BasicBlock* cont_block =
- i.InputBlock(static_cast<int>(instr->InputCount()) - 2);
- BasicBlock* deopt_block =
- i.InputBlock(static_cast<int>(instr->InputCount()) - 1);
-
- Label* cont_label = code_->GetLabel(cont_block);
- Label* deopt_label = code_->GetLabel(deopt_block);
-
- lazy_deoptimization_entries_.push_back(LazyDeoptimizationEntry(
- after_call.pos(), cont_label, deopt_label, safepoint_id));
-}
-
-
int CodeGenerator::DefineDeoptimizationLiteral(Handle<Object> literal) {
int result = static_cast<int>(deoptimization_literals_.size());
for (unsigned i = 0; i < deoptimization_literals_.size(); ++i) {
}
-int CodeGenerator::BuildTranslation(Instruction* instr,
- int frame_state_offset) {
+// Decodes the StateId immediate at {frame_state_offset} of {instr} and
+// returns the FrameStateDescriptor registered under that id.
+FrameStateDescriptor* CodeGenerator::GetFrameStateDescriptor(
+ Instruction* instr, int frame_state_offset) {
 InstructionOperandConverter i(this, instr);
- int deoptimization_id = i.InputInt32(frame_state_offset);
- frame_state_offset++;
+ InstructionSequence::StateId state_id =
+ InstructionSequence::StateId::FromInt(i.InputInt32(frame_state_offset));
+ return code()->GetFrameStateDescriptor(state_id);
+}
- // We should build translation only once.
- DCHECK_EQ(NULL, deoptimization_states_[deoptimization_id]);
+// Builds a deoptimization Translation from the frame state inputs of
+// {instr}, appending the instruction's output when {state_combine} is
+// kPushOutput, and registers a new DeoptimizationState for it. Returns
+// the id of the newly registered state.
+int CodeGenerator::BuildTranslation(Instruction* instr, int frame_state_offset,
+ OutputFrameStateCombine state_combine) {
 FrameStateDescriptor* descriptor =
- code()->GetDeoptimizationEntry(deoptimization_id);
+ GetFrameStateDescriptor(instr, frame_state_offset);
+ frame_state_offset++; // Step past the state id immediate.
+
+ int height = descriptor->size() - descriptor->parameters_count();
+ switch (state_combine) {
+ case kPushOutput:
+ height++; // The pushed output occupies one extra stack slot.
+ break;
+ case kIgnoreOutput:
+ break;
+ }
+
+
 Translation translation(&translations_, 1, 1, zone());
 translation.BeginJSFrame(descriptor->bailout_id(),
- Translation::kSelfLiteralId,
- descriptor->size() - descriptor->parameters_count());
+ Translation::kSelfLiteralId, height);
 for (int i = 0; i < descriptor->size(); i++) {
 AddTranslationForOperand(&translation, instr,
 instr->InputAt(i + frame_state_offset));
 }
- deoptimization_states_[deoptimization_id] =
- new (zone()) DeoptimizationState(translation.index());
+ switch (state_combine) {
+ case kPushOutput:
+ DCHECK(instr->OutputCount() == 1);
+ AddTranslationForOperand(&translation, instr, instr->OutputAt(0));
+ break;
+ case kIgnoreOutput:
+ break;
+ }
+
+ int deoptimization_id = static_cast<int>(deoptimization_states_.size());
+
+ deoptimization_states_.push_back(new (zone()) DeoptimizationState(
+ descriptor->bailout_id(), translation.index()));
 return deoptimization_id;
}
void AssembleArchBranch(Instruction* instr, FlagsCondition condition);
void AssembleArchBoolean(Instruction* instr, FlagsCondition condition);
+ void AssembleDeoptimizerCall(int deoptimization_id);
+
// Generates an architecture-specific, descriptor-specific prologue
// to set up a stack frame.
void AssemblePrologue();
// ===========================================================================
// Deoptimization table construction
void AddSafepointAndDeopt(Instruction* instr);
- void UpdateSafepointsWithDeoptimizationPc();
- void RecordLazyDeoptimizationEntry(Instruction* instr,
- Safepoint::Id safepoint_id);
+ void EmitLazyDeoptimizationCallTable();
void PopulateDeoptimizationData(Handle<Code> code);
int DefineDeoptimizationLiteral(Handle<Object> literal);
- int BuildTranslation(Instruction* instr, int frame_state_offset);
+ FrameStateDescriptor* GetFrameStateDescriptor(Instruction* instr,
+ int frame_state_offset);
+ int BuildTranslation(Instruction* instr, int frame_state_offset,
+ OutputFrameStateCombine state_combine);
void AddTranslationForOperand(Translation* translation, Instruction* instr,
InstructionOperand* op);
void AddNopForSmiCodeInlining();
// ===========================================================================
- class LazyDeoptimizationEntry V8_FINAL {
+ // Describes one deoptimizable call site: its eager and lazy translation
+ // state ids, the frame state descriptor, and the associated safepoint.
+ class DeoptimizationPoint : public ZoneObject {
 public:
- LazyDeoptimizationEntry(int position_after_call, Label* continuation,
- Label* deoptimization, Safepoint::Id safepoint_id)
- : position_after_call_(position_after_call),
- continuation_(continuation),
- deoptimization_(deoptimization),
- safepoint_id_(safepoint_id) {}
-
- int position_after_call() const { return position_after_call_; }
- Label* continuation() const { return continuation_; }
- Label* deoptimization() const { return deoptimization_; }
- Safepoint::Id safepoint_id() const { return safepoint_id_; }
+ int state_id() const { return state_id_; }
+ int lazy_state_id() const { return lazy_state_id_; }
+ FrameStateDescriptor* descriptor() const { return descriptor_; }
+ Safepoint::Id safepoint() const { return safepoint_; }
+
+ DeoptimizationPoint(int state_id, int lazy_state_id,
+ FrameStateDescriptor* descriptor,
+ Safepoint::Id safepoint)
+ : state_id_(state_id),
+ lazy_state_id_(lazy_state_id),
+ descriptor_(descriptor),
+ safepoint_(safepoint) {}
 private:
- int position_after_call_;
- Label* continuation_;
- Label* deoptimization_;
- Safepoint::Id safepoint_;
+ int state_id_;
+ int lazy_state_id_;
+ FrameStateDescriptor* descriptor_;
+ Safepoint::Id safepoint_;
 };
+ // Pairs the AST bailout id with its index in the translation buffer;
+ // consumed by PopulateDeoptimizationData.
 struct DeoptimizationState : ZoneObject {
- int translation_id_;
+ public:
+ BailoutId bailout_id() const { return bailout_id_; }
+ int translation_id() const { return translation_id_; }
- explicit DeoptimizationState(int translation_id)
- : translation_id_(translation_id) {}
+ DeoptimizationState(BailoutId bailout_id, int translation_id)
+ : bailout_id_(bailout_id), translation_id_(translation_id) {}
+
+ private:
+ BailoutId bailout_id_;
+ int translation_id_;
 };
InstructionSequence* code_;
MacroAssembler masm_;
GapResolver resolver_;
SafepointTableBuilder safepoints_;
- ZoneDeque<LazyDeoptimizationEntry> lazy_deoptimization_entries_;
+ ZoneDeque<DeoptimizationPoint*> deoptimization_points_;
ZoneDeque<DeoptimizationState*> deoptimization_states_;
ZoneDeque<Handle<Object> > deoptimization_literals_;
TranslationBuffer translations_;
}
};
+// Flag that describes how to combine the current environment with
+// the output of a node to obtain a framestate for lazy bailout.
+enum OutputFrameStateCombine {
+ kPushOutput, // Push the output on the expression stack.
+ kIgnoreOutput // Use the frame state as-is.
+};
+
+
+// Parameter bundle for the FrameState operator: the AST bailout id to
+// deoptimize to plus how the node's output is folded into the state.
+class FrameStateCallInfo {
+ public:
+ FrameStateCallInfo(BailoutId bailout_id,
+ OutputFrameStateCombine state_combine)
+ : bailout_id_(bailout_id), frame_state_combine_(state_combine) {}
+
+ BailoutId bailout_id() const { return bailout_id_; }
+ OutputFrameStateCombine state_combine() const { return frame_state_combine_; }
+
+ private:
+ BailoutId bailout_id_;
+ OutputFrameStateCombine frame_state_combine_;
+};
+
// Interface for building common operators that can be used at any level of IR,
// including JavaScript, mid-level, and low-level.
// TODO(titzer): Move the mnemonics into SimpleOperator and Operator1 classes.
Operator* IfTrue() { CONTROL_OP(IfTrue, 0, 1); }
Operator* IfFalse() { CONTROL_OP(IfFalse, 0, 1); }
Operator* Throw() { CONTROL_OP(Throw, 1, 1); }
- Operator* LazyDeoptimization() { CONTROL_OP(LazyDeoptimization, 0, 1); }
- Operator* Continuation() { CONTROL_OP(Continuation, 0, 1); }
-
- Operator* Deoptimize() {
- return new (zone_)
- ControlOperator(IrOpcode::kDeoptimize, 0, 1, 0, 1, "Deoptimize");
- }
Operator* Return() {
return new (zone_) ControlOperator(IrOpcode::kReturn, 0, 1, 0, 1, "Return");
return new (zone_) Operator1<int>(IrOpcode::kStateValues, Operator::kPure,
arguments, 1, "StateValues", arguments);
}
- Operator* FrameState(BailoutId ast_id) {
- return new (zone_) Operator1<BailoutId>(
- IrOpcode::kFrameState, Operator::kPure, 3, 1, "FrameState", ast_id);
+ Operator* FrameState(BailoutId bailout_id, OutputFrameStateCombine combine) {
+ return new (zone_) Operator1<FrameStateCallInfo>(
+ IrOpcode::kFrameState, Operator::kPure, 4, 1, "FrameState",
+ FrameStateCallInfo(bailout_id, combine));
}
Operator* Call(CallDescriptor* descriptor) {
return new (zone_) CallOperator(descriptor, "Call");
protected:
class Environment;
+ friend class Environment;
friend class ControlBuilder;
// The following method creates a new node having the specified operator and
return builder()->CopyEnvironment(this);
}
+ Node* GetContext() { return builder_->current_context(); }
+
protected:
// TODO(mstarzinger): Use phase-local zone instead!
Zone* zone() const { return graph()->zone(); }
AddSafepointAndDeopt(instr);
break;
}
- case kArchDeoptimize: {
- int deoptimization_id = BuildTranslation(instr, 0);
-
- Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
- isolate(), deoptimization_id, Deoptimizer::LAZY);
- __ call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
- break;
- }
case kArchDrop: {
int words = MiscField::decode(instr->opcode());
__ add(esp, Immediate(kPointerSize * words));
}
+// Emits a call to the Deoptimizer's LAZY entry for {deoptimization_id}
+// (ia32 variant). Invoked from EmitLazyDeoptimizationCallTable.
+void CodeGenerator::AssembleDeoptimizerCall(int deoptimization_id) {
+ Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
+ isolate(), deoptimization_id, Deoptimizer::LAZY);
+ __ call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
+}
+
+
// The calling convention for JSFunctions on IA32 passes arguments on the
// stack and the JSFunction and context in EDI and ESI, respectively, thus
// the steps of the call look as follows:
CallBuffer buffer(zone(), descriptor, frame_state_descriptor);
// Compute InstructionOperands for inputs and outputs.
- InitializeCallBuffer(call, &buffer, true, true, continuation, deoptimization);
+ InitializeCallBuffer(call, &buffer, true, true);
// Push any stack arguments.
for (NodeVectorRIter input = buffer.pushed_nodes.rbegin();
V(ArchCallAddress) \
V(ArchCallCodeObject) \
V(ArchCallJSFunction) \
- V(ArchDeoptimize) \
V(ArchDrop) \
V(ArchJmp) \
V(ArchNop) \
? 0
: (frame_state_descriptor->size() + 1);
}
-
- int control_count() const {
- return descriptor->CanLazilyDeoptimize() ? 2 : 0;
- }
};
} // namespace compiler
output_nodes.reserve(d->ReturnCount());
outputs.reserve(d->ReturnCount());
pushed_nodes.reserve(input_count());
- instruction_args.reserve(input_count() + control_count() +
- frame_state_value_count());
+ instruction_args.reserve(input_count() + frame_state_value_count());
}
// InstructionSelector::VisitCall platform independent instead.
void InstructionSelector::InitializeCallBuffer(Node* call, CallBuffer* buffer,
bool call_code_immediate,
- bool call_address_immediate,
- BasicBlock* cont_node,
- BasicBlock* deopt_node) {
+ bool call_address_immediate) {
OperandGenerator g(this);
DCHECK_EQ(call->op()->OutputCount(), buffer->descriptor->ReturnCount());
DCHECK_EQ(OperatorProperties::GetValueInputCount(call->op()),
// arg 1 : deoptimization id.
// arg 2 - arg (n + 1) : value inputs to the frame state.
if (buffer->frame_state_descriptor != NULL) {
- int deoptimization_id =
- sequence()->AddDeoptimizationEntry(buffer->frame_state_descriptor);
- buffer->instruction_args.push_back(g.TempImmediate(deoptimization_id));
+ InstructionSequence::StateId state_id =
+ sequence()->AddFrameStateDescriptor(buffer->frame_state_descriptor);
+ buffer->instruction_args.push_back(g.TempImmediate(state_id.ToInt()));
Node* frame_state = call->InputAt(buffer->descriptor->InputCount());
AddFrameStateInputs(frame_state, &buffer->instruction_args,
}
}
CHECK_EQ(pushed_count, static_cast<int>(buffer->pushed_nodes.size()));
-
- // If the call can deoptimize, we add the continuation and deoptimization
- // block labels.
- if (buffer->descriptor->CanLazilyDeoptimize()) {
- DCHECK(cont_node != NULL);
- DCHECK(deopt_node != NULL);
- buffer->instruction_args.push_back(g.Label(cont_node));
- buffer->instruction_args.push_back(g.Label(deopt_node));
- } else {
- DCHECK(cont_node == NULL);
- DCHECK(deopt_node == NULL);
- }
-
DCHECK(static_cast<size_t>(input_count) ==
- (buffer->instruction_args.size() - buffer->control_count() +
- buffer->pushed_nodes.size() - buffer->frame_state_value_count()));
+ (buffer->instruction_args.size() + buffer->pushed_nodes.size() -
+ buffer->frame_state_value_count()));
}
}
case BasicBlockData::kThrow:
return VisitThrow(input);
- case BasicBlockData::kDeoptimize:
- return VisitDeoptimize(input);
- case BasicBlockData::kCall: {
- BasicBlock* deoptimization = block->SuccessorAt(0);
- BasicBlock* continuation = block->SuccessorAt(1);
- VisitCall(input, continuation, deoptimization);
- break;
- }
case BasicBlockData::kNone: {
// TODO(titzer): exit block doesn't have control.
DCHECK(input == NULL);
case IrOpcode::kIfFalse:
case IrOpcode::kEffectPhi:
case IrOpcode::kMerge:
- case IrOpcode::kLazyDeoptimization:
- case IrOpcode::kContinuation:
// No code needed for these graph artifacts.
return;
case IrOpcode::kFinish:
FrameStateDescriptor* InstructionSelector::GetFrameStateDescriptor(
Node* state) {
DCHECK(state->op()->opcode() == IrOpcode::kFrameState);
- BailoutId ast_id = OpParameter<BailoutId>(state);
+ FrameStateCallInfo state_info = OpParameter<FrameStateCallInfo>(state);
Node* parameters = state->InputAt(0);
Node* locals = state->InputAt(1);
Node* stack = state->InputAt(2);
return new (instruction_zone())
- FrameStateDescriptor(ast_id, OpParameter<int>(parameters),
+ FrameStateDescriptor(state_info, OpParameter<int>(parameters),
OpParameter<int>(locals), OpParameter<int>(stack));
}
Node* parameters = state->InputAt(0);
Node* locals = state->InputAt(1);
Node* stack = state->InputAt(2);
+ Node* context = state->InputAt(3);
+
+ DCHECK_EQ(IrOpcode::kStateValues, parameters->op()->opcode());
+ DCHECK_EQ(IrOpcode::kStateValues, locals->op()->opcode());
+ DCHECK_EQ(IrOpcode::kStateValues, stack->op()->opcode());
DCHECK_EQ(descriptor->parameters_count(), parameters->InputCount());
DCHECK_EQ(descriptor->locals_count(), locals->InputCount());
for (int i = 0; i < descriptor->parameters_count(); i++) {
inputs->push_back(UseOrImmediate(&g, parameters->InputAt(i)));
}
+ inputs->push_back(UseOrImmediate(&g, context));
for (int i = 0; i < descriptor->locals_count(); i++) {
inputs->push_back(UseOrImmediate(&g, locals->InputAt(i)));
}
}
-void InstructionSelector::VisitDeoptimize(Node* deopt) {
- DCHECK(deopt->op()->opcode() == IrOpcode::kDeoptimize);
- Node* state = deopt->InputAt(0);
- FrameStateDescriptor* descriptor = GetFrameStateDescriptor(state);
- int deoptimization_id = sequence()->AddDeoptimizationEntry(descriptor);
-
- InstructionOperandVector inputs(zone());
- inputs.reserve(descriptor->size() + 1);
-
- OperandGenerator g(this);
- inputs.push_back(g.TempImmediate(deoptimization_id));
-
- AddFrameStateInputs(state, &inputs, descriptor);
-
- DCHECK_EQ(descriptor->size() + 1, inputs.size());
-
- Emit(kArchDeoptimize, 0, NULL, inputs.size(), &inputs.front(), 0, NULL);
-}
-
-
#if !V8_TURBOFAN_BACKEND
#define DECLARE_UNIMPLEMENTED_SELECTOR(x) \
// {call_address_immediate} to generate immediate operands to address calls.
void InitializeCallBuffer(Node* call, CallBuffer* buffer,
bool call_code_immediate,
- bool call_address_immediate, BasicBlock* cont_node,
- BasicBlock* deopt_node);
+ bool call_address_immediate);
FrameStateDescriptor* GetFrameStateDescriptor(Node* node);
void AddFrameStateInputs(Node* state, InstructionOperandVector* inputs,
}
-int InstructionSequence::AddDeoptimizationEntry(
+// Registers {descriptor} and returns a StateId handle for later lookup.
+InstructionSequence::StateId InstructionSequence::AddFrameStateDescriptor(
 FrameStateDescriptor* descriptor) {
 int deoptimization_id = static_cast<int>(deoptimization_entries_.size());
 deoptimization_entries_.push_back(descriptor);
- return deoptimization_id;
+ return StateId::FromInt(deoptimization_id);
}
-FrameStateDescriptor* InstructionSequence::GetDeoptimizationEntry(
- int deoptimization_id) {
- return deoptimization_entries_[deoptimization_id];
+// Returns the descriptor previously registered under {state_id}.
+FrameStateDescriptor* InstructionSequence::GetFrameStateDescriptor(
+ InstructionSequence::StateId state_id) {
+ return deoptimization_entries_[state_id.ToInt()];
}
-int InstructionSequence::GetDeoptimizationEntryCount() {
+// Returns the number of registered frame state descriptors.
+int InstructionSequence::GetFrameStateDescriptorCount() {
 return static_cast<int>(deoptimization_entries_.size());
}
class FrameStateDescriptor : public ZoneObject {
public:
- FrameStateDescriptor(BailoutId bailout_id, int parameters_count,
- int locals_count, int stack_count)
- : bailout_id_(bailout_id),
+ FrameStateDescriptor(const FrameStateCallInfo& state_info,
+ int parameters_count, int locals_count, int stack_count)
+ : bailout_id_(state_info.bailout_id()),
+ frame_state_combine_(state_info.state_combine()),
parameters_count_(parameters_count),
locals_count_(locals_count),
stack_count_(stack_count) {}
BailoutId bailout_id() const { return bailout_id_; }
+ OutputFrameStateCombine state_combine() const { return frame_state_combine_; }
int parameters_count() { return parameters_count_; }
int locals_count() { return locals_count_; }
int stack_count() { return stack_count_; }
- int size() { return parameters_count_ + locals_count_ + stack_count_; }
+ int size() {
+ return parameters_count_ + locals_count_ + stack_count_ +
+ 1; // Includes context.
+ }
private:
BailoutId bailout_id_;
+ OutputFrameStateCombine frame_state_combine_;
int parameters_count_;
int locals_count_;
int stack_count_;
return immediates_[index];
}
- int AddDeoptimizationEntry(FrameStateDescriptor* descriptor);
- FrameStateDescriptor* GetDeoptimizationEntry(int deoptimization_id);
- int GetDeoptimizationEntryCount();
+ // Type-safe wrapper for an index into the frame state descriptor table;
+ // the private constructor forces construction through FromInt so raw
+ // ints cannot be confused with descriptor ids.
+ class StateId {
+ public:
+ static StateId FromInt(int id) { return StateId(id); }
+ int ToInt() const { return id_; }
+
+ private:
+ explicit StateId(int id) : id_(id) {}
+ int id_;
+ };
+
+ StateId AddFrameStateDescriptor(FrameStateDescriptor* descriptor);
+ FrameStateDescriptor* GetFrameStateDescriptor(StateId deoptimization_id);
+ int GetFrameStateDescriptorCount();
private:
friend OStream& operator<<(OStream& os, const InstructionSequence& code);
static CallDescriptor::Flags FlagsForNode(Node* node) {
CallDescriptor::Flags result = CallDescriptor::kNoFlags;
- if (OperatorProperties::CanLazilyDeoptimize(node->op())) {
- result |= CallDescriptor::kLazyDeoptimization;
- }
if (OperatorProperties::HasFrameStateInput(node->op())) {
result |= CallDescriptor::kNeedsFrameState;
}
locations, // locations
Operator::kNoProperties, // properties
kNoCalleeSaved, // callee-saved registers
- CallDescriptor::kLazyDeoptimization); // flags
+ CallDescriptor::kNeedsFrameState); // flags
}
// TODO(svenpanne) Output properties etc. and be less cryptic.
return os << d.kind() << ":" << d.debug_name() << ":r" << d.ReturnCount()
<< "p" << d.ParameterCount() << "i" << d.InputCount() << "f"
- << d.FrameStateCount() << (d.CanLazilyDeoptimize() ? "deopt" : "");
+ << d.FrameStateCount();
}
enum Flag {
- // TODO(jarin) kLazyDeoptimization and kNeedsFrameState should be unified.
kNoFlags = 0u,
- kLazyDeoptimization = 1u << 0,
- kNeedsFrameState = 1u << 1,
- kPatchableCallSite = 1u << 2,
- kNeedsNopAfterCall = 1u << 3,
+ kNeedsFrameState = 1u << 0,
+ kPatchableCallSite = 1u << 1,
+ kNeedsNopAfterCall = 1u << 2,
kPatchableCallSiteWithNop = kPatchableCallSite | kNeedsNopAfterCall
};
DEFINE_FLAGS(Flags, Flag);
Flags flags() const { return flags_; }
- bool CanLazilyDeoptimize() const { return flags() & kLazyDeoptimization; }
bool NeedsFrameState() const { return flags() & kNeedsFrameState; }
LinkageLocation GetReturnLocation(int index) {
V(IfFalse) \
V(Merge) \
V(Return) \
- V(Throw) \
- V(Continuation) \
- V(LazyDeoptimization) \
- V(Deoptimize)
+ V(Throw)
// Opcodes for common operators.
#define LEAF_OP_LIST(V) \
switch (op->opcode()) {
case IrOpcode::kJSCallFunction:
+ case IrOpcode::kJSCallConstruct:
return true;
case IrOpcode::kJSCallRuntime: {
Runtime::FunctionId function =
UNREACHABLE();
}
+ // Binary operations
+ case IrOpcode::kJSBitwiseOr:
+ case IrOpcode::kJSBitwiseXor:
+ case IrOpcode::kJSBitwiseAnd:
+ case IrOpcode::kJSShiftLeft:
+ case IrOpcode::kJSShiftRight:
+ case IrOpcode::kJSShiftRightLogical:
+ case IrOpcode::kJSAdd:
+ case IrOpcode::kJSSubtract:
+ case IrOpcode::kJSMultiply:
+ case IrOpcode::kJSDivide:
+ case IrOpcode::kJSModulus:
+ case IrOpcode::kJSLoadProperty:
+ case IrOpcode::kJSStoreProperty:
+ case IrOpcode::kJSLoadNamed:
+ case IrOpcode::kJSStoreNamed:
+ return true;
+
default:
return false;
}
#undef OPCODE_CASE
return static_cast<ControlOperator*>(op)->ControlInputCount();
default:
- // If a node can lazily deoptimize, it needs control dependency.
- if (CanLazilyDeoptimize(op)) {
- return 1;
- }
// Operators that have write effects must have a control
// dependency. Effect dependencies only ensure the correct order of
// write/read operations without consideration of control flow. Without an
inline bool OperatorProperties::HasControlOutput(Operator* op) {
IrOpcode::Value opcode = static_cast<IrOpcode::Value>(op->opcode());
- return (opcode != IrOpcode::kEnd && IrOpcode::IsControlOpcode(opcode)) ||
- CanLazilyDeoptimize(op);
+ return (opcode != IrOpcode::kEnd && IrOpcode::IsControlOpcode(opcode));
}
opcode == IrOpcode::kIfFalse;
}
-inline bool OperatorProperties::CanLazilyDeoptimize(Operator* op) {
- // TODO(jarin) This function allows turning on lazy deoptimization
- // incrementally. It will change as we turn on lazy deopt for
- // more nodes.
-
- if (!FLAG_turbo_deoptimization) {
- return false;
- }
-
- switch (op->opcode()) {
- case IrOpcode::kCall: {
- CallOperator* call_op = reinterpret_cast<CallOperator*>(op);
- CallDescriptor* descriptor = call_op->parameter();
- return descriptor->CanLazilyDeoptimize();
- }
- case IrOpcode::kJSCallRuntime: {
- Runtime::FunctionId function =
- reinterpret_cast<Operator1<Runtime::FunctionId>*>(op)->parameter();
- // TODO(jarin) At the moment, we only support lazy deoptimization for
- // a few chosen runtime functions.
- switch (function) {
- case Runtime::kDebugBreak:
- case Runtime::kDeoptimizeFunction:
- case Runtime::kSetScriptBreakPoint:
- case Runtime::kDebugGetLoadedScripts:
- case Runtime::kStackGuard:
- return true;
- default:
- return false;
- }
- UNREACHABLE();
- }
-
- // JS function calls
- case IrOpcode::kJSCallFunction:
- case IrOpcode::kJSCallConstruct:
-
- // Binary operations
- case IrOpcode::kJSBitwiseOr:
- case IrOpcode::kJSBitwiseXor:
- case IrOpcode::kJSBitwiseAnd:
- case IrOpcode::kJSShiftLeft:
- case IrOpcode::kJSShiftRight:
- case IrOpcode::kJSShiftRightLogical:
- case IrOpcode::kJSAdd:
- case IrOpcode::kJSSubtract:
- case IrOpcode::kJSMultiply:
- case IrOpcode::kJSDivide:
- case IrOpcode::kJSModulus:
- case IrOpcode::kJSLoadProperty:
- case IrOpcode::kJSStoreProperty:
- case IrOpcode::kJSLoadNamed:
- case IrOpcode::kJSStoreNamed:
- return true;
-
- default:
- return false;
- }
- return false;
-}
}
}
} // namespace v8::internal::compiler
static inline int GetControlOutputCount(Operator* op);
static inline bool IsBasicBlockBegin(Operator* op);
- static inline bool CanLazilyDeoptimize(Operator* op);
};
}
}
}
-void RawMachineAssembler::Deoptimize(Node* state) {
- Node* deopt = graph()->NewNode(common()->Deoptimize(), state);
- schedule()->AddDeoptimize(CurrentBlock(), deopt);
- current_block_ = NULL;
-}
-
-
Node* RawMachineAssembler::CallFunctionStub0(Node* function, Node* receiver,
Node* context, Node* frame_state,
- Label* continuation,
- Label* deoptimization,
CallFunctionFlags flags) {
CallFunctionStub stub(isolate(), 0, flags);
CodeStubInterfaceDescriptor* d = isolate()->code_stub_interface_descriptor(
stub.InitializeInterfaceDescriptor(d);
CallDescriptor* desc = Linkage::GetStubCallDescriptor(
- d, 1,
- CallDescriptor::kLazyDeoptimization | CallDescriptor::kNeedsFrameState,
- zone());
+ d, 1, CallDescriptor::kNeedsFrameState, zone());
Node* stub_code = HeapConstant(stub.GetCode());
Node* call = graph()->NewNode(common()->Call(desc), stub_code, function,
receiver, context, frame_state);
- schedule()->AddCall(CurrentBlock(), call, Use(continuation),
- Use(deoptimization));
- current_block_ = NULL;
+ schedule()->AddNode(CurrentBlock(), call);
return call;
}
Node* RawMachineAssembler::CallJS0(Node* function, Node* receiver,
- Label* continuation, Label* deoptimization) {
+ Node* frame_state) {
CallDescriptor* descriptor = Linkage::GetJSCallDescriptor(1, zone());
- Node* call = graph()->NewNode(common()->Call(descriptor), function, receiver);
- schedule()->AddCall(CurrentBlock(), call, Use(continuation),
- Use(deoptimization));
- current_block_ = NULL;
+ Node* call = graph()->NewNode(common()->Call(descriptor), function, receiver,
+ frame_state);
+ schedule()->AddNode(CurrentBlock(), call);
return call;
}
Node* RawMachineAssembler::CallRuntime1(Runtime::FunctionId function,
- Node* arg0, Label* continuation,
- Label* deoptimization) {
+ Node* arg0, Node* frame_state) {
CallDescriptor* descriptor = Linkage::GetRuntimeCallDescriptor(
- function, 1, Operator::kNoProperties, CallDescriptor::kLazyDeoptimization,
+ function, 1, Operator::kNoProperties, CallDescriptor::kNeedsFrameState,
zone());
Node* centry = HeapConstant(CEntryStub(isolate(), 1).GetCode());
Node* context = Parameter(1);
Node* call = graph()->NewNode(common()->Call(descriptor), centry, arg0, ref,
- arity, context);
- schedule()->AddCall(CurrentBlock(), call, Use(continuation),
- Use(deoptimization));
- current_block_ = NULL;
+ arity, context, frame_state);
+ schedule()->AddNode(CurrentBlock(), call);
return call;
}
void Branch(Node* condition, Label* true_val, Label* false_val);
- // Call through CallFunctionStub with lazy deopt and frame-state.
+ // Call through CallFunctionStub with a frame-state.
Node* CallFunctionStub0(Node* function, Node* receiver, Node* context,
- Node* frame_state, Label* continuation,
- Label* deoptimization, CallFunctionFlags flags);
+ Node* frame_state, CallFunctionFlags flags);
// Call to a JS function with zero parameters.
- Node* CallJS0(Node* function, Node* receiver, Label* continuation,
- Label* deoptimization);
+ Node* CallJS0(Node* function, Node* receiver, Node* frame_state);
// Call to a runtime function with zero parameters.
Node* CallRuntime1(Runtime::FunctionId function, Node* arg0,
- Label* continuation, Label* deoptimization);
+ Node* frame_state);
void Return(Node* value);
void Bind(Label* label);
- void Deoptimize(Node* state);
return os << "return";
case BasicBlockData::kThrow:
return os << "throw";
- case BasicBlockData::kCall:
- return os << "call";
- case BasicBlockData::kDeoptimize:
- return os << "deoptimize";
}
UNREACHABLE();
return os;
public:
// Possible control nodes that can end a block.
enum Control {
- kNone, // Control not initialized yet.
- kGoto, // Goto a single successor block.
- kBranch, // Branch if true to first successor, otherwise second.
- kReturn, // Return a value from this method.
- kThrow, // Throw an exception.
- kCall, // Call to a possibly deoptimizing or throwing function.
- kDeoptimize // Deoptimize.
+ kNone, // Control not initialized yet.
+ kGoto, // Goto a single successor block.
+ kBranch, // Branch if true to first successor, otherwise second.
+ kReturn, // Return a value from this method.
+ kThrow // Throw an exception.
};
int32_t rpo_number_; // special RPO number of the block.
AddSuccessor(block, succ);
}
- // BasicBlock building: add a (branching) call at the end of {block}.
- void AddCall(BasicBlock* block, Node* call, BasicBlock* cont_block,
- BasicBlock* deopt_block) {
- DCHECK(block->control_ == BasicBlock::kNone);
- DCHECK(call->opcode() == IrOpcode::kCall);
- block->control_ = BasicBlock::kCall;
- // Insert the deopt block first so that the RPO order builder picks
- // it first (and thus it ends up late in the RPO order).
- AddSuccessor(block, deopt_block);
- AddSuccessor(block, cont_block);
- SetControlInput(block, call);
- SetBlockForNode(block, call);
- }
-
// BasicBlock building: add a branch at the end of {block}.
void AddBranch(BasicBlock* block, Node* branch, BasicBlock* tblock,
BasicBlock* fblock) {
if (block != end()) AddSuccessor(block, end());
}
- // BasicBlock building: add a deopt at the end of {block}.
- void AddDeoptimize(BasicBlock* block, Node* state) {
- DCHECK(block->control_ == BasicBlock::kNone);
- block->control_ = BasicBlock::kDeoptimize;
- SetControlInput(block, state);
- block->deferred_ = true; // By default, consider deopts the slow path.
- if (block != end()) AddSuccessor(block, end());
- }
-
friend class Scheduler;
friend class CodeGenerator;
case IrOpcode::kBranch:
BuildBlocksForSuccessors(node, IrOpcode::kIfTrue, IrOpcode::kIfFalse);
break;
- case IrOpcode::kCall:
- if (OperatorProperties::CanLazilyDeoptimize(node->op())) {
- BuildBlocksForSuccessors(node, IrOpcode::kContinuation,
- IrOpcode::kLazyDeoptimization);
- }
- break;
default:
break;
}
scheduler_->schedule_root_nodes_.push_back(node);
ConnectBranch(node);
break;
- case IrOpcode::kDeoptimize:
- scheduler_->schedule_root_nodes_.push_back(node);
- ConnectDeoptimize(node);
- case IrOpcode::kCall:
- if (OperatorProperties::CanLazilyDeoptimize(node->op())) {
- scheduler_->schedule_root_nodes_.push_back(node);
- ConnectCall(node);
- }
- break;
case IrOpcode::kReturn:
scheduler_->schedule_root_nodes_.push_back(node);
ConnectReturn(node);
}
// Collect the branch-related projections from a node, such as IfTrue,
- // IfFalse, Continuation, and LazyDeoptimization.
+ // IfFalse.
// TODO(titzer): consider moving this to node.h
void CollectSuccessorProjections(Node* node, Node** buffer,
IrOpcode::Value true_opcode,
for (InputIter j = merge->inputs().begin(); j != merge->inputs().end();
++j) {
BasicBlock* predecessor_block = schedule_->block(*j);
- if ((*j)->opcode() != IrOpcode::kReturn &&
- (*j)->opcode() != IrOpcode::kDeoptimize) {
+ if ((*j)->opcode() != IrOpcode::kReturn) {
TraceConnect(merge, predecessor_block, block);
schedule_->AddGoto(predecessor_block, block);
}
}
}
- void ConnectDeoptimize(Node* deopt) {
- Node* deopt_block_node = NodeProperties::GetControlInput(deopt);
- BasicBlock* deopt_block = schedule_->block(deopt_block_node);
- TraceConnect(deopt, deopt_block, NULL);
- schedule_->AddDeoptimize(deopt_block, deopt);
- }
-
void ConnectReturn(Node* ret) {
Node* return_block_node = NodeProperties::GetControlInput(ret);
BasicBlock* return_block = schedule_->block(return_block_node);
schedule_->AddReturn(return_block, ret);
}
- void ConnectCall(Node* call) {
- Node* call_block_node = NodeProperties::GetControlInput(call);
- BasicBlock* call_block = schedule_->block(call_block_node);
-
- BasicBlock* successor_blocks[2];
- CollectSuccessorBlocks(call, successor_blocks, IrOpcode::kContinuation,
- IrOpcode::kLazyDeoptimization);
-
- TraceConnect(call, call_block, successor_blocks[0]);
- TraceConnect(call, call_block, successor_blocks[1]);
-
- schedule_->AddCall(call_block, call, successor_blocks[0],
- successor_blocks[1]);
- }
-
void TraceConnect(Node* node, BasicBlock* block, BasicBlock* succ) {
DCHECK_NE(NULL, block);
if (succ == NULL) {
OperatorProperties::GetControlInputCount(control->op()));
break;
}
- case IrOpcode::kLazyDeoptimization:
- // TODO(jarin): what are the constraints on these?
- break;
- case IrOpcode::kDeoptimize:
- // TODO(jarin): what are the constraints on these?
- break;
case IrOpcode::kFrameState:
// TODO(jarin): what are the constraints on these?
break;
case IrOpcode::kCall:
// TODO(rossberg): what are the constraints on these?
break;
- case IrOpcode::kContinuation:
- // TODO(jarin): what are the constraints on these?
- break;
case IrOpcode::kProjection: {
// Projection has an input that produces enough values.
int index = static_cast<Operator1<int>*>(node->op())->parameter();
AddSafepointAndDeopt(instr);
break;
}
- case kArchDeoptimize: {
- int deoptimization_id = BuildTranslation(instr, 0);
-
- Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
- isolate(), deoptimization_id, Deoptimizer::LAZY);
- __ call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
- break;
- }
case kArchDrop: {
int words = MiscField::decode(instr->opcode());
__ addq(rsp, Immediate(kPointerSize * words));
}
+void CodeGenerator::AssembleDeoptimizerCall(int deoptimization_id) {
+ Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
+ isolate(), deoptimization_id, Deoptimizer::LAZY);
+ __ call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
+}
+
+
void CodeGenerator::AssemblePrologue() {
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
int stack_slots = frame()->GetSpillSlotCount();
CallBuffer buffer(zone(), descriptor, frame_state_descriptor);
// Compute InstructionOperands for inputs and outputs.
- InitializeCallBuffer(call, &buffer, true, true, continuation, deoptimization);
+ InitializeCallBuffer(call, &buffer, true, true);
// TODO(dcarney): stack alignment for c calls.
// TODO(dcarney): shadow space on window for c calls.
CHECK_EQ(Translation::kSelfLiteralId, closure_id);
function = function_;
}
- unsigned height = iterator->Next();
+ unsigned height = iterator->Next() - 1; // Do not count the context.
unsigned height_in_bytes = height * kPointerSize;
if (trace_scope_ != NULL) {
PrintF(trace_scope_->file(), " translating ");
Register context_reg = JavaScriptFrame::context_register();
output_offset -= kPointerSize;
input_offset -= kPointerSize;
- if (is_bottommost) {
- value = input_->GetFrameSlot(input_offset);
- } else {
- value = reinterpret_cast<intptr_t>(function->context());
- }
- output_frame->SetFrameSlot(output_offset, value);
+ // Read the context from the translations.
+ DoTranslateCommand(iterator, frame_index, output_offset);
+ value = output_frame->GetFrameSlot(output_offset);
+ // The context should not be a placeholder for a materialized object.
+ CHECK(value !=
+ reinterpret_cast<intptr_t>(isolate_->heap()->arguments_marker()));
output_frame->SetContext(value);
if (is_topmost) output_frame->SetRegister(context_reg.code(), value);
if (trace_scope_ != NULL) {
bool has_construct_stub) {
FrameDescription* output_frame = deoptimizer->output_[frame_index];
function_ = output_frame->GetFunction();
+ context_ = reinterpret_cast<Object*>(output_frame->GetContext());
has_construct_stub_ = has_construct_stub;
expression_count_ = output_frame->GetExpressionCount();
expression_stack_ = new Object*[expression_count_];
void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) {
v->VisitPointer(BitCast<Object**>(&function_));
+ v->VisitPointer(&context_);
v->VisitPointers(parameters_, parameters_ + parameters_count_);
v->VisitPointers(expression_stack_, expression_stack_ + expression_count_);
}
return function_;
}
+ // Get the frame context.
+ Object* GetContext() { return context_; }
+
// Check if this frame is preceded by construct stub frame. The bottom-most
// inlined frame might still be called by an uninlined construct stub.
bool HasConstructStub() {
}
JSFunction* function_;
+ Object* context_;
bool has_construct_stub_;
int parameters_count_;
int expression_count_;
class HArgumentsObject;
+class HConstant;
class HEnterInlined V8_FINAL : public HTemplateInstruction<0> {
public:
- static HEnterInlined* New(Zone* zone,
- HValue* context,
- BailoutId return_id,
+ static HEnterInlined* New(Zone* zone, HValue* context, BailoutId return_id,
Handle<JSFunction> closure,
- int arguments_count,
+ HConstant* closure_context, int arguments_count,
FunctionLiteral* function,
- InliningKind inlining_kind,
- Variable* arguments_var,
+ InliningKind inlining_kind, Variable* arguments_var,
HArgumentsObject* arguments_object) {
- return new(zone) HEnterInlined(return_id, closure, arguments_count,
- function, inlining_kind, arguments_var,
- arguments_object, zone);
+ return new (zone) HEnterInlined(return_id, closure, closure_context,
+ arguments_count, function, inlining_kind,
+ arguments_var, arguments_object, zone);
}
void RegisterReturnTarget(HBasicBlock* return_target, Zone* zone);
virtual OStream& PrintDataTo(OStream& os) const V8_OVERRIDE; // NOLINT
Handle<JSFunction> closure() const { return closure_; }
+ HConstant* closure_context() const { return closure_context_; }
int arguments_count() const { return arguments_count_; }
bool arguments_pushed() const { return arguments_pushed_; }
void set_arguments_pushed() { arguments_pushed_ = true; }
DECLARE_CONCRETE_INSTRUCTION(EnterInlined)
private:
- HEnterInlined(BailoutId return_id,
- Handle<JSFunction> closure,
- int arguments_count,
- FunctionLiteral* function,
- InliningKind inlining_kind,
- Variable* arguments_var,
- HArgumentsObject* arguments_object,
+ HEnterInlined(BailoutId return_id, Handle<JSFunction> closure,
+ HConstant* closure_context, int arguments_count,
+ FunctionLiteral* function, InliningKind inlining_kind,
+ Variable* arguments_var, HArgumentsObject* arguments_object,
Zone* zone)
: return_id_(return_id),
closure_(closure),
+ closure_context_(closure_context),
arguments_count_(arguments_count),
arguments_pushed_(false),
function_(function),
inlining_kind_(inlining_kind),
arguments_var_(arguments_var),
arguments_object_(arguments_object),
- return_targets_(2, zone) {
- }
+ return_targets_(2, zone) {}
BailoutId return_id_;
Handle<JSFunction> closure_;
+ HConstant* closure_context_;
int arguments_count_;
bool arguments_pushed_;
FunctionLiteral* function_;
class HBoundsCheck;
class HPhi;
-class HConstant;
class HBitwise;
Scope* saved_scope = scope();
set_scope(target_info.scope());
HEnterInlined* enter_inlined =
- Add<HEnterInlined>(return_id, target, arguments_count, function,
+ Add<HEnterInlined>(return_id, target, context, arguments_count, function,
function_state()->inlining_kind(),
- function->scope()->arguments(),
- arguments_object);
+ function->scope()->arguments(), arguments_object);
function_state()->set_entry(enter_inlined);
VisitDeclarations(target_info.scope()->declarations());
HEnvironment* hydrogen_env = current_block_->last_environment();
int argument_index_accumulator = 0;
ZoneList<HValue*> objects_to_materialize(0, zone());
- instr->set_environment(CreateEnvironment(hydrogen_env,
- &argument_index_accumulator,
- &objects_to_materialize));
+ instr->set_environment(CreateEnvironment(
+ hydrogen_env, &argument_index_accumulator, &objects_to_materialize));
return instr;
}
if (instr->arguments_var() != NULL && instr->arguments_object()->IsLinked()) {
inner->Bind(instr->arguments_var(), instr->arguments_object());
}
+ inner->BindContext(instr->closure_context());
inner->set_entry(instr);
current_block_->UpdateEnvironment(inner);
chunk_->AddInlinedClosure(instr->closure());
LEnvironment* LChunkBuilderBase::CreateEnvironment(
- HEnvironment* hydrogen_env,
- int* argument_index_accumulator,
+ HEnvironment* hydrogen_env, int* argument_index_accumulator,
ZoneList<HValue*>* objects_to_materialize) {
if (hydrogen_env == NULL) return NULL;
- LEnvironment* outer = CreateEnvironment(hydrogen_env->outer(),
- argument_index_accumulator,
- objects_to_materialize);
+ LEnvironment* outer =
+ CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator,
+ objects_to_materialize);
BailoutId ast_id = hydrogen_env->ast_id();
DCHECK(!ast_id.IsNone() ||
hydrogen_env->frame_type() != JS_FUNCTION);
- int value_count = hydrogen_env->length() - hydrogen_env->specials_count();
+
+ int omitted_count = (hydrogen_env->frame_type() == JS_FUNCTION)
+ ? 0
+ : hydrogen_env->specials_count();
+
+ int value_count = hydrogen_env->length() - omitted_count;
LEnvironment* result =
new(zone()) LEnvironment(hydrogen_env->closure(),
hydrogen_env->frame_type(),
// Store the environment description into the environment
// (with holes for nested objects)
for (int i = 0; i < hydrogen_env->length(); ++i) {
- if (hydrogen_env->is_special_index(i)) continue;
-
+ if (hydrogen_env->is_special_index(i) &&
+ hydrogen_env->frame_type() != JS_FUNCTION) {
+ continue;
+ }
LOperand* op;
HValue* value = hydrogen_env->values()->at(i);
CHECK(!value->IsPushArguments()); // Do not deopt outgoing arguments
if (instr->arguments_var() != NULL && instr->arguments_object()->IsLinked()) {
inner->Bind(instr->arguments_var(), instr->arguments_object());
}
+ inner->BindContext(instr->closure_context());
inner->set_entry(instr);
current_block_->UpdateEnvironment(inner);
chunk_->AddInlinedClosure(instr->closure());
if (instr->arguments_var() != NULL && instr->arguments_object()->IsLinked()) {
inner->Bind(instr->arguments_var(), instr->arguments_object());
}
+ inner->BindContext(instr->closure_context());
inner->set_entry(instr);
current_block_->UpdateEnvironment(inner);
chunk_->AddInlinedClosure(instr->closure());
? deoptimized_frame_->HasConstructStub()
: frame_->IsConstructor();
}
+ Object* GetContext() {
+ return is_optimized_ ? deoptimized_frame_->GetContext() : frame_->context();
+ }
// To inspect all the provided arguments the frame might need to be
// replaced with the arguments frame.
if (local < local_count) {
// Get the context containing declarations.
Handle<Context> context(
- Context::cast(it.frame()->context())->declaration_context());
+ Context::cast(frame_inspector.GetContext())->declaration_context());
for (; i < scope_info->LocalCount(); ++i) {
if (scope_info->LocalIsSynthetic(i))
continue;
isolate->set_context(*(save->context()));
// Evaluate on the context of the frame.
- Handle<Context> context(Context::cast(frame->context()));
+ Handle<Context> context(Context::cast(frame_inspector.GetContext()));
DCHECK(!context.is_null());
// Materialize stack locals and the arguments object.
HEnvironment* hydrogen_env = current_block_->last_environment();
int argument_index_accumulator = 0;
ZoneList<HValue*> objects_to_materialize(0, zone());
- instr->set_environment(CreateEnvironment(hydrogen_env,
- &argument_index_accumulator,
- &objects_to_materialize));
+ instr->set_environment(CreateEnvironment(
+ hydrogen_env, &argument_index_accumulator, &objects_to_materialize));
return instr;
}
if (instr->arguments_var() != NULL && instr->arguments_object()->IsLinked()) {
inner->Bind(instr->arguments_var(), instr->arguments_object());
}
+ inner->BindContext(instr->closure_context());
inner->set_entry(instr);
current_block_->UpdateEnvironment(inner);
chunk_->AddInlinedClosure(instr->closure());
if (instr->arguments_var() != NULL && instr->arguments_object()->IsLinked()) {
inner->Bind(instr->arguments_var(), instr->arguments_object());
}
+ inner->BindContext(instr->closure_context());
inner->set_entry(instr);
current_block_->UpdateEnvironment(inner);
chunk_->AddInlinedClosure(instr->closure());
PrintableUnique<Object>::CreateUninitialized(zone(), deopt_function);
Node* deopt_fun_node = m.NewNode(common.HeapConstant(deopt_fun_constant));
- MLabel deopt, cont;
- Node* call = m.CallJS0(deopt_fun_node, undef_node, &cont, &deopt);
-
- m.Bind(&cont);
- m.NewNode(common.Continuation(), call);
- m.Return(undef_node);
-
- m.Bind(&deopt);
- m.NewNode(common.LazyDeoptimization(), call);
bailout_id = GetCallBailoutId();
Node* parameters = m.NewNode(common.StateValues(1), undef_node);
Node* locals = m.NewNode(common.StateValues(0));
Node* stack = m.NewNode(common.StateValues(0));
- Node* state_node =
- m.NewNode(common.FrameState(bailout_id), parameters, locals, stack);
- m.Deoptimize(state_node);
+ Node* state_node = m.NewNode(common.FrameState(bailout_id, kIgnoreOutput),
+ parameters, locals, stack, undef_node);
+
+ m.CallJS0(deopt_fun_node, undef_node, state_node);
+
+ m.Return(undef_node);
// Schedule the graph:
Schedule* schedule = m.Export();
- cont_block = cont.block();
- deopt_block = deopt.block();
-
return schedule;
}
CHECK(false);
return BailoutId(-1);
}
-
- BasicBlock* cont_block;
- BasicBlock* deopt_block;
};
DeoptimizationInputData* data =
DeoptimizationInputData::cast(t.result_code->deoptimization_data());
- Label* cont_label = t.code->GetLabel(t.cont_block);
- Label* deopt_label = t.code->GetLabel(t.deopt_block);
-
- // Check the safepoint - it should contain an entry for the call
- // with the right deoptimization address.
- SafepointEntry entry = t.result_code->GetSafepointEntry(
- t.result_code->instruction_start() + cont_label->pos());
- CHECK(entry.is_valid());
- CHECK_EQ(deopt_label->pos(), entry.deoptimization_pc());
+ // TODO(jarin) Find a way to test the safepoint.
// Check that we deoptimize to the right AST id.
CHECK_EQ(1, data->DeoptCount());
PrintableUnique<Object>::CreateUninitialized(zone(), function);
Node* this_fun_node = m.NewNode(common.HeapConstant(this_fun_constant));
- MLabel deopt, cont;
- Node* call = m.CallRuntime1(Runtime::kDeoptimizeFunction, this_fun_node,
- &cont, &deopt);
-
- m.Bind(&cont);
- m.NewNode(common.Continuation(), call);
- m.Return(undef_node);
-
- m.Bind(&deopt);
- m.NewNode(common.LazyDeoptimization(), call);
-
bailout_id = GetCallBailoutId();
Node* parameters = m.NewNode(common.StateValues(1), undef_node);
Node* locals = m.NewNode(common.StateValues(0));
Node* stack = m.NewNode(common.StateValues(0));
- Node* state_node =
- m.NewNode(common.FrameState(bailout_id), parameters, locals, stack);
- m.Deoptimize(state_node);
+ Node* state_node = m.NewNode(common.FrameState(bailout_id, kIgnoreOutput),
+ parameters, locals, stack, undef_node);
+
+ m.CallRuntime1(Runtime::kDeoptimizeFunction, this_fun_node, state_node);
+
+ m.Return(undef_node);
// Schedule the graph:
Schedule* schedule = m.Export();
- cont_block = cont.block();
- deopt_block = deopt.block();
-
return schedule;
}
CHECK(false);
return BailoutId(-1);
}
-
- BasicBlock* cont_block;
- BasicBlock* deopt_block;
};
}
-TEST(TestScheduleAddDeopt) {
- HandleAndZoneScope scope;
- Schedule schedule(scope.main_zone());
- Graph graph(scope.main_zone());
- Node* n0 = graph.NewNode(&dummy_operator);
- BasicBlock* entry = schedule.start();
- schedule.AddDeoptimize(entry, n0);
-
- CHECK_EQ(0, entry->PredecessorCount());
- CHECK_EQ(1, entry->SuccessorCount());
- CHECK_EQ(schedule.end(), entry->SuccessorAt(0));
-}
-
-
TEST(BuildMulNodeGraph) {
HandleAndZoneScope scope;
Schedule schedule(scope.main_zone());
#if V8_TURBOFAN_TARGET
-// So we can get a real JS function.
-static Handle<JSFunction> Compile(const char* source) {
- Isolate* isolate = CcTest::i_isolate();
- Handle<String> source_code = isolate->factory()
- ->NewStringFromUtf8(CStrVector(source))
- .ToHandleChecked();
- Handle<SharedFunctionInfo> shared_function = Compiler::CompileScript(
- source_code, Handle<String>(), 0, 0, false,
- Handle<Context>(isolate->native_context()), NULL, NULL,
- v8::ScriptCompiler::kNoCompileOptions, NOT_NATIVES_CODE);
- return isolate->factory()->NewFunctionFromSharedFunctionInfo(
- shared_function, isolate->native_context());
-}
-
-
-TEST(BuildScheduleTrivialLazyDeoptCall) {
- FLAG_turbo_deoptimization = true;
-
- HandleAndZoneScope scope;
- Isolate* isolate = scope.main_isolate();
- Graph graph(scope.main_zone());
- CommonOperatorBuilder common(scope.main_zone());
- JSOperatorBuilder js_builder(scope.main_zone());
-
- InitializedHandleScope handles;
- Handle<JSFunction> function = Compile("m()");
- CompilationInfoWithZone info(function);
- Linkage linkage(&info);
-
- // Manually transcribed code for:
- // function turbo_fan_test() {
- // m();
- // }
- // where m can lazy deopt (so it has a deopt block associated with it).
-
-
- // Start //
- // ^ //
- // | (EC) //
- // | //
- // /------> Call <--------------\ //
- // / ^ ^ \ //
- // / | | \ undef //
- // / / \ \ ^ //
- // (E) | (C) / \ (C) \ (E) | //
- // | Continuation LazyDeoptimization | | //
- // \___ ^ ^ / | //
- // \ | | ______/ Framestate //
- // undef \ | (VC) | (C) / ^ //
- // \ \ | | / / //
- // Return Deoptimization ----------/ //
- // ^ ^ //
- // \ / //
- // (C) \ / (C) //
- // \ / //
- // Merge //
- // ^ //
- // | //
- // End //
-
- Handle<Object> undef_object =
- Handle<Object>(isolate->heap()->undefined_value(), isolate);
- PrintableUnique<Object> undef_constant =
- PrintableUnique<Object>::CreateUninitialized(scope.main_zone(),
- undef_object);
-
- Node* undef_node = graph.NewNode(common.HeapConstant(undef_constant));
-
- Node* start_node = graph.NewNode(common.Start(0));
-
- CallDescriptor* descriptor = linkage.GetJSCallDescriptor(0);
- Node* call_node = graph.NewNode(common.Call(descriptor),
- undef_node, // function
- undef_node, // context
- start_node, // effect
- start_node); // control
-
- Node* cont_node = graph.NewNode(common.Continuation(), call_node);
- Node* lazy_deopt_node = graph.NewNode(common.LazyDeoptimization(), call_node);
-
- Node* parameters = graph.NewNode(common.StateValues(1), undef_node);
- Node* locals = graph.NewNode(common.StateValues(0));
- Node* stack = graph.NewNode(common.StateValues(0));
-
- Node* state_node = graph.NewNode(common.FrameState(BailoutId(1234)),
- parameters, locals, stack);
-
- Node* return_node = graph.NewNode(common.Return(),
- undef_node, // return value
- call_node, // effect
- cont_node); // control
- Node* deoptimization_node = graph.NewNode(common.Deoptimize(),
- state_node, // deopt environment
- call_node, // effect
- lazy_deopt_node); // control
-
- Node* merge_node =
- graph.NewNode(common.Merge(2), return_node, deoptimization_node);
-
- Node* end_node = graph.NewNode(common.End(), merge_node);
-
- graph.SetStart(start_node);
- graph.SetEnd(end_node);
-
- Schedule* schedule = ComputeAndVerifySchedule(12, &graph);
-
- // Tests:
- // Continuation and deopt have basic blocks.
- BasicBlock* cont_block = schedule->block(cont_node);
- BasicBlock* deopt_block = schedule->block(lazy_deopt_node);
- BasicBlock* call_block = schedule->block(call_node);
- CHECK_NE(NULL, cont_block);
- CHECK_NE(NULL, deopt_block);
- CHECK_NE(NULL, call_block);
- // The basic blocks are different.
- CHECK_NE(cont_block, deopt_block);
- CHECK_NE(cont_block, call_block);
- CHECK_NE(deopt_block, call_block);
- // The call node finishes its own basic block.
- CHECK_EQ(BasicBlock::kCall, call_block->control_);
- CHECK_EQ(call_node, call_block->control_input_);
- // The lazy deopt block is deferred.
- CHECK(deopt_block->deferred_);
- CHECK(!call_block->deferred_);
- CHECK(!cont_block->deferred_);
- // The lazy deopt block contains framestate + bailout (and nothing else).
- CHECK_EQ(deoptimization_node, deopt_block->control_input_);
- CHECK_EQ(5, static_cast<int>(deopt_block->nodes_.size()));
- CHECK_EQ(lazy_deopt_node, deopt_block->nodes_[0]);
- CHECK_EQ(IrOpcode::kStateValues, deopt_block->nodes_[1]->op()->opcode());
- CHECK_EQ(IrOpcode::kStateValues, deopt_block->nodes_[2]->op()->opcode());
- CHECK_EQ(IrOpcode::kStateValues, deopt_block->nodes_[3]->op()->opcode());
- CHECK_EQ(state_node, deopt_block->nodes_[4]);
-}
-
-
static Node* CreateDiamond(Graph* graph, CommonOperatorBuilder* common,
Node* cond) {
Node* tv = graph->NewNode(common->Int32Constant(6));
s.references_.insert(virtual_register);
}
}
- for (int i = 0; i < sequence.GetDeoptimizationEntryCount(); i++) {
- s.deoptimization_entries_.push_back(sequence.GetDeoptimizationEntry(i));
+ for (int i = 0; i < sequence.GetFrameStateDescriptorCount(); i++) {
+ s.deoptimization_entries_.push_back(sequence.GetFrameStateDescriptor(
+ InstructionSequence::StateId::FromInt(i)));
}
return s;
}
Node* function_node = m.Parameter(0);
Node* receiver = m.Parameter(1);
- StreamBuilder::Label deopt, cont;
-
- // TODO(jarin) Add frame state.
- Node* call = m.CallJS0(function_node, receiver, &cont, &deopt);
-
- m.Bind(&cont);
- m.NewNode(m.common()->Continuation(), call);
- m.Return(call);
-
- m.Bind(&deopt);
- m.NewNode(m.common()->LazyDeoptimization(), call);
Node* parameters = m.NewNode(m.common()->StateValues(1), m.Int32Constant(1));
Node* locals = m.NewNode(m.common()->StateValues(0));
Node* stack = m.NewNode(m.common()->StateValues(0));
+ Node* context_dummy = m.Int32Constant(0);
- Node* state_node =
- m.NewNode(m.common()->FrameState(bailout_id), parameters, locals, stack);
- m.Deoptimize(state_node);
+ Node* state_node = m.NewNode(m.common()->FrameState(bailout_id, kPushOutput),
+ parameters, locals, stack, context_dummy);
+ Node* call = m.CallJS0(function_node, receiver, state_node);
+ m.Return(call);
Stream s = m.Build(kAllExceptNopInstructions);
for (; index < s.size() && s[index]->arch_opcode() != kArchCallJSFunction;
index++) {
}
- // Now we should have three instructions: call, return and deoptimize.
- ASSERT_EQ(index + 3, s.size());
+ // Now we should have two instructions: call and return.
+ ASSERT_EQ(index + 2, s.size());
EXPECT_EQ(kArchCallJSFunction, s[index++]->arch_opcode());
EXPECT_EQ(kArchRet, s[index++]->arch_opcode());
- EXPECT_EQ(kArchDeoptimize, s[index++]->arch_opcode());
- EXPECT_EQ(index, s.size());
+
+ // TODO(jarin) Check deoptimization table.
}
kMachAnyTagged);
BailoutId bailout_id_before(42);
- BailoutId bailout_id_after(54);
// Some arguments for the call node.
Node* function_node = m.Parameter(0);
Node* parameters = m.NewNode(m.common()->StateValues(1), m.Int32Constant(43));
Node* locals = m.NewNode(m.common()->StateValues(1), m.Int32Constant(44));
Node* stack = m.NewNode(m.common()->StateValues(1), m.Int32Constant(45));
- Node* frame_state_before = m.NewNode(
- m.common()->FrameState(bailout_id_before), parameters, locals, stack);
+ Node* context_sentinel = m.Int32Constant(0);
+ Node* frame_state_before =
+ m.NewNode(m.common()->FrameState(bailout_id_before, kPushOutput),
+ parameters, locals, stack, context_sentinel);
- StreamBuilder::Label deopt, cont;
// Build the call.
- Node* call =
- m.CallFunctionStub0(function_node, receiver, context, frame_state_before,
- &cont, &deopt, CALL_AS_METHOD);
+ Node* call = m.CallFunctionStub0(function_node, receiver, context,
+ frame_state_before, CALL_AS_METHOD);
- // Create the continuation branch.
- m.Bind(&cont);
- m.NewNode(m.common()->Continuation(), call);
m.Return(call);
- // Create the lazy deoptimization block (with a different frame state).
- m.Bind(&deopt);
- m.NewNode(m.common()->LazyDeoptimization(), call);
-
- Node* stack_after =
- m.NewNode(m.common()->StateValues(2), m.Int32Constant(55), call);
-
- Node* frame_state_after = m.NewNode(m.common()->FrameState(bailout_id_after),
- parameters, locals, stack_after);
- m.Deoptimize(frame_state_after);
-
Stream s = m.Build(kAllExceptNopInstructions);
// Skip until kArchCallJSFunction.
for (; index < s.size() && s[index]->arch_opcode() != kArchCallCodeObject;
index++) {
}
- // Now we should have three instructions: call, return and deoptimize.
- ASSERT_EQ(index + 3, s.size());
+ // Now we should have two instructions: call, return.
+ ASSERT_EQ(index + 2, s.size());
// Check the call instruction
const Instruction* call_instr = s[index++];
size_t num_operands =
1 + // Code object.
1 +
- 3 + // Frame state deopt id + one input for each value in frame state.
+ 4 + // Frame state deopt id + one input for each value in frame state.
1 + // Function.
- 1 + // Context.
- 2; // Continuation and deoptimization block labels.
+ 1; // Context.
ASSERT_EQ(num_operands, call_instr->InputCount());
// Code object.
// Deoptimization id.
int32_t deopt_id_before = s.ToInt32(call_instr->InputAt(1));
- FrameStateDescriptor* desc_before = s.GetDeoptimizationEntry(deopt_id_before);
+ FrameStateDescriptor* desc_before =
+ s.GetFrameStateDescriptor(deopt_id_before);
EXPECT_EQ(bailout_id_before, desc_before->bailout_id());
+ EXPECT_EQ(kPushOutput, desc_before->state_combine());
EXPECT_EQ(1, desc_before->parameters_count());
EXPECT_EQ(1, desc_before->locals_count());
EXPECT_EQ(1, desc_before->stack_count());
EXPECT_EQ(43, s.ToInt32(call_instr->InputAt(2)));
- EXPECT_EQ(44, s.ToInt32(call_instr->InputAt(3)));
- EXPECT_EQ(45, s.ToInt32(call_instr->InputAt(4)));
+ EXPECT_EQ(0, s.ToInt32(call_instr->InputAt(3)));
+ EXPECT_EQ(44, s.ToInt32(call_instr->InputAt(4)));
+ EXPECT_EQ(45, s.ToInt32(call_instr->InputAt(5)));
// Function.
- EXPECT_EQ(function_node->id(), s.ToVreg(call_instr->InputAt(5)));
+ EXPECT_EQ(function_node->id(), s.ToVreg(call_instr->InputAt(6)));
// Context.
- EXPECT_EQ(context->id(), s.ToVreg(call_instr->InputAt(6)));
- // Continuation.
- EXPECT_EQ(cont.block()->id(), s.ToInt32(call_instr->InputAt(7)));
- // Deoptimization.
- EXPECT_EQ(deopt.block()->id(), s.ToInt32(call_instr->InputAt(8)));
+ EXPECT_EQ(context->id(), s.ToVreg(call_instr->InputAt(7)));
EXPECT_EQ(kArchRet, s[index++]->arch_opcode());
- // Check the deoptimize instruction.
- const Instruction* deopt_instr = s[index++];
- EXPECT_EQ(kArchDeoptimize, deopt_instr->arch_opcode());
- ASSERT_EQ(5U, deopt_instr->InputCount());
- int32_t deopt_id_after = s.ToInt32(deopt_instr->InputAt(0));
- FrameStateDescriptor* desc_after = s.GetDeoptimizationEntry(deopt_id_after);
- EXPECT_EQ(bailout_id_after, desc_after->bailout_id());
- EXPECT_EQ(1, desc_after->parameters_count());
- EXPECT_EQ(1, desc_after->locals_count());
- EXPECT_EQ(2, desc_after->stack_count());
- // Parameter value from the frame state.
- EXPECT_EQ(43, s.ToInt32(deopt_instr->InputAt(1)));
- EXPECT_EQ(44, s.ToInt32(deopt_instr->InputAt(2)));
- EXPECT_EQ(55, s.ToInt32(deopt_instr->InputAt(3)));
- EXPECT_EQ(call->id(), s.ToVreg(deopt_instr->InputAt(4)));
EXPECT_EQ(index, s.size());
}
return UnallocatedOperand::cast(operand)->virtual_register();
}
- FrameStateDescriptor* GetDeoptimizationEntry(int deoptimization_id) {
- EXPECT_LT(deoptimization_id, GetDeoptimizationEntryCount());
+ FrameStateDescriptor* GetFrameStateDescriptor(int deoptimization_id) {
+ EXPECT_LT(deoptimization_id, GetFrameStateDescriptorCount());
return deoptimization_entries_[deoptimization_id];
}
- int GetDeoptimizationEntryCount() {
+ int GetFrameStateDescriptorCount() {
return static_cast<int>(deoptimization_entries_.size());
}
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --expose-debug-as debug
+// Flags: --turbo-deoptimization
// Get the Debug object exposed from the debug context global object.
var Debug = debug.Debug
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --expose-debug-as debug --allow-natives-syntax
+// Flags: --turbo-deoptimization
Debug = debug.Debug;
var listened = false;
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --expose-debug-as debug
+// Flags: --turbo-deoptimization
+
// Get the Debug object exposed from the debug context global object.
Debug = debug.Debug
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --expose-debug-as debug
+// Flags: --turbo-deoptimization
// This test tests that full code compiled without debug break slots
// is recompiled with debug break slots when debugging is started.
'compiler/osr-assert': [PASS, NO_VARIANTS],
'regress/regress-2185-2': [PASS, NO_VARIANTS],
- # Support for breakpoints requires special relocation info for DebugBreak.
- 'debug-clearbreakpointgroup': [PASS, NO_VARIANTS],
- 'debug-step-2': [PASS, NO_VARIANTS],
- 'regress/regress-debug-deopt-while-recompile': [PASS, NO_VARIANTS],
- 'regress/regress-opt-after-debug-deopt': [PASS, NO_VARIANTS],
-
# Support for %GetFrameDetails is missing and requires checkpoints.
'debug-evaluate-bool-constructor': [PASS, NO_VARIANTS],
- 'debug-evaluate-closure': [PASS, NO_VARIANTS],
'debug-evaluate-const': [PASS, NO_VARIANTS],
'debug-evaluate-locals-optimized-double': [PASS, NO_VARIANTS],
'debug-evaluate-locals-optimized': [PASS, NO_VARIANTS],
'debug-evaluate-locals': [PASS, NO_VARIANTS],
'debug-evaluate-with-context': [PASS, NO_VARIANTS],
- 'debug-evaluate-with': [PASS, NO_VARIANTS],
'debug-liveedit-double-call': [PASS, NO_VARIANTS],
'debug-liveedit-restart-frame': [PASS, NO_VARIANTS],
'debug-return-value': [PASS, NO_VARIANTS],
'es6/generators-debug-scopes': [PASS, NO_VARIANTS],
'harmony/debug-blockscopes': [PASS, NO_VARIANTS],
'regress/regress-1081309': [PASS, NO_VARIANTS],
- 'regress/regress-1170187': [PASS, NO_VARIANTS],
- 'regress/regress-119609': [PASS, NO_VARIANTS],
- 'regress/regress-131994': [PASS, NO_VARIANTS],
'regress/regress-269': [PASS, NO_VARIANTS],
- 'regress/regress-325676': [PASS, NO_VARIANTS],
- 'regress/regress-crbug-107996': [PASS, NO_VARIANTS],
- 'regress/regress-crbug-171715': [PASS, NO_VARIANTS],
- 'regress/regress-crbug-222893': [PASS, NO_VARIANTS],
'regress/regress-crbug-259300': [PASS, NO_VARIANTS],
'regress/regress-frame-details-null-receiver': [PASS, NO_VARIANTS],
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --expose-debug-as debug
+// Flags: --turbo-deoptimization
+
// Make sure that the retrieval of local variables is performed correctly even
// when an adapter frame is present.
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --expose-debug-as debug
+// Flags: --turbo-deoptimization
Debug = debug.Debug;
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --expose-debug-as debug
+// Flags: --turbo-deoptimization
// Test that a variable in the local scope that shadows a context-allocated
// variable is correctly resolved when being evaluated in the debugger.
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --expose-debug-as debug
+// Flags: --turbo-deoptimization
// If a function parameter is forced to be context allocated,
// debug evaluate need to resolve it to a context slot instead of
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --expose-debug-as debug
+// Flags: --turbo-deoptimization
Debug = debug.Debug;
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --expose-debug-as debug
+// Flags: --turbo-deoptimization
Debug = debug.Debug
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --expose-debug-as debug
+// Flags: --turbo-deoptimization
Debug = debug.Debug
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --expose-debug-as debug --allow-natives-syntax
+// Flags: --turbo-deoptimization
Debug = debug.Debug;
// Flags: --expose-debug-as debug --allow-natives-syntax
// Flags: --concurrent-recompilation --block-concurrent-recompilation
+// Flags: --turbo-deoptimization
if (!%IsConcurrentRecompilationSupported()) {
print("Concurrent recompilation is disabled. Skipping this test.");