Node* attr = jsgraph()->Constant(NONE);
const Operator* op =
javascript()->Runtime(Runtime::kDefineAccessorPropertyUnchecked, 5);
- NewNode(op, literal, name, getter, setter, attr);
+ Node* call = NewNode(op, literal, name, getter, setter, attr);
+ PrepareFrameState(call, it->first->id());
}
// Transform literals that contain functions to fast properties.
switch (assign_type) {
case VARIABLE: {
Variable* variable = expr->expression()->AsVariableProxy()->var();
+ environment()->Push(value);
BuildVariableAssignment(variable, value, expr->op(),
expr->AssignmentId());
+ environment()->Pop();
break;
}
case NAMED_PROPERTY: {
MakeUnique(property->key()->AsLiteral()->AsPropertyName());
Node* store =
NewNode(javascript()->StoreNamed(strict_mode(), name), object, value);
+ environment()->Push(value);
PrepareFrameState(store, expr->AssignmentId());
+ environment()->Pop();
break;
}
case KEYED_PROPERTY: {
Node* object = environment()->Pop();
Node* store = NewNode(javascript()->StoreProperty(strict_mode()), object,
key, value);
+ environment()->Push(value);
PrepareFrameState(store, expr->AssignmentId());
+ environment()->Pop();
break;
}
}
masm_(code->zone()->isolate(), NULL, 0),
resolver_(this),
safepoints_(code->zone()),
- deoptimization_points_(code->zone()),
deoptimization_states_(code->zone()),
deoptimization_literals_(code->zone()),
translations_(code->zone()) {}
AssembleInstruction(*i);
}
- EmitLazyDeoptimizationCallTable();
-
FinishCode(masm());
+ // Ensure there is space for lazy deopt.
+ if (!info->IsStub()) {
+ int target_offset = masm()->pc_offset() + Deoptimizer::patch_size();
+ while (masm()->pc_offset() < target_offset) {
+ masm()->nop();
+ }
+ }
+
safepoints()->Emit(masm(), frame()->GetSpillSlotCount());
// TODO(titzer): what are the right code flags here?
}
-Safepoint::Id CodeGenerator::RecordSafepoint(PointerMap* pointers,
- Safepoint::Kind kind,
- int arguments,
- Safepoint::DeoptMode deopt_mode) {
+void CodeGenerator::RecordSafepoint(PointerMap* pointers, Safepoint::Kind kind,
+ int arguments,
+ Safepoint::DeoptMode deopt_mode) {
const ZoneList<InstructionOperand*>* operands =
pointers->GetNormalizedOperands();
Safepoint safepoint =
safepoint.DefinePointerRegister(reg, zone());
}
}
- return safepoint.id();
}
}
-void CodeGenerator::EmitLazyDeoptimizationCallTable() {
- // ZoneDeque<DeoptimizationPoint*>::iterator iter;
- int i = 0;
- for (ZoneDeque<DeoptimizationPoint*>::iterator
- iter = deoptimization_points_.begin();
- iter != deoptimization_points_.end(); iter++, i++) {
- int pc_offset = masm()->pc_offset();
- AssembleDeoptimizerCall((*iter)->lazy_state_id());
- safepoints()->SetDeoptimizationPc((*iter)->safepoint(), pc_offset);
- }
-}
-
-
void CodeGenerator::PopulateDeoptimizationData(Handle<Code> code_object) {
CompilationInfo* info = linkage()->info();
int deopt_count = static_cast<int>(deoptimization_states_.size());
data->SetTranslationIndex(
i, Smi::FromInt(deoptimization_states_[i]->translation_id()));
data->SetArgumentsStackHeight(i, Smi::FromInt(0));
- data->SetPc(i, Smi::FromInt(-1));
+ data->SetPc(i, Smi::FromInt(deoptimization_states_[i]->pc_offset()));
}
code_object->set_deoptimization_data(*data);
bool needs_frame_state = (flags & CallDescriptor::kNeedsFrameState);
- Safepoint::Id safepoint_id = RecordSafepoint(
+ RecordSafepoint(
instr->pointer_map(), Safepoint::kSimple, 0,
needs_frame_state ? Safepoint::kLazyDeopt : Safepoint::kNoLazyDeopt);
+ if (flags & CallDescriptor::kNeedsNopAfterCall) {
+ AddNopForSmiCodeInlining();
+ }
+
if (needs_frame_state) {
// If the frame state is present, it starts at argument 1
// (just after the code address).
int frame_state_offset = 1;
FrameStateDescriptor* descriptor =
GetFrameStateDescriptor(instr, frame_state_offset);
- int deopt_state_id =
- BuildTranslation(instr, frame_state_offset, kIgnoreOutput);
- int lazy_deopt_state_id = deopt_state_id;
+ int pc_offset = masm()->pc_offset();
+ int deopt_state_id = BuildTranslation(instr, pc_offset, frame_state_offset,
+ descriptor->state_combine());
+ // If the pre-call frame state differs from the post-call one, produce the
+ // pre-call frame state, too.
+ // TODO(jarin) We might want to avoid building the pre-call frame state
+ // because it is only used to get locals and arguments (by the debugger and
+ // f.arguments), and those are the same in the pre-call and post-call
+ // states.
if (descriptor->state_combine() != kIgnoreOutput) {
- lazy_deopt_state_id = BuildTranslation(instr, frame_state_offset,
- descriptor->state_combine());
+ deopt_state_id =
+ BuildTranslation(instr, -1, frame_state_offset, kIgnoreOutput);
}
- deoptimization_points_.push_back(new (zone()) DeoptimizationPoint(
- deopt_state_id, lazy_deopt_state_id, descriptor, safepoint_id));
#if DEBUG
// Make sure all the values live in stack slots or they are immediates.
// (The values should not live in registers because registers are clobbered
CHECK(op->IsStackSlot() || op->IsImmediate());
}
#endif
- safepoints()->RecordLazyDeoptimizationIndex(lazy_deopt_state_id);
- }
-
- if (flags & CallDescriptor::kNeedsNopAfterCall) {
- AddNopForSmiCodeInlining();
+ safepoints()->RecordLazyDeoptimizationIndex(deopt_state_id);
}
}
}
-int CodeGenerator::BuildTranslation(Instruction* instr, int frame_state_offset,
+int CodeGenerator::BuildTranslation(Instruction* instr, int pc_offset,
+ int frame_state_offset,
OutputFrameStateCombine state_combine) {
FrameStateDescriptor* descriptor =
GetFrameStateDescriptor(instr, frame_state_offset);
int deoptimization_id = static_cast<int>(deoptimization_states_.size());
deoptimization_states_.push_back(new (zone()) DeoptimizationState(
- descriptor->bailout_id(), translation.index()));
+ descriptor->bailout_id(), translation.index(), pc_offset));
return deoptimization_id;
}
}
// Record a safepoint with the given pointer map.
- Safepoint::Id RecordSafepoint(PointerMap* pointers, Safepoint::Kind kind,
- int arguments, Safepoint::DeoptMode deopt_mode);
+ void RecordSafepoint(PointerMap* pointers, Safepoint::Kind kind,
+ int arguments, Safepoint::DeoptMode deopt_mode);
// Assemble code for the specified instruction.
void AssembleInstruction(Instruction* instr);
// ===========================================================================
// Deoptimization table construction
void AddSafepointAndDeopt(Instruction* instr);
- void EmitLazyDeoptimizationCallTable();
void PopulateDeoptimizationData(Handle<Code> code);
int DefineDeoptimizationLiteral(Handle<Object> literal);
FrameStateDescriptor* GetFrameStateDescriptor(Instruction* instr,
int frame_state_offset);
- int BuildTranslation(Instruction* instr, int frame_state_offset,
+ int BuildTranslation(Instruction* instr, int pc_offset,
+ int frame_state_offset,
OutputFrameStateCombine state_combine);
void BuildTranslationForFrameStateDescriptor(
FrameStateDescriptor* descriptor, Instruction* instr,
void AddNopForSmiCodeInlining();
// ===========================================================================
- class DeoptimizationPoint : public ZoneObject {
- public:
- int state_id() const { return state_id_; }
- int lazy_state_id() const { return lazy_state_id_; }
- FrameStateDescriptor* descriptor() const { return descriptor_; }
- Safepoint::Id safepoint() const { return safepoint_; }
-
- DeoptimizationPoint(int state_id, int lazy_state_id,
- FrameStateDescriptor* descriptor,
- Safepoint::Id safepoint)
- : state_id_(state_id),
- lazy_state_id_(lazy_state_id),
- descriptor_(descriptor),
- safepoint_(safepoint) {}
-
- private:
- int state_id_;
- int lazy_state_id_;
- FrameStateDescriptor* descriptor_;
- Safepoint::Id safepoint_;
- };
-
struct DeoptimizationState : ZoneObject {
public:
BailoutId bailout_id() const { return bailout_id_; }
int translation_id() const { return translation_id_; }
+ int pc_offset() const { return pc_offset_; }
- DeoptimizationState(BailoutId bailout_id, int translation_id)
- : bailout_id_(bailout_id), translation_id_(translation_id) {}
+ DeoptimizationState(BailoutId bailout_id, int translation_id, int pc_offset)
+ : bailout_id_(bailout_id),
+ translation_id_(translation_id),
+ pc_offset_(pc_offset) {}
private:
BailoutId bailout_id_;
int translation_id_;
+ int pc_offset_;
};
InstructionSequence* code_;
MacroAssembler masm_;
GapResolver resolver_;
SafepointTableBuilder safepoints_;
- ZoneDeque<DeoptimizationPoint*> deoptimization_points_;
ZoneDeque<DeoptimizationState*> deoptimization_states_;
ZoneDeque<Handle<Object> > deoptimization_literals_;
TranslationBuffer translations_;
// few chosen runtime functions.
switch (function) {
case Runtime::kDebugBreak:
+ case Runtime::kDebugGetLoadedScripts:
case Runtime::kDeoptimizeFunction:
+ case Runtime::kInlineCallFunction:
+ case Runtime::kPrepareStep:
case Runtime::kSetScriptBreakPoint:
- case Runtime::kDebugGetLoadedScripts:
case Runtime::kStackGuard:
return true;
default:
SafepointEntry safepoint = code->GetSafepointEntry(it.frame()->pc());
int deopt_index = safepoint.deoptimization_index();
// Turbofan deopt is checked when we are patching addresses on stack.
- bool turbofanned = code->is_turbofanned();
+ bool turbofanned = code->is_turbofanned() && !FLAG_turbo_deoptimization;
bool safe_to_deopt =
deopt_index != Safepoint::kNoDeoptimizationIndex || turbofanned;
CHECK(topmost_optimized_code == NULL || safe_to_deopt || turbofanned);
element = next;
}
- if (FLAG_turbo_deoptimization) {
- PatchStackForMarkedCode(isolate);
- }
-
// TODO(titzer): we need a handle scope only because of the macro assembler,
// which is only used in EnsureCodeForDeoptimizationEntry.
HandleScope scope(isolate);
shared->EvictFromOptimizedCodeMap(codes[i], "deoptimized code");
// Do platform-specific patching to force any activations to lazy deopt.
- //
- // We skip patching Turbofan code - we patch return addresses on stack.
- // TODO(jarin) We should still zap the code object (but we have to
- // be careful not to zap the deoptimization block).
- if (!codes[i]->is_turbofanned()) {
+ if (!codes[i]->is_turbofanned() || FLAG_turbo_deoptimization) {
PatchCodeForDeoptimization(isolate, codes[i]);
// We might be in the middle of incremental marking with compaction.
}
-// For all marked Turbofanned code on stack, change the return address to go
-// to the deoptimization block.
-void Deoptimizer::PatchStackForMarkedCode(Isolate* isolate) {
- // TODO(jarin) We should tolerate missing patch entry for the topmost frame.
- for (StackFrameIterator it(isolate, isolate->thread_local_top()); !it.done();
- it.Advance()) {
- StackFrame::Type type = it.frame()->type();
- if (type == StackFrame::OPTIMIZED) {
- Code* code = it.frame()->LookupCode();
- if (code->is_turbofanned() && code->marked_for_deoptimization()) {
- JSFunction* function =
- static_cast<OptimizedFrame*>(it.frame())->function();
- Address* pc_address = it.frame()->pc_address();
- int pc_offset =
- static_cast<int>(*pc_address - code->instruction_start());
- SafepointEntry safepoint_entry = code->GetSafepointEntry(*pc_address);
- unsigned new_pc_offset = safepoint_entry.deoptimization_pc();
-
- if (FLAG_trace_deopt) {
- CodeTracer::Scope scope(isolate->GetCodeTracer());
- PrintF(scope.file(), "[patching stack address for function: ");
- function->PrintName(scope.file());
- PrintF(scope.file(), " (Pc offset %i -> %i)]\n", pc_offset,
- new_pc_offset);
- }
-
- CHECK(new_pc_offset != Safepoint::kNoDeoptimizationPc);
- *pc_address += static_cast<int>(new_pc_offset) - pc_offset;
- }
- }
- }
-}
-
-
void Deoptimizer::DeoptimizeAll(Isolate* isolate) {
if (FLAG_trace_deopt) {
CodeTracer::Scope scope(isolate->GetCodeTracer());
// refer to that code.
static void DeoptimizeMarkedCode(Isolate* isolate);
- static void PatchStackForMarkedCode(Isolate* isolate);
-
// Visit all the known optimized functions in a given isolate.
static void VisitAllOptimizedFunctions(
Isolate* isolate, OptimizedFunctionVisitor* visitor);
} else {
os << "<none>";
}
- if (entry.deoptimization_pc() != Safepoint::kNoDeoptimizationPc) {
- Vector<char> buf2 = Vector<char>::New(30);
- SNPrintF(buf2, "%6d", entry.deoptimization_pc());
- os << buf2.start();
- } else {
- os << "<none>";
- }
if (entry.argument_count() > 0) {
os << " argc: " << entry.argument_count();
}
length_ = Memory::uint32_at(header + kLengthOffset);
entry_size_ = Memory::uint32_at(header + kEntrySizeOffset);
pc_and_deoptimization_indexes_ = header + kHeaderSize;
- entries_ =
- pc_and_deoptimization_indexes_ + (length_ * kPcAndDeoptimizationInfoSize);
+ entries_ = pc_and_deoptimization_indexes_ +
+ (length_ * kPcAndDeoptimizationIndexSize);
DCHECK(entry_size_ > 0);
STATIC_ASSERT(SafepointEntry::DeoptimizationIndexField::kMax ==
Safepoint::kNoDeoptimizationIndex);
for (unsigned i = 0; i < length(); i++) {
// TODO(kasperl): Replace the linear search with binary search.
if (GetPcOffset(i) == pc_offset) return GetEntry(i);
- if (GetDeoptimizationPcOffset(i) == pc_offset) return GetEntry(i);
}
return SafepointEntry();
}
info.pc = assembler->pc_offset();
info.arguments = arguments;
info.has_doubles = (kind & Safepoint::kWithDoubles);
- info.deoptimization_pc = Safepoint::kNoDeoptimizationPc;
- int safepoint_id = deoptimization_info_.length();
deoptimization_info_.Add(info, zone_);
deopt_index_list_.Add(Safepoint::kNoDeoptimizationIndex, zone_);
if (deopt_mode == Safepoint::kNoLazyDeopt) {
? new(zone_) ZoneList<int>(4, zone_)
: NULL,
zone_);
- return Safepoint(safepoint_id, indexes_.last(), registers_.last());
+ return Safepoint(indexes_.last(), registers_.last());
}
assembler->dd(deoptimization_info_[i].pc);
assembler->dd(EncodeExceptPC(deoptimization_info_[i],
deopt_index_list_[i]));
- assembler->dd(deoptimization_info_[i].deoptimization_pc);
}
// Emit table of bitmaps.
class SafepointEntry BASE_EMBEDDED {
public:
- SafepointEntry() : info_(0), deoptimization_pc_(0), bits_(NULL) {}
+ SafepointEntry() : info_(0), bits_(NULL) {}
- SafepointEntry(unsigned info, unsigned deoptimization_pc, uint8_t* bits)
- : info_(info), deoptimization_pc_(deoptimization_pc), bits_(bits) {
+ SafepointEntry(unsigned info, uint8_t* bits) : info_(info), bits_(bits) {
DCHECK(is_valid());
}
return DeoptimizationIndexField::decode(info_);
}
- unsigned deoptimization_pc() const {
- DCHECK(is_valid());
- return deoptimization_pc_;
- }
-
static const int kArgumentsFieldBits = 3;
static const int kSaveDoublesFieldBits = 1;
static const int kDeoptIndexBits =
private:
unsigned info_;
- unsigned deoptimization_pc_;
uint8_t* bits_;
};
int size() const {
return kHeaderSize +
- (length_ * (kPcAndDeoptimizationInfoSize + entry_size_));
+ (length_ * (kPcAndDeoptimizationIndexSize + entry_size_));
}
unsigned length() const { return length_; }
unsigned entry_size() const { return entry_size_; }
return Memory::uint32_at(GetPcOffsetLocation(index));
}
- unsigned GetDeoptimizationPcOffset(unsigned index) const {
- DCHECK(index < length_);
- return Memory::uint32_at(GetDeoptimizationPcLocation(index));
- }
-
SafepointEntry GetEntry(unsigned index) const {
DCHECK(index < length_);
unsigned info = Memory::uint32_at(GetInfoLocation(index));
- unsigned deopt_pc = Memory::uint32_at(GetDeoptimizationPcLocation(index));
uint8_t* bits = &Memory::uint8_at(entries_ + (index * entry_size_));
- return SafepointEntry(info, deopt_pc, bits);
+ return SafepointEntry(info, bits);
}
// Returns the entry for the given pc.
static const int kPcSize = kIntSize;
static const int kDeoptimizationIndexSize = kIntSize;
- static const int kDeoptimizationPcSize = kIntSize;
- static const int kPcAndDeoptimizationInfoSize =
- kPcSize + kDeoptimizationIndexSize + kDeoptimizationPcSize;
+ static const int kPcAndDeoptimizationIndexSize =
+ kPcSize + kDeoptimizationIndexSize;
Address GetPcOffsetLocation(unsigned index) const {
return pc_and_deoptimization_indexes_ +
- (index * kPcAndDeoptimizationInfoSize);
+ (index * kPcAndDeoptimizationIndexSize);
}
Address GetInfoLocation(unsigned index) const {
return GetPcOffsetLocation(index) + kPcSize;
}
- Address GetDeoptimizationPcLocation(unsigned index) const {
- return GetInfoLocation(index) + kDeoptimizationIndexSize;
- }
-
static void PrintBits(OStream& os, // NOLINT
uint8_t byte, int digits);
kLazyDeopt
};
- class Id {
- private:
- explicit Id(int id) : id_(id) {}
-
- int id_;
-
- friend class SafepointTableBuilder;
- friend class Safepoint;
- };
-
static const int kNoDeoptimizationIndex =
(1 << (SafepointEntry::kDeoptIndexBits)) - 1;
- static const unsigned kNoDeoptimizationPc = ~0U;
-
void DefinePointerSlot(int index, Zone* zone) { indexes_->Add(index, zone); }
void DefinePointerRegister(Register reg, Zone* zone);
- Id id() const { return Id(id_); }
-
private:
- Safepoint(int id, ZoneList<int>* indexes, ZoneList<int>* registers)
- : id_(id), indexes_(indexes), registers_(registers) {}
- int id_;
+ Safepoint(ZoneList<int>* indexes, ZoneList<int>* registers)
+ : indexes_(indexes), registers_(registers) {}
ZoneList<int>* indexes_;
ZoneList<int>* registers_;
void BumpLastLazySafepointIndex() {
last_lazy_safepoint_ = deopt_index_list_.length();
}
- void SetDeoptimizationPc(Safepoint::Id safepoint_id,
- unsigned deoptimization_pc) {
- deoptimization_info_[safepoint_id.id_].deoptimization_pc =
- deoptimization_pc;
- }
// Emit the safepoint table after the body. The number of bits per
// entry must be enough to hold all the pointer indexes.
unsigned pc;
unsigned arguments;
bool has_doubles;
- unsigned deoptimization_pc;
};
uint32_t EncodeExceptPC(const DeoptimizationInfo& info, unsigned index);
Pipeline pipeline(&info);
Handle<Code> code = pipeline.GenerateCode();
+ if (FLAG_turbo_deoptimization) {
+ info.context()->native_context()->AddOptimizedCode(*code);
+ }
CHECK(!code.is_null());
function->ReplaceCode(*code);
return n;
}
+ Node* UndefinedConstant() {
+ Unique<Object> unique =
+ Unique<Object>::CreateImmovable(isolate->factory()->undefined_value());
+ return graph.NewNode(common.HeapConstant(unique));
+ }
+
+ Node* EmptyFrameState(Node* context) {
+ Node* parameters = graph.NewNode(common.StateValues(0));
+ Node* locals = graph.NewNode(common.StateValues(0));
+ Node* stack = graph.NewNode(common.StateValues(0));
+
+ Node* state_node =
+ graph.NewNode(common.FrameState(BailoutId(0), kIgnoreOutput),
+ parameters, locals, stack, context, UndefinedConstant());
+
+ return state_node;
+ }
+
Node* reduce(Node* node) {
JSGraph jsgraph(&graph, &common, &javascript, &typer, &machine);
JSTypedLowering reducer(&jsgraph);
TEST(RemoveToNumberEffects) {
+ FLAG_turbo_deoptimization = true;
+
JSTypedLoweringTester R;
Node* effect_use = NULL;
for (int i = 0; i < 10; i++) {
Node* p0 = R.Parameter(Type::Number());
Node* ton = R.Unop(R.javascript.ToNumber(), p0);
+ Node* frame_state = R.EmptyFrameState(R.context());
effect_use = NULL;
switch (i) {
effect_use = R.graph.NewNode(R.common.EffectPhi(1), ton, R.start());
case 3:
effect_use = R.graph.NewNode(R.javascript.Add(), ton, ton, R.context(),
- ton, R.start());
+ frame_state, ton, R.start());
break;
case 4:
effect_use = R.graph.NewNode(R.javascript.Add(), p0, p0, R.context(),
- ton, R.start());
+ frame_state, ton, R.start());
break;
case 5:
effect_use = R.graph.NewNode(R.common.Return(), p0, ton, R.start());
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --allow-natives-syntax
+// Flags: --allow-natives-syntax --turbo-deoptimization
var o1 = {x:1};
var o2 = {};