__ push(r0);
__ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
__ bind(&done_convert);
+ PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
__ push(r0);
// Check for proxies.
__ bind(&call_runtime);
__ push(r0); // Duplicate the enumerable object on the stack.
__ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
+ PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
// If we got a map from the runtime call, we can do a fast
// modification check. Otherwise, we got a fixed array, and we have
FastCloneShallowObjectStub stub(isolate(), properties_count);
__ CallStub(&stub);
}
+ PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
// If result_saved is true the result is on top of the stack. If
// result_saved is false the result is in r0.
__ Push(x0);
__ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
__ Bind(&done_convert);
+ PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
__ Push(x0);
// Check for proxies.
__ Bind(&call_runtime);
__ Push(x0); // Duplicate the enumerable object on the stack.
__ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
+ PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
// If we got a map from the runtime call, we can do a fast
// modification check. Otherwise, we got a fixed array, and we have
FastCloneShallowObjectStub stub(isolate(), properties_count);
__ CallStub(&stub);
}
+ PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
// If result_saved is true the result is on top of the stack. If
// result_saved is false the result is in x0.
ForInType for_in_type() const { return for_in_type_; }
void set_for_in_type(ForInType type) { for_in_type_ = type; }
- static int num_ids() { return parent_num_ids() + 2; }
+ static int num_ids() { return parent_num_ids() + 4; }
BailoutId BodyId() const { return BailoutId(local_id(0)); }
BailoutId PrepareId() const { return BailoutId(local_id(1)); }
+ BailoutId EnumId() const { return BailoutId(local_id(2)); }
+ BailoutId ToObjectId() const { return BailoutId(local_id(3)); }
virtual BailoutId ContinueId() const OVERRIDE { return EntryId(); }
virtual BailoutId StackCheckId() const OVERRIDE { return BodyId(); }
};
struct Accessors: public ZoneObject {
- Accessors() : getter(NULL), setter(NULL) { }
+ Accessors() : getter(NULL), setter(NULL) {}
Expression* getter;
Expression* setter;
};
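+ // Bailout point right after the literal object has been created, before its
+ // properties are initialized.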
+ BailoutId CreateLiteralId() const { return BailoutId(local_id(0)); }
+
+ static int num_ids() { return parent_num_ids() + 1; }
+
protected:
ObjectLiteral(Zone* zone, ZoneList<Property*>* properties, int literal_index,
int boilerplate_properties, bool has_function, int pos)
fast_elements_(false),
may_store_doubles_(false),
has_function_(has_function) {}
+ static int parent_num_ids() { return MaterializedLiteral::num_ids(); }
private:
+ int local_id(int n) const { return base_id() + parent_num_ids() + n; }
Handle<FixedArray> constant_properties_;
ZoneList<Property*>* properties_;
int boilerplate_properties_;
compiler::Pipeline pipeline(info());
pipeline.GenerateCode();
if (!info()->code().is_null()) {
- if (FLAG_turbo_deoptimization) {
- info()->context()->native_context()->AddOptimizedCode(*info()->code());
- }
return SetLastStatus(SUCCEEDED);
}
}
DCHECK(last_status() == SUCCEEDED);
// TODO(turbofan): Currently everything is done in the first phase.
if (!info()->code().is_null()) {
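+ // Now that code generation has succeeded, register the optimized code with
+ // the native context so it can be deoptimized later.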
+ if (FLAG_turbo_deoptimization) {
+ info()->context()->native_context()->AddOptimizedCode(*info()->code());
+ }
RecordOptimizationStats();
return last_status();
}
// Convert object to jsobject.
// PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
obj = NewNode(javascript()->ToObject(), obj);
+ PrepareFrameState(obj, stmt->ToObjectId(), OutputFrameStateCombine::Push());
environment()->Push(obj);
// TODO(dcarney): should do a fast enum cache check here to skip runtime.
environment()->Push(obj);
Node* cache_type = ProcessArguments(
javascript()->CallRuntime(Runtime::kGetPropertyNamesFast, 1), 1);
+ PrepareFrameState(cache_type, stmt->EnumId(),
+ OutputFrameStateCombine::Push());
// TODO(dcarney): these next runtime calls should be removed in favour of
// a few simplified instructions.
environment()->Push(obj);
const Operator* op =
javascript()->CallRuntime(Runtime::kCreateObjectLiteral, 4);
Node* literal = NewNode(op, literals_array, literal_index, constants, flags);
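+ // The freshly created literal must end up on the operand stack if we
+ // deoptimize here (mirrors the TOS_REG bailout in full-codegen).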
+ PrepareFrameState(literal, expr->CreateLiteralId(),
+ OutputFrameStateCombine::Push());
// The object is expected on the operand stack during computation of the
// property values and is the value of the entire expression.
if (property->emit_store()) {
const Operator* op =
javascript()->CallRuntime(Runtime::kInternalSetPrototype, 2);
- NewNode(op, receiver, value);
+ Node* set_prototype = NewNode(op, receiver, value);
+ // SetPrototype should not lazy deopt on an object literal.
+ PrepareFrameState(set_prototype, BailoutId::None());
}
break;
}
const Operator* op =
javascript()->CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
Node* call = NewNode(op, literal, name, getter, setter, attr);
- PrepareFrameState(call, it->first->id());
+ // DefineAccessorPropertyUnchecked should not lazy deopt on a new literal.
+ PrepareFrameState(call, BailoutId::None());
}
// Transform literals that contain functions to fast properties.
receiver_value = NewNode(common()->Projection(1), pair);
PrepareFrameState(pair, expr->EvalOrLookupId(),
- OutputFrameStateCombine::Push());
+ OutputFrameStateCombine::Push(2));
break;
}
case Call::PROPERTY_CALL: {
return kind_ == kPushOutput && parameter_ == 0;
}
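+ // Returns the number of the call's outputs consumed by this combine, i.e.
+ // how many return values have to stay live for deoptimization.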
+ size_t ConsumedOutputCount() const {
+ return kind_ == kPushOutput ? GetPushCount() : 1;
+ }
+
bool operator==(OutputFrameStateCombine const& other) const {
return kind_ == other.kind_ && parameter_ == other.parameter_;
}
return ImmediateOperand::Create(index, zone());
}
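+ // Creates an unallocated operand for {location}, backed by a fresh virtual
+ // register, for call outputs that have no node attached to them.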
+ InstructionOperand* TempLocation(LinkageLocation location, MachineType type) {
+ UnallocatedOperand* op = ToUnallocatedOperand(location, type);
+ op->set_virtual_register(sequence()->NextVirtualRegister());
+ return op;
+ }
+
InstructionOperand* Label(BasicBlock* block) {
// TODO(bmeurer): We misuse ImmediateOperand here.
return TempImmediate(block->rpo_number());
}
+void InstructionSelector::MarkAsRepresentation(MachineType rep,
+ InstructionOperand* op) {
+ UnallocatedOperand* unalloc = UnallocatedOperand::cast(op);
+ switch (RepresentationOf(rep)) {
+ case kRepFloat32:
+ case kRepFloat64:
+ sequence()->MarkAsDouble(unalloc->virtual_register());
+ break;
+ case kRepTagged:
+ sequence()->MarkAsReference(unalloc->virtual_register());
+ break;
+ default:
+ break;
+ }
+}
+
+
void InstructionSelector::MarkAsRepresentation(MachineType rep, Node* node) {
DCHECK_NOT_NULL(node);
switch (RepresentationOf(rep)) {
}
// Filter out the outputs that aren't live because no projection uses them.
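+ // Outputs consumed by the frame state must be kept even if no projection
+ // uses them, so their values are available when deoptimizing.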
+ size_t outputs_needed_by_framestate =
+ buffer->frame_state_descriptor == NULL
+ ? 0
+ : buffer->frame_state_descriptor->state_combine()
+ .ConsumedOutputCount();
for (size_t i = 0; i < buffer->output_nodes.size(); i++) {
- if (buffer->output_nodes[i] != NULL) {
- Node* output = buffer->output_nodes[i];
+ bool output_is_live =
+ buffer->output_nodes[i] != NULL || i < outputs_needed_by_framestate;
+ if (output_is_live) {
MachineType type =
buffer->descriptor->GetReturnType(static_cast<int>(i));
LinkageLocation location =
buffer->descriptor->GetReturnLocation(static_cast<int>(i));
- MarkAsRepresentation(type, output);
- buffer->outputs.push_back(g.DefineAsLocation(output, location, type));
+
+ Node* output = buffer->output_nodes[i];
+ InstructionOperand* op =
+ output == NULL ? g.TempLocation(location, type)
+ : g.DefineAsLocation(output, location, type);
+ MarkAsRepresentation(type, op);
+
+ buffer->outputs.push_back(op);
}
}
}
// by {node}.
void MarkAsRepresentation(MachineType rep, Node* node);
+ // Inform the register allocation of the representation of the unallocated
+ // operand {op}.
+ void MarkAsRepresentation(MachineType rep, InstructionOperand* op);
+
// Initialize the call buffer with the InstructionOperands, nodes, etc,
// corresponding
// to the inputs and outputs of the call.
case Runtime::kCompileLazy:
case Runtime::kCompileOptimized:
case Runtime::kCompileString:
+ case Runtime::kCreateObjectLiteral:
case Runtime::kDebugBreak:
case Runtime::kDataViewSetInt8:
case Runtime::kDataViewSetUint8:
case Runtime::kDataViewGetFloat32:
case Runtime::kDataViewGetFloat64:
case Runtime::kDebugEvaluate:
+ case Runtime::kDebugEvaluateGlobal:
case Runtime::kDebugGetLoadedScripts:
case Runtime::kDebugGetPropertyDetails:
case Runtime::kDebugPromiseEvent:
+ case Runtime::kDefineAccessorPropertyUnchecked:
+ case Runtime::kDefineDataPropertyUnchecked:
case Runtime::kDeleteProperty:
case Runtime::kDeoptimizeFunction:
case Runtime::kFunctionBindArguments:
+ case Runtime::kGetDefaultReceiver:
case Runtime::kGetFrameCount:
+ case Runtime::kGetImplFromInitializedIntlObject:
case Runtime::kGetOwnProperty:
+ case Runtime::kGetOwnPropertyNames:
+ case Runtime::kGetPropertyNamesFast:
+ case Runtime::kGetPrototype:
+ case Runtime::kInlineArguments:
case Runtime::kInlineCallFunction:
case Runtime::kInlineDateField:
case Runtime::kInlineRegExpExec:
+ case Runtime::kInternalSetPrototype:
+ case Runtime::kInterrupt:
+ case Runtime::kIsPropertyEnumerable:
+ case Runtime::kIsSloppyModeFunction:
case Runtime::kLiveEditGatherCompileInfo:
case Runtime::kLoadLookupSlot:
case Runtime::kLoadLookupSlotNoReferenceError:
case Runtime::kMaterializeRegExpLiteral:
+ case Runtime::kNewObject:
case Runtime::kNewObjectFromBound:
+ case Runtime::kNewObjectWithAllocationSite:
case Runtime::kObjectFreeze:
+ case Runtime::kOwnKeys:
case Runtime::kParseJson:
case Runtime::kPrepareStep:
case Runtime::kPreventExtensions:
case Runtime::kRegExpCompile:
case Runtime::kRegExpExecMultiple:
case Runtime::kResolvePossiblyDirectEval:
- // case Runtime::kSetPrototype:
+ case Runtime::kSetPrototype:
case Runtime::kSetScriptBreakPoint:
+ case Runtime::kSparseJoinWithSeparator:
case Runtime::kStackGuard:
+ case Runtime::kStoreKeyedToSuper_Sloppy:
+ case Runtime::kStoreKeyedToSuper_Strict:
+ case Runtime::kStoreToSuper_Sloppy:
+ case Runtime::kStoreToSuper_Strict:
case Runtime::kStoreLookupSlot:
case Runtime::kStringBuilderConcat:
+ case Runtime::kStringBuilderJoin:
case Runtime::kStringReplaceGlobalRegExpWithString:
+ case Runtime::kThrowNonMethodError:
+ case Runtime::kThrowNotDateError:
case Runtime::kThrowReferenceError:
+ case Runtime::kThrowUnsupportedSuperError:
case Runtime::kThrow:
case Runtime::kTypedArraySetFastCases:
case Runtime::kTypedArrayInitializeFromArrayLike:
- case Runtime::kDebugEvaluateGlobal:
- case Runtime::kOwnKeys:
- case Runtime::kGetOwnPropertyNames:
- case Runtime::kIsPropertyEnumerable:
- case Runtime::kGetPrototype:
- case Runtime::kSparseJoinWithSeparator:
return true;
default:
return false;
case IrOpcode::kJSStoreProperty:
case IrOpcode::kJSSubtract:
+ // Conversions
+ case IrOpcode::kJSToObject:
+
// Other
case IrOpcode::kJSDeleteProperty:
return true;
__ push(eax);
__ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
__ bind(&done_convert);
+ PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
__ push(eax);
// Check for proxies.
__ bind(&call_runtime);
__ push(eax);
__ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
+ PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
__ cmp(FieldOperand(eax, HeapObject::kMapOffset),
isolate()->factory()->meta_map());
__ j(not_equal, &fixed_array);
FastCloneShallowObjectStub stub(isolate(), properties_count);
__ CallStub(&stub);
}
+ PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
// If result_saved is true the result is on top of the stack. If
// result_saved is false the result is in eax.
__ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
__ mov(a0, v0);
__ bind(&done_convert);
+ PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
__ push(a0);
// Check for proxies.
__ bind(&call_runtime);
__ push(a0); // Duplicate the enumerable object on the stack.
__ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
+ PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
// If we got a map from the runtime call, we can do a fast
// modification check. Otherwise, we got a fixed array, and we have
FastCloneShallowObjectStub stub(isolate(), properties_count);
__ CallStub(&stub);
}
+ PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
// If result_saved is true the result is on top of the stack. If
// result_saved is false the result is in v0.
__ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
__ mov(a0, v0);
__ bind(&done_convert);
+ PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
__ push(a0);
// Check for proxies.
__ bind(&call_runtime);
__ push(a0); // Duplicate the enumerable object on the stack.
__ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
+ PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
// If we got a map from the runtime call, we can do a fast
// modification check. Otherwise, we got a fixed array, and we have
FastCloneShallowObjectStub stub(isolate(), properties_count);
__ CallStub(&stub);
}
+ PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
// If result_saved is true the result is on top of the stack. If
// result_saved is false the result is in v0.
__ Push(rax);
__ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
__ bind(&done_convert);
+ PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
__ Push(rax);
// Check for proxies.
__ bind(&call_runtime);
__ Push(rax); // Duplicate the enumerable object on the stack.
__ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
+ PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
// If we got a map from the runtime call, we can do a fast
// modification check. Otherwise, we got a fixed array, and we have
FastCloneShallowObjectStub stub(isolate(), properties_count);
__ CallStub(&stub);
}
+ PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
// If result_saved is true the result is on top of the stack. If
// result_saved is false the result is in rax.
__ push(eax);
__ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
__ bind(&done_convert);
+ PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
__ push(eax);
// Check for proxies.
__ bind(&call_runtime);
__ push(eax);
__ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
+ PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
__ cmp(FieldOperand(eax, HeapObject::kMapOffset),
isolate()->factory()->meta_map());
__ j(not_equal, &fixed_array);
FastCloneShallowObjectStub stub(isolate(), properties_count);
__ CallStub(&stub);
}
+ PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
// If result_saved is true the result is on top of the stack. If
// result_saved is false the result is in eax.
// test enters an infinite recursion which goes through the runtime and we
// overflow the system stack before the simulator stack.
-// Flags: --harmony-proxies --sim-stack-size=500
+// Flags: --harmony-proxies --sim-stack-size=500 --turbo-deoptimization
// Helper.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-// Flags: --stack-size=100
+// Flags: --stack-size=100 --turbo-deoptimization
var a = [];
SHARED(ToNumber, Operator::kNoProperties, 1, 0, 1, 1, 1, 1),
SHARED(ToString, Operator::kNoProperties, 1, 0, 1, 1, 1, 1),
SHARED(ToName, Operator::kNoProperties, 1, 0, 1, 1, 1, 1),
- SHARED(ToObject, Operator::kNoProperties, 1, 0, 1, 1, 1, 1),
+ SHARED(ToObject, Operator::kNoProperties, 1, 1, 1, 1, 1, 1),
SHARED(Yield, Operator::kNoProperties, 1, 0, 1, 1, 1, 1),
SHARED(Create, Operator::kEliminatable, 0, 0, 1, 0, 1, 1),
- SHARED(HasProperty, Operator::kNoProperties, 2, 0, 1, 1, 1, 1),
+ SHARED(HasProperty, Operator::kNoProperties, 2, 1, 1, 1, 1, 1),
SHARED(TypeOf, Operator::kPure, 1, 0, 0, 0, 1, 0),
- SHARED(InstanceOf, Operator::kNoProperties, 2, 0, 1, 1, 1, 1),
+ SHARED(InstanceOf, Operator::kNoProperties, 2, 1, 1, 1, 1, 1),
SHARED(Debugger, Operator::kNoProperties, 0, 0, 1, 1, 0, 1),
SHARED(CreateFunctionContext, Operator::kNoProperties, 1, 0, 1, 1, 1, 1),
SHARED(CreateWithContext, Operator::kNoProperties, 2, 0, 1, 1, 1, 1),