namespace internal {
+void FastCloneShallowObjectStub::InitializeInterfaceDescriptor(
+ Isolate* isolate,
+ CodeStubInterfaceDescriptor* descriptor) {
+ static Register registers[] = { r3, r2, r1, r0 };
+ descriptor->register_param_count_ = 4;
+ descriptor->register_params_ = registers;
+ descriptor->stack_parameter_count_ = NULL;
+ descriptor->deoptimization_handler_ =
+ Runtime::FunctionForId(Runtime::kCreateObjectLiteralShallow)->entry;
+}
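+// The register parameters carry, in order, the literals array (r3), the
+// literal index (r2), the constant properties (r1) and the object literal
+// flags (r0); the same values the old platform-specific stub read off the
+// stack (see the callers in full-codegen and lithium below).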
+
+
void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
}
-void FastCloneShallowObjectStub::Generate(MacroAssembler* masm) {
- // Stack layout on entry:
- //
- // [sp]: object literal flags.
- // [sp + kPointerSize]: constant properties.
- // [sp + (2 * kPointerSize)]: literal index.
- // [sp + (3 * kPointerSize)]: literals array.
-
- // Load boilerplate object into r3 and check if we need to create a
- // boilerplate.
- Label slow_case;
- __ ldr(r3, MemOperand(sp, 3 * kPointerSize));
- __ ldr(r0, MemOperand(sp, 2 * kPointerSize));
- __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- __ ldr(r3, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
- __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
- __ b(eq, &slow_case);
-
- // Check that the boilerplate contains only fast properties and we can
- // statically determine the instance size.
- int size = JSObject::kHeaderSize + length_ * kPointerSize;
- __ ldr(r0, FieldMemOperand(r3, HeapObject::kMapOffset));
- __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceSizeOffset));
- __ cmp(r0, Operand(size >> kPointerSizeLog2));
- __ b(ne, &slow_case);
-
- // Allocate the JS object and copy header together with all in-object
- // properties from the boilerplate.
- __ AllocateInNewSpace(size, r0, r1, r2, &slow_case, TAG_OBJECT);
- for (int i = 0; i < size; i += kPointerSize) {
- __ ldr(r1, FieldMemOperand(r3, i));
- __ str(r1, FieldMemOperand(r0, i));
- }
-
- // Return and remove the on-stack parameters.
- __ add(sp, sp, Operand(4 * kPointerSize));
- __ Ret();
-
- __ bind(&slow_case);
- __ TailCallRuntime(Runtime::kCreateObjectLiteralShallow, 4, 1);
-}
-
-
// Takes a Smi and converts to an IEEE 64 bit floating point value in two
// registers. The format is 1 sign bit, 11 exponent bits (biased 1023) and
// 52 fraction bits (20 in the first word, 32 in the second). Zeros is a
}
-void Deoptimizer::DoCompiledStubFrame(TranslationIterator* iterator,
- int frame_index) {
+void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator,
+ int frame_index) {
//
// FROM TO
// | .... | | .... |
// v +-------------------------+ +-------------------------+
// | COMPILED_STUB marker | | STUB_FAILURE marker |
// +-------------------------+ +-------------------------+
- // | | | caller args.length_ |
- // | ... | +-------------------------+
// | | | caller args.arguments_ |
+ // | ... | +-------------------------+
+ // | | | caller args.length_ |
// |-------------------------|<-sp +-------------------------+
// | caller args pointer |
// +-------------------------+
isolate_->code_stub_interface_descriptor(major_key);
// The output frame must have room for all pushed register parameters
- // and the standard stack frame slots.
- int output_frame_size = StandardFrameConstants::kFixedFrameSize +
- kPointerSize * descriptor->register_param_count_;
-
- // Include space for an argument object to the callee and optionally
- // the space to pass the argument object to the stub failure handler.
- output_frame_size += sizeof(Arguments) + kPointerSize;
+ // and the standard stack frame slots. Include space for an argument
+ // object to the callee and optionally the space to pass the argument
+ // object to the stub failure handler.
+ int height_in_bytes = kPointerSize * descriptor->register_param_count_ +
+ sizeof(Arguments) + kPointerSize;
+ int fixed_frame_size = StandardFrameConstants::kFixedFrameSize;
+ int input_frame_size = input_->GetFrameSize();
+ int output_frame_size = height_in_bytes + fixed_frame_size;
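+ // sizeof(Arguments) covers the args.length_ and args.arguments_ slots
+ // shown in the diagram above; the extra kPointerSize is for the pointer
+ // to that Arguments object that is handed to the stub failure handler.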
+ if (trace_) {
+ PrintF(" translating %s => StubFailureTrampolineStub, height=%d\n",
+ CodeStub::MajorName(static_cast<CodeStub::Major>(major_key), false),
+ height_in_bytes);
+ }
+ // The stub failure trampoline is a single frame.
FrameDescription* output_frame =
- new(output_frame_size) FrameDescription(output_frame_size, 0);
+ new(output_frame_size) FrameDescription(output_frame_size, NULL);
+ output_frame->SetFrameType(StackFrame::STUB_FAILURE_TRAMPOLINE);
ASSERT(frame_index == 0);
output_[frame_index] = output_frame;
- Code* notify_failure =
- isolate_->builtins()->builtin(Builtins::kNotifyStubFailure);
- output_frame->SetState(Smi::FromInt(FullCodeGenerator::NO_REGISTERS));
- output_frame->SetContinuation(
- reinterpret_cast<intptr_t>(notify_failure->entry()));
- Code* trampoline = NULL;
- int extra = descriptor->extra_expression_stack_count_;
- StubFailureTrampolineStub(extra).FindCodeInCache(&trampoline, isolate_);
- ASSERT(trampoline != NULL);
- output_frame->SetPc(reinterpret_cast<intptr_t>(
- trampoline->instruction_start()));
- unsigned input_frame_size = input_->GetFrameSize();
-
- intptr_t frame_ptr = input_->GetRegister(fp.code());
+ // The top address for the output frame can be computed from the input
+ // frame pointer and the output frame's height. Subtract space for the
+ // context and function slots.
+ intptr_t top_address = input_->GetRegister(fp.code()) - (2 * kPointerSize) -
+ height_in_bytes;
+ output_frame->SetTop(top_address);
- // JSFunction continuation
+ // Read caller's PC (JSFunction continuation) from the input frame.
intptr_t input_frame_offset = input_frame_size - kPointerSize;
intptr_t output_frame_offset = output_frame_size - kPointerSize;
intptr_t value = input_->GetFrameSlot(input_frame_offset);
output_frame->SetFrameSlot(output_frame_offset, value);
+ if (trace_) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
+ top_address + output_frame_offset, output_frame_offset, value);
+ }
- // saved frame ptr
+ // Read caller's FP from the input frame, and set this frame's FP.
input_frame_offset -= kPointerSize;
value = input_->GetFrameSlot(input_frame_offset);
output_frame_offset -= kPointerSize;
output_frame->SetFrameSlot(output_frame_offset, value);
+ intptr_t frame_ptr = input_->GetRegister(fp.code());
+ output_frame->SetRegister(fp.code(), frame_ptr);
+ output_frame->SetFp(frame_ptr);
+ if (trace_) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
+ top_address + output_frame_offset, output_frame_offset, value);
+ }
- // Restore context
+ // Read the context from the input frame.
input_frame_offset -= kPointerSize;
value = input_->GetFrameSlot(input_frame_offset);
output_frame->SetRegister(cp.code(), value);
output_frame_offset -= kPointerSize;
output_frame->SetFrameSlot(output_frame_offset, value);
+ if (trace_) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context\n",
+ top_address + output_frame_offset, output_frame_offset, value);
+ }
- // Internal frame markers
+ // A marker value is used in place of the function.
output_frame_offset -= kPointerSize;
value = reinterpret_cast<intptr_t>(
Smi::FromInt(StackFrame::STUB_FAILURE_TRAMPOLINE));
output_frame->SetFrameSlot(output_frame_offset, value);
+ if (trace_) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; function (stub fail sentinel)\n",
+ top_address + output_frame_offset, output_frame_offset, value);
+ }
int caller_arg_count = 0;
if (descriptor->stack_parameter_count_ != NULL) {
value = frame_ptr + StandardFrameConstants::kCallerSPOffset +
(caller_arg_count - 1) * kPointerSize;
output_frame->SetFrameSlot(output_frame_offset, value);
+ if (trace_) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; args.arguments\n",
+ top_address + output_frame_offset, output_frame_offset, value);
+ }
- output_frame->SetFrameSlot(output_frame_offset, value);
output_frame_offset -= kPointerSize;
- output_frame->SetFrameSlot(output_frame_offset, caller_arg_count);
+ value = caller_arg_count;
+ output_frame->SetFrameSlot(output_frame_offset, value);
+ if (trace_) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; args.length\n",
+ top_address + output_frame_offset, output_frame_offset, value);
+ }
- value = frame_ptr - (output_frame_size - output_frame_offset) -
- StandardFrameConstants::kMarkerOffset;
output_frame_offset -= kPointerSize;
+ value = frame_ptr - (output_frame_size - output_frame_offset) -
+ StandardFrameConstants::kMarkerOffset + kPointerSize;
output_frame->SetFrameSlot(output_frame_offset, value);
+ if (trace_) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; args*\n",
+ top_address + output_frame_offset, output_frame_offset, value);
+ }
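+ // After the kPointerSize correction above, args* points at the
+ // args.length_ slot just written, i.e. at the start of the caller's
+ // Arguments object.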
// Copy the register parameters to the failure frame.
for (int i = 0; i < descriptor->register_param_count_; ++i) {
DoTranslateCommand(iterator, 0, output_frame_offset);
}
+ ASSERT(0 == output_frame_offset);
+
for (int i = 0; i < DwVfpRegister::kMaxNumRegisters; ++i) {
double double_value = input_->GetDoubleRegister(i);
output_frame->SetDoubleRegister(i, double_value);
}
- output_frame->SetRegister(fp.code(), frame_ptr);
- output_frame->SetFp(frame_ptr);
-
ApiFunction function(descriptor->deoptimization_handler_);
ExternalReference xref(&function, ExternalReference::BUILTIN_CALL, isolate_);
intptr_t handler = reinterpret_cast<intptr_t>(xref.address());
}
output_frame->SetRegister(r0.code(), params);
output_frame->SetRegister(r1.code(), handler);
+
+ // Compute this frame's PC, state, and continuation.
+ Code* trampoline = NULL;
+ int extra = descriptor->extra_expression_stack_count_;
+ StubFailureTrampolineStub(extra).FindCodeInCache(&trampoline, isolate_);
+ ASSERT(trampoline != NULL);
+ output_frame->SetPc(reinterpret_cast<intptr_t>(
+ trampoline->instruction_start()));
+ output_frame->SetState(Smi::FromInt(FullCodeGenerator::NO_REGISTERS));
+ Code* notify_failure =
+ isolate_->builtins()->builtin(Builtins::kNotifyStubFailure);
+ output_frame->SetContinuation(
+ reinterpret_cast<intptr_t>(notify_failure->entry()));
}
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
Comment cmnt(masm_, "[ ObjectLiteral");
Handle<FixedArray> constant_properties = expr->constant_properties();
- __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+ __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
__ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
__ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
__ mov(r1, Operand(constant_properties));
? ObjectLiteral::kHasFunction
: ObjectLiteral::kNoFlags;
__ mov(r0, Operand(Smi::FromInt(flags)));
- __ Push(r3, r2, r1, r0);
int properties_count = constant_properties->length() / 2;
if (expr->depth() > 1) {
+ __ Push(r3, r2, r1, r0);
__ CallRuntime(Runtime::kCreateObjectLiteral, 4);
- } else if (flags != ObjectLiteral::kFastElements ||
+ } else if (Serializer::enabled() || flags != ObjectLiteral::kFastElements ||
properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
+ __ Push(r3, r2, r1, r0);
__ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
} else {
FastCloneShallowObjectStub stub(properties_count);
}
+LInstruction* LChunkBuilder::DoInstanceSize(HInstanceSize* instr) {
+ LOperand* object = UseRegisterAtStart(instr->object());
+ return DefineAsRegister(new(zone()) LInstanceSize(object));
+}
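+// The object input is only read at the start of the instruction, so its
+// register may be reused for the result (UseRegisterAtStart together with
+// DefineAsRegister).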
+
+
LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
LOperand* receiver = UseRegisterAtStart(instr->receiver());
LOperand* function = UseRegisterAtStart(instr->function());
V(In) \
V(InstanceOf) \
V(InstanceOfKnownGlobal) \
+ V(InstanceSize) \
V(InstructionGap) \
V(Integer32ToDouble) \
V(Uint32ToDouble) \
};
+class LInstanceSize: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LInstanceSize(LOperand* object) {
+ inputs_[0] = object;
+ }
+
+ LOperand* object() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(InstanceSize, "instance-size")
+ DECLARE_HYDROGEN_ACCESSOR(InstanceSize)
+};
+
+
class LBoundsCheck: public LTemplateInstruction<0, 2, 0> {
public:
LBoundsCheck(LOperand* index, LOperand* length) {
}
+void LCodeGen::DoInstanceSize(LInstanceSize* instr) {
+ Register object = ToRegister(instr->object());
+ Register result = ToRegister(instr->result());
+ __ ldr(result, FieldMemOperand(object, HeapObject::kMapOffset));
+ __ ldrb(result, FieldMemOperand(result, Map::kInstanceSizeOffset));
+}
+
+
void LCodeGen::DoCmpT(LCmpT* instr) {
Token::Value op = instr->op();
DeferredAllocate* deferred =
new(zone()) DeferredAllocate(this, instr);
- Register size = ToRegister(instr->size());
Register result = ToRegister(instr->result());
Register scratch = ToRegister(instr->temp1());
Register scratch2 = ToRegister(instr->temp2());
- HAllocate* original_instr = instr->hydrogen();
- if (original_instr->size()->IsConstant()) {
- UNREACHABLE();
+ // Allocate memory for the object.
+ AllocationFlags flags = TAG_OBJECT;
+ if (instr->hydrogen()->MustAllocateDoubleAligned()) {
+ flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
+ }
+ if (instr->size()->IsConstantOperand()) {
+ int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
+ __ AllocateInNewSpace(size,
+ result,
+ scratch,
+ scratch2,
+ deferred->entry(),
+ flags);
} else {
- // Allocate memory for the object.
- AllocationFlags flags = TAG_OBJECT;
- if (original_instr->MustAllocateDoubleAligned()) {
- flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
- }
+ Register size = ToRegister(instr->size());
__ AllocateInNewSpace(size,
result,
scratch,
scratch2,
deferred->entry(),
- TAG_OBJECT);
+ flags);
}
__ bind(deferred->exit());
instr->hydrogen()->constant_properties();
// Set up the parameters to the stub/runtime call.
- __ LoadHeapObject(r4, literals);
- __ mov(r3, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
- __ mov(r2, Operand(constant_properties));
+ __ LoadHeapObject(r3, literals);
+ __ mov(r2, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
+ __ mov(r1, Operand(constant_properties));
int flags = instr->hydrogen()->fast_elements()
? ObjectLiteral::kFastElements
: ObjectLiteral::kNoFlags;
- __ mov(r1, Operand(Smi::FromInt(flags)));
- __ Push(r4, r3, r2, r1);
+ __ mov(r0, Operand(Smi::FromInt(flags)));
// Pick the right runtime function or stub to call.
int properties_count = constant_properties->length() / 2;
if (instr->hydrogen()->depth() > 1) {
+ __ Push(r3, r2, r1, r0);
CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
} else if (flags != ObjectLiteral::kFastElements ||
properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
+ __ Push(r3, r2, r1, r0);
CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
} else {
FastCloneShallowObjectStub stub(properties_count);
};
+template <>
+void CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
+ Zone* zone = this->zone();
+ Factory* factory = isolate()->factory();
+
+ HInstruction* boilerplate =
+ AddInstruction(new(zone) HLoadKeyed(GetParameter(0),
+ GetParameter(1),
+ NULL,
+ FAST_ELEMENTS));
+
+ CheckBuilder builder(this, BailoutId::StubEntry());
+ builder.CheckNotUndefined(boilerplate);
+
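+  // A map stores its instance size in words, so the boilerplate's size byte
+  // is compared against size >> kPointerSizeLog2, mirroring the check the
+  // old platform-specific stubs performed before copying.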
+ int size = JSObject::kHeaderSize + casted_stub()->length() * kPointerSize;
+ HValue* boilerplate_size =
+ AddInstruction(new(zone) HInstanceSize(boilerplate));
+ HValue* size_in_words =
+ AddInstruction(new(zone) HConstant(size >> kPointerSizeLog2,
+ Representation::Integer32()));
+ builder.CheckIntegerEq(boilerplate_size, size_in_words);
+
+ HValue* size_in_bytes =
+ AddInstruction(new(zone) HConstant(size, Representation::Integer32()));
+ HInstruction* object =
+ AddInstruction(new(zone) HAllocate(context(),
+ size_in_bytes,
+ HType::JSObject(),
+ HAllocate::CAN_ALLOCATE_IN_NEW_SPACE));
+
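+  // Copy the object header and all in-object properties from the
+  // boilerplate, one pointer-sized field at a time, just as the old
+  // platform-specific stubs did.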
+ for (int i = 0; i < size; i += kPointerSize) {
+ HInstruction* value =
+ AddInstruction(new(zone) HLoadNamedField(boilerplate, true, i));
+ AddInstruction(new(zone) HStoreNamedField(object,
+ factory->empty_symbol(),
+ value,
+ true, i));
+ AddSimulate(BailoutId::StubEntry());
+ }
+
+ builder.End();
+
+ HReturn* ret = new(zone) HReturn(object, context());
+ current_block()->Finish(ret);
+}
+
+
+Handle<Code> FastCloneShallowObjectStub::GenerateCode() {
+ CodeStubGraphBuilder<FastCloneShallowObjectStub> builder(this);
+ LChunk* chunk = OptimizeGraph(builder.CreateGraph());
+ return chunk->Codegen(Code::COMPILED_STUB);
+}
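+// The stub is compiled through the regular Hydrogen/Lithium pipeline and
+// tagged Code::COMPILED_STUB, so a failed check above deoptimizes through
+// the stub failure trampoline into the deoptimization_handler_ registered
+// in the interface descriptors (Runtime::kCreateObjectLiteralShallow).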
+
+
template <>
void CodeStubGraphBuilder<KeyedLoadFastElementStub>::BuildCodeStub() {
Zone* zone = this->zone();
new(zone) HBoundsCheck(array_length, max_alloc_size,
DONT_ALLOW_SMI_KEY, Representation::Integer32()));
- IfBuilder if_builder(this);
+ IfBuilder if_builder(this, BailoutId::StubEntry());
if_builder.BeginTrue(array_length, graph()->GetConstant0(), Token::EQ);
: AddInstruction(new(zone) HConstant(nan_double,
Representation::Double()));
- LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
+ LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement,
+ BailoutId::StubEntry());
HValue* zero = graph()->GetConstant0();
HValue* start = IsFastElementsKind(to_kind) ? zero : array_length;
};
-class HGraph;
-struct Register;
class HydrogenCodeStub : public CodeStub {
public:
// Retrieve the code for the stub. Generate the code if needed.
};
-class FastCloneShallowObjectStub : public PlatformCodeStub {
+class FastCloneShallowObjectStub : public HydrogenCodeStub {
public:
// Maximum number of properties in copied object.
static const int kMaximumClonedProperties = 6;
ASSERT_LE(length_, kMaximumClonedProperties);
}
- void Generate(MacroAssembler* masm);
+ int length() const { return length_; }
+
+ virtual Handle<Code> GenerateCode();
+
+ virtual void InitializeInterfaceDescriptor(
+ Isolate* isolate,
+ CodeStubInterfaceDescriptor* descriptor);
private:
int length_;
Major MajorKey() { return FastCloneShallowObject; }
int MinorKey() { return length_; }
+
+ DISALLOW_COPY_AND_ASSIGN(FastCloneShallowObjectStub);
};
IsJSArrayBits::encode(is_js_array);
}
- Major MajorKey() { return KeyedLoadElement; }
- int MinorKey() { return bit_field_; }
-
bool is_js_array() const {
return IsJSArrayBits::decode(bit_field_);
}
class ElementsKindBits: public BitField<ElementsKind, 0, 8> {};
uint32_t bit_field_;
+ Major MajorKey() { return KeyedLoadElement; }
+ int MinorKey() { return bit_field_; }
+
DISALLOW_COPY_AND_ASSIGN(KeyedLoadFastElementStub);
};
ToKindBits::encode(to_kind);
}
- Major MajorKey() { return TransitionElementsKind; }
- int MinorKey() { return bit_field_; }
-
ElementsKind from_kind() const {
return FromKindBits::decode(bit_field_);
}
class ToKindBits: public BitField<ElementsKind, 0, 8> {};
uint32_t bit_field_;
+ Major MajorKey() { return TransitionElementsKind; }
+ int MinorKey() { return bit_field_; }
+
DISALLOW_COPY_AND_ASSIGN(TransitionElementsKindStub);
};
DoComputeAccessorStubFrame(&iterator, i, true);
break;
case Translation::COMPILED_STUB_FRAME:
- DoCompiledStubFrame(&iterator, i);
+ DoComputeCompiledStubFrame(&iterator, i);
break;
case Translation::BEGIN:
case Translation::REGISTER:
void DoComputeAccessorStubFrame(TranslationIterator* iterator,
int frame_index,
bool is_setter_stub_frame);
- void DoCompiledStubFrame(TranslationIterator* iterator,
- int frame_index);
+ void DoComputeCompiledStubFrame(TranslationIterator* iterator,
+ int frame_index);
void DoTranslateCommand(TranslationIterator* iterator,
int frame_index,
unsigned output_offset);
V(In) \
V(InstanceOf) \
V(InstanceOfKnownGlobal) \
+ V(InstanceSize) \
V(InvokeFunction) \
V(IsConstructCallAndBranch) \
V(IsNilAndBranch) \
};
+// TODO(mstarzinger): This instruction should be modeled as a load of the map
+// field followed by a load of the instance size field once HLoadNamedField is
+// flexible enough to accommodate byte-field loads.
+class HInstanceSize: public HTemplateInstruction<1> {
+ public:
+ explicit HInstanceSize(HValue* object) {
+ SetOperandAt(0, object);
+ set_representation(Representation::Integer32());
+ }
+
+ HValue* object() { return OperandAt(0); }
+
+ virtual Representation RequiredInputRepresentation(int index) {
+ return Representation::Tagged();
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(InstanceSize)
+};
+
+
class HPower: public HTemplateInstruction<2> {
public:
static HInstruction* New(Zone* zone, HValue* left, HValue* right);
}
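+// CheckBuilder emits a chain of checks; every failed check branches into the
+// shared failure_block_, which End() terminates with a deoptimization, while
+// successful control flow continues in the merge block.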
+HGraphBuilder::CheckBuilder::CheckBuilder(HGraphBuilder* builder, BailoutId id)
+ : builder_(builder),
+ finished_(false),
+ id_(id) {
+ HEnvironment* env = builder->environment();
+ failure_block_ = builder->CreateBasicBlock(env->Copy());
+ merge_block_ = builder->CreateBasicBlock(env->Copy());
+}
+
+
+void HGraphBuilder::CheckBuilder::CheckNotUndefined(HValue* value) {
+ HEnvironment* env = builder_->environment();
+ HIsNilAndBranch* compare =
+ new(zone()) HIsNilAndBranch(value, kStrictEquality, kUndefinedValue);
+ HBasicBlock* success_block = builder_->CreateBasicBlock(env->Copy());
+ HBasicBlock* failure_block = builder_->CreateBasicBlock(env->Copy());
+ compare->SetSuccessorAt(0, failure_block);
+ compare->SetSuccessorAt(1, success_block);
+ failure_block->Goto(failure_block_);
+ builder_->current_block()->Finish(compare);
+ builder_->set_current_block(success_block);
+}
+
+
+void HGraphBuilder::CheckBuilder::CheckIntegerEq(HValue* left, HValue* right) {
+ HEnvironment* env = builder_->environment();
+ HCompareIDAndBranch* compare =
+ new(zone()) HCompareIDAndBranch(left, right, Token::EQ);
+ compare->AssumeRepresentation(Representation::Integer32());
+ HBasicBlock* success_block = builder_->CreateBasicBlock(env->Copy());
+ HBasicBlock* failure_block = builder_->CreateBasicBlock(env->Copy());
+ compare->SetSuccessorAt(0, success_block);
+ compare->SetSuccessorAt(1, failure_block);
+ failure_block->Goto(failure_block_);
+ builder_->current_block()->Finish(compare);
+ builder_->set_current_block(success_block);
+}
+
+
+void HGraphBuilder::CheckBuilder::End() {
+ ASSERT(!finished_);
+ builder_->current_block()->Goto(merge_block_);
+ failure_block_->FinishExitWithDeoptimization(HDeoptimize::kUseAll);
+ failure_block_->SetJoinId(id_);
+ builder_->set_current_block(merge_block_);
+ merge_block_->SetJoinId(id_);
+ finished_ = true;
+}
+
+
HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder, BailoutId id)
: builder_(builder),
finished_(false),
id_(id) {
HEnvironment* env = builder->environment();
- HEnvironment* true_env = env->Copy();
- HEnvironment* false_env = env->Copy();
- first_true_block_ = builder->CreateBasicBlock(true_env);
+ first_true_block_ = builder->CreateBasicBlock(env->Copy());
last_true_block_ = NULL;
- first_false_block_ = builder->CreateBasicBlock(false_env);
+ first_false_block_ = builder->CreateBasicBlock(env->Copy());
}
HValue* to_elements,
ElementsKind to_elements_kind,
HValue* length) {
- LoopBuilder builder(this, context, LoopBuilder::kPostIncrement);
+ LoopBuilder builder(this, context, LoopBuilder::kPostIncrement,
+ BailoutId::StubEntry());
HValue* key = builder.BeginBody(graph()->GetConstant0(),
length, Token::LT);
HInstruction* BuildStoreMap(HValue* object, HValue* map, BailoutId id);
HInstruction* BuildStoreMap(HValue* object, Handle<Map> map, BailoutId id);
+ class CheckBuilder {
+ public:
+ CheckBuilder(HGraphBuilder* builder, BailoutId id);
+ ~CheckBuilder() {
+ if (!finished_) End();
+ }
+
+ void CheckNotUndefined(HValue* value);
+ void CheckIntegerEq(HValue* left, HValue* right);
+ void End();
+
+ private:
+ Zone* zone() { return builder_->zone(); }
+
+ HGraphBuilder* builder_;
+ bool finished_;
+ HBasicBlock* failure_block_;
+ HBasicBlock* merge_block_;
+ BailoutId id_;
+ };
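+  // Typical use (cf. FastCloneShallowObjectStub's BuildCodeStub above):
+  //
+  //   CheckBuilder builder(this, BailoutId::StubEntry());
+  //   builder.CheckNotUndefined(boilerplate);
+  //   builder.CheckIntegerEq(boilerplate_size, size_in_words);
+  //   builder.End();  // also run by the destructor if still unfinished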
+
class IfBuilder {
public:
- IfBuilder(HGraphBuilder* builder,
- BailoutId id = BailoutId::StubEntry());
+ IfBuilder(HGraphBuilder* builder, BailoutId id);
~IfBuilder() {
if (!finished_) End();
}
void End();
private:
+ Zone* zone() { return builder_->zone(); }
+
HGraphBuilder* builder_;
bool finished_;
HBasicBlock* first_true_block_;
HBasicBlock* first_false_block_;
HBasicBlock* merge_block_;
BailoutId id_;
-
- Zone* zone() { return builder_->zone(); }
};
class LoopBuilder {
LoopBuilder(HGraphBuilder* builder,
HValue* context,
Direction direction,
- BailoutId id = BailoutId::StubEntry());
+ BailoutId id);
~LoopBuilder() {
ASSERT(finished_);
}
void EndBody();
private:
+ Zone* zone() { return builder_->zone(); }
+
HGraphBuilder* builder_;
HValue* context_;
HInstruction* increment_;
Direction direction_;
BailoutId id_;
bool finished_;
-
- Zone* zone() { return builder_->zone(); }
};
HValue* BuildAllocateElements(HContext* context,
namespace internal {
+void FastCloneShallowObjectStub::InitializeInterfaceDescriptor(
+ Isolate* isolate,
+ CodeStubInterfaceDescriptor* descriptor) {
+ static Register registers[] = { eax, ebx, ecx, edx };
+ descriptor->register_param_count_ = 4;
+ descriptor->register_params_ = registers;
+ descriptor->stack_parameter_count_ = NULL;
+ descriptor->deoptimization_handler_ =
+ Runtime::FunctionForId(Runtime::kCreateObjectLiteralShallow)->entry;
+}
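+// As on ARM, the register parameters mirror the old stack layout: eax takes
+// the literals array, ebx the literal index, ecx the constant properties and
+// edx the object literal flags.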
+
+
void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
}
-void FastCloneShallowObjectStub::Generate(MacroAssembler* masm) {
- // Stack layout on entry:
- //
- // [esp + kPointerSize]: object literal flags.
- // [esp + (2 * kPointerSize)]: constant properties.
- // [esp + (3 * kPointerSize)]: literal index.
- // [esp + (4 * kPointerSize)]: literals array.
-
- // Load boilerplate object into ecx and check if we need to create a
- // boilerplate.
- Label slow_case;
- __ mov(ecx, Operand(esp, 4 * kPointerSize));
- __ mov(eax, Operand(esp, 3 * kPointerSize));
- STATIC_ASSERT(kPointerSize == 4);
- STATIC_ASSERT(kSmiTagSize == 1);
- STATIC_ASSERT(kSmiTag == 0);
- __ mov(ecx, FieldOperand(ecx, eax, times_half_pointer_size,
- FixedArray::kHeaderSize));
- Factory* factory = masm->isolate()->factory();
- __ cmp(ecx, factory->undefined_value());
- __ j(equal, &slow_case);
-
- // Check that the boilerplate contains only fast properties and we can
- // statically determine the instance size.
- int size = JSObject::kHeaderSize + length_ * kPointerSize;
- __ mov(eax, FieldOperand(ecx, HeapObject::kMapOffset));
- __ movzx_b(eax, FieldOperand(eax, Map::kInstanceSizeOffset));
- __ cmp(eax, Immediate(size >> kPointerSizeLog2));
- __ j(not_equal, &slow_case);
-
- // Allocate the JS object and copy header together with all in-object
- // properties from the boilerplate.
- __ AllocateInNewSpace(size, eax, ebx, edx, &slow_case, TAG_OBJECT);
- for (int i = 0; i < size; i += kPointerSize) {
- __ mov(ebx, FieldOperand(ecx, i));
- __ mov(FieldOperand(eax, i), ebx);
- }
-
- // Return and remove the on-stack parameters.
- __ ret(4 * kPointerSize);
-
- __ bind(&slow_case);
- __ TailCallRuntime(Runtime::kCreateObjectLiteralShallow, 4, 1);
-}
-
-
// The stub expects its argument on the stack and returns its result in tos_:
// zero for false, and a non-zero value for true.
void ToBooleanStub::Generate(MacroAssembler* masm) {
}
-void Deoptimizer::DoCompiledStubFrame(TranslationIterator* iterator,
- int frame_index) {
+void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator,
+ int frame_index) {
//
// FROM TO
// | .... | | .... |
// v +-------------------------+ +-------------------------+
// | COMPILED_STUB marker | | STUB_FAILURE marker |
// +-------------------------+ +-------------------------+
- // | | | caller args.length_ |
- // | ... | +-------------------------+
// | | | caller args.arguments_ |
+ // | ... | +-------------------------+
+ // | | | caller args.length_ |
// |-------------------------|<-esp +-------------------------+
// | caller args pointer |
// +-------------------------+
isolate_->code_stub_interface_descriptor(major_key);
// The output frame must have room for all pushed register parameters
- // and the standard stack frame slots.
- int output_frame_size = StandardFrameConstants::kFixedFrameSize +
- kPointerSize * descriptor->register_param_count_;
-
- // Include space for an argument object to the callee and optionally
- // the space to pass the argument object to the stub failure handler.
- output_frame_size += sizeof(Arguments) + kPointerSize;
+ // and the standard stack frame slots. Include space for an argument
+ // object to the callee and optionally the space to pass the argument
+ // object to the stub failure handler.
+ int height_in_bytes = kPointerSize * descriptor->register_param_count_ +
+ sizeof(Arguments) + kPointerSize;
+ int fixed_frame_size = StandardFrameConstants::kFixedFrameSize;
+ int input_frame_size = input_->GetFrameSize();
+ int output_frame_size = height_in_bytes + fixed_frame_size;
+ if (trace_) {
+ PrintF(" translating %s => StubFailureTrampolineStub, height=%d\n",
+ CodeStub::MajorName(static_cast<CodeStub::Major>(major_key), false),
+ height_in_bytes);
+ }
+ // The stub failure trampoline is a single frame.
FrameDescription* output_frame =
- new(output_frame_size) FrameDescription(output_frame_size, 0);
+ new(output_frame_size) FrameDescription(output_frame_size, NULL);
+ output_frame->SetFrameType(StackFrame::STUB_FAILURE_TRAMPOLINE);
ASSERT(frame_index == 0);
output_[frame_index] = output_frame;
- Code* notify_failure =
- isolate_->builtins()->builtin(Builtins::kNotifyStubFailure);
- output_frame->SetState(Smi::FromInt(FullCodeGenerator::NO_REGISTERS));
- output_frame->SetContinuation(
- reinterpret_cast<intptr_t>(notify_failure->entry()));
-
- Code* trampoline = NULL;
- int extra = descriptor->extra_expression_stack_count_;
- StubFailureTrampolineStub(extra).FindCodeInCache(&trampoline, isolate_);
- ASSERT(trampoline != NULL);
- output_frame->SetPc(reinterpret_cast<intptr_t>(
- trampoline->instruction_start()));
- unsigned input_frame_size = input_->GetFrameSize();
- intptr_t frame_ptr = input_->GetRegister(ebp.code());
+ // The top address for the output frame can be computed from the input
+ // frame pointer and the output frame's height. Subtract space for the
+ // context and function slots.
+ intptr_t top_address = input_->GetRegister(ebp.code()) - (2 * kPointerSize) -
+ height_in_bytes;
+ output_frame->SetTop(top_address);
- // JSFunction continuation
+ // Read caller's PC (JSFunction continuation) from the input frame.
intptr_t input_frame_offset = input_frame_size - kPointerSize;
intptr_t output_frame_offset = output_frame_size - kPointerSize;
intptr_t value = input_->GetFrameSlot(input_frame_offset);
output_frame->SetFrameSlot(output_frame_offset, value);
+ if (trace_) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
+ top_address + output_frame_offset, output_frame_offset, value);
+ }
- // saved frame ptr
+ // Read caller's FP from the input frame, and set this frame's FP.
input_frame_offset -= kPointerSize;
value = input_->GetFrameSlot(input_frame_offset);
output_frame_offset -= kPointerSize;
output_frame->SetFrameSlot(output_frame_offset, value);
+ intptr_t frame_ptr = input_->GetRegister(ebp.code());
+ output_frame->SetRegister(ebp.code(), frame_ptr);
+ output_frame->SetFp(frame_ptr);
+ if (trace_) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
+ top_address + output_frame_offset, output_frame_offset, value);
+ }
- // Restore context
+ // Read the context from the input frame.
input_frame_offset -= kPointerSize;
value = input_->GetFrameSlot(input_frame_offset);
output_frame->SetRegister(esi.code(), value);
output_frame_offset -= kPointerSize;
output_frame->SetFrameSlot(output_frame_offset, value);
+ if (trace_) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context\n",
+ top_address + output_frame_offset, output_frame_offset, value);
+ }
- // Internal frame markers
+ // A marker value is used in place of the function.
output_frame_offset -= kPointerSize;
value = reinterpret_cast<intptr_t>(
Smi::FromInt(StackFrame::STUB_FAILURE_TRAMPOLINE));
output_frame->SetFrameSlot(output_frame_offset, value);
+ if (trace_) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; function (stub fail sentinel)\n",
+ top_address + output_frame_offset, output_frame_offset, value);
+ }
int caller_arg_count = 0;
if (descriptor->stack_parameter_count_ != NULL) {
value = frame_ptr + StandardFrameConstants::kCallerSPOffset +
(caller_arg_count - 1) * kPointerSize;
output_frame->SetFrameSlot(output_frame_offset, value);
+ if (trace_) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; args.arguments\n",
+ top_address + output_frame_offset, output_frame_offset, value);
+ }
- output_frame->SetFrameSlot(output_frame_offset, value);
output_frame_offset -= kPointerSize;
- output_frame->SetFrameSlot(output_frame_offset, caller_arg_count);
+ value = caller_arg_count;
+ output_frame->SetFrameSlot(output_frame_offset, value);
+ if (trace_) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; args.length\n",
+ top_address + output_frame_offset, output_frame_offset, value);
+ }
- value = frame_ptr - (output_frame_size - output_frame_offset) -
- StandardFrameConstants::kMarkerOffset;
output_frame_offset -= kPointerSize;
+ value = frame_ptr - (output_frame_size - output_frame_offset) -
+ StandardFrameConstants::kMarkerOffset + kPointerSize;
output_frame->SetFrameSlot(output_frame_offset, value);
+ if (trace_) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; args*\n",
+ top_address + output_frame_offset, output_frame_offset, value);
+ }
// Copy the register parameters to the failure frame.
for (int i = 0; i < descriptor->register_param_count_; ++i) {
DoTranslateCommand(iterator, 0, output_frame_offset);
}
- output_frame->SetRegister(ebp.code(), frame_ptr);
- output_frame->SetFp(frame_ptr);
+ ASSERT(0 == output_frame_offset);
for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; ++i) {
double double_value = input_->GetDoubleRegister(i);
}
output_frame->SetRegister(eax.code(), params);
output_frame->SetRegister(ebx.code(), handler);
+
+ // Compute this frame's PC, state, and continuation.
+ Code* trampoline = NULL;
+ int extra = descriptor->extra_expression_stack_count_;
+ StubFailureTrampolineStub(extra).FindCodeInCache(&trampoline, isolate_);
+ ASSERT(trampoline != NULL);
+ output_frame->SetPc(reinterpret_cast<intptr_t>(
+ trampoline->instruction_start()));
+ output_frame->SetState(Smi::FromInt(FullCodeGenerator::NO_REGISTERS));
+ Code* notify_failure =
+ isolate_->builtins()->builtin(Builtins::kNotifyStubFailure);
+ output_frame->SetContinuation(
+ reinterpret_cast<intptr_t>(notify_failure->entry()));
}
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
Comment cmnt(masm_, "[ ObjectLiteral");
Handle<FixedArray> constant_properties = expr->constant_properties();
- __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
- __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
- __ push(Immediate(Smi::FromInt(expr->literal_index())));
- __ push(Immediate(constant_properties));
int flags = expr->fast_elements()
? ObjectLiteral::kFastElements
: ObjectLiteral::kNoFlags;
flags |= expr->has_function()
? ObjectLiteral::kHasFunction
: ObjectLiteral::kNoFlags;
- __ push(Immediate(Smi::FromInt(flags)));
int properties_count = constant_properties->length() / 2;
if (expr->depth() > 1) {
+ __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
+ __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
+ __ push(Immediate(Smi::FromInt(expr->literal_index())));
+ __ push(Immediate(constant_properties));
+ __ push(Immediate(Smi::FromInt(flags)));
__ CallRuntime(Runtime::kCreateObjectLiteral, 4);
- } else if (flags != ObjectLiteral::kFastElements ||
+ } else if (Serializer::enabled() || flags != ObjectLiteral::kFastElements ||
properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
+ __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
+ __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
+ __ push(Immediate(Smi::FromInt(expr->literal_index())));
+ __ push(Immediate(constant_properties));
+ __ push(Immediate(Smi::FromInt(flags)));
__ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
} else {
+ __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
+ __ mov(eax, FieldOperand(edi, JSFunction::kLiteralsOffset));
+ __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
+ __ mov(ecx, Immediate(constant_properties));
+ __ mov(edx, Immediate(Smi::FromInt(flags)));
FastCloneShallowObjectStub stub(properties_count);
__ CallStub(&stub);
}
}
+void LCodeGen::DoInstanceSize(LInstanceSize* instr) {
+ Register object = ToRegister(instr->object());
+ Register result = ToRegister(instr->result());
+ __ mov(result, FieldOperand(object, HeapObject::kMapOffset));
+ __ movzx_b(result, FieldOperand(result, Map::kInstanceSizeOffset));
+}
+
+
void LCodeGen::DoCmpT(LCmpT* instr) {
Token::Value op = instr->op();
DeferredAllocate* deferred =
new(zone()) DeferredAllocate(this, instr);
- Register size = ToRegister(instr->size());
Register result = ToRegister(instr->result());
Register temp = ToRegister(instr->temp());
- HAllocate* original_instr = instr->hydrogen();
- if (original_instr->size()->IsConstant()) {
- UNREACHABLE();
+ // Allocate memory for the object.
+ AllocationFlags flags = TAG_OBJECT;
+ if (instr->hydrogen()->MustAllocateDoubleAligned()) {
+ flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
+ }
+ if (instr->size()->IsConstantOperand()) {
+ int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
+ __ AllocateInNewSpace(size, result, temp, no_reg, deferred->entry(), flags);
} else {
- // Allocate memory for the object.
- AllocationFlags flags = TAG_OBJECT;
- if (original_instr->MustAllocateDoubleAligned()) {
- flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
- }
- __ AllocateInNewSpace(size, result, temp, no_reg,
- deferred->entry(), flags);
+ Register size = ToRegister(instr->size());
+ __ AllocateInNewSpace(size, result, temp, no_reg, deferred->entry(), flags);
}
__ bind(deferred->exit());
Handle<FixedArray> constant_properties =
instr->hydrogen()->constant_properties();
- // Set up the parameters to the stub/runtime call.
- __ PushHeapObject(literals);
- __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
- __ push(Immediate(constant_properties));
int flags = instr->hydrogen()->fast_elements()
? ObjectLiteral::kFastElements
: ObjectLiteral::kNoFlags;
flags |= instr->hydrogen()->has_function()
? ObjectLiteral::kHasFunction
: ObjectLiteral::kNoFlags;
- __ push(Immediate(Smi::FromInt(flags)));
- // Pick the right runtime function or stub to call.
+ // Set up the parameters to the stub/runtime call and pick the right
+ // runtime function or stub to call.
int properties_count = constant_properties->length() / 2;
if (instr->hydrogen()->depth() > 1) {
+ __ PushHeapObject(literals);
+ __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
+ __ push(Immediate(constant_properties));
+ __ push(Immediate(Smi::FromInt(flags)));
CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
} else if (flags != ObjectLiteral::kFastElements ||
properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
+ __ PushHeapObject(literals);
+ __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
+ __ push(Immediate(constant_properties));
+ __ push(Immediate(Smi::FromInt(flags)));
CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
} else {
+ __ LoadHeapObject(eax, literals);
+ __ mov(ebx, Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
+ __ mov(ecx, Immediate(constant_properties));
+ __ mov(edx, Immediate(Smi::FromInt(flags)));
FastCloneShallowObjectStub stub(properties_count);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
}
+LInstruction* LChunkBuilder::DoInstanceSize(HInstanceSize* instr) {
+ LOperand* object = UseRegisterAtStart(instr->object());
+ return DefineAsRegister(new(zone()) LInstanceSize(object));
+}
+
+
LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
LOperand* receiver = UseRegister(instr->receiver());
LOperand* function = UseRegisterAtStart(instr->function());
V(In) \
V(InstanceOf) \
V(InstanceOfKnownGlobal) \
+ V(InstanceSize) \
V(InstructionGap) \
V(Integer32ToDouble) \
V(Uint32ToDouble) \
};
+class LInstanceSize: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LInstanceSize(LOperand* object) {
+ inputs_[0] = object;
+ }
+
+ LOperand* object() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(InstanceSize, "instance-size")
+ DECLARE_HYDROGEN_ACCESSOR(InstanceSize)
+};
+
+
class LBoundsCheck: public LTemplateInstruction<0, 2, 0> {
public:
LBoundsCheck(LOperand* index, LOperand* length) {
namespace internal {
+void FastCloneShallowObjectStub::InitializeInterfaceDescriptor(
+ Isolate* isolate,
+ CodeStubInterfaceDescriptor* descriptor) {
+ static Register registers[] = { rax, rbx, rcx, rdx };
+ descriptor->register_param_count_ = 4;
+ descriptor->register_params_ = registers;
+ descriptor->stack_parameter_count_ = NULL;
+ descriptor->deoptimization_handler_ =
+ Runtime::FunctionForId(Runtime::kCreateObjectLiteralShallow)->entry;
+}
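+// Register assignment parallels the other platforms: rax holds the literals
+// array, rbx the literal index, rcx the constant properties and rdx the
+// object literal flags.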
+
+
void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
}
-void FastCloneShallowObjectStub::Generate(MacroAssembler* masm) {
- // Stack layout on entry:
- //
- // [rsp + kPointerSize]: object literal flags.
- // [rsp + (2 * kPointerSize)]: constant properties.
- // [rsp + (3 * kPointerSize)]: literal index.
- // [rsp + (4 * kPointerSize)]: literals array.
-
- // Load boilerplate object into ecx and check if we need to create a
- // boilerplate.
- Label slow_case;
- __ movq(rcx, Operand(rsp, 4 * kPointerSize));
- __ movq(rax, Operand(rsp, 3 * kPointerSize));
- SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
- __ movq(rcx,
- FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize));
- __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
- __ j(equal, &slow_case);
-
- // Check that the boilerplate contains only fast properties and we can
- // statically determine the instance size.
- int size = JSObject::kHeaderSize + length_ * kPointerSize;
- __ movq(rax, FieldOperand(rcx, HeapObject::kMapOffset));
- __ movzxbq(rax, FieldOperand(rax, Map::kInstanceSizeOffset));
- __ cmpq(rax, Immediate(size >> kPointerSizeLog2));
- __ j(not_equal, &slow_case);
-
- // Allocate the JS object and copy header together with all in-object
- // properties from the boilerplate.
- __ AllocateInNewSpace(size, rax, rbx, rdx, &slow_case, TAG_OBJECT);
- for (int i = 0; i < size; i += kPointerSize) {
- __ movq(rbx, FieldOperand(rcx, i));
- __ movq(FieldOperand(rax, i), rbx);
- }
-
- // Return and remove the on-stack parameters.
- __ ret(4 * kPointerSize);
-
- __ bind(&slow_case);
- __ TailCallRuntime(Runtime::kCreateObjectLiteralShallow, 4, 1);
-}
-
-
// The stub expects its argument on the stack and returns its result in tos_:
// zero for false, and a non-zero value for true.
void ToBooleanStub::Generate(MacroAssembler* masm) {
}
-void Deoptimizer::DoCompiledStubFrame(TranslationIterator* iterator,
- int frame_index) {
+void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator,
+ int frame_index) {
//
// FROM TO
// | .... | | .... |
// v +-------------------------+ +-------------------------+
// | COMPILED_STUB marker | | STUB_FAILURE marker |
// +-------------------------+ +-------------------------+
- // | | | caller args.length_ |
- // | ... | +-------------------------+
// | | | caller args.arguments_ |
+ // | ... | +-------------------------+
+ // | | | caller args.length_ |
// |-------------------------|<-rsp +-------------------------+
// | caller args pointer |
// +-------------------------+
isolate_->code_stub_interface_descriptor(major_key);
// The output frame must have room for all pushed register parameters
- // and the standard stack frame slots.
- int output_frame_size = StandardFrameConstants::kFixedFrameSize +
- kPointerSize * descriptor->register_param_count_;
-
- // Include space for an argument object to the callee and optionally
- // the space to pass the argument object to the stub failure handler.
- output_frame_size += sizeof(Arguments) + kPointerSize;
+ // and the standard stack frame slots. Include space for an argument
+ // object to the callee and optionally the space to pass the argument
+ // object to the stub failure handler.
+ int height_in_bytes = kPointerSize * descriptor->register_param_count_ +
+ sizeof(Arguments) + kPointerSize;
+ int fixed_frame_size = StandardFrameConstants::kFixedFrameSize;
+ int input_frame_size = input_->GetFrameSize();
+ int output_frame_size = height_in_bytes + fixed_frame_size;
+ if (trace_) {
+ PrintF(" translating %s => StubFailureTrampolineStub, height=%d\n",
+ CodeStub::MajorName(static_cast<CodeStub::Major>(major_key), false),
+ height_in_bytes);
+ }
+ // The stub failure trampoline is a single frame.
FrameDescription* output_frame =
- new(output_frame_size) FrameDescription(output_frame_size, 0);
+ new(output_frame_size) FrameDescription(output_frame_size, NULL);
+ output_frame->SetFrameType(StackFrame::STUB_FAILURE_TRAMPOLINE);
ASSERT(frame_index == 0);
output_[frame_index] = output_frame;
- Code* notify_failure =
- isolate_->builtins()->builtin(Builtins::kNotifyStubFailure);
- output_frame->SetState(Smi::FromInt(FullCodeGenerator::NO_REGISTERS));
- output_frame->SetContinuation(
- reinterpret_cast<intptr_t>(notify_failure->entry()));
-
- Code* trampoline = NULL;
- int extra = descriptor->extra_expression_stack_count_;
- StubFailureTrampolineStub(extra).FindCodeInCache(&trampoline, isolate_);
- ASSERT(trampoline != NULL);
- output_frame->SetPc(reinterpret_cast<intptr_t>(
- trampoline->instruction_start()));
- unsigned input_frame_size = input_->GetFrameSize();
- intptr_t frame_ptr = input_->GetRegister(rbp.code());
+ // The top address for the output frame can be computed from the input
+ // frame pointer and the output frame's height. Subtract space for the
+ // context and function slots.
+ intptr_t top_address = input_->GetRegister(rbp.code()) - (2 * kPointerSize) -
+ height_in_bytes;
+ output_frame->SetTop(top_address);
- // JSFunction continuation
+ // Read caller's PC (JSFunction continuation) from the input frame.
unsigned input_frame_offset = input_frame_size - kPointerSize;
unsigned output_frame_offset = output_frame_size - kPointerSize;
intptr_t value = input_->GetFrameSlot(input_frame_offset);
output_frame->SetFrameSlot(output_frame_offset, value);
+ if (trace_) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR " ; caller's pc\n",
+ top_address + output_frame_offset, output_frame_offset, value);
+ }
- // saved frame ptr
+ // Read caller's FP from the input frame, and set this frame's FP.
input_frame_offset -= kPointerSize;
value = input_->GetFrameSlot(input_frame_offset);
output_frame_offset -= kPointerSize;
output_frame->SetFrameSlot(output_frame_offset, value);
+ intptr_t frame_ptr = input_->GetRegister(rbp.code());
+ output_frame->SetRegister(rbp.code(), frame_ptr);
+ output_frame->SetFp(frame_ptr);
+ if (trace_) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR " ; caller's fp\n",
+ top_address + output_frame_offset, output_frame_offset, value);
+ }
- // Restore context
+ // Read the context from the input frame.
input_frame_offset -= kPointerSize;
value = input_->GetFrameSlot(input_frame_offset);
output_frame->SetRegister(rsi.code(), value);
output_frame_offset -= kPointerSize;
output_frame->SetFrameSlot(output_frame_offset, value);
+ if (trace_) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR " ; context\n",
+ top_address + output_frame_offset, output_frame_offset, value);
+ }
- // Internal frame markers
+ // A marker value is used in place of the function.
output_frame_offset -= kPointerSize;
value = reinterpret_cast<intptr_t>(
Smi::FromInt(StackFrame::STUB_FAILURE_TRAMPOLINE));
output_frame->SetFrameSlot(output_frame_offset, value);
+ if (trace_) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR " ; function (stub failure sentinel)\n",
+ top_address + output_frame_offset, output_frame_offset, value);
+ }
intptr_t caller_arg_count = 0;
if (descriptor->stack_parameter_count_ != NULL) {
value = frame_ptr + StandardFrameConstants::kCallerSPOffset +
(caller_arg_count - 1) * kPointerSize;
output_frame->SetFrameSlot(output_frame_offset, value);
+ if (trace_) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR " ; args.arguments\n",
+ top_address + output_frame_offset, output_frame_offset, value);
+ }
- output_frame->SetFrameSlot(output_frame_offset, value);
output_frame_offset -= kPointerSize;
- output_frame->SetFrameSlot(output_frame_offset, caller_arg_count);
+ value = caller_arg_count;
+ output_frame->SetFrameSlot(output_frame_offset, value);
+ if (trace_) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR " ; args.length\n",
+ top_address + output_frame_offset, output_frame_offset, value);
+ }
- value = frame_ptr - (output_frame_size - output_frame_offset) -
- StandardFrameConstants::kMarkerOffset;
output_frame_offset -= kPointerSize;
+ value = frame_ptr - (output_frame_size - output_frame_offset) -
+ StandardFrameConstants::kMarkerOffset + kPointerSize;
output_frame->SetFrameSlot(output_frame_offset, value);
+ if (trace_) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR " ; args*\n",
+ top_address + output_frame_offset, output_frame_offset, value);
+ }
// Copy the register parameters to the failure frame.
for (int i = 0; i < descriptor->register_param_count_; ++i) {
DoTranslateCommand(iterator, 0, output_frame_offset);
}
- output_frame->SetRegister(rbp.code(), frame_ptr);
- output_frame->SetFp(frame_ptr);
+ ASSERT(0 == output_frame_offset);
for (int i = 0; i < XMMRegister::NumAllocatableRegisters(); ++i) {
double double_value = input_->GetDoubleRegister(i);
}
output_frame->SetRegister(rax.code(), params);
output_frame->SetRegister(rbx.code(), handler);
+
+ // Compute this frame's PC, state, and continuation.
+ Code* trampoline = NULL;
+ int extra = descriptor->extra_expression_stack_count_;
+ StubFailureTrampolineStub(extra).FindCodeInCache(&trampoline, isolate_);
+ ASSERT(trampoline != NULL);
+ output_frame->SetPc(reinterpret_cast<intptr_t>(
+ trampoline->instruction_start()));
+ output_frame->SetState(Smi::FromInt(FullCodeGenerator::NO_REGISTERS));
+ Code* notify_failure =
+ isolate_->builtins()->builtin(Builtins::kNotifyStubFailure);
+ output_frame->SetContinuation(
+ reinterpret_cast<intptr_t>(notify_failure->entry()));
}
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
Comment cmnt(masm_, "[ ObjectLiteral");
Handle<FixedArray> constant_properties = expr->constant_properties();
- __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
- __ push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
- __ Push(Smi::FromInt(expr->literal_index()));
- __ Push(constant_properties);
int flags = expr->fast_elements()
? ObjectLiteral::kFastElements
: ObjectLiteral::kNoFlags;
flags |= expr->has_function()
? ObjectLiteral::kHasFunction
: ObjectLiteral::kNoFlags;
- __ Push(Smi::FromInt(flags));
int properties_count = constant_properties->length() / 2;
if (expr->depth() > 1) {
+ __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
+ __ push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
+ __ Push(Smi::FromInt(expr->literal_index()));
+ __ Push(constant_properties);
+ __ Push(Smi::FromInt(flags));
__ CallRuntime(Runtime::kCreateObjectLiteral, 4);
- } else if (flags != ObjectLiteral::kFastElements ||
+ } else if (Serializer::enabled() || flags != ObjectLiteral::kFastElements ||
properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
+ __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
+ __ push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
+ __ Push(Smi::FromInt(expr->literal_index()));
+ __ Push(constant_properties);
+ __ Push(Smi::FromInt(flags));
__ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
} else {
+ __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
+ __ movq(rax, FieldOperand(rdi, JSFunction::kLiteralsOffset));
+ __ Move(rbx, Smi::FromInt(expr->literal_index()));
+ __ Move(rcx, constant_properties);
+ __ Move(rdx, Smi::FromInt(flags));
FastCloneShallowObjectStub stub(properties_count);
__ CallStub(&stub);
}
}
+void LCodeGen::DoInstanceSize(LInstanceSize* instr) {
+ Register object = ToRegister(instr->object());
+ Register result = ToRegister(instr->result());
+ __ movq(result, FieldOperand(object, HeapObject::kMapOffset));
+ __ movzxbq(result, FieldOperand(result, Map::kInstanceSizeOffset));
+}
+
+
void LCodeGen::DoCmpT(LCmpT* instr) {
Token::Value op = instr->op();
DeferredAllocate* deferred =
new(zone()) DeferredAllocate(this, instr);
- Register size = ToRegister(instr->size());
Register result = ToRegister(instr->result());
Register temp = ToRegister(instr->temp());
- HAllocate* original_instr = instr->hydrogen();
- if (original_instr->size()->IsConstant()) {
- UNREACHABLE();
+ // Allocate memory for the object.
+ AllocationFlags flags = TAG_OBJECT;
+ if (instr->hydrogen()->MustAllocateDoubleAligned()) {
+ flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
+ }
+ if (instr->size()->IsConstantOperand()) {
+ int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
+ __ AllocateInNewSpace(size, result, temp, no_reg, deferred->entry(), flags);
} else {
- // Allocate memory for the object.
- AllocationFlags flags = TAG_OBJECT;
- if (original_instr->MustAllocateDoubleAligned()) {
- flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
- }
- __ AllocateInNewSpace(size, result, temp, no_reg,
- deferred->entry(), flags);
+ Register size = ToRegister(instr->size());
+ __ AllocateInNewSpace(size, result, temp, no_reg, deferred->entry(), flags);
}
__ bind(deferred->exit());
Handle<FixedArray> constant_properties =
instr->hydrogen()->constant_properties();
- // Set up the parameters to the stub/runtime call.
- __ PushHeapObject(literals);
- __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
- __ Push(constant_properties);
int flags = instr->hydrogen()->fast_elements()
? ObjectLiteral::kFastElements
: ObjectLiteral::kNoFlags;
flags |= instr->hydrogen()->has_function()
? ObjectLiteral::kHasFunction
: ObjectLiteral::kNoFlags;
- __ Push(Smi::FromInt(flags));
- // Pick the right runtime function or stub to call.
+ // Set up the parameters to the stub/runtime call and pick the right
+ // runtime function or stub to call.
int properties_count = constant_properties->length() / 2;
if (instr->hydrogen()->depth() > 1) {
+ __ PushHeapObject(literals);
+ __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
+ __ Push(constant_properties);
+ __ Push(Smi::FromInt(flags));
CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
} else if (flags != ObjectLiteral::kFastElements ||
properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
+ __ PushHeapObject(literals);
+ __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
+ __ Push(constant_properties);
+ __ Push(Smi::FromInt(flags));
CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
} else {
+ __ LoadHeapObject(rax, literals);
+ __ Move(rbx, Smi::FromInt(instr->hydrogen()->literal_index()));
+ __ Move(rcx, constant_properties);
+ __ Move(rdx, Smi::FromInt(flags));
FastCloneShallowObjectStub stub(properties_count);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
}
+LInstruction* LChunkBuilder::DoInstanceSize(HInstanceSize* instr) {
+ LOperand* object = UseRegisterAtStart(instr->object());
+ return DefineAsRegister(new(zone()) LInstanceSize(object));
+}
+
+
LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
LOperand* receiver = UseRegister(instr->receiver());
LOperand* function = UseRegisterAtStart(instr->function());
V(In) \
V(InstanceOf) \
V(InstanceOfKnownGlobal) \
+ V(InstanceSize) \
V(InstructionGap) \
V(Integer32ToDouble) \
V(Uint32ToDouble) \
};
+class LInstanceSize: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LInstanceSize(LOperand* object) {
+ inputs_[0] = object;
+ }
+
+ LOperand* object() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(InstanceSize, "instance-size")
+ DECLARE_HYDROGEN_ACCESSOR(InstanceSize)
+};
+
+
class LBoundsCheck: public LTemplateInstruction<0, 2, 0> {
public:
LBoundsCheck(LOperand* index, LOperand* length) {