From: mstarzinger@chromium.org
Date: Tue, 26 Feb 2013 13:08:08 +0000 (+0000)
Subject: Compile FastCloneShallowObjectStub using Crankshaft.
X-Git-Tag: upstream/4.7.83~15027
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=7d681e014de95e523943806fda9c795eef3b4eb8;p=platform%2Fupstream%2Fv8.git

Compile FastCloneShallowObjectStub using Crankshaft.

This changes FastCloneShallowObjectStub to be compiled independently of
the target architecture. It also adds tracing to the deoptimizer for
compiled stubs and contains some minor bugfixes.

R=danno@chromium.org

Review URL: https://codereview.chromium.org/12220074

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@13732 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
---

diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index a7e9611..fa84a8e 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -38,6 +38,18 @@ namespace v8 {
 namespace internal {
 
 
+void FastCloneShallowObjectStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { r3, r2, r1, r0 };
+  descriptor->register_param_count_ = 4;
+  descriptor->register_params_ = registers;
+  descriptor->stack_parameter_count_ = NULL;
+  descriptor->deoptimization_handler_ =
+      Runtime::FunctionForId(Runtime::kCreateObjectLiteralShallow)->entry;
+}
+
+
 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
     Isolate* isolate,
     CodeStubInterfaceDescriptor* descriptor) {
@@ -500,49 +512,6 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
 }
 
 
-void FastCloneShallowObjectStub::Generate(MacroAssembler* masm) {
-  // Stack layout on entry:
-  //
-  // [sp]: object literal flags.
-  // [sp + kPointerSize]: constant properties.
-  // [sp + (2 * kPointerSize)]: literal index.
-  // [sp + (3 * kPointerSize)]: literals array.
-
-  // Load boilerplate object into r3 and check if we need to create a
-  // boilerplate.
-  Label slow_case;
-  __ ldr(r3, MemOperand(sp, 3 * kPointerSize));
-  __ ldr(r0, MemOperand(sp, 2 * kPointerSize));
-  __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
-  __ ldr(r3, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
-  __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
-  __ b(eq, &slow_case);
-
-  // Check that the boilerplate contains only fast properties and we can
-  // statically determine the instance size.
-  int size = JSObject::kHeaderSize + length_ * kPointerSize;
-  __ ldr(r0, FieldMemOperand(r3, HeapObject::kMapOffset));
-  __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceSizeOffset));
-  __ cmp(r0, Operand(size >> kPointerSizeLog2));
-  __ b(ne, &slow_case);
-
-  // Allocate the JS object and copy header together with all in-object
-  // properties from the boilerplate.
-  __ AllocateInNewSpace(size, r0, r1, r2, &slow_case, TAG_OBJECT);
-  for (int i = 0; i < size; i += kPointerSize) {
-    __ ldr(r1, FieldMemOperand(r3, i));
-    __ str(r1, FieldMemOperand(r0, i));
-  }
-
-  // Return and remove the on-stack parameters.
-  __ add(sp, sp, Operand(4 * kPointerSize));
-  __ Ret();
-
-  __ bind(&slow_case);
-  __ TailCallRuntime(Runtime::kCreateObjectLiteralShallow, 4, 1);
-}
-
-
 // Takes a Smi and converts to an IEEE 64 bit floating point value in two
 // registers.  The format is 1 sign bit, 11 exponent bits (biased 1023) and
 // 52 fraction bits (20 in the first word, 32 in the second).  Zeros is a
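A note on the new calling convention before the deoptimizer changes: the
interface descriptor above fixes the stub's four inputs (literals array,
literal index, constant properties, literal flags) in registers, r3..r0 here,
eax..edx on ia32 and rax..rdx on x64, instead of on the stack, and registers
kCreateObjectLiteralShallow as the handler to run when the stub bails out.
A minimal caller-side sketch for ARM, mirroring the
FullCodeGenerator::VisitObjectLiteral change further down; literal_index,
constant_properties, flags, and properties_count stand in for the
expression-specific values of the call site:

    // Load the four stub parameters into the descriptor's registers and
    // call the stub; nothing is pushed on the stack for the fast path.
    __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));  // literals
    __ mov(r2, Operand(Smi::FromInt(literal_index)));              // index
    __ mov(r1, Operand(constant_properties));                      // properties
    __ mov(r0, Operand(Smi::FromInt(flags)));                      // flags
    FastCloneShallowObjectStub stub(properties_count);
    __ CallStub(&stub);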
diff --git a/src/arm/deoptimizer-arm.cc b/src/arm/deoptimizer-arm.cc
index 06415ed..190dd76 100644
--- a/src/arm/deoptimizer-arm.cc
+++ b/src/arm/deoptimizer-arm.cc
@@ -452,8 +452,8 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
 }
 
 
-void Deoptimizer::DoCompiledStubFrame(TranslationIterator* iterator,
-                                      int frame_index) {
+void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator,
+                                             int frame_index) {
   //
   //               FROM                                  TO
   //    |          ....           |          |          ....           |
@@ -466,9 +466,9 @@ void Deoptimizer::DoCompiledStubFrame(TranslationIterator* iterator,
   // v  +-------------------------+          +-------------------------|
   //    |  COMPILED_STUB marker   |          |   STUB_FAILURE marker   |
   //    +-------------------------+          +-------------------------+
-  //    |                         |          |  caller args.length_    |
-  //    |          ...            |          +-------------------------+
   //    |                         |          |  caller args.arguments_ |
+  //    |          ...            |          +-------------------------+
+  //    |                         |          |  caller args.length_    |
   //    |-------------------------|<-sp      +-------------------------+
   //                                         |  caller args pointer    |
   //                                         +-------------------------+
@@ -490,58 +490,77 @@ void Deoptimizer::DoCompiledStubFrame(TranslationIterator* iterator,
       isolate_->code_stub_interface_descriptor(major_key);
 
   // The output frame must have room for all pushed register parameters
-  // and the standard stack frame slots.
-  int output_frame_size = StandardFrameConstants::kFixedFrameSize +
-      kPointerSize * descriptor->register_param_count_;
-
-  // Include space for an argument object to the callee and optionally
-  // the space to pass the argument object to the stub failure handler.
-  output_frame_size += sizeof(Arguments) + kPointerSize;
+  // and the standard stack frame slots.  Include space for an argument
+  // object to the callee and optionally the space to pass the argument
+  // object to the stub failure handler.
+  int height_in_bytes = kPointerSize * descriptor->register_param_count_ +
+      sizeof(Arguments) + kPointerSize;
+  int fixed_frame_size = StandardFrameConstants::kFixedFrameSize;
+  int input_frame_size = input_->GetFrameSize();
+  int output_frame_size = height_in_bytes + fixed_frame_size;
+  if (trace_) {
+    PrintF("  translating %s => StubFailureTrampolineStub, height=%d\n",
+           CodeStub::MajorName(static_cast<CodeStub::Major>(major_key), false),
+           height_in_bytes);
+  }
 
+  // The stub failure trampoline is a single frame.
   FrameDescription* output_frame =
-      new(output_frame_size) FrameDescription(output_frame_size, 0);
+      new(output_frame_size) FrameDescription(output_frame_size, NULL);
+  output_frame->SetFrameType(StackFrame::STUB_FAILURE_TRAMPOLINE);
   ASSERT(frame_index == 0);
   output_[frame_index] = output_frame;
-  Code* notify_failure =
-      isolate_->builtins()->builtin(Builtins::kNotifyStubFailure);
-  output_frame->SetState(Smi::FromInt(FullCodeGenerator::NO_REGISTERS));
-  output_frame->SetContinuation(
-      reinterpret_cast<intptr_t>(notify_failure->entry()));
-
-  Code* trampoline = NULL;
-  int extra = descriptor->extra_expression_stack_count_;
-  StubFailureTrampolineStub(extra).FindCodeInCache(&trampoline, isolate_);
-  ASSERT(trampoline != NULL);
-  output_frame->SetPc(reinterpret_cast<intptr_t>(
-      trampoline->instruction_start()));
-  unsigned input_frame_size = input_->GetFrameSize();
 
-  intptr_t frame_ptr = input_->GetRegister(fp.code());
+  // The top address for the output frame can be computed from the input
+  // frame pointer and the output frame's height.  Subtract space for the
+  // context and function slots.
+  intptr_t top_address = input_->GetRegister(fp.code()) - (2 * kPointerSize) -
+      height_in_bytes;
+  output_frame->SetTop(top_address);
 
-  // JSFunction continuation
+  // Read caller's PC (JSFunction continuation) from the input frame.
   intptr_t input_frame_offset = input_frame_size - kPointerSize;
   intptr_t output_frame_offset = output_frame_size - kPointerSize;
   intptr_t value = input_->GetFrameSlot(input_frame_offset);
   output_frame->SetFrameSlot(output_frame_offset, value);
+  if (trace_) {
+    PrintF("    0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
+           top_address + output_frame_offset, output_frame_offset, value);
+  }
 
-  // saved frame ptr
+  // Read caller's FP from the input frame, and set this frame's FP.
   input_frame_offset -= kPointerSize;
   value = input_->GetFrameSlot(input_frame_offset);
   output_frame_offset -= kPointerSize;
   output_frame->SetFrameSlot(output_frame_offset, value);
+  intptr_t frame_ptr = input_->GetRegister(fp.code());
+  output_frame->SetRegister(fp.code(), frame_ptr);
+  output_frame->SetFp(frame_ptr);
+  if (trace_) {
+    PrintF("    0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
+           top_address + output_frame_offset, output_frame_offset, value);
+  }
 
-  // Restore context
+  // The context can be gotten from the input frame.
   input_frame_offset -= kPointerSize;
   value = input_->GetFrameSlot(input_frame_offset);
   output_frame->SetRegister(cp.code(), value);
   output_frame_offset -= kPointerSize;
   output_frame->SetFrameSlot(output_frame_offset, value);
+  if (trace_) {
+    PrintF("    0x%08x: [top + %d] <- 0x%08x ; context\n",
+           top_address + output_frame_offset, output_frame_offset, value);
+  }
 
-  // Internal frame markers
+  // A marker value is used in place of the function.
   output_frame_offset -= kPointerSize;
   value = reinterpret_cast<intptr_t>(
       Smi::FromInt(StackFrame::STUB_FAILURE_TRAMPOLINE));
   output_frame->SetFrameSlot(output_frame_offset, value);
+  if (trace_) {
+    PrintF("    0x%08x: [top + %d] <- 0x%08x ; function (stub fail sentinel)\n",
+           top_address + output_frame_offset, output_frame_offset, value);
+  }
 
   int caller_arg_count = 0;
   if (descriptor->stack_parameter_count_ != NULL) {
@@ -554,15 +573,27 @@ void Deoptimizer::DoCompiledStubFrame(TranslationIterator* iterator,
   value = frame_ptr + StandardFrameConstants::kCallerSPOffset +
       (caller_arg_count - 1) * kPointerSize;
   output_frame->SetFrameSlot(output_frame_offset, value);
+  if (trace_) {
+    PrintF("    0x%08x: [top + %d] <- 0x%08x ; args.arguments\n",
+           top_address + output_frame_offset, output_frame_offset, value);
+  }
 
-  output_frame->SetFrameSlot(output_frame_offset, value);
   output_frame_offset -= kPointerSize;
-  output_frame->SetFrameSlot(output_frame_offset, caller_arg_count);
+  value = caller_arg_count;
+  output_frame->SetFrameSlot(output_frame_offset, value);
+  if (trace_) {
+    PrintF("    0x%08x: [top + %d] <- 0x%08x ; args.length\n",
+           top_address + output_frame_offset, output_frame_offset, value);
+  }
 
-  value = frame_ptr - (output_frame_size - output_frame_offset) -
-      StandardFrameConstants::kMarkerOffset;
   output_frame_offset -= kPointerSize;
+  value = frame_ptr - (output_frame_size - output_frame_offset) -
+      StandardFrameConstants::kMarkerOffset + kPointerSize;
   output_frame->SetFrameSlot(output_frame_offset, value);
+  if (trace_) {
+    PrintF("    0x%08x: [top + %d] <- 0x%08x ; args*\n",
+           top_address + output_frame_offset, output_frame_offset, value);
+  }
 
   // Copy the register parameters to the failure frame.
   for (int i = 0; i < descriptor->register_param_count_; ++i) {
@@ -570,14 +601,13 @@ void Deoptimizer::DoCompiledStubFrame(TranslationIterator* iterator,
     DoTranslateCommand(iterator, 0, output_frame_offset);
   }
 
+  ASSERT(0 == output_frame_offset);
+
   for (int i = 0; i < DwVfpRegister::kMaxNumRegisters; ++i) {
     double double_value = input_->GetDoubleRegister(i);
     output_frame->SetDoubleRegister(i, double_value);
   }
 
-  output_frame->SetRegister(fp.code(), frame_ptr);
-  output_frame->SetFp(frame_ptr);
-
   ApiFunction function(descriptor->deoptimization_handler_);
   ExternalReference xref(&function, ExternalReference::BUILTIN_CALL, isolate_);
   intptr_t handler = reinterpret_cast<intptr_t>(xref.address());
@@ -587,6 +617,19 @@ void Deoptimizer::DoCompiledStubFrame(TranslationIterator* iterator,
   }
   output_frame->SetRegister(r0.code(), params);
   output_frame->SetRegister(r1.code(), handler);
+
+  // Compute this frame's PC, state, and continuation.
+  Code* trampoline = NULL;
+  int extra = descriptor->extra_expression_stack_count_;
+  StubFailureTrampolineStub(extra).FindCodeInCache(&trampoline, isolate_);
+  ASSERT(trampoline != NULL);
+  output_frame->SetPc(reinterpret_cast<intptr_t>(
+      trampoline->instruction_start()));
+  output_frame->SetState(Smi::FromInt(FullCodeGenerator::NO_REGISTERS));
+  Code* notify_failure =
+      isolate_->builtins()->builtin(Builtins::kNotifyStubFailure);
+  output_frame->SetContinuation(
+      reinterpret_cast<intptr_t>(notify_failure->entry()));
 }
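To make the frame layout above concrete, here is the height computation for
this stub's failure frame on a 32-bit target, as a worked example rather than
code from the patch. It assumes four register parameters and that Arguments is
the two-word {length_, arguments_} pair pictured in the diagram:

    // Hypothetical height_in_bytes for a stub with four register params:
    int height_in_bytes = 4 * kPointerSize    // four pushed register params
                        + sizeof(Arguments)   // args.length_ and args.arguments_
                        + kPointerSize;       // argument object for the handler
    // With kPointerSize == 4 and sizeof(Arguments) == 8 this yields 28 bytes;
    // output_frame_size then adds StandardFrameConstants::kFixedFrameSize.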
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index 9ccae5a..4db8e46 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -1580,7 +1580,7 @@ void FullCodeGenerator::EmitAccessor(Expression* expression) {
 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
   Comment cmnt(masm_, "[ ObjectLiteral");
   Handle<FixedArray> constant_properties = expr->constant_properties();
-  __ ldr(r3, MemOperand(fp,  JavaScriptFrameConstants::kFunctionOffset));
+  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
   __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
   __ mov(r1, Operand(constant_properties));
@@ -1591,12 +1591,13 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
       ? ObjectLiteral::kHasFunction
       : ObjectLiteral::kNoFlags;
   __ mov(r0, Operand(Smi::FromInt(flags)));
-  __ Push(r3, r2, r1, r0);
   int properties_count = constant_properties->length() / 2;
   if (expr->depth() > 1) {
+    __ Push(r3, r2, r1, r0);
     __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
-  } else if (flags != ObjectLiteral::kFastElements ||
+  } else if (Serializer::enabled() || flags != ObjectLiteral::kFastElements ||
       properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
+    __ Push(r3, r2, r1, r0);
     __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
   } else {
     FastCloneShallowObjectStub stub(properties_count);
diff --git a/src/arm/lithium-arm.cc b/src/arm/lithium-arm.cc
index c7b4382..69fd5e4 100644
--- a/src/arm/lithium-arm.cc
+++ b/src/arm/lithium-arm.cc
@@ -972,6 +972,12 @@ LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
 }
 
 
+LInstruction* LChunkBuilder::DoInstanceSize(HInstanceSize* instr) {
+  LOperand* object = UseRegisterAtStart(instr->object());
+  return DefineAsRegister(new(zone()) LInstanceSize(object));
+}
+
+
 LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
   LOperand* receiver = UseRegisterAtStart(instr->receiver());
   LOperand* function = UseRegisterAtStart(instr->function());
diff --git a/src/arm/lithium-arm.h b/src/arm/lithium-arm.h
index a93e898..a1df469 100644
--- a/src/arm/lithium-arm.h
+++ b/src/arm/lithium-arm.h
@@ -108,6 +108,7 @@ class LCodeGen;
   V(In)                                        \
   V(InstanceOf)                                \
   V(InstanceOfKnownGlobal)                     \
+  V(InstanceSize)                              \
   V(InstructionGap)                            \
   V(Integer32ToDouble)                         \
   V(Uint32ToDouble)                            \
@@ -980,6 +981,19 @@ class LInstanceOfKnownGlobal: public LTemplateInstruction<1, 1, 1> {
 };
 
 
+class LInstanceSize: public LTemplateInstruction<1, 1, 0> {
+ public:
+  explicit LInstanceSize(LOperand* object) {
+    inputs_[0] = object;
+  }
+
+  LOperand* object() { return inputs_[0]; }
+
+  DECLARE_CONCRETE_INSTRUCTION(InstanceSize, "instance-size")
+  DECLARE_HYDROGEN_ACCESSOR(InstanceSize)
+};
+
+
 class LBoundsCheck: public LTemplateInstruction<0, 2, 0> {
  public:
   LBoundsCheck(LOperand* index, LOperand* length) {
diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc
index f140e39..29f8260 100644
--- a/src/arm/lithium-codegen-arm.cc
+++ b/src/arm/lithium-codegen-arm.cc
@@ -2842,6 +2842,14 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
 }
 
 
+void LCodeGen::DoInstanceSize(LInstanceSize* instr) {
+  Register object = ToRegister(instr->object());
+  Register result = ToRegister(instr->result());
+  __ ldr(result, FieldMemOperand(object, HeapObject::kMapOffset));
+  __ ldrb(result, FieldMemOperand(result, Map::kInstanceSizeOffset));
+}
+
+
 void LCodeGen::DoCmpT(LCmpT* instr) {
   Token::Value op = instr->op();
 
@@ -5602,26 +5610,31 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
   DeferredAllocate* deferred =
       new(zone()) DeferredAllocate(this, instr);
 
-  Register size = ToRegister(instr->size());
   Register result = ToRegister(instr->result());
   Register scratch = ToRegister(instr->temp1());
   Register scratch2 = ToRegister(instr->temp2());
 
-  HAllocate* original_instr = instr->hydrogen();
-  if (original_instr->size()->IsConstant()) {
-    UNREACHABLE();
+  // Allocate memory for the object.
+  AllocationFlags flags = TAG_OBJECT;
+  if (instr->hydrogen()->MustAllocateDoubleAligned()) {
+    flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
+  }
+  if (instr->size()->IsConstantOperand()) {
+    int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
+    __ AllocateInNewSpace(size,
+                          result,
+                          scratch,
+                          scratch2,
+                          deferred->entry(),
+                          flags);
   } else {
-    // Allocate memory for the object.
-    AllocationFlags flags = TAG_OBJECT;
-    if (original_instr->MustAllocateDoubleAligned()) {
-      flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
-    }
+    Register size = ToRegister(instr->size());
     __ AllocateInNewSpace(size,
                           result,
                           scratch,
                           scratch2,
                           deferred->entry(),
-                          TAG_OBJECT);
+                          flags);
   }
 
   __ bind(deferred->exit());
@@ -5869,21 +5882,22 @@ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
       instr->hydrogen()->constant_properties();
 
   // Set up the parameters to the stub/runtime call.
-  __ LoadHeapObject(r4, literals);
-  __ mov(r3, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
-  __ mov(r2, Operand(constant_properties));
+  __ LoadHeapObject(r3, literals);
+  __ mov(r2, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
+  __ mov(r1, Operand(constant_properties));
   int flags = instr->hydrogen()->fast_elements()
       ? ObjectLiteral::kFastElements
       : ObjectLiteral::kNoFlags;
-  __ mov(r1, Operand(Smi::FromInt(flags)));
-  __ Push(r4, r3, r2, r1);
+  __ mov(r0, Operand(Smi::FromInt(flags)));
 
   // Pick the right runtime function or stub to call.
   int properties_count = constant_properties->length() / 2;
   if (instr->hydrogen()->depth() > 1) {
+    __ Push(r3, r2, r1, r0);
     CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
   } else if (flags != ObjectLiteral::kFastElements ||
       properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
+    __ Push(r3, r2, r1, r0);
     CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
   } else {
     FastCloneShallowObjectStub stub(properties_count);
diff --git a/src/code-stubs-hydrogen.cc b/src/code-stubs-hydrogen.cc
index 3421ef8..d02715f 100644
--- a/src/code-stubs-hydrogen.cc
+++ b/src/code-stubs-hydrogen.cc
@@ -138,6 +138,60 @@ class CodeStubGraphBuilder: public CodeStubGraphBuilderBase {
 
 
 template <>
+void CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
+  Zone* zone = this->zone();
+  Factory* factory = isolate()->factory();
+
+  HInstruction* boilerplate =
+      AddInstruction(new(zone) HLoadKeyed(GetParameter(0),
+                                          GetParameter(1),
+                                          NULL,
+                                          FAST_ELEMENTS));
+
+  CheckBuilder builder(this, BailoutId::StubEntry());
+  builder.CheckNotUndefined(boilerplate);
+
+  int size = JSObject::kHeaderSize + casted_stub()->length() * kPointerSize;
+  HValue* boilerplate_size =
+      AddInstruction(new(zone) HInstanceSize(boilerplate));
+  HValue* size_in_words =
+      AddInstruction(new(zone) HConstant(size >> kPointerSizeLog2,
+                                         Representation::Integer32()));
+  builder.CheckIntegerEq(boilerplate_size, size_in_words);
+
+  HValue* size_in_bytes =
+      AddInstruction(new(zone) HConstant(size, Representation::Integer32()));
+  HInstruction* object =
+      AddInstruction(new(zone) HAllocate(context(),
+                                         size_in_bytes,
+                                         HType::JSObject(),
+                                         HAllocate::CAN_ALLOCATE_IN_NEW_SPACE));
+
+  for (int i = 0; i < size; i += kPointerSize) {
+    HInstruction* value =
+        AddInstruction(new(zone) HLoadNamedField(boilerplate, true, i));
+    AddInstruction(new(zone) HStoreNamedField(object,
+                                              factory->empty_symbol(),
+                                              value,
+                                              true, i));
+    AddSimulate(BailoutId::StubEntry());
+  }
+
+  builder.End();
+
+  HReturn* ret = new(zone) HReturn(object, context());
+  current_block()->Finish(ret);
+}
+
+
+Handle<Code> FastCloneShallowObjectStub::GenerateCode() {
+  CodeStubGraphBuilder<FastCloneShallowObjectStub> builder(this);
+  LChunk* chunk = OptimizeGraph(builder.CreateGraph());
+  return chunk->Codegen(Code::COMPILED_STUB);
+}
+
+
+template <>
 void CodeStubGraphBuilder<KeyedLoadFastElementStub>::BuildCodeStub() {
   Zone* zone = this->zone();
 
@@ -189,7 +243,7 @@ void CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
       new(zone) HBoundsCheck(array_length, max_alloc_size,
                              DONT_ALLOW_SMI_KEY, Representation::Integer32()));
-  IfBuilder if_builder(this);
+  IfBuilder if_builder(this, BailoutId::StubEntry());
 
   if_builder.BeginTrue(array_length, graph()->GetConstant0(), Token::EQ);
 
@@ -219,7 +273,8 @@ void CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
       : AddInstruction(new(zone) HConstant(nan_double,
                                            Representation::Double()));
 
-  LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
+  LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement,
+                      BailoutId::StubEntry());
 
   HValue* zero = graph()->GetConstant0();
   HValue* start = IsFastElementsKind(to_kind) ? zero : array_length;
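Taken together, the graph built above encodes the same fast path that the
removed hand-written stubs implemented on each architecture. A rough C++
sketch of that logic, where Deoptimize, InstanceSizeOf, Allocate, and CopyWord
are illustrative stand-ins for the generated guards and instructions, not V8
API:

    // Fast path of the Hydrogen-built FastCloneShallowObjectStub (sketch).
    Object* FastCloneShallowObject(FixedArray* literals, int literal_index,
                                   int length) {
      Object* boilerplate = literals->get(literal_index);    // HLoadKeyed
      if (boilerplate->IsUndefined()) Deoptimize();          // CheckNotUndefined
      int size = JSObject::kHeaderSize + length * kPointerSize;
      if (InstanceSizeOf(boilerplate) != size) Deoptimize(); // HInstanceSize guard
      Object* clone = Allocate(size);                        // HAllocate
      for (int i = 0; i < size; i += kPointerSize) {
        CopyWord(clone, boilerplate, i);  // HLoadNamedField + HStoreNamedField
      }
      return clone;
    }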
diff --git a/src/code-stubs.h b/src/code-stubs.h
index c06502a..e681a70 100644
--- a/src/code-stubs.h
+++ b/src/code-stubs.h
@@ -264,8 +264,6 @@ struct CodeStubInterfaceDescriptor {
 };
 
 
-class HGraph;
-struct Register;
 class HydrogenCodeStub : public CodeStub {
  public:
   // Retrieve the code for the stub. Generate the code if needed.
@@ -475,7 +473,7 @@ class FastCloneShallowArrayStub : public PlatformCodeStub {
 };
 
 
-class FastCloneShallowObjectStub : public PlatformCodeStub {
+class FastCloneShallowObjectStub : public HydrogenCodeStub {
  public:
   // Maximum number of properties in copied object.
   static const int kMaximumClonedProperties = 6;
@@ -485,13 +483,21 @@ class FastCloneShallowObjectStub : public PlatformCodeStub {
     ASSERT_LE(length_, kMaximumClonedProperties);
   }
 
-  void Generate(MacroAssembler* masm);
+  int length() const { return length_; }
+
+  virtual Handle<Code> GenerateCode();
+
+  virtual void InitializeInterfaceDescriptor(
+      Isolate* isolate,
+      CodeStubInterfaceDescriptor* descriptor);
 
  private:
   int length_;
 
   Major MajorKey() { return FastCloneShallowObject; }
   int MinorKey() { return length_; }
+
+  DISALLOW_COPY_AND_ASSIGN(FastCloneShallowObjectStub);
 };
 
 
@@ -1222,9 +1228,6 @@ class KeyedLoadFastElementStub : public HydrogenCodeStub {
         IsJSArrayBits::encode(is_js_array);
   }
 
-  Major MajorKey() { return KeyedLoadElement; }
-  int MinorKey() { return bit_field_; }
-
   bool is_js_array() const {
     return IsJSArrayBits::decode(bit_field_);
   }
@@ -1244,6 +1247,9 @@ class KeyedLoadFastElementStub : public HydrogenCodeStub {
   class ElementsKindBits: public BitField<ElementsKind, 0, 8> {};
   uint32_t bit_field_;
 
+  Major MajorKey() { return KeyedLoadElement; }
+  int MinorKey() { return bit_field_; }
+
   DISALLOW_COPY_AND_ASSIGN(KeyedLoadFastElementStub);
 };
 
 
@@ -1256,9 +1262,6 @@ class TransitionElementsKindStub : public HydrogenCodeStub {
         ToKindBits::encode(to_kind);
   }
 
-  Major MajorKey() { return TransitionElementsKind; }
-  int MinorKey() { return bit_field_; }
-
   ElementsKind from_kind() const {
     return FromKindBits::decode(bit_field_);
   }
@@ -1278,6 +1281,9 @@ class TransitionElementsKindStub : public HydrogenCodeStub {
   class ToKindBits: public BitField<ElementsKind, 0, 8> {};
   uint32_t bit_field_;
 
+  Major MajorKey() { return TransitionElementsKind; }
+  int MinorKey() { return bit_field_; }
+
   DISALLOW_COPY_AND_ASSIGN(TransitionElementsKindStub);
 };
 
diff --git a/src/deoptimizer.cc b/src/deoptimizer.cc
index cb2f510..98c4ac2 100644
--- a/src/deoptimizer.cc
+++ b/src/deoptimizer.cc
@@ -779,7 +779,7 @@ void Deoptimizer::DoComputeOutputFrames() {
         DoComputeAccessorStubFrame(&iterator, i, true);
         break;
       case Translation::COMPILED_STUB_FRAME:
-        DoCompiledStubFrame(&iterator, i);
+        DoComputeCompiledStubFrame(&iterator, i);
         break;
       case Translation::BEGIN:
       case Translation::REGISTER:
diff --git a/src/deoptimizer.h b/src/deoptimizer.h
index 6682c2e..25a1f22 100644
--- a/src/deoptimizer.h
+++ b/src/deoptimizer.h
@@ -345,8 +345,8 @@ class Deoptimizer : public Malloced {
   void DoComputeAccessorStubFrame(TranslationIterator* iterator,
                                   int frame_index,
                                   bool is_setter_stub_frame);
-  void DoCompiledStubFrame(TranslationIterator* iterator,
-                           int frame_index);
+  void DoComputeCompiledStubFrame(TranslationIterator* iterator,
+                                  int frame_index);
   void DoTranslateCommand(TranslationIterator* iterator,
                           int frame_index,
                           unsigned output_offset);
diff --git a/src/hydrogen-instructions.h b/src/hydrogen-instructions.h
index c99240c..e9ff25d 100644
--- a/src/hydrogen-instructions.h
+++ b/src/hydrogen-instructions.h
@@ -123,6 +123,7 @@ class LChunkBuilder;
   V(In)                                        \
   V(InstanceOf)                                \
   V(InstanceOfKnownGlobal)                     \
+  V(InstanceSize)                              \
   V(InvokeFunction)                            \
   V(IsConstructCallAndBranch)                  \
   V(IsNilAndBranch)                            \
@@ -3948,6 +3949,26 @@ class HInstanceOfKnownGlobal: public HTemplateInstruction<2> {
 };
 
 
+// TODO(mstarzinger): This instruction should be modeled as a load of the map
+// field followed by a load of the instance size field once HLoadNamedField is
+// flexible enough to accommodate byte-field loads.
+class HInstanceSize: public HTemplateInstruction<1> {
+ public:
+  explicit HInstanceSize(HValue* object) {
+    SetOperandAt(0, object);
+    set_representation(Representation::Integer32());
+  }
+
+  HValue* object() { return OperandAt(0); }
+
+  virtual Representation RequiredInputRepresentation(int index) {
+    return Representation::Tagged();
+  }
+
+  DECLARE_CONCRETE_INSTRUCTION(InstanceSize)
+};
+
+
 class HPower: public HTemplateInstruction<2> {
  public:
   static HInstruction* New(Zone* zone, HValue* left, HValue* right);
diff --git a/src/hydrogen.cc b/src/hydrogen.cc
index acb8fd0..73287f4 100644
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -634,16 +634,64 @@ HConstant* HGraph::GetConstantHole() {
 }
 
 
+HGraphBuilder::CheckBuilder::CheckBuilder(HGraphBuilder* builder, BailoutId id)
+    : builder_(builder),
+      finished_(false),
+      id_(id) {
+  HEnvironment* env = builder->environment();
+  failure_block_ = builder->CreateBasicBlock(env->Copy());
+  merge_block_ = builder->CreateBasicBlock(env->Copy());
+}
+
+
+void HGraphBuilder::CheckBuilder::CheckNotUndefined(HValue* value) {
+  HEnvironment* env = builder_->environment();
+  HIsNilAndBranch* compare =
+      new(zone()) HIsNilAndBranch(value, kStrictEquality, kUndefinedValue);
+  HBasicBlock* success_block = builder_->CreateBasicBlock(env->Copy());
+  HBasicBlock* failure_block = builder_->CreateBasicBlock(env->Copy());
+  compare->SetSuccessorAt(0, failure_block);
+  compare->SetSuccessorAt(1, success_block);
+  failure_block->Goto(failure_block_);
+  builder_->current_block()->Finish(compare);
+  builder_->set_current_block(success_block);
+}
+
+
+void HGraphBuilder::CheckBuilder::CheckIntegerEq(HValue* left, HValue* right) {
+  HEnvironment* env = builder_->environment();
+  HCompareIDAndBranch* compare =
+      new(zone()) HCompareIDAndBranch(left, right, Token::EQ);
+  compare->AssumeRepresentation(Representation::Integer32());
+  HBasicBlock* success_block = builder_->CreateBasicBlock(env->Copy());
+  HBasicBlock* failure_block = builder_->CreateBasicBlock(env->Copy());
+  compare->SetSuccessorAt(0, success_block);
+  compare->SetSuccessorAt(1, failure_block);
+  failure_block->Goto(failure_block_);
+  builder_->current_block()->Finish(compare);
+  builder_->set_current_block(success_block);
+}
+
+
+void HGraphBuilder::CheckBuilder::End() {
+  ASSERT(!finished_);
+  builder_->current_block()->Goto(merge_block_);
+  failure_block_->FinishExitWithDeoptimization(HDeoptimize::kUseAll);
+  failure_block_->SetJoinId(id_);
+  builder_->set_current_block(merge_block_);
+  merge_block_->SetJoinId(id_);
+  finished_ = true;
+}
+
+
 HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder, BailoutId id)
     : builder_(builder),
       finished_(false),
       id_(id) {
   HEnvironment* env = builder->environment();
-  HEnvironment* true_env = env->Copy();
-  HEnvironment* false_env = env->Copy();
-  first_true_block_ = builder->CreateBasicBlock(true_env);
+  first_true_block_ = builder->CreateBasicBlock(env->Copy());
   last_true_block_ = NULL;
-  first_false_block_ = builder->CreateBasicBlock(false_env);
+  first_false_block_ = builder->CreateBasicBlock(env->Copy());
 }
 
@@ -1071,7 +1119,8 @@ void HGraphBuilder::BuildCopyElements(HContext* context,
                                       HValue* to_elements,
                                       ElementsKind to_elements_kind,
                                       HValue* length) {
-  LoopBuilder builder(this, context, LoopBuilder::kPostIncrement);
+  LoopBuilder builder(this, context, LoopBuilder::kPostIncrement,
+                      BailoutId::StubEntry());
 
   HValue* key = builder.BeginBody(graph()->GetConstant0(), length, Token::LT);
 
diff --git a/src/hydrogen.h b/src/hydrogen.h
index 0a8a10e..a9829a0 100644
--- a/src/hydrogen.h
+++ b/src/hydrogen.h
@@ -920,10 +920,30 @@ class HGraphBuilder {
   HInstruction* BuildStoreMap(HValue* object, HValue* map, BailoutId id);
   HInstruction* BuildStoreMap(HValue* object, Handle<Map> map, BailoutId id);
 
+  class CheckBuilder {
+   public:
+    CheckBuilder(HGraphBuilder* builder, BailoutId id);
+    ~CheckBuilder() {
+      if (!finished_) End();
+    }
+
+    void CheckNotUndefined(HValue* value);
+    void CheckIntegerEq(HValue* left, HValue* right);
+    void End();
+
+   private:
+    Zone* zone() { return builder_->zone(); }
+
+    HGraphBuilder* builder_;
+    bool finished_;
+    HBasicBlock* failure_block_;
+    HBasicBlock* merge_block_;
+    BailoutId id_;
+  };
+
   class IfBuilder {
    public:
-    IfBuilder(HGraphBuilder* builder,
-              BailoutId id = BailoutId::StubEntry());
+    IfBuilder(HGraphBuilder* builder, BailoutId id);
     ~IfBuilder() {
       if (!finished_) End();
     }
@@ -937,6 +957,8 @@ class HGraphBuilder {
     void End();
 
    private:
+    Zone* zone() { return builder_->zone(); }
+
     HGraphBuilder* builder_;
     bool finished_;
     HBasicBlock* first_true_block_;
@@ -944,8 +966,6 @@ class HGraphBuilder {
     HBasicBlock* first_false_block_;
     HBasicBlock* merge_block_;
     BailoutId id_;
-
-    Zone* zone() { return builder_->zone(); }
   };
 
   class LoopBuilder {
@@ -960,7 +980,7 @@ class HGraphBuilder {
     LoopBuilder(HGraphBuilder* builder,
                 HValue* context,
                 Direction direction,
-                BailoutId id = BailoutId::StubEntry());
+                BailoutId id);
     ~LoopBuilder() {
       ASSERT(finished_);
     }
@@ -973,6 +993,8 @@ class HGraphBuilder {
     void EndBody();
 
    private:
+    Zone* zone() { return builder_->zone(); }
+
     HGraphBuilder* builder_;
     HValue* context_;
     HInstruction* increment_;
@@ -983,8 +1005,6 @@ class HGraphBuilder {
     Direction direction_;
     BailoutId id_;
     bool finished_;
-
-    Zone* zone() { return builder_->zone(); }
   };
 
   HValue* BuildAllocateElements(HContext* context,
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index 0e2db0c..8d6e9fb 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -42,6 +42,18 @@ namespace v8 {
 namespace internal {
 
+void FastCloneShallowObjectStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { eax, ebx, ecx, edx };
+  descriptor->register_param_count_ = 4;
+  descriptor->register_params_ = registers;
+  descriptor->stack_parameter_count_ = NULL;
+  descriptor->deoptimization_handler_ =
+      Runtime::FunctionForId(Runtime::kCreateObjectLiteralShallow)->entry;
+}
+
+
 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
     Isolate* isolate,
     CodeStubInterfaceDescriptor* descriptor) {
@@ -493,52 +505,6 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
 }
 
 
-void FastCloneShallowObjectStub::Generate(MacroAssembler* masm) {
-  // Stack layout on entry:
-  //
-  // [esp + kPointerSize]: object literal flags.
-  // [esp + (2 * kPointerSize)]: constant properties.
-  // [esp + (3 * kPointerSize)]: literal index.
-  // [esp + (4 * kPointerSize)]: literals array.
-
-  // Load boilerplate object into ecx and check if we need to create a
-  // boilerplate.
-  Label slow_case;
-  __ mov(ecx, Operand(esp, 4 * kPointerSize));
-  __ mov(eax, Operand(esp, 3 * kPointerSize));
-  STATIC_ASSERT(kPointerSize == 4);
-  STATIC_ASSERT(kSmiTagSize == 1);
-  STATIC_ASSERT(kSmiTag == 0);
-  __ mov(ecx, FieldOperand(ecx, eax, times_half_pointer_size,
-                           FixedArray::kHeaderSize));
-  Factory* factory = masm->isolate()->factory();
-  __ cmp(ecx, factory->undefined_value());
-  __ j(equal, &slow_case);
-
-  // Check that the boilerplate contains only fast properties and we can
-  // statically determine the instance size.
-  int size = JSObject::kHeaderSize + length_ * kPointerSize;
-  __ mov(eax, FieldOperand(ecx, HeapObject::kMapOffset));
-  __ movzx_b(eax, FieldOperand(eax, Map::kInstanceSizeOffset));
-  __ cmp(eax, Immediate(size >> kPointerSizeLog2));
-  __ j(not_equal, &slow_case);
-
-  // Allocate the JS object and copy header together with all in-object
-  // properties from the boilerplate.
-  __ AllocateInNewSpace(size, eax, ebx, edx, &slow_case, TAG_OBJECT);
-  for (int i = 0; i < size; i += kPointerSize) {
-    __ mov(ebx, FieldOperand(ecx, i));
-    __ mov(FieldOperand(eax, i), ebx);
-  }
-
-  // Return and remove the on-stack parameters.
-  __ ret(4 * kPointerSize);
-
-  __ bind(&slow_case);
-  __ TailCallRuntime(Runtime::kCreateObjectLiteralShallow, 4, 1);
-}
-
-
 // The stub expects its argument on the stack and returns its result in tos_:
 // zero for false, and a non-zero value for true.
 void ToBooleanStub::Generate(MacroAssembler* masm) {
diff --git a/src/ia32/deoptimizer-ia32.cc b/src/ia32/deoptimizer-ia32.cc
index 94b429b..a71e4db 100644
--- a/src/ia32/deoptimizer-ia32.cc
+++ b/src/ia32/deoptimizer-ia32.cc
@@ -559,8 +559,8 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
 }
 
 
-void Deoptimizer::DoCompiledStubFrame(TranslationIterator* iterator,
-                                      int frame_index) {
+void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator,
+                                             int frame_index) {
   //
   //               FROM                                  TO
   //    |          ....           |          |          ....           |
@@ -573,9 +573,9 @@ void Deoptimizer::DoCompiledStubFrame(TranslationIterator* iterator,
   // v  +-------------------------+          +-------------------------|
   //    |  COMPILED_STUB marker   |          |   STUB_FAILURE marker   |
   //    +-------------------------+          +-------------------------+
-  //    |                         |          |  caller args.length_    |
-  //    |          ...            |          +-------------------------+
   //    |                         |          |  caller args.arguments_ |
+  //    |          ...            |          +-------------------------+
+  //    |                         |          |  caller args.length_    |
   //    |-------------------------|<-esp     +-------------------------+
   //                                         |  caller args pointer    |
   //                                         +-------------------------+
@@ -597,58 +597,77 @@ void Deoptimizer::DoCompiledStubFrame(TranslationIterator* iterator,
       isolate_->code_stub_interface_descriptor(major_key);
 
   // The output frame must have room for all pushed register parameters
-  // and the standard stack frame slots.
-  int output_frame_size = StandardFrameConstants::kFixedFrameSize +
-      kPointerSize * descriptor->register_param_count_;
-
-  // Include space for an argument object to the callee and optionally
-  // the space to pass the argument object to the stub failure handler.
-  output_frame_size += sizeof(Arguments) + kPointerSize;
+  // and the standard stack frame slots.  Include space for an argument
+  // object to the callee and optionally the space to pass the argument
+  // object to the stub failure handler.
+  int height_in_bytes = kPointerSize * descriptor->register_param_count_ +
+      sizeof(Arguments) + kPointerSize;
+  int fixed_frame_size = StandardFrameConstants::kFixedFrameSize;
+  int input_frame_size = input_->GetFrameSize();
+  int output_frame_size = height_in_bytes + fixed_frame_size;
+  if (trace_) {
+    PrintF("  translating %s => StubFailureTrampolineStub, height=%d\n",
+           CodeStub::MajorName(static_cast<CodeStub::Major>(major_key), false),
+           height_in_bytes);
+  }
 
+  // The stub failure trampoline is a single frame.
   FrameDescription* output_frame =
-      new(output_frame_size) FrameDescription(output_frame_size, 0);
+      new(output_frame_size) FrameDescription(output_frame_size, NULL);
+  output_frame->SetFrameType(StackFrame::STUB_FAILURE_TRAMPOLINE);
   ASSERT(frame_index == 0);
   output_[frame_index] = output_frame;
-  Code* notify_failure =
-      isolate_->builtins()->builtin(Builtins::kNotifyStubFailure);
-  output_frame->SetState(Smi::FromInt(FullCodeGenerator::NO_REGISTERS));
-  output_frame->SetContinuation(
-      reinterpret_cast<intptr_t>(notify_failure->entry()));
-
-  Code* trampoline = NULL;
-  int extra = descriptor->extra_expression_stack_count_;
-  StubFailureTrampolineStub(extra).FindCodeInCache(&trampoline, isolate_);
-  ASSERT(trampoline != NULL);
-  output_frame->SetPc(reinterpret_cast<intptr_t>(
-      trampoline->instruction_start()));
-  unsigned input_frame_size = input_->GetFrameSize();
-  intptr_t frame_ptr = input_->GetRegister(ebp.code());
 
+  // The top address for the output frame can be computed from the input
+  // frame pointer and the output frame's height.  Subtract space for the
+  // context and function slots.
+  intptr_t top_address = input_->GetRegister(ebp.code()) - (2 * kPointerSize) -
+      height_in_bytes;
+  output_frame->SetTop(top_address);
 
-  // JSFunction continuation
+  // Read caller's PC (JSFunction continuation) from the input frame.
   intptr_t input_frame_offset = input_frame_size - kPointerSize;
   intptr_t output_frame_offset = output_frame_size - kPointerSize;
   intptr_t value = input_->GetFrameSlot(input_frame_offset);
   output_frame->SetFrameSlot(output_frame_offset, value);
+  if (trace_) {
+    PrintF("    0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
+           top_address + output_frame_offset, output_frame_offset, value);
+  }
 
-  // saved frame ptr
+  // Read caller's FP from the input frame, and set this frame's FP.
   input_frame_offset -= kPointerSize;
   value = input_->GetFrameSlot(input_frame_offset);
   output_frame_offset -= kPointerSize;
   output_frame->SetFrameSlot(output_frame_offset, value);
+  intptr_t frame_ptr = input_->GetRegister(ebp.code());
+  output_frame->SetRegister(ebp.code(), frame_ptr);
+  output_frame->SetFp(frame_ptr);
+  if (trace_) {
+    PrintF("    0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
+           top_address + output_frame_offset, output_frame_offset, value);
+  }
 
-  // Restore context
+  // The context can be gotten from the input frame.
   input_frame_offset -= kPointerSize;
   value = input_->GetFrameSlot(input_frame_offset);
   output_frame->SetRegister(esi.code(), value);
   output_frame_offset -= kPointerSize;
   output_frame->SetFrameSlot(output_frame_offset, value);
+  if (trace_) {
+    PrintF("    0x%08x: [top + %d] <- 0x%08x ; context\n",
+           top_address + output_frame_offset, output_frame_offset, value);
+  }
 
-  // Internal frame markers
+  // A marker value is used in place of the function.
   output_frame_offset -= kPointerSize;
   value = reinterpret_cast<intptr_t>(
       Smi::FromInt(StackFrame::STUB_FAILURE_TRAMPOLINE));
   output_frame->SetFrameSlot(output_frame_offset, value);
+  if (trace_) {
+    PrintF("    0x%08x: [top + %d] <- 0x%08x ; function (stub fail sentinel)\n",
+           top_address + output_frame_offset, output_frame_offset, value);
+  }
 
   int caller_arg_count = 0;
   if (descriptor->stack_parameter_count_ != NULL) {
@@ -661,15 +680,27 @@ void Deoptimizer::DoCompiledStubFrame(TranslationIterator* iterator,
   value = frame_ptr + StandardFrameConstants::kCallerSPOffset +
       (caller_arg_count - 1) * kPointerSize;
   output_frame->SetFrameSlot(output_frame_offset, value);
+  if (trace_) {
+    PrintF("    0x%08x: [top + %d] <- 0x%08x ; args.arguments\n",
+           top_address + output_frame_offset, output_frame_offset, value);
+  }
 
-  output_frame->SetFrameSlot(output_frame_offset, value);
   output_frame_offset -= kPointerSize;
-  output_frame->SetFrameSlot(output_frame_offset, caller_arg_count);
+  value = caller_arg_count;
+  output_frame->SetFrameSlot(output_frame_offset, value);
+  if (trace_) {
+    PrintF("    0x%08x: [top + %d] <- 0x%08x ; args.length\n",
+           top_address + output_frame_offset, output_frame_offset, value);
+  }
 
-  value = frame_ptr - (output_frame_size - output_frame_offset) -
-      StandardFrameConstants::kMarkerOffset;
   output_frame_offset -= kPointerSize;
+  value = frame_ptr - (output_frame_size - output_frame_offset) -
+      StandardFrameConstants::kMarkerOffset + kPointerSize;
   output_frame->SetFrameSlot(output_frame_offset, value);
+  if (trace_) {
+    PrintF("    0x%08x: [top + %d] <- 0x%08x ; args*\n",
+           top_address + output_frame_offset, output_frame_offset, value);
+  }
 
   // Copy the register parameters to the failure frame.
   for (int i = 0; i < descriptor->register_param_count_; ++i) {
@@ -677,8 +708,7 @@ void Deoptimizer::DoCompiledStubFrame(TranslationIterator* iterator,
     DoTranslateCommand(iterator, 0, output_frame_offset);
   }
 
-  output_frame->SetRegister(ebp.code(), frame_ptr);
-  output_frame->SetFp(frame_ptr);
+  ASSERT(0 == output_frame_offset);
 
   for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; ++i) {
     double double_value = input_->GetDoubleRegister(i);
@@ -693,6 +723,19 @@ void Deoptimizer::DoCompiledStubFrame(TranslationIterator* iterator,
   }
   output_frame->SetRegister(eax.code(), params);
   output_frame->SetRegister(ebx.code(), handler);
+
+  // Compute this frame's PC, state, and continuation.
+  Code* trampoline = NULL;
+  int extra = descriptor->extra_expression_stack_count_;
+  StubFailureTrampolineStub(extra).FindCodeInCache(&trampoline, isolate_);
+  ASSERT(trampoline != NULL);
+  output_frame->SetPc(reinterpret_cast<intptr_t>(
+      trampoline->instruction_start()));
+  output_frame->SetState(Smi::FromInt(FullCodeGenerator::NO_REGISTERS));
+  Code* notify_failure =
+      isolate_->builtins()->builtin(Builtins::kNotifyStubFailure);
+  output_frame->SetContinuation(
+      reinterpret_cast<intptr_t>(notify_failure->entry()));
 }
 
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index 0318103..42561d3 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -1527,24 +1527,34 @@ void FullCodeGenerator::EmitAccessor(Expression* expression) {
 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
   Comment cmnt(masm_, "[ ObjectLiteral");
   Handle<FixedArray> constant_properties = expr->constant_properties();
-  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
-  __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
-  __ push(Immediate(Smi::FromInt(expr->literal_index())));
-  __ push(Immediate(constant_properties));
   int flags = expr->fast_elements()
       ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
   flags |= expr->has_function()
       ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
-  __ push(Immediate(Smi::FromInt(flags)));
   int properties_count = constant_properties->length() / 2;
   if (expr->depth() > 1) {
+    __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
+    __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
+    __ push(Immediate(Smi::FromInt(expr->literal_index())));
+    __ push(Immediate(constant_properties));
+    __ push(Immediate(Smi::FromInt(flags)));
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
-  } else if (flags != ObjectLiteral::kFastElements ||
+  } else if (Serializer::enabled() || flags != ObjectLiteral::kFastElements ||
       properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
+    __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
+    __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
+    __ push(Immediate(Smi::FromInt(expr->literal_index())));
+    __ push(Immediate(constant_properties));
+    __ push(Immediate(Smi::FromInt(flags)));
     __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
   } else {
+    __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
+    __ mov(eax, FieldOperand(edi, JSFunction::kLiteralsOffset));
+    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
+    __ mov(ecx, Immediate(constant_properties));
+    __ mov(edx, Immediate(Smi::FromInt(flags)));
     FastCloneShallowObjectStub stub(properties_count);
     __ CallStub(&stub);
   }
diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
index fa6d7ad..f681443 100644
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -2655,6 +2655,14 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
 }
 
 
+void LCodeGen::DoInstanceSize(LInstanceSize* instr) {
+  Register object = ToRegister(instr->object());
+  Register result = ToRegister(instr->result());
+  __ mov(result, FieldOperand(object, HeapObject::kMapOffset));
+  __ movzx_b(result, FieldOperand(result, Map::kInstanceSizeOffset));
+}
+
+
 void LCodeGen::DoCmpT(LCmpT* instr) {
   Token::Value op = instr->op();
 
@@ -5459,21 +5467,20 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
   DeferredAllocate* deferred =
       new(zone()) DeferredAllocate(this, instr);
 
-  Register size = ToRegister(instr->size());
   Register result = ToRegister(instr->result());
   Register temp = ToRegister(instr->temp());
 
-  HAllocate* original_instr = instr->hydrogen();
-  if (original_instr->size()->IsConstant()) {
-    UNREACHABLE();
+  // Allocate memory for the object.
+  AllocationFlags flags = TAG_OBJECT;
+  if (instr->hydrogen()->MustAllocateDoubleAligned()) {
+    flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
+  }
+  if (instr->size()->IsConstantOperand()) {
+    int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
+    __ AllocateInNewSpace(size, result, temp, no_reg, deferred->entry(), flags);
   } else {
-    // Allocate memory for the object.
-    AllocationFlags flags = TAG_OBJECT;
-    if (original_instr->MustAllocateDoubleAligned()) {
-      flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
-    }
-    __ AllocateInNewSpace(size, result, temp, no_reg,
-                          deferred->entry(), flags);
+    Register size = ToRegister(instr->size());
+    __ AllocateInNewSpace(size, result, temp, no_reg, deferred->entry(), flags);
   }
 
   __ bind(deferred->exit());
@@ -5731,26 +5738,34 @@ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
   Handle<FixedArray> constant_properties =
       instr->hydrogen()->constant_properties();
 
-  // Set up the parameters to the stub/runtime call.
-  __ PushHeapObject(literals);
-  __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
-  __ push(Immediate(constant_properties));
   int flags = instr->hydrogen()->fast_elements()
       ? ObjectLiteral::kFastElements
       : ObjectLiteral::kNoFlags;
   flags |= instr->hydrogen()->has_function()
       ? ObjectLiteral::kHasFunction
       : ObjectLiteral::kNoFlags;
-  __ push(Immediate(Smi::FromInt(flags)));
 
-  // Pick the right runtime function or stub to call.
+  // Set up the parameters to the stub/runtime call and pick the right
+  // runtime function or stub to call.
   int properties_count = constant_properties->length() / 2;
   if (instr->hydrogen()->depth() > 1) {
+    __ PushHeapObject(literals);
+    __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
+    __ push(Immediate(constant_properties));
+    __ push(Immediate(Smi::FromInt(flags)));
     CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
   } else if (flags != ObjectLiteral::kFastElements ||
       properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
+    __ PushHeapObject(literals);
+    __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
+    __ push(Immediate(constant_properties));
+    __ push(Immediate(Smi::FromInt(flags)));
     CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
   } else {
+    __ LoadHeapObject(eax, literals);
+    __ mov(ebx, Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
+    __ mov(ecx, Immediate(constant_properties));
+    __ mov(edx, Immediate(Smi::FromInt(flags)));
     FastCloneShallowObjectStub stub(properties_count);
     CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
   }
diff --git a/src/ia32/lithium-ia32.cc b/src/ia32/lithium-ia32.cc
index 5d539ed..39079d3 100644
--- a/src/ia32/lithium-ia32.cc
+++ b/src/ia32/lithium-ia32.cc
@@ -1026,6 +1026,12 @@ LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
 }
 
 
+LInstruction* LChunkBuilder::DoInstanceSize(HInstanceSize* instr) {
+  LOperand* object = UseRegisterAtStart(instr->object());
+  return DefineAsRegister(new(zone()) LInstanceSize(object));
+}
+
+
 LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
   LOperand* receiver = UseRegister(instr->receiver());
   LOperand* function = UseRegisterAtStart(instr->function());
diff --git a/src/ia32/lithium-ia32.h b/src/ia32/lithium-ia32.h
index dbe54bd..ff4c477 100644
--- a/src/ia32/lithium-ia32.h
+++ b/src/ia32/lithium-ia32.h
@@ -102,6 +102,7 @@ class LCodeGen;
   V(In)                                        \
   V(InstanceOf)                                \
   V(InstanceOfKnownGlobal)                     \
+  V(InstanceSize)                              \
   V(InstructionGap)                            \
   V(Integer32ToDouble)                         \
   V(Uint32ToDouble)                            \
@@ -969,6 +970,19 @@ class LInstanceOfKnownGlobal: public LTemplateInstruction<1, 2, 1> {
 };
 
 
+class LInstanceSize: public LTemplateInstruction<1, 1, 0> {
+ public:
+  explicit LInstanceSize(LOperand* object) {
+    inputs_[0] = object;
+  }
+
+  LOperand* object() { return inputs_[0]; }
+
+  DECLARE_CONCRETE_INSTRUCTION(InstanceSize, "instance-size")
+  DECLARE_HYDROGEN_ACCESSOR(InstanceSize)
+};
+
+
 class LBoundsCheck: public LTemplateInstruction<0, 2, 0> {
  public:
  LBoundsCheck(LOperand* index, LOperand* length) {
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index cd4fa07..7c5ae26 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -39,6 +39,18 @@ namespace v8 {
 namespace internal {
 
 
+void FastCloneShallowObjectStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { rax, rbx, rcx, rdx };
+  descriptor->register_param_count_ = 4;
+  descriptor->register_params_ = registers;
+  descriptor->stack_parameter_count_ = NULL;
+  descriptor->deoptimization_handler_ =
+      Runtime::FunctionForId(Runtime::kCreateObjectLiteralShallow)->entry;
+}
+
+
 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
     Isolate* isolate,
     CodeStubInterfaceDescriptor* descriptor) {
@@ -484,49 +496,6 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
 }
 
 
-void FastCloneShallowObjectStub::Generate(MacroAssembler* masm) {
-  // Stack layout on entry:
-  //
-  // [rsp + kPointerSize]: object literal flags.
-  // [rsp + (2 * kPointerSize)]: constant properties.
-  // [rsp + (3 * kPointerSize)]: literal index.
-  // [rsp + (4 * kPointerSize)]: literals array.
-
-  // Load boilerplate object into rcx and check if we need to create a
-  // boilerplate.
-  Label slow_case;
-  __ movq(rcx, Operand(rsp, 4 * kPointerSize));
-  __ movq(rax, Operand(rsp, 3 * kPointerSize));
-  SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
-  __ movq(rcx,
-          FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize));
-  __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
-  __ j(equal, &slow_case);
-
-  // Check that the boilerplate contains only fast properties and we can
-  // statically determine the instance size.
-  int size = JSObject::kHeaderSize + length_ * kPointerSize;
-  __ movq(rax, FieldOperand(rcx, HeapObject::kMapOffset));
-  __ movzxbq(rax, FieldOperand(rax, Map::kInstanceSizeOffset));
-  __ cmpq(rax, Immediate(size >> kPointerSizeLog2));
-  __ j(not_equal, &slow_case);
-
-  // Allocate the JS object and copy header together with all in-object
-  // properties from the boilerplate.
-  __ AllocateInNewSpace(size, rax, rbx, rdx, &slow_case, TAG_OBJECT);
-  for (int i = 0; i < size; i += kPointerSize) {
-    __ movq(rbx, FieldOperand(rcx, i));
-    __ movq(FieldOperand(rax, i), rbx);
-  }
-
-  // Return and remove the on-stack parameters.
-  __ ret(4 * kPointerSize);
-
-  __ bind(&slow_case);
-  __ TailCallRuntime(Runtime::kCreateObjectLiteralShallow, 4, 1);
-}
-
-
 // The stub expects its argument on the stack and returns its result in tos_:
 // zero for false, and a non-zero value for true.
 void ToBooleanStub::Generate(MacroAssembler* masm) {
diff --git a/src/x64/deoptimizer-x64.cc b/src/x64/deoptimizer-x64.cc
index 4d199c5..a6c9807 100644
--- a/src/x64/deoptimizer-x64.cc
+++ b/src/x64/deoptimizer-x64.cc
@@ -448,8 +448,8 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
 }
 
 
-void Deoptimizer::DoCompiledStubFrame(TranslationIterator* iterator,
-                                      int frame_index) {
+void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator,
+                                             int frame_index) {
   //
   //               FROM                                  TO
   //    |          ....           |          |          ....           |
@@ -462,9 +462,9 @@ void Deoptimizer::DoCompiledStubFrame(TranslationIterator* iterator,
   // v  +-------------------------+          +-------------------------|
   //    |  COMPILED_STUB marker   |          |   STUB_FAILURE marker   |
   //    +-------------------------+          +-------------------------+
-  //    |                         |          |  caller args.length_    |
-  //    |          ...            |          +-------------------------+
   //    |                         |          |  caller args.arguments_ |
+  //    |          ...            |          +-------------------------+
+  //    |                         |          |  caller args.length_    |
   //    |-------------------------|<-rsp     +-------------------------+
   //                                         |  caller args pointer    |
   //                                         +-------------------------+
@@ -486,58 +486,81 @@ void Deoptimizer::DoCompiledStubFrame(TranslationIterator* iterator,
       isolate_->code_stub_interface_descriptor(major_key);
 
   // The output frame must have room for all pushed register parameters
-  // and the standard stack frame slots.
-  int output_frame_size = StandardFrameConstants::kFixedFrameSize +
-      kPointerSize * descriptor->register_param_count_;
-
-  // Include space for an argument object to the callee and optionally
-  // the space to pass the argument object to the stub failure handler.
-  output_frame_size += sizeof(Arguments) + kPointerSize;
+  // and the standard stack frame slots.  Include space for an argument
+  // object to the callee and optionally the space to pass the argument
+  // object to the stub failure handler.
+  int height_in_bytes = kPointerSize * descriptor->register_param_count_ +
+      sizeof(Arguments) + kPointerSize;
+  int fixed_frame_size = StandardFrameConstants::kFixedFrameSize;
+  int input_frame_size = input_->GetFrameSize();
+  int output_frame_size = height_in_bytes + fixed_frame_size;
+  if (trace_) {
+    PrintF("  translating %s => StubFailureTrampolineStub, height=%d\n",
+           CodeStub::MajorName(static_cast<CodeStub::Major>(major_key), false),
+           height_in_bytes);
+  }
 
+  // The stub failure trampoline is a single frame.
   FrameDescription* output_frame =
-      new(output_frame_size) FrameDescription(output_frame_size, 0);
+      new(output_frame_size) FrameDescription(output_frame_size, NULL);
+  output_frame->SetFrameType(StackFrame::STUB_FAILURE_TRAMPOLINE);
   ASSERT(frame_index == 0);
   output_[frame_index] = output_frame;
-  Code* notify_failure =
-      isolate_->builtins()->builtin(Builtins::kNotifyStubFailure);
-  output_frame->SetState(Smi::FromInt(FullCodeGenerator::NO_REGISTERS));
-  output_frame->SetContinuation(
-      reinterpret_cast<intptr_t>(notify_failure->entry()));
-
-  Code* trampoline = NULL;
-  int extra = descriptor->extra_expression_stack_count_;
-  StubFailureTrampolineStub(extra).FindCodeInCache(&trampoline, isolate_);
-  ASSERT(trampoline != NULL);
-  output_frame->SetPc(reinterpret_cast<intptr_t>(
-      trampoline->instruction_start()));
-  unsigned input_frame_size = input_->GetFrameSize();
-  intptr_t frame_ptr = input_->GetRegister(rbp.code());
 
+  // The top address for the output frame can be computed from the input
+  // frame pointer and the output frame's height.  Subtract space for the
+  // context and function slots.
+  intptr_t top_address = input_->GetRegister(rbp.code()) - (2 * kPointerSize) -
+      height_in_bytes;
+  output_frame->SetTop(top_address);
 
-  // JSFunction continuation
+  // Read caller's PC (JSFunction continuation) from the input frame.
   unsigned input_frame_offset = input_frame_size - kPointerSize;
   unsigned output_frame_offset = output_frame_size - kPointerSize;
   intptr_t value = input_->GetFrameSlot(input_frame_offset);
   output_frame->SetFrameSlot(output_frame_offset, value);
+  if (trace_) {
+    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+           V8PRIxPTR " ; caller's pc\n",
+           top_address + output_frame_offset, output_frame_offset, value);
+  }
 
-  // saved frame ptr
+  // Read caller's FP from the input frame, and set this frame's FP.
   input_frame_offset -= kPointerSize;
   value = input_->GetFrameSlot(input_frame_offset);
   output_frame_offset -= kPointerSize;
   output_frame->SetFrameSlot(output_frame_offset, value);
+  intptr_t frame_ptr = input_->GetRegister(rbp.code());
+  output_frame->SetRegister(rbp.code(), frame_ptr);
+  output_frame->SetFp(frame_ptr);
+  if (trace_) {
+    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+           V8PRIxPTR " ; caller's fp\n",
+           top_address + output_frame_offset, output_frame_offset, value);
+  }
 
-  // Restore context
+  // The context can be gotten from the input frame.
   input_frame_offset -= kPointerSize;
   value = input_->GetFrameSlot(input_frame_offset);
   output_frame->SetRegister(rsi.code(), value);
   output_frame_offset -= kPointerSize;
   output_frame->SetFrameSlot(output_frame_offset, value);
+  if (trace_) {
+    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+           V8PRIxPTR " ; context\n",
+           top_address + output_frame_offset, output_frame_offset, value);
+  }
 
-  // Internal frame markers
+  // A marker value is used in place of the function.
   output_frame_offset -= kPointerSize;
   value = reinterpret_cast<intptr_t>(
       Smi::FromInt(StackFrame::STUB_FAILURE_TRAMPOLINE));
   output_frame->SetFrameSlot(output_frame_offset, value);
+  if (trace_) {
+    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+           V8PRIxPTR " ; function (stub failure sentinel)\n",
+           top_address + output_frame_offset, output_frame_offset, value);
+  }
 
   intptr_t caller_arg_count = 0;
   if (descriptor->stack_parameter_count_ != NULL) {
@@ -550,15 +573,30 @@ void Deoptimizer::DoCompiledStubFrame(TranslationIterator* iterator,
   value = frame_ptr + StandardFrameConstants::kCallerSPOffset +
       (caller_arg_count - 1) * kPointerSize;
   output_frame->SetFrameSlot(output_frame_offset, value);
+  if (trace_) {
+    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+           V8PRIxPTR " ; args.arguments\n",
+           top_address + output_frame_offset, output_frame_offset, value);
+  }
 
-  output_frame->SetFrameSlot(output_frame_offset, value);
   output_frame_offset -= kPointerSize;
-  output_frame->SetFrameSlot(output_frame_offset, caller_arg_count);
+  value = caller_arg_count;
+  output_frame->SetFrameSlot(output_frame_offset, value);
+  if (trace_) {
+    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+           V8PRIxPTR " ; args.length\n",
+           top_address + output_frame_offset, output_frame_offset, value);
+  }
 
-  value = frame_ptr - (output_frame_size - output_frame_offset) -
-      StandardFrameConstants::kMarkerOffset;
   output_frame_offset -= kPointerSize;
+  value = frame_ptr - (output_frame_size - output_frame_offset) -
+      StandardFrameConstants::kMarkerOffset + kPointerSize;
   output_frame->SetFrameSlot(output_frame_offset, value);
+  if (trace_) {
+    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+           V8PRIxPTR " ; args*\n",
+           top_address + output_frame_offset, output_frame_offset, value);
+  }
 
   // Copy the register parameters to the failure frame.
   for (int i = 0; i < descriptor->register_param_count_; ++i) {
@@ -566,8 +604,7 @@ void Deoptimizer::DoCompiledStubFrame(TranslationIterator* iterator,
     DoTranslateCommand(iterator, 0, output_frame_offset);
   }
 
-  output_frame->SetRegister(rbp.code(), frame_ptr);
-  output_frame->SetFp(frame_ptr);
+  ASSERT(0 == output_frame_offset);
 
   for (int i = 0; i < XMMRegister::NumAllocatableRegisters(); ++i) {
     double double_value = input_->GetDoubleRegister(i);
@@ -582,6 +619,19 @@ void Deoptimizer::DoCompiledStubFrame(TranslationIterator* iterator,
   }
   output_frame->SetRegister(rax.code(), params);
   output_frame->SetRegister(rbx.code(), handler);
+
+  // Compute this frame's PC, state, and continuation.
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index ecc92c7..8121cb2 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -1552,24 +1552,34 @@ void FullCodeGenerator::EmitAccessor(Expression* expression) {
 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
   Comment cmnt(masm_, "[ ObjectLiteral");
   Handle<FixedArray> constant_properties = expr->constant_properties();
-  __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
-  __ push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
-  __ Push(Smi::FromInt(expr->literal_index()));
-  __ Push(constant_properties);
   int flags = expr->fast_elements()
       ? ObjectLiteral::kFastElements
       : ObjectLiteral::kNoFlags;
   flags |= expr->has_function()
       ? ObjectLiteral::kHasFunction
       : ObjectLiteral::kNoFlags;
-  __ Push(Smi::FromInt(flags));
   int properties_count = constant_properties->length() / 2;
   if (expr->depth() > 1) {
+    __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
+    __ push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
+    __ Push(Smi::FromInt(expr->literal_index()));
+    __ Push(constant_properties);
+    __ Push(Smi::FromInt(flags));
     __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
-  } else if (flags != ObjectLiteral::kFastElements ||
+  } else if (Serializer::enabled() || flags != ObjectLiteral::kFastElements ||
       properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
+    __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
+    __ push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
+    __ Push(Smi::FromInt(expr->literal_index()));
+    __ Push(constant_properties);
+    __ Push(Smi::FromInt(flags));
     __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
   } else {
+    __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
+    __ movq(rax, FieldOperand(rdi, JSFunction::kLiteralsOffset));
+    __ Move(rbx, Smi::FromInt(expr->literal_index()));
+    __ Move(rcx, constant_properties);
+    __ Move(rdx, Smi::FromInt(flags));
     FastCloneShallowObjectStub stub(properties_count);
     __ CallStub(&stub);
   }
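The restructured VisitObjectLiteral above is a three-way dispatch, and only the stub path passes its inputs in registers. A plain-C++ restatement of that choice (illustrative only: the enum, the threshold constant, and ChooseObjectLiteralPath are invented names, not V8 declarations):

// Sketch of the call-site selection logic in VisitObjectLiteral above.
#include <cstdio>

enum Path { kCreateObjectLiteral, kCreateObjectLiteralShallow, kCloneStub };

// Stand-in for FastCloneShallowObjectStub::kMaximumClonedProperties.
static const int kMaximumClonedProperties = 6;

Path ChooseObjectLiteralPath(int depth, bool serializer_enabled,
                             bool fast_elements, int properties_count) {
  if (depth > 1) {
    // Nested literals need the full runtime path.
    return kCreateObjectLiteral;
  }
  if (serializer_enabled || !fast_elements ||
      properties_count > kMaximumClonedProperties) {
    // Shallow but not stub-clonable: call the shallow runtime function.
    return kCreateObjectLiteralShallow;
  }
  // Shallow and small: the Crankshaft-compiled stub, with its four inputs
  // passed in registers (rax, rbx, rcx, rdx on x64) instead of on the stack.
  return kCloneStub;
}

int main() {
  std::printf("%d\n", ChooseObjectLiteralPath(1, false, true, 3));  // kCloneStub
}

Moving the pushes into the two runtime branches is what makes the register-based stub branch possible: the stub's interface descriptor declares four register parameters, so nothing may be left on the stack for it.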
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index 0cfa336..99d79fb 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -2496,6 +2496,14 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
 }


+void LCodeGen::DoInstanceSize(LInstanceSize* instr) {
+  Register object = ToRegister(instr->object());
+  Register result = ToRegister(instr->result());
+  __ movq(result, FieldOperand(object, HeapObject::kMapOffset));
+  __ movzxbq(result, FieldOperand(result, Map::kInstanceSizeOffset));
+}
+
+
 void LCodeGen::DoCmpT(LCmpT* instr) {
   Token::Value op = instr->op();
@@ -5051,21 +5059,20 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
   DeferredAllocate* deferred =
       new(zone()) DeferredAllocate(this, instr);

-  Register size = ToRegister(instr->size());
   Register result = ToRegister(instr->result());
   Register temp = ToRegister(instr->temp());

-  HAllocate* original_instr = instr->hydrogen();
-  if (original_instr->size()->IsConstant()) {
-    UNREACHABLE();
+  // Allocate memory for the object.
+  AllocationFlags flags = TAG_OBJECT;
+  if (instr->hydrogen()->MustAllocateDoubleAligned()) {
+    flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
+  }
+  if (instr->size()->IsConstantOperand()) {
+    int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
+    __ AllocateInNewSpace(size, result, temp, no_reg, deferred->entry(), flags);
   } else {
-    // Allocate memory for the object.
-    AllocationFlags flags = TAG_OBJECT;
-    if (original_instr->MustAllocateDoubleAligned()) {
-      flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
-    }
-    __ AllocateInNewSpace(size, result, temp, no_reg,
-                          deferred->entry(), flags);
+    Register size = ToRegister(instr->size());
+    __ AllocateInNewSpace(size, result, temp, no_reg, deferred->entry(), flags);
   }

   __ bind(deferred->exit());
@@ -5304,26 +5311,34 @@ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
   Handle<FixedArray> constant_properties =
       instr->hydrogen()->constant_properties();

-  // Set up the parameters to the stub/runtime call.
-  __ PushHeapObject(literals);
-  __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
-  __ Push(constant_properties);
   int flags = instr->hydrogen()->fast_elements()
       ? ObjectLiteral::kFastElements
       : ObjectLiteral::kNoFlags;
   flags |= instr->hydrogen()->has_function()
       ? ObjectLiteral::kHasFunction
       : ObjectLiteral::kNoFlags;
-  __ Push(Smi::FromInt(flags));

-  // Pick the right runtime function or stub to call.
+  // Set up the parameters to the stub/runtime call and pick the right
+  // runtime function or stub to call.
   int properties_count = constant_properties->length() / 2;
   if (instr->hydrogen()->depth() > 1) {
+    __ PushHeapObject(literals);
+    __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
+    __ Push(constant_properties);
+    __ Push(Smi::FromInt(flags));
     CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
   } else if (flags != ObjectLiteral::kFastElements ||
       properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
+    __ PushHeapObject(literals);
+    __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
+    __ Push(constant_properties);
+    __ Push(Smi::FromInt(flags));
     CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
   } else {
+    __ LoadHeapObject(rax, literals);
+    __ Move(rbx, Smi::FromInt(instr->hydrogen()->literal_index()));
+    __ Move(rcx, constant_properties);
+    __ Move(rdx, Smi::FromInt(flags));
     FastCloneShallowObjectStub stub(properties_count);
     CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
   }
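The DoAllocate change above turns a formerly unreachable constant-size case into the fast path: a compile-time-constant size becomes an immediate operand, and only unknown sizes consume a register. A minimal sketch of that dispatch (MockMasm and its two methods are invented for illustration; they stand in for the two AllocateInNewSpace overloads):

// Sketch of the constant-vs-register allocation dispatch in DoAllocate.
#include <cstdio>

enum AllocationFlags { TAG_OBJECT = 1 << 0, DOUBLE_ALIGNMENT = 1 << 1 };

struct MockMasm {
  void AllocateConstant(int size, AllocationFlags flags) {
    std::printf("allocate immediate %d bytes, flags=%d\n", size, flags);
  }
  void AllocateDynamic(const char* size_reg, AllocationFlags flags) {
    std::printf("allocate from register %s, flags=%d\n", size_reg, flags);
  }
};

void EmitAllocate(MockMasm* masm, bool size_is_constant, int constant_size,
                  bool must_double_align) {
  // Combine the flags the same way the patch does.
  AllocationFlags flags = TAG_OBJECT;
  if (must_double_align) {
    flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
  }
  if (size_is_constant) {
    masm->AllocateConstant(constant_size, flags);  // fast immediate path
  } else {
    masm->AllocateDynamic("rbx", flags);  // size computed at run time
  }
}

int main() {
  MockMasm masm;
  EmitAllocate(&masm, true, 64, false);
  EmitAllocate(&masm, false, 0, true);
}

The constant path is presumably what the stub's clone sequence relies on: for a shallow object clone, the boilerplate's instance size is already known when the allocation is emitted.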
diff --git a/src/x64/lithium-x64.cc b/src/x64/lithium-x64.cc
index 57de0c8..1075b2e 100644
--- a/src/x64/lithium-x64.cc
+++ b/src/x64/lithium-x64.cc
@@ -979,6 +979,12 @@ LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
 }


+LInstruction* LChunkBuilder::DoInstanceSize(HInstanceSize* instr) {
+  LOperand* object = UseRegisterAtStart(instr->object());
+  return DefineAsRegister(new(zone()) LInstanceSize(object));
+}
+
+
 LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
   LOperand* receiver = UseRegister(instr->receiver());
   LOperand* function = UseRegisterAtStart(instr->function());
diff --git a/src/x64/lithium-x64.h b/src/x64/lithium-x64.h
index 061a305..0744ed5 100644
--- a/src/x64/lithium-x64.h
+++ b/src/x64/lithium-x64.h
@@ -109,6 +109,7 @@ class LCodeGen;
   V(In)                                         \
   V(InstanceOf)                                 \
   V(InstanceOfKnownGlobal)                      \
+  V(InstanceSize)                               \
   V(InstructionGap)                             \
   V(Integer32ToDouble)                          \
   V(Uint32ToDouble)                             \
@@ -954,6 +955,19 @@ class LInstanceOfKnownGlobal: public LTemplateInstruction<1, 1, 1> {
 };


+class LInstanceSize: public LTemplateInstruction<1, 1, 0> {
+ public:
+  explicit LInstanceSize(LOperand* object) {
+    inputs_[0] = object;
+  }
+
+  LOperand* object() { return inputs_[0]; }
+
+  DECLARE_CONCRETE_INSTRUCTION(InstanceSize, "instance-size")
+  DECLARE_HYDROGEN_ACCESSOR(InstanceSize)
+};
+
+
 class LBoundsCheck: public LTemplateInstruction<0, 2, 0> {
  public:
   LBoundsCheck(LOperand* index, LOperand* length) {