From 516ed9fa90ecf8a802caaf32d80a8d41add72aa8 Mon Sep 17 00:00:00 2001 From: "mvstanton@chromium.org" Date: Mon, 10 Feb 2014 21:38:17 +0000 Subject: [PATCH] Adding a type vector to replace type cells. R=bmeurer@chromium.org Review URL: https://codereview.chromium.org/137403009 git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@19244 ce2b1a6d-e550-0410-aec6-3dcde31c8c00 --- src/arm/code-stubs-arm.cc | 107 +++++++++++++++++++-------------- src/arm/debug-arm.cc | 10 ++-- src/arm/full-codegen-arm.cc | 36 ++++++----- src/ast.cc | 35 +++++++++-- src/ast.h | 79 +++++++++++++++++++++--- src/code-stubs-hydrogen.cc | 14 ++--- src/compiler.cc | 11 +++- src/compiler.h | 6 +- src/feedback-slots.h | 110 ++++++++++++++++++++++++++++++++++ src/full-codegen.cc | 33 ++++------ src/full-codegen.h | 24 ++++---- src/heap.cc | 3 +- src/ia32/code-stubs-ia32.cc | 81 ++++++++++++++++--------- src/ia32/debug-ia32.cc | 12 ++-- src/ia32/full-codegen-ia32.cc | 39 +++++++----- src/objects-debug.cc | 2 +- src/objects-inl.h | 43 ++----------- src/objects-printer.cc | 4 +- src/objects-visiting-inl.h | 2 +- src/objects.cc | 18 +++--- src/objects.h | 81 +++++++++---------------- src/parser.cc | 4 ++ src/runtime.cc | 2 +- src/scopes.cc | 2 +- src/type-info.cc | 73 ++++++++++------------ src/type-info.h | 18 ++++-- src/typing.cc | 7 ++- src/x64/code-stubs-x64.cc | 89 ++++++++++++++++++--------- src/x64/debug-x64.cc | 12 ++-- src/x64/full-codegen-x64.cc | 37 +++++++----- test/cctest/test-heap.cc | 20 ++++--- tools/gyp/v8.gyp | 1 + 32 files changed, 622 insertions(+), 393 deletions(-) create mode 100644 src/feedback-slots.h diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc index 44de7aabc..282a6d875 100644 --- a/src/arm/code-stubs-arm.cc +++ b/src/arm/code-stubs-arm.cc @@ -105,8 +105,8 @@ void FastCloneShallowObjectStub::InitializeInterfaceDescriptor( void CreateAllocationSiteStub::InitializeInterfaceDescriptor( Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - 
static Register registers[] = { r2 }; - descriptor->register_param_count_ = 1; + static Register registers[] = { r2, r3 }; + descriptor->register_param_count_ = 2; descriptor->register_params_ = registers; descriptor->deoptimization_handler_ = NULL; } @@ -3004,38 +3004,40 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { static void GenerateRecordCallTarget(MacroAssembler* masm) { - // Cache the called function in a global property cell. Cache states + // Cache the called function in a feedback vector slot. Cache states // are uninitialized, monomorphic (indicated by a JSFunction), and // megamorphic. // r0 : number of arguments to the construct function // r1 : the function to call - // r2 : cache cell for call target + // r2 : Feedback vector + // r3 : slot in feedback vector (Smi) Label initialize, done, miss, megamorphic, not_array_function; - ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()), + ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()), masm->isolate()->heap()->undefined_value()); - ASSERT_EQ(*TypeFeedbackCells::UninitializedSentinel(masm->isolate()), + ASSERT_EQ(*TypeFeedbackInfo::UninitializedSentinel(masm->isolate()), masm->isolate()->heap()->the_hole_value()); - // Load the cache state into r3. - __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset)); + // Load the cache state into r4. + __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); + __ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize)); // A monomorphic cache hit or an already megamorphic state: invoke the // function without changing the state. - __ cmp(r3, r1); + __ cmp(r4, r1); __ b(eq, &done); // If we came here, we need to see if we are the array function. // If we didn't have a matching function, and we didn't find the megamorph - // sentinel, then we have in the cell either some other function or an + // sentinel, then we have in the slot either some other function or an // AllocationSite. Do a map check on the object in ecx. 
- __ ldr(r5, FieldMemOperand(r3, 0)); + __ ldr(r5, FieldMemOperand(r4, 0)); __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex); __ b(ne, &miss); // Make sure the function is the Array() function - __ LoadArrayFunction(r3); - __ cmp(r1, r3); + __ LoadArrayFunction(r4); + __ cmp(r1, r4); __ b(ne, &megamorphic); __ jmp(&done); @@ -3043,43 +3045,51 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { // A monomorphic miss (i.e, here the cache is not uninitialized) goes // megamorphic. - __ CompareRoot(r3, Heap::kTheHoleValueRootIndex); + __ CompareRoot(r4, Heap::kTheHoleValueRootIndex); __ b(eq, &initialize); // MegamorphicSentinel is an immortal immovable object (undefined) so no // write-barrier is needed. __ bind(&megamorphic); + __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); - __ str(ip, FieldMemOperand(r2, Cell::kValueOffset)); + __ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize)); __ jmp(&done); // An uninitialized cache is patched with the function or sentinel to // indicate the ElementsKind if function is the Array constructor. __ bind(&initialize); // Make sure the function is the Array() function - __ LoadArrayFunction(r3); - __ cmp(r1, r3); + __ LoadArrayFunction(r4); + __ cmp(r1, r4); __ b(ne, ¬_array_function); // The target function is the Array constructor, - // Create an AllocationSite if we don't already have it, store it in the cell + // Create an AllocationSite if we don't already have it, store it in the slot. { FrameScope scope(masm, StackFrame::INTERNAL); // Arguments register must be smi-tagged to call out. __ SmiTag(r0); - __ Push(r2, r1, r0); + __ Push(r3, r2, r1, r0); CreateAllocationSiteStub create_stub; __ CallStub(&create_stub); - __ Pop(r2, r1, r0); + __ Pop(r3, r2, r1, r0); __ SmiUntag(r0); } __ b(&done); __ bind(¬_array_function); - __ str(r1, FieldMemOperand(r2, Cell::kValueOffset)); - // No need for a write barrier here - cells are rescanned. 
+ + __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); + __ add(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); + __ str(r1, MemOperand(r4, 0)); + + __ Push(r4, r2, r1); + __ RecordWrite(r2, r4, r1, kLRHasNotBeenSaved, kDontSaveFPRegs, + EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); + __ Pop(r4, r2, r1); __ bind(&done); } @@ -3087,7 +3097,8 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { void CallFunctionStub::Generate(MacroAssembler* masm) { // r1 : the function to call - // r2 : cache cell for call target + // r2 : feedback vector + // r3 : (only if r2 is not undefined) slot in feedback vector (Smi) Label slow, non_function, wrap, cont; if (NeedsChecks()) { @@ -3096,7 +3107,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { __ JumpIfSmi(r1, &non_function); // Goto slow case if we do not have a function. - __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE); + __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE); __ b(ne, &slow); if (RecordCallTarget()) { @@ -3144,13 +3155,14 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { // If there is a call target cache, mark it megamorphic in the // non-function case. MegamorphicSentinel is an immortal immovable // object (undefined) so no write barrier is needed. - ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()), + ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()), masm->isolate()->heap()->undefined_value()); + __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3)); __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); - __ str(ip, FieldMemOperand(r2, Cell::kValueOffset)); + __ str(ip, FieldMemOperand(r5, FixedArray::kHeaderSize)); } // Check for function proxy. 
- __ cmp(r3, Operand(JS_FUNCTION_PROXY_TYPE)); + __ cmp(r4, Operand(JS_FUNCTION_PROXY_TYPE)); __ b(ne, &non_function); __ push(r1); // put proxy as additional argument __ mov(r0, Operand(argc_ + 1, RelocInfo::NONE32)); @@ -3190,13 +3202,14 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { void CallConstructStub::Generate(MacroAssembler* masm) { // r0 : number of arguments // r1 : the function to call - // r2 : cache cell for call target + // r2 : feedback vector + // r3 : (only if r2 is not undefined) slot in feedback vector (Smi) Label slow, non_function_call; // Check that the function is not a smi. __ JumpIfSmi(r1, &non_function_call); // Check that the function is a JSFunction. - __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE); + __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE); __ b(ne, &slow); if (RecordCallTarget()) { @@ -3204,7 +3217,7 @@ void CallConstructStub::Generate(MacroAssembler* masm) { } // Jump to the function-specific construct stub. - Register jmp_reg = r3; + Register jmp_reg = r4; __ ldr(jmp_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); __ ldr(jmp_reg, FieldMemOperand(jmp_reg, SharedFunctionInfo::kConstructStubOffset)); @@ -3212,10 +3225,10 @@ void CallConstructStub::Generate(MacroAssembler* masm) { // r0: number of arguments // r1: called object - // r3: object type + // r4: object type Label do_call; __ bind(&slow); - __ cmp(r3, Operand(JS_FUNCTION_PROXY_TYPE)); + __ cmp(r4, Operand(JS_FUNCTION_PROXY_TYPE)); __ b(ne, &non_function_call); __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR); __ jmp(&do_call); @@ -5175,7 +5188,7 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm, __ TailCallStub(&stub); } else if (mode == DONT_OVERRIDE) { // We are going to create a holey array, but our kind is non-holey. - // Fix kind and retry (only if we have an allocation site in the cell). + // Fix kind and retry (only if we have an allocation site in the slot). 
__ add(r3, r3, Operand(1)); if (FLAG_debug_code) { @@ -5283,7 +5296,8 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) { // ----------- S t a t e ------------- // -- r0 : argc (only if argument_count_ == ANY) // -- r1 : constructor - // -- r2 : type info cell + // -- r2 : feedback vector (fixed array or undefined) + // -- r3 : slot index (if r2 is fixed array) // -- sp[0] : return address // -- sp[4] : last argument // ----------------------------------- @@ -5292,21 +5306,25 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) { // builtin Array functions which always have maps. // Initial map for the builtin Array function should be a map. - __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); + __ ldr(r4, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); // Will both indicate a NULL and a Smi. - __ tst(r3, Operand(kSmiTagMask)); + __ tst(r4, Operand(kSmiTagMask)); __ Assert(ne, kUnexpectedInitialMapForArrayFunction); - __ CompareObjectType(r3, r3, r4, MAP_TYPE); + __ CompareObjectType(r4, r4, r5, MAP_TYPE); __ Assert(eq, kUnexpectedInitialMapForArrayFunction); - // We should either have undefined in ebx or a valid cell + // We should either have undefined in ebx or a valid fixed array. Label okay_here; - Handle cell_map = masm->isolate()->factory()->cell_map(); + Handle fixed_array_map = masm->isolate()->factory()->fixed_array_map(); __ CompareRoot(r2, Heap::kUndefinedValueRootIndex); __ b(eq, &okay_here); - __ ldr(r3, FieldMemOperand(r2, 0)); - __ cmp(r3, Operand(cell_map)); - __ Assert(eq, kExpectedPropertyCellInRegisterEbx); + __ ldr(r4, FieldMemOperand(r2, 0)); + __ cmp(r4, Operand(fixed_array_map)); + __ Assert(eq, kExpectedFixedArrayInRegisterR2); + + // r3 should be a smi if we don't have undefined in r2 + __ AssertSmi(r3); + __ bind(&okay_here); } @@ -5314,9 +5332,10 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) { // Get the elements kind and case on that. 
__ CompareRoot(r2, Heap::kUndefinedValueRootIndex); __ b(eq, &no_info); - __ ldr(r2, FieldMemOperand(r2, Cell::kValueOffset)); + __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3)); + __ ldr(r2, FieldMemOperand(r2, FixedArray::kHeaderSize)); - // If the type cell is undefined, or contains anything other than an + // If the feedback vector is undefined, or contains anything other than an // AllocationSite, call an array constructor that doesn't use AllocationSites. __ ldr(r4, FieldMemOperand(r2, 0)); __ CompareRoot(r4, Heap::kAllocationSiteMapRootIndex); diff --git a/src/arm/debug-arm.cc b/src/arm/debug-arm.cc index efd11069b..9990bccdc 100644 --- a/src/arm/debug-arm.cc +++ b/src/arm/debug-arm.cc @@ -265,9 +265,10 @@ void Debug::GenerateCallFunctionStubRecordDebugBreak(MacroAssembler* masm) { // Register state for CallFunctionStub (from code-stubs-arm.cc). // ----------- S t a t e ------------- // -- r1 : function - // -- r2 : cache cell for call target + // -- r2 : feedback array + // -- r3 : slot in feedback array // ----------------------------------- - Generate_DebugBreakCallHelper(masm, r1.bit() | r2.bit(), 0); + Generate_DebugBreakCallHelper(masm, r1.bit() | r2.bit() | r3.bit(), 0); } @@ -286,9 +287,10 @@ void Debug::GenerateCallConstructStubRecordDebugBreak(MacroAssembler* masm) { // ----------- S t a t e ------------- // -- r0 : number of arguments (not smi) // -- r1 : constructor function - // -- r2 : cache cell for call target + // -- r2 : feedback array + // -- r3 : feedback slot (smi) // ----------------------------------- - Generate_DebugBreakCallHelper(masm, r1.bit() | r2.bit(), r0.bit()); + Generate_DebugBreakCallHelper(masm, r1.bit() | r2.bit() | r3.bit(), r0.bit()); } diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc index 813e9492d..3f229eb87 100644 --- a/src/arm/full-codegen-arm.cc +++ b/src/arm/full-codegen-arm.cc @@ -130,6 +130,9 @@ void FullCodeGenerator::Generate() { CompilationInfo* info = info_; handler_table_ = 
isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED); + + InitializeFeedbackVector(); + profiling_counter_ = isolate()->factory()->NewCell( Handle(Smi::FromInt(FLAG_interrupt_budget), isolate())); SetFunctionPosition(function()); @@ -1074,6 +1077,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { Comment cmnt(masm_, "[ ForInStatement"); + int slot = stmt->ForInFeedbackSlot(); SetStatementPosition(stmt); Label loop, exit; @@ -1163,13 +1167,13 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { Label non_proxy; __ bind(&fixed_array); - Handle cell = isolate()->factory()->NewCell( - Handle(Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker), - isolate())); - RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell); - __ Move(r1, cell); - __ mov(r2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker))); - __ str(r2, FieldMemOperand(r1, Cell::kValueOffset)); + Handle feedback = Handle( + Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker), + isolate()); + StoreFeedbackVectorSlot(slot, feedback); + __ Move(r1, FeedbackVector()); + __ mov(r2, Operand(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker))); + __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(slot))); __ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check __ ldr(r2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object @@ -2716,15 +2720,15 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) { SetSourcePosition(expr->position()); Handle uninitialized = - TypeFeedbackCells::UninitializedSentinel(isolate()); - Handle cell = isolate()->factory()->NewCell(uninitialized); - RecordTypeFeedbackCell(expr->CallFeedbackId(), cell); - __ mov(r2, Operand(cell)); + TypeFeedbackInfo::UninitializedSentinel(isolate()); + StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized); + __ Move(r2, FeedbackVector()); + __ mov(r3, 
Operand(Smi::FromInt(expr->CallFeedbackSlot()))); // Record call targets in unoptimized code. CallFunctionStub stub(arg_count, RECORD_CALL_TARGET); __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); - __ CallStub(&stub, expr->CallFeedbackId()); + __ CallStub(&stub); RecordJSReturnSite(expr); // Restore context register. __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); @@ -2905,10 +2909,10 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) { // Record call targets in unoptimized code. Handle uninitialized = - TypeFeedbackCells::UninitializedSentinel(isolate()); - Handle cell = isolate()->factory()->NewCell(uninitialized); - RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell); - __ mov(r2, Operand(cell)); + TypeFeedbackInfo::UninitializedSentinel(isolate()); + StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized); + __ Move(r2, FeedbackVector()); + __ mov(r3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot()))); CallConstructStub stub(RECORD_CALL_TARGET); __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL); diff --git a/src/ast.cc b/src/ast.cc index 1a9919b5a..6b2f48f01 100644 --- a/src/ast.cc +++ b/src/ast.cc @@ -593,6 +593,17 @@ void Expression::RecordToBooleanTypeFeedback(TypeFeedbackOracle* oracle) { } +int Call::ComputeFeedbackSlotCount(Isolate* isolate) { + CallType call_type = GetCallType(isolate); + if (call_type == LOOKUP_SLOT_CALL || call_type == OTHER_CALL) { + // Call only uses a slot in some cases. 
+ return 1; + } + + return 0; +} + + Call::CallType Call::GetCallType(Isolate* isolate) const { VariableProxy* proxy = expression()->AsVariableProxy(); if (proxy != NULL) { @@ -633,10 +644,10 @@ bool Call::ComputeGlobalTarget(Handle global, void CallNew::RecordTypeFeedback(TypeFeedbackOracle* oracle) { allocation_site_ = - oracle->GetCallNewAllocationSite(CallNewFeedbackId()); - is_monomorphic_ = oracle->CallNewIsMonomorphic(CallNewFeedbackId()); + oracle->GetCallNewAllocationSite(CallNewFeedbackSlot()); + is_monomorphic_ = oracle->CallNewIsMonomorphic(CallNewFeedbackSlot()); if (is_monomorphic_) { - target_ = oracle->GetCallNewTarget(CallNewFeedbackId()); + target_ = oracle->GetCallNewTarget(CallNewFeedbackSlot()); if (!allocation_site_.is_null()) { elements_kind_ = allocation_site_->GetElementsKind(); } @@ -1039,6 +1050,11 @@ CaseClause::CaseClause(Zone* zone, void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \ increase_node_count(); \ } +#define REGULAR_NODE_WITH_FEEDBACK_SLOTS(NodeType) \ + void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \ + increase_node_count(); \ + add_slot_node(node); \ + } #define DONT_OPTIMIZE_NODE(NodeType) \ void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \ increase_node_count(); \ @@ -1051,6 +1067,12 @@ CaseClause::CaseClause(Zone* zone, increase_node_count(); \ add_flag(kDontSelfOptimize); \ } +#define DONT_SELFOPTIMIZE_NODE_WITH_FEEDBACK_SLOTS(NodeType) \ + void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \ + increase_node_count(); \ + add_slot_node(node); \ + add_flag(kDontSelfOptimize); \ + } #define DONT_CACHE_NODE(NodeType) \ void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \ increase_node_count(); \ @@ -1085,8 +1107,8 @@ REGULAR_NODE(CountOperation) REGULAR_NODE(BinaryOperation) REGULAR_NODE(CompareOperation) REGULAR_NODE(ThisFunction) -REGULAR_NODE(Call) -REGULAR_NODE(CallNew) +REGULAR_NODE_WITH_FEEDBACK_SLOTS(Call) +REGULAR_NODE_WITH_FEEDBACK_SLOTS(CallNew) 
// In theory, for VariableProxy we'd have to add: // if (node->var()->IsLookupSlot()) add_flag(kDontInline); // But node->var() is usually not bound yet at VariableProxy creation time, and @@ -1111,11 +1133,12 @@ DONT_OPTIMIZE_NODE(NativeFunctionLiteral) DONT_SELFOPTIMIZE_NODE(DoWhileStatement) DONT_SELFOPTIMIZE_NODE(WhileStatement) DONT_SELFOPTIMIZE_NODE(ForStatement) -DONT_SELFOPTIMIZE_NODE(ForInStatement) +DONT_SELFOPTIMIZE_NODE_WITH_FEEDBACK_SLOTS(ForInStatement) DONT_SELFOPTIMIZE_NODE(ForOfStatement) DONT_CACHE_NODE(ModuleLiteral) + void AstConstructionVisitor::VisitCallRuntime(CallRuntime* node) { increase_node_count(); if (node->is_jsruntime()) { diff --git a/src/ast.h b/src/ast.h index 2b33820f9..aacc5e4fc 100644 --- a/src/ast.h +++ b/src/ast.h @@ -32,6 +32,7 @@ #include "assembler.h" #include "factory.h" +#include "feedback-slots.h" #include "isolate.h" #include "jsregexp.h" #include "list-inl.h" @@ -181,7 +182,7 @@ class AstProperties V8_FINAL BASE_EMBEDDED { public: class Flags : public EnumSet {}; - AstProperties() : node_count_(0) { } + AstProperties() : node_count_(0) {} Flags* flags() { return &flags_; } int node_count() { return node_count_; } @@ -914,7 +915,8 @@ class ForEachStatement : public IterationStatement { }; -class ForInStatement V8_FINAL : public ForEachStatement { +class ForInStatement V8_FINAL : public ForEachStatement, + public FeedbackSlotInterface { public: DECLARE_NODE_TYPE(ForInStatement) @@ -922,7 +924,16 @@ class ForInStatement V8_FINAL : public ForEachStatement { return subject(); } - TypeFeedbackId ForInFeedbackId() const { return reuse(PrepareId()); } + // Type feedback information. 
+ virtual ComputablePhase GetComputablePhase() { return DURING_PARSE; } + virtual int ComputeFeedbackSlotCount(Isolate* isolate) { return 1; } + virtual void SetFirstFeedbackSlot(int slot) { for_in_feedback_slot_ = slot; } + + int ForInFeedbackSlot() { + ASSERT(for_in_feedback_slot_ != kInvalidFeedbackSlot); + return for_in_feedback_slot_; + } + enum ForInType { FAST_FOR_IN, SLOW_FOR_IN }; ForInType for_in_type() const { return for_in_type_; } void set_for_in_type(ForInType type) { for_in_type_ = type; } @@ -936,11 +947,13 @@ class ForInStatement V8_FINAL : public ForEachStatement { ForInStatement(Zone* zone, ZoneStringList* labels, int pos) : ForEachStatement(zone, labels, pos), for_in_type_(SLOW_FOR_IN), + for_in_feedback_slot_(kInvalidFeedbackSlot), body_id_(GetNextId(zone)), prepare_id_(GetNextId(zone)) { } ForInType for_in_type_; + int for_in_feedback_slot_; const BailoutId body_id_; const BailoutId prepare_id_; }; @@ -1733,7 +1746,7 @@ class Property V8_FINAL : public Expression { }; -class Call V8_FINAL : public Expression { +class Call V8_FINAL : public Expression, public FeedbackSlotInterface { public: DECLARE_NODE_TYPE(Call) @@ -1741,7 +1754,16 @@ class Call V8_FINAL : public Expression { ZoneList* arguments() const { return arguments_; } // Type feedback information. 
- TypeFeedbackId CallFeedbackId() const { return reuse(id()); } + virtual ComputablePhase GetComputablePhase() { return AFTER_SCOPING; } + virtual int ComputeFeedbackSlotCount(Isolate* isolate); + virtual void SetFirstFeedbackSlot(int slot) { + call_feedback_slot_ = slot; + } + + bool HasCallFeedbackSlot() const { + return call_feedback_slot_ != kInvalidFeedbackSlot; + } + int CallFeedbackSlot() const { return call_feedback_slot_; } virtual SmallMapList* GetReceiverTypes() V8_OVERRIDE { if (expression()->IsProperty()) { @@ -1790,6 +1812,7 @@ class Call V8_FINAL : public Expression { : Expression(zone, pos), expression_(expression), arguments_(arguments), + call_feedback_slot_(kInvalidFeedbackSlot), return_id_(GetNextId(zone)) { if (expression->IsProperty()) { expression->AsProperty()->mark_for_call(); @@ -1802,12 +1825,13 @@ class Call V8_FINAL : public Expression { Handle target_; Handle cell_; + int call_feedback_slot_; const BailoutId return_id_; }; -class CallNew V8_FINAL : public Expression { +class CallNew V8_FINAL : public Expression, public FeedbackSlotInterface { public: DECLARE_NODE_TYPE(CallNew) @@ -1815,6 +1839,17 @@ class CallNew V8_FINAL : public Expression { ZoneList* arguments() const { return arguments_; } // Type feedback information. 
+ virtual ComputablePhase GetComputablePhase() { return DURING_PARSE; } + virtual int ComputeFeedbackSlotCount(Isolate* isolate) { return 1; } + virtual void SetFirstFeedbackSlot(int slot) { + callnew_feedback_slot_ = slot; + } + + int CallNewFeedbackSlot() { + ASSERT(callnew_feedback_slot_ != kInvalidFeedbackSlot); + return callnew_feedback_slot_; + } + TypeFeedbackId CallNewFeedbackId() const { return reuse(id()); } void RecordTypeFeedback(TypeFeedbackOracle* oracle); virtual bool IsMonomorphic() V8_OVERRIDE { return is_monomorphic_; } @@ -1824,6 +1859,8 @@ class CallNew V8_FINAL : public Expression { return allocation_site_; } + static int feedback_slots() { return 1; } + BailoutId ReturnId() const { return return_id_; } protected: @@ -1836,6 +1873,7 @@ class CallNew V8_FINAL : public Expression { arguments_(arguments), is_monomorphic_(false), elements_kind_(GetInitialFastElementsKind()), + callnew_feedback_slot_(kInvalidFeedbackSlot), return_id_(GetNextId(zone)) { } private: @@ -1846,6 +1884,7 @@ class CallNew V8_FINAL : public Expression { Handle target_; ElementsKind elements_kind_; Handle allocation_site_; + int callnew_feedback_slot_; const BailoutId return_id_; }; @@ -2332,7 +2371,15 @@ class FunctionLiteral V8_FINAL : public Expression { void set_ast_properties(AstProperties* ast_properties) { ast_properties_ = *ast_properties; } - + void set_slot_processor(DeferredFeedbackSlotProcessor* slot_processor) { + slot_processor_ = *slot_processor; + } + void ProcessFeedbackSlots(Isolate* isolate) { + slot_processor_.ProcessFeedbackSlots(isolate); + } + int slot_count() { + return slot_processor_.slot_count(); + } bool dont_optimize() { return dont_optimize_reason_ != kNoReason; } BailoutReason dont_optimize_reason() { return dont_optimize_reason_; } void set_dont_optimize_reason(BailoutReason reason) { @@ -2382,6 +2429,7 @@ class FunctionLiteral V8_FINAL : public Expression { ZoneList* body_; Handle inferred_name_; AstProperties ast_properties_; + 
DeferredFeedbackSlotProcessor slot_processor_; BailoutReason dont_optimize_reason_; int materialized_literal_count_; @@ -2856,10 +2904,13 @@ private: \ class AstConstructionVisitor BASE_EMBEDDED { public: - AstConstructionVisitor() : dont_optimize_reason_(kNoReason) { } + explicit AstConstructionVisitor(Zone* zone) + : dont_optimize_reason_(kNoReason), + zone_(zone) { } AstProperties* ast_properties() { return &properties_; } BailoutReason dont_optimize_reason() { return dont_optimize_reason_; } + DeferredFeedbackSlotProcessor* slot_processor() { return &slot_processor_; } private: template friend class AstNodeFactory; @@ -2876,13 +2927,21 @@ class AstConstructionVisitor BASE_EMBEDDED { dont_optimize_reason_ = reason; } + void add_slot_node(FeedbackSlotInterface* slot_node) { + slot_processor_.add_slot_node(zone_, slot_node); + } + AstProperties properties_; + DeferredFeedbackSlotProcessor slot_processor_; BailoutReason dont_optimize_reason_; + Zone* zone_; }; class AstNullVisitor BASE_EMBEDDED { public: + explicit AstNullVisitor(Zone* zone) {} + // Node visitors. #define DEF_VISIT(type) \ void Visit##type(type* node) {} @@ -2898,7 +2957,9 @@ class AstNullVisitor BASE_EMBEDDED { template class AstNodeFactory V8_FINAL BASE_EMBEDDED { public: - explicit AstNodeFactory(Zone* zone) : zone_(zone) { } + explicit AstNodeFactory(Zone* zone) + : zone_(zone), + visitor_(zone) { } Visitor* visitor() { return &visitor_; } diff --git a/src/code-stubs-hydrogen.cc b/src/code-stubs-hydrogen.cc index bd3d8d286..9fcb0b37b 100644 --- a/src/code-stubs-hydrogen.cc +++ b/src/code-stubs-hydrogen.cc @@ -530,15 +530,11 @@ HValue* CodeStubGraphBuilder::BuildCodeStub() { Add(site_list, HObjectAccess::ForAllocationSiteList(), object); - // We use a hammer (SkipWriteBarrier()) to indicate that we know the input - // cell is really a Cell, and so no write barrier is needed. - // TODO(mvstanton): Add a debug_code check to verify the input cell is really - // a cell. 
(perhaps with a new instruction, HAssert). - HInstruction* cell = GetParameter(0); - HObjectAccess access = HObjectAccess::ForCellValue(); - store = Add(cell, access, object); - store->SkipWriteBarrier(); - return cell; + HInstruction* feedback_vector = GetParameter(0); + HInstruction* slot = GetParameter(1); + Add(feedback_vector, slot, object, FAST_ELEMENTS, + INITIALIZING_STORE); + return feedback_vector; } diff --git a/src/compiler.cc b/src/compiler.cc index b9e13c166..9bae80973 100644 --- a/src/compiler.cc +++ b/src/compiler.cc @@ -243,6 +243,13 @@ bool CompilationInfo::ShouldSelfOptimize() { } +void CompilationInfo::PrepareForCompilation(Scope* scope) { + ASSERT(scope_ == NULL); + scope_ = scope; + function()->ProcessFeedbackSlots(isolate_); +} + + class HOptimizedGraphBuilderWithPositions: public HOptimizedGraphBuilder { public: explicit HOptimizedGraphBuilderWithPositions(CompilationInfo* info) @@ -363,7 +370,7 @@ OptimizedCompileJob::Status OptimizedCompileJob::CreateGraph() { // Note that we use the same AST that we will use for generating the // optimized code. unoptimized.SetFunction(info()->function()); - unoptimized.SetScope(info()->scope()); + unoptimized.PrepareForCompilation(info()->scope()); unoptimized.SetContext(info()->context()); if (should_recompile) unoptimized.EnableDeoptimizationSupport(); bool succeeded = FullCodeGenerator::MakeCode(&unoptimized); @@ -982,7 +989,7 @@ Handle Compiler::BuildFunctionInfo(FunctionLiteral* literal, // Precondition: code has been parsed and scopes have been analyzed. 
CompilationInfoWithZone info(script); info.SetFunction(literal); - info.SetScope(literal->scope()); + info.PrepareForCompilation(literal->scope()); info.SetLanguageMode(literal->scope()->language_mode()); Isolate* isolate = info.isolate(); diff --git a/src/compiler.h b/src/compiler.h index 3bf4db578..9d1baa328 100644 --- a/src/compiler.h +++ b/src/compiler.h @@ -175,10 +175,8 @@ class CompilationInfo { ASSERT(function_ == NULL); function_ = literal; } - void SetScope(Scope* scope) { - ASSERT(scope_ == NULL); - scope_ = scope; - } + // When the scope is applied, we may have deferred work to do on the function. + void PrepareForCompilation(Scope* scope); void SetGlobalScope(Scope* global_scope) { ASSERT(global_scope_ == NULL); global_scope_ = global_scope; diff --git a/src/feedback-slots.h b/src/feedback-slots.h new file mode 100644 index 000000000..9760c652b --- /dev/null +++ b/src/feedback-slots.h @@ -0,0 +1,110 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following +// disclaimer in the documentation and/or other materials provided +// with the distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef V8_FEEDBACK_SLOTS_H_ +#define V8_FEEDBACK_SLOTS_H_ + +#include "v8.h" + +#include "isolate.h" + +namespace v8 { +namespace internal { + +enum ComputablePhase { + DURING_PARSE, + AFTER_SCOPING +}; + + +class FeedbackSlotInterface { + public: + static const int kInvalidFeedbackSlot = -1; + + virtual ~FeedbackSlotInterface() {} + + // When can we ask how many feedback slots are necessary? + virtual ComputablePhase GetComputablePhase() = 0; + virtual int ComputeFeedbackSlotCount(Isolate* isolate) = 0; + virtual void SetFirstFeedbackSlot(int slot) = 0; +}; + + +class DeferredFeedbackSlotProcessor { + public: + DeferredFeedbackSlotProcessor() + : slot_nodes_(NULL), + slot_count_(0) { } + + void add_slot_node(Zone* zone, FeedbackSlotInterface* slot) { + if (slot->GetComputablePhase() == DURING_PARSE) { + // No need to add to the list + int count = slot->ComputeFeedbackSlotCount(zone->isolate()); + slot->SetFirstFeedbackSlot(slot_count_); + slot_count_ += count; + } else { + if (slot_nodes_ == NULL) { + slot_nodes_ = new(zone) ZoneList(10, zone); + } + slot_nodes_->Add(slot, zone); + } + } + + void ProcessFeedbackSlots(Isolate* isolate) { + // Scope analysis must have been done. 
+ if (slot_nodes_ == NULL) { + return; + } + + int current_slot = slot_count_; + for (int i = 0; i < slot_nodes_->length(); i++) { + FeedbackSlotInterface* slot_interface = slot_nodes_->at(i); + int count = slot_interface->ComputeFeedbackSlotCount(isolate); + if (count > 0) { + slot_interface->SetFirstFeedbackSlot(current_slot); + current_slot += count; + } + } + + slot_count_ = current_slot; + slot_nodes_->Clear(); + } + + int slot_count() { + ASSERT(slot_count_ >= 0); + return slot_count_; + } + + private: + ZoneList* slot_nodes_; + int slot_count_; +}; + + +} } // namespace v8::internal + +#endif // V8_FEEDBACK_SLOTS_H_ diff --git a/src/full-codegen.cc b/src/full-codegen.cc index e14afefda..0f1536f45 100644 --- a/src/full-codegen.cc +++ b/src/full-codegen.cc @@ -345,7 +345,6 @@ bool FullCodeGenerator::MakeCode(CompilationInfo* info) { info->function()->scope()->AllowsLazyCompilation()); cgen.PopulateDeoptimizationData(code); cgen.PopulateTypeFeedbackInfo(code); - cgen.PopulateTypeFeedbackCells(code); code->set_has_deoptimization_support(info->HasDeoptimizationSupport()); code->set_handler_table(*cgen.handler_table()); #ifdef ENABLE_DEBUGGER_SUPPORT @@ -387,6 +386,15 @@ unsigned FullCodeGenerator::EmitBackEdgeTable() { } +void FullCodeGenerator::InitializeFeedbackVector() { + int length = info_->function()->slot_count(); + ASSERT_EQ(isolate()->heap()->the_hole_value(), + *TypeFeedbackInfo::UninitializedSentinel(isolate())); + feedback_vector_ = isolate()->factory()->NewFixedArrayWithHoles(length, + TENURED); +} + + void FullCodeGenerator::PopulateDeoptimizationData(Handle code) { // Fill in the deoptimization information. 
ASSERT(info_->HasDeoptimizationSupport() || bailout_entries_.is_empty()); @@ -405,6 +413,7 @@ void FullCodeGenerator::PopulateDeoptimizationData(Handle code) { void FullCodeGenerator::PopulateTypeFeedbackInfo(Handle code) { Handle info = isolate()->factory()->NewTypeFeedbackInfo(); info->set_ic_total_count(ic_total_count_); + info->set_feedback_vector(*FeedbackVector()); ASSERT(!isolate()->heap()->InNewSpace(*info)); code->set_type_feedback_info(*info); } @@ -425,21 +434,6 @@ void FullCodeGenerator::Initialize() { } -void FullCodeGenerator::PopulateTypeFeedbackCells(Handle code) { - if (type_feedback_cells_.is_empty()) return; - int length = type_feedback_cells_.length(); - int array_size = TypeFeedbackCells::LengthOfFixedArray(length); - Handle cache = Handle::cast( - isolate()->factory()->NewFixedArray(array_size, TENURED)); - for (int i = 0; i < length; i++) { - cache->SetAstId(i, type_feedback_cells_[i].ast_id); - cache->SetCell(i, *type_feedback_cells_[i].cell); - } - TypeFeedbackInfo::cast(code->type_feedback_info())->set_type_feedback_cells( - *cache); -} - - void FullCodeGenerator::PrepareForBailout(Expression* node, State state) { PrepareForBailoutForId(node->id(), state); } @@ -490,13 +484,6 @@ void FullCodeGenerator::PrepareForBailoutForId(BailoutId id, State state) { } -void FullCodeGenerator::RecordTypeFeedbackCell( - TypeFeedbackId id, Handle cell) { - TypeFeedbackCellEntry entry = { id, cell }; - type_feedback_cells_.Add(entry, zone()); -} - - void FullCodeGenerator::RecordBackEdge(BailoutId ast_id) { // The pc offset does not need to be encoded and packed together with a state. ASSERT(masm_->pc_offset() > 0); diff --git a/src/full-codegen.h b/src/full-codegen.h index d52f3c410..b2514cb0a 100644 --- a/src/full-codegen.h +++ b/src/full-codegen.h @@ -96,9 +96,6 @@ class FullCodeGenerator: public AstVisitor { ? info->function()->ast_node_count() : 0, info->zone()), back_edges_(2, info->zone()), - type_feedback_cells_(info->HasDeoptimizationSupport() - ? 
info->function()->ast_node_count() : 0, - info->zone()), ic_total_count_(0) { Initialize(); } @@ -434,9 +431,15 @@ class FullCodeGenerator: public AstVisitor { void PrepareForBailout(Expression* node, State state); void PrepareForBailoutForId(BailoutId id, State state); - // Cache cell support. This associates AST ids with global property cells - // that will be cleared during GC and collected by the type-feedback oracle. - void RecordTypeFeedbackCell(TypeFeedbackId id, Handle cell); + // Feedback slot support. The feedback vector will be cleared during gc and + // collected by the type-feedback oracle. + Handle FeedbackVector() { + return feedback_vector_; + } + void StoreFeedbackVectorSlot(int slot, Handle object) { + feedback_vector_->set(slot, *object); + } + void InitializeFeedbackVector(); // Record a call's return site offset, used to rebuild the frame if the // called function was inlined at the site. @@ -635,7 +638,6 @@ class FullCodeGenerator: public AstVisitor { void Generate(); void PopulateDeoptimizationData(Handle code); void PopulateTypeFeedbackInfo(Handle code); - void PopulateTypeFeedbackCells(Handle code); Handle handler_table() { return handler_table_; } @@ -650,12 +652,6 @@ class FullCodeGenerator: public AstVisitor { uint32_t loop_depth; }; - struct TypeFeedbackCellEntry { - TypeFeedbackId ast_id; - Handle cell; - }; - - class ExpressionContext BASE_EMBEDDED { public: explicit ExpressionContext(FullCodeGenerator* codegen) @@ -845,9 +841,9 @@ class FullCodeGenerator: public AstVisitor { ZoneList bailout_entries_; GrowableBitVector prepared_bailout_ids_; ZoneList back_edges_; - ZoneList type_feedback_cells_; int ic_total_count_; Handle handler_table_; + Handle feedback_vector_; Handle profiling_counter_; bool generate_debug_code_; diff --git a/src/heap.cc b/src/heap.cc index dfe98ec08..6493524fc 100644 --- a/src/heap.cc +++ b/src/heap.cc @@ -2669,8 +2669,7 @@ MaybeObject* Heap::AllocateTypeFeedbackInfo() { if (!maybe_info->To(&info)) return 
maybe_info; } info->initialize_storage(); - info->set_type_feedback_cells(TypeFeedbackCells::cast(empty_fixed_array()), - SKIP_WRITE_BARRIER); + info->set_feedback_vector(empty_fixed_array(), SKIP_WRITE_BARRIER); return info; } diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc index e280c50e7..eddd571e6 100644 --- a/src/ia32/code-stubs-ia32.cc +++ b/src/ia32/code-stubs-ia32.cc @@ -110,8 +110,8 @@ void FastCloneShallowObjectStub::InitializeInterfaceDescriptor( void CreateAllocationSiteStub::InitializeInterfaceDescriptor( Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - static Register registers[] = { ebx }; - descriptor->register_param_count_ = 1; + static Register registers[] = { ebx, edx }; + descriptor->register_param_count_ = 2; descriptor->register_params_ = registers; descriptor->deoptimization_handler_ = NULL; } @@ -2322,28 +2322,30 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) { static void GenerateRecordCallTarget(MacroAssembler* masm) { - // Cache the called function in a global property cell. Cache states + // Cache the called function in a feedback vector slot. Cache states // are uninitialized, monomorphic (indicated by a JSFunction), and // megamorphic. // eax : number of arguments to the construct function - // ebx : cache cell for call target + // ebx : Feedback vector + // edx : slot in feedback vector (Smi) // edi : the function to call Isolate* isolate = masm->isolate(); Label initialize, done, miss, megamorphic, not_array_function; // Load the cache state into ecx. - __ mov(ecx, FieldOperand(ebx, Cell::kValueOffset)); + __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size, + FixedArray::kHeaderSize)); // A monomorphic cache hit or an already megamorphic state: invoke the // function without changing the state. 
__ cmp(ecx, edi); - __ j(equal, &done); - __ cmp(ecx, Immediate(TypeFeedbackCells::MegamorphicSentinel(isolate))); - __ j(equal, &done); + __ j(equal, &done, Label::kFar); + __ cmp(ecx, Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate))); + __ j(equal, &done, Label::kFar); // If we came here, we need to see if we are the array function. // If we didn't have a matching function, and we didn't find the megamorph - // sentinel, then we have in the cell either some other function or an + // sentinel, then we have in the slot either some other function or an // AllocationSite. Do a map check on the object in ecx. Handle allocation_site_map = masm->isolate()->factory()->allocation_site_map(); @@ -2356,20 +2358,21 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { __ cmp(edi, Operand(ecx, Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX))); __ j(not_equal, &megamorphic); - __ jmp(&done); + __ jmp(&done, Label::kFar); __ bind(&miss); // A monomorphic miss (i.e, here the cache is not uninitialized) goes // megamorphic. - __ cmp(ecx, Immediate(TypeFeedbackCells::UninitializedSentinel(isolate))); + __ cmp(ecx, Immediate(TypeFeedbackInfo::UninitializedSentinel(isolate))); __ j(equal, &initialize); // MegamorphicSentinel is an immortal immovable object (undefined) so no // write-barrier is needed. __ bind(&megamorphic); - __ mov(FieldOperand(ebx, Cell::kValueOffset), - Immediate(TypeFeedbackCells::MegamorphicSentinel(isolate))); - __ jmp(&done, Label::kNear); + __ mov(FieldOperand(ebx, edx, times_half_pointer_size, + FixedArray::kHeaderSize), + Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate))); + __ jmp(&done, Label::kFar); // An uninitialized cache is patched with the function or sentinel to // indicate the ElementsKind if function is the Array constructor. 
@@ -2381,7 +2384,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { __ j(not_equal, ¬_array_function); // The target function is the Array constructor, - // Create an AllocationSite if we don't already have it, store it in the cell + // Create an AllocationSite if we don't already have it, store it in the slot. { FrameScope scope(masm, StackFrame::INTERNAL); @@ -2389,12 +2392,14 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { __ SmiTag(eax); __ push(eax); __ push(edi); + __ push(edx); __ push(ebx); CreateAllocationSiteStub create_stub; __ CallStub(&create_stub); __ pop(ebx); + __ pop(edx); __ pop(edi); __ pop(eax); __ SmiUntag(eax); @@ -2402,15 +2407,26 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { __ jmp(&done); __ bind(¬_array_function); - __ mov(FieldOperand(ebx, Cell::kValueOffset), edi); - // No need for a write barrier here - cells are rescanned. + __ mov(FieldOperand(ebx, edx, times_half_pointer_size, + FixedArray::kHeaderSize), + edi); + // We won't need edx or ebx anymore, just save edi + __ push(edi); + __ push(ebx); + __ push(edx); + __ RecordWriteArray(ebx, edi, edx, kDontSaveFPRegs, + EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); + __ pop(edx); + __ pop(ebx); + __ pop(edi); __ bind(&done); } void CallFunctionStub::Generate(MacroAssembler* masm) { - // ebx : cache cell for call target + // ebx : feedback vector + // edx : (only if ebx is not undefined) slot in feedback vector (Smi) // edi : the function to call Isolate* isolate = masm->isolate(); Label slow, non_function, wrap, cont; @@ -2469,8 +2485,9 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { // If there is a call target cache, mark it megamorphic in the // non-function case. MegamorphicSentinel is an immortal immovable // object (undefined) so no write barrier is needed. 
- __ mov(FieldOperand(ebx, Cell::kValueOffset), - Immediate(TypeFeedbackCells::MegamorphicSentinel(isolate))); + __ mov(FieldOperand(ebx, edx, times_half_pointer_size, + FixedArray::kHeaderSize), + Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate))); } // Check for function proxy. __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE); @@ -2514,7 +2531,8 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { void CallConstructStub::Generate(MacroAssembler* masm) { // eax : number of arguments - // ebx : cache cell for call target + // ebx : feedback vector + // edx : (only if ebx is not undefined) slot in feedback vector (Smi) // edi : constructor function Label slow, non_function_call; @@ -5137,7 +5155,8 @@ void ArrayConstructorStub::GenerateDispatchToArrayStub( void ArrayConstructorStub::Generate(MacroAssembler* masm) { // ----------- S t a t e ------------- // -- eax : argc (only if argument_count_ == ANY) - // -- ebx : type info cell + // -- ebx : feedback vector (fixed array or undefined) + // -- edx : slot index (if ebx is fixed array) // -- edi : constructor // -- esp[0] : return address // -- esp[4] : last argument @@ -5158,22 +5177,27 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) { __ CmpObjectType(ecx, MAP_TYPE, ecx); __ Assert(equal, kUnexpectedInitialMapForArrayFunction); - // We should either have undefined in ebx or a valid cell + // We should either have undefined in ebx or a valid fixed array. Label okay_here; - Handle cell_map = masm->isolate()->factory()->cell_map(); + Handle fixed_array_map = masm->isolate()->factory()->fixed_array_map(); __ cmp(ebx, Immediate(undefined_sentinel)); __ j(equal, &okay_here); - __ cmp(FieldOperand(ebx, 0), Immediate(cell_map)); - __ Assert(equal, kExpectedPropertyCellInRegisterEbx); + __ cmp(FieldOperand(ebx, 0), Immediate(fixed_array_map)); + __ Assert(equal, kExpectedFixedArrayInRegisterEbx); + + // edx should be a smi if we don't have undefined in ebx. 
+ __ AssertSmi(edx); + __ bind(&okay_here); } Label no_info; - // If the type cell is undefined, or contains anything other than an + // If the feedback vector is undefined, or contains anything other than an // AllocationSite, call an array constructor that doesn't use AllocationSites. __ cmp(ebx, Immediate(undefined_sentinel)); __ j(equal, &no_info); - __ mov(ebx, FieldOperand(ebx, Cell::kValueOffset)); + __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size, + FixedArray::kHeaderSize)); __ cmp(FieldOperand(ebx, 0), Immediate( masm->isolate()->factory()->allocation_site_map())); __ j(not_equal, &no_info); @@ -5229,7 +5253,6 @@ void InternalArrayConstructorStub::GenerateCase( void InternalArrayConstructorStub::Generate(MacroAssembler* masm) { // ----------- S t a t e ------------- // -- eax : argc - // -- ebx : type info cell // -- edi : constructor // -- esp[0] : return address // -- esp[4] : last argument diff --git a/src/ia32/debug-ia32.cc b/src/ia32/debug-ia32.cc index 76a7003bf..4c76f7dfe 100644 --- a/src/ia32/debug-ia32.cc +++ b/src/ia32/debug-ia32.cc @@ -280,10 +280,12 @@ void Debug::GenerateCallFunctionStubDebugBreak(MacroAssembler* masm) { void Debug::GenerateCallFunctionStubRecordDebugBreak(MacroAssembler* masm) { // Register state for CallFunctionStub (from code-stubs-ia32.cc). // ----------- S t a t e ------------- - // -- ebx: cache cell for call target + // -- ebx: feedback array + // -- edx: slot in feedback array // -- edi: function // ----------------------------------- - Generate_DebugBreakCallHelper(masm, ebx.bit() | edi.bit(), 0, false); + Generate_DebugBreakCallHelper(masm, ebx.bit() | edx.bit() | edi.bit(), + 0, false); } @@ -306,11 +308,13 @@ void Debug::GenerateCallConstructStubRecordDebugBreak(MacroAssembler* masm) { // above IC call. 
// ----------- S t a t e ------------- // -- eax: number of arguments (not smi) - // -- ebx: cache cell for call target + // -- ebx: feedback array + // -- edx: feedback slot (smi) // -- edi: constructor function // ----------------------------------- // The number of arguments in eax is not smi encoded. - Generate_DebugBreakCallHelper(masm, ebx.bit() | edi.bit(), eax.bit(), false); + Generate_DebugBreakCallHelper(masm, ebx.bit() | edx.bit() | edi.bit(), + eax.bit(), false); } diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc index f3125666f..b9e4d4bbb 100644 --- a/src/ia32/full-codegen-ia32.cc +++ b/src/ia32/full-codegen-ia32.cc @@ -118,6 +118,9 @@ void FullCodeGenerator::Generate() { CompilationInfo* info = info_; handler_table_ = isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED); + + InitializeFeedbackVector(); + profiling_counter_ = isolate()->factory()->NewCell( Handle(Smi::FromInt(FLAG_interrupt_budget), isolate())); SetFunctionPosition(function()); @@ -1021,6 +1024,8 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { Comment cmnt(masm_, "[ ForInStatement"); + int slot = stmt->ForInFeedbackSlot(); + SetStatementPosition(stmt); Label loop, exit; @@ -1099,13 +1104,15 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { Label non_proxy; __ bind(&fixed_array); - Handle cell = isolate()->factory()->NewCell( - Handle(Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker), - isolate())); - RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell); - __ LoadHeapObject(ebx, cell); - __ mov(FieldOperand(ebx, Cell::kValueOffset), - Immediate(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker))); + Handle feedback = Handle( + Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker), + isolate()); + StoreFeedbackVectorSlot(slot, feedback); + + // No need for a write barrier, we are storing a Smi in the feedback vector. 
+ __ LoadHeapObject(ebx, FeedbackVector()); + __ mov(FieldOperand(ebx, FixedArray::OffsetOfElementAt(slot)), + Immediate(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker))); __ mov(ebx, Immediate(Smi::FromInt(1))); // Smi indicates slow check __ mov(ecx, Operand(esp, 0 * kPointerSize)); // Get enumerated object @@ -2668,15 +2675,15 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) { SetSourcePosition(expr->position()); Handle uninitialized = - TypeFeedbackCells::UninitializedSentinel(isolate()); - Handle cell = isolate()->factory()->NewCell(uninitialized); - RecordTypeFeedbackCell(expr->CallFeedbackId(), cell); - __ mov(ebx, cell); + TypeFeedbackInfo::UninitializedSentinel(isolate()); + StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized); + __ LoadHeapObject(ebx, FeedbackVector()); + __ mov(edx, Immediate(Smi::FromInt(expr->CallFeedbackSlot()))); // Record call targets in unoptimized code. CallFunctionStub stub(arg_count, RECORD_CALL_TARGET); __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize)); - __ CallStub(&stub, expr->CallFeedbackId()); + __ CallStub(&stub); RecordJSReturnSite(expr); // Restore context register. @@ -2848,10 +2855,10 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) { // Record call targets in unoptimized code. 
Handle uninitialized = - TypeFeedbackCells::UninitializedSentinel(isolate()); - Handle cell = isolate()->factory()->NewCell(uninitialized); - RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell); - __ mov(ebx, cell); + TypeFeedbackInfo::UninitializedSentinel(isolate()); + StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized); + __ LoadHeapObject(ebx, FeedbackVector()); + __ mov(edx, Immediate(Smi::FromInt(expr->CallNewFeedbackSlot()))); CallConstructStub stub(RECORD_CALL_TARGET); __ call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL); diff --git a/src/objects-debug.cc b/src/objects-debug.cc index e33b46be7..03776ace5 100644 --- a/src/objects-debug.cc +++ b/src/objects-debug.cc @@ -367,7 +367,7 @@ void PolymorphicCodeCache::PolymorphicCodeCacheVerify() { void TypeFeedbackInfo::TypeFeedbackInfoVerify() { VerifyObjectField(kStorage1Offset); VerifyObjectField(kStorage2Offset); - VerifyHeapPointer(type_feedback_cells()); + VerifyHeapPointer(feedback_vector()); } diff --git a/src/objects-inl.h b/src/objects-inl.h index dbac0f468..be2c12d4c 100644 --- a/src/objects-inl.h +++ b/src/objects-inl.h @@ -760,16 +760,6 @@ bool Object::IsDependentCode() { } -bool Object::IsTypeFeedbackCells() { - if (!IsFixedArray()) return false; - // There's actually no way to see the difference between a fixed array and - // a cache cells array. Since this is used for asserts we can check that - // the length is plausible though. 
- if (FixedArray::cast(this)->length() % 2 != 0) return false; - return true; -} - - bool Object::IsContext() { if (!Object::IsHeapObject()) return false; Map* map = HeapObject::cast(this)->map(); @@ -2791,7 +2781,6 @@ CAST_ACCESSOR(DescriptorArray) CAST_ACCESSOR(DeoptimizationInputData) CAST_ACCESSOR(DeoptimizationOutputData) CAST_ACCESSOR(DependentCode) -CAST_ACCESSOR(TypeFeedbackCells) CAST_ACCESSOR(StringTable) CAST_ACCESSOR(JSFunctionResultCache) CAST_ACCESSOR(NormalizedMapCache) @@ -6563,43 +6552,23 @@ MaybeObject* ConstantPoolArray::Copy() { } -void TypeFeedbackCells::SetAstId(int index, TypeFeedbackId id) { - set(1 + index * 2, Smi::FromInt(id.ToInt())); -} - - -TypeFeedbackId TypeFeedbackCells::AstId(int index) { - return TypeFeedbackId(Smi::cast(get(1 + index * 2))->value()); -} - - -void TypeFeedbackCells::SetCell(int index, Cell* cell) { - set(index * 2, cell); -} - - -Cell* TypeFeedbackCells::GetCell(int index) { - return Cell::cast(get(index * 2)); -} - - -Handle TypeFeedbackCells::UninitializedSentinel(Isolate* isolate) { +Handle TypeFeedbackInfo::UninitializedSentinel(Isolate* isolate) { return isolate->factory()->the_hole_value(); } -Handle TypeFeedbackCells::MegamorphicSentinel(Isolate* isolate) { +Handle TypeFeedbackInfo::MegamorphicSentinel(Isolate* isolate) { return isolate->factory()->undefined_value(); } -Handle TypeFeedbackCells::MonomorphicArraySentinel(Isolate* isolate, +Handle TypeFeedbackInfo::MonomorphicArraySentinel(Isolate* isolate, ElementsKind elements_kind) { return Handle(Smi::FromInt(static_cast(elements_kind)), isolate); } -Object* TypeFeedbackCells::RawUninitializedSentinel(Heap* heap) { +Object* TypeFeedbackInfo::RawUninitializedSentinel(Heap* heap) { return heap->the_hole_value(); } @@ -6682,8 +6651,8 @@ bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) { } -ACCESSORS(TypeFeedbackInfo, type_feedback_cells, TypeFeedbackCells, - kTypeFeedbackCellsOffset) +ACCESSORS(TypeFeedbackInfo, feedback_vector, 
FixedArray, + kFeedbackVectorOffset) SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot) diff --git a/src/objects-printer.cc b/src/objects-printer.cc index 909d8f742..995459b3c 100644 --- a/src/objects-printer.cc +++ b/src/objects-printer.cc @@ -555,8 +555,8 @@ void TypeFeedbackInfo::TypeFeedbackInfoPrint(FILE* out) { HeapObject::PrintHeader(out, "TypeFeedbackInfo"); PrintF(out, " - ic_total_count: %d, ic_with_type_info_count: %d\n", ic_total_count(), ic_with_type_info_count()); - PrintF(out, " - type_feedback_cells: "); - type_feedback_cells()->FixedArrayPrint(out); + PrintF(out, " - feedback_vector: "); + feedback_vector()->FixedArrayPrint(out); } diff --git a/src/objects-visiting-inl.h b/src/objects-visiting-inl.h index 5201a7b31..9c3378357 100644 --- a/src/objects-visiting-inl.h +++ b/src/objects-visiting-inl.h @@ -427,7 +427,7 @@ void StaticMarkingVisitor::VisitCode( Heap* heap = map->GetHeap(); Code* code = Code::cast(object); if (FLAG_cleanup_code_caches_at_gc) { - code->ClearTypeFeedbackCells(heap); + code->ClearTypeFeedbackInfo(heap); } if (FLAG_age_code && !Serializer::enabled()) { code->MakeOlder(heap->mark_compact_collector()->marking_parity()); diff --git a/src/objects.cc b/src/objects.cc index 48d77db45..7da082a4c 100644 --- a/src/objects.cc +++ b/src/objects.cc @@ -10631,18 +10631,18 @@ void Code::ClearInlineCaches(Code::Kind* kind) { } -void Code::ClearTypeFeedbackCells(Heap* heap) { +void Code::ClearTypeFeedbackInfo(Heap* heap) { if (kind() != FUNCTION) return; Object* raw_info = type_feedback_info(); if (raw_info->IsTypeFeedbackInfo()) { - TypeFeedbackCells* type_feedback_cells = - TypeFeedbackInfo::cast(raw_info)->type_feedback_cells(); - for (int i = 0; i < type_feedback_cells->CellCount(); i++) { - Cell* cell = type_feedback_cells->GetCell(i); - // Don't clear AllocationSites - Object* value = cell->value(); - if (value == NULL || !value->IsAllocationSite()) { - 
cell->set_value(TypeFeedbackCells::RawUninitializedSentinel(heap)); + FixedArray* feedback_vector = + TypeFeedbackInfo::cast(raw_info)->feedback_vector(); + for (int i = 0; i < feedback_vector->length(); i++) { + Object* obj = feedback_vector->get(i); + if (!obj->IsAllocationSite()) { + // TODO(mvstanton): Can't I avoid a write barrier for this sentinel? + feedback_vector->set(i, + TypeFeedbackInfo::RawUninitializedSentinel(heap)); } } } diff --git a/src/objects.h b/src/objects.h index 6e3e94132..2a081a516 100644 --- a/src/objects.h +++ b/src/objects.h @@ -1038,7 +1038,6 @@ class MaybeObject BASE_EMBEDDED { V(DeoptimizationInputData) \ V(DeoptimizationOutputData) \ V(DependentCode) \ - V(TypeFeedbackCells) \ V(FixedArray) \ V(FixedDoubleArray) \ V(ConstantPoolArray) \ @@ -1149,10 +1148,12 @@ class MaybeObject BASE_EMBEDDED { V(kExpectedAllocationSite, "expected allocation site") \ V(kExpectedPropertyCellInRegisterA2, \ "Expected property cell in register a2") \ - V(kExpectedPropertyCellInRegisterEbx, \ - "Expected property cell in register ebx") \ - V(kExpectedPropertyCellInRegisterRbx, \ - "Expected property cell in register rbx") \ + V(kExpectedFixedArrayInRegisterEbx, \ + "Expected fixed array in register ebx") \ + V(kExpectedFixedArrayInRegisterR2, \ + "Expected fixed array in register r2") \ + V(kExpectedFixedArrayInRegisterRbx, \ + "Expected fixed array in register rbx") \ V(kExpectingAlignmentForCopyBytes, \ "Expecting alignment for CopyBytes") \ V(kExportDeclaration, "Export declaration") \ @@ -5069,49 +5070,6 @@ class DeoptimizationOutputData: public FixedArray { // Forward declaration. class Cell; class PropertyCell; - -// TypeFeedbackCells is a fixed array used to hold the association between -// cache cells and AST ids for code generated by the full compiler. -// The format of the these objects is -// [i * 2]: Global property cell of ith cache cell. -// [i * 2 + 1]: Ast ID for ith cache cell. 
-class TypeFeedbackCells: public FixedArray { - public: - int CellCount() { return length() / 2; } - static int LengthOfFixedArray(int cell_count) { return cell_count * 2; } - - // Accessors for AST ids associated with cache values. - inline TypeFeedbackId AstId(int index); - inline void SetAstId(int index, TypeFeedbackId id); - - // Accessors for global property cells holding the cache values. - inline Cell* GetCell(int index); - inline void SetCell(int index, Cell* cell); - - // The object that indicates an uninitialized cache. - static inline Handle UninitializedSentinel(Isolate* isolate); - - // The object that indicates a megamorphic state. - static inline Handle MegamorphicSentinel(Isolate* isolate); - - // The object that indicates a monomorphic state of Array with - // ElementsKind - static inline Handle MonomorphicArraySentinel(Isolate* isolate, - ElementsKind elements_kind); - - // A raw version of the uninitialized sentinel that's safe to read during - // garbage collection (e.g., for patching the cache). - static inline Object* RawUninitializedSentinel(Heap* heap); - - // Casting. - static inline TypeFeedbackCells* cast(Object* obj); - - static const int kForInFastCaseMarker = 0; - static const int kForInSlowCaseMarker = 1; -}; - - -// Forward declaration. 
class SafepointEntry; class TypeFeedbackInfo; @@ -5472,7 +5430,7 @@ class Code: public HeapObject { void ClearInlineCaches(); void ClearInlineCaches(Kind kind); - void ClearTypeFeedbackCells(Heap* heap); + void ClearTypeFeedbackInfo(Heap* heap); BailoutId TranslatePcOffsetToAstId(uint32_t pc_offset); uint32_t TranslateAstIdToPcOffset(BailoutId ast_id); @@ -8186,7 +8144,7 @@ class TypeFeedbackInfo: public Struct { inline void set_inlined_type_change_checksum(int checksum); inline bool matches_inlined_type_change_checksum(int checksum); - DECL_ACCESSORS(type_feedback_cells, TypeFeedbackCells) + DECL_ACCESSORS(feedback_vector, FixedArray) static inline TypeFeedbackInfo* cast(Object* obj); @@ -8196,8 +8154,27 @@ class TypeFeedbackInfo: public Struct { static const int kStorage1Offset = HeapObject::kHeaderSize; static const int kStorage2Offset = kStorage1Offset + kPointerSize; - static const int kTypeFeedbackCellsOffset = kStorage2Offset + kPointerSize; - static const int kSize = kTypeFeedbackCellsOffset + kPointerSize; + static const int kFeedbackVectorOffset = + kStorage2Offset + kPointerSize; + static const int kSize = kFeedbackVectorOffset + kPointerSize; + + // The object that indicates an uninitialized cache. + static inline Handle UninitializedSentinel(Isolate* isolate); + + // The object that indicates a megamorphic state. + static inline Handle MegamorphicSentinel(Isolate* isolate); + + // The object that indicates a monomorphic state of Array with + // ElementsKind + static inline Handle MonomorphicArraySentinel(Isolate* isolate, + ElementsKind elements_kind); + + // A raw version of the uninitialized sentinel that's safe to read during + // garbage collection (e.g., for patching the cache). 
+ static inline Object* RawUninitializedSentinel(Heap* heap); + + static const int kForInFastCaseMarker = 0; + static const int kForInSlowCaseMarker = 1; private: static const int kTypeChangeChecksumBits = 7; diff --git a/src/parser.cc b/src/parser.cc index 5e7680e6c..c1c1122d3 100644 --- a/src/parser.cc +++ b/src/parser.cc @@ -684,6 +684,7 @@ FunctionLiteral* Parser::DoParseProgram(CompilationInfo* info, FunctionLiteral::kNotGenerator, 0); result->set_ast_properties(factory()->visitor()->ast_properties()); + result->set_slot_processor(factory()->visitor()->slot_processor()); result->set_dont_optimize_reason( factory()->visitor()->dont_optimize_reason()); } else if (stack_overflow()) { @@ -4041,6 +4042,7 @@ FunctionLiteral* Parser::ParseFunctionLiteral( FunctionLiteral::IsGeneratorFlag generator = is_generator ? FunctionLiteral::kIsGenerator : FunctionLiteral::kNotGenerator; + DeferredFeedbackSlotProcessor* slot_processor; AstProperties ast_properties; BailoutReason dont_optimize_reason = kNoReason; // Parse function body. 
@@ -4297,6 +4299,7 @@ FunctionLiteral* Parser::ParseFunctionLiteral( CHECK_OK); } ast_properties = *factory()->visitor()->ast_properties(); + slot_processor = factory()->visitor()->slot_processor(); dont_optimize_reason = factory()->visitor()->dont_optimize_reason(); } @@ -4320,6 +4323,7 @@ FunctionLiteral* Parser::ParseFunctionLiteral( pos); function_literal->set_function_token_position(function_token_pos); function_literal->set_ast_properties(&ast_properties); + function_literal->set_slot_processor(slot_processor); function_literal->set_dont_optimize_reason(dont_optimize_reason); if (fni_ != NULL && should_infer_name) fni_->AddFunction(function_literal); diff --git a/src/runtime.cc b/src/runtime.cc index b9a09c1de..eb5552e3e 100644 --- a/src/runtime.cc +++ b/src/runtime.cc @@ -8474,7 +8474,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ClearFunctionTypeFeedback) { Code* unoptimized = function->shared()->code(); if (unoptimized->kind() == Code::FUNCTION) { unoptimized->ClearInlineCaches(); - unoptimized->ClearTypeFeedbackCells(isolate->heap()); + unoptimized->ClearTypeFeedbackInfo(isolate->heap()); } return isolate->heap()->undefined_value(); } diff --git a/src/scopes.cc b/src/scopes.cc index 97b67bd5a..650f57c61 100644 --- a/src/scopes.cc +++ b/src/scopes.cc @@ -307,7 +307,7 @@ bool Scope::Analyze(CompilationInfo* info) { } #endif - info->SetScope(scope); + info->PrepareForCompilation(scope); return true; } diff --git a/src/type-info.cc b/src/type-info.cc index 2ca04b88f..1c08e0cd1 100644 --- a/src/type-info.cc +++ b/src/type-info.cc @@ -47,6 +47,12 @@ TypeFeedbackOracle::TypeFeedbackOracle(Handle code, Zone* zone) : native_context_(native_context), zone_(zone) { + Object* raw_info = code->type_feedback_info(); + if (raw_info->IsTypeFeedbackInfo()) { + feedback_vector_ = Handle(TypeFeedbackInfo::cast(raw_info)-> + feedback_vector()); + } + BuildDictionary(code); ASSERT(dictionary_->IsDictionary()); } @@ -72,6 +78,17 @@ Handle 
TypeFeedbackOracle::GetInfo(TypeFeedbackId ast_id) { } +Handle TypeFeedbackOracle::GetInfo(int slot) { + ASSERT(slot >= 0 && slot < feedback_vector_->length()); + Object* obj = feedback_vector_->get(slot); + if (!obj->IsJSFunction() || + !CanRetainOtherContext(JSFunction::cast(obj), *native_context_)) { + return Handle(obj, isolate()); + } + return Handle::cast(isolate()->factory()->undefined_value()); +} + + bool TypeFeedbackOracle::LoadIsUninitialized(TypeFeedbackId id) { Handle maybe_code = GetInfo(id); if (maybe_code->IsCode()) { @@ -101,22 +118,22 @@ bool TypeFeedbackOracle::StoreIsKeyedPolymorphic(TypeFeedbackId ast_id) { } -bool TypeFeedbackOracle::CallIsMonomorphic(TypeFeedbackId id) { - Handle value = GetInfo(id); +bool TypeFeedbackOracle::CallIsMonomorphic(int slot) { + Handle value = GetInfo(slot); return value->IsAllocationSite() || value->IsJSFunction(); } -bool TypeFeedbackOracle::CallNewIsMonomorphic(TypeFeedbackId id) { - Handle info = GetInfo(id); +bool TypeFeedbackOracle::CallNewIsMonomorphic(int slot) { + Handle info = GetInfo(slot); return info->IsAllocationSite() || info->IsJSFunction(); } -byte TypeFeedbackOracle::ForInType(TypeFeedbackId id) { - Handle value = GetInfo(id); +byte TypeFeedbackOracle::ForInType(int feedback_vector_slot) { + Handle value = GetInfo(feedback_vector_slot); return value->IsSmi() && - Smi::cast(*value)->value() == TypeFeedbackCells::kForInFastCaseMarker + Smi::cast(*value)->value() == TypeFeedbackInfo::kForInFastCaseMarker ? 
ForInStatement::FAST_FOR_IN : ForInStatement::SLOW_FOR_IN; } @@ -134,8 +151,8 @@ KeyedAccessStoreMode TypeFeedbackOracle::GetStoreMode( } -Handle TypeFeedbackOracle::GetCallTarget(TypeFeedbackId id) { - Handle info = GetInfo(id); +Handle TypeFeedbackOracle::GetCallTarget(int slot) { + Handle info = GetInfo(slot); if (info->IsAllocationSite()) { return Handle(isolate()->global_context()->array_function()); } else { @@ -144,8 +161,8 @@ Handle TypeFeedbackOracle::GetCallTarget(TypeFeedbackId id) { } -Handle TypeFeedbackOracle::GetCallNewTarget(TypeFeedbackId id) { - Handle info = GetInfo(id); +Handle TypeFeedbackOracle::GetCallNewTarget(int slot) { + Handle info = GetInfo(slot); if (info->IsAllocationSite()) { return Handle(isolate()->global_context()->array_function()); } else { @@ -154,9 +171,8 @@ Handle TypeFeedbackOracle::GetCallNewTarget(TypeFeedbackId id) { } -Handle TypeFeedbackOracle::GetCallNewAllocationSite( - TypeFeedbackId id) { - Handle info = GetInfo(id); +Handle TypeFeedbackOracle::GetCallNewAllocationSite(int slot) { + Handle info = GetInfo(slot); if (info->IsAllocationSite()) { return Handle::cast(info); } @@ -409,7 +425,6 @@ void TypeFeedbackOracle::BuildDictionary(Handle code) { GetRelocInfos(code, &infos); CreateDictionary(code, &infos); ProcessRelocInfos(&infos); - ProcessTypeFeedbackCells(code); // Allocate handle in the parent scope. dictionary_ = scope.CloseAndEscape(dictionary_); } @@ -427,13 +442,9 @@ void TypeFeedbackOracle::GetRelocInfos(Handle code, void TypeFeedbackOracle::CreateDictionary(Handle code, ZoneList* infos) { AllowHeapAllocation allocation_allowed; - int cell_count = code->type_feedback_info()->IsTypeFeedbackInfo() - ? 
TypeFeedbackInfo::cast(code->type_feedback_info())-> - type_feedback_cells()->CellCount() - : 0; - int length = infos->length() + cell_count; byte* old_start = code->instruction_start(); - dictionary_ = isolate()->factory()->NewUnseededNumberDictionary(length); + dictionary_ = + isolate()->factory()->NewUnseededNumberDictionary(infos->length()); byte* new_start = code->instruction_start(); RelocateRelocInfos(infos, old_start, new_start); } @@ -475,26 +486,6 @@ void TypeFeedbackOracle::ProcessRelocInfos(ZoneList* infos) { } -void TypeFeedbackOracle::ProcessTypeFeedbackCells(Handle code) { - Object* raw_info = code->type_feedback_info(); - if (!raw_info->IsTypeFeedbackInfo()) return; - Handle cache( - TypeFeedbackInfo::cast(raw_info)->type_feedback_cells()); - for (int i = 0; i < cache->CellCount(); i++) { - TypeFeedbackId ast_id = cache->AstId(i); - Cell* cell = cache->GetCell(i); - Object* value = cell->value(); - if (value->IsSmi() || - value->IsAllocationSite() || - (value->IsJSFunction() && - !CanRetainOtherContext(JSFunction::cast(value), - *native_context_))) { - SetInfo(ast_id, cell); - } - } -} - - void TypeFeedbackOracle::SetInfo(TypeFeedbackId ast_id, Object* target) { ASSERT(dictionary_->FindEntry(IdToKey(ast_id)) == UnseededNumberDictionary::kNotFound); diff --git a/src/type-info.h b/src/type-info.h index 8661d5057..6de92cec0 100644 --- a/src/type-info.h +++ b/src/type-info.h @@ -50,14 +50,16 @@ class TypeFeedbackOracle: public ZoneObject { bool LoadIsUninitialized(TypeFeedbackId id); bool StoreIsUninitialized(TypeFeedbackId id); bool StoreIsKeyedPolymorphic(TypeFeedbackId id); + bool CallIsMonomorphic(int slot); bool CallIsMonomorphic(TypeFeedbackId aid); - bool CallNewIsMonomorphic(TypeFeedbackId id); + bool KeyedArrayCallIsHoley(TypeFeedbackId id); + bool CallNewIsMonomorphic(int slot); // TODO(1571) We can't use ForInStatement::ForInType as the return value due // to various cycles in our headers. 
// TODO(rossberg): once all oracle access is removed from ast.cc, it should // be possible. - byte ForInType(TypeFeedbackId id); + byte ForInType(int feedback_vector_slot); KeyedAccessStoreMode GetStoreMode(TypeFeedbackId id); @@ -84,9 +86,9 @@ class TypeFeedbackOracle: public ZoneObject { static bool CanRetainOtherContext(JSFunction* function, Context* native_context); - Handle GetCallTarget(TypeFeedbackId id); - Handle GetCallNewTarget(TypeFeedbackId id); - Handle GetCallNewAllocationSite(TypeFeedbackId id); + Handle GetCallTarget(int slot); + Handle GetCallNewTarget(int slot); + Handle GetCallNewAllocationSite(int slot); bool LoadIsBuiltin(TypeFeedbackId id, Builtins::Name builtin_id); bool LoadIsStub(TypeFeedbackId id, ICStub* stub); @@ -130,16 +132,20 @@ class TypeFeedbackOracle: public ZoneObject { byte* old_start, byte* new_start); void ProcessRelocInfos(ZoneList* infos); - void ProcessTypeFeedbackCells(Handle code); // Returns an element from the backing store. Returns undefined if // there is no information. Handle GetInfo(TypeFeedbackId id); + // Returns an element from the type feedback vector. Returns undefined + // if there is no information. + Handle GetInfo(int slot); + private: Handle native_context_; Zone* zone_; Handle dictionary_; + Handle feedback_vector_; DISALLOW_COPY_AND_ASSIGN(TypeFeedbackOracle); }; diff --git a/src/typing.cc b/src/typing.cc index c7bea40ac..b925dc610 100644 --- a/src/typing.cc +++ b/src/typing.cc @@ -323,7 +323,7 @@ void AstTyper::VisitForStatement(ForStatement* stmt) { void AstTyper::VisitForInStatement(ForInStatement* stmt) { // Collect type feedback. stmt->set_for_in_type(static_cast( - oracle()->ForInType(stmt->ForInFeedbackId()))); + oracle()->ForInType(stmt->ForInFeedbackSlot()))); RECURSE(Visit(stmt->enumerable())); store_.Forget(); // Control may transfer here via looping or 'continue'. @@ -530,8 +530,9 @@ void AstTyper::VisitCall(Call* expr) { // Collect type feedback. 
RECURSE(Visit(expr->expression())); if (!expr->expression()->IsProperty() && - oracle()->CallIsMonomorphic(expr->CallFeedbackId())) { - expr->set_target(oracle()->GetCallTarget(expr->CallFeedbackId())); + expr->HasCallFeedbackSlot() && + oracle()->CallIsMonomorphic(expr->CallFeedbackSlot())) { + expr->set_target(oracle()->GetCallTarget(expr->CallFeedbackSlot())); } ZoneList* args = expr->arguments(); diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc index 075964bce..0637bd2fb 100644 --- a/src/x64/code-stubs-x64.cc +++ b/src/x64/code-stubs-x64.cc @@ -106,8 +106,8 @@ void FastCloneShallowObjectStub::InitializeInterfaceDescriptor( void CreateAllocationSiteStub::InitializeInterfaceDescriptor( Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) { - static Register registers[] = { rbx }; - descriptor->register_param_count_ = 1; + static Register registers[] = { rbx, rdx }; + descriptor->register_param_count_ = 2; descriptor->register_params_ = registers; descriptor->deoptimization_handler_ = NULL; } @@ -2161,28 +2161,32 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) { static void GenerateRecordCallTarget(MacroAssembler* masm) { - // Cache the called function in a global property cell. Cache states + // Cache the called function in a feedback vector slot. Cache states // are uninitialized, monomorphic (indicated by a JSFunction), and // megamorphic. // rax : number of arguments to the construct function - // rbx : cache cell for call target + // rbx : Feedback vector + // rdx : slot in feedback vector (Smi) // rdi : the function to call Isolate* isolate = masm->isolate(); - Label initialize, done, miss, megamorphic, not_array_function; + Label initialize, done, miss, megamorphic, not_array_function, + done_no_smi_convert; // Load the cache state into rcx. 
- __ movp(rcx, FieldOperand(rbx, Cell::kValueOffset)); + __ SmiToInteger32(rdx, rdx); + __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size, + FixedArray::kHeaderSize)); // A monomorphic cache hit or an already megamorphic state: invoke the // function without changing the state. __ cmpq(rcx, rdi); __ j(equal, &done); - __ Cmp(rcx, TypeFeedbackCells::MegamorphicSentinel(isolate)); + __ Cmp(rcx, TypeFeedbackInfo::MegamorphicSentinel(isolate)); __ j(equal, &done); // If we came here, we need to see if we are the array function. // If we didn't have a matching function, and we didn't find the megamorph - // sentinel, then we have in the cell either some other function or an + // sentinel, then we have in the slot either some other function or an // AllocationSite. Do a map check on the object in rcx. Handle allocation_site_map = masm->isolate()->factory()->allocation_site_map(); @@ -2199,13 +2203,13 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { // A monomorphic miss (i.e, here the cache is not uninitialized) goes // megamorphic. - __ Cmp(rcx, TypeFeedbackCells::UninitializedSentinel(isolate)); + __ Cmp(rcx, TypeFeedbackInfo::UninitializedSentinel(isolate)); __ j(equal, &initialize); // MegamorphicSentinel is an immortal immovable object (undefined) so no // write-barrier is needed. __ bind(&megamorphic); - __ Move(FieldOperand(rbx, Cell::kValueOffset), - TypeFeedbackCells::MegamorphicSentinel(isolate)); + __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize), + TypeFeedbackInfo::MegamorphicSentinel(isolate)); __ jmp(&done); // An uninitialized cache is patched with the function or sentinel to @@ -2217,7 +2221,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { __ j(not_equal, ¬_array_function); // The target function is the Array constructor, - // Create an AllocationSite if we don't already have it, store it in the cell + // Create an AllocationSite if we don't already have it, store it in the slot. 
{ FrameScope scope(masm, StackFrame::INTERNAL); @@ -2225,28 +2229,45 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { __ Integer32ToSmi(rax, rax); __ push(rax); __ push(rdi); + __ Integer32ToSmi(rdx, rdx); + __ push(rdx); __ push(rbx); CreateAllocationSiteStub create_stub; __ CallStub(&create_stub); __ pop(rbx); + __ pop(rdx); __ pop(rdi); __ pop(rax); __ SmiToInteger32(rax, rax); } - __ jmp(&done); + __ jmp(&done_no_smi_convert); __ bind(¬_array_function); - __ movp(FieldOperand(rbx, Cell::kValueOffset), rdi); - // No need for a write barrier here - cells are rescanned. + __ movp(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize), + rdi); + + // We won't need rdx or rbx anymore, just save rdi + __ push(rdi); + __ push(rbx); + __ push(rdx); + __ RecordWriteArray(rbx, rdi, rdx, kDontSaveFPRegs, + EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); + __ pop(rdx); + __ pop(rbx); + __ pop(rdi); __ bind(&done); + __ Integer32ToSmi(rdx, rdx); + + __ bind(&done_no_smi_convert); } void CallFunctionStub::Generate(MacroAssembler* masm) { - // rbx : cache cell for call target + // rbx : feedback vector + // rdx : (only if rbx is not undefined) slot in feedback vector (Smi) // rdi : the function to call Isolate* isolate = masm->isolate(); Label slow, non_function, wrap, cont; @@ -2283,6 +2304,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { __ j(not_equal, &cont); } + // Load the receiver from the stack. __ movp(rax, args.GetReceiverOperand()); @@ -2306,8 +2328,11 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { // If there is a call target cache, mark it megamorphic in the // non-function case. MegamorphicSentinel is an immortal immovable // object (undefined) so no write barrier is needed. 
- __ Move(FieldOperand(rbx, Cell::kValueOffset), - TypeFeedbackCells::MegamorphicSentinel(isolate)); + __ SmiToInteger32(rdx, rdx); + __ Move(FieldOperand(rbx, rdx, times_pointer_size, + FixedArray::kHeaderSize), + TypeFeedbackInfo::MegamorphicSentinel(isolate)); + __ Integer32ToSmi(rdx, rdx); } // Check for function proxy. __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); @@ -2353,7 +2378,8 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { void CallConstructStub::Generate(MacroAssembler* masm) { // rax : number of arguments - // rbx : cache cell for call target + // rbx : feedback vector + // rdx : (only if rbx is not undefined) slot in feedback vector (Smi) // rdi : constructor function Label slow, non_function_call; @@ -4867,7 +4893,7 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm, __ TailCallStub(&stub); } else if (mode == DONT_OVERRIDE) { // We are going to create a holey array, but our kind is non-holey. - // Fix kind and retry (only if we have an allocation site in the cell). + // Fix kind and retry (only if we have an allocation site in the slot). __ incl(rdx); if (FLAG_debug_code) { @@ -4977,7 +5003,8 @@ void ArrayConstructorStub::GenerateDispatchToArrayStub( void ArrayConstructorStub::Generate(MacroAssembler* masm) { // ----------- S t a t e ------------- // -- rax : argc - // -- rbx : type info cell + // -- rbx : feedback vector (fixed array or undefined) + // -- rdx : slot index (if rbx is fixed array) // -- rdi : constructor // -- rsp[0] : return address // -- rsp[8] : last argument @@ -4999,22 +5026,29 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) { __ CmpObjectType(rcx, MAP_TYPE, rcx); __ Check(equal, kUnexpectedInitialMapForArrayFunction); - // We should either have undefined in rbx or a valid fixed array.
Label okay_here; - Handle cell_map = masm->isolate()->factory()->cell_map(); + Handle fixed_array_map = masm->isolate()->factory()->fixed_array_map(); __ Cmp(rbx, undefined_sentinel); __ j(equal, &okay_here); - __ Cmp(FieldOperand(rbx, 0), cell_map); - __ Assert(equal, kExpectedPropertyCellInRegisterRbx); + __ Cmp(FieldOperand(rbx, 0), fixed_array_map); + __ Assert(equal, kExpectedFixedArrayInRegisterRbx); + + // rdx should be a smi if we don't have undefined in rbx. + __ AssertSmi(rdx); + __ bind(&okay_here); } Label no_info; - // If the type cell is undefined, or contains anything other than an + // If the feedback slot is undefined, or contains anything other than an // AllocationSite, call an array constructor that doesn't use AllocationSites. __ Cmp(rbx, undefined_sentinel); __ j(equal, &no_info); - __ movp(rbx, FieldOperand(rbx, Cell::kValueOffset)); + __ SmiToInteger32(rdx, rdx); + __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size, + FixedArray::kHeaderSize)); + __ Integer32ToSmi(rdx, rdx); __ Cmp(FieldOperand(rbx, 0), masm->isolate()->factory()->allocation_site_map()); __ j(not_equal, &no_info); @@ -5071,7 +5105,6 @@ void InternalArrayConstructorStub::GenerateCase( void InternalArrayConstructorStub::Generate(MacroAssembler* masm) { // ----------- S t a t e ------------- // -- rax : argc - // -- rbx : type info cell // -- rdi : constructor // -- rsp[0] : return address // -- rsp[8] : last argument diff --git a/src/x64/debug-x64.cc b/src/x64/debug-x64.cc index 8ae03deae..cdb80e0fe 100644 --- a/src/x64/debug-x64.cc +++ b/src/x64/debug-x64.cc @@ -261,9 +261,11 @@ void Debug::GenerateCallFunctionStubRecordDebugBreak(MacroAssembler* masm) { // Register state for CallFunctionStub (from code-stubs-x64.cc). 
// ----------- S t a t e ------------- // -- rdi : function - // -- rbx: cache cell for call target + // -- rbx: feedback array + // -- rdx: slot in feedback array // ----------------------------------- - Generate_DebugBreakCallHelper(masm, rbx.bit() | rdi.bit(), 0, false); + Generate_DebugBreakCallHelper(masm, rbx.bit() | rdx.bit() | rdi.bit(), + 0, false); } @@ -285,10 +287,12 @@ void Debug::GenerateCallConstructStubRecordDebugBreak(MacroAssembler* masm) { // above IC call. // ----------- S t a t e ------------- // -- rax: number of arguments - // -- rbx: cache cell for call target + // -- rbx: feedback array + // -- rdx: feedback slot (smi) // ----------------------------------- // The number of arguments in rax is not smi encoded. - Generate_DebugBreakCallHelper(masm, rbx.bit() | rdi.bit(), rax.bit(), false); + Generate_DebugBreakCallHelper(masm, rbx.bit() | rdx.bit() | rdi.bit(), + rax.bit(), false); } diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc index 621eacc70..6f8989a93 100644 --- a/src/x64/full-codegen-x64.cc +++ b/src/x64/full-codegen-x64.cc @@ -118,6 +118,9 @@ void FullCodeGenerator::Generate() { CompilationInfo* info = info_; handler_table_ = isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED); + + InitializeFeedbackVector(); + profiling_counter_ = isolate()->factory()->NewCell( Handle(Smi::FromInt(FLAG_interrupt_budget), isolate())); SetFunctionPosition(function()); @@ -1035,6 +1038,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { Comment cmnt(masm_, "[ ForInStatement"); + int slot = stmt->ForInFeedbackSlot(); SetStatementPosition(stmt); Label loop, exit; @@ -1123,14 +1127,15 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { Label non_proxy; __ bind(&fixed_array); - Handle cell = isolate()->factory()->NewCell( - Handle(Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker), - isolate())); - 
RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell); - __ Move(rbx, cell); - __ Move(FieldOperand(rbx, Cell::kValueOffset), - Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)); + Handle feedback = Handle( + Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker), + isolate()); + StoreFeedbackVectorSlot(slot, feedback); + // No need for a write barrier, we are storing a Smi in the feedback vector. + __ Move(rbx, FeedbackVector()); + __ Move(FieldOperand(rbx, FixedArray::OffsetOfElementAt(slot)), + Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker)); __ Move(rbx, Smi::FromInt(1)); // Smi indicates slow check __ movp(rcx, Operand(rsp, 0 * kPointerSize)); // Get enumerated object STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); @@ -2650,15 +2655,15 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) { SetSourcePosition(expr->position()); Handle uninitialized = - TypeFeedbackCells::UninitializedSentinel(isolate()); - Handle cell = isolate()->factory()->NewCell(uninitialized); - RecordTypeFeedbackCell(expr->CallFeedbackId(), cell); - __ Move(rbx, cell); + TypeFeedbackInfo::UninitializedSentinel(isolate()); + StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized); + __ Move(rbx, FeedbackVector()); + __ Move(rdx, Smi::FromInt(expr->CallFeedbackSlot())); // Record call targets in unoptimized code. CallFunctionStub stub(arg_count, RECORD_CALL_TARGET); __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize)); - __ CallStub(&stub, expr->CallFeedbackId()); + __ CallStub(&stub); RecordJSReturnSite(expr); // Restore context register. __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); @@ -2830,10 +2835,10 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) { // Record call targets in unoptimized code, but not in the snapshot. 
Handle uninitialized = - TypeFeedbackCells::UninitializedSentinel(isolate()); - Handle cell = isolate()->factory()->NewCell(uninitialized); - RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell); - __ Move(rbx, cell); + TypeFeedbackInfo::UninitializedSentinel(isolate()); + StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized); + __ Move(rbx, FeedbackVector()); + __ Move(rdx, Smi::FromInt(expr->CallNewFeedbackSlot())); CallConstructStub stub(RECORD_CALL_TARGET); __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL); diff --git a/test/cctest/test-heap.cc b/test/cctest/test-heap.cc index 3e8d93b3a..eeeaf016f 100644 --- a/test/cctest/test-heap.cc +++ b/test/cctest/test-heap.cc @@ -2826,7 +2826,7 @@ TEST(Regress2211) { } -TEST(IncrementalMarkingClearsTypeFeedbackCells) { +TEST(IncrementalMarkingClearsTypeFeedbackInfo) { if (i::FLAG_always_opt) return; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); @@ -2849,23 +2849,25 @@ TEST(IncrementalMarkingClearsTypeFeedbackCells) { CcTest::global()->Set(v8_str("fun1"), fun1); CcTest::global()->Set(v8_str("fun2"), fun2); CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);"); + Handle f = v8::Utils::OpenHandle( *v8::Handle::Cast( CcTest::global()->Get(v8_str("f")))); - Handle cells(TypeFeedbackInfo::cast( - f->shared()->code()->type_feedback_info())->type_feedback_cells()); - CHECK_EQ(2, cells->CellCount()); - CHECK(cells->GetCell(0)->value()->IsJSFunction()); - CHECK(cells->GetCell(1)->value()->IsJSFunction()); + Handle feedback_vector(TypeFeedbackInfo::cast( + f->shared()->code()->type_feedback_info())->feedback_vector()); + + CHECK_EQ(2, feedback_vector->length()); + CHECK(feedback_vector->get(0)->IsJSFunction()); + CHECK(feedback_vector->get(1)->IsJSFunction()); SimulateIncrementalMarking(); CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags); - CHECK_EQ(2, cells->CellCount()); - CHECK(cells->GetCell(0)->value()->IsTheHole()); - CHECK(cells->GetCell(1)->value()->IsTheHole()); + 
CHECK_EQ(2, feedback_vector->length()); + CHECK(feedback_vector->get(0)->IsTheHole()); + CHECK(feedback_vector->get(1)->IsTheHole()); } diff --git a/tools/gyp/v8.gyp b/tools/gyp/v8.gyp index d78853789..0b48d86ae 100644 --- a/tools/gyp/v8.gyp +++ b/tools/gyp/v8.gyp @@ -343,6 +343,7 @@ '../../src/factory.h', '../../src/fast-dtoa.cc', '../../src/fast-dtoa.h', + '../../src/feedback-slots.h', '../../src/fixed-dtoa.cc', '../../src/fixed-dtoa.h', '../../src/flag-definitions.h', -- 2.34.1