From f6852ee2c40408d86358e139310215b032b9f4f9 Mon Sep 17 00:00:00 2001
From: "palfia@homejinni.com"
Date: Tue, 11 Feb 2014 02:48:57 +0000
Subject: [PATCH] MIPS: Adding a type vector to replace type cells.

Port r19244 (08e0afd9)

BUG=
R=plind44@gmail.com

Review URL: https://codereview.chromium.org/159393002

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@19246 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
---
 src/mips/code-stubs-mips.cc   | 117 +++++++++++++++++++++++++-----------------
 src/mips/debug-mips.cc        |  10 ++--
 src/mips/full-codegen-mips.cc |  36 +++++++------
 3 files changed, 97 insertions(+), 66 deletions(-)

diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc
index e38f181..1d05dce 100644
--- a/src/mips/code-stubs-mips.cc
+++ b/src/mips/code-stubs-mips.cc
@@ -106,8 +106,8 @@ void FastCloneShallowObjectStub::InitializeInterfaceDescriptor(
 void CreateAllocationSiteStub::InitializeInterfaceDescriptor(
     Isolate* isolate,
     CodeStubInterfaceDescriptor* descriptor) {
-  static Register registers[] = { a2 };
-  descriptor->register_param_count_ = 1;
+  static Register registers[] = { a2, a3 };
+  descriptor->register_param_count_ = 2;
   descriptor->register_params_ = registers;
   descriptor->deoptimization_handler_ = NULL;
 }
@@ -3152,37 +3152,40 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
 
 
 static void GenerateRecordCallTarget(MacroAssembler* masm) {
-  // Cache the called function in a global property cell. Cache states
+  // Cache the called function in a feedback vector slot. Cache states
   // are uninitialized, monomorphic (indicated by a JSFunction), and
   // megamorphic.
   // a0 : number of arguments to the construct function
   // a1 : the function to call
-  // a2 : cache cell for call target
+  // a2 : Feedback vector
+  // a3 : slot in feedback vector (Smi)
   Label initialize, done, miss, megamorphic, not_array_function;
 
-  ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()),
+  ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
             masm->isolate()->heap()->undefined_value());
-  ASSERT_EQ(*TypeFeedbackCells::UninitializedSentinel(masm->isolate()),
+  ASSERT_EQ(*TypeFeedbackInfo::UninitializedSentinel(masm->isolate()),
             masm->isolate()->heap()->the_hole_value());
 
-  // Load the cache state into a3.
-  __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset));
+  // Load the cache state into t0.
+  __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
+  __ Addu(t0, a2, Operand(t0));
+  __ lw(t0, FieldMemOperand(t0, FixedArray::kHeaderSize));
 
   // A monomorphic cache hit or an already megamorphic state: invoke the
   // function without changing the state.
-  __ Branch(&done, eq, a3, Operand(a1));
+  __ Branch(&done, eq, t0, Operand(a1));
 
   // If we came here, we need to see if we are the array function.
   // If we didn't have a matching function, and we didn't find the megamorph
-  // sentinel, then we have in the cell either some other function or an
+  // sentinel, then we have in the slot either some other function or an
   // AllocationSite. Do a map check on the object in a3.
-  __ lw(t1, FieldMemOperand(a3, 0));
+  __ lw(t1, FieldMemOperand(t0, 0));
   __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
   __ Branch(&miss, ne, t1, Operand(at));
 
   // Make sure the function is the Array() function
-  __ LoadArrayFunction(a3);
-  __ Branch(&megamorphic, ne, a1, Operand(a3));
+  __ LoadArrayFunction(t0);
+  __ Branch(&megamorphic, ne, a1, Operand(t0));
   __ jmp(&done);
 
   __ bind(&miss);
@@ -3190,29 +3193,32 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
   // A monomorphic miss (i.e, here the cache is not uninitialized) goes
   // megamorphic.
   __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
-  __ Branch(&initialize, eq, a3, Operand(at));
+  __ Branch(&initialize, eq, t0, Operand(at));
   // MegamorphicSentinel is an immortal immovable object (undefined) so no
   // write-barrier is needed.
   __ bind(&megamorphic);
+  __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
+  __ Addu(t0, a2, Operand(t0));
   __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
-  __ sw(at, FieldMemOperand(a2, Cell::kValueOffset));
+  __ sw(at, FieldMemOperand(t0, FixedArray::kHeaderSize));
   __ jmp(&done);
 
   // An uninitialized cache is patched with the function or sentinel to
   // indicate the ElementsKind if function is the Array constructor.
   __ bind(&initialize);
   // Make sure the function is the Array() function
-  __ LoadArrayFunction(a3);
-  __ Branch(&not_array_function, ne, a1, Operand(a3));
+  __ LoadArrayFunction(t0);
+  __ Branch(&not_array_function, ne, a1, Operand(t0));
 
   // The target function is the Array constructor.
-  // Create an AllocationSite if we don't already have it, store it in the cell.
+  // Create an AllocationSite if we don't already have it, store it in the slot.
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
     const RegList kSavedRegs =
         1 << 4  |  // a0
         1 << 5  |  // a1
-        1 << 6;    // a2
+        1 << 6  |  // a2
+        1 << 7;    // a3
 
     // Arguments register must be smi-tagged to call out.
     __ SmiTag(a0);
@@ -3227,8 +3233,16 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
   __ Branch(&done);
 
   __ bind(&not_array_function);
-  __ sw(a1, FieldMemOperand(a2, Cell::kValueOffset));
-  // No need for a write barrier here - cells are rescanned.
+
+  __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
+  __ Addu(t0, a2, Operand(t0));
+  __ Addu(t0, t0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+  __ sw(a1, MemOperand(t0, 0));
+
+  __ Push(t0, a2, a1);
+  __ RecordWrite(a2, t0, a1, kRAHasNotBeenSaved, kDontSaveFPRegs,
+                 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
+  __ Pop(t0, a2, a1);
 
   __ bind(&done);
 }
@@ -3236,7 +3250,8 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
 
 void CallFunctionStub::Generate(MacroAssembler* masm) {
   // a1 : the function to call
-  // a2 : cache cell for call target
+  // a2 : feedback vector
+  // a3 : (only if a2 is not undefined) slot in feedback vector (Smi)
   Label slow, non_function, wrap, cont;
 
   if (NeedsChecks()) {
@@ -3245,8 +3260,8 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
     __ JumpIfSmi(a1, &non_function);
 
     // Goto slow case if we do not have a function.
-    __ GetObjectType(a1, a3, a3);
-    __ Branch(&slow, ne, a3, Operand(JS_FUNCTION_TYPE));
+    __ GetObjectType(a1, t0, t0);
+    __ Branch(&slow, ne, t0, Operand(JS_FUNCTION_TYPE));
 
     if (RecordCallTarget()) {
       GenerateRecordCallTarget(masm);
@@ -3291,13 +3306,15 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
       // If there is a call target cache, mark it megamorphic in the
       // non-function case. MegamorphicSentinel is an immortal immovable
      // object (undefined) so no write barrier is needed.
-      ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()),
+      ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
                 masm->isolate()->heap()->undefined_value());
+      __ sll(t1, a3, kPointerSizeLog2 - kSmiTagSize);
+      __ Addu(t1, a2, Operand(t1));
       __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
-      __ sw(at, FieldMemOperand(a2, Cell::kValueOffset));
+      __ sw(at, FieldMemOperand(t1, FixedArray::kHeaderSize));
     }
     // Check for function proxy.
-    __ Branch(&non_function, ne, a3, Operand(JS_FUNCTION_PROXY_TYPE));
+    __ Branch(&non_function, ne, t0, Operand(JS_FUNCTION_PROXY_TYPE));
     __ push(a1);  // Put proxy as additional argument.
     __ li(a0, Operand(argc_ + 1, RelocInfo::NONE32));
     __ li(a2, Operand(0, RelocInfo::NONE32));
@@ -3337,21 +3354,22 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
 void CallConstructStub::Generate(MacroAssembler* masm) {
   // a0 : number of arguments
   // a1 : the function to call
-  // a2 : cache cell for call target
+  // a2 : feedback vector
+  // a3 : (only if a2 is not undefined) slot in feedback vector (Smi)
   Label slow, non_function_call;
 
   // Check that the function is not a smi.
   __ JumpIfSmi(a1, &non_function_call);
   // Check that the function is a JSFunction.
-  __ GetObjectType(a1, a3, a3);
-  __ Branch(&slow, ne, a3, Operand(JS_FUNCTION_TYPE));
+  __ GetObjectType(a1, t0, t0);
+  __ Branch(&slow, ne, t0, Operand(JS_FUNCTION_TYPE));
 
   if (RecordCallTarget()) {
     GenerateRecordCallTarget(masm);
   }
 
   // Jump to the function-specific construct stub.
-  Register jmp_reg = a3;
+  Register jmp_reg = t0;
   __ lw(jmp_reg, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
   __ lw(jmp_reg, FieldMemOperand(jmp_reg,
                                  SharedFunctionInfo::kConstructStubOffset));
@@ -3360,10 +3378,10 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
 
   // a0: number of arguments
   // a1: called object
-  // a3: object type
+  // t0: object type
   Label do_call;
   __ bind(&slow);
-  __ Branch(&non_function_call, ne, a3, Operand(JS_FUNCTION_PROXY_TYPE));
+  __ Branch(&non_function_call, ne, t0, Operand(JS_FUNCTION_PROXY_TYPE));
   __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
   __ jmp(&do_call);
 
@@ -5361,7 +5379,7 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
     __ TailCallStub(&stub);
   } else if (mode == DONT_OVERRIDE) {
     // We are going to create a holey array, but our kind is non-holey.
-    // Fix kind and retry (only if we have an allocation site in the cell).
+    // Fix kind and retry (only if we have an allocation site in the slot).
     __ Addu(a3, a3, Operand(1));
 
     if (FLAG_debug_code) {
@@ -5468,7 +5486,8 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- a0 : argc (only if argument_count_ == ANY)
   //  -- a1 : constructor
-  //  -- a2 : type info cell
+  //  -- a2 : feedback vector (fixed array or undefined)
+  //  -- a3 : slot index (if a2 is fixed array)
   //  -- sp[0] : return address
   //  -- sp[4] : last argument
   // -----------------------------------
@@ -5477,23 +5496,27 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
     // builtin Array functions which always have maps.
 
     // Initial map for the builtin Array function should be a map.
-    __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
+    __ lw(t0, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
     // Will both indicate a NULL and a Smi.
-    __ SmiTst(a3, at);
+    __ SmiTst(t0, at);
     __ Assert(ne, kUnexpectedInitialMapForArrayFunction,
         at, Operand(zero_reg));
-    __ GetObjectType(a3, a3, t0);
+    __ GetObjectType(t0, t0, t1);
     __ Assert(eq, kUnexpectedInitialMapForArrayFunction,
-        t0, Operand(MAP_TYPE));
+        t1, Operand(MAP_TYPE));
 
-    // We should either have undefined in a2 or a valid cell.
+    // We should either have undefined in a2 or a valid fixed array.
     Label okay_here;
-    Handle<Map> cell_map = masm->isolate()->factory()->cell_map();
+    Handle<Map> fixed_array_map = masm->isolate()->factory()->fixed_array_map();
     __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
     __ Branch(&okay_here, eq, a2, Operand(at));
-    __ lw(a3, FieldMemOperand(a2, 0));
-    __ Assert(eq, kExpectedPropertyCellInRegisterA2,
-        a3, Operand(cell_map));
+    __ lw(t0, FieldMemOperand(a2, 0));
+    __ Assert(eq, kExpectedFixedArrayInRegisterR2,
+        t0, Operand(fixed_array_map));
+
+    // a3 should be a smi if we don't have undefined in a2
+    __ AssertSmi(a3);
+
     __ bind(&okay_here);
   }
 
@@ -5501,9 +5524,11 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
   // Get the elements kind and case on that.
   __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
   __ Branch(&no_info, eq, a2, Operand(at));
-  __ lw(a2, FieldMemOperand(a2, Cell::kValueOffset));
+  __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
+  __ Addu(a2, a2, Operand(t0));
+  __ lw(a2, FieldMemOperand(a2, FixedArray::kHeaderSize));
 
-  // If the type cell is undefined, or contains anything other than an
+  // If the feedback vector is undefined, or contains anything other than an
   // AllocationSite, call an array constructor that doesn't use AllocationSites.
   __ lw(t0, FieldMemOperand(a2, 0));
   __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
diff --git a/src/mips/debug-mips.cc b/src/mips/debug-mips.cc
index 1535231..b9bf69d 100644
--- a/src/mips/debug-mips.cc
+++ b/src/mips/debug-mips.cc
@@ -274,9 +274,10 @@ void Debug::GenerateCallFunctionStubRecordDebugBreak(MacroAssembler* masm) {
   // Register state for CallFunctionStub (from code-stubs-mips.cc).
   // ----------- S t a t e -------------
   //  -- a1 : function
-  //  -- a2 : cache cell for call target
+  //  -- a2 : feedback array
+  //  -- a3 : slot in feedback array
   // -----------------------------------
-  Generate_DebugBreakCallHelper(masm, a1.bit() | a2.bit(), 0);
+  Generate_DebugBreakCallHelper(masm, a1.bit() | a2.bit() | a3.bit(), 0);
 }
 
 
@@ -295,9 +296,10 @@ void Debug::GenerateCallConstructStubRecordDebugBreak(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- a0 : number of arguments (not smi)
   //  -- a1 : constructor function
-  //  -- a2 : cache cell for call target
+  //  -- a2 : feedback array
+  //  -- a3 : feedback slot (smi)
   // -----------------------------------
-  Generate_DebugBreakCallHelper(masm, a1.bit() | a2.bit(), a0.bit());
+  Generate_DebugBreakCallHelper(masm, a1.bit() | a2.bit() | a3.bit(), a0.bit());
 }
 
 
diff --git a/src/mips/full-codegen-mips.cc b/src/mips/full-codegen-mips.cc
index 18ee02d..56c6dee 100644
--- a/src/mips/full-codegen-mips.cc
+++ b/src/mips/full-codegen-mips.cc
@@ -138,6 +138,9 @@ void FullCodeGenerator::Generate() {
   CompilationInfo* info = info_;
   handler_table_ =
       isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
+
+  InitializeFeedbackVector();
+
   profiling_counter_ = isolate()->factory()->NewCell(
       Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
   SetFunctionPosition(function());
@@ -1087,6 +1090,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
 
 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   Comment cmnt(masm_, "[ ForInStatement");
+  int slot = stmt->ForInFeedbackSlot();
   SetStatementPosition(stmt);
 
   Label loop, exit;
@@ -1172,13 +1176,13 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   Label non_proxy;
   __ bind(&fixed_array);
 
-  Handle<Cell> cell = isolate()->factory()->NewCell(
-      Handle<Object>(Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker),
-                     isolate()));
-  RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
-  __ li(a1, cell);
-  __ li(a2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
-  __ sw(a2, FieldMemOperand(a1, Cell::kValueOffset));
+  Handle<Object> feedback = Handle<Object>(
+      Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker),
+      isolate());
+  StoreFeedbackVectorSlot(slot, feedback);
+  __ li(a1, FeedbackVector());
+  __ li(a2, Operand(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker)));
+  __ sw(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(slot)));
 
   __ li(a1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
   __ lw(a2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object
@@ -2741,15 +2745,15 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
   SetSourcePosition(expr->position());
 
   Handle<Object> uninitialized =
-      TypeFeedbackCells::UninitializedSentinel(isolate());
-  Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
-  RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
-  __ li(a2, Operand(cell));
+      TypeFeedbackInfo::UninitializedSentinel(isolate());
+  StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
+  __ li(a2, FeedbackVector());
+  __ li(a3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
 
   // Record call targets in unoptimized code.
   CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
   __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
-  __ CallStub(&stub, expr->CallFeedbackId());
+  __ CallStub(&stub);
   RecordJSReturnSite(expr);
   // Restore context register.
   __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2928,10 +2932,10 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
 
   // Record call targets in unoptimized code.
   Handle<Object> uninitialized =
-      TypeFeedbackCells::UninitializedSentinel(isolate());
-  Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
-  RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
-  __ li(a2, Operand(cell));
+      TypeFeedbackInfo::UninitializedSentinel(isolate());
+  StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
+  __ li(a2, FeedbackVector());
+  __ li(a3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
 
   CallConstructStub stub(RECORD_CALL_TARGET);
   __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
-- 
2.7.4
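
Every site above that used to load Cell::kValueOffset now computes the slot address inside the feedback vector by hand: shift the Smi-encoded slot index in a3 into a byte offset, add it to the tagged vector pointer in a2, and access the word at FixedArray::kHeaderSize. The following is a minimal C++ sketch of that address arithmetic, not part of the patch; the constant values mirror the usual 32-bit V8 layout (1-bit Smi tag, 4-byte pointers, heap-object tag of 1) and the function name FeedbackSlotAddress is illustrative only.

#include <cstdint>

// Assumed 32-bit MIPS tagging constants (sketch only, not taken from the patch).
const int kSmiTagSize = 1;            // Smis carry a one-bit tag in the low bit.
const int kPointerSizeLog2 = 2;       // 4-byte pointers.
const int kHeapObjectTag = 1;         // Heap pointers are tagged with 1.
const int kFixedArrayHeaderSize = 8;  // Map word + length word precede element 0.

// Mirrors the emitted sequence
//   sll  t0, a3, kPointerSizeLog2 - kSmiTagSize
//   addu t0, a2, t0
//   lw   t0, FieldMemOperand(t0, FixedArray::kHeaderSize)
// where a2 holds the tagged feedback vector and a3 the Smi-encoded slot index.
uint32_t FeedbackSlotAddress(uint32_t vector_tagged, uint32_t slot_smi) {
  // A Smi is the index already shifted left by one, so shifting it by
  // (kPointerSizeLog2 - kSmiTagSize) yields the byte offset of the element.
  uint32_t byte_offset = slot_smi << (kPointerSizeLog2 - kSmiTagSize);
  // FieldMemOperand(base, offset) means base + offset - kHeapObjectTag.
  return vector_tagged + byte_offset + kFixedArrayHeaderSize - kHeapObjectTag;
}

This also explains the RecordWrite added in GenerateRecordCallTarget: the old code could skip the write barrier because cells are rescanned, whereas a feedback vector slot is an ordinary FixedArray element, so storing a function into it needs the barrier.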