From: ishell@chromium.org
Date: Thu, 22 May 2014 17:13:28 +0000 (+0000)
Subject: Revert "Reland r21346 "Inobject slack tracking is done on a per-closure basis instead...
X-Git-Tag: upstream/4.7.83~9016
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=9f3183f76b80e81993b8a481876b20f1247cef57;p=platform%2Fupstream%2Fv8.git

Revert "Reland r21346 "Inobject slack tracking is done on a per-closure basis instead of per-shared info basis.""

This reverts r21442.

TBR=jkummerow@chromium.org

Review URL: https://codereview.chromium.org/292433016

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@21444 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
---

diff --git a/src/arm/builtins-arm.cc b/src/arm/builtins-arm.cc
index 29e1d19..2e5cc73 100644
--- a/src/arm/builtins-arm.cc
+++ b/src/arm/builtins-arm.cc
@@ -313,6 +313,7 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
 
 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                            bool is_api_function,
+                                           bool count_constructions,
                                            bool create_memento) {
   // ----------- S t a t e -------------
   //  -- r0     : number of arguments
@@ -322,9 +323,15 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
   //  -- sp[...]: constructor arguments
   // -----------------------------------
 
+  // Should never count constructions for api objects.
+  ASSERT(!is_api_function || !count_constructions);
+
   // Should never create mementos for api functions.
   ASSERT(!is_api_function || !create_memento);
 
+  // Should never create mementos before slack tracking is finished.
+  ASSERT(!count_constructions || !create_memento);
+
   Isolate* isolate = masm->isolate();
 
   // Enter a construct frame.
@@ -368,23 +375,21 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
       __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
       __ b(eq, &rt_call);
 
-      if (!is_api_function) {
+      if (count_constructions) {
         Label allocate;
-        MemOperand bit_field3 = FieldMemOperand(r2, Map::kBitField3Offset);
-        // Check if slack tracking is enabled.
-        __ ldr(r4, bit_field3);
-        __ DecodeField<Map::ConstructionCount>(r3, r4);
-        __ cmp(r3, Operand(JSFunction::kNoSlackTracking));
-        __ b(eq, &allocate);
         // Decrease generous allocation count.
-        __ sub(r4, r4, Operand(1 << Map::ConstructionCount::kShift));
-        __ str(r4, bit_field3);
-        __ cmp(r3, Operand(JSFunction::kFinishSlackTracking));
+        __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
+        MemOperand constructor_count =
+            FieldMemOperand(r3, SharedFunctionInfo::kConstructionCountOffset);
+        __ ldrb(r4, constructor_count);
+        __ sub(r4, r4, Operand(1), SetCC);
+        __ strb(r4, constructor_count);
        __ b(ne, &allocate);
 
        __ push(r1);
-
        __ Push(r2, r1);  // r1 = constructor
+        // The call will replace the stub, so the countdown is only done once.
        __ CallRuntime(Runtime::kHiddenFinalizeInstanceSize, 1);
 
        __ pop(r2);
@@ -425,18 +430,9 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
       // r4: JSObject (not tagged)
       // r5: First in-object property of JSObject (not tagged)
       ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);
-      __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
-
-      if (!is_api_function) {
-        Label no_inobject_slack_tracking;
-
-        // Check if slack tracking is enabled.
-        __ ldr(ip, FieldMemOperand(r2, Map::kBitField3Offset));
-        __ DecodeField<Map::ConstructionCount>(ip);
-        __ cmp(ip, Operand(JSFunction::kNoSlackTracking));
-        __ b(eq, &no_inobject_slack_tracking);
-        // Allocate object with a slack.
+      if (count_constructions) {
+        __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
         __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
         __ Ubfx(r0, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
                 kBitsPerByte);
@@ -450,14 +446,12 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
         __ InitializeFieldsWithFiller(r5, r0, r6);
         // To allow for truncation.
         __ LoadRoot(r6, Heap::kOnePointerFillerMapRootIndex);
-        // Fill the remaining fields with one pointer filler map.
-
-        __ bind(&no_inobject_slack_tracking);
-      }
-
-      if (create_memento) {
-        __ sub(ip, r3, Operand(AllocationMemento::kSize / kPointerSize));
-        __ add(r0, r4, Operand(ip, LSL, kPointerSizeLog2));  // End of object.
+        __ add(r0, r4, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
+        __ InitializeFieldsWithFiller(r5, r0, r6);
+      } else if (create_memento) {
+        __ sub(r6, r3, Operand(AllocationMemento::kSize / kPointerSize));
+        __ add(r0, r4, Operand(r6, LSL, kPointerSizeLog2));  // End of object.
+        __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
         __ InitializeFieldsWithFiller(r5, r0, r6);
 
         // Fill in memento fields.
@@ -470,6 +464,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
         ASSERT_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
         __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
       } else {
+        __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
         __ add(r0, r4, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
         __ InitializeFieldsWithFiller(r5, r0, r6);
       }
@@ -660,7 +655,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
   }
 
   // Store offset of return address for deoptimizer.
-  if (!is_api_function) {
+  if (!is_api_function && !count_constructions) {
     masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
   }
 
@@ -712,13 +707,18 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
 }
 
 
+void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
+  Generate_JSConstructStubHelper(masm, false, true, false);
+}
+
+
 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
-  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
+  Generate_JSConstructStubHelper(masm, false, false, FLAG_pretenuring_call_new);
 }
 
 
 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
-  Generate_JSConstructStubHelper(masm, true, false);
+  Generate_JSConstructStubHelper(masm, true, false, false);
 }
diff --git a/src/arm/macro-assembler-arm.h b/src/arm/macro-assembler-arm.h
index cd36062..98d2087 100644
--- a/src/arm/macro-assembler-arm.h
+++ b/src/arm/macro-assembler-arm.h
@@ -1356,19 +1356,11 @@ class MacroAssembler: public Assembler {
   void NumberOfOwnDescriptors(Register dst, Register map);
 
   template<typename Field>
-  void DecodeField(Register dst, Register src) {
+  void DecodeField(Register reg) {
     static const int shift = Field::kShift;
     static const int mask = Field::kMask >> shift;
-    static const int size = Field::kSize;
-    mov(dst, Operand(src, LSR, shift));
-    if (shift + size != 32) {
-      and_(dst, dst, Operand(mask));
-    }
-  }
-
-  template<typename Field>
-  void DecodeField(Register reg) {
-    DecodeField<Field>(reg, reg);
+    mov(reg, Operand(reg, LSR, shift));
+    and_(reg, reg, Operand(mask));
   }
 
   // Activation support.
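
[Editorial sketch, not part of the patch: the DecodeField hunk above is easier to follow next to the C++ BitField arithmetic it mirrors. The following is a minimal, self-contained C++ sketch; the <int, 28, 4> layout and the names below are illustrative assumptions, not V8 source. It shows that decode is a logical shift right followed by a mask, and why the mask is redundant when kShift + kSize == 32, the special case the removed two-register DecodeField exploited.]

// bitfield_sketch.cc - standalone illustration, assumptions noted above.
#include <cassert>
#include <cstdint>

template <class T, int kShift, int kSize>
struct BitField {
  static const uint32_t kMask = ((1u << kSize) - 1u) << kShift;
  // Place |value| into the field's bit range.
  static uint32_t encode(T value) {
    return static_cast<uint32_t>(value) << kShift;
  }
  // Replace the field inside an existing bit_field3-style word.
  static uint32_t update(uint32_t previous, T value) {
    return (previous & ~kMask) | encode(value);
  }
  // What DecodeField emits as machine code: shift right, then mask.
  // When kShift + kSize == 32 the mask is a no-op after the shift.
  static T decode(uint32_t word) {
    return static_cast<T>((word & kMask) >> kShift);
  }
};

// Hypothetical stand-in for Map::ConstructionCount in the top bits.
typedef BitField<int, 28, 4> ConstructionCount;

int main() {
  uint32_t bit_field3 = 0;
  bit_field3 = ConstructionCount::update(bit_field3, 7);
  assert(ConstructionCount::decode(bit_field3) == 7);
  // Decrement the counter in place, as the removed construct stub did:
  bit_field3 -= 1u << 28;  // 1 << kShift
  assert(ConstructionCount::decode(bit_field3) == 6);
  return 0;
}
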
diff --git a/src/arm64/builtins-arm64.cc b/src/arm64/builtins-arm64.cc
index 8073542..fec5fef 100644
--- a/src/arm64/builtins-arm64.cc
+++ b/src/arm64/builtins-arm64.cc
@@ -304,6 +304,7 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
 
 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                            bool is_api_function,
+                                           bool count_constructions,
                                            bool create_memento) {
   // ----------- S t a t e -------------
   //  -- x0     : number of arguments
@@ -314,8 +315,12 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
   // -----------------------------------
   ASM_LOCATION("Builtins::Generate_JSConstructStubHelper");
 
+  // Should never count constructions for api objects.
+  ASSERT(!is_api_function || !count_constructions);
   // Should never create mementos for api functions.
   ASSERT(!is_api_function || !create_memento);
+  // Should never create mementos before slack tracking is finished.
+  ASSERT(!count_constructions || !create_memento);
 
   Isolate* isolate = masm->isolate();
 
@@ -361,28 +366,24 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
     __ CompareInstanceType(init_map, x10, JS_FUNCTION_TYPE);
     __ B(eq, &rt_call);
 
-    Register constructon_count = x14;
-    if (!is_api_function) {
+    if (count_constructions) {
       Label allocate;
-      MemOperand bit_field3 =
-          FieldMemOperand(init_map, Map::kBitField3Offset);
-      // Check if slack tracking is enabled.
-      __ Ldr(x4, bit_field3);
-      __ DecodeField<Map::ConstructionCount>(constructon_count, x4);
-      __ Cmp(constructon_count, Operand(JSFunction::kNoSlackTracking));
-      __ B(eq, &allocate);
       // Decrease generous allocation count.
-      __ Subs(x4, x4, Operand(1 << Map::ConstructionCount::kShift));
-      __ Str(x4, bit_field3);
-      __ Cmp(constructon_count, Operand(JSFunction::kFinishSlackTracking));
+      __ Ldr(x3, FieldMemOperand(constructor,
+                                 JSFunction::kSharedFunctionInfoOffset));
+      MemOperand constructor_count =
+          FieldMemOperand(x3, SharedFunctionInfo::kConstructionCountOffset);
+      __ Ldrb(x4, constructor_count);
+      __ Subs(x4, x4, 1);
+      __ Strb(x4, constructor_count);
       __ B(ne, &allocate);
 
       // Push the constructor and map to the stack, and the constructor again
       // as argument to the runtime call.
       __ Push(constructor, init_map, constructor);
+      // The call will replace the stub, so the countdown is only done once.
       __ CallRuntime(Runtime::kHiddenFinalizeInstanceSize, 1);
       __ Pop(init_map, constructor);
-      __ Mov(constructon_count, Operand(JSFunction::kNoSlackTracking));
       __ Bind(&allocate);
     }
 
@@ -412,8 +413,8 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
     __ Add(first_prop, new_obj, JSObject::kHeaderSize);
 
     // Fill all of the in-object properties with the appropriate filler.
-    Register filler = x7;
-    __ LoadRoot(filler, Heap::kUndefinedValueRootIndex);
+    Register undef = x7;
+    __ LoadRoot(undef, Heap::kUndefinedValueRootIndex);
 
     // Obtain number of pre-allocated property fields and in-object
     // properties.
@@ -431,38 +432,36 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
     Register prop_fields = x6;
     __ Sub(prop_fields, obj_size, JSObject::kHeaderSize / kPointerSize);
 
-    if (!is_api_function) {
-      Label no_inobject_slack_tracking;
-
-      // Check if slack tracking is enabled.
-      __ Cmp(constructon_count, Operand(JSFunction::kNoSlackTracking));
-      __ B(eq, &no_inobject_slack_tracking);
-      constructon_count = NoReg;
-
+    if (count_constructions) {
       // Fill the pre-allocated fields with undef.
-      __ FillFields(first_prop, prealloc_fields, filler);
+      __ FillFields(first_prop, prealloc_fields, undef);
 
-      // Update first_prop register to be the offset of the first field after
+      // Register first_non_prealloc is the offset of the first field after
       // pre-allocated fields.
-      __ Add(first_prop, first_prop,
+      Register first_non_prealloc = x12;
+      __ Add(first_non_prealloc, first_prop,
              Operand(prealloc_fields, LSL, kPointerSizeLog2));
+      first_prop = NoReg;
+
       if (FLAG_debug_code) {
-        Register obj_end = x14;
+        Register obj_end = x5;
         __ Add(obj_end, new_obj, Operand(obj_size, LSL, kPointerSizeLog2));
-        __ Cmp(first_prop, obj_end);
+        __ Cmp(first_non_prealloc, obj_end);
         __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
       }
 
       // Fill the remaining fields with one pointer filler map.
-      __ LoadRoot(filler, Heap::kOnePointerFillerMapRootIndex);
-      __ Sub(prop_fields, prop_fields, prealloc_fields);
-
-      __ bind(&no_inobject_slack_tracking);
-    }
-    if (create_memento) {
+      Register one_pointer_filler = x5;
+      Register non_prealloc_fields = x6;
+      __ LoadRoot(one_pointer_filler, Heap::kOnePointerFillerMapRootIndex);
+      __ Sub(non_prealloc_fields, prop_fields, prealloc_fields);
+      __ FillFields(first_non_prealloc, non_prealloc_fields,
+                    one_pointer_filler);
+      prop_fields = NoReg;
+    } else if (create_memento) {
       // Fill the pre-allocated fields with undef.
-      __ FillFields(first_prop, prop_fields, filler);
+      __ FillFields(first_prop, prop_fields, undef);
       __ Add(first_prop, new_obj, Operand(obj_size, LSL, kPointerSizeLog2));
       __ LoadRoot(x14, Heap::kAllocationMementoMapRootIndex);
       ASSERT_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
@@ -474,7 +473,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
       first_prop = NoReg;
     } else {
       // Fill all of the property fields with undef.
-      __ FillFields(first_prop, prop_fields, filler);
+      __ FillFields(first_prop, prop_fields, undef);
       first_prop = NoReg;
       prop_fields = NoReg;
     }
@@ -517,7 +516,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
     // Initialize the fields to undefined.
     Register elements = x10;
     __ Add(elements, new_array, FixedArray::kHeaderSize);
-    __ FillFields(elements, element_count, filler);
+    __ FillFields(elements, element_count, undef);
 
     // Store the initialized FixedArray into the properties field of the
     // JSObject.
@@ -625,7 +624,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
   }
 
   // Store offset of return address for deoptimizer.
-  if (!is_api_function) {
+  if (!is_api_function && !count_constructions) {
     masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
   }
 
@@ -676,13 +675,18 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
 }
 
 
+void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
+  Generate_JSConstructStubHelper(masm, false, true, false);
+}
+
+
 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
-  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
+  Generate_JSConstructStubHelper(masm, false, false, FLAG_pretenuring_call_new);
 }
 
 
 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
-  Generate_JSConstructStubHelper(masm, true, false);
+  Generate_JSConstructStubHelper(masm, true, false, false);
 }
diff --git a/src/arm64/macro-assembler-arm64.h b/src/arm64/macro-assembler-arm64.h
index 48f2e77..1f3aa14 100644
--- a/src/arm64/macro-assembler-arm64.h
+++ b/src/arm64/macro-assembler-arm64.h
@@ -842,15 +842,10 @@ class MacroAssembler : public Assembler {
   void NumberOfOwnDescriptors(Register dst, Register map);
 
   template<typename Field>
-  void DecodeField(Register dst, Register src) {
+  void DecodeField(Register reg) {
     static const uint64_t shift = Field::kShift;
     static const uint64_t setbits = CountSetBits(Field::kMask, 32);
-    Ubfx(dst, src, shift, setbits);
-  }
-
-  template<typename Field>
-  void DecodeField(Register reg) {
-    DecodeField<Field>(reg, reg);
+    Ubfx(reg, reg, shift, setbits);
   }
 
   // ---- SMI and Number Utilities ----
diff --git a/src/builtins.h b/src/builtins.h
index a2ed12d..8ec7819 100644
--- a/src/builtins.h
+++ b/src/builtins.h
@@ -68,6 +68,8 @@ enum BuiltinExtraArguments {
                                     kNoExtraICState)              \
   V(InOptimizationQueue,            BUILTIN, UNINITIALIZED,       \
                                     kNoExtraICState)              \
+  V(JSConstructStubCountdown,       BUILTIN, UNINITIALIZED,       \
+                                    kNoExtraICState)              \
   V(JSConstructStubGeneric,         BUILTIN, UNINITIALIZED,       \
                                     kNoExtraICState)              \
   V(JSConstructStubApi,             BUILTIN, UNINITIALIZED,       \
                                     kNoExtraICState)              \
@@ -338,6 +340,7 @@ class Builtins {
   static void Generate_InOptimizationQueue(MacroAssembler* masm);
   static void Generate_CompileOptimized(MacroAssembler* masm);
   static void Generate_CompileOptimizedConcurrent(MacroAssembler* masm);
+  static void Generate_JSConstructStubCountdown(MacroAssembler* masm);
   static void Generate_JSConstructStubGeneric(MacroAssembler* masm);
   static void Generate_JSConstructStubApi(MacroAssembler* masm);
   static void Generate_JSEntryTrampoline(MacroAssembler* masm);
diff --git a/src/compiler.cc b/src/compiler.cc
index f1736c1..4e55d22 100644
--- a/src/compiler.cc
+++ b/src/compiler.cc
@@ -510,6 +510,9 @@ void OptimizedCompileJob::RecordOptimizationStats() {
 // Sets the expected number of properties based on estimate from compiler.
 void SetExpectedNofPropertiesFromEstimate(Handle<SharedFunctionInfo> shared,
                                           int estimate) {
+  // See the comment in SetExpectedNofProperties.
+  if (shared->live_objects_may_exist()) return;
+
   // If no properties are added in the constructor, they are more likely
   // to be added later.
   if (estimate == 0) estimate = 2;
diff --git a/src/factory.cc b/src/factory.cc
index fcb4918..4300b24 100644
--- a/src/factory.cc
+++ b/src/factory.cc
@@ -1932,6 +1932,7 @@ Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfo(
   share->set_debug_info(*undefined_value(), SKIP_WRITE_BARRIER);
   share->set_inferred_name(*empty_string(), SKIP_WRITE_BARRIER);
   share->set_feedback_vector(*empty_fixed_array(), SKIP_WRITE_BARRIER);
+  share->set_initial_map(*undefined_value(), SKIP_WRITE_BARRIER);
   share->set_profiler_ticks(0);
   share->set_ast_node_count(0);
   share->set_counters(0);
diff --git a/src/heap-snapshot-generator.cc b/src/heap-snapshot-generator.cc
index 652e394..fb1fa38 100644
--- a/src/heap-snapshot-generator.cc
+++ b/src/heap-snapshot-generator.cc
@@ -1421,6 +1421,9 @@ void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
   SetInternalReference(obj, entry,
                        "feedback_vector", shared->feedback_vector(),
                        SharedFunctionInfo::kFeedbackVectorOffset);
+  SetWeakReference(obj, entry,
+                   "initial_map", shared->initial_map(),
+                   SharedFunctionInfo::kInitialMapOffset);
 }
diff --git a/src/heap.cc b/src/heap.cc
index 24e1d53..66d17fb 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -3637,7 +3637,7 @@ void Heap::InitializeJSObjectFromMap(JSObject* obj,
   // so that object accesses before the constructor completes (e.g. in the
   // debugger) will not cause a crash.
   if (map->constructor()->IsJSFunction() &&
-      JSFunction::cast(map->constructor())->
+      JSFunction::cast(map->constructor())->shared()->
           IsInobjectSlackTrackingInProgress()) {
     // We might want to shrink the object later.
     ASSERT(obj->GetInternalFieldCount() == 0);
diff --git a/src/hydrogen.cc b/src/hydrogen.cc
index d5d546a..5d78a4a 100644
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -8582,8 +8582,8 @@ void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
 
   // Force completion of inobject slack tracking before generating
   // allocation code to finalize instance size.
-  if (constructor->IsInobjectSlackTrackingInProgress()) {
-    constructor->CompleteInobjectSlackTracking();
+  if (constructor->shared()->IsInobjectSlackTrackingInProgress()) {
+    constructor->shared()->CompleteInobjectSlackTracking();
   }
 
   // Calculate instance size from initial map of constructor.
diff --git a/src/ia32/builtins-ia32.cc b/src/ia32/builtins-ia32.cc
index 100505f..969aae1 100644
--- a/src/ia32/builtins-ia32.cc
+++ b/src/ia32/builtins-ia32.cc
@@ -102,6 +102,7 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
 
 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                            bool is_api_function,
+                                           bool count_constructions,
                                            bool create_memento) {
   // ----------- S t a t e -------------
   //  -- eax: number of arguments
@@ -109,9 +110,15 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
   //  -- ebx: allocation site or undefined
   // -----------------------------------
 
+  // Should never count constructions for api objects.
+  ASSERT(!is_api_function || !count_constructions);
+
   // Should never create mementos for api functions.
   ASSERT(!is_api_function || !create_memento);
 
+  // Should never create mementos before slack tracking is finished.
+  ASSERT(!count_constructions || !create_memento);
+
   // Enter a construct frame.
   {
     FrameScope scope(masm, StackFrame::CONSTRUCT);
@@ -157,32 +164,23 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
       __ CmpInstanceType(eax, JS_FUNCTION_TYPE);
       __ j(equal, &rt_call);
 
-      if (!is_api_function) {
+      if (count_constructions) {
         Label allocate;
-        // The code below relies on these assumptions.
-        STATIC_ASSERT(JSFunction::kNoSlackTracking == 0);
-        STATIC_ASSERT(Map::ConstructionCount::kShift +
-                      Map::ConstructionCount::kSize == 32);
-        // Check if slack tracking is enabled.
-        __ mov(esi, FieldOperand(eax, Map::kBitField3Offset));
-        __ shr(esi, Map::ConstructionCount::kShift);
-        __ j(zero, &allocate);  // JSFunction::kNoSlackTracking
         // Decrease generous allocation count.
-        __ sub(FieldOperand(eax, Map::kBitField3Offset),
-               Immediate(1 << Map::ConstructionCount::kShift));
-
-        __ cmp(esi, JSFunction::kFinishSlackTracking);
-        __ j(not_equal, &allocate);
+        __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
+        __ dec_b(FieldOperand(ecx,
+                              SharedFunctionInfo::kConstructionCountOffset));
+        __ j(not_zero, &allocate);
 
         __ push(eax);
         __ push(edi);
 
         __ push(edi);  // constructor
+        // The call will replace the stub, so the countdown is only done once.
         __ CallRuntime(Runtime::kHiddenFinalizeInstanceSize, 1);
 
         __ pop(edi);
         __ pop(eax);
-        __ xor_(esi, esi);  // JSFunction::kNoSlackTracking
 
         __ bind(&allocate);
       }
 
@@ -212,17 +210,9 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
       // eax: initial map
      // ebx: JSObject
      // edi: start of next object (including memento if create_memento)
-      // esi: slack tracking counter (non-API function case)
-      __ mov(edx, factory->undefined_value());
       __ lea(ecx, Operand(ebx, JSObject::kHeaderSize));
-      if (!is_api_function) {
-        Label no_inobject_slack_tracking;
-
-        // Check if slack tracking is enabled.
-        __ cmp(esi, JSFunction::kNoSlackTracking);
-        __ j(equal, &no_inobject_slack_tracking);
-
-        // Allocate object with a slack.
+      __ mov(edx, factory->undefined_value());
+      if (count_constructions) {
         __ movzx_b(esi,
                    FieldOperand(eax, Map::kPreAllocatedPropertyFieldsOffset));
         __ lea(esi,
@@ -235,19 +225,16 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
         }
         __ InitializeFieldsWithFiller(ecx, esi, edx);
         __ mov(edx, factory->one_pointer_filler_map());
-        // Fill the remaining fields with one pointer filler map.
-
-        __ bind(&no_inobject_slack_tracking);
-      }
-
-      if (create_memento) {
+        __ InitializeFieldsWithFiller(ecx, edi, edx);
+      } else if (create_memento) {
         __ lea(esi, Operand(edi, -AllocationMemento::kSize));
         __ InitializeFieldsWithFiller(ecx, esi, edx);
 
         // Fill in memento fields if necessary.
         // esi: points to the allocated but uninitialized memento.
+        Handle<Map> allocation_memento_map = factory->allocation_memento_map();
         __ mov(Operand(esi, AllocationMemento::kMapOffset),
-               factory->allocation_memento_map());
+               allocation_memento_map);
         // Get the cell or undefined.
         __ mov(edx, Operand(esp, kPointerSize*2));
         __ mov(Operand(esi, AllocationMemento::kAllocationSiteOffset),
@@ -353,8 +340,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
       offset = kPointerSize;
     }
 
-    // Must restore esi (context) and edi (constructor) before calling runtime.
-    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+    // Must restore edi (constructor) before calling runtime.
     __ mov(edi, Operand(esp, offset));
     // edi: function (constructor)
     __ push(edi);
@@ -427,7 +413,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
   }
 
   // Store offset of return address for deoptimizer.
-  if (!is_api_function) {
+  if (!is_api_function && !count_constructions) {
     masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
   }
 
@@ -469,13 +455,18 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
 }
 
 
+void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
+  Generate_JSConstructStubHelper(masm, false, true, false);
+}
+
+
 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
-  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
+  Generate_JSConstructStubHelper(masm, false, false, FLAG_pretenuring_call_new);
 }
 
 
 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
-  Generate_JSConstructStubHelper(masm, true, false);
+  Generate_JSConstructStubHelper(masm, true, false, false);
 }
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index ce4d6ac..b857c79 100644
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -417,6 +417,8 @@ void MarkCompactCollector::CollectGarbage() {
 
   SweepSpaces();
 
+  if (!FLAG_collect_maps) ReattachInitialMaps();
+
 #ifdef DEBUG
   if (FLAG_verify_native_context_separation) {
     VerifyNativeContextSeparation(heap_);
@@ -2532,6 +2534,23 @@ void MarkCompactCollector::ProcessMapCaches() {
 }
 
 
+void MarkCompactCollector::ReattachInitialMaps() {
+  HeapObjectIterator map_iterator(heap()->map_space());
+  for (HeapObject* obj = map_iterator.Next();
+       obj != NULL;
+       obj = map_iterator.Next()) {
+    Map* map = Map::cast(obj);
+
+    STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
+    if (map->instance_type() < FIRST_JS_RECEIVER_TYPE) continue;
+
+    if (map->attached_to_shared_function_info()) {
+      JSFunction::cast(map->constructor())->shared()->AttachInitialMap(map);
+    }
+  }
+}
+
+
 void MarkCompactCollector::ClearNonLiveReferences() {
   // Iterate over the map space, setting map transitions that go from
   // a marked map to an unmarked map to null transitions.  This action
@@ -2545,6 +2564,13 @@ void MarkCompactCollector::ClearNonLiveReferences() {
     if (!map->CanTransition()) continue;
 
     MarkBit map_mark = Marking::MarkBitFrom(map);
+    if (map_mark.Get() && map->attached_to_shared_function_info()) {
+      // This map is used for inobject slack tracking and has been detached
+      // from SharedFunctionInfo during the mark phase.
+      // Since it survived the GC, reattach it now.
+      JSFunction::cast(map->constructor())->shared()->AttachInitialMap(map);
+    }
+
     ClearNonLivePrototypeTransitions(map);
     ClearNonLiveMapTransitions(map, map_mark);
diff --git a/src/mark-compact.h b/src/mark-compact.h
index bd34d56..254f258 100644
--- a/src/mark-compact.h
+++ b/src/mark-compact.h
@@ -851,6 +851,12 @@ class MarkCompactCollector {
   int ClearNonLiveDependentCodeInGroup(DependentCode* dependent_code,
                                        int group, int start, int end,
                                        int new_start);
 
+  // Marking detaches initial maps from SharedFunctionInfo objects
+  // to make this reference weak. We need to reattach initial maps
+  // back after collection. This is either done during
+  // ClearNonLiveTransitions pass or by calling this function.
+  void ReattachInitialMaps();
+
   // Mark all values associated with reachable keys in weak collections
   // encountered so far.  This might push new object or even new weak maps onto
   // the marking stack.
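
[Editorial sketch, not part of the patch: the mark-compact hunks above restore a hand-rolled weak reference. The SharedFunctionInfo drops its initial_map just before its pointers are marked, so the link alone cannot keep the map alive, and the collector reattaches the map afterwards if it survived. Below is a minimal, self-contained C++ model of that protocol; all types and fields are simplified stand-ins, not V8 source.]

// weak_link_sketch.cc - standalone illustration, assumptions noted above.
#include <cstdio>

struct Map {
  bool marked = false;                           // survived the mark phase
  bool attached_to_shared_function_info = false;
};

struct SharedFunctionInfo {
  Map* initial_map = nullptr;                    // weak link while tracking
  int construction_count = 0;

  bool IsInobjectSlackTrackingInProgress() const {
    return initial_map != nullptr;
  }
  // Called just before the SFI's pointers are visited by the marker.
  // Note the construction_count is deliberately kept, as the patch's
  // DetachInitialMap comment explains.
  void DetachInitialMap() {
    initial_map->attached_to_shared_function_info = true;
    initial_map = nullptr;                       // marker won't see the map
  }
  // Called after marking for every surviving map that was attached,
  // mirroring ReattachInitialMaps() / ClearNonLiveReferences().
  void AttachInitialMap(Map* map) {
    map->attached_to_shared_function_info = false;
    initial_map = map;                           // resume slack tracking
  }
};

int main() {
  Map map;
  SharedFunctionInfo sfi;
  sfi.initial_map = &map;
  sfi.construction_count = 8;                    // countdown in progress

  sfi.DetachInitialMap();                        // before the mark phase
  map.marked = true;                             // some live object kept it
  if (map.marked && map.attached_to_shared_function_info) {
    sfi.AttachInitialMap(&map);                  // after the mark phase
  }
  std::printf("tracking in progress: %d, count: %d\n",
              sfi.IsInobjectSlackTrackingInProgress(),
              sfi.construction_count);
  return 0;
}
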
diff --git a/src/objects-inl.h b/src/objects-inl.h
index 8a35d69..0be46bc 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -4072,6 +4072,19 @@ bool Map::is_extensible() {
 }
 
 
+void Map::set_attached_to_shared_function_info(bool value) {
+  if (value) {
+    set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
+  } else {
+    set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
+  }
+}
+
+
+bool Map::attached_to_shared_function_info() {
+  return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
+}
+
+
 void Map::set_is_shared(bool value) {
   set_bit_field3(IsShared::update(bit_field3(), value));
 }
@@ -4138,26 +4151,6 @@ bool Map::is_migration_target() {
 }
 
 
-void Map::set_done_inobject_slack_tracking(bool value) {
-  set_bit_field3(DoneInobjectSlackTracking::update(bit_field3(), value));
-}
-
-
-bool Map::done_inobject_slack_tracking() {
-  return DoneInobjectSlackTracking::decode(bit_field3());
-}
-
-
-void Map::set_construction_count(int value) {
-  set_bit_field3(ConstructionCount::update(bit_field3(), value));
-}
-
-
-int Map::construction_count() {
-  return ConstructionCount::decode(bit_field3());
-}
-
-
 void Map::freeze() {
   set_bit_field3(IsFrozen::update(bit_field3(), true));
 }
@@ -5058,6 +5051,7 @@ ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
 ACCESSORS(SharedFunctionInfo, feedback_vector, FixedArray,
           kFeedbackVectorOffset)
+ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
          kInstanceClassNameOffset)
 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
@@ -5182,6 +5176,28 @@ PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
 #endif
 
 
+int SharedFunctionInfo::construction_count() {
+  return READ_BYTE_FIELD(this, kConstructionCountOffset);
+}
+
+
+void SharedFunctionInfo::set_construction_count(int value) {
+  ASSERT(0 <= value && value < 256);
+  WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
+}
+
+
+BOOL_ACCESSORS(SharedFunctionInfo,
+               compiler_hints,
+               live_objects_may_exist,
+               kLiveObjectsMayExist)
+
+
+bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
+  return initial_map() != GetHeap()->undefined_value();
+}
+
+
 BOOL_GETTER(SharedFunctionInfo,
             compiler_hints,
             optimization_disabled,
@@ -5232,6 +5248,7 @@ BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
 
 void SharedFunctionInfo::BeforeVisitingPointers() {
+  if (IsInobjectSlackTrackingInProgress()) DetachInitialMap();
 }
 
 
@@ -5456,12 +5473,6 @@ bool JSFunction::IsInOptimizationQueue() {
 }
 
 
-bool JSFunction::IsInobjectSlackTrackingInProgress() {
-  return has_initial_map() &&
-      initial_map()->construction_count() != JSFunction::kNoSlackTracking;
-}
-
-
 Code* JSFunction::code() {
   return Code::cast(
       Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
diff --git a/src/objects.cc b/src/objects.cc
index 831e65d..352e5d6 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -10225,8 +10225,8 @@ void JSFunction::SetInstancePrototype(Handle<JSFunction> function,
   // copy containing the new prototype.  Also complete any in-object
   // slack tracking that is in progress at this point because it is
   // still tracking the old copy.
-  if (function->IsInobjectSlackTrackingInProgress()) {
-    function->CompleteInobjectSlackTracking();
+  if (function->shared()->IsInobjectSlackTrackingInProgress()) {
+    function->shared()->CompleteInobjectSlackTracking();
   }
   Handle<Map> new_map = Map::Copy(handle(function->initial_map()));
   new_map->set_prototype(*value);
@@ -10335,13 +10335,13 @@ void JSFunction::EnsureHasInitialMap(Handle<JSFunction> function) {
   map->set_prototype(*prototype);
   ASSERT(map->has_fast_object_elements());
 
+  if (!function->shared()->is_generator()) {
+    function->shared()->StartInobjectSlackTracking(*map);
+  }
+
   // Finally link initial map and constructor function.
   function->set_initial_map(*map);
   map->set_constructor(*function);
-
-  if (!function->shared()->is_generator()) {
-    function->StartInobjectSlackTracking();
-  }
 }
 
 
@@ -10726,15 +10726,14 @@ bool SharedFunctionInfo::VerifyBailoutId(BailoutId id) {
 }
 
 
-void JSFunction::StartInobjectSlackTracking() {
-  ASSERT(has_initial_map() && !IsInobjectSlackTrackingInProgress());
+void SharedFunctionInfo::StartInobjectSlackTracking(Map* map) {
+  ASSERT(!IsInobjectSlackTrackingInProgress());
 
   if (!FLAG_clever_optimizations) return;
-  Map* map = initial_map();
 
   // Only initiate the tracking the first time.
-  if (map->done_inobject_slack_tracking()) return;
-  map->set_done_inobject_slack_tracking(true);
+  if (live_objects_may_exist()) return;
+  set_live_objects_may_exist(true);
 
   // No tracking during the snapshot construction phase.
   Isolate* isolate = GetIsolate();
@@ -10742,7 +10741,56 @@ void SharedFunctionInfo::StartInobjectSlackTracking(Map* map) {
 
   if (map->unused_property_fields() == 0) return;
 
-  map->set_construction_count(kGenerousAllocationCount);
+  // Nonzero counter is a leftover from the previous attempt interrupted
+  // by GC, keep it.
+  if (construction_count() == 0) {
+    set_construction_count(kGenerousAllocationCount);
+  }
+  set_initial_map(map);
+  Builtins* builtins = isolate->builtins();
+  ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubGeneric),
+            construct_stub());
+  set_construct_stub(builtins->builtin(Builtins::kJSConstructStubCountdown));
+}
+
+
+// Called from GC, hence reinterpret_cast and unchecked accessors.
+void SharedFunctionInfo::DetachInitialMap() {
+  Map* map = reinterpret_cast<Map*>(initial_map());
+
+  // Make the map remember to restore the link if it survives the GC.
+  map->set_bit_field2(
+      map->bit_field2() | (1 << Map::kAttachedToSharedFunctionInfo));
+
+  // Undo state changes made by StartInobjectTracking (except the
+  // construction_count). This way if the initial map does not survive the GC
+  // then StartInobjectTracking will be called again the next time the
+  // constructor is called. The countdown will continue and (possibly after
+  // several more GCs) CompleteInobjectSlackTracking will eventually be called.
+  Heap* heap = map->GetHeap();
+  set_initial_map(heap->undefined_value());
+  Builtins* builtins = heap->isolate()->builtins();
+  ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubCountdown),
+            *RawField(this, kConstructStubOffset));
+  set_construct_stub(builtins->builtin(Builtins::kJSConstructStubGeneric));
+  // It is safe to clear the flag: it will be set again if the map is live.
+  set_live_objects_may_exist(false);
+}
+
+
+// Called from GC, hence reinterpret_cast and unchecked accessors.
+void SharedFunctionInfo::AttachInitialMap(Map* map) {
+  map->set_bit_field2(
+      map->bit_field2() & ~(1 << Map::kAttachedToSharedFunctionInfo));
+
+  // Resume inobject slack tracking.
+  set_initial_map(map);
+  Builtins* builtins = map->GetHeap()->isolate()->builtins();
+  ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubGeneric),
+            *RawField(this, kConstructStubOffset));
+  set_construct_stub(builtins->builtin(Builtins::kJSConstructStubCountdown));
+  // The map survived the gc, so there may be objects referencing it.
+  set_live_objects_may_exist(true);
 }
 
 
@@ -10785,18 +10833,26 @@ static void ShrinkInstanceSize(Map* map, void* data) {
 }
 
 
-void JSFunction::CompleteInobjectSlackTracking() {
-  ASSERT(has_initial_map());
-  Map* map = initial_map();
+void SharedFunctionInfo::CompleteInobjectSlackTracking() {
+  ASSERT(live_objects_may_exist() && IsInobjectSlackTrackingInProgress());
+  Map* map = Map::cast(initial_map());
 
-  ASSERT(map->done_inobject_slack_tracking());
-  map->set_construction_count(kNoSlackTracking);
+  Heap* heap = map->GetHeap();
+  set_initial_map(heap->undefined_value());
+  Builtins* builtins = heap->isolate()->builtins();
+  ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubCountdown),
+            construct_stub());
+  set_construct_stub(builtins->builtin(Builtins::kJSConstructStubGeneric));
 
   int slack = map->unused_property_fields();
   map->TraverseTransitionTree(&GetMinInobjectSlack, &slack);
   if (slack != 0) {
     // Resize the initial map and all maps in its transition tree.
     map->TraverseTransitionTree(&ShrinkInstanceSize, &slack);
+
+    // Give the correct expected_nof_properties to initial maps created later.
+    ASSERT(expected_nof_properties() >= slack);
+    set_expected_nof_properties(expected_nof_properties() - slack);
   }
 }
diff --git a/src/objects.h b/src/objects.h
index f26dc07..47fea39 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -6001,10 +6001,6 @@ class Map: public HeapObject {
   class IsFrozen: public BitField {};
   class IsUnstable: public BitField {};
   class IsMigrationTarget: public BitField {};
-  class DoneInobjectSlackTracking: public BitField {};
-  // Keep this bit field at the very end for better code in
-  // Builtins::kJSConstructStubGeneric stub.
-  class ConstructionCount: public BitField {};
 
   // Tells whether the object in the prototype property will be used
   // for instances created from this function.  If the prototype
@@ -6186,6 +6182,12 @@ class Map: public HeapObject {
   // function that was used to instantiate the object).
   String* constructor_name();
 
+  // Tells whether the map is attached to SharedFunctionInfo
+  // (for inobject slack tracking).
+  inline void set_attached_to_shared_function_info(bool value);
+
+  inline bool attached_to_shared_function_info();
+
   // Tells whether the map is shared between objects that may have different
   // behavior.  If true, the map should never be modified, instead a clone
   // should be created and modified.
@@ -6320,10 +6322,6 @@ class Map: public HeapObject {
   inline bool is_stable();
   inline void set_migration_target(bool value);
   inline bool is_migration_target();
-  inline void set_done_inobject_slack_tracking(bool value);
-  inline bool done_inobject_slack_tracking();
-  inline void set_construction_count(int value);
-  inline int construction_count();
   inline void deprecate();
   inline bool is_deprecated();
   inline bool CanBeDeprecated();
@@ -6564,7 +6562,7 @@ class Map: public HeapObject {
   // Bit positions for bit field 2
   static const int kIsExtensible = 0;
   static const int kStringWrapperSafeForDefaultValueOf = 1;
-  // Currently bit 2 is not used.
+  static const int kAttachedToSharedFunctionInfo = 2;
   // No bits can be used after kElementsKindFirstBit, they are all reserved for
   // storing ElementKind.
   static const int kElementsKindShift = 3;
@@ -6957,16 +6955,108 @@ class SharedFunctionInfo: public HeapObject {
   inline int expected_nof_properties();
   inline void set_expected_nof_properties(int value);
 
+  // Inobject slack tracking is the way to reclaim unused inobject space.
+  //
+  // The instance size is initially determined by adding some slack to
+  // expected_nof_properties (to allow for a few extra properties added
+  // after the constructor). There is no guarantee that the extra space
+  // will not be wasted.
+  //
+  // Here is the algorithm to reclaim the unused inobject space:
+  // - Detect the first constructor call for this SharedFunctionInfo.
+  //   When it happens enter the "in progress" state: remember the
+  //   constructor's initial_map and install a special construct stub that
+  //   counts constructor calls.
+  // - While the tracking is in progress create objects filled with
+  //   one_pointer_filler_map instead of undefined_value. This way they can be
+  //   resized quickly and safely.
+  // - Once enough (kGenerousAllocationCount) objects have been created
+  //   compute the 'slack' (traverse the map transition tree starting from the
+  //   initial_map and find the lowest value of unused_property_fields).
+  // - Traverse the transition tree again and decrease the instance size
+  //   of every map. Existing objects will resize automatically (they are
+  //   filled with one_pointer_filler_map). All further allocations will
+  //   use the adjusted instance size.
+  // - Decrease expected_nof_properties so that an allocations made from
+  //   another context will use the adjusted instance size too.
+  // - Exit "in progress" state by clearing the reference to the initial_map
+  //   and setting the regular construct stub (generic or inline).
+  //
+  //  The above is the main event sequence. Some special cases are possible
+  //  while the tracking is in progress:
+  //
+  // - GC occurs.
+  //   Check if the initial_map is referenced by any live objects (except this
+  //   SharedFunctionInfo). If it is, continue tracking as usual.
+  //   If it is not, clear the reference and reset the tracking state. The
+  //   tracking will be initiated again on the next constructor call.
+  //
+  // - The constructor is called from another context.
+  //   Immediately complete the tracking, perform all the necessary changes
+  //   to maps. This is necessary because there is no efficient way to track
+  //   multiple initial_maps.
+  //   Proceed to create an object in the current context (with the adjusted
+  //   size).
+  //
+  // - A different constructor function sharing the same SharedFunctionInfo is
+  //   called in the same context. This could be another closure in the same
+  //   context, or the first function could have been disposed.
+  //   This is handled the same way as the previous case.
+  //
+  // Important: inobject slack tracking is not attempted during the snapshot
+  // creation.
+
+  static const int kGenerousAllocationCount = 8;
+
+  // [construction_count]: Counter for constructor calls made during
+  // the tracking phase.
+  inline int construction_count();
+  inline void set_construction_count(int value);
+
   // [feedback_vector] - accumulates ast node feedback from full-codegen and
   // (increasingly) from crankshafted code where sufficient feedback isn't
   // available. Currently the field is duplicated in
   // TypeFeedbackInfo::feedback_vector, but the allocation is done here.
   DECL_ACCESSORS(feedback_vector, FixedArray)
 
+  // [initial_map]: initial map of the first function called as a constructor.
+  // Saved for the duration of the tracking phase.
+  // This is a weak link (GC resets it to undefined_value if no other live
+  // object reference this map).
+  DECL_ACCESSORS(initial_map, Object)
+
+  // True if the initial_map is not undefined and the countdown stub is
+  // installed.
+  inline bool IsInobjectSlackTrackingInProgress();
+
+  // Starts the tracking.
+  // Stores the initial map and installs the countdown stub.
+  // IsInobjectSlackTrackingInProgress is normally true after this call,
+  // except when tracking have not been started (e.g. the map has no unused
+  // properties or the snapshot is being built).
+  void StartInobjectSlackTracking(Map* map);
+
+  // Completes the tracking.
+  // IsInobjectSlackTrackingInProgress is false after this call.
+  void CompleteInobjectSlackTracking();
+
   // Invoked before pointers in SharedFunctionInfo are being marked.
   // Also clears the optimized code map.
   inline void BeforeVisitingPointers();
 
+  // Clears the initial_map before the GC marking phase to ensure the reference
+  // is weak. IsInobjectSlackTrackingInProgress is false after this call.
+  void DetachInitialMap();
+
+  // Restores the link to the initial map after the GC marking phase.
+  // IsInobjectSlackTrackingInProgress is true after this call.
+  void AttachInitialMap(Map* map);
+
+  // False if there are definitely no live objects created from this function.
+  // True if live objects _may_ exist (existence not guaranteed).
+  // May go back from true to false after GC.
+  DECL_BOOLEAN_ACCESSORS(live_objects_may_exist)
+
   // [instance class name]: class name for instances.
   DECL_ACCESSORS(instance_class_name, Object)
 
@@ -7212,10 +7302,12 @@ class SharedFunctionInfo: public HeapObject {
   static const int kInferredNameOffset = kDebugInfoOffset + kPointerSize;
   static const int kFeedbackVectorOffset = kInferredNameOffset + kPointerSize;
+  static const int kInitialMapOffset =
+      kFeedbackVectorOffset + kPointerSize;
 #if V8_HOST_ARCH_32_BIT
   // Smi fields.
   static const int kLengthOffset =
-      kFeedbackVectorOffset + kPointerSize;
+      kInitialMapOffset + kPointerSize;
   static const int kFormalParameterCountOffset = kLengthOffset + kPointerSize;
   static const int kExpectedNofPropertiesOffset =
       kFormalParameterCountOffset + kPointerSize;
@@ -7251,7 +7343,7 @@ class SharedFunctionInfo: public HeapObject {
   // word is not set and thus this word cannot be treated as pointer
   // to HeapObject during old space traversal.
   static const int kLengthOffset =
-      kFeedbackVectorOffset + kPointerSize;
+      kInitialMapOffset + kPointerSize;
   static const int kFormalParameterCountOffset =
       kLengthOffset + kIntSize;
 
@@ -7285,10 +7377,21 @@ class SharedFunctionInfo: public HeapObject {
 #endif
 
+  // The construction counter for inobject slack tracking is stored in the
+  // most significant byte of compiler_hints which is otherwise unused.
+  // Its offset depends on the endian-ness of the architecture.
+#if defined(V8_TARGET_LITTLE_ENDIAN)
+  static const int kConstructionCountOffset = kCompilerHintsOffset + 3;
+#elif defined(V8_TARGET_BIG_ENDIAN)
+  static const int kConstructionCountOffset = kCompilerHintsOffset + 0;
+#else
+#error Unknown byte ordering
+#endif
+
   static const int kAlignedSize = POINTER_SIZE_ALIGN(kSize);
 
   typedef FixedBodyDescriptor BodyDescriptor;
 
   // Bit positions in start_position_and_type.
@@ -7303,6 +7406,7 @@ class SharedFunctionInfo: public HeapObject {
   enum CompilerHints {
     kAllowLazyCompilation,
     kAllowLazyCompilationWithoutContext,
+    kLiveObjectsMayExist,
     kOptimizationDisabled,
     kStrictModeFunction,
     kUsesArguments,
@@ -7518,54 +7622,6 @@ class JSFunction: public JSObject {
   // Tells whether or not the function is on the concurrent recompilation queue.
   inline bool IsInOptimizationQueue();
 
-  // Inobject slack tracking is the way to reclaim unused inobject space.
-  //
-  // The instance size is initially determined by adding some slack to
-  // expected_nof_properties (to allow for a few extra properties added
-  // after the constructor). There is no guarantee that the extra space
-  // will not be wasted.
-  //
-  // Here is the algorithm to reclaim the unused inobject space:
-  // - Detect the first constructor call for this JSFunction.
-  //   When it happens enter the "in progress" state: initialize construction
-  //   counter in the initial_map and set the |done_inobject_slack_tracking|
-  //   flag.
-  // - While the tracking is in progress create objects filled with
-  //   one_pointer_filler_map instead of undefined_value. This way they can be
-  //   resized quickly and safely.
-  // - Once enough (kGenerousAllocationCount) objects have been created
-  //   compute the 'slack' (traverse the map transition tree starting from the
-  //   initial_map and find the lowest value of unused_property_fields).
-  // - Traverse the transition tree again and decrease the instance size
-  //   of every map. Existing objects will resize automatically (they are
-  //   filled with one_pointer_filler_map). All further allocations will
-  //   use the adjusted instance size.
-  // - SharedFunctionInfo's expected_nof_properties left unmodified since
-  //   allocations made using different closures could actually create different
-  //   kind of objects (see prototype inheritance pattern).
-  //
-  // Important: inobject slack tracking is not attempted during the snapshot
-  // creation.
-
-  static const int kGenerousAllocationCount = Map::ConstructionCount::kMax;
-  static const int kFinishSlackTracking = 1;
-  static const int kNoSlackTracking = 0;
-
-  // True if the initial_map is set and the object constructions countdown
-  // counter is not zero.
-  inline bool IsInobjectSlackTrackingInProgress();
-
-  // Starts the tracking.
-  // Initializes object constructions countdown counter in the initial map.
-  // IsInobjectSlackTrackingInProgress is normally true after this call,
-  // except when tracking have not been started (e.g. the map has no unused
-  // properties or the snapshot is being built).
-  void StartInobjectSlackTracking();
-
-  // Completes the tracking.
-  // IsInobjectSlackTrackingInProgress is false after this call.
-  void CompleteInobjectSlackTracking();
-
   // [literals_or_bindings]: Fixed array holding either
   // the materialized literals or the bindings of a bound function.
   //
diff --git a/src/runtime.cc b/src/runtime.cc
index 78bf25a..39c3d29 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -3124,12 +3124,18 @@ RUNTIME_FUNCTION(Runtime_SetExpectedNumberOfProperties) {
   CONVERT_ARG_HANDLE_CHECKED(JSFunction, func, 0);
   CONVERT_SMI_ARG_CHECKED(num, 1);
   RUNTIME_ASSERT(num >= 0);
-
-  func->shared()->set_expected_nof_properties(num);
-  if (func->has_initial_map()) {
-    Handle<Map> new_initial_map = Map::Copy(handle(func->initial_map()));
-    new_initial_map->set_unused_property_fields(num);
-    func->set_initial_map(*new_initial_map);
+  // If objects constructed from this function exist then changing
+  // 'estimated_nof_properties' is dangerous since the previous value might
+  // have been compiled into the fast construct stub. Moreover, the inobject
+  // slack tracking logic might have adjusted the previous value, so even
+  // passing the same value is risky.
+  if (!func->shared()->live_objects_may_exist()) {
+    func->shared()->set_expected_nof_properties(num);
+    if (func->has_initial_map()) {
+      Handle<Map> new_initial_map = Map::Copy(handle(func->initial_map()));
+      new_initial_map->set_unused_property_fields(num);
+      func->set_initial_map(*new_initial_map);
+    }
   }
   return isolate->heap()->undefined_value();
 }
@@ -8366,6 +8372,15 @@ static Object* Runtime_NewObjectHelper(Isolate* isolate,
   // available.
   Compiler::EnsureCompiled(function, CLEAR_EXCEPTION);
 
+  Handle<SharedFunctionInfo> shared(function->shared(), isolate);
+  if (!function->has_initial_map() &&
+      shared->IsInobjectSlackTrackingInProgress()) {
+    // The tracking is already in progress for another function. We can only
+    // track one initial_map at a time, so we force the completion before the
+    // function is called as a constructor for the first time.
+    shared->CompleteInobjectSlackTracking();
+  }
+
   Handle<JSObject> result;
   if (site.is_null()) {
     result = isolate->factory()->NewJSObject(function);
@@ -8409,7 +8424,7 @@ RUNTIME_FUNCTION(RuntimeHidden_FinalizeInstanceSize) {
   ASSERT(args.length() == 1);
 
   CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
-  function->CompleteInobjectSlackTracking();
+  function->shared()->CompleteInobjectSlackTracking();
 
   return isolate->heap()->undefined_value();
 }
diff --git a/src/x64/builtins-x64.cc b/src/x64/builtins-x64.cc
index a88025f..9e3b89a 100644
--- a/src/x64/builtins-x64.cc
+++ b/src/x64/builtins-x64.cc
@@ -101,6 +101,7 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
 
 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                            bool is_api_function,
+                                           bool count_constructions,
                                            bool create_memento) {
   // ----------- S t a t e -------------
   //  -- rax: number of arguments
@@ -108,9 +109,15 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
   //  -- rbx: allocation site or undefined
   // -----------------------------------
 
+  // Should never count constructions for api objects.
+  ASSERT(!is_api_function || !count_constructions);
+
   // Should never create mementos for api functions.
   ASSERT(!is_api_function || !create_memento);
 
+  // Should never create mementos before slack tracking is finished.
+  ASSERT(!count_constructions || !create_memento);
+
   // Enter a construct frame.
   {
     FrameScope scope(masm, StackFrame::CONSTRUCT);
@@ -159,32 +166,23 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
       __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
       __ j(equal, &rt_call);
 
-      if (!is_api_function) {
+      if (count_constructions) {
         Label allocate;
-        // The code below relies on these assumptions.
-        STATIC_ASSERT(JSFunction::kNoSlackTracking == 0);
-        STATIC_ASSERT(Map::ConstructionCount::kShift +
-                      Map::ConstructionCount::kSize == 32);
-        // Check if slack tracking is enabled.
-        __ movl(rsi, FieldOperand(rax, Map::kBitField3Offset));
-        __ shrl(rsi, Immediate(Map::ConstructionCount::kShift));
-        __ j(zero, &allocate);  // JSFunction::kNoSlackTracking
         // Decrease generous allocation count.
-        __ subl(FieldOperand(rax, Map::kBitField3Offset),
-                Immediate(1 << Map::ConstructionCount::kShift));
-
-        __ cmpl(rsi, Immediate(JSFunction::kFinishSlackTracking));
-        __ j(not_equal, &allocate);
+        __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
+        __ decb(FieldOperand(rcx,
+                             SharedFunctionInfo::kConstructionCountOffset));
+        __ j(not_zero, &allocate);
 
         __ Push(rax);
         __ Push(rdi);
 
         __ Push(rdi);  // constructor
+        // The call will replace the stub, so the countdown is only done once.
         __ CallRuntime(Runtime::kHiddenFinalizeInstanceSize, 1);
 
         __ Pop(rdi);
         __ Pop(rax);
-        __ xorl(rsi, rsi);  // JSFunction::kNoSlackTracking
 
         __ bind(&allocate);
       }
 
@@ -215,17 +213,9 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
       // rax: initial map
       // rbx: JSObject
       // rdi: start of next object (including memento if create_memento)
-      // rsi: slack tracking counter (non-API function case)
       __ leap(rcx, Operand(rbx, JSObject::kHeaderSize));
       __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
-      if (!is_api_function) {
-        Label no_inobject_slack_tracking;
-
-        // Check if slack tracking is enabled.
-        __ cmpl(rsi, Immediate(JSFunction::kNoSlackTracking));
-        __ j(equal, &no_inobject_slack_tracking);
-
-        // Allocate object with a slack.
+      if (count_constructions) {
        __ movzxbp(rsi,
                   FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
        __ leap(rsi,
@@ -238,21 +228,20 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
        }
        __ InitializeFieldsWithFiller(rcx, rsi, rdx);
        __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
-        // Fill the remaining fields with one pointer filler map.
-
-        __ bind(&no_inobject_slack_tracking);
-      }
-      if (create_memento) {
+        __ InitializeFieldsWithFiller(rcx, rdi, rdx);
+      } else if (create_memento) {
        __ leap(rsi, Operand(rdi, -AllocationMemento::kSize));
        __ InitializeFieldsWithFiller(rcx, rsi, rdx);
 
        // Fill in memento fields if necessary.
        // rsi: points to the allocated but uninitialized memento.
+        Handle<Map> allocation_memento_map = factory->allocation_memento_map();
        __ Move(Operand(rsi, AllocationMemento::kMapOffset),
-                factory->allocation_memento_map());
+                allocation_memento_map);
        // Get the cell or undefined.
        __ movp(rdx, Operand(rsp, kPointerSize*2));
-        __ movp(Operand(rsi, AllocationMemento::kAllocationSiteOffset), rdx);
+        __ movp(Operand(rsi, AllocationMemento::kAllocationSiteOffset),
+                rdx);
       } else {
        __ InitializeFieldsWithFiller(rcx, rdi, rdx);
       }
@@ -355,8 +344,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
       offset = kPointerSize;
     }
 
-    // Must restore rsi (context) and rdi (constructor) before calling runtime.
-    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
+    // Must restore rdi (constructor) before calling runtime.
     __ movp(rdi, Operand(rsp, offset));
     __ Push(rdi);
     if (create_memento) {
@@ -428,7 +416,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
   }
 
   // Store offset of return address for deoptimizer.
-  if (!is_api_function) {
+  if (!is_api_function && !count_constructions) {
     masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
   }
 
@@ -471,13 +459,18 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
 }
 
 
+void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
+  Generate_JSConstructStubHelper(masm, false, true, false);
+}
+
+
 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
-  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
+  Generate_JSConstructStubHelper(masm, false, false, FLAG_pretenuring_call_new);
 }
 
 
 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
-  Generate_JSConstructStubHelper(masm, true, false);
+  Generate_JSConstructStubHelper(masm, true, false, false);
 }
diff --git a/test/cctest/test-mementos.cc b/test/cctest/test-mementos.cc
index 4aee57c..a377b4a 100644
--- a/test/cctest/test-mementos.cc
+++ b/test/cctest/test-mementos.cc
@@ -89,7 +89,10 @@ TEST(PretenuringCallNew) {
   Isolate* isolate = CcTest::i_isolate();
   Heap* heap = isolate->heap();
 
+  // We need to create several instances to get past the slack-tracking
+  // phase, where mementos aren't emitted.
   int call_count = 10;
+  CHECK_GE(call_count, SharedFunctionInfo::kGenerousAllocationCount);
   i::ScopedVector<char> test_buf(1024);
   const char* program =
       "function f() {"
@@ -114,8 +117,8 @@ TEST(PretenuringCallNew) {
   CHECK_EQ(memento->map(), heap->allocation_memento_map());
 
   // Furthermore, how many mementos did we create? The count should match
-  // call_count. Note, that mementos are allocated during the inobject slack
-  // tracking phase.
+  // call_count - SharedFunctionInfo::kGenerousAllocationCount.
   AllocationSite* site = memento->GetAllocationSite();
-  CHECK_EQ(call_count, site->pretenure_create_count()->value());
+  CHECK_EQ(call_count - SharedFunctionInfo::kGenerousAllocationCount,
+           site->pretenure_create_count()->value());
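
[Editorial sketch, not part of the patch: the updated test expectation follows directly from the countdown stub restored above. While the countdown stub is installed no mementos are created; only the calls after slack tracking completes emit them, so the expected count is call_count - kGenerousAllocationCount. A back-of-the-envelope C++ check, with both constants taken from the hunks above:]

// memento_count_sketch.cc - standalone illustration of the test arithmetic.
#include <cstdio>

int main() {
  const int kGenerousAllocationCount = 8;  // SharedFunctionInfo value above
  const int call_count = 10;               // matches the test

  int mementos = 0;
  int countdown = kGenerousAllocationCount;
  for (int i = 0; i < call_count; ++i) {
    if (countdown > 0) {
      --countdown;                         // countdown stub: no memento
    } else {
      ++mementos;                          // generic stub: memento emitted
    }
  }
  // Expected: call_count - kGenerousAllocationCount = 2
  std::printf("mementos created: %d\n", mementos);
  return 0;
}
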