static void Generate_JSConstructStubHelper(MacroAssembler* masm,
bool is_api_function,
+ bool count_constructions,
bool create_memento) {
// ----------- S t a t e -------------
// -- r0 : number of arguments
// -- sp[...]: constructor arguments
// -----------------------------------
+ // Should never count constructions for api objects.
+ ASSERT(!is_api_function || !count_constructions);
+
// Should never create mementos for api functions.
ASSERT(!is_api_function || !create_memento);
+ // Should never create mementos before slack tracking is finished.
+ ASSERT(!count_constructions || !create_memento);
+
Isolate* isolate = masm->isolate();
// Enter a construct frame.
__ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
__ b(eq, &rt_call);
- if (!is_api_function) {
+ if (count_constructions) {
Label allocate;
- MemOperand bit_field3 = FieldMemOperand(r2, Map::kBitField3Offset);
- // Check if slack tracking is enabled.
- __ ldr(r4, bit_field3);
- __ DecodeField<Map::ConstructionCount>(r3, r4);
- __ cmp(r3, Operand(JSFunction::kNoSlackTracking));
- __ b(eq, &allocate);
// Decrease generous allocation count.
- __ sub(r4, r4, Operand(1 << Map::ConstructionCount::kShift));
- __ str(r4, bit_field3);
- __ cmp(r3, Operand(JSFunction::kFinishSlackTracking));
+ __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
+ MemOperand constructor_count =
+ FieldMemOperand(r3, SharedFunctionInfo::kConstructionCountOffset);
+ __ ldrb(r4, constructor_count);
+ __ sub(r4, r4, Operand(1), SetCC);
+ __ strb(r4, constructor_count);
__ b(ne, &allocate);
__ push(r1);
__ Push(r2, r1); // r1 = constructor
+ // The call will replace the stub, so the countdown is only done once.
__ CallRuntime(Runtime::kHiddenFinalizeInstanceSize, 1);
__ pop(r2);
// r4: JSObject (not tagged)
// r5: First in-object property of JSObject (not tagged)
ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);
- __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
-
- if (!is_api_function) {
- Label no_inobject_slack_tracking;
-
- // Check if slack tracking is enabled.
- __ ldr(ip, FieldMemOperand(r2, Map::kBitField3Offset));
- __ DecodeField<Map::ConstructionCount>(ip);
- __ cmp(ip, Operand(JSFunction::kNoSlackTracking));
- __ b(eq, &no_inobject_slack_tracking);
- // Allocate object with a slack.
+ if (count_constructions) {
+ __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
__ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
__ Ubfx(r0, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
kBitsPerByte);
__ InitializeFieldsWithFiller(r5, r0, r6);
// To allow for truncation.
__ LoadRoot(r6, Heap::kOnePointerFillerMapRootIndex);
- // Fill the remaining fields with one pointer filler map.
-
- __ bind(&no_inobject_slack_tracking);
- }
-
- if (create_memento) {
- __ sub(ip, r3, Operand(AllocationMemento::kSize / kPointerSize));
- __ add(r0, r4, Operand(ip, LSL, kPointerSizeLog2)); // End of object.
+ __ add(r0, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
+ __ InitializeFieldsWithFiller(r5, r0, r6);
+ } else if (create_memento) {
+ __ sub(r6, r3, Operand(AllocationMemento::kSize / kPointerSize));
+ __ add(r0, r4, Operand(r6, LSL, kPointerSizeLog2)); // End of object.
+ __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
__ InitializeFieldsWithFiller(r5, r0, r6);
// Fill in memento fields.
ASSERT_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
__ str(r6, MemOperand(r5, kPointerSize, PostIndex));
} else {
+ __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
__ add(r0, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
__ InitializeFieldsWithFiller(r5, r0, r6);
}
}
// Store offset of return address for deoptimizer.
- if (!is_api_function) {
+ if (!is_api_function && !count_constructions) {
masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
}
}
+// Construct stub installed while inobject slack tracking is in progress:
+// the helper is invoked with count_constructions == true (no API function,
+// no allocation memento), so it decrements the per-SharedFunctionInfo
+// construction count and finalizes the instance size when it reaches zero.
+void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
+  Generate_JSConstructStubHelper(masm, false, true, false);
+}
+
+
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
+ Generate_JSConstructStubHelper(masm, false, false, FLAG_pretenuring_call_new);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, true, false);
+ Generate_JSConstructStubHelper(masm, true, false, false);
}
void NumberOfOwnDescriptors(Register dst, Register map);
template<typename Field>
- void DecodeField(Register dst, Register src) {
+ void DecodeField(Register reg) {
static const int shift = Field::kShift;
static const int mask = Field::kMask >> shift;
- static const int size = Field::kSize;
- mov(dst, Operand(src, LSR, shift));
- if (shift + size != 32) {
- and_(dst, dst, Operand(mask));
- }
- }
-
- template<typename Field>
- void DecodeField(Register reg) {
- DecodeField<Field>(reg, reg);
+ mov(reg, Operand(reg, LSR, shift));
+ and_(reg, reg, Operand(mask));
}
// Activation support.
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
bool is_api_function,
+ bool count_constructions,
bool create_memento) {
// ----------- S t a t e -------------
// -- x0 : number of arguments
// -----------------------------------
ASM_LOCATION("Builtins::Generate_JSConstructStubHelper");
+ // Should never count constructions for api objects.
+ ASSERT(!is_api_function || !count_constructions);
// Should never create mementos for api functions.
ASSERT(!is_api_function || !create_memento);
+ // Should never create mementos before slack tracking is finished.
+ ASSERT(!count_constructions || !create_memento);
Isolate* isolate = masm->isolate();
__ CompareInstanceType(init_map, x10, JS_FUNCTION_TYPE);
__ B(eq, &rt_call);
- Register constructon_count = x14;
- if (!is_api_function) {
+ if (count_constructions) {
Label allocate;
- MemOperand bit_field3 =
- FieldMemOperand(init_map, Map::kBitField3Offset);
- // Check if slack tracking is enabled.
- __ Ldr(x4, bit_field3);
- __ DecodeField<Map::ConstructionCount>(constructon_count, x4);
- __ Cmp(constructon_count, Operand(JSFunction::kNoSlackTracking));
- __ B(eq, &allocate);
// Decrease generous allocation count.
- __ Subs(x4, x4, Operand(1 << Map::ConstructionCount::kShift));
- __ Str(x4, bit_field3);
- __ Cmp(constructon_count, Operand(JSFunction::kFinishSlackTracking));
+ __ Ldr(x3, FieldMemOperand(constructor,
+ JSFunction::kSharedFunctionInfoOffset));
+ MemOperand constructor_count =
+ FieldMemOperand(x3, SharedFunctionInfo::kConstructionCountOffset);
+ __ Ldrb(x4, constructor_count);
+ __ Subs(x4, x4, 1);
+ __ Strb(x4, constructor_count);
__ B(ne, &allocate);
// Push the constructor and map to the stack, and the constructor again
// as argument to the runtime call.
__ Push(constructor, init_map, constructor);
+ // The call will replace the stub, so the countdown is only done once.
__ CallRuntime(Runtime::kHiddenFinalizeInstanceSize, 1);
__ Pop(init_map, constructor);
- __ Mov(constructon_count, Operand(JSFunction::kNoSlackTracking));
__ Bind(&allocate);
}
__ Add(first_prop, new_obj, JSObject::kHeaderSize);
// Fill all of the in-object properties with the appropriate filler.
- Register filler = x7;
- __ LoadRoot(filler, Heap::kUndefinedValueRootIndex);
+ Register undef = x7;
+ __ LoadRoot(undef, Heap::kUndefinedValueRootIndex);
// Obtain number of pre-allocated property fields and in-object
// properties.
Register prop_fields = x6;
__ Sub(prop_fields, obj_size, JSObject::kHeaderSize / kPointerSize);
- if (!is_api_function) {
- Label no_inobject_slack_tracking;
-
- // Check if slack tracking is enabled.
- __ Cmp(constructon_count, Operand(JSFunction::kNoSlackTracking));
- __ B(eq, &no_inobject_slack_tracking);
- constructon_count = NoReg;
-
+ if (count_constructions) {
// Fill the pre-allocated fields with undef.
- __ FillFields(first_prop, prealloc_fields, filler);
+ __ FillFields(first_prop, prealloc_fields, undef);
- // Update first_prop register to be the offset of the first field after
+ // Register first_non_prealloc is the offset of the first field after
// pre-allocated fields.
- __ Add(first_prop, first_prop,
+ Register first_non_prealloc = x12;
+ __ Add(first_non_prealloc, first_prop,
Operand(prealloc_fields, LSL, kPointerSizeLog2));
+ first_prop = NoReg;
+
if (FLAG_debug_code) {
- Register obj_end = x14;
+ Register obj_end = x5;
__ Add(obj_end, new_obj, Operand(obj_size, LSL, kPointerSizeLog2));
- __ Cmp(first_prop, obj_end);
+ __ Cmp(first_non_prealloc, obj_end);
__ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
}
// Fill the remaining fields with one pointer filler map.
- __ LoadRoot(filler, Heap::kOnePointerFillerMapRootIndex);
- __ Sub(prop_fields, prop_fields, prealloc_fields);
-
- __ bind(&no_inobject_slack_tracking);
- }
- if (create_memento) {
+ Register one_pointer_filler = x5;
+ Register non_prealloc_fields = x6;
+ __ LoadRoot(one_pointer_filler, Heap::kOnePointerFillerMapRootIndex);
+ __ Sub(non_prealloc_fields, prop_fields, prealloc_fields);
+ __ FillFields(first_non_prealloc, non_prealloc_fields,
+ one_pointer_filler);
+ prop_fields = NoReg;
+ } else if (create_memento) {
// Fill the pre-allocated fields with undef.
- __ FillFields(first_prop, prop_fields, filler);
+ __ FillFields(first_prop, prop_fields, undef);
__ Add(first_prop, new_obj, Operand(obj_size, LSL, kPointerSizeLog2));
__ LoadRoot(x14, Heap::kAllocationMementoMapRootIndex);
ASSERT_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
first_prop = NoReg;
} else {
// Fill all of the property fields with undef.
- __ FillFields(first_prop, prop_fields, filler);
+ __ FillFields(first_prop, prop_fields, undef);
first_prop = NoReg;
prop_fields = NoReg;
}
// Initialize the fields to undefined.
Register elements = x10;
__ Add(elements, new_array, FixedArray::kHeaderSize);
- __ FillFields(elements, element_count, filler);
+ __ FillFields(elements, element_count, undef);
// Store the initialized FixedArray into the properties field of the
// JSObject.
}
// Store offset of return address for deoptimizer.
- if (!is_api_function) {
+ if (!is_api_function && !count_constructions) {
masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
}
}
+// Construct stub installed while inobject slack tracking is in progress:
+// the helper is invoked with count_constructions == true (no API function,
+// no allocation memento), so it decrements the per-SharedFunctionInfo
+// construction count and finalizes the instance size when it reaches zero.
+void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
+  Generate_JSConstructStubHelper(masm, false, true, false);
+}
+
+
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
+ Generate_JSConstructStubHelper(masm, false, false, FLAG_pretenuring_call_new);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, true, false);
+ Generate_JSConstructStubHelper(masm, true, false, false);
}
void NumberOfOwnDescriptors(Register dst, Register map);
template<typename Field>
- void DecodeField(Register dst, Register src) {
+ void DecodeField(Register reg) {
static const uint64_t shift = Field::kShift;
static const uint64_t setbits = CountSetBits(Field::kMask, 32);
- Ubfx(dst, src, shift, setbits);
- }
-
- template<typename Field>
- void DecodeField(Register reg) {
- DecodeField<Field>(reg, reg);
+ Ubfx(reg, reg, shift, setbits);
}
// ---- SMI and Number Utilities ----
kNoExtraICState) \
V(InOptimizationQueue, BUILTIN, UNINITIALIZED, \
kNoExtraICState) \
+ V(JSConstructStubCountdown, BUILTIN, UNINITIALIZED, \
+ kNoExtraICState) \
V(JSConstructStubGeneric, BUILTIN, UNINITIALIZED, \
kNoExtraICState) \
V(JSConstructStubApi, BUILTIN, UNINITIALIZED, \
static void Generate_InOptimizationQueue(MacroAssembler* masm);
static void Generate_CompileOptimized(MacroAssembler* masm);
static void Generate_CompileOptimizedConcurrent(MacroAssembler* masm);
+ static void Generate_JSConstructStubCountdown(MacroAssembler* masm);
static void Generate_JSConstructStubGeneric(MacroAssembler* masm);
static void Generate_JSConstructStubApi(MacroAssembler* masm);
static void Generate_JSEntryTrampoline(MacroAssembler* masm);
// Sets the expected number of properties based on estimate from compiler.
void SetExpectedNofPropertiesFromEstimate(Handle<SharedFunctionInfo> shared,
int estimate) {
+ // See the comment in SetExpectedNofProperties.
+ if (shared->live_objects_may_exist()) return;
+
// If no properties are added in the constructor, they are more likely
// to be added later.
if (estimate == 0) estimate = 2;
share->set_debug_info(*undefined_value(), SKIP_WRITE_BARRIER);
share->set_inferred_name(*empty_string(), SKIP_WRITE_BARRIER);
share->set_feedback_vector(*empty_fixed_array(), SKIP_WRITE_BARRIER);
+ share->set_initial_map(*undefined_value(), SKIP_WRITE_BARRIER);
share->set_profiler_ticks(0);
share->set_ast_node_count(0);
share->set_counters(0);
SetInternalReference(obj, entry,
"feedback_vector", shared->feedback_vector(),
SharedFunctionInfo::kFeedbackVectorOffset);
+ SetWeakReference(obj, entry,
+ "initial_map", shared->initial_map(),
+ SharedFunctionInfo::kInitialMapOffset);
}
// so that object accesses before the constructor completes (e.g. in the
// debugger) will not cause a crash.
if (map->constructor()->IsJSFunction() &&
- JSFunction::cast(map->constructor())->
+ JSFunction::cast(map->constructor())->shared()->
IsInobjectSlackTrackingInProgress()) {
// We might want to shrink the object later.
ASSERT(obj->GetInternalFieldCount() == 0);
// Force completion of inobject slack tracking before generating
// allocation code to finalize instance size.
- if (constructor->IsInobjectSlackTrackingInProgress()) {
- constructor->CompleteInobjectSlackTracking();
+ if (constructor->shared()->IsInobjectSlackTrackingInProgress()) {
+ constructor->shared()->CompleteInobjectSlackTracking();
}
// Calculate instance size from initial map of constructor.
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
bool is_api_function,
+ bool count_constructions,
bool create_memento) {
// ----------- S t a t e -------------
// -- eax: number of arguments
// -- ebx: allocation site or undefined
// -----------------------------------
+ // Should never count constructions for api objects.
+ ASSERT(!is_api_function || !count_constructions);
+
// Should never create mementos for api functions.
ASSERT(!is_api_function || !create_memento);
+ // Should never create mementos before slack tracking is finished.
+ ASSERT(!count_constructions || !create_memento);
+
// Enter a construct frame.
{
FrameScope scope(masm, StackFrame::CONSTRUCT);
__ CmpInstanceType(eax, JS_FUNCTION_TYPE);
__ j(equal, &rt_call);
- if (!is_api_function) {
+ if (count_constructions) {
Label allocate;
- // The code below relies on these assumptions.
- STATIC_ASSERT(JSFunction::kNoSlackTracking == 0);
- STATIC_ASSERT(Map::ConstructionCount::kShift +
- Map::ConstructionCount::kSize == 32);
- // Check if slack tracking is enabled.
- __ mov(esi, FieldOperand(eax, Map::kBitField3Offset));
- __ shr(esi, Map::ConstructionCount::kShift);
- __ j(zero, &allocate); // JSFunction::kNoSlackTracking
// Decrease generous allocation count.
- __ sub(FieldOperand(eax, Map::kBitField3Offset),
- Immediate(1 << Map::ConstructionCount::kShift));
-
- __ cmp(esi, JSFunction::kFinishSlackTracking);
- __ j(not_equal, &allocate);
+ __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
+ __ dec_b(FieldOperand(ecx,
+ SharedFunctionInfo::kConstructionCountOffset));
+ __ j(not_zero, &allocate);
__ push(eax);
__ push(edi);
__ push(edi); // constructor
+ // The call will replace the stub, so the countdown is only done once.
__ CallRuntime(Runtime::kHiddenFinalizeInstanceSize, 1);
__ pop(edi);
__ pop(eax);
- __ xor_(esi, esi); // JSFunction::kNoSlackTracking
__ bind(&allocate);
}
// eax: initial map
// ebx: JSObject
// edi: start of next object (including memento if create_memento)
- // esi: slack tracking counter (non-API function case)
- __ mov(edx, factory->undefined_value());
__ lea(ecx, Operand(ebx, JSObject::kHeaderSize));
- if (!is_api_function) {
- Label no_inobject_slack_tracking;
-
- // Check if slack tracking is enabled.
- __ cmp(esi, JSFunction::kNoSlackTracking);
- __ j(equal, &no_inobject_slack_tracking);
-
- // Allocate object with a slack.
+ __ mov(edx, factory->undefined_value());
+ if (count_constructions) {
__ movzx_b(esi,
FieldOperand(eax, Map::kPreAllocatedPropertyFieldsOffset));
__ lea(esi,
}
__ InitializeFieldsWithFiller(ecx, esi, edx);
__ mov(edx, factory->one_pointer_filler_map());
- // Fill the remaining fields with one pointer filler map.
-
- __ bind(&no_inobject_slack_tracking);
- }
-
- if (create_memento) {
+ __ InitializeFieldsWithFiller(ecx, edi, edx);
+ } else if (create_memento) {
__ lea(esi, Operand(edi, -AllocationMemento::kSize));
__ InitializeFieldsWithFiller(ecx, esi, edx);
// Fill in memento fields if necessary.
// esi: points to the allocated but uninitialized memento.
+ Handle<Map> allocation_memento_map = factory->allocation_memento_map();
__ mov(Operand(esi, AllocationMemento::kMapOffset),
- factory->allocation_memento_map());
+ allocation_memento_map);
// Get the cell or undefined.
__ mov(edx, Operand(esp, kPointerSize*2));
__ mov(Operand(esi, AllocationMemento::kAllocationSiteOffset),
}
// Store offset of return address for deoptimizer.
- if (!is_api_function) {
+ if (!is_api_function && !count_constructions) {
masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
}
}
+// Construct stub installed while inobject slack tracking is in progress:
+// the helper is invoked with count_constructions == true (no API function,
+// no allocation memento), so it decrements the per-SharedFunctionInfo
+// construction count and finalizes the instance size when it reaches zero.
+void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
+  Generate_JSConstructStubHelper(masm, false, true, false);
+}
+
+
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
+ Generate_JSConstructStubHelper(masm, false, false, FLAG_pretenuring_call_new);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, true, false);
+ Generate_JSConstructStubHelper(masm, true, false, false);
}
SweepSpaces();
+ if (!FLAG_collect_maps) ReattachInitialMaps();
+
#ifdef DEBUG
if (FLAG_verify_native_context_separation) {
VerifyNativeContextSeparation(heap_);
}
+// Walks the map space and, for every JSReceiver map flagged as
+// attached_to_shared_function_info, restores the SharedFunctionInfo ->
+// initial_map link that marking detached (to keep the reference weak).
+// Called after sweeping when map collection is disabled, since in that case
+// the ClearNonLiveReferences pass that normally reattaches maps did not run.
+void MarkCompactCollector::ReattachInitialMaps() {
+  HeapObjectIterator map_iterator(heap()->map_space());
+  for (HeapObject* obj = map_iterator.Next();
+       obj != NULL;
+       obj = map_iterator.Next()) {
+    Map* map = Map::cast(obj);
+
+    // Only JSReceiver maps participate in inobject slack tracking.
+    STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
+    if (map->instance_type() < FIRST_JS_RECEIVER_TYPE) continue;
+
+    if (map->attached_to_shared_function_info()) {
+      JSFunction::cast(map->constructor())->shared()->AttachInitialMap(map);
+    }
+  }
+}
+
+
void MarkCompactCollector::ClearNonLiveReferences() {
// Iterate over the map space, setting map transitions that go from
// a marked map to an unmarked map to null transitions. This action
if (!map->CanTransition()) continue;
MarkBit map_mark = Marking::MarkBitFrom(map);
+ if (map_mark.Get() && map->attached_to_shared_function_info()) {
+ // This map is used for inobject slack tracking and has been detached
+ // from SharedFunctionInfo during the mark phase.
+ // Since it survived the GC, reattach it now.
+ JSFunction::cast(map->constructor())->shared()->AttachInitialMap(map);
+ }
+
ClearNonLivePrototypeTransitions(map);
ClearNonLiveMapTransitions(map, map_mark);
int ClearNonLiveDependentCodeInGroup(DependentCode* dependent_code, int group,
int start, int end, int new_start);
+ // Marking detaches initial maps from SharedFunctionInfo objects
+ // to make this reference weak. We need to reattach initial maps
+ // back after collection. This is either done during
+ // ClearNonLiveTransitions pass or by calling this function.
+ void ReattachInitialMaps();
+
// Mark all values associated with reachable keys in weak collections
// encountered so far. This might push new object or even new weak maps onto
// the marking stack.
}
+// Sets/clears the bit_field2 flag recording that this map is the initial map
+// referenced (weakly) by a SharedFunctionInfo for inobject slack tracking,
+// so the link can be restored if the map survives GC.
+void Map::set_attached_to_shared_function_info(bool value) {
+  if (value) {
+    set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
+  } else {
+    set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
+  }
+}
+
+// Returns true if the kAttachedToSharedFunctionInfo bit is set in bit_field2.
+bool Map::attached_to_shared_function_info() {
+  return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
+}
+
+
void Map::set_is_shared(bool value) {
set_bit_field3(IsShared::update(bit_field3(), value));
}
}
-void Map::set_done_inobject_slack_tracking(bool value) {
- set_bit_field3(DoneInobjectSlackTracking::update(bit_field3(), value));
-}
-
-
-bool Map::done_inobject_slack_tracking() {
- return DoneInobjectSlackTracking::decode(bit_field3());
-}
-
-
-void Map::set_construction_count(int value) {
- set_bit_field3(ConstructionCount::update(bit_field3(), value));
-}
-
-
-int Map::construction_count() {
- return ConstructionCount::decode(bit_field3());
-}
-
-
void Map::freeze() {
set_bit_field3(IsFrozen::update(bit_field3(), true));
}
ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS(SharedFunctionInfo, feedback_vector, FixedArray,
kFeedbackVectorOffset)
+ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
kInstanceClassNameOffset)
ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
#endif
+// Counter of constructor calls remaining in the slack-tracking countdown.
+// Stored as a single byte at kConstructionCountOffset.
+int SharedFunctionInfo::construction_count() {
+  return READ_BYTE_FIELD(this, kConstructionCountOffset);
+}
+
+
+void SharedFunctionInfo::set_construction_count(int value) {
+  // The counter occupies one byte, so the value must fit in [0, 255].
+  ASSERT(0 <= value && value < 256);
+  WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
+}
+
+
+// Compiler-hints flag: true if live objects created from this function may
+// exist (existence not guaranteed); may go back to false after GC.
+BOOL_ACCESSORS(SharedFunctionInfo,
+               compiler_hints,
+               live_objects_may_exist,
+               kLiveObjectsMayExist)
+
+
+// Tracking is in progress exactly while initial_map holds a map (i.e. is not
+// the undefined sentinel).
+bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
+  return initial_map() != GetHeap()->undefined_value();
+}
+
+
BOOL_GETTER(SharedFunctionInfo,
compiler_hints,
optimization_disabled,
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
void SharedFunctionInfo::BeforeVisitingPointers() {
+ if (IsInobjectSlackTrackingInProgress()) DetachInitialMap();
}
}
-bool JSFunction::IsInobjectSlackTrackingInProgress() {
- return has_initial_map() &&
- initial_map()->construction_count() != JSFunction::kNoSlackTracking;
-}
-
-
Code* JSFunction::code() {
return Code::cast(
Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
// copy containing the new prototype. Also complete any in-object
// slack tracking that is in progress at this point because it is
// still tracking the old copy.
- if (function->IsInobjectSlackTrackingInProgress()) {
- function->CompleteInobjectSlackTracking();
+ if (function->shared()->IsInobjectSlackTrackingInProgress()) {
+ function->shared()->CompleteInobjectSlackTracking();
}
Handle<Map> new_map = Map::Copy(handle(function->initial_map()));
new_map->set_prototype(*value);
map->set_prototype(*prototype);
ASSERT(map->has_fast_object_elements());
+ if (!function->shared()->is_generator()) {
+ function->shared()->StartInobjectSlackTracking(*map);
+ }
+
// Finally link initial map and constructor function.
function->set_initial_map(*map);
map->set_constructor(*function);
-
- if (!function->shared()->is_generator()) {
- function->StartInobjectSlackTracking();
- }
}
}
-void JSFunction::StartInobjectSlackTracking() {
- ASSERT(has_initial_map() && !IsInobjectSlackTrackingInProgress());
+void SharedFunctionInfo::StartInobjectSlackTracking(Map* map) {
+ ASSERT(!IsInobjectSlackTrackingInProgress());
if (!FLAG_clever_optimizations) return;
- Map* map = initial_map();
// Only initiate the tracking the first time.
- if (map->done_inobject_slack_tracking()) return;
- map->set_done_inobject_slack_tracking(true);
+ if (live_objects_may_exist()) return;
+ set_live_objects_may_exist(true);
// No tracking during the snapshot construction phase.
Isolate* isolate = GetIsolate();
if (map->unused_property_fields() == 0) return;
- map->set_construction_count(kGenerousAllocationCount);
+ // Nonzero counter is a leftover from the previous attempt interrupted
+ // by GC, keep it.
+ if (construction_count() == 0) {
+ set_construction_count(kGenerousAllocationCount);
+ }
+ set_initial_map(map);
+ Builtins* builtins = isolate->builtins();
+ ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubGeneric),
+ construct_stub());
+ set_construct_stub(builtins->builtin(Builtins::kJSConstructStubCountdown));
+}
+
+
+// Called from GC, hence reinterpret_cast and unchecked accessors.
+// Clears the initial_map link before marking so the reference stays weak;
+// IsInobjectSlackTrackingInProgress is false after this call.
+void SharedFunctionInfo::DetachInitialMap() {
+  Map* map = reinterpret_cast<Map*>(initial_map());
+
+  // Make the map remember to restore the link if it survives the GC.
+  map->set_bit_field2(
+      map->bit_field2() | (1 << Map::kAttachedToSharedFunctionInfo));
+
+  // Undo state changes made by StartInobjectSlackTracking (except the
+  // construction_count). This way if the initial map does not survive the GC
+  // then StartInobjectSlackTracking will be called again the next time the
+  // constructor is called. The countdown will continue and (possibly after
+  // several more GCs) CompleteInobjectSlackTracking will eventually be called.
+  Heap* heap = map->GetHeap();
+  set_initial_map(heap->undefined_value());
+  Builtins* builtins = heap->isolate()->builtins();
+  ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubCountdown),
+            *RawField(this, kConstructStubOffset));
+  set_construct_stub(builtins->builtin(Builtins::kJSConstructStubGeneric));
+  // It is safe to clear the flag: it will be set again if the map is live.
+  set_live_objects_may_exist(false);
+}
+
+
+// Called from GC, hence reinterpret_cast and unchecked accessors.
+// Restores the initial_map link after the marking phase (inverse of
+// DetachInitialMap); IsInobjectSlackTrackingInProgress is true afterwards.
+void SharedFunctionInfo::AttachInitialMap(Map* map) {
+  map->set_bit_field2(
+      map->bit_field2() & ~(1 << Map::kAttachedToSharedFunctionInfo));
+
+  // Resume inobject slack tracking.
+  set_initial_map(map);
+  Builtins* builtins = map->GetHeap()->isolate()->builtins();
+  ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubGeneric),
+            *RawField(this, kConstructStubOffset));
+  set_construct_stub(builtins->builtin(Builtins::kJSConstructStubCountdown));
+  // The map survived the GC, so there may be objects referencing it.
+  set_live_objects_may_exist(true);
+}
}
-void JSFunction::CompleteInobjectSlackTracking() {
- ASSERT(has_initial_map());
- Map* map = initial_map();
+void SharedFunctionInfo::CompleteInobjectSlackTracking() {
+ ASSERT(live_objects_may_exist() && IsInobjectSlackTrackingInProgress());
+ Map* map = Map::cast(initial_map());
- ASSERT(map->done_inobject_slack_tracking());
- map->set_construction_count(kNoSlackTracking);
+ Heap* heap = map->GetHeap();
+ set_initial_map(heap->undefined_value());
+ Builtins* builtins = heap->isolate()->builtins();
+ ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubCountdown),
+ construct_stub());
+ set_construct_stub(builtins->builtin(Builtins::kJSConstructStubGeneric));
int slack = map->unused_property_fields();
map->TraverseTransitionTree(&GetMinInobjectSlack, &slack);
if (slack != 0) {
// Resize the initial map and all maps in its transition tree.
map->TraverseTransitionTree(&ShrinkInstanceSize, &slack);
+
+ // Give the correct expected_nof_properties to initial maps created later.
+ ASSERT(expected_nof_properties() >= slack);
+ set_expected_nof_properties(expected_nof_properties() - slack);
}
}
class IsFrozen: public BitField<bool, 25, 1> {};
class IsUnstable: public BitField<bool, 26, 1> {};
class IsMigrationTarget: public BitField<bool, 27, 1> {};
- class DoneInobjectSlackTracking: public BitField<bool, 28, 1> {};
- // Keep this bit field at the very end for better code in
- // Builtins::kJSConstructStubGeneric stub.
- class ConstructionCount: public BitField<int, 29, 3> {};
// Tells whether the object in the prototype property will be used
// for instances created from this function. If the prototype
// function that was used to instantiate the object).
String* constructor_name();
+ // Tells whether the map is attached to SharedFunctionInfo
+ // (for inobject slack tracking).
+ inline void set_attached_to_shared_function_info(bool value);
+
+ inline bool attached_to_shared_function_info();
+
// Tells whether the map is shared between objects that may have different
// behavior. If true, the map should never be modified, instead a clone
// should be created and modified.
inline bool is_stable();
inline void set_migration_target(bool value);
inline bool is_migration_target();
- inline void set_done_inobject_slack_tracking(bool value);
- inline bool done_inobject_slack_tracking();
- inline void set_construction_count(int value);
- inline int construction_count();
inline void deprecate();
inline bool is_deprecated();
inline bool CanBeDeprecated();
// Bit positions for bit field 2
static const int kIsExtensible = 0;
static const int kStringWrapperSafeForDefaultValueOf = 1;
- // Currently bit 2 is not used.
+ static const int kAttachedToSharedFunctionInfo = 2;
// No bits can be used after kElementsKindFirstBit, they are all reserved for
// storing ElementKind.
static const int kElementsKindShift = 3;
inline int expected_nof_properties();
inline void set_expected_nof_properties(int value);
+ // Inobject slack tracking is the way to reclaim unused inobject space.
+ //
+ // The instance size is initially determined by adding some slack to
+ // expected_nof_properties (to allow for a few extra properties added
+ // after the constructor). There is no guarantee that the extra space
+ // will not be wasted.
+ //
+ // Here is the algorithm to reclaim the unused inobject space:
+ // - Detect the first constructor call for this SharedFunctionInfo.
+ // When it happens enter the "in progress" state: remember the
+ // constructor's initial_map and install a special construct stub that
+ // counts constructor calls.
+ // - While the tracking is in progress create objects filled with
+ // one_pointer_filler_map instead of undefined_value. This way they can be
+ // resized quickly and safely.
+ // - Once enough (kGenerousAllocationCount) objects have been created
+ // compute the 'slack' (traverse the map transition tree starting from the
+ // initial_map and find the lowest value of unused_property_fields).
+ // - Traverse the transition tree again and decrease the instance size
+ // of every map. Existing objects will resize automatically (they are
+ // filled with one_pointer_filler_map). All further allocations will
+ // use the adjusted instance size.
+  // - Decrease expected_nof_properties so that allocations made from
+ // another context will use the adjusted instance size too.
+ // - Exit "in progress" state by clearing the reference to the initial_map
+ // and setting the regular construct stub (generic or inline).
+ //
+ // The above is the main event sequence. Some special cases are possible
+ // while the tracking is in progress:
+ //
+ // - GC occurs.
+ // Check if the initial_map is referenced by any live objects (except this
+ // SharedFunctionInfo). If it is, continue tracking as usual.
+ // If it is not, clear the reference and reset the tracking state. The
+ // tracking will be initiated again on the next constructor call.
+ //
+ // - The constructor is called from another context.
+ // Immediately complete the tracking, perform all the necessary changes
+ // to maps. This is necessary because there is no efficient way to track
+ // multiple initial_maps.
+ // Proceed to create an object in the current context (with the adjusted
+ // size).
+ //
+ // - A different constructor function sharing the same SharedFunctionInfo is
+ // called in the same context. This could be another closure in the same
+ // context, or the first function could have been disposed.
+ // This is handled the same way as the previous case.
+ //
+ // Important: inobject slack tracking is not attempted during the snapshot
+ // creation.
+
+ static const int kGenerousAllocationCount = 8;
+
+ // [construction_count]: Counter for constructor calls made during
+ // the tracking phase.
+ inline int construction_count();
+ inline void set_construction_count(int value);
+
// [feedback_vector] - accumulates ast node feedback from full-codegen and
// (increasingly) from crankshafted code where sufficient feedback isn't
// available. Currently the field is duplicated in
// TypeFeedbackInfo::feedback_vector, but the allocation is done here.
DECL_ACCESSORS(feedback_vector, FixedArray)
+ // [initial_map]: initial map of the first function called as a constructor.
+ // Saved for the duration of the tracking phase.
+ // This is a weak link (GC resets it to undefined_value if no other live
+  // object references this map).
+ DECL_ACCESSORS(initial_map, Object)
+
+ // True if the initial_map is not undefined and the countdown stub is
+ // installed.
+ inline bool IsInobjectSlackTrackingInProgress();
+
+ // Starts the tracking.
+ // Stores the initial map and installs the countdown stub.
+ // IsInobjectSlackTrackingInProgress is normally true after this call,
+  // except when tracking has not been started (e.g. the map has no unused
+ // properties or the snapshot is being built).
+ void StartInobjectSlackTracking(Map* map);
+
+ // Completes the tracking.
+ // IsInobjectSlackTrackingInProgress is false after this call.
+ void CompleteInobjectSlackTracking();
+
// Invoked before pointers in SharedFunctionInfo are being marked.
// Also clears the optimized code map.
inline void BeforeVisitingPointers();
+ // Clears the initial_map before the GC marking phase to ensure the reference
+ // is weak. IsInobjectSlackTrackingInProgress is false after this call.
+ void DetachInitialMap();
+
+ // Restores the link to the initial map after the GC marking phase.
+ // IsInobjectSlackTrackingInProgress is true after this call.
+ void AttachInitialMap(Map* map);
+
+ // False if there are definitely no live objects created from this function.
+ // True if live objects _may_ exist (existence not guaranteed).
+ // May go back from true to false after GC.
+ DECL_BOOLEAN_ACCESSORS(live_objects_may_exist)
+
// [instance class name]: class name for instances.
DECL_ACCESSORS(instance_class_name, Object)
static const int kInferredNameOffset = kDebugInfoOffset + kPointerSize;
static const int kFeedbackVectorOffset =
kInferredNameOffset + kPointerSize;
+ static const int kInitialMapOffset =
+ kFeedbackVectorOffset + kPointerSize;
#if V8_HOST_ARCH_32_BIT
// Smi fields.
static const int kLengthOffset =
- kFeedbackVectorOffset + kPointerSize;
+ kInitialMapOffset + kPointerSize;
static const int kFormalParameterCountOffset = kLengthOffset + kPointerSize;
static const int kExpectedNofPropertiesOffset =
kFormalParameterCountOffset + kPointerSize;
// word is not set and thus this word cannot be treated as pointer
// to HeapObject during old space traversal.
static const int kLengthOffset =
- kFeedbackVectorOffset + kPointerSize;
+ kInitialMapOffset + kPointerSize;
static const int kFormalParameterCountOffset =
kLengthOffset + kIntSize;
#endif
+ // The construction counter for inobject slack tracking is stored in the
+ // most significant byte of compiler_hints which is otherwise unused.
+ // Its offset depends on the endian-ness of the architecture.
+#if defined(V8_TARGET_LITTLE_ENDIAN)
+ static const int kConstructionCountOffset = kCompilerHintsOffset + 3;
+#elif defined(V8_TARGET_BIG_ENDIAN)
+ static const int kConstructionCountOffset = kCompilerHintsOffset + 0;
+#else
+#error Unknown byte ordering
+#endif
+
static const int kAlignedSize = POINTER_SIZE_ALIGN(kSize);
typedef FixedBodyDescriptor<kNameOffset,
- kFeedbackVectorOffset + kPointerSize,
+ kInitialMapOffset + kPointerSize,
kSize> BodyDescriptor;
// Bit positions in start_position_and_type.
enum CompilerHints {
kAllowLazyCompilation,
kAllowLazyCompilationWithoutContext,
+ kLiveObjectsMayExist,
kOptimizationDisabled,
kStrictModeFunction,
kUsesArguments,
// Tells whether or not the function is on the concurrent recompilation queue.
inline bool IsInOptimizationQueue();
- // Inobject slack tracking is the way to reclaim unused inobject space.
- //
- // The instance size is initially determined by adding some slack to
- // expected_nof_properties (to allow for a few extra properties added
- // after the constructor). There is no guarantee that the extra space
- // will not be wasted.
- //
- // Here is the algorithm to reclaim the unused inobject space:
- // - Detect the first constructor call for this JSFunction.
- // When it happens enter the "in progress" state: initialize construction
- // counter in the initial_map and set the |done_inobject_slack_tracking|
- // flag.
- // - While the tracking is in progress create objects filled with
- // one_pointer_filler_map instead of undefined_value. This way they can be
- // resized quickly and safely.
- // - Once enough (kGenerousAllocationCount) objects have been created
- // compute the 'slack' (traverse the map transition tree starting from the
- // initial_map and find the lowest value of unused_property_fields).
- // - Traverse the transition tree again and decrease the instance size
- // of every map. Existing objects will resize automatically (they are
- // filled with one_pointer_filler_map). All further allocations will
- // use the adjusted instance size.
- // - SharedFunctionInfo's expected_nof_properties left unmodified since
- // allocations made using different closures could actually create different
- // kind of objects (see prototype inheritance pattern).
- //
- // Important: inobject slack tracking is not attempted during the snapshot
- // creation.
-
- static const int kGenerousAllocationCount = Map::ConstructionCount::kMax;
- static const int kFinishSlackTracking = 1;
- static const int kNoSlackTracking = 0;
-
- // True if the initial_map is set and the object constructions countdown
- // counter is not zero.
- inline bool IsInobjectSlackTrackingInProgress();
-
- // Starts the tracking.
- // Initializes object constructions countdown counter in the initial map.
- // IsInobjectSlackTrackingInProgress is normally true after this call,
- // except when tracking have not been started (e.g. the map has no unused
- // properties or the snapshot is being built).
- void StartInobjectSlackTracking();
-
- // Completes the tracking.
- // IsInobjectSlackTrackingInProgress is false after this call.
- void CompleteInobjectSlackTracking();
-
// [literals_or_bindings]: Fixed array holding either
// the materialized literals or the bindings of a bound function.
//
CONVERT_ARG_HANDLE_CHECKED(JSFunction, func, 0);
CONVERT_SMI_ARG_CHECKED(num, 1);
RUNTIME_ASSERT(num >= 0);
-
- func->shared()->set_expected_nof_properties(num);
- if (func->has_initial_map()) {
- Handle<Map> new_initial_map = Map::Copy(handle(func->initial_map()));
- new_initial_map->set_unused_property_fields(num);
- func->set_initial_map(*new_initial_map);
+ // If objects constructed from this function exist then changing
+  // 'expected_nof_properties' is dangerous since the previous value might
+ // have been compiled into the fast construct stub. Moreover, the inobject
+ // slack tracking logic might have adjusted the previous value, so even
+ // passing the same value is risky.
+ if (!func->shared()->live_objects_may_exist()) {
+ func->shared()->set_expected_nof_properties(num);
+ if (func->has_initial_map()) {
+ Handle<Map> new_initial_map = Map::Copy(handle(func->initial_map()));
+ new_initial_map->set_unused_property_fields(num);
+ func->set_initial_map(*new_initial_map);
+ }
}
return isolate->heap()->undefined_value();
}
// available.
Compiler::EnsureCompiled(function, CLEAR_EXCEPTION);
+ Handle<SharedFunctionInfo> shared(function->shared(), isolate);
+ if (!function->has_initial_map() &&
+ shared->IsInobjectSlackTrackingInProgress()) {
+ // The tracking is already in progress for another function. We can only
+ // track one initial_map at a time, so we force the completion before the
+ // function is called as a constructor for the first time.
+ shared->CompleteInobjectSlackTracking();
+ }
+
Handle<JSObject> result;
if (site.is_null()) {
result = isolate->factory()->NewJSObject(function);
ASSERT(args.length() == 1);
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
- function->CompleteInobjectSlackTracking();
+ function->shared()->CompleteInobjectSlackTracking();
return isolate->heap()->undefined_value();
}
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
bool is_api_function,
+ bool count_constructions,
bool create_memento) {
// ----------- S t a t e -------------
// -- rax: number of arguments
// -- rbx: allocation site or undefined
// -----------------------------------
+ // Should never count constructions for api objects.
+ ASSERT(!is_api_function || !count_constructions);\
+
// Should never create mementos for api functions.
ASSERT(!is_api_function || !create_memento);
+ // Should never create mementos before slack tracking is finished.
+ ASSERT(!count_constructions || !create_memento);
+
// Enter a construct frame.
{
FrameScope scope(masm, StackFrame::CONSTRUCT);
__ CmpInstanceType(rax, JS_FUNCTION_TYPE);
__ j(equal, &rt_call);
- if (!is_api_function) {
+ if (count_constructions) {
Label allocate;
- // The code below relies on these assumptions.
- STATIC_ASSERT(JSFunction::kNoSlackTracking == 0);
- STATIC_ASSERT(Map::ConstructionCount::kShift +
- Map::ConstructionCount::kSize == 32);
- // Check if slack tracking is enabled.
- __ movl(rsi, FieldOperand(rax, Map::kBitField3Offset));
- __ shrl(rsi, Immediate(Map::ConstructionCount::kShift));
- __ j(zero, &allocate); // JSFunction::kNoSlackTracking
// Decrease generous allocation count.
- __ subl(FieldOperand(rax, Map::kBitField3Offset),
- Immediate(1 << Map::ConstructionCount::kShift));
-
- __ cmpl(rsi, Immediate(JSFunction::kFinishSlackTracking));
- __ j(not_equal, &allocate);
+ __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
+ __ decb(FieldOperand(rcx,
+ SharedFunctionInfo::kConstructionCountOffset));
+ __ j(not_zero, &allocate);
__ Push(rax);
__ Push(rdi);
__ Push(rdi); // constructor
+ // The call will replace the stub, so the countdown is only done once.
__ CallRuntime(Runtime::kHiddenFinalizeInstanceSize, 1);
__ Pop(rdi);
__ Pop(rax);
- __ xorl(rsi, rsi); // JSFunction::kNoSlackTracking
__ bind(&allocate);
}
// rax: initial map
// rbx: JSObject
// rdi: start of next object (including memento if create_memento)
- // rsi: slack tracking counter (non-API function case)
__ leap(rcx, Operand(rbx, JSObject::kHeaderSize));
__ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
- if (!is_api_function) {
- Label no_inobject_slack_tracking;
-
- // Check if slack tracking is enabled.
- __ cmpl(rsi, Immediate(JSFunction::kNoSlackTracking));
- __ j(equal, &no_inobject_slack_tracking);
-
- // Allocate object with a slack.
+ if (count_constructions) {
__ movzxbp(rsi,
FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
__ leap(rsi,
}
__ InitializeFieldsWithFiller(rcx, rsi, rdx);
__ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
- // Fill the remaining fields with one pointer filler map.
-
- __ bind(&no_inobject_slack_tracking);
- }
- if (create_memento) {
+ __ InitializeFieldsWithFiller(rcx, rdi, rdx);
+ } else if (create_memento) {
__ leap(rsi, Operand(rdi, -AllocationMemento::kSize));
__ InitializeFieldsWithFiller(rcx, rsi, rdx);
// Fill in memento fields if necessary.
// rsi: points to the allocated but uninitialized memento.
+ Handle<Map> allocation_memento_map = factory->allocation_memento_map();
__ Move(Operand(rsi, AllocationMemento::kMapOffset),
- factory->allocation_memento_map());
+ allocation_memento_map);
// Get the cell or undefined.
__ movp(rdx, Operand(rsp, kPointerSize*2));
- __ movp(Operand(rsi, AllocationMemento::kAllocationSiteOffset), rdx);
+ __ movp(Operand(rsi, AllocationMemento::kAllocationSiteOffset),
+ rdx);
} else {
__ InitializeFieldsWithFiller(rcx, rdi, rdx);
}
}
// Store offset of return address for deoptimizer.
- if (!is_api_function) {
+ if (!is_api_function && !count_constructions) {
masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
}
}
+void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
+ Generate_JSConstructStubHelper(masm, false, true, false);
+}
+
+
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
+ Generate_JSConstructStubHelper(masm, false, false, FLAG_pretenuring_call_new);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, true, false);
+ Generate_JSConstructStubHelper(masm, true, false, false);
}
Isolate* isolate = CcTest::i_isolate();
Heap* heap = isolate->heap();
+ // We need to create several instances to get past the slack-tracking
+ // phase, where mementos aren't emitted.
int call_count = 10;
+ CHECK_GE(call_count, SharedFunctionInfo::kGenerousAllocationCount);
i::ScopedVector<char> test_buf(1024);
const char* program =
"function f() {"
CHECK_EQ(memento->map(), heap->allocation_memento_map());
// Furthermore, how many mementos did we create? The count should match
- // call_count. Note, that mementos are allocated during the inobject slack
- // tracking phase.
+ // call_count - SharedFunctionInfo::kGenerousAllocationCount.
AllocationSite* site = memento->GetAllocationSite();
- CHECK_EQ(call_count, site->pretenure_create_count()->value());
+ CHECK_EQ(call_count - SharedFunctionInfo::kGenerousAllocationCount,
+ site->pretenure_create_count()->value());
}