}
+static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
+ // r0 : number of arguments to the construct function
+ // r2 : Feedback vector
+ // r3 : slot in feedback vector (Smi)
+ // r1 : the function to call
+ FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
+
+ // Arguments register must be smi-tagged to call out.
+ __ SmiTag(r0);
+ __ Push(r3, r2, r1, r0);
+
+ __ CallStub(stub);
+
+ __ Pop(r3, r2, r1, r0);
+ __ SmiUntag(r0);
+}
+
+
static void GenerateRecordCallTarget(MacroAssembler* masm) {
// Cache the called function in a feedback vector slot. Cache states
// are uninitialized, monomorphic (indicated by a WeakCell), and
// A monomorphic cache hit or an already megamorphic state: invoke the
// function without changing the state.
- __ cmp(r4, r1);
+ Label check_allocation_site;
+ Register feedback_map = r5;
+ Register weak_value = r8;
+ __ ldr(weak_value, FieldMemOperand(r4, WeakCell::kValueOffset));
+ __ cmp(r1, weak_value);
+ __ b(eq, &done);
+ __ CompareRoot(r4, Heap::kmegamorphic_symbolRootIndex);
__ b(eq, &done);
+ __ ldr(feedback_map, FieldMemOperand(r4, 0));
+ __ CompareRoot(feedback_map, Heap::kWeakCellMapRootIndex);
+ __ b(ne, FLAG_pretenuring_call_new ? &miss : &check_allocation_site);
+
+ // If r1 is not equal to the weak cell value, and the weak cell value is
+ // cleared, we have a new chance to become monomorphic.
+ __ JumpIfSmi(weak_value, &initialize);
+ __ jmp(&megamorphic);
if (!FLAG_pretenuring_call_new) {
+ __ bind(&check_allocation_site);
// If we came here, we need to see if we are the array function.
// If we didn't have a matching function, and we didn't find the megamorphic
// sentinel, then the slot holds either some other function or an
// AllocationSite. Do a map check on the object in r4.
- __ ldr(r5, FieldMemOperand(r4, 0));
- __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
+ __ CompareRoot(feedback_map, Heap::kAllocationSiteMapRootIndex);
__ b(ne, &miss);
// Make sure the function is the Array() function
// The target function is the Array constructor,
// Create an AllocationSite if we don't already have it, store it in the
// slot.
- {
- FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
-
- // Arguments register must be smi-tagged to call out.
- __ SmiTag(r0);
- __ Push(r3, r2, r1, r0);
-
- CreateAllocationSiteStub create_stub(masm->isolate());
- __ CallStub(&create_stub);
-
- __ Pop(r3, r2, r1, r0);
- __ SmiUntag(r0);
- }
+ CreateAllocationSiteStub create_stub(masm->isolate());
+ CallStubInRecordCallTarget(masm, &create_stub);
__ b(&done);
__ bind(&not_array_function);
}
- __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
- __ add(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- __ str(r1, MemOperand(r4, 0));
-
- __ Push(r4, r2, r1);
- __ RecordWrite(r2, r4, r1, kLRHasNotBeenSaved, kDontSaveFPRegs,
- EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
- __ Pop(r4, r2, r1);
-
+ CreateWeakCellStub create_stub(masm->isolate());
+ CallStubInRecordCallTarget(masm, &create_stub);
__ bind(&done);
}
}
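
All four ports below implement the same feedback-slot state machine: a WeakCell holding the target function means monomorphic, the megamorphic symbol is a terminal state, and a cleared WeakCell lets the call site become monomorphic again. A standalone C++ sketch of the transitions (illustrative names and types, not V8's; the Array/AllocationSite special case is omitted):

```cpp
// Sketch of the slot state machine implemented by GenerateRecordCallTarget.
// V8 encodes these states as heap values (a WeakCell, the uninitialized and
// megamorphic sentinel symbols); here they are a plain enum.
#include <cstdio>

struct Function {};

struct WeakCell {
  Function* value = nullptr;               // nulled when the GC clears it
  bool cleared() const { return value == nullptr; }
};

enum class State { kUninitialized, kMonomorphic, kMegamorphic };

// One recording step for a call to `target`; returns the slot's next state.
State Record(State state, WeakCell& cell, Function* target) {
  switch (state) {
    case State::kUninitialized:
      cell.value = target;                 // CreateWeakCellStub path
      return State::kMonomorphic;
    case State::kMonomorphic:
      if (cell.value == target) return State::kMonomorphic;  // cache hit
      if (cell.cleared()) {                // referent died: re-initialize
        cell.value = target;
        return State::kMonomorphic;
      }
      return State::kMegamorphic;          // a different, live target
    case State::kMegamorphic:
      return State::kMegamorphic;          // terminal
  }
  return state;
}

int main() {
  Function f, g;
  WeakCell cell;
  State s = Record(State::kUninitialized, cell, &f);  // monomorphic on f
  s = Record(s, cell, &f);                            // stays monomorphic
  s = Record(s, cell, &g);                            // goes megamorphic
  std::printf("%d\n", static_cast<int>(s));           // prints 2
}
```
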
-static void GenerateRecordCallTarget(MacroAssembler* masm,
- Register argc,
+static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub,
+ Register argc, Register function,
+ Register feedback_vector,
+ Register index) {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+
+ // Arguments register must be smi-tagged to call out.
+ __ SmiTag(argc);
+ __ Push(argc, function, feedback_vector, index);
+
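+ // The stubs called through this helper (CreateAllocationSiteStub and
+ // CreateWeakCellStub) expect the feedback vector in x2 and the slot
+ // index in x3.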
+ DCHECK(feedback_vector.Is(x2) && index.Is(x3));
+ __ CallStub(stub);
+
+ __ Pop(index, feedback_vector, function, argc);
+ __ SmiUntag(argc);
+}
+
+
+static void GenerateRecordCallTarget(MacroAssembler* masm, Register argc,
Register function,
- Register feedback_vector,
- Register index,
- Register scratch1,
- Register scratch2) {
+ Register feedback_vector, Register index,
+ Register scratch1, Register scratch2,
+ Register scratch3) {
ASM_LOCATION("GenerateRecordCallTarget");
- DCHECK(!AreAliased(scratch1, scratch2,
- argc, function, feedback_vector, index));
+ DCHECK(!AreAliased(scratch1, scratch2, scratch3, argc, function,
+ feedback_vector, index));
// Cache the called function in a feedback vector slot. Cache states are
// uninitialized, monomorphic (indicated by a WeakCell), and megamorphic.
// argc : number of arguments to the construct function
masm->isolate()->heap()->uninitialized_symbol());
// Load the cache state.
- __ Add(scratch1, feedback_vector,
+ Register feedback = scratch1;
+ Register feedback_map = scratch2;
+ Register feedback_value = scratch3;
+ __ Add(feedback, feedback_vector,
Operand::UntagSmiAndScale(index, kPointerSizeLog2));
- __ Ldr(scratch1, FieldMemOperand(scratch1, FixedArray::kHeaderSize));
+ __ Ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));
// A monomorphic cache hit or an already megamorphic state: invoke the
// function without changing the state.
- __ Cmp(scratch1, function);
+ Label check_allocation_site;
+ __ Ldr(feedback_value, FieldMemOperand(feedback, WeakCell::kValueOffset));
+ __ Cmp(function, feedback_value);
__ B(eq, &done);
+ __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
+ __ B(eq, &done);
+ __ Ldr(feedback_map, FieldMemOperand(feedback, 0));
+ __ CompareRoot(feedback_map, Heap::kWeakCellMapRootIndex);
+ __ B(ne, FLAG_pretenuring_call_new ? &miss : &check_allocation_site);
+
+ // If the function is not equal to the weak cell value, and the weak cell
+ // value is cleared, we have a new chance to become monomorphic.
+ __ JumpIfSmi(feedback_value, &initialize);
+ __ B(&megamorphic);
if (!FLAG_pretenuring_call_new) {
+ __ bind(&check_allocation_site);
// If we came here, we need to see if we are the array function.
// If we didn't have a matching function, and we didn't find the megamorphic
// sentinel, then the slot holds either some other function or an
// AllocationSite. Do a map check on the object in the scratch1 register.
- __ Ldr(scratch2, FieldMemOperand(scratch1, AllocationSite::kMapOffset));
- __ JumpIfNotRoot(scratch2, Heap::kAllocationSiteMapRootIndex, &miss);
+ __ JumpIfNotRoot(feedback_map, Heap::kAllocationSiteMapRootIndex, &miss);
// Make sure the function is the Array() function
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
// The target function is the Array constructor,
// Create an AllocationSite if we don't already have it, store it in the
// slot.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- CreateAllocationSiteStub create_stub(masm->isolate());
-
- // Arguments register must be smi-tagged to call out.
- __ SmiTag(argc);
- __ Push(argc, function, feedback_vector, index);
-
- // CreateAllocationSiteStub expect the feedback vector in x2 and the slot
- // index in x3.
- DCHECK(feedback_vector.Is(x2) && index.Is(x3));
- __ CallStub(&create_stub);
-
- __ Pop(index, feedback_vector, function, argc);
- __ SmiUntag(argc);
- }
+ CreateAllocationSiteStub create_stub(masm->isolate());
+ CallStubInRecordCallTarget(masm, &create_stub, argc, function,
+ feedback_vector, index);
__ B(&done);
__ Bind(&not_array_function);
}
- // An uninitialized cache is patched with the function.
-
- __ Add(scratch1, feedback_vector,
- Operand::UntagSmiAndScale(index, kPointerSizeLog2));
- __ Add(scratch1, scratch1, FixedArray::kHeaderSize - kHeapObjectTag);
- __ Str(function, MemOperand(scratch1, 0));
-
- __ Push(function);
- __ RecordWrite(feedback_vector, scratch1, function, kLRHasNotBeenSaved,
- kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
- __ Pop(function);
-
+ CreateWeakCellStub create_stub(masm->isolate());
+ CallStubInRecordCallTarget(masm, &create_stub, argc, function,
+ feedback_vector, index);
__ Bind(&done);
}
&slow);
if (RecordCallTarget()) {
- GenerateRecordCallTarget(masm, x0, function, x2, x3, x4, x5);
+ GenerateRecordCallTarget(masm, x0, function, x2, x3, x4, x5, x11);
__ Add(x5, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2));
if (FLAG_pretenuring_call_new) {
}
+static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
+ // eax : number of arguments to the construct function
+ // ebx : Feedback vector
+ // edx : slot in feedback vector (Smi)
+ // edi : the function to call
+ FrameScope scope(masm, StackFrame::INTERNAL);
+
+ // Arguments register must be smi-tagged to call out.
+ __ SmiTag(eax);
+ __ push(eax);
+ __ push(edi);
+ __ push(edx);
+ __ push(ebx);
+
+ __ CallStub(stub);
+
+ __ pop(ebx);
+ __ pop(edx);
+ __ pop(edi);
+ __ pop(eax);
+ __ SmiUntag(eax);
+}
+
+
static void GenerateRecordCallTarget(MacroAssembler* masm) {
// Cache the called function in a feedback vector slot. Cache states
// are uninitialized, monomorphic (indicated by a WeakCell), and
// A monomorphic cache hit or an already megamorphic state: invoke the
// function without changing the state.
- __ cmp(ecx, edi);
+ Label check_allocation_site;
+ __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset));
__ j(equal, &done, Label::kFar);
- __ cmp(ecx, Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
+ __ CompareRoot(ecx, Heap::kmegamorphic_symbolRootIndex);
__ j(equal, &done, Label::kFar);
+ __ CompareRoot(FieldOperand(ecx, 0), Heap::kWeakCellMapRootIndex);
+ __ j(not_equal, FLAG_pretenuring_call_new ? &miss : &check_allocation_site);
+
+ // If edi is not equal to the weak cell value, and the weak cell value is
+ // cleared, we have a new chance to become monomorphic.
+ __ JumpIfSmi(FieldOperand(ecx, WeakCell::kValueOffset), &initialize);
+ __ jmp(&megamorphic);
if (!FLAG_pretenuring_call_new) {
+ __ bind(&check_allocation_site);
// If we came here, we need to see if we are the array function.
// If we didn't have a matching function, and we didn't find the megamorphic
// sentinel, then the slot holds either some other function or an
// AllocationSite. Do a map check on the object in ecx.
- Handle<Map> allocation_site_map = isolate->factory()->allocation_site_map();
- __ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map));
+ __ CompareRoot(FieldOperand(ecx, 0), Heap::kAllocationSiteMapRootIndex);
__ j(not_equal, &miss);
// Make sure the function is the Array() function
// A monomorphic miss (i.e, here the cache is not uninitialized) goes
// megamorphic.
- __ cmp(ecx, Immediate(TypeFeedbackVector::UninitializedSentinel(isolate)));
+ __ CompareRoot(ecx, Heap::kuninitialized_symbolRootIndex);
__ j(equal, &initialize);
// MegamorphicSentinel is an immortal immovable object (undefined) so no
// write-barrier is needed.
// The target function is the Array constructor,
// Create an AllocationSite if we don't already have it, store it in the
// slot.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
-
- // Arguments register must be smi-tagged to call out.
- __ SmiTag(eax);
- __ push(eax);
- __ push(edi);
- __ push(edx);
- __ push(ebx);
-
- CreateAllocationSiteStub create_stub(isolate);
- __ CallStub(&create_stub);
-
- __ pop(ebx);
- __ pop(edx);
- __ pop(edi);
- __ pop(eax);
- __ SmiUntag(eax);
- }
+ CreateAllocationSiteStub create_stub(isolate);
+ CallStubInRecordCallTarget(masm, &create_stub);
__ jmp(&done);
__ bind(&not_array_function);
}
- __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
- FixedArray::kHeaderSize),
- edi);
- // We won't need edx or ebx anymore, just save edi
- __ push(edi);
- __ push(ebx);
- __ push(edx);
- __ RecordWriteArray(ebx, edi, edx, kDontSaveFPRegs,
- EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
- __ pop(edx);
- __ pop(ebx);
- __ pop(edi);
-
+ CreateWeakCellStub create_stub(isolate);
+ CallStubInRecordCallTarget(masm, &create_stub);
__ bind(&done);
}
void SharedFunctionInfo::ClearTypeFeedbackInfoAtGCTime() {
- feedback_vector()->ClearSlots(this);
+ feedback_vector()->ClearSlotsAtGCTime(this);
feedback_vector()->ClearICSlotsAtGCTime(this);
}
// This logic is copied from
// StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget.
-static bool ClearLogic(Heap* heap, int ic_age) {
+static bool ClearLogic(Heap* heap) {
return FLAG_cleanup_code_caches_at_gc &&
heap->isolate()->serializer_enabled();
}
-void TypeFeedbackVector::ClearSlots(SharedFunctionInfo* shared) {
+void TypeFeedbackVector::ClearSlotsImpl(SharedFunctionInfo* shared,
+ bool force_clear) {
int slots = Slots();
- Isolate* isolate = GetIsolate();
- Object* uninitialized_sentinel =
- TypeFeedbackVector::RawUninitializedSentinel(isolate->heap());
+ Heap* heap = GetIsolate()->heap();
+
+ if (!force_clear && !ClearLogic(heap)) return;
+ Object* uninitialized_sentinel =
+ TypeFeedbackVector::RawUninitializedSentinel(heap);
for (int i = 0; i < slots; i++) {
FeedbackVectorSlot slot(i);
Object* obj = Get(slot);
bool force_clear) {
Heap* heap = GetIsolate()->heap();
- // I'm not sure yet if this ic age is the correct one.
- int ic_age = shared->ic_age();
-
- if (!force_clear && !ClearLogic(heap, ic_age)) return;
+ if (!force_clear && !ClearLogic(heap)) return;
int slots = ICSlots();
Code* host = shared->code();
Handle<TypeFeedbackVector> vector);
// Clears the vector slots and the vector ic slots.
- void ClearSlots(SharedFunctionInfo* shared);
+ void ClearSlots(SharedFunctionInfo* shared) { ClearSlotsImpl(shared, true); }
+ void ClearSlotsAtGCTime(SharedFunctionInfo* shared) {
+ ClearSlotsImpl(shared, false);
+ }
+
void ClearICSlots(SharedFunctionInfo* shared) {
ClearICSlotsImpl(shared, true);
}
typedef BitSetComputer<VectorICKind, kVectorICKindBits, kSmiValueSize,
uint32_t> VectorICComputer;
+ void ClearSlotsImpl(SharedFunctionInfo* shared, bool force_clear);
void ClearICSlotsImpl(SharedFunctionInfo* shared, bool force_clear);
DISALLOW_IMPLICIT_CONSTRUCTORS(TypeFeedbackVector);
};
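
The ClearSlots / ClearSlotsAtGCTime pair above is a small force_clear dispatch. A self-contained sketch of the pattern (stand-in names and bodies, not V8's):

```cpp
// The eager entry point always clears; the GC-time entry point defers to a
// policy check, standing in for ClearLogic() above.
struct FeedbackVectorSketch {
  static bool ShouldClearAtGC() { return false; }  // cf. ClearLogic()

  void ClearSlotsImpl(bool force_clear) {
    if (!force_clear && !ShouldClearAtGC()) return;  // GC path may bail out
    // ... reset each slot to the uninitialized sentinel ...
  }
  void ClearSlots() { ClearSlotsImpl(true); }           // eager: always
  void ClearSlotsAtGCTime() { ClearSlotsImpl(false); }  // policy-gated
};
```
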
+// The following asserts protect an optimization in type feedback vector
+// code that looks into the contents of a slot expecting to find a String,
+// a Symbol, an AllocationSite, a WeakCell, or a FixedArray.
+STATIC_ASSERT(WeakCell::kSize >= 2 * kPointerSize);
+STATIC_ASSERT(WeakCell::kValueOffset == AllocationSite::kTransitionInfoOffset);
+STATIC_ASSERT(WeakCell::kValueOffset == FixedArray::kLengthOffset);
+STATIC_ASSERT(WeakCell::kValueOffset == Name::kHashFieldSlot);
+// Verify that an empty hash field looks like a tagged object, but can't
+// possibly be confused with a pointer.
+STATIC_ASSERT((Name::kEmptyHashField & kHeapObjectTag) == kHeapObjectTag);
+STATIC_ASSERT(Name::kEmptyHashField == 0x3);
+// Verify that a set hash field will not look like a tagged object.
+STATIC_ASSERT(Name::kHashNotComputedMask == kHeapObjectTag);
+
+
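The hash-field asserts reduce to a bit of tag arithmetic. A standalone sketch with stand-in constants that mirror the asserted relationships (hypothetical values, not V8 source):

```cpp
#include <cstdint>

constexpr uint32_t kTag = 1;              // low bit set => heap pointer
constexpr uint32_t kEmptyHash = 0x3;      // cf. Name::kEmptyHashField
constexpr uint32_t kNotComputedMask = 1;  // cf. Name::kHashNotComputedMask

// A cleared WeakCell stores a Smi (tag bit clear) at the shared offset.
constexpr bool LooksLikeSmi(uint32_t w) { return (w & kTag) == 0; }

// An empty hash field carries the tag bit, so it can never be mistaken for
// a Smi; once the hash is computed, the not-computed bit (== the tag bit)
// is cleared and the field no longer looks like a tagged pointer.
static_assert(!LooksLikeSmi(kEmptyHash), "empty hash looks tagged");
static_assert(LooksLikeSmi(kEmptyHash & ~kNotComputedMask),
              "computed hash looks like a Smi");
```
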
// A FeedbackNexus is the combination of a TypeFeedbackVector and a slot.
// Derived classes customize the update and retrieval of feedback.
class FeedbackNexus {
Handle<Object> TypeFeedbackOracle::GetInfo(FeedbackVectorSlot slot) {
DCHECK(slot.ToInt() >= 0 && slot.ToInt() < feedback_vector_->length());
+ Handle<Object> undefined =
+ Handle<Object>::cast(isolate()->factory()->undefined_value());
Object* obj = feedback_vector_->Get(slot);
+
+ // Slots do not embed direct pointers to functions. Instead a WeakCell is
+ // always used.
+ DCHECK(!obj->IsJSFunction());
+ if (obj->IsWeakCell()) {
+ WeakCell* cell = WeakCell::cast(obj);
+ if (cell->cleared()) return undefined;
+ obj = cell->value();
+ }
+
return Handle<Object>(obj, isolate());
}
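
The unwrap in GetInfo gives consumers a simple contract: they receive the referent or undefined, never the WeakCell itself. A rough standalone analogue using std::weak_ptr in place of WeakCell (illustrative, not V8 API):

```cpp
#include <cassert>
#include <memory>

struct FeedbackValue {};

// nullptr plays the role of undefined_value() in this sketch.
std::shared_ptr<FeedbackValue> Unwrap(
    const std::weak_ptr<FeedbackValue>& cell) {
  return cell.lock();  // a cleared (expired) cell yields nullptr
}

int main() {
  auto target = std::make_shared<FeedbackValue>();
  std::weak_ptr<FeedbackValue> cell = target;
  assert(Unwrap(cell) != nullptr);  // referent alive
  target.reset();                   // "GC" collects the referent
  assert(Unwrap(cell) == nullptr);  // caller sees "undefined"
}
```
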
}
+static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
+ // rax : number of arguments to the construct function
+ // rbx : Feedback vector
+ // rdx : slot in feedback vector (untagged)
+ // rdi : the function to call
+ FrameScope scope(masm, StackFrame::INTERNAL);
+
+ // Arguments register must be smi-tagged to call out.
+ __ Integer32ToSmi(rax, rax);
+ __ Push(rax);
+ __ Push(rdi);
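+ // The slot index must be smi-tagged for the call as well.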
+ __ Integer32ToSmi(rdx, rdx);
+ __ Push(rdx);
+ __ Push(rbx);
+
+ __ CallStub(stub);
+
+ __ Pop(rbx);
+ __ Pop(rdx);
+ __ Pop(rdi);
+ __ Pop(rax);
+ __ SmiToInteger32(rax, rax);
+}
+
+
static void GenerateRecordCallTarget(MacroAssembler* masm) {
// Cache the called function in a feedback vector slot. Cache states
// are uninitialized, monomorphic (indicated by a WeakCell), and
// A monomorphic cache hit or an already megamorphic state: invoke the
// function without changing the state.
- __ cmpp(rcx, rdi);
- __ j(equal, &done);
- __ Cmp(rcx, TypeFeedbackVector::MegamorphicSentinel(isolate));
- __ j(equal, &done);
+ Label check_allocation_site;
+ __ cmpp(rdi, FieldOperand(rcx, WeakCell::kValueOffset));
+ __ j(equal, &done, Label::kFar);
+ __ CompareRoot(rcx, Heap::kmegamorphic_symbolRootIndex);
+ __ j(equal, &done, Label::kFar);
+ __ CompareRoot(FieldOperand(rcx, 0), Heap::kWeakCellMapRootIndex);
+ __ j(not_equal, FLAG_pretenuring_call_new ? &miss : &check_allocation_site);
+
+ // If rdi is not equal to the weak cell value, and the weak cell value is
+ // cleared, we have a new chance to become monomorphic. Otherwise, we
+ // need to go megamorphic.
+ __ CheckSmi(FieldOperand(rcx, WeakCell::kValueOffset));
+ __ j(equal, &initialize);
+ __ jmp(&megamorphic);
if (!FLAG_pretenuring_call_new) {
+ __ bind(&check_allocation_site);
// If we came here, we need to see if we are the array function.
// If we didn't have a matching function, and we didn't find the megamorphic
// sentinel, then the slot holds either some other function or an
// AllocationSite. Do a map check on the object in rcx.
- Handle<Map> allocation_site_map =
- masm->isolate()->factory()->allocation_site_map();
- __ Cmp(FieldOperand(rcx, 0), allocation_site_map);
+ __ CompareRoot(FieldOperand(rcx, 0), Heap::kAllocationSiteMapRootIndex);
__ j(not_equal, &miss);
// Make sure the function is the Array() function
// A monomorphic miss (i.e, here the cache is not uninitialized) goes
// megamorphic.
- __ Cmp(rcx, TypeFeedbackVector::UninitializedSentinel(isolate));
+ __ CompareRoot(rcx, Heap::kuninitialized_symbolRootIndex);
__ j(equal, &initialize);
// MegamorphicSentinel is an immortal immovable object (undefined) so no
// write-barrier is needed.
__ cmpp(rdi, rcx);
__ j(not_equal, &not_array_function);
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
-
- // Arguments register must be smi-tagged to call out.
- __ Integer32ToSmi(rax, rax);
- __ Push(rax);
- __ Push(rdi);
- __ Integer32ToSmi(rdx, rdx);
- __ Push(rdx);
- __ Push(rbx);
-
- CreateAllocationSiteStub create_stub(isolate);
- __ CallStub(&create_stub);
-
- __ Pop(rbx);
- __ Pop(rdx);
- __ Pop(rdi);
- __ Pop(rax);
- __ SmiToInteger32(rax, rax);
- }
+ CreateAllocationSiteStub create_stub(isolate);
+ CallStubInRecordCallTarget(masm, &create_stub);
__ jmp(&done_no_smi_convert);
__ bind(&not_array_function);
}
- __ movp(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
- rdi);
-
- // We won't need rdx or rbx anymore, just save rdi
- __ Push(rdi);
- __ Push(rbx);
- __ Push(rdx);
- __ RecordWriteArray(rbx, rdi, rdx, kDontSaveFPRegs,
- EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
- __ Pop(rdx);
- __ Pop(rbx);
- __ Pop(rdi);
+ CreateWeakCellStub create_stub(isolate);
+ CallStubInRecordCallTarget(masm, &create_stub);
+ __ jmp(&done_no_smi_convert);
__ bind(&done);
__ Integer32ToSmi(rdx, rdx);
// Fill with information
vector->Set(FeedbackVectorSlot(0), Smi::FromInt(1));
- vector->Set(FeedbackVectorSlot(1), *factory->fixed_array_map());
+ Handle<WeakCell> cell = factory->NewWeakCell(factory->fixed_array_map());
+ vector->Set(FeedbackVectorSlot(1), *cell);
Handle<AllocationSite> site = factory->NewAllocationSite();
vector->Set(FeedbackVectorSlot(2), *site);
+ // GC time clearing leaves slots alone.
+ vector->ClearSlotsAtGCTime(NULL);
+ Object* obj = vector->Get(FeedbackVectorSlot(1));
+ CHECK(obj->IsWeakCell() && !WeakCell::cast(obj)->cleared());
+
vector->ClearSlots(NULL);
- // The feedback vector slots are cleared. AllocationSites are granted
+ // The feedback vector slots are cleared. AllocationSites are still granted
// an exemption from clearing, as are smis.
CHECK_EQ(Smi::FromInt(1), vector->Get(FeedbackVectorSlot(0)));
CHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(isolate),
}
+TEST(IncrementalMarkingPreservesMonomorphicConstructor) {
+ if (i::FLAG_always_opt) return;
+ CcTest::InitializeVM();
+ v8::HandleScope scope(CcTest::isolate());
+
+ // Prepare function f that contains a monomorphic IC for an object
+ // originating from the same native context.
+ CompileRun(
+ "function fun() { this.x = 1; };"
+ "function f(o) { return new o(); } f(fun); f(fun);");
+ Handle<JSFunction> f = v8::Utils::OpenHandle(
+ *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
+
+ Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector());
+ CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
+
+ SimulateIncrementalMarking(CcTest::heap());
+ CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
+
+ CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
+}
+
+
+TEST(IncrementalMarkingClearsMonomorphicConstructor) {
+ if (i::FLAG_always_opt) return;
+ CcTest::InitializeVM();
+ Isolate* isolate = CcTest::i_isolate();
+ v8::HandleScope scope(CcTest::isolate());
+ v8::Local<v8::Value> fun1;
+
+ {
+ LocalContext env;
+ CompileRun("function fun() { this.x = 1; };");
+ fun1 = env->Global()->Get(v8_str("fun"));
+ }
+
+ // Prepare function f that contains a monomorphic constructor for an object
+ // originating from a different native context.
+ CcTest::global()->Set(v8_str("fun1"), fun1);
+ CompileRun(
+ "function fun() { this.x = 1; };"
+ "function f(o) { return new o(); } f(fun1); f(fun1);");
+ Handle<JSFunction> f = v8::Utils::OpenHandle(
+ *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
+
+ Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector());
+ CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
+
+ // Fire context dispose notification.
+ CcTest::isolate()->ContextDisposedNotification();
+ SimulateIncrementalMarking(CcTest::heap());
+ CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
+
+ CHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(isolate),
+ vector->Get(FeedbackVectorSlot(0)));
+}
+
+
TEST(IncrementalMarkingPreservesMonomorphicIC) {
if (i::FLAG_always_opt) return;
CcTest::InitializeVM();
}
+TEST(WeakFunctionInConstructor) {
+ if (i::FLAG_always_opt) return;
+ i::FLAG_stress_compaction = false;
+ CcTest::InitializeVM();
+ v8::Isolate* isolate = CcTest::isolate();
+ v8::HandleScope scope(isolate);
+ CompileRun(
+ "function createObj(obj) {"
+ " return new obj();"
+ "}");
+ Handle<JSFunction> createObj =
+ v8::Utils::OpenHandle(*v8::Handle<v8::Function>::Cast(
+ CcTest::global()->Get(v8_str("createObj"))));
+
+ v8::Persistent<v8::Object> garbage;
+ {
+ v8::HandleScope scope(isolate);
+ const char* source =
+ " (function() {"
+ " function hat() { this.x = 5; }"
+ " createObj(hat);"
+ " createObj(hat);"
+ " return hat;"
+ " })();";
+ garbage.Reset(isolate, CompileRun(source)->ToObject(isolate));
+ }
+ weak_ic_cleared = false;
+ garbage.SetWeak(static_cast<void*>(&garbage), &ClearWeakIC);
+ Heap* heap = CcTest::i_isolate()->heap();
+ heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+ CHECK(weak_ic_cleared);
+
+ // We've determined that the constructor in createObj has had its weak cell
+ // cleared. Now verify that one additional call with a new function
+ // restores monomorphicity.
+ Handle<TypeFeedbackVector> feedback_vector = Handle<TypeFeedbackVector>(
+ createObj->shared()->feedback_vector(), CcTest::i_isolate());
+ for (int i = 0; i < 20; i++) {
+ Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
+ CHECK(slot_value->IsWeakCell());
+ if (WeakCell::cast(slot_value)->cleared()) break;
+ heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+ }
+
+ Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
+ CHECK(slot_value->IsWeakCell() && WeakCell::cast(slot_value)->cleared());
+ CompileRun(
+ "function coat() { this.x = 6; }"
+ "createObj(coat);");
+ slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
+ CHECK(slot_value->IsWeakCell() && !WeakCell::cast(slot_value)->cleared());
+}
+
+
// Checks that the value returned by execution of the source is weak.
void CheckWeakness(const char* source) {
i::FLAG_stress_compaction = false;