From: mvstanton
Date: Thu, 29 Jan 2015 17:36:30 +0000 (-0800)
Subject: Use a WeakCell in the CallIC type vector.
X-Git-Tag: upstream/4.7.83~4708
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=bcc79d33ca6d97d9ecfcfcf110a6ea84a0225389;p=platform%2Fupstream%2Fv8.git

Use a WeakCell in the CallIC type vector.

This allows us to clear the IC on a more sedate schedule, just like Load and Store ICs.

R=ulan@chromium.org
BUG=

Review URL: https://codereview.chromium.org/881433002

Cr-Commit-Position: refs/heads/master@{#26332}
---
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index 2e585ba53..0fba239a9 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -916,6 +916,7 @@ void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
  ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
  CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
+  CreateWeakCellStub::GenerateAheadOfTime(isolate);
  BinaryOpICStub::GenerateAheadOfTime(isolate);
  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
 }
@@ -2657,9 +2658,29 @@ void CallICStub::Generate(MacroAssembler* masm) {
  // The checks. First, does r1 match the recorded monomorphic target?
  __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
  __ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize));
-  __ cmp(r1, r4);
+
+  // We don't know that we have a weak cell. We might have a private symbol
+  // or an AllocationSite, but the memory is safe to examine.
+  // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
+  // FixedArray.
+  // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
+  // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
+  // computed, meaning that it can't appear to be a pointer. If the low bit is
+  // 0, then hash is computed, but the 0 bit prevents the field from appearing
+  // to be a pointer.
+  STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
+  STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
+                    WeakCell::kValueOffset &&
+                WeakCell::kValueOffset == Symbol::kHashFieldSlot);
+
+  __ ldr(r5, FieldMemOperand(r4, WeakCell::kValueOffset));
+  __ cmp(r1, r5);
  __ b(ne, &extra_checks_or_miss);

+  // The compare above could have been a SMI/SMI comparison. Guard against this
+  // convincing us that we have a monomorphic JSFunction.
+  __ JumpIfSmi(r1, &extra_checks_or_miss);
+
  __ bind(&have_js_function);
  if (CallAsMethod()) {
    EmitContinueIfStrictOrNative(masm, &cont);
@@ -2735,15 +2756,18 @@ void CallICStub::Generate(MacroAssembler* masm) {
  __ add(r4, r4, Operand(Smi::FromInt(1)));
  __ str(r4, FieldMemOperand(r2, with_types_offset));

-  // Store the function.
-  __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
-  __ add(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
-  __ str(r1, MemOperand(r4, 0));
+  // Store the function. Use a stub since we need a frame for allocation.
+  // r2 - vector
+  // r3 - slot
+  // r1 - function
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+    CreateWeakCellStub create_stub(masm->isolate());
+    __ Push(r1);
+    __ CallStub(&create_stub);
+    __ Pop(r1);
+  }

-  // Update the write barrier.
-  __ mov(r5, r1);
-  __ RecordWrite(r2, r4, r5, kLRHasNotBeenSaved, kDontSaveFPRegs,
-                 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  __ jmp(&have_js_function);

  // We are here because tracing is on or we encountered a MISS case we can't
diff --git a/src/arm/interface-descriptors-arm.cc b/src/arm/interface-descriptors-arm.cc
index cf6386a79..da0cba9d1 100644
--- a/src/arm/interface-descriptors-arm.cc
+++ b/src/arm/interface-descriptors-arm.cc
@@ -98,7 +98,19 @@ void FastCloneShallowObjectDescriptor::Initialize(
void CreateAllocationSiteDescriptor::Initialize(
    CallInterfaceDescriptorData* data) {
  Register registers[] = {cp, r2, r3};
-  data->Initialize(arraysize(registers), registers, NULL);
+  Representation representations[] = {Representation::Tagged(),
+                                      Representation::Tagged(),
+                                      Representation::Smi()};
+  data->Initialize(arraysize(registers), registers, representations);
+}
+
+
+void CreateWeakCellDescriptor::Initialize(CallInterfaceDescriptorData* data) {
+  Register registers[] = {cp, r2, r3, r1};
+  Representation representations[] = {
+      Representation::Tagged(), Representation::Tagged(), Representation::Smi(),
+      Representation::Tagged()};
+  data->Initialize(arraysize(registers), registers, representations);
}
diff --git a/src/arm64/code-stubs-arm64.cc b/src/arm64/code-stubs-arm64.cc
index 730fdafe7..4bc869068 100644
--- a/src/arm64/code-stubs-arm64.cc
+++ b/src/arm64/code-stubs-arm64.cc
@@ -973,6 +973,7 @@ void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
  ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
  CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
+  CreateWeakCellStub::GenerateAheadOfTime(isolate);
  BinaryOpICStub::GenerateAheadOfTime(isolate);
  StoreRegistersStateStub::GenerateAheadOfTime(isolate);
  RestoreRegistersStateStub::GenerateAheadOfTime(isolate);
@@ -3049,9 +3050,28 @@ void CallICStub::Generate(MacroAssembler* masm) {
         Operand::UntagSmiAndScale(index, kPointerSizeLog2));
  __ Ldr(x4, FieldMemOperand(x4, FixedArray::kHeaderSize));

-  __ Cmp(x4, function);
+  // We don't know that we have a weak cell. We might have a private symbol
+  // or an AllocationSite, but the memory is safe to examine.
+  // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
+  // FixedArray.
+  // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
+  // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
+  // computed, meaning that it can't appear to be a pointer. If the low bit is
+  // 0, then hash is computed, but the 0 bit prevents the field from appearing
+  // to be a pointer.
+  STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
+  STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
+                    WeakCell::kValueOffset &&
+                WeakCell::kValueOffset == Symbol::kHashFieldSlot);
+
+  __ Ldr(x5, FieldMemOperand(x4, WeakCell::kValueOffset));
+  __ Cmp(x5, function);
  __ B(ne, &extra_checks_or_miss);

+  // The compare above could have been a SMI/SMI comparison. Guard against this
+  // convincing us that we have a monomorphic JSFunction.
+  __ JumpIfSmi(function, &extra_checks_or_miss);
+
  __ bind(&have_js_function);
  if (CallAsMethod()) {
    EmitContinueIfStrictOrNative(masm, &cont);
@@ -3127,20 +3147,18 @@ void CallICStub::Generate(MacroAssembler* masm) {
  __ Adds(x4, x4, Operand(Smi::FromInt(1)));
  __ Str(x4, FieldMemOperand(feedback_vector, with_types_offset));

-  // Store the function.
-  __ Add(x4, feedback_vector,
-         Operand::UntagSmiAndScale(index, kPointerSizeLog2));
-  __ Str(function, FieldMemOperand(x4, FixedArray::kHeaderSize));
-
-  __ Add(x4, feedback_vector,
-         Operand::UntagSmiAndScale(index, kPointerSizeLog2));
-  __ Add(x4, x4, FixedArray::kHeaderSize - kHeapObjectTag);
-  __ Str(function, MemOperand(x4, 0));
+  // Store the function. Use a stub since we need a frame for allocation.
+  // x2 - vector
+  // x3 - slot
+  // x1 - function
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+    CreateWeakCellStub create_stub(masm->isolate());
+    __ Push(function);
+    __ CallStub(&create_stub);
+    __ Pop(function);
+  }

-  // Update the write barrier.
-  __ Mov(x5, function);
-  __ RecordWrite(feedback_vector, x4, x5, kLRHasNotBeenSaved, kDontSaveFPRegs,
-                 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  __ B(&have_js_function);

  // We are here because tracing is on or we encountered a MISS case we can't
diff --git a/src/arm64/interface-descriptors-arm64.cc b/src/arm64/interface-descriptors-arm64.cc
index 009701025..6deeabfcf 100644
--- a/src/arm64/interface-descriptors-arm64.cc
+++ b/src/arm64/interface-descriptors-arm64.cc
@@ -124,7 +124,23 @@ void CreateAllocationSiteDescriptor::Initialize(
  // x2: feedback vector
  // x3: call feedback slot
  Register registers[] = {cp, x2, x3};
-  data->Initialize(arraysize(registers), registers, NULL);
+  Representation representations[] = {Representation::Tagged(),
+                                      Representation::Tagged(),
+                                      Representation::Smi()};
+  data->Initialize(arraysize(registers), registers, representations);
+}
+
+
+void CreateWeakCellDescriptor::Initialize(CallInterfaceDescriptorData* data) {
+  // cp: context
+  // x2: feedback vector
+  // x3: call feedback slot
+  // x1: tagged value to put in the weak cell
+  Register registers[] = {cp, x2, x3, x1};
+  Representation representations[] = {
+      Representation::Tagged(), Representation::Tagged(), Representation::Smi(),
+      Representation::Tagged()};
+  data->Initialize(arraysize(registers), registers, representations);
}
diff --git a/src/code-stubs-hydrogen.cc b/src/code-stubs-hydrogen.cc
index 83a994b1c..d971c3c84 100644
--- a/src/code-stubs-hydrogen.cc
+++ b/src/code-stubs-hydrogen.cc
@@ -450,6 +450,10 @@ Handle<Code> FastCloneShallowObjectStub::GenerateCode() {

template <>
HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
+  // This stub is performance sensitive, the generated code must be tuned
+  // so that it doesn't build an eager frame.
+  info()->MarkMustNotHaveEagerFrame();
+
  HValue* size = Add<HConstant>(AllocationSite::kSize);
  HInstruction* object =
      Add<HAllocate>(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE);
@@ -522,6 +526,36 @@ Handle<Code> CreateAllocationSiteStub::GenerateCode() {
}


+template <>
+HValue* CodeStubGraphBuilder<CreateWeakCellStub>::BuildCodeStub() {
+  // This stub is performance sensitive, the generated code must be tuned
+  // so that it doesn't build an eager frame.
+  info()->MarkMustNotHaveEagerFrame();
+
+  HValue* size = Add<HConstant>(WeakCell::kSize);
+  HInstruction* object =
+      Add<HAllocate>(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE);
+
+  Handle<Map> weak_cell_map = isolate()->factory()->weak_cell_map();
+  AddStoreMapConstant(object, weak_cell_map);
+
+  HInstruction* value = GetParameter(CreateWeakCellDescriptor::kValueIndex);
+  Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellValue(), value);
+  Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellNext(),
+                        graph()->GetConstantUndefined());
+
+  HInstruction* feedback_vector =
+      GetParameter(CreateWeakCellDescriptor::kVectorIndex);
+  HInstruction* slot = GetParameter(CreateWeakCellDescriptor::kSlotIndex);
+  Add<HStoreKeyed>(feedback_vector, slot, object, FAST_ELEMENTS,
+                   INITIALIZING_STORE);
+  return graph()->GetConstant0();
+}
+
+
+Handle<Code> CreateWeakCellStub::GenerateCode() { return DoGenerateCode(this); }
+
+
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
diff --git a/src/code-stubs.cc b/src/code-stubs.cc
index 895569d41..ecd8ef2d5 100644
--- a/src/code-stubs.cc
+++ b/src/code-stubs.cc
@@ -679,6 +679,9 @@ void FastCloneShallowObjectStub::InitializeDescriptor(
void CreateAllocationSiteStub::InitializeDescriptor(CodeStubDescriptor* d) {}


+void CreateWeakCellStub::InitializeDescriptor(CodeStubDescriptor* d) {}
+
+
void RegExpConstructResultStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  descriptor->Initialize(
@@ -738,6 +741,12 @@ void CreateAllocationSiteStub::GenerateAheadOfTime(Isolate* isolate) {
}


+void CreateWeakCellStub::GenerateAheadOfTime(Isolate* isolate) {
+  CreateWeakCellStub stub(isolate);
+  stub.GetCode();
+}
+
+
void StoreElementStub::Generate(MacroAssembler* masm) {
  switch (elements_kind()) {
    case FAST_ELEMENTS:
diff --git a/src/code-stubs.h b/src/code-stubs.h
index b5ea4427c..864fbadbb 100644
--- a/src/code-stubs.h
+++ b/src/code-stubs.h
@@ -63,6 +63,7 @@ namespace internal {
  V(BinaryOpWithAllocationSite)      \
  V(CompareNilIC)                    \
  V(CreateAllocationSite)            \
+  V(CreateWeakCell)                  \
  V(ElementsTransitionAndStore)      \
  V(FastCloneShallowArray)           \
  V(FastCloneShallowObject)          \
@@ -676,6 +677,17 @@ class CreateAllocationSiteStub : public HydrogenCodeStub {
};


+class CreateWeakCellStub : public HydrogenCodeStub {
+ public:
+  explicit CreateWeakCellStub(Isolate* isolate) : HydrogenCodeStub(isolate) {}
+
+  static void GenerateAheadOfTime(Isolate* isolate);
+
+  DEFINE_CALL_INTERFACE_DESCRIPTOR(CreateWeakCell);
+  DEFINE_HYDROGEN_CODE_STUB(CreateWeakCell, HydrogenCodeStub);
+};
+
+
class InstanceofStub: public PlatformCodeStub {
 public:
  enum Flags {
diff --git a/src/hydrogen-instructions.h b/src/hydrogen-instructions.h
index ed4b065ef..aa621223c 100644
--- a/src/hydrogen-instructions.h
+++ b/src/hydrogen-instructions.h
@@ -6227,6 +6227,10 @@ class HObjectAccess FINAL {
    return HObjectAccess(kInobject, WeakCell::kValueOffset);
  }

+  static HObjectAccess ForWeakCellNext() {
+    return HObjectAccess(kInobject, WeakCell::kNextOffset);
+  }
+
  static HObjectAccess ForAllocationMementoSite() {
    return HObjectAccess(kInobject, AllocationMemento::kAllocationSiteOffset);
  }
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index ca1afcda9..b7fa9aef4 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -2245,10 +2245,30 @@ void CallICStub::Generate(MacroAssembler* masm) {
  ParameterCount actual(argc);

  // The checks. First, does edi match the recorded monomorphic target?
-  __ cmp(edi, FieldOperand(ebx, edx, times_half_pointer_size,
+  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
                            FixedArray::kHeaderSize));
+
+  // We don't know that we have a weak cell. We might have a private symbol
+  // or an AllocationSite, but the memory is safe to examine.
+  // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
+  // FixedArray.
+  // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
+  // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
+  // computed, meaning that it can't appear to be a pointer. If the low bit is
+  // 0, then hash is computed, but the 0 bit prevents the field from appearing
+  // to be a pointer.
+  STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
+  STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
+                    WeakCell::kValueOffset &&
+                WeakCell::kValueOffset == Symbol::kHashFieldSlot);
+
+  __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset));
  __ j(not_equal, &extra_checks_or_miss);

+  // The compare above could have been a SMI/SMI comparison. Guard against this
+  // convincing us that we have a monomorphic JSFunction.
+  __ JumpIfSmi(edi, &extra_checks_or_miss);
+
  __ bind(&have_js_function);
  if (CallAsMethod()) {
    EmitContinueIfStrictOrNative(masm, &cont);
@@ -2277,8 +2297,6 @@ void CallICStub::Generate(MacroAssembler* masm) {
  __ bind(&extra_checks_or_miss);
  Label uninitialized, miss;

-  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
-                           FixedArray::kHeaderSize));
  __ cmp(ecx, Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
  __ j(equal, &slow_start);
@@ -2322,15 +2340,18 @@ void CallICStub::Generate(MacroAssembler* masm) {
  // Update stats.
  __ add(FieldOperand(ebx, with_types_offset), Immediate(Smi::FromInt(1)));

-  // Store the function.
-  __ mov(
-      FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize),
-      edi);
+  // Store the function. Use a stub since we need a frame for allocation.
+  // ebx - vector
+  // edx - slot
+  // edi - function
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+    CreateWeakCellStub create_stub(isolate);
+    __ push(edi);
+    __ CallStub(&create_stub);
+    __ pop(edi);
+  }

-  // Update the write barrier.
-  __ mov(eax, edi);
-  __ RecordWriteArray(ebx, eax, edx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
-                      OMIT_SMI_CHECK);
  __ jmp(&have_js_function);

  // We are here because tracing is on or we encountered a MISS case we can't
@@ -2393,6 +2414,7 @@ void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  // It is important that the store buffer overflow stubs are generated first.
  ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
  CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
+  CreateWeakCellStub::GenerateAheadOfTime(isolate);
  BinaryOpICStub::GenerateAheadOfTime(isolate);
  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
}
diff --git a/src/ia32/interface-descriptors-ia32.cc b/src/ia32/interface-descriptors-ia32.cc
index 68310ff4f..b0e57fc2e 100644
--- a/src/ia32/interface-descriptors-ia32.cc
+++ b/src/ia32/interface-descriptors-ia32.cc
@@ -101,7 +101,19 @@ void FastCloneShallowObjectDescriptor::Initialize(
void CreateAllocationSiteDescriptor::Initialize(
    CallInterfaceDescriptorData* data) {
  Register registers[] = {esi, ebx, edx};
-  data->Initialize(arraysize(registers), registers, NULL);
+  Representation representations[] = {Representation::Tagged(),
+                                      Representation::Tagged(),
+                                      Representation::Smi()};
+  data->Initialize(arraysize(registers), registers, representations);
+}
+
+
+void CreateWeakCellDescriptor::Initialize(CallInterfaceDescriptorData* data) {
+  Register registers[] = {esi, ebx, edx, edi};
+  Representation representations[] = {
+      Representation::Tagged(), Representation::Tagged(), Representation::Smi(),
+      Representation::Tagged()};
+  data->Initialize(arraysize(registers), registers, representations);
}
diff --git a/src/ic/ic.cc b/src/ic/ic.cc
index 68c7cc2c4..92a052af8 100644
--- a/src/ic/ic.cc
+++ b/src/ic/ic.cc
@@ -2220,7 +2220,7 @@ void CallIC::HandleMiss(Handle<Object> receiver, Handle<Object> function) {
  // Hand-coded MISS handling is easier if CallIC slots don't contain smis.
  DCHECK(!feedback->IsSmi());

-  if (feedback->IsJSFunction() || !function->IsJSFunction()) {
+  if (feedback->IsWeakCell() || !function->IsJSFunction()) {
    // We are going generic.
    nexus->ConfigureGeneric();
  } else {
diff --git a/src/interface-descriptors.h b/src/interface-descriptors.h
index 8308d83a2..51fb9204f 100644
--- a/src/interface-descriptors.h
+++ b/src/interface-descriptors.h
@@ -28,6 +28,7 @@ class PlatformInterfaceDescriptor;
  V(FastCloneShallowArray)             \
  V(FastCloneShallowObject)            \
  V(CreateAllocationSite)              \
+  V(CreateWeakCell)                    \
  V(CallFunction)                      \
  V(CallFunctionWithFeedback)          \
  V(CallFunctionWithFeedbackAndVector) \
@@ -318,6 +319,19 @@ class CreateAllocationSiteDescriptor : public CallInterfaceDescriptor {
};


+class CreateWeakCellDescriptor : public CallInterfaceDescriptor {
+ public:
+  enum ParameterIndices {
+    kVectorIndex,
+    kSlotIndex,
+    kValueIndex,
+    kParameterCount
+  };
+
+  DECLARE_DESCRIPTOR(CreateWeakCellDescriptor, CallInterfaceDescriptor)
+};
+
+
class CallFunctionDescriptor : public CallInterfaceDescriptor {
 public:
  DECLARE_DESCRIPTOR(CallFunctionDescriptor, CallInterfaceDescriptor)
diff --git a/src/type-feedback-vector.cc b/src/type-feedback-vector.cc
index c51d9877f..f5d4a69b5 100644
--- a/src/type-feedback-vector.cc
+++ b/src/type-feedback-vector.cc
@@ -138,7 +138,7 @@ Handle<TypeFeedbackVector> TypeFeedbackVector::Copy(
static bool ClearLogic(Heap* heap, int ic_age, Code::Kind kind,
                       InlineCacheState state) {
  if (FLAG_cleanup_code_caches_at_gc &&
-      (kind == Code::CALL_IC || heap->flush_monomorphic_ics() ||
+      (heap->flush_monomorphic_ics() ||
       // TODO(mvstanton): is this ic_age granular enough? it comes from
       // the SharedFunctionInfo which may change on a different schedule
       // than ic targets.
@@ -285,7 +285,7 @@ InlineCacheState CallICNexus::StateFromFeedback() const {

  if (feedback == *vector()->MegamorphicSentinel(isolate)) {
    return GENERIC;
-  } else if (feedback->IsAllocationSite() || feedback->IsJSFunction()) {
+  } else if (feedback->IsAllocationSite() || feedback->IsWeakCell()) {
    return MONOMORPHIC;
  }

@@ -319,7 +319,8 @@ void CallICNexus::ConfigureUninitialized() {


void CallICNexus::ConfigureMonomorphic(Handle<JSFunction> function) {
-  SetFeedback(*function);
+  Handle<WeakCell> new_cell = GetIsolate()->factory()->NewWeakCell(function);
+  SetFeedback(*new_cell);
}
diff --git a/src/type-info.cc b/src/type-info.cc
index 096e383c9..01943414a 100644
--- a/src/type-info.cc
+++ b/src/type-info.cc
@@ -62,12 +62,23 @@ Handle<Object> TypeFeedbackOracle::GetInfo(FeedbackVectorSlot slot) {

Handle<Object> TypeFeedbackOracle::GetInfo(FeedbackVectorICSlot slot) {
  DCHECK(slot.ToInt() >= 0 && slot.ToInt() < feedback_vector_->length());
+  Handle<Object> undefined =
+      Handle<Object>::cast(isolate()->factory()->undefined_value());
  Object* obj = feedback_vector_->Get(slot);
+
+  // Vector-based ICs do not embed direct pointers to maps, functions.
+  // Instead a WeakCell is always used.
+  if (obj->IsWeakCell()) {
+    WeakCell* cell = WeakCell::cast(obj);
+    if (cell->cleared()) return undefined;
+    obj = cell->value();
+  }
+
  if (!obj->IsJSFunction() ||
      !CanRetainOtherContext(JSFunction::cast(obj), *native_context_)) {
    return Handle<Object>(obj, isolate());
  }
-  return Handle<Object>::cast(isolate()->factory()->undefined_value());
+  return undefined;
}
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index ae2d92158..a1bef5176 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -2115,10 +2115,30 @@ void CallICStub::Generate(MacroAssembler* masm) {
  // The checks. First, does rdi match the recorded monomorphic target?
  __ SmiToInteger32(rdx, rdx);
-  __ cmpp(rdi, FieldOperand(rbx, rdx, times_pointer_size,
-                            FixedArray::kHeaderSize));
+  __ movp(rcx,
+          FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));
+
+  // We don't know that we have a weak cell. We might have a private symbol
+  // or an AllocationSite, but the memory is safe to examine.
+  // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
+  // FixedArray.
+  // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
+  // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
+  // computed, meaning that it can't appear to be a pointer. If the low bit is
+  // 0, then hash is computed, but the 0 bit prevents the field from appearing
+  // to be a pointer.
+  STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
+  STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
+                    WeakCell::kValueOffset &&
+                WeakCell::kValueOffset == Symbol::kHashFieldSlot);
+
+  __ cmpp(rdi, FieldOperand(rcx, WeakCell::kValueOffset));
  __ j(not_equal, &extra_checks_or_miss);

+  // The compare above could have been a SMI/SMI comparison. Guard against this
+  // convincing us that we have a monomorphic JSFunction.
+  __ JumpIfSmi(rdi, &extra_checks_or_miss);
+
  __ bind(&have_js_function);
  if (CallAsMethod()) {
    EmitContinueIfStrictOrNative(masm, &cont);
@@ -2147,8 +2167,6 @@ void CallICStub::Generate(MacroAssembler* masm) {
  __ bind(&extra_checks_or_miss);
  Label uninitialized, miss;

-  __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size,
-                            FixedArray::kHeaderSize));
  __ Cmp(rcx, TypeFeedbackVector::MegamorphicSentinel(isolate));
  __ j(equal, &slow_start);
@@ -2191,14 +2209,20 @@ void CallICStub::Generate(MacroAssembler* masm) {
  // Update stats.
  __ SmiAddConstant(FieldOperand(rbx, with_types_offset), Smi::FromInt(1));

-  // Store the function.
-  __ movp(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
-          rdi);
+  // Store the function. Use a stub since we need a frame for allocation.
+  // rbx - vector
+  // rdx - slot (needs to be in smi form)
+  // rdi - function
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+    CreateWeakCellStub create_stub(isolate);
+
+    __ Integer32ToSmi(rdx, rdx);
+    __ Push(rdi);
+    __ CallStub(&create_stub);
+    __ Pop(rdi);
+  }

-  // Update the write barrier.
-  __ movp(rax, rdi);
-  __ RecordWriteArray(rbx, rax, rdx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
-                      OMIT_SMI_CHECK);
  __ jmp(&have_js_function);

  // We are here because tracing is on or we encountered a MISS case we can't
@@ -2260,6 +2284,7 @@ void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  // It is important that the store buffer overflow stubs are generated first.
  ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
  CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
+  CreateWeakCellStub::GenerateAheadOfTime(isolate);
  BinaryOpICStub::GenerateAheadOfTime(isolate);
  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
}
diff --git a/src/x64/interface-descriptors-x64.cc b/src/x64/interface-descriptors-x64.cc
index f97c6c767..1ca0c8587 100644
--- a/src/x64/interface-descriptors-x64.cc
+++ b/src/x64/interface-descriptors-x64.cc
@@ -101,7 +101,19 @@ void FastCloneShallowObjectDescriptor::Initialize(
void CreateAllocationSiteDescriptor::Initialize(
    CallInterfaceDescriptorData* data) {
  Register registers[] = {rsi, rbx, rdx};
-  data->Initialize(arraysize(registers), registers, NULL);
+  Representation representations[] = {Representation::Tagged(),
+                                      Representation::Tagged(),
+                                      Representation::Smi()};
+  data->Initialize(arraysize(registers), registers, representations);
+}
+
+
+void CreateWeakCellDescriptor::Initialize(CallInterfaceDescriptorData* data) {
+  Register registers[] = {rsi, rbx, rdx, rdi};
+  Representation representations[] = {
+      Representation::Tagged(), Representation::Tagged(), Representation::Smi(),
+      Representation::Tagged()};
+  data->Initialize(arraysize(registers), registers, representations);
}
diff --git a/test/cctest/test-compiler.cc b/test/cctest/test-compiler.cc
index 9982a5597..e47b1d61b 100644
--- a/test/cctest/test-compiler.cc
+++ b/test/cctest/test-compiler.cc
@@ -314,7 +314,9 @@ TEST(FeedbackVectorPreservedAcrossRecompiles) {
  CHECK_EQ(expected_slots, feedback_vector->Slots());
  CHECK_EQ(expected_ic_slots, feedback_vector->ICSlots());
  FeedbackVectorICSlot slot_for_a(0);
-  CHECK(feedback_vector->Get(slot_for_a)->IsJSFunction());
+  Object* object = feedback_vector->Get(slot_for_a);
+  CHECK(object->IsWeakCell() &&
+        WeakCell::cast(object)->value()->IsJSFunction());

  CompileRun("%OptimizeFunctionOnNextCall(f); f(fun1);");

@@ -322,7 +324,9 @@ TEST(FeedbackVectorPreservedAcrossRecompiles) {
  // of the full code.
  CHECK(f->IsOptimized());
  CHECK(f->shared()->has_deoptimization_support());
-  CHECK(f->shared()->feedback_vector()->Get(slot_for_a)->IsJSFunction());
+  object = f->shared()->feedback_vector()->Get(slot_for_a);
+  CHECK(object->IsWeakCell() &&
+        WeakCell::cast(object)->value()->IsJSFunction());
}
diff --git a/test/cctest/test-feedback-vector.cc b/test/cctest/test-feedback-vector.cc
index 439b986e4..9c11a498b 100644
--- a/test/cctest/test-feedback-vector.cc
+++ b/test/cctest/test-feedback-vector.cc
@@ -171,44 +171,43 @@ TEST(VectorICProfilerStatistics) {
  Handle<JSFunction> f = v8::Utils::OpenHandle(
      *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
  // There should be one IC.
-  Code* code = f->shared()->code();
+  Handle<Code> code = handle(f->shared()->code(), isolate);
  TypeFeedbackInfo* feedback_info =
      TypeFeedbackInfo::cast(code->type_feedback_info());
  CHECK_EQ(1, feedback_info->ic_total_count());
  CHECK_EQ(0, feedback_info->ic_with_type_info_count());
  CHECK_EQ(0, feedback_info->ic_generic_count());
-  TypeFeedbackVector* feedback_vector = f->shared()->feedback_vector();
+  Handle<TypeFeedbackVector> feedback_vector =
+      handle(f->shared()->feedback_vector(), isolate);
+  int ic_slot = 0;
+  CallICNexus nexus(feedback_vector, FeedbackVectorICSlot(ic_slot));
  CHECK_EQ(1, feedback_vector->ic_with_type_info_count());
  CHECK_EQ(0, feedback_vector->ic_generic_count());

  // Now send the information generic.
  CompileRun("f(Object);");
-  feedback_vector = f->shared()->feedback_vector();
  CHECK_EQ(0, feedback_vector->ic_with_type_info_count());
  CHECK_EQ(1, feedback_vector->ic_generic_count());

-  // A collection will make the site uninitialized again.
+  // A collection will not affect the site.
  heap->CollectAllGarbage(i::Heap::kNoGCFlags);
-  feedback_vector = f->shared()->feedback_vector();
  CHECK_EQ(0, feedback_vector->ic_with_type_info_count());
-  CHECK_EQ(0, feedback_vector->ic_generic_count());
+  CHECK_EQ(1, feedback_vector->ic_generic_count());

  // The Array function is special. A call to array remains monomorphic
  // and isn't cleared by gc because an AllocationSite is being held.
+  // Clear the IC manually in order to test this case.
+  nexus.Clear(*code);
  CompileRun("f(Array);");
-  feedback_vector = f->shared()->feedback_vector();
  CHECK_EQ(1, feedback_vector->ic_with_type_info_count());
  CHECK_EQ(0, feedback_vector->ic_generic_count());
-  int ic_slot = 0;
-  CHECK(
-      feedback_vector->Get(FeedbackVectorICSlot(ic_slot))->IsAllocationSite());
+
+  CHECK(nexus.GetFeedback()->IsAllocationSite());

  heap->CollectAllGarbage(i::Heap::kNoGCFlags);
-  feedback_vector = f->shared()->feedback_vector();
  CHECK_EQ(1, feedback_vector->ic_with_type_info_count());
  CHECK_EQ(0, feedback_vector->ic_generic_count());
-  CHECK(
-      feedback_vector->Get(FeedbackVectorICSlot(ic_slot))->IsAllocationSite());
+  CHECK(nexus.GetFeedback()->IsAllocationSite());
}
@@ -238,15 +237,16 @@ TEST(VectorCallICStates) {
  CompileRun("f(function() { return 16; })");
  CHECK_EQ(GENERIC, nexus.StateFromFeedback());

-  // After a collection, state should be reset to UNINITIALIZED.
+  // After a collection, state should remain GENERIC.
  heap->CollectAllGarbage(i::Heap::kNoGCFlags);
-  CHECK_EQ(UNINITIALIZED, nexus.StateFromFeedback());
+  CHECK_EQ(GENERIC, nexus.StateFromFeedback());

-  // Array is special. It will remain monomorphic across gcs and it contains an
-  // AllocationSite.
+  // A call to Array is special, it contains an AllocationSite as feedback.
+  // Clear the IC manually in order to test this case.
+  nexus.Clear(f->shared()->code());
  CompileRun("f(Array)");
  CHECK_EQ(MONOMORPHIC, nexus.StateFromFeedback());
-  CHECK(feedback_vector->Get(FeedbackVectorICSlot(slot))->IsAllocationSite());
+  CHECK(nexus.GetFeedback()->IsAllocationSite());

  heap->CollectAllGarbage(i::Heap::kNoGCFlags);
  CHECK_EQ(MONOMORPHIC, nexus.StateFromFeedback());
diff --git a/test/cctest/test-heap.cc b/test/cctest/test-heap.cc
index ed80bd698..a64915f0a 100644
--- a/test/cctest/test-heap.cc
+++ b/test/cctest/test-heap.cc
@@ -3320,7 +3320,7 @@ TEST(Regress2211) {
}


-TEST(IncrementalMarkingClearsTypeFeedbackInfo) {
+TEST(IncrementalMarkingPreservesMonomorphicCallIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
@@ -3355,16 +3355,16 @@ TEST(IncrementalMarkingClearsTypeFeedbackInfo) {
  CHECK_EQ(expected_slots, feedback_vector->ICSlots());
  int slot1 = 0;
  int slot2 = 1;
-  CHECK(feedback_vector->Get(FeedbackVectorICSlot(slot1))->IsJSFunction());
-  CHECK(feedback_vector->Get(FeedbackVectorICSlot(slot2))->IsJSFunction());
+  CHECK(feedback_vector->Get(FeedbackVectorICSlot(slot1))->IsWeakCell());
+  CHECK(feedback_vector->Get(FeedbackVectorICSlot(slot2))->IsWeakCell());

  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);

-  CHECK_EQ(feedback_vector->Get(FeedbackVectorICSlot(slot1)),
-           *TypeFeedbackVector::UninitializedSentinel(CcTest::i_isolate()));
-  CHECK_EQ(feedback_vector->Get(FeedbackVectorICSlot(slot2)),
-           *TypeFeedbackVector::UninitializedSentinel(CcTest::i_isolate()));
+  CHECK(!WeakCell::cast(feedback_vector->Get(FeedbackVectorICSlot(slot1)))
+             ->cleared());
+  CHECK(!WeakCell::cast(feedback_vector->Get(FeedbackVectorICSlot(slot2)))
+             ->cleared());
}
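
What the patch boils down to, apart from the per-architecture assembly: a monomorphic CallIC slot in the type feedback vector now holds a WeakCell wrapping the JSFunction instead of the function itself, so the GC can clear the cell's value without immediately resetting the slot, and every reader of the slot must unwrap the cell. The sketch below is a simplified, self-contained model of that idea for orientation only; it is not V8 code, and the names JSFunctionLike, WeakCellLike and CallFeedbackSlot are invented for the example.

// Hypothetical, simplified model of the new CallIC feedback slot (not V8 API).
#include <cassert>

struct JSFunctionLike { int id; };

// GC clears 'value' when the referenced function dies; the cell object itself
// stays in the feedback slot.
struct WeakCellLike { JSFunctionLike* value = nullptr; };

enum class SlotState { kUninitialized, kMonomorphic, kGeneric };

struct CallFeedbackSlot {
  SlotState state = SlotState::kUninitialized;
  WeakCellLike cell;

  // Mirrors CallICNexus::ConfigureMonomorphic: wrap the target in a weak
  // reference instead of storing the raw function.
  void ConfigureMonomorphic(JSFunctionLike* fn) {
    state = SlotState::kMonomorphic;
    cell.value = fn;
  }

  // The fast-path check: still monomorphic only if the weak value matches.
  bool MatchesMonomorphicTarget(const JSFunctionLike* fn) const {
    return state == SlotState::kMonomorphic && cell.value == fn;
  }
};

int main() {
  JSFunctionLike f{1};
  CallFeedbackSlot slot;
  slot.ConfigureMonomorphic(&f);
  assert(slot.MatchesMonomorphicTarget(&f));

  // A GC that finds the target dead clears only the weak value; the IC can be
  // cleaned up later, on the same sedate schedule as Load and Store ICs.
  slot.cell.value = nullptr;
  assert(!slot.MatchesMonomorphicTarget(&f));
  return 0;
}

The same unwrap-and-check pattern is what the added WeakCell::kValueOffset load implements in each CallICStub::Generate hunk above, with the smi guard covering the case where the slot still holds a sentinel rather than a weak cell.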