}
-void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
+void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
// r1 - function
// r3 - slot id
// r2 - vector
- Label miss;
- int argc = arg_count();
- ParameterCount actual(argc);
-
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r4);
- __ cmp(r1, r4);
- __ b(ne, &miss);
+ // r4 - allocation site (loaded from vector[slot])
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r5);
+ __ cmp(r1, r5);
+ __ b(ne, miss);
__ mov(r0, Operand(arg_count()));
- __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
- __ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize));
-
- // Verify that r4 contains an AllocationSite
- __ ldr(r5, FieldMemOperand(r4, HeapObject::kMapOffset));
- __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
- __ b(ne, &miss);
// Increment the call count for monomorphic function calls.
__ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3));
__ mov(r3, r1);
ArrayConstructorStub stub(masm->isolate(), arg_count());
__ TailCallStub(&stub);
-
- __ bind(&miss);
- GenerateMiss(masm);
-
- // The slow case, we need this no matter what to complete a call after a miss.
- __ mov(r0, Operand(arg_count()));
- __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
}
__ bind(&extra_checks_or_miss);
- Label uninitialized, miss;
+ Label uninitialized, miss, not_allocation_site;
__ CompareRoot(r4, Heap::kmegamorphic_symbolRootIndex);
__ b(eq, &slow_start);
+ // Verify that r4 contains an AllocationSite
+ __ ldr(r5, FieldMemOperand(r4, HeapObject::kMapOffset));
+ __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
+ __ b(ne, &not_allocation_site);
+
+ // We have an allocation site.
+ HandleArrayCase(masm, &miss);
+
+ __ bind(&not_allocation_site);
+
// The following cases attempt to handle MISS cases without going to the
// runtime.
if (FLAG_trace_ic) {
__ Push(r1, r2, r3);
// Call the entry.
- Runtime::FunctionId id = GetICState() == DEFAULT
- ? Runtime::kCallIC_Miss
- : Runtime::kCallIC_Customization_Miss;
- __ CallRuntime(id, 3);
+ __ CallRuntime(Runtime::kCallIC_Miss, 3);
// Move result to r1 and exit the internal frame.
__ mov(r1, r0);
}
-void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) {
- EmitLoadTypeFeedbackVector(masm, r2);
- CallIC_ArrayStub stub(isolate(), state());
- __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
-}
-
-
void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }
}
-void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
+void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
// x1 - function
// x3 - slot id
// x2 - vector
- Label miss;
+ // x4 - allocation site (loaded from vector[slot])
Register function = x1;
Register feedback_vector = x2;
Register index = x3;
- Register scratch = x4;
+ Register allocation_site = x4;
+ Register scratch = x5;
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch);
__ Cmp(function, scratch);
- __ B(ne, &miss);
+ __ B(ne, miss);
__ Mov(x0, Operand(arg_count()));
- __ Add(scratch, feedback_vector,
- Operand::UntagSmiAndScale(index, kPointerSizeLog2));
- __ Ldr(scratch, FieldMemOperand(scratch, FixedArray::kHeaderSize));
-
- // Verify that scratch contains an AllocationSite
- Register map = x5;
- __ Ldr(map, FieldMemOperand(scratch, HeapObject::kMapOffset));
- __ JumpIfNotRoot(map, Heap::kAllocationSiteMapRootIndex, &miss);
-
// Increment the call count for monomorphic function calls.
__ Add(feedback_vector, feedback_vector,
Operand::UntagSmiAndScale(index, kPointerSizeLog2));
__ Add(index, index, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
__ Str(index, FieldMemOperand(feedback_vector, 0));
- Register allocation_site = feedback_vector;
- Register original_constructor = index;
- __ Mov(allocation_site, scratch);
- __ Mov(original_constructor, function);
+ // Set up arguments for the array constructor stub.
+ Register allocation_site_arg = feedback_vector;
+ Register original_constructor_arg = index;
+ __ Mov(allocation_site_arg, allocation_site);
+ __ Mov(original_constructor_arg, function);
ArrayConstructorStub stub(masm->isolate(), arg_count());
__ TailCallStub(&stub);
-
- __ bind(&miss);
- GenerateMiss(masm);
-
- // The slow case, we need this no matter what to complete a call after a miss.
- __ Mov(x0, arg_count());
- __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
}
__ bind(&extra_checks_or_miss);
- Label uninitialized, miss;
+ Label uninitialized, miss, not_allocation_site;
__ JumpIfRoot(x4, Heap::kmegamorphic_symbolRootIndex, &slow_start);
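+ // Verify that x4 contains an AllocationSite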
+ __ Ldr(x5, FieldMemOperand(x4, HeapObject::kMapOffset));
+ __ JumpIfNotRoot(x5, Heap::kAllocationSiteMapRootIndex, &not_allocation_site);
+
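+ // We have an allocation site.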
+ HandleArrayCase(masm, &miss);
+
+ __ bind(&not_allocation_site);
+
// The following cases attempt to handle MISS cases without going to the
// runtime.
if (FLAG_trace_ic) {
__ Push(x1, x2, x3);
// Call the entry.
- Runtime::FunctionId id = GetICState() == DEFAULT
- ? Runtime::kCallIC_Miss
- : Runtime::kCallIC_Customization_Miss;
- __ CallRuntime(id, 3);
+ __ CallRuntime(Runtime::kCallIC_Miss, 3);
// Move result to x1 and exit the internal frame.
__ Mov(x1, x0);
}
-void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) {
- EmitLoadTypeFeedbackVector(masm, x2);
- CallIC_ArrayStub stub(isolate(), state());
- __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
-}
-
-
void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }
}
-void CallIC_ArrayStub::PrintState(std::ostream& os) const { // NOLINT
- os << state() << " (Array)";
-}
-
-
void CallICStub::PrintState(std::ostream& os) const { // NOLINT
os << state();
}
V(CallConstruct) \
V(CallFunction) \
V(CallIC) \
- V(CallIC_Array) \
V(CEntry) \
V(CompareIC) \
V(DoubleToI) \
V(KeyedLoadICTrampoline) \
V(LoadICTrampoline) \
V(CallICTrampoline) \
- V(CallIC_ArrayTrampoline) \
V(LoadIndexedInterceptor) \
V(LoadIndexedString) \
V(MathPow) \
// Code generation helpers.
void GenerateMiss(MacroAssembler* masm);
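+ // Jumps to |miss| unless the callee is the Array function; otherwise
+ // tail-calls the ArrayConstructorStub.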
+ void HandleArrayCase(MacroAssembler* masm, Label* miss);
private:
void PrintState(std::ostream& os) const override; // NOLINT
};
-class CallIC_ArrayStub: public CallICStub {
- public:
- CallIC_ArrayStub(Isolate* isolate, const CallICState& state_in)
- : CallICStub(isolate, state_in) {}
-
- InlineCacheState GetICState() const final { return MONOMORPHIC; }
-
- private:
- void PrintState(std::ostream& os) const override; // NOLINT
-
- DEFINE_PLATFORM_CODE_STUB(CallIC_Array, CallICStub);
-};
-
-
// TODO(verwaest): Translate to hydrogen code stub.
class FunctionPrototypeStub : public PlatformCodeStub {
public:
};
-class CallIC_ArrayTrampolineStub : public CallICTrampolineStub {
- public:
- CallIC_ArrayTrampolineStub(Isolate* isolate, const CallICState& state)
- : CallICTrampolineStub(isolate, state) {}
-
- private:
- DEFINE_PLATFORM_CODE_STUB(CallIC_ArrayTrampoline, CallICTrampolineStub);
-};
-
-
class LoadICStub : public PlatformCodeStub {
public:
explicit LoadICStub(Isolate* isolate, const LoadICState& state)
}
-void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
+void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
// edi - function
// edx - slot id
// ebx - vector
- Label miss;
- int argc = arg_count();
- ParameterCount actual(argc);
-
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
__ cmp(edi, ecx);
- __ j(not_equal, &miss);
+ __ j(not_equal, miss);
__ mov(eax, arg_count());
+ // Reload ecx with the allocation site from vector[slot].
__ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize));
- // Verify that ecx contains an AllocationSite
- Factory* factory = masm->isolate()->factory();
- __ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
- factory->allocation_site_map());
- __ j(not_equal, &miss);
-
// Increment the call count for monomorphic function calls.
__ add(FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize + kPointerSize),
ArrayConstructorStub stub(masm->isolate(), arg_count());
__ TailCallStub(&stub);
- __ bind(&miss);
- GenerateMiss(masm);
-
- // The slow case, we need this no matter what to complete a call after a miss.
- __ Set(eax, arg_count());
- __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
+ // Unreachable.
}
}
__ bind(&extra_checks_or_miss);
- Label uninitialized, miss;
+ Label uninitialized, miss, not_allocation_site;
__ cmp(ecx, Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
__ j(equal, &slow_start);
+ // Check if we have an allocation site.
+ __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
+ Heap::kAllocationSiteMapRootIndex);
+ __ j(not_equal, &not_allocation_site);
+
+ // We have an allocation site.
+ HandleArrayCase(masm, &miss);
+
+ __ bind(&not_allocation_site);
+
// The following cases attempt to handle MISS cases without going to the
// runtime.
if (FLAG_trace_ic) {
__ push(edx);
// Call the entry.
- Runtime::FunctionId id = GetICState() == DEFAULT
- ? Runtime::kCallIC_Miss
- : Runtime::kCallIC_Customization_Miss;
- __ CallRuntime(id, 3);
+ __ CallRuntime(Runtime::kCallIC_Miss, 3);
// Move result to edi and exit the internal frame.
__ mov(edi, eax);
}
-void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) {
- EmitLoadTypeFeedbackVector(masm, ebx);
- CallIC_ArrayStub stub(isolate(), state());
- __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
-}
-
-
void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
if (masm->isolate()->function_entry_hook() != NULL) {
ProfileEntryHookStub stub(masm->isolate());
DCHECK(!target->is_inline_cache_stub() ||
(target->kind() != Code::LOAD_IC &&
target->kind() != Code::KEYED_LOAD_IC &&
+ target->kind() != Code::CALL_IC &&
(!FLAG_vector_stores || (target->kind() != Code::STORE_IC &&
target->kind() != Code::KEYED_STORE_IC))));
}
-bool CallIC::DoCustomHandler(Handle<Object> function,
- const CallICState& callic_state) {
- DCHECK(FLAG_use_ic && function->IsJSFunction());
-
- // Are we the array function?
- Handle<JSFunction> array_function =
- Handle<JSFunction>(isolate()->native_context()->array_function());
- if (array_function.is_identical_to(Handle<JSFunction>::cast(function))) {
- // Alter the slot.
- CallICNexus* nexus = casted_nexus<CallICNexus>();
- nexus->ConfigureMonomorphicArray();
-
- // Vector-based ICs have a different calling convention in optimized code
- // than full code so the correct stub has to be chosen.
- if (AddressIsOptimizedCode()) {
- CallIC_ArrayStub stub(isolate(), callic_state);
- set_target(*stub.GetCode());
- } else {
- CallIC_ArrayTrampolineStub stub(isolate(), callic_state);
- set_target(*stub.GetCode());
- }
-
- Handle<String> name;
- if (array_function->shared()->name()->IsString()) {
- name = Handle<String>(String::cast(array_function->shared()->name()),
- isolate());
- }
- TRACE_IC("CallIC", name);
- OnTypeFeedbackChanged(isolate(), get_host(), nexus->vector(), state(),
- MONOMORPHIC);
- return true;
- }
- return false;
-}
-
-
-void CallIC::PatchMegamorphic(Handle<Object> function) {
- CallICState callic_state(target()->extra_ic_state());
-
- // We are going generic.
- CallICNexus* nexus = casted_nexus<CallICNexus>();
- nexus->ConfigureMegamorphic();
-
- // Vector-based ICs have a different calling convention in optimized code
- // than full code so the correct stub has to be chosen.
- if (AddressIsOptimizedCode()) {
- CallICStub stub(isolate(), callic_state);
- set_target(*stub.GetCode());
- } else {
- CallICTrampolineStub stub(isolate(), callic_state);
- set_target(*stub.GetCode());
- }
-
- Handle<Object> name = isolate()->factory()->empty_string();
- if (function->IsJSFunction()) {
- Handle<JSFunction> js_function = Handle<JSFunction>::cast(function);
- name = handle(js_function->shared()->name(), isolate());
- }
-
- TRACE_IC("CallIC", name);
- OnTypeFeedbackChanged(isolate(), get_host(), nexus->vector(), state(),
- GENERIC);
-}
-
-
void CallIC::HandleMiss(Handle<Object> function) {
- CallICState callic_state(target()->extra_ic_state());
Handle<Object> name = isolate()->factory()->empty_string();
CallICNexus* nexus = casted_nexus<CallICNexus>();
Object* feedback = nexus->GetFeedback();
// Hand-coded MISS handling is easier if CallIC slots don't contain smis.
DCHECK(!feedback->IsSmi());
- if (feedback->IsWeakCell() || !function->IsJSFunction()) {
+ if (feedback->IsWeakCell() || !function->IsJSFunction() ||
+ feedback->IsAllocationSite()) {
// We are going generic.
nexus->ConfigureMegamorphic();
} else {
- // The feedback is either uninitialized or an allocation site.
- // It might be an allocation site because if we re-compile the full code
- // to add deoptimization support, we call with the default call-ic, and
- // merely need to patch the target to match the feedback.
- // TODO(mvstanton): the better approach is to dispense with patching
- // altogether, which is in progress.
- DCHECK(feedback == *TypeFeedbackVector::UninitializedSentinel(isolate()) ||
- feedback->IsAllocationSite());
-
- // Do we want to install a custom handler?
- if (FLAG_use_ic && DoCustomHandler(function, callic_state)) {
- return;
- }
+ DCHECK(feedback == *TypeFeedbackVector::UninitializedSentinel(isolate()));
+ Handle<JSFunction> js_function = Handle<JSFunction>::cast(function);
- nexus->ConfigureMonomorphic(Handle<JSFunction>::cast(function));
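+ // Calls to the Array function store an AllocationSite in the feedback
+ // slot so the CallIC stub can dispatch to HandleArrayCase.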
+ Handle<JSFunction> array_function =
+ Handle<JSFunction>(isolate()->native_context()->array_function());
+ if (array_function.is_identical_to(js_function)) {
+ // Alter the slot.
+ nexus->ConfigureMonomorphicArray();
+ } else {
+ nexus->ConfigureMonomorphic(js_function);
+ }
}
if (function->IsJSFunction()) {
}
-RUNTIME_FUNCTION(Runtime_CallIC_Customization_Miss) {
- TimerEventScope<TimerEventIcMiss> timer(isolate);
- HandleScope scope(isolate);
- DCHECK(args.length() == 3);
- Handle<Object> function = args.at<Object>(0);
- Handle<TypeFeedbackVector> vector = args.at<TypeFeedbackVector>(1);
- Handle<Smi> slot = args.at<Smi>(2);
- FeedbackVectorICSlot vector_slot = vector->ToICSlot(slot->value());
- CallICNexus nexus(vector, vector_slot);
- // A miss on a custom call ic always results in going megamorphic.
- CallIC ic(isolate, &nexus);
- ic.PatchMegamorphic(function);
- return *function;
-}
-
-
// Used from ic-<arch>.cc.
RUNTIME_FUNCTION(Runtime_LoadIC_Miss) {
TimerEventScope<TimerEventIcMiss> timer(isolate);
DCHECK(nexus != NULL);
}
- void PatchMegamorphic(Handle<Object> function);
-
void HandleMiss(Handle<Object> function);
- // Returns true if a custom handler was installed.
- bool DoCustomHandler(Handle<Object> function,
- const CallICState& callic_state);
-
// Code generator routines.
static Handle<Code> initialize_stub(Isolate* isolate, int argc,
CallICState::CallType call_type);
}
-void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
+void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
// a1 - function
// a3 - slot id
// a2 - vector
- Label miss;
-
+ // t0 - allocation site (loaded from vector[slot])
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, at);
- __ Branch(&miss, ne, a1, Operand(at));
+ __ Branch(miss, ne, a1, Operand(at));
__ li(a0, Operand(arg_count()));
- __ sll(at, a3, kPointerSizeLog2 - kSmiTagSize);
- __ Addu(at, a2, Operand(at));
- __ lw(t0, FieldMemOperand(at, FixedArray::kHeaderSize));
-
- // Verify that t0 contains an AllocationSite
- __ lw(t1, FieldMemOperand(t0, HeapObject::kMapOffset));
- __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
- __ Branch(&miss, ne, t1, Operand(at));
// Increment the call count for monomorphic function calls.
__ sll(at, a3, kPointerSizeLog2 - kSmiTagSize);
__ mov(a3, a1);
ArrayConstructorStub stub(masm->isolate(), arg_count());
__ TailCallStub(&stub);
-
- __ bind(&miss);
- GenerateMiss(masm);
-
- // The slow case, we need this no matter what to complete a call after a miss.
- __ li(a0, Operand(arg_count()));
- __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
}
__ bind(&extra_checks_or_miss);
- Label uninitialized, miss;
+ Label uninitialized, miss, not_allocation_site;
__ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
__ Branch(&slow_start, eq, t0, Operand(at));
+ // Verify that t0 contains an AllocationSite
+ __ lw(t1, FieldMemOperand(t0, HeapObject::kMapOffset));
+ __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
+ __ Branch(&not_allocation_site, ne, t1, Operand(at));
+
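+ // We have an allocation site.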
+ HandleArrayCase(masm, &miss);
+
+ __ bind(&not_allocation_site);
+
// The following cases attempt to handle MISS cases without going to the
// runtime.
if (FLAG_trace_ic) {
__ Push(a1, a2, a3);
// Call the entry.
- Runtime::FunctionId id = GetICState() == DEFAULT
- ? Runtime::kCallIC_Miss
- : Runtime::kCallIC_Customization_Miss;
- __ CallRuntime(id, 3);
+ __ CallRuntime(Runtime::kCallIC_Miss, 3);
// Move result to a1 and exit the internal frame.
__ mov(a1, v0);
}
-void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) {
- EmitLoadTypeFeedbackVector(masm, a2);
- CallIC_ArrayStub stub(isolate(), state());
- __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
-}
-
-
void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }
}
-void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
+void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
// a1 - function
// a3 - slot id
// a2 - vector
- Label miss;
-
+ // a4 - allocation site (loaded from vector[slot])
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, at);
- __ Branch(&miss, ne, a1, Operand(at));
-
- __ li(a0, Operand(arg_count()));
- __ dsrl(at, a3, 32 - kPointerSizeLog2);
- __ Daddu(at, a2, Operand(at));
- __ ld(a4, FieldMemOperand(at, FixedArray::kHeaderSize));
-
- // Verify that a4 contains an AllocationSite
- __ ld(a5, FieldMemOperand(a4, HeapObject::kMapOffset));
- __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
- __ Branch(&miss, ne, a5, Operand(at));
+ __ Branch(miss, ne, a1, Operand(at));
// Increment the call count for monomorphic function calls.
__ dsrl(t0, a3, 32 - kPointerSizeLog2);
__ mov(a3, a1);
ArrayConstructorStub stub(masm->isolate(), arg_count());
__ TailCallStub(&stub);
-
- __ bind(&miss);
- GenerateMiss(masm);
-
- // The slow case, we need this no matter what to complete a call after a miss.
- __ li(a0, Operand(arg_count()));
- __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
}
__ bind(&extra_checks_or_miss);
- Label uninitialized, miss;
+ Label uninitialized, miss, not_allocation_site;
__ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
__ Branch(&slow_start, eq, a4, Operand(at));
+ // Verify that a4 contains an AllocationSite
+ __ ld(a5, FieldMemOperand(a4, HeapObject::kMapOffset));
+ __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
+ __ Branch(&not_allocation_site, ne, a5, Operand(at));
+
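+ // We have an allocation site.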
+ HandleArrayCase(masm, &miss);
+
+ __ bind(&not_allocation_site);
+
// The following cases attempt to handle MISS cases without going to the
// runtime.
if (FLAG_trace_ic) {
__ Push(a1, a2, a3);
// Call the entry.
- Runtime::FunctionId id = GetICState() == DEFAULT
- ? Runtime::kCallIC_Miss //
- : Runtime::kCallIC_Customization_Miss;
- __ CallRuntime(id, 3);
+ __ CallRuntime(Runtime::kCallIC_Miss, 3);
// Move result to a1 and exit the internal frame.
__ mov(a1, v0);
}
-void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) {
- EmitLoadTypeFeedbackVector(masm, a2);
- CallIC_ArrayStub stub(isolate(), state());
- __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
-}
-
-
void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }
F(LoadIC_Miss, 3, 1) \
F(KeyedLoadIC_Miss, 3, 1) \
F(CallIC_Miss, 3, 1) \
- F(CallIC_Customization_Miss, 3, 1) \
F(StoreIC_Miss, 3, 1) \
F(StoreIC_Slow, 3, 1) \
F(KeyedStoreIC_Miss, 3, 1) \
}
-void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
+void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
// rdi - function
- // rdx - slot id (as integer)
+ // rdx - slot id
// rbx - vector
- Label miss;
- int argc = arg_count();
- ParameterCount actual(argc);
-
- __ SmiToInteger32(rdx, rdx);
-
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx);
- __ cmpp(rdi, rcx);
- __ j(not_equal, &miss);
+ // rcx - allocation site (loaded from vector[slot]).
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r8);
+ __ cmpp(rdi, r8);
+ __ j(not_equal, miss);
__ movp(rax, Immediate(arg_count()));
- __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size,
- FixedArray::kHeaderSize));
- // Verify that ecx contains an AllocationSite
- __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
- Heap::kAllocationSiteMapRootIndex);
- __ j(not_equal, &miss, Label::kNear);
// Increment the call count for monomorphic function calls.
- {
- __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size,
- FixedArray::kHeaderSize + kPointerSize),
- Smi::FromInt(CallICNexus::kCallCountIncrement));
-
- __ movp(rbx, rcx);
- __ movp(rdx, rdi);
- ArrayConstructorStub stub(masm->isolate(), arg_count());
- __ TailCallStub(&stub);
- }
-
- __ bind(&miss);
- GenerateMiss(masm);
+ __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size,
+ FixedArray::kHeaderSize + kPointerSize),
+ Smi::FromInt(CallICNexus::kCallCountIncrement));
- // The slow case, we need this no matter what to complete a call after a miss.
- __ Set(rax, arg_count());
- __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
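+ // Set up arguments for the array constructor stub.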
+ __ movp(rbx, rcx);
+ __ movp(rdx, rdi);
+ ArrayConstructorStub stub(masm->isolate(), arg_count());
+ __ TailCallStub(&stub);
}
}
__ bind(&extra_checks_or_miss);
- Label uninitialized, miss;
+ Label uninitialized, miss, not_allocation_site;
__ Cmp(rcx, TypeFeedbackVector::MegamorphicSentinel(isolate));
__ j(equal, &slow_start);
+ // Check if we have an allocation site.
+ __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
+ Heap::kAllocationSiteMapRootIndex);
+ __ j(not_equal, &not_allocation_site);
+
+ // We have an allocation site.
+ HandleArrayCase(masm, &miss);
+
+ __ bind(&not_allocation_site);
+
// The following cases attempt to handle MISS cases without going to the
// runtime.
if (FLAG_trace_ic) {
__ Push(rdx);
// Call the entry.
- Runtime::FunctionId id = GetICState() == DEFAULT
- ? Runtime::kCallIC_Miss
- : Runtime::kCallIC_Customization_Miss;
- __ CallRuntime(id, 3);
+ __ CallRuntime(Runtime::kCallIC_Miss, 3);
// Move result to rdi and exit the internal frame.
__ movp(rdi, rax);
}
-void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) {
- EmitLoadTypeFeedbackVector(masm, rbx);
- CallIC_ArrayStub stub(isolate(), state());
- __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
-}
-
-
void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
if (masm->isolate()->function_entry_hook() != NULL) {
ProfileEntryHookStub stub(masm->isolate());