// Calls |stub| inside an internal frame, preserving the listed registers
// across the call. |argc| is smi-tagged for the duration of the call-out
// and untagged again afterwards.
//
// NOTE(review): this span contained unresolved unified-diff markers
// ('-'/'+' line prefixes); below is the post-patch ('+') version, which
// threads the additional |orig_construct| register through the save/restore.
static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub,
                                       Register argc, Register function,
                                       Register feedback_vector, Register index,
                                       Register orig_construct) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Number-of-arguments register must be smi-tagged to call out.
  __ SmiTag(argc);
  // Save everything the caller needs across the stub call; restored in
  // reverse order after the call.
  __ Push(argc, function, feedback_vector, index, orig_construct);
  // Presumably the stub reads the vector/slot from x2/x3 — confirm against
  // the stub's register contract.
  DCHECK(feedback_vector.Is(x2) && index.Is(x3));
  __ CallStub(stub);
  __ Pop(orig_construct, index, feedback_vector, function, argc);
  __ SmiUntag(argc);
}
// Records the call target in the feedback vector slot, calling out to
// CreateAllocationSiteStub / CreateWeakCellStub via
// CallStubInRecordCallTarget to (re)initialize the slot.
//
// NOTE(review): this span is a unified-diff fragment — '-'/'+' prefixed
// lines below are unresolved patch markers, and a large part of the body
// (sentinel/megamorphic handling between the DCHECK_EQ and "// slot.") is
// elided by the extraction. Left byte-identical; do not treat as
// compilable as-is.
static void GenerateRecordCallTarget(MacroAssembler* masm, Register argc,
Register function,
Register feedback_vector, Register index,
- Register scratch1, Register scratch2,
- Register scratch3) {
+ Register orig_construct, Register scratch1,
+ Register scratch2, Register scratch3) {
ASM_LOCATION("GenerateRecordCallTarget");
// All working registers must be distinct from one another.
DCHECK(!AreAliased(scratch1, scratch2, scratch3, argc, function,
- feedback_vector, index));
+ feedback_vector, index, orig_construct));
// Cache the called function in a feedback vector slot. Cache states are
// uninitialized, monomorphic (indicated by a JSFunction), and megamorphic.
// argc : number of arguments to the construct function
// function : the function to call
// feedback_vector : the feedback vector
// index : slot in feedback vector (smi)
+ // orig_construct : original constructor
Label initialize, done, miss, megamorphic, not_array_function;
// NOTE(review): the DCHECK_EQ below is truncated mid-statement by the
// extraction; its second argument and the surrounding slot-state checks
// are missing from this view.
DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()),
// slot.
CreateAllocationSiteStub create_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &create_stub, argc, function,
- feedback_vector, index);
+ feedback_vector, index, orig_construct);
__ B(&done);
// NOTE(review): '¬_array_function' below is mojibake — '&not' was
// collapsed into the '¬' character; the intended text is
// '&not_array_function'. Confirm against the original file.
__ Bind(¬_array_function);
CreateWeakCellStub create_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &create_stub, argc, function,
- feedback_vector, index);
+ feedback_vector, index, orig_construct);
__ Bind(&done);
}
&slow);
if (RecordCallTarget()) {
- if (IsSuperConstructorCall()) {
- __ Push(x4);
- }
- // TODO(mstarzinger): Consider tweaking target recording to avoid push/pop.
- GenerateRecordCallTarget(masm, x0, function, x2, x3, x4, x5, x11);
- if (IsSuperConstructorCall()) {
- __ Pop(x4);
- }
+ GenerateRecordCallTarget(masm, x0, function, x2, x3, x4, x5, x11, x12);
__ Add(x5, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2));
if (FLAG_pretenuring_call_new) {
}
+void MacroAssembler::Pop(const CPURegister& dst0, const CPURegister& dst1,
+ const CPURegister& dst2, const CPURegister& dst3,
+ const CPURegister& dst4, const CPURegister& dst5,
+ const CPURegister& dst6, const CPURegister& dst7) {
+ // It is not valid to pop into the same register more than once in one
+ // instruction, not even into the zero register.
+ DCHECK(!AreAliased(dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7));
+ DCHECK(AreSameSizeAndType(dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7));
+ DCHECK(dst0.IsValid());
+
+ int count = 5 + dst5.IsValid() + dst6.IsValid() + dst7.IsValid();
+ int size = dst0.SizeInBytes();
+
+ PopHelper(4, size, dst0, dst1, dst2, dst3);
+ PopHelper(count - 4, size, dst4, dst5, dst6, dst7);
+ PopPostamble(count, size);
+}
+
+
void MacroAssembler::Push(const Register& src0, const FPRegister& src1) {
int size = src0.SizeInBytes() + src1.SizeInBytes();