__ LoadP(r4, MemOperand(sp, arg_count * kPointerSize), r0);
// Record call targets in unoptimized code.
- if (FLAG_pretenuring_call_new) {
- EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
- DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
- expr->CallNewFeedbackSlot().ToInt() + 1);
- }
-
__ Move(r5, FeedbackVector());
__ LoadSmiLiteral(r6, SmiFromSlot(expr->CallNewFeedbackSlot()));
__ LoadP(r4, MemOperand(sp, arg_count * kPointerSize));
// Record call targets in unoptimized code.
- if (FLAG_pretenuring_call_new) {
- UNREACHABLE();
- /* TODO(dslomov): support pretenuring.
- EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
- DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
- expr->CallNewFeedbackSlot().ToInt() + 1);
- */
- }
-
__ Move(r5, FeedbackVector());
__ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackSlot()));
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
- bool is_api_function,
- bool create_memento) {
+ bool is_api_function) {
// ----------- S t a t e -------------
// -- r3 : number of arguments
// -- r4 : constructor function
// -- sp[...]: constructor arguments
// -----------------------------------
- // Should never create mementos for api functions.
- DCHECK(!is_api_function || !create_memento);
-
Isolate* isolate = masm->isolate();
// Enter a construct frame.
// r5: initial map
Label rt_call_reload_new_target;
__ lbz(r6, FieldMemOperand(r5, Map::kInstanceSizeOffset));
- if (create_memento) {
- __ addi(r6, r6, Operand(AllocationMemento::kSize / kPointerSize));
- }
__ Allocate(r6, r7, r8, r9, &rt_call_reload_new_target, SIZE_IN_WORDS);
// initial map and properties and elements are set to empty fixed array.
// r4: constructor function
// r5: initial map
- // r6: object size (including memento if create_memento)
+ // r6: object size
// r7: JSObject (not tagged)
__ LoadRoot(r9, Heap::kEmptyFixedArrayRootIndex);
__ mr(r8, r7);
// Fill all the in-object properties with the appropriate filler.
// r4: constructor function
// r5: initial map
- // r6: object size (in words, including memento if create_memento)
+ // r6: object size
// r7: JSObject (not tagged)
// r8: First in-object property of JSObject (not tagged)
// r9: End of object
__ bind(&no_inobject_slack_tracking);
}
- if (create_memento) {
- __ subi(r3, r9, Operand(AllocationMemento::kSize));
- __ InitializeFieldsWithFiller(r8, r3, r10);
-
- // Fill in memento fields.
- // r8: points to the allocated but uninitialized memento.
- __ LoadRoot(r10, Heap::kAllocationMementoMapRootIndex);
- __ StoreP(r10, MemOperand(r8, AllocationMemento::kMapOffset));
- // Load the AllocationSite
- __ LoadP(r10, MemOperand(sp, 3 * kPointerSize));
- __ AssertUndefinedOrAllocationSite(r10, r3);
- __ StoreP(r10,
- MemOperand(r8, AllocationMemento::kAllocationSiteOffset));
- __ addi(r8, r8, Operand(AllocationMemento::kAllocationSiteOffset +
- kPointerSize));
- } else {
- __ InitializeFieldsWithFiller(r8, r9, r10);
- }
+ __ InitializeFieldsWithFiller(r8, r9, r10);
// Add the object tag to make the JSObject real, so that we can continue
// and jump into the continuation code at any time from now on.
// r4: constructor function
// r6: original constructor
__ bind(&rt_call);
- if (create_memento) {
- // Get the cell or allocation site.
- __ LoadP(r5, MemOperand(sp, 3 * kPointerSize));
- __ Push(r5, r4, r6);
- __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
- } else {
- __ Push(r4, r6);
- __ CallRuntime(Runtime::kNewObject, 2);
- }
+ __ Push(r4, r6);
+ __ CallRuntime(Runtime::kNewObject, 2);
__ mr(r7, r3);
- // Runtime_NewObjectWithAllocationSite increments allocation count.
- // Skip the increment.
- Label count_incremented;
- if (create_memento) {
- __ b(&count_incremented);
- }
-
// Receiver for constructor call allocated.
// r7: JSObject
__ bind(&allocated);
- if (create_memento) {
- __ LoadP(r5, MemOperand(sp, 3 * kPointerSize));
- __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
- __ cmp(r5, r8);
- __ beq(&count_incremented);
- // r5 is an AllocationSite. We are creating a memento from it, so we
- // need to increment the memento create count.
- __ LoadP(
- r6, FieldMemOperand(r5, AllocationSite::kPretenureCreateCountOffset));
- __ AddSmiLiteral(r6, r6, Smi::FromInt(1), r0);
- __ StoreP(
- r6, FieldMemOperand(r5, AllocationSite::kPretenureCreateCountOffset),
- r0);
- __ bind(&count_incremented);
- }
-
// Restore the parameters.
__ Pop(r4, ip);
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
+ Generate_JSConstructStubHelper(masm, false);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, true, false);
+ Generate_JSConstructStubHelper(masm, true);
}
__ beq(&done);
__ LoadP(feedback_map, FieldMemOperand(r8, HeapObject::kMapOffset));
__ CompareRoot(feedback_map, Heap::kWeakCellMapRootIndex);
- __ bne(FLAG_pretenuring_call_new ? &miss : &check_allocation_site);
+ __ bne(&check_allocation_site);
// If the weak cell is cleared, we have a new chance to become monomorphic.
__ JumpIfSmi(weak_value, &initialize);
__ b(&megamorphic);
- if (!FLAG_pretenuring_call_new) {
- __ bind(&check_allocation_site);
- // If we came here, we need to see if we are the array function.
- // If we didn't have a matching function, and we didn't find the megamorph
- // sentinel, then we have in the slot either some other function or an
- // AllocationSite.
- __ CompareRoot(feedback_map, Heap::kAllocationSiteMapRootIndex);
- __ bne(&miss);
+ __ bind(&check_allocation_site);
+ // If we came here, we need to see if we are the array function.
+ // If we didn't have a matching function, and we didn't find the megamorph
+ // sentinel, then we have in the slot either some other function or an
+ // AllocationSite.
+ __ CompareRoot(feedback_map, Heap::kAllocationSiteMapRootIndex);
+ __ bne(&miss);
- // Make sure the function is the Array() function
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r8);
- __ cmp(r4, r8);
- __ bne(&megamorphic);
- __ b(&done);
- }
+ // Make sure the function is the Array() function
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r8);
+ __ cmp(r4, r8);
+ __ bne(&megamorphic);
+ __ b(&done);
__ bind(&miss);
// An uninitialized cache is patched with the function
__ bind(&initialize);
- if (!FLAG_pretenuring_call_new) {
- // Make sure the function is the Array() function.
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r8);
- __ cmp(r4, r8);
- __ bne(&not_array_function);
-
- // The target function is the Array constructor,
- // Create an AllocationSite if we don't already have it, store it in the
- // slot.
- CreateAllocationSiteStub create_stub(masm->isolate());
- CallStubInRecordCallTarget(masm, &create_stub, is_super);
- __ b(&done);
-
- __ bind(&not_array_function);
- }
+ // Make sure the function is the Array() function.
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r8);
+ __ cmp(r4, r8);
+ __ bne(&not_array_function);
- CreateWeakCellStub create_stub(masm->isolate());
+ // The target function is the Array constructor,
+ // Create an AllocationSite if we don't already have it, store it in the
+ // slot.
+ CreateAllocationSiteStub create_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &create_stub, is_super);
+ __ b(&done);
+
+ __ bind(&not_array_function);
+
+ CreateWeakCellStub weak_cell_stub(masm->isolate());
+ CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super);
__ bind(&done);
}
__ SmiToPtrArrayOffset(r8, r6);
__ add(r8, r5, r8);
- if (FLAG_pretenuring_call_new) {
- // Put the AllocationSite from the feedback vector into r5.
- // By adding kPointerSize we encode that we know the AllocationSite
- // entry is at the feedback vector slot given by r6 + 1.
- __ LoadP(r5, FieldMemOperand(r8, FixedArray::kHeaderSize + kPointerSize));
+ // Put the AllocationSite from the feedback vector into r5, or undefined.
+ __ LoadP(r5, FieldMemOperand(r8, FixedArray::kHeaderSize));
+ __ LoadP(r8, FieldMemOperand(r5, AllocationSite::kMapOffset));
+ __ CompareRoot(r8, Heap::kAllocationSiteMapRootIndex);
+ if (CpuFeatures::IsSupported(ISELECT)) {
+ __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
+ __ isel(eq, r5, r5, r8);
} else {
- // Put the AllocationSite from the feedback vector into r5, or undefined.
- __ LoadP(r5, FieldMemOperand(r8, FixedArray::kHeaderSize));
- __ LoadP(r8, FieldMemOperand(r5, AllocationSite::kMapOffset));
- __ CompareRoot(r8, Heap::kAllocationSiteMapRootIndex);
- if (CpuFeatures::IsSupported(ISELECT)) {
- __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
- __ isel(eq, r5, r5, r8);
- } else {
- Label feedback_register_initialized;
- __ beq(&feedback_register_initialized);
- __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
- __ bind(&feedback_register_initialized);
- }
+ Label feedback_register_initialized;
+ __ beq(&feedback_register_initialized);
+ __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
+ __ bind(&feedback_register_initialized);
}
__ AssertUndefinedOrAllocationSite(r5, r8);