static void Generate_JSConstructStubHelper(MacroAssembler* masm,
- bool is_api_function,
- bool create_memento) {
+ bool is_api_function) {
// ----------- S t a t e -------------
// -- r0 : number of arguments
// -- r1 : constructor function
// -- sp[...]: constructor arguments
// -----------------------------------
- // Should never create mementos for api functions.
- DCHECK(!is_api_function || !create_memento);
-
Isolate* isolate = masm->isolate();
// Enter a construct frame.
// r2: initial map
Label rt_call_reload_new_target;
__ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
- if (create_memento) {
- __ add(r3, r3, Operand(AllocationMemento::kSize / kPointerSize));
- }
__ Allocate(r3, r4, r5, r6, &rt_call_reload_new_target, SIZE_IN_WORDS);
// initial map and properties and elements are set to empty fixed array.
// r1: constructor function
// r2: initial map
- // r3: object size (including memento if create_memento)
+ // r3: object size
// r4: JSObject (not tagged)
__ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
__ mov(r5, r4);
// Fill all the in-object properties with the appropriate filler.
// r1: constructor function
// r2: initial map
- // r3: object size (in words, including memento if create_memento)
+ // r3: object size
// r4: JSObject (not tagged)
// r5: First in-object property of JSObject (not tagged)
DCHECK_EQ(3 * kPointerSize, JSObject::kHeaderSize);
__ bind(&no_inobject_slack_tracking);
}
- if (create_memento) {
- __ sub(ip, r3, Operand(AllocationMemento::kSize / kPointerSize));
- __ add(r0, r4, Operand(ip, LSL, kPointerSizeLog2)); // End of object.
- __ InitializeFieldsWithFiller(r5, r0, r6);
-
- // Fill in memento fields.
- // r5: points to the allocated but uninitialized memento.
- __ LoadRoot(r6, Heap::kAllocationMementoMapRootIndex);
- DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
- __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
- // Load the AllocationSite
- __ ldr(r6, MemOperand(sp, 3 * kPointerSize));
- __ AssertUndefinedOrAllocationSite(r6, r0);
- DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
- __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
- } else {
- __ add(r0, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
- __ InitializeFieldsWithFiller(r5, r0, r6);
- }
+ __ add(r0, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
+ __ InitializeFieldsWithFiller(r5, r0, r6);
// Add the object tag to make the JSObject real, so that we can continue
// and jump into the continuation code at any time from now on.
// r1: constructor function
// r3: original constructor
__ bind(&rt_call);
- if (create_memento) {
- // Get the cell or allocation site.
- __ ldr(r2, MemOperand(sp, 3 * kPointerSize));
- __ push(r2); // argument 1: allocation site
- }
__ push(r1); // argument 2/1: constructor function
__ push(r3); // argument 3/2: original constructor
- if (create_memento) {
- __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
- } else {
- __ CallRuntime(Runtime::kNewObject, 2);
- }
+ __ CallRuntime(Runtime::kNewObject, 2);
__ mov(r4, r0);
- // Runtime_NewObjectWithAllocationSite increments allocation count.
- // Skip the increment.
- Label count_incremented;
- if (create_memento) {
- __ jmp(&count_incremented);
- }
-
// Receiver for constructor call allocated.
// r4: JSObject
__ bind(&allocated);
- if (create_memento) {
- __ ldr(r2, MemOperand(sp, 3 * kPointerSize));
- __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
- __ cmp(r2, r5);
- __ b(eq, &count_incremented);
- // r2 is an AllocationSite. We are creating a memento from it, so we
- // need to increment the memento create count.
- __ ldr(r3, FieldMemOperand(r2,
- AllocationSite::kPretenureCreateCountOffset));
- __ add(r3, r3, Operand(Smi::FromInt(1)));
- __ str(r3, FieldMemOperand(r2,
- AllocationSite::kPretenureCreateCountOffset));
- __ bind(&count_incremented);
- }
-
// Restore the parameters.
__ pop(r3);
__ pop(r1);
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
+ Generate_JSConstructStubHelper(masm, false);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, true, false);
+ Generate_JSConstructStubHelper(masm, true);
}
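With create_memento removed, every architecture's construct stub has a single fast path: allocate the object at its plain instance size, fill the in-object properties with the undefined filler, and fall back to Runtime::kNewObject with exactly two arguments when inline allocation fails. A minimal C++ sketch of that shape, where TryAllocate, CallRuntimeNewObject, and FillWithUndefined are hypothetical stand-ins for the generated Allocate/CallRuntime sequences above, not the MacroAssembler API:

// Schematic only; helpers below are illustrative stand-ins.
JSObject* TryAllocate(int size_in_words);            // returns nullptr on failure
Object* CallRuntimeNewObject(JSFunction* ctor, Object* new_target);
void FillWithUndefined(JSObject* obj, int size_in_words);

Object* ConstructStubFastPath(JSFunction* ctor, Object* new_target,
                              int instance_size_in_words) {
  JSObject* obj = TryAllocate(instance_size_in_words);  // no memento tail anymore
  if (obj == nullptr) {
    return CallRuntimeNewObject(ctor, new_target);      // rt_call path, 2 args
  }
  FillWithUndefined(obj, instance_size_in_words);       // InitializeFieldsWithFiller
  return obj;                                           // tagged, used as receiver
}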
__ b(eq, &done);
__ ldr(feedback_map, FieldMemOperand(r5, HeapObject::kMapOffset));
__ CompareRoot(feedback_map, Heap::kWeakCellMapRootIndex);
- __ b(ne, FLAG_pretenuring_call_new ? &miss : &check_allocation_site);
+ __ b(ne, &check_allocation_site);
// If the weak cell is cleared, we have a new chance to become monomorphic.
__ JumpIfSmi(weak_value, &initialize);
__ jmp(&megamorphic);
- if (!FLAG_pretenuring_call_new) {
- __ bind(&check_allocation_site);
- // If we came here, we need to see if we are the array function.
- // If we didn't have a matching function, and we didn't find the megamorph
- // sentinel, then we have in the slot either some other function or an
- // AllocationSite.
- __ CompareRoot(feedback_map, Heap::kAllocationSiteMapRootIndex);
- __ b(ne, &miss);
+ __ bind(&check_allocation_site);
+ // If we came here, we need to see if we are the array function.
+ // If we didn't have a matching function, and we didn't find the megamorph
+ // sentinel, then we have in the slot either some other function or an
+ // AllocationSite.
+ __ CompareRoot(feedback_map, Heap::kAllocationSiteMapRootIndex);
+ __ b(ne, &miss);
- // Make sure the function is the Array() function
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r5);
- __ cmp(r1, r5);
- __ b(ne, &megamorphic);
- __ jmp(&done);
- }
+ // Make sure the function is the Array() function
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r5);
+ __ cmp(r1, r5);
+ __ b(ne, &megamorphic);
+ __ jmp(&done);
__ bind(&miss);
// An uninitialized cache is patched with the function
__ bind(&initialize);
- if (!FLAG_pretenuring_call_new) {
- // Make sure the function is the Array() function
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r5);
- __ cmp(r1, r5);
- __ b(ne, &not_array_function);
-
- // The target function is the Array constructor,
- // Create an AllocationSite if we don't already have it, store it in the
- // slot.
- CreateAllocationSiteStub create_stub(masm->isolate());
- CallStubInRecordCallTarget(masm, &create_stub, is_super);
- __ b(&done);
-
- __ bind(&not_array_function);
- }
+ // Make sure the function is the Array() function
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r5);
+ __ cmp(r1, r5);
+ __ b(ne, &not_array_function);
- CreateWeakCellStub create_stub(masm->isolate());
+ // The target function is the Array constructor,
+ // Create an AllocationSite if we don't already have it, store it in the
+ // slot.
+ CreateAllocationSiteStub create_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &create_stub, is_super);
+ __ b(&done);
+
+ __ bind(&not_array_function);
+ CreateWeakCellStub weak_cell_stub(masm->isolate());
+ CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super);
__ bind(&done);
}
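GenerateRecordCallTarget is now one unconditional state machine: a generic construct target is cached in a WeakCell, and only the Array() constructor earns an AllocationSite. A hedged C++ sketch of the slot transitions the assembly above encodes; all predicates and constructors here are hypothetical stand-ins for the root/map comparisons and stub calls:

// Hypothetical stand-ins for the checks and stub calls in the hunk above.
bool IsMegamorphicSentinel(Object* o);
bool IsUninitializedSentinel(Object* o);
bool IsWeakCellFor(Object* o, JSFunction* target);  // cleared-cell handling elided
bool IsAllocationSite(Object* o);
Object* NewAllocationSite();
Object* NewWeakCell(JSFunction* target);
Object* MegamorphicSentinel();

void RecordCallTarget(Object** slot, JSFunction* target,
                      JSFunction* array_function) {
  Object* feedback = *slot;
  if (IsMegamorphicSentinel(feedback)) return;               // stay megamorphic
  if (IsWeakCellFor(feedback, target)) return;               // monomorphic hit
  if (IsAllocationSite(feedback) && target == array_function) return;  // Array hit
  if (!IsUninitializedSentinel(feedback)) {
    *slot = MegamorphicSentinel();                           // miss: go megamorphic
    return;
  }
  // initialize: AllocationSite for Array(), WeakCell for everything else.
  *slot = (target == array_function) ? NewAllocationSite() : NewWeakCell(target);
}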
GenerateRecordCallTarget(masm, IsSuperConstructorCall());
__ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
- if (FLAG_pretenuring_call_new) {
- // Put the AllocationSite from the feedback vector into r2.
- // By adding kPointerSize we encode that we know the AllocationSite
- // entry is at the feedback vector slot given by r3 + 1.
- __ ldr(r2, FieldMemOperand(r5, FixedArray::kHeaderSize + kPointerSize));
- } else {
- Label feedback_register_initialized;
- // Put the AllocationSite from the feedback vector into r2, or undefined.
- __ ldr(r2, FieldMemOperand(r5, FixedArray::kHeaderSize));
- __ ldr(r5, FieldMemOperand(r2, AllocationSite::kMapOffset));
- __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
- __ b(eq, &feedback_register_initialized);
- __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
- __ bind(&feedback_register_initialized);
- }
+ Label feedback_register_initialized;
+ // Put the AllocationSite from the feedback vector into r2, or undefined.
+ __ ldr(r2, FieldMemOperand(r5, FixedArray::kHeaderSize));
+ __ ldr(r5, FieldMemOperand(r2, AllocationSite::kMapOffset));
+ __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
+ __ b(eq, &feedback_register_initialized);
+ __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
+ __ bind(&feedback_register_initialized);
__ AssertUndefinedOrAllocationSite(r2, r5);
}
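CallConstructStub likewise no longer reads an AllocationSite out of slot index + 1; it always loads the feedback slot itself and normalizes anything that is not an AllocationSite to undefined before the assert. Schematically, with illustrative names rather than the V8 API:

// feedback register = r2 / x2 / ebx / a2 / rbx depending on architecture.
Object* feedback = vector_slot_contents;   // the FixedArray::kHeaderSize load above
Object* site_or_undefined =
    HasAllocationSiteMap(feedback) ? feedback : UndefinedValue();
// AssertUndefinedOrAllocationSite(site_or_undefined) then holds by construction.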
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
- bool is_api_function,
- bool create_memento) {
+ bool is_api_function) {
// ----------- S t a t e -------------
// -- x0 : number of arguments
// -- x1 : constructor function
// -----------------------------------
ASM_LOCATION("Builtins::Generate_JSConstructStubHelper");
- // Should never create mementos for api functions.
- DCHECK(!is_api_function || !create_memento);
Isolate* isolate = masm->isolate();
Register obj_size = x3;
Register new_obj = x4;
__ Ldrb(obj_size, FieldMemOperand(init_map, Map::kInstanceSizeOffset));
- if (create_memento) {
- __ Add(x7, obj_size,
- Operand(AllocationMemento::kSize / kPointerSize));
- __ Allocate(x7, new_obj, x10, x11, &rt_call_reload_new_target,
- SIZE_IN_WORDS);
- } else {
- __ Allocate(obj_size, new_obj, x10, x11, &rt_call_reload_new_target,
- SIZE_IN_WORDS);
- }
+ __ Allocate(obj_size, new_obj, x10, x11, &rt_call_reload_new_target,
+ SIZE_IN_WORDS);
// Allocated the JSObject, now initialize the fields. Map is set to
// initial map and properties and elements are set to empty fixed array.
__ bind(&no_inobject_slack_tracking);
}
- if (create_memento) {
- // Fill the pre-allocated fields with undef.
- __ FillFields(first_prop, prop_fields, filler);
- __ Add(first_prop, new_obj, Operand(obj_size, LSL, kPointerSizeLog2));
- __ LoadRoot(x14, Heap::kAllocationMementoMapRootIndex);
- DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
- __ Str(x14, MemOperand(first_prop, kPointerSize, PostIndex));
- // Load the AllocationSite
- __ Peek(x14, 3 * kXRegSize);
- __ AssertUndefinedOrAllocationSite(x14, x10);
- DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
- __ Str(x14, MemOperand(first_prop, kPointerSize, PostIndex));
- first_prop = NoReg;
- } else {
- // Fill all of the property fields with undef.
- __ FillFields(first_prop, prop_fields, filler);
- first_prop = NoReg;
- prop_fields = NoReg;
- }
+
+ // Fill all of the property fields with undef.
+ __ FillFields(first_prop, prop_fields, filler);
+ first_prop = NoReg;
+ prop_fields = NoReg;
// Add the object tag to make the JSObject real, so that we can continue
// and jump into the continuation code at any time from now on.
// x1: constructor function
// x3: original constructor
__ Bind(&rt_call);
- Label count_incremented;
- if (create_memento) {
- // Get the cell or allocation site.
- __ Peek(x4, 3 * kXRegSize);
- __ Push(x4, constructor, original_constructor); // arguments 1-3
- __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
- __ Mov(x4, x0);
- // If we ended up using the runtime, and we want a memento, then the
- // runtime call made it for us, and we shouldn't do create count
- // increment.
- __ B(&count_incremented);
- } else {
- __ Push(constructor, original_constructor); // arguments 1-2
- __ CallRuntime(Runtime::kNewObject, 2);
- __ Mov(x4, x0);
- }
+ __ Push(constructor, original_constructor); // arguments 1-2
+ __ CallRuntime(Runtime::kNewObject, 2);
+ __ Mov(x4, x0);
// Receiver for constructor call allocated.
// x4: JSObject
__ Bind(&allocated);
- if (create_memento) {
- __ Peek(x10, 3 * kXRegSize);
- __ JumpIfRoot(x10, Heap::kUndefinedValueRootIndex, &count_incremented);
- // r2 is an AllocationSite. We are creating a memento from it, so we
- // need to increment the memento create count.
- __ Ldr(x5, FieldMemOperand(x10,
- AllocationSite::kPretenureCreateCountOffset));
- __ Add(x5, x5, Operand(Smi::FromInt(1)));
- __ Str(x5, FieldMemOperand(x10,
- AllocationSite::kPretenureCreateCountOffset));
- __ bind(&count_incremented);
- }
-
// Restore the parameters.
__ Pop(original_constructor);
__ Pop(constructor);
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
+ Generate_JSConstructStubHelper(masm, false);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, true, false);
+ Generate_JSConstructStubHelper(masm, true);
}
__ B(eq, &done);
__ Ldr(feedback_map, FieldMemOperand(feedback, HeapObject::kMapOffset));
__ CompareRoot(feedback_map, Heap::kWeakCellMapRootIndex);
- __ B(ne, FLAG_pretenuring_call_new ? &miss : &check_allocation_site);
+ __ B(ne, &check_allocation_site);
// If the weak cell is cleared, we have a new chance to become monomorphic.
__ JumpIfSmi(feedback_value, &initialize);
__ B(&megamorphic);
- if (!FLAG_pretenuring_call_new) {
- __ bind(&check_allocation_site);
- // If we came here, we need to see if we are the array function.
- // If we didn't have a matching function, and we didn't find the megamorph
- // sentinel, then we have in the slot either some other function or an
- // AllocationSite.
- __ JumpIfNotRoot(feedback_map, Heap::kAllocationSiteMapRootIndex, &miss);
-
- // Make sure the function is the Array() function
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
- __ Cmp(function, scratch1);
- __ B(ne, &megamorphic);
- __ B(&done);
- }
+ __ bind(&check_allocation_site);
+ // If we came here, we need to see if we are the array function.
+ // If we didn't have a matching function, and we didn't find the megamorph
+ // sentinel, then we have in the slot either some other function or an
+ // AllocationSite.
+ __ JumpIfNotRoot(feedback_map, Heap::kAllocationSiteMapRootIndex, &miss);
+
+ // Make sure the function is the Array() function
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
+ __ Cmp(function, scratch1);
+ __ B(ne, &megamorphic);
+ __ B(&done);
__ Bind(&miss);
// indicate the ElementsKind if function is the Array constructor.
__ Bind(&initialize);
- if (!FLAG_pretenuring_call_new) {
- // Make sure the function is the Array() function
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
- __ Cmp(function, scratch1);
- __ B(ne, &not_array_function);

-
- // The target function is the Array constructor,
- // Create an AllocationSite if we don't already have it, store it in the
- // slot.
- CreateAllocationSiteStub create_stub(masm->isolate());
- CallStubInRecordCallTarget(masm, &create_stub, argc, function,
- feedback_vector, index, orig_construct,
- is_super);
- __ B(&done);
-
- __ Bind(&not_array_function);
- }
+ // Make sure the function is the Array() function
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
+ __ Cmp(function, scratch1);
+ __ B(ne, &not_array_function);
- CreateWeakCellStub create_stub(masm->isolate());
+ // The target function is the Array constructor,
+ // Create an AllocationSite if we don't already have it, store it in the
+ // slot.
+ CreateAllocationSiteStub create_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &create_stub, argc, function,
feedback_vector, index, orig_construct, is_super);
+ __ B(&done);
+
+ __ Bind(&not_array_function);
+ CreateWeakCellStub weak_cell_stub(masm->isolate());
+ CallStubInRecordCallTarget(masm, &weak_cell_stub, argc, function,
+ feedback_vector, index, orig_construct, is_super);
__ Bind(&done);
}
IsSuperConstructorCall());
__ Add(x5, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2));
- if (FLAG_pretenuring_call_new) {
- // Put the AllocationSite from the feedback vector into x2.
- // By adding kPointerSize we encode that we know the AllocationSite
- // entry is at the feedback vector slot given by x3 + 1.
- __ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize + kPointerSize));
- } else {
Label feedback_register_initialized;
- // Put the AllocationSite from the feedback vector into x2, or undefined.
- __ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize));
- __ Ldr(x5, FieldMemOperand(x2, AllocationSite::kMapOffset));
- __ JumpIfRoot(x5, Heap::kAllocationSiteMapRootIndex,
- &feedback_register_initialized);
- __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
- __ bind(&feedback_register_initialized);
- }
+ // Put the AllocationSite from the feedback vector into x2, or undefined.
+ __ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize));
+ __ Ldr(x5, FieldMemOperand(x2, AllocationSite::kMapOffset));
+ __ JumpIfRoot(x5, Heap::kAllocationSiteMapRootIndex,
+ &feedback_register_initialized);
+ __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
+ __ bind(&feedback_register_initialized);
__ AssertUndefinedOrAllocationSite(x2, x5);
}
// Type feedback information.
virtual FeedbackVectorRequirements ComputeFeedbackRequirements(
Isolate* isolate, const ICSlotCache* cache) override {
- return FeedbackVectorRequirements(FLAG_pretenuring_call_new ? 2 : 1, 0);
+ return FeedbackVectorRequirements(1, 0);
}
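On the AST side, a CallNew expression now reserves exactly one feedback vector slot; the companion slot at CallNewFeedbackSlot() + 1 that held the AllocationSite, along with the AllocationSiteFeedbackSlot() accessor removed below, is gone. Illustrative layout:

// Before: vector[n]     -> call-new target feedback
//         vector[n + 1] -> AllocationSite (only with FLAG_pretenuring_call_new)
// After:  vector[n]     -> WeakCell(target), or AllocationSite for Array() only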
void SetFirstFeedbackSlot(FeedbackVectorSlot slot) override {
callnew_feedback_slot_ = slot;
DCHECK(!callnew_feedback_slot_.IsInvalid());
return callnew_feedback_slot_;
}
- FeedbackVectorSlot AllocationSiteFeedbackSlot() {
- DCHECK(FLAG_pretenuring_call_new);
- return CallNewFeedbackSlot().next();
- }
bool IsMonomorphic() override { return is_monomorphic_; }
Handle<JSFunction> target() const { return target_; }
// Flags for experimental implementation features.
DEFINE_BOOL(compiled_keyed_generic_loads, false,
"use optimizing compiler to generate keyed generic load stubs")
-// TODO(hpayer): We will remove this flag as soon as we have pretenuring
-// support for specific allocation sites.
-DEFINE_BOOL(pretenuring_call_new, false, "pretenure call new")
DEFINE_BOOL(allocation_site_pretenuring, true,
"pretenure with allocation sites")
DEFINE_BOOL(trace_pretenuring, false,
__ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
// Record call targets in unoptimized code.
- if (FLAG_pretenuring_call_new) {
- EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
- DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
- expr->CallNewFeedbackSlot().ToInt() + 1);
- }
-
__ Move(r2, FeedbackVector());
__ mov(r3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
__ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
// Record call targets in unoptimized code.
- if (FLAG_pretenuring_call_new) {
- UNREACHABLE();
- /* TODO(dslomov): support pretenuring.
- EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
- DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
- expr->CallNewFeedbackSlot().ToInt() + 1);
- */
- }
-
__ Move(r2, FeedbackVector());
__ mov(r3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
__ Peek(x1, arg_count * kXRegSize);
// Record call targets in unoptimized code.
- if (FLAG_pretenuring_call_new) {
- EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
- DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
- expr->CallNewFeedbackSlot().ToInt() + 1);
- }
-
__ LoadObject(x2, FeedbackVector());
__ Mov(x3, SmiFromSlot(expr->CallNewFeedbackSlot()));
__ Peek(x1, arg_count * kXRegSize);
// Record call targets in unoptimized code.
- if (FLAG_pretenuring_call_new) {
- UNREACHABLE();
- /* TODO(dslomov): support pretenuring.
- EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
- DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
- expr->CallNewFeedbackSlot().ToInt() + 1);
- */
- }
-
__ LoadObject(x2, FeedbackVector());
__ Mov(x3, SmiFromSlot(expr->CallFeedbackSlot()));
__ mov(edi, Operand(esp, arg_count * kPointerSize));
// Record call targets in unoptimized code.
- if (FLAG_pretenuring_call_new) {
- EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
- DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
- expr->CallNewFeedbackSlot().ToInt() + 1);
- }
-
__ LoadHeapObject(ebx, FeedbackVector());
__ mov(edx, Immediate(SmiFromSlot(expr->CallNewFeedbackSlot())));
__ mov(edi, Operand(esp, arg_count * kPointerSize));
// Record call targets in unoptimized code.
- if (FLAG_pretenuring_call_new) {
- UNREACHABLE();
- /* TODO(dslomov): support pretenuring.
- EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
- DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
- expr->CallNewFeedbackSlot().ToInt() + 1);
- */
- }
-
__ LoadHeapObject(ebx, FeedbackVector());
__ mov(edx, Immediate(SmiFromSlot(expr->CallFeedbackSlot())));
__ lw(a1, MemOperand(sp, arg_count * kPointerSize));
// Record call targets in unoptimized code.
- if (FLAG_pretenuring_call_new) {
- EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
- DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
- expr->CallNewFeedbackSlot().ToInt() + 1);
- }
-
__ li(a2, FeedbackVector());
__ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
__ lw(a1, MemOperand(sp, arg_count * kPointerSize));
// Record call targets in unoptimized code.
- if (FLAG_pretenuring_call_new) {
- UNREACHABLE();
- /* TODO(dslomov): support pretenuring.
- EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
- DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
- expr->CallNewFeedbackSlot().ToInt() + 1);
- */
- }
-
__ li(a2, FeedbackVector());
__ li(a3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
__ ld(a1, MemOperand(sp, arg_count * kPointerSize));
// Record call targets in unoptimized code.
- if (FLAG_pretenuring_call_new) {
- EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
- DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
- expr->CallNewFeedbackSlot().ToInt() + 1);
- }
-
__ li(a2, FeedbackVector());
__ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
__ ld(a1, MemOperand(sp, arg_count * kPointerSize));
// Record call targets in unoptimized code.
- if (FLAG_pretenuring_call_new) {
- UNREACHABLE();
- /* TODO(dslomov): support pretenuring.
- EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
- DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
- expr->CallNewFeedbackSlot().ToInt() + 1);
- */
- }
-
__ li(a2, FeedbackVector());
__ li(a3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
__ movp(rdi, Operand(rsp, arg_count * kPointerSize));
// Record call targets in unoptimized code, but not in the snapshot.
- if (FLAG_pretenuring_call_new) {
- EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
- DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
- expr->CallNewFeedbackSlot().ToInt() + 1);
- }
-
__ Move(rbx, FeedbackVector());
__ Move(rdx, SmiFromSlot(expr->CallNewFeedbackSlot()));
__ movp(rdi, Operand(rsp, arg_count * kPointerSize));
// Record call targets in unoptimized code.
- if (FLAG_pretenuring_call_new) {
- UNREACHABLE();
- /* TODO(dslomov): support pretenuring.
- EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
- DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
- expr->CallNewFeedbackSlot().ToInt() + 1);
- */
- }
-
__ Move(rbx, FeedbackVector());
__ Move(rdx, SmiFromSlot(expr->CallFeedbackSlot()));
// Allocate an instance of the implicit receiver object.
HValue* size_in_bytes = Add<HConstant>(instance_size);
HAllocationMode allocation_mode;
- if (FLAG_pretenuring_call_new) {
- if (FLAG_allocation_site_pretenuring) {
- // Try to use pretenuring feedback.
- Handle<AllocationSite> allocation_site = expr->allocation_site();
- allocation_mode = HAllocationMode(allocation_site);
- // Take a dependency on allocation site.
- top_info()->dependencies()->AssumeTenuringDecision(allocation_site);
- }
- }
-
HAllocate* receiver = BuildAllocate(
size_in_bytes, HType::JSObject(), JS_OBJECT_TYPE, allocation_mode);
receiver->set_known_initial_map(initial_map);
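In Hydrogen, the inlined receiver allocation therefore always uses a default-constructed HAllocationMode. Assuming the default mode carries no AllocationSite and requests NOT_TENURED (my reading of the default constructor, which this hunk does not show), the allocation lands in the young generation, and pretenuring now comes only from FLAG_allocation_site_pretenuring feedback on other allocation paths:

// Sketch of that assumption (illustrative, not the real class definition):
struct HAllocationModeSketch {
  AllocationSite* site = nullptr;         // no pretenuring feedback attached
  PretenureFlag pretenure = NOT_TENURED;  // young-generation allocation
};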
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
- bool is_api_function,
- bool create_memento) {
+ bool is_api_function) {
// ----------- S t a t e -------------
// -- eax: number of arguments
// -- edi: constructor function
// -- edx: original constructor
// -----------------------------------
- // Should never create mementos for api functions.
- DCHECK(!is_api_function || !create_memento);
-
// Enter a construct frame.
{
FrameScope scope(masm, StackFrame::CONSTRUCT);
// eax: initial map
__ movzx_b(edi, FieldOperand(eax, Map::kInstanceSizeOffset));
__ shl(edi, kPointerSizeLog2);
- if (create_memento) {
- __ add(edi, Immediate(AllocationMemento::kSize));
- }
__ Allocate(edi, ebx, edi, no_reg, &rt_call, NO_ALLOCATION_FLAGS);
// Allocated the JSObject, now initialize the fields.
// eax: initial map
// ebx: JSObject
- // edi: start of next object (including memento if create_memento)
+ // edi: start of next object
__ mov(Operand(ebx, JSObject::kMapOffset), eax);
__ mov(ecx, factory->empty_fixed_array());
__ mov(Operand(ebx, JSObject::kPropertiesOffset), ecx);
// Set extra fields in the newly allocated object.
// eax: initial map
// ebx: JSObject
- // edi: start of next object (including memento if create_memento)
+ // edi: start of next object
// esi: slack tracking counter (non-API function case)
__ mov(edx, factory->undefined_value());
__ lea(ecx, Operand(ebx, JSObject::kHeaderSize));
__ bind(&no_inobject_slack_tracking);
}
- if (create_memento) {
- __ lea(esi, Operand(edi, -AllocationMemento::kSize));
- __ InitializeFieldsWithFiller(ecx, esi, edx);
-
- // Fill in memento fields if necessary.
- // esi: points to the allocated but uninitialized memento.
- __ mov(Operand(esi, AllocationMemento::kMapOffset),
- factory->allocation_memento_map());
- // Get the cell or undefined.
- __ mov(edx, Operand(esp, 3 * kPointerSize));
- __ AssertUndefinedOrAllocationSite(edx);
- __ mov(Operand(esi, AllocationMemento::kAllocationSiteOffset),
- edx);
- } else {
- __ InitializeFieldsWithFiller(ecx, edi, edx);
- }
+ __ InitializeFieldsWithFiller(ecx, edi, edx);
// Add the object tag to make the JSObject real, so that we can continue
// and jump into the continuation code at any time from now on.
// edx: original constructor
__ bind(&rt_call);
int offset = kPointerSize;
- if (create_memento) {
- // Get the cell or allocation site.
- __ mov(edi, Operand(esp, kPointerSize * 3));
- __ push(edi); // argument 1: allocation site
- offset += kPointerSize;
- }
// Must restore esi (context) and edi (constructor) before calling
// runtime.
__ mov(edi, Operand(esp, offset));
__ push(edi); // argument 2/1: constructor function
__ push(edx); // argument 3/2: original constructor
- if (create_memento) {
- __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
- } else {
- __ CallRuntime(Runtime::kNewObject, 2);
- }
+ __ CallRuntime(Runtime::kNewObject, 2);
__ mov(ebx, eax); // store result in ebx
- // Runtime_NewObjectWithAllocationSite increments allocation count.
- // Skip the increment.
- Label count_incremented;
- if (create_memento) {
- __ jmp(&count_incremented);
- }
-
// New object allocated.
// ebx: newly allocated object
__ bind(&allocated);
- if (create_memento) {
- __ mov(ecx, Operand(esp, 3 * kPointerSize));
- __ cmp(ecx, masm->isolate()->factory()->undefined_value());
- __ j(equal, &count_incremented);
- // ecx is an AllocationSite. We are creating a memento from it, so we
- // need to increment the memento create count.
- __ add(FieldOperand(ecx, AllocationSite::kPretenureCreateCountOffset),
- Immediate(Smi::FromInt(1)));
- __ bind(&count_incremented);
- }
-
// Restore the parameters.
__ pop(edx); // new.target
__ pop(edi); // Constructor function.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
+ Generate_JSConstructStubHelper(masm, false);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, true, false);
+ Generate_JSConstructStubHelper(masm, true);
}
__ j(equal, &done, Label::kFar);
__ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
Heap::kWeakCellMapRootIndex);
- __ j(not_equal, FLAG_pretenuring_call_new ? &miss : &check_allocation_site);
+ __ j(not_equal, &check_allocation_site);
// If the weak cell is cleared, we have a new chance to become monomorphic.
__ JumpIfSmi(FieldOperand(ecx, WeakCell::kValueOffset), &initialize);
__ jmp(&megamorphic);
- if (!FLAG_pretenuring_call_new) {
- __ bind(&check_allocation_site);
- // If we came here, we need to see if we are the array function.
- // If we didn't have a matching function, and we didn't find the megamorph
- // sentinel, then we have in the slot either some other function or an
- // AllocationSite.
- __ CompareRoot(FieldOperand(ecx, 0), Heap::kAllocationSiteMapRootIndex);
- __ j(not_equal, &miss);
+ __ bind(&check_allocation_site);
+ // If we came here, we need to see if we are the array function.
+ // If we didn't have a matching function, and we didn't find the megamorph
+ // sentinel, then we have in the slot either some other function or an
+ // AllocationSite.
+ __ CompareRoot(FieldOperand(ecx, 0), Heap::kAllocationSiteMapRootIndex);
+ __ j(not_equal, &miss);
- // Make sure the function is the Array() function
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
- __ cmp(edi, ecx);
- __ j(not_equal, &megamorphic);
- __ jmp(&done, Label::kFar);
- }
+ // Make sure the function is the Array() function
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
+ __ cmp(edi, ecx);
+ __ j(not_equal, &megamorphic);
+ __ jmp(&done, Label::kFar);
__ bind(&miss);
// An uninitialized cache is patched with the function or sentinel to
// indicate the ElementsKind if function is the Array constructor.
__ bind(&initialize);
- if (!FLAG_pretenuring_call_new) {
- // Make sure the function is the Array() function
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
- __ cmp(edi, ecx);
- __ j(not_equal, &not_array_function);
-
- // The target function is the Array constructor,
- // Create an AllocationSite if we don't already have it, store it in the
- // slot.
- CreateAllocationSiteStub create_stub(isolate);
- CallStubInRecordCallTarget(masm, &create_stub, is_super);
- __ jmp(&done);
-
- __ bind(&not_array_function);
- }
+ // Make sure the function is the Array() function
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
+ __ cmp(edi, ecx);
+ __ j(not_equal, &not_array_function);
- CreateWeakCellStub create_stub(isolate);
+ // The target function is the Array constructor,
+ // Create an AllocationSite if we don't already have it, store it in the
+ // slot.
+ CreateAllocationSiteStub create_stub(isolate);
CallStubInRecordCallTarget(masm, &create_stub, is_super);
+ __ jmp(&done);
+
+ __ bind(&not_array_function);
+ CreateWeakCellStub weak_cell_stub(isolate);
+ CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super);
__ bind(&done);
}
if (RecordCallTarget()) {
GenerateRecordCallTarget(masm, IsSuperConstructorCall());
- if (FLAG_pretenuring_call_new) {
- // Put the AllocationSite from the feedback vector into ebx.
- // By adding kPointerSize we encode that we know the AllocationSite
- // entry is at the feedback vector slot given by edx + 1.
- __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
- FixedArray::kHeaderSize + kPointerSize));
- } else {
- Label feedback_register_initialized;
- // Put the AllocationSite from the feedback vector into ebx, or undefined.
- __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
- FixedArray::kHeaderSize));
- Handle<Map> allocation_site_map =
- isolate()->factory()->allocation_site_map();
- __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
- __ j(equal, &feedback_register_initialized);
- __ mov(ebx, isolate()->factory()->undefined_value());
- __ bind(&feedback_register_initialized);
- }
+ Label feedback_register_initialized;
+ // Put the AllocationSite from the feedback vector into ebx, or undefined.
+ __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
+ FixedArray::kHeaderSize));
+ Handle<Map> allocation_site_map =
+ isolate()->factory()->allocation_site_map();
+ __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
+ __ j(equal, &feedback_register_initialized);
+ __ mov(ebx, isolate()->factory()->undefined_value());
+ __ bind(&feedback_register_initialized);
__ AssertUndefinedOrAllocationSite(ebx);
}
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
- bool is_api_function,
- bool create_memento) {
+ bool is_api_function) {
// ----------- S t a t e -------------
// -- a0 : number of arguments
// -- a1 : constructor function
// -- sp[...]: constructor arguments
// -----------------------------------
- // Should never create mementos for api functions.
- DCHECK(!is_api_function || !create_memento);
-
Isolate* isolate = masm->isolate();
// Enter a construct frame.
// a2: initial map
Label rt_call_reload_new_target;
__ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
- if (create_memento) {
- __ Addu(a3, a3, Operand(AllocationMemento::kSize / kPointerSize));
- }
__ Allocate(a3, t4, t5, t6, &rt_call_reload_new_target, SIZE_IN_WORDS);
// initial map and properties and elements are set to empty fixed array.
// a1: constructor function
// a2: initial map
- // a3: object size (including memento if create_memento)
+ // a3: object size
// t4: JSObject (not tagged)
__ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
__ mov(t5, t4);
// Fill all the in-object properties with appropriate filler.
// a1: constructor function
// a2: initial map
- // a3: object size (in words, including memento if create_memento)
+ // a3: object size (in words)
// t4: JSObject (not tagged)
// t5: First in-object property of JSObject (not tagged)
// t2: slack tracking counter (non-API function case)
__ bind(&no_inobject_slack_tracking);
}
- if (create_memento) {
- __ Subu(a0, a3, Operand(AllocationMemento::kSize / kPointerSize));
- __ sll(a0, a0, kPointerSizeLog2);
- __ Addu(a0, t4, Operand(a0)); // End of object.
- __ InitializeFieldsWithFiller(t5, a0, t7);
-
- // Fill in memento fields.
- // t5: points to the allocated but uninitialized memento.
- __ LoadRoot(t7, Heap::kAllocationMementoMapRootIndex);
- DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
- __ sw(t7, MemOperand(t5));
- __ Addu(t5, t5, kPointerSize);
- // Load the AllocationSite.
- __ lw(t7, MemOperand(sp, 3 * kPointerSize));
- __ AssertUndefinedOrAllocationSite(a2, t0);
- DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
- __ sw(t7, MemOperand(t5));
- __ Addu(t5, t5, kPointerSize);
- } else {
- __ sll(at, a3, kPointerSizeLog2);
- __ Addu(a0, t4, Operand(at)); // End of object.
- __ InitializeFieldsWithFiller(t5, a0, t7);
- }
+ __ sll(at, a3, kPointerSizeLog2);
+ __ Addu(a0, t4, Operand(at)); // End of object.
+ __ InitializeFieldsWithFiller(t5, a0, t7);
// Add the object tag to make the JSObject real, so that we can continue
// and jump into the continuation code at any time from now on.
// a1: constructor function
// a3: original constructor
__ bind(&rt_call);
- if (create_memento) {
- // Get the cell or allocation site.
- __ lw(a2, MemOperand(sp, 3 * kPointerSize));
- __ push(a2); // argument 1: allocation site
- }
__ Push(a1, a3); // arguments 2-3 / 1-2
- if (create_memento) {
- __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
- } else {
- __ CallRuntime(Runtime::kNewObject, 2);
- }
+ __ CallRuntime(Runtime::kNewObject, 2);
__ mov(t4, v0);
- // Runtime_NewObjectWithAllocationSite increments allocation count.
- // Skip the increment.
- Label count_incremented;
- if (create_memento) {
- __ jmp(&count_incremented);
- }
-
// Receiver for constructor call allocated.
// t4: JSObject
__ bind(&allocated);
- if (create_memento) {
- __ lw(a2, MemOperand(sp, 3 * kPointerSize));
- __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
- __ Branch(&count_incremented, eq, a2, Operand(t5));
- // a2 is an AllocationSite. We are creating a memento from it, so we
- // need to increment the memento create count.
- __ lw(a3, FieldMemOperand(a2,
- AllocationSite::kPretenureCreateCountOffset));
- __ Addu(a3, a3, Operand(Smi::FromInt(1)));
- __ sw(a3, FieldMemOperand(a2,
- AllocationSite::kPretenureCreateCountOffset));
- __ bind(&count_incremented);
- }
-
// Restore the parameters.
__ Pop(a3); // new.target
__ Pop(a1);
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
+ Generate_JSConstructStubHelper(masm, false);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, true, false);
+ Generate_JSConstructStubHelper(masm, true);
}
__ Branch(&done, eq, t2, Operand(at));
__ lw(feedback_map, FieldMemOperand(t2, HeapObject::kMapOffset));
__ LoadRoot(at, Heap::kWeakCellMapRootIndex);
- __ Branch(FLAG_pretenuring_call_new ? &miss : &check_allocation_site, ne,
- feedback_map, Operand(at));
+ __ Branch(&check_allocation_site, ne, feedback_map, Operand(at));
// If the weak cell is cleared, we have a new chance to become monomorphic.
__ JumpIfSmi(weak_value, &initialize);
__ jmp(&megamorphic);
- if (!FLAG_pretenuring_call_new) {
- __ bind(&check_allocation_site);
- // If we came here, we need to see if we are the array function.
- // If we didn't have a matching function, and we didn't find the megamorph
- // sentinel, then we have in the slot either some other function or an
- // AllocationSite.
- __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
- __ Branch(&miss, ne, feedback_map, Operand(at));
+ __ bind(&check_allocation_site);
+ // If we came here, we need to see if we are the array function.
+ // If we didn't have a matching function, and we didn't find the megamorph
+ // sentinel, then we have in the slot either some other function or an
+ // AllocationSite.
+ __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
+ __ Branch(&miss, ne, feedback_map, Operand(at));
- // Make sure the function is the Array() function
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t2);
- __ Branch(&megamorphic, ne, a1, Operand(t2));
- __ jmp(&done);
- }
+ // Make sure the function is the Array() function
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t2);
+ __ Branch(&megamorphic, ne, a1, Operand(t2));
+ __ jmp(&done);
__ bind(&miss);
// An uninitialized cache is patched with the function.
__ bind(&initialize);
- if (!FLAG_pretenuring_call_new) {
- // Make sure the function is the Array() function.
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t2);
- __ Branch(&not_array_function, ne, a1, Operand(t2));
-
- // The target function is the Array constructor,
- // Create an AllocationSite if we don't already have it, store it in the
- // slot.
- CreateAllocationSiteStub create_stub(masm->isolate());
- CallStubInRecordCallTarget(masm, &create_stub, is_super);
- __ Branch(&done);
-
- __ bind(&not_array_function);
- }
-
- CreateWeakCellStub create_stub(masm->isolate());
+ // Make sure the function is the Array() function.
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t2);
+ __ Branch(&not_array_function, ne, a1, Operand(t2));
+
+ // The target function is the Array constructor,
+ // Create an AllocationSite if we don't already have it, store it in the
+ // slot.
+ CreateAllocationSiteStub create_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &create_stub, is_super);
+ __ Branch(&done);
+
+ __ bind(&not_array_function);
+ CreateWeakCellStub weak_cell_stub(masm->isolate());
+ CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super);
__ bind(&done);
}
__ sll(at, a3, kPointerSizeLog2 - kSmiTagSize);
__ Addu(t1, a2, at);
- if (FLAG_pretenuring_call_new) {
- // Put the AllocationSite from the feedback vector into a2.
- // By adding kPointerSize we encode that we know the AllocationSite
- // entry is at the feedback vector slot given by a3 + 1.
- __ lw(a2, FieldMemOperand(t1, FixedArray::kHeaderSize + kPointerSize));
- } else {
- Label feedback_register_initialized;
- // Put the AllocationSite from the feedback vector into a2, or undefined.
- __ lw(a2, FieldMemOperand(t1, FixedArray::kHeaderSize));
- __ lw(t1, FieldMemOperand(a2, AllocationSite::kMapOffset));
- __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
- __ Branch(&feedback_register_initialized, eq, t1, Operand(at));
- __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
- __ bind(&feedback_register_initialized);
- }
+ Label feedback_register_initialized;
+ // Put the AllocationSite from the feedback vector into a2, or undefined.
+ __ lw(a2, FieldMemOperand(t1, FixedArray::kHeaderSize));
+ __ lw(t1, FieldMemOperand(a2, AllocationSite::kMapOffset));
+ __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
+ __ Branch(&feedback_register_initialized, eq, t1, Operand(at));
+ __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
+ __ bind(&feedback_register_initialized);
__ AssertUndefinedOrAllocationSite(a2, t1);
}
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
- bool is_api_function,
- bool create_memento) {
+ bool is_api_function) {
// ----------- S t a t e -------------
// -- a0 : number of arguments
// -- a1 : constructor function
// -- sp[...]: constructor arguments
// -----------------------------------
- // Should never create mementos for api functions.
- DCHECK(!is_api_function || !create_memento);
-
Isolate* isolate = masm->isolate();
// Enter a construct frame.
// a2: initial map
Label rt_call_reload_new_target;
__ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
- if (create_memento) {
- __ Daddu(a3, a3, Operand(AllocationMemento::kSize / kPointerSize));
- }
__ Allocate(a3, t0, t1, t2, &rt_call_reload_new_target, SIZE_IN_WORDS);
// initial map and properties and elements are set to empty fixed array.
// a1: constructor function
// a2: initial map
- // a3: object size (including memento if create_memento)
+ // a3: object size
// t0: JSObject (not tagged)
__ LoadRoot(t2, Heap::kEmptyFixedArrayRootIndex);
__ mov(t1, t0);
// Fill all the in-object properties with appropriate filler.
// a1: constructor function
// a2: initial map
- // a3: object size (in words, including memento if create_memento)
+ // a3: object size (in words)
// t0: JSObject (not tagged)
// t1: First in-object property of JSObject (not tagged)
// a6: slack tracking counter (non-API function case)
__ bind(&no_inobject_slack_tracking);
}
- if (create_memento) {
- __ Dsubu(a0, a3, Operand(AllocationMemento::kSize / kPointerSize));
- __ dsll(a0, a0, kPointerSizeLog2);
- __ Daddu(a0, t0, Operand(a0)); // End of object.
- __ InitializeFieldsWithFiller(t1, a0, t3);
-
- // Fill in memento fields.
- // t1: points to the allocated but uninitialized memento.
- __ LoadRoot(t3, Heap::kAllocationMementoMapRootIndex);
- DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
- __ sd(t3, MemOperand(t1));
- __ Daddu(t1, t1, kPointerSize);
- // Load the AllocationSite.
- __ ld(t3, MemOperand(sp, 3 * kPointerSize));
- __ AssertUndefinedOrAllocationSite(t3, a0);
- DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
- __ sd(t3, MemOperand(t1));
- __ Daddu(t1, t1, kPointerSize);
- } else {
- __ dsll(at, a3, kPointerSizeLog2);
- __ Daddu(a0, t0, Operand(at)); // End of object.
- __ InitializeFieldsWithFiller(t1, a0, t3);
- }
+ __ dsll(at, a3, kPointerSizeLog2);
+ __ Daddu(a0, t0, Operand(at)); // End of object.
+ __ InitializeFieldsWithFiller(t1, a0, t3);
// Add the object tag to make the JSObject real, so that we can continue
// and jump into the continuation code at any time from now on.
// a1: constructor function
// a3: original constructor
__ bind(&rt_call);
- if (create_memento) {
- // Get the cell or allocation site.
- __ ld(a2, MemOperand(sp, 3 * kPointerSize));
- __ push(a2); // argument 1: allocation site
- }
__ Push(a1, a3); // arguments 2-3 / 1-2
- if (create_memento) {
- __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
- } else {
- __ CallRuntime(Runtime::kNewObject, 2);
- }
+ __ CallRuntime(Runtime::kNewObject, 2);
__ mov(t0, v0);
- // Runtime_NewObjectWithAllocationSite increments allocation count.
- // Skip the increment.
- Label count_incremented;
- if (create_memento) {
- __ jmp(&count_incremented);
- }
-
// Receiver for constructor call allocated.
// t0: JSObject
__ bind(&allocated);
- if (create_memento) {
- __ ld(a2, MemOperand(sp, 3 * kPointerSize));
- __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
- __ Branch(&count_incremented, eq, a2, Operand(t1));
- // a2 is an AllocationSite. We are creating a memento from it, so we
- // need to increment the memento create count.
- __ ld(a3, FieldMemOperand(a2,
- AllocationSite::kPretenureCreateCountOffset));
- __ Daddu(a3, a3, Operand(Smi::FromInt(1)));
- __ sd(a3, FieldMemOperand(a2,
- AllocationSite::kPretenureCreateCountOffset));
- __ bind(&count_incremented);
- }
-
// Restore the parameters.
__ Pop(a3); // new.target
__ Pop(a1);
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
+ Generate_JSConstructStubHelper(masm, false);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, true, false);
+ Generate_JSConstructStubHelper(masm, true);
}
__ Branch(&done, eq, a5, Operand(at));
__ ld(feedback_map, FieldMemOperand(a5, HeapObject::kMapOffset));
__ LoadRoot(at, Heap::kWeakCellMapRootIndex);
- __ Branch(FLAG_pretenuring_call_new ? &miss : &check_allocation_site, ne,
- feedback_map, Operand(at));
+ __ Branch(&check_allocation_site, ne, feedback_map, Operand(at));
// If the weak cell is cleared, we have a new chance to become monomorphic.
__ JumpIfSmi(weak_value, &initialize);
__ jmp(&megamorphic);
- if (!FLAG_pretenuring_call_new) {
- __ bind(&check_allocation_site);
- // If we came here, we need to see if we are the array function.
- // If we didn't have a matching function, and we didn't find the megamorph
- // sentinel, then we have in the slot either some other function or an
- // AllocationSite.
- __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
- __ Branch(&miss, ne, feedback_map, Operand(at));
+ __ bind(&check_allocation_site);
+ // If we came here, we need to see if we are the array function.
+ // If we didn't have a matching function, and we didn't find the megamorph
+ // sentinel, then we have in the slot either some other function or an
+ // AllocationSite.
+ __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
+ __ Branch(&miss, ne, feedback_map, Operand(at));
- // Make sure the function is the Array() function
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, a5);
- __ Branch(&megamorphic, ne, a1, Operand(a5));
- __ jmp(&done);
- }
+ // Make sure the function is the Array() function
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, a5);
+ __ Branch(&megamorphic, ne, a1, Operand(a5));
+ __ jmp(&done);
__ bind(&miss);
// An uninitialized cache is patched with the function.
__ bind(&initialize);
- if (!FLAG_pretenuring_call_new) {
- // Make sure the function is the Array() function.
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, a5);
- __ Branch(&not_array_function, ne, a1, Operand(a5));
-
- // The target function is the Array constructor,
- // Create an AllocationSite if we don't already have it, store it in the
- // slot.
- CreateAllocationSiteStub create_stub(masm->isolate());
- CallStubInRecordCallTarget(masm, &create_stub, is_super);
- __ Branch(&done);
+ // Make sure the function is the Array() function.
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, a5);
+ __ Branch(&not_array_function, ne, a1, Operand(a5));
+
+ // The target function is the Array constructor,
+ // Create an AllocationSite if we don't already have it, store it in the
+ // slot.
+ CreateAllocationSiteStub create_stub(masm->isolate());
+ CallStubInRecordCallTarget(masm, &create_stub, is_super);
+ __ Branch(&done);
- __ bind(&not_array_function);
- }
+ __ bind(&not_array_function);
- CreateWeakCellStub create_stub(masm->isolate());
- CallStubInRecordCallTarget(masm, &create_stub, is_super);
+ CreateWeakCellStub weak_cell_stub(masm->isolate());
+ CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super);
__ bind(&done);
}
__ dsrl(at, a3, 32 - kPointerSizeLog2);
__ Daddu(a5, a2, at);
- if (FLAG_pretenuring_call_new) {
- // Put the AllocationSite from the feedback vector into a2.
- // By adding kPointerSize we encode that we know the AllocationSite
- // entry is at the feedback vector slot given by a3 + 1.
- __ ld(a2, FieldMemOperand(a5, FixedArray::kHeaderSize + kPointerSize));
- } else {
- Label feedback_register_initialized;
- // Put the AllocationSite from the feedback vector into a2, or undefined.
- __ ld(a2, FieldMemOperand(a5, FixedArray::kHeaderSize));
- __ ld(a5, FieldMemOperand(a2, AllocationSite::kMapOffset));
- __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
- __ Branch(&feedback_register_initialized, eq, a5, Operand(at));
- __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
- __ bind(&feedback_register_initialized);
- }
+ Label feedback_register_initialized;
+ // Put the AllocationSite from the feedback vector into a2, or undefined.
+ __ ld(a2, FieldMemOperand(a5, FixedArray::kHeaderSize));
+ __ ld(a5, FieldMemOperand(a2, AllocationSite::kMapOffset));
+ __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
+ __ Branch(&feedback_register_initialized, eq, a5, Operand(at));
+ __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
+ __ bind(&feedback_register_initialized);
__ AssertUndefinedOrAllocationSite(a2, a5);
}
// elements kind is the initial elements kind.
AllocationSiteMode AllocationSite::GetMode(
ElementsKind boilerplate_elements_kind) {
- if (FLAG_pretenuring_call_new ||
- IsFastSmiElementsKind(boilerplate_elements_kind)) {
+ if (IsFastSmiElementsKind(boilerplate_elements_kind)) {
return TRACK_ALLOCATION_SITE;
}
AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
ElementsKind to) {
- if (FLAG_pretenuring_call_new ||
- (IsFastSmiElementsKind(from) &&
- IsMoreGeneralElementsKindTransition(from, to))) {
+ if (IsFastSmiElementsKind(from) &&
+ IsMoreGeneralElementsKindTransition(from, to)) {
return TRACK_ALLOCATION_SITE;
}
}
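With the flag gone, whether an elements kind is tracked depends only on the kind itself. Assuming the usual DONT_TRACK_ALLOCATION_SITE fallthrough in the elided else branches, the two overloads now behave like this:

// Illustrative expectations (DCHECK-style, not a real test in this patch):
DCHECK_EQ(TRACK_ALLOCATION_SITE, AllocationSite::GetMode(FAST_SMI_ELEMENTS));
DCHECK_EQ(DONT_TRACK_ALLOCATION_SITE, AllocationSite::GetMode(FAST_ELEMENTS));
DCHECK_EQ(TRACK_ALLOCATION_SITE,
          AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS));
DCHECK_EQ(DONT_TRACK_ALLOCATION_SITE,
          AllocationSite::GetMode(FAST_ELEMENTS, FAST_HOLEY_ELEMENTS));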
-RUNTIME_FUNCTION(Runtime_NewObjectWithAllocationSite) {
- HandleScope scope(isolate);
- DCHECK(args.length() == 3);
- CONVERT_ARG_HANDLE_CHECKED(Object, original_constructor, 2);
- CONVERT_ARG_HANDLE_CHECKED(Object, constructor, 1);
- CONVERT_ARG_HANDLE_CHECKED(Object, feedback, 0);
- Handle<AllocationSite> site;
- if (feedback->IsAllocationSite()) {
- // The feedback can be an AllocationSite or undefined.
- site = Handle<AllocationSite>::cast(feedback);
- }
- return Runtime_NewObjectHelper(isolate, constructor, original_constructor,
- site);
-}
-
-
RUNTIME_FUNCTION(Runtime_FinalizeInstanceSize) {
HandleScope scope(isolate);
DCHECK(args.length() == 1);
F(ToFastProperties, 1, 1) \
F(AllocateHeapNumber, 0, 1) \
F(NewObject, 2, 1) \
- F(NewObjectWithAllocationSite, 3, 1) \
F(FinalizeInstanceSize, 1, 1) \
F(GlobalProxy, 1, 1) \
F(LookupAccessor, 3, 1) \
bool TypeFeedbackOracle::CallNewIsMonomorphic(FeedbackVectorSlot slot) {
Handle<Object> info = GetInfo(slot);
- return FLAG_pretenuring_call_new
- ? info->IsJSFunction()
- : info->IsAllocationSite() || info->IsJSFunction();
+ return info->IsAllocationSite() || info->IsJSFunction();
}
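A slot is now a monomorphic `new` site whenever it holds either the target JSFunction (the WeakCell path) or an AllocationSite (the Array() path); the two accessors that follow split those cases. A short usage sketch, with `oracle` and `slot` as hypothetical locals:

// Hypothetical usage; mirrors how the typer consumes this oracle.
if (oracle->CallNewIsMonomorphic(slot)) {
  Handle<JSFunction> target = oracle->GetCallNewTarget(slot);
  Handle<AllocationSite> site = oracle->GetCallNewAllocationSite(slot);
  // 'site' is null unless the slot held an AllocationSite, i.e. unless the
  // recorded target is the Array constructor (see the null return above).
}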
Handle<JSFunction> TypeFeedbackOracle::GetCallNewTarget(
FeedbackVectorSlot slot) {
Handle<Object> info = GetInfo(slot);
- if (FLAG_pretenuring_call_new || info->IsJSFunction()) {
+ if (info->IsJSFunction()) {
return Handle<JSFunction>::cast(info);
}
Handle<AllocationSite> TypeFeedbackOracle::GetCallNewAllocationSite(
FeedbackVectorSlot slot) {
Handle<Object> info = GetInfo(slot);
- if (FLAG_pretenuring_call_new || info->IsAllocationSite()) {
+ if (info->IsAllocationSite()) {
return Handle<AllocationSite>::cast(info);
}
return Handle<AllocationSite>::null();
void AstTyper::VisitCallNew(CallNew* expr) {
// Collect type feedback.
FeedbackVectorSlot allocation_site_feedback_slot =
- FLAG_pretenuring_call_new ? expr->AllocationSiteFeedbackSlot()
- : expr->CallNewFeedbackSlot();
+ expr->CallNewFeedbackSlot();
expr->set_allocation_site(
oracle()->GetCallNewAllocationSite(allocation_site_feedback_slot));
bool monomorphic =
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
- bool is_api_function,
- bool create_memento) {
+ bool is_api_function) {
// ----------- S t a t e -------------
// -- rax: number of arguments
// -- rdi: constructor function
// -- rdx: original constructor
// -----------------------------------
- // Should never create mementos for api functions.
- DCHECK(!is_api_function || !create_memento);
-
// Enter a construct frame.
{
FrameScope scope(masm, StackFrame::CONSTRUCT);
// Now allocate the JSObject on the heap.
__ movzxbp(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
__ shlp(rdi, Immediate(kPointerSizeLog2));
- if (create_memento) {
- __ addp(rdi, Immediate(AllocationMemento::kSize));
- }
// rdi: size of new object
__ Allocate(rdi,
rbx,
no_reg,
&rt_call,
NO_ALLOCATION_FLAGS);
- Factory* factory = masm->isolate()->factory();
// Allocated the JSObject, now initialize the fields.
// rax: initial map
// rbx: JSObject (not HeapObject tagged - the actual address).
- // rdi: start of next object (including memento if create_memento)
+ // rdi: start of next object
__ movp(Operand(rbx, JSObject::kMapOffset), rax);
__ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
__ movp(Operand(rbx, JSObject::kPropertiesOffset), rcx);
// Set extra fields in the newly allocated object.
// rax: initial map
// rbx: JSObject
- // rdi: start of next object (including memento if create_memento)
+ // rdi: start of next object
// rsi: slack tracking counter (non-API function case)
__ leap(rcx, Operand(rbx, JSObject::kHeaderSize));
__ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
__ bind(&no_inobject_slack_tracking);
}
- if (create_memento) {
- __ leap(rsi, Operand(rdi, -AllocationMemento::kSize));
- __ InitializeFieldsWithFiller(rcx, rsi, rdx);
- // Fill in memento fields if necessary.
- // rsi: points to the allocated but uninitialized memento.
- __ Move(Operand(rsi, AllocationMemento::kMapOffset),
- factory->allocation_memento_map());
- // Get the cell or undefined.
- __ movp(rdx, Operand(rsp, 3 * kPointerSize));
- __ AssertUndefinedOrAllocationSite(rdx);
- __ movp(Operand(rsi, AllocationMemento::kAllocationSiteOffset), rdx);
- } else {
- __ InitializeFieldsWithFiller(rcx, rdi, rdx);
- }
+ __ InitializeFieldsWithFiller(rcx, rdi, rdx);
// Add the object tag to make the JSObject real, so that we can continue
// and jump into the continuation code at any time from now on.
// rdx: original constructor
__ bind(&rt_call);
int offset = kPointerSize;
- if (create_memento) {
- // Get the cell or allocation site.
- __ movp(rdi, Operand(rsp, kPointerSize * 3));
- __ Push(rdi); // argument 1: allocation site
- offset += kPointerSize;
- }
// Must restore rsi (context) and rdi (constructor) before calling runtime.
__ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
__ movp(rdi, Operand(rsp, offset));
__ Push(rdi); // argument 2/1: constructor function
__ Push(rdx); // argument 3/2: original constructor
- if (create_memento) {
- __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
- } else {
- __ CallRuntime(Runtime::kNewObject, 2);
- }
+ __ CallRuntime(Runtime::kNewObject, 2);
__ movp(rbx, rax); // store result in rbx
- // Runtime_NewObjectWithAllocationSite increments allocation count.
- // Skip the increment.
- Label count_incremented;
- if (create_memento) {
- __ jmp(&count_incremented);
- }
-
// New object allocated.
// rbx: newly allocated object
__ bind(&allocated);
- if (create_memento) {
- __ movp(rcx, Operand(rsp, 3 * kPointerSize));
- __ Cmp(rcx, masm->isolate()->factory()->undefined_value());
- __ j(equal, &count_incremented);
- // rcx is an AllocationSite. We are creating a memento from it, so we
- // need to increment the memento create count.
- __ SmiAddConstant(
- FieldOperand(rcx, AllocationSite::kPretenureCreateCountOffset),
- Smi::FromInt(1));
- __ bind(&count_incremented);
- }
-
// Restore the parameters.
__ Pop(rdx);
__ Pop(rdi);
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
+ Generate_JSConstructStubHelper(masm, false);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, true, false);
+ Generate_JSConstructStubHelper(masm, true);
}
__ j(equal, &done, Label::kFar);
__ CompareRoot(FieldOperand(r11, HeapObject::kMapOffset),
Heap::kWeakCellMapRootIndex);
- __ j(not_equal, FLAG_pretenuring_call_new ? &miss : &check_allocation_site);
+ __ j(not_equal, &check_allocation_site);
// If the weak cell is cleared, we have a new chance to become monomorphic.
__ CheckSmi(FieldOperand(r11, WeakCell::kValueOffset));
__ j(equal, &initialize);
__ jmp(&megamorphic);
- if (!FLAG_pretenuring_call_new) {
- __ bind(&check_allocation_site);
- // If we came here, we need to see if we are the array function.
- // If we didn't have a matching function, and we didn't find the megamorph
- // sentinel, then we have in the slot either some other function or an
- // AllocationSite.
- __ CompareRoot(FieldOperand(r11, 0), Heap::kAllocationSiteMapRootIndex);
- __ j(not_equal, &miss);
+ __ bind(&check_allocation_site);
+ // If we came here, we need to see if we are the array function.
+ // If we didn't have a matching function, and we didn't find the megamorphic
+ // sentinel, then we have in the slot either some other function or an
+ // AllocationSite.
+ __ CompareRoot(FieldOperand(r11, 0), Heap::kAllocationSiteMapRootIndex);
+ __ j(not_equal, &miss);
- // Make sure the function is the Array() function
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r11);
- __ cmpp(rdi, r11);
- __ j(not_equal, &megamorphic);
- __ jmp(&done);
- }
+ // Make sure the function is the Array() function
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r11);
+ __ cmpp(rdi, r11);
+ __ j(not_equal, &megamorphic);
+ __ jmp(&done);
__ bind(&miss);
// indicate the ElementsKind if function is the Array constructor.
__ bind(&initialize);
- if (!FLAG_pretenuring_call_new) {
- // Make sure the function is the Array() function
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r11);
- __ cmpp(rdi, r11);
- __ j(not_equal, ¬_array_function);
-
- CreateAllocationSiteStub create_stub(isolate);
- CallStubInRecordCallTarget(masm, &create_stub, is_super);
- __ jmp(&done_no_smi_convert);
-
- __ bind(¬_array_function);
- }
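+ // An Array() target is cached with an AllocationSite; any other target
+ // is cached with a WeakCell below.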
+ // Make sure the function is the Array() function
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r11);
+ __ cmpp(rdi, r11);
+ __ j(not_equal, ¬_array_function);
- CreateWeakCellStub create_stub(isolate);
+ CreateAllocationSiteStub create_stub(isolate);
CallStubInRecordCallTarget(masm, &create_stub, is_super);
__ jmp(&done_no_smi_convert);
+ __ bind(¬_array_function);
+ CreateWeakCellStub weak_cell_stub(isolate);
+ CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super);
+ __ jmp(&done_no_smi_convert);
+
__ bind(&done);
__ Integer32ToSmi(rdx, rdx);
GenerateRecordCallTarget(masm, IsSuperConstructorCall());
__ SmiToInteger32(rdx, rdx);
- if (FLAG_pretenuring_call_new) {
- // Put the AllocationSite from the feedback vector into ebx.
- // By adding kPointerSize we encode that we know the AllocationSite
- // entry is at the feedback vector slot given by rdx + 1.
- __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
- FixedArray::kHeaderSize + kPointerSize));
- } else {
- Label feedback_register_initialized;
- // Put the AllocationSite from the feedback vector into rbx, or undefined.
- __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
- FixedArray::kHeaderSize));
- __ CompareRoot(FieldOperand(rbx, 0), Heap::kAllocationSiteMapRootIndex);
- __ j(equal, &feedback_register_initialized);
- __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
- __ bind(&feedback_register_initialized);
- }
+ Label feedback_register_initialized;
+ // Put the AllocationSite from the feedback vector into rbx, or undefined.
+ __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
+ FixedArray::kHeaderSize));
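+ // If the slot does not hold an AllocationSite, pass undefined instead.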
+ __ CompareRoot(FieldOperand(rbx, 0), Heap::kAllocationSiteMapRootIndex);
+ __ j(equal, &feedback_register_initialized);
+ __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
+ __ bind(&feedback_register_initialized);
__ AssertUndefinedOrAllocationSite(rbx);
}
}
-// Make sure pretenuring feedback is gathered for constructed objects as well
-// as for literals.
-TEST(OptimizedPretenuringConstructorCalls) {
- if (!i::FLAG_pretenuring_call_new) {
- // FLAG_pretenuring_call_new needs to be synced with the snapshot.
- return;
- }
- i::FLAG_allow_natives_syntax = true;
- i::FLAG_expose_gc = true;
- CcTest::InitializeVM();
- if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
- if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
- v8::HandleScope scope(CcTest::isolate());
-
- // Grow new space unitl maximum capacity reached.
- while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
- CcTest::heap()->new_space()->Grow();
- }
-
- i::ScopedVector<char> source(1024);
- // Call new is doing slack tracking for the first
- // JSFunction::kGenerousAllocationCount allocations, and we can't find
- // mementos during that time.
- i::SNPrintF(
- source,
- "var number_elements = %d;"
- "var elements = new Array(number_elements);"
- "function foo() {"
- " this.a = 3;"
- " this.b = {};"
- "}"
- "function f() {"
- " for (var i = 0; i < number_elements; i++) {"
- " elements[i] = new foo();"
- " }"
- " return elements[number_elements - 1];"
- "};"
- "f(); gc();"
- "f(); f();"
- "%%OptimizeFunctionOnNextCall(f);"
- "f();",
- AllocationSite::kPretenureMinimumCreated +
- JSFunction::kGenerousAllocationCount);
-
- v8::Local<v8::Value> res = CompileRun(source.start());
-
- Handle<JSObject> o =
- v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
-
- CHECK(CcTest::heap()->InOldSpace(*o));
-}
-
-
-TEST(OptimizedPretenuringCallNew) {
- if (!i::FLAG_pretenuring_call_new) {
- // FLAG_pretenuring_call_new needs to be synced with the snapshot.
- return;
- }
- i::FLAG_allow_natives_syntax = true;
- i::FLAG_expose_gc = true;
- CcTest::InitializeVM();
- if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
- if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
- v8::HandleScope scope(CcTest::isolate());
-
- // Grow new space unitl maximum capacity reached.
- while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
- CcTest::heap()->new_space()->Grow();
- }
-
- i::ScopedVector<char> source(1024);
- // Call new is doing slack tracking for the first
- // JSFunction::kGenerousAllocationCount allocations, and we can't find
- // mementos during that time.
- i::SNPrintF(
- source,
- "var number_elements = %d;"
- "var elements = new Array(number_elements);"
- "function g() { this.a = 0; }"
- "function f() {"
- " for (var i = 0; i < number_elements; i++) {"
- " elements[i] = new g();"
- " }"
- " return elements[number_elements - 1];"
- "};"
- "f(); gc();"
- "f(); f();"
- "%%OptimizeFunctionOnNextCall(f);"
- "f();",
- AllocationSite::kPretenureMinimumCreated +
- JSFunction::kGenerousAllocationCount);
-
- v8::Local<v8::Value> res = CompileRun(source.start());
-
- Handle<JSObject> o =
- v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
- CHECK(CcTest::heap()->InOldSpace(*o));
-}
-
-
// Test regular array literals allocation.
TEST(OptimizedAllocationArrayLiterals) {
i::FLAG_allow_natives_syntax = true;
// Force GC to test the poisoned memento handling
CcTest::i_isolate()->heap()->CollectGarbage(i::NEW_SPACE);
}
-
-
-TEST(PretenuringCallNew) {
- CcTest::InitializeVM();
- if (!i::FLAG_allocation_site_pretenuring) return;
- if (!i::FLAG_pretenuring_call_new) return;
- if (i::FLAG_always_opt) return;
-
- v8::HandleScope scope(CcTest::isolate());
- Isolate* isolate = CcTest::i_isolate();
- Heap* heap = isolate->heap();
-
- int call_count = 10;
- i::ScopedVector<char> test_buf(1024);
- const char* program =
- "function f() {"
- " this.a = 3;"
- " this.b = {};"
- " return this;"
- "};"
- "var a;"
- "for(var i = 0; i < %d; i++) {"
- " a = new f();"
- "}"
- "a;";
- i::SNPrintF(test_buf, program, call_count);
- v8::Local<v8::Value> res = CompileRun(test_buf.start());
- Handle<JSObject> o =
- v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
-
- // The object of class f should have a memento secreted behind it.
- Address memento_address = o->address() + o->map()->instance_size();
- AllocationMemento* memento =
- reinterpret_cast<AllocationMemento*>(memento_address + kHeapObjectTag);
- CHECK_EQ(memento->map(), heap->allocation_memento_map());
-
- // Furthermore, how many mementos did we create? The count should match
- // call_count. Note, that mementos are allocated during the inobject slack
- // tracking phase.
- AllocationSite* site = memento->GetAllocationSite();
- CHECK_EQ(call_count, site->pretenure_create_count()->value());
-}