static void Generate_JSConstructStubHelper(MacroAssembler* masm,
bool is_api_function,
- bool count_constructions) {
+ bool count_constructions,
+ bool create_memento) {
// ----------- S t a t e -------------
// -- x0 : number of arguments
// -- x1 : constructor function
+ // -- x2 : allocation site or undefined
// -- lr : return address
// -- sp[...]: constructor arguments
// -----------------------------------
ASM_LOCATION("Builtins::Generate_JSConstructStubHelper");
// Should never count constructions for api objects.
ASSERT(!is_api_function || !count_constructions);
+ // Should never create mementos for api functions.
+ ASSERT(!is_api_function || !create_memento);
+ // Should never create mementos before slack tracking is finished.
+ ASSERT(!count_constructions || !create_memento);
Isolate* isolate = masm->isolate();
{
FrameScope scope(masm, StackFrame::CONSTRUCT);
- // Preserve the two incoming parameters on the stack.
+ // Preserve the three incoming parameters on the stack.
+ if (create_memento) {
+ __ AssertUndefinedOrAllocationSite(x2, x10);
+ __ Push(x2);
+ }
+
Register argc = x0;
Register constructor = x1;
// x1: constructor function
Register obj_size = x3;
Register new_obj = x4;
__ Ldrb(obj_size, FieldMemOperand(init_map, Map::kInstanceSizeOffset));
- __ Allocate(obj_size, new_obj, x10, x11, &rt_call, SIZE_IN_WORDS);
+ if (create_memento) {
+ __ Add(x7, obj_size,
+ Operand(AllocationMemento::kSize / kPointerSize));
+ __ Allocate(x7, new_obj, x10, x11, &rt_call, SIZE_IN_WORDS);
+ } else {
+ __ Allocate(obj_size, new_obj, x10, x11, &rt_call, SIZE_IN_WORDS);
+ }
// Allocated the JSObject, now initialize the fields. Map is set to
// initial map and properties and elements are set to empty fixed array.
__ FillFields(first_non_prealloc, non_prealloc_fields,
one_pointer_filler);
prop_fields = NoReg;
+ } else if (create_memento) {
+ // Fill the property fields with undef.
+ __ FillFields(first_prop, prop_fields, undef);
+ __ Add(first_prop, new_obj, Operand(obj_size, LSL, kPointerSizeLog2));
+ __ LoadRoot(x14, Heap::kAllocationMementoMapRootIndex);
+ ASSERT_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
+ __ Str(x14, MemOperand(first_prop, kPointerSize, PostIndex));
+ // Load the AllocationSite
+ __ Peek(x14, 2 * kXRegSize);
+ ASSERT_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
+ __ Str(x14, MemOperand(first_prop, kPointerSize, PostIndex));
+ first_prop = NoReg;
} else {
// Fill all of the property fields with undef.
__ FillFields(first_prop, prop_fields, undef);
// Allocate the new receiver object using the runtime call.
__ Bind(&rt_call);
- __ Push(constructor); // Argument for Runtime_NewObject.
- __ CallRuntime(Runtime::kNewObject, 1);
- __ Mov(x4, x0);
+ Label count_incremented;
+ if (create_memento) {
+ // Get the allocation site or undefined.
+ __ Peek(x4, 2 * kXRegSize);
+ __ Push(x4);
+ __ Push(constructor); // Argument for Runtime_NewObject.
+ __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2);
+ __ Mov(x4, x0);
+ // If we ended up using the runtime, and we want a memento, then the
+ // runtime call made it for us, and we shouldn't increment the memento
+ // create count here.
+ __ B(&count_incremented);
+ } else {
+ __ Push(constructor); // Argument for Runtime_NewObject.
+ __ CallRuntime(Runtime::kNewObject, 1);
+ __ Mov(x4, x0);
+ }
// Receiver for constructor call allocated.
// x4: JSObject
__ Bind(&allocated);
+
+ if (create_memento) {
+ __ Peek(x10, 2 * kXRegSize);
+ __ JumpIfRoot(x10, Heap::kUndefinedValueRootIndex, &count_incremented);
+ // x10 is an AllocationSite. We are creating a memento from it, so we
+ // need to increment the memento create count.
+ __ Ldr(x5, FieldMemOperand(x10,
+ AllocationSite::kPretenureCreateCountOffset));
+ __ Add(x5, x5, Operand(Smi::FromInt(1)));
+ __ Str(x5, FieldMemOperand(x10,
+ AllocationSite::kPretenureCreateCountOffset));
+ __ Bind(&count_incremented);
+ }
+
__ Push(x4, x4);
// Reload the number of arguments from the stack.
void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false, true);
+ Generate_JSConstructStubHelper(masm, false, true, false);
}
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false, false);
+ Generate_JSConstructStubHelper(masm, false, false, FLAG_pretenuring_call_new);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, true, false);
+ Generate_JSConstructStubHelper(masm, true, false, false);
}
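
For reference, the memento these stubs write on the fast path is just two tagged words placed directly behind the object proper; a minimal sketch of the layout, using only the offsets asserted in the stub code above (names illustrative, not part of the patch):

    // object_start + instance_size + 0            : allocation memento map
    //                                                (AllocationMemento::kMapOffset == 0)
    // object_start + instance_size + kPointerSize : AllocationSite pushed on frame
    //                                                entry (kAllocationSiteOffset)
    // AllocationMemento::kSize == 2 * kPointerSize, hence the extra
    // kSize / kPointerSize words requested from Allocate().
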
__ Cmp(scratch1, function);
__ B(eq, &done);
- // If we came here, we need to see if we are the array function.
- // If we didn't have a matching function, and we didn't find the megamorph
- // sentinel, then we have in the slot either some other function or an
- // AllocationSite. Do a map check on the object in scratch1 register.
- __ Ldr(scratch2, FieldMemOperand(scratch1, AllocationSite::kMapOffset));
- __ JumpIfNotRoot(scratch2, Heap::kAllocationSiteMapRootIndex, &miss);
-
- // Make sure the function is the Array() function
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
- __ Cmp(function, scratch1);
- __ B(ne, &megamorphic);
- __ B(&done);
+ if (!FLAG_pretenuring_call_new) {
+ // If we came here, we need to see if we are the array function.
+ // If we didn't have a matching function, and we didn't find the megamorphic
+ // sentinel, then we have in the slot either some other function or an
+ // AllocationSite. Do a map check on the object in scratch1 register.
+ __ Ldr(scratch2, FieldMemOperand(scratch1, AllocationSite::kMapOffset));
+ __ JumpIfNotRoot(scratch2, Heap::kAllocationSiteMapRootIndex, &miss);
+
+ // Make sure the function is the Array() function
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
+ __ Cmp(function, scratch1);
+ __ B(ne, &megamorphic);
+ __ B(&done);
+ }
__ Bind(&miss);
// An uninitialized cache is patched with the function or sentinel to
// indicate the ElementsKind if function is the Array constructor.
__ Bind(&initialize);
- // Make sure the function is the Array() function
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
- __ Cmp(function, scratch1);
- __ B(ne, &not_array_function);
- // The target function is the Array constructor,
- // Create an AllocationSite if we don't already have it, store it in the slot.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- CreateAllocationSiteStub create_stub;
+ if (!FLAG_pretenuring_call_new) {
+ // Make sure the function is the Array() function
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
+ __ Cmp(function, scratch1);
+ __ B(ne, &not_array_function);
+
+ // The target function is the Array constructor,
+ // Create an AllocationSite if we don't already have it, store it in the
+ // slot.
+ {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+ CreateAllocationSiteStub create_stub;
+
+ // Arguments register must be smi-tagged to call out.
+ __ SmiTag(argc);
+ __ Push(argc, function, feedback_vector, index);
- // Arguments register must be smi-tagged to call out.
- __ SmiTag(argc);
- __ Push(argc, function, feedback_vector, index);
+ // CreateAllocationSiteStub expects the feedback vector in x2 and the slot
+ // index in x3.
+ ASSERT(feedback_vector.Is(x2) && index.Is(x3));
+ __ CallStub(&create_stub);
- // CreateAllocationSiteStub expect the feedback vector in x2 and the slot
- // index in x3.
- ASSERT(feedback_vector.Is(x2) && index.Is(x3));
- __ CallStub(&create_stub);
+ __ Pop(index, feedback_vector, function, argc);
+ __ SmiUntag(argc);
+ }
+ __ B(&done);
- __ Pop(index, feedback_vector, function, argc);
- __ SmiUntag(argc);
+ __ Bind(&not_array_function);
}
- __ B(&done);
- __ Bind(&not_array_function);
// An uninitialized cache is patched with the function.
__ Add(scratch1, feedback_vector,
&slow);
if (RecordCallTarget()) {
- Label feedback_register_initialized;
GenerateRecordCallTarget(masm, x0, function, x2, x3, x4, x5);
- // Put the AllocationSite from the feedback vector into x2, or undefined.
__ Add(x5, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2));
- __ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize));
- __ Ldr(x5, FieldMemOperand(x2, AllocationSite::kMapOffset));
- __ JumpIfRoot(x5, Heap::kAllocationSiteMapRootIndex,
- &feedback_register_initialized);
- __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
- __ bind(&feedback_register_initialized);
+ if (FLAG_pretenuring_call_new) {
+ // Put the AllocationSite from the feedback vector into x2.
+ // By adding kPointerSize we encode that we know the AllocationSite
+ // entry is at the feedback vector slot given by x3 + 1.
+ __ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize + kPointerSize));
+ } else {
+ Label feedback_register_initialized;
+ // Put the AllocationSite from the feedback vector into x2, or undefined.
+ __ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize));
+ __ Ldr(x5, FieldMemOperand(x2, AllocationSite::kMapOffset));
+ __ JumpIfRoot(x5, Heap::kAllocationSiteMapRootIndex,
+ &feedback_register_initialized);
+ __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
+ __ Bind(&feedback_register_initialized);
+ }
+
__ AssertUndefinedOrAllocationSite(x2, x5);
}
Handle<Object> uninitialized =
TypeFeedbackInfo::UninitializedSentinel(isolate());
StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
+ if (FLAG_pretenuring_call_new) {
+ StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(),
+ isolate()->factory()->NewAllocationSite());
+ ASSERT(expr->AllocationSiteFeedbackSlot() ==
+ expr->CallNewFeedbackSlot() + 1);
+ }
+
__ LoadObject(x2, FeedbackVector());
__ Mov(x3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
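
The slot layout this sets up, and which the call stubs above read back via slot + 1, is the following (a sketch; the indices follow the ASSERT just added):

    // vector[CallNewFeedbackSlot()]     : uninitialized sentinel, later the
    //                                     monomorphic constructor target
    // vector[CallNewFeedbackSlot() + 1] : eagerly created AllocationSite
    //                                     (== AllocationSiteFeedbackSlot())
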
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
bool is_api_function,
- bool count_constructions) {
+ bool count_constructions,
+ bool create_memento) {
// ----------- S t a t e -------------
// -- r0 : number of arguments
// -- r1 : constructor function
+ // -- r2 : allocation site or undefined
// -- lr : return address
// -- sp[...]: constructor arguments
// -----------------------------------
// Should never count constructions for api objects.
ASSERT(!is_api_function || !count_constructions);
+ // Should never create mementos for api functions.
+ ASSERT(!is_api_function || !create_memento);
+
+ // Should never create mementos before slack tracking is finished.
+ ASSERT(!count_constructions || !create_memento);
+
Isolate* isolate = masm->isolate();
// Enter a construct frame.
{
FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
+ if (create_memento) {
+ __ AssertUndefinedOrAllocationSite(r2, r3);
+ __ push(r2);
+ }
+
// Preserve the two incoming parameters on the stack.
__ SmiTag(r0);
__ push(r0); // Smi-tagged arguments count.
// r1: constructor function
// r2: initial map
__ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
+ if (create_memento) {
+ __ add(r3, r3, Operand(AllocationMemento::kSize / kPointerSize));
+ }
+
__ Allocate(r3, r4, r5, r6, &rt_call, SIZE_IN_WORDS);
// Allocated the JSObject, now initialize the fields. Map is set to
// initial map and properties and elements are set to empty fixed array.
// r1: constructor function
// r2: initial map
- // r3: object size
+ // r3: object size (not including memento if create_memento)
// r4: JSObject (not tagged)
__ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
__ mov(r5, r4);
// Fill all the in-object properties with the appropriate filler.
// r1: constructor function
// r2: initial map
- // r3: object size (in words)
+ // r3: object size (in words, including memento if create_memento)
// r4: JSObject (not tagged)
// r5: First in-object property of JSObject (not tagged)
ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);
- __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
+
if (count_constructions) {
+ __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
__ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
__ Ubfx(r0, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
kBitsPerByte);
__ InitializeFieldsWithFiller(r5, r0, r6);
// To allow for truncation.
__ LoadRoot(r6, Heap::kOnePointerFillerMapRootIndex);
+ __ add(r0, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
+ __ InitializeFieldsWithFiller(r5, r0, r6);
+ } else if (create_memento) {
+ __ sub(r6, r3, Operand(AllocationMemento::kSize / kPointerSize));
+ __ add(r0, r4, Operand(r6, LSL, kPointerSizeLog2)); // End of object.
+ __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
+ __ InitializeFieldsWithFiller(r5, r0, r6);
+
+ // Fill in memento fields.
+ // r5: points to the allocated but uninitialized memento.
+ __ LoadRoot(r6, Heap::kAllocationMementoMapRootIndex);
+ ASSERT_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
+ __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
+ // Load the AllocationSite
+ __ ldr(r6, MemOperand(sp, 2 * kPointerSize));
+ ASSERT_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
+ __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
+ } else {
+ __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
+ __ add(r0, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
+ __ InitializeFieldsWithFiller(r5, r0, r6);
}
- __ add(r0, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
- __ InitializeFieldsWithFiller(r5, r0, r6);
// Add the object tag to make the JSObject real, so that we can continue
// and jump into the continuation code at any time from now on. Any
// Allocate the new receiver object using the runtime call.
// r1: constructor function
__ bind(&rt_call);
+ if (create_memento) {
+ // Get the allocation site or undefined.
+ __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
+ __ push(r2);
+ }
+
__ push(r1); // argument for Runtime_NewObject
- __ CallRuntime(Runtime::kNewObject, 1);
+ if (create_memento) {
+ __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2);
+ } else {
+ __ CallRuntime(Runtime::kNewObject, 1);
+ }
__ mov(r4, r0);
+ // If we ended up using the runtime, and we want a memento, then the
+ // runtime call made it for us, and we shouldn't increment the memento
+ // create count here.
+ Label count_incremented;
+ if (create_memento) {
+ __ jmp(&count_incremented);
+ }
+
// Receiver for constructor call allocated.
// r4: JSObject
__ bind(&allocated);
+
+ if (create_memento) {
+ __ ldr(r2, MemOperand(sp, kPointerSize * 2));
+ __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
+ __ cmp(r2, r5);
+ __ b(eq, &count_incremented);
+ // r2 is an AllocationSite. We are creating a memento from it, so we
+ // need to increment the memento create count.
+ __ ldr(r3, FieldMemOperand(r2,
+ AllocationSite::kPretenureCreateCountOffset));
+ __ add(r3, r3, Operand(Smi::FromInt(1)));
+ __ str(r3, FieldMemOperand(r2,
+ AllocationSite::kPretenureCreateCountOffset));
+ __ bind(&count_incremented);
+ }
+
__ push(r4);
__ push(r4);
void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false, true);
+ Generate_JSConstructStubHelper(masm, false, true, false);
}
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false, false);
+ Generate_JSConstructStubHelper(masm, false, false, FLAG_pretenuring_call_new);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, true, false);
+ Generate_JSConstructStubHelper(masm, true, false, false);
}
__ cmp(r4, r1);
__ b(eq, &done);
- // If we came here, we need to see if we are the array function.
- // If we didn't have a matching function, and we didn't find the megamorph
- // sentinel, then we have in the slot either some other function or an
- // AllocationSite. Do a map check on the object in ecx.
- __ ldr(r5, FieldMemOperand(r4, 0));
- __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
- __ b(ne, &miss);
+ if (!FLAG_pretenuring_call_new) {
+ // If we came here, we need to see if we are the array function.
+ // If we didn't have a matching function, and we didn't find the megamorphic
+ // sentinel, then we have in the slot either some other function or an
+ // AllocationSite. Do a map check on the object in r4.
+ __ ldr(r5, FieldMemOperand(r4, 0));
+ __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
+ __ b(ne, &miss);
- // Make sure the function is the Array() function
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r4);
- __ cmp(r1, r4);
- __ b(ne, &megamorphic);
- __ jmp(&done);
+ // Make sure the function is the Array() function
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r4);
+ __ cmp(r1, r4);
+ __ b(ne, &megamorphic);
+ __ jmp(&done);
+ }
__ bind(&miss);
__ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize));
__ jmp(&done);
- // An uninitialized cache is patched with the function or sentinel to
- // indicate the ElementsKind if function is the Array constructor.
+ // An uninitialized cache is patched with the function.
__ bind(&initialize);
- // Make sure the function is the Array() function
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r4);
- __ cmp(r1, r4);
- __ b(ne, &not_array_function);
- // The target function is the Array constructor,
- // Create an AllocationSite if we don't already have it, store it in the slot.
- {
- FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
+ if (!FLAG_pretenuring_call_new) {
+ // Make sure the function is the Array() function
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r4);
+ __ cmp(r1, r4);
+ __ b(ne, &not_array_function);
- // Arguments register must be smi-tagged to call out.
- __ SmiTag(r0);
- __ Push(r3, r2, r1, r0);
+ // The target function is the Array constructor,
+ // Create an AllocationSite if we don't already have it, store it in the
+ // slot.
+ {
+ FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
- CreateAllocationSiteStub create_stub;
- __ CallStub(&create_stub);
+ // Arguments register must be smi-tagged to call out.
+ __ SmiTag(r0);
+ __ Push(r3, r2, r1, r0);
- __ Pop(r3, r2, r1, r0);
- __ SmiUntag(r0);
- }
- __ b(&done);
+ CreateAllocationSiteStub create_stub;
+ __ CallStub(&create_stub);
- __ bind(&not_array_function);
+ __ Pop(r3, r2, r1, r0);
+ __ SmiUntag(r0);
+ }
+ __ b(&done);
+
+ __ bind(&not_array_function);
+ }
__ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
__ add(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ b(ne, &slow);
if (RecordCallTarget()) {
- Label feedback_register_initialized;
GenerateRecordCallTarget(masm);
- // Put the AllocationSite from the feedback vector into r2, or undefined.
__ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
- __ ldr(r2, FieldMemOperand(r5, FixedArray::kHeaderSize));
- __ ldr(r5, FieldMemOperand(r2, AllocationSite::kMapOffset));
- __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
- __ b(eq, &feedback_register_initialized);
- __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
- __ bind(&feedback_register_initialized);
+ if (FLAG_pretenuring_call_new) {
+ // Put the AllocationSite from the feedback vector into r2.
+ // By adding kPointerSize we encode that we know the AllocationSite
+ // entry is at the feedback vector slot given by r3 + 1.
+ __ ldr(r2, FieldMemOperand(r5, FixedArray::kHeaderSize + kPointerSize));
+ } else {
+ Label feedback_register_initialized;
+ // Put the AllocationSite from the feedback vector into r2, or undefined.
+ __ ldr(r2, FieldMemOperand(r5, FixedArray::kHeaderSize));
+ __ ldr(r5, FieldMemOperand(r2, AllocationSite::kMapOffset));
+ __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
+ __ b(eq, &feedback_register_initialized);
+ __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
+ __ bind(&feedback_register_initialized);
+ }
+
__ AssertUndefinedOrAllocationSite(r2, r5);
}
Handle<Object> uninitialized =
TypeFeedbackInfo::UninitializedSentinel(isolate());
StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
+ if (FLAG_pretenuring_call_new) {
+ StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(),
+ isolate()->factory()->NewAllocationSite());
+ ASSERT(expr->AllocationSiteFeedbackSlot() ==
+ expr->CallNewFeedbackSlot() + 1);
+ }
+
__ Move(r2, FeedbackVector());
__ mov(r3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
void CallNew::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
+ int allocation_site_feedback_slot = FLAG_pretenuring_call_new
+ ? AllocationSiteFeedbackSlot()
+ : CallNewFeedbackSlot();
allocation_site_ =
- oracle->GetCallNewAllocationSite(CallNewFeedbackSlot());
+ oracle->GetCallNewAllocationSite(allocation_site_feedback_slot);
is_monomorphic_ = oracle->CallNewIsMonomorphic(CallNewFeedbackSlot());
if (is_monomorphic_) {
target_ = oracle->GetCallNewTarget(CallNewFeedbackSlot());
// Type feedback information.
virtual ComputablePhase GetComputablePhase() { return DURING_PARSE; }
- virtual int ComputeFeedbackSlotCount(Isolate* isolate) { return 1; }
+ virtual int ComputeFeedbackSlotCount(Isolate* isolate) {
+ return FLAG_pretenuring_call_new ? 2 : 1;
+ }
virtual void SetFirstFeedbackSlot(int slot) {
callnew_feedback_slot_ = slot;
}
ASSERT(callnew_feedback_slot_ != kInvalidFeedbackSlot);
return callnew_feedback_slot_;
}
+ int AllocationSiteFeedbackSlot() {
+ ASSERT(callnew_feedback_slot_ != kInvalidFeedbackSlot);
+ ASSERT(FLAG_pretenuring_call_new);
+ return callnew_feedback_slot_ + 1;
+ }
- TypeFeedbackId CallNewFeedbackId() const { return reuse(id()); }
void RecordTypeFeedback(TypeFeedbackOracle* oracle);
virtual bool IsMonomorphic() V8_OVERRIDE { return is_monomorphic_; }
Handle<JSFunction> target() const { return target_; }
}
+Handle<JSObject> Factory::NewJSObjectWithMemento(
+ Handle<JSFunction> constructor,
+ Handle<AllocationSite> site) {
+ JSFunction::EnsureHasInitialMap(constructor);
+ CALL_HEAP_FUNCTION(
+ isolate(),
+ isolate()->heap()->AllocateJSObject(*constructor, NOT_TENURED, *site),
+ JSObject);
+}
+
+
Handle<JSModule> Factory::NewJSModule(Handle<Context> context,
Handle<ScopeInfo> scope_info) {
CALL_HEAP_FUNCTION(
// runtime.
Handle<JSObject> NewJSObject(Handle<JSFunction> constructor,
PretenureFlag pretenure = NOT_TENURED);
+ // Allocate a JSObject with a memento pointing to the allocation site.
+ Handle<JSObject> NewJSObjectWithMemento(Handle<JSFunction> constructor,
+ Handle<AllocationSite> site);
// Global objects are pretenured and initialized based on a constructor.
Handle<GlobalObject> NewGlobalObject(Handle<JSFunction> constructor);
// Allocate an instance of the implicit receiver object.
HValue* size_in_bytes = Add<HConstant>(instance_size);
- PretenureFlag pretenure_flag =
- (FLAG_pretenuring_call_new && !FLAG_allocation_site_pretenuring) ?
- isolate()->heap()->GetPretenureMode() : NOT_TENURED;
+ HAllocationMode allocation_mode;
+ if (FLAG_pretenuring_call_new) {
+ if (FLAG_allocation_site_pretenuring) {
+ // Try to use pretenuring feedback.
+ Handle<AllocationSite> allocation_site = expr->allocation_site();
+ allocation_mode = HAllocationMode(allocation_site);
+ // Take a dependency on allocation site.
+ AllocationSite::AddDependentCompilationInfo(allocation_site,
+ AllocationSite::TENURING,
+ top_info());
+ } else {
+ allocation_mode = HAllocationMode(
+ isolate()->heap()->GetPretenureMode());
+ }
+ }
+
HAllocate* receiver =
- Add<HAllocate>(size_in_bytes, HType::JSObject(), pretenure_flag,
- JS_OBJECT_TYPE);
+ BuildAllocate(size_in_bytes, HType::JSObject(), JS_OBJECT_TYPE,
+ allocation_mode);
receiver->set_known_initial_map(initial_map);
// Load the initial map from the constructor.
: current_site_(current_site), pretenure_flag_(NOT_TENURED) {}
explicit HAllocationMode(PretenureFlag pretenure_flag)
: current_site_(NULL), pretenure_flag_(pretenure_flag) {}
+ HAllocationMode()
+ : current_site_(NULL), pretenure_flag_(NOT_TENURED) {}
HValue* current_site() const { return current_site_; }
Handle<AllocationSite> feedback_site() const { return feedback_site_; }
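
Taken together with the hydrogen hunk above, the three constructors cover the three allocation policies this patch selects between; a sketch, assuming the flag semantics described elsewhere in the patch:

    // FLAG_pretenuring_call_new && FLAG_allocation_site_pretenuring:
    //   HAllocationMode(allocation_site);           // per-site pretenuring feedback
    // FLAG_pretenuring_call_new only:
    //   HAllocationMode(heap->GetPretenureMode());  // global pretenure mode
    // FLAG_pretenuring_call_new off:
    //   HAllocationMode();                          // default, NOT_TENURED
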
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
bool is_api_function,
- bool count_constructions) {
+ bool count_constructions,
+ bool create_memento) {
// ----------- S t a t e -------------
// -- eax: number of arguments
// -- edi: constructor function
+ // -- ebx: allocation site or undefined
// -----------------------------------
// Should never count constructions for api objects.
ASSERT(!is_api_function || !count_constructions);
+ // Should never create mementos for api functions.
+ ASSERT(!is_api_function || !create_memento);
+
+ // Should never create mementos before slack tracking is finished.
+ ASSERT(!count_constructions || !create_memento);
+
// Enter a construct frame.
{
FrameScope scope(masm, StackFrame::CONSTRUCT);
+ if (create_memento) {
+ __ AssertUndefinedOrAllocationSite(ebx);
+ __ push(ebx);
+ }
+
// Store a smi-tagged arguments count on the stack.
__ SmiTag(eax);
__ push(eax);
// eax: initial map
__ movzx_b(edi, FieldOperand(eax, Map::kInstanceSizeOffset));
__ shl(edi, kPointerSizeLog2);
+ if (create_memento) {
+ __ add(edi, Immediate(AllocationMemento::kSize));
+ }
+
__ Allocate(edi, ebx, edi, no_reg, &rt_call, NO_ALLOCATION_FLAGS);
+
+ Factory* factory = masm->isolate()->factory();
+
// Allocated the JSObject, now initialize the fields.
// eax: initial map
// ebx: JSObject
- // edi: start of next object
+ // edi: start of next object (including memento if create_memento)
__ mov(Operand(ebx, JSObject::kMapOffset), eax);
- Factory* factory = masm->isolate()->factory();
__ mov(ecx, factory->empty_fixed_array());
__ mov(Operand(ebx, JSObject::kPropertiesOffset), ecx);
__ mov(Operand(ebx, JSObject::kElementsOffset), ecx);
// Set extra fields in the newly allocated object.
// eax: initial map
// ebx: JSObject
- // edi: start of next object
+ // edi: start of next object (including memento if create_memento)
__ lea(ecx, Operand(ebx, JSObject::kHeaderSize));
__ mov(edx, factory->undefined_value());
if (count_constructions) {
}
__ InitializeFieldsWithFiller(ecx, esi, edx);
__ mov(edx, factory->one_pointer_filler_map());
+ __ InitializeFieldsWithFiller(ecx, edi, edx);
+ } else if (create_memento) {
+ __ lea(esi, Operand(edi, -AllocationMemento::kSize));
+ __ InitializeFieldsWithFiller(ecx, esi, edx);
+
+ // Fill in the memento fields.
+ // esi: points to the allocated but uninitialized memento.
+ Handle<Map> allocation_memento_map = factory->allocation_memento_map();
+ __ mov(Operand(esi, AllocationMemento::kMapOffset),
+ allocation_memento_map);
+ // Get the allocation site or undefined.
+ __ mov(edx, Operand(esp, kPointerSize * 2));
+ __ mov(Operand(esi, AllocationMemento::kAllocationSiteOffset), edx);
+ } else {
+ __ InitializeFieldsWithFiller(ecx, edi, edx);
}
- __ InitializeFieldsWithFiller(ecx, edi, edx);
// Add the object tag to make the JSObject real, so that we can continue
// and jump into the continuation code at any time from now on. Any
// Allocate the new receiver object using the runtime call.
__ bind(&rt_call);
+ int offset = 0;
+ if (create_memento) {
+ // Get the allocation site or undefined.
+ __ mov(edi, Operand(esp, kPointerSize * 2));
+ __ push(edi);
+ offset = kPointerSize;
+ }
+
// Must restore edi (constructor) before calling runtime.
- __ mov(edi, Operand(esp, 0));
+ __ mov(edi, Operand(esp, offset));
// edi: function (constructor)
__ push(edi);
- __ CallRuntime(Runtime::kNewObject, 1);
+ if (create_memento) {
+ __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2);
+ } else {
+ __ CallRuntime(Runtime::kNewObject, 1);
+ }
__ mov(ebx, eax); // store result in ebx
+ // If we ended up using the runtime, and we want a memento, then the
+ // runtime call made it for us, and we shouldn't increment the memento
+ // create count here.
+ Label count_incremented;
+ if (create_memento) {
+ __ jmp(&count_incremented);
+ }
+
// New object allocated.
// ebx: newly allocated object
__ bind(&allocated);
+
+ if (create_memento) {
+ __ mov(ecx, Operand(esp, kPointerSize * 2));
+ __ cmp(ecx, masm->isolate()->factory()->undefined_value());
+ __ j(equal, &count_incremented);
+ // ecx is an AllocationSite. We are creating a memento from it, so we
+ // need to increment the memento create count.
+ __ add(FieldOperand(ecx, AllocationSite::kPretenureCreateCountOffset),
+ Immediate(Smi::FromInt(1)));
+ __ bind(&count_incremented);
+ }
+
// Retrieve the function from the stack.
__ pop(edi);
void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false, true);
+ Generate_JSConstructStubHelper(masm, false, true, false);
}
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false, false);
+ Generate_JSConstructStubHelper(masm, false, false, FLAG_pretenuring_call_new);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, true, false);
+ Generate_JSConstructStubHelper(masm, true, false, false);
}
__ cmp(ecx, Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
__ j(equal, &done, Label::kFar);
- // If we came here, we need to see if we are the array function.
- // If we didn't have a matching function, and we didn't find the megamorph
- // sentinel, then we have in the slot either some other function or an
- // AllocationSite. Do a map check on the object in ecx.
- Handle<Map> allocation_site_map =
- masm->isolate()->factory()->allocation_site_map();
- __ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map));
- __ j(not_equal, &miss);
-
- // Make sure the function is the Array() function
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
- __ cmp(edi, ecx);
- __ j(not_equal, &megamorphic);
- __ jmp(&done, Label::kFar);
+ if (!FLAG_pretenuring_call_new) {
+ // If we came here, we need to see if we are the array function.
+ // If we didn't have a matching function, and we didn't find the megamorphic
+ // sentinel, then we have in the slot either some other function or an
+ // AllocationSite. Do a map check on the object in ecx.
+ Handle<Map> allocation_site_map =
+ masm->isolate()->factory()->allocation_site_map();
+ __ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map));
+ __ j(not_equal, &miss);
+
+ // Make sure the function is the Array() function
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
+ __ cmp(edi, ecx);
+ __ j(not_equal, &megamorphic);
+ __ jmp(&done, Label::kFar);
+ }
__ bind(&miss);
// An uninitialized cache is patched with the function or sentinel to
// indicate the ElementsKind if function is the Array constructor.
__ bind(&initialize);
- // Make sure the function is the Array() function
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
- __ cmp(edi, ecx);
- __ j(not_equal, &not_array_function);
+ if (!FLAG_pretenuring_call_new) {
+ // Make sure the function is the Array() function
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
+ __ cmp(edi, ecx);
+ __ j(not_equal, &not_array_function);
+
+ // The target function is the Array constructor,
+ // Create an AllocationSite if we don't already have it, store it in the
+ // slot.
+ {
+ FrameScope scope(masm, StackFrame::INTERNAL);
- // The target function is the Array constructor,
- // Create an AllocationSite if we don't already have it, store it in the slot.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
+ // Arguments register must be smi-tagged to call out.
+ __ SmiTag(eax);
+ __ push(eax);
+ __ push(edi);
+ __ push(edx);
+ __ push(ebx);
- // Arguments register must be smi-tagged to call out.
- __ SmiTag(eax);
- __ push(eax);
- __ push(edi);
- __ push(edx);
- __ push(ebx);
+ CreateAllocationSiteStub create_stub;
+ __ CallStub(&create_stub);
- CreateAllocationSiteStub create_stub;
- __ CallStub(&create_stub);
+ __ pop(ebx);
+ __ pop(edx);
+ __ pop(edi);
+ __ pop(eax);
+ __ SmiUntag(eax);
+ }
+ __ jmp(&done);
- __ pop(ebx);
- __ pop(edx);
- __ pop(edi);
- __ pop(eax);
- __ SmiUntag(eax);
+ __ bind(&not_array_function);
}
- __ jmp(&done);
- __ bind(&not_array_function);
__ mov(FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize),
edi);
__ j(not_equal, &slow);
if (RecordCallTarget()) {
- Label feedback_register_initialized;
GenerateRecordCallTarget(masm);
- // Put the AllocationSite from the feedback vector into ebx, or undefined.
- __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
- FixedArray::kHeaderSize));
- Handle<Map> allocation_site_map =
- masm->isolate()->factory()->allocation_site_map();
- __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
- __ j(equal, &feedback_register_initialized);
- __ mov(ebx, masm->isolate()->factory()->undefined_value());
- __ bind(&feedback_register_initialized);
+ if (FLAG_pretenuring_call_new) {
+ // Put the AllocationSite from the feedback vector into ebx.
+ // By adding kPointerSize we encode that we know the AllocationSite
+ // entry is at the feedback vector slot given by edx + 1.
+ __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
+ FixedArray::kHeaderSize + kPointerSize));
+ } else {
+ Label feedback_register_initialized;
+ // Put the AllocationSite from the feedback vector into ebx, or undefined.
+ __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
+ FixedArray::kHeaderSize));
+ Handle<Map> allocation_site_map =
+ masm->isolate()->factory()->allocation_site_map();
+ __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
+ __ j(equal, &feedback_register_initialized);
+ __ mov(ebx, masm->isolate()->factory()->undefined_value());
+ __ bind(&feedback_register_initialized);
+ }
+
__ AssertUndefinedOrAllocationSite(ebx);
}
Handle<Object> uninitialized =
TypeFeedbackInfo::UninitializedSentinel(isolate());
StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
+ if (FLAG_pretenuring_call_new) {
+ StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(),
+ isolate()->factory()->NewAllocationSite());
+ ASSERT(expr->AllocationSiteFeedbackSlot() ==
+ expr->CallNewFeedbackSlot() + 1);
+ }
+
__ LoadHeapObject(ebx, FeedbackVector());
__ mov(edx, Immediate(Smi::FromInt(expr->CallNewFeedbackSlot())));
// elements kind is the initial elements kind.
AllocationSiteMode AllocationSite::GetMode(
ElementsKind boilerplate_elements_kind) {
- if (IsFastSmiElementsKind(boilerplate_elements_kind)) {
+ if (FLAG_pretenuring_call_new ||
+ IsFastSmiElementsKind(boilerplate_elements_kind)) {
return TRACK_ALLOCATION_SITE;
}
AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
ElementsKind to) {
- if (IsFastSmiElementsKind(from) &&
- IsMoreGeneralElementsKindTransition(from, to)) {
+ if (FLAG_pretenuring_call_new ||
+ (IsFastSmiElementsKind(from) &&
+ IsMoreGeneralElementsKindTransition(from, to))) {
return TRACK_ALLOCATION_SITE;
}
}
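
In other words, with the flag on, allocation-site tracking is requested unconditionally; a hypothetical illustration of the new behaviour:

    // FLAG_pretenuring_call_new == true:
    //   GetMode(FAST_DOUBLE_ELEMENTS)               -> TRACK_ALLOCATION_SITE
    //   GetMode(FAST_ELEMENTS, FAST_HOLEY_ELEMENTS) -> TRACK_ALLOCATION_SITE
    // FLAG_pretenuring_call_new == false:
    //   tracking only for fast-smi kinds and smi-to-more-general
    //   transitions, as before.
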
-RUNTIME_FUNCTION(MaybeObject*, Runtime_NewObject) {
- HandleScope scope(isolate);
- ASSERT(args.length() == 1);
-
- Handle<Object> constructor = args.at<Object>(0);
-
+static MaybeObject* Runtime_NewObjectHelper(Isolate* isolate,
+ Handle<Object> constructor,
+ Handle<AllocationSite> site) {
// If the constructor isn't a proper function we throw a type error.
if (!constructor->IsJSFunction()) {
Vector< Handle<Object> > arguments = HandleVector(&constructor, 1);
shared->CompleteInobjectSlackTracking();
}
- Handle<JSObject> result = isolate->factory()->NewJSObject(function);
+ Handle<JSObject> result;
+ if (site.is_null()) {
+ result = isolate->factory()->NewJSObject(function);
+ } else {
+ result = isolate->factory()->NewJSObjectWithMemento(function, site);
+ }
RETURN_IF_EMPTY_HANDLE(isolate, result);
isolate->counters()->constructed_objects()->Increment();
}
+RUNTIME_FUNCTION(MaybeObject*, Runtime_NewObject) {
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 1);
+
+ Handle<Object> constructor = args.at<Object>(0);
+ return Runtime_NewObjectHelper(isolate,
+ constructor,
+ Handle<AllocationSite>::null());
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_NewObjectWithAllocationSite) {
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 2);
+
+ Handle<Object> constructor = args.at<Object>(1);
+ Handle<Object> feedback = args.at<Object>(0);
+ Handle<AllocationSite> site;
+ if (feedback->IsAllocationSite()) {
+ // The feedback can be an AllocationSite or undefined.
+ site = Handle<AllocationSite>::cast(feedback);
+ }
+ return Runtime_NewObjectHelper(isolate,
+ constructor,
+ site);
+}
+
+
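For clarity, the argument layout the construct stubs above produce for the new entry point (the feedback value is pushed before the constructor, so it lands at index 0):

    // Runtime_NewObjectWithAllocationSite, args.length() == 2:
    //   args[0] : AllocationSite or undefined (the feedback value)
    //   args[1] : constructor function
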
RUNTIME_FUNCTION(MaybeObject*, Runtime_FinalizeInstanceSize) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
F(NewClosure, 3, 1) \
F(NewClosureFromStubFailure, 1, 1) \
F(NewObject, 1, 1) \
+ F(NewObjectWithAllocationSite, 2, 1) \
F(NewObjectFromBound, 1, 1) \
F(FinalizeInstanceSize, 1, 1) \
F(Throw, 1, 1) \
bool TypeFeedbackOracle::CallIsMonomorphic(int slot) {
Handle<Object> value = GetInfo(slot);
- return value->IsAllocationSite() || value->IsJSFunction();
+ return FLAG_pretenuring_call_new
+ ? value->IsJSFunction()
+ : value->IsAllocationSite() || value->IsJSFunction();
}
bool TypeFeedbackOracle::CallNewIsMonomorphic(int slot) {
Handle<Object> info = GetInfo(slot);
- return info->IsAllocationSite() || info->IsJSFunction();
+ return FLAG_pretenuring_call_new
+ ? info->IsJSFunction()
+ : info->IsAllocationSite() || info->IsJSFunction();
}
Handle<JSFunction> TypeFeedbackOracle::GetCallTarget(int slot) {
Handle<Object> info = GetInfo(slot);
- if (info->IsAllocationSite()) {
- return Handle<JSFunction>(isolate()->native_context()->array_function());
- } else {
+ if (FLAG_pretenuring_call_new || info->IsJSFunction()) {
return Handle<JSFunction>::cast(info);
}
+
+ ASSERT(info->IsAllocationSite());
+ return Handle<JSFunction>(isolate()->native_context()->array_function());
}
Handle<JSFunction> TypeFeedbackOracle::GetCallNewTarget(int slot) {
Handle<Object> info = GetInfo(slot);
- if (info->IsAllocationSite()) {
- return Handle<JSFunction>(isolate()->native_context()->array_function());
- } else {
+ if (FLAG_pretenuring_call_new || info->IsJSFunction()) {
return Handle<JSFunction>::cast(info);
}
+
+ ASSERT(info->IsAllocationSite());
+ return Handle<JSFunction>(isolate()->native_context()->array_function());
}
Handle<AllocationSite> TypeFeedbackOracle::GetCallNewAllocationSite(int slot) {
Handle<Object> info = GetInfo(slot);
- if (info->IsAllocationSite()) {
+ if (FLAG_pretenuring_call_new || info->IsAllocationSite()) {
return Handle<AllocationSite>::cast(info);
}
return Handle<AllocationSite>::null();
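
To summarize what the oracle now has to distinguish, a sketch of the possible slot contents in each mode:

    // FLAG_pretenuring_call_new on:
    //   slot     : JSFunction (or a sentinel)      -> monomorphic predicates
    //   slot + 1 : AllocationSite, always present  -> GetCallNewAllocationSite()
    // FLAG_pretenuring_call_new off:
    //   slot     : JSFunction, or an AllocationSite when the target is the
    //              Array() constructor
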
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
bool is_api_function,
- bool count_constructions) {
+ bool count_constructions,
+ bool create_memento) {
// ----------- S t a t e -------------
// -- rax: number of arguments
// -- rdi: constructor function
+ // -- rbx: allocation site or undefined
// -----------------------------------
// Should never count constructions for api objects.
ASSERT(!is_api_function || !count_constructions);
+
+ // Should never create mementos for api functions.
+ ASSERT(!is_api_function || !create_memento);
+
+ // Should never create mementos before slack tracking is finished.
+ ASSERT(!count_constructions || !create_memento);
// Enter a construct frame.
{
FrameScope scope(masm, StackFrame::CONSTRUCT);
+ if (create_memento) {
+ __ AssertUndefinedOrAllocationSite(rbx);
+ __ Push(rbx);
+ }
+
// Store a smi-tagged arguments count on the stack.
__ Integer32ToSmi(rax, rax);
__ Push(rax);
// Now allocate the JSObject on the heap.
__ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
__ shl(rdi, Immediate(kPointerSizeLog2));
+ if (create_memento) {
+ __ addq(rdi, Immediate(AllocationMemento::kSize));
+ }
// rdi: size of new object
__ Allocate(rdi,
rbx,
no_reg,
&rt_call,
NO_ALLOCATION_FLAGS);
+ Factory* factory = masm->isolate()->factory();
// Allocated the JSObject, now initialize the fields.
// rax: initial map
// rbx: JSObject (not HeapObject tagged - the actual address).
- // rdi: start of next object
+ // rdi: start of next object (including memento if create_memento)
__ movp(Operand(rbx, JSObject::kMapOffset), rax);
__ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
__ movp(Operand(rbx, JSObject::kPropertiesOffset), rcx);
// Set extra fields in the newly allocated object.
// rax: initial map
// rbx: JSObject
- // rdi: start of next object
+ // rdi: start of next object (including memento if create_memento)
__ lea(rcx, Operand(rbx, JSObject::kHeaderSize));
__ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
if (count_constructions) {
}
__ InitializeFieldsWithFiller(rcx, rsi, rdx);
__ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
+ __ InitializeFieldsWithFiller(rcx, rdi, rdx);
+ } else if (create_memento) {
+ __ lea(rsi, Operand(rdi, -AllocationMemento::kSize));
+ __ InitializeFieldsWithFiller(rcx, rsi, rdx);
+
+ // Fill in the memento fields.
+ // rsi: points to the allocated but uninitialized memento.
+ Handle<Map> allocation_memento_map = factory->allocation_memento_map();
+ __ Move(Operand(rsi, AllocationMemento::kMapOffset),
+ allocation_memento_map);
+ // Get the allocation site or undefined.
+ __ movp(rdx, Operand(rsp, kPointerSize * 2));
+ __ movp(Operand(rsi, AllocationMemento::kAllocationSiteOffset), rdx);
+ } else {
+ __ InitializeFieldsWithFiller(rcx, rdi, rdx);
}
- __ InitializeFieldsWithFiller(rcx, rdi, rdx);
// Add the object tag to make the JSObject real, so that we can continue
// and jump into the continuation code at any time from now on. Any
// Allocate the new receiver object using the runtime call.
// rdi: function (constructor)
__ bind(&rt_call);
+ int offset = 0;
+ if (create_memento) {
+ // Get the allocation site or undefined.
+ __ movp(rdi, Operand(rsp, kPointerSize * 2));
+ __ Push(rdi);
+ offset = kPointerSize;
+ }
+
// Must restore rdi (constructor) before calling runtime.
- __ movp(rdi, Operand(rsp, 0));
+ __ movp(rdi, Operand(rsp, offset));
__ Push(rdi);
- __ CallRuntime(Runtime::kNewObject, 1);
+ if (create_memento) {
+ __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2);
+ } else {
+ __ CallRuntime(Runtime::kNewObject, 1);
+ }
__ movp(rbx, rax); // store result in rbx
+ // If we ended up using the runtime, and we want a memento, then the
+ // runtime call made it for us, and we shouldn't increment the memento
+ // create count here.
+ Label count_incremented;
+ if (create_memento) {
+ __ jmp(&count_incremented);
+ }
+
// New object allocated.
// rbx: newly allocated object
__ bind(&allocated);
+
+ if (create_memento) {
+ __ movp(rcx, Operand(rsp, kPointerSize * 2));
+ __ Cmp(rcx, masm->isolate()->factory()->undefined_value());
+ __ j(equal, &count_incremented);
+ // rcx is an AllocationSite. We are creating a memento from it, so we
+ // need to increment the memento create count.
+ __ SmiAddConstant(
+ FieldOperand(rcx, AllocationSite::kPretenureCreateCountOffset),
+ Smi::FromInt(1));
+ __ bind(&count_incremented);
+ }
+
// Retrieve the function from the stack.
__ Pop(rdi);
void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false, true);
+ Generate_JSConstructStubHelper(masm, false, true, false);
}
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false, false);
+ Generate_JSConstructStubHelper(masm, false, false, FLAG_pretenuring_call_new);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, true, false);
+ Generate_JSConstructStubHelper(masm, true, false, false);
}
__ Cmp(rcx, TypeFeedbackInfo::MegamorphicSentinel(isolate));
__ j(equal, &done);
- // If we came here, we need to see if we are the array function.
- // If we didn't have a matching function, and we didn't find the megamorph
- // sentinel, then we have in the slot either some other function or an
- // AllocationSite. Do a map check on the object in rcx.
- Handle<Map> allocation_site_map =
- masm->isolate()->factory()->allocation_site_map();
- __ Cmp(FieldOperand(rcx, 0), allocation_site_map);
- __ j(not_equal, &miss);
-
- // Make sure the function is the Array() function
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx);
- __ cmpq(rdi, rcx);
- __ j(not_equal, &megamorphic);
- __ jmp(&done);
+ if (!FLAG_pretenuring_call_new) {
+ // If we came here, we need to see if we are the array function.
+ // If we didn't have a matching function, and we didn't find the megamorphic
+ // sentinel, then we have in the slot either some other function or an
+ // AllocationSite. Do a map check on the object in rcx.
+ Handle<Map> allocation_site_map =
+ masm->isolate()->factory()->allocation_site_map();
+ __ Cmp(FieldOperand(rcx, 0), allocation_site_map);
+ __ j(not_equal, &miss);
+
+ // Make sure the function is the Array() function
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx);
+ __ cmpq(rdi, rcx);
+ __ j(not_equal, &megamorphic);
+ __ jmp(&done);
+ }
__ bind(&miss);
// An uninitialized cache is patched with the function or sentinel to
// indicate the ElementsKind if function is the Array constructor.
__ bind(&initialize);
- // Make sure the function is the Array() function
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx);
- __ cmpq(rdi, rcx);
- __ j(not_equal, &not_array_function);
- // The target function is the Array constructor,
- // Create an AllocationSite if we don't already have it, store it in the slot.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
+ if (!FLAG_pretenuring_call_new) {
+ // Make sure the function is the Array() function
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx);
+ __ cmpq(rdi, rcx);
+ __ j(not_equal, &not_array_function);
- // Arguments register must be smi-tagged to call out.
- __ Integer32ToSmi(rax, rax);
- __ Push(rax);
- __ Push(rdi);
- __ Integer32ToSmi(rdx, rdx);
- __ Push(rdx);
- __ Push(rbx);
+ {
+ FrameScope scope(masm, StackFrame::INTERNAL);
- CreateAllocationSiteStub create_stub;
- __ CallStub(&create_stub);
+ // Arguments register must be smi-tagged to call out.
+ __ Integer32ToSmi(rax, rax);
+ __ Push(rax);
+ __ Push(rdi);
+ __ Integer32ToSmi(rdx, rdx);
+ __ Push(rdx);
+ __ Push(rbx);
- __ Pop(rbx);
- __ Pop(rdx);
- __ Pop(rdi);
- __ Pop(rax);
- __ SmiToInteger32(rax, rax);
+ CreateAllocationSiteStub create_stub;
+ __ CallStub(&create_stub);
+
+ __ Pop(rbx);
+ __ Pop(rdx);
+ __ Pop(rdi);
+ __ Pop(rax);
+ __ SmiToInteger32(rax, rax);
+ }
+ __ jmp(&done_no_smi_convert);
+
+ __ bind(&not_array_function);
}
- __ jmp(&done_no_smi_convert);
- __ bind(&not_array_function);
__ movp(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
rdi);
__ j(not_equal, &slow);
if (RecordCallTarget()) {
- Label feedback_register_initialized;
GenerateRecordCallTarget(masm);
- // Put the AllocationSite from the feedback vector into rbx, or undefined.
+
__ SmiToInteger32(rdx, rdx);
- __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
- FixedArray::kHeaderSize));
- __ CompareRoot(FieldOperand(rbx, 0), Heap::kAllocationSiteMapRootIndex);
- __ j(equal, &feedback_register_initialized);
- __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
- __ bind(&feedback_register_initialized);
+ if (FLAG_pretenuring_call_new) {
+ // Put the AllocationSite from the feedback vector into rbx.
+ // By adding kPointerSize we encode that we know the AllocationSite
+ // entry is at the feedback vector slot given by rdx + 1.
+ __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
+ FixedArray::kHeaderSize + kPointerSize));
+ } else {
+ Label feedback_register_initialized;
+ // Put the AllocationSite from the feedback vector into rbx, or undefined.
+ __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
+ FixedArray::kHeaderSize));
+ __ CompareRoot(FieldOperand(rbx, 0), Heap::kAllocationSiteMapRootIndex);
+ __ j(equal, &feedback_register_initialized);
+ __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
+ __ bind(&feedback_register_initialized);
+ }
+
__ AssertUndefinedOrAllocationSite(rbx);
}
}
Label no_info;
- // If the feedback slot is the megamorphic sentinel, or contains anything
- // other than an AllocationSite, call an array constructor that doesn't use
- // AllocationSites.
+ // If the feedback vector is the undefined value, call an array constructor
+ // that doesn't use AllocationSites.
__ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
__ j(equal, &no_info);
Handle<Object> uninitialized =
TypeFeedbackInfo::UninitializedSentinel(isolate());
StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
+ if (FLAG_pretenuring_call_new) {
+ StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(),
+ isolate()->factory()->NewAllocationSite());
+ ASSERT(expr->AllocationSiteFeedbackSlot() ==
+ expr->CallNewFeedbackSlot() + 1);
+ }
+
__ Move(rbx, FeedbackVector());
__ Move(rdx, Smi::FromInt(expr->CallNewFeedbackSlot()));
}
+// Make sure pretenuring feedback is gathered for constructed objects as well
+// as for literals.
+TEST(OptimizedPretenuringConstructorCalls) {
+ if (!i::FLAG_allocation_site_pretenuring || !i::FLAG_pretenuring_call_new) {
+ // FLAG_pretenuring_call_new needs to be synced with the snapshot.
+ return;
+ }
+ i::FLAG_allow_natives_syntax = true;
+ i::FLAG_max_new_space_size = 2048;
+ CcTest::InitializeVM();
+ if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
+ if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
+ v8::HandleScope scope(CcTest::isolate());
+
+ v8::Local<v8::Value> res = CompileRun(
+ "var number_elements = 20000;"
+ "var elements = new Array(number_elements);"
+ "function foo() {"
+ " this.a = 3;"
+ " this.b = {};"
+ "}"
+ "function f() {"
+ " for (var i = 0; i < number_elements; i++) {"
+ " elements[i] = new foo();"
+ " }"
+ " return elements[number_elements - 1];"
+ "};"
+ "f(); f(); f();"
+ "%OptimizeFunctionOnNextCall(f);"
+ "f();");
+
+ Handle<JSObject> o =
+ v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
+
+ CHECK(CcTest::heap()->InOldPointerSpace(*o));
+}
+
+
// Test regular array literals allocation.
TEST(OptimizedAllocationArrayLiterals) {
i::FLAG_allow_natives_syntax = true;
}
+// Test global pretenuring for calls to new.
TEST(OptimizedPretenuringCallNew) {
i::FLAG_allow_natives_syntax = true;
i::FLAG_allocation_site_pretenuring = false;
// Force GC to test the poisoned memento handling
CcTest::i_isolate()->heap()->CollectGarbage(i::NEW_SPACE);
}
+
+
+TEST(PretenuringCallNew) {
+ CcTest::InitializeVM();
+ if (!i::FLAG_allocation_site_pretenuring) return;
+ if (!i::FLAG_pretenuring_call_new) return;
+
+ v8::HandleScope scope(CcTest::isolate());
+ Isolate* isolate = CcTest::i_isolate();
+ Heap* heap = isolate->heap();
+
+ // We need to create several instances to get past the slack-tracking
+ // phase, where mementos aren't emitted.
+ int call_count = 10;
+ CHECK_GE(call_count, SharedFunctionInfo::kGenerousAllocationCount);
+ i::ScopedVector<char> test_buf(1024);
+ const char* program =
+ "function f() {"
+ " this.a = 3;"
+ " this.b = {};"
+ " return this;"
+ "};"
+ "var a;"
+ "for(var i = 0; i < %d; i++) {"
+ " a = new f();"
+ "}"
+ "a;";
+ i::OS::SNPrintF(test_buf, program, call_count);
+ v8::Local<v8::Value> res = CompileRun(test_buf.start());
+ Handle<JSObject> o =
+ v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
+
+ // The object of class f should have a memento secreted behind it.
+ Address memento_address = o->address() + o->map()->instance_size();
+ AllocationMemento* memento =
+ reinterpret_cast<AllocationMemento*>(memento_address + kHeapObjectTag);
+ CHECK_EQ(memento->map(), heap->allocation_memento_map());
+
+ // Furthermore, how many mementos did we create? The count should match
+ // call_count - SharedFunctionInfo::kGenerousAllocationCount.
+ AllocationSite* site = memento->GetAllocationSite();
+ CHECK_EQ(call_count - SharedFunctionInfo::kGenerousAllocationCount,
+ site->pretenure_create_count()->value());
+}
obj = newarraycase_length_smidouble(2);
assertKind(elements_kind.fast_double, obj);
- // Try to continue the transition to fast object. This won't work for
- // constructed arrays because constructor dispatch is done on the
- // elements kind, and a DOUBLE array constructor won't create an allocation
- // memento.
+ // Try to continue the transition to fast object.
+ // TODO(mvstanton): re-enable commented out code when
+ // FLAG_pretenuring_call_new is turned on in the build.
obj = newarraycase_length_smidouble("coates");
assertKind(elements_kind.fast, obj);
obj = newarraycase_length_smidouble(2);
- assertKind(elements_kind.fast_double, obj);
+ // assertKind(elements_kind.fast, obj);
function newarraycase_length_smiobj(value) {
var a = new Array(3);
if (support_smi_only_arrays) {
- // Test: If a call site goes megamorphic, it loses the ability to
- // use allocation site feedback.
+ // Test: If a call site goes megamorphic, it retains the ability to
+ // use allocation site feedback (if FLAG_allocation_site_pretenuring
+ // is on).
(function() {
function bar(t, len) {
return new t(len);
assertKind(elements_kind.fast_double, b);
c = bar(Object, 3);
b = bar(Array, 10);
- assertKind(elements_kind.fast_smi_only, b);
- b[0] = 3.5;
- c = bar(Array, 10);
- assertKind(elements_kind.fast_smi_only, c);
+ // TODO(mvstanton): re-enable when FLAG_allocation_site_pretenuring
+ // is on in the build.
+ // assertKind(elements_kind.fast_double, b);
})();
bar0(Array);
%OptimizeFunctionOnNextCall(bar0);
b = bar0(Array);
- // We also lost our ability to record kind feedback, as the site
- // is megamorphic now.
- assertKind(elements_kind.fast_smi_only, b);
- assertOptimized(bar0);
- b[0] = 3.5;
- c = bar0(Array);
- assertKind(elements_kind.fast_smi_only, c);
+ // This only makes sense to test if we allow crankshafting
+ if (4 != %GetOptimizationStatus(bar0)) {
+ // We also lost our ability to record kind feedback, as the site
+ // is megamorphic now.
+ assertKind(elements_kind.fast_smi_only, b);
+ assertOptimized(bar0);
+ b[0] = 3.5;
+ c = bar0(Array);
+ assertKind(elements_kind.fast_smi_only, c);
+ }
})();