Remove --pretenuring-call-new
author    mvstanton <mvstanton@chromium.org>
Wed, 16 Sep 2015 15:12:24 +0000 (08:12 -0700)
committer Commit bot <commit-bot@chromium.org>
Wed, 16 Sep 2015 15:12:43 +0000 (15:12 +0000)
There isn't a plan to turn it on soon, so we'll take it out in favor of cleaner code.

BUG=

Review URL: https://codereview.chromium.org/1202173002

Cr-Commit-Position: refs/heads/master@{#30767}

28 files changed:
src/arm/builtins-arm.cc
src/arm/code-stubs-arm.cc
src/arm64/builtins-arm64.cc
src/arm64/code-stubs-arm64.cc
src/ast.h
src/flag-definitions.h
src/full-codegen/arm/full-codegen-arm.cc
src/full-codegen/arm64/full-codegen-arm64.cc
src/full-codegen/ia32/full-codegen-ia32.cc
src/full-codegen/mips/full-codegen-mips.cc
src/full-codegen/mips64/full-codegen-mips64.cc
src/full-codegen/x64/full-codegen-x64.cc
src/hydrogen.cc
src/ia32/builtins-ia32.cc
src/ia32/code-stubs-ia32.cc
src/mips/builtins-mips.cc
src/mips/code-stubs-mips.cc
src/mips64/builtins-mips64.cc
src/mips64/code-stubs-mips64.cc
src/objects-inl.h
src/runtime/runtime-object.cc
src/runtime/runtime.h
src/type-info.cc
src/typing.cc
src/x64/builtins-x64.cc
src/x64/code-stubs-x64.cc
test/cctest/test-heap.cc
test/cctest/test-mementos.cc

index c210be5546e1c3475b7b088281a148c7d176c3fa..7daf1aac2be7bd169c368a3f16355095bc5ff63c 100644 (file)
--- a/src/arm/builtins-arm.cc
+++ b/src/arm/builtins-arm.cc
@@ -318,8 +318,7 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
 
 
 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
-                                           bool is_api_function,
-                                           bool create_memento) {
+                                           bool is_api_function) {
   // ----------- S t a t e -------------
   //  -- r0     : number of arguments
   //  -- r1     : constructor function
@@ -329,9 +328,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
   //  -- sp[...]: constructor arguments
   // -----------------------------------
 
-  // Should never create mementos for api functions.
-  DCHECK(!is_api_function || !create_memento);
-
   Isolate* isolate = masm->isolate();
 
   // Enter a construct frame.
@@ -406,9 +402,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
       // r2: initial map
       Label rt_call_reload_new_target;
       __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
-      if (create_memento) {
-        __ add(r3, r3, Operand(AllocationMemento::kSize / kPointerSize));
-      }
 
       __ Allocate(r3, r4, r5, r6, &rt_call_reload_new_target, SIZE_IN_WORDS);
 
@@ -416,7 +409,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
       // initial map and properties and elements are set to empty fixed array.
       // r1: constructor function
       // r2: initial map
-      // r3: object size (including memento if create_memento)
+      // r3: object size
       // r4: JSObject (not tagged)
       __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
       __ mov(r5, r4);
@@ -430,7 +423,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
       // Fill all the in-object properties with the appropriate filler.
       // r1: constructor function
       // r2: initial map
-      // r3: object size (in words, including memento if create_memento)
+      // r3: object size
       // r4: JSObject (not tagged)
       // r5: First in-object property of JSObject (not tagged)
       DCHECK_EQ(3 * kPointerSize, JSObject::kHeaderSize);
@@ -469,25 +462,8 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
         __ bind(&no_inobject_slack_tracking);
       }
 
-      if (create_memento) {
-        __ sub(ip, r3, Operand(AllocationMemento::kSize / kPointerSize));
-        __ add(r0, r4, Operand(ip, LSL, kPointerSizeLog2));  // End of object.
-        __ InitializeFieldsWithFiller(r5, r0, r6);
-
-        // Fill in memento fields.
-        // r5: points to the allocated but uninitialized memento.
-        __ LoadRoot(r6, Heap::kAllocationMementoMapRootIndex);
-        DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
-        __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
-        // Load the AllocationSite
-        __ ldr(r6, MemOperand(sp, 3 * kPointerSize));
-        __ AssertUndefinedOrAllocationSite(r6, r0);
-        DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
-        __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
-      } else {
-        __ add(r0, r4, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
-        __ InitializeFieldsWithFiller(r5, r0, r6);
-      }
+      __ add(r0, r4, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
+      __ InitializeFieldsWithFiller(r5, r0, r6);
 
       // Add the object tag to make the JSObject real, so that we can continue
       // and jump into the continuation code at any time from now on.
@@ -506,47 +482,16 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
     // r1: constructor function
     // r3: original constructor
     __ bind(&rt_call);
-    if (create_memento) {
-      // Get the cell or allocation site.
-      __ ldr(r2, MemOperand(sp, 3 * kPointerSize));
-      __ push(r2);  // argument 1: allocation site
-    }
 
     __ push(r1);  // argument 2/1: constructor function
     __ push(r3);  // argument 3/2: original constructor
-    if (create_memento) {
-      __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
-    } else {
-      __ CallRuntime(Runtime::kNewObject, 2);
-    }
+    __ CallRuntime(Runtime::kNewObject, 2);
     __ mov(r4, r0);
 
-    // Runtime_NewObjectWithAllocationSite increments allocation count.
-    // Skip the increment.
-    Label count_incremented;
-    if (create_memento) {
-      __ jmp(&count_incremented);
-    }
-
     // Receiver for constructor call allocated.
     // r4: JSObject
     __ bind(&allocated);
 
-    if (create_memento) {
-      __ ldr(r2, MemOperand(sp, 3 * kPointerSize));
-      __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
-      __ cmp(r2, r5);
-      __ b(eq, &count_incremented);
-      // r2 is an AllocationSite. We are creating a memento from it, so we
-      // need to increment the memento create count.
-      __ ldr(r3, FieldMemOperand(r2,
-                                 AllocationSite::kPretenureCreateCountOffset));
-      __ add(r3, r3, Operand(Smi::FromInt(1)));
-      __ str(r3, FieldMemOperand(r2,
-                                 AllocationSite::kPretenureCreateCountOffset));
-      __ bind(&count_incremented);
-    }
-
     // Restore the parameters.
     __ pop(r3);
     __ pop(r1);
@@ -650,12 +595,12 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
 
 
 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
-  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
+  Generate_JSConstructStubHelper(masm, false);
 }
 
 
 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
-  Generate_JSConstructStubHelper(masm, true, false);
+  Generate_JSConstructStubHelper(masm, true);
 }
 
 
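The same construct-stub simplification repeats in each architecture port below: the fast path allocates exactly the instance size reported by the initial map and fills the in-object fields with the undefined filler, while the slow path now always calls Runtime::kNewObject with two arguments (constructor and original constructor), so Runtime::kNewObjectWithAllocationSite and the memento-count bookkeeping disappear. A minimal, self-contained C++ sketch of the removed size computation follows; it is an illustration with stand-in names, not V8's MacroAssembler code, and the memento size is inferred from the kMapOffset/kAllocationSiteOffset DCHECKs above.

#include <cassert>

// Stand-in constant: an AllocationMemento is two words, a map pointer plus an
// AllocationSite pointer.
constexpr int kAllocationMementoSizeInWords = 2;

// Before this CL: with pretenuring_call_new enabled, the stub could request
// extra space for a trailing AllocationMemento.
int OldRequestedSizeInWords(int instance_size_in_words, bool create_memento) {
  return instance_size_in_words +
         (create_memento ? kAllocationMementoSizeInWords : 0);
}

// After this CL: the request is always exactly the instance size.
int NewRequestedSizeInWords(int instance_size_in_words) {
  return instance_size_in_words;
}

int main() {
  assert(OldRequestedSizeInWords(5, /*create_memento=*/true) == 7);
  assert(OldRequestedSizeInWords(5, /*create_memento=*/false) == 5);
  assert(NewRequestedSizeInWords(5) == 5);
  return 0;
}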
index f6b08bd87c0d8513dc6101df76e4b2072deb2a65..8255942008860f338404f0f7146067c21aa098b1 100644 (file)
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -2316,27 +2316,25 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
   __ b(eq, &done);
   __ ldr(feedback_map, FieldMemOperand(r5, HeapObject::kMapOffset));
   __ CompareRoot(feedback_map, Heap::kWeakCellMapRootIndex);
-  __ b(ne, FLAG_pretenuring_call_new ? &miss : &check_allocation_site);
+  __ b(ne, &check_allocation_site);
 
   // If the weak cell is cleared, we have a new chance to become monomorphic.
   __ JumpIfSmi(weak_value, &initialize);
   __ jmp(&megamorphic);
 
-  if (!FLAG_pretenuring_call_new) {
-    __ bind(&check_allocation_site);
-    // If we came here, we need to see if we are the array function.
-    // If we didn't have a matching function, and we didn't find the megamorph
-    // sentinel, then we have in the slot either some other function or an
-    // AllocationSite.
-    __ CompareRoot(feedback_map, Heap::kAllocationSiteMapRootIndex);
-    __ b(ne, &miss);
+  __ bind(&check_allocation_site);
+  // If we came here, we need to see if we are the array function.
+  // If we didn't have a matching function, and we didn't find the megamorph
+  // sentinel, then we have in the slot either some other function or an
+  // AllocationSite.
+  __ CompareRoot(feedback_map, Heap::kAllocationSiteMapRootIndex);
+  __ b(ne, &miss);
 
-    // Make sure the function is the Array() function
-    __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r5);
-    __ cmp(r1, r5);
-    __ b(ne, &megamorphic);
-    __ jmp(&done);
-  }
+  // Make sure the function is the Array() function
+  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r5);
+  __ cmp(r1, r5);
+  __ b(ne, &megamorphic);
+  __ jmp(&done);
 
   __ bind(&miss);
 
@@ -2355,24 +2353,21 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
   // An uninitialized cache is patched with the function
   __ bind(&initialize);
 
-  if (!FLAG_pretenuring_call_new) {
-    // Make sure the function is the Array() function
-    __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r5);
-    __ cmp(r1, r5);
-    __ b(ne, &not_array_function);
-
-    // The target function is the Array constructor,
-    // Create an AllocationSite if we don't already have it, store it in the
-    // slot.
-    CreateAllocationSiteStub create_stub(masm->isolate());
-    CallStubInRecordCallTarget(masm, &create_stub, is_super);
-    __ b(&done);
-
-    __ bind(&not_array_function);
-  }
+  // Make sure the function is the Array() function
+  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r5);
+  __ cmp(r1, r5);
+  __ b(ne, &not_array_function);
 
-  CreateWeakCellStub create_stub(masm->isolate());
+  // The target function is the Array constructor,
+  // Create an AllocationSite if we don't already have it, store it in the
+  // slot.
+  CreateAllocationSiteStub create_stub(masm->isolate());
   CallStubInRecordCallTarget(masm, &create_stub, is_super);
+  __ b(&done);
+
+  __ bind(&not_array_function);
+  CreateWeakCellStub weak_cell_stub(masm->isolate());
+  CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super);
   __ bind(&done);
 }
 
@@ -2488,21 +2483,14 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
     GenerateRecordCallTarget(masm, IsSuperConstructorCall());
 
     __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
-    if (FLAG_pretenuring_call_new) {
-      // Put the AllocationSite from the feedback vector into r2.
-      // By adding kPointerSize we encode that we know the AllocationSite
-      // entry is at the feedback vector slot given by r3 + 1.
-      __ ldr(r2, FieldMemOperand(r5, FixedArray::kHeaderSize + kPointerSize));
-    } else {
-      Label feedback_register_initialized;
-      // Put the AllocationSite from the feedback vector into r2, or undefined.
-      __ ldr(r2, FieldMemOperand(r5, FixedArray::kHeaderSize));
-      __ ldr(r5, FieldMemOperand(r2, AllocationSite::kMapOffset));
-      __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
-      __ b(eq, &feedback_register_initialized);
-      __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
-      __ bind(&feedback_register_initialized);
-    }
+    Label feedback_register_initialized;
+    // Put the AllocationSite from the feedback vector into r2, or undefined.
+    __ ldr(r2, FieldMemOperand(r5, FixedArray::kHeaderSize));
+    __ ldr(r5, FieldMemOperand(r2, AllocationSite::kMapOffset));
+    __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
+    __ b(eq, &feedback_register_initialized);
+    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
+    __ bind(&feedback_register_initialized);
 
     __ AssertUndefinedOrAllocationSite(r2, r5);
   }
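With the FLAG_pretenuring_call_new branches gone, GenerateRecordCallTarget (here and in the other ports) records feedback uniformly: the Array() constructor gets an AllocationSite in its slot, every other constructor gets a WeakCell holding the function, and a target mismatch downgrades the slot to the megamorphic sentinel. A rough C++ model of that state machine follows, using illustrative stand-in types rather than V8's tagged objects and omitting the cleared-WeakCell re-initialization path.

#include <cassert>
#include <variant>

struct JSFunction { int id; };
struct AllocationSite {};                       // recorded only for Array()
struct WeakCell { const JSFunction* target; };
struct Uninitialized {};
struct Megamorphic {};

using FeedbackSlot =
    std::variant<Uninitialized, Megamorphic, WeakCell, AllocationSite>;

void RecordCallTarget(FeedbackSlot& slot, const JSFunction& callee,
                      const JSFunction& array_function) {
  if (std::holds_alternative<Megamorphic>(slot)) return;

  if (const auto* cell = std::get_if<WeakCell>(&slot)) {
    if (cell->target == &callee) return;  // already monomorphic for callee
    slot = Megamorphic{};                 // different target: go megamorphic
    return;
  }
  if (std::holds_alternative<AllocationSite>(slot)) {
    if (&callee == &array_function) return;  // still the Array() function
    slot = Megamorphic{};
    return;
  }
  // Uninitialized: Array() gets an AllocationSite so array allocations can be
  // tracked; everything else gets a WeakCell holding the target function.
  if (&callee == &array_function) {
    slot = AllocationSite{};
  } else {
    slot = WeakCell{&callee};
  }
}

int main() {
  JSFunction array{0}, f{1}, g{2};
  FeedbackSlot slot = Uninitialized{};
  RecordCallTarget(slot, f, array);  // becomes monomorphic: WeakCell(f)
  assert(std::holds_alternative<WeakCell>(slot));
  RecordCallTarget(slot, g, array);  // mismatch: megamorphic
  assert(std::holds_alternative<Megamorphic>(slot));
  return 0;
}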
index 9cd7cc6c31f5e9a875db577561eb7dd7b8984d8f..20f63f82c554a45ac95a53a967b639a2df41ef70 100644 (file)
--- a/src/arm64/builtins-arm64.cc
+++ b/src/arm64/builtins-arm64.cc
@@ -315,8 +315,7 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
 
 
 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
-                                           bool is_api_function,
-                                           bool create_memento) {
+                                           bool is_api_function) {
   // ----------- S t a t e -------------
   //  -- x0     : number of arguments
   //  -- x1     : constructor function
@@ -327,8 +326,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
   // -----------------------------------
 
   ASM_LOCATION("Builtins::Generate_JSConstructStubHelper");
-  // Should never create mementos for api functions.
-  DCHECK(!is_api_function || !create_memento);
 
   Isolate* isolate = masm->isolate();
 
@@ -409,15 +406,8 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
       Register obj_size = x3;
       Register new_obj = x4;
       __ Ldrb(obj_size, FieldMemOperand(init_map, Map::kInstanceSizeOffset));
-      if (create_memento) {
-        __ Add(x7, obj_size,
-               Operand(AllocationMemento::kSize / kPointerSize));
-        __ Allocate(x7, new_obj, x10, x11, &rt_call_reload_new_target,
-                    SIZE_IN_WORDS);
-      } else {
-        __ Allocate(obj_size, new_obj, x10, x11, &rt_call_reload_new_target,
-                    SIZE_IN_WORDS);
-      }
+      __ Allocate(obj_size, new_obj, x10, x11, &rt_call_reload_new_target,
+                  SIZE_IN_WORDS);
 
       // Allocated the JSObject, now initialize the fields. Map is set to
       // initial map and properties and elements are set to empty fixed array.
@@ -487,25 +477,11 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
 
         __ bind(&no_inobject_slack_tracking);
       }
-      if (create_memento) {
-        // Fill the pre-allocated fields with undef.
-        __ FillFields(first_prop, prop_fields, filler);
-        __ Add(first_prop, new_obj, Operand(obj_size, LSL, kPointerSizeLog2));
-        __ LoadRoot(x14, Heap::kAllocationMementoMapRootIndex);
-        DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
-        __ Str(x14, MemOperand(first_prop, kPointerSize, PostIndex));
-        // Load the AllocationSite
-        __ Peek(x14, 3 * kXRegSize);
-        __ AssertUndefinedOrAllocationSite(x14, x10);
-        DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
-        __ Str(x14, MemOperand(first_prop, kPointerSize, PostIndex));
-        first_prop = NoReg;
-      } else {
-        // Fill all of the property fields with undef.
-        __ FillFields(first_prop, prop_fields, filler);
-        first_prop = NoReg;
-        prop_fields = NoReg;
-      }
+
+      // Fill all of the property fields with undef.
+      __ FillFields(first_prop, prop_fields, filler);
+      first_prop = NoReg;
+      prop_fields = NoReg;
 
       // Add the object tag to make the JSObject real, so that we can continue
       // and jump into the continuation code at any time from now on.
@@ -523,40 +499,14 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
     // x1: constructor function
     // x3: original constructor
     __ Bind(&rt_call);
-    Label count_incremented;
-    if (create_memento) {
-      // Get the cell or allocation site.
-      __ Peek(x4, 3 * kXRegSize);
-      __ Push(x4, constructor, original_constructor);  // arguments 1-3
-      __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
-      __ Mov(x4, x0);
-      // If we ended up using the runtime, and we want a memento, then the
-      // runtime call made it for us, and we shouldn't do create count
-      // increment.
-      __ B(&count_incremented);
-    } else {
-      __ Push(constructor, original_constructor);  // arguments 1-2
-      __ CallRuntime(Runtime::kNewObject, 2);
-      __ Mov(x4, x0);
-    }
+    __ Push(constructor, original_constructor);  // arguments 1-2
+    __ CallRuntime(Runtime::kNewObject, 2);
+    __ Mov(x4, x0);
 
     // Receiver for constructor call allocated.
     // x4: JSObject
     __ Bind(&allocated);
 
-    if (create_memento) {
-      __ Peek(x10, 3 * kXRegSize);
-      __ JumpIfRoot(x10, Heap::kUndefinedValueRootIndex, &count_incremented);
-      // r2 is an AllocationSite. We are creating a memento from it, so we
-      // need to increment the memento create count.
-      __ Ldr(x5, FieldMemOperand(x10,
-                                 AllocationSite::kPretenureCreateCountOffset));
-      __ Add(x5, x5, Operand(Smi::FromInt(1)));
-      __ Str(x5, FieldMemOperand(x10,
-                                 AllocationSite::kPretenureCreateCountOffset));
-      __ bind(&count_incremented);
-    }
-
     // Restore the parameters.
     __ Pop(original_constructor);
     __ Pop(constructor);
@@ -662,12 +612,12 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
 
 
 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
-  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
+  Generate_JSConstructStubHelper(masm, false);
 }
 
 
 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
-  Generate_JSConstructStubHelper(masm, true, false);
+  Generate_JSConstructStubHelper(masm, true);
 }
 
 
index cc198b8cf3b7d7e212999c40b0adaeb437166de4..61bca1a52ea0e6f11b0b320ed2e499bcc1363f77 100644 (file)
--- a/src/arm64/code-stubs-arm64.cc
+++ b/src/arm64/code-stubs-arm64.cc
@@ -2681,26 +2681,24 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, Register argc,
   __ B(eq, &done);
   __ Ldr(feedback_map, FieldMemOperand(feedback, HeapObject::kMapOffset));
   __ CompareRoot(feedback_map, Heap::kWeakCellMapRootIndex);
-  __ B(ne, FLAG_pretenuring_call_new ? &miss : &check_allocation_site);
+  __ B(ne, &check_allocation_site);
 
   // If the weak cell is cleared, we have a new chance to become monomorphic.
   __ JumpIfSmi(feedback_value, &initialize);
   __ B(&megamorphic);
 
-  if (!FLAG_pretenuring_call_new) {
-    __ bind(&check_allocation_site);
-    // If we came here, we need to see if we are the array function.
-    // If we didn't have a matching function, and we didn't find the megamorph
-    // sentinel, then we have in the slot either some other function or an
-    // AllocationSite.
-    __ JumpIfNotRoot(feedback_map, Heap::kAllocationSiteMapRootIndex, &miss);
-
-    // Make sure the function is the Array() function
-    __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
-    __ Cmp(function, scratch1);
-    __ B(ne, &megamorphic);
-    __ B(&done);
-  }
+  __ bind(&check_allocation_site);
+  // If we came here, we need to see if we are the array function.
+  // If we didn't have a matching function, and we didn't find the megamorph
+  // sentinel, then we have in the slot either some other function or an
+  // AllocationSite.
+  __ JumpIfNotRoot(feedback_map, Heap::kAllocationSiteMapRootIndex, &miss);
+
+  // Make sure the function is the Array() function
+  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
+  __ Cmp(function, scratch1);
+  __ B(ne, &megamorphic);
+  __ B(&done);
 
   __ Bind(&miss);
 
@@ -2720,27 +2718,23 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, Register argc,
   // indicate the ElementsKind if function is the Array constructor.
   __ Bind(&initialize);
 
-  if (!FLAG_pretenuring_call_new) {
-    // Make sure the function is the Array() function
-    __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
-    __ Cmp(function, scratch1);
-    __ B(ne, &not_array_function);
-
-    // The target function is the Array constructor,
-    // Create an AllocationSite if we don't already have it, store it in the
-    // slot.
-    CreateAllocationSiteStub create_stub(masm->isolate());
-    CallStubInRecordCallTarget(masm, &create_stub, argc, function,
-                               feedback_vector, index, orig_construct,
-                               is_super);
-    __ B(&done);
-
-    __ Bind(&not_array_function);
-  }
+  // Make sure the function is the Array() function
+  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
+  __ Cmp(function, scratch1);
+  __ B(ne, &not_array_function);
 
-  CreateWeakCellStub create_stub(masm->isolate());
+  // The target function is the Array constructor,
+  // Create an AllocationSite if we don't already have it, store it in the
+  // slot.
+  CreateAllocationSiteStub create_stub(masm->isolate());
   CallStubInRecordCallTarget(masm, &create_stub, argc, function,
                              feedback_vector, index, orig_construct, is_super);
+  __ B(&done);
+
+  __ Bind(&not_array_function);
+  CreateWeakCellStub weak_cell_stub(masm->isolate());
+  CallStubInRecordCallTarget(masm, &weak_cell_stub, argc, function,
+                             feedback_vector, index, orig_construct, is_super);
   __ Bind(&done);
 }
 
@@ -2862,21 +2856,14 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
                              IsSuperConstructorCall());
 
     __ Add(x5, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2));
-    if (FLAG_pretenuring_call_new) {
-      // Put the AllocationSite from the feedback vector into x2.
-      // By adding kPointerSize we encode that we know the AllocationSite
-      // entry is at the feedback vector slot given by x3 + 1.
-      __ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize + kPointerSize));
-    } else {
     Label feedback_register_initialized;
-      // Put the AllocationSite from the feedback vector into x2, or undefined.
-      __ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize));
-      __ Ldr(x5, FieldMemOperand(x2, AllocationSite::kMapOffset));
-      __ JumpIfRoot(x5, Heap::kAllocationSiteMapRootIndex,
-                    &feedback_register_initialized);
-      __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
-      __ bind(&feedback_register_initialized);
-    }
+    // Put the AllocationSite from the feedback vector into x2, or undefined.
+    __ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize));
+    __ Ldr(x5, FieldMemOperand(x2, AllocationSite::kMapOffset));
+    __ JumpIfRoot(x5, Heap::kAllocationSiteMapRootIndex,
+                  &feedback_register_initialized);
+    __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
+    __ bind(&feedback_register_initialized);
 
     __ AssertUndefinedOrAllocationSite(x2, x5);
   }
index 0486c3ee3f4f19a17f021857f6f2344ce27a447c..34de3e4c74b5e876b395f136f8ee4379f605a0b3 100644 (file)
--- a/src/ast.h
+++ b/src/ast.h
@@ -1982,7 +1982,7 @@ class CallNew final : public Expression {
   // Type feedback information.
   virtual FeedbackVectorRequirements ComputeFeedbackRequirements(
       Isolate* isolate, const ICSlotCache* cache) override {
-    return FeedbackVectorRequirements(FLAG_pretenuring_call_new ? 2 : 1, 0);
+    return FeedbackVectorRequirements(1, 0);
   }
   void SetFirstFeedbackSlot(FeedbackVectorSlot slot) override {
     callnew_feedback_slot_ = slot;
@@ -1992,10 +1992,6 @@ class CallNew final : public Expression {
     DCHECK(!callnew_feedback_slot_.IsInvalid());
     return callnew_feedback_slot_;
   }
-  FeedbackVectorSlot AllocationSiteFeedbackSlot() {
-    DCHECK(FLAG_pretenuring_call_new);
-    return CallNewFeedbackSlot().next();
-  }
 
   bool IsMonomorphic() override { return is_monomorphic_; }
   Handle<JSFunction> target() const { return target_; }
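After this change a CallNew node reserves a single feedback vector slot; previously, with the flag on, it reserved two, and the extra slot (CallNewFeedbackSlot().next()) held the AllocationSite used for pretenuring. A small illustrative sketch of the before/after requirement, with simplified types rather than the real AST interface:

struct FeedbackVectorRequirements {
  int slots;     // regular feedback slots
  int ic_slots;  // IC slots
};

// Before: FLAG_pretenuring_call_new ? 2 : 1; the optional second slot held the
// AllocationSite for the constructed object.
FeedbackVectorRequirements OldCallNewRequirements(bool pretenuring_call_new) {
  return {pretenuring_call_new ? 2 : 1, 0};
}

// After: always one slot, holding only the call-new feedback itself.
FeedbackVectorRequirements NewCallNewRequirements() {
  return {1, 0};
}

int main() {
  return OldCallNewRequirements(false).slots == NewCallNewRequirements().slots
             ? 0
             : 1;
}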
index 850208bf603e8f6c0ed9561fba1d0d0061dde40c..da587cbc232fbfa6a43fc0fae3d9aa44d73828c6 100644 (file)
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
@@ -253,9 +253,6 @@ DEFINE_IMPLICATION(harmony_destructuring, harmony_default_parameters)
 // Flags for experimental implementation features.
 DEFINE_BOOL(compiled_keyed_generic_loads, false,
             "use optimizing compiler to generate keyed generic load stubs")
-// TODO(hpayer): We will remove this flag as soon as we have pretenuring
-// support for specific allocation sites.
-DEFINE_BOOL(pretenuring_call_new, false, "pretenure call new")
 DEFINE_BOOL(allocation_site_pretenuring, true,
             "pretenure with allocation sites")
 DEFINE_BOOL(trace_pretenuring, false,
index 22a2d33f53dc74805b660a8f78eebb155da366fd..4470387b2d605acb21c3b555c9867eba2660810f 100644 (file)
--- a/src/full-codegen/arm/full-codegen-arm.cc
+++ b/src/full-codegen/arm/full-codegen-arm.cc
@@ -3191,12 +3191,6 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
 
   // Record call targets in unoptimized code.
-  if (FLAG_pretenuring_call_new) {
-    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
-    DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
-           expr->CallNewFeedbackSlot().ToInt() + 1);
-  }
-
   __ Move(r2, FeedbackVector());
   __ mov(r3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
 
@@ -3237,15 +3231,6 @@ void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
   __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
 
   // Record call targets in unoptimized code.
-  if (FLAG_pretenuring_call_new) {
-    UNREACHABLE();
-    /* TODO(dslomov): support pretenuring.
-    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
-    DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
-           expr->CallNewFeedbackSlot().ToInt() + 1);
-    */
-  }
-
   __ Move(r2, FeedbackVector());
   __ mov(r3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
 
index 5363c026a72ac0fdb89a5f2b534c893a92c2359a..64c634a66158b1d358710e1ec699f45cd83959fe 100644 (file)
--- a/src/full-codegen/arm64/full-codegen-arm64.cc
+++ b/src/full-codegen/arm64/full-codegen-arm64.cc
@@ -2898,12 +2898,6 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   __ Peek(x1, arg_count * kXRegSize);
 
   // Record call targets in unoptimized code.
-  if (FLAG_pretenuring_call_new) {
-    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
-    DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
-           expr->CallNewFeedbackSlot().ToInt() + 1);
-  }
-
   __ LoadObject(x2, FeedbackVector());
   __ Mov(x3, SmiFromSlot(expr->CallNewFeedbackSlot()));
 
@@ -2944,15 +2938,6 @@ void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
   __ Peek(x1, arg_count * kXRegSize);
 
   // Record call targets in unoptimized code.
-  if (FLAG_pretenuring_call_new) {
-    UNREACHABLE();
-    /* TODO(dslomov): support pretenuring.
-    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
-    DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
-           expr->CallNewFeedbackSlot().ToInt() + 1);
-    */
-  }
-
   __ LoadObject(x2, FeedbackVector());
   __ Mov(x3, SmiFromSlot(expr->CallFeedbackSlot()));
 
index 9729f59cc9977b924a6e772bf497ac3438126484..285975cbd9ed8fac8855b1e5fe49bb33a33fc4fc 100644 (file)
--- a/src/full-codegen/ia32/full-codegen-ia32.cc
+++ b/src/full-codegen/ia32/full-codegen-ia32.cc
@@ -3080,12 +3080,6 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   __ mov(edi, Operand(esp, arg_count * kPointerSize));
 
   // Record call targets in unoptimized code.
-  if (FLAG_pretenuring_call_new) {
-    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
-    DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
-           expr->CallNewFeedbackSlot().ToInt() + 1);
-  }
-
   __ LoadHeapObject(ebx, FeedbackVector());
   __ mov(edx, Immediate(SmiFromSlot(expr->CallNewFeedbackSlot())));
 
@@ -3126,15 +3120,6 @@ void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
   __ mov(edi, Operand(esp, arg_count * kPointerSize));
 
   // Record call targets in unoptimized code.
-  if (FLAG_pretenuring_call_new) {
-    UNREACHABLE();
-    /* TODO(dslomov): support pretenuring.
-    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
-    DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
-           expr->CallNewFeedbackSlot().ToInt() + 1);
-    */
-  }
-
   __ LoadHeapObject(ebx, FeedbackVector());
   __ mov(edx, Immediate(SmiFromSlot(expr->CallFeedbackSlot())));
 
index 1bf21f085b2adfd9c50317ac4ef374437af15550..c29404d1f51ac989aed6f1bf5abb715cd95b676f 100644 (file)
--- a/src/full-codegen/mips/full-codegen-mips.cc
+++ b/src/full-codegen/mips/full-codegen-mips.cc
@@ -3183,12 +3183,6 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
 
   // Record call targets in unoptimized code.
-  if (FLAG_pretenuring_call_new) {
-    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
-    DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
-           expr->CallNewFeedbackSlot().ToInt() + 1);
-  }
-
   __ li(a2, FeedbackVector());
   __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
 
@@ -3229,15 +3223,6 @@ void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
   __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
 
   // Record call targets in unoptimized code.
-  if (FLAG_pretenuring_call_new) {
-    UNREACHABLE();
-    /* TODO(dslomov): support pretenuring.
-    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
-    DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
-           expr->CallNewFeedbackSlot().ToInt() + 1);
-    */
-  }
-
   __ li(a2, FeedbackVector());
   __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
 
index a273d570e2c63d3dedae5c4516f4318c9fdd9d48..a61454ed7c9b2503ceec3dc6bbe679a7ce5bb958 100644 (file)
--- a/src/full-codegen/mips64/full-codegen-mips64.cc
+++ b/src/full-codegen/mips64/full-codegen-mips64.cc
@@ -3185,12 +3185,6 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
 
   // Record call targets in unoptimized code.
-  if (FLAG_pretenuring_call_new) {
-    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
-    DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
-           expr->CallNewFeedbackSlot().ToInt() + 1);
-  }
-
   __ li(a2, FeedbackVector());
   __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
 
@@ -3231,15 +3225,6 @@ void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
   __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
 
   // Record call targets in unoptimized code.
-  if (FLAG_pretenuring_call_new) {
-    UNREACHABLE();
-    /* TODO(dslomov): support pretenuring.
-    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
-    DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
-           expr->CallNewFeedbackSlot().ToInt() + 1);
-    */
-  }
-
   __ li(a2, FeedbackVector());
   __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
 
index 3f0dec1dd7dc165b25857418e0c2d01320439671..e5e3e5438d0e80c3155f26799ede939b0c8dfd95 100644 (file)
--- a/src/full-codegen/x64/full-codegen-x64.cc
+++ b/src/full-codegen/x64/full-codegen-x64.cc
@@ -3108,12 +3108,6 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   __ movp(rdi, Operand(rsp, arg_count * kPointerSize));
 
   // Record call targets in unoptimized code, but not in the snapshot.
-  if (FLAG_pretenuring_call_new) {
-    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
-    DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
-           expr->CallNewFeedbackSlot().ToInt() + 1);
-  }
-
   __ Move(rbx, FeedbackVector());
   __ Move(rdx, SmiFromSlot(expr->CallNewFeedbackSlot()));
 
@@ -3154,15 +3148,6 @@ void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
   __ movp(rdi, Operand(rsp, arg_count * kPointerSize));
 
   // Record call targets in unoptimized code.
-  if (FLAG_pretenuring_call_new) {
-    UNREACHABLE();
-    /* TODO(dslomov): support pretenuring.
-    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
-    DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
-           expr->CallNewFeedbackSlot().ToInt() + 1);
-    */
-  }
-
   __ Move(rbx, FeedbackVector());
   __ Move(rdx, SmiFromSlot(expr->CallFeedbackSlot()));
 
index f41748518a988ad9a364e7a9552fa45dac7b4474..52184b36e679937eb341a6f9d9f5ebda29494c17 100644 (file)
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -9908,16 +9908,6 @@ void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
     // Allocate an instance of the implicit receiver object.
     HValue* size_in_bytes = Add<HConstant>(instance_size);
     HAllocationMode allocation_mode;
-    if (FLAG_pretenuring_call_new) {
-      if (FLAG_allocation_site_pretenuring) {
-        // Try to use pretenuring feedback.
-        Handle<AllocationSite> allocation_site = expr->allocation_site();
-        allocation_mode = HAllocationMode(allocation_site);
-        // Take a dependency on allocation site.
-        top_info()->dependencies()->AssumeTenuringDecision(allocation_site);
-      }
-    }
-
     HAllocate* receiver = BuildAllocate(
         size_in_bytes, HType::JSObject(), JS_OBJECT_TYPE, allocation_mode);
     receiver->set_known_initial_map(initial_map);
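On the Crankshaft side, VisitCallNew previously (when both pretenuring_call_new and allocation_site_pretenuring were on) built the receiver allocation with an HAllocationMode carrying the call site's AllocationSite and registered a dependency on its tenuring decision; now the default-constructed mode is used and no dependency is taken. A hypothetical sketch of the removed decision, for illustration only and not V8's API:

struct AllocationSite {};
struct HAllocationMode {
  const AllocationSite* feedback_site = nullptr;  // no site: default behaviour
};

HAllocationMode ChooseReceiverAllocationMode(bool pretenuring_call_new,
                                             bool allocation_site_pretenuring,
                                             const AllocationSite* site) {
  HAllocationMode mode;
  if (pretenuring_call_new && allocation_site_pretenuring) {
    // Removed path: consult the site's pretenuring feedback and take a
    // dependency on its tenuring decision.
    mode.feedback_site = site;
  }
  return mode;  // after this CL, always the default mode
}

int main() {
  AllocationSite site;
  HAllocationMode mode = ChooseReceiverAllocationMode(false, true, &site);
  return mode.feedback_site == nullptr ? 0 : 1;
}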
index 2f111935eb02d53ea0584ca9e0b924d780b37938..433c98085c34ca6a717f4e7cc9546110caf126a5 100644 (file)
--- a/src/ia32/builtins-ia32.cc
+++ b/src/ia32/builtins-ia32.cc
@@ -100,8 +100,7 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
 
 
 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
-                                           bool is_api_function,
-                                           bool create_memento) {
+                                           bool is_api_function) {
   // ----------- S t a t e -------------
   //  -- eax: number of arguments
   //  -- edi: constructor function
@@ -109,9 +108,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
   //  -- edx: original constructor
   // -----------------------------------
 
-  // Should never create mementos for api functions.
-  DCHECK(!is_api_function || !create_memento);
-
   // Enter a construct frame.
   {
     FrameScope scope(masm, StackFrame::CONSTRUCT);
@@ -192,9 +188,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
       // eax: initial map
       __ movzx_b(edi, FieldOperand(eax, Map::kInstanceSizeOffset));
       __ shl(edi, kPointerSizeLog2);
-      if (create_memento) {
-        __ add(edi, Immediate(AllocationMemento::kSize));
-      }
 
       __ Allocate(edi, ebx, edi, no_reg, &rt_call, NO_ALLOCATION_FLAGS);
 
@@ -203,7 +196,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
       // Allocated the JSObject, now initialize the fields.
       // eax: initial map
       // ebx: JSObject
-      // edi: start of next object (including memento if create_memento)
+      // edi: start of next object
       __ mov(Operand(ebx, JSObject::kMapOffset), eax);
       __ mov(ecx, factory->empty_fixed_array());
       __ mov(Operand(ebx, JSObject::kPropertiesOffset), ecx);
@@ -211,7 +204,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
       // Set extra fields in the newly allocated object.
       // eax: initial map
       // ebx: JSObject
-      // edi: start of next object (including memento if create_memento)
+      // edi: start of next object
       // esi: slack tracking counter (non-API function case)
       __ mov(edx, factory->undefined_value());
       __ lea(ecx, Operand(ebx, JSObject::kHeaderSize));
@@ -244,22 +237,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
         __ bind(&no_inobject_slack_tracking);
       }
 
-      if (create_memento) {
-        __ lea(esi, Operand(edi, -AllocationMemento::kSize));
-        __ InitializeFieldsWithFiller(ecx, esi, edx);
-
-        // Fill in memento fields if necessary.
-        // esi: points to the allocated but uninitialized memento.
-        __ mov(Operand(esi, AllocationMemento::kMapOffset),
-               factory->allocation_memento_map());
-        // Get the cell or undefined.
-        __ mov(edx, Operand(esp, 3 * kPointerSize));
-        __ AssertUndefinedOrAllocationSite(edx);
-        __ mov(Operand(esi, AllocationMemento::kAllocationSiteOffset),
-               edx);
-      } else {
-        __ InitializeFieldsWithFiller(ecx, edi, edx);
-      }
+      __ InitializeFieldsWithFiller(ecx, edi, edx);
 
       // Add the object tag to make the JSObject real, so that we can continue
       // and jump into the continuation code at any time from now on.
@@ -275,12 +253,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
     // edx: original constructor
     __ bind(&rt_call);
     int offset = kPointerSize;
-    if (create_memento) {
-      // Get the cell or allocation site.
-      __ mov(edi, Operand(esp, kPointerSize * 3));
-      __ push(edi);  // argument 1: allocation site
-      offset += kPointerSize;
-    }
 
     // Must restore esi (context) and edi (constructor) before calling
     // runtime.
@@ -288,35 +260,13 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
     __ mov(edi, Operand(esp, offset));
     __ push(edi);  // argument 2/1: constructor function
     __ push(edx);  // argument 3/2: original constructor
-    if (create_memento) {
-      __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
-    } else {
-      __ CallRuntime(Runtime::kNewObject, 2);
-    }
+    __ CallRuntime(Runtime::kNewObject, 2);
     __ mov(ebx, eax);  // store result in ebx
 
-    // Runtime_NewObjectWithAllocationSite increments allocation count.
-    // Skip the increment.
-    Label count_incremented;
-    if (create_memento) {
-      __ jmp(&count_incremented);
-    }
-
     // New object allocated.
     // ebx: newly allocated object
     __ bind(&allocated);
 
-    if (create_memento) {
-      __ mov(ecx, Operand(esp, 3 * kPointerSize));
-      __ cmp(ecx, masm->isolate()->factory()->undefined_value());
-      __ j(equal, &count_incremented);
-      // ecx is an AllocationSite. We are creating a memento from it, so we
-      // need to increment the memento create count.
-      __ add(FieldOperand(ecx, AllocationSite::kPretenureCreateCountOffset),
-             Immediate(Smi::FromInt(1)));
-      __ bind(&count_incremented);
-    }
-
     // Restore the parameters.
     __ pop(edx);  // new.target
     __ pop(edi);  // Constructor function.
@@ -405,12 +355,12 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
 
 
 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
-  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
+  Generate_JSConstructStubHelper(masm, false);
 }
 
 
 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
-  Generate_JSConstructStubHelper(masm, true, false);
+  Generate_JSConstructStubHelper(masm, true);
 }
 
 
index b9dc33f4ee450e5138efb0ed3cdd0b337e879ee8..b61e1ef57ed27cf77589e5c46f5b5e664c7de05f 100644 (file)
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -1960,27 +1960,25 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
   __ j(equal, &done, Label::kFar);
   __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
                  Heap::kWeakCellMapRootIndex);
-  __ j(not_equal, FLAG_pretenuring_call_new ? &miss : &check_allocation_site);
+  __ j(not_equal, &check_allocation_site);
 
   // If the weak cell is cleared, we have a new chance to become monomorphic.
   __ JumpIfSmi(FieldOperand(ecx, WeakCell::kValueOffset), &initialize);
   __ jmp(&megamorphic);
 
-  if (!FLAG_pretenuring_call_new) {
-    __ bind(&check_allocation_site);
-    // If we came here, we need to see if we are the array function.
-    // If we didn't have a matching function, and we didn't find the megamorph
-    // sentinel, then we have in the slot either some other function or an
-    // AllocationSite.
-    __ CompareRoot(FieldOperand(ecx, 0), Heap::kAllocationSiteMapRootIndex);
-    __ j(not_equal, &miss);
+  __ bind(&check_allocation_site);
+  // If we came here, we need to see if we are the array function.
+  // If we didn't have a matching function, and we didn't find the megamorph
+  // sentinel, then we have in the slot either some other function or an
+  // AllocationSite.
+  __ CompareRoot(FieldOperand(ecx, 0), Heap::kAllocationSiteMapRootIndex);
+  __ j(not_equal, &miss);
 
-    // Make sure the function is the Array() function
-    __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
-    __ cmp(edi, ecx);
-    __ j(not_equal, &megamorphic);
-    __ jmp(&done, Label::kFar);
-  }
+  // Make sure the function is the Array() function
+  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
+  __ cmp(edi, ecx);
+  __ j(not_equal, &megamorphic);
+  __ jmp(&done, Label::kFar);
 
   __ bind(&miss);
 
@@ -1999,24 +1997,21 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
   // An uninitialized cache is patched with the function or sentinel to
   // indicate the ElementsKind if function is the Array constructor.
   __ bind(&initialize);
-  if (!FLAG_pretenuring_call_new) {
-    // Make sure the function is the Array() function
-    __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
-    __ cmp(edi, ecx);
-    __ j(not_equal, &not_array_function);
-
-    // The target function is the Array constructor,
-    // Create an AllocationSite if we don't already have it, store it in the
-    // slot.
-    CreateAllocationSiteStub create_stub(isolate);
-    CallStubInRecordCallTarget(masm, &create_stub, is_super);
-    __ jmp(&done);
-
-    __ bind(&not_array_function);
-  }
+  // Make sure the function is the Array() function
+  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
+  __ cmp(edi, ecx);
+  __ j(not_equal, &not_array_function);
 
-  CreateWeakCellStub create_stub(isolate);
+  // The target function is the Array constructor,
+  // Create an AllocationSite if we don't already have it, store it in the
+  // slot.
+  CreateAllocationSiteStub create_stub(isolate);
   CallStubInRecordCallTarget(masm, &create_stub, is_super);
+  __ jmp(&done);
+
+  __ bind(&not_array_function);
+  CreateWeakCellStub weak_cell_stub(isolate);
+  CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super);
   __ bind(&done);
 }
 
@@ -2133,24 +2128,16 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
   if (RecordCallTarget()) {
     GenerateRecordCallTarget(masm, IsSuperConstructorCall());
 
-    if (FLAG_pretenuring_call_new) {
-      // Put the AllocationSite from the feedback vector into ebx.
-      // By adding kPointerSize we encode that we know the AllocationSite
-      // entry is at the feedback vector slot given by edx + 1.
-      __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
-                               FixedArray::kHeaderSize + kPointerSize));
-    } else {
-      Label feedback_register_initialized;
-      // Put the AllocationSite from the feedback vector into ebx, or undefined.
-      __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
-                               FixedArray::kHeaderSize));
-      Handle<Map> allocation_site_map =
-          isolate()->factory()->allocation_site_map();
-      __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
-      __ j(equal, &feedback_register_initialized);
-      __ mov(ebx, isolate()->factory()->undefined_value());
-      __ bind(&feedback_register_initialized);
-    }
+    Label feedback_register_initialized;
+    // Put the AllocationSite from the feedback vector into ebx, or undefined.
+    __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
+                             FixedArray::kHeaderSize));
+    Handle<Map> allocation_site_map =
+        isolate()->factory()->allocation_site_map();
+    __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
+    __ j(equal, &feedback_register_initialized);
+    __ mov(ebx, isolate()->factory()->undefined_value());
+    __ bind(&feedback_register_initialized);
 
     __ AssertUndefinedOrAllocationSite(ebx);
   }
index 2e6795b03db6dae9552e4d1aecd99953a7a37803..c78149fc975a1e69a370ae4b712359a68182bff1 100644 (file)
--- a/src/mips/builtins-mips.cc
+++ b/src/mips/builtins-mips.cc
@@ -328,8 +328,7 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
 
 
 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
-                                           bool is_api_function,
-                                           bool create_memento) {
+                                           bool is_api_function) {
   // ----------- S t a t e -------------
   //  -- a0     : number of arguments
   //  -- a1     : constructor function
@@ -339,9 +338,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
   //  -- sp[...]: constructor arguments
   // -----------------------------------
 
-  // Should never create mementos for api functions.
-  DCHECK(!is_api_function || !create_memento);
-
   Isolate* isolate = masm->isolate();
 
   // Enter a construct frame.
@@ -408,9 +404,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
       // a2: initial map
       Label rt_call_reload_new_target;
       __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
-      if (create_memento) {
-        __ Addu(a3, a3, Operand(AllocationMemento::kSize / kPointerSize));
-      }
 
       __ Allocate(a3, t4, t5, t6, &rt_call_reload_new_target, SIZE_IN_WORDS);
 
@@ -418,7 +411,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
       // initial map and properties and elements are set to empty fixed array.
       // a1: constructor function
       // a2: initial map
-      // a3: object size (including memento if create_memento)
+      // a3: object size
       // t4: JSObject (not tagged)
       __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
       __ mov(t5, t4);
@@ -433,7 +426,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
       // Fill all the in-object properties with appropriate filler.
       // a1: constructor function
       // a2: initial map
-      // a3: object size (in words, including memento if create_memento)
+      // a3: object size (in words)
       // t4: JSObject (not tagged)
       // t5: First in-object property of JSObject (not tagged)
       // t2: slack tracking counter (non-API function case)
@@ -473,29 +466,9 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
         __ bind(&no_inobject_slack_tracking);
       }
 
-      if (create_memento) {
-        __ Subu(a0, a3, Operand(AllocationMemento::kSize / kPointerSize));
-        __ sll(a0, a0, kPointerSizeLog2);
-        __ Addu(a0, t4, Operand(a0));  // End of object.
-        __ InitializeFieldsWithFiller(t5, a0, t7);
-
-        // Fill in memento fields.
-        // t5: points to the allocated but uninitialized memento.
-        __ LoadRoot(t7, Heap::kAllocationMementoMapRootIndex);
-        DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
-        __ sw(t7, MemOperand(t5));
-        __ Addu(t5, t5, kPointerSize);
-        // Load the AllocationSite.
-        __ lw(t7, MemOperand(sp, 3 * kPointerSize));
-        __ AssertUndefinedOrAllocationSite(a2, t0);
-        DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
-        __ sw(t7, MemOperand(t5));
-        __ Addu(t5, t5, kPointerSize);
-      } else {
-        __ sll(at, a3, kPointerSizeLog2);
-        __ Addu(a0, t4, Operand(at));  // End of object.
-        __ InitializeFieldsWithFiller(t5, a0, t7);
-      }
+      __ sll(at, a3, kPointerSizeLog2);
+      __ Addu(a0, t4, Operand(at));  // End of object.
+      __ InitializeFieldsWithFiller(t5, a0, t7);
 
       // Add the object tag to make the JSObject real, so that we can continue
       // and jump into the continuation code at any time from now on.
@@ -514,45 +487,15 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
     // a1: constructor function
     // a3: original constructor
     __ bind(&rt_call);
-    if (create_memento) {
-      // Get the cell or allocation site.
-      __ lw(a2, MemOperand(sp, 3 * kPointerSize));
-      __ push(a2);  // argument 1: allocation site
-    }
 
     __ Push(a1, a3);  // arguments 2-3 / 1-2
-    if (create_memento) {
-      __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
-    } else {
-      __ CallRuntime(Runtime::kNewObject, 2);
-    }
+    __ CallRuntime(Runtime::kNewObject, 2);
     __ mov(t4, v0);
 
-    // Runtime_NewObjectWithAllocationSite increments allocation count.
-    // Skip the increment.
-    Label count_incremented;
-    if (create_memento) {
-      __ jmp(&count_incremented);
-    }
-
     // Receiver for constructor call allocated.
     // t4: JSObject
     __ bind(&allocated);
 
-    if (create_memento) {
-      __ lw(a2, MemOperand(sp, 3 * kPointerSize));
-      __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
-      __ Branch(&count_incremented, eq, a2, Operand(t5));
-      // a2 is an AllocationSite. We are creating a memento from it, so we
-      // need to increment the memento create count.
-      __ lw(a3, FieldMemOperand(a2,
-                                AllocationSite::kPretenureCreateCountOffset));
-      __ Addu(a3, a3, Operand(Smi::FromInt(1)));
-      __ sw(a3, FieldMemOperand(a2,
-                                AllocationSite::kPretenureCreateCountOffset));
-      __ bind(&count_incremented);
-    }
-
     // Restore the parameters.
     __ Pop(a3);  // new.target
     __ Pop(a1);
@@ -651,12 +594,12 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
 
 
 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
-  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
+  Generate_JSConstructStubHelper(masm, false);
 }
 
 
 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
-  Generate_JSConstructStubHelper(masm, true, false);
+  Generate_JSConstructStubHelper(masm, true);
 }
 
 
index 76a577c79cefcb107b23848dadcc661da4ef270a..3fd4842482d7288539e855b10d5f9ce771f3f813 100644 (file)
--- a/src/mips/code-stubs-mips.cc
+++ b/src/mips/code-stubs-mips.cc
@@ -2446,27 +2446,24 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
   __ Branch(&done, eq, t2, Operand(at));
   __ lw(feedback_map, FieldMemOperand(t2, HeapObject::kMapOffset));
   __ LoadRoot(at, Heap::kWeakCellMapRootIndex);
-  __ Branch(FLAG_pretenuring_call_new ? &miss : &check_allocation_site, ne,
-            feedback_map, Operand(at));
+  __ Branch(&check_allocation_site, ne, feedback_map, Operand(at));
 
   // If the weak cell is cleared, we have a new chance to become monomorphic.
   __ JumpIfSmi(weak_value, &initialize);
   __ jmp(&megamorphic);
 
-  if (!FLAG_pretenuring_call_new) {
-    __ bind(&check_allocation_site);
-    // If we came here, we need to see if we are the array function.
-    // If we didn't have a matching function, and we didn't find the megamorph
-    // sentinel, then we have in the slot either some other function or an
-    // AllocationSite.
-    __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
-    __ Branch(&miss, ne, feedback_map, Operand(at));
+  __ bind(&check_allocation_site);
+  // If we came here, we need to see if we are the array function.
+  // If we didn't have a matching function, and we didn't find the megamorph
+  // sentinel, then we have in the slot either some other function or an
+  // AllocationSite.
+  __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
+  __ Branch(&miss, ne, feedback_map, Operand(at));
 
-    // Make sure the function is the Array() function
-    __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t2);
-    __ Branch(&megamorphic, ne, a1, Operand(t2));
-    __ jmp(&done);
-  }
+  // Make sure the function is the Array() function
+  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t2);
+  __ Branch(&megamorphic, ne, a1, Operand(t2));
+  __ jmp(&done);
 
   __ bind(&miss);
 
@@ -2485,23 +2482,20 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
 
   // An uninitialized cache is patched with the function.
   __ bind(&initialize);
-  if (!FLAG_pretenuring_call_new) {
-    // Make sure the function is the Array() function.
-    __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t2);
-    __ Branch(&not_array_function, ne, a1, Operand(t2));
-
-    // The target function is the Array constructor,
-    // Create an AllocationSite if we don't already have it, store it in the
-    // slot.
-    CreateAllocationSiteStub create_stub(masm->isolate());
-    CallStubInRecordCallTarget(masm, &create_stub, is_super);
-    __ Branch(&done);
-
-    __ bind(&not_array_function);
-  }
-
-  CreateWeakCellStub create_stub(masm->isolate());
+  // Make sure the function is the Array() function.
+  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t2);
+  __ Branch(&not_array_function, ne, a1, Operand(t2));
+
+  // The target function is the Array constructor,
+  // Create an AllocationSite if we don't already have it, store it in the
+  // slot.
+  CreateAllocationSiteStub create_stub(masm->isolate());
   CallStubInRecordCallTarget(masm, &create_stub, is_super);
+  __ Branch(&done);
+
+  __ bind(&not_array_function);
+  CreateWeakCellStub weak_cell_stub(masm->isolate());
+  CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super);
   __ bind(&done);
 }
 
@@ -2619,21 +2613,14 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
 
     __ sll(at, a3, kPointerSizeLog2 - kSmiTagSize);
     __ Addu(t1, a2, at);
-    if (FLAG_pretenuring_call_new) {
-      // Put the AllocationSite from the feedback vector into a2.
-      // By adding kPointerSize we encode that we know the AllocationSite
-      // entry is at the feedback vector slot given by a3 + 1.
-      __ lw(a2, FieldMemOperand(t1, FixedArray::kHeaderSize + kPointerSize));
-    } else {
-      Label feedback_register_initialized;
-      // Put the AllocationSite from the feedback vector into a2, or undefined.
-      __ lw(a2, FieldMemOperand(t1, FixedArray::kHeaderSize));
-      __ lw(t1, FieldMemOperand(a2, AllocationSite::kMapOffset));
-      __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
-      __ Branch(&feedback_register_initialized, eq, t1, Operand(at));
-      __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
-      __ bind(&feedback_register_initialized);
-    }
+    Label feedback_register_initialized;
+    // Put the AllocationSite from the feedback vector into a2, or undefined.
+    __ lw(a2, FieldMemOperand(t1, FixedArray::kHeaderSize));
+    __ lw(t1, FieldMemOperand(a2, AllocationSite::kMapOffset));
+    __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
+    __ Branch(&feedback_register_initialized, eq, t1, Operand(at));
+    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
+    __ bind(&feedback_register_initialized);
 
     __ AssertUndefinedOrAllocationSite(a2, t1);
   }
index 99e38f6f1ea4d86b0341168ce619bb07a23d1b62..e68f0fd2650b01b7109e195dec843b9948955e52 100644 (file)
--- a/src/mips64/builtins-mips64.cc
+++ b/src/mips64/builtins-mips64.cc
@@ -326,8 +326,7 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
 
 
 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
-                                           bool is_api_function,
-                                           bool create_memento) {
+                                           bool is_api_function) {
   // ----------- S t a t e -------------
   //  -- a0     : number of arguments
   //  -- a1     : constructor function
@@ -337,9 +336,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
   //  -- sp[...]: constructor arguments
   // -----------------------------------
 
-  // Should never create mementos for api functions.
-  DCHECK(!is_api_function || !create_memento);
-
   Isolate* isolate = masm->isolate();
 
   // Enter a construct frame.
@@ -407,9 +403,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
       // a2: initial map
       Label rt_call_reload_new_target;
       __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
-      if (create_memento) {
-        __ Daddu(a3, a3, Operand(AllocationMemento::kSize / kPointerSize));
-      }
 
       __ Allocate(a3, t0, t1, t2, &rt_call_reload_new_target, SIZE_IN_WORDS);
 
@@ -417,7 +410,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
       // initial map and properties and elements are set to empty fixed array.
       // a1: constructor function
       // a2: initial map
-      // a3: object size (including memento if create_memento)
+      // a3: object size
       // t0: JSObject (not tagged)
       __ LoadRoot(t2, Heap::kEmptyFixedArrayRootIndex);
       __ mov(t1, t0);
@@ -432,7 +425,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
       // Fill all the in-object properties with appropriate filler.
       // a1: constructor function
       // a2: initial map
-      // a3: object size (in words, including memento if create_memento)
+      // a3: object size (in words)
       // t0: JSObject (not tagged)
       // t1: First in-object property of JSObject (not tagged)
       // a6: slack tracking counter (non-API function case)
@@ -472,29 +465,9 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
         __ bind(&no_inobject_slack_tracking);
       }
 
-      if (create_memento) {
-        __ Dsubu(a0, a3, Operand(AllocationMemento::kSize / kPointerSize));
-        __ dsll(a0, a0, kPointerSizeLog2);
-        __ Daddu(a0, t0, Operand(a0));  // End of object.
-        __ InitializeFieldsWithFiller(t1, a0, t3);
-
-        // Fill in memento fields.
-        // t1: points to the allocated but uninitialized memento.
-        __ LoadRoot(t3, Heap::kAllocationMementoMapRootIndex);
-        DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
-        __ sd(t3, MemOperand(t1));
-        __ Daddu(t1, t1, kPointerSize);
-        // Load the AllocationSite.
-        __ ld(t3, MemOperand(sp, 3 * kPointerSize));
-        __ AssertUndefinedOrAllocationSite(t3, a0);
-        DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
-        __ sd(t3, MemOperand(t1));
-        __ Daddu(t1, t1, kPointerSize);
-      } else {
-        __ dsll(at, a3, kPointerSizeLog2);
-        __ Daddu(a0, t0, Operand(at));  // End of object.
-        __ InitializeFieldsWithFiller(t1, a0, t3);
-      }
+      __ dsll(at, a3, kPointerSizeLog2);
+      __ Daddu(a0, t0, Operand(at));  // End of object.
+      __ InitializeFieldsWithFiller(t1, a0, t3);
 
       // Add the object tag to make the JSObject real, so that we can continue
       // and jump into the continuation code at any time from now on.
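
For context on what the deleted branch above used to emit: with create_memento the stub over-allocated by AllocationMemento::kSize and wrote a two-word memento (map, then allocation site, matching the DCHECKed offsets in the removed lines) directly behind the freshly allocated object. A layout sketch, illustrative only:

#include <cstdint>

// Illustrative layout model; offsets mirror the DCHECKs in the removed code:
// kMapOffset == 0 * kPointerSize, kAllocationSiteOffset == 1 * kPointerSize.
struct TaggedWord { uintptr_t bits; };

struct AllocationMementoModel {
  TaggedWord map;              // the allocation_memento_map root
  TaggedWord allocation_site;  // the AllocationSite taken from the feedback slot
};

// With --pretenure-call-new, a `new F()` allocation in new space looked like
//   [ JSObject: instance_size bytes ][ AllocationMementoModel ]
// and the GC read the trailing memento to gather pretenuring feedback.
// After this patch only the plain JSObject is allocated and filled.
static_assert(sizeof(AllocationMementoModel) == 2 * sizeof(uintptr_t),
              "a memento is exactly two tagged words");

int main() {}  // layout-only sketch; nothing to run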
@@ -513,45 +486,15 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
     // a1: constructor function
     // a3: original constructor
     __ bind(&rt_call);
-    if (create_memento) {
-      // Get the cell or allocation site.
-      __ ld(a2, MemOperand(sp, 3 * kPointerSize));
-      __ push(a2);  // argument 1: allocation site
-    }
 
     __ Push(a1, a3);  // arguments 2-3 / 1-2
-    if (create_memento) {
-      __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
-    } else {
-      __ CallRuntime(Runtime::kNewObject, 2);
-    }
+    __ CallRuntime(Runtime::kNewObject, 2);
     __ mov(t0, v0);
 
-    // Runtime_NewObjectWithAllocationSite increments allocation count.
-    // Skip the increment.
-    Label count_incremented;
-    if (create_memento) {
-      __ jmp(&count_incremented);
-    }
-
     // Receiver for constructor call allocated.
     // t0: JSObject
     __ bind(&allocated);
 
-    if (create_memento) {
-      __ ld(a2, MemOperand(sp, 3 * kPointerSize));
-      __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
-      __ Branch(&count_incremented, eq, a2, Operand(t1));
-      // a2 is an AllocationSite. We are creating a memento from it, so we
-      // need to increment the memento create count.
-      __ ld(a3, FieldMemOperand(a2,
-                                AllocationSite::kPretenureCreateCountOffset));
-      __ Daddu(a3, a3, Operand(Smi::FromInt(1)));
-      __ sd(a3, FieldMemOperand(a2,
-                                AllocationSite::kPretenureCreateCountOffset));
-      __ bind(&count_incremented);
-    }
-
     // Restore the parameters.
     __ Pop(a3);  // new.target
     __ Pop(a1);
@@ -649,12 +592,12 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
 
 
 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
-  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
+  Generate_JSConstructStubHelper(masm, false);
 }
 
 
 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
-  Generate_JSConstructStubHelper(masm, true, false);
+  Generate_JSConstructStubHelper(masm, true);
 }
 
 
index 187f91259aabdc3ad987a8028a62d9278d2b9aa6..cb7e22018a1f1bfba5a80afe9054b02a1ad3ae2f 100644 (file)
@@ -2480,27 +2480,24 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
   __ Branch(&done, eq, a5, Operand(at));
   __ ld(feedback_map, FieldMemOperand(a5, HeapObject::kMapOffset));
   __ LoadRoot(at, Heap::kWeakCellMapRootIndex);
-  __ Branch(FLAG_pretenuring_call_new ? &miss : &check_allocation_site, ne,
-            feedback_map, Operand(at));
+  __ Branch(&check_allocation_site, ne, feedback_map, Operand(at));
 
   // If the weak cell is cleared, we have a new chance to become monomorphic.
   __ JumpIfSmi(weak_value, &initialize);
   __ jmp(&megamorphic);
 
-  if (!FLAG_pretenuring_call_new) {
-    __ bind(&check_allocation_site);
-    // If we came here, we need to see if we are the array function.
-    // If we didn't have a matching function, and we didn't find the megamorph
-    // sentinel, then we have in the slot either some other function or an
-    // AllocationSite.
-    __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
-    __ Branch(&miss, ne, feedback_map, Operand(at));
+  __ bind(&check_allocation_site);
+  // If we came here, we need to see if we are the array function.
+  // If we didn't have a matching function, and we didn't find the megamorph
+  // sentinel, then we have in the slot either some other function or an
+  // AllocationSite.
+  __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
+  __ Branch(&miss, ne, feedback_map, Operand(at));
 
-    // Make sure the function is the Array() function
-    __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, a5);
-    __ Branch(&megamorphic, ne, a1, Operand(a5));
-    __ jmp(&done);
-  }
+  // Make sure the function is the Array() function
+  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, a5);
+  __ Branch(&megamorphic, ne, a1, Operand(a5));
+  __ jmp(&done);
 
   __ bind(&miss);
 
@@ -2519,23 +2516,21 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
 
   // An uninitialized cache is patched with the function.
   __ bind(&initialize);
-  if (!FLAG_pretenuring_call_new) {
-    // Make sure the function is the Array() function.
-    __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, a5);
-    __ Branch(&not_array_function, ne, a1, Operand(a5));
-
-    // The target function is the Array constructor,
-    // Create an AllocationSite if we don't already have it, store it in the
-    // slot.
-    CreateAllocationSiteStub create_stub(masm->isolate());
-    CallStubInRecordCallTarget(masm, &create_stub, is_super);
-    __ Branch(&done);
+  // Make sure the function is the Array() function.
+  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, a5);
+  __ Branch(&not_array_function, ne, a1, Operand(a5));
+
+  // The target function is the Array constructor,
+  // Create an AllocationSite if we don't already have it, store it in the
+  // slot.
+  CreateAllocationSiteStub create_stub(masm->isolate());
+  CallStubInRecordCallTarget(masm, &create_stub, is_super);
+  __ Branch(&done);
 
-    __ bind(&not_array_function);
-  }
+  __ bind(&not_array_function);
 
-  CreateWeakCellStub create_stub(masm->isolate());
-  CallStubInRecordCallTarget(masm, &create_stub, is_super);
+  CreateWeakCellStub weak_cell_stub(masm->isolate());
+  CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super);
   __ bind(&done);
 }
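
With the flag gone, GenerateRecordCallTarget has the same single shape on every architecture: an uninitialized slot is filled with an AllocationSite when the target is the Array() function and with a weak cell for any other function, and a later mismatch degrades the slot to the megamorphic sentinel. A standalone model of those transitions (the enum is illustrative; the real slot holds tagged heap objects):

#include <cassert>

// Illustrative states: the real slot holds a WeakCell, an AllocationSite,
// or one of two sentinels.
enum class Feedback { kUninitialized, kMonomorphic, kAllocationSite, kMegamorphic };

// Models recording a constructor call target in a feedback slot. The Array()
// constructor gets an AllocationSite, every other target a (weak) function
// cell, and any mismatch afterwards goes megamorphic.
Feedback RecordCallTarget(Feedback current, bool target_is_array_function,
                          bool target_matches_previous) {
  switch (current) {
    case Feedback::kUninitialized:
      return target_is_array_function ? Feedback::kAllocationSite
                                      : Feedback::kMonomorphic;
    case Feedback::kMonomorphic:
    case Feedback::kAllocationSite:
      // A different function (or a non-Array target hitting an AllocationSite
      // slot) falls through to the megamorphic sentinel.
      return target_matches_previous ? current : Feedback::kMegamorphic;
    case Feedback::kMegamorphic:
      return Feedback::kMegamorphic;
  }
  return Feedback::kMegamorphic;
}

int main() {
  Feedback slot = Feedback::kUninitialized;
  slot = RecordCallTarget(slot, /*target_is_array_function=*/true,
                          /*target_matches_previous=*/false);
  assert(slot == Feedback::kAllocationSite);
  slot = RecordCallTarget(slot, /*target_is_array_function=*/false,
                          /*target_matches_previous=*/false);
  assert(slot == Feedback::kMegamorphic);
}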
 
@@ -2655,21 +2650,14 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
 
     __ dsrl(at, a3, 32 - kPointerSizeLog2);
     __ Daddu(a5, a2, at);
-    if (FLAG_pretenuring_call_new) {
-      // Put the AllocationSite from the feedback vector into a2.
-      // By adding kPointerSize we encode that we know the AllocationSite
-      // entry is at the feedback vector slot given by a3 + 1.
-      __ ld(a2, FieldMemOperand(a5, FixedArray::kHeaderSize + kPointerSize));
-    } else {
-      Label feedback_register_initialized;
-      // Put the AllocationSite from the feedback vector into a2, or undefined.
-      __ ld(a2, FieldMemOperand(a5, FixedArray::kHeaderSize));
-      __ ld(a5, FieldMemOperand(a2, AllocationSite::kMapOffset));
-      __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
-      __ Branch(&feedback_register_initialized, eq, a5, Operand(at));
-      __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
-      __ bind(&feedback_register_initialized);
-    }
+    Label feedback_register_initialized;
+    // Put the AllocationSite from the feedback vector into a2, or undefined.
+    __ ld(a2, FieldMemOperand(a5, FixedArray::kHeaderSize));
+    __ ld(a5, FieldMemOperand(a2, AllocationSite::kMapOffset));
+    __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
+    __ Branch(&feedback_register_initialized, eq, a5, Operand(at));
+    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
+    __ bind(&feedback_register_initialized);
 
     __ AssertUndefinedOrAllocationSite(a2, a5);
   }
index 979020869a4dc27e4fb31db2973e089b96c200bd..bc866fd2d94b6ae1422a123d64d6512202c464d9 100644 (file)
@@ -1749,8 +1749,7 @@ bool AllocationSite::SitePointsToLiteral() {
 // elements kind is the initial elements kind.
 AllocationSiteMode AllocationSite::GetMode(
     ElementsKind boilerplate_elements_kind) {
-  if (FLAG_pretenuring_call_new ||
-      IsFastSmiElementsKind(boilerplate_elements_kind)) {
+  if (IsFastSmiElementsKind(boilerplate_elements_kind)) {
     return TRACK_ALLOCATION_SITE;
   }
 
@@ -1760,9 +1759,8 @@ AllocationSiteMode AllocationSite::GetMode(
 
 AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
                                            ElementsKind to) {
-  if (FLAG_pretenuring_call_new ||
-      (IsFastSmiElementsKind(from) &&
-       IsMoreGeneralElementsKindTransition(from, to))) {
+  if (IsFastSmiElementsKind(from) &&
+      IsMoreGeneralElementsKindTransition(from, to)) {
     return TRACK_ALLOCATION_SITE;
   }
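
After the change, literal allocation-site tracking depends only on the elements kind: a site is kept while the boilerplate still has fast SMI elements, so a later generalization of the elements kind can still be learned; otherwise tracking is skipped. A compilable sketch of that decision with pared-down enums (names are illustrative, not V8's):

#include <cassert>

// Pared-down stand-ins; only what the decision needs.
enum class ElementsKindModel { kFastSmi, kFastDouble, kFastObject };
enum class SiteModeModel { kDontTrack, kTrack };

bool IsFastSmi(ElementsKindModel kind) {
  return kind == ElementsKindModel::kFastSmi;
}

// Mirrors GetMode(boilerplate_elements_kind) after the patch: only
// SMI-elements boilerplates keep an allocation site.
SiteModeModel GetMode(ElementsKindModel boilerplate) {
  return IsFastSmi(boilerplate) ? SiteModeModel::kTrack
                                : SiteModeModel::kDontTrack;
}

// Mirrors the two-argument overload: track only a genuine generalization
// away from SMI elements (e.g. SMI -> double or SMI -> object).
SiteModeModel GetMode(ElementsKindModel from, ElementsKindModel to) {
  bool more_general = IsFastSmi(from) && to != ElementsKindModel::kFastSmi;
  return more_general ? SiteModeModel::kTrack : SiteModeModel::kDontTrack;
}

int main() {
  assert(GetMode(ElementsKindModel::kFastSmi) == SiteModeModel::kTrack);
  assert(GetMode(ElementsKindModel::kFastDouble) == SiteModeModel::kDontTrack);
  assert(GetMode(ElementsKindModel::kFastSmi, ElementsKindModel::kFastDouble) ==
         SiteModeModel::kTrack);
  assert(GetMode(ElementsKindModel::kFastObject, ElementsKindModel::kFastSmi) ==
         SiteModeModel::kDontTrack);
}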
 
index 0c6f81306ee7981c98a5d4150aa0e3d8879105e7..fda51f6f9f94eca673ef6df6f5f77edc0ef51769 100644 (file)
@@ -1117,22 +1117,6 @@ RUNTIME_FUNCTION(Runtime_NewObject) {
 }
 
 
-RUNTIME_FUNCTION(Runtime_NewObjectWithAllocationSite) {
-  HandleScope scope(isolate);
-  DCHECK(args.length() == 3);
-  CONVERT_ARG_HANDLE_CHECKED(Object, original_constructor, 2);
-  CONVERT_ARG_HANDLE_CHECKED(Object, constructor, 1);
-  CONVERT_ARG_HANDLE_CHECKED(Object, feedback, 0);
-  Handle<AllocationSite> site;
-  if (feedback->IsAllocationSite()) {
-    // The feedback can be an AllocationSite or undefined.
-    site = Handle<AllocationSite>::cast(feedback);
-  }
-  return Runtime_NewObjectHelper(isolate, constructor, original_constructor,
-                                 site);
-}
-
-
 RUNTIME_FUNCTION(Runtime_FinalizeInstanceSize) {
   HandleScope scope(isolate);
   DCHECK(args.length() == 1);
index 78e3d92aaee42e7c2d0f931f1c2d018f8c9576d4..257fc93cfca65ef70a23004d40833abc7818081f 100644 (file)
@@ -450,7 +450,6 @@ namespace internal {
   F(ToFastProperties, 1, 1)                          \
   F(AllocateHeapNumber, 0, 1)                        \
   F(NewObject, 2, 1)                                 \
-  F(NewObjectWithAllocationSite, 3, 1)               \
   F(FinalizeInstanceSize, 1, 1)                      \
   F(GlobalProxy, 1, 1)                               \
   F(LookupAccessor, 3, 1)                            \
index f6ad3529230441d25b657b8b42d9d5c0e1171651..03e0f34b11bae4c04ffbb29ba5579f6a0cfccc6d 100644 (file)
@@ -162,9 +162,7 @@ bool TypeFeedbackOracle::CallIsMonomorphic(FeedbackVectorICSlot slot) {
 
 bool TypeFeedbackOracle::CallNewIsMonomorphic(FeedbackVectorSlot slot) {
   Handle<Object> info = GetInfo(slot);
-  return FLAG_pretenuring_call_new
-      ? info->IsJSFunction()
-      : info->IsAllocationSite() || info->IsJSFunction();
+  return info->IsAllocationSite() || info->IsJSFunction();
 }
 
 
@@ -224,7 +222,7 @@ Handle<JSFunction> TypeFeedbackOracle::GetCallTarget(
 Handle<JSFunction> TypeFeedbackOracle::GetCallNewTarget(
     FeedbackVectorSlot slot) {
   Handle<Object> info = GetInfo(slot);
-  if (FLAG_pretenuring_call_new || info->IsJSFunction()) {
+  if (info->IsJSFunction()) {
     return Handle<JSFunction>::cast(info);
   }
 
@@ -246,7 +244,7 @@ Handle<AllocationSite> TypeFeedbackOracle::GetCallAllocationSite(
 Handle<AllocationSite> TypeFeedbackOracle::GetCallNewAllocationSite(
     FeedbackVectorSlot slot) {
   Handle<Object> info = GetInfo(slot);
-  if (FLAG_pretenuring_call_new || info->IsAllocationSite()) {
+  if (info->IsAllocationSite()) {
     return Handle<AllocationSite>::cast(info);
   }
   return Handle<AllocationSite>::null();
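
On the oracle side the rule is now uniform: a CallNew slot counts as monomorphic whenever it holds either a JSFunction or an AllocationSite, and the allocation site is handed out only when the slot really contains one. A small variant-based model of those two accessors (names are illustrative stand-ins, not V8's types):

#include <cassert>
#include <variant>

// Illustrative stand-ins for what a CallNew feedback slot can hold.
struct JSFunctionModel {};
struct AllocationSiteModel {};
struct UndefinedModel {};
using Feedback = std::variant<UndefinedModel, JSFunctionModel, AllocationSiteModel>;

// CallNewIsMonomorphic after the patch: either kind of feedback counts.
bool CallNewIsMonomorphic(const Feedback& info) {
  return std::holds_alternative<AllocationSiteModel>(info) ||
         std::holds_alternative<JSFunctionModel>(info);
}

// GetCallNewAllocationSite after the patch: a site only when the slot really
// holds one, otherwise "null".
const AllocationSiteModel* GetCallNewAllocationSite(const Feedback& info) {
  return std::get_if<AllocationSiteModel>(&info);
}

int main() {
  Feedback fn = JSFunctionModel{};
  Feedback site = AllocationSiteModel{};
  Feedback none = UndefinedModel{};
  assert(CallNewIsMonomorphic(fn) && CallNewIsMonomorphic(site));
  assert(!CallNewIsMonomorphic(none));
  assert(GetCallNewAllocationSite(site) != nullptr);
  assert(GetCallNewAllocationSite(fn) == nullptr);
}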
index db5ca61a8e71b217dcbb8bce2d2894def51b20bd..c390956e19dcc227d80324a042f582d4f726b884 100644 (file)
@@ -555,8 +555,7 @@ void AstTyper::VisitCall(Call* expr) {
 void AstTyper::VisitCallNew(CallNew* expr) {
   // Collect type feedback.
   FeedbackVectorSlot allocation_site_feedback_slot =
-      FLAG_pretenuring_call_new ? expr->AllocationSiteFeedbackSlot()
-                                : expr->CallNewFeedbackSlot();
+      expr->CallNewFeedbackSlot();
   expr->set_allocation_site(
       oracle()->GetCallNewAllocationSite(allocation_site_feedback_slot));
   bool monomorphic =
index 630eda61b31c9b256c9294b9f0938ee03742f2f9..f04316c3b2098f3f6148d7a143b00883b3cbc8b4 100644 (file)
@@ -98,8 +98,7 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
 
 
 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
-                                           bool is_api_function,
-                                           bool create_memento) {
+                                           bool is_api_function) {
   // ----------- S t a t e -------------
   //  -- rax: number of arguments
   //  -- rdi: constructor function
@@ -107,9 +106,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
   //  -- rdx: original constructor
   // -----------------------------------
 
-  // Should never create mementos for api functions.
-  DCHECK(!is_api_function || !create_memento);
-
   // Enter a construct frame.
   {
     FrameScope scope(masm, StackFrame::CONSTRUCT);
@@ -189,9 +185,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
       // Now allocate the JSObject on the heap.
       __ movzxbp(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
       __ shlp(rdi, Immediate(kPointerSizeLog2));
-      if (create_memento) {
-        __ addp(rdi, Immediate(AllocationMemento::kSize));
-      }
       // rdi: size of new object
       __ Allocate(rdi,
                   rbx,
@@ -199,11 +192,10 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                   no_reg,
                   &rt_call,
                   NO_ALLOCATION_FLAGS);
-      Factory* factory = masm->isolate()->factory();
       // Allocated the JSObject, now initialize the fields.
       // rax: initial map
       // rbx: JSObject (not HeapObject tagged - the actual address).
-      // rdi: start of next object (including memento if create_memento)
+      // rdi: start of next object
       __ movp(Operand(rbx, JSObject::kMapOffset), rax);
       __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
       __ movp(Operand(rbx, JSObject::kPropertiesOffset), rcx);
@@ -211,7 +203,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
       // Set extra fields in the newly allocated object.
       // rax: initial map
       // rbx: JSObject
-      // rdi: start of next object (including memento if create_memento)
+      // rdi: start of next object
       // rsi: slack tracking counter (non-API function case)
       __ leap(rcx, Operand(rbx, JSObject::kHeaderSize));
       __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
@@ -243,21 +235,8 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
 
         __ bind(&no_inobject_slack_tracking);
       }
-      if (create_memento) {
-        __ leap(rsi, Operand(rdi, -AllocationMemento::kSize));
-        __ InitializeFieldsWithFiller(rcx, rsi, rdx);
 
-        // Fill in memento fields if necessary.
-        // rsi: points to the allocated but uninitialized memento.
-        __ Move(Operand(rsi, AllocationMemento::kMapOffset),
-                factory->allocation_memento_map());
-        // Get the cell or undefined.
-        __ movp(rdx, Operand(rsp, 3 * kPointerSize));
-        __ AssertUndefinedOrAllocationSite(rdx);
-        __ movp(Operand(rsi, AllocationMemento::kAllocationSiteOffset), rdx);
-      } else {
-        __ InitializeFieldsWithFiller(rcx, rdi, rdx);
-      }
+      __ InitializeFieldsWithFiller(rcx, rdi, rdx);
 
       // Add the object tag to make the JSObject real, so that we can continue
       // and jump into the continuation code at any time from now on.
@@ -273,48 +252,19 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
     // rdx: original constructor
     __ bind(&rt_call);
     int offset = kPointerSize;
-    if (create_memento) {
-      // Get the cell or allocation site.
-      __ movp(rdi, Operand(rsp, kPointerSize * 3));
-      __ Push(rdi);  // argument 1: allocation site
-      offset += kPointerSize;
-    }
 
     // Must restore rsi (context) and rdi (constructor) before calling runtime.
     __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
     __ movp(rdi, Operand(rsp, offset));
     __ Push(rdi);  // argument 2/1: constructor function
     __ Push(rdx);  // argument 3/2: original constructor
-    if (create_memento) {
-      __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
-    } else {
-      __ CallRuntime(Runtime::kNewObject, 2);
-    }
+    __ CallRuntime(Runtime::kNewObject, 2);
     __ movp(rbx, rax);  // store result in rbx
 
-    // Runtime_NewObjectWithAllocationSite increments allocation count.
-    // Skip the increment.
-    Label count_incremented;
-    if (create_memento) {
-      __ jmp(&count_incremented);
-    }
-
     // New object allocated.
     // rbx: newly allocated object
     __ bind(&allocated);
 
-    if (create_memento) {
-      __ movp(rcx, Operand(rsp, 3 * kPointerSize));
-      __ Cmp(rcx, masm->isolate()->factory()->undefined_value());
-      __ j(equal, &count_incremented);
-      // rcx is an AllocationSite. We are creating a memento from it, so we
-      // need to increment the memento create count.
-      __ SmiAddConstant(
-          FieldOperand(rcx, AllocationSite::kPretenureCreateCountOffset),
-          Smi::FromInt(1));
-      __ bind(&count_incremented);
-    }
-
     // Restore the parameters.
     __ Pop(rdx);
     __ Pop(rdi);
@@ -403,12 +353,12 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
 
 
 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
-  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
+  Generate_JSConstructStubHelper(masm, false);
 }
 
 
 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
-  Generate_JSConstructStubHelper(masm, true, false);
+  Generate_JSConstructStubHelper(masm, true);
 }
 
 
index 7000e1ddc2f4df9ab918e3e2c5e24fcb5e42d21c..c4df5b7f19c53041513df868a1d8aed7777be042 100644 (file)
@@ -1818,28 +1818,26 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
   __ j(equal, &done, Label::kFar);
   __ CompareRoot(FieldOperand(r11, HeapObject::kMapOffset),
                  Heap::kWeakCellMapRootIndex);
-  __ j(not_equal, FLAG_pretenuring_call_new ? &miss : &check_allocation_site);
+  __ j(not_equal, &check_allocation_site);
 
   // If the weak cell is cleared, we have a new chance to become monomorphic.
   __ CheckSmi(FieldOperand(r11, WeakCell::kValueOffset));
   __ j(equal, &initialize);
   __ jmp(&megamorphic);
 
-  if (!FLAG_pretenuring_call_new) {
-    __ bind(&check_allocation_site);
-    // If we came here, we need to see if we are the array function.
-    // If we didn't have a matching function, and we didn't find the megamorph
-    // sentinel, then we have in the slot either some other function or an
-    // AllocationSite.
-    __ CompareRoot(FieldOperand(r11, 0), Heap::kAllocationSiteMapRootIndex);
-    __ j(not_equal, &miss);
+  __ bind(&check_allocation_site);
+  // If we came here, we need to see if we are the array function.
+  // If we didn't have a matching function, and we didn't find the megamorph
+  // sentinel, then we have in the slot either some other function or an
+  // AllocationSite.
+  __ CompareRoot(FieldOperand(r11, 0), Heap::kAllocationSiteMapRootIndex);
+  __ j(not_equal, &miss);
 
-    // Make sure the function is the Array() function
-    __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r11);
-    __ cmpp(rdi, r11);
-    __ j(not_equal, &megamorphic);
-    __ jmp(&done);
-  }
+  // Make sure the function is the Array() function
+  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r11);
+  __ cmpp(rdi, r11);
+  __ j(not_equal, &megamorphic);
+  __ jmp(&done);
 
   __ bind(&miss);
 
@@ -1858,23 +1856,20 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
   // indicate the ElementsKind if function is the Array constructor.
   __ bind(&initialize);
 
-  if (!FLAG_pretenuring_call_new) {
-    // Make sure the function is the Array() function
-    __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r11);
-    __ cmpp(rdi, r11);
-    __ j(not_equal, &not_array_function);
-
-    CreateAllocationSiteStub create_stub(isolate);
-    CallStubInRecordCallTarget(masm, &create_stub, is_super);
-    __ jmp(&done_no_smi_convert);
-
-    __ bind(&not_array_function);
-  }
+  // Make sure the function is the Array() function
+  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r11);
+  __ cmpp(rdi, r11);
+  __ j(not_equal, &not_array_function);
 
-  CreateWeakCellStub create_stub(isolate);
+  CreateAllocationSiteStub create_stub(isolate);
   CallStubInRecordCallTarget(masm, &create_stub, is_super);
   __ jmp(&done_no_smi_convert);
 
+  __ bind(&not_array_function);
+  CreateWeakCellStub weak_cell_stub(isolate);
+  CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super);
+  __ jmp(&done_no_smi_convert);
+
   __ bind(&done);
   __ Integer32ToSmi(rdx, rdx);
 
@@ -1998,22 +1993,14 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
     GenerateRecordCallTarget(masm, IsSuperConstructorCall());
 
     __ SmiToInteger32(rdx, rdx);
-    if (FLAG_pretenuring_call_new) {
-      // Put the AllocationSite from the feedback vector into ebx.
-      // By adding kPointerSize we encode that we know the AllocationSite
-      // entry is at the feedback vector slot given by rdx + 1.
-      __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
-                                FixedArray::kHeaderSize + kPointerSize));
-    } else {
-      Label feedback_register_initialized;
-      // Put the AllocationSite from the feedback vector into rbx, or undefined.
-      __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
-                                FixedArray::kHeaderSize));
-      __ CompareRoot(FieldOperand(rbx, 0), Heap::kAllocationSiteMapRootIndex);
-      __ j(equal, &feedback_register_initialized);
-      __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
-      __ bind(&feedback_register_initialized);
-    }
+    Label feedback_register_initialized;
+    // Put the AllocationSite from the feedback vector into rbx, or undefined.
+    __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
+                              FixedArray::kHeaderSize));
+    __ CompareRoot(FieldOperand(rbx, 0), Heap::kAllocationSiteMapRootIndex);
+    __ j(equal, &feedback_register_initialized);
+    __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
+    __ bind(&feedback_register_initialized);
 
     __ AssertUndefinedOrAllocationSite(rbx);
   }
index fd85b023be6dff3e7863817fde736fbfaffbfbcd..4c286f2c1c14e38eb22694ac933850ab7d27099c 100644 (file)
@@ -3312,106 +3312,6 @@ TEST(OptimizedPretenuringNestedDoubleLiterals) {
 }
 
 
-// Make sure pretenuring feedback is gathered for constructed objects as well
-// as for literals.
-TEST(OptimizedPretenuringConstructorCalls) {
-  if (!i::FLAG_pretenuring_call_new) {
-    // FLAG_pretenuring_call_new needs to be synced with the snapshot.
-    return;
-  }
-  i::FLAG_allow_natives_syntax = true;
-  i::FLAG_expose_gc = true;
-  CcTest::InitializeVM();
-  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
-  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
-  v8::HandleScope scope(CcTest::isolate());
-
-  // Grow new space unitl maximum capacity reached.
-  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
-    CcTest::heap()->new_space()->Grow();
-  }
-
-  i::ScopedVector<char> source(1024);
-  // Call new is doing slack tracking for the first
-  // JSFunction::kGenerousAllocationCount allocations, and we can't find
-  // mementos during that time.
-  i::SNPrintF(
-      source,
-      "var number_elements = %d;"
-      "var elements = new Array(number_elements);"
-      "function foo() {"
-      "  this.a = 3;"
-      "  this.b = {};"
-      "}"
-      "function f() {"
-      "  for (var i = 0; i < number_elements; i++) {"
-      "    elements[i] = new foo();"
-      "  }"
-      "  return elements[number_elements - 1];"
-      "};"
-      "f(); gc();"
-      "f(); f();"
-      "%%OptimizeFunctionOnNextCall(f);"
-      "f();",
-      AllocationSite::kPretenureMinimumCreated +
-      JSFunction::kGenerousAllocationCount);
-
-  v8::Local<v8::Value> res = CompileRun(source.start());
-
-  Handle<JSObject> o =
-      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
-
-  CHECK(CcTest::heap()->InOldSpace(*o));
-}
-
-
-TEST(OptimizedPretenuringCallNew) {
-  if (!i::FLAG_pretenuring_call_new) {
-    // FLAG_pretenuring_call_new needs to be synced with the snapshot.
-    return;
-  }
-  i::FLAG_allow_natives_syntax = true;
-  i::FLAG_expose_gc = true;
-  CcTest::InitializeVM();
-  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
-  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
-  v8::HandleScope scope(CcTest::isolate());
-
-  // Grow new space unitl maximum capacity reached.
-  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
-    CcTest::heap()->new_space()->Grow();
-  }
-
-  i::ScopedVector<char> source(1024);
-  // Call new is doing slack tracking for the first
-  // JSFunction::kGenerousAllocationCount allocations, and we can't find
-  // mementos during that time.
-  i::SNPrintF(
-      source,
-      "var number_elements = %d;"
-      "var elements = new Array(number_elements);"
-      "function g() { this.a = 0; }"
-      "function f() {"
-      "  for (var i = 0; i < number_elements; i++) {"
-      "    elements[i] = new g();"
-      "  }"
-      "  return elements[number_elements - 1];"
-      "};"
-      "f(); gc();"
-      "f(); f();"
-      "%%OptimizeFunctionOnNextCall(f);"
-      "f();",
-      AllocationSite::kPretenureMinimumCreated +
-      JSFunction::kGenerousAllocationCount);
-
-  v8::Local<v8::Value> res = CompileRun(source.start());
-
-  Handle<JSObject> o =
-      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
-  CHECK(CcTest::heap()->InOldSpace(*o));
-}
-
-
 // Test regular array literals allocation.
 TEST(OptimizedAllocationArrayLiterals) {
   i::FLAG_allow_natives_syntax = true;
index a97666384bf245d3f7c1b840d93ddc9673cbb0f4..b26aad03a54354633c5fca254e0d0de41529374f 100644 (file)
@@ -95,45 +95,3 @@ TEST(BadMementoAfterTopForceScavenge) {
   // Force GC to test the poisoned memento handling
   CcTest::i_isolate()->heap()->CollectGarbage(i::NEW_SPACE);
 }
-
-
-TEST(PretenuringCallNew) {
-  CcTest::InitializeVM();
-  if (!i::FLAG_allocation_site_pretenuring) return;
-  if (!i::FLAG_pretenuring_call_new) return;
-  if (i::FLAG_always_opt) return;
-
-  v8::HandleScope scope(CcTest::isolate());
-  Isolate* isolate = CcTest::i_isolate();
-  Heap* heap = isolate->heap();
-
-  int call_count = 10;
-  i::ScopedVector<char> test_buf(1024);
-  const char* program =
-      "function f() {"
-      "  this.a = 3;"
-      "  this.b = {};"
-      "  return this;"
-      "};"
-      "var a;"
-      "for(var i = 0; i < %d; i++) {"
-      "  a = new f();"
-      "}"
-      "a;";
-  i::SNPrintF(test_buf, program, call_count);
-  v8::Local<v8::Value> res = CompileRun(test_buf.start());
-  Handle<JSObject> o =
-      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
-
-  // The object of class f should have a memento secreted behind it.
-  Address memento_address = o->address() + o->map()->instance_size();
-  AllocationMemento* memento =
-      reinterpret_cast<AllocationMemento*>(memento_address + kHeapObjectTag);
-  CHECK_EQ(memento->map(), heap->allocation_memento_map());
-
-  // Furthermore, how many mementos did we create? The count should match
-  // call_count. Note, that mementos are allocated during the inobject slack
-  // tracking phase.
-  AllocationSite* site = memento->GetAllocationSite();
-  CHECK_EQ(call_count, site->pretenure_create_count()->value());
-}