MIPS: Additional work to get array literal allocation tracking working, even with...
author    mvstanton@chromium.org <mvstanton@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
          Mon, 21 Jan 2013 08:30:11 +0000 (08:30 +0000)
committer mvstanton@chromium.org <mvstanton@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
          Mon, 21 Jan 2013 08:30:11 +0000 (08:30 +0000)
Port r13406 (796a9f11)

BUG=
TEST=

Review URL: https://codereview.chromium.org/12026002
Patch from Akos Palfi <palfia@homejinni.com>.

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@13441 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
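
Note: the bulk of this port is a mechanical rename plus threading a mode parameter through the MIPS stubs and code generators, mirroring r13406 on the other platforms. A minimal sketch of the two enums involved (the old names appear on the '-' lines below, the new ones on the '+' lines; the declarations themselves live in shared code, not in this diff, so these are illustrative stand-ins):

    // Old naming, no longer referenced by the MIPS files after this patch:
    enum AllocationSiteInfoMode { DONT_TRACK_ALLOCATION_SITE_INFO, TRACK_ALLOCATION_SITE_INFO };
    // New naming used throughout the files below:
    enum AllocationSiteMode { DONT_TRACK_ALLOCATION_SITE, TRACK_ALLOCATION_SITE };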

src/mips/code-stubs-mips.cc
src/mips/codegen-mips.cc
src/mips/full-codegen-mips.cc
src/mips/ic-mips.cc
src/mips/lithium-codegen-mips.cc
src/mips/lithium-codegen-mips.h
src/mips/stub-cache-mips.cc

diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc
index bcd068b..a22e706 100644
@@ -343,7 +343,7 @@ static void GenerateFastCloneShallowArrayCommon(
     MacroAssembler* masm,
     int length,
     FastCloneShallowArrayStub::Mode mode,
-    AllocationSiteInfoMode allocation_site_info_mode,
+    AllocationSiteMode allocation_site_mode,
     Label* fail) {
   // Registers on entry:
   // a3: boilerplate literal array.
@@ -356,9 +356,10 @@ static void GenerateFastCloneShallowArrayCommon(
         ? FixedDoubleArray::SizeFor(length)
         : FixedArray::SizeFor(length);
   }
+
   int size = JSArray::kSize;
   int allocation_info_start = size;
-  if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
+  if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
     size += AllocationSiteInfo::kSize;
   }
   size += elements_size;
@@ -372,7 +373,7 @@ static void GenerateFastCloneShallowArrayCommon(
                         fail,
                         TAG_OBJECT);
 
-  if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
+  if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
     __ li(a2, Operand(Handle<Map>(masm->isolate()->heap()->
                                    allocation_site_info_map())));
     __ sw(a2, FieldMemOperand(v0, allocation_info_start));
@@ -391,7 +392,7 @@ static void GenerateFastCloneShallowArrayCommon(
     // Get hold of the elements array of the boilerplate and setup the
     // elements pointer in the resulting object.
     __ lw(a3, FieldMemOperand(a3, JSArray::kElementsOffset));
-    if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
+    if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
       __ Addu(a2, v0, Operand(JSArray::kSize + AllocationSiteInfo::kSize));
     } else {
       __ Addu(a2, v0, Operand(JSArray::kSize));
@@ -424,21 +425,14 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
   __ Branch(&slow_case, eq, a3, Operand(t1));
 
   FastCloneShallowArrayStub::Mode mode = mode_;
-  AllocationSiteInfoMode allocation_site_info_mode =
-      DONT_TRACK_ALLOCATION_SITE_INFO;
-  if (mode == CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO) {
-    mode = CLONE_ANY_ELEMENTS;
-    allocation_site_info_mode = TRACK_ALLOCATION_SITE_INFO;
-  }
   if (mode == CLONE_ANY_ELEMENTS) {
     Label double_elements, check_fast_elements;
     __ lw(v0, FieldMemOperand(a3, JSArray::kElementsOffset));
     __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
     __ LoadRoot(t1, Heap::kFixedCOWArrayMapRootIndex);
     __ Branch(&check_fast_elements, ne, v0, Operand(t1));
-    GenerateFastCloneShallowArrayCommon(masm, 0,
-                                        COPY_ON_WRITE_ELEMENTS,
-                                        allocation_site_info_mode,
+    GenerateFastCloneShallowArrayCommon(masm, 0, COPY_ON_WRITE_ELEMENTS,
+                                        allocation_site_mode_,
                                         &slow_case);
     // Return and remove the on-stack parameters.
     __ DropAndRet(3);
@@ -446,9 +440,8 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
     __ bind(&check_fast_elements);
     __ LoadRoot(t1, Heap::kFixedArrayMapRootIndex);
     __ Branch(&double_elements, ne, v0, Operand(t1));
-    GenerateFastCloneShallowArrayCommon(masm, length_,
-                                        CLONE_ELEMENTS,
-                                        allocation_site_info_mode,
+    GenerateFastCloneShallowArrayCommon(masm, length_, CLONE_ELEMENTS,
+                                        allocation_site_mode_,
                                         &slow_case);
     // Return and remove the on-stack parameters.
     __ DropAndRet(3);
@@ -481,7 +474,8 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
   }
 
   GenerateFastCloneShallowArrayCommon(masm, length_, mode,
-                                      allocation_site_info_mode, &slow_case);
+                                      allocation_site_mode_,
+                                      &slow_case);
 
   // Return and remove the on-stack parameters.
   __ DropAndRet(3);
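
Note: with tracking enabled, GenerateFastCloneShallowArrayCommon above reserves an AllocationSiteInfo cell between the JSArray header and the copied elements. A standalone sketch of that size/offset arithmetic (plain C++; the constants are made-up stand-ins, not the real JSArray::kSize or AllocationSiteInfo::kSize):

    #include <cstdio>
    enum AllocationSiteMode { DONT_TRACK_ALLOCATION_SITE, TRACK_ALLOCATION_SITE };
    int main() {
      const int kJSArraySize = 16;            // stand-in for JSArray::kSize
      const int kAllocationSiteInfoSize = 8;  // stand-in for AllocationSiteInfo::kSize
      const int elements_size = 32;           // stand-in for FixedArray::SizeFor(length)
      AllocationSiteMode mode = TRACK_ALLOCATION_SITE;
      int size = kJSArraySize;
      int allocation_info_start = size;       // info cell sits right after the JSArray header
      if (mode == TRACK_ALLOCATION_SITE) size += kAllocationSiteInfoSize;
      int elements_start = size;              // elements follow the (optional) info cell
      size += elements_size;
      std::printf("allocate %d bytes, info at %d, elements at %d\n",
                  size, allocation_info_start, elements_start);
      return 0;
    }
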
diff --git a/src/mips/codegen-mips.cc b/src/mips/codegen-mips.cc
index 1ea8fd9..f5caab9 100644
@@ -143,7 +143,8 @@ void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
 #define __ ACCESS_MASM(masm)
 
 void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
-    MacroAssembler* masm) {
+    MacroAssembler* masm, AllocationSiteMode mode,
+    Label* allocation_site_info_found) {
   // ----------- S t a t e -------------
   //  -- a0    : value
   //  -- a1    : key
@@ -152,6 +153,12 @@ void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
   //  -- a3    : target map, scratch for subsequent call
   //  -- t0    : scratch (elements)
   // -----------------------------------
+  if (mode == TRACK_ALLOCATION_SITE) {
+    ASSERT(allocation_site_info_found != NULL);
+    masm->TestJSArrayForAllocationSiteInfo(a2, t0,
+                                           allocation_site_info_found);
+  }
+
   // Set transitioned map.
   __ sw(a3, FieldMemOperand(a2, HeapObject::kMapOffset));
   __ RecordWriteField(a2,
@@ -166,7 +173,7 @@ void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
 
 
 void ElementsTransitionGenerator::GenerateSmiToDouble(
-    MacroAssembler* masm, Label* fail) {
+    MacroAssembler* masm, AllocationSiteMode mode, Label* fail) {
   // ----------- S t a t e -------------
   //  -- a0    : value
   //  -- a1    : key
@@ -180,7 +187,7 @@ void ElementsTransitionGenerator::GenerateSmiToDouble(
 
   Register scratch = t6;
 
-  if (FLAG_track_allocation_sites) {
+  if (mode == TRACK_ALLOCATION_SITE) {
     masm->TestJSArrayForAllocationSiteInfo(a2, t0, fail);
   }
 
@@ -313,7 +320,7 @@ void ElementsTransitionGenerator::GenerateSmiToDouble(
 
 
 void ElementsTransitionGenerator::GenerateDoubleToObject(
-    MacroAssembler* masm, Label* fail) {
+    MacroAssembler* masm, AllocationSiteMode mode, Label* fail) {
   // ----------- S t a t e -------------
   //  -- a0    : value
   //  -- a1    : key
@@ -324,6 +331,10 @@ void ElementsTransitionGenerator::GenerateDoubleToObject(
   // -----------------------------------
   Label entry, loop, convert_hole, gc_required, only_change_map;
 
+  if (mode == TRACK_ALLOCATION_SITE) {
+    masm->TestJSArrayForAllocationSiteInfo(a2, t0, fail);
+  }
+
   // Check for empty arrays, which only require a map transition and no changes
   // to the backing store.
   __ lw(t0, FieldMemOperand(a2, JSObject::kElementsOffset));
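
Note: each ElementsTransitionGenerator entry point above now takes an AllocationSiteMode and, when tracking is requested, bails out to a caller-supplied label before rewriting the map; in GenerateSmiToDouble this replaces the earlier direct FLAG_track_allocation_sites check. A plain-C++ sketch of that shared guard pattern (the helper name is a stand-in for MacroAssembler::TestJSArrayForAllocationSiteInfo):

    #include <cassert>
    #include <cstddef>
    enum AllocationSiteMode { DONT_TRACK_ALLOCATION_SITE, TRACK_ALLOCATION_SITE };
    struct Label {};                                   // stand-in for the assembler Label
    void JumpIfAllocationSiteInfoPresent(Label* target) { (void)target; /* emitted runtime check */ }
    void GenerateTransition(AllocationSiteMode mode, Label* bailout) {
      if (mode == TRACK_ALLOCATION_SITE) {
        assert(bailout != NULL);                       // mirrors ASSERT(allocation_site_info_found != NULL)
        JumpIfAllocationSiteInfoPresent(bailout);
      }
      // ... emit the in-place elements-kind transition ...
    }
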
diff --git a/src/mips/full-codegen-mips.cc b/src/mips/full-codegen-mips.cc
index d4390b2..c4341dc 100644
@@ -1730,7 +1730,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   if (has_fast_elements && constant_elements_values->map() ==
       isolate()->heap()->fixed_cow_array_map()) {
     FastCloneShallowArrayStub stub(
-        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
+        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
+        DONT_TRACK_ALLOCATION_SITE,
+        length);
     __ CallStub(&stub);
     __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
         1, a1, a2);
@@ -1741,19 +1743,17 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   } else {
     ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
            FLAG_smi_only_arrays);
-    FastCloneShallowArrayStub::Mode mode = has_fast_elements
-      ? FastCloneShallowArrayStub::CLONE_ELEMENTS
-      : FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
-
-    // Tracking allocation info allows us to pre-transition later if it makes
-    // sense.
-    if (mode == FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS &&
-        FLAG_track_allocation_sites) {
-      mode = FastCloneShallowArrayStub::
-          CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO;
+    FastCloneShallowArrayStub::Mode mode =
+        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
+    AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
+        ? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
+
+    if (has_fast_elements) {
+      mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
+      allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
     }
 
-    FastCloneShallowArrayStub stub(mode, length);
+    FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
     __ CallStub(&stub);
   }
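
Note: the VisitArrayLiteral hunk above boils down to a two-way choice: a boilerplate that already has fast object elements gets CLONE_ELEMENTS with no tracking; otherwise CLONE_ANY_ELEMENTS is used and tracking follows FLAG_track_allocation_sites. A standalone restatement of that selection (illustrative; enum names mirror the diff, values are stand-ins):

    #include <utility>
    enum AllocationSiteMode { DONT_TRACK_ALLOCATION_SITE, TRACK_ALLOCATION_SITE };
    enum CloneMode { CLONE_ELEMENTS, CLONE_ANY_ELEMENTS };
    std::pair<CloneMode, AllocationSiteMode> SelectStubModes(bool has_fast_elements,
                                                             bool track_allocation_sites) {
      CloneMode mode = CLONE_ANY_ELEMENTS;
      AllocationSiteMode site_mode = track_allocation_sites
          ? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
      if (has_fast_elements) {
        mode = CLONE_ELEMENTS;                    // boilerplate already has fast object elements
        site_mode = DONT_TRACK_ALLOCATION_SITE;   // no pre-transition left to track
      }
      return std::make_pair(mode, site_mode);
    }
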
 
diff --git a/src/mips/ic-mips.cc b/src/mips/ic-mips.cc
index 0f9d687..6d29c1e 100644
@@ -1295,7 +1295,9 @@ static void KeyedStoreGenerateGenericHelper(
                                          t0,
                                          slow);
   ASSERT(receiver_map.is(a3));  // Transition code expects map in a3
-  ElementsTransitionGenerator::GenerateSmiToDouble(masm, slow);
+  AllocationSiteMode mode = AllocationSiteInfo::GetMode(FAST_SMI_ELEMENTS,
+                                                        FAST_DOUBLE_ELEMENTS);
+  ElementsTransitionGenerator::GenerateSmiToDouble(masm, mode, slow);
   __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
   __ jmp(&fast_double_without_map_check);
 
@@ -1307,7 +1309,9 @@ static void KeyedStoreGenerateGenericHelper(
                                          t0,
                                          slow);
   ASSERT(receiver_map.is(a3));  // Transition code expects map in a3
-  ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm);
+  mode = AllocationSiteInfo::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS);
+  ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm, mode,
+                                                                   slow);
   __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
   __ jmp(&finish_object_store);
 
@@ -1321,7 +1325,8 @@ static void KeyedStoreGenerateGenericHelper(
                                          t0,
                                          slow);
   ASSERT(receiver_map.is(a3));  // Transition code expects map in a3
-  ElementsTransitionGenerator::GenerateDoubleToObject(masm, slow);
+  mode = AllocationSiteInfo::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS);
+  ElementsTransitionGenerator::GenerateDoubleToObject(masm, mode, slow);
   __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
   __ jmp(&finish_object_store);
 }
@@ -1505,7 +1510,9 @@ void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
   // Must return the modified receiver in v0.
   if (!FLAG_trace_elements_transitions) {
     Label fail;
-    ElementsTransitionGenerator::GenerateSmiToDouble(masm, &fail);
+    AllocationSiteMode mode = AllocationSiteInfo::GetMode(FAST_SMI_ELEMENTS,
+                                                          FAST_DOUBLE_ELEMENTS);
+    ElementsTransitionGenerator::GenerateSmiToDouble(masm, mode, &fail);
     __ Ret(USE_DELAY_SLOT);
     __ mov(v0, a2);
     __ bind(&fail);
@@ -1526,7 +1533,9 @@ void KeyedStoreIC::GenerateTransitionElementsDoubleToObject(
   // Must return the modified receiver in v0.
   if (!FLAG_trace_elements_transitions) {
     Label fail;
-    ElementsTransitionGenerator::GenerateDoubleToObject(masm, &fail);
+    AllocationSiteMode mode = AllocationSiteInfo::GetMode(FAST_DOUBLE_ELEMENTS,
+                                                          FAST_ELEMENTS);
+    ElementsTransitionGenerator::GenerateDoubleToObject(masm, mode, &fail);
     __ Ret(USE_DELAY_SLOT);
     __ mov(v0, a2);
     __ bind(&fail);
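
Note: the keyed-store helpers above no longer hard-wire the transition generators; each call site first asks AllocationSiteInfo::GetMode(from_kind, to_kind) whether that particular transition should be tracked and threads the answer through. A minimal stand-in showing only the call pattern (GetModeForTransition is a placeholder; the real predicate lives in shared code and is not part of this diff):

    enum AllocationSiteMode { DONT_TRACK_ALLOCATION_SITE, TRACK_ALLOCATION_SITE };
    enum ElementsKind { FAST_SMI_ELEMENTS, FAST_ELEMENTS, FAST_DOUBLE_ELEMENTS };
    // Placeholder for AllocationSiteInfo::GetMode; the real decision also consults
    // the --track-allocation-sites flag, so this stub deliberately stays conservative.
    AllocationSiteMode GetModeForTransition(ElementsKind, ElementsKind) {
      return DONT_TRACK_ALLOCATION_SITE;
    }
    void WireUpTransitions() {
      AllocationSiteMode smi_to_double = GetModeForTransition(FAST_SMI_ELEMENTS, FAST_DOUBLE_ELEMENTS);
      AllocationSiteMode smi_to_object = GetModeForTransition(FAST_SMI_ELEMENTS, FAST_ELEMENTS);
      AllocationSiteMode dbl_to_object = GetModeForTransition(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS);
      // Each mode is then passed to the matching ElementsTransitionGenerator entry point.
      (void)smi_to_double; (void)smi_to_object; (void)dbl_to_object;
    }
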
diff --git a/src/mips/lithium-codegen-mips.cc b/src/mips/lithium-codegen-mips.cc
index 4484ed3..33a32e7 100644
@@ -5118,6 +5118,8 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
   Handle<FixedArray> literals(instr->environment()->closure()->literals());
   ElementsKind boilerplate_elements_kind =
       instr->hydrogen()->boilerplate_elements_kind();
+  AllocationSiteMode allocation_site_mode =
+      instr->hydrogen()->allocation_site_mode();
 
   // Deopt if the array literal boilerplate ElementsKind is of a type different
   // than the expected one. The check isn't necessary if the boilerplate has
@@ -5151,7 +5153,7 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
     ASSERT(instr->hydrogen()->depth() == 1);
     FastCloneShallowArrayStub::Mode mode =
         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
-    FastCloneShallowArrayStub stub(mode, length);
+    FastCloneShallowArrayStub stub(mode, DONT_TRACK_ALLOCATION_SITE, length);
     CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
   } else if (instr->hydrogen()->depth() > 1) {
     CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
@@ -5160,9 +5162,9 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
   } else {
     FastCloneShallowArrayStub::Mode mode =
         boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS
-            ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
-            : FastCloneShallowArrayStub::CLONE_ELEMENTS;
-    FastCloneShallowArrayStub stub(mode, length);
+        ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
+        : FastCloneShallowArrayStub::CLONE_ELEMENTS;
+    FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
     CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
   }
 }
@@ -5171,10 +5173,14 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
 void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
                             Register result,
                             Register source,
-                            int* offset) {
+                            int* offset,
+                            AllocationSiteMode mode) {
   ASSERT(!source.is(a2));
   ASSERT(!result.is(a2));
 
+  bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE &&
+      object->map()->CanTrackAllocationSite();
+
   // Only elements backing stores for non-COW arrays need to be copied.
   Handle<FixedArrayBase> elements(object->elements());
   bool has_elements = elements->length() > 0 &&
@@ -5184,8 +5190,13 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
   // this object and its backing store.
   int object_offset = *offset;
   int object_size = object->map()->instance_size();
-  int elements_offset = *offset + object_size;
   int elements_size = has_elements ? elements->Size() : 0;
+  int elements_offset = *offset + object_size;
+  if (create_allocation_site_info) {
+    elements_offset += AllocationSiteInfo::kSize;
+    *offset += AllocationSiteInfo::kSize;
+  }
+
   *offset += object_size + elements_size;
 
   // Copy object header.
@@ -5210,7 +5221,8 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
       __ Addu(a2, result, Operand(*offset));
       __ sw(a2, FieldMemOperand(result, total_offset));
       __ LoadHeapObject(source, value_object);
-      EmitDeepCopy(value_object, result, source, offset);
+      EmitDeepCopy(value_object, result, source, offset,
+                   DONT_TRACK_ALLOCATION_SITE);
     } else if (value->IsHeapObject()) {
       __ LoadHeapObject(a2, Handle<HeapObject>::cast(value));
       __ sw(a2, FieldMemOperand(result, total_offset));
@@ -5220,6 +5232,13 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
     }
   }
 
+  // Build Allocation Site Info if desired
+  if (create_allocation_site_info) {
+    __ li(a2, Operand(Handle<Map>(isolate()->heap()->
+                                  allocation_site_info_map())));
+    __ sw(a2, FieldMemOperand(result, object_size));
+    __ sw(source, FieldMemOperand(result, object_size + kPointerSize));
+  }
 
   if (has_elements) {
     // Copy elements backing store header.
@@ -5256,7 +5275,8 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
           __ Addu(a2, result, Operand(*offset));
           __ sw(a2, FieldMemOperand(result, total_offset));
           __ LoadHeapObject(source, value_object);
-          EmitDeepCopy(value_object, result, source, offset);
+          EmitDeepCopy(value_object, result, source, offset,
+                       DONT_TRACK_ALLOCATION_SITE);
         } else if (value->IsHeapObject()) {
           __ LoadHeapObject(a2, Handle<HeapObject>::cast(value));
           __ sw(a2, FieldMemOperand(result, total_offset));
@@ -5307,7 +5327,8 @@ void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
   __ bind(&allocated);
   int offset = 0;
   __ LoadHeapObject(a1, instr->hydrogen()->boilerplate());
-  EmitDeepCopy(instr->hydrogen()->boilerplate(), v0, a1, &offset);
+  EmitDeepCopy(instr->hydrogen()->boilerplate(), v0, a1, &offset,
+               instr->hydrogen()->allocation_site_mode());
   ASSERT_EQ(size, offset);
 }
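
Note: in the EmitDeepCopy hunks above, when the mode is TRACK_ALLOCATION_SITE and the map can track allocation sites, the deep copy shifts the elements backing store by AllocationSiteInfo::kSize and writes the allocation_site_info map plus the boilerplate pointer right after the object header. A standalone model of that offset bookkeeping (plain C++; the constants are stand-ins, not V8's real sizes):

    #include <cstdio>
    int main() {
      const int kAllocationSiteInfoSize = 8;        // stand-in for AllocationSiteInfo::kSize
      const int object_size = 24;                   // stand-in for map()->instance_size()
      const int elements_size = 40;                 // stand-in for elements->Size()
      const bool create_allocation_site_info = true;  // TRACK_ALLOCATION_SITE && CanTrackAllocationSite()
      int offset = 0;
      const int object_offset = offset;
      int elements_offset = offset + object_size;
      if (create_allocation_site_info) {
        elements_offset += kAllocationSiteInfoSize; // info cell precedes the copied elements
        offset += kAllocationSiteInfoSize;
      }
      offset += object_size + elements_size;
      std::printf("object at %d, site info at %d, elements at %d, next free offset %d\n",
                  object_offset, object_offset + object_size, elements_offset, offset);
      return 0;
    }
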
 
diff --git a/src/mips/lithium-codegen-mips.h b/src/mips/lithium-codegen-mips.h
index 443a20a..83bda9a 100644
@@ -375,7 +375,8 @@ class LCodeGen BASE_EMBEDDED {
   void EmitDeepCopy(Handle<JSObject> object,
                     Register result,
                     Register source,
-                    int* offset);
+                    int* offset,
+                    AllocationSiteMode mode);
 
   struct JumpTableEntry {
     inline JumpTableEntry(Address entry, bool frame, bool is_lazy)
diff --git a/src/mips/stub-cache-mips.cc b/src/mips/stub-cache-mips.cc
index 0313826..c3d8ad5 100644
@@ -1693,7 +1693,9 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
                                                &try_holey_map);
         __ mov(a2, receiver);
         ElementsTransitionGenerator::
-            GenerateMapChangeElementsTransition(masm());
+            GenerateMapChangeElementsTransition(masm(),
+                                                DONT_TRACK_ALLOCATION_SITE,
+                                                NULL);
         __ jmp(&fast_object);
 
         __ bind(&try_holey_map);
@@ -1704,7 +1706,9 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
                                                &call_builtin);
         __ mov(a2, receiver);
         ElementsTransitionGenerator::
-            GenerateMapChangeElementsTransition(masm());
+            GenerateMapChangeElementsTransition(masm(),
+                                                DONT_TRACK_ALLOCATION_SITE,
+                                                NULL);
         __ bind(&fast_object);
       } else {
         __ CheckFastObjectElements(a3, a3, &call_builtin);
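
Note: passing NULL as the bail-out label in the two GenerateMapChangeElementsTransition calls above is safe because the label is only consulted when the mode is TRACK_ALLOCATION_SITE (see the ASSERT in the codegen-mips.cc hunk); with DONT_TRACK_ALLOCATION_SITE the transition is emitted unconditionally, as before this patch.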