Optimize array literal boilerplate copy for fast cases.
author    danno@chromium.org <danno@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
Thu, 17 Nov 2011 17:22:21 +0000 (17:22 +0000)
committer danno@chromium.org <danno@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
Thu, 17 Nov 2011 17:22:21 +0000 (17:22 +0000)
R=jkummerow@chromium.org
BUG=none
TEST=none

Review URL: http://codereview.chromium.org/8590026

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@10024 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
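In effect, the patch changes how VisitArrayLiteral chooses between the clone stub and the runtime: literals whose boilerplate already has FAST_ELEMENTS backed by the copy-on-write fixed array map now go straight to a COPY_ON_WRITE_ELEMENTS stub, and the remaining stub cases are specialized to CLONE_ELEMENTS when the elements kind is known to be fast. The standalone C++ sketch below models only that decision order; the enum values, the Decide() helper, and the kMaximumClonedLength value of 8 are illustrative stand-ins, not actual V8 declarations.

// Standalone sketch of the stub-selection logic introduced by this patch.
// Only the branching order mirrors the diff below; all names here are
// stand-ins for the real FastCloneShallowArrayStub modes.
#include <cstdio>

enum class ElementsKind { kFastElements, kFastSmiOnlyElements, kOther };
enum class StubMode { kCopyOnWriteElements, kCloneElements, kCloneAnyElements,
                      kRuntimeDeep, kRuntimeShallow };

constexpr int kMaximumClonedLength = 8;  // assumed value, for illustration only

StubMode Decide(ElementsKind kind, bool elements_are_cow, int depth, int length) {
  bool has_constant_fast_elements = kind == ElementsKind::kFastElements;
  if (has_constant_fast_elements && elements_are_cow) {
    // Boilerplate elements are FAST_ELEMENTS and copy-on-write: the stub can
    // be specialized in advance and the COW elements shared without copying.
    return StubMode::kCopyOnWriteElements;
  } else if (depth > 1) {
    return StubMode::kRuntimeDeep;       // Runtime::kCreateArrayLiteral
  } else if (length > kMaximumClonedLength) {
    return StubMode::kRuntimeShallow;    // Runtime::kCreateArrayLiteralShallow
  } else {
    // Known-fast elements let the stub skip the elements-kind dispatch.
    return has_constant_fast_elements ? StubMode::kCloneElements
                                      : StubMode::kCloneAnyElements;
  }
}

int main() {
  std::printf("%d\n", static_cast<int>(
      Decide(ElementsKind::kFastElements, /*elements_are_cow=*/true,
             /*depth=*/1, /*length=*/3)));
}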

src/ia32/full-codegen-ia32.cc
src/x64/code-stubs-x64.cc
src/x64/full-codegen-x64.cc

diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index aa70dcb..c1ef22e 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -1477,10 +1477,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   int length = subexprs->length();
   Handle<FixedArray> constant_elements = expr->constant_elements();
   ASSERT_EQ(2, constant_elements->length());
-#if DEBUG
   ElementsKind constant_elements_kind =
       static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
-#endif
+  bool has_constant_fast_elements = constant_elements_kind == FAST_ELEMENTS;
   Handle<FixedArrayBase> constant_elements_values(
       FixedArrayBase::cast(constant_elements->get(1)));
 
@@ -1488,7 +1487,17 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
   __ push(Immediate(Smi::FromInt(expr->literal_index())));
   __ push(Immediate(constant_elements));
-  if (expr->depth() > 1) {
+  Heap* heap = isolate()->heap();
+  if (has_constant_fast_elements &&
+      constant_elements_values->map() == heap->fixed_cow_array_map()) {
+    // If the elements are already FAST_ELEMENTS, the boilerplate cannot
+    // change, so it's possible to specialize the stub in advance.
+    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
+    FastCloneShallowArrayStub stub(
+        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
+        length);
+    __ CallStub(&stub);
+  } else if (expr->depth() > 1) {
     __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
     __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
@@ -1496,13 +1505,12 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
     ASSERT(constant_elements_kind == FAST_ELEMENTS ||
            constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
            FLAG_smi_only_arrays);
-    if (constant_elements_values->map() ==
-        isolate()->heap()->fixed_cow_array_map()) {
-      __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
-                          1);
-    }
-    FastCloneShallowArrayStub stub(
-        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS, length);
+    // If the elements are already FAST_ELEMENTS, the boilerplate cannot
+    // change, so it's possible to specialize the stub in advance.
+    FastCloneShallowArrayStub::Mode mode = has_constant_fast_elements
+        ? FastCloneShallowArrayStub::CLONE_ELEMENTS
+        : FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
+    FastCloneShallowArrayStub stub(mode, length);
     __ CallStub(&stub);
   }
 
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 45fb755..a30d62f 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -226,7 +226,7 @@ static void GenerateFastCloneShallowArrayCommon(
     Label* fail) {
   // Registers on entry:
   //
-  // rcx: boilerplate array.
+  // rcx: boilerplate literal array.
   ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS);
 
   // All sizes here are multiples of kPointerSize.
@@ -315,7 +315,7 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
     __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
            factory->fixed_array_map());
     __ j(not_equal, &double_elements);
-    GenerateFastCloneShallowArrayCommon(masm, 0,
+    GenerateFastCloneShallowArrayCommon(masm, length_,
                                         CLONE_ELEMENTS, &slow_case);
     __ ret(3 * kPointerSize);
 
@@ -346,8 +346,7 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
     __ pop(rcx);
   }
 
-  GenerateFastCloneShallowArrayCommon(masm, 0,
-                                      CLONE_DOUBLE_ELEMENTS, &slow_case);
+  GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case);
   __ ret(3 * kPointerSize);
 
   __ bind(&slow_case);
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index 2dc765b..35b902d 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -1480,10 +1480,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   int length = subexprs->length();
   Handle<FixedArray> constant_elements = expr->constant_elements();
   ASSERT_EQ(2, constant_elements->length());
-#if DEBUG
   ElementsKind constant_elements_kind =
       static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
-#endif
+  bool has_constant_fast_elements = constant_elements_kind == FAST_ELEMENTS;
   Handle<FixedArrayBase> constant_elements_values(
       FixedArrayBase::cast(constant_elements->get(1)));
 
@@ -1491,7 +1490,17 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
   __ Push(Smi::FromInt(expr->literal_index()));
   __ Push(constant_elements);
-  if (expr->depth() > 1) {
+  Heap* heap = isolate()->heap();
+  if (has_constant_fast_elements &&
+      constant_elements_values->map() == heap->fixed_cow_array_map()) {
+    // If the elements are already FAST_ELEMENTS, the boilerplate cannot
+    // change, so it's possible to specialize the stub in advance.
+    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
+    FastCloneShallowArrayStub stub(
+        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
+        length);
+    __ CallStub(&stub);
+  } else if (expr->depth() > 1) {
     __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
     __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
@@ -1499,13 +1508,12 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
     ASSERT(constant_elements_kind == FAST_ELEMENTS ||
            constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
            FLAG_smi_only_arrays);
-    if (constant_elements_values->map() ==
-        isolate()->heap()->fixed_cow_array_map()) {
-      __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
-                          1);
-    }
-    FastCloneShallowArrayStub stub(
-        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS, length);
+    // If the elements are already FAST_ELEMENTS, the boilerplate cannot
+    // change, so it's possible to specialize the stub in advance.
+    FastCloneShallowArrayStub::Mode mode = has_constant_fast_elements
+        ? FastCloneShallowArrayStub::CLONE_ELEMENTS
+        : FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
+    FastCloneShallowArrayStub stub(mode, length);
     __ CallStub(&stub);
   }