[heap] Reland Remove retry space from AllocateRaw.
author    hpayer <hpayer@chromium.org>
Tue, 29 Sep 2015 09:08:10 +0000 (02:08 -0700)
committer Commit bot <commit-bot@chromium.org>
Tue, 29 Sep 2015 09:08:22 +0000 (09:08 +0000)
BUG=

Review URL: https://codereview.chromium.org/1374163002

Cr-Commit-Position: refs/heads/master@{#30998}

src/heap/heap-inl.h
src/heap/heap.cc
src/heap/heap.h
test/cctest/test-alloc.cc
test/cctest/test-heap.cc
test/cctest/test-strings.cc
test/cctest/test-unboxed-doubles.cc

src/heap/heap-inl.h
index 4d7e31f..cff69b1 100644 (file)
@@ -126,7 +126,7 @@ AllocationResult Heap::AllocateOneByteInternalizedString(
   // Allocate string.
   HeapObject* result = nullptr;
   {
-    AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
 
@@ -157,7 +157,7 @@ AllocationResult Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str,
   // Allocate string.
   HeapObject* result = nullptr;
   {
-    AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
 
@@ -189,7 +189,6 @@ AllocationResult Heap::CopyFixedDoubleArray(FixedDoubleArray* src) {
 
 
 AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
-                                   AllocationSpace retry_space,
                                    AllocationAlignment alignment) {
   DCHECK(AllowHandleAllocation::IsAllowed());
   DCHECK(AllowHeapAllocation::IsAllowed());
@@ -207,19 +206,14 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
   HeapObject* object = nullptr;
   AllocationResult allocation;
   if (NEW_SPACE == space) {
-    if (!large_object) {
+    if (large_object) {
+      space = LO_SPACE;
+    } else {
       allocation = new_space_.AllocateRaw(size_in_bytes, alignment);
-      if (always_allocate() && allocation.IsRetry() &&
-          retry_space != NEW_SPACE) {
-        space = retry_space;
-      } else {
-        if (allocation.To(&object)) {
-          OnAllocationEvent(object, size_in_bytes);
-        }
-        return allocation;
+      if (allocation.To(&object)) {
+        OnAllocationEvent(object, size_in_bytes);
       }
-    } else {
-      space = LO_SPACE;
+      return allocation;
     }
   }
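
Net effect of the hunk above: AllocateRaw no longer takes a fallback space. A regular allocation that fails in new space now returns a retry result to the caller, and an oversized new-space request is redirected to the large-object space up front. A minimal self-contained sketch of the new control flow (all types, names, and the new_space_full flag below are simplified stand-ins for illustration, not the real V8 declarations):

#include <cstdio>

// Simplified stand-ins, for illustration only.
enum AllocationSpace { NEW_SPACE, OLD_SPACE, LO_SPACE };
enum AllocationAlignment { kWordAligned, kDoubleAligned };

struct AllocationResult {
  bool retry;
  static AllocationResult Retry() { return {true}; }
  static AllocationResult Ok() { return {false}; }
  bool IsRetry() const { return retry; }
};

const int kMaxRegularHeapObjectSize = 512 * 1024;  // illustrative limit
bool new_space_full = false;  // stands in for the new-space allocator state

AllocationResult AllocateRaw(int size_in_bytes, AllocationSpace space,
                             AllocationAlignment alignment = kWordAligned) {
  (void)alignment;
  bool large_object = size_in_bytes > kMaxRegularHeapObjectSize;
  if (space == NEW_SPACE) {
    if (large_object) {
      space = LO_SPACE;  // oversized requests are redirected, as above
    } else {
      // The result of new_space_.AllocateRaw() is returned directly;
      // there is no retry_space fallback anymore.
      return new_space_full ? AllocationResult::Retry()
                            : AllocationResult::Ok();
    }
  }
  // ... allocation in the chosen (possibly redirected) space ...
  return AllocationResult::Ok();
}

int main() {
  std::printf("retry=%d\n", AllocateRaw(64, NEW_SPACE).IsRetry());  // retry=0
  new_space_full = true;
  std::printf("retry=%d\n", AllocateRaw(64, NEW_SPACE).IsRetry());  // retry=1
  return 0;
}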
 
src/heap/heap.cc
index 2943739..83a1cb1 100644 (file)
@@ -1988,7 +1988,7 @@ void Heap::ConfigureInitialOldGenerationSize() {
 AllocationResult Heap::AllocatePartialMap(InstanceType instance_type,
                                           int instance_size) {
   Object* result = nullptr;
-  AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE);
+  AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE);
   if (!allocation.To(&result)) return allocation;
 
   // Map::cast cannot be used due to uninitialized map field.
@@ -2022,7 +2022,7 @@ AllocationResult Heap::AllocateMap(InstanceType instance_type,
                                    int instance_size,
                                    ElementsKind elements_kind) {
   HeapObject* result = nullptr;
-  AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE);
+  AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE);
   if (!allocation.To(&result)) return allocation;
 
   result->set_map_no_write_barrier(meta_map());
@@ -2063,7 +2063,7 @@ AllocationResult Heap::AllocateFillerObject(int size, bool double_align,
   HeapObject* obj = nullptr;
   {
     AllocationAlignment align = double_align ? kDoubleAligned : kWordAligned;
-    AllocationResult allocation = AllocateRaw(size, space, space, align);
+    AllocationResult allocation = AllocateRaw(size, space, align);
     if (!allocation.To(&obj)) return allocation;
   }
 #ifdef DEBUG
@@ -2376,8 +2376,7 @@ AllocationResult Heap::AllocateHeapNumber(double value, MutableMode mode,
 
   HeapObject* result = nullptr;
   {
-    AllocationResult allocation =
-        AllocateRaw(size, space, OLD_SPACE, kDoubleUnaligned);
+    AllocationResult allocation = AllocateRaw(size, space, kDoubleUnaligned);
     if (!allocation.To(&result)) return allocation;
   }
 
@@ -2398,7 +2397,7 @@ AllocationResult Heap::AllocateHeapNumber(double value, MutableMode mode,
     HeapObject* result = nullptr;                                         \
     {                                                                     \
       AllocationResult allocation =                                       \
-          AllocateRaw(size, space, OLD_SPACE, kSimd128Unaligned);         \
+          AllocateRaw(size, space, kSimd128Unaligned);                    \
       if (!allocation.To(&result)) return allocation;                     \
     }                                                                     \
                                                                           \
@@ -2419,7 +2418,7 @@ AllocationResult Heap::AllocateCell(Object* value) {
 
   HeapObject* result = nullptr;
   {
-    AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
   result->set_map_no_write_barrier(cell_map());
@@ -2433,7 +2432,7 @@ AllocationResult Heap::AllocatePropertyCell() {
   STATIC_ASSERT(PropertyCell::kSize <= Page::kMaxRegularHeapObjectSize);
 
   HeapObject* result = nullptr;
-  AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
+  AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
   if (!allocation.To(&result)) return allocation;
 
   result->set_map_no_write_barrier(global_property_cell_map());
@@ -2451,7 +2450,7 @@ AllocationResult Heap::AllocateWeakCell(HeapObject* value) {
   STATIC_ASSERT(WeakCell::kSize <= Page::kMaxRegularHeapObjectSize);
   HeapObject* result = nullptr;
   {
-    AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
   result->set_map_no_write_barrier(weak_cell_map());
@@ -2936,7 +2935,7 @@ AllocationResult Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
   AllocationSpace space = SelectSpace(pretenure);
   HeapObject* result = nullptr;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space);
     if (!allocation.To(&result)) return allocation;
   }
 
@@ -2960,7 +2959,7 @@ AllocationResult Heap::AllocateBytecodeArray(int length,
   int size = BytecodeArray::SizeFor(length);
   HeapObject* result = nullptr;
   {
-    AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
 
@@ -3147,7 +3146,7 @@ AllocationResult Heap::AllocateFixedTypedArrayWithExternalPointer(
   AllocationSpace space = SelectSpace(pretenure);
   HeapObject* result = nullptr;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space);
     if (!allocation.To(&result)) return allocation;
   }
 
@@ -3192,7 +3191,7 @@ AllocationResult Heap::AllocateFixedTypedArray(int length,
 
   HeapObject* object = nullptr;
   AllocationResult allocation = AllocateRaw(
-      size, space, OLD_SPACE,
+      size, space,
       array_type == kExternalFloat64Array ? kDoubleAligned : kWordAligned);
   if (!allocation.To(&object)) return allocation;
 
@@ -3210,8 +3209,7 @@ AllocationResult Heap::AllocateFixedTypedArray(int length,
 
 AllocationResult Heap::AllocateCode(int object_size, bool immovable) {
   DCHECK(IsAligned(static_cast<intptr_t>(object_size), kCodeAlignment));
-  AllocationResult allocation =
-      AllocateRaw(object_size, CODE_SPACE, CODE_SPACE);
+  AllocationResult allocation = AllocateRaw(object_size, CODE_SPACE);
 
   HeapObject* result = nullptr;
   if (!allocation.To(&result)) return allocation;
@@ -3250,7 +3248,7 @@ AllocationResult Heap::CopyCode(Code* code) {
   HeapObject* result = nullptr;
   // Allocate an object the same size as the code object.
   int obj_size = code->Size();
-  allocation = AllocateRaw(obj_size, CODE_SPACE, CODE_SPACE);
+  allocation = AllocateRaw(obj_size, CODE_SPACE);
   if (!allocation.To(&result)) return allocation;
 
   // Copy code object.
@@ -3289,8 +3287,7 @@ AllocationResult Heap::CopyCode(Code* code, Vector<byte> reloc_info) {
       static_cast<size_t>(code->instruction_end() - old_addr);
 
   HeapObject* result = nullptr;
-  AllocationResult allocation =
-      AllocateRaw(new_obj_size, CODE_SPACE, CODE_SPACE);
+  AllocationResult allocation = AllocateRaw(new_obj_size, CODE_SPACE);
   if (!allocation.To(&result)) return allocation;
 
   // Copy code object.
@@ -3336,15 +3333,12 @@ AllocationResult Heap::Allocate(Map* map, AllocationSpace space,
                                 AllocationSite* allocation_site) {
   DCHECK(gc_state_ == NOT_IN_GC);
   DCHECK(map->instance_type() != MAP_TYPE);
-  // If allocation failures are disallowed, we may allocate in a different
-  // space when new space is full and the object is not a large object.
-  AllocationSpace retry_space = (space != NEW_SPACE) ? space : OLD_SPACE;
   int size = map->instance_size();
   if (allocation_site != NULL) {
     size += AllocationMemento::kSize;
   }
   HeapObject* result = nullptr;
-  AllocationResult allocation = AllocateRaw(size, space, retry_space);
+  AllocationResult allocation = AllocateRaw(size, space);
   if (!allocation.To(&result)) return allocation;
   // No need for write barrier since object is white and map is in old space.
   result->set_map_no_write_barrier(map);
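
Heap::Allocate above loses the lines that turned a failing NEW_SPACE request into an OLD_SPACE fallback. Any caller that relied on that behavior must now observe the retry result and react, typically by collecting garbage and reattempting. A hedged sketch of such a caller-side loop, reusing the stand-ins from the first sketch (CollectGarbage here is a hypothetical stub, not the committed V8 API):

// Hypothetical caller-side retry loop (builds on the stand-ins above).
void CollectGarbage(AllocationSpace /*space*/) { new_space_full = false; }

AllocationResult AllocateWithRetry(int size, AllocationSpace space) {
  AllocationResult result = AllocateRaw(size, space);
  for (int attempt = 0; result.IsRetry() && attempt < 2; ++attempt) {
    CollectGarbage(space);              // free memory, then try again
    result = AllocateRaw(size, space);  // reattempt in the same space
  }
  return result;
}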
@@ -3446,65 +3440,20 @@ AllocationResult Heap::CopyJSObject(JSObject* source, AllocationSite* site) {
 
   DCHECK(site == NULL || AllocationSite::CanTrack(map->instance_type()));
 
-  WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER;
-
-  // If we're forced to always allocate, we use the general allocation
-  // functions which may leave us with an object in old space.
-  if (always_allocate()) {
-    {
-      AllocationResult allocation =
-          AllocateRaw(object_size, NEW_SPACE, OLD_SPACE);
-      if (!allocation.To(&clone)) return allocation;
-    }
-    Address clone_address = clone->address();
-    CopyBlock(clone_address, source->address(), object_size);
-
-    // Update write barrier for all tagged fields that lie beyond the header.
-    const int start_offset = JSObject::kHeaderSize;
-    const int end_offset = object_size;
-
-#if V8_DOUBLE_FIELDS_UNBOXING
-    LayoutDescriptorHelper helper(map);
-    bool has_only_tagged_fields = helper.all_fields_tagged();
-
-    if (!has_only_tagged_fields) {
-      for (int offset = start_offset; offset < end_offset;) {
-        int end_of_region_offset;
-        if (helper.IsTagged(offset, end_offset, &end_of_region_offset)) {
-          RecordWrites(clone_address, offset,
-                       (end_of_region_offset - offset) / kPointerSize);
-        }
-        offset = end_of_region_offset;
-      }
-    } else {
-#endif
-      // Object has only tagged fields.
-      RecordWrites(clone_address, start_offset,
-                   (end_offset - start_offset) / kPointerSize);
-#if V8_DOUBLE_FIELDS_UNBOXING
-    }
-#endif
+  int adjusted_object_size =
+      site != NULL ? object_size + AllocationMemento::kSize : object_size;
+  AllocationResult allocation = AllocateRaw(adjusted_object_size, NEW_SPACE);
+  if (!allocation.To(&clone)) return allocation;
 
-  } else {
-    wb_mode = SKIP_WRITE_BARRIER;
+  SLOW_DCHECK(InNewSpace(clone));
+  // Since we know the clone is allocated in new space, we can copy
+  // the contents without worrying about updating the write barrier.
+  CopyBlock(clone->address(), source->address(), object_size);
 
-    {
-      int adjusted_object_size =
-          site != NULL ? object_size + AllocationMemento::kSize : object_size;
-      AllocationResult allocation =
-          AllocateRaw(adjusted_object_size, NEW_SPACE, NEW_SPACE);
-      if (!allocation.To(&clone)) return allocation;
-    }
-    SLOW_DCHECK(InNewSpace(clone));
-    // Since we know the clone is allocated in new space, we can copy
-    // the contents without worrying about updating the write barrier.
-    CopyBlock(clone->address(), source->address(), object_size);
-
-    if (site != NULL) {
-      AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
-          reinterpret_cast<Address>(clone) + object_size);
-      InitializeAllocationMemento(alloc_memento, site);
-    }
+  if (site != NULL) {
+    AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
+        reinterpret_cast<Address>(clone) + object_size);
+    InitializeAllocationMemento(alloc_memento, site);
   }
 
   SLOW_DCHECK(JSObject::cast(clone)->GetElementsKind() ==
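
One detail worth spelling out from the surviving path: when an AllocationSite is tracked, the clone request is padded by AllocationMemento::kSize and the memento is then placed immediately behind the object proper, at clone_address + object_size. A tiny self-contained sketch of that address arithmetic (the size constant is illustrative, not the real kSize):

#include <cassert>
#include <cstdint>

typedef uint8_t* Address;

const int kAllocationMementoSize = 16;  // illustrative value

int main() {
  uint8_t backing[256];                 // pretend this is the new-space chunk
  int object_size = 64;
  int adjusted_object_size = object_size + kAllocationMementoSize;
  assert(adjusted_object_size <= (int)sizeof(backing));
  Address clone_address = backing;
  // The memento occupies the padding right behind the object itself.
  Address memento = clone_address + object_size;
  assert(memento + kAllocationMementoSize ==
         clone_address + adjusted_object_size);
  return 0;
}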
@@ -3525,7 +3474,7 @@ AllocationResult Heap::CopyJSObject(JSObject* source, AllocationSite* site) {
       }
       if (!allocation.To(&elem)) return allocation;
     }
-    JSObject::cast(clone)->set_elements(elem, wb_mode);
+    JSObject::cast(clone)->set_elements(elem, SKIP_WRITE_BARRIER);
   }
   // Update properties if necessary.
   if (properties->length() > 0) {
@@ -3534,7 +3483,7 @@ AllocationResult Heap::CopyJSObject(JSObject* source, AllocationSite* site) {
       AllocationResult allocation = CopyFixedArray(properties);
       if (!allocation.To(&prop)) return allocation;
     }
-    JSObject::cast(clone)->set_properties(prop, wb_mode);
+    JSObject::cast(clone)->set_properties(prop, SKIP_WRITE_BARRIER);
   }
   // Return the new clone.
   return clone;
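
The switch from the wb_mode variable to a hard-coded SKIP_WRITE_BARRIER is sound because the clone is now always allocated in new space, and write barriers exist to record old-to-new pointers; a store into a new-space host can never create one that needs recording. Sketched as a predicate (InNewSpace is stubbed here; the real check is Heap::InNewSpace):

enum WriteBarrierMode { UPDATE_WRITE_BARRIER, SKIP_WRITE_BARRIER };

// Stub: after this patch, CopyJSObject's clone is always in new space.
bool InNewSpace(const void* /*object*/) { return true; }

// Barrier choice for a store into host: only old-space hosts can hold
// old-to-new pointers that the remembered set must track.
WriteBarrierMode ModeFor(const void* host) {
  return InNewSpace(host) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER;
}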
@@ -3608,7 +3557,7 @@ AllocationResult Heap::AllocateInternalizedStringImpl(T t, int chars,
   // Allocate string.
   HeapObject* result = nullptr;
   {
-    AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
 
@@ -3650,7 +3599,7 @@ AllocationResult Heap::AllocateRawOneByteString(int length,
 
   HeapObject* result = nullptr;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space);
     if (!allocation.To(&result)) return allocation;
   }
 
@@ -3674,7 +3623,7 @@ AllocationResult Heap::AllocateRawTwoByteString(int length,
 
   HeapObject* result = nullptr;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space);
     if (!allocation.To(&result)) return allocation;
   }
 
@@ -3691,7 +3640,7 @@ AllocationResult Heap::AllocateEmptyFixedArray() {
   int size = FixedArray::SizeFor(0);
   HeapObject* result = nullptr;
   {
-    AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
   // Initialize the object.
@@ -3807,7 +3756,7 @@ AllocationResult Heap::AllocateRawFixedArray(int length,
   int size = FixedArray::SizeFor(length);
   AllocationSpace space = SelectSpace(pretenure);
 
-  return AllocateRaw(size, space, OLD_SPACE);
+  return AllocateRaw(size, space);
 }
 
 
@@ -3878,8 +3827,7 @@ AllocationResult Heap::AllocateRawFixedDoubleArray(int length,
 
   HeapObject* object = nullptr;
   {
-    AllocationResult allocation =
-        AllocateRaw(size, space, OLD_SPACE, kDoubleAligned);
+    AllocationResult allocation = AllocateRaw(size, space, kDoubleAligned);
     if (!allocation.To(&object)) return allocation;
   }
 
@@ -3892,8 +3840,7 @@ AllocationResult Heap::AllocateSymbol() {
   STATIC_ASSERT(Symbol::kSize <= Page::kMaxRegularHeapObjectSize);
 
   HeapObject* result = nullptr;
-  AllocationResult allocation =
-      AllocateRaw(Symbol::kSize, OLD_SPACE, OLD_SPACE);
+  AllocationResult allocation = AllocateRaw(Symbol::kSize, OLD_SPACE);
   if (!allocation.To(&result)) return allocation;
 
   result->set_map_no_write_barrier(symbol_map());
src/heap/heap.h
index 1029250..410b649 100644 (file)
@@ -1967,7 +1967,7 @@ class Heap {
   // performed by the runtime and should not be bypassed (to extend this to
   // inlined allocations, use the Heap::DisableInlineAllocation() support).
   MUST_USE_RESULT inline AllocationResult AllocateRaw(
-      int size_in_bytes, AllocationSpace space, AllocationSpace retry_space,
+      int size_in_bytes, AllocationSpace space,
       AllocationAlignment aligment = kWordAligned);
 
   // Allocates a heap object based on the map.
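
For reference, the call shapes permitted by the new declaration, again using the stand-ins from the first sketch (the alignment argument defaults to kWordAligned, which is why most call sites in this patch simply dropped their second space argument):

// Call shapes against the new two/three-argument signature.
void Examples() {
  AllocationResult a = AllocateRaw(128, OLD_SPACE);                  // default alignment
  AllocationResult b = AllocateRaw(128, OLD_SPACE, kDoubleAligned);  // explicit alignment
  (void)a; (void)b;
}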
test/cctest/test-alloc.cc
index 19e10d5..b9d0f61 100644 (file)
@@ -40,7 +40,6 @@ AllocationResult v8::internal::HeapTester::AllocateAfterFailures() {
   Heap* heap = CcTest::heap();
 
   // New space.
-  SimulateFullSpace(heap->new_space());
   heap->AllocateByteArray(100).ToObjectChecked();
   heap->AllocateFixedArray(100, NOT_TENURED).ToObjectChecked();
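
Dropping SimulateFullSpace(heap->new_space()) follows from the new contract: the test used to rely on a failed new-space allocation falling back to the retry space, but with the fallback gone, an allocation attempted against an artificially exhausted new space would just report a retry. Illustrated with the stand-ins from the first sketch (new_space_full plays the role of SimulateFullSpace, purely for illustration):

// With the fallback removed, exhausting new space no longer redirects the
// allocation to old space -- the caller just sees a retry result.
void FullNewSpaceExample() {
  new_space_full = true;                             // plays SimulateFullSpace
  AllocationResult r = AllocateRaw(100, NEW_SPACE);
  // r.IsRetry() is true: before this patch, always_allocate() would have
  // routed the request into the retry space (OLD_SPACE) instead.
  (void)r;
}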
 
test/cctest/test-heap.cc
index ec76d58..570f48a 100644 (file)
@@ -1181,85 +1181,6 @@ TEST(Iteration) {
 }
 
 
-static int LenFromSize(int size) {
-  return (size - FixedArray::kHeaderSize) / kPointerSize;
-}
-
-
-HEAP_TEST(Regression39128) {
-  // Test case for crbug.com/39128.
-  CcTest::InitializeVM();
-  Isolate* isolate = CcTest::i_isolate();
-  Heap* heap = CcTest::heap();
-
-  // Increase the chance of 'bump-the-pointer' allocation in old space.
-  heap->CollectAllGarbage();
-
-  v8::HandleScope scope(CcTest::isolate());
-
-  // The plan: create JSObject which references objects in new space.
-  // Then clone this object (forcing it to go into old space) and check
-  // that region dirty marks are updated correctly.
-
-  // Step 1: prepare a map for the object.  We add 1 inobject property to it.
-  // Create a map with single inobject property.
-  Handle<Map> my_map = Map::Create(CcTest::i_isolate(), 1);
-  int n_properties = my_map->GetInObjectProperties();
-  CHECK_GT(n_properties, 0);
-
-  int object_size = my_map->instance_size();
-
-  // Step 2: allocate a lot of objects so to almost fill new space: we need
-  // just enough room to allocate JSObject and thus fill the newspace.
-
-  int allocation_amount = Min(FixedArray::kMaxSize,
-                              Page::kMaxRegularHeapObjectSize + kPointerSize);
-  int allocation_len = LenFromSize(allocation_amount);
-  NewSpace* new_space = heap->new_space();
-  DisableInlineAllocationSteps(new_space);
-  Address* top_addr = new_space->allocation_top_address();
-  Address* limit_addr = new_space->allocation_limit_address();
-  while ((*limit_addr - *top_addr) > allocation_amount) {
-    CHECK(!heap->always_allocate());
-    Object* array = heap->AllocateFixedArray(allocation_len).ToObjectChecked();
-    CHECK(new_space->Contains(array));
-  }
-
-  // Step 3: now allocate fixed array and JSObject to fill the whole new space.
-  int to_fill = static_cast<int>(*limit_addr - *top_addr - object_size);
-  int fixed_array_len = LenFromSize(to_fill);
-  CHECK(fixed_array_len < FixedArray::kMaxLength);
-
-  CHECK(!heap->always_allocate());
-  Object* array = heap->AllocateFixedArray(fixed_array_len).ToObjectChecked();
-  CHECK(new_space->Contains(array));
-
-  Object* object = heap->AllocateJSObjectFromMap(*my_map).ToObjectChecked();
-  CHECK(new_space->Contains(object));
-  JSObject* jsobject = JSObject::cast(object);
-  CHECK_EQ(0, FixedArray::cast(jsobject->elements())->length());
-  CHECK_EQ(0, jsobject->properties()->length());
-  // Create a reference to object in new space in jsobject.
-  FieldIndex index = FieldIndex::ForInObjectOffset(
-      JSObject::kHeaderSize - kPointerSize);
-  jsobject->FastPropertyAtPut(index, array);
-
-  CHECK_EQ(0, static_cast<int>(*limit_addr - *top_addr));
-
-  // Step 4: clone jsobject, but force always allocate first to create a clone
-  // in old pointer space.
-  Address old_space_top = heap->old_space()->top();
-  AlwaysAllocateScope aa_scope(isolate);
-  Object* clone_obj = heap->CopyJSObject(jsobject).ToObjectChecked();
-  JSObject* clone = JSObject::cast(clone_obj);
-  if (clone->address() != old_space_top) {
-    // Alas, got allocated from free list, we cannot do checks.
-    return;
-  }
-  CHECK(heap->old_space()->Contains(clone->address()));
-}
-
-
 UNINITIALIZED_TEST(TestCodeFlushing) {
   // If we do not flush code this test is invalid.
   if (!FLAG_flush_code) return;
@@ -3691,38 +3612,6 @@ TEST(CountForcedGC) {
 }
 
 
-TEST(Regress2237) {
-  i::FLAG_stress_compaction = false;
-  CcTest::InitializeVM();
-  Isolate* isolate = CcTest::i_isolate();
-  Factory* factory = isolate->factory();
-  v8::HandleScope scope(CcTest::isolate());
-  Handle<String> slice(CcTest::heap()->empty_string());
-
-  {
-    // Generate a parent that lives in new-space.
-    v8::HandleScope inner_scope(CcTest::isolate());
-    const char* c = "This text is long enough to trigger sliced strings.";
-    Handle<String> s = factory->NewStringFromAsciiChecked(c);
-    CHECK(s->IsSeqOneByteString());
-    CHECK(CcTest::heap()->InNewSpace(*s));
-
-    // Generate a sliced string that is based on the above parent and
-    // lives in old-space.
-    SimulateFullSpace(CcTest::heap()->new_space());
-    AlwaysAllocateScope always_allocate(isolate);
-    Handle<String> t = factory->NewProperSubString(s, 5, 35);
-    CHECK(t->IsSlicedString());
-    CHECK(!CcTest::heap()->InNewSpace(*t));
-    *slice.location() = *t.location();
-  }
-
-  CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
-  CcTest::heap()->CollectAllGarbage();
-  CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
-}
-
-
 #ifdef OBJECT_PRINT
 TEST(PrintSharedFunctionInfo) {
   CcTest::InitializeVM();
test/cctest/test-strings.cc
index ce60b95..6e0ee04 100644 (file)
@@ -642,6 +642,7 @@ static inline void PrintStats(const ConsStringGenerationData& data) {
 
 template<typename BuildString>
 void TestStringCharacterStream(BuildString build, int test_cases) {
+  FLAG_gc_global = true;
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
   HandleScope outer_scope(isolate);
test/cctest/test-unboxed-doubles.cc
index 3299e12..3dd56ee 100644 (file)
@@ -1404,89 +1404,6 @@ TEST(StoreBufferScanOnScavenge) {
 }
 
 
-static int LenFromSize(int size) {
-  return (size - FixedArray::kHeaderSize) / kPointerSize;
-}
-
-
-HEAP_TEST(WriteBarriersInCopyJSObject) {
-  FLAG_max_semi_space_size = 1;  // Ensure new space is not growing.
-  CcTest::InitializeVM();
-  Isolate* isolate = CcTest::i_isolate();
-  Heap* heap = CcTest::heap();
-
-  v8::HandleScope scope(CcTest::isolate());
-
-  // The plan: create JSObject which contains unboxed double value that looks
-  // like a reference to an object in new space.
-  // Then clone this object (forcing it to go into old space) and check
-  // that the value of the unboxed double property of the cloned object has
-  // was not corrupted by GC.
-
-  // Step 1: prepare a map for the object. We add unboxed double property to it.
-  // Create a map with single inobject property.
-  Handle<Map> my_map = Map::Create(isolate, 1);
-  Handle<String> name = isolate->factory()->InternalizeUtf8String("foo");
-  my_map = Map::CopyWithField(my_map, name, HeapType::Any(isolate), NONE,
-                              Representation::Double(),
-                              INSERT_TRANSITION).ToHandleChecked();
-
-  int object_size = my_map->instance_size();
-
-  // Step 2: allocate a lot of objects so to almost fill new space: we need
-  // just enough room to allocate JSObject and thus fill the newspace.
-
-  int allocation_amount =
-      Min(FixedArray::kMaxSize, Page::kMaxRegularHeapObjectSize + kPointerSize);
-  int allocation_len = LenFromSize(allocation_amount);
-  NewSpace* new_space = heap->new_space();
-  DisableInlineAllocationSteps(new_space);
-  Address* top_addr = new_space->allocation_top_address();
-  Address* limit_addr = new_space->allocation_limit_address();
-  while ((*limit_addr - *top_addr) > allocation_amount) {
-    CHECK(!heap->always_allocate());
-    Object* array = heap->AllocateFixedArray(allocation_len).ToObjectChecked();
-    CHECK(new_space->Contains(array));
-  }
-
-  // Step 3: now allocate fixed array and JSObject to fill the whole new space.
-  int to_fill = static_cast<int>(*limit_addr - *top_addr - object_size);
-  int fixed_array_len = LenFromSize(to_fill);
-  CHECK(fixed_array_len < FixedArray::kMaxLength);
-
-  CHECK(!heap->always_allocate());
-  Object* array = heap->AllocateFixedArray(fixed_array_len).ToObjectChecked();
-  CHECK(new_space->Contains(array));
-
-  Object* object = heap->AllocateJSObjectFromMap(*my_map).ToObjectChecked();
-  CHECK(new_space->Contains(object));
-  JSObject* jsobject = JSObject::cast(object);
-  CHECK_EQ(0, FixedArray::cast(jsobject->elements())->length());
-  CHECK_EQ(0, jsobject->properties()->length());
-
-  // Construct a double value that looks like a pointer to the new space object
-  // and store it into the obj.
-  Address fake_object = reinterpret_cast<Address>(array) + kPointerSize;
-  double boom_value = bit_cast<double>(fake_object);
-  FieldIndex index = FieldIndex::ForDescriptor(*my_map, 0);
-  jsobject->RawFastDoublePropertyAtPut(index, boom_value);
-
-  CHECK_EQ(0, static_cast<int>(*limit_addr - *top_addr));
-
-  // Step 4: clone jsobject, but force always allocate first to create a clone
-  // in old pointer space.
-  AlwaysAllocateScope aa_scope(isolate);
-  Object* clone_obj = heap->CopyJSObject(jsobject).ToObjectChecked();
-  Handle<JSObject> clone(JSObject::cast(clone_obj));
-  CHECK(heap->old_space()->Contains(clone->address()));
-
-  CcTest::heap()->CollectGarbage(NEW_SPACE, "boom");
-
-  // The value in cloned object should not be corrupted by GC.
-  CHECK_EQ(boom_value, clone->RawFastDoublePropertyAt(index));
-}
-
-
 static void TestWriteBarrier(Handle<Map> map, Handle<Map> new_map,
                              int tagged_descriptor, int double_descriptor,
                              bool check_tagged_value = true) {
@@ -1553,7 +1470,6 @@ static void TestIncrementalWriteBarrier(Handle<Map> map, Handle<Map> new_map,
                                         int double_descriptor,
                                         bool check_tagged_value = true) {
   if (FLAG_never_compact || !FLAG_incremental_marking) return;
-  FLAG_stress_compaction = true;
   FLAG_manual_evacuation_candidates_selection = true;
   Isolate* isolate = CcTest::i_isolate();
   Factory* factory = isolate->factory();