From: ishell
Date: Tue, 3 Feb 2015 10:28:18 +0000 (-0800)
Subject: Avoid issuing write barriers for unboxed double fields in Heap::CopyJSObject().
X-Git-Tag: upstream/4.7.83~4646
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=ba8409d2f66ee56fd18198019591f49c9c675244;p=platform%2Fupstream%2Fv8.git

Avoid issuing write barriers for unboxed double fields in Heap::CopyJSObject().

Review URL: https://codereview.chromium.org/880043003

Cr-Commit-Position: refs/heads/master@{#26394}
---

diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index 52bd70a..d5417ac 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -3906,9 +3906,33 @@ AllocationResult Heap::CopyJSObject(JSObject* source, AllocationSite* site) {
     }
     Address clone_address = clone->address();
     CopyBlock(clone_address, source->address(), object_size);
-    // Update write barrier for all fields that lie beyond the header.
-    RecordWrites(clone_address, JSObject::kHeaderSize,
-                 (object_size - JSObject::kHeaderSize) / kPointerSize);
+
+    // Update write barrier for all tagged fields that lie beyond the header.
+    const int start_offset = JSObject::kHeaderSize;
+    const int end_offset = object_size;
+
+#if V8_DOUBLE_FIELDS_UNBOXING
+    LayoutDescriptorHelper helper(map);
+    bool has_only_tagged_fields = helper.all_fields_tagged();
+
+    if (!has_only_tagged_fields) {
+      for (int offset = start_offset; offset < end_offset;) {
+        int end_of_region_offset;
+        if (helper.IsTagged(offset, end_offset, &end_of_region_offset)) {
+          RecordWrites(clone_address, offset,
+                       (end_of_region_offset - offset) / kPointerSize);
+        }
+        offset = end_of_region_offset;
+      }
+    } else {
+#endif
+      // Object has only tagged fields.
+      RecordWrites(clone_address, start_offset,
+                   (end_offset - start_offset) / kPointerSize);
+#if V8_DOUBLE_FIELDS_UNBOXING
+    }
+#endif
+
   } else {
     wb_mode = SKIP_WRITE_BARRIER;

diff --git a/test/cctest/test-unboxed-doubles.cc b/test/cctest/test-unboxed-doubles.cc
index ca78455..a783ce6 100644
--- a/test/cctest/test-unboxed-doubles.cc
+++ b/test/cctest/test-unboxed-doubles.cc
@@ -1223,4 +1223,89 @@ TEST(StoreBufferScanOnScavenge) {
   CHECK_EQ(boom_value, GetDoubleFieldValue(*obj, field_index));
 }
 
+
+static int LenFromSize(int size) {
+  return (size - FixedArray::kHeaderSize) / kPointerSize;
+}
+
+
+TEST(WriteBarriersInCopyJSObject) {
+  FLAG_max_semi_space_size = 1;  // Ensure new space is not growing.
+  CcTest::InitializeVM();
+  Isolate* isolate = CcTest::i_isolate();
+  TestHeap* heap = CcTest::test_heap();
+
+  v8::HandleScope scope(CcTest::isolate());
+
+  // The plan: create a JSObject that contains an unboxed double value which
+  // looks like a reference to an object in new space. Then clone this object
+  // (forcing it to go into old space) and check that the value of the unboxed
+  // double property of the cloned object was not corrupted by GC.
+
+  // Step 1: prepare a map for the object. We add an unboxed double property
+  // to it, creating a map with a single in-object property.
+  Handle<Map> my_map = Map::Create(isolate, 1);
+  Handle<String> name = isolate->factory()->InternalizeUtf8String("foo");
+  my_map = Map::CopyWithField(my_map, name, HeapType::Any(isolate), NONE,
+                              Representation::Double(),
+                              INSERT_TRANSITION).ToHandleChecked();
+  my_map->set_pre_allocated_property_fields(1);
+  int n_properties = my_map->InitialPropertiesLength();
+  CHECK_GE(n_properties, 0);
+
+  int object_size = my_map->instance_size();
+
+  // Step 2: allocate a lot of objects so as to almost fill new space: we need
+  // just enough room left to allocate the JSObject and thus fill new space.
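+  // (The loop below keeps allocating fixed arrays of allocation_amount bytes
+  //  until no more than that much room is left between the new-space
+  //  allocation top and limit; Step 3 then fills the remaining gap exactly
+  //  with one more fixed array plus the JSObject.)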
+
+  int allocation_amount =
+      Min(FixedArray::kMaxSize, Page::kMaxRegularHeapObjectSize + kPointerSize);
+  int allocation_len = LenFromSize(allocation_amount);
+  NewSpace* new_space = heap->new_space();
+  Address* top_addr = new_space->allocation_top_address();
+  Address* limit_addr = new_space->allocation_limit_address();
+  while ((*limit_addr - *top_addr) > allocation_amount) {
+    CHECK(!heap->always_allocate());
+    Object* array = heap->AllocateFixedArray(allocation_len).ToObjectChecked();
+    CHECK(new_space->Contains(array));
+  }
+
+  // Step 3: now allocate a fixed array and the JSObject to fill the whole
+  // new space.
+  int to_fill = static_cast<int>(*limit_addr - *top_addr - object_size);
+  int fixed_array_len = LenFromSize(to_fill);
+  CHECK(fixed_array_len < FixedArray::kMaxLength);
+
+  CHECK(!heap->always_allocate());
+  Object* array = heap->AllocateFixedArray(fixed_array_len).ToObjectChecked();
+  CHECK(new_space->Contains(array));
+
+  Object* object = heap->AllocateJSObjectFromMap(*my_map).ToObjectChecked();
+  CHECK(new_space->Contains(object));
+  JSObject* jsobject = JSObject::cast(object);
+  CHECK_EQ(0, FixedArray::cast(jsobject->elements())->length());
+  CHECK_EQ(0, jsobject->properties()->length());
+
+  // Construct a double value that looks like a pointer to the new-space
+  // object and store it into the object.
+  Address fake_object = reinterpret_cast<Address>(array) + kPointerSize;
+  double boom_value = bit_cast<double>(fake_object);
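+  // (If CopyJSObject wrongly recorded a write barrier for the unboxed double
+  //  field holding boom_value, the scavenge triggered below could treat that
+  //  slot as a pointer into new space and rewrite it, corrupting the double.)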
+  FieldIndex index = FieldIndex::ForDescriptor(*my_map, 0);
+  jsobject->RawFastDoublePropertyAtPut(index, boom_value);
+
+  CHECK_EQ(0, static_cast<int>(*limit_addr - *top_addr));
+
+  // Step 4: clone jsobject, but force always-allocate first to create the
+  // clone in old pointer space.
+  AlwaysAllocateScope aa_scope(isolate);
+  Object* clone_obj = heap->CopyJSObject(jsobject).ToObjectChecked();
+  Handle<JSObject> clone(JSObject::cast(clone_obj));
+  CHECK(heap->old_pointer_space()->Contains(clone->address()));
+
+  CcTest::heap()->CollectGarbage(NEW_SPACE, "boom");
+
+  // The value in the cloned object should not be corrupted by GC.
+  CHECK_EQ(boom_value, clone->RawFastDoublePropertyAt(index));
+}
+
 #endif
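For context, the heap.cc change above walks the object's layout descriptor and records write barriers only for the contiguous runs of tagged fields, skipping unboxed double payloads whose raw bits may happen to look like new-space pointers. The following is a minimal standalone sketch of that region walk under simplified assumptions, not V8 code: FieldKind, RecordWritesForTaggedRegions and the printing callback are hypothetical stand-ins for V8's LayoutDescriptorHelper and Heap::RecordWrites.

#include <cstddef>
#include <iostream>
#include <vector>

enum class FieldKind { kTagged, kUnboxedDouble };

// Call the barrier callback once per maximal run of tagged fields; unboxed
// double fields are skipped, so their raw bits are never treated as pointers.
void RecordWritesForTaggedRegions(const std::vector<FieldKind>& layout,
                                  void (*record_writes)(std::size_t first,
                                                        std::size_t count)) {
  std::size_t i = 0;
  while (i < layout.size()) {
    if (layout[i] == FieldKind::kTagged) {
      std::size_t start = i;
      while (i < layout.size() && layout[i] == FieldKind::kTagged) ++i;
      record_writes(start, i - start);  // barrier for this tagged run only
    } else {
      ++i;  // unboxed double slot: no write barrier
    }
  }
}

int main() {
  // Layout analogous to the test object: tagged header slots followed by one
  // unboxed double in-object property.
  std::vector<FieldKind> layout = {FieldKind::kTagged, FieldKind::kTagged,
                                   FieldKind::kUnboxedDouble};
  RecordWritesForTaggedRegions(layout, [](std::size_t first, std::size_t count) {
    std::cout << "record writes for slots [" << first << ", " << first + count
              << ")\n";
  });
  return 0;
}

The design point mirrored here is that the barrier granularity becomes a run of tagged slots rather than the whole post-header area, so a raw double field is never handed to the store buffer as if it were a heap reference.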