}
-void StoreBuffer::RemoveSlots(Address start_address, Address end_address) {
- struct IsValueInRangePredicate {
- Address start_address_;
- Address end_address_;
-
- IsValueInRangePredicate(Address start_address, Address end_address)
- : start_address_(start_address), end_address_(end_address) {}
-
- bool operator()(Address addr) {
- return start_address_ <= addr && addr < end_address_;
- }
- };
-
- IsValueInRangePredicate predicate(start_address, end_address);
- // Some address in old space that does not move.
- const Address kRemovedSlot = heap_->undefined_value()->address();
- DCHECK(Page::FromAddress(kRemovedSlot)->NeverEvacuate());
-
- {
- Address* top = reinterpret_cast<Address*>(heap_->store_buffer_top());
- std::replace_if(start_, top, predicate, kRemovedSlot);
- }
-
- if (old_buffer_is_sorted_) {
- // Remove slots from an old buffer preserving the order.
- Address* lower = std::lower_bound(old_start_, old_top_, start_address);
- if (lower != old_top_) {
- // [lower, old_top_) range contains elements that are >= |start_address|.
- Address* upper = std::lower_bound(lower, old_top_, end_address);
- // Remove [lower, upper) from the buffer.
- if (upper == old_top_) {
- // All elements in [lower, old_top_) range are < |end_address|.
- old_top_ = lower;
- } else if (lower != upper) {
- // [upper, old_top_) range contains elements that are >= |end_address|,
- // move [upper, old_top_) range to [lower, ...) and update old_top_.
- Address* new_top = lower;
- for (Address* p = upper; p < old_top_; p++) {
- *new_top++ = *p;
- }
- old_top_ = new_top;
- }
- }
- } else {
- std::replace_if(old_start_, old_top_, predicate, kRemovedSlot);
- }
-}
-
-
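For context, the deleted old-buffer branch above is the standard "erase a contiguous range from a sorted array in place" pattern. A minimal standalone sketch of the same idea, with illustrative names and a std::vector in place of V8's raw buffer (not part of the patch):

#include <algorithm>
#include <cstdint>
#include <vector>

using Address = uintptr_t;

// Drop every slot in [start_address, end_address) from a sorted buffer while
// keeping the remaining entries in order -- the same effect as the manual
// lower_bound/copy loop in the removed StoreBuffer::RemoveSlots().
void RemoveRangeFromSortedBuffer(std::vector<Address>* buffer,
                                 Address start_address, Address end_address) {
  auto lower = std::lower_bound(buffer->begin(), buffer->end(), start_address);
  auto upper = std::lower_bound(lower, buffer->end(), end_address);
  buffer->erase(lower, upper);
}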
void StoreBuffer::SortUniq() {
Compact();
if (old_buffer_is_sorted_) return;
bool StoreBuffer::CellIsInStoreBuffer(Address cell_address) {
- DCHECK_NOT_NULL(cell_address);
- Address* top = reinterpret_cast<Address*>(heap_->store_buffer_top());
+ if (!FLAG_enable_slow_asserts) return true;
if (in_store_buffer_1_element_cache != NULL &&
*in_store_buffer_1_element_cache == cell_address) {
- // Check if the cache still points into the active part of the buffer.
- if ((start_ <= in_store_buffer_1_element_cache &&
- in_store_buffer_1_element_cache < top) ||
- (old_start_ <= in_store_buffer_1_element_cache &&
- in_store_buffer_1_element_cache < old_top_)) {
- return true;
- }
+ return true;
}
+ Address* top = reinterpret_cast<Address*>(heap_->store_buffer_top());
for (Address* current = top - 1; current >= start_; current--) {
if (*current == cell_address) {
in_store_buffer_1_element_cache = current;
}
-// Returns true if during migration from |old_map| to |new_map| "tagged"
-// inobject fields are going to be replaced with unboxed double fields.
-static bool ShouldClearSlotsRecorded(Map* old_map, Map* new_map,
- int new_number_of_fields) {
- DisallowHeapAllocation no_gc;
- int inobject = new_map->inobject_properties();
- DCHECK(inobject <= old_map->inobject_properties());
-
- int limit = Min(inobject, new_number_of_fields);
- for (int i = 0; i < limit; i++) {
- FieldIndex index = FieldIndex::ForPropertyIndex(new_map, i);
- if (new_map->IsUnboxedDoubleField(index) &&
- !old_map->IsUnboxedDoubleField(index)) {
- return true;
- }
- }
- return false;
-}
-
-
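The check deleted above reduces to: during a map transition, does any in-object field go from holding a tagged pointer to holding a raw (unboxed) double? A minimal self-contained sketch of that predicate, using illustrative types rather than V8's Map/FieldIndex machinery (not part of the patch):

#include <algorithm>
#include <cstddef>
#include <vector>

enum class FieldKind { kTagged, kUnboxedDouble };

// True if migrating from |old_fields| to |new_fields| turns a field that used
// to hold a tagged pointer into a raw double, in which case any slot the GC
// recorded for that field must be dropped before the raw bits are written.
bool ShouldClearRecordedSlots(const std::vector<FieldKind>& old_fields,
                              const std::vector<FieldKind>& new_fields) {
  std::size_t limit = std::min(old_fields.size(), new_fields.size());
  for (std::size_t i = 0; i < limit; i++) {
    if (new_fields[i] == FieldKind::kUnboxedDouble &&
        old_fields[i] == FieldKind::kTagged) {
      return true;
    }
  }
  return false;
}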
-static void RemoveOldToOldSlotsRecorded(Heap* heap, JSObject* object,
- FieldIndex index) {
- DisallowHeapAllocation no_gc;
-
- Object* old_value = object->RawFastPropertyAt(index);
- if (old_value->IsHeapObject()) {
- HeapObject* ho = HeapObject::cast(old_value);
- if (heap->InNewSpace(ho)) {
- // At this point there must be no old-to-new slots recorded for this
- // object.
- SLOW_DCHECK(
- !heap->store_buffer()->CellIsInStoreBuffer(reinterpret_cast<Address>(
- HeapObject::RawField(object, index.offset()))));
- } else {
- Page* p = Page::FromAddress(reinterpret_cast<Address>(ho));
- if (p->IsEvacuationCandidate()) {
- Object** slot = HeapObject::RawField(object, index.offset());
- SlotsBuffer::RemoveSlot(p->slots_buffer(), slot);
- }
- }
- }
-}
-
-
// To migrate a fast instance to a fast map:
// - First check whether the instance needs to be rewritten. If not, simply
// change the map.
// From here on we cannot fail and we shouldn't GC anymore.
DisallowHeapAllocation no_allocation;
- Heap* heap = isolate->heap();
-
- // If we are going to put an unboxed double into a field that used to
- // contain a HeapObject, we should ensure that this slot is removed from
- // both the StoreBuffer and the respective SlotsBuffer.
- bool clear_slots_recorded =
- FLAG_unbox_double_fields && !heap->InNewSpace(object->address()) &&
- ShouldClearSlotsRecorded(*old_map, *new_map, number_of_fields);
- if (clear_slots_recorded) {
- Address obj_address = object->address();
- Address end_address = obj_address + old_map->instance_size();
- heap->store_buffer()->RemoveSlots(obj_address, end_address);
- }
-
// Copy (real) inobject properties. If necessary, stop at number_of_fields to
// avoid overwriting |one_pointer_filler_map|.
int limit = Min(inobject, number_of_fields);
for (int i = 0; i < limit; i++) {
FieldIndex index = FieldIndex::ForPropertyIndex(*new_map, i);
Object* value = array->get(external + i);
+ // Can't use JSObject::FastPropertyAtPut() because the proper map is not
+ // set yet.
if (new_map->IsUnboxedDoubleField(index)) {
DCHECK(value->IsMutableHeapNumber());
- if (clear_slots_recorded && !old_map->IsUnboxedDoubleField(index)) {
- RemoveOldToOldSlotsRecorded(heap, *object, index);
- }
object->RawFastDoublePropertyAtPut(index,
HeapNumber::cast(value)->value());
} else {
}
}
+ Heap* heap = isolate->heap();
+
// If there are properties in the new backing store, trim it to the correct
// size and install the backing store into the object.
if (external > 0) {
}
-Handle<JSObject> GetObject(const char* name) {
- return v8::Utils::OpenHandle(
- *v8::Handle<v8::Object>::Cast(CcTest::global()->Get(v8_str(name))));
-}
-
-
static double GetDoubleFieldValue(JSObject* obj, FieldIndex field_index) {
if (obj->IsUnboxedDoubleField(field_index)) {
return obj->RawFastDoublePropertyAt(field_index);
CHECK_EQ(boom_value, clone->RawFastDoublePropertyAt(index));
}
-
-static void TestWriteBarrier(Handle<Map> map, Handle<Map> new_map,
- int tagged_descriptor, int double_descriptor,
- bool check_tagged_value = true) {
- FLAG_stress_compaction = true;
- FLAG_manual_evacuation_candidates_selection = true;
- Isolate* isolate = CcTest::i_isolate();
- Factory* factory = isolate->factory();
- Heap* heap = CcTest::heap();
- PagedSpace* old_pointer_space = heap->old_pointer_space();
-
- // The plan: create |obj| by |map| in old space, create |obj_value| in
- // new space and ensure that write barrier is triggered when |obj_value| is
- // written to property |tagged_descriptor| of |obj|.
- // Then migrate object to |new_map| and set proper value for property
- // |double_descriptor|. Call GC and ensure that it does not crash while
- // updating store buffer entries.
-
- Handle<JSObject> obj;
- Handle<HeapObject> obj_value;
- {
- AlwaysAllocateScope always_allocate(isolate);
- obj = factory->NewJSObjectFromMap(map, TENURED, false);
- CHECK(old_pointer_space->Contains(*obj));
-
- obj_value = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS);
- }
-
- CHECK(heap->InNewSpace(*obj_value));
-
- StoreBuffer* store_buffer = heap->store_buffer();
- USE(store_buffer);
- Address slot;
- {
- FieldIndex index = FieldIndex::ForDescriptor(*map, tagged_descriptor);
- int offset = index.offset();
- slot = reinterpret_cast<Address>(HeapObject::RawField(*obj, offset));
- USE(slot);
- DCHECK(!store_buffer->CellIsInStoreBuffer(slot));
-
- const int n = 153;
- for (int i = 0; i < n; i++) {
- obj->FastPropertyAtPut(index, *obj_value);
- }
- // Ensure that the slot was actually added to the store buffer.
- DCHECK(store_buffer->CellIsInStoreBuffer(slot));
- }
-
- // Migrate |obj| to |new_map| which should shift fields and put the
- // |boom_value| to the slot that was earlier recorded by write barrier.
- JSObject::MigrateToMap(obj, new_map);
-
- // Ensure that invalid entries were removed from the store buffer.
- DCHECK(!store_buffer->CellIsInStoreBuffer(slot));
-
- Address fake_object = reinterpret_cast<Address>(*obj_value) + kPointerSize;
- double boom_value = bit_cast<double>(fake_object);
-
- FieldIndex double_field_index =
- FieldIndex::ForDescriptor(*new_map, double_descriptor);
- CHECK(obj->IsUnboxedDoubleField(double_field_index));
- obj->RawFastDoublePropertyAtPut(double_field_index, boom_value);
-
- // Trigger GC to evacuate all candidates.
- CcTest::heap()->CollectGarbage(NEW_SPACE, "boom");
-
- if (check_tagged_value) {
- FieldIndex tagged_field_index =
- FieldIndex::ForDescriptor(*new_map, tagged_descriptor);
- CHECK_EQ(*obj_value, obj->RawFastPropertyAt(tagged_field_index));
- }
- CHECK_EQ(boom_value, obj->RawFastDoublePropertyAt(double_field_index));
-}
-
-
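The |boom_value| used above is built with bit_cast: the test writes raw pointer-like bits into an unboxed double field, so if a stale recorded slot still treats that field as tagged, the GC will follow a bogus pointer. A minimal stand-in for that bit reinterpretation, assuming only the standard library (bit_cast itself is a V8 utility):

#include <cstdint>
#include <cstring>

// Reinterpret a 64-bit pattern as a double without changing any bits; this is
// what bit_cast<double>(...) does for the boom values in these tests.
double DoubleFromBits(uint64_t bits) {
  static_assert(sizeof(double) == sizeof(uint64_t), "unexpected double size");
  double result;
  std::memcpy(&result, &bits, sizeof(result));
  return result;
}

// e.g. double boom_value = DoubleFromBits(UINT64_C(0xbaad0176a37c28e1));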
-static void TestIncrementalWriteBarrier(Handle<Map> map, Handle<Map> new_map,
- int tagged_descriptor,
- int double_descriptor,
- bool check_tagged_value = true) {
- if (FLAG_never_compact || !FLAG_incremental_marking) return;
- FLAG_stress_compaction = true;
- FLAG_manual_evacuation_candidates_selection = true;
- Isolate* isolate = CcTest::i_isolate();
- Factory* factory = isolate->factory();
- Heap* heap = CcTest::heap();
- PagedSpace* old_pointer_space = heap->old_pointer_space();
-
- // The plan: create |obj| by |map| in old space, create |obj_value| in
- // old space and ensure it ends up on an evacuation candidate page. Start
- // incremental marking and ensure that incremental write barrier is triggered
- // when |obj_value| is written to property |tagged_descriptor| of |obj|.
- // Then migrate object to |new_map| and set proper value for property
- // |double_descriptor|. Call GC and ensure that it does not crash while
- // updating slots buffer entries.
-
- Handle<JSObject> obj;
- Handle<HeapObject> obj_value;
- Page* ec_page;
- {
- AlwaysAllocateScope always_allocate(isolate);
- obj = factory->NewJSObjectFromMap(map, TENURED, false);
- CHECK(old_pointer_space->Contains(*obj));
-
- // Make sure |obj_value| is placed on an old-space evacuation candidate.
- SimulateFullSpace(old_pointer_space);
- obj_value = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
- ec_page = Page::FromAddress(obj_value->address());
- CHECK_NE(ec_page, Page::FromAddress(obj->address()));
- }
-
- // Heap is ready, force |ec_page| to become an evacuation candidate and
- // simulate incremental marking.
- ec_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
- SimulateIncrementalMarking(heap);
-
- // Check that everything is ready for triggering incremental write barrier
- // (i.e. that both |obj| and |obj_value| are black and the marking phase is
- // still active and |obj_value|'s page is indeed an evacuation candidate).
- IncrementalMarking* marking = heap->incremental_marking();
- CHECK(marking->IsMarking());
- CHECK(Marking::IsBlack(Marking::MarkBitFrom(*obj)));
- CHECK(Marking::IsBlack(Marking::MarkBitFrom(*obj_value)));
- CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));
-
- // Trigger incremental write barrier, which should add a slot to |ec_page|'s
- // slots buffer.
- {
- int slots_buffer_len = SlotsBuffer::SizeOfChain(ec_page->slots_buffer());
- FieldIndex index = FieldIndex::ForDescriptor(*map, tagged_descriptor);
- const int n = SlotsBuffer::kNumberOfElements + 10;
- for (int i = 0; i < n; i++) {
- obj->FastPropertyAtPut(index, *obj_value);
- }
- // Ensure that the slot was actually added to the |ec_page|'s slots buffer.
- CHECK_EQ(slots_buffer_len + n,
- SlotsBuffer::SizeOfChain(ec_page->slots_buffer()));
- }
-
- // Migrate |obj| to |new_map| which should shift fields and put the
- // |boom_value| to the slot that was earlier recorded by incremental write
- // barrier.
- JSObject::MigrateToMap(obj, new_map);
-
- double boom_value = bit_cast<double>(UINT64_C(0xbaad0176a37c28e1));
-
- FieldIndex double_field_index =
- FieldIndex::ForDescriptor(*new_map, double_descriptor);
- CHECK(obj->IsUnboxedDoubleField(double_field_index));
- obj->RawFastDoublePropertyAtPut(double_field_index, boom_value);
-
- // Trigger GC to evacuate all candidates.
- CcTest::heap()->CollectGarbage(OLD_POINTER_SPACE, "boom");
-
- // Ensure that the values are still there and correct.
- CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));
-
- if (check_tagged_value) {
- FieldIndex tagged_field_index =
- FieldIndex::ForDescriptor(*new_map, tagged_descriptor);
- CHECK_EQ(*obj_value, obj->RawFastPropertyAt(tagged_field_index));
- }
- CHECK_EQ(boom_value, obj->RawFastDoublePropertyAt(double_field_index));
-}
-
-
-enum WriteBarrierKind { OLD_TO_OLD_WRITE_BARRIER, OLD_TO_NEW_WRITE_BARRIER };
-static void TestWriteBarrierObjectShiftFieldsRight(
- WriteBarrierKind write_barrier_kind) {
- CcTest::InitializeVM();
- Isolate* isolate = CcTest::i_isolate();
- v8::HandleScope scope(CcTest::isolate());
-
- Handle<HeapType> any_type = HeapType::Any(isolate);
-
- CompileRun("function func() { return 1; }");
-
- Handle<JSObject> func = GetObject("func");
-
- Handle<Map> map = Map::Create(isolate, 10);
- map = Map::CopyWithConstant(map, MakeName("prop", 0), func, NONE,
- INSERT_TRANSITION).ToHandleChecked();
- map = Map::CopyWithField(map, MakeName("prop", 1), any_type, NONE,
- Representation::Double(),
- INSERT_TRANSITION).ToHandleChecked();
- map = Map::CopyWithField(map, MakeName("prop", 2), any_type, NONE,
- Representation::Tagged(),
- INSERT_TRANSITION).ToHandleChecked();
-
- // Shift fields right by turning constant property to a field.
- Handle<Map> new_map = Map::ReconfigureProperty(
- map, 0, kData, NONE, Representation::Tagged(), any_type, FORCE_FIELD);
-
- if (write_barrier_kind == OLD_TO_NEW_WRITE_BARRIER) {
- TestWriteBarrier(map, new_map, 2, 1);
- } else {
- CHECK_EQ(OLD_TO_OLD_WRITE_BARRIER, write_barrier_kind);
- TestIncrementalWriteBarrier(map, new_map, 2, 1);
- }
-}
-
-
-TEST(WriteBarrierObjectShiftFieldsRight) {
- TestWriteBarrierObjectShiftFieldsRight(OLD_TO_NEW_WRITE_BARRIER);
-}
-
-
-TEST(IncrementalWriteBarrierObjectShiftFieldsRight) {
- TestWriteBarrierObjectShiftFieldsRight(OLD_TO_OLD_WRITE_BARRIER);
-}
-
-
-// TODO(ishell): add respective tests for property kind reconfiguring from
-// accessor field to double, once accessor fields are supported by
-// Map::ReconfigureProperty().
-
-
-// TODO(ishell): add respective tests for fast property removal case once
-// Map::ReconfigureProperty() supports that.
-
#endif