From: hpayer Date: Tue, 30 Jun 2015 13:32:02 +0000 (-0700) Subject: Directly remove slot buffer entries in deoptimized code objects. X-Git-Tag: upstream/4.7.83~1661 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=80b3f16951251d342ebaf9174872d50e46f048a5;p=platform%2Fupstream%2Fv8.git Directly remove slot buffer entries in deoptimized code objects. BUG= Review URL: https://codereview.chromium.org/1221643004 Cr-Commit-Position: refs/heads/master@{#29379} --- diff --git a/src/deoptimizer.cc b/src/deoptimizer.cc index f3bb07f..fac7e27 100644 --- a/src/deoptimizer.cc +++ b/src/deoptimizer.cc @@ -414,9 +414,9 @@ void Deoptimizer::DeoptimizeMarkedCodeForContext(Context* context) { PatchCodeForDeoptimization(isolate, codes[i]); // We might be in the middle of incremental marking with compaction. - // Tell collector to treat this code object in a special way and - // ignore all slots that might have been recorded on it. - isolate->heap()->mark_compact_collector()->InvalidateCode(codes[i]); + // Ignore all slots that might have been recorded on the deoptimized code + // object. 
+ isolate->heap()->mark_compact_collector()->RemoveObjectSlots(codes[i]); } } diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc index 13391a8..e465977 100644 --- a/src/heap/mark-compact.cc +++ b/src/heap/mark-compact.cc @@ -558,34 +558,6 @@ void MarkCompactCollector::RefillFreeList(PagedSpace* space) { } -void Marking::SetAllMarkBitsInRange(MarkBit start, MarkBit end) { - MarkBit::CellType* start_cell = start.cell(); - MarkBit::CellType* end_cell = end.cell(); - MarkBit::CellType start_mask = ~(start.mask() - 1); - MarkBit::CellType end_mask = (end.mask() << 1) - 1; - - if (start_cell == end_cell) { - *start_cell |= start_mask & end_mask; - } else { - *start_cell |= start_mask; - for (MarkBit::CellType* cell = start_cell + 1; cell < end_cell; cell++) { - *cell = ~0; - } - *end_cell |= end_mask; - } -} - - -void Marking::ClearAllMarkBitsOfCellsContainedInRange(MarkBit start, - MarkBit end) { - MarkBit::CellType* start_cell = start.cell(); - MarkBit::CellType* end_cell = end.cell(); - for (MarkBit::CellType* cell = start_cell; cell <= end_cell; cell++) { - *cell = 0; - } -} - - void Marking::TransferMark(Address old_start, Address new_start) { // This is only used when resizing an object. DCHECK(MemoryChunk::FromAddress(old_start) == @@ -778,7 +750,6 @@ void MarkCompactCollector::AbortCompaction() { } compacting_ = false; evacuation_candidates_.Rewind(0); - invalidated_code_.Rewind(0); } DCHECK_EQ(0, evacuation_candidates_.length()); } @@ -3242,6 +3213,21 @@ void MarkCompactCollector::VerifyIsSlotInLiveObject(Address slot, } +void MarkCompactCollector::RemoveObjectSlots(HeapObject* invalid_object) { + // Remove entries by replacing them with an old-space slot containing a smi + // that is located in an unmovable page. 
+ int npages = evacuation_candidates_.length(); + for (int i = 0; i < npages; i++) { + Page* p = evacuation_candidates_[i]; + DCHECK(p->IsEvacuationCandidate() || + p->IsFlagSet(Page::RESCAN_ON_EVACUATION)); + if (p->IsEvacuationCandidate()) { + SlotsBuffer::RemoveObjectSlots(heap_, p->slots_buffer(), invalid_object); + } + } +} + + void MarkCompactCollector::EvacuateNewSpace() { // There are soft limits in the allocation code, designed trigger a mark // sweep collection by failing allocations. But since we are already in @@ -3558,121 +3544,18 @@ static int Sweep(PagedSpace* space, FreeList* free_list, Page* p, } -static bool SetMarkBitsUnderInvalidatedCode(Code* code, bool value) { - Page* p = Page::FromAddress(code->address()); - - if (p->IsEvacuationCandidate() || p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) { - return false; - } - - Address code_start = code->address(); - Address code_end = code_start + code->Size(); - - uint32_t start_index = MemoryChunk::FastAddressToMarkbitIndex(code_start); - uint32_t end_index = - MemoryChunk::FastAddressToMarkbitIndex(code_end - kPointerSize); - - // TODO(hpayer): Filter out invalidated code in - // ClearInvalidSlotsBufferEntries. - Bitmap* b = p->markbits(); - - MarkBit start_mark_bit = b->MarkBitFromIndex(start_index); - MarkBit end_mark_bit = b->MarkBitFromIndex(end_index); - - if (value) { - Marking::SetAllMarkBitsInRange(start_mark_bit, end_mark_bit); - } else { - Marking::ClearAllMarkBitsOfCellsContainedInRange(start_mark_bit, - end_mark_bit); - } - - return true; -} - - -static bool IsOnInvalidatedCodeObject(Address addr) { - // We did not record any slots in large objects thus - // we can safely go to the page from the slot address. - Page* p = Page::FromAddress(addr); - - // First check owner's identity because old space is swept concurrently or - // lazily and might still have non-zero mark-bits on some pages. 
- if (p->owner()->identity() != CODE_SPACE) return false; - - // In code space only bits on evacuation candidates (but we don't record - // any slots on them) and under invalidated code objects are non-zero. - MarkBit mark_bit = - p->markbits()->MarkBitFromIndex(Page::FastAddressToMarkbitIndex(addr)); - - return Marking::IsBlackOrGrey(mark_bit); -} - - -void MarkCompactCollector::InvalidateCode(Code* code) { - if (heap_->incremental_marking()->IsCompacting() && - !ShouldSkipEvacuationSlotRecording(code)) { - DCHECK(compacting_); - - // If the object is white than no slots were recorded on it yet. - MarkBit mark_bit = Marking::MarkBitFrom(code); - if (Marking::IsWhite(mark_bit)) return; - - invalidated_code_.Add(code); - } -} - - // Return true if the given code is deoptimized or will be deoptimized. bool MarkCompactCollector::WillBeDeoptimized(Code* code) { return code->is_optimized_code() && code->marked_for_deoptimization(); } -bool MarkCompactCollector::MarkInvalidatedCode() { - bool code_marked = false; - - int length = invalidated_code_.length(); - for (int i = 0; i < length; i++) { - Code* code = invalidated_code_[i]; - - if (SetMarkBitsUnderInvalidatedCode(code, true)) { - code_marked = true; - } - } - - return code_marked; -} - - -void MarkCompactCollector::RemoveDeadInvalidatedCode() { - int length = invalidated_code_.length(); - for (int i = 0; i < length; i++) { - if (!IsMarked(invalidated_code_[i])) invalidated_code_[i] = NULL; - } -} - - -void MarkCompactCollector::ProcessInvalidatedCode(ObjectVisitor* visitor) { - int length = invalidated_code_.length(); - for (int i = 0; i < length; i++) { - Code* code = invalidated_code_[i]; - if (code != NULL) { - code->Iterate(visitor); - SetMarkBitsUnderInvalidatedCode(code, false); - } - } - invalidated_code_.Rewind(0); -} - - void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { Heap::RelocationLock relocation_lock(heap()); - bool code_slots_filtering_required; { GCTracer::Scope 
gc_scope(heap()->tracer(), GCTracer::Scope::MC_SWEEP_NEWSPACE);
-    code_slots_filtering_required = MarkInvalidatedCode();
     EvacuationScope evacuation_scope(this);
     EvacuateNewSpace();
   }
@@ -3719,8 +3602,7 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
   {
     GCTracer::Scope gc_scope(heap()->tracer(),
                              GCTracer::Scope::MC_UPDATE_POINTERS_TO_EVACUATED);
-    SlotsBuffer::UpdateSlotsRecordedIn(heap_, migration_slots_buffer_,
-                                       code_slots_filtering_required);
+    SlotsBuffer::UpdateSlotsRecordedIn(heap_, migration_slots_buffer_);
     if (FLAG_trace_fragmentation_verbose) {
       PrintF("  migration slots buffer: %d\n",
              SlotsBuffer::SizeOfChain(migration_slots_buffer_));
@@ -3754,8 +3636,7 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
            p->IsFlagSet(Page::RESCAN_ON_EVACUATION));

     if (p->IsEvacuationCandidate()) {
-      SlotsBuffer::UpdateSlotsRecordedIn(heap_, p->slots_buffer(),
-                                         code_slots_filtering_required);
+      SlotsBuffer::UpdateSlotsRecordedIn(heap_, p->slots_buffer());
       if (FLAG_trace_fragmentation_verbose) {
         PrintF("  page %p slots buffer: %d\n", reinterpret_cast<void*>(p),
                SlotsBuffer::SizeOfChain(p->slots_buffer()));
@@ -3811,10 +3692,6 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
   EvacuationWeakObjectRetainer evacuation_object_retainer;
   heap()->ProcessAllWeakReferences(&evacuation_object_retainer);

-  // Visit invalidated code (we ignored all slots on it) and clear mark-bits
-  // under it.
-  ProcessInvalidatedCode(&updating_visitor);
-
   heap_->isolate()->inner_pointer_to_code_cache()->Flush();

   slots_buffer_allocator_.DeallocateChain(&migration_slots_buffer_);
@@ -4404,8 +4281,6 @@ void MarkCompactCollector::SweepSpaces() {
       StartSweeperThreads();
     }
   }
-  RemoveDeadInvalidatedCode();
-
   {
     GCTracer::Scope sweep_scope(heap()->tracer(),
                                 GCTracer::Scope::MC_SWEEP_CODE);
@@ -4565,6 +4440,43 @@ void SlotsBuffer::RemoveInvalidSlots(Heap* heap, SlotsBuffer* buffer) {
 }


+void SlotsBuffer::RemoveObjectSlots(Heap* heap, SlotsBuffer* buffer,
+                                    HeapObject* invalid_object) {
+  // Remove entries by replacing them with an old-space slot containing a smi
+  // that is located in an unmovable page.
+  const ObjectSlot kRemovedEntry = HeapObject::RawField(
+      heap->empty_fixed_array(), FixedArrayBase::kLengthOffset);
+  DCHECK(Page::FromAddress(reinterpret_cast<Address>(kRemovedEntry))
+             ->NeverEvacuate());
+
+  while (buffer != NULL) {
+    SlotsBuffer::ObjectSlot* slots = buffer->slots_;
+    intptr_t slots_count = buffer->idx_;
+    bool is_typed_slot = false;
+
+    for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) {
+      ObjectSlot slot = slots[slot_idx];
+      if (!IsTypedSlot(slot)) {
+        Address slot_address = reinterpret_cast<Address>(slot);
+        if (slot_address >= invalid_object->address() &&
+            slot_address <
+                (invalid_object->address() + invalid_object->Size())) {
+          slots[slot_idx] = kRemovedEntry;
+          if (is_typed_slot) {
+            slots[slot_idx - 1] = kRemovedEntry;
+          }
+        }
+        is_typed_slot = false;
+      } else {
+        is_typed_slot = true;
+        DCHECK(slot_idx < slots_count);
+      }
+    }
+    buffer = buffer->next();
+  }
+}
+
+
 void SlotsBuffer::VerifySlots(Heap* heap, SlotsBuffer* buffer) {
   while (buffer != NULL) {
     SlotsBuffer::ObjectSlot* slots = buffer->slots_;
@@ -4708,28 +4620,6 @@ void SlotsBuffer::UpdateSlots(Heap* heap) {
 }


-void SlotsBuffer::UpdateSlotsWithFilter(Heap* heap) {
-  PointersUpdatingVisitor v(heap);
-
-  for (int slot_idx = 0; slot_idx < idx_; ++slot_idx) {
-    ObjectSlot slot = slots_[slot_idx];
-    if (!IsTypedSlot(slot)) {
-      if (!IsOnInvalidatedCodeObject(reinterpret_cast<Address>(slot))) {
-        PointersUpdatingVisitor::UpdateSlot(heap, slot);
-      }
-    } else {
-      ++slot_idx;
-      DCHECK(slot_idx < idx_);
-      Address pc = reinterpret_cast<Address>(slots_[slot_idx]);
-      if (!IsOnInvalidatedCodeObject(pc)) {
-        UpdateSlot(heap->isolate(), &v, DecodeSlotType(slot),
-                   reinterpret_cast<Address>(slots_[slot_idx]));
-      }
-    }
-  }
-}
-
-
 SlotsBuffer* SlotsBufferAllocator::AllocateBuffer(SlotsBuffer* next_buffer) {
   return new SlotsBuffer(next_buffer);
 }
diff --git a/src/heap/mark-compact.h b/src/heap/mark-compact.h
index de21aa1..3c3607e 100644
--- a/src/heap/mark-compact.h
+++ b/src/heap/mark-compact.h
@@ -116,10 +116,6 @@ class Marking {
     markbit.Next().Set();
   }

-  static void SetAllMarkBitsInRange(MarkBit start, MarkBit end);
-  static void ClearAllMarkBitsOfCellsContainedInRange(MarkBit start,
-                                                      MarkBit end);
-
   void TransferMark(Address old_start, Address new_start);

 #ifdef DEBUG
@@ -325,6 +321,12 @@ class SlotsBuffer {
     slots_[idx_++] = slot;
   }

+  // Should be used for testing only.
+  ObjectSlot Get(intptr_t i) {
+    DCHECK(i >= 0 && i < kNumberOfElements);
+    return slots_[i];
+  }
+
   enum SlotType {
     EMBEDDED_OBJECT_SLOT,
     OBJECT_SLOT,
@@ -363,8 +365,6 @@ class SlotsBuffer {

   void UpdateSlots(Heap* heap);

-  void UpdateSlotsWithFilter(Heap* heap);
-
   SlotsBuffer* next() { return next_; }

   static int SizeOfChain(SlotsBuffer* buffer) {
@@ -377,14 +377,9 @@ class SlotsBuffer {

   inline bool HasSpaceForTypedSlot() { return idx_ < kNumberOfElements - 1; }

-  static void UpdateSlotsRecordedIn(Heap* heap, SlotsBuffer* buffer,
-                                    bool code_slots_filtering_required) {
+  static void UpdateSlotsRecordedIn(Heap* heap, SlotsBuffer* buffer) {
     while (buffer != NULL) {
-      if (code_slots_filtering_required) {
-        buffer->UpdateSlotsWithFilter(heap);
-      } else {
-        buffer->UpdateSlots(heap);
-      }
+      buffer->UpdateSlots(heap);
       buffer = buffer->next();
     }
   }
@@ -423,6 +418,10 @@ class SlotsBuffer {
   // before sweeping when mark bits are still intact.
   static void RemoveInvalidSlots(Heap* heap, SlotsBuffer* buffer);

+  // Eliminate all slots that point to the given invalid_object.
+  static void RemoveObjectSlots(Heap* heap, SlotsBuffer* buffer,
+                                HeapObject* invalid_object);
+
   // Ensures that there are no invalid slots in the chain of slots buffers.
  static void VerifySlots(Heap* heap, SlotsBuffer* buffer);
@@ -670,8 +669,6 @@ class MarkCompactCollector {

   bool TryPromoteObject(HeapObject* object, int object_size);

-  void InvalidateCode(Code* code);
-
   void ClearMarkbits();

   bool abort_incremental_marking() const { return abort_incremental_marking_; }
@@ -744,16 +741,17 @@ class MarkCompactCollector {
   bool IsSlotInLiveObject(Address slot);
   void VerifyIsSlotInLiveObject(Address slot, HeapObject* object);

+  // Removes all the slots in the slot buffers that are within the given
+  // invalid_object.
+  void RemoveObjectSlots(HeapObject* invalid_object);
+
  private:
   class SweeperTask;

   explicit MarkCompactCollector(Heap* heap);
   ~MarkCompactCollector();

-  bool MarkInvalidatedCode();
   bool WillBeDeoptimized(Code* code);
-  void RemoveDeadInvalidatedCode();
-  void ProcessInvalidatedCode(ObjectVisitor* visitor);
   void EvictPopularEvacuationCandidate(Page* page);
   void ClearInvalidSlotsBufferEntries(PagedSpace* space);
   void ClearInvalidStoreAndSlotsBufferEntries();
@@ -970,7 +968,6 @@ class MarkCompactCollector {
   bool have_code_to_deoptimize_;

   List<Page*> evacuation_candidates_;
-  List<Code*> invalidated_code_;

   SmartPointer<FreeList> free_list_old_space_;
diff --git a/test/cctest/test-heap.cc b/test/cctest/test-heap.cc
index 668fb01..6423540 100644
--- a/test/cctest/test-heap.cc
+++ b/test/cctest/test-heap.cc
@@ -6015,3 +6015,45 @@ TEST(AllocationThroughput) {
   throughput = tracer->AllocationThroughputInBytesPerMillisecond(100);
   CHECK_EQ(2 * (counter3 - counter1) / (time3 - time1), throughput);
 }
+
+
+TEST(SlotsBufferObjectSlotsRemoval) {
+  CcTest::InitializeVM();
+  v8::HandleScope scope(CcTest::isolate());
+  Isolate* isolate = CcTest::i_isolate();
+  Heap* heap = isolate->heap();
+  Factory* factory = isolate->factory();
+
+  SlotsBuffer* buffer = new SlotsBuffer(NULL);
+  void* fake_object[1];
+
+  Handle<FixedArray> array = factory->NewFixedArray(2, TENURED);
+  CHECK(heap->old_space()->Contains(*array));
+  array->set(0, reinterpret_cast<Object*>(fake_object), SKIP_WRITE_BARRIER);
+
+  // Firstly, let's test the regular slots buffer entry.
+  buffer->Add(HeapObject::RawField(*array, FixedArray::kHeaderSize));
+  DCHECK(reinterpret_cast<void*>(buffer->Get(0)) ==
+         HeapObject::RawField(*array, FixedArray::kHeaderSize));
+  SlotsBuffer::RemoveObjectSlots(CcTest::i_isolate()->heap(), buffer, *array);
+  DCHECK(reinterpret_cast<void*>(buffer->Get(0)) ==
+         HeapObject::RawField(heap->empty_fixed_array(),
+                              FixedArrayBase::kLengthOffset));
+
+  // Secondly, let's test the typed slots buffer entry.
+  SlotsBuffer::AddTo(NULL, &buffer, SlotsBuffer::EMBEDDED_OBJECT_SLOT,
+                     array->address() + FixedArray::kHeaderSize,
+                     SlotsBuffer::FAIL_ON_OVERFLOW);
+  DCHECK(reinterpret_cast<void*>(buffer->Get(1)) ==
+         reinterpret_cast<void*>(SlotsBuffer::EMBEDDED_OBJECT_SLOT));
+  DCHECK(reinterpret_cast<void*>(buffer->Get(2)) ==
+         HeapObject::RawField(*array, FixedArray::kHeaderSize));
+  SlotsBuffer::RemoveObjectSlots(CcTest::i_isolate()->heap(), buffer, *array);
+  DCHECK(reinterpret_cast<void*>(buffer->Get(1)) ==
+         HeapObject::RawField(heap->empty_fixed_array(),
+                              FixedArrayBase::kLengthOffset));
+  DCHECK(reinterpret_cast<void*>(buffer->Get(2)) ==
+         HeapObject::RawField(heap->empty_fixed_array(),
+                              FixedArrayBase::kLengthOffset));
+  delete buffer;
+}