From: ulan@chromium.org Date: Tue, 8 Nov 2011 12:42:02 +0000 (+0000) Subject: Shrink the new space and uncommit marking deque on low memory notification. X-Git-Tag: upstream/4.7.83~17979 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=0d536dec2615b824f1875ff55e11dc403f59a83f;p=platform%2Fupstream%2Fv8.git Shrink the new space and uncommit marking deque on low memory notification. BUG=v8:1669 TEST=cctest/test-heap/CollectingAllAvailableGarbageShrinksNewSpace Review URL: http://codereview.chromium.org/8065003 git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@9912 ce2b1a6d-e550-0410-aec6-3dcde31c8c00 --- diff --git a/src/heap.cc b/src/heap.cc index ef1eb77a5..4da83e859 100644 --- a/src/heap.cc +++ b/src/heap.cc @@ -447,6 +447,7 @@ void Heap::CollectAllAvailableGarbage() { // hope that eventually there will be no weak callbacks invocations. // Therefore stop recollecting after several attempts. mark_compact_collector()->SetFlags(kMakeHeapIterableMask); + isolate_->compilation_cache()->Clear(); const int kMaxNumberOfAttempts = 7; for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) { if (!CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR)) { @@ -454,6 +455,8 @@ void Heap::CollectAllAvailableGarbage() { } } mark_compact_collector()->SetFlags(kNoGCFlags); + new_space_.Shrink(); + incremental_marking()->UncommitMarkingDeque(); } diff --git a/src/incremental-marking.cc b/src/incremental-marking.cc index 5bc1ebf28..bd0f0837b 100644 --- a/src/incremental-marking.cc +++ b/src/incremental-marking.cc @@ -41,6 +41,7 @@ IncrementalMarking::IncrementalMarking(Heap* heap) : heap_(heap), state_(STOPPED), marking_deque_memory_(NULL), + marking_deque_memory_committed_(false), steps_count_(0), steps_took_(0), longest_step_(0.0), @@ -440,10 +441,25 @@ static void PatchIncrementalMarkingRecordWriteStubs( void IncrementalMarking::EnsureMarkingDequeIsCommitted() { if (marking_deque_memory_ == NULL) { marking_deque_memory_ = new VirtualMemory(4 * MB); - 
marking_deque_memory_->Commit( + } + if (!marking_deque_memory_committed_) { + bool success = marking_deque_memory_->Commit( reinterpret_cast<Address>
(marking_deque_memory_->address()), marking_deque_memory_->size(), false); // Not executable. + CHECK(success); + marking_deque_memory_committed_ = true; + } +} + +void IncrementalMarking::UncommitMarkingDeque() { + ASSERT(state_ == STOPPED); + if (marking_deque_memory_committed_) { + bool success = marking_deque_memory_->Uncommit( + reinterpret_cast<Address>
(marking_deque_memory_->address()), + marking_deque_memory_->size()); + CHECK(success); + marking_deque_memory_committed_ = false; } } diff --git a/src/incremental-marking.h b/src/incremental-marking.h index 5910f1792..b5d9f1b9b 100644 --- a/src/incremental-marking.h +++ b/src/incremental-marking.h @@ -213,6 +213,8 @@ class IncrementalMarking { no_marking_scope_depth_--; } + void UncommitMarkingDeque(); + private: void set_should_hurry(bool val) { should_hurry_ = val; @@ -250,6 +252,7 @@ class IncrementalMarking { bool is_compacting_; VirtualMemory* marking_deque_memory_; + bool marking_deque_memory_committed_; MarkingDeque marking_deque_; int steps_count_; diff --git a/test/cctest/test-heap.cc b/test/cctest/test-heap.cc index d6b3c80ea..9e3eea4e7 100644 --- a/test/cctest/test-heap.cc +++ b/test/cctest/test-heap.cc @@ -1220,6 +1220,19 @@ TEST(TestSizeOfObjectsVsHeapIteratorPrecision) { } +static void FillUpNewSpace(NewSpace* new_space) { + // Fill up new space to the point that it is completely full. Make sure + // that the scavenger does not undo the filling. + v8::HandleScope scope; + AlwaysAllocateScope always_allocate; + intptr_t available = new_space->EffectiveCapacity() - new_space->Size(); + intptr_t number_of_fillers = (available / FixedArray::SizeFor(1000)) - 10; + for (intptr_t i = 0; i < number_of_fillers; i++) { + CHECK(HEAP->InNewSpace(*FACTORY->NewFixedArray(1000, NOT_TENURED))); + } +} + + TEST(GrowAndShrinkNewSpace) { InitializeVM(); NewSpace* new_space = HEAP->new_space(); @@ -1231,18 +1244,8 @@ TEST(GrowAndShrinkNewSpace) { new_capacity = new_space->Capacity(); CHECK(2 * old_capacity == new_capacity); - // Fill up new space to the point that it is completely full. Make sure - // that the scavenger does not undo the filling. 
old_capacity = new_space->Capacity(); - { - v8::HandleScope scope; - AlwaysAllocateScope always_allocate; - intptr_t available = new_space->EffectiveCapacity() - new_space->Size(); - intptr_t number_of_fillers = (available / FixedArray::SizeFor(1000)) - 10; - for (intptr_t i = 0; i < number_of_fillers; i++) { - CHECK(HEAP->InNewSpace(*FACTORY->NewFixedArray(1000, NOT_TENURED))); - } - } + FillUpNewSpace(new_space); new_capacity = new_space->Capacity(); CHECK(old_capacity == new_capacity); @@ -1270,3 +1273,19 @@ TEST(GrowAndShrinkNewSpace) { new_capacity = new_space->Capacity(); CHECK(old_capacity == new_capacity); } + + +TEST(CollectingAllAvailableGarbageShrinksNewSpace) { + InitializeVM(); + v8::HandleScope scope; + NewSpace* new_space = HEAP->new_space(); + intptr_t old_capacity, new_capacity; + old_capacity = new_space->Capacity(); + new_space->Grow(); + new_capacity = new_space->Capacity(); + CHECK(2 * old_capacity == new_capacity); + FillUpNewSpace(new_space); + HEAP->CollectAllAvailableGarbage(); + new_capacity = new_space->Capacity(); + CHECK(old_capacity == new_capacity); +}