Shrink the new space and uncommit marking deque on low memory notification.
author    ulan@chromium.org <ulan@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
Tue, 8 Nov 2011 12:42:02 +0000 (12:42 +0000)
committer ulan@chromium.org <ulan@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
Tue, 8 Nov 2011 12:42:02 +0000 (12:42 +0000)
BUG=v8:1669
TEST=cctest/test-heap/CollectingAllAvailableGarbageShrinksNewSpace

Review URL: http://codereview.chromium.org/8065003

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@9912 ce2b1a6d-e550-0410-aec6-3dcde31c8c00

src/heap.cc
src/incremental-marking.cc
src/incremental-marking.h
test/cctest/test-heap.cc
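
The path this change targets is the embedder-facing low memory notification. The sketch below is not part of the commit; it shows how an embedder would reach the new code, assuming the v8::V8::LowMemoryNotification() API of this era (which forwards to Heap::CollectAllAvailableGarbage()). The callback name is illustrative only.

#include <v8.h>

// Hypothetical embedder hook: called when the OS reports memory pressure.
// Assumption: v8::V8::LowMemoryNotification() ends up in
// Heap::CollectAllAvailableGarbage(), which after this commit also shrinks
// the new space and uncommits the marking deque backing store.
void OnSystemLowMemory() {
  v8::V8::LowMemoryNotification();
}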

src/heap.cc
index ef1eb77..4da83e8 100644
@@ -447,6 +447,7 @@ void Heap::CollectAllAvailableGarbage() {
   // hope that eventually there will be no weak callbacks invocations.
   // Therefore stop recollecting after several attempts.
   mark_compact_collector()->SetFlags(kMakeHeapIterableMask);
+  isolate_->compilation_cache()->Clear();
   const int kMaxNumberOfAttempts = 7;
   for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
     if (!CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR)) {
@@ -454,6 +455,8 @@ void Heap::CollectAllAvailableGarbage() {
     }
   }
   mark_compact_collector()->SetFlags(kNoGCFlags);
+  new_space_.Shrink();
+  incremental_marking()->UncommitMarkingDeque();
 }
 
 
src/incremental-marking.cc
index 5bc1ebf..bd0f083 100644
@@ -41,6 +41,7 @@ IncrementalMarking::IncrementalMarking(Heap* heap)
     : heap_(heap),
       state_(STOPPED),
       marking_deque_memory_(NULL),
+      marking_deque_memory_committed_(false),
       steps_count_(0),
       steps_took_(0),
       longest_step_(0.0),
@@ -440,10 +441,25 @@ static void PatchIncrementalMarkingRecordWriteStubs(
 void IncrementalMarking::EnsureMarkingDequeIsCommitted() {
   if (marking_deque_memory_ == NULL) {
     marking_deque_memory_ = new VirtualMemory(4 * MB);
-    marking_deque_memory_->Commit(
+  }
+  if (!marking_deque_memory_committed_) {
+    bool success = marking_deque_memory_->Commit(
         reinterpret_cast<Address>(marking_deque_memory_->address()),
         marking_deque_memory_->size(),
         false);  // Not executable.
+    CHECK(success);
+    marking_deque_memory_committed_ = true;
+  }
+}
+
+void IncrementalMarking::UncommitMarkingDeque() {
+  ASSERT(state_ == STOPPED);
+  if (marking_deque_memory_committed_) {
+    bool success = marking_deque_memory_->Uncommit(
+        reinterpret_cast<Address>(marking_deque_memory_->address()),
+        marking_deque_memory_->size());
+    CHECK(success);
+    marking_deque_memory_committed_ = false;
   }
 }
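
As a standalone illustration (not V8 code) of the reserve / commit-on-demand / uncommit pattern that EnsureMarkingDequeIsCommitted() and UncommitMarkingDeque() implement on top of VirtualMemory, here is a minimal Linux sketch using mmap; every name in it is invented for the example.

#include <sys/mman.h>
#include <cassert>
#include <cstddef>

static const size_t kDequeSize = 4 * 1024 * 1024;  // 4 MB, as in the hunk above.
static void* g_reserved = NULL;
static bool g_committed = false;

void EnsureCommitted() {
  if (g_reserved == NULL) {
    // Reserve the address range; no physical pages are committed yet.
    g_reserved = mmap(NULL, kDequeSize, PROT_NONE,
                      MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE, -1, 0);
    assert(g_reserved != MAP_FAILED);
  }
  if (!g_committed) {
    // Commit: make the pages accessible (roughly VirtualMemory::Commit).
    int rc = mprotect(g_reserved, kDequeSize, PROT_READ | PROT_WRITE);
    assert(rc == 0);
    g_committed = true;
  }
}

void Uncommit() {
  if (g_committed) {
    // Release the physical pages but keep the reservation, so a later
    // EnsureCommitted() reuses the same range (roughly VirtualMemory::Uncommit).
    int rc = madvise(g_reserved, kDequeSize, MADV_DONTNEED);
    assert(rc == 0);
    rc = mprotect(g_reserved, kDequeSize, PROT_NONE);
    assert(rc == 0);
    g_committed = false;
  }
}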
 
src/incremental-marking.h
index 5910f17..b5d9f1b 100644
@@ -213,6 +213,8 @@ class IncrementalMarking {
     no_marking_scope_depth_--;
   }
 
+  void UncommitMarkingDeque();
+
  private:
   void set_should_hurry(bool val) {
     should_hurry_ = val;
@@ -250,6 +252,7 @@ class IncrementalMarking {
   bool is_compacting_;
 
   VirtualMemory* marking_deque_memory_;
+  bool marking_deque_memory_committed_;
   MarkingDeque marking_deque_;
 
   int steps_count_;
test/cctest/test-heap.cc
index d6b3c80..9e3eea4 100644
@@ -1220,6 +1220,19 @@ TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
 }
 
 
+static void FillUpNewSpace(NewSpace* new_space) {
+  // Fill up new space to the point that it is completely full. Make sure
+  // that the scavenger does not undo the filling.
+  v8::HandleScope scope;
+  AlwaysAllocateScope always_allocate;
+  intptr_t available = new_space->EffectiveCapacity() - new_space->Size();
+  intptr_t number_of_fillers = (available / FixedArray::SizeFor(1000)) - 10;
+  for (intptr_t i = 0; i < number_of_fillers; i++) {
+    CHECK(HEAP->InNewSpace(*FACTORY->NewFixedArray(1000, NOT_TENURED)));
+  }
+}
+
+
 TEST(GrowAndShrinkNewSpace) {
   InitializeVM();
   NewSpace* new_space = HEAP->new_space();
@@ -1231,18 +1244,8 @@ TEST(GrowAndShrinkNewSpace) {
   new_capacity = new_space->Capacity();
   CHECK(2 * old_capacity == new_capacity);
 
-  // Fill up new space to the point that it is completely full. Make sure
-  // that the scavenger does not undo the filling.
   old_capacity = new_space->Capacity();
-  {
-    v8::HandleScope scope;
-    AlwaysAllocateScope always_allocate;
-    intptr_t available = new_space->EffectiveCapacity() - new_space->Size();
-    intptr_t number_of_fillers = (available / FixedArray::SizeFor(1000)) - 10;
-    for (intptr_t i = 0; i < number_of_fillers; i++) {
-      CHECK(HEAP->InNewSpace(*FACTORY->NewFixedArray(1000, NOT_TENURED)));
-    }
-  }
+  FillUpNewSpace(new_space);
   new_capacity = new_space->Capacity();
   CHECK(old_capacity == new_capacity);
 
@@ -1270,3 +1273,19 @@ TEST(GrowAndShrinkNewSpace) {
   new_capacity = new_space->Capacity();
   CHECK(old_capacity == new_capacity);
 }
+
+
+TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
+  InitializeVM();
+  v8::HandleScope scope;
+  NewSpace* new_space = HEAP->new_space();
+  intptr_t old_capacity, new_capacity;
+  old_capacity = new_space->Capacity();
+  new_space->Grow();
+  new_capacity = new_space->Capacity();
+  CHECK(2 * old_capacity == new_capacity);
+  FillUpNewSpace(new_space);
+  HEAP->CollectAllAvailableGarbage();
+  new_capacity = new_space->Capacity();
+  CHECK(old_capacity == new_capacity);
+}