// hope that eventually there will be no weak callback invocations.
// Therefore, stop recollecting after several attempts.
mark_compact_collector()->SetFlags(kMakeHeapIterableMask);
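+ // Drop the compilation cache so cached scripts and their code do not
+ // survive the collections below.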
+ isolate_->compilation_cache()->Clear();
const int kMaxNumberOfAttempts = 7;
for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
if (!CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR)) {
  break;  // Another GC is unlikely to collect more garbage; stop early.
}
}
mark_compact_collector()->SetFlags(kNoGCFlags);
+ new_space_.Shrink();
+ incremental_marking()->UncommitMarkingDeque();
}
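
For context, a minimal embedder-side sketch of how this path is typically reached (assuming the v8::V8::LowMemoryNotification() entry point of this era, which forwards to Heap::CollectAllAvailableGarbage(); the handler name is hypothetical):

#include <v8.h>

// On a platform low-memory signal, ask V8 to release as much memory as
// possible. With this patch, the call below also shrinks new space and
// uncommits the marking deque backing store.
void OnSystemLowMemory() {
  v8::V8::LowMemoryNotification();
}
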
: heap_(heap),
state_(STOPPED),
marking_deque_memory_(NULL),
+ marking_deque_memory_committed_(false),
steps_count_(0),
steps_took_(0),
longest_step_(0.0),
void IncrementalMarking::EnsureMarkingDequeIsCommitted() {
if (marking_deque_memory_ == NULL) {
marking_deque_memory_ = new VirtualMemory(4 * MB);
- marking_deque_memory_->Commit(
+ }
+ if (!marking_deque_memory_committed_) {
+ bool success = marking_deque_memory_->Commit(
reinterpret_cast<Address>(marking_deque_memory_->address()),
marking_deque_memory_->size(),
false);  // Not executable.
+ CHECK(success);
+ marking_deque_memory_committed_ = true;
+ }
+}
+
+void IncrementalMarking::UncommitMarkingDeque() {
+ ASSERT(state_ == STOPPED);
+ if (marking_deque_memory_committed_) {
+ bool success = marking_deque_memory_->Uncommit(
+ reinterpret_cast<Address>(marking_deque_memory_->address()),
+ marking_deque_memory_->size());
+ CHECK(success);
+ marking_deque_memory_committed_ = false;
}
}
}
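
A minimal sketch of the commit/uncommit cycle these two methods are meant to support (a hypothetical cctest, not part of this patch; it assumes both methods are public and reachable through HEAP->incremental_marking()):

TEST(MarkingDequeCommitCycle) {
  InitializeVM();
  IncrementalMarking* marking = HEAP->incremental_marking();
  // Maps the 4 MB backing store on first use and sets the committed flag.
  marking->EnsureMarkingDequeIsCommitted();
  // No marking is running here, so state_ is STOPPED and uncommitting is
  // legal; the pages go back to the OS but the VirtualMemory is reused.
  marking->UncommitMarkingDeque();
  // Re-committing must succeed on the same reservation.
  marking->EnsureMarkingDequeIsCommitted();
}
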
+static void FillUpNewSpace(NewSpace* new_space) {
+ // Fill up new space to the point that it is completely full. Make sure
+ // that the scavenger does not undo the filling.
+ v8::HandleScope scope;
+ AlwaysAllocateScope always_allocate;
+ intptr_t available = new_space->EffectiveCapacity() - new_space->Size();
+ intptr_t number_of_fillers = (available / FixedArray::SizeFor(1000)) - 10;
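+ // The "- 10" leaves some slack so the filler allocations stay safely
+ // within the space that is actually available.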
+ for (intptr_t i = 0; i < number_of_fillers; i++) {
+ CHECK(HEAP->InNewSpace(*FACTORY->NewFixedArray(1000, NOT_TENURED)));
+ }
+}
+
+
TEST(GrowAndShrinkNewSpace) {
InitializeVM();
NewSpace* new_space = HEAP->new_space();
new_capacity = new_space->Capacity();
CHECK(2 * old_capacity == new_capacity);
- // Fill up new space to the point that it is completely full. Make sure
- // that the scavenger does not undo the filling.
old_capacity = new_space->Capacity();
- {
- v8::HandleScope scope;
- AlwaysAllocateScope always_allocate;
- intptr_t available = new_space->EffectiveCapacity() - new_space->Size();
- intptr_t number_of_fillers = (available / FixedArray::SizeFor(1000)) - 10;
- for (intptr_t i = 0; i < number_of_fillers; i++) {
- CHECK(HEAP->InNewSpace(*FACTORY->NewFixedArray(1000, NOT_TENURED)));
- }
- }
+ FillUpNewSpace(new_space);
new_capacity = new_space->Capacity();
CHECK(old_capacity == new_capacity);
new_capacity = new_space->Capacity();
CHECK(old_capacity == new_capacity);
}
+
+
+TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
+ InitializeVM();
+ v8::HandleScope scope;
+ NewSpace* new_space = HEAP->new_space();
+ intptr_t old_capacity, new_capacity;
+ old_capacity = new_space->Capacity();
+ new_space->Grow();
+ new_capacity = new_space->Capacity();
+ CHECK(2 * old_capacity == new_capacity);
+ FillUpNewSpace(new_space);
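+ // With the new_space_.Shrink() call added to CollectAllAvailableGarbage,
+ // this should return the grown, now-empty new space to its old capacity.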
+ HEAP->CollectAllAvailableGarbage();
+ new_capacity = new_space->Capacity();
+ CHECK(old_capacity == new_capacity);
+}