void MarkCompactCollector::SetUp() {
free_list_old_space_.Reset(new FreeList(heap_->old_space()));
+ free_list_code_space_.Reset(new FreeList(heap_->code_space()));
EnsureMarkingDequeIsReserved();
EnsureMarkingDequeIsCommitted(kMinMarkingDequeSize);
}
SweepSpaces();
-#ifdef VERIFY_HEAP
- VerifyWeakEmbeddedObjectsInCode();
- if (FLAG_omit_map_checks_for_leaf_maps) {
- VerifyOmittedMapChecks();
- }
-#endif
-
Finish();
if (marking_parity_ == EVEN_MARKING_PARITY) {
void MarkCompactCollector::StartSweeperThreads() {
DCHECK(free_list_old_space_.get()->IsEmpty());
+ DCHECK(free_list_code_space_.get()->IsEmpty());
V8::GetCurrentPlatform()->CallOnBackgroundThread(
new SweeperTask(heap(), heap()->old_space()),
v8::Platform::kShortRunningTask);
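+ // Post a second background task so that code space pages are swept
+ // concurrently as well.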
+ V8::GetCurrentPlatform()->CallOnBackgroundThread(
+ new SweeperTask(heap(), heap()->code_space()),
+ v8::Platform::kShortRunningTask);
+}
+
+
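+// Sweeps the given page on the calling thread if it has not been swept yet.
+// If a concurrent sweeper task already owns the page, blocks until that task
+// has finished sweeping it.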
+void MarkCompactCollector::SweepOrWaitUntilSweepingCompleted(Page* page) {
+ PagedSpace* owner = reinterpret_cast<PagedSpace*>(page->owner());
+ if (!page->SweepingCompleted()) {
+ SweepInParallel(page, owner);
+ if (!page->SweepingCompleted()) {
+ // We were not able to sweep that page, i.e., a concurrent
+ // sweeper thread currently owns this page. Wait for the sweeper
+ // thread to be done with this page.
+ page->WaitUntilSweepingCompleted();
+ }
+ }
}
// here.
if (!heap()->concurrent_sweeping_enabled() || !IsSweepingCompleted()) {
SweepInParallel(heap()->paged_space(OLD_SPACE), 0);
+ SweepInParallel(heap()->paged_space(CODE_SPACE), 0);
}
// Wait twice for both jobs.
if (heap()->concurrent_sweeping_enabled()) {
pending_sweeper_jobs_semaphore_.Wait();
+ pending_sweeper_jobs_semaphore_.Wait();
}
ParallelSweepSpacesComplete();
sweeping_in_progress_ = false;
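+ // Give the memory freed by the sweeper tasks back to the free lists of the
+ // swept spaces.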
RefillFreeList(heap()->paged_space(OLD_SPACE));
+ RefillFreeList(heap()->paged_space(CODE_SPACE));
heap()->paged_space(OLD_SPACE)->ResetUnsweptFreeBytes();
+ heap()->paged_space(CODE_SPACE)->ResetUnsweptFreeBytes();
#ifdef VERIFY_HEAP
if (FLAG_verify_heap && !evacuation()) {
if (space == heap()->old_space()) {
free_list = free_list_old_space_.get();
+ } else if (space == heap()->code_space()) {
+ free_list = free_list_code_space_.get();
} else {
// Any PagedSpace might invoke RefillFreeLists, so we need to make sure
- // to only refill them for the old space.
+ // to only refill them for the old space and the code space.
DCHECK(reinterpret_cast<intptr_t>(free_start) % (32 * kPointerSize) == 0);
int offsets[16];
+ // If we use the skip list for code space pages, we have to lock the skip
+ // list because it could be accessed concurrently by the runtime or the
+ // deoptimizer.
SkipList* skip_list = p->skip_list();
- int curr_region = -1;
if ((skip_list_mode == REBUILD_SKIP_LIST) && skip_list) {
skip_list->Clear();
}
intptr_t freed_bytes = 0;
intptr_t max_freed_bytes = 0;
+ int curr_region = -1;
for (MarkBitCellIterator it(p); !it.Done(); it.Advance()) {
Address cell_base = it.CurrentCellBase();
return 0;
}
page->set_parallel_sweeping(MemoryChunk::SWEEPING_IN_PROGRESS);
- FreeList* free_list = free_list_old_space_.get();
+ FreeList* free_list;
FreeList private_free_list(space);
- max_freed = Sweep<SWEEP_ONLY, SWEEP_IN_PARALLEL, IGNORE_SKIP_LIST,
- IGNORE_FREE_SPACE>(space, &private_free_list, page, NULL);
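+ // Pick the shared free list matching the space being swept; code space
+ // pages also rebuild their skip list while being swept in parallel.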
+ if (space->identity() == CODE_SPACE) {
+ free_list = free_list_code_space_.get();
+ max_freed =
+ Sweep<SWEEP_ONLY, SWEEP_IN_PARALLEL, REBUILD_SKIP_LIST,
+ IGNORE_FREE_SPACE>(space, &private_free_list, page, NULL);
+ } else {
+ free_list = free_list_old_space_.get();
+ max_freed =
+ Sweep<SWEEP_ONLY, SWEEP_IN_PARALLEL, IGNORE_SKIP_LIST,
+ IGNORE_FREE_SPACE>(space, &private_free_list, page, NULL);
+ }
free_list->Concatenate(&private_free_list);
page->mutex()->Unlock();
}
PrintF("Sweeping 0x%" V8PRIxPTR ".\n",
reinterpret_cast<intptr_t>(p));
}
- Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, IGNORE_SKIP_LIST,
- IGNORE_FREE_SPACE>(space, NULL, p, NULL);
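+ // Code space pages need their skip list rebuilt during sweeping; with
+ // FLAG_zap_code_space the freed memory is additionally zapped.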
+ if (space->identity() == CODE_SPACE) {
+ if (FLAG_zap_code_space) {
+ Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, REBUILD_SKIP_LIST,
+ ZAP_FREE_SPACE>(space, NULL, p, NULL);
+ } else {
+ Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, REBUILD_SKIP_LIST,
+ IGNORE_FREE_SPACE>(space, NULL, p, NULL);
+ }
+ } else {
+ DCHECK(space->identity() == OLD_SPACE);
+ Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, IGNORE_SKIP_LIST,
+ IGNORE_FREE_SPACE>(space, NULL, p, NULL);
+ }
pages_swept++;
parallel_sweeping_active = true;
} else {
if (FLAG_gc_verbose) {
PrintF("Sweeping 0x%" V8PRIxPTR ".\n", reinterpret_cast<intptr_t>(p));
}
- if (space->identity() == CODE_SPACE && FLAG_zap_code_space) {
- Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, REBUILD_SKIP_LIST,
- ZAP_FREE_SPACE>(space, NULL, p, NULL);
- } else if (space->identity() == CODE_SPACE) {
- Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, REBUILD_SKIP_LIST,
- IGNORE_FREE_SPACE>(space, NULL, p, NULL);
+ if (space->identity() == CODE_SPACE) {
+ if (FLAG_zap_code_space) {
+ Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, REBUILD_SKIP_LIST,
+ ZAP_FREE_SPACE>(space, NULL, p, NULL);
+ } else {
+ Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, REBUILD_SKIP_LIST,
+ IGNORE_FREE_SPACE>(space, NULL, p, NULL);
+ }
} else {
+ DCHECK(space->identity() == OLD_SPACE ||
+ space->identity() == MAP_SPACE);
Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, IGNORE_SKIP_LIST,
IGNORE_FREE_SPACE>(space, NULL, p, NULL);
}
// the other spaces rely on possibly non-live maps to get the sizes for
// non-live objects.
{
- GCTracer::Scope sweep_scope(heap()->tracer(),
- GCTracer::Scope::MC_SWEEP_OLDSPACE);
- { SweepSpace(heap()->old_space(), CONCURRENT_SWEEPING); }
+ {
+ GCTracer::Scope sweep_scope(heap()->tracer(),
+ GCTracer::Scope::MC_SWEEP_OLDSPACE);
+ SweepSpace(heap()->old_space(), CONCURRENT_SWEEPING);
+ }
+ {
+ GCTracer::Scope sweep_scope(heap()->tracer(),
+ GCTracer::Scope::MC_SWEEP_CODE);
+ SweepSpace(heap()->code_space(), CONCURRENT_SWEEPING);
+ }
+
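+ // With concurrent sweeping enabled, the remaining pages of both spaces
+ // are swept by the background tasks started below.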
sweeping_in_progress_ = true;
if (heap()->concurrent_sweeping_enabled()) {
StartSweeperThreads();
}
}
- RemoveDeadInvalidatedCode();
- {
- GCTracer::Scope sweep_scope(heap()->tracer(),
- GCTracer::Scope::MC_SWEEP_CODE);
- SweepSpace(heap()->code_space(), SEQUENTIAL_SWEEPING);
- }
+ RemoveDeadInvalidatedCode();
EvacuateNewSpaceAndCandidates();
void MarkCompactCollector::ParallelSweepSpacesComplete() {
ParallelSweepSpaceComplete(heap()->old_space());
+ ParallelSweepSpaceComplete(heap()->code_space());
}