It is intended to bring memory usage down on idle notifications.
R=erik.corry@gmail.com
BUG=v8:1726
Review URL: https://chromiumcodereview.appspot.com/9323079
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@10616 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
"Flush code caches in maps during mark compact cycle.")
DEFINE_bool(never_compact, false,
"Never perform compaction on full GC - testing only")
-DEFINE_bool(compact_code_space, false, "Compact code space")
DEFINE_bool(cleanup_code_caches_at_gc, true,
"Flush inline caches prior to mark compact collection and "
"flush code caches in maps during mark compact cycle.")
}
is_compacting_ = !FLAG_never_compact && (flag == ALLOW_COMPACTION) &&
- heap_->mark_compact_collector()->StartCompaction();
+ heap_->mark_compact_collector()->StartCompaction(
+ MarkCompactCollector::INCREMENTAL_COMPACTION);
state_ = MARKING;
}
-bool MarkCompactCollector::StartCompaction() {
+bool MarkCompactCollector::StartCompaction(CompactionMode mode) {
if (!compacting_) {
ASSERT(evacuation_candidates_.length() == 0);
CollectEvacuationCandidates(heap()->old_pointer_space());
CollectEvacuationCandidates(heap()->old_data_space());
- if (FLAG_compact_code_space) {
+ if (mode == NON_INCREMENTAL_COMPACTION) {
CollectEvacuationCandidates(heap()->code_space());
} else if (FLAG_trace_fragmentation) {
TraceFragmentation(heap()->code_space());
// Don't start compaction if we are in the middle of incremental
// marking cycle. We did not collect any slots.
if (!FLAG_never_compact && !was_marked_incrementally_) {
- StartCompaction();
+ StartCompaction(NON_INCREMENTAL_COMPACTION);
}
PagedSpaces spaces;
// Performs a global garbage collection.
void CollectGarbage();
- bool StartCompaction();
+ enum CompactionMode {
+ INCREMENTAL_COMPACTION,
+ NON_INCREMENTAL_COMPACTION
+ };
+
+ bool StartCompaction(CompactionMode mode);
void AbortCompaction();