In the gc-tracer, we check whether we're marking to figure out which
part of the mark compact we're in. If we aborted incremental marking for
whatever reason, the check fails and we might later run into trouble.
BUG=none
R=hpayer@chromium.org
LOG=n
Review URL: https://codereview.chromium.org/783453003
Cr-Commit-Position: refs/heads/master@{#25663}
if (collector == SCAVENGER) {
current_ = Event(Event::SCAVENGER, gc_reason, collector_reason);
} else if (collector == MARK_COMPACTOR) {
- if (heap_->incremental_marking()->IsMarking()) {
+ if (heap_->incremental_marking()->WasActivated()) {
current_ =
Event(Event::INCREMENTAL_MARK_COMPACTOR, gc_reason, collector_reason);
} else {
LOG(isolate_, ResourceEvent("markcompact", "end"));
+ MarkCompactEpilogue();
+
+ if (FLAG_allocation_site_pretenuring) {
+ EvaluateOldSpaceLocalPretenuring(size_of_objects_before_gc);
+ }
+}
+
+
+// Common epilogue for a full mark-compact GC. Resets the GC state and
+// per-cycle counters, and — the point of this change — tells the
+// incremental marker the cycle is over via Epilogue(), so that
+// WasActivated() reports false afterwards even if incremental marking
+// was aborted before it reached IsMarking().
+void Heap::MarkCompactEpilogue() {
gc_state_ = NOT_IN_GC;
isolate_->counters()->objs_since_last_full()->Set(0);
flush_monomorphic_ics_ = false;
- if (FLAG_allocation_site_pretenuring) {
- EvaluateOldSpaceLocalPretenuring(size_of_objects_before_gc);
- }
+ incremental_marking()->Epilogue();
}
// Code to be run before and after mark-compact.
void MarkCompactPrologue();
+ void MarkCompactEpilogue();
void ProcessNativeContexts(WeakObjectRetainer* retainer);
void ProcessArrayBuffers(WeakObjectRetainer* retainer);
allocated_(0),
idle_marking_delay_counter_(0),
no_marking_scope_depth_(0),
- unscanned_bytes_of_large_object_(0) {}
+ unscanned_bytes_of_large_object_(0),
+ was_activated_(false) {}
void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot,
}
+// Returns true if incremental marking has been started since the last
+// mark-compact epilogue. Unlike IsMarking(), this stays true even when
+// incremental marking was aborted mid-cycle, which is what the
+// gc-tracer needs to classify the collection correctly.
+bool IncrementalMarking::WasActivated() { return was_activated_; }
+
+
bool IncrementalMarking::WorthActivating() {
#ifndef DEBUG
static const intptr_t kActivationThreshold = 8 * MB;
ResetStepCounters();
+ was_activated_ = true;
+
if (!heap_->mark_compact_collector()->sweeping_in_progress()) {
StartMarking(flag);
} else {
}
+// Clears the was_activated_ flag at the end of a mark-compact cycle;
+// called from Heap::MarkCompactEpilogue().
+void IncrementalMarking::Epilogue() { was_activated_ = false; }
+
+
void IncrementalMarking::OldSpaceStep(intptr_t allocated) {
if (IsStopped() && ShouldActivate()) {
// TODO(hpayer): Let's play safe for now, but compaction should be
bool ShouldActivate();
+ bool WasActivated();
+
enum CompactionFlag { ALLOW_COMPACTION, PREVENT_COMPACTION };
void Start(CompactionFlag flag = ALLOW_COMPACTION);
void MarkingComplete(CompletionAction action);
+ void Epilogue();
+
// It's hard to know how much work the incremental marker should do to make
// progress in the face of the mutator creating new work for it. We start
// of at a moderate rate of work and gradually increase the speed of the
int unscanned_bytes_of_large_object_;
+ bool was_activated_;
+
DISALLOW_IMPLICIT_CONSTRUCTORS(IncrementalMarking);
};
}