ring_buffer_end_(0),
promotion_queue_(this),
configured_(false),
+ current_gc_flags_(Heap::kNoGCFlags),
external_string_table_(this),
chunks_queued_for_free_(NULL),
gc_callbacks_depth_(0),
// Since we are ignoring the return value, the exact choice of space does
// not matter, so long as we do not specify NEW_SPACE, which would not
// cause a full GC.
- mark_compact_collector_.SetFlags(flags);
+ set_current_gc_flags(flags);
CollectGarbage(OLD_SPACE, gc_reason, gc_callback_flags);
- mark_compact_collector_.SetFlags(kNoGCFlags);
+ set_current_gc_flags(kNoGCFlags);
}
isolate()->optimizing_compile_dispatcher()->Flush();
}
isolate()->ClearSerializerData();
- mark_compact_collector()->SetFlags(kMakeHeapIterableMask |
- kReduceMemoryFootprintMask);
+ set_current_gc_flags(kMakeHeapIterableMask | kReduceMemoryFootprintMask);
isolate_->compilation_cache()->Clear();
const int kMaxNumberOfAttempts = 7;
const int kMinNumberOfAttempts = 2;
break;
}
}
- mark_compact_collector()->SetFlags(kNoGCFlags);
+ set_current_gc_flags(kNoGCFlags);
new_space_.Shrink();
UncommitFromSpace();
}
}
}
- if (collector == MARK_COMPACTOR &&
- !mark_compact_collector()->finalize_incremental_marking() &&
- !mark_compact_collector()->abort_incremental_marking() &&
- !incremental_marking()->IsStopped() &&
+ if (collector == MARK_COMPACTOR && !ShouldFinalizeIncrementalMarking() &&
+ !ShouldAbortIncrementalMarking() && !incremental_marking()->IsStopped() &&
!incremental_marking()->should_hurry() && FLAG_incremental_marking) {
// Make progress in incremental marking.
const intptr_t kStepSizeWhenDelayedByScavenge = 1 * MB;
// Start incremental marking for the next cycle. The heap snapshot
// generator needs incremental marking to stay off after it aborted.
- if (!mark_compact_collector()->abort_incremental_marking() &&
- incremental_marking()->IsStopped() &&
+ if (!ShouldAbortIncrementalMarking() && incremental_marking()->IsStopped() &&
incremental_marking()->ShouldActivateEvenWithoutIdleNotification()) {
incremental_marking()->Start(kNoGCFlags, kNoGCCallbackFlags, "GC epilogue");
}
factor = Min(factor, kConservativeHeapGrowingFactor);
}
- if (FLAG_stress_compaction ||
- mark_compact_collector()->reduce_memory_footprint_) {
+ if (FLAG_stress_compaction || ShouldReduceMemory()) {
factor = kMinHeapGrowingFactor;
}
private:
Heap();
+ int current_gc_flags() const { return current_gc_flags_; }
+ void set_current_gc_flags(int flags) {
+ current_gc_flags_ = flags;
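+ // Finalizing and aborting incremental marking must never be requested
+ // at the same time.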
+ DCHECK(!ShouldFinalizeIncrementalMarking() ||
+ !ShouldAbortIncrementalMarking());
+ }
+
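+ // Predicates over the currently set GC flags, consulted by the GC components.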
+ inline bool ShouldReduceMemory() const {
+ return (current_gc_flags_ & kReduceMemoryFootprintMask) != 0;
+ }
+
+ inline bool ShouldAbortIncrementalMarking() const {
+ return (current_gc_flags_ & kAbortIncrementalMarkingMask) != 0;
+ }
+
+ inline bool ShouldFinalizeIncrementalMarking() const {
+ return (current_gc_flags_ & kFinalizeIncrementalMarkingMask) != 0;
+ }
+
// The amount of external memory registered through the API kept alive
// by global handles
int64_t amount_of_external_allocated_memory_;
// configured through the API until it is set up.
bool configured_;
+ // Currently set GC flags that are respected by all GC components.
+ int current_gc_flags_;
+
ExternalStringTable external_string_table_;
VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_;
friend class GCCallbacksScope;
friend class GCTracer;
friend class HeapIterator;
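+ // Needs access to set_current_gc_flags() when starting incremental marking.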
+ friend class IncrementalMarking;
friend class Isolate;
friend class MarkCompactCollector;
friend class MarkCompactMarkingVisitor;
friend class MapCompact;
friend class Page;
+ // Used in cctest.
+ friend class HeapTester;
+
DISALLOW_COPY_AND_ASSIGN(Heap);
};
}
-void IncrementalMarking::Start(int mark_compact_flags,
+void IncrementalMarking::Start(int flags,
const GCCallbackFlags gc_callback_flags,
const char* reason) {
if (FLAG_trace_incremental_marking) {
was_activated_ = true;
if (!heap_->mark_compact_collector()->sweeping_in_progress()) {
- heap_->mark_compact_collector()->SetFlags(mark_compact_flags);
+ heap_->set_current_gc_flags(flags);
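+ // The flags stay set for the whole incremental cycle; they are cleared
+ // again by the full GC that finalizes marking.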
StartMarking();
- heap_->mark_compact_collector()->SetFlags(Heap::kNoGCFlags);
} else {
if (FLAG_trace_incremental_marking) {
PrintF("[IncrementalMarking] Start sweeping.\n");
bool WasActivated();
- void Start(int mark_compact_flags,
+ void Start(int flags,
const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags,
const char* reason = nullptr);
namespace v8 {
namespace internal {
-
-void MarkCompactCollector::SetFlags(int flags) {
- reduce_memory_footprint_ = ((flags & Heap::kReduceMemoryFootprintMask) != 0);
- abort_incremental_marking_ =
- ((flags & Heap::kAbortIncrementalMarkingMask) != 0);
- finalize_incremental_marking_ =
- ((flags & Heap::kFinalizeIncrementalMarkingMask) != 0);
- DCHECK(!finalize_incremental_marking_ || !abort_incremental_marking_);
-}
-
-
void MarkCompactCollector::PushBlack(HeapObject* obj) {
DCHECK(Marking::IsBlack(Marking::MarkBitFrom(obj)));
if (marking_deque_.Push(obj)) {
#ifdef DEBUG
state_(IDLE),
#endif
- reduce_memory_footprint_(false),
- abort_incremental_marking_(false),
- finalize_incremental_marking_(false),
marking_parity_(ODD_MARKING_PARITY),
compacting_(false),
was_marked_incrementally_(false),
int total_live_bytes = 0;
bool reduce_memory =
- reduce_memory_footprint_ || heap()->HasLowAllocationRate();
+ heap()->ShouldReduceMemory() || heap()->HasLowAllocationRate();
if (FLAG_manual_evacuation_candidates_selection) {
for (size_t i = 0; i < pages.size(); i++) {
Page* p = pages[i].second;
}
// Clear marking bits if incremental marking is aborted.
- if (was_marked_incrementally_ && abort_incremental_marking_) {
+ if (was_marked_incrementally_ && heap_->ShouldAbortIncrementalMarking()) {
heap()->incremental_marking()->Stop();
ClearMarkbits();
AbortWeakCollections();
void MarkCompactCollector::RetainMaps() {
- if (reduce_memory_footprint_ || abort_incremental_marking_ ||
+ if (heap()->ShouldReduceMemory() || heap()->ShouldAbortIncrementalMarking() ||
FLAG_retain_maps_for_n_gc == 0) {
// Do not retain dead maps if flag disables it or there is
// - memory pressure (reduce_memory_footprint_),
// Mark-Compact collector
class MarkCompactCollector {
public:
- // Set the global flags, it must be called before Prepare to take effect.
- inline void SetFlags(int flags);
-
static void Initialize();
void SetUp();
void ClearMarkbits();
- bool abort_incremental_marking() const { return abort_incremental_marking_; }
-
- bool finalize_incremental_marking() const {
- return finalize_incremental_marking_;
- }
-
bool is_compacting() const { return compacting_; }
MarkingParity marking_parity() { return marking_parity_; }
CollectorState state_;
#endif
- bool reduce_memory_footprint_;
-
- bool abort_incremental_marking_;
-
- bool finalize_incremental_marking_;
-
MarkingParity marking_parity_;
// True if we are collecting slots to perform evacuation from evacuation
// Import from test-heap.cc
+namespace v8 {
+namespace internal {
+
int CountNativeContexts();
+} // namespace internal
+} // namespace v8
static void NopListener(const v8::Debug::EventDetails& event_details) {
TEST(DebuggerCreatesContextIffActive) {
DebugLocalContext env;
v8::HandleScope scope(env->GetIsolate());
- CHECK_EQ(1, CountNativeContexts());
+ CHECK_EQ(1, v8::internal::CountNativeContexts());
v8::Debug::SetDebugEventListener(NULL);
CompileRun("debugger;");
- CHECK_EQ(1, CountNativeContexts());
+ CHECK_EQ(1, v8::internal::CountNativeContexts());
v8::Debug::SetDebugEventListener(NopListener);
CompileRun("debugger;");
- CHECK_EQ(2, CountNativeContexts());
+ CHECK_EQ(2, v8::internal::CountNativeContexts());
v8::Debug::SetDebugEventListener(NULL);
}
#include "src/snapshot/snapshot.h"
#include "test/cctest/cctest.h"
-using namespace v8::internal;
using v8::Just;
+namespace v8 {
+namespace internal {
+
+// Tests that should have access to private methods of {v8::internal::Heap}.
+// Those tests need to be defined using HEAP_TEST(Name) { ... }.
+#define HEAP_TEST_METHODS(V) \
+ V(GCFlags)
+
+
+#define HEAP_TEST(Name) \
+ CcTest register_test_##Name(HeapTester::Test##Name, __FILE__, #Name, NULL, \
+ true, true); \
+ void HeapTester::Test##Name()
+
+
+class HeapTester {
+ public:
+#define DECLARE_STATIC(Name) static void Test##Name();
+
+ HEAP_TEST_METHODS(DECLARE_STATIC)
+#undef DECLARE_STATIC
+};
+
+
static void CheckMap(Map* map, int type, int instance_size) {
CHECK(map->IsHeapObject());
#ifdef DEBUG
}
+HEAP_TEST(GCFlags) {
+ CcTest::InitializeVM();
+ Heap* heap = CcTest::heap();
+
+ heap->set_current_gc_flags(Heap::kNoGCFlags);
+ CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags());
+
+ // Set the flags to check whether we appropriately reset them after the GC.
+ heap->set_current_gc_flags(Heap::kAbortIncrementalMarkingMask);
+ heap->CollectAllGarbage(Heap::kReduceMemoryFootprintMask);
+ CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags());
+
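+ // Complete any pending sweeping so that IncrementalMarking::Start below
+ // takes the immediate path and sets the GC flags right away.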
+ MarkCompactCollector* collector = heap->mark_compact_collector();
+ if (collector->sweeping_in_progress()) {
+ collector->EnsureSweepingCompleted();
+ }
+
+ IncrementalMarking* marking = heap->incremental_marking();
+ marking->Stop();
+ marking->Start(Heap::kReduceMemoryFootprintMask);
+ CHECK_NE(0, heap->current_gc_flags() & Heap::kReduceMemoryFootprintMask);
+
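+ // A scavenge must leave the flags untouched.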
+ heap->Scavenge();
+ CHECK_NE(0, heap->current_gc_flags() & Heap::kReduceMemoryFootprintMask);
+
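+ // Only a full GC resets the flags again.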
+ heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+ CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags());
+}
+
+
TEST(IdleNotificationFinishMarking) {
i::FLAG_allow_natives_syntax = true;
CcTest::InitializeVM();
CHECK_LE(measure.Count(), count_upper_limit);
CHECK_LE(measure.Size(), size_upper_limit);
}
+
+} // namespace internal
+} // namespace v8