kGCCallbackFlagSynchronousPhantomCallbackProcessing, gc_reason);
} else {
heap->CollectAllGarbage(
- gc_reason, i::Heap::kNoGCFlags,
+ i::Heap::kNoGCFlags, gc_reason,
kGCCallbackFlagSynchronousPhantomCallbackProcessing);
}
} else {
CHECK(i::FLAG_expose_gc);
if (type == kMinorGarbageCollection) {
reinterpret_cast<i::Isolate*>(this)->heap()->CollectGarbage(
- i::NEW_SPACE, "Isolate::RequestGarbageCollection", i::Heap::kNoGCFlags,
+ i::NEW_SPACE, "Isolate::RequestGarbageCollection",
kGCCallbackFlagForced);
} else {
DCHECK_EQ(kFullGarbageCollection, type);
reinterpret_cast<i::Isolate*>(this)->heap()->CollectAllGarbage(
- "Isolate::RequestGarbageCollection",
- i::Heap::kAbortIncrementalMarkingMask, kGCCallbackFlagForced);
+ i::Heap::kAbortIncrementalMarkingMask,
+ "Isolate::RequestGarbageCollection", kGCCallbackFlagForced);
}
}
}
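For orientation: judging by the reason strings, this is the body of v8::Isolate::RequestGarbageCollectionForTesting; a minimal usage sketch, assuming that API name (the call is gated on --expose-gc by the CHECK above):

  // Force a full GC, aborting any in-progress incremental marking.
  isolate->RequestGarbageCollectionForTesting(
      v8::Isolate::kFullGarbageCollection);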
// Make sure we abort incremental marking.
- isolate_->heap()->CollectAllGarbage("prepare for break points",
- Heap::kMakeHeapIterableMask);
+ isolate_->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+ "prepare for break points");
{
HeapIterator iterator(isolate_->heap());
args[0]
->BooleanValue(args.GetIsolate()->GetCurrentContext())
.FromMaybe(false)) {
- heap->CollectAllGarbage("counters extension", Heap::kNoGCFlags);
+ heap->CollectAllGarbage(Heap::kNoGCFlags, "counters extension");
}
}
PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
entries_map_.occupancy());
}
- heap_->CollectAllGarbage("HeapObjectsMap::UpdateHeapObjectsMap",
- Heap::kMakeHeapIterableMask);
+ heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+ "HeapObjectsMap::UpdateHeapObjectsMap");
HeapIterator iterator(heap_);
for (HeapObject* obj = iterator.next();
obj != NULL;
// full GC is reachable from the root when computing dominators.
// This is not true for weakly reachable objects.
// As a temporary solution we call GC twice.
- heap_->CollectAllGarbage("HeapSnapshotGenerator::GenerateSnapshot",
- Heap::kMakeHeapIterableMask);
- heap_->CollectAllGarbage("HeapSnapshotGenerator::GenerateSnapshot",
- Heap::kMakeHeapIterableMask);
+ heap_->CollectAllGarbage(
+ Heap::kMakeHeapIterableMask,
+ "HeapSnapshotGenerator::GenerateSnapshot");
+ heap_->CollectAllGarbage(
+ Heap::kMakeHeapIterableMask,
+ "HeapSnapshotGenerator::GenerateSnapshot");
#ifdef VERIFY_HEAP
Heap* debug_heap = heap_;
}
-bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason,
- const GCFlags flags,
- const GCCallbackFlags callback_flags,
- const GCFlagOverride override) {
- GCFlagScope flag_scope(this, flags, callback_flags, override);
- const char* collector_reason = nullptr;
- const GarbageCollector collector =
- SelectGarbageCollector(space, &collector_reason);
- return CollectGarbage(collector, gc_reason, collector_reason);
-}
-
-
-bool Heap::CollectGarbageNewSpace(const char* gc_reason) {
- return CollectGarbage(NEW_SPACE, gc_reason, kNoGCFlags, kNoGCCallbackFlags,
- kDontOverride);
-}
-
-
void Heap::UpdateAllocationSiteFeedback(HeapObject* object,
ScratchpadSlotMode mode) {
Heap* heap = object->GetHeap();
}
+bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason,
+ const v8::GCCallbackFlags callbackFlags) {
+ const char* collector_reason = NULL;
+ GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
+ return CollectGarbage(collector, gc_reason, collector_reason, callbackFlags);
+}
+
+
Isolate* Heap::isolate() {
return reinterpret_cast<Isolate*>(
reinterpret_cast<intptr_t>(this) -
RETURN_VALUE; \
}
-#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY) \
- do { \
- AllocationResult __allocation__ = FUNCTION_CALL; \
- Object* __object__ = NULL; \
- RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \
- /* Two GCs before panicking. In newspace will almost always succeed. */ \
- for (int __i__ = 0; __i__ < 2; __i__++) { \
- (ISOLATE)->heap()->CollectGarbage(__allocation__.RetrySpace(), \
- "allocation failure", \
- Heap::kNoGCFlags, kNoGCCallbackFlags); \
- __allocation__ = FUNCTION_CALL; \
- RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \
- } \
- (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment(); \
- (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc"); \
- { \
- AlwaysAllocateScope __scope__(ISOLATE); \
- __allocation__ = FUNCTION_CALL; \
- } \
- RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \
- /* TODO(1181417): Fix this. */ \
- v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true); \
- RETURN_EMPTY; \
+#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY) \
+ do { \
+ AllocationResult __allocation__ = FUNCTION_CALL; \
+ Object* __object__ = NULL; \
+ RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \
+ /* Two GCs before panicking. In newspace will almost always succeed. */ \
+ for (int __i__ = 0; __i__ < 2; __i__++) { \
+ (ISOLATE)->heap()->CollectGarbage(__allocation__.RetrySpace(), \
+ "allocation failure"); \
+ __allocation__ = FUNCTION_CALL; \
+ RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \
+ } \
+ (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment(); \
+ (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc"); \
+ { \
+ AlwaysAllocateScope __scope__(ISOLATE); \
+ __allocation__ = FUNCTION_CALL; \
+ } \
+ RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \
+ /* TODO(1181417): Fix this. */ \
+ v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true); \
+ RETURN_EMPTY; \
} while (false)
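For orientation, a hedged sketch of how this retry machinery is consumed; the CALL_HEAP_FUNCTION wrapper below reflects the surrounding codebase as recalled, not lines touched by this change:

  // Wraps an AllocationResult-returning heap call and produces a Handle,
  // retrying via CALL_AND_RETRY's GC loop on allocation failure.
  #define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE)                    \
    CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL,                             \
                          return Handle<TYPE>(TYPE::cast(__object__),         \
                                              ISOLATE),                       \
                          return Handle<TYPE>())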
#define CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, RETURN_VALUE, \
};
-DEFINE_OPERATORS_FOR_FLAGS(Heap::GCFlags)
-
-
Heap::Heap()
: amount_of_external_allocated_memory_(0),
amount_of_external_allocated_memory_at_last_global_gc_(0),
ring_buffer_end_(0),
promotion_queue_(this),
configured_(false),
- current_gc_flags_(kNoGCFlags),
+ current_gc_flags_(Heap::kNoGCFlags),
current_gc_callback_flags_(GCCallbackFlags::kNoGCCallbackFlags),
external_string_table_(this),
chunks_queued_for_free_(NULL),
void Heap::HandleGCRequest() {
if (incremental_marking()->request_type() ==
IncrementalMarking::COMPLETE_MARKING) {
- CollectAllGarbage("GC interrupt", current_gc_flags_,
+ CollectAllGarbage(current_gc_flags_, "GC interrupt",
current_gc_callback_flags_);
return;
}
}
-void Heap::CollectAllGarbage(const char* gc_reason, const GCFlags flags,
+void Heap::CollectAllGarbage(int flags, const char* gc_reason,
const v8::GCCallbackFlags gc_callback_flags) {
// Since we are ignoring the return value, the exact choice of space does
// not matter, so long as we do not specify NEW_SPACE, which would not
// cause a full GC.
- CollectGarbage(OLD_SPACE, gc_reason, flags, gc_callback_flags);
+ set_current_gc_flags(flags);
+ CollectGarbage(OLD_SPACE, gc_reason, gc_callback_flags);
+ set_current_gc_flags(kNoGCFlags);
}
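The comment above relies on collector selection: SelectGarbageCollector normally maps NEW_SPACE to the scavenger and every other space to the mark-compactor. A minimal sketch of the distinction (the reason strings here are illustrative only):

  heap->CollectGarbage(NEW_SPACE, "example: scavenge only");    // minor GC
  heap->CollectGarbage(OLD_SPACE, "example: full collection");  // mark-compact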
isolate()->optimizing_compile_dispatcher()->Flush();
}
isolate()->ClearSerializerData();
- isolate()->compilation_cache()->Clear();
+ set_current_gc_flags(kMakeHeapIterableMask | kReduceMemoryFootprintMask);
+ isolate_->compilation_cache()->Clear();
const int kMaxNumberOfAttempts = 7;
const int kMinNumberOfAttempts = 2;
for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
- if (!CollectGarbage(OLD_SPACE, gc_reason,
- Heap::kAbortIncrementalMarkingMask |
- Heap::kReduceMemoryFootprintMask,
- kGCCallbackFlagForced) &&
- ((attempt + 1) >= kMinNumberOfAttempts)) {
+ if (!CollectGarbage(MARK_COMPACTOR, gc_reason, NULL,
+ v8::kGCCallbackFlagForced) &&
+ attempt + 1 >= kMinNumberOfAttempts) {
break;
}
}
+ set_current_gc_flags(kNoGCFlags);
new_space_.Shrink();
UncommitFromSpace();
}
bool Heap::CollectGarbage(GarbageCollector collector, const char* gc_reason,
- const char* collector_reason) {
+ const char* collector_reason,
+ const v8::GCCallbackFlags gc_callback_flags) {
// The VM is in the GC state until exiting this function.
VMState<GC> state(isolate_);
HistogramTimerScope histogram_timer_scope(
(collector == SCAVENGER) ? isolate_->counters()->gc_scavenger()
: isolate_->counters()->gc_compactor());
- next_gc_likely_to_collect_more = PerformGarbageCollection(collector);
+ next_gc_likely_to_collect_more =
+ PerformGarbageCollection(collector, gc_callback_flags);
}
GarbageCollectionEpilogue();
}
if (collector == MARK_COMPACTOR &&
- (current_gc_callback_flags_ & kGCCallbackFlagForced) != 0) {
+ (gc_callback_flags & kGCCallbackFlagForced) != 0) {
isolate()->CountUsage(v8::Isolate::kForcedGC);
}
}
-void Heap::StartIncrementalMarking(const GCFlags gc_flags,
+void Heap::StartIncrementalMarking(int gc_flags,
const GCCallbackFlags gc_callback_flags,
const char* reason) {
DCHECK(incremental_marking()->IsStopped());
}
if (perform_gc) {
if (space == NEW_SPACE) {
- CollectGarbageNewSpace("failed to reserve space in the new space");
+ CollectGarbage(NEW_SPACE, "failed to reserve space in the new space");
} else {
if (counter > 1) {
CollectAllGarbage(
+ kReduceMemoryFootprintMask | kAbortIncrementalMarkingMask,
"failed to reserve space in paged or large "
- "object space, trying to reduce memory footprint",
- kReduceMemoryFootprintMask | kAbortIncrementalMarkingMask);
+ "object space, trying to reduce memory footprint");
} else {
CollectAllGarbage(
- "failed to reserve space in paged or large object space",
- kAbortIncrementalMarkingMask);
+ kAbortIncrementalMarkingMask,
+ "failed to reserve space in paged or large object space");
}
}
gc_performed = true;
}
}
-
-bool Heap::PerformGarbageCollection(GarbageCollector collector) {
+bool Heap::PerformGarbageCollection(
+ GarbageCollector collector, const v8::GCCallbackFlags gc_callback_flags) {
int freed_global_handles = 0;
if (collector != SCAVENGER) {
GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL);
freed_global_handles =
isolate_->global_handles()->PostGarbageCollectionProcessing(
- collector, current_gc_callback_flags_);
+ collector, gc_callback_flags);
}
gc_post_processing_depth_--;
GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL);
VMState<EXTERNAL> state(isolate_);
HandleScope handle_scope(isolate_);
- CallGCEpilogueCallbacks(gc_type, current_gc_callback_flags_);
+ CallGCEpilogueCallbacks(gc_type, gc_callback_flags);
}
}
void Heap::MakeHeapIterable() {
DCHECK(AllowHeapAllocation::IsAllowed());
if (!IsHeapIterable()) {
- CollectAllGarbage("Heap::MakeHeapIterable", kMakeHeapIterableMask);
+ CollectAllGarbage(kMakeHeapIterableMask, "Heap::MakeHeapIterable");
}
if (mark_compact_collector()->sweeping_in_progress()) {
mark_compact_collector()->EnsureSweepingCompleted();
gc_idle_time_handler_.ShouldDoFinalIncrementalMarkCompact(
static_cast<size_t>(idle_time_in_ms), size_of_objects,
final_incremental_mark_compact_speed_in_bytes_per_ms))) {
- CollectAllGarbage("idle notification: finalize incremental",
- current_gc_flags_);
+ CollectAllGarbage(current_gc_flags_,
+ "idle notification: finalize incremental");
return true;
}
return false;
case DO_FULL_GC: {
DCHECK(contexts_disposed_ > 0);
HistogramTimerScope scope(isolate_->counters()->gc_context());
- CollectAllGarbage("idle notification: contexts disposed", kNoGCFlags);
+ CollectAllGarbage(kNoGCFlags, "idle notification: contexts disposed");
break;
}
case DO_SCAVENGE:
- CollectGarbageNewSpace("idle notification: scavenge");
+ CollectGarbage(NEW_SPACE, "idle notification: scavenge");
break;
case DO_FINALIZE_SWEEPING:
mark_compact_collector()->EnsureSweepingCompleted();
#include "src/allocation.h"
#include "src/assert-scope.h"
-#include "src/base/flags.h"
#include "src/globals.h"
#include "src/heap/gc-idle-time-handler.h"
#include "src/heap/incremental-marking.h"
kSmiRootsStart = kStringTableRootIndex + 1
};
- // Flags to indicate modes for a GC run.
- enum GCFlag {
- kNoGCFlags = 0u,
- kReduceMemoryFootprintMask = 1u << 0,
- kAbortIncrementalMarkingMask = 1u << 1,
- kFinalizeIncrementalMarkingMask = 1u << 2,
-
- // Making the heap iterable requires us to abort incremental marking.
- kMakeHeapIterableMask = kAbortIncrementalMarkingMask,
- };
- typedef base::Flags<GCFlag> GCFlags;
-
- // A GC invocation always respects the passed flags. Upon finished the current
- // cycle the previously set flags are either restored (kDontOverride), or
- // overriden with the flags indicating no special behavior (kOverride).
- enum GCFlagOverride {
- kOverride,
- kDontOverride,
- };
-
// Indicates whether live bytes adjustment is triggered
// - from within the GC code before sweeping started (SEQUENTIAL_TO_SWEEPER),
// - or from within GC (CONCURRENT_TO_SWEEPER),
OBJECT_STATS_COUNT = FIRST_CODE_AGE_SUB_TYPE + Code::kCodeAgeCount + 1
};
- class GCFlagScope {
- public:
- GCFlagScope(Heap* heap, GCFlags gc_flags, GCCallbackFlags callback_flags,
- GCFlagOverride override)
- : heap_(heap), override_(override) {
- if (override_ == kDontOverride) {
- saved_gc_flags_ = heap->current_gc_flags_;
- saved_gc_callback_flags_ = heap->current_gc_callback_flags_;
- }
- heap->set_current_gc_flags(gc_flags);
- heap->current_gc_callback_flags_ = callback_flags;
- }
-
- ~GCFlagScope() {
- if (override_ == kDontOverride) {
- heap_->set_current_gc_flags(saved_gc_flags_);
- heap_->current_gc_callback_flags_ = saved_gc_callback_flags_;
- } else {
- heap_->set_current_gc_flags(kNoGCFlags);
- heap_->current_gc_callback_flags_ = kNoGCCallbackFlags;
- }
- }
-
- private:
- Heap* heap_;
- GCFlagOverride override_;
- GCFlags saved_gc_flags_;
- GCCallbackFlags saved_gc_callback_flags_;
- };
-
// Taking this lock prevents the GC from entering a phase that relocates
// object references.
class RelocationLock {
// callee is only valid in sloppy mode.
static const int kArgumentsCalleeIndex = 1;
+ static const int kNoGCFlags = 0;
+ static const int kReduceMemoryFootprintMask = 1;
+ static const int kAbortIncrementalMarkingMask = 2;
+ static const int kFinalizeIncrementalMarkingMask = 4;
+
+ // Making the heap iterable requires us to abort incremental marking.
+ static const int kMakeHeapIterableMask = kAbortIncrementalMarkingMask;
+
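Being plain ints again, the masks compose with bitwise OR, matching the call sites elsewhere in this change (the reason string below is illustrative):

  // 1 | 2 == 3: shrink the footprint and abort in-progress incremental
  // marking in a single full GC.
  heap->CollectAllGarbage(
      Heap::kReduceMemoryFootprintMask | Heap::kAbortIncrementalMarkingMask,
      "example: aggressive full GC");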
// The roots that have an index less than this are always in old space.
static const int kOldSpaceRoots = 0x20;
// Methods triggering GCs. ===================================================
// ===========================================================================
- // Perform a garbage collection operation in a given space.
+ // Performs a garbage collection operation.
// Returns whether there is a chance that another major GC could
// collect more garbage.
inline bool CollectGarbage(
- AllocationSpace space, const char* gc_reason = nullptr,
- const GCFlags flags = kNoGCFlags,
- const GCCallbackFlags callback_flags = kNoGCCallbackFlags,
- const GCFlagOverride override = kOverride);
-
- inline bool CollectGarbageNewSpace(const char* gc_reason = nullptr);
+ AllocationSpace space, const char* gc_reason = NULL,
+ const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
- // Performs a full garbage collection.
+ // Performs a full garbage collection. If (flags & kMakeHeapIterableMask) is
+ // non-zero, then the slower precise sweeper is used, which leaves the heap
+ // in a state where we can iterate over the heap visiting all objects.
void CollectAllGarbage(
- const char* gc_reason = nullptr,
- const GCFlags flags = Heap::kFinalizeIncrementalMarkingMask,
+ int flags = kFinalizeIncrementalMarkingMask, const char* gc_reason = NULL,
const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
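A short usage sketch of the restored flags-first signature, following the iterability contract described in the comment above (the reason string is illustrative):

  heap->CollectAllGarbage(Heap::kMakeHeapIterableMask, "example: heap walk");
  HeapIterator iterator(heap);
  for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
    // Every live object is visitable here.
  }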
// Last hope GC, should try to squeeze out as much memory as possible.
- void CollectAllAvailableGarbage(const char* gc_reason = nullptr);
+ void CollectAllAvailableGarbage(const char* gc_reason = NULL);
// Invoked when GC was requested via the stack guard.
void HandleGCRequest();
// Starts incremental marking assuming incremental marking is currently
// stopped.
- void StartIncrementalMarking(const GCFlags = kNoGCFlags,
+ void StartIncrementalMarking(int gc_flags = kNoGCFlags,
const GCCallbackFlags gc_callback_flags =
GCCallbackFlags::kNoGCCallbackFlags,
const char* reason = nullptr);
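Usage mirrors the GCFlags test near the end of this change: flags passed here stick in current_gc_flags_ across scavenges and are only reset by the next full GC:

  heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask);
  heap->CollectGarbage(NEW_SPACE);  // scavenge: the flags survive
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);  // flags reset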
StoreBuffer* store_buffer() { return &store_buffer_; }
- void set_current_gc_flags(GCFlags flags) {
+ void set_current_gc_flags(int flags) {
current_gc_flags_ = flags;
DCHECK(!ShouldFinalizeIncrementalMarking() ||
!ShouldAbortIncrementalMarking());
// Performs a garbage collection operation.
// Returns whether there is a chance that another major GC could
// collect more garbage.
- bool CollectGarbage(GarbageCollector collector, const char* gc_reason,
- const char* collector_reason);
+ bool CollectGarbage(
+ GarbageCollector collector, const char* gc_reason,
+ const char* collector_reason,
+ const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
// Performs a garbage collection.
// Returns whether there is a chance that another major GC could
// collect more garbage.
- bool PerformGarbageCollection(GarbageCollector collector);
+ bool PerformGarbageCollection(
+ GarbageCollector collector,
+ const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
inline void UpdateOldSpaceLimits();
bool configured_;
// Currently set GC flags that are respected by all GC components.
- GCFlags current_gc_flags_;
+ int current_gc_flags_;
// Currently set GC callback flags that are used to pass information between
// the embedder and V8's GC.
// TODO(ulan): Replace it with incremental marking GC once
// chromium:490559 is fixed.
if (event.time_ms > state_.last_gc_time_ms + kLongDelayMs) {
- heap()->CollectAllGarbage("memory reducer background GC",
- Heap::kReduceMemoryFootprintMask);
+ heap()->CollectAllGarbage(Heap::kReduceMemoryFootprintMask,
+ "memory reducer background GC");
} else {
DCHECK(FLAG_incremental_marking);
heap()->StartIdleIncrementalMarking();
void Logger::LogCodeObjects() {
Heap* heap = isolate_->heap();
- heap->CollectAllGarbage("Logger::LogCodeObjects",
- Heap::kMakeHeapIterableMask);
+ heap->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+ "Logger::LogCodeObjects");
HeapIterator iterator(heap);
DisallowHeapAllocation no_gc;
for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
void Logger::LogCompiledFunctions() {
Heap* heap = isolate_->heap();
- heap->CollectAllGarbage("Logger::LogCompiledFunctions",
- Heap::kMakeHeapIterableMask);
+ heap->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+ "Logger::LogCompiledFunctions");
HandleScope scope(isolate_);
const int compiled_funcs_count = EnumerateCompiledFunctions(heap, NULL, NULL);
ScopedVector< Handle<SharedFunctionInfo> > sfis(compiled_funcs_count);
void Logger::LogAccessorCallbacks() {
Heap* heap = isolate_->heap();
- heap->CollectAllGarbage("Logger::LogAccessorCallbacks",
- Heap::kMakeHeapIterableMask);
+ heap->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+ "Logger::LogAccessorCallbacks");
HeapIterator iterator(heap);
DisallowHeapAllocation no_gc;
for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
RUNTIME_FUNCTION(Runtime_CollectGarbage) {
SealHandleScope shs(isolate);
DCHECK(args.length() == 1);
- isolate->heap()->CollectAllGarbage("%CollectGarbage", Heap::kNoGCFlags);
+ isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags, "%CollectGarbage");
return isolate->heap()->undefined_value();
}
Local<String> source =
String::NewFromTwoByte(env->GetIsolate(), two_byte_source);
// Trigger GCs so that the newly allocated string moves to old gen.
- CcTest::heap()->CollectGarbageNewSpace(); // in survivor space now
- CcTest::heap()->CollectGarbageNewSpace(); // in old gen now
+ CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in survivor space now
+ CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in old gen now
CHECK_EQ(source->IsExternal(), false);
CHECK_EQ(source->IsExternalOneByte(), false);
String::Encoding encoding = String::UNKNOWN_ENCODING;
v8::HandleScope scope(env->GetIsolate());
Local<String> source = v8_str(c_source);
// Trigger GCs so that the newly allocated string moves to old gen.
- CcTest::heap()->CollectGarbageNewSpace(); // in survivor space now
- CcTest::heap()->CollectGarbageNewSpace(); // in old gen now
+ CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in survivor space now
+ CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in old gen now
bool success = source->MakeExternal(
new TestOneByteResource(i::StrDup(c_source), &dispose_count));
CHECK(success);
v8::HandleScope scope(env->GetIsolate());
// Free some space in the new space so that we can check freshness.
- CcTest::heap()->CollectGarbageNewSpace();
- CcTest::heap()->CollectGarbageNewSpace();
+ CcTest::heap()->CollectGarbage(i::NEW_SPACE);
+ CcTest::heap()->CollectGarbage(i::NEW_SPACE);
uint16_t* two_byte_string = AsciiToTwoByteString("s1");
Local<String> small_string =
v8::HandleScope scope(env->GetIsolate());
// Free some space in the new space so that we can check freshness.
- CcTest::heap()->CollectGarbageNewSpace();
- CcTest::heap()->CollectGarbageNewSpace();
+ CcTest::heap()->CollectGarbage(i::NEW_SPACE);
+ CcTest::heap()->CollectGarbage(i::NEW_SPACE);
Local<String> small_string = String::NewFromUtf8(env->GetIsolate(), "s1");
// We should refuse to externalize small strings.
// Trigger GCs so that the newly allocated string moves to old gen.
SimulateFullSpace(CcTest::heap()->old_space());
- CcTest::heap()->CollectGarbageNewSpace(); // in survivor space now
- CcTest::heap()->CollectGarbageNewSpace(); // in old gen now
+ CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in survivor space now
+ CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in old gen now
// Turn into external string with unaligned resource data.
const char* c_cons = "_abcdefghijklmnopqrstuvwxyz";
// Trigger GCs and force evacuation.
CcTest::heap()->CollectAllGarbage();
- CcTest::heap()->CollectAllGarbage("MakingExternalUnalignedOneByteString",
- i::Heap::kReduceMemoryFootprintMask);
+ CcTest::heap()->CollectAllGarbage(i::Heap::kReduceMemoryFootprintMask);
}
CcTest::isolate(), new TestResource(two_byte_string));
i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
// Trigger GCs so that the newly allocated string moves to old gen.
- CcTest::heap()->CollectGarbageNewSpace(); // in survivor space now
- CcTest::heap()->CollectGarbageNewSpace(); // in old gen now
+ CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in survivor space now
+ CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in old gen now
i::Handle<i::String> isymbol =
factory->InternalizeString(istring);
CHECK(isymbol->IsInternalizedString());
CcTest::isolate(), new TestOneByteResource(i::StrDup(one_byte_string)));
i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
// Trigger GCs so that the newly allocated string moves to old gen.
- CcTest::heap()->CollectGarbageNewSpace(); // in survivor space now
- CcTest::heap()->CollectGarbageNewSpace(); // in old gen now
+ CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in survivor space now
+ CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in old gen now
i::Handle<i::String> isymbol =
factory->InternalizeString(istring);
CHECK(isymbol->IsInternalizedString());
Local<String> string = String::NewExternal(
CcTest::isolate(), new TestResource(two_byte_string, &dispose_count));
i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
- CcTest::heap()->CollectGarbageNewSpace();
+ CcTest::heap()->CollectGarbage(i::NEW_SPACE);
in_new_space = CcTest::heap()->InNewSpace(*istring);
CHECK(in_new_space || CcTest::heap()->old_space()->Contains(*istring));
CHECK_EQ(0, dispose_count);
CcTest::isolate(),
new TestOneByteResource(i::StrDup(one_byte_string), &dispose_count));
i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
- CcTest::heap()->CollectGarbageNewSpace();
+ CcTest::heap()->CollectGarbage(i::NEW_SPACE);
in_new_space = CcTest::heap()->InNewSpace(*istring);
CHECK(in_new_space || CcTest::heap()->old_space()->Contains(*istring));
CHECK_EQ(0, dispose_count);
CHECK_EQ(initial_handle_count + 1, global_handles->global_handles_count());
if (map.IsWeak()) {
CcTest::i_isolate()->heap()->CollectAllGarbage(
- "TestGlobalValueMap", i::Heap::kAbortIncrementalMarkingMask);
+ i::Heap::kAbortIncrementalMarkingMask);
} else {
map.Clear();
}
if (global_gc) {
CcTest::heap()->CollectAllGarbage();
} else {
- CcTest::heap()->CollectGarbageNewSpace();
+ CcTest::heap()->CollectGarbage(i::NEW_SPACE);
}
// We are relying on this creating a big flag array and reserving the space
// up front.
if (global_gc) {
CcTest::heap()->CollectAllGarbage();
} else {
- CcTest::heap()->CollectGarbageNewSpace();
+ CcTest::heap()->CollectGarbage(i::NEW_SPACE);
}
CHECK_EQ(1729, t1->x());
object_a.handle.Reset(iso, a);
object_b.handle.Reset(iso, b);
if (global_gc) {
- CcTest::heap()->CollectAllGarbage("ResetWeakHandle",
- Heap::kAbortIncrementalMarkingMask);
+ CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
} else {
- CcTest::heap()->CollectGarbageNewSpace();
+ CcTest::heap()->CollectGarbage(i::NEW_SPACE);
}
}
CHECK(object_b.handle.IsIndependent());
}
if (global_gc) {
- CcTest::heap()->CollectAllGarbage("ResetWeakHandle",
- Heap::kAbortIncrementalMarkingMask);
+ CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
} else {
CcTest::heap()->CollectGarbage(i::NEW_SPACE);
}
}
-static void InvokeScavenge() { CcTest::heap()->CollectGarbageNewSpace(); }
+static void InvokeScavenge() { CcTest::heap()->CollectGarbage(i::NEW_SPACE); }
static void InvokeMarkSweep() { CcTest::heap()->CollectAllGarbage(); }
// been marked at that point. Therefore some of the maps are not
// collected until the second garbage collection.
CcTest::heap()->CollectAllGarbage();
- CcTest::heap()->CollectAllGarbage("CheckSurvivingGlobalObjectsCount",
- i::Heap::kMakeHeapIterableMask);
+ CcTest::heap()->CollectAllGarbage(i::Heap::kMakeHeapIterableMask);
int count = GetGlobalObjectsCount();
#ifdef DEBUG
if (count != expected) CcTest::heap()->TracePathToGlobal();
handle, WeakApiCallback, v8::WeakCallbackType::kParameter);
}
reinterpret_cast<i::Isolate*>(isolate)->heap()->CollectAllGarbage(
- "WeakCallbackApi", i::Heap::kAbortIncrementalMarkingMask);
+ i::Heap::kAbortIncrementalMarkingMask);
// Verify disposed.
CHECK_EQ(initial_handles, globals->global_handles_count());
}
TEST(Regress2333) {
LocalContext env;
for (int i = 0; i < 3; i++) {
- CcTest::heap()->CollectGarbageNewSpace();
+ CcTest::heap()->CollectGarbage(i::NEW_SPACE);
}
}
Local<Object> obj = Object::New(isolate);
CHECK(!obj.IsEmpty());
- CcTest::heap()->CollectAllGarbage("PrologueCallbackAlloc",
- i::Heap::kAbortIncrementalMarkingMask);
+ CcTest::heap()->CollectAllGarbage(
+ i::Heap::kAbortIncrementalMarkingMask);
}
Local<Object> obj = Object::New(isolate);
CHECK(!obj.IsEmpty());
- CcTest::heap()->CollectAllGarbage("EpilogueCallbackAlloc",
- i::Heap::kAbortIncrementalMarkingMask);
+ CcTest::heap()->CollectAllGarbage(
+ i::Heap::kAbortIncrementalMarkingMask);
}
CHECK_EQ(0, epilogue_call_count_alloc);
isolate->AddGCPrologueCallback(PrologueCallbackAlloc);
isolate->AddGCEpilogueCallback(EpilogueCallbackAlloc);
- CcTest::heap()->CollectAllGarbage("GCCallbacks",
- i::Heap::kAbortIncrementalMarkingMask);
+ CcTest::heap()->CollectAllGarbage(
+ i::Heap::kAbortIncrementalMarkingMask);
CHECK_EQ(1, prologue_call_count_alloc);
CHECK_EQ(1, epilogue_call_count_alloc);
isolate->RemoveGCPrologueCallback(PrologueCallbackAlloc);
// Collect garbage to ensure weak handles are cleared.
CcTest::heap()->CollectAllGarbage();
- CcTest::heap()->CollectAllGarbage("CheckDebuggerUnloaded",
- Heap::kMakeHeapIterableMask);
+ CcTest::heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask);
// Iterate the heap and check that there are no debugger-related objects left.
HeapIterator iterator(CcTest::heap());
break_point_hit_count++;
if (break_point_hit_count % 2 == 0) {
// Scavenge.
- CcTest::heap()->CollectGarbageNewSpace();
+ CcTest::heap()->CollectGarbage(v8::internal::NEW_SPACE);
} else {
// Mark sweep compact.
CcTest::heap()->CollectAllGarbage();
// Run the garbage collector to enforce heap verification if option
// --verify-heap is set.
- CcTest::heap()->CollectGarbageNewSpace();
+ CcTest::heap()->CollectGarbage(v8::internal::NEW_SPACE);
// Set the break flag again to come back here as soon as possible.
v8::Debug::DebugBreak(CcTest::isolate());
CHECK_EQ(1 + i * 3, break_point_hit_count);
// Scavenge and call function.
- CcTest::heap()->CollectGarbageNewSpace();
+ CcTest::heap()->CollectGarbage(v8::internal::NEW_SPACE);
f->Call(recv, 0, NULL);
CHECK_EQ(2 + i * 3, break_point_hit_count);
HandleScope sc(isolate);
// Check GC.
- heap->CollectGarbageNewSpace();
+ heap->CollectGarbage(NEW_SPACE);
Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
Handle<String> name = factory->InternalizeUtf8String("theFunction");
*Object::GetProperty(obj, prop_namex).ToHandleChecked());
}
- heap->CollectGarbageNewSpace();
+ heap->CollectGarbage(NEW_SPACE);
// Function should be alive.
CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name));
}
// after gc, it should survive
- heap->CollectGarbageNewSpace();
+ heap->CollectGarbage(NEW_SPACE);
CHECK((*h1)->IsString());
CHECK((*h2)->IsHeapNumber());
&TestWeakGlobalHandleCallback);
// Scavenge treats weak pointers as normal roots.
- heap->CollectGarbageNewSpace();
+ heap->CollectGarbage(NEW_SPACE);
CHECK((*h1)->IsString());
CHECK((*h2)->IsHeapNumber());
// Make sure the objects are promoted.
heap->CollectGarbage(OLD_SPACE);
- heap->CollectGarbageNewSpace();
+ heap->CollectGarbage(NEW_SPACE);
CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));
std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
&TestWeakGlobalHandleCallback);
// Scavenge does not recognize weak references.
- heap->CollectGarbageNewSpace();
+ heap->CollectGarbage(NEW_SPACE);
CHECK(!WeakPointerCleared);
// perform a scavenge while incremental marking is still running.
SimulateIncrementalMarking(CcTest::heap());
*function2.location() = NULL;
- CcTest::heap()->CollectGarbageNewSpace("test scavenge while marking");
+ CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");
// Simulate one final GC to make sure the candidate queue is sane.
CcTest::heap()->CollectAllGarbage();
// Scavenge treats these references as strong.
for (int j = 0; j < 10; j++) {
- CcTest::heap()->CollectGarbageNewSpace();
+ CcTest::heap()->CollectGarbage(NEW_SPACE);
CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
}
// Get rid of f3 and f5 in the same way.
CompileRun("f3=null");
for (int j = 0; j < 10; j++) {
- CcTest::heap()->CollectGarbageNewSpace();
+ CcTest::heap()->CollectGarbage(NEW_SPACE);
CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
}
CcTest::heap()->CollectAllGarbage();
CHECK(old_capacity == new_capacity);
// Let the scavenger empty the new space.
- heap->CollectGarbageNewSpace();
+ heap->CollectGarbage(NEW_SPACE);
CHECK_LE(new_space->Size(), old_capacity);
// Explicitly shrinking should halve the space capacity.
// Set the flags to check whether we appropriately reset them after the GC.
heap->set_current_gc_flags(Heap::kAbortIncrementalMarkingMask);
- heap->CollectAllGarbage("GCFlags", Heap::kReduceMemoryFootprintMask);
+ heap->CollectAllGarbage(Heap::kReduceMemoryFootprintMask);
CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
MarkCompactCollector* collector = heap->mark_compact_collector();
heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask);
CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
- heap->CollectGarbageNewSpace();
+ heap->CollectGarbage(NEW_SPACE);
// NewSpace scavenges should not overwrite the flags.
CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
- heap->CollectAllGarbage("GCFlags", Heap::kAbortIncrementalMarkingMask);
+ heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
}
// Triggering one GC will cause a lot of garbage to be discovered but
// evenly spread across all allocated pages.
- heap->CollectAllGarbage("triggered for preparation",
- Heap::kFinalizeIncrementalMarkingMask);
+ heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
+ "triggered for preparation");
CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
// Triggering subsequent GCs should cause at least half of the pages
// to be released to the OS after at most two cycles.
- heap->CollectAllGarbage("triggered by test 1",
- Heap::kFinalizeIncrementalMarkingMask);
- ;
+ heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
+ "triggered by test 1");
CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
- heap->CollectAllGarbage("triggered by test 2",
- Heap::kFinalizeIncrementalMarkingMask);
+ heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
+ "triggered by test 2");
CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages() * 2);
// Triggering a last-resort GC should cause all pages to be released to the
CcTest::global()->Set(array_name, v8::Int32::New(CcTest::isolate(), 0));
// First make sure we flip spaces
- CcTest::heap()->CollectGarbageNewSpace();
+ CcTest::heap()->CollectGarbage(NEW_SPACE);
// Allocate the object.
Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
}
CHECK(weak_cell1->value()->IsFixedArray());
CHECK_EQ(*survivor, weak_cell2->value());
- heap->CollectGarbageNewSpace();
+ heap->CollectGarbage(NEW_SPACE);
CHECK(weak_cell1->value()->IsFixedArray());
CHECK_EQ(*survivor, weak_cell2->value());
- heap->CollectGarbageNewSpace();
+ heap->CollectGarbage(NEW_SPACE);
CHECK(weak_cell1->value()->IsFixedArray());
CHECK_EQ(*survivor, weak_cell2->value());
heap->CollectAllAvailableGarbage();
heap->StartIncrementalMarking();
}
marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
- heap->CollectGarbageNewSpace();
+ heap->CollectGarbage(NEW_SPACE);
CHECK(weak_cell->value()->IsFixedArray());
weak_cells[i] = inner_scope.CloseAndEscape(weak_cell);
}
CHECK(2 * old_capacity == new_space->TotalCapacity());
// Call the scavenger two times to get an empty new space
- heap->CollectGarbageNewSpace();
- heap->CollectGarbageNewSpace();
+ heap->CollectGarbage(NEW_SPACE);
+ heap->CollectGarbage(NEW_SPACE);
// First create a few objects which will survive a scavenge, and will get
// promoted to the old generation later on. These objects will create
for (int i = 0; i < number_handles; i++) {
handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
}
- heap->CollectGarbageNewSpace();
+ heap->CollectGarbage(NEW_SPACE);
// Create the first huge object which will exactly fit the first semi-space
// page.
// This scavenge will corrupt memory if the promotion queue is not
// evacuated.
- heap->CollectGarbageNewSpace();
+ heap->CollectGarbage(NEW_SPACE);
}
isolate->Dispose();
}
Isolate* isolate = CcTest::i_isolate();
Heap* heap = isolate->heap();
size_t counter1 = heap->NewSpaceAllocationCounter();
- heap->CollectGarbageNewSpace();
+ heap->CollectGarbage(NEW_SPACE);
const size_t kSize = 1024;
AllocateInSpace(isolate, kSize, NEW_SPACE);
size_t counter2 = heap->NewSpaceAllocationCounter();
CHECK_EQ(kSize, counter2 - counter1);
- heap->CollectGarbageNewSpace();
+ heap->CollectGarbage(NEW_SPACE);
size_t counter3 = heap->NewSpaceAllocationCounter();
CHECK_EQ(0U, counter3 - counter2);
// Test counter overflow.
Isolate* isolate = CcTest::i_isolate();
Heap* heap = isolate->heap();
size_t counter1 = heap->OldGenerationAllocationCounter();
- heap->CollectGarbageNewSpace();
- heap->CollectGarbageNewSpace();
+ heap->CollectGarbage(NEW_SPACE);
+ heap->CollectGarbage(NEW_SPACE);
const size_t kSize = 1024;
AllocateInSpace(isolate, kSize, OLD_SPACE);
size_t counter2 = heap->OldGenerationAllocationCounter();
// TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed.
CHECK_LE(kSize, counter2 - counter1);
- heap->CollectGarbageNewSpace();
+ heap->CollectGarbage(NEW_SPACE);
size_t counter3 = heap->OldGenerationAllocationCounter();
CHECK_EQ(0u, counter3 - counter2);
AllocateInSpace(isolate, kSize, OLD_SPACE);
"})(this);");
logger->StopProfiler();
reinterpret_cast<i::Isolate*>(isolate)->heap()->CollectAllGarbage(
- "EquivalenceOfLoggingAndTraversal", i::Heap::kMakeHeapIterableMask);
+ i::Heap::kMakeHeapIterableMask);
logger->StringEvent("test-logging-done", "");
// Iterate the heap to find compiled functions; this will write to the log.
// Call GC to see if we can handle a poisonous memento right after the
// current new space top pointer.
CcTest::i_isolate()->heap()->CollectAllGarbage(
- "Regress340063", Heap::kAbortIncrementalMarkingMask);
+ Heap::kAbortIncrementalMarkingMask);
}
// Call GC to see if we can handle a poisonous memento right after the
// current new space top pointer.
CcTest::i_isolate()->heap()->CollectAllGarbage(
- "Regress470390", Heap::kAbortIncrementalMarkingMask);
+ Heap::kAbortIncrementalMarkingMask);
}
SetUpNewSpaceWithPoisonedMementoAtTop();
// Force GC to test the poisoned memento handling
- CcTest::i_isolate()->heap()->CollectGarbageNewSpace();
+ CcTest::i_isolate()->heap()->CollectGarbage(i::NEW_SPACE);
}
// been marked at that point. Therefore some of the maps are not
// collected until the second garbage collection.
CcTest::heap()->CollectAllGarbage();
- CcTest::heap()->CollectAllGarbage("CheckSurvivingGlobalObjectsCount",
- i::Heap::kMakeHeapIterableMask);
+ CcTest::heap()->CollectAllGarbage(i::Heap::kMakeHeapIterableMask);
int count = GetGlobalObjectsCount();
#ifdef DEBUG
if (count != expected) CcTest::heap()->TracePathToGlobal();
CHECK_EQ(2, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
// Force a full GC.
- heap->CollectAllGarbage("Weakness", Heap::kNoGCFlags);
+ heap->CollectAllGarbage(Heap::kNoGCFlags);
CHECK_EQ(0, NumberOfWeakCalls);
CHECK_EQ(2, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
CHECK_EQ(
// Force a full GC.
// Perform two consecutive GCs because the first one will only clear
// weak references whereas the second one will also clear weak maps.
- heap->CollectAllGarbage("Weakness", Heap::kNoGCFlags);
+ heap->CollectAllGarbage(Heap::kNoGCFlags);
CHECK_EQ(1, NumberOfWeakCalls);
CHECK_EQ(2, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
CHECK_EQ(
0, ObjectHashTable::cast(weakmap->table())->NumberOfDeletedElements());
- heap->CollectAllGarbage("Weakness", Heap::kNoGCFlags);
+ heap->CollectAllGarbage(Heap::kNoGCFlags);
CHECK_EQ(1, NumberOfWeakCalls);
CHECK_EQ(0, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
CHECK_EQ(2,
CHECK_EQ(32, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
CHECK_EQ(
0, ObjectHashTable::cast(weakmap->table())->NumberOfDeletedElements());
- heap->CollectAllGarbage("Shrinking", Heap::kNoGCFlags);
+ heap->CollectAllGarbage(Heap::kNoGCFlags);
CHECK_EQ(0, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
CHECK_EQ(
32, ObjectHashTable::cast(weakmap->table())->NumberOfDeletedElements());
CHECK_EQ(1, ObjectHashTable::cast(weakset->table())->NumberOfElements());
// Force a full GC.
- heap->CollectAllGarbage("WeakSet_Weakness", Heap::kNoGCFlags);
+ heap->CollectAllGarbage(Heap::kNoGCFlags);
CHECK_EQ(0, NumberOfWeakCalls);
CHECK_EQ(1, ObjectHashTable::cast(weakset->table())->NumberOfElements());
CHECK_EQ(
// Force a full GC.
// Perform two consecutive GCs because the first one will only clear
// weak references whereas the second one will also clear weak sets.
- heap->CollectAllGarbage("WeakSet_Weakness", Heap::kNoGCFlags);
+ heap->CollectAllGarbage(Heap::kNoGCFlags);
CHECK_EQ(1, NumberOfWeakCalls);
CHECK_EQ(1, ObjectHashTable::cast(weakset->table())->NumberOfElements());
CHECK_EQ(
0, ObjectHashTable::cast(weakset->table())->NumberOfDeletedElements());
- heap->CollectAllGarbage("WeakSet_Weakness", Heap::kNoGCFlags);
+ heap->CollectAllGarbage(Heap::kNoGCFlags);
CHECK_EQ(1, NumberOfWeakCalls);
CHECK_EQ(0, ObjectHashTable::cast(weakset->table())->NumberOfElements());
CHECK_EQ(
CHECK_EQ(32, ObjectHashTable::cast(weakset->table())->NumberOfElements());
CHECK_EQ(
0, ObjectHashTable::cast(weakset->table())->NumberOfDeletedElements());
- heap->CollectAllGarbage("WeakSet_Shrinking", Heap::kNoGCFlags);
+ heap->CollectAllGarbage(Heap::kNoGCFlags);
CHECK_EQ(0, ObjectHashTable::cast(weakset->table())->NumberOfElements());
CHECK_EQ(
32, ObjectHashTable::cast(weakset->table())->NumberOfDeletedElements());