if (FLAG_gc_interval >= 0 &&
!disallow_allocation_failure_ &&
Heap::allocation_timeout_-- <= 0) {
- return Failure::RetryAfterGC(size_in_bytes, space);
+ return Failure::RetryAfterGC(space);
}
Counters::objs_since_last_full.Increment();
Counters::objs_since_last_young.Increment();
}
+// --gc-greedy support: in debug builds every allocation site may force a
+// collection (to flush out GC-unsafe code); in release builds the check
+// expands to an empty statement and costs nothing.
+#ifdef DEBUG
#define GC_GREEDY_CHECK() \
- ASSERT(!FLAG_gc_greedy || v8::internal::Heap::GarbageCollectionGreedyCheck())
+ if (FLAG_gc_greedy) v8::internal::Heap::GarbageCollectionGreedyCheck()
+#else
+#define GC_GREEDY_CHECK() { }
+#endif
// Calls the FUNCTION_CALL function and retries it up to three times
v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_0", true);\
} \
if (!__object__->IsRetryAfterGC()) RETURN_EMPTY; \
- Heap::CollectGarbage(Failure::cast(__object__)->requested(), \
- Failure::cast(__object__)->allocation_space()); \
+ Heap::CollectGarbage(Failure::cast(__object__)->allocation_space()); \
__object__ = FUNCTION_CALL; \
if (!__object__->IsFailure()) RETURN_VALUE; \
if (__object__->IsOutOfMemoryFailure()) { \
// not matter, so long as we do not specify NEW_SPACE, which would not
// cause a full GC.
MarkCompactCollector::SetForceCompaction(force_compaction);
- CollectGarbage(0, OLD_POINTER_SPACE, collectionPolicy);
+ CollectGarbage(OLD_POINTER_SPACE, collectionPolicy);
MarkCompactCollector::SetForceCompaction(false);
}
}
-bool Heap::CollectGarbage(int requested_size,
- AllocationSpace space,
+void Heap::CollectGarbage(AllocationSpace space,
CollectionPolicy collectionPolicy) {
// The VM is in the GC state until exiting this function.
VMState state(GC);
#ifdef ENABLE_LOGGING_AND_PROFILING
if (FLAG_log_gc) HeapProfiler::WriteSample();
#endif
-
- switch (space) {
- case NEW_SPACE:
- return new_space_.Available() >= requested_size;
- case OLD_POINTER_SPACE:
- return old_pointer_space_->Available() >= requested_size;
- case OLD_DATA_SPACE:
- return old_data_space_->Available() >= requested_size;
- case CODE_SPACE:
- return code_space_->Available() >= requested_size;
- case MAP_SPACE:
- return map_space_->Available() >= requested_size;
- case CELL_SPACE:
- return cell_space_->Available() >= requested_size;
- case LO_SPACE:
- return lo_space_->Available() >= requested_size;
- }
- return false;
}
while (gc_performed) {
gc_performed = false;
if (!new_space->ReserveSpace(new_space_size)) {
- Heap::CollectGarbage(new_space_size, NEW_SPACE);
+ Heap::CollectGarbage(NEW_SPACE);
gc_performed = true;
}
if (!old_pointer_space->ReserveSpace(pointer_space_size)) {
- Heap::CollectGarbage(pointer_space_size, OLD_POINTER_SPACE);
+ Heap::CollectGarbage(OLD_POINTER_SPACE);
gc_performed = true;
}
if (!(old_data_space->ReserveSpace(data_space_size))) {
- Heap::CollectGarbage(data_space_size, OLD_DATA_SPACE);
+ Heap::CollectGarbage(OLD_DATA_SPACE);
gc_performed = true;
}
if (!(code_space->ReserveSpace(code_space_size))) {
- Heap::CollectGarbage(code_space_size, CODE_SPACE);
+ Heap::CollectGarbage(CODE_SPACE);
gc_performed = true;
}
if (!(map_space->ReserveSpace(map_space_size))) {
- Heap::CollectGarbage(map_space_size, MAP_SPACE);
+ Heap::CollectGarbage(MAP_SPACE);
gc_performed = true;
}
if (!(cell_space->ReserveSpace(cell_space_size))) {
- Heap::CollectGarbage(cell_space_size, CELL_SPACE);
+ Heap::CollectGarbage(CELL_SPACE);
gc_performed = true;
}
// We add a slack-factor of 2 in order to have space for a series of
large_object_size += cell_space_size + map_space_size + code_space_size +
data_space_size + pointer_space_size;
if (!(lo_space->ReserveSpace(large_object_size))) {
- Heap::CollectGarbage(large_object_size, LO_SPACE);
+ Heap::CollectGarbage(LO_SPACE);
gc_performed = true;
}
}
HistogramTimerScope scope(&Counters::gc_context);
CollectAllGarbage(false);
} else {
- CollectGarbage(0, NEW_SPACE);
+ CollectGarbage(NEW_SPACE);
}
new_space_.Shrink();
last_gc_count = gc_count_;
#ifdef DEBUG
-bool Heap::GarbageCollectionGreedyCheck() {
+// Debug-only helper behind --gc-greedy.  Now returns void: the old bool
+// result (space-available-after-GC) had no remaining callers once
+// CollectGarbage stopped reporting a size, so the greedy check simply
+// performs a new-space collection when it is safe to do so.
+void Heap::GarbageCollectionGreedyCheck() {
ASSERT(FLAG_gc_greedy);
- if (Bootstrapper::IsActive()) return true;
- if (disallow_allocation_failure()) return true;
- return CollectGarbage(0, NEW_SPACE);
+ // Skip the forced GC while bootstrapping or while allocation failure
+ // is explicitly disallowed, since a GC here could not be serviced.
+ if (Bootstrapper::IsActive()) return;
+ if (disallow_allocation_failure()) return;
+ CollectGarbage(NEW_SPACE);
}
#endif
// Performs garbage collection operation.
// Returns whether required_space bytes are available after the collection.
- static bool CollectGarbage(int required_space,
- AllocationSpace space,
+ static void CollectGarbage(AllocationSpace space,
CollectionPolicy collectionPolicy = NORMAL);
// Performs a full garbage collection. Force compaction if the
#ifdef DEBUG
// Utility used with flag gc-greedy.
- static bool GarbageCollectionGreedyCheck();
+ static void GarbageCollectionGreedyCheck();
#endif
static void AddGCPrologueCallback(
}
-int Failure::requested() const {
- const int kShiftBits =
- kFailureTypeTagSize + kSpaceTagSize - kObjectAlignmentBits;
- STATIC_ASSERT(kShiftBits >= 0);
- ASSERT(type() == RETRY_AFTER_GC);
- return static_cast<int>(value() >> kShiftBits);
-}
-
-
AllocationSpace Failure::allocation_space() const {
ASSERT_EQ(RETRY_AFTER_GC, type());
return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)
}
-Failure* Failure::RetryAfterGC(int requested_bytes) {
- // Assert that the space encoding fits in the three bytes allotted for it.
- ASSERT((LAST_SPACE & ~kSpaceTagMask) == 0);
- uintptr_t requested =
- static_cast<uintptr_t>(requested_bytes >> kObjectAlignmentBits);
- int tag_bits = kSpaceTagSize + kFailureTypeTagSize + kFailureTagSize;
- if (((requested << tag_bits) >> tag_bits) != requested) {
- // No room for entire requested size in the bits. Round down to
- // maximally representable size.
- requested = static_cast<intptr_t>(
- (~static_cast<uintptr_t>(0)) >> (tag_bits + 1));
- }
- int value = static_cast<int>(requested << kSpaceTagSize) | NEW_SPACE;
- return Construct(RETRY_AFTER_GC, value);
+// RETRY_AFTER_GC failures no longer encode a requested byte count; the
+// allocation-space tag alone tells the caller which space to collect.
+// The no-argument overload defaults to NEW_SPACE.
+Failure* Failure::RetryAfterGC() {
+ return RetryAfterGC(NEW_SPACE);
+}
+
+
+Failure* Failure::RetryAfterGC(AllocationSpace space) {
+ // The space value must fit entirely inside the space-tag bit field.
+ ASSERT((space & ~kSpaceTagMask) == 0);
+ return Construct(RETRY_AFTER_GC, space);
}
}
-Failure* Failure::RetryAfterGC(int requested_bytes, AllocationSpace space) {
- ASSERT((space & ~kSpaceTagMask) == 0);
- // TODO(X64): Stop using Smi validation for non-smi checks, even if they
- // happen to be identical at the moment.
-
- int requested = requested_bytes >> kObjectAlignmentBits;
- int value = (requested << kSpaceTagSize) | space;
- // We can't very well allocate a heap number in this situation, and if the
- // requested memory is so large it seems reasonable to say that this is an
- // out of memory situation. This fixes a crash in
- // js1_5/Regress/regress-303213.js.
- if (value >> kSpaceTagSize != requested ||
- !Smi::IsValid(value) ||
- value != ((value << kFailureTypeTagSize) >> kFailureTypeTagSize) ||
- !Smi::IsValid(value << kFailureTypeTagSize)) {
- Top::context()->mark_out_of_memory();
- return Failure::OutOfMemoryException();
- }
- return Construct(RETRY_AFTER_GC, value);
-}
-
-
// Should a word be prefixed by 'a' or 'an' in order to read naturally in
// English? Returns false for non-ASCII or words that don't start with
// a capital letter. The a/an rule follows pronunciation in English.
//
// Failures are a single word, encoded as follows:
// +-------------------------+---+--+--+
-// |...rrrrrrrrrrrrrrrrrrrrrr|sss|tt|11|
+// |.........unused..........|sss|tt|11|
// +-------------------------+---+--+--+
// 7 6 4 32 10
//
// allocation space tag is 000 for all failure types except
// RETRY_AFTER_GC. For RETRY_AFTER_GC, the possible values are the
// allocation spaces (the encoding is found in globals.h).
-//
-// The remaining bits is the size of the allocation request in units
-// of the pointer size, and is zeroed except for RETRY_AFTER_GC
-// failures. The 25 bits (on a 32 bit platform) gives a representable
-// range of 2^27 bytes (128MB).
// Failure type tag info.
const int kFailureTypeTagSize = 2;
// Returns the space that needs to be collected for RetryAfterGC failures.
inline AllocationSpace allocation_space() const;
- // Returns the number of bytes requested (up to the representable maximum)
- // for RetryAfterGC failures.
- inline int requested() const;
-
inline bool IsInternalError() const;
inline bool IsOutOfMemoryException() const;
- static Failure* RetryAfterGC(int requested_bytes, AllocationSpace space);
- static inline Failure* RetryAfterGC(int requested_bytes); // NEW_SPACE
+ static inline Failure* RetryAfterGC(AllocationSpace space);
+ static inline Failure* RetryAfterGC(); // NEW_SPACE
static inline Failure* Exception();
static inline Failure* InternalError();
static inline Failure* OutOfMemoryException();
if (failure->IsRetryAfterGC()) {
// Try to do a garbage collection; ignore it if it fails. The C
// entry stub will throw an out-of-memory exception in that case.
- Heap::CollectGarbage(failure->requested(), failure->allocation_space());
+ Heap::CollectGarbage(failure->allocation_space());
} else {
// Handle last resort GC and make sure to allow future allocations
// to grow the heap without causing GCs (if possible).
object = SlowAllocateRaw(size_in_bytes);
if (object != NULL) return object;
- return Failure::RetryAfterGC(size_in_bytes, identity());
+ return Failure::RetryAfterGC(identity());
}
object = SlowMCAllocateRaw(size_in_bytes);
if (object != NULL) return object;
- return Failure::RetryAfterGC(size_in_bytes, identity());
+ return Failure::RetryAfterGC(identity());
}
Object* NewSpace::AllocateRawInternal(int size_in_bytes,
AllocationInfo* alloc_info) {
Address new_top = alloc_info->top + size_in_bytes;
- if (new_top > alloc_info->limit) return Failure::RetryAfterGC(size_in_bytes);
+ if (new_top > alloc_info->limit) return Failure::RetryAfterGC();
Object* obj = HeapObject::FromAddress(alloc_info->top);
alloc_info->top = new_top;
if (cur == kEnd) {
// No large enough size in list.
*wasted_bytes = 0;
- return Failure::RetryAfterGC(size_in_bytes, owner_);
+ return Failure::RetryAfterGC(owner_);
}
ASSERT(!FLAG_always_compact); // We only use the freelists with mark-sweep.
int rem = cur - index;
Object* FixedSizeFreeList::Allocate() {
if (head_ == NULL) {
- return Failure::RetryAfterGC(object_size_, owner_);
+ return Failure::RetryAfterGC(owner_);
}
ASSERT(!FLAG_always_compact); // We only use the freelists with mark-sweep.
// Check if we want to force a GC before growing the old space further.
// If so, fail the allocation.
if (!Heap::always_allocate() && Heap::OldGenerationAllocationLimitReached()) {
- return Failure::RetryAfterGC(requested_size, identity());
+ return Failure::RetryAfterGC(identity());
}
size_t chunk_size;
LargeObjectChunk* chunk =
LargeObjectChunk::New(requested_size, &chunk_size, executable);
if (chunk == NULL) {
- return Failure::RetryAfterGC(requested_size, identity());
+ return Failure::RetryAfterGC(identity());
}
size_ += static_cast<int>(chunk_size);
static Object* AllocateAfterFailures() {
static int attempts = 0;
- if (++attempts < 3) return Failure::RetryAfterGC(0);
+ if (++attempts < 3) return Failure::RetryAfterGC();
// New space.
NewSpace* new_space = Heap::new_space();
LocalContext env;
Local<String> source = String::New(two_byte_source);
// Trigger GCs so that the newly allocated string moves to old gen.
- i::Heap::CollectGarbage(0, i::NEW_SPACE); // in survivor space now
- i::Heap::CollectGarbage(0, i::NEW_SPACE); // in old gen now
+ i::Heap::CollectGarbage(i::NEW_SPACE); // in survivor space now
+ i::Heap::CollectGarbage(i::NEW_SPACE); // in old gen now
bool success = source->MakeExternal(new TestResource(two_byte_source));
CHECK(success);
Local<Script> script = Script::Compile(source);
LocalContext env;
Local<String> source = v8_str(c_source);
// Trigger GCs so that the newly allocated string moves to old gen.
- i::Heap::CollectGarbage(0, i::NEW_SPACE); // in survivor space now
- i::Heap::CollectGarbage(0, i::NEW_SPACE); // in old gen now
+ i::Heap::CollectGarbage(i::NEW_SPACE); // in survivor space now
+ i::Heap::CollectGarbage(i::NEW_SPACE); // in old gen now
bool success = source->MakeExternal(
new TestAsciiResource(i::StrDup(c_source)));
CHECK(success);
LocalContext env;
// Free some space in the new space so that we can check freshness.
- i::Heap::CollectGarbage(0, i::NEW_SPACE);
- i::Heap::CollectGarbage(0, i::NEW_SPACE);
+ i::Heap::CollectGarbage(i::NEW_SPACE);
+ i::Heap::CollectGarbage(i::NEW_SPACE);
uint16_t* two_byte_string = AsciiToTwoByteString("small");
Local<String> small_string = String::New(two_byte_string);
// We should refuse to externalize newly created small string.
CHECK(!small_string->CanMakeExternal());
// Trigger GCs so that the newly allocated string moves to old gen.
- i::Heap::CollectGarbage(0, i::NEW_SPACE); // in survivor space now
- i::Heap::CollectGarbage(0, i::NEW_SPACE); // in old gen now
+ i::Heap::CollectGarbage(i::NEW_SPACE); // in survivor space now
+ i::Heap::CollectGarbage(i::NEW_SPACE); // in old gen now
// Old space strings should be accepted.
CHECK(small_string->CanMakeExternal());
LocalContext env;
// Free some space in the new space so that we can check freshness.
- i::Heap::CollectGarbage(0, i::NEW_SPACE);
- i::Heap::CollectGarbage(0, i::NEW_SPACE);
+ i::Heap::CollectGarbage(i::NEW_SPACE);
+ i::Heap::CollectGarbage(i::NEW_SPACE);
Local<String> small_string = String::New("small");
// We should refuse to externalize newly created small string.
CHECK(!small_string->CanMakeExternal());
// Trigger GCs so that the newly allocated string moves to old gen.
- i::Heap::CollectGarbage(0, i::NEW_SPACE); // in survivor space now
- i::Heap::CollectGarbage(0, i::NEW_SPACE); // in old gen now
+ i::Heap::CollectGarbage(i::NEW_SPACE); // in survivor space now
+ i::Heap::CollectGarbage(i::NEW_SPACE); // in old gen now
// Old space strings should be accepted.
CHECK(small_string->CanMakeExternal());
String::NewExternal(new TestResource(two_byte_string));
i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
// Trigger GCs so that the newly allocated string moves to old gen.
- i::Heap::CollectGarbage(0, i::NEW_SPACE); // in survivor space now
- i::Heap::CollectGarbage(0, i::NEW_SPACE); // in old gen now
+ i::Heap::CollectGarbage(i::NEW_SPACE); // in survivor space now
+ i::Heap::CollectGarbage(i::NEW_SPACE); // in old gen now
i::Handle<i::String> isymbol = i::Factory::SymbolFromString(istring);
CHECK(isymbol->IsSymbol());
}
new TestAsciiResource(i::StrDup(one_byte_string)));
i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
// Trigger GCs so that the newly allocated string moves to old gen.
- i::Heap::CollectGarbage(0, i::NEW_SPACE); // in survivor space now
- i::Heap::CollectGarbage(0, i::NEW_SPACE); // in old gen now
+ i::Heap::CollectGarbage(i::NEW_SPACE); // in survivor space now
+ i::Heap::CollectGarbage(i::NEW_SPACE); // in old gen now
i::Handle<i::String> isymbol = i::Factory::SymbolFromString(istring);
CHECK(isymbol->IsSymbol());
}
Local<String> string =
String::NewExternal(new TestResource(two_byte_string));
i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
- i::Heap::CollectGarbage(0, i::NEW_SPACE);
+ i::Heap::CollectGarbage(i::NEW_SPACE);
in_new_space = i::Heap::InNewSpace(*istring);
CHECK(in_new_space || i::Heap::old_data_space()->Contains(*istring));
CHECK_EQ(0, TestResource::dispose_count);
}
- i::Heap::CollectGarbage(0, in_new_space ? i::NEW_SPACE : i::OLD_DATA_SPACE);
+ i::Heap::CollectGarbage(in_new_space ? i::NEW_SPACE : i::OLD_DATA_SPACE);
CHECK_EQ(1, TestResource::dispose_count);
}
Local<String> string = String::NewExternal(
new TestAsciiResource(i::StrDup(one_byte_string)));
i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
- i::Heap::CollectGarbage(0, i::NEW_SPACE);
+ i::Heap::CollectGarbage(i::NEW_SPACE);
in_new_space = i::Heap::InNewSpace(*istring);
CHECK(in_new_space || i::Heap::old_data_space()->Contains(*istring));
CHECK_EQ(0, TestAsciiResource::dispose_count);
}
- i::Heap::CollectGarbage(0, in_new_space ? i::NEW_SPACE : i::OLD_DATA_SPACE);
+ i::Heap::CollectGarbage(in_new_space ? i::NEW_SPACE : i::OLD_DATA_SPACE);
CHECK_EQ(1, TestAsciiResource::dispose_count);
}
break_point_hit_count++;
if (break_point_hit_count % 2 == 0) {
// Scavenge.
- Heap::CollectGarbage(0, v8::internal::NEW_SPACE);
+ Heap::CollectGarbage(v8::internal::NEW_SPACE);
} else {
// Mark sweep compact.
Heap::CollectAllGarbage(true);
// Run the garbage collector to enforce heap verification if option
// --verify-heap is set.
- Heap::CollectGarbage(0, v8::internal::NEW_SPACE);
+ Heap::CollectGarbage(v8::internal::NEW_SPACE);
// Set the break flag again to come back here as soon as possible.
v8::Debug::DebugBreak();
CHECK_EQ(1 + i * 3, break_point_hit_count);
// Scavenge and call function.
- Heap::CollectGarbage(0, v8::internal::NEW_SPACE);
+ Heap::CollectGarbage(v8::internal::NEW_SPACE);
f->Call(recv, 0, NULL);
CHECK_EQ(2 + i * 3, break_point_hit_count);
InitializeIfNeeded();
// A retry after a GC may pollute the counts, so perform gc now
// to avoid that.
- v8::internal::Heap::CollectGarbage(0, v8::internal::NEW_SPACE);
+ v8::internal::Heap::CollectGarbage(v8::internal::NEW_SPACE);
HandleScope scope;
TryCatch catcher;
catcher.SetVerbose(true);
int request = 24;
CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
CHECK(Smi::FromInt(42)->IsSmi());
- CHECK(Failure::RetryAfterGC(request, NEW_SPACE)->IsFailure());
- CHECK_EQ(request, Failure::RetryAfterGC(request, NEW_SPACE)->requested());
+ CHECK(Failure::RetryAfterGC(NEW_SPACE)->IsFailure());
CHECK_EQ(NEW_SPACE,
- Failure::RetryAfterGC(request, NEW_SPACE)->allocation_space());
+ Failure::RetryAfterGC(NEW_SPACE)->allocation_space());
CHECK_EQ(OLD_POINTER_SPACE,
- Failure::RetryAfterGC(request,
- OLD_POINTER_SPACE)->allocation_space());
+ Failure::RetryAfterGC(OLD_POINTER_SPACE)->allocation_space());
CHECK(Failure::Exception()->IsFailure());
CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
v8::HandleScope sc;
// Check GC.
- int free_bytes = Heap::MaxObjectSizeInPagedSpace();
- CHECK(Heap::CollectGarbage(free_bytes, NEW_SPACE));
+ Heap::CollectGarbage(NEW_SPACE);
Handle<String> name = Factory::LookupAsciiSymbol("theFunction");
Handle<String> prop_name = Factory::LookupAsciiSymbol("theSlot");
CHECK_EQ(Smi::FromInt(24), obj->GetProperty(*prop_namex));
}
- CHECK(Heap::CollectGarbage(free_bytes, NEW_SPACE));
+ Heap::CollectGarbage(NEW_SPACE);
// Function should be alive.
CHECK(Top::context()->global()->HasLocalProperty(*name));
}
// After gc, it should survive.
- CHECK(Heap::CollectGarbage(free_bytes, NEW_SPACE));
+ Heap::CollectGarbage(NEW_SPACE);
CHECK(Top::context()->global()->HasLocalProperty(*obj_name));
CHECK(Top::context()->global()->GetProperty(*obj_name)->IsJSObject());
}
// after gc, it should survive
- CHECK(Heap::CollectGarbage(0, NEW_SPACE));
+ Heap::CollectGarbage(NEW_SPACE);
CHECK((*h1)->IsString());
CHECK((*h2)->IsHeapNumber());
h2 = GlobalHandles::Create(*u);
}
- CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
- CHECK(Heap::CollectGarbage(0, NEW_SPACE));
+ Heap::CollectGarbage(OLD_POINTER_SPACE);
+ Heap::CollectGarbage(NEW_SPACE);
// Make sure the object is promoted.
GlobalHandles::MakeWeak(h2.location(),
CHECK(!GlobalHandles::IsNearDeath(h1.location()));
CHECK(!GlobalHandles::IsNearDeath(h2.location()));
- CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
+ Heap::CollectGarbage(OLD_POINTER_SPACE);
CHECK((*h1)->IsString());
CHECK(!WeakPointerCleared);
// Mark-compact treats weak reference properly.
- CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
+ Heap::CollectGarbage(OLD_POINTER_SPACE);
CHECK(WeakPointerCleared);
}
TEST(LargeObjectSpaceContains) {
InitializeVM();
- int free_bytes = Heap::MaxObjectSizeInPagedSpace();
- CHECK(Heap::CollectGarbage(free_bytes, NEW_SPACE));
+ Heap::CollectGarbage(NEW_SPACE);
Address current_top = Heap::new_space()->top();
Page* page = Page::FromAddress(current_top);
CHECK(Heap::InSpace(*array, NEW_SPACE));
// Call the m-c collector, so array becomes an old object.
- CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
+ Heap::CollectGarbage(OLD_POINTER_SPACE);
// Array now sits in the old space
CHECK(Heap::InSpace(*array, OLD_POINTER_SPACE));
v8::HandleScope sc;
// Do a mark compact GC to shrink the heap.
- CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
+ Heap::CollectGarbage(OLD_POINTER_SPACE);
// Allocate a big Fixed array in the new space.
int size = (Heap::MaxObjectSizeInPagedSpace() - FixedArray::kHeaderSize) /
}
// Call mark compact GC, and it should pass.
- CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
+ Heap::CollectGarbage(OLD_POINTER_SPACE);
// array should not be promoted because the old space is full.
CHECK(Heap::InSpace(*array, NEW_SPACE));
v8::HandleScope sc;
// call mark-compact when heap is empty
- CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
+ Heap::CollectGarbage(OLD_POINTER_SPACE);
// keep allocating garbage in new space until it fails
const int ARRAY_SIZE = 100;
do {
array = Heap::AllocateFixedArray(ARRAY_SIZE);
} while (!array->IsFailure());
- CHECK(Heap::CollectGarbage(0, NEW_SPACE));
+ Heap::CollectGarbage(NEW_SPACE);
array = Heap::AllocateFixedArray(ARRAY_SIZE);
CHECK(!array->IsFailure());
do {
mapp = Heap::AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
} while (!mapp->IsFailure());
- CHECK(Heap::CollectGarbage(0, MAP_SPACE));
+ Heap::CollectGarbage(MAP_SPACE);
mapp = Heap::AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
CHECK(!mapp->IsFailure());
Top::context()->global()->SetProperty(func_name, function, NONE);
JSObject* obj = JSObject::cast(Heap::AllocateJSObject(function));
- CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
+ Heap::CollectGarbage(OLD_POINTER_SPACE);
func_name = String::cast(Heap::LookupAsciiSymbol("theFunction"));
CHECK(Top::context()->global()->HasLocalProperty(func_name));
String* prop_name = String::cast(Heap::LookupAsciiSymbol("theSlot"));
obj->SetProperty(prop_name, Smi::FromInt(23), NONE);
- CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
+ Heap::CollectGarbage(OLD_POINTER_SPACE);
obj_name = String::cast(Heap::LookupAsciiSymbol("theObject"));
CHECK(Top::context()->global()->HasLocalProperty(obj_name));
CHECK_EQ(0, gc_starts);
CHECK_EQ(gc_ends, gc_starts);
- CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
+ Heap::CollectGarbage(OLD_POINTER_SPACE);
CHECK_EQ(1, gc_starts);
CHECK_EQ(gc_ends, gc_starts);
}
GlobalHandles::AddGroup(g2_objects, 2);
}
// Do a full GC
- CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
+ Heap::CollectGarbage(OLD_POINTER_SPACE);
// All object should be alive.
CHECK_EQ(0, NumberOfWeakCalls);
GlobalHandles::AddGroup(g2_objects, 2);
}
- CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
+ Heap::CollectGarbage(OLD_POINTER_SPACE);
// All objects should be gone. 5 global handles in total.
CHECK_EQ(5, NumberOfWeakCalls);