ifeq ($(objectprint), on)
GYPFLAGS += -Dv8_object_print=1
endif
+# verifyheap=on
+ifeq ($(verifyheap), on)
+ GYPFLAGS += -Dv8_enable_verify_heap=1
+endif
# snapshot=off
ifeq ($(snapshot), off)
GYPFLAGS += -Dv8_use_snapshot='false'
# Enable extra checks in API functions and other strategic places.
'v8_enable_extra_checks%': 1,
- 'v8_object_print%': 0,
-
'v8_enable_gdbjit%': 0,
+ 'v8_object_print%': 0,
+
# Enable profiling support. Only required on Windows.
'v8_enable_prof%': 0,
+ 'v8_enable_verify_heap%': 0,
+
# Some versions of GCC 4.5 seem to need -fno-strict-aliasing.
'v8_no_strict_aliasing%': 0,
['v8_enable_extra_checks==1', {
'defines': ['ENABLE_EXTRA_CHECKS',],
}],
+ ['v8_enable_gdbjit==1', {
+ 'defines': ['ENABLE_GDB_JIT_INTERFACE',],
+ }],
['v8_object_print==1', {
'defines': ['OBJECT_PRINT',],
}],
- ['v8_enable_gdbjit==1', {
- 'defines': ['ENABLE_GDB_JIT_INTERFACE',],
+ ['v8_enable_verify_heap==1', {
+ 'defines': ['VERIFY_HEAP',],
}],
['v8_interpreted_regexp==1', {
'defines': ['V8_INTERPRETED_REGEXP',],
'ENABLE_DISASSEMBLER',
'V8_ENABLE_CHECKS',
'OBJECT_PRINT',
+ 'VERIFY_HEAP',
],
'msvs_settings': {
'VCCLCompilerTool': {
#endif // ENABLE_DISASSEMBLER
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
void RelocInfo::Verify() {
switch (rmode_) {
case EMBEDDED_OBJECT:
case CODE_TARGET: {
// convert inline target address to code object
Address addr = target_address();
- ASSERT(addr != NULL);
+ CHECK(addr != NULL);
// Check that we can find the right code object.
Code* code = Code::GetCodeFromTargetAddress(addr);
Object* found = HEAP->FindCodeObject(addr);
- ASSERT(found->IsCode());
- ASSERT(code->address() == HeapObject::cast(found)->address());
+ CHECK(found->IsCode());
+ CHECK(code->address() == HeapObject::cast(found)->address());
break;
}
case RUNTIME_ENTRY:
break;
}
}
-#endif // DEBUG
+#endif // VERIFY_HEAP
// -----------------------------------------------------------------------------
static const char* RelocModeName(Mode rmode);
void Print(FILE* out);
#endif // ENABLE_DISASSEMBLER
-#ifdef DEBUG
- // Debugging
+#ifdef VERIFY_HEAP
void Verify();
#endif
"trace progress of the incremental marking")
DEFINE_bool(track_gc_object_stats, false,
"track object counts and memory usage")
+#ifdef VERIFY_HEAP
+DEFINE_bool(verify_heap, false, "verify heap pointers before and after GC")
+#endif
// v8.cc
DEFINE_bool(use_idle_notification, true,
DEFINE_bool(gc_verbose, false, "print stuff during garbage collection")
DEFINE_bool(heap_stats, false, "report heap statistics before and after GC")
DEFINE_bool(code_stats, false, "report code statistics after GC")
-DEFINE_bool(verify_heap, false, "verify heap pointers before and after GC")
DEFINE_bool(verify_native_context_separation, false,
"verify that code holds on to at most one native context after GC")
DEFINE_bool(print_handles, false, "report handles after GC")
void ExternalStringTable::ShrinkNewStrings(int position) {
new_space_strings_.Rewind(position);
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
Verify();
}
+#endif
}
}
-#ifdef DEBUG
void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) {
for (Object** current = start; current < end; current++) {
if ((*current)->IsHeapObject()) {
HeapObject* object = HeapObject::cast(*current);
- ASSERT(HEAP->Contains(object));
- ASSERT(object->map()->IsMap());
+ CHECK(HEAP->Contains(object));
+ CHECK(object->map()->IsMap());
}
}
}
-#endif
double GCTracer::SizeOfHeapObjects() {
ClearJSFunctionResultCaches();
gc_count_++;
unflattened_strings_length_ = 0;
-#ifdef DEBUG
- ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
- allow_allocation(false);
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
Verify();
}
+#endif
+
+#ifdef DEBUG
+ ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
+ allow_allocation(false);
if (FLAG_gc_verbose) Print();
-#endif // DEBUG
-#if defined(DEBUG)
ReportStatisticsBeforeGC();
#endif // DEBUG
void Heap::GarbageCollectionEpilogue() {
store_buffer()->GCEpilogue();
LiveObjectList::GCEpilogue();
-#ifdef DEBUG
- allow_allocation(true);
- ZapFromSpace();
+ // In release mode, we only zap the from space under heap verification.
+ if (Heap::ShouldZapGarbage()) {
+ ZapFromSpace();
+ }
+
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
Verify();
}
+#endif
+#ifdef DEBUG
+ allow_allocation(true);
if (FLAG_print_global_handles) isolate_->global_handles()->Print();
if (FLAG_print_handles) PrintHandles();
if (FLAG_gc_verbose) Print();
}
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
// Helper class for verifying the symbol table.
class SymbolTableVerifier : public ObjectVisitor {
public:
for (Object** p = start; p < end; p++) {
if ((*p)->IsHeapObject()) {
// Check that the symbol is actually a symbol.
- ASSERT((*p)->IsTheHole() || (*p)->IsUndefined() || (*p)->IsSymbol());
+ CHECK((*p)->IsTheHole() || (*p)->IsUndefined() || (*p)->IsSymbol());
}
}
}
};
-#endif // DEBUG
static void VerifySymbolTable() {
-#ifdef DEBUG
SymbolTableVerifier verifier;
HEAP->symbol_table()->IterateElements(&verifier);
-#endif // DEBUG
}
+#endif // VERIFY_HEAP
static bool AbortIncrementalMarkingAndCollectGarbage(
PROFILE(isolate_, CodeMovingGCEvent());
}
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
VerifySymbolTable();
}
+#endif
+
if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) {
ASSERT(!allocation_allowed_);
GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
global_gc_epilogue_callback_();
}
+
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
VerifySymbolTable();
}
+#endif
return next_gc_likely_to_collect_more;
}
};
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
// Visitor class to verify pointers in code or data space do not point into
// new space.
class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor {
void VisitPointers(Object** start, Object**end) {
for (Object** current = start; current < end; current++) {
if ((*current)->IsHeapObject()) {
- ASSERT(!HEAP->InNewSpace(HeapObject::cast(*current)));
+ CHECK(!HEAP->InNewSpace(HeapObject::cast(*current)));
}
}
}
object->Iterate(&v);
}
}
-#endif
+#endif // VERIFY_HEAP
void Heap::CheckNewSpaceExpansionCriteria() {
void Heap::Scavenge() {
RelocationLock relocation_lock(this);
-#ifdef DEBUG
+
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) VerifyNonPointerSpacePointers();
#endif
void Heap::UpdateNewSpaceReferencesInExternalStringTable(
ExternalStringTableUpdaterCallback updater_func) {
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
external_string_table_.Verify();
}
+#endif
if (external_string_table_.new_space_strings_.is_empty()) return;
}
ASSERT(buffer->IsFlat());
-#if DEBUG
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
buffer->StringVerify();
}
// through the self_reference parameter.
code->CopyFrom(desc);
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
code->Verify();
}
isolate_->code_range()->contains(code->address()));
new_code->Relocate(new_addr - old_addr);
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
code->Verify();
}
String::cast(result)->set_hash_field(String::kEmptyHashField);
ASSERT_EQ(size, HeapObject::cast(result)->Size());
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
// Initialize string's content to ensure ASCII-ness (character range 0-127)
// as required when verifying the heap.
char* dest = SeqAsciiString::cast(result)->GetChars();
memset(dest, 0x0F, length * kCharSize);
}
-#endif // DEBUG
+#endif
return result;
}
}
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
void Heap::Verify() {
- ASSERT(HasBeenSetUp());
+ CHECK(HasBeenSetUp());
store_buffer()->Verify();
lo_space_->Verify();
}
-
-
-#endif // DEBUG
+#endif
MaybeObject* Heap::LookupSymbol(Vector<const char> string) {
return symbol_table()->LookupSymbolIfExists(string, symbol);
}
-
-#ifdef DEBUG
void Heap::ZapFromSpace() {
NewSpacePageIterator it(new_space_.FromSpaceStart(),
new_space_.FromSpaceEnd());
}
}
}
-#endif // DEBUG
void Heap::IterateAndMarkPointersToFromSpace(Address start,
void Heap::TearDown() {
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
Verify();
}
#endif
+
if (FLAG_print_cumulative_gc_stat) {
PrintF("\n\n");
PrintF("gc_count=%d ", gc_count_);
old_space_strings_[last++] = old_space_strings_[i];
}
old_space_strings_.Rewind(last);
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
Verify();
}
+#endif
}
return &native_contexts_list_;
}
+#ifdef VERIFY_HEAP
+ // Verify the heap is in its normal state before or after a GC.
+ void Verify();
+#endif
+
#ifdef DEBUG
void Print();
void PrintHandles();
- // Verify the heap is in its normal state before or after a GC.
- void Verify();
-
void OldPointerSpaceCheckStoreBuffer();
void MapSpaceCheckStoreBuffer();
void LargeObjectSpaceCheckStoreBuffer();
// Report heap statistics.
void ReportHeapStatistics(const char* title);
void ReportCodeStatistics(const char* title);
+#endif
+
+  // Zapping overwrites dead objects with a recognizable filler pattern so
+  // the heap verifier (and crash dumps) can detect stale references.
+  // Always on in debug builds; in release builds only when the runtime
+  // --verify-heap flag is set (and only if VERIFY_HEAP was compiled in).
+  static inline bool ShouldZapGarbage() {
+#ifdef DEBUG
+    return true;
+#elif defined(VERIFY_HEAP)
+    return FLAG_verify_heap;
+#else
+    return false;
+#endif
+  }
// Fill in bogus values in from space
void ZapFromSpace();
-#endif
// Print short heap statistics.
void PrintShortHeapStatistics();
};
-#ifdef DEBUG
// Visitor class to verify interior pointers in spaces that do not contain
// or care about intergenerational references. All heap object pointers have to
// point into the heap to a location that has a map pointer at its first word.
public:
inline void VisitPointers(Object** start, Object** end);
};
-#endif
// Space iterator for iterating over all spaces of the heap.
ActivateIncrementalWriteBarrier();
-#ifdef DEBUG
// Marking bits are cleared by the sweeper.
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
heap_->mark_compact_collector()->VerifyMarkbitsAreClean();
}
encountered_weak_maps_(NULL) { }
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
class VerifyMarkingVisitor: public ObjectVisitor {
public:
void VisitPointers(Object** start, Object** end) {
for (Object** current = start; current < end; current++) {
if ((*current)->IsHeapObject()) {
HeapObject* object = HeapObject::cast(*current);
- ASSERT(HEAP->mark_compact_collector()->IsMarked(object));
+ CHECK(HEAP->mark_compact_collector()->IsMarked(object));
}
}
}
current += kPointerSize) {
object = HeapObject::FromAddress(current);
if (MarkCompactCollector::IsMarked(object)) {
- ASSERT(current >= next_object_must_be_here_or_later);
+ CHECK(current >= next_object_must_be_here_or_later);
object->Iterate(&visitor);
next_object_must_be_here_or_later = current + object->Size();
}
NewSpacePageIterator it(space->bottom(), end);
// The bottom position is at the start of its page. Allows us to use
// page->area_start() as start of range on all pages.
- ASSERT_EQ(space->bottom(),
+ CHECK_EQ(space->bottom(),
NewSpacePage::FromAddress(space->bottom())->area_start());
while (it.has_next()) {
NewSpacePage* page = it.next();
Address limit = it.has_next() ? page->area_end() : end;
- ASSERT(limit == end || !page->Contains(end));
+ CHECK(limit == end || !page->Contains(end));
VerifyMarking(page->area_start(), limit);
}
}
current += kPointerSize) {
object = HeapObject::FromAddress(current);
if (MarkCompactCollector::IsMarked(object)) {
- ASSERT(current >= next_object_must_be_here_or_later);
+ CHECK(current >= next_object_must_be_here_or_later);
object->Iterate(&visitor);
next_object_must_be_here_or_later = current + object->Size();
}
NewSpacePage* page = it.next();
Address current = page->area_start();
Address limit = it.has_next() ? page->area_end() : space->top();
- ASSERT(limit == space->top() || !page->Contains(space->top()));
+ CHECK(limit == space->top() || !page->Contains(space->top()));
while (current < limit) {
HeapObject* object = HeapObject::FromAddress(current);
object->Iterate(&visitor);
VerifyEvacuationVisitor visitor;
heap->IterateStrongRoots(&visitor, VISIT_ALL);
}
+#endif // VERIFY_HEAP
+#ifdef DEBUG
class VerifyNativeContextSeparationVisitor: public ObjectVisitor {
public:
VerifyNativeContextSeparationVisitor() : current_native_context_(NULL) {}
ClearWeakMaps();
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
VerifyMarking(heap_);
}
}
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) {
PageIterator it(space);
}
}
+
void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) {
NewSpacePageIterator it(space->bottom(), space->top());
}
}
+
void MarkCompactCollector::VerifyMarkbitsAreClean() {
VerifyMarkbitsAreClean(heap_->old_pointer_space());
VerifyMarkbitsAreClean(heap_->old_data_space());
LargeObjectIterator it(heap_->lo_space());
for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
MarkBit mark_bit = Marking::MarkBitFrom(obj);
- ASSERT(Marking::IsWhite(mark_bit));
- ASSERT_EQ(0, Page::FromAddress(obj->address())->LiveBytes());
+ CHECK(Marking::IsWhite(mark_bit));
+ CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes());
}
}
-#endif
+#endif // VERIFY_HEAP
static void ClearMarkbitsInPagedSpace(PagedSpace* space) {
space->PrepareForMarkCompact();
}
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
if (!was_marked_incrementally_ && FLAG_verify_heap) {
VerifyMarkbitsAreClean();
}
heap_->isolate()->inner_pointer_to_code_cache()->Flush();
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
VerifyEvacuation(heap_);
}
PRECISE
};
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
void VerifyMarkbitsAreClean();
static void VerifyMarkbitsAreClean(PagedSpace* space);
static void VerifyMarkbitsAreClean(NewSpace* space);
namespace v8 {
namespace internal {
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
void MaybeObject::Verify() {
Object* this_as_object;
if (p->IsHeapObject()) {
HeapObject::VerifyHeapPointer(p);
} else {
- ASSERT(p->IsSmi());
+ CHECK(p->IsSmi());
}
}
void Smi::SmiVerify() {
- ASSERT(IsSmi());
+ CHECK(IsSmi());
}
void Failure::FailureVerify() {
- ASSERT(IsFailure());
+ CHECK(IsFailure());
}
void HeapObject::VerifyHeapPointer(Object* p) {
- ASSERT(p->IsHeapObject());
- ASSERT(HEAP->Contains(HeapObject::cast(p)));
+ CHECK(p->IsHeapObject());
+ CHECK(HEAP->Contains(HeapObject::cast(p)));
}
void HeapNumber::HeapNumberVerify() {
- ASSERT(IsHeapNumber());
+ CHECK(IsHeapNumber());
}
void ByteArray::ByteArrayVerify() {
- ASSERT(IsByteArray());
+ CHECK(IsByteArray());
}
void FreeSpace::FreeSpaceVerify() {
- ASSERT(IsFreeSpace());
+ CHECK(IsFreeSpace());
}
void ExternalPixelArray::ExternalPixelArrayVerify() {
- ASSERT(IsExternalPixelArray());
+ CHECK(IsExternalPixelArray());
}
void ExternalByteArray::ExternalByteArrayVerify() {
- ASSERT(IsExternalByteArray());
+ CHECK(IsExternalByteArray());
}
void ExternalUnsignedByteArray::ExternalUnsignedByteArrayVerify() {
- ASSERT(IsExternalUnsignedByteArray());
+ CHECK(IsExternalUnsignedByteArray());
}
void ExternalShortArray::ExternalShortArrayVerify() {
- ASSERT(IsExternalShortArray());
+ CHECK(IsExternalShortArray());
}
void ExternalUnsignedShortArray::ExternalUnsignedShortArrayVerify() {
- ASSERT(IsExternalUnsignedShortArray());
+ CHECK(IsExternalUnsignedShortArray());
}
void ExternalIntArray::ExternalIntArrayVerify() {
- ASSERT(IsExternalIntArray());
+ CHECK(IsExternalIntArray());
}
void ExternalUnsignedIntArray::ExternalUnsignedIntArrayVerify() {
- ASSERT(IsExternalUnsignedIntArray());
+ CHECK(IsExternalUnsignedIntArray());
}
void ExternalFloatArray::ExternalFloatArrayVerify() {
- ASSERT(IsExternalFloatArray());
+ CHECK(IsExternalFloatArray());
}
void ExternalDoubleArray::ExternalDoubleArrayVerify() {
- ASSERT(IsExternalDoubleArray());
+ CHECK(IsExternalDoubleArray());
}
VerifyHeapPointer(elements());
if (GetElementsKind() == NON_STRICT_ARGUMENTS_ELEMENTS) {
- ASSERT(this->elements()->IsFixedArray());
- ASSERT(this->elements()->length() >= 2);
+ CHECK(this->elements()->IsFixedArray());
+ CHECK_GE(this->elements()->length(), 2);
}
if (HasFastProperties()) {
(map()->inobject_properties() + properties()->length() -
map()->NextFreePropertyIndex()));
}
- ASSERT_EQ((map()->has_fast_smi_or_object_elements() ||
+ CHECK_EQ((map()->has_fast_smi_or_object_elements() ||
(elements() == GetHeap()->empty_fixed_array())),
(elements()->map() == GetHeap()->fixed_array_map() ||
elements()->map() == GetHeap()->fixed_cow_array_map()));
- ASSERT(map()->has_fast_object_elements() == HasFastObjectElements());
+ CHECK(map()->has_fast_object_elements() == HasFastObjectElements());
}
void Map::MapVerify() {
- ASSERT(!HEAP->InNewSpace(this));
- ASSERT(FIRST_TYPE <= instance_type() && instance_type() <= LAST_TYPE);
- ASSERT(instance_size() == kVariableSizeSentinel ||
+ CHECK(!HEAP->InNewSpace(this));
+ CHECK(FIRST_TYPE <= instance_type() && instance_type() <= LAST_TYPE);
+ CHECK(instance_size() == kVariableSizeSentinel ||
(kPointerSize <= instance_size() &&
instance_size() < HEAP->Capacity()));
VerifyHeapPointer(prototype());
VerifyHeapPointer(instance_descriptors());
DescriptorArray* descriptors = instance_descriptors();
for (int i = 0; i < NumberOfOwnDescriptors(); ++i) {
- ASSERT_EQ(i, descriptors->GetDetails(i).descriptor_index() - 1);
+ CHECK_EQ(i, descriptors->GetDetails(i).descriptor_index() - 1);
}
SLOW_ASSERT(instance_descriptors()->IsSortedNoDuplicates());
if (HasTransitionArray()) {
void Map::SharedMapVerify() {
MapVerify();
- ASSERT(is_shared());
- ASSERT(instance_descriptors()->IsEmpty());
- ASSERT_EQ(0, pre_allocated_property_fields());
- ASSERT_EQ(0, unused_property_fields());
- ASSERT_EQ(StaticVisitorBase::GetVisitorId(instance_type(), instance_size()),
+ CHECK(is_shared());
+ CHECK(instance_descriptors()->IsEmpty());
+ CHECK_EQ(0, pre_allocated_property_fields());
+ CHECK_EQ(0, unused_property_fields());
+ CHECK_EQ(StaticVisitorBase::GetVisitorId(instance_type(), instance_size()),
visitor_id());
}
void CodeCache::CodeCacheVerify() {
VerifyHeapPointer(default_cache());
VerifyHeapPointer(normal_type_cache());
- ASSERT(default_cache()->IsFixedArray());
- ASSERT(normal_type_cache()->IsUndefined()
+ CHECK(default_cache()->IsFixedArray());
+ CHECK(normal_type_cache()->IsUndefined()
|| normal_type_cache()->IsCodeCacheHashTable());
}
void PolymorphicCodeCache::PolymorphicCodeCacheVerify() {
VerifyHeapPointer(cache());
- ASSERT(cache()->IsUndefined() || cache()->IsPolymorphicCodeCacheHashTable());
+ CHECK(cache()->IsUndefined() || cache()->IsPolymorphicCodeCacheHashTable());
}
for (int i = 0; i < length(); i++) {
if (!is_the_hole(i)) {
double value = get_scalar(i);
- ASSERT(!isnan(value) ||
+ CHECK(!isnan(value) ||
(BitCast<uint64_t>(value) ==
BitCast<uint64_t>(canonical_not_the_hole_nan_as_double())) ||
((BitCast<uint64_t>(value) & Double::kSignMask) != 0));
VerifyHeapPointer(to_string());
Object* number = to_number();
if (number->IsHeapObject()) {
- ASSERT(number == HEAP->nan_value());
+ CHECK(number == HEAP->nan_value());
} else {
- ASSERT(number->IsSmi());
+ CHECK(number->IsSmi());
int value = Smi::cast(number)->value();
// Hidden oddballs have negative smis.
const int kLeastHiddenOddballNumber = -4;
- ASSERT(value <= 1);
- ASSERT(value >= kLeastHiddenOddballNumber);
+ CHECK_LE(value, 1);
+ CHECK(value >= kLeastHiddenOddballNumber);
}
}
void JSArray::JSArrayVerify() {
JSObjectVerify();
- ASSERT(length()->IsNumber() || length()->IsUndefined());
- ASSERT(elements()->IsUndefined() ||
+ CHECK(length()->IsNumber() || length()->IsUndefined());
+ CHECK(elements()->IsUndefined() ||
elements()->IsFixedArray() ||
elements()->IsFixedDoubleArray());
}
CHECK(IsJSSet());
JSObjectVerify();
VerifyHeapPointer(table());
- ASSERT(table()->IsHashTable() || table()->IsUndefined());
+ CHECK(table()->IsHashTable() || table()->IsUndefined());
}
CHECK(IsJSMap());
JSObjectVerify();
VerifyHeapPointer(table());
- ASSERT(table()->IsHashTable() || table()->IsUndefined());
+ CHECK(table()->IsHashTable() || table()->IsUndefined());
}
CHECK(IsJSWeakMap());
JSObjectVerify();
VerifyHeapPointer(table());
- ASSERT(table()->IsHashTable() || table()->IsUndefined());
+ CHECK(table()->IsHashTable() || table()->IsUndefined());
}
void JSRegExp::JSRegExpVerify() {
JSObjectVerify();
- ASSERT(data()->IsUndefined() || data()->IsFixedArray());
+ CHECK(data()->IsUndefined() || data()->IsFixedArray());
switch (TypeTag()) {
case JSRegExp::ATOM: {
FixedArray* arr = FixedArray::cast(data());
- ASSERT(arr->get(JSRegExp::kAtomPatternIndex)->IsString());
+ CHECK(arr->get(JSRegExp::kAtomPatternIndex)->IsString());
break;
}
case JSRegExp::IRREGEXP: {
// Smi : Not compiled yet (-1) or code prepared for flushing.
// JSObject: Compilation error.
// Code/ByteArray: Compiled code.
- ASSERT(ascii_data->IsSmi() ||
+ CHECK(ascii_data->IsSmi() ||
(is_native ? ascii_data->IsCode() : ascii_data->IsByteArray()));
Object* uc16_data = arr->get(JSRegExp::kIrregexpUC16CodeIndex);
- ASSERT(uc16_data->IsSmi() ||
+ CHECK(uc16_data->IsSmi() ||
(is_native ? uc16_data->IsCode() : uc16_data->IsByteArray()));
Object* ascii_saved = arr->get(JSRegExp::kIrregexpASCIICodeSavedIndex);
- ASSERT(ascii_saved->IsSmi() || ascii_saved->IsString() ||
+ CHECK(ascii_saved->IsSmi() || ascii_saved->IsString() ||
ascii_saved->IsCode());
Object* uc16_saved = arr->get(JSRegExp::kIrregexpUC16CodeSavedIndex);
- ASSERT(uc16_saved->IsSmi() || uc16_saved->IsString() ||
+ CHECK(uc16_saved->IsSmi() || uc16_saved->IsString() ||
uc16_saved->IsCode());
- ASSERT(arr->get(JSRegExp::kIrregexpCaptureCountIndex)->IsSmi());
- ASSERT(arr->get(JSRegExp::kIrregexpMaxRegisterCountIndex)->IsSmi());
+ CHECK(arr->get(JSRegExp::kIrregexpCaptureCountIndex)->IsSmi());
+ CHECK(arr->get(JSRegExp::kIrregexpMaxRegisterCountIndex)->IsSmi());
break;
}
default:
- ASSERT_EQ(JSRegExp::NOT_COMPILED, TypeTag());
- ASSERT(data()->IsUndefined());
+ CHECK_EQ(JSRegExp::NOT_COMPILED, TypeTag());
+ CHECK(data()->IsUndefined());
break;
}
}
void JSProxy::JSProxyVerify() {
CHECK(IsJSProxy());
VerifyPointer(handler());
- ASSERT(hash()->IsSmi() || hash()->IsUndefined());
+ CHECK(hash()->IsSmi() || hash()->IsUndefined());
}
void Foreign::ForeignVerify() {
- ASSERT(IsForeign());
+ CHECK(IsForeign());
}
}
+void JSFunctionResultCache::JSFunctionResultCacheVerify() {
+  JSFunction::cast(get(kFactoryIndex))->Verify();
+
+  // The cache size must cover at least the fixed header slots, fit inside
+  // the backing store, and consist of whole entries.
+  int size = Smi::cast(get(kCacheSizeIndex))->value();
+  CHECK_LE(kEntriesIndex, size);
+  CHECK_LE(size, length());
+  CHECK_EQ(0, size % kEntrySize);
+
+  // The finger points at a valid entry before the end, except when the
+  // cache is empty (size == kEntriesIndex) and the finger sits at the start.
+  int finger = Smi::cast(get(kFingerIndex))->value();
+  CHECK_LE(kEntriesIndex, finger);
+  CHECK((finger < size) || (finger == kEntriesIndex && finger == size));
+  CHECK_EQ(0, finger % kEntrySize);
+
+  if (FLAG_enable_slow_asserts) {
+    // Live entries must be populated; the tail past |size| must be holes.
+    for (int i = kEntriesIndex; i < size; i++) {
+      CHECK(!get(i)->IsTheHole());
+      get(i)->Verify();
+    }
+    for (int i = size; i < length(); i++) {
+      CHECK(get(i)->IsTheHole());
+      get(i)->Verify();
+    }
+  }
+}
+
+
+void NormalizedMapCache::NormalizedMapCacheVerify() {
+  FixedArray::cast(this)->Verify();
+  if (FLAG_enable_slow_asserts) {
+    for (int i = 0; i < length(); i++) {
+      Object* e = get(i);
+      // Each slot holds either a cached shared map or the undefined
+      // sentinel marking an empty slot.
+      if (e->IsMap()) {
+        Map::cast(e)->SharedMapVerify();
+      } else {
+        CHECK(e->IsUndefined());
+      }
+    }
+  }
+}
+
+
#ifdef ENABLE_DEBUGGER_SUPPORT
void DebugInfo::DebugInfoVerify() {
CHECK(IsDebugInfo());
VerifyPointer(break_point_objects());
}
#endif // ENABLE_DEBUGGER_SUPPORT
+#endif // VERIFY_HEAP
+#ifdef DEBUG
void JSObject::IncrementSpillStatistics(SpillInformation* info) {
info->number_of_objects_++;
}
-void JSFunctionResultCache::JSFunctionResultCacheVerify() {
- JSFunction::cast(get(kFactoryIndex))->Verify();
-
- int size = Smi::cast(get(kCacheSizeIndex))->value();
- ASSERT(kEntriesIndex <= size);
- ASSERT(size <= length());
- ASSERT_EQ(0, size % kEntrySize);
-
- int finger = Smi::cast(get(kFingerIndex))->value();
- ASSERT(kEntriesIndex <= finger);
- ASSERT((finger < size) || (finger == kEntriesIndex && finger == size));
- ASSERT_EQ(0, finger % kEntrySize);
-
- if (FLAG_enable_slow_asserts) {
- for (int i = kEntriesIndex; i < size; i++) {
- ASSERT(!get(i)->IsTheHole());
- get(i)->Verify();
- }
- for (int i = size; i < length(); i++) {
- ASSERT(get(i)->IsTheHole());
- get(i)->Verify();
- }
- }
-}
-
-
-void NormalizedMapCache::NormalizedMapCacheVerify() {
- FixedArray::cast(this)->Verify();
- if (FLAG_enable_slow_asserts) {
- for (int i = 0; i < length(); i++) {
- Object* e = get(i);
- if (e->IsMap()) {
- Map::cast(e)->SharedMapVerify();
- } else {
- ASSERT(e->IsUndefined());
- }
- }
- }
-}
-
-
-void Map::ZapTransitions() {
- TransitionArray* transition_array = transitions();
- MemsetPointer(transition_array->data_start(),
- GetHeap()->the_hole_value(),
- transition_array->length());
-}
-
-
-void Map::ZapPrototypeTransitions() {
- FixedArray* proto_transitions = GetPrototypeTransitions();
- MemsetPointer(proto_transitions->data_start(),
- GetHeap()->the_hole_value(),
- proto_transitions->length());
-}
-
-
#endif // DEBUG
} } // namespace v8::internal
% JSFunctionResultCache::kEntrySize != 0) {
return false;
}
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
reinterpret_cast<JSFunctionResultCache*>(this)->
JSFunctionResultCacheVerify();
if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
return false;
}
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
}
}
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
void HeapObject::VerifyObjectField(int offset) {
VerifyPointer(READ_FIELD(this, offset));
}
void HeapObject::VerifySmiField(int offset) {
- ASSERT(READ_FIELD(this, offset)->IsSmi());
+ CHECK(READ_FIELD(this, offset)->IsSmi());
}
#endif
void Map::ClearTransitions(Heap* heap, WriteBarrierMode mode) {
Object* back_pointer = GetBackPointer();
-#ifdef DEBUG
- Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
- if (object->IsTransitionArray()) {
+
+ if (Heap::ShouldZapGarbage() && HasTransitionArray()) {
ZapTransitions();
- } else {
- ASSERT(object->IsMap() || object->IsUndefined());
}
-#endif
+
WRITE_FIELD(this, kTransitionsOrBackPointerOffset, back_pointer);
CONDITIONAL_WRITE_BARRIER(
heap, this, kTransitionsOrBackPointerOffset, back_pointer, mode);
void Map::set_transitions(TransitionArray* transition_array,
WriteBarrierMode mode) {
-#ifdef DEBUG
- if (HasTransitionArray()) {
- ASSERT(transitions() != transition_array);
+ // In release mode, only run this code if verify_heap is on.
+ if (Heap::ShouldZapGarbage() && HasTransitionArray()) {
+ CHECK(transitions() != transition_array);
ZapTransitions();
}
-#endif
WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array);
CONDITIONAL_WRITE_BARRIER(
Address new_end = elms->address() + FixedArray::SizeFor(len - to_trim);
- if (trim_mode == FROM_GC) {
-#ifdef DEBUG
- ZapEndOfFixedArray(new_end, to_trim);
-#endif
- } else {
- ZapEndOfFixedArray(new_end, to_trim);
+ if (trim_mode != FROM_GC || Heap::ShouldZapGarbage()) {
+ ZapEndOfFixedArray(new_end, to_trim);
}
int size_delta = to_trim * kPointerSize;
Object* result = get(index);
if (result->IsMap() &&
Map::cast(result)->EquivalentToForNormalization(fast, mode)) {
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
Map::cast(result)->SharedMapVerify();
}
+#endif
+#ifdef DEBUG
if (FLAG_enable_slow_asserts) {
// The cached map should match newly created normalized map bit-by-bit,
// except for the code cache, which can contain some ics which can be
result->set_is_shared(sharing == SHARED_NORMALIZED_MAP);
result->set_dictionary_map(true);
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap && result->is_shared()) {
result->SharedMapVerify();
}
}
+void Map::ZapTransitions() {
+ TransitionArray* transition_array = transitions();
+ MemsetPointer(transition_array->data_start(),
+ GetHeap()->the_hole_value(),
+ transition_array->length());
+}
+
+
+void Map::ZapPrototypeTransitions() {
+ FixedArray* proto_transitions = GetPrototypeTransitions();
+ MemsetPointer(proto_transitions->data_start(),
+ GetHeap()->the_hole_value(),
+ proto_transitions->length());
+}
+
+
MaybeObject* JSReceiver::SetPrototype(Object* value,
bool skip_hidden_prototypes) {
#ifdef DEBUG
// A template-ized version of the IsXXX functions.
template <class C> static inline bool Is(Object* obj);
+#ifdef VERIFY_HEAP
+#define DECLARE_VERIFIER(Name) void Name##Verify();
+#else
+#define DECLARE_VERIFIER(Name)
+#endif
class MaybeObject BASE_EMBEDDED {
public:
void Print(FILE* out);
void PrintLn(FILE* out);
#endif
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
// Verifies the object.
void Verify();
#endif
// < the length of the string. Used to implement [] on strings.
inline bool IsStringObjectWithCharacterAt(uint32_t index);
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
// Verify a pointer is a valid object pointer.
static void VerifyPointer(Object* p);
#endif
}
void SmiPrint(FILE* out);
void SmiPrint(StringStream* accumulator);
-#ifdef DEBUG
- void SmiVerify();
-#endif
+
+ DECLARE_VERIFIER(Smi)
static const int kMinValue =
(static_cast<unsigned int>(-1)) << (kSmiValueSize - 1);
}
void FailurePrint(FILE* out);
void FailurePrint(StringStream* accumulator);
-#ifdef DEBUG
- void FailureVerify();
-#endif
+
+ DECLARE_VERIFIER(Failure)
private:
inline intptr_t value() const;
void HeapObjectPrint(FILE* out);
void PrintHeader(FILE* out, const char* id);
#endif
-
-#ifdef DEBUG
- void HeapObjectVerify();
+ DECLARE_VERIFIER(HeapObject)
+#ifdef VERIFY_HEAP
inline void VerifyObjectField(int offset);
inline void VerifySmiField(int offset);
}
void HeapNumberPrint(FILE* out);
void HeapNumberPrint(StringStream* accumulator);
-#ifdef DEBUG
- void HeapNumberVerify();
-#endif
+ DECLARE_VERIFIER(HeapNumber)
inline int get_exponent();
inline int get_sign();
}
void JSObjectPrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSObjectVerify();
-#endif
+ DECLARE_VERIFIER(JSObject)
#ifdef OBJECT_PRINT
inline void PrintProperties() {
PrintProperties(stdout);
}
void FixedArrayPrint(FILE* out);
#endif
+ DECLARE_VERIFIER(FixedArray)
#ifdef DEBUG
- void FixedArrayVerify();
// Checks if two FixedArrays have identical contents.
bool IsEqualTo(FixedArray* other);
#endif
}
void FixedDoubleArrayPrint(FILE* out);
#endif
-
-#ifdef DEBUG
- void FixedDoubleArrayVerify();
-#endif
+ DECLARE_VERIFIER(FixedDoubleArray)
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(FixedDoubleArray);
// Casting
static inline JSFunctionResultCache* cast(Object* obj);
-#ifdef DEBUG
- void JSFunctionResultCacheVerify();
-#endif
+ DECLARE_VERIFIER(JSFunctionResultCache)
};
// Casting
static inline NormalizedMapCache* cast(Object* obj);
-#ifdef DEBUG
- void NormalizedMapCacheVerify();
-#endif
+ DECLARE_VERIFIER(NormalizedMapCache)
};
}
void ByteArrayPrint(FILE* out);
#endif
-#ifdef DEBUG
- void ByteArrayVerify();
-#endif
+ DECLARE_VERIFIER(ByteArray)
// Layout description.
static const int kAlignedSize = OBJECT_POINTER_ALIGN(kHeaderSize);
}
void FreeSpacePrint(FILE* out);
#endif
-#ifdef DEBUG
- void FreeSpaceVerify();
-#endif
+ DECLARE_VERIFIER(FreeSpace)
// Layout description.
// Size is smi tagged when it is stored.
}
void ExternalPixelArrayPrint(FILE* out);
#endif
-#ifdef DEBUG
- void ExternalPixelArrayVerify();
-#endif // DEBUG
+ DECLARE_VERIFIER(ExternalPixelArray)
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(ExternalPixelArray);
}
void ExternalByteArrayPrint(FILE* out);
#endif
-#ifdef DEBUG
- void ExternalByteArrayVerify();
-#endif // DEBUG
+ DECLARE_VERIFIER(ExternalByteArray)
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(ExternalByteArray);
}
void ExternalUnsignedByteArrayPrint(FILE* out);
#endif
-#ifdef DEBUG
- void ExternalUnsignedByteArrayVerify();
-#endif // DEBUG
+ DECLARE_VERIFIER(ExternalUnsignedByteArray)
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(ExternalUnsignedByteArray);
}
void ExternalShortArrayPrint(FILE* out);
#endif
-#ifdef DEBUG
- void ExternalShortArrayVerify();
-#endif // DEBUG
+ DECLARE_VERIFIER(ExternalShortArray)
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(ExternalShortArray);
}
void ExternalUnsignedShortArrayPrint(FILE* out);
#endif
-#ifdef DEBUG
- void ExternalUnsignedShortArrayVerify();
-#endif // DEBUG
+ DECLARE_VERIFIER(ExternalUnsignedShortArray)
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(ExternalUnsignedShortArray);
}
void ExternalIntArrayPrint(FILE* out);
#endif
-#ifdef DEBUG
- void ExternalIntArrayVerify();
-#endif // DEBUG
+ DECLARE_VERIFIER(ExternalIntArray)
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(ExternalIntArray);
}
void ExternalUnsignedIntArrayPrint(FILE* out);
#endif
-#ifdef DEBUG
- void ExternalUnsignedIntArrayVerify();
-#endif // DEBUG
+ DECLARE_VERIFIER(ExternalUnsignedIntArray)
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(ExternalUnsignedIntArray);
}
void ExternalFloatArrayPrint(FILE* out);
#endif
-#ifdef DEBUG
- void ExternalFloatArrayVerify();
-#endif // DEBUG
+ DECLARE_VERIFIER(ExternalFloatArray)
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(ExternalFloatArray);
}
void ExternalDoubleArrayPrint(FILE* out);
#endif // OBJECT_PRINT
-#ifdef DEBUG
- void ExternalDoubleArrayVerify();
-#endif // DEBUG
+ DECLARE_VERIFIER(ExternalDoubleArray)
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(ExternalDoubleArray);
}
void CodePrint(FILE* out);
#endif
-#ifdef DEBUG
- void CodeVerify();
-#endif
+ DECLARE_VERIFIER(Code)
+
void ClearInlineCaches();
void ClearTypeFeedbackCells(Heap* heap);
Handle<Map> FindTransitionedMap(MapHandleList* candidates);
Map* FindTransitionedMap(MapList* candidates);
- // Zaps the contents of backing data structures in debug mode. Note that the
+ // Zaps the contents of backing data structures. Note that the
// heap verifier (i.e. VerifyMarkingVisitor) relies on zapping of objects
// holding weak references when incremental marking is used, because it also
// iterates over objects that are otherwise unreachable.
-#ifdef DEBUG
+ // In general we only want to call these functions in release mode when
+ // heap verification is turned on.
void ZapPrototypeTransitions();
void ZapTransitions();
-#endif
// Dispatched behavior.
#ifdef OBJECT_PRINT
}
void MapPrint(FILE* out);
#endif
-#ifdef DEBUG
- void MapVerify();
+ DECLARE_VERIFIER(Map)
+
+#ifdef VERIFY_HEAP
void SharedMapVerify();
#endif
}
void ScriptPrint(FILE* out);
#endif
-#ifdef DEBUG
- void ScriptVerify();
-#endif
+ DECLARE_VERIFIER(Script)
static const int kSourceOffset = HeapObject::kHeaderSize;
static const int kNameOffset = kSourceOffset + kPointerSize;
}
void SharedFunctionInfoPrint(FILE* out);
#endif
-#ifdef DEBUG
- void SharedFunctionInfoVerify();
-#endif
+ DECLARE_VERIFIER(SharedFunctionInfo)
void ResetForNewContext(int new_ic_age);
}
void JSModulePrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSModuleVerify();
-#endif
+ DECLARE_VERIFIER(JSModule)
// Layout description.
static const int kContextOffset = JSObject::kHeaderSize;
}
void JSFunctionPrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSFunctionVerify();
-#endif
+ DECLARE_VERIFIER(JSFunction)
// Returns the number of allocated literals.
inline int NumberOfLiterals();
}
void JSGlobalProxyPrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSGlobalProxyVerify();
-#endif
+ DECLARE_VERIFIER(JSGlobalProxy)
// Layout description.
static const int kNativeContextOffset = JSObject::kHeaderSize;
}
void JSGlobalObjectPrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSGlobalObjectVerify();
-#endif
+ DECLARE_VERIFIER(JSGlobalObject)
// Layout description.
static const int kSize = GlobalObject::kHeaderSize;
}
void JSBuiltinsObjectPrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSBuiltinsObjectVerify();
-#endif
+ DECLARE_VERIFIER(JSBuiltinsObject)
// Layout description. The size of the builtins object includes
// room for two pointers per runtime routine written in javascript
}
void JSValuePrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSValueVerify();
-#endif
+ DECLARE_VERIFIER(JSValue)
// Layout description.
static const int kValueOffset = JSObject::kHeaderSize;
}
void JSDatePrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSDateVerify();
-#endif
+ DECLARE_VERIFIER(JSDate)
+
// The order is important. It must be kept in sync with date macros
// in macros.py.
enum FieldIndex {
}
void JSMessageObjectPrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSMessageObjectVerify();
-#endif
+ DECLARE_VERIFIER(JSMessageObject)
// Layout description.
static const int kTypeOffset = JSObject::kHeaderSize;
static inline JSRegExp* cast(Object* obj);
// Dispatched behavior.
-#ifdef DEBUG
- void JSRegExpVerify();
-#endif
+ DECLARE_VERIFIER(JSRegExp)
static const int kDataOffset = JSObject::kHeaderSize;
static const int kSize = kDataOffset + kPointerSize;
}
void CodeCachePrint(FILE* out);
#endif
-#ifdef DEBUG
- void CodeCacheVerify();
-#endif
+ DECLARE_VERIFIER(CodeCache)
static const int kDefaultCacheOffset = HeapObject::kHeaderSize;
static const int kNormalTypeCacheOffset =
}
void PolymorphicCodeCachePrint(FILE* out);
#endif
-#ifdef DEBUG
- void PolymorphicCodeCacheVerify();
-#endif
+ DECLARE_VERIFIER(PolymorphicCodeCache)
static const int kCacheOffset = HeapObject::kHeaderSize;
static const int kSize = kCacheOffset + kPointerSize;
}
void TypeFeedbackInfoPrint(FILE* out);
#endif
-#ifdef DEBUG
- void TypeFeedbackInfoVerify();
-#endif
+ DECLARE_VERIFIER(TypeFeedbackInfo)
static const int kStorage1Offset = HeapObject::kHeaderSize;
static const int kStorage2Offset = kStorage1Offset + kPointerSize;
}
void AliasedArgumentsEntryPrint(FILE* out);
#endif
-#ifdef DEBUG
- void AliasedArgumentsEntryVerify();
-#endif
+ DECLARE_VERIFIER(AliasedArgumentsEntry)
static const int kAliasedContextSlot = HeapObject::kHeaderSize;
static const int kSize = kAliasedContextSlot + kPointerSize;
char* ToAsciiArray();
#endif
-#ifdef DEBUG
- void StringVerify();
-#endif
+ DECLARE_VERIFIER(String)
+
inline bool IsFlat();
// Layout description.
unsigned* offset,
unsigned chars);
-#ifdef DEBUG
- void SeqAsciiStringVerify();
-#endif
+ DECLARE_VERIFIER(SeqAsciiString)
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(SeqAsciiString);
typedef FixedBodyDescriptor<kFirstOffset, kSecondOffset + kPointerSize, kSize>
BodyDescriptor;
-#ifdef DEBUG
- void ConsStringVerify();
-#endif
+ DECLARE_VERIFIER(ConsString)
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(ConsString);
kOffsetOffset + kPointerSize, kSize>
BodyDescriptor;
-#ifdef DEBUG
- void SlicedStringVerify();
-#endif
+ DECLARE_VERIFIER(SlicedString)
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(SlicedString);
static inline Oddball* cast(Object* obj);
// Dispatched behavior.
-#ifdef DEBUG
- void OddballVerify();
-#endif
+ DECLARE_VERIFIER(Oddball)
// Initialize the fields.
MUST_USE_RESULT MaybeObject* Initialize(const char* to_string,
return address() + kValueOffset;
}
-#ifdef DEBUG
- void JSGlobalPropertyCellVerify();
-#endif
+ DECLARE_VERIFIER(JSGlobalPropertyCell)
+
#ifdef OBJECT_PRINT
inline void JSGlobalPropertyCellPrint() {
JSGlobalPropertyCellPrint(stdout);
}
void JSProxyPrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSProxyVerify();
-#endif
+ DECLARE_VERIFIER(JSProxy)
// Layout description. We add padding so that a proxy has the same
// size as a virgin JSObject. This is essential for becoming a JSObject
}
void JSFunctionProxyPrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSFunctionProxyVerify();
-#endif
+ DECLARE_VERIFIER(JSFunctionProxy)
// Layout description.
static const int kCallTrapOffset = JSProxy::kPaddingOffset;
}
void JSSetPrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSSetVerify();
-#endif
+ DECLARE_VERIFIER(JSSet)
static const int kTableOffset = JSObject::kHeaderSize;
static const int kSize = kTableOffset + kPointerSize;
}
void JSMapPrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSMapVerify();
-#endif
+ DECLARE_VERIFIER(JSMap)
static const int kTableOffset = JSObject::kHeaderSize;
static const int kSize = kTableOffset + kPointerSize;
}
void JSWeakMapPrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSWeakMapVerify();
-#endif
+ DECLARE_VERIFIER(JSWeakMap)
static const int kTableOffset = JSObject::kHeaderSize;
static const int kNextOffset = kTableOffset + kPointerSize;
}
void ForeignPrint(FILE* out);
#endif
-#ifdef DEBUG
- void ForeignVerify();
-#endif
+ DECLARE_VERIFIER(Foreign)
// Layout description.
}
void JSArrayPrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSArrayVerify();
-#endif
+ DECLARE_VERIFIER(JSArray)
// Number of element slots to pre-allocate for an empty array.
static const int kPreallocatedArrayElements = 4;
}
void AccessorInfoPrint(FILE* out);
#endif
-#ifdef DEBUG
- void AccessorInfoVerify();
-#endif
+ DECLARE_VERIFIER(AccessorInfo)
static const int kGetterOffset = HeapObject::kHeaderSize;
static const int kSetterOffset = kGetterOffset + kPointerSize;
#ifdef OBJECT_PRINT
void AccessorPairPrint(FILE* out = stdout);
#endif
-#ifdef DEBUG
- void AccessorPairVerify();
-#endif
+ DECLARE_VERIFIER(AccessorPair)
static const int kGetterOffset = HeapObject::kHeaderSize;
static const int kSetterOffset = kGetterOffset + kPointerSize;
}
void AccessCheckInfoPrint(FILE* out);
#endif
-#ifdef DEBUG
- void AccessCheckInfoVerify();
-#endif
+ DECLARE_VERIFIER(AccessCheckInfo)
static const int kNamedCallbackOffset = HeapObject::kHeaderSize;
static const int kIndexedCallbackOffset = kNamedCallbackOffset + kPointerSize;
}
void InterceptorInfoPrint(FILE* out);
#endif
-#ifdef DEBUG
- void InterceptorInfoVerify();
-#endif
+ DECLARE_VERIFIER(InterceptorInfo)
static const int kGetterOffset = HeapObject::kHeaderSize;
static const int kSetterOffset = kGetterOffset + kPointerSize;
}
void CallHandlerInfoPrint(FILE* out);
#endif
-#ifdef DEBUG
- void CallHandlerInfoVerify();
-#endif
+ DECLARE_VERIFIER(CallHandlerInfo)
static const int kCallbackOffset = HeapObject::kHeaderSize;
static const int kDataOffset = kCallbackOffset + kPointerSize;
DECL_ACCESSORS(tag, Object)
DECL_ACCESSORS(property_list, Object)
-#ifdef DEBUG
- void TemplateInfoVerify();
-#endif
+ DECLARE_VERIFIER(TemplateInfo)
static const int kTagOffset = HeapObject::kHeaderSize;
static const int kPropertyListOffset = kTagOffset + kPointerSize;
}
void FunctionTemplateInfoPrint(FILE* out);
#endif
-#ifdef DEBUG
- void FunctionTemplateInfoVerify();
-#endif
+ DECLARE_VERIFIER(FunctionTemplateInfo)
static const int kSerialNumberOffset = TemplateInfo::kHeaderSize;
static const int kCallCodeOffset = kSerialNumberOffset + kPointerSize;
}
void ObjectTemplateInfoPrint(FILE* out);
#endif
-#ifdef DEBUG
- void ObjectTemplateInfoVerify();
-#endif
+ DECLARE_VERIFIER(ObjectTemplateInfo)
static const int kConstructorOffset = TemplateInfo::kHeaderSize;
static const int kInternalFieldCountOffset =
}
void SignatureInfoPrint(FILE* out);
#endif
-#ifdef DEBUG
- void SignatureInfoVerify();
-#endif
+ DECLARE_VERIFIER(SignatureInfo)
static const int kReceiverOffset = Struct::kHeaderSize;
static const int kArgsOffset = kReceiverOffset + kPointerSize;
}
void TypeSwitchInfoPrint(FILE* out);
#endif
-#ifdef DEBUG
- void TypeSwitchInfoVerify();
-#endif
+ DECLARE_VERIFIER(TypeSwitchInfo)
static const int kTypesOffset = Struct::kHeaderSize;
static const int kSize = kTypesOffset + kPointerSize;
}
void DebugInfoPrint(FILE* out);
#endif
-#ifdef DEBUG
- void DebugInfoVerify();
-#endif
+ DECLARE_VERIFIER(DebugInfo)
static const int kSharedFunctionInfoIndex = Struct::kHeaderSize;
static const int kOriginalCodeIndex = kSharedFunctionInfoIndex + kPointerSize;
}
void BreakPointInfoPrint(FILE* out);
#endif
-#ifdef DEBUG
- void BreakPointInfoVerify();
-#endif
+ DECLARE_VERIFIER(BreakPointInfo)
static const int kCodePositionIndex = Struct::kHeaderSize;
static const int kSourcePositionIndex = kCodePositionIndex + kPointerSize;
#undef DECL_BOOLEAN_ACCESSORS
#undef DECL_ACCESSORS
+#undef DECLARE_VERIFIER
#define VISITOR_SYNCHRONIZATION_TAGS_LIST(V) \
V(kSymbolTable, "symbol_table", "(Symbols)") \
if (pending_exception) return Failure::Exception();
}
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
cache_handle->JSFunctionResultCacheVerify();
}
cache_handle->set(index + 1, *value);
cache_handle->set_finger_index(index);
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
cache_handle->JSFunctionResultCacheVerify();
}
VirtualMemory reservation;
Address area_start = NULL;
Address area_end = NULL;
+
if (executable == EXECUTABLE) {
chunk_size = RoundUp(CodePageAreaStartOffset() + body_size,
OS::CommitPageSize()) + CodePageGuardSize();
size_executable_ += reservation.size();
}
-#ifdef DEBUG
- ZapBlock(base, CodePageGuardStartOffset());
- ZapBlock(base + CodePageAreaStartOffset(), body_size);
-#endif
+ if (Heap::ShouldZapGarbage()) {
+ ZapBlock(base, CodePageGuardStartOffset());
+ ZapBlock(base + CodePageAreaStartOffset(), body_size);
+ }
+
area_start = base + CodePageAreaStartOffset();
area_end = area_start + body_size;
} else {
if (base == NULL) return NULL;
-#ifdef DEBUG
- ZapBlock(base, chunk_size);
-#endif
+ if (Heap::ShouldZapGarbage()) {
+ ZapBlock(base, chunk_size);
+ }
area_start = base + Page::kObjectStartOffset;
area_end = base + chunk_size;
size_t size,
Executability executable) {
if (!VirtualMemory::CommitRegion(start, size, executable)) return false;
-#ifdef DEBUG
- ZapBlock(start, size);
-#endif
+
+ if (Heap::ShouldZapGarbage()) {
+ ZapBlock(start, size);
+ }
+
isolate_->counters()->memory_allocated()->Increment(static_cast<int>(size));
return true;
}
void PagedSpace::Print() { }
#endif
-
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
void PagedSpace::Verify(ObjectVisitor* visitor) {
// We can only iterate over the pages if they were swept precisely.
if (was_swept_conservatively_) return;
PageIterator page_iterator(this);
while (page_iterator.has_next()) {
Page* page = page_iterator.next();
- ASSERT(page->owner() == this);
+ CHECK(page->owner() == this);
if (page == Page::FromAllocationTop(allocation_info_.top)) {
allocation_pointer_found_in_space = true;
}
- ASSERT(page->WasSweptPrecisely());
+ CHECK(page->WasSweptPrecisely());
HeapObjectIterator it(page, NULL);
Address end_of_previous_object = page->area_start();
Address top = page->area_end();
int black_size = 0;
for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
- ASSERT(end_of_previous_object <= object->address());
+ CHECK(end_of_previous_object <= object->address());
// The first word should be a map, and we expect all map pointers to
// be in map space.
Map* map = object->map();
- ASSERT(map->IsMap());
- ASSERT(heap()->map_space()->Contains(map));
+ CHECK(map->IsMap());
+ CHECK(heap()->map_space()->Contains(map));
// Perform space-specific object verification.
VerifyObject(object);
black_size += size;
}
- ASSERT(object->address() + size <= top);
+ CHECK(object->address() + size <= top);
end_of_previous_object = object->address() + size;
}
- ASSERT_LE(black_size, page->LiveBytes());
+ CHECK_LE(black_size, page->LiveBytes());
}
- ASSERT(allocation_pointer_found_in_space);
+ CHECK(allocation_pointer_found_in_space);
}
-#endif
-
+#endif // VERIFY_HEAP
// -----------------------------------------------------------------------------
// NewSpace implementation
}
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
// We do not use the SemiSpaceIterator because verification doesn't assume
// that it works (it depends on the invariants we are checking).
void NewSpace::Verify() {
}
// Check semi-spaces.
- ASSERT_EQ(from_space_.id(), kFromSpace);
- ASSERT_EQ(to_space_.id(), kToSpace);
+ CHECK_EQ(from_space_.id(), kFromSpace);
+ CHECK_EQ(to_space_.id(), kToSpace);
from_space_.Verify();
to_space_.Verify();
}
#ifdef DEBUG
void SemiSpace::Print() { }
+#endif
-
+#ifdef VERIFY_HEAP
void SemiSpace::Verify() {
bool is_from_space = (id_ == kFromSpace);
NewSpacePage* page = anchor_.next_page();
page = page->next_page();
}
}
+#endif
-
+#ifdef DEBUG
void SemiSpace::AssertValidRange(Address start, Address end) {
// Addresses belong to same semi-space
NewSpacePage* page = NewSpacePage::FromLimit(start);
// -----------------------------------------------------------------------------
// MapSpace implementation
+// TODO(mvstanton): this is weird...the compiler can't make a vtable unless
+// there is at least one non-inlined virtual function. I would prefer to hide
+// the VerifyObject definition behind VERIFY_HEAP.
-#ifdef DEBUG
void MapSpace::VerifyObject(HeapObject* object) {
// The object should be a map or a free-list node.
- ASSERT(object->IsMap() || object->IsFreeSpace());
+ CHECK(object->IsMap() || object->IsFreeSpace());
}
-#endif
// -----------------------------------------------------------------------------
// GlobalPropertyCellSpace implementation
+// TODO(mvstanton): this is weird...the compiler can't make a vtable unless
+// there is at least one non-inlined virtual function. I would prefer to hide
+// the VerifyObject definition behind VERIFY_HEAP.
-#ifdef DEBUG
void CellSpace::VerifyObject(HeapObject* object) {
// The object should be a global object property cell or a free-list node.
- ASSERT(object->IsJSGlobalPropertyCell() ||
+ CHECK(object->IsJSGlobalPropertyCell() ||
object->map() == heap()->two_pointer_filler_map());
}
-#endif
// -----------------------------------------------------------------------------
HeapObject* object = page->GetObject();
-#ifdef DEBUG
- // Make the object consistent so the heap can be vefified in OldSpaceStep.
- reinterpret_cast<Object**>(object->address())[0] =
- heap()->fixed_array_map();
- reinterpret_cast<Object**>(object->address())[1] = Smi::FromInt(0);
-#endif
+ if (Heap::ShouldZapGarbage()) {
+ // Make the object consistent so the heap can be verified in OldSpaceStep.
+ // We only need to do this in debug builds or if verify_heap is on.
+ reinterpret_cast<Object**>(object->address())[0] =
+ heap()->fixed_array_map();
+ reinterpret_cast<Object**>(object->address())[1] = Smi::FromInt(0);
+ }
heap()->incremental_marking()->OldSpaceStep(object_size);
return object;
}
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
// We do not assume that the large object iterator works, because it depends
// on the invariants we are checking during verification.
void LargeObjectSpace::Verify() {
// object area start.
HeapObject* object = chunk->GetObject();
Page* page = Page::FromAddress(object->address());
- ASSERT(object->address() == page->area_start());
+ CHECK(object->address() == page->area_start());
// The first word should be a map, and we expect all map pointers to be
// in map space.
Map* map = object->map();
- ASSERT(map->IsMap());
- ASSERT(heap()->map_space()->Contains(map));
+ CHECK(map->IsMap());
+ CHECK(heap()->map_space()->Contains(map));
// We have only code, sequential strings, external strings
// (sequential strings that have been morphed into external
// strings), fixed arrays, and byte arrays in large object space.
- ASSERT(object->IsCode() || object->IsSeqString() ||
+ CHECK(object->IsCode() || object->IsSeqString() ||
object->IsExternalString() || object->IsFixedArray() ||
object->IsFixedDoubleArray() || object->IsByteArray());
Object* element = array->get(j);
if (element->IsHeapObject()) {
HeapObject* element_object = HeapObject::cast(element);
- ASSERT(heap()->Contains(element_object));
- ASSERT(element_object->map()->IsMap());
+ CHECK(heap()->Contains(element_object));
+ CHECK(element_object->map()->IsMap());
}
}
}
}
}
+#endif
+#ifdef DEBUG
void LargeObjectSpace::Print() {
LargeObjectIterator it(this);
for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
// The dummy page that anchors the linked list of pages.
Page* anchor() { return &anchor_; }
-#ifdef DEBUG
- // Print meta info and objects in this space.
- virtual void Print();
-
+#ifdef VERIFY_HEAP
// Verify integrity of this space.
virtual void Verify(ObjectVisitor* visitor);
- // Reports statistics for the space
- void ReportStatistics();
-
// Overridden by subclasses to verify space-specific object
// properties (e.g., only maps or free-list nodes are in map space).
virtual void VerifyObject(HeapObject* obj) {}
+#endif
+
+#ifdef DEBUG
+ // Print meta info and objects in this space.
+ virtual void Print();
+
+ // Reports statistics for the space
+ void ReportStatistics();
// Report code object related statistics
void CollectCodeStatistics();
NewSpacePage* first_page() { return anchor_.next_page(); }
NewSpacePage* current_page() { return current_page_; }
+#ifdef VERIFY_HEAP
+ virtual void Verify();
+#endif
+
#ifdef DEBUG
virtual void Print();
- virtual void Verify();
// Validate a range of of addresses in a SemiSpace.
// The "from" address must be on a page prior to the "to" address,
// in the linked page order, or it must be earlier on the same page.
template <typename StringType>
inline void ShrinkStringAtAllocationBoundary(String* string, int len);
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
// Verify the active semispace.
virtual void Verify();
+#endif
+
+#ifdef DEBUG
// Print the active semispace.
virtual void Print() { to_space_.Print(); }
#endif
}
protected:
-#ifdef DEBUG
virtual void VerifyObject(HeapObject* obj);
-#endif
private:
static const int kMapsPerPage = Page::kNonCodeObjectAreaSize / Map::kSize;
}
protected:
-#ifdef DEBUG
virtual void VerifyObject(HeapObject* obj);
-#endif
public:
TRACK_MEMORY("CellSpace")
LargePage* first_page() { return first_page_; }
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
virtual void Verify();
+#endif
+
+#ifdef DEBUG
virtual void Print();
void ReportStatistics();
void CollectCodeStatistics();
}
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
static void DummyScavengePointer(HeapObject** p, HeapObject* o) {
// Do nothing.
}
void StoreBuffer::Verify() {
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
VerifyPointers(heap_->old_pointer_space(),
&StoreBuffer::FindPointersToNewSpaceInRegion);
VerifyPointers(heap_->map_space(),
void StoreBuffer::GCEpilogue() {
during_gc_ = false;
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
Verify();
}
+#endif
}
void IteratePointersInStoreBuffer(ObjectSlotCallback slot_callback);
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
void VerifyPointers(PagedSpace* space, RegionCallback region_callback);
void VerifyPointers(LargeObjectSpace* space);
#endif
TEST(DebugBreak) {
+#ifdef VERIFY_HEAP
+ i::FLAG_verify_heap = true;
+#endif
v8::HandleScope scope;
DebugLocalContext env;
- // This test should be run with option --verify-heap. As --verify-heap is
- // only available in debug mode only check for it in that case.
-#ifdef DEBUG
- CHECK(v8::internal::FLAG_verify_heap);
-#endif
-
// Register a debug event listener which sets the break flag and counts.
v8::Debug::SetDebugEventListener(DebugEventBreak);
TEST(InstanceOfStubWriteBarrier) {
i::FLAG_allow_natives_syntax = true;
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
i::FLAG_verify_heap = true;
#endif
+
InitializeVM();
if (!i::V8::UseCrankshaft()) return;
v8::HandleScope outer_scope;
TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
i::FLAG_allow_natives_syntax = true;
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
i::FLAG_verify_heap = true;
#endif
+
InitializeVM();
if (!i::V8::UseCrankshaft()) return;
v8::HandleScope outer_scope;
TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
i::FLAG_allow_natives_syntax = true;
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
i::FLAG_verify_heap = true;
#endif
+
InitializeVM();
if (!i::V8::UseCrankshaft()) return;
v8::HandleScope outer_scope;
// other strong paths are correctly recorded in the slots buffer.
TEST(Regress2060b) {
FLAG_always_compact = true;
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
FLAG_verify_heap = true;
#endif
+
LocalContext context;
v8::HandleScope scope;
Handle<JSFunction> function =