}
+#ifdef VERIFY_HEAP
+NoWeakEmbeddedMapsVerificationScope::NoWeakEmbeddedMapsVerificationScope() {
+  HEAP->no_weak_embedded_maps_verification_scope_depth_++;
+}
+
+
+NoWeakEmbeddedMapsVerificationScope::~NoWeakEmbeddedMapsVerificationScope() {
+  HEAP->no_weak_embedded_maps_verification_scope_depth_--;
+}
+#endif
+
+
void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) {
for (Object** current = start; current < end; current++) {
if ((*current)->IsHeapObject()) {
ms_count_at_last_idle_notification_(0),
gc_count_at_last_idle_gc_(0),
scavenges_since_last_idle_round_(kIdleScavengeThreshold),
+#ifdef VERIFY_HEAP
+ no_weak_embedded_maps_verification_scope_depth_(0),
+#endif
promotion_queue_(this),
configured_(false),
chunks_queued_for_free_(NULL),
#ifdef VERIFY_HEAP
// Verify the heap is in its normal state before or after a GC.
void Verify();
+
+
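+  // Returns false while a NoWeakEmbeddedMapsVerificationScope is active.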
+  bool weak_embedded_maps_verification_enabled() {
+    return no_weak_embedded_maps_verification_scope_depth_ == 0;
+  }
#endif
#ifdef DEBUG
unsigned int gc_count_at_last_idle_gc_;
int scavenges_since_last_idle_round_;
+#ifdef VERIFY_HEAP
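+ // Incremented/decremented by NoWeakEmbeddedMapsVerificationScope; weak
+ // embedded map verification is skipped while this is non-zero.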
+ int no_weak_embedded_maps_verification_scope_depth_;
+#endif
+
static const int kMaxMarkSweepsInIdleRound = 7;
static const int kIdleScavengeThreshold = 5;
friend class MarkCompactCollector;
friend class MarkCompactMarkingVisitor;
friend class MapCompact;
+#ifdef VERIFY_HEAP
+ friend class NoWeakEmbeddedMapsVerificationScope;
+#endif
DISALLOW_COPY_AND_ASSIGN(Heap);
};
DisallowAllocationFailure disallow_allocation_failure_;
};
+#ifdef VERIFY_HEAP
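+// Disables verification of weak embedded maps in optimized code for the
+// duration of the scope (see Heap::weak_embedded_maps_verification_enabled()).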
+class NoWeakEmbeddedMapsVerificationScope {
+ public:
+  inline NoWeakEmbeddedMapsVerificationScope();
+  inline ~NoWeakEmbeddedMapsVerificationScope();
+};
+#endif
+
// Visitor class to verify interior pointers in spaces that do not contain
// or care about intergenerational references. All heap object pointers have to
}
}
}
+#ifdef VERIFY_HEAP
+ // This disables verification of weak embedded maps after full GC.
+ // AddDependentCode can cause a GC, which would observe the state where
+ // this code is not yet in the dependent code lists of the embedded maps.
+ NoWeakEmbeddedMapsVerificationScope disable_verification_of_embedded_maps;
+#endif
for (int i = 0; i < maps.length(); i++) {
maps.at(i)->AddDependentCode(code);
}
void VisitEmbeddedPointer(RelocInfo* rinfo) {
ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
- if (!FLAG_weak_embedded_maps_in_optimized_code ||
+ if (!FLAG_weak_embedded_maps_in_optimized_code || !FLAG_collect_maps ||
rinfo->host()->kind() != Code::OPTIMIZED_FUNCTION ||
!rinfo->target_object()->IsMap() ||
!Map::cast(rinfo->target_object())->CanTransition()) {
}
#endif
+#ifdef VERIFY_HEAP
+  if (FLAG_collect_maps && FLAG_weak_embedded_maps_in_optimized_code &&
+      heap()->weak_embedded_maps_verification_enabled()) {
+    VerifyWeakEmbeddedMapsInOptimizedCode();
+  }
+#endif
+
Finish();
if (marking_parity_ == EVEN_MARKING_PARITY) {
CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes());
}
}
+
+
+void MarkCompactCollector::VerifyWeakEmbeddedMapsInOptimizedCode() {
+  HeapObjectIterator code_iterator(heap()->code_space());
+  for (HeapObject* obj = code_iterator.Next();
+       obj != NULL;
+       obj = code_iterator.Next()) {
+    Code* code = Code::cast(obj);
+    if (code->kind() != Code::OPTIMIZED_FUNCTION) continue;
+    if (code->marked_for_deoptimization()) continue;
+    code->VerifyEmbeddedMapsDependency();
+  }
+}
#endif // VERIFY_HEAP
#endif
}
+
class DeoptimizeMarkedCodeFilter : public OptimizedFunctionFilter {
public:
virtual bool TakeFunction(JSFunction* function) {
if (IsMarked(code) && !code->marked_for_deoptimization()) {
if (new_number_of_codes != i) {
codes->set_code_at(new_number_of_codes, code);
- Object** slot = codes->code_slot_at(new_number_of_codes);
- RecordSlot(slot, slot, code);
- new_number_of_codes++;
}
+ Object** slot = codes->code_slot_at(new_number_of_codes);
+ RecordSlot(slot, slot, code);
+ new_number_of_codes++;
}
}
for (int i = new_number_of_codes; i < number_of_codes; i++) {
void VerifyMarkbitsAreClean();
static void VerifyMarkbitsAreClean(PagedSpace* space);
static void VerifyMarkbitsAreClean(NewSpace* space);
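+ // Iterates the code space and calls Code::VerifyEmbeddedMapsDependency()
+ // on every live optimized code object.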
+ void VerifyWeakEmbeddedMapsInOptimizedCode();
#endif
// Sweep a single page from the given space conservatively.
#include "disassembler.h"
#include "disasm.h"
#include "jsregexp.h"
+#include "macro-assembler.h"
#include "objects-visiting.h"
namespace v8 {
}
+void Code::VerifyEmbeddedMapsDependency() {
+  int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
+  for (RelocIterator it(this, mode_mask); !it.done(); it.next()) {
+    RelocInfo::Mode mode = it.rinfo()->rmode();
+    if (mode == RelocInfo::EMBEDDED_OBJECT &&
+        it.rinfo()->target_object()->IsMap()) {
+      Map* map = Map::cast(it.rinfo()->target_object());
+      if (map->CanTransition()) {
+        CHECK(map->dependent_codes()->Contains(this));
+      }
+    }
+  }
+}
+
+
void JSArray::JSArrayVerify() {
JSObjectVerify();
CHECK(length()->IsNumber() || length()->IsUndefined());
ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
ASSERT(!rinfo->target_object()->IsConsString());
HeapObject* object = HeapObject::cast(rinfo->target_object());
- if (!FLAG_weak_embedded_maps_in_optimized_code ||
- !FLAG_collect_maps || rinfo->host()->kind() != Code::OPTIMIZED_FUNCTION ||
+ if (!FLAG_weak_embedded_maps_in_optimized_code || !FLAG_collect_maps ||
+ rinfo->host()->kind() != Code::OPTIMIZED_FUNCTION ||
!object->IsMap() || !Map::cast(object)->CanTransition()) {
heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
StaticVisitor::MarkObject(heap, object);
}
+bool DependentCodes::Contains(Code* code) {
+  int limit = number_of_codes();
+  for (int i = 0; i < limit; i++) {
+    if (code_at(i) == code) return true;
+  }
+  return false;
+}
+
+
MaybeObject* JSReceiver::SetPrototype(Object* value,
bool skip_hidden_prototypes) {
#ifdef DEBUG
void PrintDeoptLocation(int bailout_id);
+#ifdef VERIFY_HEAP
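+ // Checks that every transitionable map embedded in this code has this
+ // code object in its dependent codes list.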
+ void VerifyEmbeddedMapsDependency();
+#endif
+
// Max loop nesting marker used to postpone OSR. We don't take loop
// nesting that is deeper than 5 levels into account.
static const int kMaxLoopNestingMarker = 6;
static Handle<DependentCodes> Append(Handle<DependentCodes> codes,
Handle<Code> value);
static inline DependentCodes* cast(Object* object);
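+ // Returns true if |code| is in this list of dependent code objects.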
+ bool Contains(Code* code);
private:
static const int kNumberOfCodesIndex = 0;
static const int kCodesIndex = 1;