mark-compact.cc
messages.cc
objects.cc
+ objects-visiting.cc
oprofile-agent.cc
parser.cc
profile-generator.cc
}
+// Static-dispatch counterpart of RelocInfo::Visit(ObjectVisitor*): routes
+// this relocation entry to the matching StaticVisitor hook based on mode.
+template<typename StaticVisitor>
+void RelocInfo::Visit() {
+ RelocInfo::Mode mode = rmode();
+ if (mode == RelocInfo::EMBEDDED_OBJECT) {
+ StaticVisitor::VisitPointer(target_object_address());
+ } else if (RelocInfo::IsCodeTarget(mode)) {
+ StaticVisitor::VisitCodeTarget(this);
+ } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
+ StaticVisitor::VisitExternalReference(target_reference_address());
+#ifdef ENABLE_DEBUGGER_SUPPORT
+ // Patched JS-return / debug-break-slot sequences count as debug targets
+ // only while break points are active.
+ } else if (Debug::has_break_points() &&
+ ((RelocInfo::IsJSReturn(mode) &&
+ IsPatchedReturnSequence()) ||
+ (RelocInfo::IsDebugBreakSlot(mode) &&
+ IsPatchedDebugBreakSlotSequence()))) {
+ StaticVisitor::VisitDebugTarget(this);
+#endif
+ } else if (mode == RelocInfo::RUNTIME_ENTRY) {
+ StaticVisitor::VisitRuntimeEntry(this);
+ }
+}
+
+
Operand::Operand(int32_t immediate, RelocInfo::Mode rmode) {
rm_ = no_reg;
imm32_ = immediate;
INLINE(void set_call_object(Object* target));
INLINE(Object** call_object_address());
+ template<typename StaticVisitor> inline void Visit();
inline void Visit(ObjectVisitor* v);
// Patch the code with some other code.
#include "global-handles.h"
#include "macro-assembler.h"
#include "natives.h"
+#include "objects-visiting.h"
#include "snapshot.h"
#include "stub-cache.h"
initial_map->set_instance_size(
initial_map->instance_size() + 5 * kPointerSize);
initial_map->set_instance_descriptors(*descriptors);
- initial_map->set_scavenger(
- Heap::GetScavenger(initial_map->instance_type(),
- initial_map->instance_size()));
+ initial_map->set_visitor_id(StaticVisitorBase::GetVisitorId(*initial_map));
}
{ // -- J S O N
#include "execution.h"
#include "factory.h"
#include "macro-assembler.h"
+#include "objects-visiting.h"
namespace v8 {
namespace internal {
copy->set_inobject_properties(inobject_properties);
copy->set_unused_property_fields(inobject_properties);
copy->set_instance_size(copy->instance_size() + instance_size_delta);
- copy->set_scavenger(Heap::GetScavenger(copy->instance_type(),
- copy->instance_size()));
+ copy->set_visitor_id(StaticVisitorBase::GetVisitorId(*copy));
return copy;
}
#include "global-handles.h"
#include "mark-compact.h"
#include "natives.h"
+#include "objects-visiting.h"
#include "scanner.h"
#include "scopeinfo.h"
#include "snapshot.h"
}
+// Static visitor used while iterating object bodies during a scavenge:
+// forwards each pointer that still points into new space via
+// Heap::ScavengeObject; pointers outside new space are left untouched.
+class NewSpaceScavenger : public StaticNewSpaceVisitor<NewSpaceScavenger> {
+ public:
+ static inline void VisitPointer(Object** p) {
+ Object* object = *p;
+ // Fast exit for the common case: slot already points outside new space.
+ if (!Heap::InNewSpace(object)) return;
+ Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
+ reinterpret_cast<HeapObject*>(object));
+ }
+};
+
+
Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
Address new_space_front) {
do {
// queue is empty.
while (new_space_front < new_space_.top()) {
HeapObject* object = HeapObject::FromAddress(new_space_front);
- Map* map = object->map();
- int size = object->SizeFromMap(map);
- object->IterateBody(map->instance_type(), size, scavenge_visitor);
- new_space_front += size;
+ new_space_front += NewSpaceScavenger::IterateBody(object->map(), object);
}
// Promote and process all the to-be-promoted objects.
}
-#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
-static void RecordCopiedObject(HeapObject* obj) {
- bool should_record = false;
-#ifdef DEBUG
- should_record = FLAG_heap_stats;
-#endif
-#ifdef ENABLE_LOGGING_AND_PROFILING
- should_record = should_record || FLAG_log_gc;
-#endif
- if (should_record) {
- if (Heap::new_space()->Contains(obj)) {
- Heap::new_space()->RecordAllocation(obj);
- } else {
- Heap::new_space()->RecordPromotion(obj);
- }
- }
-}
-#endif // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
+class ScavengingVisitor : public StaticVisitorBase {
+ public:
+ static void Initialize() {
+ table_.Register(kVisitSeqAsciiString, &EvacuateSeqAsciiString);
+ table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString);
+ table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
+ table_.Register(kVisitByteArray, &EvacuateByteArray);
+ table_.Register(kVisitFixedArray, &EvacuateFixedArray);
+ typedef ObjectEvacuationStrategy<POINTER_OBJECT> PointerObject;
-// Helper function used by CopyObject to copy a source object to an
-// allocated target object and update the forwarding pointer in the source
-// object. Returns the target object.
-inline static HeapObject* MigrateObject(HeapObject* source,
- HeapObject* target,
- int size) {
- // Copy the content of source to target.
- Heap::CopyBlock(target->address(), source->address(), size);
+ table_.Register(kVisitConsString,
+ &ObjectEvacuationStrategy<POINTER_OBJECT>::
+ VisitSpecialized<ConsString::kSize>);
- // Set the forwarding address.
- source->set_map_word(MapWord::FromForwardingAddress(target));
+ table_.Register(kVisitSharedFunctionInfo,
+ &ObjectEvacuationStrategy<POINTER_OBJECT>::
+ VisitSpecialized<SharedFunctionInfo::kSize>);
-#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
- // Update NewSpace stats if necessary.
- RecordCopiedObject(target);
-#endif
- HEAP_PROFILE(ObjectMoveEvent(source->address(), target->address()));
+ table_.RegisterSpecializations<ObjectEvacuationStrategy<DATA_OBJECT>,
+ kVisitDataObject,
+ kVisitDataObjectGeneric>();
- return target;
-}
+ table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
+ kVisitJSObject,
+ kVisitJSObjectGeneric>();
+ table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
+ kVisitStruct,
+ kVisitStructGeneric>();
+ }
-enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
-enum SizeRestriction { SMALL, UNKNOWN_SIZE };
+ static inline void Scavenge(Map* map, HeapObject** slot, HeapObject* obj) {
+ table_.GetVisitor(map)(map, slot, obj);
+ }
-template<ObjectContents object_contents, SizeRestriction size_restriction>
-static inline void EvacuateObject(Map* map,
- HeapObject** slot,
- HeapObject* object,
- int object_size) {
- ASSERT((size_restriction != SMALL) ||
- (object_size <= Page::kMaxHeapObjectSize));
- ASSERT(object->Size() == object_size);
- if (Heap::ShouldBePromoted(object->address(), object_size)) {
- Object* result;
+ private:
+ enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
+ enum SizeRestriction { SMALL, UNKNOWN_SIZE };
- if ((size_restriction != SMALL) &&
- (object_size > Page::kMaxHeapObjectSize)) {
- result = Heap::lo_space()->AllocateRawFixedArray(object_size);
- } else {
- if (object_contents == DATA_OBJECT) {
- result = Heap::old_data_space()->AllocateRaw(object_size);
+#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
+ static void RecordCopiedObject(HeapObject* obj) {
+ bool should_record = false;
+#ifdef DEBUG
+ should_record = FLAG_heap_stats;
+#endif
+#ifdef ENABLE_LOGGING_AND_PROFILING
+ should_record = should_record || FLAG_log_gc;
+#endif
+ if (should_record) {
+ if (Heap::new_space()->Contains(obj)) {
+ Heap::new_space()->RecordAllocation(obj);
} else {
- result = Heap::old_pointer_space()->AllocateRaw(object_size);
+ Heap::new_space()->RecordPromotion(obj);
}
}
-
- if (!result->IsFailure()) {
- HeapObject* target = HeapObject::cast(result);
- *slot = MigrateObject(object, target, object_size);
-
- if (object_contents == POINTER_OBJECT) {
- promotion_queue.insert(target, object_size);
- }
-
- Heap::tracer()->increment_promoted_objects_size(object_size);
- return;
- }
}
- Object* result = Heap::new_space()->AllocateRaw(object_size);
- ASSERT(!result->IsFailure());
- *slot = MigrateObject(object, HeapObject::cast(result), object_size);
- return;
-}
+#endif // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
+ // Helper function used by CopyObject to copy a source object to an
+ // allocated target object and update the forwarding pointer in the source
+ // object. Returns the target object.
+ INLINE(static HeapObject* MigrateObject(HeapObject* source,
+ HeapObject* target,
+ int size)) {
+ // Copy the content of source to target.
+ Heap::CopyBlock(target->address(), source->address(), size);
-template<int object_size_in_words, ObjectContents object_contents>
-static inline void EvacuateObjectOfFixedSize(Map* map,
- HeapObject** slot,
- HeapObject* object) {
- const int object_size = object_size_in_words << kPointerSizeLog2;
- EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
-}
+ // Set the forwarding address.
+ source->set_map_word(MapWord::FromForwardingAddress(target));
+#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
+ // Update NewSpace stats if necessary.
+ RecordCopiedObject(target);
+#endif
+ HEAP_PROFILE(ObjectMoveEvent(source->address(), target->address()));
-template<ObjectContents object_contents>
-static inline void EvacuateObjectOfFixedSize(Map* map,
- HeapObject** slot,
- HeapObject* object) {
- int object_size = map->instance_size();
- EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
-}
+ return target;
+ }
-static inline void EvacuateFixedArray(Map* map,
- HeapObject** slot,
- HeapObject* object) {
- int object_size = FixedArray::cast(object)->FixedArraySize();
- EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
-}
+ template<ObjectContents object_contents, SizeRestriction size_restriction>
+ static inline void EvacuateObject(Map* map,
+ HeapObject** slot,
+ HeapObject* object,
+ int object_size) {
+ ASSERT((size_restriction != SMALL) ||
+ (object_size <= Page::kMaxHeapObjectSize));
+ ASSERT(object->Size() == object_size);
+ if (Heap::ShouldBePromoted(object->address(), object_size)) {
+ Object* result;
-static inline void EvacuateByteArray(Map* map,
- HeapObject** slot,
- HeapObject* object) {
- int object_size = ByteArray::cast(object)->ByteArraySize();
- EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
-}
+ if ((size_restriction != SMALL) &&
+ (object_size > Page::kMaxHeapObjectSize)) {
+ result = Heap::lo_space()->AllocateRawFixedArray(object_size);
+ } else {
+ if (object_contents == DATA_OBJECT) {
+ result = Heap::old_data_space()->AllocateRaw(object_size);
+ } else {
+ result = Heap::old_pointer_space()->AllocateRaw(object_size);
+ }
+ }
+ if (!result->IsFailure()) {
+ HeapObject* target = HeapObject::cast(result);
+ *slot = MigrateObject(object, target, object_size);
-static Scavenger GetScavengerForSize(int object_size,
- ObjectContents object_contents) {
- ASSERT(IsAligned(object_size, kPointerSize));
- ASSERT(object_size < Page::kMaxHeapObjectSize);
+ if (object_contents == POINTER_OBJECT) {
+ promotion_queue.insert(target, object_size);
+ }
- switch (object_size >> kPointerSizeLog2) {
-#define CASE(n) \
- case n: \
- if (object_contents == DATA_OBJECT) { \
- return static_cast<Scavenger>( \
- &EvacuateObjectOfFixedSize<n, DATA_OBJECT>); \
- } else { \
- return static_cast<Scavenger>( \
- &EvacuateObjectOfFixedSize<n, POINTER_OBJECT>); \
+ Heap::tracer()->increment_promoted_objects_size(object_size);
+ return;
}
+ }
+ Object* result = Heap::new_space()->AllocateRaw(object_size);
+ ASSERT(!result->IsFailure());
+ *slot = MigrateObject(object, HeapObject::cast(result), object_size);
+ return;
+ }
- CASE(1);
- CASE(2);
- CASE(3);
- CASE(4);
- CASE(5);
- CASE(6);
- CASE(7);
- CASE(8);
- CASE(9);
- CASE(10);
- CASE(11);
- CASE(12);
- CASE(13);
- CASE(14);
- CASE(15);
- CASE(16);
- default:
- if (object_contents == DATA_OBJECT) {
- return static_cast<Scavenger>(&EvacuateObjectOfFixedSize<DATA_OBJECT>);
- } else {
- return static_cast<Scavenger>(
- &EvacuateObjectOfFixedSize<POINTER_OBJECT>);
- }
-#undef CASE
+ static inline void EvacuateFixedArray(Map* map,
+ HeapObject** slot,
+ HeapObject* object) {
+ int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
+ EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE>(map,
+ slot,
+ object,
+ object_size);
}
-}
-static inline void EvacuateSeqAsciiString(Map* map,
- HeapObject** slot,
- HeapObject* object) {
- int object_size = SeqAsciiString::cast(object)->
- SeqAsciiStringSize(map->instance_type());
- EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
-}
+ static inline void EvacuateByteArray(Map* map,
+ HeapObject** slot,
+ HeapObject* object) {
+ int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
+ EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+ }
-static inline void EvacuateSeqTwoByteString(Map* map,
+ static inline void EvacuateSeqAsciiString(Map* map,
HeapObject** slot,
HeapObject* object) {
- int object_size = SeqTwoByteString::cast(object)->
- SeqTwoByteStringSize(map->instance_type());
- EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
-}
+ int object_size = SeqAsciiString::cast(object)->
+ SeqAsciiStringSize(map->instance_type());
+ EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+ }
-static inline bool IsShortcutCandidate(int type) {
- return ((type & kShortcutTypeMask) == kShortcutTypeTag);
-}
+ static inline void EvacuateSeqTwoByteString(Map* map,
+ HeapObject** slot,
+ HeapObject* object) {
+ int object_size = SeqTwoByteString::cast(object)->
+ SeqTwoByteStringSize(map->instance_type());
+ EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+ }
-static inline void EvacuateShortcutCandidate(Map* map,
- HeapObject** slot,
- HeapObject* object) {
- ASSERT(IsShortcutCandidate(map->instance_type()));
+ static inline bool IsShortcutCandidate(int type) {
+ return ((type & kShortcutTypeMask) == kShortcutTypeTag);
+ }
- if (ConsString::cast(object)->unchecked_second() == Heap::empty_string()) {
- HeapObject* first =
- HeapObject::cast(ConsString::cast(object)->unchecked_first());
+ static inline void EvacuateShortcutCandidate(Map* map,
+ HeapObject** slot,
+ HeapObject* object) {
+ ASSERT(IsShortcutCandidate(map->instance_type()));
- *slot = first;
+ if (ConsString::cast(object)->unchecked_second() == Heap::empty_string()) {
+ HeapObject* first =
+ HeapObject::cast(ConsString::cast(object)->unchecked_first());
- if (!Heap::InNewSpace(first)) {
- object->set_map_word(MapWord::FromForwardingAddress(first));
- return;
- }
+ *slot = first;
- MapWord first_word = first->map_word();
- if (first_word.IsForwardingAddress()) {
- HeapObject* target = first_word.ToForwardingAddress();
+ if (!Heap::InNewSpace(first)) {
+ object->set_map_word(MapWord::FromForwardingAddress(first));
+ return;
+ }
- *slot = target;
- object->set_map_word(MapWord::FromForwardingAddress(target));
+ MapWord first_word = first->map_word();
+ if (first_word.IsForwardingAddress()) {
+ HeapObject* target = first_word.ToForwardingAddress();
+
+ *slot = target;
+ object->set_map_word(MapWord::FromForwardingAddress(target));
+ return;
+ }
+
+ Scavenge(first->map(), slot, first);
+ object->set_map_word(MapWord::FromForwardingAddress(*slot));
return;
}
- first->map()->Scavenge(slot, first);
- object->set_map_word(MapWord::FromForwardingAddress(*slot));
- return;
+ int object_size = ConsString::kSize;
+ EvacuateObject<POINTER_OBJECT, SMALL>(map, slot, object, object_size);
}
- int object_size = ConsString::kSize;
- EvacuateObject<POINTER_OBJECT, SMALL>(map, slot, object, object_size);
-}
-
-
-Scavenger Heap::GetScavenger(int instance_type, int instance_size) {
- if (instance_type < FIRST_NONSTRING_TYPE) {
- switch (instance_type & kStringRepresentationMask) {
- case kSeqStringTag:
- if ((instance_type & kStringEncodingMask) == kAsciiStringTag) {
- return &EvacuateSeqAsciiString;
- } else {
- return &EvacuateSeqTwoByteString;
- }
-
- case kConsStringTag:
- if (IsShortcutCandidate(instance_type)) {
- return &EvacuateShortcutCandidate;
- } else {
- ASSERT(instance_size == ConsString::kSize);
- return GetScavengerForSize(ConsString::kSize, POINTER_OBJECT);
- }
-
- case kExternalStringTag:
- ASSERT(instance_size == ExternalString::kSize);
- return GetScavengerForSize(ExternalString::kSize, DATA_OBJECT);
+ template<ObjectContents object_contents>
+ class ObjectEvacuationStrategy {
+ public:
+ template<int object_size>
+ static inline void VisitSpecialized(Map* map,
+ HeapObject** slot,
+ HeapObject* object) {
+ EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
}
- UNREACHABLE();
- }
-
- switch (instance_type) {
- case BYTE_ARRAY_TYPE:
- return reinterpret_cast<Scavenger>(&EvacuateByteArray);
- case FIXED_ARRAY_TYPE:
- return reinterpret_cast<Scavenger>(&EvacuateFixedArray);
-
- case JS_OBJECT_TYPE:
- case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
- case JS_VALUE_TYPE:
- case JS_ARRAY_TYPE:
- case JS_REGEXP_TYPE:
- case JS_FUNCTION_TYPE:
- case JS_GLOBAL_PROXY_TYPE:
- case JS_GLOBAL_OBJECT_TYPE:
- case JS_BUILTINS_OBJECT_TYPE:
- return GetScavengerForSize(instance_size, POINTER_OBJECT);
-
- case ODDBALL_TYPE:
- return NULL;
-
- case PROXY_TYPE:
- return GetScavengerForSize(Proxy::kSize, DATA_OBJECT);
+ static inline void Visit(Map* map,
+ HeapObject** slot,
+ HeapObject* object) {
+ int object_size = map->instance_size();
+ EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
+ }
+ };
- case MAP_TYPE:
- return NULL;
+ typedef void (*Callback)(Map* map, HeapObject** slot, HeapObject* object);
- case CODE_TYPE:
- return NULL;
+ static VisitorDispatchTable<Callback> table_;
+};
- case JS_GLOBAL_PROPERTY_CELL_TYPE:
- return NULL;
- case HEAP_NUMBER_TYPE:
- case FILLER_TYPE:
- case PIXEL_ARRAY_TYPE:
- case EXTERNAL_BYTE_ARRAY_TYPE:
- case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
- case EXTERNAL_SHORT_ARRAY_TYPE:
- case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
- case EXTERNAL_INT_ARRAY_TYPE:
- case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
- case EXTERNAL_FLOAT_ARRAY_TYPE:
- return GetScavengerForSize(instance_size, DATA_OBJECT);
-
- case SHARED_FUNCTION_INFO_TYPE:
- return GetScavengerForSize(SharedFunctionInfo::kAlignedSize,
- POINTER_OBJECT);
-
-#define MAKE_STRUCT_CASE(NAME, Name, name) \
- case NAME##_TYPE:
- STRUCT_LIST(MAKE_STRUCT_CASE)
-#undef MAKE_STRUCT_CASE
- return GetScavengerForSize(instance_size, POINTER_OBJECT);
- default:
- UNREACHABLE();
- return NULL;
- }
-}
+VisitorDispatchTable<ScavengingVisitor::Callback> ScavengingVisitor::table_;
void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
MapWord first_word = object->map_word();
ASSERT(!first_word.IsForwardingAddress());
Map* map = first_word.ToMap();
- map->Scavenge(p, object);
+ // Dispatch through the static visitor table instead of the per-map
+ // scavenger function pointer that this change removes.
+ ScavengingVisitor::Scavenge(map, p, object);
}
reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
reinterpret_cast<Map*>(result)->
- set_scavenger(GetScavenger(instance_type, instance_size));
+ set_visitor_id(
+ StaticVisitorBase::GetVisitorId(instance_type, instance_size));
reinterpret_cast<Map*>(result)->set_inobject_properties(0);
reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0);
reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
Map* map = reinterpret_cast<Map*>(result);
map->set_map(meta_map());
map->set_instance_type(instance_type);
- map->set_scavenger(GetScavenger(instance_type, instance_size));
+ map->set_visitor_id(
+ StaticVisitorBase::GetVisitorId(instance_type, instance_size));
map->set_prototype(null_value());
map->set_constructor(null_value());
map->set_instance_size(instance_size);
if (!ConfigureHeapDefault()) return false;
}
+ ScavengingVisitor::Initialize();
+ NewSpaceScavenger::Initialize();
+ MarkCompactCollector::Initialize();
+
// Setup memory allocator and reserve a chunk of memory for new
// space. The chunk is double the size of the requested reserved
// new space size to ensure that we can find a pair of semispaces that
PrintF("external=%d ", static_cast<int>(scopes_[Scope::EXTERNAL]));
PrintF("mark=%d ", static_cast<int>(scopes_[Scope::MC_MARK]));
PrintF("sweep=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP]));
+ PrintF("sweepns=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP_NEWSPACE]));
PrintF("compact=%d ", static_cast<int>(scopes_[Scope::MC_COMPACT]));
PrintF("flushcode=%d ", static_cast<int>(scopes_[Scope::MC_FLUSH_CODE]));
static void RecordStats(HeapStats* stats, bool take_snapshot = false);
- static Scavenger GetScavenger(int instance_type, int instance_size);
-
// Copy block of memory from src to dst. Size of block should be aligned
// by pointer size.
static inline void CopyBlock(Address dst, Address src, int byte_size);
EXTERNAL,
MC_MARK,
MC_SWEEP,
+ MC_SWEEP_NEWSPACE,
MC_COMPACT,
MC_FLUSH_CODE,
kNumberOfScopes
}
+// Static-dispatch counterpart of RelocInfo::Visit(ObjectVisitor*) for this
+// platform; mirrors the other architectures' inline headers.
+template<typename StaticVisitor>
+void RelocInfo::Visit() {
+ RelocInfo::Mode mode = rmode();
+ if (mode == RelocInfo::EMBEDDED_OBJECT) {
+ StaticVisitor::VisitPointer(target_object_address());
+ } else if (RelocInfo::IsCodeTarget(mode)) {
+ StaticVisitor::VisitCodeTarget(this);
+ } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
+ StaticVisitor::VisitExternalReference(target_reference_address());
+#ifdef ENABLE_DEBUGGER_SUPPORT
+ // Patched JS-return / debug-break-slot sequences count as debug targets
+ // only while break points are active.
+ } else if (Debug::has_break_points() &&
+ ((RelocInfo::IsJSReturn(mode) &&
+ IsPatchedReturnSequence()) ||
+ (RelocInfo::IsDebugBreakSlot(mode) &&
+ IsPatchedDebugBreakSlotSequence()))) {
+ StaticVisitor::VisitDebugTarget(this);
+#endif
+ } else if (mode == RelocInfo::RUNTIME_ENTRY) {
+ StaticVisitor::VisitRuntimeEntry(this);
+ }
+}
+
+
+
Immediate::Immediate(int x) {
x_ = x;
rmode_ = RelocInfo::NONE;
#include "global-handles.h"
#include "ic-inl.h"
#include "mark-compact.h"
+#include "objects-visiting.h"
#include "stub-cache.h"
namespace v8 {
int MarkCompactCollector::live_lo_objects_size_ = 0;
#endif
+
void MarkCompactCollector::CollectGarbage() {
// Make sure that Prepare() has been called. The individual steps below will
// update the state as they proceed.
}
-// Helper class for marking pointers in HeapObjects.
-class MarkingVisitor : public ObjectVisitor {
+class StaticMarkingVisitor : public StaticVisitorBase {
public:
- void VisitPointer(Object** p) {
+ static inline void IterateBody(Map* map, HeapObject* obj) {
+ table_.GetVisitor(map)(map, obj);
+ }
+
+ static void Initialize() {
+ table_.Register(kVisitShortcutCandidate,
+ &FixedBodyVisitor<StaticMarkingVisitor,
+ ConsString::BodyDescriptor,
+ void>::Visit);
+
+ table_.Register(kVisitConsString,
+ &FixedBodyVisitor<StaticMarkingVisitor,
+ ConsString::BodyDescriptor,
+ void>::Visit);
+
+
+ table_.Register(kVisitFixedArray,
+ &FlexibleBodyVisitor<StaticMarkingVisitor,
+ FixedArray::BodyDescriptor,
+ void>::Visit);
+
+ table_.Register(kVisitSharedFunctionInfo,
+ &FixedBodyVisitor<StaticMarkingVisitor,
+ SharedFunctionInfo::BodyDescriptor,
+ void>::Visit);
+
+ table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);
+ table_.Register(kVisitSeqAsciiString, &DataObjectVisitor::Visit);
+ table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);
+
+ table_.Register(kVisitOddball,
+ &FixedBodyVisitor<StaticMarkingVisitor,
+ Oddball::BodyDescriptor,
+ void>::Visit);
+ table_.Register(kVisitMap,
+ &FixedBodyVisitor<StaticMarkingVisitor,
+ Map::BodyDescriptor,
+ void>::Visit);
+
+ table_.Register(kVisitCode, &VisitCode);
+
+ table_.Register(kVisitPropertyCell,
+ &FixedBodyVisitor<StaticMarkingVisitor,
+ JSGlobalPropertyCell::BodyDescriptor,
+ void>::Visit);
+
+ table_.RegisterSpecializations<DataObjectVisitor,
+ kVisitDataObject,
+ kVisitDataObjectGeneric>();
+
+ table_.RegisterSpecializations<JSObjectVisitor,
+ kVisitJSObject,
+ kVisitJSObjectGeneric>();
+
+ table_.RegisterSpecializations<StructObjectVisitor,
+ kVisitStruct,
+ kVisitStructGeneric>();
+ }
+
+ INLINE(static void VisitPointer(Object** p)) {
MarkObjectByPointer(p);
}
- void VisitPointers(Object** start, Object** end) {
+ INLINE(static void VisitPointers(Object** start, Object** end)) {
// Mark all objects pointed to in [start, end).
const int kMinRangeForMarkingRecursion = 64;
if (end - start >= kMinRangeForMarkingRecursion) {
for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
}
- void VisitCodeTarget(RelocInfo* rinfo) {
+ static inline void VisitCodeTarget(RelocInfo* rinfo) {
ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
Code* code = Code::GetCodeFromTargetAddress(rinfo->target_address());
if (FLAG_cleanup_ics_at_gc && code->is_inline_cache_stub()) {
}
}
- void VisitDebugTarget(RelocInfo* rinfo) {
+ static inline void VisitDebugTarget(RelocInfo* rinfo) {
ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
rinfo->IsPatchedReturnSequence()) ||
(RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
MarkCompactCollector::MarkObject(code);
}
- private:
// Mark object pointed to by p.
- void MarkObjectByPointer(Object** p) {
+ INLINE(static void MarkObjectByPointer(Object** p)) {
if (!(*p)->IsHeapObject()) return;
HeapObject* object = ShortCircuitConsString(p);
MarkCompactCollector::MarkObject(object);
}
- // Tells whether the mark sweep collection will perform compaction.
- bool IsCompacting() { return MarkCompactCollector::IsCompacting(); }
-
// Visit an unmarked object.
- void VisitUnmarkedObject(HeapObject* obj) {
+ static inline void VisitUnmarkedObject(HeapObject* obj) {
#ifdef DEBUG
ASSERT(Heap::Contains(obj));
ASSERT(!obj->IsMarked());
MarkCompactCollector::SetMark(obj);
// Mark the map pointer and the body.
MarkCompactCollector::MarkObject(map);
- obj->IterateBody(map->instance_type(), obj->SizeFromMap(map), this);
+ IterateBody(map, obj);
}
// Visit all unmarked objects pointed to by [start, end).
// Returns false if the operation fails (lack of stack space).
- inline bool VisitUnmarkedObjects(Object** start, Object** end) {
+ static inline bool VisitUnmarkedObjects(Object** start, Object** end) {
// Return false is we are close to the stack limit.
StackLimitCheck check;
if (check.HasOverflowed()) return false;
}
return true;
}
+
+ static inline void VisitExternalReference(Address* p) { }
+ static inline void VisitRuntimeEntry(RelocInfo* rinfo) { }
+
+ private:
+ class DataObjectVisitor {
+ public:
+ template<int size>
+ static void VisitSpecialized(Map* map, HeapObject* object) {
+ }
+
+ static void Visit(Map* map, HeapObject* object) {
+ }
+ };
+
+ typedef FlexibleBodyVisitor<StaticMarkingVisitor,
+ JSObject::BodyDescriptor,
+ void> JSObjectVisitor;
+
+ typedef FlexibleBodyVisitor<StaticMarkingVisitor,
+ StructBodyDescriptor,
+ void> StructObjectVisitor;
+
+ static void VisitCode(Map* map, HeapObject* object) {
+ reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>();
+ }
+
+ typedef void (*Callback)(Map* map, HeapObject* object);
+
+ static VisitorDispatchTable<Callback> table_;
+};
+
+
+VisitorDispatchTable<StaticMarkingVisitor::Callback>
+ StaticMarkingVisitor::table_;
+
+
+// Thin ObjectVisitor adapter that forwards every callback to the new
+// StaticMarkingVisitor, for call sites that still need a virtual
+// ObjectVisitor instance (e.g. root iteration).
+class MarkingVisitor : public ObjectVisitor {
+ public:
+ void VisitPointer(Object** p) {
+ StaticMarkingVisitor::VisitPointer(p);
+ }
+
+ void VisitPointers(Object** start, Object** end) {
+ StaticMarkingVisitor::VisitPointers(start, end);
+ }
+
+ void VisitCodeTarget(RelocInfo* rinfo) {
+ StaticMarkingVisitor::VisitCodeTarget(rinfo);
+ }
+
+ void VisitDebugTarget(RelocInfo* rinfo) {
+ StaticMarkingVisitor::VisitDebugTarget(rinfo);
+ }
+};
for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
}
- MarkingVisitor* stack_visitor() { return &stack_visitor_; }
-
private:
- MarkingVisitor stack_visitor_;
-
void MarkObjectByPointer(Object** p) {
if (!(*p)->IsHeapObject()) return;
Map* map = object->map();
// Mark the object.
MarkCompactCollector::SetMark(object);
+
// Mark the map pointer and body, and push them on the marking stack.
MarkCompactCollector::MarkObject(map);
- object->IterateBody(map->instance_type(), object->SizeFromMap(map),
- &stack_visitor_);
+ StaticMarkingVisitor::IterateBody(map, object);
// Mark all the objects reachable from the map and body. May leave
// overflowed objects in the heap.
- MarkCompactCollector::EmptyMarkingStack(&stack_visitor_);
+ MarkCompactCollector::EmptyMarkingStack();
}
};
// Mark the Object* fields of the Map.
// Since the descriptor array has been marked already, it is fine
// that one of these fields contains a pointer to it.
- MarkingVisitor visitor; // Has no state or contents.
- visitor.VisitPointers(HeapObject::RawField(map,
- Map::kPointerFieldsBeginOffset),
- HeapObject::RawField(map,
- Map::kPointerFieldsEndOffset));
+ Object** start_slot = HeapObject::RawField(map,
+ Map::kPointerFieldsBeginOffset);
+
+ Object** end_slot = HeapObject::RawField(map, Map::kPointerFieldsEndOffset);
+
+ StaticMarkingVisitor::VisitPointers(start_slot, end_slot);
}
// Explicitly mark the prefix.
MarkingVisitor marker;
symbol_table->IteratePrefix(&marker);
- ProcessMarkingStack(&marker);
+ ProcessMarkingStack();
}
// There may be overflowed objects in the heap. Visit them now.
while (marking_stack.overflowed()) {
RefillMarkingStack();
- EmptyMarkingStack(visitor->stack_visitor());
+ EmptyMarkingStack();
}
}
// Before: the marking stack contains zero or more heap object pointers.
// After: the marking stack is empty, and all objects reachable from the
// marking stack have been marked, or are overflowed in the heap.
-void MarkCompactCollector::EmptyMarkingStack(MarkingVisitor* visitor) {
+void MarkCompactCollector::EmptyMarkingStack() {
while (!marking_stack.is_empty()) {
HeapObject* object = marking_stack.Pop();
ASSERT(object->IsHeapObject());
map_word.ClearMark();
Map* map = map_word.ToMap();
MarkObject(map);
- object->IterateBody(map->instance_type(), object->SizeFromMap(map),
- visitor);
+
+ StaticMarkingVisitor::IterateBody(map, object);
}
}
// stack. Before: the marking stack contains zero or more heap object
// pointers. After: the marking stack is empty and there are no overflowed
// objects in the heap.
-void MarkCompactCollector::ProcessMarkingStack(MarkingVisitor* visitor) {
- EmptyMarkingStack(visitor);
+void MarkCompactCollector::ProcessMarkingStack() {
+ EmptyMarkingStack();
while (marking_stack.overflowed()) {
RefillMarkingStack();
- EmptyMarkingStack(visitor);
+ EmptyMarkingStack();
}
}
-void MarkCompactCollector::ProcessObjectGroups(MarkingVisitor* visitor) {
+void MarkCompactCollector::ProcessObjectGroups() {
bool work_to_do = true;
ASSERT(marking_stack.is_empty());
while (work_to_do) {
MarkObjectGroups();
work_to_do = !marking_stack.is_empty();
- ProcessMarkingStack(visitor);
+ ProcessMarkingStack();
}
}
// objects are unmarked. Mark objects reachable from object groups
// containing at least one marked object, and continue until no new
// objects are reachable from the object groups.
- ProcessObjectGroups(root_visitor.stack_visitor());
+ ProcessObjectGroups();
// The objects reachable from the roots or object groups are marked,
// yet unreachable objects are unmarked. Mark objects reachable
GlobalHandles::IterateWeakRoots(&root_visitor);
while (marking_stack.overflowed()) {
RefillMarkingStack();
- EmptyMarkingStack(root_visitor.stack_visitor());
+ EmptyMarkingStack();
}
// Repeat the object groups to mark unmarked groups reachable from the
// weak roots.
- ProcessObjectGroups(root_visitor.stack_visitor());
+ ProcessObjectGroups();
// Prune the symbol table removing all symbols only pointed to by the
// symbol table. Cannot use symbol_table() here because the symbol
}
+// Static visitor that rewrites pointers into new space to the forwarding
+// address recorded at the object's old location (read from the first word
+// of the old object via Memory::Address_at).
+class StaticPointersToNewGenUpdatingVisitor : public
+ StaticNewSpaceVisitor<StaticPointersToNewGenUpdatingVisitor> {
+ public:
+ static inline void VisitPointer(Object** p) {
+ // Smis and other non-heap values never need updating.
+ if (!(*p)->IsHeapObject()) return;
+
+ HeapObject* obj = HeapObject::cast(*p);
+ Address old_addr = obj->address();
+
+ if (Heap::new_space()->Contains(obj)) {
+ // Live new-space objects must have been evacuated from from-space;
+ // their forwarding address was stored at the old location.
+ ASSERT(Heap::InFromSpace(*p));
+ *p = HeapObject::FromAddress(Memory::Address_at(old_addr));
+ }
+ }
+};
+
+
// Visitor for updating pointers from live objects in old spaces to new space.
// It does not expect to encounter pointers to dead objects.
class PointersToNewGenUpdatingVisitor: public ObjectVisitor {
public:
void VisitPointer(Object** p) {
- UpdatePointer(p);
+ StaticPointersToNewGenUpdatingVisitor::VisitPointer(p);
}
void VisitPointers(Object** start, Object** end) {
- for (Object** p = start; p < end; p++) UpdatePointer(p);
+ for (Object** p = start; p < end; p++) {
+ StaticPointersToNewGenUpdatingVisitor::VisitPointer(p);
+ }
}
void VisitCodeTarget(RelocInfo* rinfo) {
VisitPointer(&target);
rinfo->set_call_address(Code::cast(target)->instruction_start());
}
-
- private:
- void UpdatePointer(Object** p) {
- if (!(*p)->IsHeapObject()) return;
-
- HeapObject* obj = HeapObject::cast(*p);
- Address old_addr = obj->address();
-
- if (Heap::new_space()->Contains(obj)) {
- ASSERT(Heap::InFromSpace(*p));
- *p = HeapObject::FromAddress(Memory::Address_at(old_addr));
- }
- }
};
PointersToNewGenUpdatingVisitor updating_visitor;
// Update pointers in to space.
- HeapObject* object;
- for (Address current = space->bottom();
- current < space->top();
- current += object->Size()) {
- object = HeapObject::FromAddress(current);
-
- object->IterateBody(object->map()->instance_type(),
- object->Size(),
- &updating_visitor);
+ Address current = space->bottom();
+ while (current < space->top()) {
+ HeapObject* object = HeapObject::FromAddress(current);
+ current +=
+ StaticPointersToNewGenUpdatingVisitor::IterateBody(object->map(),
+ object);
}
// Update roots.
SweepSpace(Heap::old_data_space(), &DeallocateOldDataBlock);
SweepSpace(Heap::code_space(), &DeallocateCodeBlock);
SweepSpace(Heap::cell_space(), &DeallocateCellBlock);
- SweepNewSpace(Heap::new_space());
+ { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE);
+ SweepNewSpace(Heap::new_space());
+ }
SweepSpace(Heap::map_space(), &DeallocateMapBlock);
Heap::IterateDirtyRegions(Heap::map_space(),
#endif
}
+
+// One-time setup for the collector: populates the static visitor dispatch
+// tables before the first collection uses them.
+void MarkCompactCollector::Initialize() {
+ StaticPointersToNewGenUpdatingVisitor::Initialize();
+ StaticMarkingVisitor::Initialize();
+}
+
+
} } // namespace v8::internal
force_compaction_ = value;
}
+
+ static void Initialize();
+
// Prepares for GC by resetting relocation info in old and map spaces and
// choosing spaces to compact.
static void Prepare(GCTracer* tracer);
friend class RootMarkingVisitor;
friend class MarkingVisitor;
+ friend class StaticMarkingVisitor;
// Marking operations for objects reachable from roots.
static void MarkLiveObjects();
// Mark all objects in an object group with at least one marked
// object, then all objects reachable from marked objects in object
// groups, and repeat.
- static void ProcessObjectGroups(MarkingVisitor* visitor);
+ static void ProcessObjectGroups();
// Mark objects reachable (transitively) from objects in the marking stack
// or overflowed in the heap.
- static void ProcessMarkingStack(MarkingVisitor* visitor);
+ static void ProcessMarkingStack();
// Mark objects reachable (transitively) from objects in the marking
// stack. This function empties the marking stack, but may leave
// overflowed objects in the heap, in which case the marking stack's
// overflow flag will be set.
- static void EmptyMarkingStack(MarkingVisitor* visitor);
+ static void EmptyMarkingStack();
// Refill the marking stack with overflowed objects from the heap. This
// function either leaves the marking stack full or clears the overflow
ptr[index] = value;
}
-inline Scavenger Map::scavenger() {
- Scavenger callback = reinterpret_cast<Scavenger>(
- READ_INTPTR_FIELD(this, kScavengerCallbackOffset));
- ASSERT(callback == Heap::GetScavenger(instance_type(),
- instance_size()));
-
- return callback;
-}
-
-inline void Map::set_scavenger(Scavenger callback) {
- WRITE_INTPTR_FIELD(this,
- kScavengerCallbackOffset,
- reinterpret_cast<intptr_t>(callback));
-}
+INT_ACCESSORS(Map, visitor_id, kScavengerCallbackOffset)
int Map::instance_size() {
return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
(kStringTag | kConsStringTag) ||
instance_type == JS_ARRAY_TYPE) return map->instance_size();
if (instance_type == FIXED_ARRAY_TYPE) {
- return reinterpret_cast<FixedArray*>(this)->FixedArraySize();
+ return FixedArray::BodyDescriptor::SizeOf(map, this);
}
if (instance_type == BYTE_ARRAY_TYPE) {
return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
}
-void Proxy::ProxyIterateBody(ObjectVisitor* visitor) {
- visitor->VisitExternalReference(
- reinterpret_cast<Address *>(FIELD_ADDR(this, kProxyOffset)));
-}
-
-
ACCESSORS(JSValue, value, Object, kValueOffset)
}
+// The pointer body of a JSObject spans its whole instance, so its size is
+// simply the instance size recorded in the map.
+int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
+ return map->instance_size();
+}
+
+
+// Visit the single external (non-heap) address a Proxy wraps, using a
+// dynamic ObjectVisitor.
+void Proxy::ProxyIterateBody(ObjectVisitor* v) {
+ v->VisitExternalReference(
+ reinterpret_cast<Address *>(FIELD_ADDR(this, kProxyOffset)));
+}
+
+
+// Static-visitor counterpart of the above: same field, dispatched at
+// compile time through StaticVisitor.
+template<typename StaticVisitor>
+void Proxy::ProxyIterateBody() {
+ StaticVisitor::VisitExternalReference(
+ reinterpret_cast<Address *>(FIELD_ADDR(this, kProxyOffset)));
+}
+
+
+// Visit the external resource pointer of an ASCII external string
+// (dynamic-visitor variant).
+void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
+ typedef v8::String::ExternalAsciiStringResource Resource;
+ v->VisitExternalAsciiString(
+ reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
+}
+
+
+// Static-visitor variant: same resource slot, compile-time dispatch.
+template<typename StaticVisitor>
+void ExternalAsciiString::ExternalAsciiStringIterateBody() {
+ typedef v8::String::ExternalAsciiStringResource Resource;
+ StaticVisitor::VisitExternalAsciiString(
+ reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
+}
+
+
+// Visit the external resource pointer of a two-byte external string
+// (dynamic-visitor variant).
+void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
+ typedef v8::String::ExternalStringResource Resource;
+ v->VisitExternalTwoByteString(
+ reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
+}
+
+
+// Static-visitor variant of the two-byte case.
+template<typename StaticVisitor>
+void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
+ typedef v8::String::ExternalStringResource Resource;
+ StaticVisitor::VisitExternalTwoByteString(
+ reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
+}
+
+#define SLOT_ADDR(obj, offset) \
+ reinterpret_cast<Object**>((obj)->address() + offset)
+
+// Dynamic-visitor iteration over a fixed-layout body: all pointer fields
+// lie in [start_offset, end_offset).
+template<int start_offset, int end_offset, int size>
+void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
+ HeapObject* obj,
+ ObjectVisitor* v) {
+ v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, end_offset));
+}
+
+
+// Dynamic-visitor iteration over a variable-size body: pointer fields lie
+// in [start_offset, object_size), where the caller supplies the size.
+template<int start_offset>
+void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
+ int object_size,
+ ObjectVisitor* v) {
+ v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, object_size));
+}
+
+#undef SLOT_ADDR
+
+
#undef CAST_ACCESSOR
#undef INT_ACCESSORS
#undef SMI_ACCESSORS
--- /dev/null
+// Copyright 2009 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "v8.h"
+
+#include "ic-inl.h"
+#include "objects-visiting.h"
+
+namespace v8 {
+namespace internal {
+
+
+// True when the instance type's representation bits match the shortcut
+// tag; used below to pick kVisitShortcutCandidate for cons strings that
+// the GC may short-circuit.
+static inline bool IsShortcutCandidate(int type) {
+ return ((type & kShortcutTypeMask) == kShortcutTypeTag);
+}
+
+
+// Maps an (instance_type, instance_size) pair to the id of the specialized
+// visitor stored in Map::visitor_id. Sized categories (data objects,
+// JS objects, structs) are further narrowed by GetVisitorIdForSize.
+StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
+ int instance_type,
+ int instance_size) {
+ // Strings are dispatched on their representation and encoding bits.
+ if (instance_type < FIRST_NONSTRING_TYPE) {
+ switch (instance_type & kStringRepresentationMask) {
+ case kSeqStringTag:
+ if ((instance_type & kStringEncodingMask) == kAsciiStringTag) {
+ return kVisitSeqAsciiString;
+ } else {
+ return kVisitSeqTwoByteString;
+ }
+
+ case kConsStringTag:
+ if (IsShortcutCandidate(instance_type)) {
+ return kVisitShortcutCandidate;
+ } else {
+ return kVisitConsString;
+ }
+
+ case kExternalStringTag:
+ // External strings have a fixed size; treat them as data objects.
+ return GetVisitorIdForSize(kVisitDataObject,
+ kVisitDataObjectGeneric,
+ ExternalString::kSize);
+ }
+ UNREACHABLE();
+ }
+
+ switch (instance_type) {
+ case BYTE_ARRAY_TYPE:
+ return kVisitByteArray;
+
+ case FIXED_ARRAY_TYPE:
+ return kVisitFixedArray;
+
+ case ODDBALL_TYPE:
+ return kVisitOddball;
+
+ case MAP_TYPE:
+ return kVisitMap;
+
+ case CODE_TYPE:
+ return kVisitCode;
+
+ case JS_GLOBAL_PROPERTY_CELL_TYPE:
+ return kVisitPropertyCell;
+
+ case SHARED_FUNCTION_INFO_TYPE:
+ return kVisitSharedFunctionInfo;
+
+ case PROXY_TYPE:
+ return GetVisitorIdForSize(kVisitDataObject,
+ kVisitDataObjectGeneric,
+ Proxy::kSize);
+
+ case FILLER_TYPE:
+ return kVisitDataObjectGeneric;
+
+ case JS_OBJECT_TYPE:
+ case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
+ case JS_VALUE_TYPE:
+ case JS_ARRAY_TYPE:
+ case JS_REGEXP_TYPE:
+ case JS_FUNCTION_TYPE:
+ case JS_GLOBAL_PROXY_TYPE:
+ case JS_GLOBAL_OBJECT_TYPE:
+ case JS_BUILTINS_OBJECT_TYPE:
+ return GetVisitorIdForSize(kVisitJSObject,
+ kVisitJSObjectGeneric,
+ instance_size);
+
+ // Types with no pointer fields beyond the header: plain data visitors.
+ case HEAP_NUMBER_TYPE:
+ case PIXEL_ARRAY_TYPE:
+ case EXTERNAL_BYTE_ARRAY_TYPE:
+ case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
+ case EXTERNAL_SHORT_ARRAY_TYPE:
+ case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
+ case EXTERNAL_INT_ARRAY_TYPE:
+ case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
+ case EXTERNAL_FLOAT_ARRAY_TYPE:
+ return GetVisitorIdForSize(kVisitDataObject,
+ kVisitDataObjectGeneric,
+ instance_size);
+
+#define MAKE_STRUCT_CASE(NAME, Name, name) \
+ case NAME##_TYPE:
+ STRUCT_LIST(MAKE_STRUCT_CASE)
+#undef MAKE_STRUCT_CASE
+ return GetVisitorIdForSize(kVisitStruct,
+ kVisitStructGeneric,
+ instance_size);
+
+ default:
+ UNREACHABLE();
+ return kVisitorIdCount;
+ }
+}
+
+} } // namespace v8::internal
--- /dev/null
+// Copyright 2006-2009 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef V8_OBJECTS_ITERATION_H_
+#define V8_OBJECTS_ITERATION_H_
+
+// This file provides base classes and auxiliary methods for defining
+// static object visitors used during GC.
+// Visiting HeapObject body with a normal ObjectVisitor requires performing
+// two switches on object's instance type to determine object size and layout
+// and one or more virtual method calls on visitor itself.
+// Static visitor is different: it provides a dispatch table which contains
+// pointers to specialized visit functions. Each map has the visitor_id
+// field which contains an index of specialized visitor to use.
+
+namespace v8 {
+namespace internal {
+
+
+// Base class for all static visitors.
+// Base class for all static visitors. Declares the VisitorId enumeration
+// stored in Map::visitor_id and the helpers that compute ids.
+class StaticVisitorBase : public AllStatic {
+ public:
+ enum VisitorId {
+ kVisitSeqAsciiString = 0,
+ kVisitSeqTwoByteString,
+ kVisitShortcutCandidate,
+ kVisitByteArray,
+ kVisitFixedArray,
+
+ // For data objects, JS objects and structs along with generic visitor which
+ // can visit object of any size we provide visitors specialized by
+ // object size in words.
+ // Ids of specialized visitors are declared in a linear order (without
+ // holes) starting from the id of visitor specialized for 2 words objects
+ // (base visitor id) and ending with the id of generic visitor.
+ // Method GetVisitorIdForSize depends on this ordering to calculate visitor
+ // id of specialized visitor from given instance size, base visitor id and
+ // generic visitor's id.
+
+ kVisitDataObject,
+ kVisitDataObject2 = kVisitDataObject,
+ kVisitDataObject3,
+ kVisitDataObject4,
+ kVisitDataObject5,
+ kVisitDataObject6,
+ kVisitDataObject7,
+ kVisitDataObject8,
+ kVisitDataObject9,
+ kVisitDataObjectGeneric,
+
+ kVisitJSObject,
+ kVisitJSObject2 = kVisitJSObject,
+ kVisitJSObject3,
+ kVisitJSObject4,
+ kVisitJSObject5,
+ kVisitJSObject6,
+ kVisitJSObject7,
+ kVisitJSObject8,
+ kVisitJSObject9,
+ kVisitJSObjectGeneric,
+
+ kVisitStruct,
+ kVisitStruct2 = kVisitStruct,
+ kVisitStruct3,
+ kVisitStruct4,
+ kVisitStruct5,
+ kVisitStruct6,
+ kVisitStruct7,
+ kVisitStruct8,
+ kVisitStruct9,
+ kVisitStructGeneric,
+
+ kVisitConsString,
+ kVisitOddball,
+ kVisitCode,
+ kVisitMap,
+ kVisitPropertyCell,
+ kVisitSharedFunctionInfo,
+
+ kVisitorIdCount,
+ kMinObjectSizeInWords = 2
+ };
+
+ // Determine which specialized visitor should be used for given instance
+ // type and instance size.
+ static VisitorId GetVisitorId(int instance_type, int instance_size);
+
+ // Convenience overload reading type and size from the map.
+ static VisitorId GetVisitorId(Map* map) {
+ return GetVisitorId(map->instance_type(), map->instance_size());
+ }
+
+ // For visitors that allow specialization by size calculate VisitorId based
+ // on size, base visitor id and generic visitor id.
+ static VisitorId GetVisitorIdForSize(VisitorId base,
+ VisitorId generic,
+ int object_size) {
+ ASSERT((base == kVisitDataObject) ||
+ (base == kVisitStruct) ||
+ (base == kVisitJSObject));
+ ASSERT(IsAligned(object_size, kPointerSize));
+ ASSERT(kMinObjectSizeInWords * kPointerSize <= object_size);
+ ASSERT(object_size < Page::kMaxHeapObjectSize);
+
+ // Objects larger than the largest specialization fall back to the
+ // generic visitor (Min clamps the computed id).
+ const VisitorId specialization = static_cast<VisitorId>(
+ base + (object_size >> kPointerSizeLog2) - kMinObjectSizeInWords);
+
+ return Min(specialization, generic);
+ }
+};
+
+
+// Table of visit callbacks indexed by VisitorId. Each static visitor owns
+// one instance and fills it in its Initialize() method.
+template<typename Callback>
+class VisitorDispatchTable {
+ public:
+ // Select the callback for an object via its map's visitor_id.
+ inline Callback GetVisitor(Map* map) {
+ return callbacks_[map->visitor_id()];
+ }
+
+ // Install a callback for a single visitor id.
+ void Register(StaticVisitorBase::VisitorId id, Callback callback) {
+ ASSERT((0 <= id) && (id < StaticVisitorBase::kVisitorIdCount));
+ callbacks_[id] = callback;
+ }
+
+ // Register Visitor::VisitSpecialized<size> under the id computed for the
+ // given object size in words.
+ template<typename Visitor,
+ StaticVisitorBase::VisitorId base,
+ StaticVisitorBase::VisitorId generic,
+ int object_size_in_words>
+ void RegisterSpecialization() {
+ static const int size = object_size_in_words * kPointerSize;
+ Register(StaticVisitorBase::GetVisitorIdForSize(base, generic, size),
+ &Visitor::template VisitSpecialized<size>);
+ }
+
+
+ // Register all size specializations (2..9 words) plus the generic
+ // fallback. The STATIC_ASSERT pins the id-range layout this relies on.
+ template<typename Visitor,
+ StaticVisitorBase::VisitorId base,
+ StaticVisitorBase::VisitorId generic>
+ void RegisterSpecializations() {
+ STATIC_ASSERT(
+ (generic - base + StaticVisitorBase::kMinObjectSizeInWords) == 10);
+ RegisterSpecialization<Visitor, base, generic, 2>();
+ RegisterSpecialization<Visitor, base, generic, 3>();
+ RegisterSpecialization<Visitor, base, generic, 4>();
+ RegisterSpecialization<Visitor, base, generic, 5>();
+ RegisterSpecialization<Visitor, base, generic, 6>();
+ RegisterSpecialization<Visitor, base, generic, 7>();
+ RegisterSpecialization<Visitor, base, generic, 8>();
+ RegisterSpecialization<Visitor, base, generic, 9>();
+ Register(generic, &Visitor::Visit);
+ }
+
+ private:
+ Callback callbacks_[StaticVisitorBase::kVisitorIdCount];
+};
+
+
+// Shared helper for body visitors: forwards the pointer-field range
+// [start_offset, end_offset) of an object to StaticVisitor::VisitPointers.
+template<typename StaticVisitor>
+class BodyVisitorBase : public AllStatic {
+ public:
+ static inline void IteratePointers(HeapObject* object,
+ int start_offset,
+ int end_offset) {
+ Object** start_slot = reinterpret_cast<Object**>(object->address() +
+ start_offset);
+ Object** end_slot = reinterpret_cast<Object**>(object->address() +
+ end_offset);
+ StaticVisitor::VisitPointers(start_slot, end_slot);
+ }
+};
+
+
+// Visitor for objects whose pointer fields occupy
+// [BodyDescriptor::kStartOffset, BodyDescriptor::SizeOf(map, object)),
+// i.e. the body size is only known at visit time. Returns the object size.
+template<typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
+class FlexibleBodyVisitor : public BodyVisitorBase<StaticVisitor> {
+ public:
+ static inline ReturnType Visit(Map* map, HeapObject* object) {
+ int object_size = BodyDescriptor::SizeOf(map, object);
+ // NOTE: the call must be qualified: IteratePointers is a member of a
+ // dependent base class and is not visible to unqualified lookup under
+ // two-phase name lookup (e.g. GCC rejects the unqualified form).
+ BodyVisitorBase<StaticVisitor>::IteratePointers(
+ object, BodyDescriptor::kStartOffset, object_size);
+ return static_cast<ReturnType>(object_size);
+ }
+
+ // Variant specialized for a compile-time object size; registered in the
+ // dispatch table by RegisterSpecializations.
+ template<int object_size>
+ static inline ReturnType VisitSpecialized(Map* map, HeapObject* object) {
+ BodyVisitorBase<StaticVisitor>::IteratePointers(
+ object, BodyDescriptor::kStartOffset, object_size);
+ return static_cast<ReturnType>(object_size);
+ }
+};
+
+
+// Visitor for objects with a statically known layout: pointer fields in
+// [BodyDescriptor::kStartOffset, BodyDescriptor::kEndOffset). Returns the
+// fixed object size.
+template<typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
+class FixedBodyVisitor : public BodyVisitorBase<StaticVisitor> {
+ public:
+ static inline ReturnType Visit(Map* map, HeapObject* object) {
+ // NOTE: the call must be qualified: IteratePointers is a member of a
+ // dependent base class and is not visible to unqualified lookup under
+ // two-phase name lookup (e.g. GCC rejects the unqualified form).
+ BodyVisitorBase<StaticVisitor>::IteratePointers(
+ object,
+ BodyDescriptor::kStartOffset,
+ BodyDescriptor::kEndOffset);
+ return static_cast<ReturnType>(BodyDescriptor::kSize);
+ }
+};
+
+
+// Base class for visitors used for a linear new space iteration.
+// IterateBody returns size of visited object.
+// Certain types of objects (i.e. Code objects) are not handled
+// by dispatch table of this visitor because they cannot appear
+// in the new space.
+//
+// This class is intended to be used in the following way:
+//
+// class SomeVisitor : public StaticNewSpaceVisitor<SomeVisitor> {
+// ...
+// }
+//
+// This is an example of Curiously recurring template pattern
+// (see http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern).
+// We use CRTP to guarantee aggressive compile time optimizations (i.e.
+// inlining and specialization of StaticVisitor::VisitPointers methods).
+template<typename StaticVisitor>
+class StaticNewSpaceVisitor : public StaticVisitorBase {
+ public:
+ // Fill the dispatch table. Must run once before IterateBody is used
+ // (see MarkCompactCollector::Initialize).
+ static void Initialize() {
+ table_.Register(kVisitShortcutCandidate,
+ &FixedBodyVisitor<StaticVisitor,
+ ConsString::BodyDescriptor,
+ int>::Visit);
+
+ table_.Register(kVisitConsString,
+ &FixedBodyVisitor<StaticVisitor,
+ ConsString::BodyDescriptor,
+ int>::Visit);
+
+ table_.Register(kVisitFixedArray,
+ &FlexibleBodyVisitor<StaticVisitor,
+ FixedArray::BodyDescriptor,
+ int>::Visit);
+
+ table_.Register(kVisitByteArray, &VisitByteArray);
+
+ table_.Register(kVisitSharedFunctionInfo,
+ &FixedBodyVisitor<StaticVisitor,
+ SharedFunctionInfo::BodyDescriptor,
+ int>::Visit);
+
+ table_.Register(kVisitSeqAsciiString, &VisitSeqAsciiString);
+
+ table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);
+
+ table_.RegisterSpecializations<DataObjectVisitor,
+ kVisitDataObject,
+ kVisitDataObjectGeneric>();
+ table_.RegisterSpecializations<JSObjectVisitor,
+ kVisitJSObject,
+ kVisitJSObjectGeneric>();
+ table_.RegisterSpecializations<StructVisitor,
+ kVisitStruct,
+ kVisitStructGeneric>();
+ }
+
+ // Visit the object's body via the table entry selected by its map and
+ // return the object size in bytes.
+ static inline int IterateBody(Map* map, HeapObject* obj) {
+ return table_.GetVisitor(map)(map, obj);
+ }
+
+ // Default pointer-range visitation; delegates each slot to the derived
+ // visitor's VisitPointer (CRTP).
+ static inline void VisitPointers(Object** start, Object** end) {
+ for (Object** p = start; p < end; p++) StaticVisitor::VisitPointer(p);
+ }
+
+ private:
+ // Byte arrays and sequential strings contain no heap pointers: only
+ // their size is computed, nothing is visited.
+ static inline int VisitByteArray(Map* map, HeapObject* object) {
+ return reinterpret_cast<ByteArray*>(object)->ByteArraySize();
+ }
+
+ static inline int VisitSeqAsciiString(Map* map, HeapObject* object) {
+ return SeqAsciiString::cast(object)->
+ SeqAsciiStringSize(map->instance_type());
+ }
+
+ static inline int VisitSeqTwoByteString(Map* map, HeapObject* object) {
+ return SeqTwoByteString::cast(object)->
+ SeqTwoByteStringSize(map->instance_type());
+ }
+
+ // Data objects have no pointer fields; the visitor just reports size.
+ class DataObjectVisitor {
+ public:
+ template<int object_size>
+ static inline int VisitSpecialized(Map* map, HeapObject* object) {
+ return object_size;
+ }
+
+ static inline int Visit(Map* map, HeapObject* object) {
+ return map->instance_size();
+ }
+ };
+
+ typedef FlexibleBodyVisitor<StaticVisitor,
+ StructBodyDescriptor,
+ int> StructVisitor;
+
+ typedef FlexibleBodyVisitor<StaticVisitor,
+ JSObject::BodyDescriptor,
+ int> JSObjectVisitor;
+
+ typedef int (*Callback)(Map* map, HeapObject* object);
+
+ static VisitorDispatchTable<Callback> table_;
+};
+
+
+// Out-of-line definition of the per-instantiation static dispatch table.
+template<typename StaticVisitor>
+VisitorDispatchTable<typename StaticNewSpaceVisitor<StaticVisitor>::Callback>
+ StaticNewSpaceVisitor<StaticVisitor>::table_;
+
+
+// Visit the pointer fields of a Code object with a dynamic ObjectVisitor:
+// the relocation-info slot plus every interesting relocation entry.
+void Code::CodeIterateBody(ObjectVisitor* v) {
+ int mode_mask = RelocInfo::kCodeTargetMask |
+ RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
+ RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
+ RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
+ RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
+ RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
+
+ // Use the relocation info pointer before it is visited by
+ // the heap compaction in the next statement.
+ RelocIterator it(this, mode_mask);
+
+ IteratePointers(v,
+ kRelocationInfoOffset,
+ kRelocationInfoOffset + kPointerSize);
+
+ for (; !it.done(); it.next()) {
+ it.rinfo()->Visit(v);
+ }
+}
+
+
+// Static-visitor counterpart of CodeIterateBody: identical traversal, but
+// dispatched through StaticVisitor at compile time.
+template<typename StaticVisitor>
+void Code::CodeIterateBody() {
+ int mode_mask = RelocInfo::kCodeTargetMask |
+ RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
+ RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
+ RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
+ RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
+ RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
+
+ // Use the relocation info pointer before it is visited by
+ // the heap compaction in the next statement.
+ RelocIterator it(this, mode_mask);
+
+ StaticVisitor::VisitPointer(
+ reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
+
+ for (; !it.done(); it.next()) {
+ it.rinfo()->Visit<StaticVisitor>();
+ }
+}
+
+
+} } // namespace v8::internal
+
+#endif // V8_OBJECTS_ITERATION_H_
#include "debug.h"
#include "execution.h"
#include "objects-inl.h"
+#include "objects-visiting.h"
#include "macro-assembler.h"
#include "scanner.h"
#include "scopeinfo.h"
switch (instance_type) {
case FIXED_ARRAY_TYPE:
- return reinterpret_cast<FixedArray*>(this)->FixedArraySize();
+ return FixedArray::BodyDescriptor::SizeOf(map, this);
case BYTE_ARRAY_TYPE:
return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
case CODE_TYPE:
case kSeqStringTag:
break;
case kConsStringTag:
- reinterpret_cast<ConsString*>(this)->ConsStringIterateBody(v);
+ ConsString::BodyDescriptor::IterateBody(this, v);
break;
case kExternalStringTag:
if ((type & kStringEncodingMask) == kAsciiStringTag) {
switch (type) {
case FIXED_ARRAY_TYPE:
- reinterpret_cast<FixedArray*>(this)->FixedArrayIterateBody(v);
+ FixedArray::BodyDescriptor::IterateBody(this, object_size, v);
break;
case JS_OBJECT_TYPE:
case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
case JS_GLOBAL_PROXY_TYPE:
case JS_GLOBAL_OBJECT_TYPE:
case JS_BUILTINS_OBJECT_TYPE:
- reinterpret_cast<JSObject*>(this)->JSObjectIterateBody(object_size, v);
+ JSObject::BodyDescriptor::IterateBody(this, object_size, v);
break;
case ODDBALL_TYPE:
- reinterpret_cast<Oddball*>(this)->OddballIterateBody(v);
+ Oddball::BodyDescriptor::IterateBody(this, v);
break;
case PROXY_TYPE:
reinterpret_cast<Proxy*>(this)->ProxyIterateBody(v);
break;
case MAP_TYPE:
- reinterpret_cast<Map*>(this)->MapIterateBody(v);
+ Map::BodyDescriptor::IterateBody(this, v);
break;
case CODE_TYPE:
reinterpret_cast<Code*>(this)->CodeIterateBody(v);
break;
case JS_GLOBAL_PROPERTY_CELL_TYPE:
- reinterpret_cast<JSGlobalPropertyCell*>(this)
- ->JSGlobalPropertyCellIterateBody(v);
+ JSGlobalPropertyCell::BodyDescriptor::IterateBody(this, v);
break;
case HEAP_NUMBER_TYPE:
case FILLER_TYPE:
case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
case EXTERNAL_FLOAT_ARRAY_TYPE:
break;
- case SHARED_FUNCTION_INFO_TYPE: {
- SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(this);
- shared->SharedFunctionInfoIterateBody(v);
+ case SHARED_FUNCTION_INFO_TYPE:
+ SharedFunctionInfo::BodyDescriptor::IterateBody(this, v);
break;
- }
+
#define MAKE_STRUCT_CASE(NAME, Name, name) \
case NAME##_TYPE:
STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
- IterateStructBody(object_size, v);
+ StructBodyDescriptor::IterateBody(this, object_size, v);
break;
default:
PrintF("Unknown type: %d\n", type);
}
-void JSObject::JSObjectIterateBody(int object_size, ObjectVisitor* v) {
- // Iterate over all fields in the body. Assumes all are Object*.
- IteratePointers(v, kPropertiesOffset, object_size);
-}
-
-
Object* JSObject::AddFastPropertyUsingMap(Map* new_map,
String* name,
Object* value) {
int new_instance_size = map()->instance_size() - instance_size_delta;
new_map->set_inobject_properties(0);
new_map->set_instance_size(new_instance_size);
- new_map->set_scavenger(Heap::GetScavenger(new_map->instance_type(),
- new_map->instance_size()));
+ new_map->set_visitor_id(StaticVisitorBase::GetVisitorId(new_map));
Heap::CreateFillerObjectAt(this->address() + new_instance_size,
instance_size_delta);
}
}
-void FixedArray::FixedArrayIterateBody(ObjectVisitor* v) {
- IteratePointers(v, kHeaderSize, kHeaderSize + length() * kPointerSize);
-}
-
-
static bool HasKey(FixedArray* array, Object* key) {
int len0 = array->length();
for (int i = 0; i < len0; i++) {
}
-void ConsString::ConsStringIterateBody(ObjectVisitor* v) {
- IteratePointers(v, kFirstOffset, kSecondOffset + kPointerSize);
-}
-
-
-void JSGlobalPropertyCell::JSGlobalPropertyCellIterateBody(ObjectVisitor* v) {
- IteratePointers(v, kValueOffset, kValueOffset + kPointerSize);
-}
-
-
uint16_t ConsString::ConsStringGet(int index) {
ASSERT(index >= 0 && index < this->length());
}
-#define FIELD_ADDR(p, offset) \
- (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)
-
-void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
- typedef v8::String::ExternalAsciiStringResource Resource;
- v->VisitExternalAsciiString(
- reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
-}
-
-
-void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
- typedef v8::String::ExternalStringResource Resource;
- v->VisitExternalTwoByteString(
- reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
-}
-
-#undef FIELD_ADDR
-
template <typename IteratorA, typename IteratorB>
static inline bool CompareStringContents(IteratorA* ia, IteratorB* ib) {
// General slow case check. We know that the ia and ib iterators
}
-void Map::MapIterateBody(ObjectVisitor* v) {
- // Assumes all Object* members are contiguously allocated!
- IteratePointers(v, kPointerFieldsBeginOffset, kPointerFieldsEndOffset);
-}
-
-
Object* JSFunction::SetInstancePrototype(Object* value) {
ASSERT(value->IsJSObject());
}
-void Oddball::OddballIterateBody(ObjectVisitor* v) {
- // Assumes all Object* members are contiguously allocated!
- IteratePointers(v, kToStringOffset, kToNumberOffset + kPointerSize);
-}
-
-
Object* Oddball::Initialize(const char* to_string, Object* to_number) {
Object* symbol = Heap::LookupAsciiSymbol(to_string);
if (symbol->IsFailure()) return symbol;
}
-void SharedFunctionInfo::SharedFunctionInfoIterateBody(ObjectVisitor* v) {
- IteratePointers(v,
- kNameOffset,
- kThisPropertyAssignmentsOffset + kPointerSize);
-}
-
-
void ObjectVisitor::VisitCodeTarget(RelocInfo* rinfo) {
ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
}
-void Code::CodeIterateBody(ObjectVisitor* v) {
- int mode_mask = RelocInfo::kCodeTargetMask |
- RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
- RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
- RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
- RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
- RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
-
- // Use the relocation info pointer before it is visited by
- // the heap compaction in the next statement.
- RelocIterator it(this, mode_mask);
-
- IteratePointers(v,
- kRelocationInfoOffset,
- kRelocationInfoOffset + kPointerSize);
-
- for (; !it.done(); it.next()) {
- it.rinfo()->Visit(v);
- }
-}
-
-
void Code::Relocate(intptr_t delta) {
for (RelocIterator it(this, RelocInfo::kApplyMask); !it.done(); it.next()) {
it.rinfo()->apply(delta);
};
+#define SLOT_ADDR(obj, offset) \
+ reinterpret_cast<Object**>((obj)->address() + offset)
+
+// This class describes a body of an object of a fixed size
+// in which all pointer fields are located in the [start_offset, end_offset)
+// interval.
+template<int start_offset, int end_offset, int size>
+class FixedBodyDescriptor {
+ public:
+ static const int kStartOffset = start_offset;
+ static const int kEndOffset = end_offset;
+ static const int kSize = size;
+
+ // Dynamic-visitor iteration; defined out of line (objects-inl.h).
+ static inline void IterateBody(HeapObject* obj, ObjectVisitor* v);
+
+ // Static-visitor iteration over the same [start_offset, end_offset)
+ // pointer range.
+ template<typename StaticVisitor>
+ static inline void IterateBody(HeapObject* obj) {
+ StaticVisitor::VisitPointers(SLOT_ADDR(obj, start_offset),
+ SLOT_ADDR(obj, end_offset));
+ }
+};
+
+
+// This class describes a body of an object of a variable size
+// in which all pointer fields are located in the [start_offset, object_size)
+// interval.
+template<int start_offset>
+class FlexibleBodyDescriptor {
+ public:
+ static const int kStartOffset = start_offset;
+
+ // Dynamic-visitor iteration; defined out of line (objects-inl.h).
+ static inline void IterateBody(HeapObject* obj,
+ int object_size,
+ ObjectVisitor* v);
+
+ // Static-visitor iteration over [start_offset, object_size), where the
+ // size is supplied by the caller.
+ template<typename StaticVisitor>
+ static inline void IterateBody(HeapObject* obj, int object_size) {
+ StaticVisitor::VisitPointers(SLOT_ADDR(obj, start_offset),
+ SLOT_ADDR(obj, object_size));
+ }
+};
+
+#undef SLOT_ADDR
+
+
// The HeapNumber class describes heap allocated numbers that cannot be
// represented in a Smi (small integer)
class HeapNumber: public HeapObject {
// Dispatched behavior.
- void JSObjectIterateBody(int object_size, ObjectVisitor* v);
void JSObjectShortPrint(StringStream* accumulator);
#ifdef DEBUG
void JSObjectPrint();
STATIC_CHECK(kHeaderSize == Internals::kJSObjectHeaderSize);
+ class BodyDescriptor : public FlexibleBodyDescriptor<kPropertiesOffset> {
+ public:
+ static inline int SizeOf(Map* map, HeapObject* object);
+ };
+
private:
Object* GetElementWithCallback(Object* receiver,
Object* structure,
static const int kMaxLength = (kMaxSize - kHeaderSize) / kPointerSize;
// Dispatched behavior.
- int FixedArraySize() { return SizeFor(length()); }
- void FixedArrayIterateBody(ObjectVisitor* v);
#ifdef DEBUG
void FixedArrayPrint();
void FixedArrayVerify();
// object, the prefix of this array is sorted.
void SortPairs(FixedArray* numbers, uint32_t len);
+ // Body descriptor for FixedArray: pointer fields start at kHeaderSize
+ // and the body size is derived from the array's length.
+ class BodyDescriptor : public FlexibleBodyDescriptor<kHeaderSize> {
+ public:
+ static inline int SizeOf(Map* map, HeapObject* object) {
+ return SizeFor(reinterpret_cast<FixedArray*>(object)->length());
+ }
+ };
+
protected:
// Set operation on FixedArray without using write barriers. Can
// only be used for storing old space objects or smis.
static inline ByteArray* cast(Object* obj);
// Dispatched behavior.
- int ByteArraySize() { return SizeFor(length()); }
+ inline int ByteArraySize() {
+ return SizeFor(this->length());
+ }
#ifdef DEBUG
void ByteArrayPrint();
void ByteArrayVerify();
// Dispatched behavior.
int CodeSize() { return SizeFor(body_size()); }
- void CodeIterateBody(ObjectVisitor* v);
+ inline void CodeIterateBody(ObjectVisitor* v);
+
+ template<typename StaticVisitor>
+ inline void CodeIterateBody();
#ifdef DEBUG
void CodePrint();
void CodeVerify();
DISALLOW_IMPLICIT_CONSTRUCTORS(Code);
};
-typedef void (*Scavenger)(Map* map, HeapObject** slot, HeapObject* object);
// All heap objects have a Map that describes their structure.
// A Map contains information about:
void ClearNonLiveTransitions(Object* real_prototype);
// Dispatched behavior.
- void MapIterateBody(ObjectVisitor* v);
#ifdef DEBUG
void MapPrint();
void MapVerify();
#endif
- inline Scavenger scavenger();
- inline void set_scavenger(Scavenger callback);
-
- inline void Scavenge(HeapObject** slot, HeapObject* obj) {
- scavenger()(this, slot, obj);
- }
+ inline int visitor_id();
+ inline void set_visitor_id(int visitor_id);
static const int kMaxPreAllocatedPropertyFields = 255;
static const int kCodeCacheEntryNameOffset = 0;
static const int kCodeCacheEntryCodeOffset = 1;
+ typedef FixedBodyDescriptor<kPointerFieldsBeginOffset,
+ kPointerFieldsEndOffset,
+ kSize> BodyDescriptor;
+
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(Map);
};
int CalculateInObjectProperties();
// Dispatched behavior.
- void SharedFunctionInfoIterateBody(ObjectVisitor* v);
// Set max_length to -1 for unlimited length.
void SourceCodePrint(StringStream* accumulator, int max_length);
#ifdef DEBUG
#endif
static const int kAlignedSize = POINTER_SIZE_ALIGN(kSize);
+ typedef FixedBodyDescriptor<kNameOffset,
+ kThisPropertyAssignmentsOffset + kPointerSize,
+ kSize> BodyDescriptor;
+
private:
// Bit positions in start_position_and_type.
// The source code start position is in the 30 most significant bits of
// Casting.
static inline ConsString* cast(Object* obj);
- // Garbage collection support. This method is called during garbage
- // collection to iterate through the heap pointers in the body of
- // the ConsString.
- void ConsStringIterateBody(ObjectVisitor* v);
-
// Layout description.
static const int kFirstOffset = POINTER_SIZE_ALIGN(String::kSize);
static const int kSecondOffset = kFirstOffset + kPointerSize;
// Minimum length for a cons string.
static const int kMinLength = 13;
+ typedef FixedBodyDescriptor<kFirstOffset, kSecondOffset + kPointerSize, kSize>
+ BodyDescriptor;
+
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(ConsString);
};
static inline ExternalAsciiString* cast(Object* obj);
// Garbage collection support.
- void ExternalAsciiStringIterateBody(ObjectVisitor* v);
+ inline void ExternalAsciiStringIterateBody(ObjectVisitor* v);
+
+ template<typename StaticVisitor>
+ inline void ExternalAsciiStringIterateBody();
// Support for StringInputBuffer.
const unibrow::byte* ExternalAsciiStringReadBlock(unsigned* remaining,
static inline ExternalTwoByteString* cast(Object* obj);
// Garbage collection support.
- void ExternalTwoByteStringIterateBody(ObjectVisitor* v);
+ inline void ExternalTwoByteStringIterateBody(ObjectVisitor* v);
+
+ template<typename StaticVisitor>
+ inline void ExternalTwoByteStringIterateBody();
+
// Support for StringInputBuffer.
void ExternalTwoByteStringReadBlockIntoBuffer(ReadBlockBuffer* buffer,
static inline Oddball* cast(Object* obj);
// Dispatched behavior.
- void OddballIterateBody(ObjectVisitor* v);
#ifdef DEBUG
void OddballVerify();
#endif
static const int kToNumberOffset = kToStringOffset + kPointerSize;
static const int kSize = kToNumberOffset + kPointerSize;
+ typedef FixedBodyDescriptor<kToStringOffset,
+ kToNumberOffset + kPointerSize,
+ kSize> BodyDescriptor;
+
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(Oddball);
};
// Casting.
static inline JSGlobalPropertyCell* cast(Object* obj);
- // Dispatched behavior.
- void JSGlobalPropertyCellIterateBody(ObjectVisitor* v);
#ifdef DEBUG
void JSGlobalPropertyCellVerify();
void JSGlobalPropertyCellPrint();
static const int kValueOffset = HeapObject::kHeaderSize;
static const int kSize = kValueOffset + kPointerSize;
+ typedef FixedBodyDescriptor<kValueOffset,
+ kValueOffset + kPointerSize,
+ kSize> BodyDescriptor;
+
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(JSGlobalPropertyCell);
};
// Dispatched behavior.
inline void ProxyIterateBody(ObjectVisitor* v);
+
+ template<typename StaticVisitor>
+ inline void ProxyIterateBody();
+
#ifdef DEBUG
void ProxyPrint();
void ProxyVerify();
};
+// Body descriptor shared by Struct subclasses: every field after the header
+// is a tagged pointer, and the object size comes from the map.
+class StructBodyDescriptor : public
+ FlexibleBodyDescriptor<HeapObject::kHeaderSize> {
+ public:
+ static inline int SizeOf(Map* map, HeapObject* object) {
+ return map->instance_size();
+ }
+};
+
+
// BooleanBit is a helper class for setting and getting a bit in an
// integer or Smi.
class BooleanBit : public AllStatic {
LOG(SnapshotPositionEvent(address, source_->position()));
}
ReadChunk(current, limit, space_number, address);
-
- if (space == Heap::map_space()) {
- ASSERT(size == Map::kSize);
- HeapObject* obj = HeapObject::FromAddress(address);
- Map* map = reinterpret_cast<Map*>(obj);
- map->set_scavenger(Heap::GetScavenger(map->instance_type(),
- map->instance_size()));
- }
}
}
+// Static-visitor counterpart of RelocInfo::Visit(ObjectVisitor*): dispatches
+// on the relocation mode and forwards to the matching StaticVisitor hook.
+// Mirrors the equivalent port-specific implementation added alongside it.
+template<typename StaticVisitor>
+void RelocInfo::Visit() {
+ RelocInfo::Mode mode = rmode();
+ if (mode == RelocInfo::EMBEDDED_OBJECT) {
+ StaticVisitor::VisitPointer(target_object_address());
+ } else if (RelocInfo::IsCodeTarget(mode)) {
+ StaticVisitor::VisitCodeTarget(this);
+ } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
+ StaticVisitor::VisitExternalReference(target_reference_address());
+#ifdef ENABLE_DEBUGGER_SUPPORT
+ // Debug targets are only visited when break points are active AND the
+ // code has actually been patched (return sequence or debug break slot).
+ } else if (Debug::has_break_points() &&
+ ((RelocInfo::IsJSReturn(mode) &&
+ IsPatchedReturnSequence()) ||
+ (RelocInfo::IsDebugBreakSlot(mode) &&
+ IsPatchedDebugBreakSlotSequence()))) {
+ StaticVisitor::VisitDebugTarget(this);
+#endif
+ } else if (mode == RelocInfo::RUNTIME_ENTRY) {
+ StaticVisitor::VisitRuntimeEntry(this);
+ }
+}
+
+
// -----------------------------------------------------------------------------
// Implementation of Operand
from __future__ import with_statement
-import sys, types, re, subprocess
+import sys, types, re, subprocess, math
def flatten(l):
flat = []
],
]
+# Folds f over the value of 'field' in each trace record, starting from init.
+# Note: f receives (accumulator, record[field]) -- the field VALUE, not the
+# record itself.
+def freduce(f, field, trace, init):
+ return reduce(lambda t,r: f(t, r[field]), trace, init)
+
def calc_total(trace, field):
- return reduce(lambda t,r: t + r[field], trace, 0)
+ return freduce(lambda t,v: t + v, field, trace, 0)
def calc_max(trace, field):
- return reduce(lambda t,r: max(t, r[field]), trace, 0)
+ # freduce passes the field VALUE as the second argument; name it 'v' for
+ # consistency with calc_total (the old name 'r' wrongly suggested a record).
+ return freduce(lambda t,v: max(t, v), field, trace, 0)
-def process_trace(filename):
- trace = parse_gc_trace(filename)
- total_gc = calc_total(trace, 'pause')
- max_gc = calc_max(trace, 'pause')
- avg_gc = total_gc / len(trace)
+# Counts the records whose 'field' value is non-zero.
+def count_nonzero(trace, field):
+ return freduce(lambda t,r: t if r == 0 else t + 1, field, trace, 0)
- total_sweep = calc_total(trace, 'sweep')
- max_sweep = calc_max(trace, 'sweep')
- total_mark = calc_total(trace, 'mark')
- max_mark = calc_max(trace, 'mark')
+def process_trace(filename):
+ trace = parse_gc_trace(filename)
+ marksweeps = filter(lambda r: r['gc'] == 'ms', trace)
+ markcompacts = filter(lambda r: r['gc'] == 'mc', trace)
 scavenges = filter(lambda r: r['gc'] == 's', trace)
- total_scavenge = calc_total(scavenges, 'pause')
- max_scavenge = calc_max(scavenges, 'pause')
- avg_scavenge = total_scavenge / len(scavenges)
 charts = plot_all(plots, trace, filename)
+ # Writes one summary row (count, total, max, avg, stddev) for 'field' over
+ # the given records.  Tolerates an empty trace: a filtered phase that never
+ # occurred (e.g. no mark-compacts) reports zeros instead of dividing by 0.
+ def stats(out, prefix, trace, field):
+ n = len(trace)
+ total = calc_total(trace, field)
+ peak = calc_max(trace, field)
+ avg = total / n if n > 0 else 0
+ if n > 1:
+ # Sample standard deviation (n - 1 denominator).
+ dev = math.sqrt(freduce(lambda t,r: (r - avg) ** 2, field, trace, 0) /
+ (n - 1))
+ else:
+ dev = 0
+
+ out.write('<tr><td>%s</td><td>%d</td><td>%d</td>'
+ '<td>%d</td><td>%d [dev %f]</td></tr>' %
+ (prefix, n, total, peak, avg, dev))
+
+
 with open(filename + '.html', 'w') as out:
 out.write('<html><body>')
- out.write('<table><tr><td>')
- out.write('Total in GC: <b>%d</b><br/>' % total_gc)
- out.write('Max in GC: <b>%d</b><br/>' % max_gc)
- out.write('Avg in GC: <b>%d</b><br/>' % avg_gc)
- out.write('</td><td>')
- out.write('Total in Scavenge: <b>%d</b><br/>' % total_scavenge)
- out.write('Max in Scavenge: <b>%d</b><br/>' % max_scavenge)
- out.write('Avg in Scavenge: <b>%d</b><br/>' % avg_scavenge)
- out.write('</td><td>')
- out.write('Total in Sweep: <b>%d</b><br/>' % total_sweep)
- out.write('Max in Sweep: <b>%d</b><br/>' % max_sweep)
- out.write('</td><td>')
- out.write('Total in Mark: <b>%d</b><br/>' % total_mark)
- out.write('Max in Mark: <b>%d</b><br/>' % max_mark)
- out.write('</td></tr></table>')
+ out.write('<table>')
+ out.write('<tr><td>Phase</td><td>Count</td><td>Time (ms)</td><td>Max</td><td>Avg</td></tr>')
+ stats(out, 'Total in GC', trace, 'pause')
+ stats(out, 'Scavenge', scavenges, 'pause')
+ stats(out, 'MarkSweep', marksweeps, 'pause')
+ stats(out, 'MarkCompact', markcompacts, 'pause')
+ stats(out, 'Mark', filter(lambda r: r['mark'] != 0, trace), 'mark')
+ stats(out, 'Sweep', filter(lambda r: r['sweep'] != 0, trace), 'sweep')
+ stats(out, 'Flush Code', filter(lambda r: r['flushcode'] != 0, trace), 'flushcode')
+ stats(out, 'Compact', filter(lambda r: r['compact'] != 0, trace), 'compact')
+ out.write('</table>')
 for chart in charts:
 out.write('<img src="%s">' % chart)
 out.write('</body></html>')