namespace v8 {
namespace internal {
-template<typename StaticVisitor>
+template <typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
- table_.Register(kVisitShortcutCandidate,
- &FixedBodyVisitor<StaticVisitor,
- ConsString::BodyDescriptor,
- int>::Visit);
+ table_.Register(
+ kVisitShortcutCandidate,
+ &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);
- table_.Register(kVisitConsString,
- &FixedBodyVisitor<StaticVisitor,
- ConsString::BodyDescriptor,
- int>::Visit);
+ table_.Register(
+ kVisitConsString,
+ &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);
table_.Register(kVisitSlicedString,
- &FixedBodyVisitor<StaticVisitor,
- SlicedString::BodyDescriptor,
- int>::Visit);
+ &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
+ int>::Visit);
- table_.Register(kVisitSymbol,
- &FixedBodyVisitor<StaticVisitor,
- Symbol::BodyDescriptor,
- int>::Visit);
+ table_.Register(
+ kVisitSymbol,
+ &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, int>::Visit);
table_.Register(kVisitFixedArray,
&FlexibleBodyVisitor<StaticVisitor,
- FixedArray::BodyDescriptor,
- int>::Visit);
+ FixedArray::BodyDescriptor, int>::Visit);
table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
table_.Register(kVisitFixedTypedArray, &VisitFixedTypedArray);
table_.Register(kVisitFixedFloat64Array, &VisitFixedTypedArray);
- table_.Register(kVisitNativeContext,
- &FixedBodyVisitor<StaticVisitor,
- Context::ScavengeBodyDescriptor,
- int>::Visit);
+ table_.Register(
+ kVisitNativeContext,
+ &FixedBodyVisitor<StaticVisitor, Context::ScavengeBodyDescriptor,
+ int>::Visit);
table_.Register(kVisitByteArray, &VisitByteArray);
- table_.Register(kVisitSharedFunctionInfo,
- &FixedBodyVisitor<StaticVisitor,
- SharedFunctionInfo::BodyDescriptor,
- int>::Visit);
+ table_.Register(
+ kVisitSharedFunctionInfo,
+ &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor,
+ int>::Visit);
table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);
table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);
- table_.template RegisterSpecializations<DataObjectVisitor,
- kVisitDataObject,
+ table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
kVisitDataObjectGeneric>();
- table_.template RegisterSpecializations<JSObjectVisitor,
- kVisitJSObject,
+ table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
kVisitJSObjectGeneric>();
- table_.template RegisterSpecializations<StructVisitor,
- kVisitStruct,
+ table_.template RegisterSpecializations<StructVisitor, kVisitStruct,
kVisitStructGeneric>();
}
-template<typename StaticVisitor>
+template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
Map* map, HeapObject* object) {
Heap* heap = map->GetHeap();
- STATIC_ASSERT(
- JSArrayBuffer::kWeakFirstViewOffset ==
- JSArrayBuffer::kWeakNextOffset + kPointerSize);
- VisitPointers(
- heap,
- HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
- HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
+ STATIC_ASSERT(JSArrayBuffer::kWeakFirstViewOffset ==
+ JSArrayBuffer::kWeakNextOffset + kPointerSize);
+ VisitPointers(heap, HeapObject::RawField(
+ object, JSArrayBuffer::BodyDescriptor::kStartOffset),
+ HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
VisitPointers(
- heap,
- HeapObject::RawField(object,
- JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
+ heap, HeapObject::RawField(
+ object, JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
return JSArrayBuffer::kSizeWithInternalFields;
}
-template<typename StaticVisitor>
+template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray(
Map* map, HeapObject* object) {
VisitPointers(
HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
VisitPointers(
- map->GetHeap(),
- HeapObject::RawField(object,
- JSTypedArray::kWeakNextOffset + kPointerSize),
+ map->GetHeap(), HeapObject::RawField(
+ object, JSTypedArray::kWeakNextOffset + kPointerSize),
HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
return JSTypedArray::kSizeWithInternalFields;
}
-template<typename StaticVisitor>
-int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(
- Map* map, HeapObject* object) {
+template <typename StaticVisitor>
+int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(Map* map,
+ HeapObject* object) {
VisitPointers(
map->GetHeap(),
HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
HeapObject::RawField(object, JSDataView::kWeakNextOffset));
VisitPointers(
map->GetHeap(),
- HeapObject::RawField(object,
- JSDataView::kWeakNextOffset + kPointerSize),
+ HeapObject::RawField(object, JSDataView::kWeakNextOffset + kPointerSize),
HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
return JSDataView::kSizeWithInternalFields;
}
-template<typename StaticVisitor>
+template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
table_.Register(kVisitShortcutCandidate,
- &FixedBodyVisitor<StaticVisitor,
- ConsString::BodyDescriptor,
- void>::Visit);
+ &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
+ void>::Visit);
table_.Register(kVisitConsString,
- &FixedBodyVisitor<StaticVisitor,
- ConsString::BodyDescriptor,
- void>::Visit);
+ &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
+ void>::Visit);
table_.Register(kVisitSlicedString,
- &FixedBodyVisitor<StaticVisitor,
- SlicedString::BodyDescriptor,
- void>::Visit);
+ &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
+ void>::Visit);
- table_.Register(kVisitSymbol,
- &FixedBodyVisitor<StaticVisitor,
- Symbol::BodyDescriptor,
- void>::Visit);
+ table_.Register(
+ kVisitSymbol,
+ &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, void>::Visit);
table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);
table_.Register(kVisitJSWeakCollection, &VisitWeakCollection);
- table_.Register(kVisitOddball,
- &FixedBodyVisitor<StaticVisitor,
- Oddball::BodyDescriptor,
- void>::Visit);
+ table_.Register(
+ kVisitOddball,
+ &FixedBodyVisitor<StaticVisitor, Oddball::BodyDescriptor, void>::Visit);
table_.Register(kVisitMap, &VisitMap);
// Registration for kVisitJSRegExp is done by StaticVisitor.
- table_.Register(kVisitCell,
- &FixedBodyVisitor<StaticVisitor,
- Cell::BodyDescriptor,
- void>::Visit);
+ table_.Register(
+ kVisitCell,
+ &FixedBodyVisitor<StaticVisitor, Cell::BodyDescriptor, void>::Visit);
table_.Register(kVisitPropertyCell, &VisitPropertyCell);
- table_.template RegisterSpecializations<DataObjectVisitor,
- kVisitDataObject,
+ table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
kVisitDataObjectGeneric>();
- table_.template RegisterSpecializations<JSObjectVisitor,
- kVisitJSObject,
+ table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
kVisitJSObjectGeneric>();
- table_.template RegisterSpecializations<StructObjectVisitor,
- kVisitStruct,
+ table_.template RegisterSpecializations<StructObjectVisitor, kVisitStruct,
kVisitStructGeneric>();
}
-template<typename StaticVisitor>
+template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
Heap* heap, Address entry_address) {
Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
}
-template<typename StaticVisitor>
+template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
Heap* heap, RelocInfo* rinfo) {
DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
}
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitCell(
- Heap* heap, RelocInfo* rinfo) {
+template <typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitCell(Heap* heap,
+ RelocInfo* rinfo) {
DCHECK(rinfo->rmode() == RelocInfo::CELL);
Cell* cell = rinfo->target_cell();
// No need to record slots because the cell space is not compacted during GC.
}
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(
- Heap* heap, RelocInfo* rinfo) {
+template <typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(Heap* heap,
+ RelocInfo* rinfo) {
DCHECK((RelocInfo::IsJSReturn(rinfo->rmode()) &&
rinfo->IsPatchedReturnSequence()) ||
(RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
}
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(
- Heap* heap, RelocInfo* rinfo) {
+template <typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(Heap* heap,
+ RelocInfo* rinfo) {
DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
// Monomorphic ICs are preserved when possible, but need to be flushed
// when they might be keeping a Context alive, or when the heap is about
// to be serialized.
- if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub()
- && (target->ic_state() == MEGAMORPHIC || target->ic_state() == GENERIC ||
- target->ic_state() == POLYMORPHIC || heap->flush_monomorphic_ics() ||
- heap->isolate()->serializer_enabled() ||
- target->ic_age() != heap->global_ic_age() ||
- target->is_invalidated_weak_stub())) {
+ if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() &&
+ (target->ic_state() == MEGAMORPHIC || target->ic_state() == GENERIC ||
+ target->ic_state() == POLYMORPHIC || heap->flush_monomorphic_ics() ||
+ heap->isolate()->serializer_enabled() ||
+ target->ic_age() != heap->global_ic_age() ||
+ target->is_invalidated_weak_stub())) {
IC::Clear(heap->isolate(), rinfo->pc(), rinfo->host()->constant_pool());
target = Code::GetCodeFromTargetAddress(rinfo->target_address());
}
}
-template<typename StaticVisitor>
+template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
Heap* heap, RelocInfo* rinfo) {
DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
}
-template<typename StaticVisitor>
+template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
Map* map, HeapObject* object) {
- FixedBodyVisitor<StaticVisitor,
- Context::MarkCompactBodyDescriptor,
+ FixedBodyVisitor<StaticVisitor, Context::MarkCompactBodyDescriptor,
void>::Visit(map, object);
MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
- for (int idx = Context::FIRST_WEAK_SLOT;
- idx < Context::NATIVE_CONTEXT_SLOTS;
+ for (int idx = Context::FIRST_WEAK_SLOT; idx < Context::NATIVE_CONTEXT_SLOTS;
++idx) {
Object** slot = Context::cast(object)->RawFieldOfElementAt(idx);
collector->RecordSlot(slot, slot, *slot);
}
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitMap(
- Map* map, HeapObject* object) {
+template <typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitMap(Map* map,
+ HeapObject* object) {
Heap* heap = map->GetHeap();
Map* map_object = Map::cast(object);
if (FLAG_collect_maps && map_object->CanTransition()) {
MarkMapContents(heap, map_object);
} else {
- StaticVisitor::VisitPointers(heap,
- HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
+ StaticVisitor::VisitPointers(
+ heap, HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
}
}
-template<typename StaticVisitor>
+template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
Map* map, HeapObject* object) {
Heap* heap = map->GetHeap();
StaticVisitor::VisitPointer(heap, slot);
}
- StaticVisitor::VisitPointers(heap,
+ StaticVisitor::VisitPointers(
+ heap,
HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
}
-template<typename StaticVisitor>
+template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
Map* map, HeapObject* object) {
Heap* heap = map->GetHeap();
StaticVisitor::VisitPointer(heap, slot);
}
- StaticVisitor::VisitPointers(heap,
+ StaticVisitor::VisitPointers(
+ heap,
HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset));
}
-template<typename StaticVisitor>
+template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
Map* map, HeapObject* object) {
Heap* heap = map->GetHeap();
// Skip visiting the backing hash table containing the mappings and the
// pointer to the other enqueued weak collections, both are post-processed.
- StaticVisitor::VisitPointers(heap,
- HeapObject::RawField(object, JSWeakCollection::kPropertiesOffset),
+ StaticVisitor::VisitPointers(
+ heap, HeapObject::RawField(object, JSWeakCollection::kPropertiesOffset),
HeapObject::RawField(object, JSWeakCollection::kTableOffset));
STATIC_ASSERT(JSWeakCollection::kTableOffset + kPointerSize ==
- JSWeakCollection::kNextOffset);
+ JSWeakCollection::kNextOffset);
STATIC_ASSERT(JSWeakCollection::kNextOffset + kPointerSize ==
- JSWeakCollection::kSize);
+ JSWeakCollection::kSize);
// Partially initialized weak collection is enqueued, but table is ignored.
if (!weak_collection->table()->IsHashTable()) return;
}
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitCode(
- Map* map, HeapObject* object) {
+template <typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitCode(Map* map,
+ HeapObject* object) {
Heap* heap = map->GetHeap();
Code* code = Code::cast(object);
if (FLAG_age_code && !heap->isolate()->serializer_enabled()) {
}
-template<typename StaticVisitor>
+template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
Map* map, HeapObject* object) {
Heap* heap = map->GetHeap();
if (FLAG_cleanup_code_caches_at_gc) {
shared->ClearTypeFeedbackInfo();
}
- if (FLAG_cache_optimized_code &&
- FLAG_flush_optimized_code_cache &&
+ if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache &&
!shared->optimized_code_map()->IsSmi()) {
// Always flush the optimized code map if requested by flag.
shared->ClearOptimizedCodeMap();
}
-template<typename StaticVisitor>
+template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray(
Map* map, HeapObject* object) {
Heap* heap = map->GetHeap();
heap->mark_compact_collector()->RecordSlot(slot, slot, object);
bool is_weak_object =
(array->get_weak_object_state() ==
- ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE &&
+ ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE &&
Code::IsWeakObjectInOptimizedCode(object)) ||
(array->get_weak_object_state() ==
- ConstantPoolArray::WEAK_OBJECTS_IN_IC &&
+ ConstantPoolArray::WEAK_OBJECTS_IN_IC &&
Code::IsWeakObjectInIC(object));
if (!is_weak_object) {
StaticVisitor::MarkObject(heap, object);
}
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(
- Map* map, HeapObject* object) {
+template <typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map,
+ HeapObject* object) {
Heap* heap = map->GetHeap();
JSFunction* function = JSFunction::cast(object);
MarkCompactCollector* collector = heap->mark_compact_collector();
}
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(
- Map* map, HeapObject* object) {
+template <typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map,
+ HeapObject* object) {
int last_property_offset =
JSRegExp::kSize + kPointerSize * map->inobject_properties();
- StaticVisitor::VisitPointers(map->GetHeap(),
- HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
+ StaticVisitor::VisitPointers(
+ map->GetHeap(), HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
HeapObject::RawField(object, last_property_offset));
}
-template<typename StaticVisitor>
+template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
Map* map, HeapObject* object) {
Heap* heap = map->GetHeap();
- STATIC_ASSERT(
- JSArrayBuffer::kWeakFirstViewOffset ==
- JSArrayBuffer::kWeakNextOffset + kPointerSize);
+ STATIC_ASSERT(JSArrayBuffer::kWeakFirstViewOffset ==
+ JSArrayBuffer::kWeakNextOffset + kPointerSize);
StaticVisitor::VisitPointers(
heap,
HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
StaticVisitor::VisitPointers(
- heap,
- HeapObject::RawField(object,
- JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
+ heap, HeapObject::RawField(
+ object, JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
}
-template<typename StaticVisitor>
+template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray(
Map* map, HeapObject* object) {
StaticVisitor::VisitPointers(
HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
StaticVisitor::VisitPointers(
- map->GetHeap(),
- HeapObject::RawField(object,
- JSTypedArray::kWeakNextOffset + kPointerSize),
+ map->GetHeap(), HeapObject::RawField(
+ object, JSTypedArray::kWeakNextOffset + kPointerSize),
HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
}
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(
- Map* map, HeapObject* object) {
+template <typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(Map* map,
+ HeapObject* object) {
StaticVisitor::VisitPointers(
map->GetHeap(),
HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
HeapObject::RawField(object, JSDataView::kWeakNextOffset));
StaticVisitor::VisitPointers(
map->GetHeap(),
- HeapObject::RawField(object,
- JSDataView::kWeakNextOffset + kPointerSize),
+ HeapObject::RawField(object, JSDataView::kWeakNextOffset + kPointerSize),
HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
}
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(
- Heap* heap, Map* map) {
+template <typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
+ Map* map) {
// Make sure that the back pointer stored either in the map itself or
// inside its transitions array is marked. Skip recording the back
// pointer slot since map space is not compacted.
DescriptorArray* descriptors = map->instance_descriptors();
if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
descriptors->length() > 0) {
- StaticVisitor::VisitPointers(heap,
- descriptors->GetFirstElementAddress(),
- descriptors->GetDescriptorEndSlot(0));
+ StaticVisitor::VisitPointers(heap, descriptors->GetFirstElementAddress(),
+ descriptors->GetDescriptorEndSlot(0));
}
int start = 0;
int end = map->NumberOfOwnDescriptors();
if (start < end) {
StaticVisitor::VisitPointers(heap,
- descriptors->GetDescriptorStartSlot(start),
- descriptors->GetDescriptorEndSlot(end));
+ descriptors->GetDescriptorStartSlot(start),
+ descriptors->GetDescriptorEndSlot(end));
}
// Mark prototype dependent codes array but do not push it onto marking
// Mark the pointer fields of the Map. Since the transitions array has
// been marked already, it is fine that one of these fields contains a
// pointer to it.
- StaticVisitor::VisitPointers(heap,
- HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
+ StaticVisitor::VisitPointers(
+ heap, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}
-template<typename StaticVisitor>
+template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
Heap* heap, TransitionArray* transitions) {
if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;
}
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(
- Heap* heap, Code* code) {
+template <typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(Heap* heap,
+ Code* code) {
// Skip in absence of inlining.
// TODO(turbofan): Revisit once we support inlining.
if (code->is_turbofanned()) return;
DeoptimizationInputData* data =
DeoptimizationInputData::cast(code->deoptimization_data());
FixedArray* literals = data->LiteralArray();
- for (int i = 0, count = data->InlinedFunctionCount()->value();
- i < count;
+ for (int i = 0, count = data->InlinedFunctionCount()->value(); i < count;
i++) {
JSFunction* inlined = JSFunction::cast(literals->get(i));
StaticVisitor::MarkObject(heap, inlined->shared()->code());
inline static bool IsValidNonBuiltinContext(Object* context) {
return context->IsContext() &&
- !Context::cast(context)->global_object()->IsJSBuiltinsObject();
+ !Context::cast(context)->global_object()->IsJSBuiltinsObject();
}
inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
Object* undefined = heap->undefined_value();
return (info->script() != undefined) &&
- (reinterpret_cast<Script*>(info->script())->source() != undefined);
+ (reinterpret_cast<Script*>(info->script())->source() != undefined);
}
-template<typename StaticVisitor>
-bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
- Heap* heap, JSFunction* function) {
+template <typename StaticVisitor>
+bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap,
+ JSFunction* function) {
SharedFunctionInfo* shared_info = function->shared();
// Code is either on stack, in compilation cache or referenced
}
-template<typename StaticVisitor>
+template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
Heap* heap, SharedFunctionInfo* shared_info) {
// Code is either on stack, in compilation cache or referenced
}
-template<typename StaticVisitor>
+template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
Heap* heap, HeapObject* object) {
- Object** start_slot =
- HeapObject::RawField(object,
- SharedFunctionInfo::BodyDescriptor::kStartOffset);
- Object** end_slot =
- HeapObject::RawField(object,
- SharedFunctionInfo::BodyDescriptor::kEndOffset);
+ Object** start_slot = HeapObject::RawField(
+ object, SharedFunctionInfo::BodyDescriptor::kStartOffset);
+ Object** end_slot = HeapObject::RawField(
+ object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}
-template<typename StaticVisitor>
+template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
Heap* heap, HeapObject* object) {
Object** name_slot =
// Skip visiting kCodeOffset as it is treated weakly here.
STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
- SharedFunctionInfo::kCodeOffset);
+ SharedFunctionInfo::kCodeOffset);
STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
- SharedFunctionInfo::kOptimizedCodeMapOffset);
+ SharedFunctionInfo::kOptimizedCodeMapOffset);
Object** start_slot =
- HeapObject::RawField(object,
- SharedFunctionInfo::kOptimizedCodeMapOffset);
- Object** end_slot =
- HeapObject::RawField(object,
- SharedFunctionInfo::BodyDescriptor::kEndOffset);
+ HeapObject::RawField(object, SharedFunctionInfo::kOptimizedCodeMapOffset);
+ Object** end_slot = HeapObject::RawField(
+ object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}
-template<typename StaticVisitor>
+template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
Heap* heap, HeapObject* object) {
Object** start_slot =
VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
- JSFunction::kPrototypeOrInitialMapOffset);
+ JSFunction::kPrototypeOrInitialMapOffset);
start_slot =
HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
- end_slot =
- HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
+ end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}
-template<typename StaticVisitor>
+template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
Heap* heap, HeapObject* object) {
Object** start_slot =
// Skip visiting kCodeEntryOffset as it is treated weakly here.
STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
- JSFunction::kPrototypeOrInitialMapOffset);
+ JSFunction::kPrototypeOrInitialMapOffset);
start_slot =
HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
- end_slot =
- HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
+ end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}
}
-template<typename StaticVisitor>
+template <typename StaticVisitor>
void Code::CodeIterateBody(Heap* heap) {
int mode_mask = RelocInfo::kCodeTargetMask |
RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
heap,
reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
StaticVisitor::VisitPointer(
- heap,
- reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
+ heap, reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
StaticVisitor::VisitPointer(
heap,
reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
heap,
reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));
StaticVisitor::VisitNextCodeLink(
- heap,
- reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset));
+ heap, reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset));
StaticVisitor::VisitPointer(
- heap,
- reinterpret_cast<Object**>(this->address() + kConstantPoolOffset));
+ heap, reinterpret_cast<Object**>(this->address() + kConstantPoolOffset));
RelocIterator it(this, mode_mask);
it.rinfo()->template Visit<StaticVisitor>(heap);
}
}
-
-
-} } // namespace v8::internal
+}
+}  // namespace v8::internal
#endif // V8_OBJECTS_VISITING_INL_H_
#include "src/v8.h"
+#include "src/heap/objects-visiting.h"
#include "src/ic-inl.h"
-#include "src/objects-visiting.h"
namespace v8 {
namespace internal {
StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
- int instance_type,
- int instance_size) {
+ int instance_type, int instance_size) {
if (instance_type < FIRST_NONSTRING_TYPE) {
switch (instance_type & kStringRepresentationMask) {
case kSeqStringTag:
return kVisitSlicedString;
case kExternalStringTag:
- return GetVisitorIdForSize(kVisitDataObject,
- kVisitDataObjectGeneric,
+ return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
instance_size);
}
UNREACHABLE();
return kVisitPropertyCell;
case JS_SET_TYPE:
- return GetVisitorIdForSize(kVisitStruct,
- kVisitStructGeneric,
+ return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
JSSet::kSize);
case JS_MAP_TYPE:
- return GetVisitorIdForSize(kVisitStruct,
- kVisitStructGeneric,
+ return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
JSMap::kSize);
case JS_WEAK_MAP_TYPE:
return kVisitSharedFunctionInfo;
case JS_PROXY_TYPE:
- return GetVisitorIdForSize(kVisitStruct,
- kVisitStructGeneric,
+ return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
JSProxy::kSize);
case JS_FUNCTION_PROXY_TYPE:
- return GetVisitorIdForSize(kVisitStruct,
- kVisitStructGeneric,
+ return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
JSFunctionProxy::kSize);
case FOREIGN_TYPE:
- return GetVisitorIdForSize(kVisitDataObject,
- kVisitDataObjectGeneric,
+ return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
Foreign::kSize);
case SYMBOL_TYPE:
case JS_MESSAGE_OBJECT_TYPE:
case JS_SET_ITERATOR_TYPE:
case JS_MAP_ITERATOR_TYPE:
- return GetVisitorIdForSize(kVisitJSObject,
- kVisitJSObjectGeneric,
+ return GetVisitorIdForSize(kVisitJSObject, kVisitJSObjectGeneric,
instance_size);
case JS_FUNCTION_TYPE:
case HEAP_NUMBER_TYPE:
case MUTABLE_HEAP_NUMBER_TYPE:
-#define EXTERNAL_ARRAY_CASE(Type, type, TYPE, ctype, size) \
- case EXTERNAL_##TYPE##_ARRAY_TYPE:
+#define EXTERNAL_ARRAY_CASE(Type, type, TYPE, ctype, size) \
+ case EXTERNAL_##TYPE##_ARRAY_TYPE:
- TYPED_ARRAYS(EXTERNAL_ARRAY_CASE)
- return GetVisitorIdForSize(kVisitDataObject,
- kVisitDataObjectGeneric,
+ TYPED_ARRAYS(EXTERNAL_ARRAY_CASE)
+ return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
instance_size);
#undef EXTERNAL_ARRAY_CASE
case FIXED_FLOAT64_ARRAY_TYPE:
return kVisitFixedFloat64Array;
-#define MAKE_STRUCT_CASE(NAME, Name, name) \
- case NAME##_TYPE:
+#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE:
STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
- if (instance_type == ALLOCATION_SITE_TYPE) {
- return kVisitAllocationSite;
- }
+ if (instance_type == ALLOCATION_SITE_TYPE) {
+ return kVisitAllocationSite;
+ }
- return GetVisitorIdForSize(kVisitStruct,
- kVisitStructGeneric,
- instance_size);
+ return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
+ instance_size);
default:
UNREACHABLE();
// have to record slots manually.
static bool MustRecordSlots(Heap* heap) {
return heap->gc_state() == Heap::MARK_COMPACT &&
- heap->mark_compact_collector()->is_compacting();
+ heap->mark_compact_collector()->is_compacting();
}
template <class T>
-Object* VisitWeakList(Heap* heap,
- Object* list,
- WeakObjectRetainer* retainer) {
+Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
Object* undefined = heap->undefined_value();
Object* head = undefined;
T* tail = NULL;
WeakListVisitor<T>::SetWeakNext(tail, retained);
if (record_slots) {
Object** next_slot =
- HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset());
+ HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset());
collector->RecordSlot(next_slot, next_slot, retained);
}
}
template <class T>
-static void ClearWeakList(Heap* heap,
- Object* list) {
+static void ClearWeakList(Heap* heap, Object* list) {
Object* undefined = heap->undefined_value();
while (list != undefined) {
T* candidate = reinterpret_cast<T*>(list);
}
-template<>
+template <>
struct WeakListVisitor<JSFunction> {
static void SetWeakNext(JSFunction* function, Object* next) {
function->set_next_function_link(next);
return function->next_function_link();
}
- static int WeakNextOffset() {
- return JSFunction::kNextFunctionLinkOffset;
- }
+ static int WeakNextOffset() { return JSFunction::kNextFunctionLinkOffset; }
static void VisitLiveObject(Heap*, JSFunction*, WeakObjectRetainer*) {}
};
-template<>
+template <>
struct WeakListVisitor<Code> {
static void SetWeakNext(Code* code, Object* next) {
code->set_next_code_link(next);
}
- static Object* WeakNext(Code* code) {
- return code->next_code_link();
- }
+ static Object* WeakNext(Code* code) { return code->next_code_link(); }
- static int WeakNextOffset() {
- return Code::kNextCodeLinkOffset;
- }
+ static int WeakNextOffset() { return Code::kNextCodeLinkOffset; }
static void VisitLiveObject(Heap*, Code*, WeakObjectRetainer*) {}
};
-template<>
+template <>
struct WeakListVisitor<Context> {
static void SetWeakNext(Context* context, Object* next) {
- context->set(Context::NEXT_CONTEXT_LINK,
- next,
- UPDATE_WRITE_BARRIER);
+ context->set(Context::NEXT_CONTEXT_LINK, next, UPDATE_WRITE_BARRIER);
}
static Object* WeakNext(Context* context) {
return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
}
- static void VisitLiveObject(Heap* heap,
- Context* context,
+ static void VisitLiveObject(Heap* heap, Context* context,
WeakObjectRetainer* retainer) {
// Process the three weak lists linked off the context.
DoWeakList<JSFunction>(heap, context, retainer,
- Context::OPTIMIZED_FUNCTIONS_LIST);
+ Context::OPTIMIZED_FUNCTIONS_LIST);
DoWeakList<Code>(heap, context, retainer, Context::OPTIMIZED_CODE_LIST);
DoWeakList<Code>(heap, context, retainer, Context::DEOPTIMIZED_CODE_LIST);
}
- template<class T>
- static void DoWeakList(Heap* heap,
- Context* context,
- WeakObjectRetainer* retainer,
- int index) {
+ template <class T>
+ static void DoWeakList(Heap* heap, Context* context,
+ WeakObjectRetainer* retainer, int index) {
// Visit the weak list, removing dead intermediate elements.
Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer);
if (MustRecordSlots(heap)) {
// Record the updated slot if necessary.
- Object** head_slot = HeapObject::RawField(
- context, FixedArray::SizeFor(index));
- heap->mark_compact_collector()->RecordSlot(
- head_slot, head_slot, list_head);
+ Object** head_slot =
+ HeapObject::RawField(context, FixedArray::SizeFor(index));
+ heap->mark_compact_collector()->RecordSlot(head_slot, head_slot,
+ list_head);
}
}
static void VisitPhantomObject(Heap* heap, Context* context) {
ClearWeakList<JSFunction>(heap,
- context->get(Context::OPTIMIZED_FUNCTIONS_LIST));
+ context->get(Context::OPTIMIZED_FUNCTIONS_LIST));
ClearWeakList<Code>(heap, context->get(Context::OPTIMIZED_CODE_LIST));
ClearWeakList<Code>(heap, context->get(Context::DEOPTIMIZED_CODE_LIST));
}
};
-template<>
+template <>
struct WeakListVisitor<JSArrayBufferView> {
static void SetWeakNext(JSArrayBufferView* obj, Object* next) {
obj->set_weak_next(next);
}
- static Object* WeakNext(JSArrayBufferView* obj) {
- return obj->weak_next();
- }
+ static Object* WeakNext(JSArrayBufferView* obj) { return obj->weak_next(); }
- static int WeakNextOffset() {
- return JSArrayBufferView::kWeakNextOffset;
- }
+ static int WeakNextOffset() { return JSArrayBufferView::kWeakNextOffset; }
static void VisitLiveObject(Heap*, JSArrayBufferView*, WeakObjectRetainer*) {}
};
-template<>
+template <>
struct WeakListVisitor<JSArrayBuffer> {
static void SetWeakNext(JSArrayBuffer* obj, Object* next) {
obj->set_weak_next(next);
}
- static Object* WeakNext(JSArrayBuffer* obj) {
- return obj->weak_next();
- }
+ static Object* WeakNext(JSArrayBuffer* obj) { return obj->weak_next(); }
- static int WeakNextOffset() {
- return JSArrayBuffer::kWeakNextOffset;
- }
+ static int WeakNextOffset() { return JSArrayBuffer::kWeakNextOffset; }
- static void VisitLiveObject(Heap* heap,
- JSArrayBuffer* array_buffer,
+ static void VisitLiveObject(Heap* heap, JSArrayBuffer* array_buffer,
WeakObjectRetainer* retainer) {
- Object* typed_array_obj =
- VisitWeakList<JSArrayBufferView>(
- heap,
- array_buffer->weak_first_view(),
- retainer);
+ Object* typed_array_obj = VisitWeakList<JSArrayBufferView>(
+ heap, array_buffer->weak_first_view(), retainer);
array_buffer->set_weak_first_view(typed_array_obj);
if (typed_array_obj != heap->undefined_value() && MustRecordSlots(heap)) {
- Object** slot = HeapObject::RawField(
- array_buffer, JSArrayBuffer::kWeakFirstViewOffset);
+ Object** slot = HeapObject::RawField(array_buffer,
+ JSArrayBuffer::kWeakFirstViewOffset);
heap->mark_compact_collector()->RecordSlot(slot, slot, typed_array_obj);
}
}
};
-template<>
+template <>
struct WeakListVisitor<AllocationSite> {
static void SetWeakNext(AllocationSite* obj, Object* next) {
obj->set_weak_next(next);
}
- static Object* WeakNext(AllocationSite* obj) {
- return obj->weak_next();
- }
+ static Object* WeakNext(AllocationSite* obj) { return obj->weak_next(); }
- static int WeakNextOffset() {
- return AllocationSite::kWeakNextOffset;
- }
+ static int WeakNextOffset() { return AllocationSite::kWeakNextOffset; }
static void VisitLiveObject(Heap*, AllocationSite*, WeakObjectRetainer*) {}
};
-template Object* VisitWeakList<Code>(
- Heap* heap, Object* list, WeakObjectRetainer* retainer);
+template Object* VisitWeakList<Code>(Heap* heap, Object* list,
+ WeakObjectRetainer* retainer);
-template Object* VisitWeakList<JSFunction>(
- Heap* heap, Object* list, WeakObjectRetainer* retainer);
+template Object* VisitWeakList<JSFunction>(Heap* heap, Object* list,
+ WeakObjectRetainer* retainer);
-template Object* VisitWeakList<Context>(
- Heap* heap, Object* list, WeakObjectRetainer* retainer);
+template Object* VisitWeakList<Context>(Heap* heap, Object* list,
+ WeakObjectRetainer* retainer);
-template Object* VisitWeakList<JSArrayBuffer>(
- Heap* heap, Object* list, WeakObjectRetainer* retainer);
+template Object* VisitWeakList<JSArrayBuffer>(Heap* heap, Object* list,
+ WeakObjectRetainer* retainer);
-template Object* VisitWeakList<AllocationSite>(
- Heap* heap, Object* list, WeakObjectRetainer* retainer);
-
-} } // namespace v8::internal
+template Object* VisitWeakList<AllocationSite>(Heap* heap, Object* list,
+ WeakObjectRetainer* retainer);
+}
+} // namespace v8::internal
// Base class for all static visitors.
class StaticVisitorBase : public AllStatic {
public:
-#define VISITOR_ID_LIST(V) \
- V(SeqOneByteString) \
- V(SeqTwoByteString) \
- V(ShortcutCandidate) \
- V(ByteArray) \
- V(FreeSpace) \
- V(FixedArray) \
- V(FixedDoubleArray) \
- V(FixedTypedArray) \
- V(FixedFloat64Array) \
- V(ConstantPoolArray) \
- V(NativeContext) \
- V(AllocationSite) \
- V(DataObject2) \
- V(DataObject3) \
- V(DataObject4) \
- V(DataObject5) \
- V(DataObject6) \
- V(DataObject7) \
- V(DataObject8) \
- V(DataObject9) \
- V(DataObjectGeneric) \
- V(JSObject2) \
- V(JSObject3) \
- V(JSObject4) \
- V(JSObject5) \
- V(JSObject6) \
- V(JSObject7) \
- V(JSObject8) \
- V(JSObject9) \
- V(JSObjectGeneric) \
- V(Struct2) \
- V(Struct3) \
- V(Struct4) \
- V(Struct5) \
- V(Struct6) \
- V(Struct7) \
- V(Struct8) \
- V(Struct9) \
- V(StructGeneric) \
- V(ConsString) \
- V(SlicedString) \
- V(Symbol) \
- V(Oddball) \
- V(Code) \
- V(Map) \
- V(Cell) \
- V(PropertyCell) \
- V(SharedFunctionInfo) \
- V(JSFunction) \
- V(JSWeakCollection) \
- V(JSArrayBuffer) \
- V(JSTypedArray) \
- V(JSDataView) \
+#define VISITOR_ID_LIST(V) \
+ V(SeqOneByteString) \
+ V(SeqTwoByteString) \
+ V(ShortcutCandidate) \
+ V(ByteArray) \
+ V(FreeSpace) \
+ V(FixedArray) \
+ V(FixedDoubleArray) \
+ V(FixedTypedArray) \
+ V(FixedFloat64Array) \
+ V(ConstantPoolArray) \
+ V(NativeContext) \
+ V(AllocationSite) \
+ V(DataObject2) \
+ V(DataObject3) \
+ V(DataObject4) \
+ V(DataObject5) \
+ V(DataObject6) \
+ V(DataObject7) \
+ V(DataObject8) \
+ V(DataObject9) \
+ V(DataObjectGeneric) \
+ V(JSObject2) \
+ V(JSObject3) \
+ V(JSObject4) \
+ V(JSObject5) \
+ V(JSObject6) \
+ V(JSObject7) \
+ V(JSObject8) \
+ V(JSObject9) \
+ V(JSObjectGeneric) \
+ V(Struct2) \
+ V(Struct3) \
+ V(Struct4) \
+ V(Struct5) \
+ V(Struct6) \
+ V(Struct7) \
+ V(Struct8) \
+ V(Struct9) \
+ V(StructGeneric) \
+ V(ConsString) \
+ V(SlicedString) \
+ V(Symbol) \
+ V(Oddball) \
+ V(Code) \
+ V(Map) \
+ V(Cell) \
+ V(PropertyCell) \
+ V(SharedFunctionInfo) \
+ V(JSFunction) \
+ V(JSWeakCollection) \
+ V(JSArrayBuffer) \
+ V(JSTypedArray) \
+ V(JSDataView) \
V(JSRegExp)
// For data objects, JS objects and structs along with generic visitor which
// id of specialized visitor from given instance size, base visitor id and
// generic visitor's id.
enum VisitorId {
-#define VISITOR_ID_ENUM_DECL(id) kVisit##id,
+#define VISITOR_ID_ENUM_DECL(id) kVisit##id,
VISITOR_ID_LIST(VISITOR_ID_ENUM_DECL)
#undef VISITOR_ID_ENUM_DECL
kVisitorIdCount,
// For visitors that allow specialization by size calculate VisitorId based
// on size, base visitor id and generic visitor id.
- static VisitorId GetVisitorIdForSize(VisitorId base,
- VisitorId generic,
+ static VisitorId GetVisitorIdForSize(VisitorId base, VisitorId generic,
int object_size) {
- DCHECK((base == kVisitDataObject) ||
- (base == kVisitStruct) ||
+ DCHECK((base == kVisitDataObject) || (base == kVisitStruct) ||
(base == kVisitJSObject));
DCHECK(IsAligned(object_size, kPointerSize));
DCHECK(kMinObjectSizeInWords * kPointerSize <= object_size);
};
-template<typename Callback>
+template <typename Callback>
class VisitorDispatchTable {
public:
void CopyFrom(VisitorDispatchTable* other) {
callbacks_[id] = reinterpret_cast<base::AtomicWord>(callback);
}
- template<typename Visitor,
- StaticVisitorBase::VisitorId base,
- StaticVisitorBase::VisitorId generic,
- int object_size_in_words>
+ template <typename Visitor, StaticVisitorBase::VisitorId base,
+ StaticVisitorBase::VisitorId generic, int object_size_in_words>
void RegisterSpecialization() {
static const int size = object_size_in_words * kPointerSize;
Register(StaticVisitorBase::GetVisitorIdForSize(base, generic, size),
}
- template<typename Visitor,
- StaticVisitorBase::VisitorId base,
- StaticVisitorBase::VisitorId generic>
+ template <typename Visitor, StaticVisitorBase::VisitorId base,
+ StaticVisitorBase::VisitorId generic>
void RegisterSpecializations() {
- STATIC_ASSERT(
- (generic - base + StaticVisitorBase::kMinObjectSizeInWords) == 10);
+ STATIC_ASSERT((generic - base + StaticVisitorBase::kMinObjectSizeInWords) ==
+ 10);
RegisterSpecialization<Visitor, base, generic, 2>();
RegisterSpecialization<Visitor, base, generic, 3>();
RegisterSpecialization<Visitor, base, generic, 4>();
};
-template<typename StaticVisitor>
+template <typename StaticVisitor>
class BodyVisitorBase : public AllStatic {
public:
- INLINE(static void IteratePointers(Heap* heap,
- HeapObject* object,
- int start_offset,
- int end_offset)) {
- Object** start_slot = reinterpret_cast<Object**>(object->address() +
- start_offset);
- Object** end_slot = reinterpret_cast<Object**>(object->address() +
- end_offset);
+ INLINE(static void IteratePointers(Heap* heap, HeapObject* object,
+ int start_offset, int end_offset)) {
+ Object** start_slot =
+ reinterpret_cast<Object**>(object->address() + start_offset);
+ Object** end_slot =
+ reinterpret_cast<Object**>(object->address() + end_offset);
StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}
};
-template<typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
+template <typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
class FlexibleBodyVisitor : public BodyVisitorBase<StaticVisitor> {
public:
INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
int object_size = BodyDescriptor::SizeOf(map, object);
BodyVisitorBase<StaticVisitor>::IteratePointers(
- map->GetHeap(),
- object,
- BodyDescriptor::kStartOffset,
- object_size);
+ map->GetHeap(), object, BodyDescriptor::kStartOffset, object_size);
return static_cast<ReturnType>(object_size);
}
- template<int object_size>
+ template <int object_size>
static inline ReturnType VisitSpecialized(Map* map, HeapObject* object) {
DCHECK(BodyDescriptor::SizeOf(map, object) == object_size);
BodyVisitorBase<StaticVisitor>::IteratePointers(
- map->GetHeap(),
- object,
- BodyDescriptor::kStartOffset,
- object_size);
+ map->GetHeap(), object, BodyDescriptor::kStartOffset, object_size);
return static_cast<ReturnType>(object_size);
}
};
-template<typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
+template <typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
class FixedBodyVisitor : public BodyVisitorBase<StaticVisitor> {
public:
INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
BodyVisitorBase<StaticVisitor>::IteratePointers(
- map->GetHeap(),
- object,
- BodyDescriptor::kStartOffset,
+ map->GetHeap(), object, BodyDescriptor::kStartOffset,
BodyDescriptor::kEndOffset);
return static_cast<ReturnType>(BodyDescriptor::kSize);
}
// (see http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern).
// We use CRTP to guarantee aggressive compile time optimizations (i.e.
// inlining and specialization of StaticVisitor::VisitPointers methods).
-template<typename StaticVisitor>
+template <typename StaticVisitor>
class StaticNewSpaceVisitor : public StaticVisitorBase {
public:
static void Initialize();
// Don't visit code entry. We are using this visitor only during scavenges.
VisitPointers(
- heap,
- HeapObject::RawField(object,
- JSFunction::kCodeEntryOffset + kPointerSize),
- HeapObject::RawField(object,
- JSFunction::kNonWeakFieldsEndOffset));
+ heap, HeapObject::RawField(object,
+ JSFunction::kCodeEntryOffset + kPointerSize),
+ HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset));
return JSFunction::kSize;
}
}
INLINE(static int VisitSeqOneByteString(Map* map, HeapObject* object)) {
- return SeqOneByteString::cast(object)->
- SeqOneByteStringSize(map->instance_type());
+ return SeqOneByteString::cast(object)
+ ->SeqOneByteStringSize(map->instance_type());
}
INLINE(static int VisitSeqTwoByteString(Map* map, HeapObject* object)) {
- return SeqTwoByteString::cast(object)->
- SeqTwoByteStringSize(map->instance_type());
+ return SeqTwoByteString::cast(object)
+ ->SeqTwoByteStringSize(map->instance_type());
}
INLINE(static int VisitFreeSpace(Map* map, HeapObject* object)) {
class DataObjectVisitor {
public:
- template<int object_size>
+ template <int object_size>
static inline int VisitSpecialized(Map* map, HeapObject* object) {
return object_size;
}
}
};
- typedef FlexibleBodyVisitor<StaticVisitor,
- StructBodyDescriptor,
- int> StructVisitor;
+ typedef FlexibleBodyVisitor<StaticVisitor, StructBodyDescriptor, int>
+ StructVisitor;
- typedef FlexibleBodyVisitor<StaticVisitor,
- JSObject::BodyDescriptor,
- int> JSObjectVisitor;
+ typedef FlexibleBodyVisitor<StaticVisitor, JSObject::BodyDescriptor, int>
+ JSObjectVisitor;
typedef int (*Callback)(Map* map, HeapObject* object);
};
-template<typename StaticVisitor>
+template <typename StaticVisitor>
VisitorDispatchTable<typename StaticNewSpaceVisitor<StaticVisitor>::Callback>
StaticNewSpaceVisitor<StaticVisitor>::table_;
// }
//
// This is an example of Curiously recurring template pattern.
-template<typename StaticVisitor>
+template <typename StaticVisitor>
class StaticMarkingVisitor : public StaticVisitorBase {
public:
static void Initialize();
INLINE(static void VisitDebugTarget(Heap* heap, RelocInfo* rinfo));
INLINE(static void VisitCodeTarget(Heap* heap, RelocInfo* rinfo));
INLINE(static void VisitCodeAgeSequence(Heap* heap, RelocInfo* rinfo));
- INLINE(static void VisitExternalReference(RelocInfo* rinfo)) { }
- INLINE(static void VisitRuntimeEntry(RelocInfo* rinfo)) { }
+ INLINE(static void VisitExternalReference(RelocInfo* rinfo)) {}
+ INLINE(static void VisitRuntimeEntry(RelocInfo* rinfo)) {}
// Skip the weak next code link in a code object.
- INLINE(static void VisitNextCodeLink(Heap* heap, Object** slot)) { }
+ INLINE(static void VisitNextCodeLink(Heap* heap, Object** slot)) {}
// TODO(mstarzinger): This should be made protected once refactoring is done.
// Mark non-optimize code for functions inlined into the given optimized
class DataObjectVisitor {
public:
- template<int size>
- static inline void VisitSpecialized(Map* map, HeapObject* object) {
- }
+ template <int size>
+ static inline void VisitSpecialized(Map* map, HeapObject* object) {}
- INLINE(static void Visit(Map* map, HeapObject* object)) {
- }
+ INLINE(static void Visit(Map* map, HeapObject* object)) {}
};
- typedef FlexibleBodyVisitor<StaticVisitor,
- FixedArray::BodyDescriptor,
- void> FixedArrayVisitor;
+ typedef FlexibleBodyVisitor<StaticVisitor, FixedArray::BodyDescriptor, void>
+ FixedArrayVisitor;
- typedef FlexibleBodyVisitor<StaticVisitor,
- JSObject::BodyDescriptor,
- void> JSObjectVisitor;
+ typedef FlexibleBodyVisitor<StaticVisitor, JSObject::BodyDescriptor, void>
+ JSObjectVisitor;
- typedef FlexibleBodyVisitor<StaticVisitor,
- StructBodyDescriptor,
- void> StructObjectVisitor;
+ typedef FlexibleBodyVisitor<StaticVisitor, StructBodyDescriptor, void>
+ StructObjectVisitor;
typedef void (*Callback)(Map* map, HeapObject* object);
};
-template<typename StaticVisitor>
+template <typename StaticVisitor>
VisitorDispatchTable<typename StaticMarkingVisitor<StaticVisitor>::Callback>
StaticMarkingVisitor<StaticVisitor>::table_;
// access the next-element pointers.
template <class T>
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer);
-
-} } // namespace v8::internal
+}
+} // namespace v8::internal
#endif // V8_OBJECTS_VISITING_H_