// Determine whether double field unboxing feature is enabled.
// Unresolved diff markers removed: the intended (post-patch) state enables
// the feature on 64-bit architectures and disables it elsewhere.
#if (V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_ARM64)
#define V8_DOUBLE_FIELDS_UNBOXING 1
#else
#define V8_DOUBLE_FIELDS_UNBOXING 0
#endif
#if V8_DOUBLE_FIELDS_UNBOXING
if (!may_contain_raw_values &&
- (has_only_tagged_fields || helper.IsTagged(src_slot - src_addr)))
+ (has_only_tagged_fields ||
+ helper.IsTagged(static_cast<int>(src_slot - src_addr))))
#else
if (!may_contain_raw_values)
#endif
// Determine which specialized visitor should be used for given map.
static VisitorId GetVisitorId(Map* map) {
  // Use Map::HasFastPointerLayout() (reads the raw layout-descriptor field)
  // instead of map->layout_descriptor()->IsFastPointerLayout(); the raw-field
  // check avoids the typed accessor, which matters when this is called from
  // GC code. NOTE(review): presumed equivalent — confirm against Map
  // accessor definitions.
  return GetVisitorId(
      map->instance_type(), map->instance_size(),
      FLAG_unbox_double_fields && !map->HasFastPointerLayout());
}
// For visitors that allow specialization by size calculate VisitorId based
public:
INLINE(static void IteratePointers(Heap* heap, HeapObject* object,
                                   int start_offset, int end_offset)) {
  // Raw pointer iteration over [start_offset, end_offset) is only valid
  // when every field in the range is a tagged pointer, i.e. either double
  // field unboxing is off or the object's map has an all-pointers layout.
  DCHECK(!FLAG_unbox_double_fields || object->map()->HasFastPointerLayout());
  IterateRawPointers(heap, object, start_offset, end_offset);
}
INLINE(static void IterateBody(Heap* heap, HeapObject* object,
int start_offset, int end_offset)) {
- if (!FLAG_unbox_double_fields ||
- object->map()->layout_descriptor()->IsFastPointerLayout()) {
+ if (!FLAG_unbox_double_fields || object->map()->HasFastPointerLayout()) {
IterateRawPointers(heap, object, start_offset, end_offset);
} else {
IterateBodyUsingLayoutDescriptor(heap, object, start_offset, end_offset);
if (!has_only_tagged_fields) {
for (Address slot = start_address; slot < end_address;
slot += kPointerSize) {
- if (helper.IsTagged(slot - obj_address)) {
+ if (helper.IsTagged(static_cast<int>(slot - obj_address))) {
// TODO(ishell): call this once for contiguous region
// of tagged fields.
FindPointersToNewSpaceInRegion(slot, slot + kPointerSize,
bool LayoutDescriptor::IsFastPointerLayout() {
- return IsSmi() && (Smi::cast(this)->value() == 0);
+ return this == FastPointerLayout();
+}
+
+
+bool LayoutDescriptor::IsFastPointerLayout(Object* layout_descriptor) {
+ return layout_descriptor == FastPointerLayout();
}
// Returns true if this is a layout of the object having only tagged fields.
V8_INLINE bool IsFastPointerLayout();
+ V8_INLINE static bool IsFastPointerLayout(Object* layout_descriptor);
// Returns true if the layout descriptor is in non-Smi form.
V8_INLINE bool IsSlowLayout();
}
+bool Map::HasFastPointerLayout() const {
+ Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset);
+ return LayoutDescriptor::IsFastPointerLayout(layout_desc);
+}
+
+
void Map::UpdateDescriptors(DescriptorArray* descriptors,
LayoutDescriptor* layout_desc) {
set_instance_descriptors(descriptors);
void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
HeapObject* obj,
ObjectVisitor* v) {
- if (!FLAG_unbox_double_fields ||
- obj->map()->layout_descriptor()->IsFastPointerLayout()) {
+ if (!FLAG_unbox_double_fields || obj->map()->HasFastPointerLayout()) {
v->VisitPointers(HeapObject::RawField(obj, start_offset),
HeapObject::RawField(obj, end_offset));
} else {
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
int object_size,
ObjectVisitor* v) {
- if (!FLAG_unbox_double_fields ||
- obj->map()->layout_descriptor()->IsFastPointerLayout()) {
+ if (!FLAG_unbox_double_fields || obj->map()->HasFastPointerLayout()) {
v->VisitPointers(HeapObject::RawField(obj, start_offset),
HeapObject::RawField(obj, object_size));
} else {
DECL_ACCESSORS(layout_descriptor, LayoutDescriptor)
// |layout descriptor| accessor which can be used from GC.
inline LayoutDescriptor* layout_descriptor_gc_safe();
+ inline bool HasFastPointerLayout() const;
// |layout descriptor| accessor that is safe to call even when
// FLAG_unbox_double_fields is disabled (in this case Map does not contain
PROP_SMI,
PROP_DOUBLE,
PROP_TAGGED,
- PROP_KIND_NUMBER,
+ PROP_KIND_NUMBER
};
static Representation representations[PROP_KIND_NUMBER] = {
double boom_value = bit_cast<double>(fake_object);
FieldIndex field_index = FieldIndex::ForDescriptor(obj->map(), 0);
- obj->FastPropertyAtPut(field_index,
- *factory->NewHeapNumber(boom_value, MUTABLE));
+ Handle<HeapNumber> boom_number = factory->NewHeapNumber(boom_value, MUTABLE);
+ obj->FastPropertyAtPut(field_index, *boom_number);
// Enforce scan on scavenge for the obj's page.
MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());