void MacroAssembler::LoadInstanceDescriptors(Register map,
Register descriptors,
Register scratch) {
- ldr(descriptors,
- FieldMemOperand(map, Map::kInstanceDescriptorsOrBackPointerOffset));
+ Register temp = descriptors;
+ ldr(temp, FieldMemOperand(map, Map::kTransitionsOrBackPointerOffset));
Label ok, fail;
- CheckMap(descriptors,
+ CheckMap(temp,
scratch,
isolate()->factory()->fixed_array_map(),
&fail,
DONT_DO_SMI_CHECK);
+ ldr(descriptors, FieldMemOperand(temp, TransitionArray::kDescriptorsOffset));
jmp(&ok);
bind(&fail);
mov(descriptors, Operand(FACTORY->empty_descriptor_array()));
// Preload a couple of values used in the loop.
Register empty_fixed_array_value = r6;
LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
- Register empty_descriptor_array_value = r7;
- LoadRoot(empty_descriptor_array_value,
- Heap::kEmptyDescriptorArrayRootIndex);
mov(r1, r0);
bind(&next);
// check for an enum cache. Leave the map in r2 for the subsequent
// prototype load.
ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
- ldr(r3, FieldMemOperand(r2, Map::kInstanceDescriptorsOrBackPointerOffset));
+ ldr(r3, FieldMemOperand(r2, Map::kTransitionsOrBackPointerOffset));
CheckMap(r3,
r7,
call_runtime,
DONT_DO_SMI_CHECK);
+ LoadRoot(r7, Heap::kEmptyDescriptorArrayRootIndex);
+ ldr(r3, FieldMemOperand(r3, TransitionArray::kDescriptorsOffset));
+ cmp(r3, r7);
+ b(eq, call_runtime);
+
// Check that there is an enum cache in the non-empty instance
// descriptors (r3). This is the case if the next enumeration
// index field does not contain a smi.
}
PropertyAttributes attribs = static_cast<PropertyAttributes>(
DONT_ENUM | DONT_DELETE | READ_ONLY);
- map->set_instance_descriptors(*descriptors);
+ Map::SetDescriptors(map, descriptors);
{ // Add length.
CallbacksDescriptor d(*factory()->length_symbol(), *length, attribs);
}
PropertyAttributes attribs = static_cast<PropertyAttributes>(
DONT_ENUM | DONT_DELETE);
- map->set_instance_descriptors(*descriptors);
+ Map::SetDescriptors(map, descriptors);
{ // Add length.
CallbacksDescriptor d(*factory()->length_symbol(), *length, attribs);
// is 1.
array_function->shared()->set_length(1);
+ Handle<Map> initial_map(array_function->initial_map());
Handle<DescriptorArray> array_descriptors(factory->NewDescriptorArray(1));
DescriptorArray::WhitenessWitness witness(*array_descriptors);
Handle<Foreign> array_length(factory->NewForeign(&Accessors::ArrayLength));
PropertyAttributes attribs = static_cast<PropertyAttributes>(
DONT_ENUM | DONT_DELETE);
- array_function->initial_map()->set_instance_descriptors(*array_descriptors);
+ Map::SetDescriptors(initial_map, array_descriptors);
{ // Add length.
CallbacksDescriptor d(*factory->length_symbol(), *array_length, attribs);
factory->NewForeign(&Accessors::StringLength));
PropertyAttributes attribs = static_cast<PropertyAttributes>(
DONT_ENUM | DONT_DELETE | READ_ONLY);
- string_map->set_instance_descriptors(*string_descriptors);
+ Map::SetDescriptors(string_map, string_descriptors);
{ // Add length.
CallbacksDescriptor d(*factory->length_symbol(), *string_length, attribs);
static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
Handle<DescriptorArray> descriptors = factory->NewDescriptorArray(5);
DescriptorArray::WhitenessWitness witness(*descriptors);
- initial_map->set_instance_descriptors(*descriptors);
+ Map::SetDescriptors(initial_map, descriptors);
{
// ECMA-262, section 15.10.7.1.
// Create the descriptor array for the arguments object.
Handle<DescriptorArray> descriptors = factory->NewDescriptorArray(3);
DescriptorArray::WhitenessWitness witness(*descriptors);
- map->set_instance_descriptors(*descriptors);
+ Map::SetDescriptors(map, descriptors);
{ // length
FieldDescriptor d(*factory->length_symbol(), 0, DONT_ENUM);
factory()->NewForeign(&Accessors::ScriptEvalFromFunctionName));
PropertyAttributes attribs =
static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
- script_map->set_instance_descriptors(*script_descriptors);
+ Map::SetDescriptors(script_map, script_descriptors);
{
CallbacksDescriptor d(
// elements in InternalArrays can be set to non-Smi values without going
// through a common bottleneck that would make the SMI_ONLY -> FAST_ELEMENT
// transition easy to trap. Moreover, they rarely are smi-only.
- MaybeObject* maybe_map =
- array_function->initial_map()->Copy(DescriptorArray::MAY_BE_SHARED);
+ MaybeObject* maybe_map = array_function->initial_map()->Copy();
Map* new_map;
if (!maybe_map->To(&new_map)) return false;
new_map->set_elements_kind(FAST_HOLEY_ELEMENTS);
array_function->set_initial_map(new_map);
// Make "length" magic on instances.
+ Handle<Map> initial_map(array_function->initial_map());
Handle<DescriptorArray> array_descriptors(factory()->NewDescriptorArray(1));
DescriptorArray::WhitenessWitness witness(*array_descriptors);
&Accessors::ArrayLength));
PropertyAttributes attribs = static_cast<PropertyAttributes>(
DONT_ENUM | DONT_DELETE);
- array_function->initial_map()->set_instance_descriptors(*array_descriptors);
+ Map::SetDescriptors(initial_map, array_descriptors);
{ // Add length.
CallbacksDescriptor d(
Handle<DescriptorArray> reresult_descriptors =
factory()->NewDescriptorArray(3);
DescriptorArray::WhitenessWitness witness(*reresult_descriptors);
- initial_map->set_instance_descriptors(*reresult_descriptors);
+ Map::SetDescriptors(initial_map, reresult_descriptors);
{
JSFunction* array_function = global_context()->array_function();
Handle<DescriptorArray> Factory::NewDescriptorArray(int number_of_descriptors) {
ASSERT(0 <= number_of_descriptors);
CALL_HEAP_FUNCTION(isolate(),
- DescriptorArray::Allocate(number_of_descriptors,
- DescriptorArray::MAY_BE_SHARED),
+ DescriptorArray::Allocate(number_of_descriptors),
DescriptorArray);
}
Handle<Map> Factory::CopyMap(Handle<Map> src) {
- CALL_HEAP_FUNCTION(isolate(), src->Copy(DescriptorArray::MAY_BE_SHARED), Map);
+ CALL_HEAP_FUNCTION(isolate(), src->Copy(), Map);
}
// constructors.
Map* new_map;
ASSERT(object_function->has_initial_map());
- MaybeObject* maybe_map =
- object_function->initial_map()->Copy(DescriptorArray::MAY_BE_SHARED);
+ MaybeObject* maybe_map = object_function->initial_map()->Copy();
if (!maybe_map->To(&new_map)) return maybe_map;
Object* prototype;
fun->shared()->ForbidInlineConstructor();
} else {
DescriptorArray* descriptors;
- MaybeObject* maybe_descriptors =
- DescriptorArray::Allocate(count, DescriptorArray::MAY_BE_SHARED);
+ MaybeObject* maybe_descriptors = DescriptorArray::Allocate(count);
if (!maybe_descriptors->To(&descriptors)) return maybe_descriptors;
DescriptorArray::WhitenessWitness witness(descriptors);
if (HasDuplicates(descriptors)) {
fun->shared()->ForbidInlineConstructor();
} else {
- map->InitializeDescriptors(descriptors);
+ MaybeObject* maybe_failure = map->InitializeDescriptors(descriptors);
+ if (maybe_failure->IsFailure()) return maybe_failure;
map->set_pre_allocated_property_fields(count);
map->set_unused_property_fields(in_object_properties - count);
}
void MacroAssembler::LoadInstanceDescriptors(Register map,
Register descriptors) {
- mov(descriptors, FieldOperand(map,
- Map::kInstanceDescriptorsOrBackPointerOffset));
+ Register temp = descriptors;
+ mov(temp, FieldOperand(map, Map::kTransitionsOrBackPointerOffset));
Label ok, fail;
- CheckMap(descriptors,
+ CheckMap(temp,
isolate()->factory()->fixed_array_map(),
&fail,
DONT_DO_SMI_CHECK);
+ mov(descriptors, FieldOperand(temp, TransitionArray::kDescriptorsOffset));
jmp(&ok);
bind(&fail);
mov(descriptors, isolate()->factory()->empty_descriptor_array());
// check for an enum cache. Leave the map in ebx for the subsequent
// prototype load.
mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
- mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOrBackPointerOffset));
+ mov(edx, FieldOperand(ebx, Map::kTransitionsOrBackPointerOffset));
CheckMap(edx,
isolate()->factory()->fixed_array_map(),
call_runtime,
DONT_DO_SMI_CHECK);
+ mov(edx, FieldOperand(edx, TransitionArray::kDescriptorsOffset));
+ cmp(edx, isolate()->factory()->empty_descriptor_array());
+ j(equal, call_runtime);
+
// Check that there is an enum cache in the non-empty instance
// descriptors (edx). This is the case if the next enumeration
// index field does not contain a smi.
template <class T>
void Marker<T>::MarkMapContents(Map* map) {
// Make sure that the back pointer stored either in the map itself or inside
- // its prototype transitions array is marked. Treat pointers in the descriptor
- // array as weak and also mark that array to prevent visiting it later.
+ // its transitions array is marked. Treat pointers in the transitions array as
+ // weak and also mark that array to prevent visiting it later.
base_marker()->MarkObjectAndPush(HeapObject::cast(map->GetBackPointer()));
- Object** descriptor_array_slot =
- HeapObject::RawField(map, Map::kInstanceDescriptorsOrBackPointerOffset);
- Object* descriptor_array = *descriptor_array_slot;
- if (descriptor_array->IsDescriptorArray()) {
- MarkDescriptorArray(reinterpret_cast<DescriptorArray*>(descriptor_array));
+ Object** transitions_slot =
+ HeapObject::RawField(map, Map::kTransitionsOrBackPointerOffset);
+ Object* transitions = *transitions_slot;
+ if (transitions->IsTransitionArray()) {
+ MarkTransitionArray(reinterpret_cast<TransitionArray*>(transitions));
} else {
// Already marked by marking map->GetBackPointer().
- ASSERT(descriptor_array->IsMap() || descriptor_array->IsUndefined());
+ ASSERT(transitions->IsMap() || transitions->IsUndefined());
}
- // Mark the Object* fields of the Map. Since the descriptor array has been
- // marked already, it is fine that one of these fields contains a pointer
- // to it. But make sure to skip back pointer.
- STATIC_ASSERT(Map::kPointerFieldsEndOffset ==
- Map::kBitField3Offset + kPointerSize);
+ // Mark the Object* fields of the Map. Since the transitions array has been
+ // marked already, it is fine that one of these fields contains a pointer to
+ // it.
Object** start_slot =
HeapObject::RawField(map, Map::kPointerFieldsBeginOffset);
- Object** end_slot = HeapObject::RawField(map, Map::kBitField3Offset);
+ Object** end_slot = HeapObject::RawField(map, Map::kPointerFieldsEndOffset);
for (Object** slot = start_slot; slot < end_slot; slot++) {
Object* obj = *slot;
if (!obj->NonFailureIsHeapObject()) continue;
}
-template <class T>
-void Marker<T>::MarkDescriptorArray(DescriptorArray* descriptors) {
- // Empty descriptor array is marked as a root before any maps are marked.
- ASSERT(descriptors != descriptors->GetHeap()->empty_descriptor_array());
-
- if (!base_marker()->MarkObjectWithoutPush(descriptors)) return;
- Object** descriptor_start = descriptors->data_start();
-
- // Since the descriptor array itself is not pushed for scanning, all fields
- // that point to objects manually have to be pushed, marked, and their slots
- // recorded.
- if (descriptors->HasEnumCache()) {
- Object** enum_cache_slot = descriptors->GetEnumCacheSlot();
- Object* enum_cache = *enum_cache_slot;
- base_marker()->MarkObjectAndPush(
- reinterpret_cast<HeapObject*>(enum_cache));
- mark_compact_collector()->RecordSlot(descriptor_start,
- enum_cache_slot,
- enum_cache);
- }
-
- if (descriptors->HasTransitionArray()) {
- Object** transitions_slot = descriptors->GetTransitionsSlot();
- Object* transitions = *transitions_slot;
- mark_compact_collector()->RecordSlot(descriptor_start,
- transitions_slot,
- transitions);
- MarkTransitionArray(reinterpret_cast<TransitionArray*>(transitions));
- }
-
- // If the descriptor contains a transition (value is a Map), we don't mark the
- // value as live. It might be removed by ClearNonLiveTransitions later.
- for (int i = 0; i < descriptors->number_of_descriptors(); ++i) {
- Object** key_slot = descriptors->GetKeySlot(i);
- Object* key = *key_slot;
- if (key->IsHeapObject()) {
- base_marker()->MarkObjectAndPush(HeapObject::cast(key));
- mark_compact_collector()->RecordSlot(descriptor_start, key_slot, key);
- }
-
- Object** value_slot = descriptors->GetValueSlot(i);
- if (!(*value_slot)->IsHeapObject()) continue;
- HeapObject* value = HeapObject::cast(*value_slot);
-
- mark_compact_collector()->RecordSlot(descriptor_start,
- value_slot,
- value);
-
- PropertyDetails details(descriptors->GetDetails(i));
-
- switch (details.type()) {
- case NORMAL:
- case FIELD:
- case CONSTANT_FUNCTION:
- case HANDLER:
- case INTERCEPTOR:
- case CALLBACKS:
- base_marker()->MarkObjectAndPush(value);
- break;
- case TRANSITION:
- case NONEXISTENT:
- UNREACHABLE();
- break;
- }
- }
-}
-
template <class T>
void Marker<T>::MarkTransitionArray(TransitionArray* transitions) {
if (!base_marker()->MarkObjectWithoutPush(transitions)) return;
Object** transitions_start = transitions->data_start();
+ DescriptorArray* descriptors = transitions->descriptors();
+ base_marker()->MarkObjectAndPush(descriptors);
+ mark_compact_collector()->RecordSlot(
+ transitions_start, transitions->GetDescriptorsSlot(), descriptors);
+
if (transitions->HasPrototypeTransitions()) {
// Mark prototype transitions array but don't push it into marking stack.
// This will make references from it weak. We will clean dead prototype
// Mark pointers in a Map and its DescriptorArray together, possibly
// treating transitions or back pointers weak.
void MarkMapContents(Map* map);
- void MarkDescriptorArray(DescriptorArray* descriptors);
void MarkTransitionArray(TransitionArray* transitions);
private:
}
-bool DescriptorArray::MayContainTransitions() {
- return !IsEmpty();
-}
-
-
-bool DescriptorArray::HasTransitionArray() {
- return MayContainTransitions() && !get(kTransitionsIndex)->IsSmi();
-}
-
-
-Object* DescriptorArray::back_pointer_storage() {
- return READ_FIELD(this, kBackPointerStorageOffset);
-}
-
-
-void DescriptorArray::set_back_pointer_storage(Object* value,
- WriteBarrierMode mode) {
- ASSERT(length() > kBackPointerStorageIndex);
- Heap* heap = GetHeap();
- WRITE_FIELD(this, kBackPointerStorageOffset, value);
- CONDITIONAL_WRITE_BARRIER(heap, this, kBackPointerStorageOffset, value, mode);
-}
-
-
void DescriptorArray::NoIncrementalWriteBarrierSwap(FixedArray* array,
int first,
int second) {
}
-TransitionArray* DescriptorArray::transitions() {
- ASSERT(MayContainTransitions());
- Object* array = get(kTransitionsIndex);
- return TransitionArray::cast(array);
-}
-
-
-void DescriptorArray::ClearTransitions() {
- WRITE_FIELD(this, kTransitionsOffset, Smi::FromInt(0));
-}
-
-
-void DescriptorArray::set_transitions(TransitionArray* transitions_array,
- WriteBarrierMode mode) {
- Heap* heap = GetHeap();
- WRITE_FIELD(this, kTransitionsOffset, transitions_array);
- CONDITIONAL_WRITE_BARRIER(
- heap, this, kTransitionsOffset, transitions_array, mode);
-}
-
-
Object** DescriptorArray::GetKeySlot(int descriptor_number) {
ASSERT(descriptor_number < number_of_descriptors());
return HeapObject::RawField(
DescriptorArray* Map::instance_descriptors() {
- Object* object = READ_FIELD(this, kInstanceDescriptorsOrBackPointerOffset);
- if (!object->IsDescriptorArray()) {
- ASSERT(object->IsMap() || object->IsUndefined());
- return GetHeap()->empty_descriptor_array();
- } else {
- return DescriptorArray::cast(object);
- }
+ if (!HasTransitionArray()) return GetHeap()->empty_descriptor_array();
+ return transitions()->descriptors();
}
-void Map::set_instance_descriptors(DescriptorArray* value,
- WriteBarrierMode mode) {
- Heap* heap = GetHeap();
-
- if (value == heap->empty_descriptor_array()) {
- ClearDescriptorArray(heap, mode);
- return;
- }
+// If the map does not yet have a transition array, install a new empty
+// transition array that will have room for an elements transition.
+static MaybeObject* EnsureHasTransitionArray(Map* map) {
+ if (map->HasTransitionArray()) return map;
- Object* object = READ_FIELD(this, kInstanceDescriptorsOrBackPointerOffset);
+ TransitionArray* transitions;
+ MaybeObject* maybe_transitions = TransitionArray::Allocate(0);
+ if (!maybe_transitions->To(&transitions)) return maybe_transitions;
+ map->set_transitions(transitions);
+ return transitions;
+}
- if (object->IsDescriptorArray()) {
- value->set_back_pointer_storage(
- DescriptorArray::cast(object)->back_pointer_storage());
- } else {
- ASSERT(object->IsMap() || object->IsUndefined());
- value->set_back_pointer_storage(object);
- }
+MaybeObject* Map::SetDescriptors(DescriptorArray* value,
+ WriteBarrierMode mode) {
ASSERT(!is_shared());
- WRITE_FIELD(this, kInstanceDescriptorsOrBackPointerOffset, value);
- CONDITIONAL_WRITE_BARRIER(
- heap, this, kInstanceDescriptorsOrBackPointerOffset, value, mode);
+ MaybeObject* maybe_failure = EnsureHasTransitionArray(this);
+ if (maybe_failure->IsFailure()) return maybe_failure;
+
+ transitions()->set_descriptors(value, mode);
+ return this;
}
-void Map::InitializeDescriptors(DescriptorArray* descriptors) {
+MaybeObject* Map::InitializeDescriptors(DescriptorArray* descriptors) {
int len = descriptors->number_of_descriptors();
ASSERT(len <= DescriptorArray::kMaxNumberOfDescriptors);
SLOW_ASSERT(descriptors->IsSortedNoDuplicates());
}
#endif
- set_instance_descriptors(descriptors);
+ MaybeObject* maybe_failure = SetDescriptors(descriptors);
+ if (maybe_failure->IsFailure()) return maybe_failure;
for (int i = 0; i < len; ++i) {
if (descriptors->GetDetails(i).index() == len) {
SetLastAdded(i);
- break;
+ return this;
}
}
- ASSERT((len == 0 && LastAdded() == kNoneAdded) ||
- len == descriptors->GetDetails(LastAdded()).index());
+ ASSERT(len == 0 && LastAdded() == kNoneAdded);
+ return this;
}
SMI_ACCESSORS(Map, bit_field3, kBitField3Offset)
-void Map::ClearDescriptorArray(Heap* heap, WriteBarrierMode mode) {
+void Map::ClearTransitions(Heap* heap, WriteBarrierMode mode) {
Object* back_pointer = GetBackPointer();
#ifdef DEBUG
- Object* object = READ_FIELD(this, kInstanceDescriptorsOrBackPointerOffset);
- if (object->IsDescriptorArray()) {
+ Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
+ if (object->IsTransitionArray()) {
ZapTransitions();
} else {
ASSERT(object->IsMap() || object->IsUndefined());
}
#endif
- WRITE_FIELD(this, kInstanceDescriptorsOrBackPointerOffset, back_pointer);
+ WRITE_FIELD(this, kTransitionsOrBackPointerOffset, back_pointer);
CONDITIONAL_WRITE_BARRIER(
- heap, this, kInstanceDescriptorsOrBackPointerOffset, back_pointer, mode);
+ heap, this, kTransitionsOrBackPointerOffset, back_pointer, mode);
}
Object* Map::GetBackPointer() {
- Object* object = READ_FIELD(this, kInstanceDescriptorsOrBackPointerOffset);
+ Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
- if (object->IsDescriptorArray()) {
+ if (object->IsTransitionArray()) {
- return DescriptorArray::cast(object)->back_pointer_storage();
+ return TransitionArray::cast(object)->back_pointer_storage();
} else {
ASSERT(object->IsMap() || object->IsUndefined());
return object;
bool Map::HasTransitionArray() {
- return instance_descriptors()->HasTransitionArray();
+ Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
+ return object->IsTransitionArray();
}
}
-// If the map is using the empty descriptor array, install a new empty
-// descriptor array that will contain an elements transition.
-static MaybeObject* AllowTransitions(Map* map) {
- if (map->instance_descriptors()->MayContainTransitions()) return map;
- DescriptorArray* descriptors;
- MaybeObject* maybe_descriptors =
- DescriptorArray::Allocate(0, DescriptorArray::CANNOT_BE_SHARED);
- if (!maybe_descriptors->To(&descriptors)) return maybe_descriptors;
- map->set_instance_descriptors(descriptors);
- return descriptors;
-}
-
-
-// If the descriptor is using the empty transition array, install a new empty
-// transition array that will have place for an element transition.
-static MaybeObject* EnsureHasTransitionArray(Map* map) {
- if (map->HasTransitionArray()) return map;
-
- AllowTransitions(map);
-
- TransitionArray* transitions;
- MaybeObject* maybe_transitions = TransitionArray::Allocate(0);
- if (!maybe_transitions->To(&transitions)) return maybe_transitions;
- MaybeObject* added_transitions = map->set_transitions(transitions);
- if (added_transitions->IsFailure()) return added_transitions;
- return transitions;
-}
-
-
MaybeObject* Map::set_elements_transition_map(Map* transitioned_map) {
MaybeObject* allow_elements = EnsureHasTransitionArray(this);
if (allow_elements->IsFailure()) return allow_elements;
TransitionArray* Map::transitions() {
- return instance_descriptors()->transitions();
-}
-
-
-void Map::ClearTransitions(Heap* heap, WriteBarrierMode mode) {
-#ifdef DEBUG
- ZapTransitions();
-#endif
- DescriptorArray* descriptors = instance_descriptors();
- if (descriptors->number_of_descriptors() == 0) {
- ClearDescriptorArray(heap, mode);
- } else {
- descriptors->ClearTransitions();
- }
+ ASSERT(HasTransitionArray());
+ Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
+ return TransitionArray::cast(object);
}
-MaybeObject* Map::set_transitions(TransitionArray* transitions_array) {
- MaybeObject* allow_transitions = AllowTransitions(this);
- if (allow_transitions->IsFailure()) return allow_transitions;
+void Map::set_transitions(TransitionArray* transition_array,
+ WriteBarrierMode mode) {
+ transition_array->set_descriptors(instance_descriptors());
+ transition_array->set_back_pointer_storage(GetBackPointer());
#ifdef DEBUG
if (HasTransitionArray()) {
- ASSERT(transitions() != transitions_array);
+ ASSERT(transitions() != transition_array);
ZapTransitions();
}
#endif
- instance_descriptors()->set_transitions(transitions_array);
- return this;
+
+ WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array);
+ CONDITIONAL_WRITE_BARRIER(
+ GetHeap(), this, kTransitionsOrBackPointerOffset, transition_array, mode);
}
void Map::init_back_pointer(Object* undefined) {
ASSERT(undefined->IsUndefined());
- WRITE_FIELD(this, kInstanceDescriptorsOrBackPointerOffset, undefined);
+ WRITE_FIELD(this, kTransitionsOrBackPointerOffset, undefined);
}
ASSERT(instance_type() >= FIRST_JS_RECEIVER_TYPE);
ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) ||
(value->IsMap() && GetBackPointer()->IsUndefined()));
- Object* object = READ_FIELD(this, kInstanceDescriptorsOrBackPointerOffset);
- if (object->IsDescriptorArray()) {
- DescriptorArray::cast(object)->set_back_pointer_storage(value);
+ Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
+ if (object->IsTransitionArray()) {
+ TransitionArray::cast(object)->set_back_pointer_storage(value);
} else {
- WRITE_FIELD(this, kInstanceDescriptorsOrBackPointerOffset, value);
+ WRITE_FIELD(this, kTransitionsOrBackPointerOffset, value);
CONDITIONAL_WRITE_BARRIER(
- GetHeap(), this, kInstanceDescriptorsOrBackPointerOffset, value, mode);
+ GetHeap(), this, kTransitionsOrBackPointerOffset, value, mode);
}
}
// Can either be Smi (no transitions), normal transition array, or a transition
// array with the header overwritten as a Smi (thus iterating).
TransitionArray* Map::unchecked_transition_array() {
- ASSERT(HasTransitionArray());
- Object* object = *HeapObject::RawField(instance_descriptors(),
- DescriptorArray::kTransitionsOffset);
- ASSERT(!object->IsSmi());
+ Object* object = *HeapObject::RawField(this,
+ Map::kTransitionsOrBackPointerOffset);
TransitionArray* transition_array = static_cast<TransitionArray*>(object);
return transition_array;
}
// After this point the GC is not allowed to run anymore until the map is in a
// consistent state again, i.e., all the descriptors are appended and the
// descriptor array is trimmed to the right size.
- map->set_instance_descriptors(*result);
+ Map::SetDescriptors(map, result);
// Fill in new callback descriptors. Process the callbacks from
// back to front so that the last callback with a given name takes
int new_number_of_descriptors = map->NumberOfSetDescriptors();
// Reinstall the original descriptor array if no new elements were added.
if (new_number_of_descriptors == descriptor_count) {
- map->set_instance_descriptors(*array);
+ Map::SetDescriptors(map, array);
return;
}
// Do a map transition, other objects with this map may still
// be extensible.
Map* new_map;
- MaybeObject* maybe = map()->Copy(DescriptorArray::MAY_BE_SHARED);
+ MaybeObject* maybe = map()->Copy();
if (!maybe->To(&new_map)) return maybe;
new_map->set_is_extensible(false);
}
+void Map::SetDescriptors(Handle<Map> map,
+ Handle<DescriptorArray> descriptors) {
+ Isolate* isolate = map->GetIsolate();
+ CALL_HEAP_FUNCTION_VOID(isolate, map->SetDescriptors(*descriptors));
+}
+
+
int Map::NumberOfDescribedProperties(PropertyAttributes filter) {
int result = 0;
DescriptorArray* descs = instance_descriptors();
} else {
ASSERT(descriptors->GetDetails(last_added).index() ==
descriptors->number_of_descriptors());
- result->set_instance_descriptors(descriptors);
+ MaybeObject* maybe_failure = result->SetDescriptors(descriptors);
+ if (maybe_failure->IsFailure()) return maybe_failure;
result->SetLastAdded(last_added);
}
MaybeObject* maybe_transitions = AddTransition(name, result);
if (!maybe_transitions->To(&transitions)) return maybe_transitions;
- MaybeObject* maybe_set = set_transitions(transitions);
- if (maybe_set->IsFailure()) return maybe_set;
-
+ set_transitions(transitions);
result->SetBackPointer(this);
}
MaybeObject* Map::CopyAsElementsKind(ElementsKind kind, TransitionFlag flag) {
// Create a new free-floating map only if we are not allowed to store it.
Map* new_map = NULL;
- MaybeObject* maybe_new_map = Copy(DescriptorArray::MAY_BE_SHARED);
+ MaybeObject* maybe_new_map = Copy();
if (!maybe_new_map->To(&new_map)) return maybe_new_map;
new_map->set_elements_kind(kind);
// array describing these properties.
ASSERT(constructor()->IsJSFunction());
JSFunction* ctor = JSFunction::cast(constructor());
- Map* initial_map = ctor->initial_map();
- DescriptorArray* initial_descriptors = initial_map->instance_descriptors();
- DescriptorArray* descriptors;
- MaybeObject* maybe_descriptors =
- initial_descriptors->Copy(DescriptorArray::MAY_BE_SHARED);
- if (!maybe_descriptors->To(&descriptors)) return maybe_descriptors;
+ Map* map = ctor->initial_map();
+ DescriptorArray* descriptors = map->instance_descriptors();
- int last_added = initial_map->LastAdded();
+ int last_added = map->LastAdded();
return CopyReplaceDescriptors(descriptors, NULL, last_added, OMIT_TRANSITION);
}
-MaybeObject* Map::Copy(DescriptorArray::SharedMode shared_mode) {
- DescriptorArray* source_descriptors = instance_descriptors();
- DescriptorArray* descriptors;
- MaybeObject* maybe_descriptors = source_descriptors->Copy(shared_mode);
- if (!maybe_descriptors->To(&descriptors)) return maybe_descriptors;
-
+MaybeObject* Map::Copy() {
+ DescriptorArray* descriptors = instance_descriptors();
int last_added = LastAdded();
return CopyReplaceDescriptors(descriptors, NULL, last_added, OMIT_TRANSITION);
int new_size = old_size + 1;
DescriptorArray* new_descriptors;
- MaybeObject* maybe_descriptors =
- DescriptorArray::Allocate(new_size, DescriptorArray::MAY_BE_SHARED);
+ MaybeObject* maybe_descriptors = DescriptorArray::Allocate(new_size);
if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;
FixedArray::WhitenessWitness witness(new_descriptors);
ASSERT(key == descriptors->GetKey(insertion_index));
DescriptorArray* new_descriptors;
- MaybeObject* maybe_descriptors =
- DescriptorArray::Allocate(size, DescriptorArray::MAY_BE_SHARED);
+ MaybeObject* maybe_descriptors = DescriptorArray::Allocate(size);
if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;
FixedArray::WhitenessWitness witness(new_descriptors);
// If we have an unvisited child map, return that one and advance. If we have
// none, return NULL and reset any destroyed FixedArray maps.
TraversableMap* ChildIteratorNext() {
- if (HasTransitionArray()) {
- TransitionArray* transition_array = unchecked_transition_array();
-
- if (transition_array->HasPrototypeTransitions()) {
- HeapObject* proto_transitions =
- transition_array->UncheckedPrototypeTransitions();
- IntrusivePrototypeTransitionIterator proto_iterator(proto_transitions);
- if (proto_iterator.IsIterating()) {
- Map* next = proto_iterator.Next();
- if (next != NULL) return static_cast<TraversableMap*>(next);
- }
- }
+ TransitionArray* transition_array = unchecked_transition_array();
+ if (!transition_array->map()->IsSmi() &&
+ !transition_array->IsTransitionArray()) {
+ return NULL;
+ }
- IntrusiveMapTransitionIterator transition_iterator(transition_array);
- if (transition_iterator.IsIterating()) {
- Map* next = transition_iterator.Next();
+ if (transition_array->HasPrototypeTransitions()) {
+ HeapObject* proto_transitions =
+ transition_array->UncheckedPrototypeTransitions();
+ IntrusivePrototypeTransitionIterator proto_iterator(proto_transitions);
+ if (proto_iterator.IsIterating()) {
+ Map* next = proto_iterator.Next();
if (next != NULL) return static_cast<TraversableMap*>(next);
}
}
+ IntrusiveMapTransitionIterator transition_iterator(transition_array);
+ if (transition_iterator.IsIterating()) {
+ Map* next = transition_iterator.Next();
+ if (next != NULL) return static_cast<TraversableMap*>(next);
+ }
+
return NULL;
}
};
#endif
-MaybeObject* DescriptorArray::Allocate(int number_of_descriptors,
- SharedMode shared_mode) {
+MaybeObject* DescriptorArray::Allocate(int number_of_descriptors) {
Heap* heap = Isolate::Current()->heap();
// Do not use DescriptorArray::cast on incomplete object.
FixedArray* result;
- if (number_of_descriptors == 0 && shared_mode == MAY_BE_SHARED) {
- return heap->empty_descriptor_array();
- }
+ if (number_of_descriptors == 0) return heap->empty_descriptor_array();
// Allocate the array of keys.
MaybeObject* maybe_array =
heap->AllocateFixedArray(LengthFor(number_of_descriptors));
if (!maybe_array->To(&result)) return maybe_array;
result->set(kEnumCacheIndex, Smi::FromInt(0));
- result->set(kTransitionsIndex, Smi::FromInt(0));
return result;
}
}
-MaybeObject* DescriptorArray::Copy(SharedMode shared_mode) {
- // Allocate the new descriptor array.
- int number_of_descriptors = this->number_of_descriptors();
- DescriptorArray* new_descriptors;
- MaybeObject* maybe_result = Allocate(number_of_descriptors, shared_mode);
- if (!maybe_result->To(&new_descriptors)) return maybe_result;
-
- // Copy the content.
- if (number_of_descriptors > 0) {
- FixedArray::WhitenessWitness witness(new_descriptors);
- for (int i = 0; i < number_of_descriptors; i++) {
- new_descriptors->CopyFrom(i, this, i, witness);
- }
- }
-
- return new_descriptors;
-}
-
-
// We need the whiteness witness since sort will reshuffle the entries in the
// descriptor array. If the descriptor array were to be black, the shuffling
// would move a slot that was already recorded as pointing into an evacuation
// the transition array from the map.
if (transition_index == 0 &&
!t->HasElementsTransition() &&
- !t->HasPrototypeTransitions()) {
+ !t->HasPrototypeTransitions() &&
+ t->descriptors()->IsEmpty()) {
return ClearTransitions(heap);
}
// If the function has allocated the initial map
// replace it with a copy containing the new prototype.
Map* new_map;
- MaybeObject* maybe_new_map =
- initial_map()->Copy(DescriptorArray::MAY_BE_SHARED);
+ MaybeObject* maybe_new_map = initial_map()->Copy();
if (!maybe_new_map->To(&new_map)) return maybe_new_map;
new_map->set_prototype(value);
MaybeObject* maybe_object = set_initial_map_and_cache_transitions(new_map);
// Remove map transitions because they point to maps with a
// different prototype.
Map* new_map;
- MaybeObject* maybe_new_map = map()->Copy(DescriptorArray::MAY_BE_SHARED);
+ MaybeObject* maybe_new_map = map()->Copy();
if (!maybe_new_map->To(&new_map)) return maybe_new_map;
Heap* heap = new_map->GetHeap();
Map* new_map = map->GetPrototypeTransition(value);
if (new_map == NULL) {
- MaybeObject* maybe_new_map = map->Copy(DescriptorArray::MAY_BE_SHARED);
+ MaybeObject* maybe_new_map = map->Copy();
if (!maybe_new_map->To(&new_map)) return maybe_new_map;
MaybeObject* maybe_new_cache =
// Allocate the instance descriptor.
DescriptorArray* descriptors;
MaybeObject* maybe_descriptors =
- DescriptorArray::Allocate(instance_descriptor_length,
- DescriptorArray::MAY_BE_SHARED);
+ DescriptorArray::Allocate(instance_descriptor_length);
if (!maybe_descriptors->To(&descriptors)) {
return maybe_descriptors;
}
descriptors->Sort(witness);
+ MaybeObject* maybe_failure = new_map->InitializeDescriptors(descriptors);
+ if (maybe_failure->IsFailure()) return maybe_failure;
new_map->set_unused_property_fields(unused_property_fields);
- new_map->InitializeDescriptors(descriptors);
// Transform the object.
obj->set_map(new_map);
// - Context
// - JSFunctionResultCache
// - ScopeInfo
+// - TransitionArray
// - FixedDoubleArray
// - ExternalArray
// - ExternalPixelArray
// DescriptorArrays are fixed arrays used to hold instance descriptors.
// The format of the these objects is:
-// TODO(1399): It should be possible to make room for bit_field3 in the map
-// without overloading the instance descriptors field in the map
-// (and storing it in the DescriptorArray when the map has one).
-// [0]: storage for bit_field3 for Map owning this object (Smi)
-// [1]: point to a fixed array with (value, detail) pairs.
-// [2]: next enumeration index (Smi), or pointer to small fixed array:
-// [0]: next enumeration index (Smi)
-// [1]: pointer to fixed array with enum cache
-// [3]: first key
+// [0]: Either Smi(0) if uninitialized, or a pointer to small fixed array:
+// [0]: pointer to fixed array with enum cache
+// [1]: either Smi(0) or pointer to fixed array with indices
+// [1]: first key
// [length() - kDescriptorSize]: last key
-//
class DescriptorArray: public FixedArray {
public:
// Returns true for both shared empty_descriptor_array and for smis, which the
// map uses to encode additional bit fields when the descriptor array is not
// yet used.
inline bool IsEmpty();
- inline bool MayContainTransitions();
- inline bool HasTransitionArray();
-
- DECL_ACCESSORS(transitions, TransitionArray)
- inline void ClearTransitions();
// Returns the number of descriptors in the array.
int number_of_descriptors() {
- ASSERT(MayContainTransitions() || IsEmpty());
+ ASSERT(length() >= kFirstIndex || IsEmpty());
int len = length();
return len <= kFirstIndex ? 0 : (len - kFirstIndex) / kDescriptorSize;
}
kEnumCacheOffset);
}
- Object** GetTransitionsSlot() {
- return HeapObject::RawField(reinterpret_cast<HeapObject*>(this),
- kTransitionsOffset);
- }
-
- DECL_ACCESSORS(back_pointer_storage, Object)
-
// Initialize or change the enum cache,
// using the supplied storage for the small "bridge".
void SetEnumCache(FixedArray* bridge_storage,
int src_index,
const WhitenessWitness&);
- // Indicates whether the search function should expect a sorted or an unsorted
- // descriptor array as input.
- enum SharedMode {
- MAY_BE_SHARED,
- CANNOT_BE_SHARED
- };
-
- // Return a copy of the array with all transitions and null descriptors
- // removed. Return a Failure object in case of an allocation failure.
- MUST_USE_RESULT MaybeObject* Copy(SharedMode shared_mode);
-
// Sort the instance descriptors by the hash codes of their keys.
void Sort(const WhitenessWitness&);
// Allocates a DescriptorArray, but returns the singleton
// empty descriptor array object if number_of_descriptors is 0.
- MUST_USE_RESULT static MaybeObject* Allocate(int number_of_descriptors,
- SharedMode shared_mode);
+ MUST_USE_RESULT static MaybeObject* Allocate(int number_of_descriptors);
// Casting.
static inline DescriptorArray* cast(Object* obj);
// Constant for denoting key was not found.
static const int kNotFound = -1;
- static const int kBackPointerStorageIndex = 0;
- static const int kEnumCacheIndex = 1;
- static const int kTransitionsIndex = 2;
- static const int kFirstIndex = 3;
+ static const int kEnumCacheIndex = 0;
+ static const int kFirstIndex = 1;
// The length of the "bridge" to the enum cache.
static const int kEnumCacheBridgeLength = 2;
static const int kEnumCacheBridgeIndicesCacheIndex = 1;
// Layout description.
- static const int kBackPointerStorageOffset = FixedArray::kHeaderSize;
- static const int kEnumCacheOffset = kBackPointerStorageOffset +
- kPointerSize;
- static const int kTransitionsOffset = kEnumCacheOffset + kPointerSize;
- static const int kFirstOffset = kTransitionsOffset + kPointerSize;
+ static const int kEnumCacheOffset = FixedArray::kHeaderSize;
+ static const int kFirstOffset = kEnumCacheOffset + kPointerSize;
// Layout description for the bridge array.
static const int kEnumCacheBridgeCacheOffset = FixedArray::kHeaderSize;
inline Map* elements_transition_map();
MUST_USE_RESULT inline MaybeObject* set_elements_transition_map(
Map* transitioned_map);
- inline TransitionArray* transitions();
inline void SetTransition(int index, Map* target);
MUST_USE_RESULT inline MaybeObject* AddTransition(String* key, Map* target);
- MUST_USE_RESULT inline MaybeObject* set_transitions(
- TransitionArray* transitions);
+ DECL_ACCESSORS(transitions, TransitionArray)
inline void ClearTransitions(Heap* heap,
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
inline JSFunction* unchecked_constructor();
// [instance descriptors]: describes the object.
- DECL_ACCESSORS(instance_descriptors, DescriptorArray)
- inline void InitializeDescriptors(DescriptorArray* descriptors);
-
- // Should only be called to clear a descriptor array that was only used to
- // store transitions and does not contain any live transitions anymore.
- inline void ClearDescriptorArray(Heap* heap, WriteBarrierMode mode);
+ inline DescriptorArray* instance_descriptors();
+ MUST_USE_RESULT inline MaybeObject* SetDescriptors(
+ DescriptorArray* descriptors,
+ WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
+ static void SetDescriptors(Handle<Map> map,
+ Handle<DescriptorArray> descriptors);
+ MUST_USE_RESULT inline MaybeObject* InitializeDescriptors(
+ DescriptorArray* descriptors);
// [stub cache]: contains stubs compiled for this map.
DECL_ACCESSORS(code_cache, Object)
// Returns a copy of the map, with all transitions dropped from the
// instance descriptors.
- MUST_USE_RESULT MaybeObject* Copy(DescriptorArray::SharedMode shared_mode);
+ MUST_USE_RESULT MaybeObject* Copy();
// Returns the property index for name (only valid for FAST MODE).
int PropertyIndexFor(String* name);
static const int kInstanceAttributesOffset = kInstanceSizesOffset + kIntSize;
static const int kPrototypeOffset = kInstanceAttributesOffset + kIntSize;
static const int kConstructorOffset = kPrototypeOffset + kPointerSize;
- // Storage for instance descriptors is overloaded to also contain additional
- // map flags when unused (bit_field3). When the map has instance descriptors,
- // the flags are transferred to the instance descriptor array and accessed
- // through an extra indirection.
- static const int kInstanceDescriptorsOrBackPointerOffset =
+ // Storage for the transition array is overloaded to directly contain a back
+ // pointer if unused. When the map has transitions, the back pointer is
+ // transferred to the transition array and accessed through an extra
+ // indirection.
+ static const int kTransitionsOrBackPointerOffset =
kConstructorOffset + kPointerSize;
static const int kCodeCacheOffset =
- kInstanceDescriptorsOrBackPointerOffset + kPointerSize;
+ kTransitionsOrBackPointerOffset + kPointerSize;
static const int kBitField3Offset = kCodeCacheOffset + kPointerSize;
static const int kPadStart = kBitField3Offset + kPointerSize;
static const int kSize = MAP_POINTER_ALIGN(kPadStart);
Map::kConstructorOffset);
if (!map->instance_descriptors()->IsEmpty()) {
TagObject(map->instance_descriptors(), "(map descriptors)");
+    // TODO(verwaest): Descriptors are now reached through the transition
+    // array, yet this edge is reported at the transitions/back-pointer slot
+    // offset; decide whether the snapshot should model that indirection.
SetInternalReference(map, entry,
"descriptors", map->instance_descriptors(),
- Map::kInstanceDescriptorsOrBackPointerOffset);
+ Map::kTransitionsOrBackPointerOffset);
}
SetInternalReference(map, entry,
"code_cache", map->code_cache(),
if (needs_access_checks) {
// Copy map so it won't interfere constructor's initial map.
Map* new_map;
- MaybeObject* maybe_new_map = old_map->Copy(DescriptorArray::MAY_BE_SHARED);
+ MaybeObject* maybe_new_map = old_map->Copy();
if (!maybe_new_map->To(&new_map)) return maybe_new_map;
new_map->set_is_access_check_needed(false);
if (!old_map->is_access_check_needed()) {
// Copy map so it won't interfere constructor's initial map.
Map* new_map;
- MaybeObject* maybe_new_map = old_map->Copy(DescriptorArray::MAY_BE_SHARED);
+ MaybeObject* maybe_new_map = old_map->Copy();
if (!maybe_new_map->To(&new_map)) return maybe_new_map;
new_map->set_is_access_check_needed(true);
}
+DescriptorArray* TransitionArray::descriptors() {
+ return DescriptorArray::cast(get(kDescriptorsIndex));
+}
+
+
+void TransitionArray::set_descriptors(DescriptorArray* descriptors,
+ WriteBarrierMode mode) {
+ Heap* heap = GetHeap();
+ WRITE_FIELD(this, kDescriptorsOffset, descriptors);
+ CONDITIONAL_WRITE_BARRIER(
+ heap, this, kDescriptorsOffset, descriptors, mode);
+}
+
+
+Object** TransitionArray::GetDescriptorsSlot() {
+ return HeapObject::RawField(reinterpret_cast<HeapObject*>(this),
+ kDescriptorsOffset);
+}
+
+
+Object* TransitionArray::back_pointer_storage() {
+ return get(kBackPointerStorageIndex);
+}
+
+
+void TransitionArray::set_back_pointer_storage(Object* back_pointer,
+ WriteBarrierMode mode) {
+ Heap* heap = GetHeap();
+ WRITE_FIELD(this, kBackPointerStorageOffset, back_pointer);
+ CONDITIONAL_WRITE_BARRIER(
+ heap, this, kBackPointerStorageOffset, back_pointer, mode);
+}
+
+
bool TransitionArray::HasPrototypeTransitions() {
Object* prototype_transitions = get(kPrototypeTransitionsIndex);
return prototype_transitions != Smi::FromInt(0);
HeapObject* TransitionArray::UncheckedPrototypeTransitions() {
- Object* prototype_transitions = get(kPrototypeTransitionsIndex);
- if (prototype_transitions == Smi::FromInt(0)) return NULL;
- return reinterpret_cast<HeapObject*>(prototype_transitions);
+ ASSERT(HasPrototypeTransitions());
+ return reinterpret_cast<HeapObject*>(get(kPrototypeTransitionsIndex));
}
Heap* heap = Isolate::Current()->heap();
// Use FixedArray to not use DescriptorArray::cast on incomplete object.
FixedArray* array;
- { MaybeObject* maybe_array =
- heap->AllocateFixedArray(ToKeyIndex(number_of_transitions));
- if (!maybe_array->To(&array)) return maybe_array;
- }
+ MaybeObject* maybe_array =
+ heap->AllocateFixedArray(ToKeyIndex(number_of_transitions));
+ if (!maybe_array->To(&array)) return maybe_array;
array->set(kElementsTransitionIndex, Smi::FromInt(0));
array->set(kPrototypeTransitionsIndex, Smi::FromInt(0));
// TransitionArrays are fixed arrays used to hold map transitions for property,
// constant, and element changes.
// The format of the these objects is:
-// [0] Elements transition
-// [1] First transition
+// [0] Descriptor array
+// [1] Undefined or back pointer map
+// [2] Smi(0) or elements transition map
+// [3] Smi(0) or fixed array of prototype transitions
+// [4] First transition
// [length() - kTransitionSize] Last transition
class TransitionArray: public FixedArray {
public:
inline bool HasElementsTransition();
inline void ClearElementsTransition();
+ inline DescriptorArray* descriptors();
+ inline void set_descriptors(DescriptorArray* descriptors,
+ WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
+ inline Object** GetDescriptorsSlot();
+
+ inline Object* back_pointer_storage();
+ inline void set_back_pointer_storage(
+ Object* back_pointer,
+ WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
+
inline FixedArray* GetPrototypeTransitions();
inline void SetPrototypeTransitions(
FixedArray* prototype_transitions,
// Constant for denoting key was not found.
static const int kNotFound = -1;
- static const int kElementsTransitionIndex = 0;
- static const int kPrototypeTransitionsIndex = 1;
- static const int kFirstIndex = 2;
+ static const int kDescriptorsIndex = 0;
+ static const int kBackPointerStorageIndex = 1;
+ static const int kElementsTransitionIndex = 2;
+ static const int kPrototypeTransitionsIndex = 3;
+ static const int kFirstIndex = 4;
// Layout transition array header.
- static const int kElementsTransitionOffset = FixedArray::kHeaderSize;
+ static const int kDescriptorsOffset = FixedArray::kHeaderSize;
+ static const int kBackPointerStorageOffset = kDescriptorsOffset +
+ kPointerSize;
+ static const int kElementsTransitionOffset = kBackPointerStorageOffset +
+ kPointerSize;
static const int kPrototypeTransitionsOffset = kElementsTransitionOffset +
kPointerSize;
- static const int kFirstOffset = kPrototypeTransitionsOffset + kPointerSize;
// Layout of map transition.
static const int kTransitionKey = 0;
void MacroAssembler::LoadInstanceDescriptors(Register map,
Register descriptors) {
- movq(descriptors, FieldOperand(map,
- Map::kInstanceDescriptorsOrBackPointerOffset));
+ Register temp = descriptors;
+ movq(temp, FieldOperand(map, Map::kTransitionsOrBackPointerOffset));
Label ok, fail;
- CheckMap(descriptors,
+ CheckMap(temp,
isolate()->factory()->fixed_array_map(),
&fail,
DONT_DO_SMI_CHECK);
+ movq(descriptors, FieldOperand(temp, TransitionArray::kDescriptorsOffset));
jmp(&ok);
bind(&fail);
Move(descriptors, isolate()->factory()->empty_descriptor_array());
// check for an enum cache. Leave the map in rbx for the subsequent
// prototype load.
movq(rbx, FieldOperand(rcx, HeapObject::kMapOffset));
- movq(rdx, FieldOperand(rbx, Map::kInstanceDescriptorsOrBackPointerOffset));
+ movq(rdx, FieldOperand(rbx, Map::kTransitionsOrBackPointerOffset));
CheckMap(rdx,
isolate()->factory()->fixed_array_map(),
call_runtime,
DONT_DO_SMI_CHECK);
+ movq(rdx, FieldOperand(rdx, TransitionArray::kDescriptorsOffset));
+ cmpq(rdx, empty_descriptor_array_value);
+ j(equal, call_runtime);
+
// Check that there is an enum cache in the non-empty instance
// descriptors (rdx). This is the case if the next enumeration
// index field does not contain a smi.
Handle<DescriptorArray> new_descriptors = FACTORY->NewDescriptorArray(1);
v8::internal::DescriptorArray::WhitenessWitness witness(*new_descriptors);
- map->set_instance_descriptors(*new_descriptors);
+ v8::internal::Map::SetDescriptors(map, new_descriptors);
CallbacksDescriptor d(*name,
*foreign,