return;
}
i::Handle<i::PixelArray> pixels = i::Factory::NewPixelArray(length, data);
+ self->set_map(
+ *i::Factory::GetSlowElementsMap(i::Handle<i::Map>(self->map())));
self->set_elements(*pixels);
}
}
i::Handle<i::ExternalArray> array =
i::Factory::NewExternalArray(length, array_type, data);
+ self->set_map(
+ *i::Factory::GetSlowElementsMap(i::Handle<i::Map>(self->map())));
self->set_elements(*array);
}
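Note that both of these hunks install a slow-elements map before storing the PixelArray or ExternalArray. This is forced by the strengthened assert in set_elements() (see the objects-inl.h hunk further down):

    ASSERT(map()->has_fast_elements() ==
           (value->map() == Heap::fixed_array_map()));

Pixel and external arrays never carry the fixed-array map, so the fast-elements bit has to be cleared on the receiver's map first.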
void EndBlockConstPool() {
const_pool_blocked_nesting_--;
}
+ bool is_const_pool_blocked() const { return const_pool_blocked_nesting_ > 0; }
private:
// Code buffer:
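The new is_const_pool_blocked() predicate is what MacroAssembler::Abort (further down) uses to decide whether to pad itself to a fixed length. Only the decrement side of the counter appears in this hunk; the matching increment is the pre-existing StartBlockConstPool, which simply bumps the nesting count (reproduced here for context):

    void StartBlockConstPool() {
      const_pool_blocked_nesting_++;
    }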
- // Get the elements array from the receiver and check that it
- // is not a dictionary.
+ // Get the elements array from the receiver and, if FLAG_debug_code
+ // is set, check that it is not a dictionary.
__ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset));
- __ ldr(scratch2, FieldMemOperand(scratch1, JSObject::kMapOffset));
- __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
- __ cmp(scratch2, ip);
- deferred->Branch(ne);
+ if (FLAG_debug_code) {
+ __ ldr(scratch2, FieldMemOperand(scratch1, JSObject::kMapOffset));
+ __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
+ __ cmp(scratch2, ip);
+ __ Assert(eq, "JSObject with fast elements map has slow elements");
+ }
// Check that key is within bounds. Use unsigned comparison to handle
// negative keys.
__ mov(r0, scratch1);
// Make sure that the expected number of instructions is generated.
- ASSERT_EQ(kInlinedKeyedLoadInstructionsAfterPatch,
+ ASSERT_EQ(GetInlinedKeyedLoadInstructionsAfterPatch(),
masm_->InstructionsGeneratedSince(&check_inlined_codesize));
}
static int InlineRuntimeCallArgumentsCount(Handle<String> name);
- // Constants related to patching of inlined load/store.
+ // Constants and helpers related to patching of inlined load/store.
- static const int kInlinedKeyedLoadInstructionsAfterPatch = 17;
+ static int GetInlinedKeyedLoadInstructionsAfterPatch() {
+ return FLAG_debug_code ? 27 : 13;
+ }
static const int kInlinedKeyedStoreInstructionsAfterPatch = 5;
private:
// Patch the map check.
Address ldr_map_instr_address =
inline_end_address -
- (CodeGenerator::kInlinedKeyedLoadInstructionsAfterPatch *
+ (CodeGenerator::GetInlinedKeyedLoadInstructionsAfterPatch() *
Assembler::kInstrSize);
Assembler::set_target_address_at(ldr_map_instr_address,
reinterpret_cast<Address>(map));
void MacroAssembler::Abort(const char* msg) {
+ Label abort_start;
+ bind(&abort_start);
// We want to pass the msg string like a smi to avoid GC
// problems; however, msg is not guaranteed to be aligned
// properly. Instead, we pass an aligned pointer that is
// a proper v8 smi, but also pass the alignment difference
// from the real pointer as a smi.
push(r0);
CallRuntime(Runtime::kAbort, 2);
// will not return here
+ if (is_const_pool_blocked()) {
+ // If the calling code cares about the exact number of
+ // instructions generated, we insert padding here to keep the size
+ // of the Abort macro constant.
+ static const int kExpectedAbortInstructions = 10;
+ int abort_instructions = InstructionsGeneratedSince(&abort_start);
+ ASSERT(abort_instructions <= kExpectedAbortInstructions);
+ while (abort_instructions++ < kExpectedAbortInstructions) {
+ nop();
+ }
+ }
}
}
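The padding is what makes the debug-mode instruction count predictable. As a back-of-the-envelope check against the hunks above (this arithmetic is inferred from the patch, not stated in it): the release sequence drops the four-instruction dictionary check (ldr, LoadRoot, cmp, branch), taking 17 down to 13; the debug sequence re-adds the three load/compare instructions plus an Assert, which expands to a conditional branch over a ten-instruction padded Abort, giving 13 + 3 + 1 + 10 = 27.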
// 'array' now contains the JSArray we should initialize.
+ ASSERT(array->HasFastElements());
// Optimize the case where there is one argument and the argument is a
// small smi.
return copy;
}
+
Handle<Map> Factory::CopyMapDropTransitions(Handle<Map> src) {
CALL_HEAP_FUNCTION(src->CopyDropTransitions(), Map);
}
+Handle<Map> Factory::GetFastElementsMap(Handle<Map> src) {
+ CALL_HEAP_FUNCTION(src->GetFastElementsMap(), Map);
+}
+
+
+Handle<Map> Factory::GetSlowElementsMap(Handle<Map> src) {
+ CALL_HEAP_FUNCTION(src->GetSlowElementsMap(), Map);
+}
+
+
Handle<FixedArray> Factory::CopyFixedArray(Handle<FixedArray> array) {
CALL_HEAP_FUNCTION(array->Copy(), FixedArray);
}
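Each of these Factory wrappers uses the usual CALL_HEAP_FUNCTION idiom to turn a raw, possibly-failing allocation into a handle. Roughly, and ignoring the retry logic (a simplified sketch of the expansion, not the actual macro body):

    // Approximate expansion of CALL_HEAP_FUNCTION(src->GetFastElementsMap(), Map).
    // The real macro forces a GC and retries when the first attempt
    // returns a retry-after-GC failure.
    Object* result = src->GetFastElementsMap();
    if (result->IsFailure()) { /* GC and retry, else fatal error */ }
    return Handle<Map>(Map::cast(result));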
static Handle<Map> CopyMapDropTransitions(Handle<Map> map);
+ static Handle<Map> GetFastElementsMap(Handle<Map> map);
+
+ static Handle<Map> GetSlowElementsMap(Handle<Map> map);
+
static Handle<FixedArray> CopyFixedArray(Handle<FixedArray> array);
// Numbers (e.g., literals) are pretenured by the parser.
map->set_code_cache(empty_fixed_array());
map->set_unused_property_fields(0);
map->set_bit_field(0);
- map->set_bit_field2(1 << Map::kIsExtensible);
+ map->set_bit_field2((1 << Map::kIsExtensible) | (1 << Map::kHasFastElements));
// If the map object is aligned fill the padding area with Smi 0 objects.
if (Map::kPadStart < Map::kSize) {
map->set_inobject_properties(in_object_properties);
map->set_unused_property_fields(in_object_properties);
map->set_prototype(prototype);
+ ASSERT(map->has_fast_elements());
// If the function has only simple this property assignments, add
// field descriptors for these to the initial map so that the object
// is properly initialized.
ASSERT(map->instance_type() != JS_FUNCTION_TYPE);
- // Both types of globla objects should be allocated using
- // AllocateGloblaObject to be properly initialized.
+ // Both types of global objects should be allocated using
+ // AllocateGlobalObject to be properly initialized.
ASSERT(map->instance_type() != JS_GLOBAL_OBJECT_TYPE);
ASSERT(map->instance_type() != JS_BUILTINS_OBJECT_TYPE);
InitializeJSObjectFromMap(JSObject::cast(obj),
FixedArray::cast(properties),
map);
+ ASSERT(JSObject::cast(obj)->HasFastElements());
return obj;
}
// Use masm-> here instead of the double underscore macro since extra
// coverage code can interfere with the patching.
masm_->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
- Immediate(Factory::null_value()));
+ Immediate(Factory::null_value()));
deferred->Branch(not_equal);
// Check that the key is a smi.
// is not a dictionary (a debug-only check after this change).
__ mov(elements.reg(),
FieldOperand(receiver.reg(), JSObject::kElementsOffset));
- __ cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset),
- Immediate(Factory::fixed_array_map()));
- deferred->Branch(not_equal);
+ if (FLAG_debug_code) {
+ __ cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset),
+ Immediate(Factory::fixed_array_map()));
+ __ Assert(equal, "JSObject with fast elements map has slow elements");
+ }
// Check that the key is within bounds.
__ cmp(key.reg(),
}
}
set_target(stub);
- // For JSObjects that are not value wrappers and that do not have
- // indexed interceptors, we initialize the inlined fast case (if
- // present) by patching the inlined map check.
+ // For JSObjects with fast elements that are not value wrappers
+ // and that do not have indexed interceptors, we initialize the
+ // inlined fast case (if present) by patching the inlined map
+ // check.
if (object->IsJSObject() &&
!object->IsJSValue() &&
- !JSObject::cast(*object)->HasIndexedInterceptor()) {
+ !JSObject::cast(*object)->HasIndexedInterceptor() &&
+ JSObject::cast(*object)->HasFastElements()) {
Map* map = JSObject::cast(*object)->map();
PatchInlinedLoad(address(), map);
}
(map()->inobject_properties() + properties()->length() -
map()->NextFreePropertyIndex()));
}
+ ASSERT(map()->has_fast_elements() ==
+ (elements()->map() == Heap::fixed_array_map()));
+ ASSERT(map()->has_fast_elements() == HasFastElements());
}
void JSObject::set_elements(HeapObject* value, WriteBarrierMode mode) {
+ ASSERT(map()->has_fast_elements() ==
+ (value->map() == Heap::fixed_array_map()));
// In the assert below Dictionary is covered under FixedArray.
ASSERT(value->IsFixedArray() || value->IsPixelArray() ||
value->IsExternalArray());
void JSObject::initialize_elements() {
+ ASSERT(map()->has_fast_elements());
ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array()));
WRITE_FIELD(this, kElementsOffset, Heap::empty_fixed_array());
}
+Object* JSObject::ResetElements() {
+ Object* obj = map()->GetFastElementsMap();
+ if (obj->IsFailure()) return obj;
+ set_map(Map::cast(obj));
+ initialize_elements();
+ return this;
+}
+
+
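ResetElements() can fail because GetFastElementsMap() may have to allocate a map copy, so callers must check the result. Runtime_MoveArrayContents (further down) is the typical call site:

    Object* obj = from->ResetElements();
    if (obj->IsFailure()) return obj;
    from->set_length(Smi::FromInt(0));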
ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
}
+Object* Map::GetFastElementsMap() {
+ if (has_fast_elements()) return this;
+ Object* obj = CopyDropTransitions();
+ if (obj->IsFailure()) return obj;
+ Map* new_map = Map::cast(obj);
+ new_map->set_has_fast_elements(true);
+ return new_map;
+}
+
+
+Object* Map::GetSlowElementsMap() {
+ if (!has_fast_elements()) return this;
+ Object* obj = CopyDropTransitions();
+ if (obj->IsFailure()) return obj;
+ Map* new_map = Map::cast(obj);
+ new_map->set_has_fast_elements(false);
+ return new_map;
+}
+
+
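Both getters return the receiver unchanged when the bit already has the requested value; otherwise they copy the map via CopyDropTransitions(), which allocates and can therefore fail. Every call site follows the same failure-propagation pattern, e.g. in NormalizeElements() below:

    Object* obj = map()->GetSlowElementsMap();
    if (obj->IsFailure()) return obj;
    Map* new_map = Map::cast(obj);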
ACCESSORS(Map, instance_descriptors, DescriptorArray,
kInstanceDescriptorsOffset)
ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
if (array->IsFixedArray()) {
// FAST_ELEMENTS or DICTIONARY_ELEMENTS are both stored in a FixedArray.
if (array->map() == Heap::fixed_array_map()) {
+ ASSERT(map()->has_fast_elements());
return FAST_ELEMENTS;
}
ASSERT(array->IsDictionary());
+ ASSERT(!map()->has_fast_elements());
return DICTIONARY_ELEMENTS;
}
+ ASSERT(!map()->has_fast_elements());
if (array->IsExternalArray()) {
switch (array->map()->instance_type()) {
case EXTERNAL_BYTE_ARRAY_TYPE:
Object* JSObject::NormalizeElements() {
ASSERT(!HasPixelElements() && !HasExternalArrayElements());
if (HasDictionaryElements()) return this;
+ ASSERT(map()->has_fast_elements());
+
+ Object* obj = map()->GetSlowElementsMap();
+ if (obj->IsFailure()) return obj;
+ Map* new_map = Map::cast(obj);
// Get number of entries.
FixedArray* array = FixedArray::cast(elements());
int length = IsJSArray() ?
Smi::cast(JSArray::cast(this)->length())->value() :
array->length();
- Object* obj = NumberDictionary::Allocate(length);
+ obj = NumberDictionary::Allocate(length);
if (obj->IsFailure()) return obj;
NumberDictionary* dictionary = NumberDictionary::cast(obj);
// Copy entries.
dictionary = NumberDictionary::cast(result);
}
}
- // Switch to using the dictionary as the backing storage for elements.
+ // Switch to using the dictionary as the backing storage for
+ // elements. Set the new map first to satisfy the elements type
+ // assert in set_elements().
+ set_map(new_map);
set_elements(dictionary);
Counters::elements_to_dictionary.Increment();
#endif // ENABLE_DISASSEMBLER
-void JSObject::SetFastElements(FixedArray* elems) {
+Object* JSObject::SetFastElementsCapacityAndLength(int capacity, int length) {
// We should never end in here with a pixel or external array.
ASSERT(!HasPixelElements() && !HasExternalArrayElements());
-#ifdef DEBUG
- // Check the provided array is filled with the_hole.
- uint32_t len = static_cast<uint32_t>(elems->length());
- for (uint32_t i = 0; i < len; i++) ASSERT(elems->get(i)->IsTheHole());
-#endif
+
+ Object* obj = Heap::AllocateFixedArrayWithHoles(capacity);
+ if (obj->IsFailure()) return obj;
+ FixedArray* elems = FixedArray::cast(obj);
+
+ obj = map()->GetFastElementsMap();
+ if (obj->IsFailure()) return obj;
+ Map* new_map = Map::cast(obj);
+
AssertNoAllocation no_gc;
WriteBarrierMode mode = elems->GetWriteBarrierMode(no_gc);
switch (GetElementsKind()) {
UNREACHABLE();
break;
}
+
+ set_map(new_map);
set_elements(elems);
+
+ if (IsJSArray()) {
+ JSArray::cast(this)->set_length(Smi::FromInt(length));
+ }
+
+ return this;
}
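SetFastElementsCapacityAndLength() folds the previously separate caller-side steps (allocating the holey backing store, installing it, and updating a JSArray's length) together with the new map transition into a single failure-checked call. The length-setting hunk below shows the shape of a typical call site:

    Object* obj = SetFastElementsCapacityAndLength(new_capacity, value);
    if (obj->IsFailure()) return obj;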
Object* smi_length = len->ToSmi();
if (smi_length->IsSmi()) {
- int value = Smi::cast(smi_length)->value();
+ const int value = Smi::cast(smi_length)->value();
if (value < 0) return ArrayLengthRangeError();
switch (GetElementsKind()) {
case FAST_ELEMENTS: {
int new_capacity = value > min ? value : min;
if (new_capacity <= kMaxFastElementsLength ||
!ShouldConvertToSlowElements(new_capacity)) {
- Object* obj = Heap::AllocateFixedArrayWithHoles(new_capacity);
+ Object* obj = SetFastElementsCapacityAndLength(new_capacity, value);
if (obj->IsFailure()) return obj;
- if (IsJSArray()) {
- JSArray::cast(this)->set_length(Smi::cast(smi_length));
- }
- SetFastElements(FixedArray::cast(obj));
return this;
}
break;
// If the length of a slow array is reset to zero, we clear
// the array and flush backing storage. This has the added
// benefit that the array returns to fast mode.
- initialize_elements();
+ Object* obj = ResetElements();
+ if (obj->IsFailure()) return obj;
} else {
// Remove deleted elements.
uint32_t old_length =
if (new_capacity <= kMaxFastElementsLength ||
!ShouldConvertToSlowElements(new_capacity)) {
ASSERT(static_cast<uint32_t>(new_capacity) > index);
- Object* obj = Heap::AllocateFixedArrayWithHoles(new_capacity);
+ Object* obj = SetFastElementsCapacityAndLength(new_capacity, index + 1);
if (obj->IsFailure()) return obj;
- SetFastElements(FixedArray::cast(obj));
- if (IsJSArray()) {
- JSArray::cast(this)->set_length(Smi::FromInt(index + 1));
- }
FixedArray::cast(elements())->set(index, value);
return value;
}
uint32_t new_length = 0;
if (IsJSArray()) {
CHECK(JSArray::cast(this)->length()->ToArrayIndex(&new_length));
- JSArray::cast(this)->set_length(Smi::FromInt(new_length));
} else {
new_length = NumberDictionary::cast(elements())->max_number_key() + 1;
}
- Object* obj = Heap::AllocateFixedArrayWithHoles(new_length);
+ Object* obj = SetFastElementsCapacityAndLength(new_length, new_length);
if (obj->IsFailure()) return obj;
- SetFastElements(FixedArray::cast(obj));
#ifdef DEBUG
if (FLAG_trace_normalization) {
PrintF("Object elements are fast case again:\n");
}
// Convert to fast elements.
+ Object* obj = map()->GetFastElementsMap();
+ if (obj->IsFailure()) return obj;
+ Map* new_map = Map::cast(obj);
+
PretenureFlag tenure = Heap::InNewSpace(this) ? NOT_TENURED: TENURED;
Object* new_array =
Heap::AllocateFixedArray(dict->NumberOfElements(), tenure);
- if (new_array->IsFailure()) {
- return new_array;
- }
+ if (new_array->IsFailure()) return new_array;
FixedArray* fast_elements = FixedArray::cast(new_array);
dict->CopyValuesTo(fast_elements);
+
+ set_map(new_map);
set_elements(fast_elements);
}
ASSERT(HasFastElements());
// case, and a PixelArray or ExternalArray in special cases.
DECL_ACCESSORS(elements, HeapObject)
inline void initialize_elements();
+ inline Object* ResetElements();
inline ElementsKind GetElementsKind();
inline bool HasFastElements();
inline bool HasDictionaryElements();
// The undefined object if index is out of bounds.
Object* GetElementWithReceiver(JSObject* receiver, uint32_t index);
- void SetFastElements(FixedArray* elements);
+ Object* SetFastElementsCapacityAndLength(int capacity, int length);
Object* SetSlowElements(Object* length);
// Lookup interceptors are used for handling properties controlled by host
return ((1 << kIsExtensible) & bit_field2()) != 0;
}
+ // Tells whether the instance has fast elements.
+ void set_has_fast_elements(bool value) {
+ if (value) {
+ set_bit_field2(bit_field2() | (1 << kHasFastElements));
+ } else {
+ set_bit_field2(bit_field2() & ~(1 << kHasFastElements));
+ }
+ }
+
+ bool has_fast_elements() {
+ return ((1 << kHasFastElements) & bit_field2()) != 0;
+ }
+
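With kHasFastElements = 2 (see the bit-position constants below), these accessors are plain bit operations on bit_field2. A worked example for a map that starts out extensible and nothing else:

    // bit_field2 == 0b001 (only kIsExtensible set)
    // set_has_fast_elements(true):  0b001 | (1 << 2)  -> 0b101
    // set_has_fast_elements(false): 0b101 & ~(1 << 2) -> 0b001
    // has_fast_elements():          ((1 << 2) & 0b101) != 0  -> true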
// Tells whether the instance needs security checks when accessing its
// properties.
inline void set_is_access_check_needed(bool access_check_needed);
// instance descriptors.
Object* CopyDropTransitions();
+ // Returns this map if it has the fast elements bit set, otherwise
+ // returns a copy of the map, with all transitions dropped from the
+ // descriptors and the fast elements bit set.
+ inline Object* GetFastElementsMap();
+
+ // Returns this map if it has the fast elements bit cleared,
+ // otherwise returns a copy of the map, with all transitions dropped
+ // from the descriptors and the fast elements bit cleared.
+ inline Object* GetSlowElementsMap();
+
// Returns the property index for name (only valid for FAST MODE).
int PropertyIndexFor(String* name);
// Bit positions for bit field 2
static const int kIsExtensible = 0;
static const int kFunctionWithPrototype = 1;
+ static const int kHasFastElements = 2;
// Layout of the default cache. It holds alternating name and code objects.
static const int kCodeCacheEntrySize = 2;
uint32_t index_limit_;
// Index after last seen index. Always less than or equal to index_limit_.
uint32_t index_offset_;
- bool fast_elements_;
+ const bool fast_elements_;
};
// The backing storage array must have non-existing elements to
// preserve holes across concat operations.
storage = Factory::NewFixedArrayWithHoles(result_length);
-
+ result->set_map(*Factory::GetFastElementsMap(Handle<Map>(result->map())));
} else {
// TODO(126): move 25% pre-allocation logic into Dictionary::Allocate
uint32_t at_least_space_for = estimate_nof_elements +
(estimate_nof_elements >> 2);
storage = Handle<FixedArray>::cast(
Factory::NewNumberDictionary(at_least_space_for));
+ result->set_map(*Factory::GetSlowElementsMap(Handle<Map>(result->map())));
}
Handle<Object> len = Factory::NewNumber(static_cast<double>(result_length));
ASSERT(args.length() == 2);
CONVERT_CHECKED(JSArray, from, args[0]);
CONVERT_CHECKED(JSArray, to, args[1]);
- to->SetContent(FixedArray::cast(from->elements()));
+ HeapObject* new_elements = from->elements();
+ Object* new_map;
+ if (new_elements->map() == Heap::fixed_array_map()) {
+ new_map = to->map()->GetFastElementsMap();
+ } else {
+ new_map = to->map()->GetSlowElementsMap();
+ }
+ if (new_map->IsFailure()) return new_map;
+ to->set_map(Map::cast(new_map));
+ to->set_elements(new_elements);
to->set_length(from->length());
- from->SetContent(Heap::empty_fixed_array());
+ Object* obj = from->ResetElements();
+ if (obj->IsFailure()) return obj;
from->set_length(Smi::FromInt(0));
return to;
}
// is not a dictionary (a debug-only check after this change).
__ movq(elements.reg(),
FieldOperand(receiver.reg(), JSObject::kElementsOffset));
- __ Cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset),
- Factory::fixed_array_map());
- deferred->Branch(not_equal);
+ if (FLAG_debug_code) {
+ __ Cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset),
+ Factory::fixed_array_map());
+ __ Assert(equal, "JSObject with fast elements map has slow elements");
+ }
// Check that key is within bounds.
__ SmiCompare(key.reg(),