__ JumpIfSmi(receiver, &slow);
// Get the map of the object.
__ ldr(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
- // Check that the receiver does not require access checks and is not observed.
- // The generic stub does not perform map checks or handle observed objects.
+ // Check that the receiver does not require access checks. We need
+ // to do this because this generic stub does not perform map checks.
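+ // Observed receivers never use fast elements (their elements are
+ // normalized when observation starts), so they cannot reach the fast
+ // paths below and an explicit Map::kIsObserved test is not needed.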
__ ldrb(ip, FieldMemOperand(receiver_map, Map::kBitFieldOffset));
- __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded | 1 << Map::kIsObserved));
+ __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded));
__ b(ne, &slow);
// Check if the object is a JS array or not.
__ ldrb(r4, FieldMemOperand(receiver_map, Map::kInstanceTypeOffset));
Heap* heap, Object* receiver, Arguments* args, int first_added_arg) {
if (!receiver->IsJSArray()) return NULL;
JSArray* array = JSArray::cast(receiver);
- if (array->map()->is_observed()) return NULL;
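+ // Observed arrays are normalized to dictionary elements when observation
+ // starts, so the elements map checks below already reject them; no
+ // separate is_observed() check is needed here.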
HeapObject* elms = array->elements();
Map* map = elms->map();
if (map == heap->fixed_array_map()) {
__ JumpIfSmi(edx, &slow);
// Get the map from the receiver.
__ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
- // Check that the receiver does not require access checks and is not observed.
- // The generic stub does not perform map checks or handle observed objects.
+ // Check that the receiver does not require access checks. We need
+ // to do this because this generic stub does not perform map checks.
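+ // Observed receivers never use fast elements (their elements are
+ // normalized when observation starts), so they cannot reach the fast
+ // paths below and an explicit Map::kIsObserved test is not needed.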
__ test_b(FieldOperand(edi, Map::kBitFieldOffset),
- 1 << Map::kIsAccessCheckNeeded | 1 << Map::kIsObserved);
+ 1 << Map::kIsAccessCheckNeeded);
__ j(not_zero, &slow);
// Check that the key is a smi.
__ JumpIfNotSmi(ecx, &slow);
__ JumpIfSmi(receiver, &slow);
// Get the map of the object.
__ lw(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
- // Check that the receiver does not require access checks and is not observed.
- // The generic stub does not perform map checks or handle observed objects.
+ // Check that the receiver does not require access checks. We need
+ // to do this because this generic stub does not perform map checks.
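+ // Observed receivers never use fast elements (their elements are
+ // normalized when observation starts), so they cannot reach the fast
+ // paths below and an explicit Map::kIsObserved test is not needed.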
__ lbu(t0, FieldMemOperand(receiver_map, Map::kBitFieldOffset));
- __ And(t0, t0, Operand(1 << Map::kIsAccessCheckNeeded |
- 1 << Map::kIsObserved));
+ __ And(t0, t0, Operand(1 << Map::kIsAccessCheckNeeded));
__ Branch(&slow, ne, t0, Operand(zero_reg));
// Check if the object is a JS array or not.
__ lbu(t0, FieldMemOperand(receiver_map, Map::kInstanceTypeOffset));
SLOW_ASSERT(transitions()->IsSortedNoDuplicates());
SLOW_ASSERT(transitions()->IsConsistentWithBackPointers(this));
}
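+ // Observed maps on JSObjects must never use fast elements; this is the
+ // invariant that lets the generic keyed store stubs omit the observed
+ // check.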
+ ASSERT(!is_observed() || instance_type() < FIRST_JS_OBJECT_TYPE ||
+ instance_type() > LAST_JS_OBJECT_TYPE ||
+ has_slow_elements_kind() || has_external_array_elements());
}
}
-void Map::set_has_instance_call_handler() {
- set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
+void Map::set_is_observed(bool is_observed) {
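+ // JSObject maps must already use slow or external array elements before
+ // they can be marked as observed; maps for instance types outside the
+ // JSObject range (presumably proxies and the like) have no element
+ // stores to guard.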
+ ASSERT(instance_type() < FIRST_JS_OBJECT_TYPE ||
+ instance_type() > LAST_JS_OBJECT_TYPE ||
+ has_slow_elements_kind() || has_external_array_elements());
+ set_bit_field3(IsObserved::update(bit_field3(), is_observed));
}
-bool Map::has_instance_call_handler() {
- return HasInstanceCallHandler::decode(bit_field3());
+bool Map::is_observed() {
+ return IsObserved::decode(bit_field3());
}
if (object->map()->is_observed())
return;
+ if (!object->HasExternalArrayElements()) {
+ // Go to dictionary mode, so that element stores never take code paths
+ // that skip map checks.
+ NormalizeElements(object);
+ ASSERT(!object->HasFastElements());
+ }
+
LookupResult result(isolate);
object->map()->LookupTransition(*object,
isolate->heap()->observed_symbol(),
new_map = Map::CopyForObserved(handle(object->map()));
} else {
new_map = Map::Copy(handle(object->map()));
- new_map->set_is_observed();
+ new_map->set_is_observed(true);
}
object->set_map(*new_map);
}
map->set_transitions(*transitions);
- new_map->set_is_observed();
+ new_map->set_is_observed(true);
if (map->owns_descriptors()) {
new_map->InitializeDescriptors(map->instance_descriptors());
Heap* heap = GetHeap();
// We should never end in here with a pixel or external array.
ASSERT(!HasExternalArrayElements());
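+ // Observed objects keep dictionary (or external array) elements, so they
+ // must never be given a fast elements backing store here.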
+ ASSERT(!map()->is_observed());
// Allocate a new fast elements backing store.
FixedArray* new_elements;
Heap* heap = GetHeap();
// We should never end in here with a pixel or external array.
ASSERT(!HasExternalArrayElements());
+ ASSERT(!map()->is_observed());
FixedArrayBase* elems;
{ MaybeObject* maybe_obj =
if (!new_length_handle->ToArrayIndex(&new_length))
return Failure::InternalError();
+ // Observed arrays should always be in dictionary mode; if they were in
+ // fast mode, the code below would be slower than necessary, as it
+ // iterates over the array backing store multiple times.
+ ASSERT(self->HasDictionaryElements());
static const PropertyAttributes kNoAttrFilter = NONE;
int num_elements = self->NumberOfLocalElements(kNoAttrFilter);
if (num_elements > 0) {
}
} else {
// For sparse arrays, only iterate over existing elements.
- // TODO(rafaelw): For fast, sparse arrays, we can avoid iterating over
- // the to-be-removed indices twice.
Handle<FixedArray> keys = isolate->factory()->NewFixedArray(num_elements);
self->GetLocalElementKeys(*keys, kNoAttrFilter);
while (num_elements-- > 0) {
MaybeObject* JSObject::TransitionElementsKind(ElementsKind to_kind) {
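+ // Observed objects never transition their elements kind; they are
+ // normalized to dictionary elements when observation starts.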
+ ASSERT(!map()->is_observed());
ElementsKind from_kind = map()->elements_kind();
if (IsFastHoleyElementsKind(from_kind)) {
class FunctionWithPrototype: public BitField<bool, 23, 1> {};
class DictionaryMap: public BitField<bool, 24, 1> {};
class OwnsDescriptors: public BitField<bool, 25, 1> {};
- class HasInstanceCallHandler: public BitField<bool, 26, 1> {};
+ class IsObserved: public BitField<bool, 26, 1> {};
class Deprecated: public BitField<bool, 27, 1> {};
class IsFrozen: public BitField<bool, 28, 1> {};
class IsUnstable: public BitField<bool, 29, 1> {};
}
// Tells whether the instance has a call-as-function handler.
- inline void set_is_observed() {
- set_bit_field(bit_field() | (1 << kIsObserved));
+ inline void set_has_instance_call_handler() {
+ set_bit_field(bit_field() | (1 << kHasInstanceCallHandler));
}
- inline bool is_observed() {
- return ((1 << kIsObserved) & bit_field()) != 0;
+ inline bool has_instance_call_handler() {
+ return ((1 << kHasInstanceCallHandler) & bit_field()) != 0;
}
inline void set_is_extensible(bool value);
inline void set_elements_kind(ElementsKind elements_kind) {
ASSERT(elements_kind < kElementsKindCount);
ASSERT(kElementsKindCount <= (1 << kElementsKindBitCount));
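+ // Observed maps must stay in dictionary, arguments or external array
+ // elements so that the generic keyed store stubs can omit the observed
+ // check.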
+ ASSERT(!is_observed() ||
+ elements_kind == DICTIONARY_ELEMENTS ||
+ elements_kind == NON_STRICT_ARGUMENTS_ELEMENTS ||
+ IsExternalArrayElementsKind(elements_kind));
set_bit_field2((bit_field2() & ~kElementsKindMask) |
(elements_kind << kElementsKindShift));
ASSERT(this->elements_kind() == elements_kind);
inline bool owns_descriptors();
inline void set_owns_descriptors(bool is_shared);
- inline bool has_instance_call_handler();
- inline void set_has_instance_call_handler();
+ inline bool is_observed();
+ inline void set_is_observed(bool is_observed);
inline void freeze();
inline bool is_frozen();
inline void mark_unstable();
static const int kHasNamedInterceptor = 3;
static const int kHasIndexedInterceptor = 4;
static const int kIsUndetectable = 5;
- static const int kIsObserved = 6;
+ static const int kHasInstanceCallHandler = 6;
static const int kIsAccessCheckNeeded = 7;
// Bit positions for bit field 2
__ JumpIfSmi(rdx, &slow_with_tagged_index);
// Get the map from the receiver.
__ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset));
- // Check that the receiver does not require access checks and is not observed.
- // The generic stub does not perform map checks or handle observed objects.
+ // Check that the receiver does not require access checks. We need
+ // to do this because this generic stub does not perform map checks.
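+ // Observed receivers never use fast elements (their elements are
+ // normalized when observation starts), so they cannot reach the fast
+ // paths below and an explicit Map::kIsObserved test is not needed.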
__ testb(FieldOperand(r9, Map::kBitFieldOffset),
- Immediate(1 << Map::kIsAccessCheckNeeded | 1 << Map::kIsObserved));
+ Immediate(1 << Map::kIsAccessCheckNeeded));
__ j(not_zero, &slow_with_tagged_index);
// Check that the key is a smi.
__ JumpIfNotSmi(rcx, &slow_with_tagged_index);