From: rafaelw@chromium.org
Date: Fri, 8 Nov 2013 19:33:05 +0000 (+0000)
Subject: Revert "Reland [Object.observe] Don't force normalization of elements for observed...
X-Git-Tag: upstream/4.7.83~11810
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=4bc568b77a8fd9fafd8570882a530a65826d3a64;p=platform%2Fupstream%2Fv8.git

Revert "Reland [Object.observe] Don't force normalization of elements for observed objects"

TBR=danno
BUG=

Review URL: https://codereview.chromium.org/67233002

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@17608 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
---

diff --git a/src/arm/ic-arm.cc b/src/arm/ic-arm.cc
index a3b2a6e..025a590 100644
--- a/src/arm/ic-arm.cc
+++ b/src/arm/ic-arm.cc
@@ -1432,10 +1432,10 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
   __ JumpIfSmi(receiver, &slow);
   // Get the map of the object.
   __ ldr(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
-  // Check that the receiver does not require access checks and is not observed.
-  // The generic stub does not perform map checks or handle observed objects.
+  // Check that the receiver does not require access checks.  We need
+  // to do this because this generic stub does not perform map checks.
   __ ldrb(ip, FieldMemOperand(receiver_map, Map::kBitFieldOffset));
-  __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded | 1 << Map::kIsObserved));
+  __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded));
   __ b(ne, &slow);
   // Check if the object is a JS array or not.
   __ ldrb(r4, FieldMemOperand(receiver_map, Map::kInstanceTypeOffset));
diff --git a/src/builtins.cc b/src/builtins.cc
index 4077272..758967e 100644
--- a/src/builtins.cc
+++ b/src/builtins.cc
@@ -311,7 +311,6 @@ static inline MaybeObject* EnsureJSArrayWithWritableFastElements(
     Heap* heap, Object* receiver, Arguments* args, int first_added_arg) {
   if (!receiver->IsJSArray()) return NULL;
   JSArray* array = JSArray::cast(receiver);
-  if (array->map()->is_observed()) return NULL;
   HeapObject* elms = array->elements();
   Map* map = elms->map();
   if (map == heap->fixed_array_map()) {
diff --git a/src/ia32/ic-ia32.cc b/src/ia32/ic-ia32.cc
index dab9dd7..0b7c4a8 100644
--- a/src/ia32/ic-ia32.cc
+++ b/src/ia32/ic-ia32.cc
@@ -874,10 +874,10 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
   __ JumpIfSmi(edx, &slow);
   // Get the map from the receiver.
   __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
-  // Check that the receiver does not require access checks and is not observed.
-  // The generic stub does not perform map checks or handle observed objects.
+  // Check that the receiver does not require access checks.  We need
+  // to do this because this generic stub does not perform map checks.
   __ test_b(FieldOperand(edi, Map::kBitFieldOffset),
-            1 << Map::kIsAccessCheckNeeded | 1 << Map::kIsObserved);
+            1 << Map::kIsAccessCheckNeeded);
   __ j(not_zero, &slow);
   // Check that the key is a smi.
   __ JumpIfNotSmi(ecx, &slow);
diff --git a/src/mips/ic-mips.cc b/src/mips/ic-mips.cc
index c7e1a2a..0fe044a 100644
--- a/src/mips/ic-mips.cc
+++ b/src/mips/ic-mips.cc
@@ -1354,11 +1354,10 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
   __ JumpIfSmi(receiver, &slow);
   // Get the map of the object.
   __ lw(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
-  // Check that the receiver does not require access checks and is not observed.
-  // The generic stub does not perform map checks or handle observed objects.
+  // Check that the receiver does not require access checks.  We need
+  // to do this because this generic stub does not perform map checks.
   __ lbu(t0, FieldMemOperand(receiver_map, Map::kBitFieldOffset));
-  __ And(t0, t0, Operand(1 << Map::kIsAccessCheckNeeded |
-                         1 << Map::kIsObserved));
+  __ And(t0, t0, Operand(1 << Map::kIsAccessCheckNeeded));
   __ Branch(&slow, ne, t0, Operand(zero_reg));
   // Check if the object is a JS array or not.
   __ lbu(t0, FieldMemOperand(receiver_map, Map::kInstanceTypeOffset));
diff --git a/src/objects-debug.cc b/src/objects-debug.cc
index 80610c6..6ab2ddf 100644
--- a/src/objects-debug.cc
+++ b/src/objects-debug.cc
@@ -366,6 +366,9 @@ void Map::MapVerify() {
     SLOW_ASSERT(transitions()->IsSortedNoDuplicates());
     SLOW_ASSERT(transitions()->IsConsistentWithBackPointers(this));
   }
+  ASSERT(!is_observed() || instance_type() < FIRST_JS_OBJECT_TYPE ||
+         instance_type() > LAST_JS_OBJECT_TYPE ||
+         has_slow_elements_kind() || has_external_array_elements());
 }
diff --git a/src/objects-inl.h b/src/objects-inl.h
index d28fad3..bef807e 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -3649,13 +3649,16 @@ bool Map::owns_descriptors() {
 }


-void Map::set_has_instance_call_handler() {
-  set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
+void Map::set_is_observed(bool is_observed) {
+  ASSERT(instance_type() < FIRST_JS_OBJECT_TYPE ||
+         instance_type() > LAST_JS_OBJECT_TYPE ||
+         has_slow_elements_kind() || has_external_array_elements());
+  set_bit_field3(IsObserved::update(bit_field3(), is_observed));
 }


-bool Map::has_instance_call_handler() {
-  return HasInstanceCallHandler::decode(bit_field3());
+bool Map::is_observed() {
+  return IsObserved::decode(bit_field3());
 }
diff --git a/src/objects.cc b/src/objects.cc
index d9a48a8..441c25e 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -5614,6 +5614,12 @@ void JSObject::SetObserved(Handle<JSObject> object) {

   if (object->map()->is_observed()) return;

+  if (!object->HasExternalArrayElements()) {
+    // Go to dictionary mode, so that we don't skip map checks.
+    NormalizeElements(object);
+    ASSERT(!object->HasFastElements());
+  }
+
   LookupResult result(isolate);
   object->map()->LookupTransition(*object,
                                   isolate->heap()->observed_symbol(),
@@ -5627,7 +5633,7 @@ void JSObject::SetObserved(Handle<JSObject> object) {
     new_map = Map::CopyForObserved(handle(object->map()));
   } else {
     new_map = Map::Copy(handle(object->map()));
-    new_map->set_is_observed();
+    new_map->set_is_observed(true);
   }
   object->set_map(*new_map);
 }
@@ -6965,7 +6971,7 @@ Handle<Map> Map::CopyForObserved(Handle<Map> map) {

   map->set_transitions(*transitions);

-  new_map->set_is_observed();
+  new_map->set_is_observed(true);

   if (map->owns_descriptors()) {
     new_map->InitializeDescriptors(map->instance_descriptors());
@@ -11220,6 +11226,7 @@ MaybeObject* JSObject::SetFastElementsCapacityAndLength(
   Heap* heap = GetHeap();
   // We should never end in here with a pixel or external array.
   ASSERT(!HasExternalArrayElements());
+  ASSERT(!map()->is_observed());

   // Allocate a new fast elements backing store.
   FixedArray* new_elements;
@@ -11304,6 +11311,7 @@ MaybeObject* JSObject::SetFastDoubleElementsCapacityAndLength(
   Heap* heap = GetHeap();
   // We should never end in here with a pixel or external array.
   ASSERT(!HasExternalArrayElements());
+  ASSERT(!map()->is_observed());

   FixedArrayBase* elems;
   { MaybeObject* maybe_obj =
@@ -11452,6 +11460,10 @@ MaybeObject* JSArray::SetElementsLength(Object* len) {
     if (!new_length_handle->ToArrayIndex(&new_length))
       return Failure::InternalError();

+    // Observed arrays should always be in dictionary mode;
+    // if they were in fast mode, the below is slower than necessary
+    // as it iterates over the array backing store multiple times.
+    ASSERT(self->HasDictionaryElements());
     static const PropertyAttributes kNoAttrFilter = NONE;
     int num_elements = self->NumberOfLocalElements(kNoAttrFilter);
     if (num_elements > 0) {
@@ -11462,8 +11474,6 @@ MaybeObject* JSArray::SetElementsLength(Object* len) {
       }
     } else {
       // For sparse arrays, only iterate over existing elements.
-      // TODO(rafaelw): For fast, sparse arrays, we can avoid iterating over
-      // the to-be-removed indices twice.
       Handle<FixedArray> keys = isolate->factory()->NewFixedArray(num_elements);
       self->GetLocalElementKeys(*keys, kNoAttrFilter);
       while (num_elements-- > 0) {
@@ -12862,6 +12872,7 @@ MaybeObject* JSObject::UpdateAllocationSite(ElementsKind to_kind) {


 MaybeObject* JSObject::TransitionElementsKind(ElementsKind to_kind) {
+  ASSERT(!map()->is_observed());
   ElementsKind from_kind = map()->elements_kind();

   if (IsFastHoleyElementsKind(from_kind)) {
diff --git a/src/objects.h b/src/objects.h
index c4d82bc..3105579 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -5664,7 +5664,7 @@ class Map: public HeapObject {
   class FunctionWithPrototype: public BitField {};
   class DictionaryMap: public BitField {};
   class OwnsDescriptors: public BitField {};
-  class HasInstanceCallHandler: public BitField {};
+  class IsObserved: public BitField {};
   class Deprecated: public BitField {};
   class IsFrozen: public BitField {};
   class IsUnstable: public BitField {};
@@ -5727,12 +5727,12 @@ class Map: public HeapObject {
   }

   // Tells whether the instance has a call-as-function handler.
-  inline void set_is_observed() {
-    set_bit_field(bit_field() | (1 << kIsObserved));
+  inline void set_has_instance_call_handler() {
+    set_bit_field(bit_field() | (1 << kHasInstanceCallHandler));
   }
-  inline bool is_observed() {
-    return ((1 << kIsObserved) & bit_field()) != 0;
+  inline bool has_instance_call_handler() {
+    return ((1 << kHasInstanceCallHandler) & bit_field()) != 0;
   }

   inline void set_is_extensible(bool value);
@@ -5741,6 +5741,10 @@ class Map: public HeapObject {
   inline void set_elements_kind(ElementsKind elements_kind) {
     ASSERT(elements_kind < kElementsKindCount);
     ASSERT(kElementsKindCount <= (1 << kElementsKindBitCount));
+    ASSERT(!is_observed() ||
+           elements_kind == DICTIONARY_ELEMENTS ||
+           elements_kind == NON_STRICT_ARGUMENTS_ELEMENTS ||
+           IsExternalArrayElementsKind(elements_kind));
     set_bit_field2((bit_field2() & ~kElementsKindMask) |
         (elements_kind << kElementsKindShift));
     ASSERT(this->elements_kind() == elements_kind);
@@ -5993,8 +5997,8 @@ class Map: public HeapObject {

   inline bool owns_descriptors();
   inline void set_owns_descriptors(bool is_shared);
-  inline bool has_instance_call_handler();
-  inline void set_has_instance_call_handler();
+  inline bool is_observed();
+  inline void set_is_observed(bool is_observed);
   inline void freeze();
   inline bool is_frozen();
   inline void mark_unstable();
@@ -6253,7 +6257,7 @@ class Map: public HeapObject {
   static const int kHasNamedInterceptor = 3;
   static const int kHasIndexedInterceptor = 4;
   static const int kIsUndetectable = 5;
-  static const int kIsObserved = 6;
+  static const int kHasInstanceCallHandler = 6;
   static const int kIsAccessCheckNeeded = 7;

   // Bit positions for bit field 2
diff --git a/src/x64/ic-x64.cc b/src/x64/ic-x64.cc
index fe8734c..721ae1d 100644
--- a/src/x64/ic-x64.cc
+++ b/src/x64/ic-x64.cc
@@ -749,10 +749,10 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
   __ JumpIfSmi(rdx, &slow_with_tagged_index);
   // Get the map from the receiver.
   __ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset));
-  // Check that the receiver does not require access checks and is not observed.
-  // The generic stub does not perform map checks or handle observed objects.
+  // Check that the receiver does not require access checks.  We need
+  // to do this because this generic stub does not perform map checks.
   __ testb(FieldOperand(r9, Map::kBitFieldOffset),
-           Immediate(1 << Map::kIsAccessCheckNeeded | 1 << Map::kIsObserved));
+           Immediate(1 << Map::kIsAccessCheckNeeded));
   __ j(not_zero, &slow_with_tagged_index);
   // Check that the key is a smi.
   __ JumpIfNotSmi(rcx, &slow_with_tagged_index);
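---

A note for readers skimming the patch: the invariant being restored is that an observed JSObject never has fast elements. JSObject::SetObserved normalizes any non-external-array receiver into dictionary-mode elements before marking its map, so the generic KeyedStoreIC stubs never meet an observed receiver on their fast paths and only need to test Map::kIsAccessCheckNeeded in the one-byte bit_field; the is_observed flag itself moves back into the 32-bit bit_field3 behind the IsObserved BitField, out of reach of the stubs' single-byte tst/test_b/testb. The standalone C++ sketch below is not V8 source: it re-creates the BitField update/decode pattern that the objects-inl.h hunk relies on, with invented shift/size values (the real ones are template arguments in src/objects.h).

// Standalone sketch, not V8 source: a minimal re-creation of the
// BitField<type, shift, size> helper (cf. src/utils.h) used by
// Map::set_is_observed / Map::is_observed in the objects-inl.h hunk.
#include <cstdint>
#include <iostream>

template <class T, int shift, int size>
struct BitField {
  static constexpr uint32_t kMask = ((1u << size) - 1) << shift;
  // Returns 'previous' with only this field overwritten by 'value'.
  static uint32_t update(uint32_t previous, T value) {
    return (previous & ~kMask) |
           ((static_cast<uint32_t>(value) << shift) & kMask);
  }
  // Extracts this field from an encoded word.
  static T decode(uint32_t value) {
    return static_cast<T>((value & kMask) >> shift);
  }
};

// Hypothetical bit positions, for illustration only.
typedef BitField<bool, 21, 1> OwnsDescriptors;
typedef BitField<bool, 22, 1> IsObserved;

int main() {
  uint32_t bit_field3 = 0;
  // Map::set_is_observed(true) boils down to a masked read-modify-write...
  bit_field3 = IsObserved::update(bit_field3, true);
  // ...and Map::is_observed() to a masked read.
  std::cout << std::boolalpha
            << IsObserved::decode(bit_field3) << "\n"       // true
            << OwnsDescriptors::decode(bit_field3) << "\n"  // false: untouched
            << IsObserved::decode(
                   IsObserved::update(bit_field3, false)) << "\n";  // false
  return 0;
}

Because bit_field3 is a full word rather than the single byte the IC stubs load, correctness after this revert rests on the normalization in JSObject::SetObserved (backed by the new ASSERTs in objects.cc and objects-debug.cc), not on a stub-visible map bit.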