From ca2def98a58b242ac3f6139ced839a2f5d5874ab Mon Sep 17 00:00:00 2001
From: "ishell@chromium.org" <ishell@chromium.org>
Date: Fri, 2 May 2014 10:27:12 +0000
Subject: [PATCH] Map::Normalize() introduced as single entry point for map normalization and Map::NotifyLeafMapLayoutChange() made private.

R=verwaest@chromium.org

Review URL: https://codereview.chromium.org/263663002

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@21117 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
---
 src/bootstrapper.cc  |   5 +--
 src/objects-debug.cc |   2 +-
 src/objects-inl.h    |  16 ++++++--
 src/objects.cc       | 111 ++++++++++++++++++++++++++++++---------------------
 src/objects.h        |  37 +++++++++++------
 5 files changed, 105 insertions(+), 66 deletions(-)

diff --git a/src/bootstrapper.cc b/src/bootstrapper.cc
index b644a37..d61ad57 100644
--- a/src/bootstrapper.cc
+++ b/src/bootstrapper.cc
@@ -2108,9 +2108,8 @@ void Genesis::InstallJSFunctionResultCaches() {
 
 
 void Genesis::InitializeNormalizedMapCaches() {
-  Handle<FixedArray> array(
-      factory()->NewFixedArray(NormalizedMapCache::kEntries, TENURED));
-  native_context()->set_normalized_map_cache(NormalizedMapCache::cast(*array));
+  native_context()->set_normalized_map_cache(
+      *NormalizedMapCache::New(isolate()));
 }
 
 
diff --git a/src/objects-debug.cc b/src/objects-debug.cc
index 27c936e..df63924 100644
--- a/src/objects-debug.cc
+++ b/src/objects-debug.cc
@@ -1015,7 +1015,7 @@ void NormalizedMapCache::NormalizedMapCacheVerify() {
   FixedArray::cast(this)->FixedArrayVerify();
   if (FLAG_enable_slow_asserts) {
     for (int i = 0; i < length(); i++) {
-      Object* e = get(i);
+      Object* e = FixedArray::get(i);
       if (e->IsMap()) {
         Map::cast(e)->SharedMapVerify();
       } else {
diff --git a/src/objects-inl.h b/src/objects-inl.h
index 520a110..33b1bc9 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -850,13 +850,23 @@ bool Object::IsJSFunctionResultCache() {
 
 
 bool Object::IsNormalizedMapCache() {
-  if (!IsFixedArray()) return false;
-  if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
+  return NormalizedMapCache::IsNormalizedMapCache(this);
+}
+
+
+int NormalizedMapCache::GetIndex(Handle<Map> map) {
+  return map->Hash() % NormalizedMapCache::kEntries;
+}
+
+
+bool NormalizedMapCache::IsNormalizedMapCache(Object* obj) {
+  if (!obj->IsFixedArray()) return false;
+  if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
     return false;
   }
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
-    reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
+    reinterpret_cast<NormalizedMapCache*>(obj)->NormalizedMapCacheVerify();
   }
 #endif
   return true;
diff --git a/src/objects.cc b/src/objects.cc
index 8651ee6..6f30640 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -4659,48 +4659,30 @@ PropertyAttributes JSObject::GetElementAttributeWithoutInterceptor(
 }
 
 
-Handle<Map> NormalizedMapCache::Get(Handle<NormalizedMapCache> cache,
-                                    Handle<Map> fast_map,
-                                    PropertyNormalizationMode mode) {
-  int index = fast_map->Hash() % kEntries;
-  Handle<Object> result = handle(cache->get(index), cache->GetIsolate());
-  if (result->IsMap() &&
-      Handle<Map>::cast(result)->EquivalentToForNormalization(
-          *fast_map, mode)) {
-#ifdef VERIFY_HEAP
-    if (FLAG_verify_heap) {
-      Handle<Map>::cast(result)->SharedMapVerify();
-    }
-#endif
-#ifdef ENABLE_SLOW_ASSERTS
-    if (FLAG_enable_slow_asserts) {
-      // The cached map should match newly created normalized map bit-by-bit,
-      // except for the code cache, which can contain some ics which can be
-      // applied to the shared map.
-      Handle<Map> fresh = Map::CopyNormalized(
-          fast_map, mode, SHARED_NORMALIZED_MAP);
+Handle<NormalizedMapCache> NormalizedMapCache::New(Isolate* isolate) {
+  Handle<FixedArray> array(
+      isolate->factory()->NewFixedArray(kEntries, TENURED));
+  return Handle<NormalizedMapCache>::cast(array);
+}
 
-      ASSERT(memcmp(fresh->address(),
-                    Handle<Map>::cast(result)->address(),
-                    Map::kCodeCacheOffset) == 0);
-      STATIC_ASSERT(Map::kDependentCodeOffset ==
-                    Map::kCodeCacheOffset + kPointerSize);
-      int offset = Map::kDependentCodeOffset + kPointerSize;
-      ASSERT(memcmp(fresh->address() + offset,
-                    Handle<Map>::cast(result)->address() + offset,
-                    Map::kSize - offset) == 0);
-    }
-#endif
-    return Handle<Map>::cast(result);
+
+MaybeHandle<Map> NormalizedMapCache::Get(Handle<Map> fast_map,
+                                         PropertyNormalizationMode mode) {
+  DisallowHeapAllocation no_gc;
+  Object* value = FixedArray::get(GetIndex(fast_map));
+  if (!value->IsMap() ||
+      !Map::cast(value)->EquivalentToForNormalization(*fast_map, mode)) {
+    return MaybeHandle<Map>();
   }
+  return handle(Map::cast(value));
+}
 
 
-  Isolate* isolate = cache->GetIsolate();
-  Handle<Map> map = Map::CopyNormalized(fast_map, mode, SHARED_NORMALIZED_MAP);
-  ASSERT(map->is_dictionary_map());
-  cache->set(index, *map);
-  isolate->counters()->normalized_maps()->Increment();
-  return map;
+void NormalizedMapCache::Set(Handle<Map> fast_map,
+                             Handle<Map> normalized_map) {
+  DisallowHeapAllocation no_gc;
+  ASSERT(normalized_map->is_dictionary_map());
+  FixedArray::set(GetIndex(fast_map), *normalized_map);
 }
 
 
@@ -4733,6 +4715,7 @@ void JSObject::NormalizeProperties(Handle<JSObject> object,
   Isolate* isolate = object->GetIsolate();
   HandleScope scope(isolate);
   Handle<Map> map(object->map());
+  Handle<Map> new_map = Map::Normalize(map, mode);
 
   // Allocate new content.
   int real_size = map->NumberOfOwnDescriptors();
@@ -4787,12 +4770,6 @@ void JSObject::NormalizeProperties(Handle<JSObject> object,
   // Copy the next enumeration index from instance descriptor.
   dictionary->SetNextEnumerationIndex(real_size + 1);
 
-  Handle<NormalizedMapCache> cache(
-      isolate->context()->native_context()->normalized_map_cache());
-  Handle<Map> new_map = NormalizedMapCache::Get(
-      cache, handle(object->map()), mode);
-  ASSERT(new_map->is_dictionary_map());
-
   // From here on we cannot fail and we shouldn't GC anymore.
   DisallowHeapAllocation no_allocation;
 
@@ -4811,8 +4788,6 @@ void JSObject::NormalizeProperties(Handle<JSObject> object,
   // the left-over space to avoid races with the sweeper thread.
   object->synchronized_set_map(*new_map);
 
-  map->NotifyLeafMapLayoutChange();
-
   object->set_properties(*dictionary);
 
   isolate->counters()->props_to_dictionary()->Increment();
@@ -7239,6 +7214,50 @@ Handle<Map> Map::RawCopy(Handle<Map> map, int instance_size) {
 }
 
 
+Handle<Map> Map::Normalize(Handle<Map> fast_map,
+                           PropertyNormalizationMode mode) {
+  ASSERT(!fast_map->is_dictionary_map());
+
+  Isolate* isolate = fast_map->GetIsolate();
+  Handle<NormalizedMapCache> cache(
+      isolate->context()->native_context()->normalized_map_cache());
+
+  Handle<Map> new_map;
+  if (cache->Get(fast_map, mode).ToHandle(&new_map)) {
+#ifdef VERIFY_HEAP
+    if (FLAG_verify_heap) {
+      new_map->SharedMapVerify();
+    }
+#endif
+#ifdef ENABLE_SLOW_ASSERTS
+    if (FLAG_enable_slow_asserts) {
+      // The cached map should match newly created normalized map bit-by-bit,
+      // except for the code cache, which can contain some ics which can be
+      // applied to the shared map.
+      Handle<Map> fresh = Map::CopyNormalized(
+          fast_map, mode, SHARED_NORMALIZED_MAP);
+
+      ASSERT(memcmp(fresh->address(),
+                    new_map->address(),
+                    Map::kCodeCacheOffset) == 0);
+      STATIC_ASSERT(Map::kDependentCodeOffset ==
+                    Map::kCodeCacheOffset + kPointerSize);
+      int offset = Map::kDependentCodeOffset + kPointerSize;
+      ASSERT(memcmp(fresh->address() + offset,
+                    new_map->address() + offset,
+                    Map::kSize - offset) == 0);
+    }
+#endif
+  } else {
+    new_map = Map::CopyNormalized(fast_map, mode, SHARED_NORMALIZED_MAP);
+    cache->Set(fast_map, new_map);
+    isolate->counters()->normalized_maps()->Increment();
+  }
+  fast_map->NotifyLeafMapLayoutChange();
+  return new_map;
+}
+
+
 Handle<Map> Map::CopyNormalized(Handle<Map> map,
                                 PropertyNormalizationMode mode,
                                 NormalizedMapSharingMode sharing) {
diff --git a/src/objects.h b/src/objects.h
index 9fff61d..4be5184 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -4628,18 +4628,27 @@ class ScopeInfo : public FixedArray {
 // needs very limited number of distinct normalized maps.
 class NormalizedMapCache: public FixedArray {
  public:
-  static const int kEntries = 64;
+  static Handle<NormalizedMapCache> New(Isolate* isolate);
 
-  static Handle<Map> Get(Handle<NormalizedMapCache> cache,
-                         Handle<Map> fast_map,
-                         PropertyNormalizationMode mode);
+  MUST_USE_RESULT MaybeHandle<Map> Get(Handle<Map> fast_map,
+                                       PropertyNormalizationMode mode);
+  void Set(Handle<Map> fast_map, Handle<Map> normalized_map);
 
   void Clear();
 
   // Casting
   static inline NormalizedMapCache* cast(Object* obj);
+  static inline bool IsNormalizedMapCache(Object* obj);
 
   DECLARE_VERIFIER(NormalizedMapCache)
+ private:
+  static const int kEntries = 64;
+
+  static inline int GetIndex(Handle<Map> map);
+
+  // The following declarations hide base class methods.
+  Object* get(int index);
+  void set(int index, Object* value);
 };
@@ -6134,6 +6143,8 @@ class Map: public HeapObject {
                                           PropertyAttributes attributes,
                                           const char* reason);
 
+  static Handle<Map> Normalize(Handle<Map> map, PropertyNormalizationMode mode);
+
   // Returns the constructor name (the name (possibly, inferred name) of the
   // function that was used to instantiate the object).
   String* constructor_name();
@@ -6327,10 +6338,6 @@ class Map: public HeapObject {
 
   static Handle<Map> CopyForFreeze(Handle<Map> map);
 
-  static Handle<Map> CopyNormalized(Handle<Map> map,
-                                    PropertyNormalizationMode mode,
-                                    NormalizedMapSharingMode sharing);
-
   inline void AppendDescriptor(Descriptor* desc);
 
   // Returns a copy of the map, with all transitions dropped from the
@@ -6424,11 +6431,6 @@ class Map: public HeapObject {
     return type == JS_GLOBAL_OBJECT_TYPE || type == JS_BUILTINS_OBJECT_TYPE;
   }
 
-  // Fires when the layout of an object with a leaf map changes.
-  // This includes adding transitions to the leaf map or changing
-  // the descriptor array.
-  inline void NotifyLeafMapLayoutChange();
-
   inline bool CanOmitMapChecks();
 
   static void AddDependentCompilationInfo(Handle<Map> map,
@@ -6578,6 +6580,15 @@ class Map: public HeapObject {
       MaybeHandle<Name> maybe_name,
       SimpleTransitionFlag simple_flag = FULL_TRANSITION);
 
+  static Handle<Map> CopyNormalized(Handle<Map> map,
+                                    PropertyNormalizationMode mode,
+                                    NormalizedMapSharingMode sharing);
+
+  // Fires when the layout of an object with a leaf map changes.
+  // This includes adding transitions to the leaf map or changing
+  // the descriptor array.
+  inline void NotifyLeafMapLayoutChange();
+
   // Zaps the contents of backing data structures. Note that the
   // heap verifier (i.e. VerifyMarkingVisitor) relies on zapping of objects
   // holding weak references when incremental marking is used, because it also
-- 
2.7.4
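
Editor's note on the pattern, for readers who want the shape of the change without wading through the hunks: after this patch, callers never touch the NormalizedMapCache directly. Map::Normalize() hashes the fast map into one of the cache's 64 slots, reuses the cached normalized map only if it is equivalent under the requested PropertyNormalizationMode, otherwise builds a fresh shared normalized copy, stores it in the slot, and finally fires NotifyLeafMapLayoutChange() on the fast map. Below is a minimal standalone C++ sketch of that cache-then-copy protocol; it is not V8 code. FastKey, NormalizedValue, NormalizedCache and Normalize are illustrative stand-ins for Map, its dictionary-mode copy, NormalizedMapCache and Map::Normalize, and a plain string comparison stands in for EquivalentToForNormalization.

// Standalone sketch (not V8 code) of the single-entry-point normalization
// pattern from the patch: a fixed-size, hash-indexed, lossy cache consulted
// by one function that either reuses an equivalent cached result or builds
// and caches a new one. All names below are illustrative.
#include <array>
#include <cstddef>
#include <cstdio>
#include <functional>
#include <memory>
#include <optional>
#include <string>

struct FastKey {            // stand-in for a fast-mode Map
  std::string shape;        // what "equivalence for normalization" compares
};

struct NormalizedValue {    // stand-in for a shared normalized (dictionary) Map
  std::string shape;
};

class NormalizedCache {     // stand-in for NormalizedMapCache
 public:
  static constexpr int kEntries = 64;   // same fixed size as in the patch

  // Analogue of the MaybeHandle<Map> Get(): an empty result means "miss or
  // cached entry not equivalent", so the caller must handle both outcomes.
  std::optional<std::shared_ptr<NormalizedValue>> Get(const FastKey& key) const {
    const auto& slot = entries_[Index(key)];
    if (slot && slot->shape == key.shape) return slot;   // equivalent: reuse
    return std::nullopt;
  }

  // Analogue of Set(): unconditionally overwrite the hashed slot (lossy cache).
  void Set(const FastKey& key, std::shared_ptr<NormalizedValue> value) {
    entries_[Index(key)] = std::move(value);
  }

 private:
  static std::size_t Index(const FastKey& key) {
    return std::hash<std::string>{}(key.shape) % kEntries;
  }
  std::array<std::shared_ptr<NormalizedValue>, kEntries> entries_;
};

// Analogue of Map::Normalize(): the one place that consults the cache,
// falls back to the expensive copy, and fills the cache on a miss.
std::shared_ptr<NormalizedValue> Normalize(NormalizedCache& cache,
                                           const FastKey& key) {
  if (auto cached = cache.Get(key)) return *cached;      // hit: share it
  auto fresh = std::make_shared<NormalizedValue>(NormalizedValue{key.shape});
  cache.Set(key, fresh);                                 // miss: cache it
  return fresh;
}

int main() {
  NormalizedCache cache;
  FastKey a{"point{x,y}"};
  auto first = Normalize(cache, a);
  auto second = Normalize(cache, a);   // equivalent request reuses the entry
  std::printf("shared: %s\n", first == second ? "yes" : "no");
  return 0;
}

The design point mirrored from the patch is the single entry point: the cache's Get/Set pair (and, in V8, the leaf-map layout notification) become implementation details behind Normalize, so every call site gets the same hit/miss behaviour instead of reimplementing the lookup as the old JSObject::NormalizeProperties() did.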