bool InstallNatives();
void InstallCustomCallGenerators();
void InstallJSFunctionResultCaches();
+ void InitializeNormalizedMapCaches();
// Used both for deserialized and from-scratch contexts to add the extensions
// provided.
static bool InstallExtensions(Handle<Context> global_context,
}
+// Allocates the per-context cache of maps for normalized (dictionary-mode)
+// objects and installs it on the global context. The cache is a plain
+// tenured FixedArray of NormalizedMapCache::kEntries slots; the cast is
+// safe because NormalizedMapCache is a FixedArray subtype distinguished
+// only by its length (see Object::IsNormalizedMapCache).
+void Genesis::InitializeNormalizedMapCaches() {
+  Handle<FixedArray> array(
+      Factory::NewFixedArray(NormalizedMapCache::kEntries, TENURED));
+  global_context()->set_normalized_map_cache(NormalizedMapCache::cast(*array));
+}
+
+
int BootstrapperActive::nesting_ = 0;
HookUpGlobalProxy(inner_global, global_proxy);
InitializeGlobal(inner_global, empty_function);
InstallJSFunctionResultCaches();
+ InitializeNormalizedMapCaches();
if (!InstallNatives()) return;
MakeFunctionInstancePrototypeWritable();
V(CONFIGURE_GLOBAL_INDEX, JSFunction, configure_global_fun) \
V(FUNCTION_CACHE_INDEX, JSObject, function_cache) \
V(JSFUNCTION_RESULT_CACHES_INDEX, FixedArray, jsfunction_result_caches) \
+ V(NORMALIZED_MAP_CACHE_INDEX, NormalizedMapCache, normalized_map_cache) \
V(RUNTIME_CONTEXT_INDEX, Context, runtime_context) \
V(CALL_AS_FUNCTION_DELEGATE_INDEX, JSFunction, call_as_function_delegate) \
V(CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, JSFunction, \
CONFIGURE_GLOBAL_INDEX,
FUNCTION_CACHE_INDEX,
JSFUNCTION_RESULT_CACHES_INDEX,
+ NORMALIZED_MAP_CACHE_INDEX,
RUNTIME_CONTEXT_INDEX,
CALL_AS_FUNCTION_DELEGATE_INDEX,
CALL_AS_CONSTRUCTOR_DELEGATE_INDEX,
}
+// Visitor that clears the normalized map cache of the context each visited
+// thread was running in. Used by Heap::ClearNormalizedMapCaches.
+class ClearThreadNormalizedMapCachesVisitor: public ThreadVisitor {
+  // Private override is intentional: dispatch happens through the
+  // ThreadVisitor base interface.
+  virtual void VisitThread(ThreadLocalTop* top) {
+    Context* context = top->context_;
+    // A thread may have no context entered; nothing to clear then.
+    if (context == NULL) return;
+    context->global()->global_context()->normalized_map_cache()->Clear();
+  }
+};
+
+
+// Clears the normalized map cache of every archived thread's context.
+// Called from GC (see the MarkCompact prologue alongside
+// FlushNumberStringCache). Skipped while the bootstrapper is active, when
+// contexts are still being set up.
+void Heap::ClearNormalizedMapCaches() {
+  if (Bootstrapper::IsActive()) return;
+  ClearThreadNormalizedMapCachesVisitor visitor;
+  ThreadManager::IterateArchivedThreads(&visitor);
+}
+
+
#ifdef DEBUG
enum PageWatermarkValidity {
CompletelyClearInstanceofCache();
if (is_compacting) FlushNumberStringCache();
+
+ ClearNormalizedMapCaches();
}
static void ClearJSFunctionResultCaches();
+ static void ClearNormalizedMapCaches();
+
static GCTracer* tracer() { return tracer_; }
private:
}
+// Debug-only check for maps produced by Map::CopyNormalized: besides the
+// generic map invariants, a normalized map must carry no descriptors, no
+// code cache, no (pre-)allocated property fields, and the scavenger
+// appropriate for its type and size.
+void Map::NormalizedMapVerify() {
+  MapVerify();
+  ASSERT(instance_descriptors() == Heap::empty_descriptor_array());
+  ASSERT(code_cache() == Heap::empty_fixed_array());
+  ASSERT(pre_allocated_property_fields() == 0);
+  ASSERT(unused_property_fields() == 0);
+  ASSERT(scavenger() == Heap::GetScavenger(instance_type(), instance_size()));
+}
+
+
void CodeCache::CodeCachePrint() {
HeapObject::PrintHeader("CodeCache");
PrintF("\n - default_cache: ");
}
+// Debug-only check: the cache must be a well-formed FixedArray in which
+// every entry is either undefined (empty slot) or a properly normalized
+// map. The per-entry walk is guarded by --enable-slow-asserts since it is
+// O(kEntries) with a full map verification per occupied slot.
+void NormalizedMapCache::NormalizedMapCacheVerify() {
+  FixedArray::cast(this)->Verify();
+  if (FLAG_enable_slow_asserts) {
+    for (int i = 0; i < length(); i++) {
+      Object* e = get(i);
+      if (e->IsMap()) {
+        Map::cast(e)->NormalizedMapVerify();
+      } else {
+        ASSERT(e->IsUndefined());
+      }
+    }
+  }
+}
+
+
#endif // DEBUG
} } // namespace v8::internal
}
+// Heuristic type test: any FixedArray whose length is exactly
+// NormalizedMapCache::kEntries is treated as a normalized map cache —
+// there is no dedicated instance type for it. Debug builds additionally
+// verify the contents before answering true.
+bool Object::IsNormalizedMapCache() {
+  if (!IsFixedArray()) return false;
+  if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
+    return false;
+  }
+#ifdef DEBUG
+  // reinterpret_cast (not cast()) because cast() would re-enter this check.
+  reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
+#endif
+  return true;
+}
+
+
bool Object::IsCompilationCacheTable() {
return IsHashTable();
}
CAST_ACCESSOR(DescriptorArray)
CAST_ACCESSOR(SymbolTable)
CAST_ACCESSOR(JSFunctionResultCache)
+CAST_ACCESSOR(NormalizedMapCache)
CAST_ACCESSOR(CompilationCacheTable)
CAST_ACCESSOR(CodeCacheHashTable)
CAST_ACCESSOR(MapCache)
}
+// Returns a normalized map equivalent to |fast| under |mode|, reusing a
+// cached one when a compatible entry hashes to the same slot. On a miss
+// the freshly created map overwrites whatever occupied the slot (the cache
+// is direct-mapped, one map per index). May return a Failure object if
+// the map allocation fails.
+Object* NormalizedMapCache::Get(Map* fast, PropertyNormalizationMode mode) {
+  int index = Hash(fast) % kEntries;
+  Object* obj = get(index);
+
+  if (obj->IsMap() && CheckHit(Map::cast(obj), fast, mode)) {
+#ifdef DEBUG
+    if (FLAG_enable_slow_asserts) {
+      // The cached map should match freshly created normalized map bit-by-bit.
+      Object* fresh = fast->CopyNormalized(mode);
+      if (!fresh->IsFailure()) {
+        // Copy the unused byte so that the assertion below works.
+        Map::cast(fresh)->address()[Map::kUnusedOffset] =
+            Map::cast(obj)->address()[Map::kUnusedOffset];
+        ASSERT(memcmp(Map::cast(fresh)->address(),
+                      Map::cast(obj)->address(),
+                      Map::kSize) == 0);
+      }
+    }
+#endif
+    return obj;
+  }
+
+  obj = fast->CopyNormalized(mode);
+  if (obj->IsFailure()) return obj;
+  set(index, obj);
+  Counters::normalized_maps.Increment();
+
+  return obj;
+}
+
+
+// Evicts every cached map by overwriting all slots with undefined.
+// Empty slots are represented as undefined (see NormalizedMapCacheVerify).
+void NormalizedMapCache::Clear() {
+  int entries = length();
+  for (int i = 0; i != entries; i++) {
+    set_undefined(i);
+  }
+}
+
+
+// Computes the cache slot hash for a fast-mode map. Deliberately mixes
+// only a subset of the fields that CheckHit later compares exactly, so a
+// hash collision is resolved by the full field-by-field check.
+int NormalizedMapCache::Hash(Map* fast) {
+  // For performance reasons we only hash the 3 most variable fields of a map:
+  // constructor, prototype and bit_field2.
+
+  // Shift away the tag.
+  int hash = (static_cast<uint32_t>(
+      reinterpret_cast<uintptr_t>(fast->constructor())) >> 2);
+
+  // XOR-ing the prototype and constructor directly yields too many zero bits
+  // when the two pointers are close (which is fairly common).
+  // To avoid this we shift the prototype 4 bits relatively to the constructor.
+  hash ^= (static_cast<uint32_t>(
+      reinterpret_cast<uintptr_t>(fast->prototype())) << 2);
+
+  return hash ^ (hash >> 16) ^ fast->bit_field2();
+}
+
+
+// Returns true if the cached map |slow| is a valid normalization of |fast|
+// under |mode|. The fields compared here must stay in sync with the fields
+// Map::CopyNormalized sets; under CLEAR_INOBJECT_PROPERTIES a hit requires
+// the cached map to have zero in-object properties.
+bool NormalizedMapCache::CheckHit(Map* slow,
+                                  Map* fast,
+                                  PropertyNormalizationMode mode) {
+#ifdef DEBUG
+  slow->NormalizedMapVerify();
+#endif
+  return
+    slow->constructor() == fast->constructor() &&
+    slow->prototype() == fast->prototype() &&
+    slow->inobject_properties() == ((mode == CLEAR_INOBJECT_PROPERTIES) ?
+                                    0 :
+                                    fast->inobject_properties()) &&
+    slow->instance_type() == fast->instance_type() &&
+    slow->bit_field() == fast->bit_field() &&
+    slow->bit_field2() == fast->bit_field2();
+}
+
+
Object* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
int expected_additional_properties) {
if (!HasFastProperties()) return this;
int index = map()->instance_descriptors()->NextEnumerationIndex();
dictionary->SetNextEnumerationIndex(index);
- // Allocate new map.
- obj = map()->CopyDropDescriptors();
+ obj = Top::context()->global_context()->
+ normalized_map_cache()->Get(map(), mode);
if (obj->IsFailure()) return obj;
Map* new_map = Map::cast(obj);
- // Clear inobject properties if needed by adjusting the instance size and
- // putting in a filler object instead of the inobject properties.
- if (mode == CLEAR_INOBJECT_PROPERTIES && map()->inobject_properties() > 0) {
- int instance_size_delta = map()->inobject_properties() * kPointerSize;
- int new_instance_size = map()->instance_size() - instance_size_delta;
- new_map->set_inobject_properties(0);
- new_map->set_instance_size(new_instance_size);
- new_map->set_scavenger(Heap::GetScavenger(new_map->instance_type(),
- new_map->instance_size()));
- Heap::CreateFillerObjectAt(this->address() + new_instance_size,
- instance_size_delta);
- }
- new_map->set_unused_property_fields(0);
-
// We have now successfully allocated all the necessary objects.
// Changes can now be made with the guarantee that all of them take effect.
+
+ // Resize the object in the heap if necessary.
+ int new_instance_size = new_map->instance_size();
+ int instance_size_delta = map()->instance_size() - new_instance_size;
+ ASSERT(instance_size_delta >= 0);
+ Heap::CreateFillerObjectAt(this->address() + new_instance_size,
+ instance_size_delta);
+
set_map(new_map);
- map()->set_instance_descriptors(Heap::empty_descriptor_array());
set_properties(dictionary);
}
+// Creates a fresh map for a normalized (dictionary-mode) object based on
+// this fast-mode map. Under CLEAR_INOBJECT_PROPERTIES the in-object
+// property slots are dropped and the instance size shrinks accordingly;
+// otherwise the in-object property count is carried over. Returns a
+// Failure object if map allocation fails.
+Object* Map::CopyNormalized(PropertyNormalizationMode mode) {
+  int new_instance_size = instance_size();
+  if (mode == CLEAR_INOBJECT_PROPERTIES) {
+    new_instance_size -= inobject_properties() * kPointerSize;
+  }
+
+  Object* result = Heap::AllocateMap(instance_type(), new_instance_size);
+  if (result->IsFailure()) return result;
+
+  // When clearing, the freshly allocated map keeps its default (zero)
+  // in-object property count.
+  if (mode != CLEAR_INOBJECT_PROPERTIES) {
+    Map::cast(result)->set_inobject_properties(inobject_properties());
+  }
+
+  Map::cast(result)->set_prototype(prototype());
+  Map::cast(result)->set_constructor(constructor());
+
+  Map::cast(result)->set_bit_field(bit_field());
+  Map::cast(result)->set_bit_field2(bit_field2());
+
+#ifdef DEBUG
+  Map::cast(result)->NormalizedMapVerify();
+#endif
+
+  return result;
+}
+
+
Object* Map::CopyDropTransitions() {
Object* new_map = CopyDropDescriptors();
if (new_map->IsFailure()) return new_map;
inline bool IsDictionary();
inline bool IsSymbolTable();
inline bool IsJSFunctionResultCache();
+ inline bool IsNormalizedMapCache();
inline bool IsCompilationCacheTable();
inline bool IsCodeCacheHashTable();
inline bool IsMapCache();
};
+// The cache for maps used by normalized (dictionary mode) objects.
+// Such maps do not have property descriptors, so a typical program
+// needs very limited number of distinct normalized maps.
+// Implemented as a direct-mapped table on top of a plain FixedArray of
+// kEntries slots; empty slots hold undefined.
+class NormalizedMapCache: public FixedArray {
+ public:
+  // Number of cache slots. Also doubles as the type tag: a FixedArray of
+  // exactly this length is recognized as a NormalizedMapCache (see
+  // Object::IsNormalizedMapCache).
+  static const int kEntries = 64;
+
+  // Returns a normalized version of |fast| for |mode|, creating and
+  // caching a new map on a miss. May return a Failure object.
+  Object* Get(Map* fast, PropertyNormalizationMode mode);
+
+  // Resets every slot to undefined.
+  void Clear();
+
+  // Casting
+  static inline NormalizedMapCache* cast(Object* obj);
+
+#ifdef DEBUG
+  void NormalizedMapCacheVerify();
+#endif
+
+ private:
+  // Slot hash over a subset of map fields; collisions are resolved by
+  // CheckHit's exact comparison.
+  static int Hash(Map* fast);
+
+  // Exact compatibility test between a cached map and a fast map.
+  static bool CheckHit(Map* slow, Map* fast, PropertyNormalizationMode mode);
+};
+
+
// ByteArray represents fixed sized byte arrays. Used by the outside world,
// such as PCRE, and also by the memory allocator and garbage collector to
// fill in free blocks in the heap.
Object* CopyDropDescriptors();
+ Object* CopyNormalized(PropertyNormalizationMode mode);
+
// Returns a copy of the map, with all transitions dropped from the
// instance descriptors.
Object* CopyDropTransitions();
#ifdef DEBUG
void MapPrint();
void MapVerify();
+ void NormalizedMapVerify();
#endif
inline Scavenger scavenger();
static const int kPreAllocatedPropertyFieldsOffset =
kInstanceSizesOffset + kPreAllocatedPropertyFieldsByte;
// The byte at position 3 is not in use at the moment.
+ static const int kUnusedByte = 3;
+ static const int kUnusedOffset = kInstanceSizesOffset + kUnusedByte;
// Byte offsets within kInstanceAttributesOffset attributes.
static const int kInstanceTypeOffset = kInstanceAttributesOffset + 0;
SC(pcre_mallocs, V8.PcreMallocCount) \
/* OS Memory allocated */ \
SC(memory_allocated, V8.OsMemoryAllocated) \
+ SC(normalized_maps, V8.NormalizedMaps) \
SC(props_to_dictionary, V8.ObjectPropertiesToDictionary) \
SC(elements_to_dictionary, V8.ObjectElementsToDictionary) \
SC(alive_after_last_gc, V8.AliveAfterLastGC) \