static const int kNullValueRootIndex = 7;
static const int kTrueValueRootIndex = 8;
static const int kFalseValueRootIndex = 9;
- static const int kEmptyStringRootIndex = 155;
+ static const int kEmptyStringRootIndex = 156;
// The external allocation limit should be below 256 MB on all architectures
// to avoid resource-constrained embedders running low on memory.
DEFINE_INT(max_executable_size, 0, "max size of executable memory (in Mbytes)")
DEFINE_BOOL(gc_global, false, "always perform global GCs")
DEFINE_INT(gc_interval, -1, "garbage collect after <n> allocations")
+DEFINE_INT(retain_maps_for_n_gc, 2,
+ "keeps maps alive for <n> old space garbage collections")
DEFINE_BOOL(trace_gc, false,
"print one trace line following each garbage collection")
DEFINE_BOOL(trace_gc_nvp, false,
isolate()->optimizing_compiler_thread()->Flush();
}
AgeInlineCaches();
+ set_retained_maps(ArrayList::cast(empty_fixed_array()));
tracer()->AddContextDisposalTime(base::OS::TimeCurrentMillis());
return ++contexts_disposed_;
}
}
set_detached_contexts(empty_fixed_array());
+ set_retained_maps(ArrayList::cast(empty_fixed_array()));
set_weak_object_to_code_table(
*WeakHashTable::New(isolate(), 16, USE_DEFAULT_MINIMUM_CAPACITY,
case kMicrotaskQueueRootIndex:
case kDetachedContextsRootIndex:
case kWeakObjectToCodeTableRootIndex:
+ case kRetainedMapsRootIndex:
// Smi values
#define SMI_ENTRY(type, name, Name) case k##Name##RootIndex:
SMI_ROOT_LIST(SMI_ENTRY)
}
+// Registers |map| to be kept alive for the next FLAG_retain_maps_for_n_gc
+// old-space garbage collections by appending a (WeakCell(map), age) pair to
+// the heap's retained_maps list. No-op when the feature is disabled.
+void Heap::AddRetainedMap(Handle<Map> map) {
+ if (FLAG_retain_maps_for_n_gc == 0) return;
+ // Hold the map through a WeakCell so that the retained_maps list itself
+ // does not strongly keep the map alive once its retention age expires.
+ Handle<WeakCell> cell = Map::WeakCellForMap(map);
+ Handle<ArrayList> array(retained_maps(), isolate());
+ array = ArrayList::Add(
+ array, cell, handle(Smi::FromInt(FLAG_retain_maps_for_n_gc), isolate()));
+ // ArrayList::Add may reallocate the backing store; update the root only
+ // when it actually changed.
+ if (*array != retained_maps()) {
+ set_retained_maps(*array);
+ }
+}
+
+
void Heap::FatalProcessOutOfMemory(const char* location, bool take_snapshot) {
v8::internal::V8::FatalProcessOutOfMemory(location, take_snapshot);
}
V(FixedArray, microtask_queue, MicrotaskQueue) \
V(FixedArray, keyed_load_dummy_vector, KeyedLoadDummyVector) \
V(FixedArray, detached_contexts, DetachedContexts) \
+ V(ArrayList, retained_maps, RetainedMaps) \
V(WeakHashTable, weak_object_to_code_table, WeakObjectToCodeTable)
// Entries in this list are limited to Smis and are not visited during GC.
DependentCode* LookupWeakObjectToCodeDependency(Handle<HeapObject> obj);
+ void AddRetainedMap(Handle<Map> map);
+
static void FatalProcessOutOfMemory(const char* location,
bool take_snapshot = false);
}
+// Artificially marks otherwise-dead maps in heap()->retained_maps() so that
+// recently used map transition trees survive a few GCs and can be reused.
+// The list stores (WeakCell(map), Smi age) pairs and is compacted in place,
+// dropping entries whose map died, aged out, or lost its constructor.
+void MarkCompactCollector::RetainMaps() {
+ if (reduce_memory_footprint_ || abort_incremental_marking_ ||
+ FLAG_retain_maps_for_n_gc == 0) {
+ // Do not retain dead maps if flag disables it or there is
+ // - memory pressure (reduce_memory_footprint_),
+ // - GC is requested by tests or dev-tools (abort_incremental_marking_).
+ return;
+ }
+
+ ArrayList* retained_maps = heap()->retained_maps();
+ int length = retained_maps->Length();
+ int new_length = 0;
+ for (int i = 0; i < length; i += 2) {
+ WeakCell* cell = WeakCell::cast(retained_maps->Get(i));
+ // A cleared cell means the map was already collected; drop the entry.
+ if (cell->cleared()) continue;
+ int age = Smi::cast(retained_maps->Get(i + 1))->value();
+ int new_age;
+ Map* map = Map::cast(cell->value());
+ MarkBit map_mark = Marking::MarkBitFrom(map);
+ if (!map_mark.Get()) {
+ // Map is not otherwise reachable: retain it only while it still has
+ // age left and its constructor is live.
+ if (age == 0) {
+ // The map has aged. Do not retain this map.
+ continue;
+ }
+ Object* constructor = map->GetConstructor();
+ if (!constructor->IsHeapObject() ||
+ !Marking::MarkBitFrom(HeapObject::cast(constructor)).Get()) {
+ // The constructor is dead, no new objects with this map can
+ // be created. Do not retain this map.
+ continue;
+ }
+ new_age = age - 1;
+ // Artificially mark the map so the collector treats it as live.
+ MarkObject(map, map_mark);
+ } else {
+ // The map is live on its own; reset its retention age to the maximum.
+ new_age = FLAG_retain_maps_for_n_gc;
+ }
+ // Compact surviving entries toward the front of the list; record the
+ // moved cell's slot so the collector tracks the reference.
+ if (i != new_length) {
+ retained_maps->Set(new_length, cell);
+ Object** slot = retained_maps->Slot(new_length);
+ RecordSlot(slot, slot, cell);
+ retained_maps->Set(new_length + 1, Smi::FromInt(new_age));
+ } else if (new_age != age) {
+ // Entry stayed in place; only the age needs rewriting.
+ retained_maps->Set(new_length + 1, Smi::FromInt(new_age));
+ }
+ new_length += 2;
+ }
+ // Clear the dropped tail with undefined and shrink the logical length.
+ Object* undefined = heap()->undefined_value();
+ for (int i = new_length; i < length; i++) {
+ retained_maps->Clear(i, undefined);
+ }
+ if (new_length != length) retained_maps->SetLength(new_length);
+ // Drain the marking deque so everything reachable from the freshly
+ // marked maps gets marked transitively as well.
+ ProcessMarkingDeque();
+}
+
+
void MarkCompactCollector::EnsureMarkingDequeIsCommittedAndInitialize() {
if (marking_deque_memory_ == NULL) {
marking_deque_memory_ = new base::VirtualMemory(4 * MB);
ProcessTopOptimizedFrame(&root_visitor);
+ // Retaining dying maps should happen before or during ephemeral marking
+ // because a map could keep the key of an ephemeron alive. Note that map
+ // aging is imprecise: maps that are kept alive only by ephemerons will age.
+ RetainMaps();
+
{
GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_WEAKCLOSURE);
// otherwise a map can die and deoptimize the code.
void ProcessTopOptimizedFrame(ObjectVisitor* visitor);
+ // Retain dying maps for <FLAG_retain_maps_for_n_gc> garbage collections to
+ // increase chances of reusing of map transition tree in future.
+ void RetainMaps();
+
// Mark objects reachable (transitively) from objects in the marking
// stack. This function empties the marking stack, but may leave
// overflowed objects in the heap, in which case the marking stack's
}
}
for (int i = 0; i < maps.length(); i++) {
+ if (maps.at(i)->dependent_code()->number_of_entries(
+ DependentCode::kWeakCodeGroup) == 0) {
+ isolate()->heap()->AddRetainedMap(maps.at(i));
+ }
Map::AddDependentCode(maps.at(i), DependentCode::kWeakCodeGroup, code);
}
for (int i = 0; i < objects.length(); i++) {
}
+bool Object::IsArrayList() const { return IsFixedArray(); }
+
+
bool Object::IsLayoutDescriptor() const {
return IsSmi() || IsFixedTypedArrayBase();
}
}
+// Returns the number of stored elements (not the backing-store capacity).
+// An empty backing FixedArray (e.g. the empty_fixed_array root) encodes
+// length 0 without a dedicated length slot.
+int ArrayList::Length() {
+ if (FixedArray::cast(this)->length() == 0) return 0;
+ return Smi::cast(FixedArray::cast(this)->get(kLengthIndex))->value();
+}
+
+
+// Stores the logical element count in the reserved length slot. The caller
+// must ensure the backing store is non-empty (i.e. has a length slot).
+void ArrayList::SetLength(int length) {
+ return FixedArray::cast(this)->set(kLengthIndex, Smi::FromInt(length));
+}
+
+
+// Reads element |index|; indices are relative to kFirstIndex so the length
+// slot is transparent to callers.
+Object* ArrayList::Get(int index) {
+ return FixedArray::cast(this)->get(kFirstIndex + index);
+}
+
+
+// Returns the raw in-object address of element |index|'s slot, e.g. for
+// slot recording during mark-compact (see RetainMaps' RecordSlot usage).
+Object** ArrayList::Slot(int index) {
+ return data_start() + kFirstIndex + index;
+}
+
+
+// Writes element |index| (relative to kFirstIndex) with the default write
+// barrier.
+void ArrayList::Set(int index, Object* obj) {
+ FixedArray::cast(this)->set(kFirstIndex + index, obj);
+}
+
+
+// Overwrites element |index| with the undefined value. The caller passes
+// undefined in (DCHECK-enforced); skipping the write barrier is safe since
+// undefined is an immortal root object.
+void ArrayList::Clear(int index, Object* undefined) {
+ DCHECK(undefined->IsUndefined());
+ FixedArray::cast(this)
+ ->set(kFirstIndex + index, undefined, SKIP_WRITE_BARRIER);
+}
+
+
void ConstantPoolArray::NumberOfEntries::increment(Type type) {
DCHECK(type < NUMBER_OF_TYPES);
element_counts_[type]++;
CAST_ACCESSOR(AccessorInfo)
+CAST_ACCESSOR(ArrayList)
CAST_ACCESSOR(ByteArray)
CAST_ACCESSOR(Cell)
CAST_ACCESSOR(Code)
}
+// Appends |obj| to |array|, growing the backing store when needed, and
+// returns the (possibly reallocated) list. Callers must use the return
+// value instead of the argument.
+Handle<ArrayList> ArrayList::Add(Handle<ArrayList> array, Handle<Object> obj) {
+ int length = array->Length();
+ array = EnsureSpace(array, length + 1);
+ array->Set(length, *obj);
+ array->SetLength(length + 1);
+ return array;
+}
+
+
+// Appends |obj1| and |obj2| as a single atomic unit (used for pair-shaped
+// data such as the retained-maps (cell, age) entries). Returns the possibly
+// reallocated list; callers must use the return value.
+Handle<ArrayList> ArrayList::Add(Handle<ArrayList> array, Handle<Object> obj1,
+ Handle<Object> obj2) {
+ int length = array->Length();
+ array = EnsureSpace(array, length + 2);
+ array->Set(length, *obj1);
+ array->Set(length + 1, *obj2);
+ array->SetLength(length + 2);
+ return array;
+}
+
+
+// Ensures the backing store can hold |length| elements plus the length
+// slot. Grows by copying into a ~1.5x larger FixedArray (minimum headroom
+// of 2 slots) to give amortized O(1) appends.
+Handle<ArrayList> ArrayList::EnsureSpace(Handle<ArrayList> array, int length) {
+ // Note: |capacity| counts raw backing-store slots, including kLengthIndex.
+ int capacity = array->length();
+ if (capacity < kFirstIndex + length) {
+ capacity = kFirstIndex + length;
+ capacity = capacity + Max(capacity / 2, 2);
+ array = Handle<ArrayList>::cast(FixedArray::CopySize(array, capacity));
+ }
+ return array;
+}
+
+
Handle<DescriptorArray> DescriptorArray::Allocate(Isolate* isolate,
int number_of_descriptors,
int slack) {
V(FixedArray) \
V(FixedDoubleArray) \
V(WeakFixedArray) \
+ V(ArrayList) \
V(ConstantPoolArray) \
V(Context) \
V(ScriptContextTable) \
};
+// Generic array grows dynamically with O(1) amortized insertion.
+//
+// Layout on top of FixedArray: slot 0 (kLengthIndex) holds the logical
+// element count as a Smi; elements start at slot 1 (kFirstIndex). The
+// FixedArray length is the capacity; Add() reallocates via EnsureSpace, so
+// callers must always use the handle returned by Add().
+class ArrayList : public FixedArray {
+ public:
+ static Handle<ArrayList> Add(Handle<ArrayList> array, Handle<Object> obj);
+ // Appends two elements as one unit (convenient for pair-shaped data).
+ static Handle<ArrayList> Add(Handle<ArrayList> array, Handle<Object> obj1,
+ Handle<Object> obj2);
+ inline int Length();
+ inline void SetLength(int length);
+ inline Object* Get(int index);
+ // Raw address of element |index|'s slot (for GC slot recording).
+ inline Object** Slot(int index);
+ inline void Set(int index, Object* obj);
+ // Overwrites element |index| with undefined, skipping the write barrier.
+ inline void Clear(int index, Object* undefined);
+ DECLARE_CAST(ArrayList)
+
+ private:
+ static Handle<ArrayList> EnsureSpace(Handle<ArrayList> array, int length);
+ static const int kLengthIndex = 0;
+ static const int kFirstIndex = 1;
+ DISALLOW_IMPLICIT_CONSTRUCTORS(ArrayList);
+};
+
+
// ConstantPoolArray describes a fixed-sized array containing constant pool
// entries.
//
'test-api.cc',
'test-api.h',
'test-api-interceptors.cc',
+ 'test-array-list.cc',
'test-ast.cc',
'test-atomicops.cc',
'test-bignum.cc',
TEST(Regress528) {
v8::V8::Initialize();
v8::Isolate* isolate = CcTest::isolate();
+ i::FLAG_retain_maps_for_n_gc = 0;
v8::HandleScope scope(isolate);
v8::Local<Context> other_context;
int gc_count;
--- /dev/null
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdlib.h>
+
+#include "src/v8.h"
+
+#include "src/factory.h"
+#include "test/cctest/cctest.h"
+
+namespace {
+
+using namespace v8::internal;
+
+
+// Exercises the full ArrayList surface: growth from the empty backing
+// array, single and pair Add, Get/Set, Clear, and explicit truncation.
+TEST(ArrayList) {
+ LocalContext context;
+ Isolate* isolate = CcTest::i_isolate();
+ HandleScope scope(isolate);
+ // The empty_fixed_array root doubles as the canonical empty ArrayList.
+ Handle<ArrayList> array(
+ ArrayList::cast(isolate->heap()->empty_fixed_array()));
+ CHECK_EQ(0, array->Length());
+ array = ArrayList::Add(array, handle(Smi::FromInt(100), isolate));
+ CHECK_EQ(1, array->Length());
+ CHECK_EQ(100, Smi::cast(array->Get(0))->value());
+ // Two-element Add appends both values in order.
+ array = ArrayList::Add(array, handle(Smi::FromInt(200), isolate),
+ handle(Smi::FromInt(300), isolate));
+ CHECK_EQ(3, array->Length());
+ CHECK_EQ(100, Smi::cast(array->Get(0))->value());
+ CHECK_EQ(200, Smi::cast(array->Get(1))->value());
+ CHECK_EQ(300, Smi::cast(array->Get(2))->value());
+ array->Set(2, Smi::FromInt(400));
+ CHECK_EQ(400, Smi::cast(array->Get(2))->value());
+ // Clear + SetLength truncates without disturbing earlier elements.
+ array->Clear(2, isolate->heap()->undefined_value());
+ array->SetLength(2);
+ CHECK_EQ(2, array->Length());
+ CHECK_EQ(100, Smi::cast(array->Get(0))->value());
+ CHECK_EQ(200, Smi::cast(array->Get(1))->value());
+}
+}
// Some flags turn Scavenge collections into Mark-sweep collections
// and hence are incompatible with this test case.
if (FLAG_gc_global || FLAG_stress_compaction) return;
+ FLAG_retain_maps_for_n_gc = 0;
static const int kNumTestContexts = 10;
i::FLAG_stress_compaction = false;
i::FLAG_allow_natives_syntax = true;
i::FLAG_trace_incremental_marking = true;
+ i::FLAG_retain_maps_for_n_gc = 0;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
static const int transitions_count = 256;
Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
i::FLAG_gc_interval = gc_count;
i::FLAG_gc_global = true;
+ i::FLAG_retain_maps_for_n_gc = 0;
CcTest::heap()->set_allocation_timeout(gc_count);
JSReceiver::SetProperty(object, prop_name, twenty_three, SLOPPY).Check();
}
// Now make sure that a gc should get rid of the function, even though we
// still have the allocation site alive.
for (int i = 0; i < 4; i++) {
- heap->CollectAllGarbage(Heap::kNoGCFlags);
+ heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
}
// The site still exists because of our global handle, but the code is no
i::FLAG_weak_embedded_objects_in_optimized_code = true;
i::FLAG_allow_natives_syntax = true;
i::FLAG_compilation_cache = false;
+ i::FLAG_retain_maps_for_n_gc = 0;
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
v8::internal::Heap* heap = CcTest::heap();
}
+// Verifies that a map registered via AddRetainedMap survives exactly |n|
+// old-space GCs and is collected on GC n+1.
+void CheckMapRetainingFor(int n) {
+ FLAG_retain_maps_for_n_gc = n;
+ Isolate* isolate = CcTest::i_isolate();
+ Heap* heap = isolate->heap();
+ Handle<WeakCell> weak_cell;
+ {
+ // Create the map in an inner scope so that after CloseAndEscape only
+ // the retained-maps mechanism (not a handle) keeps it alive.
+ HandleScope inner_scope(isolate);
+ Handle<Map> map = Map::Create(isolate, 1);
+ heap->AddRetainedMap(map);
+ weak_cell = inner_scope.CloseAndEscape(Map::WeakCellForMap(map));
+ }
+ CHECK(!weak_cell->cleared());
+ // The map must survive the configured number of old-space collections...
+ for (int i = 0; i < n; i++) {
+ heap->CollectGarbage(OLD_POINTER_SPACE);
+ }
+ CHECK(!weak_cell->cleared());
+ // ...and die on the very next one.
+ heap->CollectGarbage(OLD_POINTER_SPACE);
+ CHECK(weak_cell->cleared());
+}
+
+
+// Covers the default flag value plus the disabled (0), minimal (1), and an
+// arbitrary (7) retention count.
+TEST(MapRetaining) {
+ CcTest::InitializeVM();
+ v8::HandleScope scope(CcTest::isolate());
+ CheckMapRetainingFor(FLAG_retain_maps_for_n_gc);
+ CheckMapRetainingFor(0);
+ CheckMapRetainingFor(1);
+ CheckMapRetainingFor(7);
+}
+
+
#ifdef DEBUG
TEST(PathTracer) {
CcTest::InitializeVM();
TEST(MarkCompactCollector) {
FLAG_incremental_marking = false;
+ FLAG_retain_maps_for_n_gc = 0;
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
TestHeap* heap = CcTest::test_heap();