static const int kNullValueRootIndex = 7;
static const int kTrueValueRootIndex = 8;
static const int kFalseValueRootIndex = 9;
- static const int kEmptyStringRootIndex = 153;
+ static const int kEmptyStringRootIndex = 154;
// The external allocation limit should be below 256 MB on all architectures
// to avoid that resource-constrained embedders run low on memory.
static const int kNodeIsIndependentShift = 4;
static const int kNodeIsPartiallyDependentShift = 5;
- static const int kJSObjectType = 0xbc;
+ static const int kJSObjectType = 0xbd;
static const int kFirstNonstringType = 0x80;
static const int kOddballType = 0x83;
static const int kForeignType = 0x88;
}
+// Allocates a new WeakCell in the old generation, initialized to hold
+// |value|.  CALL_HEAP_FUNCTION (project macro) wraps the raw allocation
+// result back into a handle.
+Handle<WeakCell> Factory::NewWeakCell(Handle<HeapObject> value) {
+  // Dereferencing a possibly-deferred handle here is safe: the raw pointer
+  // is consumed immediately by the allocator before any GC can move it.
+  AllowDeferredHandleDereference convert_to_cell;
+  CALL_HEAP_FUNCTION(isolate(), isolate()->heap()->AllocateWeakCell(*value),
+                     WeakCell);
+}
+
+
Handle<AllocationSite> Factory::NewAllocationSite() {
Handle<Map> map = allocation_site_map();
Handle<AllocationSite> site = New<AllocationSite>(map, OLD_POINTER_SPACE);
Handle<PropertyCell> NewPropertyCell(Handle<Object> value);
+ Handle<WeakCell> NewWeakCell(Handle<HeapObject> value);
+
// Allocate a tenured AllocationSite. It's payload is null.
Handle<AllocationSite> NewAllocationSite();
set_array_buffers_list(Smi::FromInt(0));
set_allocation_sites_list(Smi::FromInt(0));
set_encountered_weak_collections(Smi::FromInt(0));
+ set_encountered_weak_cells(Smi::FromInt(0));
// Put a dummy entry in the remembered pages so we can find the list the
// minidump even if there are no real unmapped pages.
RememberUnmappedPage(NULL, false);
// Copy objects reachable from the encountered weak collections list.
scavenge_visitor.VisitPointer(&encountered_weak_collections_);
+ // Copy objects reachable from the encountered weak cells.
+ scavenge_visitor.VisitPointer(&encountered_weak_cells_);
// Copy objects reachable from the code flushing candidates list.
MarkCompactCollector* collector = mark_compact_collector();
ALLOCATE_MAP(CELL_TYPE, Cell::kSize, cell)
ALLOCATE_MAP(PROPERTY_CELL_TYPE, PropertyCell::kSize, global_property_cell)
+ ALLOCATE_MAP(WEAK_CELL_TYPE, WeakCell::kSize, weak_cell)
ALLOCATE_MAP(FILLER_TYPE, kPointerSize, one_pointer_filler)
ALLOCATE_MAP(FILLER_TYPE, 2 * kPointerSize, two_pointer_filler)
}
+// Allocates and initializes a WeakCell holding |value| in old pointer space.
+// Returns a retry AllocationResult if the raw allocation fails.
+AllocationResult Heap::AllocateWeakCell(HeapObject* value) {
+  int size = WeakCell::kSize;
+  STATIC_ASSERT(WeakCell::kSize <= Page::kMaxRegularHeapObjectSize);
+  HeapObject* result;
+  {
+    AllocationResult allocation =
+        AllocateRaw(size, OLD_POINTER_SPACE, OLD_POINTER_SPACE);
+    if (!allocation.To(&result)) return allocation;
+  }
+  // Install the map first so the object is iterable, then the value (with
+  // write barrier, via initialize()).
+  result->set_map_no_write_barrier(weak_cell_map());
+  WeakCell::cast(result)->initialize(value);
+  // next == undefined marks "not enqueued" for the marking visitor.
+  // Skipping the write barrier is presumably safe because undefined is a
+  // root value -- matches ProcessAndClearWeakCells' use of the same mode.
+  WeakCell::cast(result)->set_next(undefined_value(), SKIP_WRITE_BARRIER);
+  return result;
+}
+
+
void Heap::CreateApiObjects() {
HandleScope scope(isolate());
Factory* factory = isolate()->factory();
V(Map, fixed_cow_array_map, FixedCOWArrayMap) \
V(Map, fixed_double_array_map, FixedDoubleArrayMap) \
V(Map, constant_pool_array_map, ConstantPoolArrayMap) \
+ V(Map, weak_cell_map, WeakCellMap) \
V(Oddball, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \
V(Map, hash_table_map, HashTableMap) \
V(Map, ordered_hash_table_map, OrderedHashTableMap) \
V(fixed_cow_array_map) \
V(fixed_double_array_map) \
V(constant_pool_array_map) \
+ V(weak_cell_map) \
V(no_interceptor_result_sentinel) \
V(hash_table_map) \
V(ordered_hash_table_map) \
return encountered_weak_collections_;
}
+ void set_encountered_weak_cells(Object* weak_cell) {
+ encountered_weak_cells_ = weak_cell;
+ }
+ Object* encountered_weak_cells() const { return encountered_weak_cells_; }
+
// Number of mark-sweeps.
unsigned int ms_count() { return ms_count_; }
// contains Smi(0) while marking is not active.
Object* encountered_weak_collections_;
+ Object* encountered_weak_cells_;
+
StoreBufferRebuilder store_buffer_rebuilder_;
struct StringTypeTable {
// Allocate a tenured JS global property cell initialized with the hole.
MUST_USE_RESULT AllocationResult AllocatePropertyCell();
+ MUST_USE_RESULT AllocationResult AllocateWeakCell(HeapObject* value);
+
// Allocates a new utility object in the old generation.
MUST_USE_RESULT AllocationResult AllocateStruct(InstanceType type);
if (FLAG_collect_maps) ClearNonLiveReferences();
+ ProcessAndClearWeakCells();
+
ClearWeakCollections();
+ heap_->set_encountered_weak_cells(Smi::FromInt(0));
+
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
VerifyMarking(heap_);
heap()->incremental_marking()->Abort();
ClearMarkbits();
AbortWeakCollections();
+ AbortWeakCells();
AbortCompaction();
was_marked_incrementally_ = false;
}
}
+// Walks the singly-linked list of weak cells gathered during marking.
+// A cell whose value did not survive marking is cleared (value becomes
+// undefined); a cell with a live value has its value slot recorded so the
+// compactor can update the pointer if the value object moves.  The list is
+// dismantled as it is walked and the head reset to the Smi(0) sentinel.
+void MarkCompactCollector::ProcessAndClearWeakCells() {
+  HeapObject* undefined = heap()->undefined_value();
+  Object* weak_cell_obj = heap()->encountered_weak_cells();
+  while (weak_cell_obj != Smi::FromInt(0)) {
+    WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj);
+    HeapObject* value = weak_cell->value();
+    if (!MarkCompactCollector::IsMarked(value)) {
+      // Value is dead: sever the weak reference.
+      weak_cell->clear(undefined);
+    } else {
+      Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
+      heap()->mark_compact_collector()->RecordSlot(slot, slot, value);
+    }
+    // Advance BEFORE unlinking -- set_next overwrites the link we still need.
+    weak_cell_obj = weak_cell->next();
+    weak_cell->set_next(undefined, SKIP_WRITE_BARRIER);
+  }
+  heap()->set_encountered_weak_cells(Smi::FromInt(0));
+}
+
+
+// Called when (incremental) marking is aborted: unlinks every enqueued weak
+// cell WITHOUT clearing any values -- no liveness information is available
+// at this point -- and resets the list head to the Smi(0) sentinel.
+void MarkCompactCollector::AbortWeakCells() {
+  Object* undefined = heap()->undefined_value();
+  Object* weak_cell_obj = heap()->encountered_weak_cells();
+  while (weak_cell_obj != Smi::FromInt(0)) {
+    WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj);
+    // Read the link before resetting it to the "not enqueued" sentinel.
+    weak_cell_obj = weak_cell->next();
+    weak_cell->set_next(undefined, SKIP_WRITE_BARRIER);
+  }
+  heap()->set_encountered_weak_cells(Smi::FromInt(0));
+}
+
+
void MarkCompactCollector::RecordMigratedSlot(Object* value, Address slot) {
if (heap_->InNewSpace(value)) {
heap_->store_buffer()->Mark(slot);
}
-// We scavange new space simultaneously with sweeping. This is done in two
+// We scavenge new space simultaneously with sweeping. This is done in two
// passes.
//
// The first pass migrates all alive objects from one semispace to another or
// collections when incremental marking is aborted.
void AbortWeakCollections();
+
+ void ProcessAndClearWeakCells();
+ void AbortWeakCells();
+
// -----------------------------------------------------------------------
// Phase 2: Sweeping to clear mark bits and free non-live objects for
// a non-compacting collection.
table_.Register(kVisitPropertyCell, &VisitPropertyCell);
+ table_.Register(kVisitWeakCell, &VisitWeakCell);
+
table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
kVisitDataObjectGeneric>();
template <typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitWeakCell(Map* map,
+                                                        HeapObject* object) {
+  Heap* heap = map->GetHeap();
+  WeakCell* weak_cell = reinterpret_cast<WeakCell*>(object);
+  Object* undefined = heap->undefined_value();
+  // Enqueue the weak cell in the linked list of encountered weak cells so
+  // it can be processed after marking.  The value itself is deliberately
+  // NOT marked here -- that is what makes the reference weak.  A cell whose
+  // next field is not undefined is already enqueued; a cell with a cleared
+  // value always points to undefined and can be skipped entirely.
+  if (weak_cell->next() == undefined && weak_cell->value() != undefined) {
+    weak_cell->set_next(heap->encountered_weak_cells());
+    heap->set_encountered_weak_cells(weak_cell);
+  }
+}
+
+
+template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
Map* map, HeapObject* object) {
Heap* heap = map->GetHeap();
case PROPERTY_CELL_TYPE:
return kVisitPropertyCell;
+ case WEAK_CELL_TYPE:
+ return kVisitWeakCell;
+
case JS_SET_TYPE:
return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
JSSet::kSize);
V(Map) \
V(Cell) \
V(PropertyCell) \
+ V(WeakCell) \
V(SharedFunctionInfo) \
V(JSFunction) \
V(JSWeakCollection) \
}
INLINE(static void VisitPropertyCell(Map* map, HeapObject* object));
+ INLINE(static void VisitWeakCell(Map* map, HeapObject* object));
INLINE(static void VisitCodeEntry(Heap* heap, Address entry_address));
INLINE(static void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo));
INLINE(static void VisitCell(Heap* heap, RelocInfo* rinfo));
case PROPERTY_CELL_TYPE:
PropertyCell::cast(this)->PropertyCellVerify();
break;
+ case WEAK_CELL_TYPE:
+ WeakCell::cast(this)->WeakCellVerify();
+ break;
case JS_ARRAY_TYPE:
JSArray::cast(this)->JSArrayVerify();
break;
}
+// Heap-verifier hook: checks the instance type and that both fields hold
+// valid object pointers.
+void WeakCell::WeakCellVerify() {
+  CHECK(IsWeakCell());
+  VerifyObjectField(kValueOffset);
+  VerifyObjectField(kNextOffset);
+}
+
+
void Code::CodeVerify() {
CHECK(IsAligned(reinterpret_cast<intptr_t>(instruction_start()),
kCodeAlignment));
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(Cell, CELL_TYPE)
TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
+TYPE_CHECKER(WeakCell, WEAK_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
}
+// Returns the referent; a cleared cell holds undefined (see clear()).
+HeapObject* WeakCell::value() const {
+  return HeapObject::cast(READ_FIELD(this, kValueOffset));
+}
+
+
+// GC-only: severs the weak reference by overwriting the value with
+// undefined.  The caller passes undefined in so no heap lookup (and no
+// write barrier) is needed here.
+void WeakCell::clear(HeapObject* undefined) {
+  WRITE_FIELD(this, kValueOffset, undefined);
+}
+
+
+// Allocator-only: installs the initial referent with a full write barrier.
+void WeakCell::initialize(HeapObject* val) {
+  WRITE_FIELD(this, kValueOffset, val);
+  WRITE_BARRIER(GetHeap(), this, kValueOffset, val);
+}
+
+
+// Link used by the GC to chain encountered weak cells; undefined means the
+// cell is not enqueued, Smi(0) terminates the heap's list.
+Object* WeakCell::next() const { return READ_FIELD(this, kNextOffset); }
+
+
+// Stores the GC chaining link.  The barrier is emitted only on request:
+// the collector passes SKIP_WRITE_BARRIER when linking in sentinels.
+void WeakCell::set_next(Object* new_next, WriteBarrierMode mode) {
+  WRITE_FIELD(this, kNextOffset, new_next);
+  if (mode != UPDATE_WRITE_BARRIER) return;
+  WRITE_BARRIER(GetHeap(), this, kNextOffset, new_next);
+}
+
+
int JSObject::GetHeaderSize() {
InstanceType type = map()->instance_type();
// Check for the most common kind of JavaScript object before
CAST_ACCESSOR(Struct)
CAST_ACCESSOR(Symbol)
CAST_ACCESSOR(UnseededNumberDictionary)
+CAST_ACCESSOR(WeakCell)
CAST_ACCESSOR(WeakHashTable)
case PROPERTY_CELL_TYPE:
PropertyCell::cast(this)->PropertyCellPrint(os);
break;
+ case WEAK_CELL_TYPE:
+ WeakCell::cast(this)->WeakCellPrint(os);
+ break;
case JS_ARRAY_BUFFER_TYPE:
JSArrayBuffer::cast(this)->JSArrayBufferPrint(os);
break;
}
+// Debug printer hook: currently emits only the object header; the value
+// and next fields are not dumped.
+void WeakCell::WeakCellPrint(std::ostream& os) {  // NOLINT
+  HeapObject::PrintHeader(os, "WeakCell");
+}
+
+
void Code::CodePrint(std::ostream& os) { // NOLINT
HeapObject::PrintHeader(os, "Code");
#ifdef ENABLE_DISASSEMBLER
case PROPERTY_CELL_TYPE:
PropertyCell::BodyDescriptor::IterateBody(this, v);
break;
+ case WEAK_CELL_TYPE:
+ WeakCell::BodyDescriptor::IterateBody(this, v);
+ break;
case SYMBOL_TYPE:
Symbol::BodyDescriptor::IterateBody(this, v);
break;
// - DebugInfo
// - BreakPointInfo
// - CodeCache
+// - WeakCell
//
// Formats of Object*:
// Smi: [31 bit signed int] 0
V(FIXED_DOUBLE_ARRAY_TYPE) \
V(CONSTANT_POOL_ARRAY_TYPE) \
V(SHARED_FUNCTION_INFO_TYPE) \
+ V(WEAK_CELL_TYPE) \
\
V(JS_MESSAGE_OBJECT_TYPE) \
\
FIXED_ARRAY_TYPE,
CONSTANT_POOL_ARRAY_TYPE,
SHARED_FUNCTION_INFO_TYPE,
+ WEAK_CELL_TYPE,
// All the following types are subtypes of JSReceiver, which corresponds to
// objects in the JS sense. The first and the last type in this range are
V(AccessCheckNeeded) \
V(Cell) \
V(PropertyCell) \
+ V(WeakCell) \
V(ObjectHashTable) \
V(WeakHashTable) \
V(OrderedHashTable)
};
+// A WeakCell holds a weak reference to a HeapObject: the mark-compact
+// collector clears the value (to undefined) instead of keeping it alive.
+// Cells are chained through |next| while the collector processes them.
+class WeakCell : public HeapObject {
+ public:
+  // The referent, or undefined once the cell has been cleared.
+  inline HeapObject* value() const;
+
+  // This should not be called by anyone except GC.
+  inline void clear(HeapObject* undefined);
+
+  // This should not be called by anyone except allocator.
+  inline void initialize(HeapObject* value);
+
+  // GC chaining link; undefined when the cell is not enqueued.
+  DECL_ACCESSORS(next, Object)
+
+  DECLARE_CAST(WeakCell)
+
+  DECLARE_PRINTER(WeakCell)
+  DECLARE_VERIFIER(WeakCell)
+
+  // Layout description.
+  static const int kValueOffset = HeapObject::kHeaderSize;
+  static const int kNextOffset = kValueOffset + kPointerSize;
+  static const int kSize = kNextOffset + kPointerSize;
+
+  // NOTE(review): this descriptor spans both fields, so generic visitors
+  // would see |value| strongly; weak semantics rely on the marking
+  // visitor's special-cased VisitWeakCell -- confirm all marking paths go
+  // through it.
+  typedef FixedBodyDescriptor<kValueOffset, kSize, kSize> BodyDescriptor;
+
+ private:
+  DISALLOW_IMPLICIT_CONSTRUCTORS(WeakCell);
+};
+
+
// The JSProxy describes EcmaScript Harmony proxies
class JSProxy: public JSReceiver {
public:
}
+// Checks the two halves of the WeakCell contract: scavenges never clear a
+// cell, and a full mark-compact clears exactly the cells whose values are
+// otherwise unreachable.
+TEST(WeakCell) {
+  CcTest::InitializeVM();
+  Isolate* isolate = CcTest::i_isolate();
+  v8::internal::Heap* heap = CcTest::heap();
+  v8::internal::Factory* factory = isolate->factory();
+
+  HandleScope outer_scope(isolate);
+  Handle<WeakCell> weak_cell1;
+  {
+    HandleScope inner_scope(isolate);
+    // After the inner scope closes, this array is reachable only through
+    // the weak cell.
+    Handle<HeapObject> value = factory->NewFixedArray(1, NOT_TENURED);
+    weak_cell1 = inner_scope.CloseAndEscape(factory->NewWeakCell(value));
+  }
+
+  // |survivor| stays strongly reachable via the outer scope for the whole
+  // test, so weak_cell2 must never be cleared.
+  Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
+  Handle<WeakCell> weak_cell2;
+  {
+    HandleScope inner_scope(isolate);
+    weak_cell2 = inner_scope.CloseAndEscape(factory->NewWeakCell(survivor));
+  }
+  CHECK(weak_cell1->value()->IsFixedArray());
+  CHECK_EQ(*survivor, weak_cell2->value());
+  // Scavenges do not process weak cells, so both values must survive two
+  // new-space collections (including a possible promotion).
+  heap->CollectGarbage(NEW_SPACE);
+  CHECK(weak_cell1->value()->IsFixedArray());
+  CHECK_EQ(*survivor, weak_cell2->value());
+  heap->CollectGarbage(NEW_SPACE);
+  CHECK(weak_cell1->value()->IsFixedArray());
+  CHECK_EQ(*survivor, weak_cell2->value());
+  // A full GC must clear the unreachable value but keep the strongly held
+  // one.  (Previously the clearing of weak_cell1 was never asserted, so
+  // the core weak behavior went untested.)
+  heap->CollectAllAvailableGarbage();
+  CHECK(weak_cell1->value()->IsUndefined());
+  CHECK_EQ(*survivor, weak_cell2->value());
+  CHECK(weak_cell2->value()->IsFixedArray());
+}
+
+
+// Checks that incremental-marking steps interleaved with scavenges do not
+// prematurely clear weak cells, and that the concluding full GC clears
+// exactly the cells whose values became unreachable.
+TEST(WeakCellsWithIncrementalMarking) {
+  CcTest::InitializeVM();
+  Isolate* isolate = CcTest::i_isolate();
+  v8::internal::Heap* heap = CcTest::heap();
+  v8::internal::Factory* factory = isolate->factory();
+
+  const int N = 16;
+  HandleScope outer_scope(isolate);
+  // Cell 0 references |survivor|, which stays strongly held; cells 1..N-1
+  // reference otherwise-unreachable arrays.
+  Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
+  Handle<WeakCell> weak_cells[N];
+
+  for (int i = 0; i < N; i++) {
+    HandleScope inner_scope(isolate);
+    Handle<HeapObject> value =
+        i == 0 ? survivor : factory->NewFixedArray(1, NOT_TENURED);
+    Handle<WeakCell> weak_cell = factory->NewWeakCell(value);
+    CHECK(weak_cell->value()->IsFixedArray());
+    // Run a marking step and a scavenge while the value is still in a live
+    // handle: neither may clear the cell.
+    IncrementalMarking* marking = heap->incremental_marking();
+    if (marking->IsStopped()) marking->Start();
+    marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
+    heap->CollectGarbage(NEW_SPACE);
+    CHECK(weak_cell->value()->IsFixedArray());
+    weak_cells[i] = inner_scope.CloseAndEscape(weak_cell);
+  }
+  // The full GC finishes marking and processes the weak cells: only the
+  // strongly reachable value survives.
+  heap->CollectAllGarbage(Heap::kNoGCFlags);
+  CHECK_EQ(*survivor, weak_cells[0]->value());
+  for (int i = 1; i < N; i++) {
+    CHECK(weak_cells[i]->value()->IsUndefined());
+  }
+}
+
+
#ifdef DEBUG
TEST(AddInstructionChangesNewSpacePromotion) {
i::FLAG_allow_natives_syntax = true;