  for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
    MarkBit mark_bit = Marking::MarkBitFrom(obj);
    ASSERT(Marking::IsWhite(mark_bit));
+    ASSERT_EQ(0, Page::FromAddress(obj->address())->LiveBytes());
  }
}
#endif
    MarkBit mark_bit = Marking::MarkBitFrom(obj);
    mark_bit.Clear();
    mark_bit.Next().Clear();
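+    // Also reset the page's live byte count so it stays consistent with the
+    // mark bits that were just cleared.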
+    Page::FromAddress(obj->address())->ResetLiveBytes();
  }
}
    ObjectHashTable* table = ObjectHashTable::cast(weak_map->table());
    for (int i = 0; i < table->Capacity(); i++) {
      if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) {
-        Object* value = table->get(table->EntryToValueIndex(i));
-        StaticMarkingVisitor::VisitPointer(heap(), &value);
-        table->set_unchecked(heap(),
-                             table->EntryToValueIndex(i),
-                             value,
-                             UPDATE_WRITE_BARRIER);
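+        // Visit the value slot inside the table itself instead of a stack
+        // copy, so that a value located on an evacuation candidate has its
+        // slot recorded in the slots buffer and updated after evacuation.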
+        int idx = ObjectHashTable::EntryToValueIndex(i);
+        Object** slot =
+            HeapObject::RawField(table, FixedArray::OffsetOfElementAt(idx));
+        StaticMarkingVisitor::VisitPointer(heap(), slot);
      }
    }
    weak_map_obj = weak_map->next();
static void PutIntoWeakMap(Handle<JSWeakMap> weakmap,
                           Handle<JSObject> key,
-                           int value) {
+                           Handle<Object> value) {
  Handle<ObjectHashTable> table = PutIntoObjectHashTable(
      Handle<ObjectHashTable>(ObjectHashTable::cast(weakmap->table())),
      Handle<JSObject>(JSObject::cast(*key)),
-      Handle<Smi>(Smi::FromInt(value)));
+      value);
  weakmap->set_table(*table);
}
  // Put entry into weak map.
  {
    v8::HandleScope scope;
-    PutIntoWeakMap(weakmap, Handle<JSObject>(JSObject::cast(*key)), 23);
+    PutIntoWeakMap(weakmap,
+                   Handle<JSObject>(JSObject::cast(*key)),
+                   Handle<Smi>(Smi::FromInt(23)));
  }
  CHECK_EQ(1, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
    Handle<Map> map = FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
    for (int i = 0; i < 32; i++) {
      Handle<JSObject> object = FACTORY->NewJSObjectFromMap(map);
-      PutIntoWeakMap(weakmap, object, i);
+      PutIntoWeakMap(weakmap, object, Handle<Smi>(Smi::FromInt(i)));
    }
  }
  // Check shrunk capacity.
  CHECK_EQ(32, ObjectHashTable::cast(weakmap->table())->Capacity());
}
+
+
+// Test that weak map values on an evacuation candidate which are not reachable
+// by other paths are correctly recorded in the slots buffer.
+TEST(Regress2060) {
+  FLAG_always_compact = true;
+  LocalContext context;
+  v8::HandleScope scope;
+  Handle<JSFunction> function =
+      FACTORY->NewFunction(FACTORY->function_symbol(), FACTORY->null_value());
+  Handle<JSObject> key = FACTORY->NewJSObject(function);
+  Handle<JSWeakMap> weakmap = AllocateJSWeakMap();
+
+  // Start second old-space page so that values land on evacuation candidate.
+  Page* first_page = HEAP->old_pointer_space()->anchor()->next_page();
+  FACTORY->NewFixedArray(900 * KB / kPointerSize, TENURED);
+
+  // Fill up weak map with values on an evacuation candidate.
+  {
+    v8::HandleScope scope;
+    for (int i = 0; i < 32; i++) {
+      Handle<JSObject> object = FACTORY->NewJSObject(function, TENURED);
+      CHECK(!HEAP->InNewSpace(object->address()));
+      CHECK(!first_page->Contains(object->address()));
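+      // Once the inner handle scope is gone these objects are reachable only
+      // as weak map values, so their table slots must be updated when their
+      // page is evacuated.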
+      PutIntoWeakMap(weakmap, key, object);
+    }
+  }
+
+  // Force compacting garbage collection.
+  CHECK(FLAG_always_compact);
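+  // The values' page is an evacuation candidate, so this collection exercises
+  // the slots-buffer recording for the weak map's value slots.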
+  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+}