&ObjectEvacuationStrategy<POINTER_OBJECT>::
template VisitSpecialized<SharedFunctionInfo::kSize>);
+ table_.Register(kVisitJSWeakMap,
+ &ObjectEvacuationStrategy<POINTER_OBJECT>::
+ Visit);
+
table_.Register(kVisitJSRegExp,
&ObjectEvacuationStrategy<POINTER_OBJECT>::
Visit);
friend class Page;
friend class Isolate;
friend class MarkCompactCollector;
+ friend class StaticMarkingVisitor;
friend class MapCompact;
DISALLOW_COPY_AND_ASSIGN(Heap);
live_bytes_(0),
#endif
heap_(NULL),
- code_flusher_(NULL) { }
+ code_flusher_(NULL),
+ encountered_weak_maps_(NULL) { }
void MarkCompactCollector::CollectGarbage() {
// Make sure that Prepare() has been called. The individual steps below will
// update the state as they proceed.
ASSERT(state_ == PREPARE_GC);
+ ASSERT(encountered_weak_maps_ == Smi::FromInt(0));
// Prepare has selected whether to compact the old generation or not.
// Tell the tracer.
if (FLAG_collect_maps) ClearNonLiveTransitions();
+ ClearWeakMaps();
+
SweepLargeObjectSpace();
if (IsCompacting()) {
table_.Register(kVisitSeqAsciiString, &DataObjectVisitor::Visit);
table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);
+ table_.Register(kVisitJSWeakMap, &VisitJSWeakMap);
+
table_.Register(kVisitOddball,
&FixedBodyVisitor<StaticMarkingVisitor,
Oddball::BodyDescriptor,
StructBodyDescriptor,
void> StructObjectVisitor;
+  // Marking visitor for JSWeakMap instances. Visits every pointer field of
+  // the weak map EXCEPT the backing hash table slot, and chains the weak map
+  // into the collector's encountered_weak_maps() list so its entries can be
+  // processed later, once key liveness is known (see ProcessWeakMaps and
+  // ClearWeakMaps).
+  static void VisitJSWeakMap(Map* map, HeapObject* object) {
+    MarkCompactCollector* collector = map->heap()->mark_compact_collector();
+    JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(object);
+
+    // Enqueue weak map in linked list of encountered weak maps.
+    // The next field doubles as the list link; Smi::FromInt(0) is the
+    // list terminator, so a weak map must not already be enqueued here.
+    ASSERT(weak_map->next() == Smi::FromInt(0));
+    weak_map->set_next(collector->encountered_weak_maps());
+    collector->set_encountered_weak_maps(weak_map);
+
+    // Skip visiting the backing hash table containing the mappings.
+    // The two IteratePointers calls together cover
+    // [kStartOffset, kTableOffset) and [kTableOffset + kPointerSize,
+    // object_size), i.e. everything except the table slot itself.
+    int object_size = JSWeakMap::BodyDescriptor::SizeOf(map, object);
+    BodyVisitorBase<StaticMarkingVisitor>::IteratePointers(
+        map->heap(),
+        object,
+        JSWeakMap::BodyDescriptor::kStartOffset,
+        JSWeakMap::kTableOffset);
+    BodyVisitorBase<StaticMarkingVisitor>::IteratePointers(
+        map->heap(),
+        object,
+        JSWeakMap::kTableOffset + kPointerSize,
+        object_size);
+
+    // Mark the backing hash table without pushing it on the marking stack.
+    // This keeps the table alive while preventing its entries from being
+    // marked as strong references.
+    ASSERT(!weak_map->unchecked_table()->IsMarked());
+    ASSERT(weak_map->unchecked_table()->map()->IsMarked());
+    collector->SetMark(weak_map->unchecked_table());
+  }
+
static void VisitCode(Map* map, HeapObject* object) {
reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>(
map->heap());
// marking stack have been marked, or are overflowed in the heap.
// Drains the marking stack, then processes weak maps encountered along the
// way; ProcessWeakMaps may push new objects, so the outer loop repeats until
// a fix-point is reached (stack empty and no new weak-map work produced).
void MarkCompactCollector::EmptyMarkingStack() {
  while (!marking_stack_.is_empty()) {
-    HeapObject* object = marking_stack_.Pop();
-    ASSERT(object->IsHeapObject());
-    ASSERT(heap()->Contains(object));
-    ASSERT(object->IsMarked());
-    ASSERT(!object->IsOverflowed());
-
-    // Because the object is marked, we have to recover the original map
-    // pointer and use it to mark the object's body.
-    MapWord map_word = object->map_word();
-    map_word.ClearMark();
-    Map* map = map_word.ToMap();
-    MarkObject(map);
+    while (!marking_stack_.is_empty()) {
+      HeapObject* object = marking_stack_.Pop();
+      ASSERT(object->IsHeapObject());
+      ASSERT(heap()->Contains(object));
+      ASSERT(object->IsMarked());
+      ASSERT(!object->IsOverflowed());
-    StaticMarkingVisitor::IterateBody(map, object);
+      // Because the object is marked, we have to recover the original map
+      // pointer and use it to mark the object's body.
+      MapWord map_word = object->map_word();
+      map_word.ClearMark();
+      Map* map = map_word.ToMap();
+      MarkObject(map);
+
+      StaticMarkingVisitor::IterateBody(map, object);
+    }
+
+    // Process encountered weak maps, mark objects only reachable by those
+    // weak maps and repeat until fix-point is reached.
+    ProcessWeakMaps();
  }
}
}
}
+
+// Walks the linked list of weak maps built up by VisitJSWeakMap and, for
+// every entry whose key is already marked, marks the associated value.
+// Marking a value can push new objects (including new weak maps) onto the
+// marking stack, so the caller (EmptyMarkingStack) re-runs this until a
+// fix-point is reached. The list itself is left intact; it is torn down
+// later by ClearWeakMaps.
+void MarkCompactCollector::ProcessWeakMaps() {
+  Object* weak_map_obj = encountered_weak_maps();
+  while (weak_map_obj != Smi::FromInt(0)) {
+    ASSERT(HeapObject::cast(weak_map_obj)->IsMarked());
+    JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj);
+    ObjectHashTable* table = weak_map->unchecked_table();
+    for (int i = 0; i < table->Capacity(); i++) {
+      // NOTE(review): this assumes every key slot holds a heap object
+      // (live key or a heap-allocated sentinel) — confirm against the
+      // ObjectHashTable representation.
+      if (HeapObject::cast(table->KeyAt(i))->IsMarked()) {
+        Object* value = table->get(table->EntryToValueIndex(i));
+        // MarkObjectByPointer may update the pointer it is given, so the
+        // (possibly adjusted) value is stored back into the table below.
+        StaticMarkingVisitor::MarkObjectByPointer(heap(), &value);
+        table->set_unchecked(heap(),
+                             table->EntryToValueIndex(i),
+                             value,
+                             UPDATE_WRITE_BARRIER);
+      }
+    }
+    weak_map_obj = weak_map->next();
+  }
+}
+
+
+// After marking has reached a fix-point, removes every entry whose key was
+// not marked (i.e. is otherwise unreachable) from each encountered weak map,
+// unlinks the weak maps from the list as it goes, and finally resets the
+// list head to the Smi::FromInt(0) terminator.
+void MarkCompactCollector::ClearWeakMaps() {
+  Object* weak_map_obj = encountered_weak_maps();
+  while (weak_map_obj != Smi::FromInt(0)) {
+    ASSERT(HeapObject::cast(weak_map_obj)->IsMarked());
+    JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj);
+    ObjectHashTable* table = weak_map->unchecked_table();
+    for (int i = 0; i < table->Capacity(); i++) {
+      // NOTE(review): as in ProcessWeakMaps, assumes key slots are heap
+      // objects — confirm against the ObjectHashTable representation.
+      if (!HeapObject::cast(table->KeyAt(i))->IsMarked()) {
+        table->RemoveEntry(i, heap());
+      }
+    }
+    // Advance before clearing next, then restore the "not enqueued"
+    // invariant expected by VisitJSWeakMap on the next GC cycle.
+    weak_map_obj = weak_map->next();
+    weak_map->set_next(Smi::FromInt(0));
+  }
+  set_encountered_weak_maps(Smi::FromInt(0));
+}
+
// -------------------------------------------------------------------------
// Phase 2: Encode forwarding addresses.
// When compacting, forwarding addresses for objects in old space and map
inline bool is_code_flushing_enabled() const { return code_flusher_ != NULL; }
void EnableCodeFlushing(bool enable);
+ inline Object* encountered_weak_maps() { return encountered_weak_maps_; }
+ inline void set_encountered_weak_maps(Object* weak_map) {
+ encountered_weak_maps_ = weak_map;
+ }
+
private:
MarkCompactCollector();
~MarkCompactCollector();
// We replace them with a null descriptor, with the same key.
void ClearNonLiveTransitions();
+ // Mark all values associated with reachable keys in weak maps encountered
+ // so far. This might push new object or even new weak maps onto the
+ // marking stack.
+ void ProcessWeakMaps();
+
+ // After all reachable objects have been marked those weak map entries
+ // with an unreachable key are removed from all encountered weak maps.
+ // The linked list of all encountered weak maps is destroyed.
+ void ClearWeakMaps();
+
// -----------------------------------------------------------------------
// Phase 2: Sweeping to clear mark bits and free non-live objects for
// a non-compacting collection, or else computing and encoding
Heap* heap_;
MarkingStack marking_stack_;
CodeFlusher* code_flusher_;
+ Object* encountered_weak_maps_;
friend class Heap;
friend class OverflowedObjectsScanner;
ACCESSORS(JSWeakMap, table, ObjectHashTable, kTableOffset)
+ACCESSORS_GCSAFE(JSWeakMap, next, Object, kNextOffset)
+
+
+// GC-safe accessor: reads the table field raw (reinterpret_cast, no type
+// check), for use while the collector is running and normal checked
+// accessors may not be safe.
+ObjectHashTable* JSWeakMap::unchecked_table() {
+  return reinterpret_cast<ObjectHashTable*>(READ_FIELD(this, kTableOffset));
+}
Address Foreign::address() {
}
+// Convenience overload: looks up the heap from the table itself and
+// delegates to the explicit-heap variant (used directly by the GC).
+void ObjectHashTable::RemoveEntry(int entry) {
+  RemoveEntry(entry, GetHeap());
+}
+
+
void Map::ClearCodeCache(Heap* heap) {
// No write barrier is needed since empty_fixed_array is not in new space.
// Please note this function is used during marking:
case JS_GLOBAL_PROPERTY_CELL_TYPE:
return kVisitPropertyCell;
+ case JS_WEAK_MAP_TYPE:
+ return kVisitJSWeakMap;
+
case JS_REGEXP_TYPE:
return kVisitJSRegExp;
case JS_GLOBAL_OBJECT_TYPE:
case JS_BUILTINS_OBJECT_TYPE:
case JS_MESSAGE_OBJECT_TYPE:
- case JS_WEAK_MAP_TYPE:
return GetVisitorIdForSize(kVisitJSObject,
kVisitJSObjectGeneric,
instance_size);
kVisitPropertyCell,
kVisitSharedFunctionInfo,
kVisitJSFunction,
+ kVisitJSWeakMap,
kVisitJSRegExp,
kVisitorIdCount,
SharedFunctionInfo::BodyDescriptor,
int>::Visit);
- table_.Register(kVisitJSRegExp, &VisitJSRegExp);
+ table_.Register(kVisitJSWeakMap, &VisitJSObject);
+
+ table_.Register(kVisitJSRegExp, &VisitJSObject);
table_.Register(kVisitSeqAsciiString, &VisitSeqAsciiString);
return FixedDoubleArray::SizeFor(length);
}
+  // Generic size-returning visitor for JS objects that need no special
+  // handling here; delegates to JSObjectVisitor (registered above for
+  // kVisitJSWeakMap and kVisitJSRegExp).
+  static inline int VisitJSObject(Map* map, HeapObject* object) {
+    return JSObjectVisitor::Visit(map, object);
+  }
+
  // Returns the size in bytes of a sequential ASCII string, derived from
  // its instance type.
  static inline int VisitSeqAsciiString(Map* map, HeapObject* object) {
    return SeqAsciiString::cast(object)->
        SeqAsciiStringSize(map->instance_type());
  }
- static inline int VisitJSRegExp(Map* map, HeapObject* object) {
- return JSObjectVisitor::Visit(map, object);
- }
-
  // Returns the size in bytes of a sequential two-byte string, derived from
  // its instance type.
  static inline int VisitSeqTwoByteString(Map* map, HeapObject* object) {
    return SeqTwoByteString::cast(object)->
        SeqTwoByteStringSize(map->instance_type());
  }
-void ObjectHashTable::RemoveEntry(int entry) {
- Object* null_value = GetHeap()->null_value();
- set(EntryToIndex(entry), null_value);
- set(EntryToIndex(entry) + 1, null_value);
+// Removes an entry by nulling out both its key and value slots and
+// decrementing the element count. Takes the Heap explicitly so callers
+// running inside the GC (e.g. MarkCompactCollector::ClearWeakMaps) can
+// supply it instead of calling GetHeap().
+void ObjectHashTable::RemoveEntry(int entry, Heap* heap) {
+  set_null(heap, EntryToIndex(entry));
+  set_null(heap, EntryToIndex(entry) + 1);
  ElementRemoved();
}
MUST_USE_RESULT MaybeObject* Put(JSObject* key, Object* value);
private:
+ friend class MarkCompactCollector;
+
void AddEntry(int entry, JSObject* key, Object* value);
- void RemoveEntry(int entry);
+ void RemoveEntry(int entry, Heap* heap);
+ inline void RemoveEntry(int entry);
+
+ // Returns the index to the value of an entry.
+ static inline int EntryToValueIndex(int entry) {
+ return EntryToIndex(entry) + 1;
+ }
};
// [table]: the backing hash table mapping keys to values.
DECL_ACCESSORS(table, ObjectHashTable)
+ // [next]: linked list of encountered weak maps during GC.
+ DECL_ACCESSORS(next, Object)
+
+ // Unchecked accessors to be used during GC.
+ inline ObjectHashTable* unchecked_table();
+
// Casting.
static inline JSWeakMap* cast(Object* obj);
#endif
static const int kTableOffset = JSObject::kHeaderSize;
- static const int kSize = kTableOffset + kPointerSize;
+ static const int kNextOffset = kTableOffset + kPointerSize;
+ static const int kSize = kNextOffset + kPointerSize;
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(JSWeakMap);
ASSERT(weakmap->map()->inobject_properties() == 0);
Handle<ObjectHashTable> table = isolate->factory()->NewObjectHashTable(0);
weakmap->set_table(*table);
+ weakmap->set_next(Smi::FromInt(0));
return *weakmap;
}
'test-threads.cc',
'test-unbound-queue.cc',
'test-utils.cc',
- 'test-version.cc'
+ 'test-version.cc',
+ 'test-weakmaps.cc'
],
'arch:arm': [
'test-assembler-arm.cc',
test-serialize/TestThatAlwaysFails: FAIL
test-serialize/DependentTestThatAlwaysFails: FAIL
+# Weak map backing stores are not yet shrunk after the GC has emptied them.
+test-weakmaps/Shrinking: FAIL
+
##############################################################################
[ $arch == arm ]
--- /dev/null
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "v8.h"
+
+#include "global-handles.h"
+#include "snapshot.h"
+#include "cctest.h"
+
+using namespace v8::internal;
+
+
+// Test helper: allocates a JSWeakMap with a fresh one-element backing hash
+// table and an empty (Smi 0) next-link, mirroring runtime initialization.
+static Handle<JSWeakMap> AllocateJSWeakMap() {
+  Handle<Map> map = FACTORY->NewMap(JS_WEAK_MAP_TYPE, JSWeakMap::kSize);
+  Handle<JSObject> weakmap_obj = FACTORY->NewJSObjectFromMap(map);
+  Handle<JSWeakMap> weakmap(JSWeakMap::cast(*weakmap_obj));
+  // Do not use handles for the hash table, it would make entries strong.
+  Object* table_obj = ObjectHashTable::Allocate(1)->ToObjectChecked();
+  ObjectHashTable* table = ObjectHashTable::cast(table_obj);
+  weakmap->set_table(table);
+  weakmap->set_next(Smi::FromInt(0));
+  return weakmap;
+}
+
+// Test helper: inserts key -> Smi(value) into the weak map's hash table and
+// stores the (possibly reallocated) table back into the weak map.
+static void PutIntoWeakMap(Handle<JSWeakMap> weakmap,
+                           Handle<JSObject> key,
+                           int value) {
+  Handle<ObjectHashTable> table = PutIntoObjectHashTable(
+      Handle<ObjectHashTable>(weakmap->table()),
+      Handle<JSObject>(JSObject::cast(*key)),
+      Handle<Smi>(Smi::FromInt(value)));
+  weakmap->set_table(*table);
+}
+
+// Counts invocations of the weak-handle callback so tests can assert when
+// weak global handles have actually been cleared by the GC.
+static int NumberOfWeakCalls = 0;
+static void WeakPointerCallback(v8::Persistent<v8::Value> handle, void* id) {
+  // The tests register this callback with parameter 1234 as a sanity check.
+  ASSERT(id == reinterpret_cast<void*>(1234));
+  NumberOfWeakCalls++;
+  handle.Dispose();
+}
+
+
+// Checks ephemeron semantics: a weak map entry survives as long as its key
+// is strongly reachable, and is removed once the only reference to the key
+// is a weak global handle that the GC clears.
+TEST(Weakness) {
+  LocalContext context;
+  v8::HandleScope scope;
+  Handle<JSWeakMap> weakmap = AllocateJSWeakMap();
+  GlobalHandles* global_handles = Isolate::Current()->global_handles();
+
+  // Keep global reference to the key.
+  Handle<Object> key;
+  {
+    v8::HandleScope scope;
+    Handle<Map> map = FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
+    Handle<JSObject> object = FACTORY->NewJSObjectFromMap(map);
+    key = global_handles->Create(*object);
+  }
+  CHECK(!global_handles->IsWeak(key.location()));
+
+  // Put entry into weak map.
+  {
+    v8::HandleScope scope;
+    PutIntoWeakMap(weakmap, Handle<JSObject>(JSObject::cast(*key)), 23);
+  }
+  CHECK_EQ(1, weakmap->table()->NumberOfElements());
+
+  // Force a full GC. The key is still strongly referenced, so the entry
+  // must survive.
+  HEAP->CollectAllGarbage(false);
+  CHECK_EQ(0, NumberOfWeakCalls);
+  CHECK_EQ(1, weakmap->table()->NumberOfElements());
+  CHECK_EQ(0, weakmap->table()->NumberOfDeletedElements());
+
+  // Make the global reference to the key weak.
+  {
+    v8::HandleScope scope;
+    global_handles->MakeWeak(key.location(),
+                             reinterpret_cast<void*>(1234),
+                             &WeakPointerCallback);
+  }
+  CHECK(global_handles->IsWeak(key.location()));
+
+  // Force a full GC.
+  // Perform two consecutive GCs because the first one will only clear
+  // weak references whereas the second one will also clear weak maps.
+  HEAP->CollectAllGarbage(false);
+  CHECK_EQ(1, NumberOfWeakCalls);
+  CHECK_EQ(1, weakmap->table()->NumberOfElements());
+  CHECK_EQ(0, weakmap->table()->NumberOfDeletedElements());
+  HEAP->CollectAllGarbage(false);
+  CHECK_EQ(1, NumberOfWeakCalls);
+  CHECK_EQ(0, weakmap->table()->NumberOfElements());
+  CHECK_EQ(1, weakmap->table()->NumberOfDeletedElements());
+}
+
+
+// Checks backing-table capacity behavior: filling the map grows the table,
+// and after the GC clears all entries the capacity should shrink again.
+// Shrinking is not implemented yet, so this test is marked FAIL in the
+// cctest status file.
+TEST(Shrinking) {
+  LocalContext context;
+  v8::HandleScope scope;
+  Handle<JSWeakMap> weakmap = AllocateJSWeakMap();
+
+  // Check initial capacity.
+  CHECK_EQ(32, weakmap->table()->Capacity());
+
+  // Fill up weak map to trigger capacity change. The keys are only
+  // reachable from the inner handle scope, so they die at its end.
+  {
+    v8::HandleScope scope;
+    Handle<Map> map = FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
+    for (int i = 0; i < 32; i++) {
+      Handle<JSObject> object = FACTORY->NewJSObjectFromMap(map);
+      PutIntoWeakMap(weakmap, object, i);
+    }
+  }
+
+  // Check increased capacity.
+  CHECK_EQ(128, weakmap->table()->Capacity());
+
+  // Force a full GC. All keys are unreachable, so every entry is removed.
+  CHECK_EQ(32, weakmap->table()->NumberOfElements());
+  CHECK_EQ(0, weakmap->table()->NumberOfDeletedElements());
+  HEAP->CollectAllGarbage(false);
+  CHECK_EQ(0, weakmap->table()->NumberOfElements());
+  CHECK_EQ(32, weakmap->table()->NumberOfDeletedElements());
+
+  // Check shrunk capacity.
+  CHECK_EQ(32, weakmap->table()->Capacity());
+}
assertTrue(m instanceof WeakMap);
assertTrue(WeakMap.prototype.set instanceof Function)
assertTrue(WeakMap.prototype.get instanceof Function)
+
+
+// Stress Test
+// There is a proposed stress-test available at the es-discuss mailing list
+// which cannot be reasonably automated. Check it out by hand if you like:
+// https://mail.mozilla.org/pipermail/es-discuss/2011-May/014096.html