From de672226c7554213221ffe5a049733bad6a68e90 Mon Sep 17 00:00:00 2001 From: "ulan@chromium.org" Date: Fri, 31 Oct 2014 13:11:30 +0000 Subject: [PATCH] Clear old backing store of WeakCollection on updates. Not clearing can lead to a crash under the following conditions: 1. Backing store of a weak map is allocated in large object space. 2. The backing store is marked incrementally via the weak map. 3. The weak map is updated and gets a new backing store. 4. The store buffer overflows and marks the chunk of the old backing store as "scan on scavenge." 5. Mark-compact collection kills some elements of the weak map. Note that the old backing store survives because it was marked incrementally, but its dead elements are not cleared. 6. Scavenger iterates over the old backing store, tries to move a dead object and crashes. BUG=v8:3631 LOG=N TEST=cctest/test-heap/Regress3631 R=jkummerow@chromium.org Review URL: https://codereview.chromium.org/686783003 Cr-Commit-Position: refs/heads/master@{#25032} git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@25032 ce2b1a6d-e550-0410-aec6-3dcde31c8c00 --- src/runtime/runtime-collections.cc | 8 ++++++++ test/cctest/test-heap.cc | 40 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 48 insertions(+) diff --git a/src/runtime/runtime-collections.cc b/src/runtime/runtime-collections.cc index c4f7418..c1a63dc 100644 --- a/src/runtime/runtime-collections.cc +++ b/src/runtime/runtime-collections.cc @@ -288,6 +288,10 @@ RUNTIME_FUNCTION(Runtime_WeakCollectionDelete) { Handle new_table = ObjectHashTable::Remove(table, key, &was_present); weak_collection->set_table(*new_table); + if (*table != *new_table) { + // Zap the old table since we didn't record slots for its elements. 
+ table->FillWithHoles(0, table->length()); + } return isolate->heap()->ToBoolean(was_present); } @@ -304,6 +308,10 @@ RUNTIME_FUNCTION(Runtime_WeakCollectionSet) { RUNTIME_ASSERT(table->IsKey(*key)); Handle new_table = ObjectHashTable::Put(table, key, value); weak_collection->set_table(*new_table); + if (*table != *new_table) { + // Zap the old table since we didn't record slots for its elements. + table->FillWithHoles(0, table->length()); + } return *weak_collection; } diff --git a/test/cctest/test-heap.cc b/test/cctest/test-heap.cc index ac121e0..e652282 100644 --- a/test/cctest/test-heap.cc +++ b/test/cctest/test-heap.cc @@ -4608,6 +4608,46 @@ TEST(Regress388880) { } +TEST(Regress3631) { + i::FLAG_expose_gc = true; + CcTest::InitializeVM(); + v8::HandleScope scope(CcTest::isolate()); + Isolate* isolate = CcTest::i_isolate(); + Heap* heap = isolate->heap(); + IncrementalMarking* marking = CcTest::heap()->incremental_marking(); + v8::Local result = CompileRun( + "var weak_map = new WeakMap();" + "var future_keys = [];" + "for (var i = 0; i < 50; i++) {" + " var key = {'k' : i + 0.1};" + " weak_map.set(key, 1);" + " future_keys.push({'x' : i + 0.2});" + "}" + "weak_map"); + if (marking->IsStopped()) { + marking->Start(); + } + // Incrementally mark the backing store. + Handle obj = + v8::Utils::OpenHandle(*v8::Handle::Cast(result)); + Handle weak_map(reinterpret_cast(*obj)); + while (!Marking::IsBlack( + Marking::MarkBitFrom(HeapObject::cast(weak_map->table()))) && + !marking->IsStopped()) { + marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD); + } + // Stash the backing store in a handle. + Handle save(weak_map->table(), isolate); + // The following line will update the backing store. + CompileRun( + "for (var i = 0; i < 50; i++) {" + " weak_map.set(future_keys[i], i);" + "}"); + heap->incremental_marking()->set_should_hurry(true); + heap->CollectGarbage(OLD_POINTER_SPACE); +} + + #ifdef DEBUG TEST(PathTracer) { CcTest::InitializeVM(); -- 2.7.4