From d4ac509f3cdcd22523ad2df64ec69d9cda370887 Mon Sep 17 00:00:00 2001 From: mstarzinger Date: Thu, 6 Aug 2015 07:22:01 -0700 Subject: [PATCH] Fix stale entries in optimized code map. This fixes a corner-case where extending an optimized code map left stale entries in the abandoned copy. This can cause havoc not only in the heap verifier but also in the GC, because stale entries have not been recorded when being treated weakly. Note that this also pre-tenures all optimized code maps into old-space because their lifetime is coupled to the SharedFunctionInfo anyways. R=hpayer@chromium.org TEST=cctest/test-heap/Regress514122 BUG=chromium:514122 LOG=N Review URL: https://codereview.chromium.org/1277873002 Cr-Commit-Position: refs/heads/master@{#30047} --- src/objects.cc | 19 +++++------ test/cctest/test-heap.cc | 88 ++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 97 insertions(+), 10 deletions(-) diff --git a/src/objects.cc b/src/objects.cc index 687426b..a86d053 100644 --- a/src/objects.cc +++ b/src/objects.cc @@ -9508,6 +9508,7 @@ void SharedFunctionInfo::AddToOptimizedCodeMap( Handle literals, BailoutId osr_ast_id) { Isolate* isolate = shared->GetIsolate(); + DCHECK(!shared->SearchOptimizedCodeMap(*native_context, osr_ast_id).code); DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION); DCHECK(native_context->IsNativeContext()); STATIC_ASSERT(kEntryLength == 4); @@ -9517,20 +9518,18 @@ void SharedFunctionInfo::AddToOptimizedCodeMap( if (value->IsSmi()) { // No optimized code map. DCHECK_EQ(0, Smi::cast(*value)->value()); - new_code_map = isolate->factory()->NewFixedArray(kInitialLength); + new_code_map = isolate->factory()->NewFixedArray(kInitialLength, TENURED); old_length = kEntriesStart; } else { - // Copy old map and append one new entry. + // Copy old optimized code map and append one new entry. 
Handle old_code_map = Handle::cast(value); - DCHECK(!shared->SearchOptimizedCodeMap(*native_context, osr_ast_id).code); - new_code_map = - isolate->factory()->CopyFixedArrayAndGrow(old_code_map, kEntryLength); + new_code_map = isolate->factory()->CopyFixedArrayAndGrow( + old_code_map, kEntryLength, TENURED); old_length = old_code_map->length(); - // Zap the old map for the sake of the heap verifier. - if (Heap::ShouldZapGarbage()) { - Object** data = old_code_map->data_start(); - MemsetPointer(data, isolate->heap()->the_hole_value(), old_length); - } + // Zap the old map to avoid any stale entries. Note that this is required + // for correctness because entries are being treated weakly by the GC. + MemsetPointer(old_code_map->data_start(), isolate->heap()->the_hole_value(), + old_length); } new_code_map->set(old_length + kContextOffset, *native_context); new_code_map->set(old_length + kCachedCodeOffset, *code); diff --git a/test/cctest/test-heap.cc b/test/cctest/test-heap.cc index dfabb92..6172d77 100644 --- a/test/cctest/test-heap.cc +++ b/test/cctest/test-heap.cc @@ -4557,6 +4557,94 @@ TEST(Regress513507) { #endif // DEBUG +TEST(Regress514122) { + i::FLAG_flush_optimized_code_cache = false; + i::FLAG_allow_natives_syntax = true; + CcTest::InitializeVM(); + Isolate* isolate = CcTest::i_isolate(); + Heap* heap = isolate->heap(); + HandleScope scope(isolate); + + // Perform one initial GC to enable code flushing. + CcTest::heap()->CollectAllGarbage(); + + // Prepare function whose optimized code map we can use. + Handle shared; + { + HandleScope inner_scope(isolate); + CompileRun("function f() { return 1 }" + "f(); %OptimizeFunctionOnNextCall(f); f();"); + + Handle f = + v8::Utils::OpenHandle( + *v8::Handle::Cast( + CcTest::global()->Get(v8_str("f")))); + shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate)); + CompileRun("f = null"); + } + + // Prepare optimized code that we can use. 
+ Handle code; + { + HandleScope inner_scope(isolate); + CompileRun("function g() { return 2 }" "g(); %OptimizeFunctionOnNextCall(g); g();"); + + Handle g = + v8::Utils::OpenHandle( + *v8::Handle::Cast( + CcTest::global()->Get(v8_str("g")))); + code = inner_scope.CloseAndEscape(handle(g->code(), isolate)); + if (!code->is_optimized_code()) return; + } + + Handle lit = isolate->factory()->empty_fixed_array(); + Handle context(isolate->context()); + + // Add the code several times to the optimized code map. + for (int i = 0; i < 3; ++i) { + HandleScope inner_scope(isolate); + BailoutId id = BailoutId(i); + SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id); + } + shared->optimized_code_map()->Print(); + + // Add the code with a literals array to be evacuated. + Page* evac_page; + { + HandleScope inner_scope(isolate); + AlwaysAllocateScope always_allocate(isolate); + // Make sure literal is placed on an old-space evacuation candidate. + SimulateFullSpace(heap->old_space()); + Handle lit = isolate->factory()->NewFixedArray(23, TENURED); + evac_page = Page::FromAddress(lit->address()); + BailoutId id = BailoutId(100); + SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id); + } + + // Heap is ready, force {evac_page} to become an evacuation candidate and + // simulate incremental marking to enqueue optimized code map. + FLAG_manual_evacuation_candidates_selection = true; + evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING); + SimulateIncrementalMarking(heap); + + // No matter whether reachable or not, {boomer} is doomed. + Handle boomer(shared->optimized_code_map(), isolate); + + // Add the code several times to the optimized code map. This will leave old + // copies of the optimized code map unreachable but still marked. 
+ for (int i = 3; i < 6; ++i) { + HandleScope inner_scope(isolate); + BailoutId id = BailoutId(i); + SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id); + } + + // Trigger a GC to flush out the bug. + heap->CollectGarbage(i::OLD_SPACE, "fire in the hole"); + boomer->Print(); +} + + class DummyVisitor : public ObjectVisitor { public: void VisitPointers(Object** start, Object** end) { } -- 2.7.4