From: svenpanne@chromium.org Date: Fri, 9 May 2014 09:02:40 +0000 (+0000) Subject: Cleaned up the weak lists hanging off the heap a bit. X-Git-Tag: upstream/4.7.83~9217 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=6eaf7826ac942e07343b21a5fda9ba67272e5045;p=platform%2Fupstream%2Fv8.git Cleaned up the weak lists hanging off the heap a bit. * Route all access to the 3 weak lists through getters/setters. * Removed superfluous visiting already done by ProcessWeakReferences. R=mstarzinger@chromium.org Review URL: https://codereview.chromium.org/273653006 git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@21207 ce2b1a6d-e550-0410-aec6-3dcde31c8c00 --- diff --git a/src/heap.cc b/src/heap.cc index 18103c3..a282b0c 100644 --- a/src/heap.cc +++ b/src/heap.cc @@ -144,9 +144,9 @@ Heap::Heap() ASSERT(MB >= Page::kPageSize); memset(roots_, 0, sizeof(roots_[0]) * kRootListLength); - native_contexts_list_ = NULL; - array_buffers_list_ = Smi::FromInt(0); - allocation_sites_list_ = Smi::FromInt(0); + set_native_contexts_list(NULL); + set_array_buffers_list(Smi::FromInt(0)); + set_allocation_sites_list(Smi::FromInt(0)); // Put a dummy entry in the remembered pages so we can find the list the // minidump even if there are no real unmapped pages. RememberUnmappedPage(NULL, false); @@ -962,7 +962,7 @@ void Heap::EnsureFromSpaceIsCommitted() { void Heap::ClearJSFunctionResultCaches() { if (isolate_->bootstrapper()->IsActive()) return; - Object* context = native_contexts_list_; + Object* context = native_contexts_list(); while (!context->IsUndefined()) { // Get the caches for this context. GC can happen when the context // is not fully initialized, so the caches can be undefined. @@ -988,7 +988,7 @@ void Heap::ClearNormalizedMapCaches() { return; } - Object* context = native_contexts_list_; + Object* context = native_contexts_list(); while (!context->IsUndefined()) { // GC can happen when the context is not fully initialized, // so the cache can be undefined. 
@@ -1569,9 +1569,6 @@ void Heap::Scavenge() { collector->code_flusher()->IteratePointersToFromSpace(&scavenge_visitor); } - // Scavenge object reachable from the native contexts list directly. - scavenge_visitor.VisitPointer(BitCast<Object**>(&native_contexts_list_)); - new_space_front = DoScavenge(&scavenge_visitor, new_space_front); while (isolate()->global_handles()->IterateObjectGroups( @@ -1704,7 +1701,7 @@ void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer, Object* head = VisitWeakList<Context>( this, native_contexts_list(), retainer, record_slots); // Update the head of the list of contexts. - native_contexts_list_ = head; + set_native_contexts_list(head); @@ -1725,7 +1722,7 @@ void Heap::TearDownArrayBuffers() { Runtime::FreeArrayBuffer(isolate(), buffer); o = buffer->weak_next(); } - array_buffers_list_ = undefined; + set_array_buffers_list(undefined); @@ -5283,9 +5280,9 @@ bool Heap::CreateHeapObjects() { CreateInitialObjects(); CHECK_EQ(0, gc_count_); - native_contexts_list_ = undefined_value(); - array_buffers_list_ = undefined_value(); - allocation_sites_list_ = undefined_value(); + set_native_contexts_list(undefined_value()); + set_array_buffers_list(undefined_value()); + set_allocation_sites_list(undefined_value()); weak_object_to_code_table_ = undefined_value(); return true; } diff --git a/src/heap.h b/src/heap.h index 38e7a9f..0ea4529 100644 --- a/src/heap.h +++ b/src/heap.h @@ -833,17 +833,19 @@ class Heap { void set_native_contexts_list(Object* object) { native_contexts_list_ = object; } - Object* native_contexts_list() { return native_contexts_list_; } + Object* native_contexts_list() const { return native_contexts_list_; } void set_array_buffers_list(Object* object) { array_buffers_list_ = object; } - Object* array_buffers_list() { return array_buffers_list_; } + Object* array_buffers_list() const { return array_buffers_list_; } void set_allocation_sites_list(Object* object) { allocation_sites_list_ = object; } Object* allocation_sites_list() { return 
allocation_sites_list_; } + + // Used in CreateAllocationSiteStub and the (de)serializer. Object** allocation_sites_list_address() { return &allocation_sites_list_; } Object* weak_object_to_code_table() { return weak_object_to_code_table_; } @@ -936,11 +938,6 @@ class Heap { return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]); } - // Get address of native contexts list for serialization support. - Object** native_contexts_list_address() { - return &native_contexts_list_; - } - #ifdef VERIFY_HEAP // Verify the heap is in its normal state before or after a GC. void Verify(); diff --git a/src/mark-compact.cc b/src/mark-compact.cc index ec8e941..3801396 100644 --- a/src/mark-compact.cc +++ b/src/mark-compact.cc @@ -2483,7 +2483,7 @@ void MarkCompactCollector::AfterMarking() { void MarkCompactCollector::ProcessMapCaches() { - Object* raw_context = heap()->native_contexts_list_; + Object* raw_context = heap()->native_contexts_list(); while (raw_context != heap()->undefined_value()) { Context* context = reinterpret_cast<Context*>(raw_context); if (IsMarked(context)) { @@ -3642,9 +3642,6 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { } } - // Update the head of the native contexts list in the heap. - updating_visitor.VisitPointer(heap_->native_contexts_list_address()); - heap_->string_table()->Iterate(&updating_visitor); updating_visitor.VisitPointer(heap_->weak_object_to_code_table_address()); if (heap_->weak_object_to_code_table()->IsHashTable()) {