i::Counters::contexts_created_from_scratch.Increment();
}
+ // Add this context to the weak list of global contexts.
+ (*global_context_)->set(Context::NEXT_CONTEXT_LINK,
+ Heap::global_contexts_list());
+ Heap::set_global_contexts_list(*global_context_);
+
result_ = global_context_;
}
OUT_OF_MEMORY_INDEX,
MAP_CACHE_INDEX,
CONTEXT_DATA_INDEX,
- GLOBAL_CONTEXT_SLOTS
+
+ // Properties from here are treated as weak references by the full GC.
+ // Scavenge treats them as strong references.
+ NEXT_CONTEXT_LINK,
+
+ // Total number of slots.
+ GLOBAL_CONTEXT_SLOTS,
+
+ FIRST_WEAK_SLOT = NEXT_CONTEXT_LINK
};
// Direct slot access.
return kHeaderSize + index * kPointerSize - kHeapObjectTag;
}
+ static const int kSize = kHeaderSize + GLOBAL_CONTEXT_SLOTS * kPointerSize;
+
+ // GC support.
+ typedef FixedBodyDescriptor<
+ kHeaderSize, kSize, kSize> ScavengeBodyDescriptor;
+
+ typedef FixedBodyDescriptor<
+ kHeaderSize,
+ kHeaderSize + FIRST_WEAK_SLOT * kPointerSize,
+ kSize> MarkCompactBodyDescriptor;
+
private:
// Unchecked access to the slots.
Object* unchecked_previous() { return get(PREVIOUS_INDEX); }
String* Heap::hidden_symbol_;
Object* Heap::roots_[Heap::kRootListLength];
+Object* Heap::global_contexts_list_;
NewSpace Heap::new_space_;
OldSpace* Heap::old_pointer_space_ = NULL;
}
}
+  // Scavenge objects reachable from the global contexts list directly.
+ scavenge_visitor.VisitPointer(BitCast<Object**>(&global_contexts_list_));
+
new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
UpdateNewSpaceReferencesInExternalStringTable(
}
+// Prune the weak list of global contexts.  Walks the singly linked list
+// threaded through each context's NEXT_CONTEXT_LINK slot, asks 'retainer'
+// which entries survive the current GC, and relinks the survivors in
+// place; dead contexts are simply skipped over.  The list head
+// (global_contexts_list_) is updated at the end.
+void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
+  Object* head = undefined_value();
+  Context* tail = NULL;
+  Object* candidate = global_contexts_list_;
+  while (!candidate->IsUndefined()) {
+    // Check whether to keep the candidate in the list.
+    Context* candidate_context = reinterpret_cast<Context*>(candidate);
+    Object* retain = retainer->RetainAs(candidate);
+    // NOTE(review): 'retain' is only used as a NULL/non-NULL answer here;
+    // a moved-object address returned by the retainer would be ignored in
+    // favor of 'candidate_context' — confirm retainers cannot return a
+    // relocated object at this point in the GC.
+    if (retain != NULL) {
+      if (head->IsUndefined()) {
+        // First element in the list.
+        head = candidate_context;
+      } else {
+        // Subsequent elements in the list: link the previous survivor to
+        // this one, skipping any dropped contexts in between.
+        ASSERT(tail != NULL);
+        tail->set_unchecked(Context::NEXT_CONTEXT_LINK,
+                            candidate_context,
+                            UPDATE_WRITE_BARRIER);
+      }
+      // Retained context is new tail.
+      tail = candidate_context;
+    }
+    // Move to next element in the list.
+    candidate = candidate_context->get(Context::NEXT_CONTEXT_LINK);
+  }
+
+  // Terminate the list if there are one or more elements.
+  if (tail != NULL) {
+    tail->set_unchecked(Context::NEXT_CONTEXT_LINK,
+                        Heap::undefined_value(),
+                        UPDATE_WRITE_BARRIER);
+  }
+
+  // Update the head of the list of contexts.
+  Heap::global_contexts_list_ = head;
+}
+
+
class NewSpaceScavenger : public StaticNewSpaceVisitor<NewSpaceScavenger> {
public:
static inline void VisitPointer(Object** p) {
table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
table_.Register(kVisitByteArray, &EvacuateByteArray);
table_.Register(kVisitFixedArray, &EvacuateFixedArray);
+ table_.Register(kVisitGlobalContext,
+ &ObjectEvacuationStrategy<POINTER_OBJECT>::
+ VisitSpecialized<Context::kSize>);
typedef ObjectEvacuationStrategy<POINTER_OBJECT> PointerObject;
obj = AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
if (obj->IsFailure()) return false;
- set_global_context_map(Map::cast(obj));
+ Map* global_context_map = Map::cast(obj);
+ global_context_map->set_visitor_id(StaticVisitorBase::kVisitGlobalContext);
+ set_global_context_map(global_context_map);
obj = AllocateMap(SHARED_FUNCTION_INFO_TYPE,
SharedFunctionInfo::kAlignedSize);
// Create initial objects
if (!CreateInitialObjects()) return false;
+
+ global_contexts_list_ = undefined_value();
}
LOG(IntPtrTEvent("heap-capacity", Capacity()));
V(closure_symbol, "(closure)")
-// Forward declaration of the GCTracer class.
+// Forward declarations.
class GCTracer;
class HeapStats;
+class WeakObjectRetainer;
typedef String* (*ExternalStringTableUpdaterCallback)(Object** pointer);
// not match the empty string.
static String* hidden_symbol() { return hidden_symbol_; }
+ static void set_global_contexts_list(Object* object) {
+ global_contexts_list_ = object;
+ }
+ static Object* global_contexts_list() { return global_contexts_list_; }
+
// Iterates over all roots in the heap.
static void IterateRoots(ObjectVisitor* v, VisitMode mode);
// Iterates over all strong roots in the heap.
// Generated code can embed this address to get access to the roots.
static Object** roots_address() { return roots_; }
+ // Get address of global contexts list for serialization support.
+ static Object** global_contexts_list_address() {
+ return &global_contexts_list_;
+ }
+
#ifdef DEBUG
static void Print();
static void PrintHandles();
static void UpdateNewSpaceReferencesInExternalStringTable(
ExternalStringTableUpdaterCallback updater_func);
+ static void ProcessWeakReferences(WeakObjectRetainer* retainer);
+
// Helper function that governs the promotion policy from new space to
// old. If the object's old address lies below the new space's age
// mark or if we've already filled the bottom 1/16th of the to space,
static Object* roots_[kRootListLength];
+ static Object* global_contexts_list_;
+
struct StringTypeTable {
InstanceType type;
int size;
static List<Object*> old_space_strings_;
};
+
+// Abstract base class for checking whether a weak object should be retained.
+// Used by Heap::ProcessWeakReferences to decide which entries of the weak
+// global contexts list survive a GC.
+class WeakObjectRetainer {
+ public:
+  virtual ~WeakObjectRetainer() {}
+
+  // Return whether this object should be retained. If NULL is returned the
+  // object has no references. Otherwise the address of the retained object
+  // should be returned as in some GC situations the object has been moved.
+  virtual Object* RetainAs(Object* object) = 0;
+};
+
+
} } // namespace v8::internal
#endif // V8_HEAP_H_
FixedArray::BodyDescriptor,
void>::Visit);
+ table_.Register(kVisitGlobalContext,
+ &FixedBodyVisitor<StaticMarkingVisitor,
+ Context::MarkCompactBodyDescriptor,
+ void>::Visit);
+
table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);
table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);
VisitPointers(SLOT_ADDR(object,
JSFunction::kCodeEntryOffset + kPointerSize),
SLOT_ADDR(object, JSFunction::kSize));
+
#undef SLOT_ADDR
}
};
+// Implementation of WeakObjectRetainer for mark compact GCs. All marked
+// objects are retained.
+class MarkCompactWeakObjectRetainer : public WeakObjectRetainer {
+ public:
+  virtual Object* RetainAs(Object* object) {
+    // An object survived marking iff its map word carries the mark bit;
+    // unmarked objects are garbage and are dropped from the weak list.
+    MapWord first_word = HeapObject::cast(object)->map_word();
+    if (first_word.IsMarked()) {
+      return object;
+    } else {
+      return NULL;
+    }
+  }
+};
+
+
void MarkCompactCollector::MarkUnmarkedObject(HeapObject* object) {
ASSERT(!object->IsMarked());
ASSERT(Heap::Contains(object));
ExternalStringTable::Iterate(&v);
ExternalStringTable::CleanUp();
+ // Process the weak references.
+ MarkCompactWeakObjectRetainer mark_compact_object_retainer;
+ Heap::ProcessWeakReferences(&mark_compact_object_retainer);
+
// Remove object groups after marking phase.
GlobalHandles::RemoveObjectGroups();
}
}
}
+ // Update pointer from the global contexts list.
+ updating_visitor.VisitPointer(Heap::global_contexts_list_address());
+
// Update pointers from external string table.
Heap::UpdateNewSpaceReferencesInExternalStringTable(
&UpdateNewSpaceReferenceInExternalStringTableEntry);
Heap::IterateRoots(&updating_visitor, VISIT_ONLY_STRONG);
GlobalHandles::IterateWeakRoots(&updating_visitor);
+ // Update the pointer to the head of the weak list of global contexts.
+ updating_visitor.VisitPointer(&Heap::global_contexts_list_);
+
int live_maps_size = IterateLiveObjects(Heap::map_space(),
&UpdatePointersInOldObject);
int live_pointer_olds_size = IterateLiveObjects(Heap::old_pointer_space(),
}
+// Store 'value' into slot 'index' without the usual debug checks (no bounds
+// or heap-space assertions).  The write barrier is applied according to the
+// caller-supplied 'mode'.  Intended for GC-internal use.
+void FixedArray::set_unchecked(int index,
+                               Object* value,
+                               WriteBarrierMode mode) {
+  int offset = kHeaderSize + index * kPointerSize;
+  WRITE_FIELD(this, offset, value);
+  CONDITIONAL_WRITE_BARRIER(this, offset, mode);
+}
+
+
void FixedArray::set_null_unchecked(int index) {
ASSERT(index >= 0 && index < this->length());
ASSERT(!Heap::InNewSpace(Heap::null_value()));
kVisitShortcutCandidate,
kVisitByteArray,
kVisitFixedArray,
+ kVisitGlobalContext,
// For data objects, JS objects and structs along with generic visitor which
// can visit object of any size we provide visitors specialized by
FixedArray::BodyDescriptor,
int>::Visit);
+ table_.Register(kVisitGlobalContext,
+ &FixedBodyVisitor<StaticVisitor,
+ Context::ScavengeBodyDescriptor,
+ int>::Visit);
+
table_.Register(kVisitByteArray, &VisitByteArray);
table_.Register(kVisitSharedFunctionInfo,
// Setters with less debug checks for the GC to use.
inline void set_unchecked(int index, Smi* value);
inline void set_null_unchecked(int index);
+ inline void set_unchecked(int index, Object* value, WriteBarrierMode mode);
// Gives access to raw memory which stores the array's data.
inline Object** data_start();
ExternalReferenceDecoder::ExternalReferenceDecoder()
- : encodings_(NewArray<Address*>(kTypeCodeCount)) {
+ : encodings_(NewArray<Address*>(kTypeCodeCount)) {
ExternalReferenceTable* external_references =
ExternalReferenceTable::instance();
for (int type = kFirstTypeCode; type < kTypeCodeCount; ++type) {
external_reference_decoder_ = new ExternalReferenceDecoder();
Heap::IterateStrongRoots(this, VISIT_ONLY_STRONG);
Heap::IterateWeakRoots(this, VISIT_ALL);
+
+ Heap::set_global_contexts_list(Heap::undefined_value());
}
CHECK(page->IsRegionDirty(clone_addr + (object_size - kPointerSize)));
}
+
TEST(TestCodeFlushing) {
i::FLAG_allow_natives_syntax = true;
// If we do not flush code this test is invalid.
CHECK(function->shared()->is_compiled());
CHECK(function->is_compiled());
}
+
+
+// Count the number of global contexts in the weak list of global contexts.
+static int CountGlobalContexts() {
+  int count = 0;
+  Object* object = Heap::global_contexts_list();
+  // The list is terminated by the undefined value.
+  while (!object->IsUndefined()) {
+    count++;
+    object = Context::cast(object)->get(Context::NEXT_CONTEXT_LINK);
+  }
+  return count;
+}
+
+
+// Verify that global contexts are linked into the weak list on creation and
+// unlinked by mark-compact (but not by scavenge) once disposed.
+TEST(TestInternalWeakLists) {
+  static const int kNumTestContexts = 10;
+
+  v8::HandleScope scope;
+  v8::Persistent<v8::Context> ctx[kNumTestContexts];
+
+  CHECK_EQ(0, CountGlobalContexts());
+
+  // Create a number of global contexts which get linked together.
+  for (int i = 0; i < kNumTestContexts; i++) {
+    ctx[i] = v8::Context::New();
+    CHECK_EQ(i + 1, CountGlobalContexts());
+
+    ctx[i]->Enter();
+    ctx[i]->Exit();
+  }
+
+  // Dispose the global contexts one by one.
+  for (int i = 0; i < kNumTestContexts; i++) {
+    ctx[i].Dispose();
+    ctx[i].Clear();
+
+    // Scavenge treats these references as strong, so the list length must
+    // not change no matter how many scavenges run.
+    for (int j = 0; j < 10; j++) {
+      Heap::PerformScavenge();
+      CHECK_EQ(kNumTestContexts - i, CountGlobalContexts());
+    }
+
+    // Mark compact handles the weak references, so the disposed context
+    // must now be dropped from the list.
+    Heap::CollectAllGarbage(true);
+    CHECK_EQ(kNumTestContexts - i - 1, CountGlobalContexts());
+  }
+
+  CHECK_EQ(0, CountGlobalContexts());
+}
+
+
+// Count the number of global contexts in the weak list of global contexts
+// causing a GC after the specified number of elements.
+static int CountGlobalContextsWithGC(int n) {
+  int count = 0;
+  // A handle (rather than a raw Object*) keeps the current element valid
+  // across the forced GC below, which may move objects.
+  Handle<Object> object(Heap::global_contexts_list());
+  while (!object->IsUndefined()) {
+    count++;
+    if (count == n) Heap::CollectAllGarbage(true);
+    object =
+        Handle<Object>(Context::cast(*object)->get(Context::NEXT_CONTEXT_LINK));
+  }
+  return count;
+}
+
+
+// Verify that the weak list of global contexts can be traversed safely even
+// when a full GC is triggered in the middle of the traversal.
+TEST(TestInternalWeakListsTraverseWithGC) {
+  static const int kNumTestContexts = 10;
+
+  v8::HandleScope scope;
+  v8::Persistent<v8::Context> ctx[kNumTestContexts];
+
+  CHECK_EQ(0, CountGlobalContexts());
+
+  // Create a number of contexts and check the length of the weak list both
+  // with and without GCs while iterating the list.
+  for (int i = 0; i < kNumTestContexts; i++) {
+    ctx[i] = v8::Context::New();
+    CHECK_EQ(i + 1, CountGlobalContexts());
+    CHECK_EQ(i + 1, CountGlobalContextsWithGC(i / 2 + 1));
+
+    ctx[i]->Enter();
+    ctx[i]->Exit();
+  }
+}