Move code flushing support into shared visitor.
author    mstarzinger@chromium.org <mstarzinger@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
          Fri, 12 Oct 2012 12:41:29 +0000 (12:41 +0000)
committer mstarzinger@chromium.org <mstarzinger@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
          Fri, 12 Oct 2012 12:41:29 +0000 (12:41 +0000)
This is a first step towards incremental code flushing. The code
flushing support is now shared between full and incremental marking.
The code flusher itself is not yet activated in incremental mode and
will require some additional adaptations.

R=ulan@chromium.org
BUG=v8:1609

Review URL: https://codereview.chromium.org/11028016

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@12714 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
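
The refactoring relies on the static-visitor pattern (CRTP) already used by
StaticMarkingVisitor: shared visitation logic lives in the templated base
class, and each concrete marker customizes behavior through static hooks such
as the BeforeVisitingSharedFunctionInfo hook introduced in this change. A
minimal standalone sketch of that pattern follows; the class and function
names are illustrative, not V8's real types:

    #include <cstdio>

    // Shared visitation logic lives in a base class templated over the
    // concrete visitor (CRTP); subclasses customize it via static hooks.
    template <typename StaticVisitor>
    class StaticMarkingVisitorSketch {
     public:
      static void VisitSharedFunctionInfo(const char* name) {
        // Hook point: full marking resets per-object IC state here,
        // incremental marking leaves it alone.
        StaticVisitor::BeforeVisitingSharedFunctionInfo(name);
        std::printf("visiting shared function info %s\n", name);
      }
    };

    class FullMarkingVisitor
        : public StaticMarkingVisitorSketch<FullMarkingVisitor> {
     public:
      static void BeforeVisitingSharedFunctionInfo(const char* name) {
        std::printf("resetting IC state of %s\n", name);
      }
    };

    class IncrementalMarkingVisitor
        : public StaticMarkingVisitorSketch<IncrementalMarkingVisitor> {
     public:
      // Mirrors the empty hook added to IncrementalMarkingMarkingVisitor.
      static void BeforeVisitingSharedFunctionInfo(const char*) {}
    };

    int main() {
      FullMarkingVisitor::VisitSharedFunctionInfo("f");
      IncrementalMarkingVisitor::VisitSharedFunctionInfo("g");
      return 0;
    }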

src/incremental-marking.cc
src/mark-compact.cc
src/mark-compact.h
src/objects-visiting-inl.h
src/objects-visiting.h

diff --git a/src/incremental-marking.cc b/src/incremental-marking.cc
index ba973c9..e51d6c1 100644
@@ -181,10 +181,6 @@ class IncrementalMarkingMarkingVisitor
   static void Initialize() {
     StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize();
 
-    table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);
-
-    table_.Register(kVisitJSFunction, &VisitJSFunction);
-
     table_.Register(kVisitJSRegExp, &VisitJSRegExp);
   }
 
@@ -195,31 +191,7 @@ class IncrementalMarkingMarkingVisitor
                   HeapObject::RawField(object, JSWeakMap::kSize));
   }
 
-  static void VisitSharedFunctionInfo(Map* map, HeapObject* object) {
-    Heap* heap = map->GetHeap();
-    SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
-    if (shared->ic_age() != heap->global_ic_age()) {
-      shared->ResetForNewContext(heap->global_ic_age());
-    }
-    FixedBodyVisitor<IncrementalMarkingMarkingVisitor,
-                     SharedFunctionInfo::BodyDescriptor,
-                     void>::Visit(map, object);
-  }
-
-  static inline void VisitJSFunction(Map* map, HeapObject* object) {
-    Heap* heap = map->GetHeap();
-    // Iterate over all fields in the body but take care in dealing with
-    // the code entry and skip weak fields.
-    VisitPointers(heap,
-                  HeapObject::RawField(object, JSFunction::kPropertiesOffset),
-                  HeapObject::RawField(object, JSFunction::kCodeEntryOffset));
-    VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
-    VisitPointers(heap,
-                  HeapObject::RawField(object,
-                      JSFunction::kCodeEntryOffset + kPointerSize),
-                  HeapObject::RawField(object,
-                      JSFunction::kNonWeakFieldsEndOffset));
-  }
+  static void BeforeVisitingSharedFunctionInfo(HeapObject* object) {}
 
   INLINE(static void VisitPointer(Heap* heap, Object** p)) {
     Object* obj = *p;
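
The Initialize hunk above works because visitation is driven by a dispatch
table indexed by visitor id; after this change the shared base class registers
the kVisitSharedFunctionInfo and kVisitJSFunction handlers once, so the
incremental visitor no longer overrides them. A standalone model of such a
table, with simplified types rather than V8's actual VisitorDispatchTable:

    #include <cstdio>

    // Object kinds index into a table of handler function pointers. The base
    // visitor registers defaults; a subclass may overwrite single entries.
    enum VisitorId {
      kVisitSharedFunctionInfo,
      kVisitJSFunction,
      kVisitorIdCount
    };

    typedef void (*Callback)(const char* object);

    struct DispatchTable {
      Callback entries[kVisitorIdCount];
      void Register(VisitorId id, Callback cb) { entries[id] = cb; }
      void Visit(VisitorId id, const char* object) { entries[id](object); }
    };

    static void VisitSharedDefault(const char* o) {
      std::printf("shared handler for %s\n", o);
    }
    static void VisitFunctionDefault(const char* o) {
      std::printf("function handler for %s\n", o);
    }

    int main() {
      DispatchTable table;
      // The shared Initialize() now registers these once...
      table.Register(kVisitSharedFunctionInfo, &VisitSharedDefault);
      table.Register(kVisitJSFunction, &VisitFunctionDefault);
      // ...and subclasses no longer re-register them, as the hunk shows.
      table.Visit(kVisitJSFunction, "f");
      return 0;
    }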
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index c078782..f8fd0cd 100644
@@ -859,133 +859,69 @@ void MarkCompactCollector::Finish() {
 // and continue with marking.  This process repeats until all reachable
 // objects have been marked.
 
-class CodeFlusher {
- public:
-  explicit CodeFlusher(Isolate* isolate)
-      : isolate_(isolate),
-        jsfunction_candidates_head_(NULL),
-        shared_function_info_candidates_head_(NULL) {}
-
-  void AddCandidate(SharedFunctionInfo* shared_info) {
-    SetNextCandidate(shared_info, shared_function_info_candidates_head_);
-    shared_function_info_candidates_head_ = shared_info;
-  }
+void CodeFlusher::ProcessJSFunctionCandidates() {
+  Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
 
-  void AddCandidate(JSFunction* function) {
-    ASSERT(function->code() == function->shared()->code());
+  JSFunction* candidate = jsfunction_candidates_head_;
+  JSFunction* next_candidate;
+  while (candidate != NULL) {
+    next_candidate = GetNextCandidate(candidate);
 
-    SetNextCandidate(function, jsfunction_candidates_head_);
-    jsfunction_candidates_head_ = function;
-  }
-
-  void ProcessCandidates() {
-    ProcessSharedFunctionInfoCandidates();
-    ProcessJSFunctionCandidates();
-  }
+    SharedFunctionInfo* shared = candidate->shared();
 
- private:
-  void ProcessJSFunctionCandidates() {
-    Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
-
-    JSFunction* candidate = jsfunction_candidates_head_;
-    JSFunction* next_candidate;
-    while (candidate != NULL) {
-      next_candidate = GetNextCandidate(candidate);
-
-      SharedFunctionInfo* shared = candidate->shared();
-
-      Code* code = shared->code();
-      MarkBit code_mark = Marking::MarkBitFrom(code);
-      if (!code_mark.Get()) {
-        shared->set_code(lazy_compile);
-        candidate->set_code(lazy_compile);
-      } else {
-        candidate->set_code(shared->code());
-      }
-
-      // We are in the middle of a GC cycle so the write barrier in the code
-      // setter did not record the slot update and we have to do that manually.
-      Address slot = candidate->address() + JSFunction::kCodeEntryOffset;
-      Code* target = Code::cast(Code::GetObjectFromEntryAddress(slot));
-      isolate_->heap()->mark_compact_collector()->
-          RecordCodeEntrySlot(slot, target);
-
-      RecordSharedFunctionInfoCodeSlot(shared);
-
-      candidate = next_candidate;
+    Code* code = shared->code();
+    MarkBit code_mark = Marking::MarkBitFrom(code);
+    if (!code_mark.Get()) {
+      shared->set_code(lazy_compile);
+      candidate->set_code(lazy_compile);
+    } else {
+      candidate->set_code(shared->code());
     }
 
-    jsfunction_candidates_head_ = NULL;
-  }
-
-
-  void ProcessSharedFunctionInfoCandidates() {
-    Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
-
-    SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
-    SharedFunctionInfo* next_candidate;
-    while (candidate != NULL) {
-      next_candidate = GetNextCandidate(candidate);
-      SetNextCandidate(candidate, NULL);
-
-      Code* code = candidate->code();
-      MarkBit code_mark = Marking::MarkBitFrom(code);
-      if (!code_mark.Get()) {
-        candidate->set_code(lazy_compile);
-      }
-
-      RecordSharedFunctionInfoCodeSlot(candidate);
+    // We are in the middle of a GC cycle so the write barrier in the code
+    // setter did not record the slot update and we have to do that manually.
+    Address slot = candidate->address() + JSFunction::kCodeEntryOffset;
+    Code* target = Code::cast(Code::GetObjectFromEntryAddress(slot));
+    isolate_->heap()->mark_compact_collector()->
+        RecordCodeEntrySlot(slot, target);
 
-      candidate = next_candidate;
-    }
+    Object** shared_code_slot =
+        HeapObject::RawField(shared, SharedFunctionInfo::kCodeOffset);
+    isolate_->heap()->mark_compact_collector()->
+        RecordSlot(shared_code_slot, shared_code_slot, *shared_code_slot);
 
-    shared_function_info_candidates_head_ = NULL;
+    candidate = next_candidate;
   }
 
-  void RecordSharedFunctionInfoCodeSlot(SharedFunctionInfo* shared) {
-    Object** slot = HeapObject::RawField(shared,
-                                         SharedFunctionInfo::kCodeOffset);
-    isolate_->heap()->mark_compact_collector()->
-        RecordSlot(slot, slot, HeapObject::cast(*slot));
-  }
+  jsfunction_candidates_head_ = NULL;
+}
 
-  static JSFunction** GetNextCandidateField(JSFunction* candidate) {
-    return reinterpret_cast<JSFunction**>(
-        candidate->address() + JSFunction::kCodeEntryOffset);
-  }
 
-  static JSFunction* GetNextCandidate(JSFunction* candidate) {
-    return *GetNextCandidateField(candidate);
-  }
+void CodeFlusher::ProcessSharedFunctionInfoCandidates() {
+  Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
 
-  static void SetNextCandidate(JSFunction* candidate,
-                               JSFunction* next_candidate) {
-    *GetNextCandidateField(candidate) = next_candidate;
-  }
+  SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
+  SharedFunctionInfo* next_candidate;
+  while (candidate != NULL) {
+    next_candidate = GetNextCandidate(candidate);
+    SetNextCandidate(candidate, NULL);
 
-  static SharedFunctionInfo** GetNextCandidateField(
-      SharedFunctionInfo* candidate) {
     Code* code = candidate->code();
-    return reinterpret_cast<SharedFunctionInfo**>(
-        code->address() + Code::kGCMetadataOffset);
-  }
+    MarkBit code_mark = Marking::MarkBitFrom(code);
+    if (!code_mark.Get()) {
+      candidate->set_code(lazy_compile);
+    }
 
-  static SharedFunctionInfo* GetNextCandidate(SharedFunctionInfo* candidate) {
-    return reinterpret_cast<SharedFunctionInfo*>(
-        candidate->code()->gc_metadata());
-  }
+    Object** code_slot =
+        HeapObject::RawField(candidate, SharedFunctionInfo::kCodeOffset);
+    isolate_->heap()->mark_compact_collector()->
+        RecordSlot(code_slot, code_slot, *code_slot);
 
-  static void SetNextCandidate(SharedFunctionInfo* candidate,
-                               SharedFunctionInfo* next_candidate) {
-    candidate->code()->set_gc_metadata(next_candidate);
+    candidate = next_candidate;
   }
 
-  Isolate* isolate_;
-  JSFunction* jsfunction_candidates_head_;
-  SharedFunctionInfo* shared_function_info_candidates_head_;
-
-  DISALLOW_COPY_AND_ASSIGN(CodeFlusher);
-};
+  shared_function_info_candidates_head_ = NULL;
+}
 
 
 MarkCompactCollector::~MarkCompactCollector() {
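
The "record the slot manually" comment in ProcessJSFunctionCandidates is the
crux of this hunk: pointer stores made in the middle of a GC cycle bypass the
normal write barrier, so each updated slot must be handed to the collector
explicitly or it would go stale when objects move during compaction. A
standalone sketch of the idea, using simplified stand-in types:

    #include <cstdio>
    #include <vector>

    // Slots updated mid-GC are pushed into the collector's buffer by hand;
    // the compacting phase later revisits exactly these recorded slots.
    struct CollectorSketch {
      std::vector<void**> recorded_slots;
      void RecordSlot(void** slot) { recorded_slots.push_back(slot); }
      void EvacuateAndUpdate() {
        for (void** slot : recorded_slots) {
          std::printf("updating recorded slot %p\n",
                      static_cast<void*>(slot));
        }
      }
    };

    int main() {
      static int lazy_compile_stub = 0;  // stands in for LazyCompile
      void* function_code_entry = 0;     // the candidate's code-entry field

      CollectorSketch collector;
      function_code_entry = &lazy_compile_stub;    // store, no write barrier
      collector.RecordSlot(&function_code_entry);  // so record it manually
      collector.EvacuateAndUpdate();
      return 0;
    }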
@@ -1137,6 +1073,11 @@ class MarkCompactMarkingVisitor
     return true;
   }
 
+  INLINE(static void BeforeVisitingSharedFunctionInfo(HeapObject* object)) {
+    SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
+    shared->BeforeVisitingPointers();
+  }
+
   static void VisitJSWeakMap(Map* map, HeapObject* object) {
     MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
     JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(object);
@@ -1180,123 +1121,8 @@ class MarkCompactMarkingVisitor
 
   // Code flushing support.
 
-  // How many collections newly compiled code object will survive before being
-  // flushed.
-  static const int kCodeAgeThreshold = 5;
-
   static const int kRegExpCodeThreshold = 5;
 
-  inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
-    Object* undefined = heap->undefined_value();
-    return (info->script() != undefined) &&
-        (reinterpret_cast<Script*>(info->script())->source() != undefined);
-  }
-
-
-  inline static bool IsCompiled(JSFunction* function) {
-    return function->code() !=
-        function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
-  }
-
-  inline static bool IsCompiled(SharedFunctionInfo* function) {
-    return function->code() !=
-        function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
-  }
-
-  inline static bool IsFlushable(Heap* heap, JSFunction* function) {
-    SharedFunctionInfo* shared_info = function->unchecked_shared();
-
-    // Code is either on stack, in compilation cache or referenced
-    // by optimized version of function.
-    MarkBit code_mark = Marking::MarkBitFrom(function->code());
-    if (code_mark.Get()) {
-      if (!Marking::MarkBitFrom(shared_info).Get()) {
-        shared_info->set_code_age(0);
-      }
-      return false;
-    }
-
-    // We do not flush code for optimized functions.
-    if (function->code() != shared_info->code()) {
-      return false;
-    }
-
-    return IsFlushable(heap, shared_info);
-  }
-
-  inline static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info) {
-    // Code is either on stack, in compilation cache or referenced
-    // by optimized version of function.
-    MarkBit code_mark =
-        Marking::MarkBitFrom(shared_info->code());
-    if (code_mark.Get()) {
-      return false;
-    }
-
-    // The function must be compiled and have the source code available,
-    // to be able to recompile it in case we need the function again.
-    if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
-      return false;
-    }
-
-    // We never flush code for Api functions.
-    Object* function_data = shared_info->function_data();
-    if (function_data->IsFunctionTemplateInfo()) {
-      return false;
-    }
-
-    // Only flush code for functions.
-    if (shared_info->code()->kind() != Code::FUNCTION) {
-      return false;
-    }
-
-    // Function must be lazy compilable.
-    if (!shared_info->allows_lazy_compilation()) {
-      return false;
-    }
-
-    // If this is a full script wrapped in a function we do no flush the code.
-    if (shared_info->is_toplevel()) {
-      return false;
-    }
-
-    // Age this shared function info.
-    if (shared_info->code_age() < kCodeAgeThreshold) {
-      shared_info->set_code_age(shared_info->code_age() + 1);
-      return false;
-    }
-
-    return true;
-  }
-
-
-  static bool FlushCodeForFunction(Heap* heap, JSFunction* function) {
-    if (!IsFlushable(heap, function)) return false;
-
-    // This function's code looks flushable. But we have to postpone the
-    // decision until we see all functions that point to the same
-    // SharedFunctionInfo because some of them might be optimized.
-    // That would make the nonoptimized version of the code nonflushable,
-    // because it is required for bailing out from optimized code.
-    heap->mark_compact_collector()->code_flusher()->AddCandidate(function);
-    return true;
-  }
-
-  static inline bool IsValidNotBuiltinContext(Object* ctx) {
-    return ctx->IsContext() &&
-        !Context::cast(ctx)->global_object()->IsJSBuiltinsObject();
-  }
-
-
-  static void VisitSharedFunctionInfoGeneric(Map* map, HeapObject* object) {
-    SharedFunctionInfo::cast(object)->BeforeVisitingPointers();
-
-    FixedBodyVisitor<MarkCompactMarkingVisitor,
-                     SharedFunctionInfo::BodyDescriptor,
-                     void>::Visit(map, object);
-  }
-
-
   static void UpdateRegExpCodeAgeAndFlush(Heap* heap,
                                           JSRegExp* re,
                                           bool is_ascii) {
@@ -1370,138 +1196,6 @@ class MarkCompactMarkingVisitor
     VisitJSRegExp(map, object);
   }
 
-
-  static void VisitSharedFunctionInfoAndFlushCode(Map* map,
-                                                  HeapObject* object) {
-    Heap* heap = map->GetHeap();
-    SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);
-    if (shared->ic_age() != heap->global_ic_age()) {
-      shared->ResetForNewContext(heap->global_ic_age());
-    }
-
-    MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
-    if (!collector->is_code_flushing_enabled()) {
-      VisitSharedFunctionInfoGeneric(map, object);
-      return;
-    }
-    VisitSharedFunctionInfoAndFlushCodeGeneric(map, object, false);
-  }
-
-
-  static void VisitSharedFunctionInfoAndFlushCodeGeneric(
-      Map* map, HeapObject* object, bool known_flush_code_candidate) {
-    Heap* heap = map->GetHeap();
-    SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);
-
-    shared->BeforeVisitingPointers();
-
-    if (!known_flush_code_candidate) {
-      known_flush_code_candidate = IsFlushable(heap, shared);
-      if (known_flush_code_candidate) {
-        heap->mark_compact_collector()->code_flusher()->AddCandidate(shared);
-      }
-    }
-
-    VisitSharedFunctionInfoFields(heap, object, known_flush_code_candidate);
-  }
-
-
-  static void VisitJSFunctionAndFlushCode(Map* map, HeapObject* object) {
-    Heap* heap = map->GetHeap();
-    MarkCompactCollector* collector = heap->mark_compact_collector();
-    if (!collector->is_code_flushing_enabled()) {
-      VisitJSFunction(map, object);
-      return;
-    }
-
-    JSFunction* jsfunction = reinterpret_cast<JSFunction*>(object);
-    // The function must have a valid context and not be a builtin.
-    bool flush_code_candidate = false;
-    if (IsValidNotBuiltinContext(jsfunction->unchecked_context())) {
-      flush_code_candidate = FlushCodeForFunction(heap, jsfunction);
-    }
-
-    if (!flush_code_candidate) {
-      Code* code = jsfunction->shared()->code();
-      MarkBit code_mark = Marking::MarkBitFrom(code);
-      collector->MarkObject(code, code_mark);
-
-      if (jsfunction->code()->kind() == Code::OPTIMIZED_FUNCTION) {
-        collector->MarkInlinedFunctionsCode(jsfunction->code());
-      }
-    }
-
-    VisitJSFunctionFields(map,
-                          reinterpret_cast<JSFunction*>(object),
-                          flush_code_candidate);
-  }
-
-
-  static void VisitJSFunction(Map* map, HeapObject* object) {
-    VisitJSFunctionFields(map,
-                          reinterpret_cast<JSFunction*>(object),
-                          false);
-  }
-
-
-  static inline void VisitJSFunctionFields(Map* map,
-                                           JSFunction* object,
-                                           bool flush_code_candidate) {
-    Heap* heap = map->GetHeap();
-
-    VisitPointers(heap,
-                  HeapObject::RawField(object, JSFunction::kPropertiesOffset),
-                  HeapObject::RawField(object, JSFunction::kCodeEntryOffset));
-
-    if (!flush_code_candidate) {
-      VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
-    } else {
-      // Don't visit code object.
-
-      // Visit shared function info to avoid double checking of its
-      // flushability.
-      SharedFunctionInfo* shared_info = object->unchecked_shared();
-      MarkBit shared_info_mark = Marking::MarkBitFrom(shared_info);
-      if (!shared_info_mark.Get()) {
-        Map* shared_info_map = shared_info->map();
-        MarkBit shared_info_map_mark =
-            Marking::MarkBitFrom(shared_info_map);
-        heap->mark_compact_collector()->SetMark(shared_info, shared_info_mark);
-        heap->mark_compact_collector()->MarkObject(shared_info_map,
-                                                   shared_info_map_mark);
-        VisitSharedFunctionInfoAndFlushCodeGeneric(shared_info_map,
-                                                   shared_info,
-                                                   true);
-      }
-    }
-
-    VisitPointers(
-        heap,
-        HeapObject::RawField(object,
-                             JSFunction::kCodeEntryOffset + kPointerSize),
-        HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset));
-  }
-
-
-  static void VisitSharedFunctionInfoFields(Heap* heap,
-                                            HeapObject* object,
-                                            bool flush_code_candidate) {
-    VisitPointer(heap,
-                 HeapObject::RawField(object, SharedFunctionInfo::kNameOffset));
-
-    if (!flush_code_candidate) {
-      VisitPointer(heap,
-                   HeapObject::RawField(object,
-                                        SharedFunctionInfo::kCodeOffset));
-    }
-
-    VisitPointers(
-        heap,
-        HeapObject::RawField(object,
-                             SharedFunctionInfo::kOptimizedCodeMapOffset),
-        HeapObject::RawField(object, SharedFunctionInfo::kSize));
-  }
-
   static VisitorDispatchTable<Callback> non_count_table_;
 };
 
@@ -1638,12 +1332,6 @@ class MarkCompactMarkingVisitor::ObjectStatsTracker<
 void MarkCompactMarkingVisitor::Initialize() {
   StaticMarkingVisitor<MarkCompactMarkingVisitor>::Initialize();
 
-  table_.Register(kVisitSharedFunctionInfo,
-                  &VisitSharedFunctionInfoAndFlushCode);
-
-  table_.Register(kVisitJSFunction,
-                  &VisitJSFunctionAndFlushCode);
-
   table_.Register(kVisitJSRegExp,
                   &VisitRegExpAndFlushCode);
 
@@ -1718,26 +1406,6 @@ class SharedFunctionInfoMarkingVisitor : public ObjectVisitor {
 };
 
 
-void MarkCompactCollector::MarkInlinedFunctionsCode(Code* code) {
-  // For optimized functions we should retain both non-optimized version
-  // of its code and non-optimized version of all inlined functions.
-  // This is required to support bailing out from inlined code.
-  DeoptimizationInputData* data =
-      DeoptimizationInputData::cast(code->deoptimization_data());
-
-  FixedArray* literals = data->LiteralArray();
-
-  for (int i = 0, count = data->InlinedFunctionCount()->value();
-       i < count;
-       i++) {
-    JSFunction* inlined = JSFunction::cast(literals->get(i));
-    Code* inlined_code = inlined->shared()->code();
-    MarkBit inlined_code_mark = Marking::MarkBitFrom(inlined_code);
-    MarkObject(inlined_code, inlined_code_mark);
-  }
-}
-
-
 void MarkCompactCollector::PrepareThreadForCodeFlushing(Isolate* isolate,
                                                         ThreadLocalTop* top) {
   for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) {
@@ -1750,7 +1418,8 @@ void MarkCompactCollector::PrepareThreadForCodeFlushing(Isolate* isolate,
     MarkBit code_mark = Marking::MarkBitFrom(code);
     MarkObject(code, code_mark);
     if (frame->is_optimized()) {
-      MarkInlinedFunctionsCode(frame->LookupCode());
+      MarkCompactMarkingVisitor::MarkInlinedFunctionsCode(heap(),
+                                                          frame->LookupCode());
     }
   }
 }
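
MarkInlinedFunctionsCode, now moved into the shared visitor, walks the
optimized code's deoptimization data and pins the unoptimized code of every
inlined function, since a bailout needs that code to return to. A standalone
model with illustrative types:

    #include <cstdio>
    #include <vector>

    // An optimized code object carries deoptimization data naming every
    // function inlined into it; their unoptimized code objects must stay
    // alive so a bailout has somewhere to land.
    struct Code {
      const char* owner;
      bool marked;
    };

    struct OptimizedCode {
      // Stands in for the deoptimization data's literal array.
      std::vector<Code*> inlined_unoptimized_code;
    };

    static void MarkInlinedFunctionsCode(OptimizedCode* code) {
      for (Code* inlined : code->inlined_unoptimized_code) {
        inlined->marked = true;  // pinned: required for deoptimization
        std::printf("retained unoptimized code of %s\n", inlined->owner);
      }
    }

    int main() {
      Code a = {"inner", false};
      Code b = {"helper", false};
      OptimizedCode optimized;
      optimized.inlined_unoptimized_code.push_back(&a);
      optimized.inlined_unoptimized_code.push_back(&b);
      MarkInlinedFunctionsCode(&optimized);
      return 0;
    }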
diff --git a/src/mark-compact.h b/src/mark-compact.h
index ffa27e6..1d17582 100644
@@ -403,6 +403,81 @@ class SlotsBuffer {
 };
 
 
+// CodeFlusher collects candidates for code flushing during marking and
+// processes those candidates after marking has completed in order to
+// reset those functions referencing code objects that would otherwise
+// be unreachable. Code objects can be referenced in two ways:
+//    - SharedFunctionInfo references unoptimized code.
+//    - JSFunction references either unoptimized or optimized code.
+// We are not allowed to flush unoptimized code for functions that got
+// optimized or inlined into optimized code, because we might bail out
+// into the unoptimized code again during deoptimization.
+class CodeFlusher {
+ public:
+  explicit CodeFlusher(Isolate* isolate)
+      : isolate_(isolate),
+        jsfunction_candidates_head_(NULL),
+        shared_function_info_candidates_head_(NULL) {}
+
+  void AddCandidate(SharedFunctionInfo* shared_info) {
+    SetNextCandidate(shared_info, shared_function_info_candidates_head_);
+    shared_function_info_candidates_head_ = shared_info;
+  }
+
+  void AddCandidate(JSFunction* function) {
+    ASSERT(function->code() == function->shared()->code());
+    SetNextCandidate(function, jsfunction_candidates_head_);
+    jsfunction_candidates_head_ = function;
+  }
+
+  void ProcessCandidates() {
+    ProcessSharedFunctionInfoCandidates();
+    ProcessJSFunctionCandidates();
+  }
+
+ private:
+  void ProcessJSFunctionCandidates();
+  void ProcessSharedFunctionInfoCandidates();
+
+  static JSFunction** GetNextCandidateField(JSFunction* candidate) {
+    return reinterpret_cast<JSFunction**>(
+        candidate->address() + JSFunction::kCodeEntryOffset);
+  }
+
+  static JSFunction* GetNextCandidate(JSFunction* candidate) {
+    return *GetNextCandidateField(candidate);
+  }
+
+  static void SetNextCandidate(JSFunction* candidate,
+                               JSFunction* next_candidate) {
+    *GetNextCandidateField(candidate) = next_candidate;
+  }
+
+  static SharedFunctionInfo** GetNextCandidateField(
+      SharedFunctionInfo* candidate) {
+    Code* code = candidate->code();
+    return reinterpret_cast<SharedFunctionInfo**>(
+        code->address() + Code::kGCMetadataOffset);
+  }
+
+  static SharedFunctionInfo* GetNextCandidate(SharedFunctionInfo* candidate) {
+    return reinterpret_cast<SharedFunctionInfo*>(
+        candidate->code()->gc_metadata());
+  }
+
+  static void SetNextCandidate(SharedFunctionInfo* candidate,
+                               SharedFunctionInfo* next_candidate) {
+    candidate->code()->set_gc_metadata(next_candidate);
+  }
+
+  Isolate* isolate_;
+  JSFunction* jsfunction_candidates_head_;
+  SharedFunctionInfo* shared_function_info_candidates_head_;
+
+  DISALLOW_COPY_AND_ASSIGN(CodeFlusher);
+};
+
+
 // Defined in isolate.h.
 class ThreadLocalTop;
 
@@ -631,10 +706,6 @@ class MarkCompactCollector {
   friend class CodeMarkingVisitor;
   friend class SharedFunctionInfoMarkingVisitor;
 
-  // Mark non-optimize code for functions inlined into the given optimized
-  // code. This will prevent it from being flushed.
-  void MarkInlinedFunctionsCode(Code* code);
-
   // Mark code objects that are active on the stack to prevent them
   // from being flushed.
   void PrepareThreadForCodeFlushing(Isolate* isolate, ThreadLocalTop* top);
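
Note how CodeFlusher threads its candidate lists through fields the candidates
already own: the next pointer for a JSFunction is stored in its code-entry
slot, and for a SharedFunctionInfo in its code object's gc_metadata field, so
enqueuing candidates during marking allocates no memory. A standalone sketch
of that intrusive-list trick, simplified to a single list with illustrative
names:

    #include <cstddef>
    #include <cstdio>

    // The next-candidate pointer is smuggled into a field the candidate
    // already owns, so building the list during marking allocates nothing.
    struct Function {
      const char* name;
      void* code_entry;  // doubles as the next-candidate link while enqueued
    };

    static Function** GetNextCandidateField(Function* candidate) {
      return reinterpret_cast<Function**>(&candidate->code_entry);
    }

    struct CodeFlusherSketch {
      Function* head;
      CodeFlusherSketch() : head(NULL) {}

      void AddCandidate(Function* function) {
        *GetNextCandidateField(function) = head;
        head = function;
      }

      void ProcessCandidates() {
        Function* candidate = head;
        while (candidate != NULL) {
          Function* next = *GetNextCandidateField(candidate);
          std::printf("flushing %s\n", candidate->name);
          candidate->code_entry = NULL;  // restore the field's real meaning
          candidate = next;
        }
        head = NULL;
      }
    };

    int main() {
      Function a = {"a", NULL};
      Function b = {"b", NULL};
      CodeFlusherSketch flusher;
      flusher.AddCandidate(&a);
      flusher.AddCandidate(&b);
      flusher.ProcessCandidates();
      return 0;
    }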
diff --git a/src/objects-visiting-inl.h b/src/objects-visiting-inl.h
index 3d4f1da..0440dd2 100644
@@ -138,9 +138,9 @@ void StaticMarkingVisitor<StaticVisitor>::Initialize() {
 
   table_.Register(kVisitCode, &VisitCode);
 
-  // Registration for kVisitSharedFunctionInfo is done by StaticVisitor.
+  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);
 
-  // Registration for kVisitJSFunction is done by StaticVisitor.
+  table_.Register(kVisitJSFunction, &VisitJSFunction);
 
   // Registration for kVisitJSRegExp is done by StaticVisitor.
 
@@ -282,6 +282,71 @@ void StaticMarkingVisitor<StaticVisitor>::VisitCode(
 
 
 template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
+    Map* map, HeapObject* object) {
+  Heap* heap = map->GetHeap();
+  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
+  if (shared->ic_age() != heap->global_ic_age()) {
+    shared->ResetForNewContext(heap->global_ic_age());
+  }
+  MarkCompactCollector* collector = heap->mark_compact_collector();
+  if (collector->is_code_flushing_enabled()) {
+    if (IsFlushable(heap, shared)) {
+      // This function's code looks flushable. But we have to postpone
+      // the decision until we see all functions that point to the same
+      // SharedFunctionInfo because some of them might be optimized.
+      // That would also make the non-optimized version of the code
+      // non-flushable, because it is required for bailing out from
+      // optimized code.
+      collector->code_flusher()->AddCandidate(shared);
+      // Treat the reference to the code object weakly.
+      VisitSharedFunctionInfoWeakCode(heap, object);
+      return;
+    }
+  }
+  VisitSharedFunctionInfoStrongCode(heap, object);
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(
+    Map* map, HeapObject* object) {
+  Heap* heap = map->GetHeap();
+  JSFunction* function = JSFunction::cast(object);
+  MarkCompactCollector* collector = heap->mark_compact_collector();
+  if (collector->is_code_flushing_enabled()) {
+    if (IsFlushable(heap, function)) {
+      // This function's code looks flushable. But we have to postpone
+      // the decision until we see all functions that point to the same
+      // SharedFunctionInfo because some of them might be optimized.
+      // That would also make the non-optimized version of the code
+      // non-flushable, because it is required for bailing out from
+      // optimized code.
+      collector->code_flusher()->AddCandidate(function);
+      // Visit shared function info immediately to avoid double checking
+      // of its flushability later. This is just an optimization because
+      // the shared function info would eventually be visited.
+      SharedFunctionInfo* shared = function->unchecked_shared();
+      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
+        StaticVisitor::MarkObject(heap, shared->map());
+        VisitSharedFunctionInfoWeakCode(heap, shared);
+      }
+      // Treat the reference to the code object weakly.
+      VisitJSFunctionWeakCode(heap, object);
+      return;
+    } else {
+      // Visit all unoptimized code objects to prevent flushing them.
+      StaticVisitor::MarkObject(heap, function->shared()->code());
+      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
+        MarkInlinedFunctionsCode(heap, function->code());
+      }
+    }
+  }
+  VisitJSFunctionStrongCode(heap, object);
+}
+
+
+template<typename StaticVisitor>
 void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(
     Map* map, HeapObject* object) {
   int last_property_offset =
@@ -353,6 +418,200 @@ void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
 }
 
 
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(
+    Heap* heap, Code* code) {
+  // For optimized functions we should retain both non-optimized version
+  // of its code and non-optimized version of all inlined functions.
+  // This is required to support bailing out from inlined code.
+  DeoptimizationInputData* data =
+      DeoptimizationInputData::cast(code->deoptimization_data());
+  FixedArray* literals = data->LiteralArray();
+  for (int i = 0, count = data->InlinedFunctionCount()->value();
+       i < count;
+       i++) {
+    JSFunction* inlined = JSFunction::cast(literals->get(i));
+    StaticVisitor::MarkObject(heap, inlined->shared()->code());
+  }
+}
+
+
+inline static bool IsValidNonBuiltinContext(Object* context) {
+  return context->IsContext() &&
+      !Context::cast(context)->global_object()->IsJSBuiltinsObject();
+}
+
+
+inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
+  Object* undefined = heap->undefined_value();
+  return (info->script() != undefined) &&
+      (reinterpret_cast<Script*>(info->script())->source() != undefined);
+}
+
+
+template<typename StaticVisitor>
+bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
+    Heap* heap, JSFunction* function) {
+  SharedFunctionInfo* shared_info = function->unchecked_shared();
+
+  // Code is either on stack, in compilation cache or referenced
+  // by optimized version of function.
+  MarkBit code_mark = Marking::MarkBitFrom(function->code());
+  if (code_mark.Get()) {
+    if (!Marking::MarkBitFrom(shared_info).Get()) {
+      shared_info->set_code_age(0);
+    }
+    return false;
+  }
+
+  // The function must have a valid context and not be a builtin.
+  if (!IsValidNonBuiltinContext(function->unchecked_context())) {
+    return false;
+  }
+
+  // We do not flush code for optimized functions.
+  if (function->code() != shared_info->code()) {
+    return false;
+  }
+
+  return IsFlushable(heap, shared_info);
+}
+
+
+template<typename StaticVisitor>
+bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
+    Heap* heap, SharedFunctionInfo* shared_info) {
+  // Code is either on stack, in compilation cache or referenced
+  // by optimized version of function.
+  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
+  if (code_mark.Get()) {
+    return false;
+  }
+
+  // The function must be compiled and have the source code available,
+  // to be able to recompile it in case we need the function again.
+  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
+    return false;
+  }
+
+  // We never flush code for API functions.
+  Object* function_data = shared_info->function_data();
+  if (function_data->IsFunctionTemplateInfo()) {
+    return false;
+  }
+
+  // Only flush code for functions.
+  if (shared_info->code()->kind() != Code::FUNCTION) {
+    return false;
+  }
+
+  // Function must be lazy compilable.
+  if (!shared_info->allows_lazy_compilation()) {
+    return false;
+  }
+
+  // If this is a full script wrapped in a function, we do not flush the code.
+  if (shared_info->is_toplevel()) {
+    return false;
+  }
+
+  // TODO(mstarzinger): The following will soon be replaced by a new way of
+  // aging code that is based on an aging stub in the function prologue.
+
+  // How many collections a newly compiled code object will survive before
+  // being flushed.
+  static const int kCodeAgeThreshold = 5;
+
+  // Age this shared function info.
+  if (shared_info->code_age() < kCodeAgeThreshold) {
+    shared_info->set_code_age(shared_info->code_age() + 1);
+    return false;
+  }
+
+  return true;
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
+    Heap* heap, HeapObject* object) {
+  StaticVisitor::BeforeVisitingSharedFunctionInfo(object);
+  Object** start_slot =
+      HeapObject::RawField(object,
+                           SharedFunctionInfo::BodyDescriptor::kStartOffset);
+  Object** end_slot =
+      HeapObject::RawField(object,
+                           SharedFunctionInfo::BodyDescriptor::kEndOffset);
+  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
+    Heap* heap, HeapObject* object) {
+  StaticVisitor::BeforeVisitingSharedFunctionInfo(object);
+  Object** name_slot =
+      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
+  StaticVisitor::VisitPointer(heap, name_slot);
+
+  // Skip visiting kCodeOffset as it is treated weakly here.
+  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
+      SharedFunctionInfo::kCodeOffset);
+  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
+      SharedFunctionInfo::kOptimizedCodeMapOffset);
+
+  Object** start_slot =
+      HeapObject::RawField(object,
+                           SharedFunctionInfo::kOptimizedCodeMapOffset);
+  Object** end_slot =
+      HeapObject::RawField(object,
+                           SharedFunctionInfo::BodyDescriptor::kEndOffset);
+  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
+    Heap* heap, HeapObject* object) {
+  Object** start_slot =
+      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
+  Object** end_slot =
+      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
+  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
+
+  VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
+  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
+      JSFunction::kPrototypeOrInitialMapOffset);
+
+  start_slot =
+      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
+  end_slot =
+      HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
+  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
+    Heap* heap, HeapObject* object) {
+  Object** start_slot =
+      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
+  Object** end_slot =
+      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
+  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
+
+  // Skip visiting kCodeEntryOffset as it is treated weakly here.
+  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
+      JSFunction::kPrototypeOrInitialMapOffset);
+
+  start_slot =
+      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
+  end_slot =
+      HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
+  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
+}
+
+
 void Code::CodeIterateBody(ObjectVisitor* v) {
   int mode_mask = RelocInfo::kCodeTargetMask |
                   RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
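
The WeakCode visitors above iterate pointer fields in two ranges that straddle
the weakly treated slot, with STATIC_ASSERTs pinning the object layout so the
skipped slot is provably the intended one. A standalone model of the same
pattern over an illustrative layout:

    #include <cstddef>
    #include <cstdio>

    // Pointer fields are visited in two ranges around the weak slot; a
    // compile-time assert pins the layout so the gap is exactly one field.
    struct FunctionLayout {
      void* properties;
      void* elements;
      void* code_entry;  // treated weakly: deliberately not visited
      void* prototype_or_initial_map;
      void* context;
    };

    static_assert(offsetof(FunctionLayout, code_entry) + sizeof(void*) ==
                      offsetof(FunctionLayout, prototype_or_initial_map),
                  "weak-code visitor assumes these fields are adjacent");

    static void VisitPointers(void** start, void** end) {
      for (void** slot = start; slot < end; ++slot) {
        std::printf("visiting slot %p\n", static_cast<void*>(slot));
      }
    }

    int main() {
      FunctionLayout function = {};
      void** base = reinterpret_cast<void**>(&function);
      VisitPointers(base + 0, base + 2);  // properties..elements (strong)
      // code_entry is skipped: treated weakly during code flushing.
      VisitPointers(base + 3, base + 5);  // prototype..context (strong)
      return 0;
    }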
diff --git a/src/objects-visiting.h b/src/objects-visiting.h
index 7a3b3f4..26d1b12 100644
@@ -397,9 +397,16 @@ class StaticMarkingVisitor : public StaticVisitorBase {
   // TODO(mstarzinger): This should be made protected once refactoring is done.
   static inline void VisitNativeContext(Map* map, HeapObject* object);
 
+  // TODO(mstarzinger): This should be made protected once refactoring is done.
+  // Mark non-optimized code for functions inlined into the given optimized
+  // code. This will prevent it from being flushed.
+  static void MarkInlinedFunctionsCode(Heap* heap, Code* code);
+
  protected:
   static inline void VisitMap(Map* map, HeapObject* object);
   static inline void VisitCode(Map* map, HeapObject* object);
+  static inline void VisitSharedFunctionInfo(Map* map, HeapObject* object);
+  static inline void VisitJSFunction(Map* map, HeapObject* object);
   static inline void VisitJSRegExp(Map* map, HeapObject* object);
 
   // Mark pointers in a Map and its TransitionArray together, possibly
@@ -407,6 +414,17 @@ class StaticMarkingVisitor : public StaticVisitorBase {
   static void MarkMapContents(Heap* heap, Map* map);
   static void MarkTransitionArray(Heap* heap, TransitionArray* transitions);
 
+  // Code flushing support.
+  static inline bool IsFlushable(Heap* heap, JSFunction* function);
+  static inline bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info);
+
+  // Helpers used by code flushing support that visit pointer fields and treat
+  // references to code objects either strongly or weakly.
+  static void VisitSharedFunctionInfoStrongCode(Heap* heap, HeapObject* object);
+  static void VisitSharedFunctionInfoWeakCode(Heap* heap, HeapObject* object);
+  static void VisitJSFunctionStrongCode(Heap* heap, HeapObject* object);
+  static void VisitJSFunctionWeakCode(Heap* heap, HeapObject* object);
+
   class DataObjectVisitor {
    public:
     template<int size>