Revert of [heap] GC flag cleanup/restructuring. (patchset #8 id:140001 of https://codereview...
author    hablich <hablich@chromium.org>
          Mon, 31 Aug 2015 10:23:26 +0000 (03:23 -0700)
committer Commit bot <commit-bot@chromium.org>
          Mon, 31 Aug 2015 10:23:35 +0000 (10:23 +0000)
Reason for revert:
Breaks http://build.chromium.org/p/client.v8/builders/V8%20Arm%20-%20debug%20-%202/builds/2372

Original issue's description:
> [heap] GC flag cleanup/restructuring.
>
> * GC's flags are now proper flags and not int.
> * Callback flags are not threaded through but only set once like gc flags
> * Callers of methods that trigger GCs need to pass a reason when not using
>   the default parameters.
>
> Furthermore, each GC invocation can be passed the GC and GCCallback flags. We
> usually override the currently set flags upon finishing a GC cycle, but are able
> to restore the previously set flags if desired. This is useful for explicitly
> triggered scavenges or external requests that interrupt the current behaviour.
>
> BUG=
>
> Committed: https://crrev.com/f4f3b431b9ce0778d926acf03c0d36dae5c0cba4
> Cr-Commit-Position: refs/heads/master@{#30457}

TBR=hpayer@chromium.org,yangguo@chromium.org,mlippautz@chromium.org
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=

Review URL: https://codereview.chromium.org/1303393004

Cr-Commit-Position: refs/heads/master@{#30463}
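For orientation, the mechanical change repeated across all 18 files below is
an argument-order and type change on the GC entry points: the reverted CL took
the human-readable reason first and a typed GCFlags value second, while the
restored code takes plain int flags first and the reason second. A minimal
before/after sketch (the call sites are illustrative, not taken from the
patch):

    // Reverted CL (being undone here): reason first, typed GCFlags second.
    heap->CollectAllGarbage("example reason", Heap::kReduceMemoryFootprintMask);

    // Restored code (after this revert): int flags first, reason second.
    heap->CollectAllGarbage(Heap::kReduceMemoryFootprintMask, "example reason");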

18 files changed:
src/api.cc
src/debug/debug.cc
src/extensions/statistics-extension.cc
src/heap-snapshot-generator.cc
src/heap/heap-inl.h
src/heap/heap.cc
src/heap/heap.h
src/heap/memory-reducer.cc
src/log.cc
src/runtime/runtime-debug.cc
test/cctest/test-api.cc
test/cctest/test-debug.cc
test/cctest/test-heap.cc
test/cctest/test-log.cc
test/cctest/test-mementos.cc
test/cctest/test-object-observe.cc
test/cctest/test-weakmaps.cc
test/cctest/test-weaksets.cc

diff --git a/src/api.cc b/src/api.cc
index 2c8c937..42a8875 100644
@@ -6880,7 +6880,7 @@ void Isolate::CollectAllGarbage(const char* gc_reason) {
           kGCCallbackFlagSynchronousPhantomCallbackProcessing, gc_reason);
     } else {
       heap->CollectAllGarbage(
-          gc_reason, i::Heap::kNoGCFlags,
+          i::Heap::kNoGCFlags, gc_reason,
           kGCCallbackFlagSynchronousPhantomCallbackProcessing);
     }
   } else {
@@ -7071,13 +7071,13 @@ void Isolate::RequestGarbageCollectionForTesting(GarbageCollectionType type) {
   CHECK(i::FLAG_expose_gc);
   if (type == kMinorGarbageCollection) {
     reinterpret_cast<i::Isolate*>(this)->heap()->CollectGarbage(
-        i::NEW_SPACE, "Isolate::RequestGarbageCollection", i::Heap::kNoGCFlags,
+        i::NEW_SPACE, "Isolate::RequestGarbageCollection",
         kGCCallbackFlagForced);
   } else {
     DCHECK_EQ(kFullGarbageCollection, type);
     reinterpret_cast<i::Isolate*>(this)->heap()->CollectAllGarbage(
-        "Isolate::RequestGarbageCollection",
-        i::Heap::kAbortIncrementalMarkingMask, kGCCallbackFlagForced);
+        i::Heap::kAbortIncrementalMarkingMask,
+        "Isolate::RequestGarbageCollection", kGCCallbackFlagForced);
   }
 }
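One subtlety in the testing hook above: kGCCallbackFlagForced is again passed
down the call chain per invocation rather than latched on the heap. The
src/heap/heap.cc hunks below consequently check the parameter, not the sticky
member, when counting forced GCs (excerpted from the CollectGarbage hunk
further down):

    if (collector == MARK_COMPACTOR &&
        (gc_callback_flags & kGCCallbackFlagForced) != 0) {
      isolate()->CountUsage(v8::Isolate::kForcedGC);  // per-call flag, not heap state
    }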
 
diff --git a/src/debug/debug.cc b/src/debug/debug.cc
index 5053682..de7cc23 100644
@@ -1309,8 +1309,8 @@ bool Debug::PrepareFunctionForBreakPoints(Handle<SharedFunctionInfo> shared) {
   }
 
   // Make sure we abort incremental marking.
-  isolate_->heap()->CollectAllGarbage("prepare for break points",
-                                      Heap::kMakeHeapIterableMask);
+  isolate_->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+                                      "prepare for break points");
 
   {
     HeapIterator iterator(isolate_->heap());
diff --git a/src/extensions/statistics-extension.cc b/src/extensions/statistics-extension.cc
index 9151c39..5612259 100644
@@ -68,7 +68,7 @@ void StatisticsExtension::GetCounters(
         args[0]
             ->BooleanValue(args.GetIsolate()->GetCurrentContext())
             .FromMaybe(false)) {
-      heap->CollectAllGarbage("counters extension", Heap::kNoGCFlags);
+      heap->CollectAllGarbage(Heap::kNoGCFlags, "counters extension");
     }
   }
 
diff --git a/src/heap-snapshot-generator.cc b/src/heap-snapshot-generator.cc
index 05581b2..3fda04f 100644
@@ -478,8 +478,8 @@ void HeapObjectsMap::UpdateHeapObjectsMap() {
     PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
            entries_map_.occupancy());
   }
-  heap_->CollectAllGarbage("HeapObjectsMap::UpdateHeapObjectsMap",
-                           Heap::kMakeHeapIterableMask);
+  heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+                          "HeapObjectsMap::UpdateHeapObjectsMap");
   HeapIterator iterator(heap_);
   for (HeapObject* obj = iterator.next();
        obj != NULL;
@@ -2554,10 +2554,12 @@ bool HeapSnapshotGenerator::GenerateSnapshot() {
   // full GC is reachable from the root when computing dominators.
   // This is not true for weakly reachable objects.
   // As a temporary solution we call GC twice.
-  heap_->CollectAllGarbage("HeapSnapshotGenerator::GenerateSnapshot",
-                           Heap::kMakeHeapIterableMask);
-  heap_->CollectAllGarbage("HeapSnapshotGenerator::GenerateSnapshot",
-                           Heap::kMakeHeapIterableMask);
+  heap_->CollectAllGarbage(
+      Heap::kMakeHeapIterableMask,
+      "HeapSnapshotGenerator::GenerateSnapshot");
+  heap_->CollectAllGarbage(
+      Heap::kMakeHeapIterableMask,
+      "HeapSnapshotGenerator::GenerateSnapshot");
 
 #ifdef VERIFY_HEAP
   Heap* debug_heap = heap_;
diff --git a/src/heap/heap-inl.h b/src/heap/heap-inl.h
index f4411db..e5e4981 100644
@@ -502,24 +502,6 @@ AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) {
 }
 
 
-bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason,
-                          const GCFlags flags,
-                          const GCCallbackFlags callback_flags,
-                          const GCFlagOverride override) {
-  GCFlagScope flag_scope(this, flags, callback_flags, override);
-  const char* collector_reason = nullptr;
-  const GarbageCollector collector =
-      SelectGarbageCollector(space, &collector_reason);
-  return CollectGarbage(collector, gc_reason, collector_reason);
-}
-
-
-bool Heap::CollectGarbageNewSpace(const char* gc_reason) {
-  return CollectGarbage(NEW_SPACE, gc_reason, kNoGCFlags, kNoGCCallbackFlags,
-                        kDontOverride);
-}
-
-
 void Heap::UpdateAllocationSiteFeedback(HeapObject* object,
                                         ScratchpadSlotMode mode) {
   Heap* heap = object->GetHeap();
@@ -565,6 +547,14 @@ void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
 }
 
 
+bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason,
+                          const v8::GCCallbackFlags callbackFlags) {
+  const char* collector_reason = NULL;
+  GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
+  return CollectGarbage(collector, gc_reason, collector_reason, callbackFlags);
+}
+
+
 Isolate* Heap::isolate() {
   return reinterpret_cast<Isolate*>(
       reinterpret_cast<intptr_t>(this) -
@@ -585,29 +575,28 @@ Isolate* Heap::isolate() {
     RETURN_VALUE;                                         \
   }
 
-#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)     \
-  do {                                                                         \
-    AllocationResult __allocation__ = FUNCTION_CALL;                           \
-    Object* __object__ = NULL;                                                 \
-    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                          \
-    /* Two GCs before panicking.  In newspace will almost always succeed. */   \
-    for (int __i__ = 0; __i__ < 2; __i__++) {                                  \
-      (ISOLATE)->heap()->CollectGarbage(__allocation__.RetrySpace(),           \
-                                        "allocation failure",                  \
-                                        Heap::kNoGCFlags, kNoGCCallbackFlags); \
-      __allocation__ = FUNCTION_CALL;                                          \
-      RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                        \
-    }                                                                          \
-    (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment();         \
-    (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc");           \
-    {                                                                          \
-      AlwaysAllocateScope __scope__(ISOLATE);                                  \
-      __allocation__ = FUNCTION_CALL;                                          \
-    }                                                                          \
-    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                          \
-    /* TODO(1181417): Fix this. */                                             \
-    v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true);  \
-    RETURN_EMPTY;                                                              \
+#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)    \
+  do {                                                                        \
+    AllocationResult __allocation__ = FUNCTION_CALL;                          \
+    Object* __object__ = NULL;                                                \
+    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                         \
+    /* Two GCs before panicking.  In newspace will almost always succeed. */  \
+    for (int __i__ = 0; __i__ < 2; __i__++) {                                 \
+      (ISOLATE)->heap()->CollectGarbage(__allocation__.RetrySpace(),          \
+                                        "allocation failure");                \
+      __allocation__ = FUNCTION_CALL;                                         \
+      RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                       \
+    }                                                                         \
+    (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment();        \
+    (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc");          \
+    {                                                                         \
+      AlwaysAllocateScope __scope__(ISOLATE);                                 \
+      __allocation__ = FUNCTION_CALL;                                         \
+    }                                                                         \
+    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                         \
+    /* TODO(1181417): Fix this. */                                            \
+    v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true); \
+    RETURN_EMPTY;                                                             \
   } while (false)
 
 #define CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, RETURN_VALUE, \
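The CALL_AND_RETRY rewrite above only drops the explicit flag arguments; the
retry strategy is unchanged. De-macroed, the flow is roughly the following
sketch, where Allocate() stands in for FUNCTION_CALL and the early-return
plumbing of RETURN_OBJECT_UNLESS_RETRY is folded into IsRetry() checks:

    AllocationResult result = Allocate();
    // Two targeted GCs before panicking; in new space this almost always succeeds.
    for (int i = 0; i < 2 && result.IsRetry(); i++) {
      heap->CollectGarbage(result.RetrySpace(), "allocation failure");
      result = Allocate();
    }
    if (result.IsRetry()) {
      // Last resort: collect everything, then retry once with allocation
      // forced to succeed before declaring OOM.
      heap->CollectAllAvailableGarbage("last resort gc");
      AlwaysAllocateScope scope(isolate);
      result = Allocate();
      if (result.IsRetry())
        v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true);
    }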
diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index f9fb0ff..ab52719 100644
@@ -50,9 +50,6 @@ struct Heap::StrongRootsList {
 };
 
 
-DEFINE_OPERATORS_FOR_FLAGS(Heap::GCFlags)
-
-
 Heap::Heap()
     : amount_of_external_allocated_memory_(0),
       amount_of_external_allocated_memory_at_last_global_gc_(0),
@@ -136,7 +133,7 @@ Heap::Heap()
       ring_buffer_end_(0),
       promotion_queue_(this),
       configured_(false),
-      current_gc_flags_(kNoGCFlags),
+      current_gc_flags_(Heap::kNoGCFlags),
       current_gc_callback_flags_(GCCallbackFlags::kNoGCCallbackFlags),
       external_string_table_(this),
       chunks_queued_for_free_(NULL),
@@ -753,7 +750,7 @@ void Heap::PreprocessStackTraces() {
 void Heap::HandleGCRequest() {
   if (incremental_marking()->request_type() ==
       IncrementalMarking::COMPLETE_MARKING) {
-    CollectAllGarbage("GC interrupt", current_gc_flags_,
+    CollectAllGarbage(current_gc_flags_, "GC interrupt",
                       current_gc_callback_flags_);
     return;
   }
@@ -797,12 +794,14 @@ void Heap::OverApproximateWeakClosure(const char* gc_reason) {
 }
 
 
-void Heap::CollectAllGarbage(const char* gc_reason, const GCFlags flags,
+void Heap::CollectAllGarbage(int flags, const char* gc_reason,
                              const v8::GCCallbackFlags gc_callback_flags) {
   // Since we are ignoring the return value, the exact choice of space does
   // not matter, so long as we do not specify NEW_SPACE, which would not
   // cause a full GC.
-  CollectGarbage(OLD_SPACE, gc_reason, flags, gc_callback_flags);
+  set_current_gc_flags(flags);
+  CollectGarbage(OLD_SPACE, gc_reason, gc_callback_flags);
+  set_current_gc_flags(kNoGCFlags);
 }
 
 
@@ -824,18 +823,18 @@ void Heap::CollectAllAvailableGarbage(const char* gc_reason) {
     isolate()->optimizing_compile_dispatcher()->Flush();
   }
   isolate()->ClearSerializerData();
-  isolate()->compilation_cache()->Clear();
+  set_current_gc_flags(kMakeHeapIterableMask | kReduceMemoryFootprintMask);
+  isolate_->compilation_cache()->Clear();
   const int kMaxNumberOfAttempts = 7;
   const int kMinNumberOfAttempts = 2;
   for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
-    if (!CollectGarbage(OLD_SPACE, gc_reason,
-                        Heap::kAbortIncrementalMarkingMask |
-                            Heap::kReduceMemoryFootprintMask,
-                        kGCCallbackFlagForced) &&
-        ((attempt + 1) >= kMinNumberOfAttempts)) {
+    if (!CollectGarbage(MARK_COMPACTOR, gc_reason, NULL,
+                        v8::kGCCallbackFlagForced) &&
+        attempt + 1 >= kMinNumberOfAttempts) {
       break;
     }
   }
+  set_current_gc_flags(kNoGCFlags);
   new_space_.Shrink();
   UncommitFromSpace();
 }
@@ -861,7 +860,8 @@ void Heap::EnsureFillerObjectAtTop() {
 
 
 bool Heap::CollectGarbage(GarbageCollector collector, const char* gc_reason,
-                          const char* collector_reason) {
+                          const char* collector_reason,
+                          const v8::GCCallbackFlags gc_callback_flags) {
   // The VM is in the GC state until exiting this function.
   VMState<GC> state(isolate_);
 
@@ -916,7 +916,8 @@ bool Heap::CollectGarbage(GarbageCollector collector, const char* gc_reason,
       HistogramTimerScope histogram_timer_scope(
           (collector == SCAVENGER) ? isolate_->counters()->gc_scavenger()
                                    : isolate_->counters()->gc_compactor());
-      next_gc_likely_to_collect_more = PerformGarbageCollection(collector);
+      next_gc_likely_to_collect_more =
+          PerformGarbageCollection(collector, gc_callback_flags);
     }
 
     GarbageCollectionEpilogue();
@@ -947,7 +948,7 @@ bool Heap::CollectGarbage(GarbageCollector collector, const char* gc_reason,
   }
 
   if (collector == MARK_COMPACTOR &&
-      (current_gc_callback_flags_ & kGCCallbackFlagForced) != 0) {
+      (gc_callback_flags & kGCCallbackFlagForced) != 0) {
     isolate()->CountUsage(v8::Isolate::kForcedGC);
   }
 
@@ -982,7 +983,7 @@ int Heap::NotifyContextDisposed(bool dependant_context) {
 }
 
 
-void Heap::StartIncrementalMarking(const GCFlags gc_flags,
+void Heap::StartIncrementalMarking(int gc_flags,
                                    const GCCallbackFlags gc_callback_flags,
                                    const char* reason) {
   DCHECK(incremental_marking()->IsStopped());
@@ -1084,17 +1085,17 @@ bool Heap::ReserveSpace(Reservation* reservations) {
       }
       if (perform_gc) {
         if (space == NEW_SPACE) {
-          CollectGarbageNewSpace("failed to reserve space in the new space");
+          CollectGarbage(NEW_SPACE, "failed to reserve space in the new space");
         } else {
           if (counter > 1) {
             CollectAllGarbage(
+                kReduceMemoryFootprintMask | kAbortIncrementalMarkingMask,
                 "failed to reserve space in paged or large "
-                "object space, trying to reduce memory footprint",
-                kReduceMemoryFootprintMask | kAbortIncrementalMarkingMask);
+                "object space, trying to reduce memory footprint");
           } else {
             CollectAllGarbage(
-                "failed to reserve space in paged or large object space",
-                kAbortIncrementalMarkingMask);
+                kAbortIncrementalMarkingMask,
+                "failed to reserve space in paged or large object space");
           }
         }
         gc_performed = true;
@@ -1163,8 +1164,8 @@ void Heap::UpdateSurvivalStatistics(int start_new_space_size) {
   }
 }
 
-
-bool Heap::PerformGarbageCollection(GarbageCollector collector) {
+bool Heap::PerformGarbageCollection(
+    GarbageCollector collector, const v8::GCCallbackFlags gc_callback_flags) {
   int freed_global_handles = 0;
 
   if (collector != SCAVENGER) {
@@ -1241,7 +1242,7 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector) {
     GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL);
     freed_global_handles =
         isolate_->global_handles()->PostGarbageCollectionProcessing(
-            collector, current_gc_callback_flags_);
+            collector, gc_callback_flags);
   }
   gc_post_processing_depth_--;
 
@@ -1272,7 +1273,7 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector) {
       GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL);
       VMState<EXTERNAL> state(isolate_);
       HandleScope handle_scope(isolate_);
-      CallGCEpilogueCallbacks(gc_type, current_gc_callback_flags_);
+      CallGCEpilogueCallbacks(gc_type, gc_callback_flags);
     }
   }
 
@@ -4536,7 +4537,7 @@ bool Heap::IsHeapIterable() {
 void Heap::MakeHeapIterable() {
   DCHECK(AllowHeapAllocation::IsAllowed());
   if (!IsHeapIterable()) {
-    CollectAllGarbage("Heap::MakeHeapIterable", kMakeHeapIterableMask);
+    CollectAllGarbage(kMakeHeapIterableMask, "Heap::MakeHeapIterable");
   }
   if (mark_compact_collector()->sweeping_in_progress()) {
     mark_compact_collector()->EnsureSweepingCompleted();
@@ -4661,8 +4662,8 @@ bool Heap::TryFinalizeIdleIncrementalMarking(
               gc_idle_time_handler_.ShouldDoFinalIncrementalMarkCompact(
                   static_cast<size_t>(idle_time_in_ms), size_of_objects,
                   final_incremental_mark_compact_speed_in_bytes_per_ms))) {
-    CollectAllGarbage("idle notification: finalize incremental",
-                      current_gc_flags_);
+    CollectAllGarbage(current_gc_flags_,
+                      "idle notification: finalize incremental");
     return true;
   }
   return false;
@@ -4745,11 +4746,11 @@ bool Heap::PerformIdleTimeAction(GCIdleTimeAction action,
     case DO_FULL_GC: {
       DCHECK(contexts_disposed_ > 0);
       HistogramTimerScope scope(isolate_->counters()->gc_context());
-      CollectAllGarbage("idle notification: contexts disposed", kNoGCFlags);
+      CollectAllGarbage(kNoGCFlags, "idle notification: contexts disposed");
       break;
     }
     case DO_SCAVENGE:
-      CollectGarbageNewSpace("idle notification: scavenge");
+      CollectGarbage(NEW_SPACE, "idle notification: scavenge");
       break;
     case DO_FINALIZE_SWEEPING:
       mark_compact_collector()->EnsureSweepingCompleted();
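Taken together, the heap.cc hunks restore the manual set/clear discipline that
the reverted GCFlagScope (removed from heap.h below) had replaced: every
full-GC entry point publishes its flags, runs the collection, and then
unconditionally resets them. In outline, mirroring the restored
CollectAllGarbage above:

    set_current_gc_flags(flags);                   // publish for all GC components
    CollectGarbage(OLD_SPACE, gc_reason, gc_callback_flags);
    set_current_gc_flags(kNoGCFlags);              // always reset, never restore

The reverted scope could instead restore the previously set flags
(kDontOverride), which is exactly the behavioural difference described in the
original issue's description above.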
diff --git a/src/heap/heap.h b/src/heap/heap.h
index 8cc286c..a4ecdf7 100644
@@ -10,7 +10,6 @@
 
 #include "src/allocation.h"
 #include "src/assert-scope.h"
-#include "src/base/flags.h"
 #include "src/globals.h"
 #include "src/heap/gc-idle-time-handler.h"
 #include "src/heap/incremental-marking.h"
@@ -577,26 +576,6 @@ class Heap {
     kSmiRootsStart = kStringTableRootIndex + 1
   };
 
-  // Flags to indicate modes for a GC run.
-  enum GCFlag {
-    kNoGCFlags = 0u,
-    kReduceMemoryFootprintMask = 1u << 0,
-    kAbortIncrementalMarkingMask = 1u << 1,
-    kFinalizeIncrementalMarkingMask = 1u << 2,
-
-    // Making the heap iterable requires us to abort incremental marking.
-    kMakeHeapIterableMask = kAbortIncrementalMarkingMask,
-  };
-  typedef base::Flags<GCFlag> GCFlags;
-
-  // A GC invocation always respects the passed flags. Upon finished the current
-  // cycle the previously set flags are either restored (kDontOverride), or
-  // overriden with the flags indicating no special behavior (kOverride).
-  enum GCFlagOverride {
-    kOverride,
-    kDontOverride,
-  };
-
   // Indicates whether live bytes adjustment is triggered
   // - from within the GC code before sweeping started (SEQUENTIAL_TO_SWEEPER),
   // - or from within GC (CONCURRENT_TO_SWEEPER),
@@ -619,36 +598,6 @@ class Heap {
     OBJECT_STATS_COUNT = FIRST_CODE_AGE_SUB_TYPE + Code::kCodeAgeCount + 1
   };
 
-  class GCFlagScope {
-   public:
-    GCFlagScope(Heap* heap, GCFlags gc_flags, GCCallbackFlags callback_flags,
-                GCFlagOverride override)
-        : heap_(heap), override_(override) {
-      if (override_ == kDontOverride) {
-        saved_gc_flags_ = heap->current_gc_flags_;
-        saved_gc_callback_flags_ = heap->current_gc_callback_flags_;
-      }
-      heap->set_current_gc_flags(gc_flags);
-      heap->current_gc_callback_flags_ = callback_flags;
-    }
-
-    ~GCFlagScope() {
-      if (override_ == kDontOverride) {
-        heap_->set_current_gc_flags(saved_gc_flags_);
-        heap_->current_gc_callback_flags_ = saved_gc_callback_flags_;
-      } else {
-        heap_->set_current_gc_flags(kNoGCFlags);
-        heap_->current_gc_callback_flags_ = kNoGCCallbackFlags;
-      }
-    }
-
-   private:
-    Heap* heap_;
-    GCFlagOverride override_;
-    GCFlags saved_gc_flags_;
-    GCCallbackFlags saved_gc_callback_flags_;
-  };
-
   // Taking this lock prevents the GC from entering a phase that relocates
   // object references.
   class RelocationLock {
@@ -751,6 +700,14 @@ class Heap {
   // callee is only valid in sloppy mode.
   static const int kArgumentsCalleeIndex = 1;
 
+  static const int kNoGCFlags = 0;
+  static const int kReduceMemoryFootprintMask = 1;
+  static const int kAbortIncrementalMarkingMask = 2;
+  static const int kFinalizeIncrementalMarkingMask = 4;
+
+  // Making the heap iterable requires us to abort incremental marking.
+  static const int kMakeHeapIterableMask = kAbortIncrementalMarkingMask;
+
   // The roots that have an index less than this are always in old space.
   static const int kOldSpaceRoots = 0x20;
 
@@ -1310,25 +1267,22 @@ class Heap {
   // Methods triggering GCs. ===================================================
   // ===========================================================================
 
-  // Perform a garbage collection operation in a given space.
+  // Performs garbage collection operation.
   // Returns whether there is a chance that another major GC could
   // collect more garbage.
   inline bool CollectGarbage(
-      AllocationSpace space, const char* gc_reason = nullptr,
-      const GCFlags flags = kNoGCFlags,
-      const GCCallbackFlags callback_flags = kNoGCCallbackFlags,
-      const GCFlagOverride override = kOverride);
-
-  inline bool CollectGarbageNewSpace(const char* gc_reason = nullptr);
+      AllocationSpace space, const char* gc_reason = NULL,
+      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
 
-  // Performs a full garbage collection.
+  // Performs a full garbage collection.  If (flags & kMakeHeapIterableMask) is
+  // non-zero, then the slower precise sweeper is used, which leaves the heap
+  // in a state where we can iterate over the heap visiting all objects.
   void CollectAllGarbage(
-      const char* gc_reason = nullptr,
-      const GCFlags flags = Heap::kFinalizeIncrementalMarkingMask,
+      int flags = kFinalizeIncrementalMarkingMask, const char* gc_reason = NULL,
       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
 
   // Last hope GC, should try to squeeze as much as possible.
-  void CollectAllAvailableGarbage(const char* gc_reason = nullptr);
+  void CollectAllAvailableGarbage(const char* gc_reason = NULL);
 
   // Invoked when GC was requested via the stack guard.
   void HandleGCRequest();
@@ -1377,7 +1331,7 @@ class Heap {
 
   // Starts incremental marking assuming incremental marking is currently
   // stopped.
-  void StartIncrementalMarking(const GCFlags = kNoGCFlags,
+  void StartIncrementalMarking(int gc_flags = kNoGCFlags,
                                const GCCallbackFlags gc_callback_flags =
                                    GCCallbackFlags::kNoGCCallbackFlags,
                                const char* reason = nullptr);
@@ -1741,7 +1695,7 @@ class Heap {
 
   StoreBuffer* store_buffer() { return &store_buffer_; }
 
-  void set_current_gc_flags(GCFlags flags) {
+  void set_current_gc_flags(int flags) {
     current_gc_flags_ = flags;
     DCHECK(!ShouldFinalizeIncrementalMarking() ||
            !ShouldAbortIncrementalMarking());
@@ -1783,13 +1737,17 @@ class Heap {
   // Performs garbage collection operation.
   // Returns whether there is a chance that another major GC could
   // collect more garbage.
-  bool CollectGarbage(GarbageCollector collector, const char* gc_reason,
-                      const char* collector_reason);
+  bool CollectGarbage(
+      GarbageCollector collector, const char* gc_reason,
+      const char* collector_reason,
+      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
 
   // Performs garbage collection
   // Returns whether there is a chance another major GC could
   // collect more garbage.
-  bool PerformGarbageCollection(GarbageCollector collector);
+  bool PerformGarbageCollection(
+      GarbageCollector collector,
+      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
 
   inline void UpdateOldSpaceLimits();
 
@@ -2410,7 +2368,7 @@ class Heap {
   bool configured_;
 
   // Currently set GC flags that are respected by all GC components.
-  GCFlags current_gc_flags_;
+  int current_gc_flags_;
 
   // Currently set GC callback flags that are used to pass information between
   // the embedder and V8's GC.
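With GCFlags reverted to plain static const int constants, flag combinations
go back to untyped bitwise OR with no compile-time checking. A representative
call, mirroring the Heap::ReserveSpace hunk in heap.cc above:

    heap->CollectAllGarbage(
        Heap::kReduceMemoryFootprintMask | Heap::kAbortIncrementalMarkingMask,
        "failed to reserve space in paged or large object space");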
diff --git a/src/heap/memory-reducer.cc b/src/heap/memory-reducer.cc
index 7f51ef2..4cf8bc2 100644
@@ -103,8 +103,8 @@ void MemoryReducer::NotifyBackgroundIdleNotification(const Event& event) {
     // TODO(ulan): Replace it with incremental marking GC once
     // chromium:490559 is fixed.
     if (event.time_ms > state_.last_gc_time_ms + kLongDelayMs) {
-      heap()->CollectAllGarbage("memory reducer background GC",
-                                Heap::kReduceMemoryFootprintMask);
+      heap()->CollectAllGarbage(Heap::kReduceMemoryFootprintMask,
+                                "memory reducer background GC");
     } else {
       DCHECK(FLAG_incremental_marking);
       heap()->StartIdleIncrementalMarking();
diff --git a/src/log.cc b/src/log.cc
index 80f010c..0e932cf 100644
@@ -1656,8 +1656,8 @@ void Logger::LogCodeObject(Object* object) {
 
 void Logger::LogCodeObjects() {
   Heap* heap = isolate_->heap();
-  heap->CollectAllGarbage("Logger::LogCodeObjects",
-                          Heap::kMakeHeapIterableMask);
+  heap->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+                          "Logger::LogCodeObjects");
   HeapIterator iterator(heap);
   DisallowHeapAllocation no_gc;
   for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
@@ -1716,8 +1716,8 @@ void Logger::LogExistingFunction(Handle<SharedFunctionInfo> shared,
 
 void Logger::LogCompiledFunctions() {
   Heap* heap = isolate_->heap();
-  heap->CollectAllGarbage("Logger::LogCompiledFunctions",
-                          Heap::kMakeHeapIterableMask);
+  heap->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+                          "Logger::LogCompiledFunctions");
   HandleScope scope(isolate_);
   const int compiled_funcs_count = EnumerateCompiledFunctions(heap, NULL, NULL);
   ScopedVector< Handle<SharedFunctionInfo> > sfis(compiled_funcs_count);
@@ -1736,8 +1736,8 @@ void Logger::LogCompiledFunctions() {
 
 void Logger::LogAccessorCallbacks() {
   Heap* heap = isolate_->heap();
-  heap->CollectAllGarbage("Logger::LogAccessorCallbacks",
-                          Heap::kMakeHeapIterableMask);
+  heap->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+                          "Logger::LogAccessorCallbacks");
   HeapIterator iterator(heap);
   DisallowHeapAllocation no_gc;
   for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
diff --git a/src/runtime/runtime-debug.cc b/src/runtime/runtime-debug.cc
index a47db22..18f987c 100644
@@ -1574,7 +1574,7 @@ RUNTIME_FUNCTION(Runtime_GetDebugContext) {
 RUNTIME_FUNCTION(Runtime_CollectGarbage) {
   SealHandleScope shs(isolate);
   DCHECK(args.length() == 1);
-  isolate->heap()->CollectAllGarbage("%CollectGarbage", Heap::kNoGCFlags);
+  isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags, "%CollectGarbage");
   return isolate->heap()->undefined_value();
 }
 
diff --git a/test/cctest/test-api.cc b/test/cctest/test-api.cc
index 9a849ec..76f3304 100644
@@ -462,8 +462,8 @@ THREADED_TEST(ScriptMakingExternalString) {
     Local<String> source =
         String::NewFromTwoByte(env->GetIsolate(), two_byte_source);
     // Trigger GCs so that the newly allocated string moves to old gen.
-    CcTest::heap()->CollectGarbageNewSpace();  // in survivor space now
-    CcTest::heap()->CollectGarbageNewSpace();  // in old gen now
+    CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in survivor space now
+    CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in old gen now
     CHECK_EQ(source->IsExternal(), false);
     CHECK_EQ(source->IsExternalOneByte(), false);
     String::Encoding encoding = String::UNKNOWN_ENCODING;
@@ -493,8 +493,8 @@ THREADED_TEST(ScriptMakingExternalOneByteString) {
     v8::HandleScope scope(env->GetIsolate());
     Local<String> source = v8_str(c_source);
     // Trigger GCs so that the newly allocated string moves to old gen.
-    CcTest::heap()->CollectGarbageNewSpace();  // in survivor space now
-    CcTest::heap()->CollectGarbageNewSpace();  // in old gen now
+    CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in survivor space now
+    CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in old gen now
     bool success = source->MakeExternal(
         new TestOneByteResource(i::StrDup(c_source), &dispose_count));
     CHECK(success);
@@ -516,8 +516,8 @@ TEST(MakingExternalStringConditions) {
   v8::HandleScope scope(env->GetIsolate());
 
   // Free some space in the new space so that we can check freshness.
-  CcTest::heap()->CollectGarbageNewSpace();
-  CcTest::heap()->CollectGarbageNewSpace();
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);
 
   uint16_t* two_byte_string = AsciiToTwoByteString("s1");
   Local<String> small_string =
@@ -556,8 +556,8 @@ TEST(MakingExternalOneByteStringConditions) {
   v8::HandleScope scope(env->GetIsolate());
 
   // Free some space in the new space so that we can check freshness.
-  CcTest::heap()->CollectGarbageNewSpace();
-  CcTest::heap()->CollectGarbageNewSpace();
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);
 
   Local<String> small_string = String::NewFromUtf8(env->GetIsolate(), "s1");
   // We should refuse to externalize small strings.
@@ -594,8 +594,8 @@ TEST(MakingExternalUnalignedOneByteString) {
 
   // Trigger GCs so that the newly allocated string moves to old gen.
   SimulateFullSpace(CcTest::heap()->old_space());
-  CcTest::heap()->CollectGarbageNewSpace();  // in survivor space now
-  CcTest::heap()->CollectGarbageNewSpace();  // in old gen now
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in survivor space now
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in old gen now
 
   // Turn into external string with unaligned resource data.
   const char* c_cons = "_abcdefghijklmnopqrstuvwxyz";
@@ -609,8 +609,7 @@ TEST(MakingExternalUnalignedOneByteString) {
 
   // Trigger GCs and force evacuation.
   CcTest::heap()->CollectAllGarbage();
-  CcTest::heap()->CollectAllGarbage("MakingExternalUnalignedOneByteString",
-                                    i::Heap::kReduceMemoryFootprintMask);
+  CcTest::heap()->CollectAllGarbage(i::Heap::kReduceMemoryFootprintMask);
 }
 
 
@@ -623,8 +622,8 @@ THREADED_TEST(UsingExternalString) {
         CcTest::isolate(), new TestResource(two_byte_string));
     i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
     // Trigger GCs so that the newly allocated string moves to old gen.
-    CcTest::heap()->CollectGarbageNewSpace();  // in survivor space now
-    CcTest::heap()->CollectGarbageNewSpace();  // in old gen now
+    CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in survivor space now
+    CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in old gen now
     i::Handle<i::String> isymbol =
         factory->InternalizeString(istring);
     CHECK(isymbol->IsInternalizedString());
@@ -643,8 +642,8 @@ THREADED_TEST(UsingExternalOneByteString) {
         CcTest::isolate(), new TestOneByteResource(i::StrDup(one_byte_string)));
     i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
     // Trigger GCs so that the newly allocated string moves to old gen.
-    CcTest::heap()->CollectGarbageNewSpace();  // in survivor space now
-    CcTest::heap()->CollectGarbageNewSpace();  // in old gen now
+    CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in survivor space now
+    CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in old gen now
     i::Handle<i::String> isymbol =
         factory->InternalizeString(istring);
     CHECK(isymbol->IsInternalizedString());
@@ -712,7 +711,7 @@ THREADED_TEST(ScavengeExternalString) {
     Local<String> string = String::NewExternal(
         CcTest::isolate(), new TestResource(two_byte_string, &dispose_count));
     i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
-    CcTest::heap()->CollectGarbageNewSpace();
+    CcTest::heap()->CollectGarbage(i::NEW_SPACE);
     in_new_space = CcTest::heap()->InNewSpace(*istring);
     CHECK(in_new_space || CcTest::heap()->old_space()->Contains(*istring));
     CHECK_EQ(0, dispose_count);
@@ -734,7 +733,7 @@ THREADED_TEST(ScavengeExternalOneByteString) {
         CcTest::isolate(),
         new TestOneByteResource(i::StrDup(one_byte_string), &dispose_count));
     i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
-    CcTest::heap()->CollectGarbageNewSpace();
+    CcTest::heap()->CollectGarbage(i::NEW_SPACE);
     in_new_space = CcTest::heap()->InNewSpace(*istring);
     CHECK(in_new_space || CcTest::heap()->old_space()->Contains(*istring));
     CHECK_EQ(0, dispose_count);
@@ -3391,7 +3390,7 @@ void TestGlobalValueMap() {
   CHECK_EQ(initial_handle_count + 1, global_handles->global_handles_count());
   if (map.IsWeak()) {
     CcTest::i_isolate()->heap()->CollectAllGarbage(
-        "TestGlobalValueMap", i::Heap::kAbortIncrementalMarkingMask);
+        i::Heap::kAbortIncrementalMarkingMask);
   } else {
     map.Clear();
   }
@@ -6544,7 +6543,7 @@ static void IndependentWeakHandle(bool global_gc, bool interlinked) {
     if (global_gc) {
       CcTest::heap()->CollectAllGarbage();
     } else {
-      CcTest::heap()->CollectGarbageNewSpace();
+      CcTest::heap()->CollectGarbage(i::NEW_SPACE);
     }
     // We are relying on this creating a big flag array and reserving the space
     // up front.
@@ -6662,7 +6661,7 @@ void InternalFieldCallback(bool global_gc) {
   if (global_gc) {
     CcTest::heap()->CollectAllGarbage();
   } else {
-    CcTest::heap()->CollectGarbageNewSpace();
+    CcTest::heap()->CollectGarbage(i::NEW_SPACE);
   }
 
   CHECK_EQ(1729, t1->x());
@@ -6707,10 +6706,9 @@ void v8::internal::HeapTester::ResetWeakHandle(bool global_gc) {
     object_a.handle.Reset(iso, a);
     object_b.handle.Reset(iso, b);
     if (global_gc) {
-      CcTest::heap()->CollectAllGarbage("ResetWeakHandle",
-                                        Heap::kAbortIncrementalMarkingMask);
+      CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
     } else {
-      CcTest::heap()->CollectGarbageNewSpace();
+      CcTest::heap()->CollectGarbage(i::NEW_SPACE);
     }
   }
 
@@ -6726,8 +6724,7 @@ void v8::internal::HeapTester::ResetWeakHandle(bool global_gc) {
     CHECK(object_b.handle.IsIndependent());
   }
   if (global_gc) {
-    CcTest::heap()->CollectAllGarbage("ResetWeakHandle",
-                                      Heap::kAbortIncrementalMarkingMask);
+    CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
   } else {
     CcTest::heap()->CollectGarbage(i::NEW_SPACE);
   }
@@ -6742,7 +6739,7 @@ THREADED_HEAP_TEST(ResetWeakHandle) {
 }
 
 
-static void InvokeScavenge() { CcTest::heap()->CollectGarbageNewSpace(); }
+static void InvokeScavenge() { CcTest::heap()->CollectGarbage(i::NEW_SPACE); }
 
 
 static void InvokeMarkSweep() { CcTest::heap()->CollectAllGarbage(); }
@@ -11715,8 +11712,7 @@ static void CheckSurvivingGlobalObjectsCount(int expected) {
   // been marked at that point.  Therefore some of the maps are not
   // collected until the second garbage collection.
   CcTest::heap()->CollectAllGarbage();
-  CcTest::heap()->CollectAllGarbage("CheckSurvivingGlobalObjectsCount",
-                                    i::Heap::kMakeHeapIterableMask);
+  CcTest::heap()->CollectAllGarbage(i::Heap::kMakeHeapIterableMask);
   int count = GetGlobalObjectsCount();
 #ifdef DEBUG
   if (count != expected) CcTest::heap()->TracePathToGlobal();
@@ -11815,7 +11811,7 @@ TEST(WeakCallbackApi) {
         handle, WeakApiCallback, v8::WeakCallbackType::kParameter);
   }
   reinterpret_cast<i::Isolate*>(isolate)->heap()->CollectAllGarbage(
-      "WeakCallbackApi", i::Heap::kAbortIncrementalMarkingMask);
+      i::Heap::kAbortIncrementalMarkingMask);
   // Verify disposed.
   CHECK_EQ(initial_handles, globals->global_handles_count());
 }
@@ -15450,7 +15446,7 @@ TEST(TestIdleNotification) {
 TEST(Regress2333) {
   LocalContext env;
   for (int i = 0; i < 3; i++) {
-    CcTest::heap()->CollectGarbageNewSpace();
+    CcTest::heap()->CollectGarbage(i::NEW_SPACE);
   }
 }
 
@@ -16463,8 +16459,8 @@ void PrologueCallbackAlloc(v8::Isolate* isolate,
   Local<Object> obj = Object::New(isolate);
   CHECK(!obj.IsEmpty());
 
-  CcTest::heap()->CollectAllGarbage("PrologueCallbackAlloc",
-                                    i::Heap::kAbortIncrementalMarkingMask);
+  CcTest::heap()->CollectAllGarbage(
+      i::Heap::kAbortIncrementalMarkingMask);
 }
 
 
@@ -16483,8 +16479,8 @@ void EpilogueCallbackAlloc(v8::Isolate* isolate,
   Local<Object> obj = Object::New(isolate);
   CHECK(!obj.IsEmpty());
 
-  CcTest::heap()->CollectAllGarbage("EpilogueCallbackAlloc",
-                                    i::Heap::kAbortIncrementalMarkingMask);
+  CcTest::heap()->CollectAllGarbage(
+      i::Heap::kAbortIncrementalMarkingMask);
 }
 
 
@@ -16559,8 +16555,8 @@ TEST(GCCallbacks) {
   CHECK_EQ(0, epilogue_call_count_alloc);
   isolate->AddGCPrologueCallback(PrologueCallbackAlloc);
   isolate->AddGCEpilogueCallback(EpilogueCallbackAlloc);
-  CcTest::heap()->CollectAllGarbage("GCCallbacks",
-                                    i::Heap::kAbortIncrementalMarkingMask);
+  CcTest::heap()->CollectAllGarbage(
+      i::Heap::kAbortIncrementalMarkingMask);
   CHECK_EQ(1, prologue_call_count_alloc);
   CHECK_EQ(1, epilogue_call_count_alloc);
   isolate->RemoveGCPrologueCallback(PrologueCallbackAlloc);
diff --git a/test/cctest/test-debug.cc b/test/cctest/test-debug.cc
index 640fafa..1e3f0ab 100644
@@ -398,8 +398,7 @@ void CheckDebuggerUnloaded(bool check_functions) {
 
   // Collect garbage to ensure weak handles are cleared.
   CcTest::heap()->CollectAllGarbage();
-  CcTest::heap()->CollectAllGarbage("CheckDebuggerUnloaded",
-                                    Heap::kMakeHeapIterableMask);
+  CcTest::heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask);
 
   // Iterate the head and check that there are no debugger related objects left.
   HeapIterator iterator(CcTest::heap());
@@ -814,7 +813,7 @@ static void DebugEventBreakPointCollectGarbage(
     break_point_hit_count++;
     if (break_point_hit_count % 2 == 0) {
       // Scavenge.
-      CcTest::heap()->CollectGarbageNewSpace();
+      CcTest::heap()->CollectGarbage(v8::internal::NEW_SPACE);
     } else {
       // Mark sweep compact.
       CcTest::heap()->CollectAllGarbage();
@@ -838,7 +837,7 @@ static void DebugEventBreak(
 
     // Run the garbage collector to enforce heap verification if option
     // --verify-heap is set.
-    CcTest::heap()->CollectGarbageNewSpace();
+    CcTest::heap()->CollectGarbage(v8::internal::NEW_SPACE);
 
     // Set the break flag again to come back here as soon as possible.
     v8::Debug::DebugBreak(CcTest::isolate());
@@ -1222,7 +1221,7 @@ static void CallAndGC(v8::Local<v8::Object> recv,
     CHECK_EQ(1 + i * 3, break_point_hit_count);
 
     // Scavenge and call function.
-    CcTest::heap()->CollectGarbageNewSpace();
+    CcTest::heap()->CollectGarbage(v8::internal::NEW_SPACE);
     f->Call(recv, 0, NULL);
     CHECK_EQ(2 + i * 3, break_point_hit_count);
 
diff --git a/test/cctest/test-heap.cc b/test/cctest/test-heap.cc
index 38e5a47..2c85596 100644
@@ -435,7 +435,7 @@ TEST(GarbageCollection) {
 
   HandleScope sc(isolate);
   // Check GC.
-  heap->CollectGarbageNewSpace();
+  heap->CollectGarbage(NEW_SPACE);
 
   Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
   Handle<String> name = factory->InternalizeUtf8String("theFunction");
@@ -461,7 +461,7 @@ TEST(GarbageCollection) {
              *Object::GetProperty(obj, prop_namex).ToHandleChecked());
   }
 
-  heap->CollectGarbageNewSpace();
+  heap->CollectGarbage(NEW_SPACE);
 
   // Function should be alive.
   CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name));
@@ -551,7 +551,7 @@ TEST(GlobalHandles) {
   }
 
   // after gc, it should survive
-  heap->CollectGarbageNewSpace();
+  heap->CollectGarbage(NEW_SPACE);
 
   CHECK((*h1)->IsString());
   CHECK((*h2)->IsHeapNumber());
@@ -609,7 +609,7 @@ TEST(WeakGlobalHandlesScavenge) {
                           &TestWeakGlobalHandleCallback);
 
   // Scavenge treats weak pointers as normal roots.
-  heap->CollectGarbageNewSpace();
+  heap->CollectGarbage(NEW_SPACE);
 
   CHECK((*h1)->IsString());
   CHECK((*h2)->IsHeapNumber());
@@ -647,7 +647,7 @@ TEST(WeakGlobalHandlesMark) {
 
   // Make sure the objects are promoted.
   heap->CollectGarbage(OLD_SPACE);
-  heap->CollectGarbageNewSpace();
+  heap->CollectGarbage(NEW_SPACE);
   CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));
 
   std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
@@ -694,7 +694,7 @@ TEST(DeleteWeakGlobalHandle) {
                           &TestWeakGlobalHandleCallback);
 
   // Scanvenge does not recognize weak reference.
-  heap->CollectGarbageNewSpace();
+  heap->CollectGarbage(NEW_SPACE);
 
   CHECK(!WeakPointerCleared);
 
@@ -1513,7 +1513,7 @@ TEST(TestCodeFlushingIncrementalScavenge) {
   // perform a scavenge while incremental marking is still running.
   SimulateIncrementalMarking(CcTest::heap());
   *function2.location() = NULL;
-  CcTest::heap()->CollectGarbageNewSpace("test scavenge while marking");
+  CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");
 
   // Simulate one final GC to make sure the candidate queue is sane.
   CcTest::heap()->CollectAllGarbage();
@@ -1787,7 +1787,7 @@ TEST(TestInternalWeakLists) {
 
     // Scavenge treats these references as strong.
     for (int j = 0; j < 10; j++) {
-      CcTest::heap()->CollectGarbageNewSpace();
+      CcTest::heap()->CollectGarbage(NEW_SPACE);
       CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
     }
 
@@ -1799,7 +1799,7 @@ TEST(TestInternalWeakLists) {
     // Get rid of f3 and f5 in the same way.
     CompileRun("f3=null");
     for (int j = 0; j < 10; j++) {
-      CcTest::heap()->CollectGarbageNewSpace();
+      CcTest::heap()->CollectGarbage(NEW_SPACE);
       CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
     }
     CcTest::heap()->CollectAllGarbage();
@@ -2382,7 +2382,7 @@ TEST(GrowAndShrinkNewSpace) {
   CHECK(old_capacity == new_capacity);
 
   // Let the scavenger empty the new space.
-  heap->CollectGarbageNewSpace();
+  heap->CollectGarbage(NEW_SPACE);
   CHECK_LE(new_space->Size(), old_capacity);
 
   // Explicitly shrinking should halve the space capacity.
@@ -2837,7 +2837,7 @@ HEAP_TEST(GCFlags) {
 
   // Set the flags to check whether we appropriately resets them after the GC.
   heap->set_current_gc_flags(Heap::kAbortIncrementalMarkingMask);
-  heap->CollectAllGarbage("GCFlags", Heap::kReduceMemoryFootprintMask);
+  heap->CollectAllGarbage(Heap::kReduceMemoryFootprintMask);
   CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
 
   MarkCompactCollector* collector = heap->mark_compact_collector();
@@ -2850,11 +2850,11 @@ HEAP_TEST(GCFlags) {
   heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask);
   CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
 
-  heap->CollectGarbageNewSpace();
+  heap->CollectGarbage(NEW_SPACE);
   // NewSpace scavenges should not overwrite the flags.
   CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
 
-  heap->CollectAllGarbage("GCFlags", Heap::kAbortIncrementalMarkingMask);
+  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
   CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
 }
 
@@ -3740,18 +3740,17 @@ TEST(ReleaseOverReservedPages) {
 
   // Triggering one GC will cause a lot of garbage to be discovered but
   // even spread across all allocated pages.
-  heap->CollectAllGarbage("triggered for preparation",
-                          Heap::kFinalizeIncrementalMarkingMask);
+  heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
+                          "triggered for preparation");
   CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
 
   // Triggering subsequent GCs should cause at least half of the pages
   // to be released to the OS after at most two cycles.
-  heap->CollectAllGarbage("triggered by test 1",
-                          Heap::kFinalizeIncrementalMarkingMask);
-  ;
+  heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
+                          "triggered by test 1");
   CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
-  heap->CollectAllGarbage("triggered by test 2",
-                          Heap::kFinalizeIncrementalMarkingMask);
+  heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
+                          "triggered by test 2");
   CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages() * 2);
 
   // Triggering a last-resort GC should cause all pages to be released to the
@@ -4449,7 +4448,7 @@ TEST(Regress169928) {
   CcTest::global()->Set(array_name, v8::Int32::New(CcTest::isolate(), 0));
 
   // First make sure we flip spaces
-  CcTest::heap()->CollectGarbageNewSpace();
+  CcTest::heap()->CollectGarbage(NEW_SPACE);
 
   // Allocate the object.
   Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
@@ -5519,10 +5518,10 @@ TEST(WeakCell) {
   }
   CHECK(weak_cell1->value()->IsFixedArray());
   CHECK_EQ(*survivor, weak_cell2->value());
-  heap->CollectGarbageNewSpace();
+  heap->CollectGarbage(NEW_SPACE);
   CHECK(weak_cell1->value()->IsFixedArray());
   CHECK_EQ(*survivor, weak_cell2->value());
-  heap->CollectGarbageNewSpace();
+  heap->CollectGarbage(NEW_SPACE);
   CHECK(weak_cell1->value()->IsFixedArray());
   CHECK_EQ(*survivor, weak_cell2->value());
   heap->CollectAllAvailableGarbage();
@@ -5553,7 +5552,7 @@ TEST(WeakCellsWithIncrementalMarking) {
       heap->StartIncrementalMarking();
     }
     marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
-    heap->CollectGarbageNewSpace();
+    heap->CollectGarbage(NEW_SPACE);
     CHECK(weak_cell->value()->IsFixedArray());
     weak_cells[i] = inner_scope.CloseAndEscape(weak_cell);
   }
@@ -5759,8 +5758,8 @@ UNINITIALIZED_TEST(PromotionQueue) {
     CHECK(2 * old_capacity == new_space->TotalCapacity());
 
     // Call the scavenger two times to get an empty new space
-    heap->CollectGarbageNewSpace();
-    heap->CollectGarbageNewSpace();
+    heap->CollectGarbage(NEW_SPACE);
+    heap->CollectGarbage(NEW_SPACE);
 
     // First create a few objects which will survive a scavenge, and will get
     // promoted to the old generation later on. These objects will create
@@ -5770,7 +5769,7 @@ UNINITIALIZED_TEST(PromotionQueue) {
     for (int i = 0; i < number_handles; i++) {
       handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
     }
-    heap->CollectGarbageNewSpace();
+    heap->CollectGarbage(NEW_SPACE);
 
     // Create the first huge object which will exactly fit the first semi-space
     // page.
@@ -5795,7 +5794,7 @@ UNINITIALIZED_TEST(PromotionQueue) {
 
     // This scavenge will corrupt memory if the promotion queue is not
     // evacuated.
-    heap->CollectGarbageNewSpace();
+    heap->CollectGarbage(NEW_SPACE);
   }
   isolate->Dispose();
 }
@@ -6220,12 +6219,12 @@ TEST(NewSpaceAllocationCounter) {
   Isolate* isolate = CcTest::i_isolate();
   Heap* heap = isolate->heap();
   size_t counter1 = heap->NewSpaceAllocationCounter();
-  heap->CollectGarbageNewSpace();
+  heap->CollectGarbage(NEW_SPACE);
   const size_t kSize = 1024;
   AllocateInSpace(isolate, kSize, NEW_SPACE);
   size_t counter2 = heap->NewSpaceAllocationCounter();
   CHECK_EQ(kSize, counter2 - counter1);
-  heap->CollectGarbageNewSpace();
+  heap->CollectGarbage(NEW_SPACE);
   size_t counter3 = heap->NewSpaceAllocationCounter();
   CHECK_EQ(0U, counter3 - counter2);
   // Test counter overflow.
@@ -6247,14 +6246,14 @@ TEST(OldSpaceAllocationCounter) {
   Isolate* isolate = CcTest::i_isolate();
   Heap* heap = isolate->heap();
   size_t counter1 = heap->OldGenerationAllocationCounter();
-  heap->CollectGarbageNewSpace();
-  heap->CollectGarbageNewSpace();
+  heap->CollectGarbage(NEW_SPACE);
+  heap->CollectGarbage(NEW_SPACE);
   const size_t kSize = 1024;
   AllocateInSpace(isolate, kSize, OLD_SPACE);
   size_t counter2 = heap->OldGenerationAllocationCounter();
   // TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed.
   CHECK_LE(kSize, counter2 - counter1);
-  heap->CollectGarbageNewSpace();
+  heap->CollectGarbage(NEW_SPACE);
   size_t counter3 = heap->OldGenerationAllocationCounter();
   CHECK_EQ(0u, counter3 - counter2);
   AllocateInSpace(isolate, kSize, OLD_SPACE);
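HEAP_TEST(GCFlags) earlier in this file pins down the restored flag lifecycle:
a full GC always resets current_gc_flags_ to kNoGCFlags, while a scavenge
leaves flags set by StartIncrementalMarking untouched. Paraphrasing the
checks from the hunk above:

    heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask);
    heap->CollectGarbage(NEW_SPACE);        // scavenge: flags survive
    CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);

    heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
    CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);  // full GC: flags reset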
diff --git a/test/cctest/test-log.cc b/test/cctest/test-log.cc
index 0cda4d5..0938a9e 100644
@@ -464,7 +464,7 @@ TEST(EquivalenceOfLoggingAndTraversal) {
         "})(this);");
     logger->StopProfiler();
     reinterpret_cast<i::Isolate*>(isolate)->heap()->CollectAllGarbage(
-        "EquivalenceOfLoggingAndTraversal", i::Heap::kMakeHeapIterableMask);
+        i::Heap::kMakeHeapIterableMask);
     logger->StringEvent("test-logging-done", "");
 
     // Iterate heap to find compiled functions, will write to log.
diff --git a/test/cctest/test-mementos.cc b/test/cctest/test-mementos.cc
index 518c93b..a976663 100644
@@ -63,7 +63,7 @@ TEST(Regress340063) {
   // Call GC to see if we can handle a poisonous memento right after the
   // current new space top pointer.
   CcTest::i_isolate()->heap()->CollectAllGarbage(
-      "Regress340063", Heap::kAbortIncrementalMarkingMask);
+      Heap::kAbortIncrementalMarkingMask);
 }
 
 
@@ -81,7 +81,7 @@ TEST(Regress470390) {
   // Call GC to see if we can handle a poisonous memento right after the
   // current new space top pointer.
   CcTest::i_isolate()->heap()->CollectAllGarbage(
-      "Regress470390", Heap::kAbortIncrementalMarkingMask);
+      Heap::kAbortIncrementalMarkingMask);
 }
 
 
@@ -93,7 +93,7 @@ TEST(BadMementoAfterTopForceScavenge) {
   SetUpNewSpaceWithPoisonedMementoAtTop();
 
   // Force GC to test the poisoned memento handling
-  CcTest::i_isolate()->heap()->CollectGarbageNewSpace();
+  CcTest::i_isolate()->heap()->CollectGarbage(i::NEW_SPACE);
 }
 
 
diff --git a/test/cctest/test-object-observe.cc b/test/cctest/test-object-observe.cc
index 4b6f216..37dde07 100644
@@ -683,8 +683,7 @@ static void CheckSurvivingGlobalObjectsCount(int expected) {
   // been marked at that point.  Therefore some of the maps are not
   // collected until the second garbage collection.
   CcTest::heap()->CollectAllGarbage();
-  CcTest::heap()->CollectAllGarbage("CheckSurvivingGlobalObjectsCount",
-                                    i::Heap::kMakeHeapIterableMask);
+  CcTest::heap()->CollectAllGarbage(i::Heap::kMakeHeapIterableMask);
   int count = GetGlobalObjectsCount();
 #ifdef DEBUG
   if (count != expected) CcTest::heap()->TracePathToGlobal();
diff --git a/test/cctest/test-weakmaps.cc b/test/cctest/test-weakmaps.cc
index 9e99556..6cab081 100644
@@ -96,7 +96,7 @@ TEST(Weakness) {
   CHECK_EQ(2, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
 
   // Force a full GC.
-  heap->CollectAllGarbage("Weakness", Heap::kNoGCFlags);
+  heap->CollectAllGarbage(false);
   CHECK_EQ(0, NumberOfWeakCalls);
   CHECK_EQ(2, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
   CHECK_EQ(
@@ -115,12 +115,12 @@ TEST(Weakness) {
   // Force a full GC.
   // Perform two consecutive GCs because the first one will only clear
   // weak references whereas the second one will also clear weak maps.
-  heap->CollectAllGarbage("Weakness", Heap::kNoGCFlags);
+  heap->CollectAllGarbage(false);
   CHECK_EQ(1, NumberOfWeakCalls);
   CHECK_EQ(2, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
   CHECK_EQ(
       0, ObjectHashTable::cast(weakmap->table())->NumberOfDeletedElements());
-  heap->CollectAllGarbage("Weakness", Heap::kNoGCFlags);
+  heap->CollectAllGarbage(false);
   CHECK_EQ(1, NumberOfWeakCalls);
   CHECK_EQ(0, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
   CHECK_EQ(2,
@@ -158,7 +158,7 @@ TEST(Shrinking) {
   CHECK_EQ(32, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
   CHECK_EQ(
       0, ObjectHashTable::cast(weakmap->table())->NumberOfDeletedElements());
-  heap->CollectAllGarbage("Shrinking", Heap::kNoGCFlags);
+  heap->CollectAllGarbage(false);
   CHECK_EQ(0, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
   CHECK_EQ(
       32, ObjectHashTable::cast(weakmap->table())->NumberOfDeletedElements());
diff --git a/test/cctest/test-weaksets.cc b/test/cctest/test-weaksets.cc
index b536a67..1abf1a6 100644
@@ -95,7 +95,7 @@ TEST(WeakSet_Weakness) {
   CHECK_EQ(1, ObjectHashTable::cast(weakset->table())->NumberOfElements());
 
   // Force a full GC.
-  heap->CollectAllGarbage("WeakSet_Weakness", Heap::kNoGCFlags);
+  heap->CollectAllGarbage(false);
   CHECK_EQ(0, NumberOfWeakCalls);
   CHECK_EQ(1, ObjectHashTable::cast(weakset->table())->NumberOfElements());
   CHECK_EQ(
@@ -114,12 +114,12 @@ TEST(WeakSet_Weakness) {
   // Force a full GC.
   // Perform two consecutive GCs because the first one will only clear
   // weak references whereas the second one will also clear weak sets.
-  heap->CollectAllGarbage("WeakSet_Weakness", Heap::kNoGCFlags);
+  heap->CollectAllGarbage(false);
   CHECK_EQ(1, NumberOfWeakCalls);
   CHECK_EQ(1, ObjectHashTable::cast(weakset->table())->NumberOfElements());
   CHECK_EQ(
       0, ObjectHashTable::cast(weakset->table())->NumberOfDeletedElements());
-  heap->CollectAllGarbage("WeakSet_Weakness", Heap::kNoGCFlags);
+  heap->CollectAllGarbage(false);
   CHECK_EQ(1, NumberOfWeakCalls);
   CHECK_EQ(0, ObjectHashTable::cast(weakset->table())->NumberOfElements());
   CHECK_EQ(
@@ -157,7 +157,7 @@ TEST(WeakSet_Shrinking) {
   CHECK_EQ(32, ObjectHashTable::cast(weakset->table())->NumberOfElements());
   CHECK_EQ(
       0, ObjectHashTable::cast(weakset->table())->NumberOfDeletedElements());
-  heap->CollectAllGarbage("WeakSet_Shrinking", Heap::kNoGCFlags);
+  heap->CollectAllGarbage(false);
   CHECK_EQ(0, ObjectHashTable::cast(weakset->table())->NumberOfElements());
   CHECK_EQ(
       32, ObjectHashTable::cast(weakset->table())->NumberOfDeletedElements());