    kStoreBufferOverflow = 4,
    kSlotsBufferOverflow = 5,
    kObjectObserve = 6,
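+   // A full GC was explicitly forced (kGCCallbackFlagForced), e.g. by the
+   // gc() extension behind --expose-gc.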
+   kForcedGC = 7,
    kUseCounterFeatureCount  // This enum value must be last.
  };
#else
  if (FLAG_log_gc) new_space_.ReportStatistics();
#endif  // DEBUG
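+  // Report use counters that were deferred while a GC was in progress; it is
+  // now safe to call back into the embedder again.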
+  for (int i = 0; i < static_cast<int>(v8::Isolate::kUseCounterFeatureCount);
+       ++i) {
+    int count = deferred_counters_[i];
+    deferred_counters_[i] = 0;
+    while (count > 0) {
+      count--;
+      isolate()->CountUsage(static_cast<v8::Isolate::UseCounterFeature>(i));
+    }
+  }
+}
+
+
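+// Use counters that fire during GC are accumulated here and reported to the
+// embedder in ReportStatisticsAfterGC() once the GC has finished.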
+void Heap::IncrementDeferredCount(v8::Isolate::UseCounterFeature feature) {
+  deferred_counters_[feature]++;
}
    tracer()->Stop(collector);
  }
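+  // Surface explicitly forced full GCs to the embedder as a use counter.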
+  if (collector == MARK_COMPACTOR &&
+      (gc_callback_flags & kGCCallbackFlagForced) != 0) {
+    isolate()->CountUsage(v8::Isolate::kForcedGC);
+  }
+
  // Start incremental marking for the next cycle. The heap snapshot
  // generator needs incremental marking to stay off after it aborted.
  if (!mark_compact_collector()->abort_incremental_marking() &&
}
}
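+  // No use counter events have been deferred yet.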
+  for (int i = 0; i < static_cast<int>(v8::Isolate::kUseCounterFeatureCount);
+       ++i) {
+    deferred_counters_[i] = 0;
+  }
+
+
  LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
  LOG(isolate_, IntPtrTEvent("heap-available", Available()));
  // Returns minimal interval between two subsequent collections.
  double get_min_in_mutator() { return min_in_mutator_; }
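+  // Records a use counter event that fired during GC; it is reported to the
+  // embedder's callback once the GC has finished.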
+  void IncrementDeferredCount(v8::Isolate::UseCounterFeature feature);
+
  MarkCompactCollector* mark_compact_collector() {
    return &mark_compact_collector_;
  }
  // Total RegExp code ever generated
  double total_regexp_code_generated_;
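+  // Use counter events observed while a GC was in progress, one slot per
+  // v8::Isolate::UseCounterFeature; flushed in ReportStatisticsAfterGC().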
+  int deferred_counters_[v8::Isolate::kUseCounterFeatureCount];
+
  GCTracer tracer_;
  // Creates and installs the full-sized number string cache.
void Isolate::CountUsage(v8::Isolate::UseCounterFeature feature) {
-  if (use_counter_callback_) {
-    use_counter_callback_(reinterpret_cast<v8::Isolate*>(this), feature);
+  // The counter callback may cause the embedder to call into V8, which is not
+  // generally possible during GC.
+  if (heap_.gc_state() == Heap::NOT_IN_GC) {
+    if (use_counter_callback_) {
+      HandleScope handle_scope(this);
+      use_counter_callback_(reinterpret_cast<v8::Isolate*>(this), feature);
+    }
+  } else {
+    heap_.IncrementDeferredCount(feature);
  }
}
  CHECK_EQ(1, old_space->CountTotalPages());
}
+static int forced_gc_counter = 0;
+
+void MockUseCounterCallback(v8::Isolate* isolate,
+                            v8::Isolate::UseCounterFeature feature) {
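+  // Calling back into the V8 API here is safe because use counter callbacks
+  // never run while a GC is in progress.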
+  isolate->GetCallingContext();
+  if (feature == v8::Isolate::kForcedGC) {
+    forced_gc_counter++;
+  }
+}
+
+
+TEST(CountForcedGC) {
+  i::FLAG_expose_gc = true;
+  CcTest::InitializeVM();
+  Isolate* isolate = CcTest::i_isolate();
+  v8::HandleScope scope(CcTest::isolate());
+
+  isolate->SetUseCounterCallback(MockUseCounterCallback);
+
+  forced_gc_counter = 0;
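+  // With --expose-gc, the gc() builtin forces a full GC, so the callback
+  // should observe at least one kForcedGC event.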
+  const char* source = "gc();";
+  CompileRun(source);
+  CHECK_GT(forced_gc_counter, 0);
+}
+
TEST(Regress2237) {
  i::FLAG_stress_compaction = false;