#include "bootstrapper.h"
#include "codegen.h"
#include "debug.h"
+#include "deoptimizer.h"
#include "isolate-inl.h"
#include "runtime-profiler.h"
#include "simulator.h"
}
+bool StackGuard::IsFullDeopt() {
+ ExecutionAccess access(isolate_);
+ return (thread_local_.interrupt_flags_ & FULL_DEOPT) != 0;
+}
+
+
+void StackGuard::FullDeopt() {
+ ExecutionAccess access(isolate_);
+ thread_local_.interrupt_flags_ |= FULL_DEOPT;
+ set_interrupt_limits(access);
+}
+
+
#ifdef ENABLE_DEBUGGER_SUPPORT
bool StackGuard::IsDebugBreak() {
ExecutionAccess access(isolate_);
stack_guard->Continue(GC_REQUEST);
}
-
isolate->counters()->stack_interrupts()->Increment();
isolate->counters()->runtime_profiler_ticks()->Increment();
isolate->runtime_profiler()->OptimizeNow();
stack_guard->Continue(INTERRUPT);
return isolate->StackOverflow();
}
+ if (stack_guard->IsFullDeopt()) {
+ stack_guard->Continue(FULL_DEOPT);
+ Deoptimizer::DeoptimizeAll(isolate);
+ }
return isolate->heap()->undefined_value();
}
PrintPID("Limited new space size due to high promotion rate: %d MB\n",
new_space_.InitialCapacity() / MB);
}
+ // Support for global pre-tenuring uses the high promotion mode as a
+ // heuristic indicator of whether to pretenure or not, we trigger
+ // deoptimization here to take advantage of pre-tenuring as soon as
+ // possible.
+ if (FLAG_pretenure_literals) {
+ isolate_->stack_guard()->FullDeopt();
+ }
} else if (new_space_high_promotion_mode_active_ &&
IsStableOrDecreasingSurvivalTrend() &&
IsLowSurvivalRate()) {
MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length,
PretenureFlag pretenure);
+ // Predicate that governs global pre-tenuring decisions based on observed
+ // promotion rates of previous collections.
+ inline bool ShouldGloballyPretenure() {
+ return new_space_high_promotion_mode_active_;
+ }
+
// Total promoted footprint: bytes of objects living in promoted (old) space
// plus externally allocated memory attributed to promoted objects.
inline intptr_t PromotedTotalSize() {
  intptr_t promoted_on_heap = PromotedSpaceSizeOfObjects();
  intptr_t promoted_external = PromotedExternalMemorySize();
  return promoted_on_heap + promoted_external;
}