Register scratch1,
Register scratch2,
Label* gc_required) {
- Label allocate_new_space, install_map;
- AllocationFlags flags = TAG_OBJECT;
-
- ExternalReference high_promotion_mode = ExternalReference::
- new_space_high_promotion_mode_active_address(isolate());
- mov(scratch1, Operand(high_promotion_mode));
- ldr(scratch1, MemOperand(scratch1, 0));
- cmp(scratch1, Operand::Zero());
- b(eq, &allocate_new_space);
-
Allocate(ConsString::kSize,
result,
scratch1,
scratch2,
gc_required,
- static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
-
- jmp(&install_map);
-
- bind(&allocate_new_space);
- Allocate(ConsString::kSize,
- result,
- scratch1,
- scratch2,
- gc_required,
- flags);
-
- bind(&install_map);
+ TAG_OBJECT);
InitializeNewString(result,
length,
Register scratch1,
Register scratch2,
Label* gc_required) {
- Label allocate_new_space, install_map;
- AllocationFlags flags = TAG_OBJECT;
-
- ExternalReference high_promotion_mode = ExternalReference::
- new_space_high_promotion_mode_active_address(isolate());
- Mov(scratch1, high_promotion_mode);
- Ldr(scratch1, MemOperand(scratch1));
- Cbz(scratch1, &allocate_new_space);
-
Allocate(ConsString::kSize,
result,
scratch1,
scratch2,
gc_required,
- static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
-
- B(&install_map);
-
- Bind(&allocate_new_space);
- Allocate(ConsString::kSize,
- result,
- scratch1,
- scratch2,
- gc_required,
- flags);
-
- Bind(&install_map);
+ TAG_OBJECT);
InitializeNewString(result,
length,
}
-ExternalReference ExternalReference::
- new_space_high_promotion_mode_active_address(Isolate* isolate) {
- return ExternalReference(
- isolate->heap()->NewSpaceHighPromotionModeActiveAddress());
-}
-
-
ExternalReference ExternalReference::handle_scope_level_address(
Isolate* isolate) {
return ExternalReference(HandleScope::current_level_address(isolate));
Isolate* isolate);
static ExternalReference old_data_space_allocation_limit_address(
Isolate* isolate);
- static ExternalReference new_space_high_promotion_mode_active_address(
- Isolate* isolate);
static ExternalReference mod_two_doubles_operation(Isolate* isolate);
static ExternalReference power_double_double_function(Isolate* isolate);
return isolate_->TerminateExecution();
}
- if (CheckAndClearInterrupt(FULL_DEOPT, access)) {
- Deoptimizer::DeoptimizeAll(isolate_);
- }
-
if (CheckAndClearInterrupt(DEOPT_MARKED_ALLOCATION_SITES, access)) {
isolate_->heap()->DeoptMarkedAllocationSites();
}
V(DEBUGCOMMAND, DebugCommand) \
V(TERMINATE_EXECUTION, TerminateExecution) \
V(GC_REQUEST, GC) \
- V(FULL_DEOPT, FullDeopt) \
V(INSTALL_CODE, InstallCode) \
V(API_INTERRUPT, ApiInterrupt) \
V(DEOPT_MARKED_ALLOCATION_SITES, DeoptMarkedAllocationSites)
#ifdef DEBUG
allocation_timeout_(0),
#endif // DEBUG
- new_space_high_promotion_mode_active_(false),
old_generation_allocation_limit_(kMinimumOldGenerationAllocationLimit),
size_of_old_gen_at_last_old_space_gc_(0),
external_allocation_limit_(0),
total_regexp_code_generated_(0),
tracer_(NULL),
high_survival_rate_period_length_(0),
- low_survival_rate_period_length_(0),
- survival_rate_(0),
promoted_objects_size_(0),
promotion_rate_(0),
semi_space_copied_object_size_(0),
semi_space_copied_rate_(0),
- previous_survival_rate_trend_(Heap::STABLE),
- survival_rate_trend_(Heap::STABLE),
max_gc_pause_(0.0),
total_gc_time_ms_(0.0),
max_alive_after_gc_(0),
}
-void Heap::UpdateSurvivalRateTrend(int start_new_space_size) {
+void Heap::UpdateSurvivalStatistics(int start_new_space_size) {
if (start_new_space_size == 0) return;
promotion_rate_ =
} else {
high_survival_rate_period_length_ = 0;
}
-
- if (survival_rate < kYoungSurvivalRateLowThreshold) {
- low_survival_rate_period_length_++;
- } else {
- low_survival_rate_period_length_ = 0;
- }
-
- double survival_rate_diff = survival_rate_ - survival_rate;
-
- if (survival_rate_diff > kYoungSurvivalRateAllowedDeviation) {
- set_survival_rate_trend(DECREASING);
- } else if (survival_rate_diff < -kYoungSurvivalRateAllowedDeviation) {
- set_survival_rate_trend(INCREASING);
- } else {
- set_survival_rate_trend(STABLE);
- }
-
- survival_rate_ = survival_rate;
}
bool Heap::PerformGarbageCollection(
tracer_ = NULL;
}
- UpdateSurvivalRateTrend(start_new_space_size);
-
- if (!new_space_high_promotion_mode_active_ &&
- new_space_.Capacity() == new_space_.MaximumCapacity() &&
- IsStableOrIncreasingSurvivalTrend() &&
- IsHighSurvivalRate()) {
- // Stable high survival rates even though young generation is at
- // maximum capacity indicates that most objects will be promoted.
- // To decrease scavenger pauses and final mark-sweep pauses, we
- // have to limit maximal capacity of the young generation.
- SetNewSpaceHighPromotionModeActive(true);
- if (FLAG_trace_gc) {
- PrintPID("Limited new space size due to high promotion rate: %d MB\n",
- new_space_.InitialCapacity() / MB);
- }
- // The high promotion mode is our indicator to turn on pretenuring. We have
- // to deoptimize all optimized code in global pretenuring mode and all
- // code which should be tenured in local pretenuring mode.
- if (FLAG_pretenuring) {
- if (!FLAG_allocation_site_pretenuring) {
- isolate_->stack_guard()->RequestFullDeopt();
- }
- }
- } else if (new_space_high_promotion_mode_active_ &&
- IsStableOrDecreasingSurvivalTrend() &&
- IsLowSurvivalRate()) {
- // Decreasing low survival rates might indicate that the above high
- // promotion mode is over and we should allow the young generation
- // to grow again.
- SetNewSpaceHighPromotionModeActive(false);
- if (FLAG_trace_gc) {
- PrintPID("Unlimited new space size due to low promotion rate: %d MB\n",
- new_space_.MaximumCapacity() / MB);
- }
- // Trigger deoptimization here to turn off global pretenuring as soon as
- // possible.
- if (FLAG_pretenuring && !FLAG_allocation_site_pretenuring) {
- isolate_->stack_guard()->RequestFullDeopt();
- }
- }
-
- if (new_space_high_promotion_mode_active_ &&
- new_space_.Capacity() > new_space_.InitialCapacity()) {
- new_space_.Shrink();
- }
+ UpdateSurvivalStatistics(start_new_space_size);
isolate_->counters()->objs_since_last_young()->Set(0);
void Heap::CheckNewSpaceExpansionCriteria() {
if (new_space_.Capacity() < new_space_.MaximumCapacity() &&
- survived_since_last_expansion_ > new_space_.Capacity() &&
- !new_space_high_promotion_mode_active_) {
+ survived_since_last_expansion_ > new_space_.Capacity()) {
-    // Grow the size of new space if there is room to grow, enough data
-    // has survived scavenge since the last expansion and we are not in
-    // high promotion mode.
+    // Grow the size of new space if there is room to grow and enough data
+    // has survived scavenge since the last expansion.
PrintF("nodes_died_in_new=%d ", nodes_died_in_new_space_);
PrintF("nodes_copied_in_new=%d ", nodes_copied_in_new_space_);
PrintF("nodes_promoted=%d ", nodes_promoted_);
- PrintF("survival_rate=%.1f%% ", heap_->survival_rate_);
PrintF("promotion_rate=%.1f%% ", heap_->promotion_rate_);
PrintF("semi_space_copy_rate=%.1f%% ", heap_->semi_space_copied_rate_);
inline int64_t AdjustAmountOfExternalAllocatedMemory(
int64_t change_in_bytes);
- // This is only needed for testing high promotion mode.
- void SetNewSpaceHighPromotionModeActive(bool mode) {
- new_space_high_promotion_mode_active_ = mode;
- }
-
-  // Returns the allocation mode (pre-tenuring) based on observed promotion
-  // rates of previous collections.
+  // Returns the allocation mode (pre-tenuring) based on the pretenuring flag.
inline PretenureFlag GetPretenureMode() {
- return FLAG_pretenuring && new_space_high_promotion_mode_active_
- ? TENURED : NOT_TENURED;
- }
-
- inline Address* NewSpaceHighPromotionModeActiveAddress() {
- return reinterpret_cast<Address*>(&new_space_high_promotion_mode_active_);
+ return FLAG_pretenuring ? TENURED : NOT_TENURED;
}
inline intptr_t PromotedTotalSize() {
int allocation_timeout_;
#endif // DEBUG
- // Indicates that the new space should be kept small due to high promotion
- // rates caused by the mutator allocating a lot of long-lived objects.
- // TODO(hpayer): change to bool if no longer accessed from generated code
- intptr_t new_space_high_promotion_mode_active_;
-
// Limit that triggers a global GC on the next (normally caused) GC. This
// is checked when we have already decided to do a GC to help determine
// which collector to invoke, before expanding a paged space in the old
void AddAllocationSiteToScratchpad(AllocationSite* site,
ScratchpadSlotMode mode);
- void UpdateSurvivalRateTrend(int start_new_space_size);
-
- enum SurvivalRateTrend { INCREASING, STABLE, DECREASING, FLUCTUATING };
+ void UpdateSurvivalStatistics(int start_new_space_size);
static const int kYoungSurvivalRateHighThreshold = 90;
- static const int kYoungSurvivalRateLowThreshold = 10;
static const int kYoungSurvivalRateAllowedDeviation = 15;
static const int kOldSurvivalRateLowThreshold = 10;
int high_survival_rate_period_length_;
- int low_survival_rate_period_length_;
- double survival_rate_;
intptr_t promoted_objects_size_;
double promotion_rate_;
intptr_t semi_space_copied_object_size_;
double semi_space_copied_rate_;
- SurvivalRateTrend previous_survival_rate_trend_;
- SurvivalRateTrend survival_rate_trend_;
-
- void set_survival_rate_trend(SurvivalRateTrend survival_rate_trend) {
- ASSERT(survival_rate_trend != FLUCTUATING);
- previous_survival_rate_trend_ = survival_rate_trend_;
- survival_rate_trend_ = survival_rate_trend;
- }
-
- SurvivalRateTrend survival_rate_trend() {
- if (survival_rate_trend_ == STABLE) {
- return STABLE;
- } else if (previous_survival_rate_trend_ == STABLE) {
- return survival_rate_trend_;
- } else if (survival_rate_trend_ != previous_survival_rate_trend_) {
- return FLUCTUATING;
- } else {
- return survival_rate_trend_;
- }
- }
-
- bool IsStableOrIncreasingSurvivalTrend() {
- switch (survival_rate_trend()) {
- case STABLE:
- case INCREASING:
- return true;
- default:
- return false;
- }
- }
-
- bool IsStableOrDecreasingSurvivalTrend() {
- switch (survival_rate_trend()) {
- case STABLE:
- case DECREASING:
- return true;
- default:
- return false;
- }
- }
-
- bool IsIncreasingSurvivalTrend() {
- return survival_rate_trend() == INCREASING;
- }
+ // TODO(hpayer): Allocation site pretenuring may make this method obsolete.
+ // Re-visit incremental marking heuristics.
bool IsHighSurvivalRate() {
return high_survival_rate_period_length_ > 0;
}
- bool IsLowSurvivalRate() {
- return low_survival_rate_period_length_ > 0;
- }
-
void SelectScavengingVisitorsTable();
void StartIdleRound() {
Register scratch1,
Register scratch2,
Label* gc_required) {
- Label allocate_new_space, install_map;
- AllocationFlags flags = TAG_OBJECT;
-
- ExternalReference high_promotion_mode = ExternalReference::
- new_space_high_promotion_mode_active_address(isolate());
-
- test(Operand::StaticVariable(high_promotion_mode), Immediate(1));
- j(zero, &allocate_new_space);
-
- Allocate(ConsString::kSize,
- result,
- scratch1,
- scratch2,
- gc_required,
- static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
- jmp(&install_map);
-
- bind(&allocate_new_space);
Allocate(ConsString::kSize,
result,
scratch1,
scratch2,
gc_required,
- flags);
+ TAG_OBJECT);
- bind(&install_map);
// Set the map. The other fields are left uninitialized.
mov(FieldOperand(result, HeapObject::kMapOffset),
Immediate(isolate()->factory()->cons_ascii_string_map()));
Register scratch1,
Register scratch2,
Label* gc_required) {
- Label allocate_new_space, install_map;
- AllocationFlags flags = TAG_OBJECT;
-
- ExternalReference high_promotion_mode = ExternalReference::
- new_space_high_promotion_mode_active_address(isolate());
- li(scratch1, Operand(high_promotion_mode));
- lw(scratch1, MemOperand(scratch1, 0));
- Branch(&allocate_new_space, eq, scratch1, Operand(zero_reg));
-
- Allocate(ConsString::kSize,
- result,
- scratch1,
- scratch2,
- gc_required,
- static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
-
- jmp(&install_map);
-
- bind(&allocate_new_space);
Allocate(ConsString::kSize,
result,
scratch1,
scratch2,
gc_required,
- flags);
-
- bind(&install_map);
+ TAG_OBJECT);
InitializeNewString(result,
length,
UNCLASSIFIED,
58,
"Heap::OldDataSpaceAllocationLimitAddress");
- Add(ExternalReference::new_space_high_promotion_mode_active_address(isolate).
- address(),
- UNCLASSIFIED,
- 59,
- "Heap::NewSpaceAllocationLimitAddress");
Add(ExternalReference::allocation_sites_list_address(isolate).address(),
UNCLASSIFIED,
- 60,
+ 59,
"Heap::allocation_sites_list_address()");
Add(ExternalReference::address_of_uint32_bias().address(),
UNCLASSIFIED,
- 61,
+ 60,
"uint32_bias");
Add(ExternalReference::get_mark_code_as_executed_function(isolate).address(),
UNCLASSIFIED,
- 62,
+ 61,
"Code::MarkCodeAsExecuted");
Add(ExternalReference::is_profiling_address(isolate).address(),
UNCLASSIFIED,
- 63,
+ 62,
"CpuProfiler::is_profiling");
Add(ExternalReference::scheduled_exception_address(isolate).address(),
UNCLASSIFIED,
- 64,
+ 63,
"Isolate::scheduled_exception");
Add(ExternalReference::invoke_function_callback(isolate).address(),
UNCLASSIFIED,
- 65,
+ 64,
"InvokeFunctionCallback");
Add(ExternalReference::invoke_accessor_getter_callback(isolate).address(),
UNCLASSIFIED,
- 66,
+ 65,
"InvokeAccessorGetterCallback");
// Debug addresses
Add(ExternalReference::debug_after_break_target_address(isolate).address(),
UNCLASSIFIED,
- 67,
+ 66,
"Debug::after_break_target_address()");
Add(ExternalReference::debug_restarter_frame_function_pointer_address(
isolate).address(),
UNCLASSIFIED,
- 68,
+ 67,
"Debug::restarter_frame_function_pointer_address()");
// Add a small set of deopt entry addresses to encoder without generating the
Register scratch1,
Register scratch2,
Label* gc_required) {
- Label allocate_new_space, install_map;
- AllocationFlags flags = TAG_OBJECT;
-
- ExternalReference high_promotion_mode = ExternalReference::
- new_space_high_promotion_mode_active_address(isolate());
-
- Load(scratch1, high_promotion_mode);
- testb(scratch1, Immediate(1));
- j(zero, &allocate_new_space);
Allocate(ConsString::kSize,
result,
scratch1,
scratch2,
gc_required,
- static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
-
- jmp(&install_map);
-
- bind(&allocate_new_space);
- Allocate(ConsString::kSize,
- result,
- scratch1,
- scratch2,
- gc_required,
- flags);
-
- bind(&install_map);
+ TAG_OBJECT);
// Set the map. The other fields are left uninitialized.
LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
Register scratch1,
Register scratch2,
Label* gc_required) {
- Label allocate_new_space, install_map;
- AllocationFlags flags = TAG_OBJECT;
-
- ExternalReference high_promotion_mode = ExternalReference::
- new_space_high_promotion_mode_active_address(isolate());
-
- test(Operand::StaticVariable(high_promotion_mode), Immediate(1));
- j(zero, &allocate_new_space);
-
- Allocate(ConsString::kSize,
- result,
- scratch1,
- scratch2,
- gc_required,
- static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
- jmp(&install_map);
-
- bind(&allocate_new_space);
Allocate(ConsString::kSize,
result,
scratch1,
scratch2,
gc_required,
- flags);
+ TAG_OBJECT);
- bind(&install_map);
// Set the map. The other fields are left uninitialized.
mov(FieldOperand(result, HeapObject::kMapOffset),
Immediate(isolate()->factory()->cons_ascii_string_map()));
if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
v8::HandleScope scope(CcTest::isolate());
- CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
v8::Local<v8::Value> res = CompileRun(
"function DataObject() {"
if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
v8::HandleScope scope(CcTest::isolate());
- CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
v8::Local<v8::Value> res = CompileRun(
"var number_elements = 30000;"
if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
v8::HandleScope scope(CcTest::isolate());
- CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
AlwaysAllocateScope always_allocate(CcTest::i_isolate());
v8::Local<v8::Value> res = CompileRun(
CcTest::heap()->DisableInlineAllocation();
CompileRun("run()");
- // Run test with inline allocation disabled and pretenuring.
- CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
- CompileRun("run()");
-
// Run test with inline allocation re-enabled.
CcTest::heap()->EnableInlineAllocation();
CompileRun("run()");