From 6ed0102b1dd073fc5e128b98a60d8afb5394fd1e Mon Sep 17 00:00:00 2001
From: "hpayer@chromium.org"
Date: Mon, 26 May 2014 12:58:55 +0000
Subject: [PATCH] Remove high promotion mode.

BUG=
R=mstarzinger@chromium.org

Review URL: https://codereview.chromium.org/296413004

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@21493 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
---
 src/arm/macro-assembler-arm.cc     | 24 +-----------
 src/arm64/macro-assembler-arm64.cc | 23 +----------
 src/assembler.cc                   |  7 ----
 src/assembler.h                    |  2 -
 src/execution.cc                   |  4 --
 src/execution.h                    |  1 -
 src/heap.cc                        | 75 ++------------------------------
 src/heap.h                         | 74 ++-----------------------------
 src/ia32/macro-assembler-ia32.cc   | 21 +---------
 src/mips/macro-assembler-mips.cc   | 23 +----------
 src/serialize.cc                   | 23 +++++-------
 src/x64/macro-assembler-x64.cc     | 23 +----------
 src/x87/macro-assembler-x87.cc     | 21 +---------
 test/cctest/test-heap.cc           |  7 ----
 14 files changed, 22 insertions(+), 306 deletions(-)

diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc
index c57aa26..7b3028f 100644
--- a/src/arm/macro-assembler-arm.cc
+++ b/src/arm/macro-assembler-arm.cc
@@ -1980,34 +1980,12 @@ void MacroAssembler::AllocateAsciiConsString(Register result,
                                              Register scratch1,
                                              Register scratch2,
                                              Label* gc_required) {
-  Label allocate_new_space, install_map;
-  AllocationFlags flags = TAG_OBJECT;
-
-  ExternalReference high_promotion_mode = ExternalReference::
-      new_space_high_promotion_mode_active_address(isolate());
-  mov(scratch1, Operand(high_promotion_mode));
-  ldr(scratch1, MemOperand(scratch1, 0));
-  cmp(scratch1, Operand::Zero());
-  b(eq, &allocate_new_space);
-
   Allocate(ConsString::kSize,
            result,
            scratch1,
            scratch2,
            gc_required,
-           static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
-
-  jmp(&install_map);
-
-  bind(&allocate_new_space);
-  Allocate(ConsString::kSize,
-           result,
-           scratch1,
-           scratch2,
-           gc_required,
-           flags);
-
-  bind(&install_map);
+           TAG_OBJECT);
 
   InitializeNewString(result,
                       length,
diff --git a/src/arm64/macro-assembler-arm64.cc b/src/arm64/macro-assembler-arm64.cc
index f485de4..75fd2b6 100644
--- a/src/arm64/macro-assembler-arm64.cc
+++ b/src/arm64/macro-assembler-arm64.cc
@@ -3539,33 +3539,12 @@ void MacroAssembler::AllocateAsciiConsString(Register result,
                                              Register scratch1,
                                              Register scratch2,
                                              Label* gc_required) {
-  Label allocate_new_space, install_map;
-  AllocationFlags flags = TAG_OBJECT;
-
-  ExternalReference high_promotion_mode = ExternalReference::
-      new_space_high_promotion_mode_active_address(isolate());
-  Mov(scratch1, high_promotion_mode);
-  Ldr(scratch1, MemOperand(scratch1));
-  Cbz(scratch1, &allocate_new_space);
-
   Allocate(ConsString::kSize,
            result,
            scratch1,
            scratch2,
            gc_required,
-           static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
-
-  B(&install_map);
-
-  Bind(&allocate_new_space);
-  Allocate(ConsString::kSize,
-           result,
-           scratch1,
-           scratch2,
-           gc_required,
-           flags);
-
-  Bind(&install_map);
+           TAG_OBJECT);
 
   InitializeNewString(result,
                       length,
diff --git a/src/assembler.cc b/src/assembler.cc
index efb3941..f4e609e 100644
--- a/src/assembler.cc
+++ b/src/assembler.cc
@@ -1204,13 +1204,6 @@ ExternalReference ExternalReference::old_data_space_allocation_limit_address(
 }
 
 
-ExternalReference ExternalReference::
-    new_space_high_promotion_mode_active_address(Isolate* isolate) {
-  return ExternalReference(
-      isolate->heap()->NewSpaceHighPromotionModeActiveAddress());
-}
-
-
 ExternalReference ExternalReference::handle_scope_level_address(
     Isolate* isolate) {
   return ExternalReference(HandleScope::current_level_address(isolate));
diff --git a/src/assembler.h b/src/assembler.h
index 2d9bde6..49e934e 100644
--- a/src/assembler.h
+++ b/src/assembler.h
@@ -874,8 +874,6 @@ class ExternalReference BASE_EMBEDDED {
       Isolate* isolate);
   static ExternalReference old_data_space_allocation_limit_address(
       Isolate* isolate);
-  static ExternalReference new_space_high_promotion_mode_active_address(
-      Isolate* isolate);
 
   static ExternalReference mod_two_doubles_operation(Isolate* isolate);
   static ExternalReference power_double_double_function(Isolate* isolate);
diff --git a/src/execution.cc b/src/execution.cc
index f16ee05..fc51980 100644
--- a/src/execution.cc
+++ b/src/execution.cc
@@ -729,10 +729,6 @@ Object* StackGuard::HandleInterrupts() {
     return isolate_->TerminateExecution();
   }
 
-  if (CheckAndClearInterrupt(FULL_DEOPT, access)) {
-    Deoptimizer::DeoptimizeAll(isolate_);
-  }
-
   if (CheckAndClearInterrupt(DEOPT_MARKED_ALLOCATION_SITES, access)) {
     isolate_->heap()->DeoptMarkedAllocationSites();
   }
diff --git a/src/execution.h b/src/execution.h
index 513ec73..9f27637 100644
--- a/src/execution.h
+++ b/src/execution.h
@@ -155,7 +155,6 @@ class StackGuard V8_FINAL {
   V(DEBUGCOMMAND, DebugCommand)                                 \
   V(TERMINATE_EXECUTION, TerminateExecution)                    \
   V(GC_REQUEST, GC)                                             \
-  V(FULL_DEOPT, FullDeopt)                                      \
   V(INSTALL_CODE, InstallCode)                                  \
   V(API_INTERRUPT, ApiInterrupt)                                \
   V(DEOPT_MARKED_ALLOCATION_SITES, DeoptMarkedAllocationSites)
diff --git a/src/heap.cc b/src/heap.cc
index 24e1d53..741d33e 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -84,7 +84,6 @@ Heap::Heap()
 #ifdef DEBUG
       allocation_timeout_(0),
 #endif  // DEBUG
-      new_space_high_promotion_mode_active_(false),
       old_generation_allocation_limit_(kMinimumOldGenerationAllocationLimit),
       size_of_old_gen_at_last_old_space_gc_(0),
       external_allocation_limit_(0),
@@ -98,14 +97,10 @@ Heap::Heap()
       total_regexp_code_generated_(0),
       tracer_(NULL),
       high_survival_rate_period_length_(0),
-      low_survival_rate_period_length_(0),
-      survival_rate_(0),
       promoted_objects_size_(0),
       promotion_rate_(0),
       semi_space_copied_object_size_(0),
       semi_space_copied_rate_(0),
-      previous_survival_rate_trend_(Heap::STABLE),
-      survival_rate_trend_(Heap::STABLE),
       max_gc_pause_(0.0),
       total_gc_time_ms_(0.0),
       max_alive_after_gc_(0),
@@ -1013,7 +1008,7 @@ void Heap::ClearNormalizedMapCaches() {
 }
 
 
-void Heap::UpdateSurvivalRateTrend(int start_new_space_size) {
+void Heap::UpdateSurvivalStatistics(int start_new_space_size) {
   if (start_new_space_size == 0) return;
 
   promotion_rate_ =
@@ -1031,24 +1026,6 @@ void Heap::UpdateSurvivalRateTrend(int start_new_space_size) {
   if (survival_rate > kYoungSurvivalRateHighThreshold) {
     high_survival_rate_period_length_++;
   } else {
     high_survival_rate_period_length_ = 0;
   }
-
-  if (survival_rate < kYoungSurvivalRateLowThreshold) {
-    low_survival_rate_period_length_++;
-  } else {
-    low_survival_rate_period_length_ = 0;
-  }
-
-  double survival_rate_diff = survival_rate_ - survival_rate;
-
-  if (survival_rate_diff > kYoungSurvivalRateAllowedDeviation) {
-    set_survival_rate_trend(DECREASING);
-  } else if (survival_rate_diff < -kYoungSurvivalRateAllowedDeviation) {
-    set_survival_rate_trend(INCREASING);
-  } else {
-    set_survival_rate_trend(STABLE);
-  }
-
-  survival_rate_ = survival_rate;
 }
 
 
 bool Heap::PerformGarbageCollection(
@@ -1108,51 +1085,7 @@ bool Heap::PerformGarbageCollection(
     tracer_ = NULL;
   }
 
-  UpdateSurvivalRateTrend(start_new_space_size);
-
-  if (!new_space_high_promotion_mode_active_ &&
-      new_space_.Capacity() == new_space_.MaximumCapacity() &&
-      IsStableOrIncreasingSurvivalTrend() &&
-      IsHighSurvivalRate()) {
-    // Stable high survival rates even though young generation is at
-    // maximum capacity indicates that most objects will be promoted.
-    // To decrease scavenger pauses and final mark-sweep pauses, we
-    // have to limit maximal capacity of the young generation.
-    SetNewSpaceHighPromotionModeActive(true);
-    if (FLAG_trace_gc) {
-      PrintPID("Limited new space size due to high promotion rate: %d MB\n",
-               new_space_.InitialCapacity() / MB);
-    }
-    // The high promotion mode is our indicator to turn on pretenuring. We have
-    // to deoptimize all optimized code in global pretenuring mode and all
-    // code which should be tenured in local pretenuring mode.
-    if (FLAG_pretenuring) {
-      if (!FLAG_allocation_site_pretenuring) {
-        isolate_->stack_guard()->RequestFullDeopt();
-      }
-    }
-  } else if (new_space_high_promotion_mode_active_ &&
-             IsStableOrDecreasingSurvivalTrend() &&
-             IsLowSurvivalRate()) {
-    // Decreasing low survival rates might indicate that the above high
-    // promotion mode is over and we should allow the young generation
-    // to grow again.
-    SetNewSpaceHighPromotionModeActive(false);
-    if (FLAG_trace_gc) {
-      PrintPID("Unlimited new space size due to low promotion rate: %d MB\n",
-               new_space_.MaximumCapacity() / MB);
-    }
-    // Trigger deoptimization here to turn off global pretenuring as soon as
-    // possible.
-    if (FLAG_pretenuring && !FLAG_allocation_site_pretenuring) {
-      isolate_->stack_guard()->RequestFullDeopt();
-    }
-  }
-
-  if (new_space_high_promotion_mode_active_ &&
-      new_space_.Capacity() > new_space_.InitialCapacity()) {
-    new_space_.Shrink();
-  }
+  UpdateSurvivalStatistics(start_new_space_size);
 
   isolate_->counters()->objs_since_last_young()->Set(0);
 
@@ -1353,8 +1286,7 @@ static void VerifyNonPointerSpacePointers(Heap* heap) {
 
 void Heap::CheckNewSpaceExpansionCriteria() {
   if (new_space_.Capacity() < new_space_.MaximumCapacity() &&
-      survived_since_last_expansion_ > new_space_.Capacity() &&
-      !new_space_high_promotion_mode_active_) {
+      survived_since_last_expansion_ > new_space_.Capacity()) {
     // Grow the size of new space if there is room to grow, enough data
     // has survived scavenge since the last expansion and we are not in
     // high promotion mode.
@@ -6203,7 +6135,6 @@ GCTracer::~GCTracer() {
     PrintF("nodes_died_in_new=%d ", nodes_died_in_new_space_);
     PrintF("nodes_copied_in_new=%d ", nodes_copied_in_new_space_);
     PrintF("nodes_promoted=%d ", nodes_promoted_);
-    PrintF("survival_rate=%.1f%% ", heap_->survival_rate_);
     PrintF("promotion_rate=%.1f%% ", heap_->promotion_rate_);
     PrintF("semi_space_copy_rate=%.1f%% ", heap_->semi_space_copied_rate_);
diff --git a/src/heap.h b/src/heap.h
index e46fdb4..0101fd6 100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -1034,20 +1034,10 @@ class Heap {
   inline int64_t AdjustAmountOfExternalAllocatedMemory(
       int64_t change_in_bytes);
 
-  // This is only needed for testing high promotion mode.
-  void SetNewSpaceHighPromotionModeActive(bool mode) {
-    new_space_high_promotion_mode_active_ = mode;
-  }
-
   // Returns the allocation mode (pre-tenuring) based on observed promotion
   // rates of previous collections.
   inline PretenureFlag GetPretenureMode() {
-    return FLAG_pretenuring && new_space_high_promotion_mode_active_
-        ? TENURED : NOT_TENURED;
-  }
-
-  inline Address* NewSpaceHighPromotionModeActiveAddress() {
-    return reinterpret_cast<Address*>(&new_space_high_promotion_mode_active_);
+    return FLAG_pretenuring ? TENURED : NOT_TENURED;
   }
 
   inline intptr_t PromotedTotalSize() {
@@ -1581,11 +1571,6 @@ class Heap {
   int allocation_timeout_;
 #endif  // DEBUG
 
-  // Indicates that the new space should be kept small due to high promotion
-  // rates caused by the mutator allocating a lot of long-lived objects.
-  // TODO(hpayer): change to bool if no longer accessed from generated code
-  intptr_t new_space_high_promotion_mode_active_;
-
   // Limit that triggers a global GC on the next (normally caused) GC.  This
   // is checked when we have already decided to do a GC to help determine
   // which collector to invoke, before expanding a paged space in the old
@@ -2026,76 +2011,25 @@ class Heap {
   void AddAllocationSiteToScratchpad(AllocationSite* site,
                                      ScratchpadSlotMode mode);
 
-  void UpdateSurvivalRateTrend(int start_new_space_size);
-
-  enum SurvivalRateTrend { INCREASING, STABLE, DECREASING, FLUCTUATING };
+  void UpdateSurvivalStatistics(int start_new_space_size);
 
   static const int kYoungSurvivalRateHighThreshold = 90;
-  static const int kYoungSurvivalRateLowThreshold = 10;
   static const int kYoungSurvivalRateAllowedDeviation = 15;
 
   static const int kOldSurvivalRateLowThreshold = 10;
 
   int high_survival_rate_period_length_;
-  int low_survival_rate_period_length_;
-  double survival_rate_;
   intptr_t promoted_objects_size_;
   double promotion_rate_;
   intptr_t semi_space_copied_object_size_;
   double semi_space_copied_rate_;
-  SurvivalRateTrend previous_survival_rate_trend_;
-  SurvivalRateTrend survival_rate_trend_;
-
-  void set_survival_rate_trend(SurvivalRateTrend survival_rate_trend) {
-    ASSERT(survival_rate_trend != FLUCTUATING);
-    previous_survival_rate_trend_ = survival_rate_trend_;
-    survival_rate_trend_ = survival_rate_trend;
-  }
-
-  SurvivalRateTrend survival_rate_trend() {
-    if (survival_rate_trend_ == STABLE) {
-      return STABLE;
-    } else if (previous_survival_rate_trend_ == STABLE) {
-      return survival_rate_trend_;
-    } else if (survival_rate_trend_ != previous_survival_rate_trend_) {
-      return FLUCTUATING;
-    } else {
-      return survival_rate_trend_;
-    }
-  }
-
-  bool IsStableOrIncreasingSurvivalTrend() {
-    switch (survival_rate_trend()) {
-      case STABLE:
-      case INCREASING:
-        return true;
-      default:
-        return false;
-    }
-  }
-
-  bool IsStableOrDecreasingSurvivalTrend() {
-    switch (survival_rate_trend()) {
-      case STABLE:
-      case DECREASING:
-        return true;
-      default:
-        return false;
-    }
-  }
-
-  bool IsIncreasingSurvivalTrend() {
-    return survival_rate_trend() == INCREASING;
-  }
 
+  // TODO(hpayer): Allocation site pretenuring may make this method obsolete.
+  // Re-visit incremental marking heuristics.
   bool IsHighSurvivalRate() {
     return high_survival_rate_period_length_ > 0;
   }
 
-  bool IsLowSurvivalRate() {
-    return low_survival_rate_period_length_ > 0;
-  }
-
   void SelectScavengingVisitorsTable();
 
   void StartIdleRound() {
diff --git a/src/ia32/macro-assembler-ia32.cc b/src/ia32/macro-assembler-ia32.cc
index a8a96f0..a2d2280 100644
--- a/src/ia32/macro-assembler-ia32.cc
+++ b/src/ia32/macro-assembler-ia32.cc
@@ -1796,32 +1796,13 @@ void MacroAssembler::AllocateAsciiConsString(Register result,
                                              Register scratch1,
                                              Register scratch2,
                                              Label* gc_required) {
-  Label allocate_new_space, install_map;
-  AllocationFlags flags = TAG_OBJECT;
-
-  ExternalReference high_promotion_mode = ExternalReference::
-      new_space_high_promotion_mode_active_address(isolate());
-
-  test(Operand::StaticVariable(high_promotion_mode), Immediate(1));
-  j(zero, &allocate_new_space);
-
-  Allocate(ConsString::kSize,
-           result,
-           scratch1,
-           scratch2,
-           gc_required,
-           static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
-  jmp(&install_map);
-
-  bind(&allocate_new_space);
   Allocate(ConsString::kSize,
            result,
            scratch1,
            scratch2,
            gc_required,
-           flags);
+           TAG_OBJECT);
 
-  bind(&install_map);
   // Set the map. The other fields are left uninitialized.
   mov(FieldOperand(result, HeapObject::kMapOffset),
       Immediate(isolate()->factory()->cons_ascii_string_map()));
diff --git a/src/mips/macro-assembler-mips.cc b/src/mips/macro-assembler-mips.cc
index 98b1da7..13e67a3 100644
--- a/src/mips/macro-assembler-mips.cc
+++ b/src/mips/macro-assembler-mips.cc
@@ -3117,33 +3117,12 @@ void MacroAssembler::AllocateAsciiConsString(Register result,
                                              Register scratch1,
                                              Register scratch2,
                                              Label* gc_required) {
-  Label allocate_new_space, install_map;
-  AllocationFlags flags = TAG_OBJECT;
-
-  ExternalReference high_promotion_mode = ExternalReference::
-      new_space_high_promotion_mode_active_address(isolate());
-  li(scratch1, Operand(high_promotion_mode));
-  lw(scratch1, MemOperand(scratch1, 0));
-  Branch(&allocate_new_space, eq, scratch1, Operand(zero_reg));
-
-  Allocate(ConsString::kSize,
-           result,
-           scratch1,
-           scratch2,
-           gc_required,
-           static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
-
-  jmp(&install_map);
-
-  bind(&allocate_new_space);
   Allocate(ConsString::kSize,
            result,
            scratch1,
            scratch2,
            gc_required,
-           flags);
-
-  bind(&install_map);
+           TAG_OBJECT);
 
   InitializeNewString(result,
                       length,
diff --git a/src/serialize.cc b/src/serialize.cc
index ab0f3ca..e179880 100644
--- a/src/serialize.cc
+++ b/src/serialize.cc
@@ -487,54 +487,49 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) {
       UNCLASSIFIED,
       58,
       "Heap::OldDataSpaceAllocationLimitAddress");
-  Add(ExternalReference::new_space_high_promotion_mode_active_address(isolate).
-          address(),
-      UNCLASSIFIED,
-      59,
-      "Heap::NewSpaceAllocationLimitAddress");
   Add(ExternalReference::allocation_sites_list_address(isolate).address(),
       UNCLASSIFIED,
-      60,
+      59,
       "Heap::allocation_sites_list_address()");
   Add(ExternalReference::address_of_uint32_bias().address(),
       UNCLASSIFIED,
-      61,
+      60,
       "uint32_bias");
   Add(ExternalReference::get_mark_code_as_executed_function(isolate).address(),
       UNCLASSIFIED,
-      62,
+      61,
       "Code::MarkCodeAsExecuted");
 
   Add(ExternalReference::is_profiling_address(isolate).address(),
       UNCLASSIFIED,
-      63,
+      62,
       "CpuProfiler::is_profiling");
 
   Add(ExternalReference::scheduled_exception_address(isolate).address(),
       UNCLASSIFIED,
-      64,
+      63,
       "Isolate::scheduled_exception");
 
   Add(ExternalReference::invoke_function_callback(isolate).address(),
       UNCLASSIFIED,
-      65,
+      64,
       "InvokeFunctionCallback");
 
   Add(ExternalReference::invoke_accessor_getter_callback(isolate).address(),
       UNCLASSIFIED,
-      66,
+      65,
       "InvokeAccessorGetterCallback");
 
   // Debug addresses
   Add(ExternalReference::debug_after_break_target_address(isolate).address(),
       UNCLASSIFIED,
-      67,
+      66,
       "Debug::after_break_target_address()");
   Add(ExternalReference::debug_restarter_frame_function_pointer_address(
           isolate).address(),
       UNCLASSIFIED,
-      68,
+      67,
       "Debug::restarter_frame_function_pointer_address()");
 
   // Add a small set of deopt entry addresses to encoder without generating the
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index 8bd08d3..6873eff 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -4560,33 +4560,12 @@ void MacroAssembler::AllocateAsciiConsString(Register result,
                                              Register scratch1,
                                              Register scratch2,
                                              Label* gc_required) {
-  Label allocate_new_space, install_map;
-  AllocationFlags flags = TAG_OBJECT;
-
-  ExternalReference high_promotion_mode = ExternalReference::
-      new_space_high_promotion_mode_active_address(isolate());
-
-  Load(scratch1, high_promotion_mode);
-  testb(scratch1, Immediate(1));
-  j(zero, &allocate_new_space);
   Allocate(ConsString::kSize,
            result,
            scratch1,
            scratch2,
            gc_required,
-           static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
-
-  jmp(&install_map);
-
-  bind(&allocate_new_space);
-  Allocate(ConsString::kSize,
-           result,
-           scratch1,
-           scratch2,
-           gc_required,
-           flags);
-
-  bind(&install_map);
+           TAG_OBJECT);
 
   // Set the map. The other fields are left uninitialized.
   LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
diff --git a/src/x87/macro-assembler-x87.cc b/src/x87/macro-assembler-x87.cc
index 04a9119..57fb53a 100644
--- a/src/x87/macro-assembler-x87.cc
+++ b/src/x87/macro-assembler-x87.cc
@@ -1689,32 +1689,13 @@ void MacroAssembler::AllocateAsciiConsString(Register result,
                                              Register scratch1,
                                              Register scratch2,
                                              Label* gc_required) {
-  Label allocate_new_space, install_map;
-  AllocationFlags flags = TAG_OBJECT;
-
-  ExternalReference high_promotion_mode = ExternalReference::
-      new_space_high_promotion_mode_active_address(isolate());
-
-  test(Operand::StaticVariable(high_promotion_mode), Immediate(1));
-  j(zero, &allocate_new_space);
-
-  Allocate(ConsString::kSize,
-           result,
-           scratch1,
-           scratch2,
-           gc_required,
-           static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
-  jmp(&install_map);
-
-  bind(&allocate_new_space);
   Allocate(ConsString::kSize,
            result,
            scratch1,
            scratch2,
            gc_required,
-           flags);
+           TAG_OBJECT);
 
-  bind(&install_map);
   // Set the map. The other fields are left uninitialized.
   mov(FieldOperand(result, HeapObject::kMapOffset),
       Immediate(isolate()->factory()->cons_ascii_string_map()));
diff --git a/test/cctest/test-heap.cc b/test/cctest/test-heap.cc
index f797ecc..7946d70 100644
--- a/test/cctest/test-heap.cc
+++ b/test/cctest/test-heap.cc
@@ -2200,7 +2200,6 @@ TEST(OptimizedPretenuringAllocationFolding) {
   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   v8::HandleScope scope(CcTest::isolate());
-  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
 
   v8::Local<v8::Value> res = CompileRun(
       "function DataObject() {"
@@ -2243,7 +2242,6 @@ TEST(OptimizedPretenuringAllocationFoldingBlocks) {
   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   v8::HandleScope scope(CcTest::isolate());
-  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
 
   v8::Local<v8::Value> res = CompileRun(
       "var number_elements = 30000;"
@@ -2590,7 +2588,6 @@ TEST(OptimizedPretenuringCallNew) {
   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   v8::HandleScope scope(CcTest::isolate());
-  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
 
   AlwaysAllocateScope always_allocate(CcTest::i_isolate());
   v8::Local<v8::Value> res = CompileRun(
@@ -3723,10 +3720,6 @@ TEST(DisableInlineAllocation) {
   CcTest::heap()->DisableInlineAllocation();
   CompileRun("run()");
 
-  // Run test with inline allocation disabled and pretenuring.
-  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
-  CompileRun("run()");
-
   // Run test with inline allocation re-enabled.
   CcTest::heap()->EnableInlineAllocation();
   CompileRun("run()");
-- 
2.7.4
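
For readers skimming the diff, the behavioural core of the change is the simplification of Heap::GetPretenureMode() in src/heap.h: with high promotion mode gone, FLAG_pretenuring alone decides the allocation mode, instead of waiting for the survival-rate heuristic to trip. The stand-alone C++ sketch below is illustrative only, not V8 source; the flag and enum names merely mirror identifiers visible in the diff.

// Illustrative sketch only -- not V8 source. The names below mirror
// identifiers that appear in the diff (FLAG_pretenuring, TENURED, ...).
#include <iostream>

enum PretenureFlag { NOT_TENURED, TENURED };

static bool FLAG_pretenuring = true;                       // --pretenuring
static bool new_space_high_promotion_mode_active = false;  // state removed by this patch

// Before this patch: pretenure only while the high promotion heuristic was active.
static PretenureFlag GetPretenureModeOld() {
  return FLAG_pretenuring && new_space_high_promotion_mode_active
      ? TENURED : NOT_TENURED;
}

// After this patch: the flag alone decides.
static PretenureFlag GetPretenureModeNew() {
  return FLAG_pretenuring ? TENURED : NOT_TENURED;
}

int main() {
  std::cout << "old: " << GetPretenureModeOld()    // prints 0 (NOT_TENURED)
            << "  new: " << GetPretenureModeNew()  // prints 1 (TENURED)
            << "\n";
  return 0;
}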