ASSERT(heap->InNewSpace(object));
if (!FLAG_allocation_site_pretenuring ||
- !heap->new_space_high_promotion_mode_active_ ||
!AllocationSite::CanTrack(object->map()->instance_type())) return;
// Either object is the last object in the from space, or there is another
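With the new_space_high_promotion_mode_active_ check dropped, feedback is now recorded for any new-space object whose instance type can be tracked. Restated as a single predicate purely for clarity; the helper name is hypothetical and not part of the patch:

  // Hypothetical restatement of the resulting guard; not code from the patch.
  static bool CanRecordAllocationSiteFeedback(Heap* heap, HeapObject* object) {
    return heap->InNewSpace(object) &&  // asserted above
           FLAG_allocation_site_pretenuring &&
           AllocationSite::CanTrack(object->map()->instance_type());
  }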
void Heap::ProcessPretenuringFeedback() {
- if (FLAG_allocation_site_pretenuring &&
- new_space_high_promotion_mode_active_) {
+ if (FLAG_allocation_site_pretenuring) {
int tenure_decisions = 0;
int dont_tenure_decisions = 0;
int allocation_mementos_found = 0;
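The three counters are per-GC tallies for the feedback pass, which now runs whenever allocation-site pretenuring is enabled. A sketch of how they might be surfaced after the pass; the trace flag name is an assumption and PrintF is assumed to be V8's printf-style helper, neither is shown in the patch:

  // Sketch only; FLAG_trace_pretenuring is assumed, not shown in the patch.
  if (FLAG_trace_pretenuring) {
    PrintF("pretenuring: mementos found %d, tenure %d, don't tenure %d\n",
           allocation_mementos_found, tenure_decisions, dont_tenure_decisions);
  }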
// to deoptimize all optimized code in global pretenuring mode and all
// code which should be tenured in local pretenuring mode.
if (FLAG_pretenuring) {
- if (FLAG_allocation_site_pretenuring) {
- ResetAllAllocationSitesDependentCode(NOT_TENURED);
- } else {
+ if (!FLAG_allocation_site_pretenuring) {
isolate_->stack_guard()->FullDeopt();
}
}
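Folded into one condition, the surviving behaviour is: only global pretenuring (allocation-site pretenuring disabled) still forces a full deoptimization, and the per-site ResetAllAllocationSitesDependentCode(NOT_TENURED) path is gone. Restated for clarity, not new code:

  if (FLAG_pretenuring && !FLAG_allocation_site_pretenuring) {
    isolate_->stack_guard()->FullDeopt();
  }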
static const intptr_t kMinimumOldGenerationAllocationLimit =
    8 * (Page::kPageSize > MB ? Page::kPageSize : MB);
intptr_t OldGenerationAllocationLimit(intptr_t old_gen_size) {
- const int divisor = FLAG_stress_compaction ? 10 :
- new_space_high_promotion_mode_active_ ? 1 : 3;
+ const int divisor = FLAG_stress_compaction ? 10 : 1;
intptr_t limit =
Max(old_gen_size + old_gen_size / divisor,
kMinimumOldGenerationAllocationLimit);
limit += new_space_.Capacity();
- // TODO(hpayer): Can be removed when when pretenuring is supported for all
- // allocation sites.
- if (IsHighSurvivalRate() && IsStableOrIncreasingSurvivalTrend()) {
- limit *= 2;
- }
intptr_t halfway_to_the_max = (old_gen_size + max_old_generation_size_) / 2;
return Min(limit, halfway_to_the_max);
}
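A rough worked example of the new limit, using illustrative sizes that are not taken from the patch: with FLAG_stress_compaction off the divisor is now always 1, so the target grows to roughly twice the current old-generation size (the survival-rate doubling removed above no longer applies), still capped halfway to the configured maximum.

  // Illustrative sizes only (assumptions, not from the patch):
  //   old_gen_size = 100 MB, max_old_generation_size_ = 700 MB,
  //   new_space_.Capacity() = 16 MB,
  //   kMinimumOldGenerationAllocationLimit = 8 MB (assuming Page::kPageSize <= 1 MB)
  // divisor = 1:  limit = Max(100 + 100/1, 8) + 16 = 216 MB
  // halfway_to_the_max = (100 + 700) / 2 = 400 MB
  // returned limit = Min(216, 400) = 216 MB
  // (with the old default divisor of 3 this would have been ~149 MB)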
PretenureFlag AllocationSite::GetPretenureMode() {
PretenureDecision mode = pretenure_decision();
// Zombie objects "decide" to be untenured.
- return (mode == kTenure && GetHeap()->GetPretenureMode() == TENURED)
- ? TENURED : NOT_TENURED;
+ return mode == kTenure ? TENURED : NOT_TENURED;
}
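With the heap-wide mode removed from the decision, only the site's own pretenure_decision() matters; anything other than kTenure (including zombie sites) yields NOT_TENURED. A minimal caller sketch, with a hypothetical helper name:

  // Hypothetical helper, for illustration only; not part of the patch.
  static bool SiteWantsTenuring(AllocationSite* site) {
    // TENURED exactly when the site's decision is kTenure; the global
    // Heap::GetPretenureMode() no longer participates.
    return site->GetPretenureMode() == TENURED;
  }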