From: jarin@chromium.org
Date: Sun, 31 Aug 2014 07:27:38 +0000 (+0000)
Subject: Reland "More aggressive reuse of spill slots in the register allocator."
X-Git-Tag: upstream/4.7.83~7265
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=d8414c57fe07026e44c82afbeb9f6d4c30baeb58;p=platform%2Fupstream%2Fv8.git

Reland "More aggressive reuse of spill slots in the register allocator."

This relands r23532 (https://codereview.chromium.org/310003003).
Flakes seem unrelated.

TBR=titzer@chromium.org

Review URL: https://codereview.chromium.org/522173003

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@23536 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
---

diff --git a/src/hydrogen.cc b/src/hydrogen.cc
index 14ef417..5b2982b 100644
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -12359,12 +12359,17 @@ void HTracer::TraceLiveRange(LiveRange* range, const char* type,
       trace_.Add(" \"%s\"", Register::AllocationIndexToString(assigned_reg));
     }
   } else if (range->IsSpilled()) {
-    LOperand* op = range->TopLevel()->GetSpillOperand();
-    if (op->IsDoubleStackSlot()) {
-      trace_.Add(" \"double_stack:%d\"", op->index());
+    int index = -1;
+    if (range->TopLevel()->GetSpillRange()->id() != -1) {
+      index = range->TopLevel()->GetSpillRange()->id();
     } else {
-      DCHECK(op->IsStackSlot());
-      trace_.Add(" \"stack:%d\"", op->index());
+      index = range->TopLevel()->GetSpillOperand()->index();
+    }
+    if (range->TopLevel()->Kind() == DOUBLE_REGISTERS) {
+      trace_.Add(" \"double_stack:%d\"", index);
+    } else {
+      DCHECK(range->TopLevel()->Kind() == GENERAL_REGISTERS);
+      trace_.Add(" \"stack:%d\"", index);
     }
   }
   int parent_index = -1;
diff --git a/src/lithium-allocator.cc b/src/lithium-allocator.cc
index 5f4f17f..abf98e1 100644
--- a/src/lithium-allocator.cc
+++ b/src/lithium-allocator.cc
@@ -109,7 +109,8 @@ LiveRange::LiveRange(int id, Zone* zone)
       last_processed_use_(NULL),
       current_hint_operand_(NULL),
       spill_operand_(new (zone) LOperand()),
-      spill_start_index_(kMaxInt) {}
+      spill_start_index_(kMaxInt),
+      spill_range_(NULL) {}


 void LiveRange::set_assigned_register(int reg, Zone* zone) {
@@ -124,13 +125,15 @@ void LiveRange::MakeSpilled(Zone* zone) {
   DCHECK(TopLevel()->HasAllocatedSpillOperand());
   spilled_ = true;
   assigned_register_ = kInvalidAssignment;
-  ConvertOperands(zone);
+  if (spill_range_ == NULL) {
+    ConvertOperands(zone);
+  }
 }


 bool LiveRange::HasAllocatedSpillOperand() const {
   DCHECK(spill_operand_ != NULL);
-  return !spill_operand_->IsIgnored();
+  return !spill_operand_->IsIgnored() || spill_range_ != NULL;
 }


@@ -142,6 +145,19 @@ void LiveRange::SetSpillOperand(LOperand* operand) {
 }


+void LiveRange::CommitSpillOperand(LOperand* operand) {
+  DCHECK(spill_range_ != NULL);
+  DCHECK(!IsChild());
+  spill_range_ = NULL;
+  SetSpillOperand(operand);
+  for (LiveRange* range = this; range != NULL; range = range->next()) {
+    if (range->IsSpilled()) {
+      range->ConvertUsesToOperand(operand);
+    }
+  }
+}
+
+
 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) {
   UsePosition* use_pos = last_processed_use_;
   if (use_pos == NULL) use_pos = first_pos();
@@ -446,8 +462,7 @@ void LiveRange::AddUsePosition(LifetimePosition pos,
 }


-void LiveRange::ConvertOperands(Zone* zone) {
-  LOperand* op = CreateAssignedOperand(zone);
+void LiveRange::ConvertUsesToOperand(LOperand* op) {
   UsePosition* use_pos = first_pos();
   while (use_pos != NULL) {
     DCHECK(Start().Value() <= use_pos->pos().Value() &&
@@ -463,6 +478,12 @@
 }


+void LiveRange::ConvertOperands(Zone* zone) {
+  LOperand* op = CreateAssignedOperand(zone);
+  ConvertUsesToOperand(op);
+}
+
+
 bool LiveRange::CanCover(LifetimePosition position) const {
   if (IsEmpty()) return false;
   return Start().Value() <= position.Value() &&
@@ -521,6 +542,7 @@ LAllocator::LAllocator(int num_values, HGraph* graph)
       active_live_ranges_(8, zone()),
       inactive_live_ranges_(8, zone()),
       reusable_slots_(8, zone()),
+      spill_ranges_(8, zone()),
       next_virtual_register_(num_values),
       first_artificial_register_(num_values),
       mode_(UNALLOCATED_REGISTERS),
@@ -1016,7 +1038,7 @@ void LAllocator::ResolvePhis(HBasicBlock* block) {
   for (int i = 0; i < phis->length(); ++i) {
     HPhi* phi = phis->at(i);
     LUnallocated* phi_operand =
-        new (chunk()->zone()) LUnallocated(LUnallocated::NONE);
+        new (chunk()->zone()) LUnallocated(LUnallocated::ANY);
     phi_operand->set_virtual_register(phi->id());
     for (int j = 0; j < phi->OperandCount(); ++j) {
       HValue* op = phi->OperandAt(j);
@@ -1083,6 +1105,7 @@ bool LAllocator::Allocate(LChunk* chunk) {
   if (!AllocationOk()) return false;
   AllocateDoubleRegisters();
   if (!AllocationOk()) return false;
+  ReuseSpillSlots();
   PopulatePointerMaps();
   ConnectRanges();
   ResolveControlFlow();
@@ -1090,6 +1113,139 @@
 }


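+// Both interval lists are sorted by start position, so a single forward
+// sweep suffices: always advance the list whose head starts first, and
+// report an intersection as soon as one head overlaps the other. With
+// half-open intervals, [1,4) [6,9) and [4,6) [9,12) do not intersect
+// (those ranges could share a spill slot), while [1,4) and [3,5) do.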
+static bool AreUseIntervalsIntersecting(UseInterval* interval1,
+                                        UseInterval* interval2) {
+  while (interval1 != NULL && interval2 != NULL) {
+    if (interval1->start().Value() < interval2->start().Value()) {
+      if (interval1->end().Value() > interval2->start().Value()) {
+        return true;
+      }
+      interval1 = interval1->next();
+    } else {
+      if (interval2->end().Value() > interval1->start().Value()) {
+        return true;
+      }
+      interval2 = interval2->next();
+    }
+  }
+  return false;
+}
+
+
+SpillRange::SpillRange(LiveRange* range, int id, Zone* zone)
+    : id_(id), live_ranges_(1, zone), end_position_(range->End()) {
+  UseInterval* src = range->first_interval();
+  UseInterval* result = NULL;
+  UseInterval* node = NULL;
+  // Copy the nodes
+  while (src != NULL) {
+    UseInterval* new_node = new (zone) UseInterval(src->start(), src->end());
+    if (result == NULL) {
+      result = new_node;
+    } else {
+      node->set_next(new_node);
+    }
+    node = new_node;
+    src = src->next();
+  }
+  use_interval_ = result;
+  live_ranges_.Add(range, zone);
+  DCHECK(range->GetSpillRange() == NULL);
+  range->SetSpillRange(this);
+}
+
+
+bool SpillRange::IsIntersectingWith(SpillRange* other) {
+  if (End().Value() <= other->use_interval_->start().Value() ||
+      other->End().Value() <= use_interval_->start().Value()) {
+    return false;
+  }
+  return AreUseIntervalsIntersecting(use_interval_, other->use_interval_);
+}
+
+
+bool SpillRange::TryMerge(SpillRange* other, Zone* zone) {
+  if (Kind() == other->Kind() &&
+      !AreUseIntervalsIntersecting(use_interval_, other->use_interval_)) {
+    if (End().Value() < other->End().Value()) {
+      end_position_ = other->End();
+    }
+
+    MergeDisjointIntervals(other->use_interval_, zone);
+    other->use_interval_ = NULL;
+
+    for (int i = 0; i < other->live_ranges_.length(); i++) {
+      DCHECK(other->live_ranges_.at(i)->GetSpillRange() == other);
+      other->live_ranges_.at(i)->SetSpillRange(this);
+    }
+
+    live_ranges_.AddAll(other->live_ranges_, zone);
+    other->live_ranges_.Clear();
+
+    return true;
+  }
+  return false;
+}
+
+
+void SpillRange::SetOperand(LOperand* op) {
+  for (int i = 0; i < live_ranges_.length(); i++) {
+    DCHECK(live_ranges_.at(i)->GetSpillRange() == this);
+    live_ranges_.at(i)->CommitSpillOperand(op);
+  }
+}
+
+
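+// Splices the sorted interval list 'other' into this range's sorted
+// list. The two lists are disjoint when this is called (TryMerge has
+// already checked), so the result is simply their sorted union, built
+// in place without allocating new nodes.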
+void SpillRange::MergeDisjointIntervals(UseInterval* other, Zone* zone) {
+  UseInterval* tail = NULL;
+  UseInterval* current = use_interval_;
+  while (other != NULL) {
+    // Make sure the 'current' list starts first.
+    if (current == NULL || current->start().Value() > other->start().Value()) {
+      std::swap(current, other);
+    }
+
+    // Check disjointness.
+    DCHECK(other == NULL || current->end().Value() <= other->start().Value());
+
+    // Append the 'current' node to the result accumulator and move forward.
+    if (tail == NULL) {
+      use_interval_ = current;
+    } else {
+      tail->set_next(current);
+    }
+    tail = current;
+    current = current->next();
+  }
+  // Other list is empty => we are done.
+}
+
+
+void LAllocator::ReuseSpillSlots() {
+  // Merge disjoint spill ranges.
+  for (int i = 0; i < spill_ranges_.length(); i++) {
+    SpillRange* range = spill_ranges_.at(i);
+    if (!range->IsEmpty()) {
+      for (int j = i + 1; j < spill_ranges_.length(); j++) {
+        SpillRange* other = spill_ranges_.at(j);
+        if (!other->IsEmpty()) {
+          range->TryMerge(spill_ranges_.at(j), zone());
+        }
+      }
+    }
+  }
+
+  // Allocate slots for the merged spill ranges.
+  for (int i = 0; i < spill_ranges_.length(); i++) {
+    SpillRange* range = spill_ranges_.at(i);
+    if (!range->IsEmpty()) {
+      LOperand* op = chunk_->GetNextSpillSlot(range->Kind());
+      range->SetOperand(op);
+    }
+  }
+}
+
+
 void LAllocator::MeetRegisterConstraints() {
   LAllocatorPhase phase("L_Register constraints", this);
   const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
@@ -1480,6 +1636,102 @@ void LAllocator::AllocateDoubleRegisters() {
 }


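+// Heuristic: if most of a phi's inputs already live in one merged spill
+// slot, try to spill the phi into that same slot, so that the moves
+// from the spilled inputs to the phi become stack-to-same-slot no-ops.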
+bool LAllocator::TryReuseSpillForPhi(LiveRange* range) {
+  DCHECK(!range->HasAllocatedSpillOperand());
+  if (range->IsChild()) {
+    return false;
+  }
+  HValue* instr = graph_->LookupValue(range->id());
+  if (instr == NULL || !instr->IsPhi()) {
+    return false;
+  }
+
+  // Count the number of spilled operands.
+  HPhi* phi = HPhi::cast(instr);
+  int spilled_count = 0;
+  LiveRange* first_op = NULL;
+  for (int i = 0; i < phi->OperandCount(); i++) {
+    HValue* op = phi->OperandAt(i);
+    LiveRange* op_range = LiveRangeFor(op->id());
+    if (op_range->GetSpillRange() != NULL) {
+      HBasicBlock* pred = instr->block()->predecessors()->at(i);
+      LifetimePosition pred_end = LifetimePosition::FromInstructionIndex(
+          pred->last_instruction_index());
+      while (op_range != NULL && !op_range->CanCover(pred_end)) {
+        op_range = op_range->next();
+      }
+      if (op_range != NULL && op_range->IsSpilled()) {
+        spilled_count++;
+        if (first_op == NULL) {
+          first_op = op_range->TopLevel();
+        }
+      }
+    }
+  }
+
+  // Only continue if more than half of the operands are spilled.
+  if (spilled_count * 2 <= phi->OperandCount()) {
+    return false;
+  }
+
+  // Try to merge the spilled operands and count the number of merged
+  // spilled operands.
+  DCHECK(first_op != NULL);
+  SpillRange* first_op_spill = first_op->GetSpillRange();
+  int num_merged = 1;
+  for (int i = 1; i < phi->OperandCount(); i++) {
+    HValue* op = phi->OperandAt(i);
+    LiveRange* op_range = LiveRangeFor(op->id());
+    SpillRange* op_spill = op_range->GetSpillRange();
+    if (op_spill != NULL) {
+      if (op_spill->id() == first_op_spill->id() ||
+          first_op_spill->TryMerge(op_spill, zone())) {
+        num_merged++;
+      }
+    }
+  }
+
+  // Only continue if enough operands could be merged into the same
+  // spill slot.
+  if (num_merged * 2 <= phi->OperandCount() ||
+      AreUseIntervalsIntersecting(first_op_spill->interval(),
+                                  range->first_interval())) {
+    return false;
+  }
+
+  // If the range does not need a register soon, spill it to the merged
+  // spill range.
+  LifetimePosition next_pos = range->Start();
+  if (IsGapAt(next_pos.InstructionIndex())) {
+    next_pos = next_pos.NextInstruction();
+  }
+  UsePosition* pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
+  if (pos == NULL) {
+    SpillRange* spill_range = AssignSpillRangeToLiveRange(range->TopLevel());
+    CHECK(first_op_spill->TryMerge(spill_range, zone()));
+    Spill(range);
+    return true;
+  } else if (pos->pos().Value() > range->Start().NextInstruction().Value()) {
+    SpillRange* spill_range = AssignSpillRangeToLiveRange(range->TopLevel());
+    CHECK(first_op_spill->TryMerge(spill_range, zone()));
+    SpillBetween(range, range->Start(), pos->pos());
+    if (!AllocationOk()) return false;
+    DCHECK(UnhandledIsSorted());
+    return true;
+  }
+
+  return false;
+}
+
+
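+// Spilled ranges no longer grab a stack slot eagerly; they are wrapped
+// in a SpillRange here, and actual slots are handed out later in
+// ReuseSpillSlots(), once all merging opportunities are known.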
+SpillRange* LAllocator::AssignSpillRangeToLiveRange(LiveRange* range) {
+  int spill_id = spill_ranges_.length();
+  SpillRange* spill_range = new (zone()) SpillRange(range, spill_id, zone());
+  spill_ranges_.Add(spill_range, zone());
+  return spill_range;
+}
+
+
 void LAllocator::AllocateRegisters() {
   DCHECK(unhandled_live_ranges_.is_empty());
@@ -1549,6 +1801,11 @@ void LAllocator::AllocateRegisters() {
       }
     }

+    if (TryReuseSpillForPhi(current)) {
+      continue;
+    }
+    if (!AllocationOk()) return;
+
     for (int i = 0; i < active_live_ranges_.length(); ++i) {
       LiveRange* cur_active = active_live_ranges_.at(i);
       if (cur_active->End().Value() <= position.Value()) {
@@ -1699,36 +1956,10 @@ bool LAllocator::UnhandledIsSorted() {
 }


-void LAllocator::FreeSpillSlot(LiveRange* range) {
-  // Check that we are the last range.
-  if (range->next() != NULL) return;
-
-  if (!range->TopLevel()->HasAllocatedSpillOperand()) return;
-
-  int index = range->TopLevel()->GetSpillOperand()->index();
-  if (index >= 0) {
-    reusable_slots_.Add(range, zone());
-  }
-}
-
-
-LOperand* LAllocator::TryReuseSpillSlot(LiveRange* range) {
-  if (reusable_slots_.is_empty()) return NULL;
-  if (reusable_slots_.first()->End().Value() >
-      range->TopLevel()->Start().Value()) {
-    return NULL;
-  }
-  LOperand* result = reusable_slots_.first()->TopLevel()->GetSpillOperand();
-  reusable_slots_.Remove(0);
-  return result;
-}
-
-
 void LAllocator::ActiveToHandled(LiveRange* range) {
   DCHECK(active_live_ranges_.Contains(range));
   active_live_ranges_.RemoveElement(range);
   TraceAlloc("Moving live range %d from active to handled\n", range->id());
-  FreeSpillSlot(range);
 }


@@ -1744,7 +1975,6 @@ void LAllocator::InactiveToHandled(LiveRange* range) {
   DCHECK(inactive_live_ranges_.Contains(range));
   inactive_live_ranges_.RemoveElement(range);
   TraceAlloc("Moving live range %d from inactive to handled\n", range->id());
-  FreeSpillSlot(range);
 }


@@ -1828,7 +2058,6 @@ bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
     AddToUnhandledSorted(tail);
   }

-
   // Register reg is available at the range start and is free until
   // the range end.
   DCHECK(pos.Value() >= current->End().Value());
@@ -2136,9 +2365,7 @@ void LAllocator::Spill(LiveRange* range) {
   LiveRange* first = range->TopLevel();

   if (!first->HasAllocatedSpillOperand()) {
-    LOperand* op = TryReuseSpillSlot(range);
-    if (op == NULL) op = chunk_->GetNextSpillSlot(range->Kind());
-    first->SetSpillOperand(op);
+    AssignSpillRangeToLiveRange(first);
   }
   range->MakeSpilled(chunk()->zone());
 }
diff --git a/src/lithium-allocator.h b/src/lithium-allocator.h
index f63077e..a0bceb7 100644
--- a/src/lithium-allocator.h
+++ b/src/lithium-allocator.h
@@ -153,6 +153,8 @@ class UseInterval: public ZoneObject {
   LifetimePosition end_;
   UseInterval* next_;

+  friend class LAllocator;  // Assigns to next_.
+  friend class SpillRange;  // Assigns to next_.
   friend class LiveRange;  // Assigns to start_.
 };

@@ -185,6 +187,8 @@ class UsePosition: public ZoneObject {
   friend class LiveRange;
 };

+class SpillRange;
+

 // Representation of SSA values' live ranges as a collection of (continuous)
 // intervals over the instruction ordering.
 class LiveRange: public ZoneObject {
@@ -267,6 +271,10 @@ class LiveRange: public ZoneObject {
   LOperand* GetSpillOperand() const { return spill_operand_; }
   void SetSpillOperand(LOperand* operand);

+  void SetSpillRange(SpillRange* spill_range) { spill_range_ = spill_range; }
+  SpillRange* GetSpillRange() const { return spill_range_; }
+  void CommitSpillOperand(LOperand* operand);
+
   void SetSpillStartIndex(int start) {
     spill_start_index_ = Min(start, spill_start_index_);
   }
@@ -299,6 +307,7 @@ class LiveRange: public ZoneObject {

  private:
   void ConvertOperands(Zone* zone);
+  void ConvertUsesToOperand(LOperand* op);
   UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
   void AdvanceLastProcessedMarker(UseInterval* to_start_of,
                                   LifetimePosition but_not_past) const;
@@ -319,11 +328,36 @@ class LiveRange: public ZoneObject {
   LOperand* current_hint_operand_;
   LOperand* spill_operand_;
   int spill_start_index_;
+  SpillRange* spill_range_;

   friend class LAllocator;  // Assigns to kind_.
 };


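+// A SpillRange groups live ranges whose use intervals are pairwise
+// disjoint, so all of them can share a single stack slot. TryMerge()
+// grows the group; SetOperand() commits the chosen slot to every
+// member.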
+class SpillRange : public ZoneObject {
+ public:
+  SpillRange(LiveRange* range, int id, Zone* zone);
+
+  bool TryMerge(SpillRange* other, Zone* zone);
+  void SetOperand(LOperand* op);
+  bool IsEmpty() { return live_ranges_.length() == 0; }
+  int id() const { return id_; }
+  UseInterval* interval() { return use_interval_; }
+  RegisterKind Kind() const { return live_ranges_.at(0)->Kind(); }
+  LifetimePosition End() const { return end_position_; }
+  bool IsIntersectingWith(SpillRange* other);
+
+ private:
+  int id_;
+  ZoneList<LiveRange*> live_ranges_;
+  UseInterval* use_interval_;
+  LifetimePosition end_position_;
+
+  // Merge intervals, making sure the use intervals are sorted.
+  void MergeDisjointIntervals(UseInterval* other, Zone* zone);
+};
+
+
 class LAllocator BASE_EMBEDDED {
  public:
   LAllocator(int first_virtual_register, HGraph* graph);
@@ -390,6 +424,7 @@ class LAllocator BASE_EMBEDDED {
   void ResolveControlFlow();
   void PopulatePointerMaps();
   void AllocateRegisters();
+  void ReuseSpillSlots();
   bool CanEagerlyResolveControlFlow(HBasicBlock* block) const;
   inline bool SafePointsAreInOrder() const;
@@ -425,12 +460,12 @@ class LAllocator BASE_EMBEDDED {
   void ActiveToInactive(LiveRange* range);
   void InactiveToHandled(LiveRange* range);
   void InactiveToActive(LiveRange* range);
-  void FreeSpillSlot(LiveRange* range);
-  LOperand* TryReuseSpillSlot(LiveRange* range);
+
+  SpillRange* AssignSpillRangeToLiveRange(LiveRange* range);

   // Helper methods for allocating registers.
   bool TryAllocateFreeReg(LiveRange* range);
   void AllocateBlockedReg(LiveRange* range);
+  bool TryReuseSpillForPhi(LiveRange* range);

   // Live range splitting helpers.
@@ -530,6 +565,7 @@ class LAllocator BASE_EMBEDDED {
   ZoneList<LiveRange*> active_live_ranges_;
   ZoneList<LiveRange*> inactive_live_ranges_;
   ZoneList<LiveRange*> reusable_slots_;
+  ZoneList<SpillRange*> spill_ranges_;

   // Next virtual register number to be assigned to temporaries.
   int next_virtual_register_;