[turbofan] More aggressive reuse of spill slots in the register allocator.
author    dcarney <dcarney@chromium.org>
          Thu, 20 Nov 2014 14:40:56 +0000 (06:40 -0800)
committer Commit bot <commit-bot@chromium.org>
          Thu, 20 Nov 2014 14:41:08 +0000 (14:41 +0000)
BUG=

Review URL: https://codereview.chromium.org/725083004

Cr-Commit-Position: refs/heads/master@{#25440}

src/compiler/graph-visualizer.cc
src/compiler/pipeline.cc
src/compiler/register-allocator.cc
src/compiler/register-allocator.h
src/compiler/x64/code-generator-x64.cc

index 19f24cfdb2fc3f3da9fe11d38fd97cc1766b8c91..40b182a34c881352cef2b3242db5b1860e83e6a6 100644 (file)
@@ -723,14 +723,18 @@ void GraphC1Visualizer::PrintLiveRange(LiveRange* range, const char* type) {
         os_ << " \"" << Register::AllocationIndexToString(assigned_reg) << "\"";
       }
     } else if (range->IsSpilled()) {
-      InstructionOperand* op = range->TopLevel()->GetSpillOperand();
-      if (op->IsDoubleStackSlot()) {
-        os_ << " \"double_stack:" << op->index() << "\"";
-      } else if (op->IsStackSlot()) {
-        os_ << " \"stack:" << op->index() << "\"";
+      int index = -1;
+      if (range->TopLevel()->GetSpillRange()->id() != -1) {
+        index = range->TopLevel()->GetSpillRange()->id();
       } else {
-        DCHECK(op->IsConstant());
-        os_ << " \"const(nostack):" << op->index() << "\"";
+        index = range->TopLevel()->GetSpillOperand()->index();
+      }
+      if (range->TopLevel()->Kind() == DOUBLE_REGISTERS) {
+        os_ << " \"double_stack:" << index << "\"";
+      } else if (range->TopLevel()->Kind() == GENERAL_REGISTERS) {
+        os_ << " \"stack:" << index << "\"";
+      } else {
+        os_ << " \"const(nostack):" << index << "\"";
       }
     }
     int parent_index = -1;
index 236e7f655267e36f7aa20ced8a607a58bf2a03b1..10bbe7c1bc6d38e5b962b0646d6465b4b0e811cf 100644 (file)
@@ -533,6 +533,15 @@ struct AllocateDoubleRegistersPhase {
 };
 
 
+struct ReuseSpillSlotsPhase {
+  static const char* phase_name() { return "reuse spill slots"; }
+
+  void Run(PipelineData* data, Zone* temp_zone) {
+    data->register_allocator()->ReuseSpillSlots();
+  }
+};
+
+
 struct PopulatePointerMapsPhase {
   static const char* phase_name() { return "populate pointer maps"; }
 
@@ -935,6 +944,7 @@ void Pipeline::AllocateRegisters(const RegisterConfiguration* config,
     data->set_compilation_failed();
     return;
   }
+  Run<ReuseSpillSlotsPhase>();
   Run<PopulatePointerMapsPhase>();
   Run<ConnectRangesPhase>();
   Run<ResolveControlFlowPhase>();
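
The new phase slots into the pipeline between register assignment and pointer-map population, following the same struct-with-phase_name()-and-Run() pattern as the existing phases. A stripped-down sketch of that pattern is below; V8's real Run<> helper also threads a temp zone and pipeline statistics, and PipelineData's contents are elided here.

#include <cstdio>

struct PipelineData { /* register allocator, zones, etc. (elided) */ };

struct ReuseSpillSlotsPhase {
  static const char* phase_name() { return "reuse spill slots"; }
  void Run(PipelineData* data) {
    // In V8: data->register_allocator()->ReuseSpillSlots();
  }
};

template <typename Phase>
void Run(PipelineData* data) {
  // V8's helper additionally creates a temp zone and records timing stats.
  std::printf("running phase: %s\n", Phase::phase_name());
  Phase phase;
  phase.Run(data);
}

int main() {
  PipelineData data;
  Run<ReuseSpillSlotsPhase>(&data);  // prints "running phase: reuse spill slots"
  return 0;
}
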
index a9cdf51b5636d7d2d0f484c6e2e36875e89cf3c9..59d80c6f7c6fbfadda50fde5fac33da698b22892 100644 (file)
@@ -111,13 +111,16 @@ LiveRange::LiveRange(int id, Zone* zone)
       last_processed_use_(NULL),
       current_hint_operand_(NULL),
       spill_operand_(new (zone) InstructionOperand()),
-      spill_start_index_(kMaxInt) {}
+      spill_start_index_(kMaxInt),
+      spill_range_(NULL) {}
 
 
 void LiveRange::set_assigned_register(int reg, Zone* zone) {
   DCHECK(!HasRegisterAssigned() && !IsSpilled());
   assigned_register_ = reg;
-  ConvertOperands(zone);
+  if (spill_range_ == nullptr) {
+    ConvertOperands(zone);
+  }
 }
 
 
@@ -132,7 +135,7 @@ void LiveRange::MakeSpilled(Zone* zone) {
 
 bool LiveRange::HasAllocatedSpillOperand() const {
   DCHECK(spill_operand_ != NULL);
-  return !spill_operand_->IsIgnored();
+  return !spill_operand_->IsIgnored() || spill_range_ != NULL;
 }
 
 
@@ -144,6 +147,19 @@ void LiveRange::SetSpillOperand(InstructionOperand* operand) {
 }
 
 
+void LiveRange::CommitSpillOperand(InstructionOperand* operand) {
+  DCHECK(spill_range_ != NULL);
+  DCHECK(!IsChild());
+  spill_range_ = NULL;
+  SetSpillOperand(operand);
+  for (LiveRange* range = this; range != NULL; range = range->next()) {
+    if (range->IsSpilled()) {
+      range->ConvertUsesToOperand(operand);
+    }
+  }
+}
+
+
 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) {
   UsePosition* use_pos = last_processed_use_;
   if (use_pos == NULL) use_pos = first_pos();
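
CommitSpillOperand is the deferred counterpart of SetSpillOperand: the slot is only chosen after ReuseSpillSlots has merged ranges, and the commit then walks the chain of split children, rewriting the uses of every child that was actually spilled. A minimal self-contained sketch of that walk, with a stripped-down stand-in for LiveRange (the individual use positions are elided):

#include <cstdio>

struct Operand { int index; };

struct LiveRange {
  bool spilled;
  LiveRange* next;              // next split child in the chain
  Operand* spill_op = nullptr;  // stands in for the rewritten uses
  void ConvertUsesToOperand(Operand* op) { spill_op = op; }
};

// Walk the whole split chain; only children that actually ended up
// spilled get their uses redirected to the freshly chosen slot.
void CommitSpillOperand(LiveRange* top, Operand* op) {
  for (LiveRange* r = top; r != nullptr; r = r->next)
    if (r->spilled) r->ConvertUsesToOperand(op);
}

int main() {
  Operand slot{3};
  LiveRange c2{true, nullptr}, c1{false, &c2}, top{true, &c1};
  CommitSpillOperand(&top, &slot);
  std::printf("%d %d\n", top.spill_op->index, c2.spill_op->index);  // 3 3
  return 0;
}
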
@@ -440,8 +456,7 @@ void LiveRange::AddUsePosition(LifetimePosition pos,
 }
 
 
-void LiveRange::ConvertOperands(Zone* zone) {
-  InstructionOperand* op = CreateAssignedOperand(zone);
+void LiveRange::ConvertUsesToOperand(InstructionOperand* op) {
   UsePosition* use_pos = first_pos();
   while (use_pos != NULL) {
     DCHECK(Start().Value() <= use_pos->pos().Value() &&
@@ -457,6 +472,11 @@ void LiveRange::ConvertOperands(Zone* zone) {
 }
 
 
+void LiveRange::ConvertOperands(Zone* zone) {
+  ConvertUsesToOperand(CreateAssignedOperand(zone));
+}
+
+
 bool LiveRange::CanCover(LifetimePosition position) const {
   if (IsEmpty()) return false;
   return Start().Value() <= position.Value() &&
@@ -522,6 +542,7 @@ RegisterAllocator::RegisterAllocator(const RegisterConfiguration* config,
       active_live_ranges_(8, local_zone()),
       inactive_live_ranges_(8, local_zone()),
       reusable_slots_(8, local_zone()),
+      spill_ranges_(8, local_zone()),
       mode_(UNALLOCATED_REGISTERS),
       num_registers_(-1),
       allocation_ok_(true) {
@@ -751,6 +772,157 @@ void RegisterAllocator::AddConstraintsGapMove(int index,
 }
 
 
+static bool AreUseIntervalsIntersecting(UseInterval* interval1,
+                                        UseInterval* interval2) {
+  while (interval1 != NULL && interval2 != NULL) {
+    if (interval1->start().Value() < interval2->start().Value()) {
+      if (interval1->end().Value() >= interval2->start().Value()) {
+        return true;
+      }
+      interval1 = interval1->next();
+    } else {
+      if (interval2->end().Value() >= interval1->start().Value()) {
+        return true;
+      }
+      interval2 = interval2->next();
+    }
+  }
+  return false;
+}
+
+
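
The sweep above relies on both interval lists being sorted by start position: each step advances whichever list starts earlier, and two intervals can only overlap if the earlier one ends at or after the later one's start. A self-contained sketch of the same sweep, using a simplified interval type (Interval and Intersect are illustrative names, not V8's):

#include <cstdio>

struct Interval {
  int start, end;
  Interval* next;  // sorted, non-overlapping within one list
};

// Mirror of AreUseIntervalsIntersecting: advance the list that starts
// earlier; report an intersection as soon as it reaches the other's start.
static bool Intersect(Interval* a, Interval* b) {
  while (a != nullptr && b != nullptr) {
    if (a->start < b->start) {
      if (a->end >= b->start) return true;
      a = a->next;
    } else {
      if (b->end >= a->start) return true;
      b = b->next;
    }
  }
  return false;
}

int main() {
  Interval b2 = {30, 40, nullptr}, b1 = {5, 9, &b2};
  Interval a2 = {20, 25, nullptr}, a1 = {0, 4, &a2};
  std::printf("%d\n", Intersect(&a1, &b1));  // 0: the lists are disjoint
  return 0;
}
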
+SpillRange::SpillRange(LiveRange* range, int id, Zone* zone)
+    : id_(id), live_ranges_(1, zone), end_position_(range->End()) {
+  UseInterval* src = range->first_interval();
+  UseInterval* result = NULL;
+  UseInterval* node = NULL;
+  // Copy the nodes
+  while (src != NULL) {
+    UseInterval* new_node = new (zone) UseInterval(src->start(), src->end());
+    if (result == NULL) {
+      result = new_node;
+    } else {
+      node->set_next(new_node);
+    }
+    node = new_node;
+    src = src->next();
+  }
+  use_interval_ = result;
+  live_ranges_.Add(range, zone);
+  DCHECK(range->GetSpillRange() == NULL);
+  range->SetSpillRange(this);
+}
+
+
+bool SpillRange::IsIntersectingWith(SpillRange* other) {
+  if (End().Value() <= other->use_interval_->start().Value() ||
+      other->End().Value() <= use_interval_->start().Value()) {
+    return false;
+  }
+  return AreUseIntervalsIntersecting(use_interval_, other->use_interval_);
+}
+
+
+bool SpillRange::TryMerge(SpillRange* other, Zone* zone) {
+  if (Kind() == other->Kind() &&
+      !AreUseIntervalsIntersecting(use_interval_, other->use_interval_)) {
+    if (End().Value() < other->End().Value()) {
+      end_position_ = other->End();
+    }
+
+    MergeDisjointIntervals(other->use_interval_, zone);
+    other->use_interval_ = NULL;
+
+    for (int i = 0; i < other->live_ranges_.length(); i++) {
+      DCHECK(other->live_ranges_.at(i)->GetSpillRange() == other);
+      other->live_ranges_.at(i)->SetSpillRange(this);
+    }
+
+    live_ranges_.AddAll(other->live_ranges_, zone);
+    other->live_ranges_.Clear();
+
+    return true;
+  }
+  return false;
+}
+
+
+void SpillRange::SetOperand(InstructionOperand* op) {
+  for (int i = 0; i < live_ranges_.length(); i++) {
+    DCHECK(live_ranges_.at(i)->GetSpillRange() == this);
+    live_ranges_.at(i)->CommitSpillOperand(op);
+  }
+}
+
+
+void SpillRange::MergeDisjointIntervals(UseInterval* other, Zone* zone) {
+  UseInterval* tail = NULL;
+  UseInterval* current = use_interval_;
+  while (other != NULL) {
+    // Make sure the 'current' list starts first
+    if (current == NULL || current->start().Value() > other->start().Value()) {
+      std::swap(current, other);
+    }
+
+    // Check disjointness
+    DCHECK(other == NULL || current->end().Value() <= other->start().Value());
+
+    // Append the 'current' node to the result accumulator and move forward
+    if (tail == NULL) {
+      use_interval_ = current;
+    } else {
+      tail->set_next(current);
+    }
+    tail = current;
+    current = current->next();
+  }
+  // Other list is empty => we are done
+}
+
+
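
MergeDisjointIntervals is a standard sorted-list merge with one twist: instead of branching on which head is smaller, it swaps the two list pointers so that 'current' always holds the next node to splice. The same trick in isolation, with a plain struct standing in for UseInterval:

#include <cassert>
#include <cstdio>

struct Node { int start, end; Node* next; };

static Node* MergeDisjoint(Node* head, Node* other) {
  Node* tail = nullptr;
  Node* current = head;
  while (other != nullptr) {
    // Swap so that 'current' is the list whose head starts first.
    if (current == nullptr || current->start > other->start) {
      Node* tmp = current; current = other; other = tmp;
    }
    assert(other == nullptr || current->end <= other->start);  // disjointness
    if (tail == nullptr) head = current; else tail->next = current;
    tail = current;
    current = current->next;
  }
  return head;  // 'other' drained; the rest of 'current' is already linked
}

int main() {
  Node a2 = {20, 25, nullptr}, a1 = {0, 4, &a2};
  Node b1 = {10, 15, nullptr};
  for (Node* n = MergeDisjoint(&a1, &b1); n != nullptr; n = n->next)
    std::printf("[%d,%d] ", n->start, n->end);  // [0,4] [10,15] [20,25]
  return 0;
}
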
+void RegisterAllocator::ReuseSpillSlots() {
+  // Merge disjoint spill ranges
+  for (int i = 0; i < spill_ranges_.length(); i++) {
+    SpillRange* range = spill_ranges_.at(i);
+    if (!range->IsEmpty()) {
+      for (int j = i + 1; j < spill_ranges_.length(); j++) {
+        SpillRange* other = spill_ranges_.at(j);
+        if (!other->IsEmpty()) {
+          range->TryMerge(spill_ranges_.at(j), local_zone());
+        }
+      }
+    }
+  }
+
+  // Allocate slots for the merged spill ranges.
+  for (int i = 0; i < spill_ranges_.length(); i++) {
+    SpillRange* range = spill_ranges_.at(i);
+    if (!range->IsEmpty()) {
+      // Allocate a new operand referring to the spill slot.
+      RegisterKind kind = range->Kind();
+      int index = frame()->AllocateSpillSlot(kind == DOUBLE_REGISTERS);
+      InstructionOperand* op = nullptr;
+      if (kind == DOUBLE_REGISTERS) {
+        op = DoubleStackSlotOperand::Create(index, local_zone());
+      } else {
+        DCHECK(kind == GENERAL_REGISTERS);
+        op = StackSlotOperand::Create(index, local_zone());
+      }
+      range->SetOperand(op);
+    }
+  }
+}
+
+
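
This is the payoff of the CL: previously each Spill() grabbed a frame slot almost immediately (modulo the reusable_slots_ free list, which this change deletes), whereas now slot assignment is deferred until all spill ranges are known, and an O(n^2) pairwise TryMerge folds every pair of same-kind, non-interfering ranges into one slot. Below is a toy first-fit variant of the same idea, operating on single intervals rather than V8's per-range interval lists; all names are illustrative.

#include <cstdio>
#include <vector>

struct Interval { int start, end; };

static bool Overlaps(const std::vector<Interval>& slot, Interval r) {
  for (const Interval& s : slot)
    if (r.start <= s.end && s.start <= r.end) return true;
  return false;
}

int main() {
  const Interval ranges[] = {{0, 10}, {5, 15}, {12, 20}};
  std::vector<std::vector<Interval>> slots;  // one entry per stack slot
  for (const Interval& r : ranges) {
    size_t slot = 0;
    while (slot < slots.size() && Overlaps(slots[slot], r)) ++slot;
    if (slot == slots.size()) slots.emplace_back();
    slots[slot].push_back(r);  // disjoint from everything in this slot
    std::printf("[%d,%d] -> slot %zu\n", r.start, r.end, slot);
  }
  // [0,10] -> slot 0, [5,15] -> slot 1, [12,20] -> slot 0:
  // three spilled ranges fit in two stack slots.
  return 0;
}
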
+SpillRange* RegisterAllocator::AssignSpillRangeToLiveRange(LiveRange* range) {
+  int spill_id = spill_ranges_.length();
+  SpillRange* spill_range =
+      new (local_zone()) SpillRange(range, spill_id, local_zone());
+  spill_ranges_.Add(spill_range, local_zone());
+  return spill_range;
+}
+
+
 void RegisterAllocator::MeetRegisterConstraints(const InstructionBlock* block) {
   int start = block->first_instruction_index();
   int end = block->last_instruction_index();
@@ -1760,38 +1932,10 @@ bool RegisterAllocator::UnhandledIsSorted() {
 }
 
 
-void RegisterAllocator::FreeSpillSlot(LiveRange* range) {
-  // Check that we are the last range.
-  if (range->next() != NULL) return;
-
-  if (!range->TopLevel()->HasAllocatedSpillOperand()) return;
-
-  InstructionOperand* spill_operand = range->TopLevel()->GetSpillOperand();
-  if (spill_operand->IsConstant()) return;
-  if (spill_operand->index() >= 0) {
-    reusable_slots_.Add(range, local_zone());
-  }
-}
-
-
-InstructionOperand* RegisterAllocator::TryReuseSpillSlot(LiveRange* range) {
-  if (reusable_slots_.is_empty()) return NULL;
-  if (reusable_slots_.first()->End().Value() >
-      range->TopLevel()->Start().Value()) {
-    return NULL;
-  }
-  InstructionOperand* result =
-      reusable_slots_.first()->TopLevel()->GetSpillOperand();
-  reusable_slots_.Remove(0);
-  return result;
-}
-
-
 void RegisterAllocator::ActiveToHandled(LiveRange* range) {
   DCHECK(active_live_ranges_.Contains(range));
   active_live_ranges_.RemoveElement(range);
   TraceAlloc("Moving live range %d from active to handled\n", range->id());
-  FreeSpillSlot(range);
 }
 
 
@@ -1807,7 +1951,6 @@ void RegisterAllocator::InactiveToHandled(LiveRange* range) {
   DCHECK(inactive_live_ranges_.Contains(range));
   inactive_live_ranges_.RemoveElement(range);
   TraceAlloc("Moving live range %d from inactive to handled\n", range->id());
-  FreeSpillSlot(range);
 }
 
 
@@ -2192,19 +2335,7 @@ void RegisterAllocator::Spill(LiveRange* range) {
   LiveRange* first = range->TopLevel();
 
   if (!first->HasAllocatedSpillOperand()) {
-    InstructionOperand* op = TryReuseSpillSlot(range);
-    if (op == NULL) {
-      // Allocate a new operand referring to the spill slot.
-      RegisterKind kind = range->Kind();
-      int index = frame()->AllocateSpillSlot(kind == DOUBLE_REGISTERS);
-      if (kind == DOUBLE_REGISTERS) {
-        op = DoubleStackSlotOperand::Create(index, local_zone());
-      } else {
-        DCHECK(kind == GENERAL_REGISTERS);
-        op = StackSlotOperand::Create(index, local_zone());
-      }
-    }
-    first->SetSpillOperand(op);
+    AssignSpillRangeToLiveRange(first);
   }
   range->MakeSpilled(code_zone());
 }
index 7ffb3795d7b61f7bcb5553276501515463166efb..5fbb821ccfbcb90a252caff1a5770342a538511f 100644 (file)
@@ -171,6 +171,7 @@ class UsePosition FINAL : public ZoneObject {
   DISALLOW_COPY_AND_ASSIGN(UsePosition);
 };
 
+class SpillRange;
 
 // Representation of SSA values' live ranges as a collection of (continuous)
 // intervals over the instruction ordering.
@@ -257,6 +258,10 @@ class LiveRange FINAL : public ZoneObject {
   InstructionOperand* GetSpillOperand() const { return spill_operand_; }
   void SetSpillOperand(InstructionOperand* operand);
 
+  void SetSpillRange(SpillRange* spill_range) { spill_range_ = spill_range; }
+  SpillRange* GetSpillRange() const { return spill_range_; }
+  void CommitSpillOperand(InstructionOperand* operand);
+
   void SetSpillStartIndex(int start) {
     spill_start_index_ = Min(start, spill_start_index_);
   }
@@ -283,6 +288,7 @@ class LiveRange FINAL : public ZoneObject {
 
  private:
   void ConvertOperands(Zone* zone);
+  void ConvertUsesToOperand(InstructionOperand* op);
   UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
   void AdvanceLastProcessedMarker(UseInterval* to_start_of,
                                   LifetimePosition but_not_past) const;
@@ -303,6 +309,7 @@ class LiveRange FINAL : public ZoneObject {
   InstructionOperand* current_hint_operand_;
   InstructionOperand* spill_operand_;
   int spill_start_index_;
+  SpillRange* spill_range_;
 
   friend class RegisterAllocator;  // Assigns to kind_.
 
@@ -310,6 +317,29 @@ class LiveRange FINAL : public ZoneObject {
 };
 
 
+class SpillRange : public ZoneObject {
+ public:
+  SpillRange(LiveRange* range, int id, Zone* zone);
+  bool TryMerge(SpillRange* other, Zone* zone);
+  void SetOperand(InstructionOperand* op);
+  bool IsEmpty() { return live_ranges_.length() == 0; }
+  int id() const { return id_; }
+  UseInterval* interval() { return use_interval_; }
+  RegisterKind Kind() const { return live_ranges_.at(0)->Kind(); }
+  LifetimePosition End() const { return end_position_; }
+  bool IsIntersectingWith(SpillRange* other);
+
+ private:
+  int id_;
+  ZoneList<LiveRange*> live_ranges_;
+  UseInterval* use_interval_;
+  LifetimePosition end_position_;
+
+  // Merge intervals, making sure the use intervals are sorted
+  void MergeDisjointIntervals(UseInterval* other, Zone* zone);
+};
+
+
 class RegisterAllocator FINAL : public ZoneObject {
  public:
   explicit RegisterAllocator(const RegisterConfiguration* config,
@@ -341,13 +371,16 @@ class RegisterAllocator FINAL : public ZoneObject {
   void AllocateGeneralRegisters();
   void AllocateDoubleRegisters();
 
-  // Phase 4: compute values for pointer maps.
+  // Phase 4: reassign spill slots for maximal reuse.
+  void ReuseSpillSlots();
+
+  // Phase 5: compute values for pointer maps.
   void PopulatePointerMaps();  // TODO(titzer): rename to PopulateReferenceMaps.
 
-  // Phase 5: reconnect split ranges with moves.
+  // Phase 6: reconnect split ranges with moves.
   void ConnectRanges();
 
-  // Phase 6: insert moves to connect ranges across basic blocks.
+  // Phase 7: insert moves to connect ranges across basic blocks.
   void ResolveControlFlow();
 
  private:
@@ -419,12 +452,11 @@ class RegisterAllocator FINAL : public ZoneObject {
   void ActiveToInactive(LiveRange* range);
   void InactiveToHandled(LiveRange* range);
   void InactiveToActive(LiveRange* range);
-  void FreeSpillSlot(LiveRange* range);
-  InstructionOperand* TryReuseSpillSlot(LiveRange* range);
 
   // Helper methods for allocating registers.
   bool TryAllocateFreeReg(LiveRange* range);
   void AllocateBlockedReg(LiveRange* range);
+  SpillRange* AssignSpillRangeToLiveRange(LiveRange* range);
 
   // Live range splitting helpers.
 
@@ -521,6 +553,7 @@ class RegisterAllocator FINAL : public ZoneObject {
   ZoneList<LiveRange*> active_live_ranges_;
   ZoneList<LiveRange*> inactive_live_ranges_;
   ZoneList<LiveRange*> reusable_slots_;
+  ZoneList<SpillRange*> spill_ranges_;
 
   RegisterKind mode_;
   int num_registers_;
index 051ff9f5f42c61a1cdf3723f3bfd8e2746d8d96b..212c1de776d84e96119626ac3d13ce04757bdd03 100644 (file)
@@ -1007,7 +1007,7 @@ void CodeGenerator::AssembleSwap(InstructionOperand* source,
     __ movsd(xmm0, src);
     __ movsd(src, dst);
     __ movsd(dst, xmm0);
-  } else if (source->IsDoubleRegister() && destination->IsDoubleRegister()) {
+  } else if (source->IsDoubleRegister() && destination->IsDoubleStackSlot()) {
     // XMM register-memory swap.  We rely on having xmm0
     // available as a fixed scratch register.
     XMMRegister src = g.ToDoubleRegister(source);
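
The one-character condition change in this last hunk is a genuine bug fix rather than part of the spill-slot work: the branch emits an XMM register-to-memory swap through xmm0, but the old predicate required both operands to be double registers, so it could never match the register/stack-slot pair the branch was written for (true register-register pairs are presumably handled by an earlier branch). A tiny self-contained check of the two predicates, with Operand as an illustrative stand-in for InstructionOperand:

#include <cassert>

enum class Kind { kDoubleRegister, kDoubleStackSlot };

struct Operand {
  Kind kind;
  bool IsDoubleRegister() const { return kind == Kind::kDoubleRegister; }
  bool IsDoubleStackSlot() const { return kind == Kind::kDoubleStackSlot; }
};

int main() {
  Operand reg{Kind::kDoubleRegister}, slot{Kind::kDoubleStackSlot};
  // Old test: both operands must be registers, which is never true for the
  // register/memory pair this branch handles.
  assert(!(reg.IsDoubleRegister() && slot.IsDoubleRegister()));
  // Fixed test matches exactly the pair the swap code expects.
  assert(reg.IsDoubleRegister() && slot.IsDoubleStackSlot());
  return 0;
}
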