1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #ifndef V8_REGISTER_ALLOCATOR_H_
6 #define V8_REGISTER_ALLOCATOR_H_
8 #include "src/compiler/instruction.h"
9 #include "src/zone-containers.h"
16 UNALLOCATED_REGISTERS,
22 // This class represents a single point of an InstructionOperand's lifetime. For
23 // each instruction there are exactly two lifetime positions: the beginning and
24 // the end of the instruction. Lifetime positions for different instructions are
// NOTE(review): this extract has dropped interior lines (closing braces, access
// specifiers, and the `int value_;` member) — verify against the full file.
26 class LifetimePosition FINAL {
28 // Return the lifetime position that corresponds to the beginning of
29 // the instruction with the given index.
30 static LifetimePosition FromInstructionIndex(int index) {
31 return LifetimePosition(index * kStep);
34 // Returns a numeric representation of this lifetime position.
35 int Value() const { return value_; }
37 // Returns the index of the instruction to which this lifetime position
// corresponds. Positions are encoded as index * kStep (start) and
// index * kStep + kStep / 2 (end), so integer division recovers the index.
39 int InstructionIndex() const {
41 return value_ / kStep;
44 // Returns true if this lifetime position corresponds to the instruction
// start, i.e. the bits below kStep are all zero.
46 bool IsInstructionStart() const { return (value_ & (kStep - 1)) == 0; }
48 // Returns the lifetime position for the start of the instruction which
49 // corresponds to this lifetime position.
50 LifetimePosition InstructionStart() const {
52 return LifetimePosition(value_ & ~(kStep - 1));
55 // Returns the lifetime position for the end of the instruction which
56 // corresponds to this lifetime position.
57 LifetimePosition InstructionEnd() const {
59 return LifetimePosition(InstructionStart().Value() + kStep / 2);
62 // Returns the lifetime position for the beginning of the next instruction.
63 LifetimePosition NextInstruction() const {
65 return LifetimePosition(InstructionStart().Value() + kStep);
68 // Returns the lifetime position for the beginning of the previous
// instruction.
70 LifetimePosition PrevInstruction() const {
73 return LifetimePosition(InstructionStart().Value() - kStep);
76 // Constructs the lifetime position which does not correspond to any
// instruction (sentinel value -1; see IsValid()).
78 LifetimePosition() : value_(-1) {}
80 // Returns true if this lifetime position corresponds to some
// instruction (i.e. it is not the invalid sentinel).
82 bool IsValid() const { return value_ != -1; }
84 static inline LifetimePosition Invalid() { return LifetimePosition(); }
86 static inline LifetimePosition MaxPosition() {
87 // We have to use this kind of getter instead of static member due to
// (presumably) a static-initialization restriction — the rest of this
// comment is missing from this extract; TODO confirm.
89 return LifetimePosition(kMaxInt);
93 static const int kStep = 2;
95 // Code relies on kStep being a power of two.
96 STATIC_ASSERT(IS_POWER_OF_TWO(kStep));
98 explicit LifetimePosition(int value) : value_(value) {}
104 // Representation of the non-empty interval [start,end[.
// Intervals form a singly-linked list (next_) owned by a LiveRange.
105 class UseInterval FINAL : public ZoneObject {
107 UseInterval(LifetimePosition start, LifetimePosition end)
108 : start_(start), end_(end), next_(nullptr) {
109 DCHECK(start.Value() < end.Value());
112 LifetimePosition start() const { return start_; }
113 LifetimePosition end() const { return end_; }
114 UseInterval* next() const { return next_; }
116 // Split this interval at the given position without affecting the
117 // live range that owns it. The interval must contain the position.
118 void SplitAt(LifetimePosition pos, Zone* zone);
120 // If this interval intersects with other return smallest position
121 // that belongs to both of them.
122 LifetimePosition Intersect(const UseInterval* other) const {
// Normalize so that this interval starts no later than the other one.
123 if (other->start().Value() < start_.Value()) return other->Intersect(this);
124 if (other->start().Value() < end_.Value()) return other->start();
125 return LifetimePosition::Invalid();
// Returns true iff point lies in the half-open interval [start_, end_[.
128 bool Contains(LifetimePosition point) const {
129 return start_.Value() <= point.Value() && point.Value() < end_.Value();
132 void set_start(LifetimePosition start) { start_ = start; }
133 void set_next(UseInterval* next) { next_ = next; }
135 LifetimePosition start_;
136 LifetimePosition end_;
140 DISALLOW_COPY_AND_ASSIGN(UseInterval);
// Constraint kind attached to a UsePosition (see UsePosition::type()):
// unconstrained, must be in a register, or must be in a (stack) slot.
144 enum class UsePositionType : uint8_t { kAny, kRequiresRegister, kRequiresSlot };
147 // Representation of a use position.
// Use positions form a singly-linked list (next_) owned by a LiveRange.
148 class UsePosition FINAL : public ZoneObject {
150 UsePosition(LifetimePosition pos, InstructionOperand* operand,
151 InstructionOperand* hint);
153 InstructionOperand* operand() const { return operand_; }
154 bool HasOperand() const { return operand_ != nullptr; }
// hint_ presumably suggests a preferred assignment for this use —
// TODO(review): confirm against the .cc file.
156 InstructionOperand* hint() const { return hint_; }
157 bool HasHint() const;
158 bool RegisterIsBeneficial() const {
159 return RegisterBeneficialField::decode(flags_);
161 UsePositionType type() const { return TypeField::decode(flags_); }
163 LifetimePosition pos() const { return pos_; }
164 UsePosition* next() const { return next_; }
166 void set_next(UsePosition* next) { next_ = next; }
167 void set_type(UsePositionType type, bool register_beneficial);
169 InstructionOperand* const operand_;
170 InstructionOperand* const hint_;
171 LifetimePosition const pos_;
// The type and the register-beneficial flag are bit-packed into one byte
// (decoded from flags_ above).
175 typedef BitField8<UsePositionType, 0, 2> TypeField;
176 typedef BitField8<bool, 2, 1> RegisterBeneficialField;
179 DISALLOW_COPY_AND_ASSIGN(UsePosition);
185 // TODO(dcarney): remove this cache.
// Pre-built canonical register operands, one per possible general/double
// register, so callers can share pointers instead of allocating operands.
186 class InstructionOperandCache FINAL : public ZoneObject {
188 InstructionOperandCache();
// Returns the cached operand for the given general-purpose register index.
// NOTE(review): the opening DCHECK( line of the bounds check is missing
// from this extract (only its second line survives below).
190 InstructionOperand* RegisterOperand(int index) {
192 index < static_cast<int>(arraysize(general_register_operands_)));
193 return &general_register_operands_[index];
// Returns the cached operand for the given double register index.
// NOTE(review): same truncated DCHECK as above.
195 InstructionOperand* DoubleRegisterOperand(int index) {
197 index < static_cast<int>(arraysize(double_register_operands_)));
198 return &double_register_operands_[index];
// Backing storage; element type lines are missing from this extract.
203 general_register_operands_[RegisterConfiguration::kMaxGeneralRegisters];
205 double_register_operands_[RegisterConfiguration::kMaxDoubleRegisters];
207 DISALLOW_COPY_AND_ASSIGN(InstructionOperandCache);
211 // Representation of SSA values' live ranges as a collection of (continuous)
212 // intervals over the instruction ordering.
// Split ranges form a chain: children point at their parent_ (the top-level
// range) and siblings via next_. Fixed (pre-colored) ranges have id_ < 0.
213 class LiveRange FINAL : public ZoneObject {
215 static const int kInvalidAssignment = 0x7fffffff;
217 LiveRange(int id, Zone* zone);
219 UseInterval* first_interval() const { return first_interval_; }
220 UsePosition* first_pos() const { return first_pos_; }
221 LiveRange* parent() const { return parent_; }
222 LiveRange* TopLevel() { return (parent_ == nullptr) ? this : parent_; }
223 const LiveRange* TopLevel() const {
224 return (parent_ == nullptr) ? this : parent_;
226 LiveRange* next() const { return next_; }
227 bool IsChild() const { return parent() != nullptr; }
228 int id() const { return id_; }
229 bool IsFixed() const { return id_ < 0; }
230 bool IsEmpty() const { return first_interval() == nullptr; }
231 // TODO(dcarney): remove this.
232 InstructionOperand* GetAssignedOperand(InstructionOperandCache* cache) const;
233 InstructionOperand GetAssignedOperand() const;
234 int assigned_register() const { return assigned_register_; }
235 int spill_start_index() const { return spill_start_index_; }
236 void set_assigned_register(int reg, InstructionOperandCache* cache);
238 bool is_phi() const { return is_phi_; }
239 void set_is_phi(bool is_phi) { is_phi_ = is_phi; }
240 bool is_non_loop_phi() const { return is_non_loop_phi_; }
241 void set_is_non_loop_phi(bool is_non_loop_phi) {
242 is_non_loop_phi_ = is_non_loop_phi;
244 bool has_slot_use() const { return has_slot_use_; }
245 void set_has_slot_use(bool has_slot_use) { has_slot_use_ = has_slot_use; }
247 // Returns use position in this live range that follows both start
248 // and last processed use position.
249 // Modifies internal state of live range!
250 UsePosition* NextUsePosition(LifetimePosition start);
252 // Returns use position for which register is required in this live
253 // range and which follows both start and last processed use position
254 // Modifies internal state of live range!
255 UsePosition* NextRegisterPosition(LifetimePosition start);
257 // Returns use position for which register is beneficial in this live
258 // range and which follows both start and last processed use position
259 // Modifies internal state of live range!
260 UsePosition* NextUsePositionRegisterIsBeneficial(LifetimePosition start);
262 // Returns use position for which register is beneficial in this live
263 // range and which precedes start.
264 UsePosition* PreviousUsePositionRegisterIsBeneficial(LifetimePosition start);
266 // Can this live range be spilled at this position.
267 bool CanBeSpilled(LifetimePosition pos);
269 // Split this live range at the given position which must follow the start of
// the range.
271 // All uses following the given position will be moved from this
272 // live range to the result live range.
273 void SplitAt(LifetimePosition position, LiveRange* result, Zone* zone);
275 RegisterKind Kind() const { return kind_; }
276 bool HasRegisterAssigned() const {
277 return assigned_register_ != kInvalidAssignment;
279 bool IsSpilled() const { return spilled_; }
281 InstructionOperand* current_hint_operand() const {
282 DCHECK(current_hint_operand_ == FirstHint());
283 return current_hint_operand_;
// Returns the hint of the first use position that has one; presumably
// falls through to nullptr when none exists — that return line is missing
// from this extract.
285 InstructionOperand* FirstHint() const {
286 UsePosition* pos = first_pos_;
287 while (pos != nullptr && !pos->HasHint()) pos = pos->next();
288 if (pos != nullptr) return pos->hint();
292 LifetimePosition Start() const {
294 return first_interval()->start();
297 LifetimePosition End() const {
299 return last_interval_->end();
// How (and whether) the range spills: a concrete operand or a SpillRange.
302 enum class SpillType { kNoSpillType, kSpillOperand, kSpillRange };
303 SpillType spill_type() const { return spill_type_; }
304 InstructionOperand* GetSpillOperand() const {
305 return spill_type_ == SpillType::kSpillOperand ? spill_operand_ : nullptr;
307 SpillRange* GetSpillRange() const {
308 return spill_type_ == SpillType::kSpillRange ? spill_range_ : nullptr;
310 bool HasNoSpillType() const { return spill_type_ == SpillType::kNoSpillType; }
311 bool HasSpillOperand() const {
312 return spill_type_ == SpillType::kSpillOperand;
314 bool HasSpillRange() const { return spill_type_ == SpillType::kSpillRange; }
316 void SpillAtDefinition(Zone* zone, int gap_index,
317 InstructionOperand* operand);
318 void SetSpillOperand(InstructionOperand* operand);
319 void SetSpillRange(SpillRange* spill_range);
320 void CommitSpillOperand(InstructionOperand* operand);
321 void CommitSpillsAtDefinition(InstructionSequence* sequence,
322 InstructionOperand* operand,
323 bool might_be_duplicated);
// Records the earliest instruction index at which this range may spill
// (monotonically decreasing via Min).
325 void SetSpillStartIndex(int start) {
326 spill_start_index_ = Min(start, spill_start_index_);
329 bool ShouldBeAllocatedBefore(const LiveRange* other) const;
330 bool CanCover(LifetimePosition position) const;
331 bool Covers(LifetimePosition position);
332 LifetimePosition FirstIntersection(LiveRange* other);
334 // Add a new interval or a new use position to this live range.
335 void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
336 void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
337 void AddUsePosition(LifetimePosition pos, InstructionOperand* operand,
338 InstructionOperand* hint, Zone* zone);
340 // Shorten the most recently added interval by setting a new start.
341 void ShortenTo(LifetimePosition start);
344 // True if target overlaps an existing interval.
345 bool HasOverlap(UseInterval* target) const;
350 struct SpillAtDefinitionList;
352 void ConvertUsesToOperand(InstructionOperand* op,
353 InstructionOperand* spill_op);
354 UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
355 void AdvanceLastProcessedMarker(UseInterval* to_start_of,
356 LifetimePosition but_not_past) const;
358 // TODO(dcarney): pack this structure better.
// NOTE(review): several members (id_, spilled_, is_phi_, kind_, parent_,
// next_) appear to be missing from this extract.
361 bool has_slot_use_ : 1; // Relevant only for parent.
363 bool is_non_loop_phi_ : 1;
365 int assigned_register_;
366 UseInterval* last_interval_;
367 UseInterval* first_interval_;
368 UsePosition* first_pos_;
371 // This is used as a cache, it doesn't affect correctness.
372 mutable UseInterval* current_interval_;
373 UsePosition* last_processed_use_;
374 // This is used as a cache, it's invalid outside of BuildLiveRanges.
375 InstructionOperand* current_hint_operand_;
376 int spill_start_index_;
377 SpillType spill_type_;
379 InstructionOperand* spill_operand_;
380 SpillRange* spill_range_;
382 SpillAtDefinitionList* spills_at_definition_;
384 friend class RegisterAllocator; // Assigns to kind_.
386 DISALLOW_COPY_AND_ASSIGN(LiveRange);
// Groups live ranges so they can share one spill slot/operand — inferred from
// TryMerge/SetOperand; TODO(review): confirm against the .cc file.
390 class SpillRange FINAL : public ZoneObject {
392 SpillRange(LiveRange* range, Zone* zone);
394 UseInterval* interval() const { return use_interval_; }
// Kind of the first merged range; presumably all merged ranges share it.
395 RegisterKind Kind() const { return live_ranges_[0]->Kind(); }
396 bool IsEmpty() const { return live_ranges_.empty(); }
397 bool TryMerge(SpillRange* other);
398 void SetOperand(InstructionOperand* op);
401 LifetimePosition End() const { return end_position_; }
402 ZoneVector<LiveRange*>& live_ranges() { return live_ranges_; }
403 bool IsIntersectingWith(SpillRange* other) const;
404 // Merge intervals, making sure the use intervals are sorted
405 void MergeDisjointIntervals(UseInterval* other);
407 ZoneVector<LiveRange*> live_ranges_;
408 UseInterval* use_interval_;
409 LifetimePosition end_position_;
411 DISALLOW_COPY_AND_ASSIGN(SpillRange);
// Linear-scan style allocator driving the phases listed below over an
// InstructionSequence.
415 class RegisterAllocator FINAL : public ZoneObject {
417 explicit RegisterAllocator(const RegisterConfiguration* config,
418 Zone* local_zone, Frame* frame,
419 InstructionSequence* code,
420 const char* debug_name = nullptr);
422 const ZoneVector<LiveRange*>& live_ranges() const { return live_ranges_; }
423 const ZoneVector<LiveRange*>& fixed_live_ranges() const {
424 return fixed_live_ranges_;
426 const ZoneVector<LiveRange*>& fixed_double_live_ranges() const {
427 return fixed_double_live_ranges_;
429 InstructionSequence* code() const { return code_; }
430 // This zone is for data structures only needed during register allocation.
431 Zone* local_zone() const { return local_zone_; }
433 // Phase 1 : insert moves to account for fixed register operands.
434 void MeetRegisterConstraints();
436 // Phase 2: deconstruct SSA by inserting moves in successors and the headers
437 // of blocks containing phis.
// NOTE(review): the declaration for this phase (presumably ResolvePhis())
// is missing from this extract.
440 // Phase 3: compute liveness of all virtual registers.
441 void BuildLiveRanges();
442 bool ExistsUseWithoutDefinition();
444 // Phase 4: compute register assignments.
445 void AllocateGeneralRegisters();
446 void AllocateDoubleRegisters();
448 // Phase 5: assign spill slots.
449 void AssignSpillSlots();
451 // Phase 6: commit assignment.
452 void CommitAssignment();
454 // Phase 7: compute values for pointer maps.
455 void PopulatePointerMaps(); // TODO(titzer): rename to PopulateReferenceMaps.
457 // Phase 8: reconnect split ranges with moves.
458 void ConnectRanges();
460 // Phase 9: insert moves to connect ranges across basic blocks.
461 void ResolveControlFlow();
464 int GetVirtualRegister() { return code()->NextVirtualRegister(); }
466 // Checks whether the value of a given virtual register is a reference.
467 // TODO(titzer): rename this to IsReference.
468 bool HasTaggedValue(int virtual_register) const;
470 // Returns the register kind required by the given virtual register.
471 RegisterKind RequiredRegisterKind(int virtual_register) const;
473 // Creates a new live range.
474 LiveRange* NewLiveRange(int index);
476 // This zone is for InstructionOperands and moves that live beyond register
// allocation.
478 Zone* code_zone() const { return code()->zone(); }
480 BitVector* assigned_registers() { return assigned_registers_; }
481 BitVector* assigned_double_registers() { return assigned_double_registers_; }
487 void AllocateRegisters();
488 bool CanEagerlyResolveControlFlow(const InstructionBlock* block) const;
489 bool SafePointsAreInOrder() const;
491 // Liveness analysis support.
492 BitVector* ComputeLiveOut(const InstructionBlock* block);
493 void AddInitialIntervals(const InstructionBlock* block, BitVector* live_out);
494 bool IsOutputRegisterOf(Instruction* instr, int index);
495 bool IsOutputDoubleRegisterOf(Instruction* instr, int index);
496 void ProcessInstructions(const InstructionBlock* block, BitVector* live);
497 void MeetRegisterConstraints(const InstructionBlock* block);
// NOTE(review): the trailing parameter line of this declaration is missing
// from this extract.
498 void MeetConstraintsBetween(Instruction* first, Instruction* second,
500 void MeetRegisterConstraintsForLastInstructionInBlock(
501 const InstructionBlock* block);
502 void ResolvePhis(const InstructionBlock* block);
504 // Helper methods for building intervals.
505 InstructionOperand* AllocateFixed(UnallocatedOperand* operand, int pos,
507 LiveRange* LiveRangeFor(InstructionOperand* operand);
508 void Define(LifetimePosition position, InstructionOperand* operand,
509 InstructionOperand* hint);
510 void Use(LifetimePosition block_start, LifetimePosition position,
511 InstructionOperand* operand, InstructionOperand* hint);
512 void AddGapMove(int index, GapInstruction::InnerPosition position,
513 InstructionOperand* from, InstructionOperand* to);
515 // Helper methods for updating the life range lists.
516 void AddToActive(LiveRange* range);
517 void AddToInactive(LiveRange* range);
518 void AddToUnhandledSorted(LiveRange* range);
519 void AddToUnhandledUnsorted(LiveRange* range);
520 void SortUnhandled();
521 bool UnhandledIsSorted();
522 void ActiveToHandled(LiveRange* range);
523 void ActiveToInactive(LiveRange* range);
524 void InactiveToHandled(LiveRange* range);
525 void InactiveToActive(LiveRange* range);
527 // Helper methods for allocating registers.
528 bool TryReuseSpillForPhi(LiveRange* range);
529 bool TryAllocateFreeReg(LiveRange* range);
530 void AllocateBlockedReg(LiveRange* range);
531 SpillRange* AssignSpillRangeToLiveRange(LiveRange* range);
533 // Live range splitting helpers.
535 // Split the given range at the given position.
536 // If range starts at or after the given position then the
537 // original range is returned.
538 // Otherwise returns the live range that starts at pos and contains
539 // all uses from the original range that follow pos. Uses at pos will
540 // still be owned by the original range after splitting.
541 LiveRange* SplitRangeAt(LiveRange* range, LifetimePosition pos);
543 // Split the given range in a position from the interval [start, end].
544 LiveRange* SplitBetween(LiveRange* range, LifetimePosition start,
545 LifetimePosition end);
547 // Find a lifetime position in the interval [start, end] which
548 // is optimal for splitting: it is either header of the outermost
549 // loop covered by this interval or the latest possible position.
550 LifetimePosition FindOptimalSplitPos(LifetimePosition start,
551 LifetimePosition end);
553 // Spill the given life range after position pos.
554 void SpillAfter(LiveRange* range, LifetimePosition pos);
556 // Spill the given life range after position [start] and up to position [end].
557 void SpillBetween(LiveRange* range, LifetimePosition start,
558 LifetimePosition end);
560 // Spill the given life range after position [start] and up to position [end].
561 // Range is guaranteed to be spilled at least until position [until].
562 void SpillBetweenUntil(LiveRange* range, LifetimePosition start,
563 LifetimePosition until, LifetimePosition end);
565 void SplitAndSpillIntersecting(LiveRange* range);
567 // If we are trying to spill a range inside the loop try to
568 // hoist spill position out to the point just before the loop.
569 LifetimePosition FindOptimalSpillingPos(LiveRange* range,
570 LifetimePosition pos);
572 void Spill(LiveRange* range);
573 bool IsBlockBoundary(LifetimePosition pos);
575 // Helper methods for resolving control flow.
576 void ResolveControlFlow(const InstructionBlock* block,
577 InstructionOperand* cur_op,
578 const InstructionBlock* pred,
579 InstructionOperand* pred_op);
581 void SetLiveRangeAssignedRegister(LiveRange* range, int reg);
583 // Return the block which contains the given lifetime position.
584 const InstructionBlock* GetInstructionBlock(LifetimePosition pos);
586 // Helper methods for the fixed registers.
// Fixed general-register ranges get negative ids: index 0 -> -1, 1 -> -2, ...
587 int RegisterCount() const;
588 static int FixedLiveRangeID(int index) { return -index - 1; }
589 int FixedDoubleLiveRangeID(int index);
590 LiveRange* FixedLiveRangeFor(int index);
591 LiveRange* FixedDoubleLiveRangeFor(int index);
592 LiveRange* LiveRangeFor(int index);
593 GapInstruction* GetLastGap(const InstructionBlock* block);
595 const char* RegisterName(int allocation_index);
597 Instruction* InstructionAt(int index) { return code()->InstructionAt(index); }
599 Frame* frame() const { return frame_; }
600 const char* debug_name() const { return debug_name_; }
601 const RegisterConfiguration* config() const { return config_; }
602 InstructionOperandCache* operand_cache() const { return operand_cache_; }
603 ZoneVector<LiveRange*>& live_ranges() { return live_ranges_; }
604 ZoneVector<LiveRange*>& fixed_live_ranges() { return fixed_live_ranges_; }
605 ZoneVector<LiveRange*>& fixed_double_live_ranges() {
606 return fixed_double_live_ranges_;
608 ZoneVector<LiveRange*>& unhandled_live_ranges() {
609 return unhandled_live_ranges_;
611 ZoneVector<LiveRange*>& active_live_ranges() { return active_live_ranges_; }
612 ZoneVector<LiveRange*>& inactive_live_ranges() {
613 return inactive_live_ranges_;
615 ZoneVector<SpillRange*>& spill_ranges() { return spill_ranges_; }
// NOTE(review): the opening `struct PhiMapValue {` line appears to be
// missing from this extract.
618 PhiMapValue(PhiInstruction* phi, const InstructionBlock* block)
619 : phi(phi), block(block) {}
620 PhiInstruction* const phi;
621 const InstructionBlock* const block;
623 typedef ZoneMap<int, PhiMapValue> PhiMap;
625 Zone* const local_zone_;
627 InstructionSequence* const code_;
628 const char* const debug_name_;
630 const RegisterConfiguration* config_;
631 InstructionOperandCache* const operand_cache_;
634 // During liveness analysis keep a mapping from block id to live_in sets
635 // for blocks already analyzed.
636 ZoneVector<BitVector*> live_in_sets_;
638 // Liveness analysis results.
639 ZoneVector<LiveRange*> live_ranges_;
641 // Lists of live ranges
642 ZoneVector<LiveRange*> fixed_live_ranges_;
643 ZoneVector<LiveRange*> fixed_double_live_ranges_;
644 ZoneVector<LiveRange*> unhandled_live_ranges_;
645 ZoneVector<LiveRange*> active_live_ranges_;
646 ZoneVector<LiveRange*> inactive_live_ranges_;
647 ZoneVector<SpillRange*> spill_ranges_;
// Presumably track registers ever assigned (exposed via the getters above);
// TODO(review): confirm against the .cc file.
652 BitVector* assigned_registers_;
653 BitVector* assigned_double_registers_;
656 LifetimePosition allocation_finger_;
659 DISALLOW_COPY_AND_ASSIGN(RegisterAllocator);
662 } // namespace compiler
663 } // namespace internal
666 #endif // V8_REGISTER_ALLOCATOR_H_