1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 #ifndef V8_LITHIUM_ALLOCATOR_H_
29 #define V8_LITHIUM_ALLOCATOR_H_
33 #include "allocation.h"
40 // Forward declarations.
59 // This class represents a single point of a LOperand's lifetime.
60 // For each lithium instruction there are exactly two lifetime positions:
61 // the beginning and the end of the instruction. Lifetime positions for
62 // different lithium instructions are disjoint.
63 class LifetimePosition {
65 // Return the lifetime position that corresponds to the beginning of
66 // the instruction with the given index.
67 static LifetimePosition FromInstructionIndex(int index) {
68 return LifetimePosition(index * kStep);
71 // Returns a numeric representation of this lifetime position.
76 // Returns the index of the instruction to which this lifetime position
78 int InstructionIndex() const {
80 return value_ / kStep;
83 // Returns true if this lifetime position corresponds to the instruction
85 bool IsInstructionStart() const {
86 return (value_ & (kStep - 1)) == 0;
89 // Returns the lifetime position for the start of the instruction which
90 // corresponds to this lifetime position.
91 LifetimePosition InstructionStart() const {
93 return LifetimePosition(value_ & ~(kStep - 1));
96 // Returns the lifetime position for the end of the instruction which
97 // corresponds to this lifetime position.
98 LifetimePosition InstructionEnd() const {
100 return LifetimePosition(InstructionStart().Value() + kStep/2);
103 // Returns the lifetime position for the beginning of the next instruction.
104 LifetimePosition NextInstruction() const {
106 return LifetimePosition(InstructionStart().Value() + kStep);
109 // Returns the lifetime position for the beginning of the previous
111 LifetimePosition PrevInstruction() const {
114 return LifetimePosition(InstructionStart().Value() - kStep);
117 // Constructs the lifetime position which does not correspond to any
119 LifetimePosition() : value_(-1) {}
121 // Returns true if this lifetime positions corrensponds to some
123 bool IsValid() const { return value_ != -1; }
125 static inline LifetimePosition Invalid() { return LifetimePosition(); }
127 static inline LifetimePosition MaxPosition() {
128 // We have to use this kind of getter instead of static member due to
130 return LifetimePosition(kMaxInt);
134 static const int kStep = 2;
136 // Code relies on kStep being a power of two.
137 STATIC_ASSERT(IS_POWER_OF_TWO(kStep));
139 explicit LifetimePosition(int value) : value_(value) { }
146 UNALLOCATED_REGISTERS,
154 inline bool IsSIMD128RegisterKind(RegisterKind kind) {
155 return kind == FLOAT32x4_REGISTERS || kind == INT32x4_REGISTERS;
159 // A register-allocator view of a Lithium instruction. It contains the id of
160 // the output operand and a list of input operand uses.
165 // Iterator for non-null temp operands.
// NOTE(review): interior lines of this class were dropped from the listing
// (original numbers 167, 169, 172-173, 176-179): access specifiers, likely a
// Done()-style termination check plus cursor members, and the closing brace.
// Restore them from the upstream file before compiling.
166 class TempIterator BASE_EMBEDDED {
// Positions the iterator at the first interesting temp of |instr|.
168 inline explicit TempIterator(LInstruction* instr);
// Current() yields the temp operand under the cursor; Advance() moves on.
170 inline LOperand* Current();
171 inline void Advance();
// Presumably skips NULL temps (per the class comment above) -- confirm.
174 inline void SkipUninteresting();
// Instruction whose temps are being iterated (non-owning).
175 LInstruction* instr_;
181 // Iterator for non-constant input operands.
// NOTE(review): interior lines were dropped from this listing (original
// numbers 183, 185, 188-189, 192-195): access specifiers, likely a
// termination check plus cursor members, and the closing brace -- restore
// from the upstream file before compiling.
182 class InputIterator BASE_EMBEDDED {
// Positions the iterator at the first interesting input of |instr|.
184 inline explicit InputIterator(LInstruction* instr);
186 inline LOperand* Current();
187 inline void Advance();
// Presumably skips constant inputs (per the class comment) -- confirm.
190 inline void SkipUninteresting();
// Instruction whose inputs are being iterated (non-owning).
191 LInstruction* instr_;
// Iterates over the operand uses of an instruction by composing two
// sub-iterators: the non-constant inputs (input_iterator_) and -- judging by
// the member name -- the environment (deoptimization) operands
// (env_iterator_). Confirm the exact traversal order against upstream.
// NOTE(review): dropped lines (original numbers 198, 200, 203-204, 207-208)
// held access specifiers, likely a termination check, and the closing brace.
197 class UseIterator BASE_EMBEDDED {
199 inline explicit UseIterator(LInstruction* instr);
201 inline LOperand* Current();
202 inline void Advance();
// Sub-iterator over the instruction's non-constant inputs.
205 InputIterator input_iterator_;
// Sub-iterator over nested (environment) operands -- see note above.
206 DeepIterator env_iterator_;
210 // Representation of the non-empty interval [start,end[.
211 class UseInterval: public ZoneObject {
213 UseInterval(LifetimePosition start, LifetimePosition end)
214 : start_(start), end_(end), next_(NULL) {
215 ASSERT(start.Value() < end.Value());
218 LifetimePosition start() const { return start_; }
219 LifetimePosition end() const { return end_; }
220 UseInterval* next() const { return next_; }
222 // Split this interval at the given position without effecting the
223 // live range that owns it. The interval must contain the position.
224 void SplitAt(LifetimePosition pos, Zone* zone);
226 // If this interval intersects with other return smallest position
227 // that belongs to both of them.
228 LifetimePosition Intersect(const UseInterval* other) const {
229 if (other->start().Value() < start_.Value()) return other->Intersect(this);
230 if (other->start().Value() < end_.Value()) return other->start();
231 return LifetimePosition::Invalid();
234 bool Contains(LifetimePosition point) const {
235 return start_.Value() <= point.Value() && point.Value() < end_.Value();
239 void set_start(LifetimePosition start) { start_ = start; }
240 void set_next(UseInterval* next) { next_ = next; }
242 LifetimePosition start_;
243 LifetimePosition end_;
246 friend class LiveRange; // Assigns to start_.
249 // Representation of a use position.
// A single use of a value inside a live range, kept in a singly linked
// list ordered by position (see next()/set_next()).
// NOTE(review): dropped lines (original numbers 251, 253, 256, 261,
// 264-265, 267, 271-272, 274, 276-277) held access specifiers, the closing
// brace, a `UsePosition* next_;` member (required by next()/set_next()
// below) and most likely a member backing RequiresRegister() -- restore
// from the upstream file.
250 class UsePosition: public ZoneObject {
252 UsePosition(LifetimePosition pos, LOperand* operand, LOperand* hint);
254 LOperand* operand() const { return operand_; }
255 bool HasOperand() const { return operand_ != NULL; }
257 LOperand* hint() const { return hint_; }
258 bool HasHint() const;
// Whether this use must live in / benefits from a register.
259 bool RequiresRegister() const;
260 bool RegisterIsBeneficial() const;
262 LifetimePosition pos() const { return pos_; }
263 UsePosition* next() const { return next_; }
266 void set_next(UsePosition* next) { next_ = next; }
// Immutable per-use data, fixed at construction time.
268 LOperand* const operand_;
269 LOperand* const hint_;
270 LifetimePosition const pos_;
273 bool register_beneficial_;
// LiveRange manages the linked-list structure of use positions.
275 friend class LiveRange;
278 // Representation of SSA values' live ranges as a collection of (continuous)
279 // intervals over the instruction ordering.
// NOTE(review): this listing dropped interior lines throughout the class:
// access specifiers, closing braces of the inline methods, and several
// member declarations -- at least id_, parent_, next_, spilled_ and kind_,
// all of which are referenced by the visible accessors. Restore them from
// the upstream file before compiling.
280 class LiveRange: public ZoneObject {
// Sentinel meaning "no register assigned"; see HasRegisterAssigned().
282 static const int kInvalidAssignment = 0x7fffffff;
284 LiveRange(int id, Zone* zone);
286 UseInterval* first_interval() const { return first_interval_; }
287 UsePosition* first_pos() const { return first_pos_; }
288 LiveRange* parent() const { return parent_; }
289 LiveRange* TopLevel() { return (parent_ == NULL) ? this : parent_; }
290 LiveRange* next() const { return next_; }
291 bool IsChild() const { return parent() != NULL; }
292 int id() const { return id_; }
// Fixed (precolored) ranges use negative ids; see LAllocator::FixedLiveRangeID.
293 bool IsFixed() const { return id_ < 0; }
294 bool IsEmpty() const { return first_interval() == NULL; }
295 LOperand* CreateAssignedOperand(Zone* zone);
296 int assigned_register() const { return assigned_register_; }
297 int spill_start_index() const { return spill_start_index_; }
298 void set_assigned_register(int reg, Zone* zone);
299 void MakeSpilled(Zone* zone);
301 // Returns use position in this live range that follows both start
302 // and last processed use position.
303 // Modifies internal state of live range!
304 UsePosition* NextUsePosition(LifetimePosition start);
306 // Returns use position for which register is required in this live
307 // range and which follows both start and last processed use position
308 // Modifies internal state of live range!
309 UsePosition* NextRegisterPosition(LifetimePosition start);
311 // Returns use position for which register is beneficial in this live
312 // range and which follows both start and last processed use position
313 // Modifies internal state of live range!
314 UsePosition* NextUsePositionRegisterIsBeneficial(LifetimePosition start);
316 // Returns use position for which register is beneficial in this live
317 // range and which precedes start.
318 UsePosition* PreviousUsePositionRegisterIsBeneficial(LifetimePosition start);
320 // Can this live range be spilled at this position.
321 bool CanBeSpilled(LifetimePosition pos);
323 // Split this live range at the given position which must follow the start of
325 // All uses following the given position will be moved from this
326 // live range to the result live range.
327 void SplitAt(LifetimePosition position, LiveRange* result, Zone* zone);
329 RegisterKind Kind() const { return kind_; }
330 bool HasRegisterAssigned() const {
331 return assigned_register_ != kInvalidAssignment;
333 bool IsSpilled() const { return spilled_; }
// Cached first hint; the ASSERT re-checks the cache against a linear scan.
335 LOperand* current_hint_operand() const {
336 ASSERT(current_hint_operand_ == FirstHint());
337 return current_hint_operand_;
// Walks the use-position list for the first use carrying a hint. (A dropped
// line after original 342 presumably returns NULL when none -- confirm.)
339 LOperand* FirstHint() const {
340 UsePosition* pos = first_pos_;
341 while (pos != NULL && !pos->HasHint()) pos = pos->next();
342 if (pos != NULL) return pos->hint();
346 LifetimePosition Start() const {
348 return first_interval()->start();
351 LifetimePosition End() const {
353 return last_interval_->end();
356 bool HasAllocatedSpillOperand() const;
357 LOperand* GetSpillOperand() const { return spill_operand_; }
358 void SetSpillOperand(LOperand* operand);
// Spill start index only ever shrinks (Min), tracking the earliest spill.
360 void SetSpillStartIndex(int start) {
361 spill_start_index_ = Min(start, spill_start_index_);
364 bool ShouldBeAllocatedBefore(const LiveRange* other) const;
365 bool CanCover(LifetimePosition position) const;
366 bool Covers(LifetimePosition position);
367 LifetimePosition FirstIntersection(LiveRange* other);
369 // Add a new interval or a new use position to this live range.
// NOTE(review): the trailing parameter lines of the next three declarations
// (original numbers 372, 375, 377-380) were dropped from the listing.
370 void EnsureInterval(LifetimePosition start,
371 LifetimePosition end,
373 void AddUseInterval(LifetimePosition start,
374 LifetimePosition end,
376 void AddUsePosition(LifetimePosition pos,
381 // Shorten the most recently added interval by setting a new start.
382 void ShortenTo(LifetimePosition start);
385 // True if target overlaps an existing interval.
386 bool HasOverlap(UseInterval* target) const;
391 void ConvertOperands(Zone* zone);
392 UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
393 void AdvanceLastProcessedMarker(UseInterval* to_start_of,
394 LifetimePosition but_not_past) const;
399 int assigned_register_;
400 UseInterval* last_interval_;
401 UseInterval* first_interval_;
402 UsePosition* first_pos_;
405 // This is used as a cache, it doesn't affect correctness.
406 mutable UseInterval* current_interval_;
407 UsePosition* last_processed_use_;
408 // This is used as a cache, it's invalid outside of BuildLiveRanges.
409 LOperand* current_hint_operand_;
410 LOperand* spill_operand_;
411 int spill_start_index_;
413 friend class LAllocator; // Assigns to kind_.
// The register allocator itself. The unhandled/active/inactive range lists
// and the range splitting/spilling helpers below indicate a linear-scan
// style allocator over the Lithium instruction stream.
// NOTE(review): this listing dropped interior lines throughout the class:
// access specifiers, closing braces of the inline methods, and several
// member declarations (at least zone_, graph_, allocation_ok_ and
// has_osr_entry_, which the visible code reads/writes). Restore them from
// the upstream file before compiling.
417 class LAllocator BASE_EMBEDDED {
419 LAllocator(int first_virtual_register, HGraph* graph);
421 static void TraceAlloc(const char* msg, ...);
423 // Checks whether the value of a given virtual register is tagged.
424 bool HasTaggedValue(int virtual_register) const;
426 // Returns the register kind required by the given virtual register.
427 RegisterKind RequiredRegisterKind(int virtual_register) const;
// Main entry point: runs the allocation phases over |chunk|.
429 bool Allocate(LChunk* chunk);
431 const ZoneList<LiveRange*>* live_ranges() const { return &live_ranges_; }
432 const Vector<LiveRange*>* fixed_live_ranges() const {
433 return &fixed_live_ranges_;
435 const Vector<LiveRange*>* fixed_double_live_ranges() const {
436 return &fixed_double_live_ranges_;
439 LPlatformChunk* chunk() const { return chunk_; }
440 HGraph* graph() const { return graph_; }
441 Isolate* isolate() const { return graph_->isolate(); }
442 Zone* zone() { return &zone_; }
// Hands out fresh virtual register numbers; on overflow it records the
// failure in allocation_ok_. (Dropped lines 448-449 presumably return an
// in-range dummy value, per the comment on original line 447 -- confirm.)
444 int GetVirtualRegister() {
445 if (next_virtual_register_ >= LUnallocated::kMaxVirtualRegisters) {
446 allocation_ok_ = false;
447 // Maintain the invariant that we return something below the maximum.
450 return next_virtual_register_++;
453 bool AllocationOk() { return allocation_ok_; }
455 void MarkAsOsrEntry() {
456 // There can be only one.
457 ASSERT(!has_osr_entry_);
458 // Simply set a flag to find and process instruction later.
459 has_osr_entry_ = true;
466 BitVector* assigned_registers() {
467 return assigned_registers_;
469 BitVector* assigned_double_registers() {
470 return assigned_double_registers_;
// Major allocation phases.
474 void MeetRegisterConstraints();
476 void BuildLiveRanges();
477 void AllocateGeneralRegisters();
478 void AllocateDoubleRegisters();
479 void ConnectRanges();
480 void ResolveControlFlow();
481 void PopulatePointerMaps();
482 void AllocateRegisters();
483 bool CanEagerlyResolveControlFlow(HBasicBlock* block) const;
484 inline bool SafePointsAreInOrder() const;
486 // Liveness analysis support.
487 void InitializeLivenessAnalysis();
488 BitVector* ComputeLiveOut(HBasicBlock* block);
489 void AddInitialIntervals(HBasicBlock* block, BitVector* live_out);
490 void ProcessInstructions(HBasicBlock* block, BitVector* live);
491 void MeetRegisterConstraints(HBasicBlock* block);
// NOTE(review): the trailing parameter line of this declaration (original
// number 494) was dropped from the listing.
492 void MeetConstraintsBetween(LInstruction* first,
493 LInstruction* second,
495 void ResolvePhis(HBasicBlock* block);
497 // Helper methods for building intervals.
498 LOperand* AllocateFixed(LUnallocated* operand, int pos, bool is_tagged);
499 LiveRange* LiveRangeFor(LOperand* operand);
500 void Define(LifetimePosition position, LOperand* operand, LOperand* hint);
// NOTE(review): trailing parameter lines (original 503-504) were dropped.
501 void Use(LifetimePosition block_start,
502 LifetimePosition position,
505 void AddConstraintsGapMove(int index, LOperand* from, LOperand* to);
507 // Helper methods for updating the life range lists.
508 void AddToActive(LiveRange* range);
509 void AddToInactive(LiveRange* range);
510 void AddToUnhandledSorted(LiveRange* range);
511 void AddToUnhandledUnsorted(LiveRange* range);
512 void SortUnhandled();
513 bool UnhandledIsSorted();
514 void ActiveToHandled(LiveRange* range);
515 void ActiveToInactive(LiveRange* range);
516 void InactiveToHandled(LiveRange* range);
517 void InactiveToActive(LiveRange* range);
518 void FreeSpillSlot(LiveRange* range);
519 LOperand* TryReuseSpillSlot(LiveRange* range);
521 // Helper methods for allocating registers.
522 bool TryAllocateFreeReg(LiveRange* range);
523 void AllocateBlockedReg(LiveRange* range);
525 // Live range splitting helpers.
527 // Split the given range at the given position.
528 // If range starts at or after the given position then the
529 // original range is returned.
530 // Otherwise returns the live range that starts at pos and contains
531 // all uses from the original range that follow pos. Uses at pos will
532 // still be owned by the original range after splitting.
533 LiveRange* SplitRangeAt(LiveRange* range, LifetimePosition pos);
535 // Split the given range in a position from the interval [start, end].
536 LiveRange* SplitBetween(LiveRange* range,
537 LifetimePosition start,
538 LifetimePosition end);
540 // Find a lifetime position in the interval [start, end] which
541 // is optimal for splitting: it is either header of the outermost
542 // loop covered by this interval or the latest possible position.
543 LifetimePosition FindOptimalSplitPos(LifetimePosition start,
544 LifetimePosition end);
546 // Spill the given life range after position pos.
547 void SpillAfter(LiveRange* range, LifetimePosition pos);
549 // Spill the given life range after position [start] and up to position [end].
550 void SpillBetween(LiveRange* range,
551 LifetimePosition start,
552 LifetimePosition end);
554 // Spill the given life range after position [start] and up to position [end].
555 // Range is guaranteed to be spilled at least until position [until].
556 void SpillBetweenUntil(LiveRange* range,
557 LifetimePosition start,
558 LifetimePosition until,
559 LifetimePosition end);
561 void SplitAndSpillIntersecting(LiveRange* range);
563 // If we are trying to spill a range inside the loop try to
564 // hoist spill position out to the point just before the loop.
565 LifetimePosition FindOptimalSpillingPos(LiveRange* range,
566 LifetimePosition pos);
568 void Spill(LiveRange* range);
569 bool IsBlockBoundary(LifetimePosition pos);
571 // Helper methods for resolving control flow.
// NOTE(review): the remaining parameter lines of this declaration (original
// numbers 573-575) were dropped from the listing.
572 void ResolveControlFlow(LiveRange* range,
576 inline void SetLiveRangeAssignedRegister(LiveRange* range, int reg);
578 // Return parallel move that should be used to connect ranges split at the
580 LParallelMove* GetConnectingParallelMove(LifetimePosition pos);
582 // Return the block which contains give lifetime position.
583 HBasicBlock* GetBlock(LifetimePosition pos);
585 // Helper methods for the fixed registers.
586 int RegisterCount() const;
587 static int FixedLiveRangeID(int index) { return -index - 1; }
588 static int FixedDoubleLiveRangeID(int index);
589 LiveRange* FixedLiveRangeFor(int index);
590 LiveRange* FixedDoubleLiveRangeFor(int index);
591 LiveRange* LiveRangeFor(int index);
592 HPhi* LookupPhi(LOperand* operand) const;
593 LGap* GetLastGap(HBasicBlock* block);
595 const char* RegisterName(int allocation_index);
597 inline bool IsGapAt(int index);
599 inline LInstruction* InstructionAt(int index);
601 inline LGap* GapAt(int index);
605 LPlatformChunk* chunk_;
607 // During liveness analysis keep a mapping from block id to live_in sets
608 // for blocks already analyzed.
609 ZoneList<BitVector*> live_in_sets_;
611 // Liveness analysis results.
612 ZoneList<LiveRange*> live_ranges_;
614 // Lists of live ranges
// NOTE(review): the declarator line of this vector (original 616, which per
// the accessor on lines 432-433 is `fixed_live_ranges_;`) was dropped.
615 EmbeddedVector<LiveRange*, Register::kMaxNumAllocatableRegisters>
617 EmbeddedVector<LiveRange*, DoubleRegister::kMaxNumAllocatableRegisters>
618 fixed_double_live_ranges_;
619 ZoneList<LiveRange*> unhandled_live_ranges_;
620 ZoneList<LiveRange*> active_live_ranges_;
621 ZoneList<LiveRange*> inactive_live_ranges_;
622 ZoneList<LiveRange*> reusable_slots_;
623 // Slots reusable for both float32x4 and int32x4 register spilling.
624 ZoneList<LiveRange*> reusable_simd128_slots_;
626 // Next virtual register number to be assigned to temporaries.
627 int next_virtual_register_;
628 int first_artificial_register_;
629 GrowableBitVector double_artificial_registers_;
630 GrowableBitVector float32x4_artificial_registers_;
631 GrowableBitVector int32x4_artificial_registers_;
636 BitVector* assigned_registers_;
637 BitVector* assigned_double_registers_;
643 // Indicates success or failure during register allocation.
// NOTE(review): the member this comment documents (presumably
// `bool allocation_ok_;`, read by AllocationOk() above) was dropped.
647 LifetimePosition allocation_finger_;
650 DISALLOW_COPY_AND_ASSIGN(LAllocator);
// Compilation-phase wrapper for running the register allocator (presumably
// a scoped/RAII phase like other CompilationPhase subclasses -- confirm).
// NOTE(review): dropped lines (original numbers 655, 657-659, 662, 664)
// held access specifiers, likely a destructor, and the closing brace --
// restore from the upstream file.
654 class LAllocatorPhase : public CompilationPhase {
656 LAllocatorPhase(const char* name, LAllocator* allocator);
// Allocator being driven by this phase (non-owning).
660 LAllocator* allocator_;
// Zone allocation size captured at phase start; presumably compared on
// exit to report the phase's allocation -- confirm against the .cc file.
661 unsigned allocator_zone_start_allocation_size_;
663 DISALLOW_COPY_AND_ASSIGN(LAllocatorPhase);
667 } } // namespace v8::internal
669 #endif // V8_LITHIUM_ALLOCATOR_H_