// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"
#include "lithium-allocator-inl.h"

#include "hydrogen.h"
#include "string-stream.h"

#if V8_TARGET_ARCH_IA32
#include "ia32/lithium-ia32.h"
#elif V8_TARGET_ARCH_X64
#include "x64/lithium-x64.h"
#elif V8_TARGET_ARCH_ARM
#include "arm/lithium-arm.h"
#elif V8_TARGET_ARCH_MIPS
#include "mips/lithium-mips.h"
#else
#error "Unknown architecture."
#endif

namespace v8 {
namespace internal {

static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
  return a.Value() < b.Value() ? a : b;
}


static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
  return a.Value() > b.Value() ? a : b;
}

UsePosition::UsePosition(LifetimePosition pos,
                         LOperand* operand,
                         LOperand* hint)
    : operand_(operand),
      hint_(hint),
      pos_(pos),
      next_(NULL),
      requires_reg_(false),
      register_beneficial_(true) {
  if (operand_ != NULL && operand_->IsUnallocated()) {
    LUnallocated* unalloc = LUnallocated::cast(operand_);
    requires_reg_ = unalloc->HasRegisterPolicy();
    register_beneficial_ = !unalloc->HasAnyPolicy();
  }
  ASSERT(pos_.IsValid());
}

bool UsePosition::HasHint() const {
  return hint_ != NULL && !hint_->IsUnallocated();
}


bool UsePosition::RequiresRegister() const {
  return requires_reg_;
}


bool UsePosition::RegisterIsBeneficial() const {
  return register_beneficial_;
}

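// Splits this interval in two: [start, pos) stays in this interval, while a
// newly allocated interval takes over [pos, end) and is spliced into the
// chain immediately after this one.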
void UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
  ASSERT(Contains(pos) && pos.Value() != start().Value());
  UseInterval* after = new(zone) UseInterval(pos, end_);
  after->next_ = next_;
  next_ = after;
  end_ = pos;
}

#ifdef DEBUG


void LiveRange::Verify() const {
  UsePosition* cur = first_pos_;
  while (cur != NULL) {
    ASSERT(Start().Value() <= cur->pos().Value() &&
           cur->pos().Value() <= End().Value());
    cur = cur->next();
  }
}

bool LiveRange::HasOverlap(UseInterval* target) const {
  UseInterval* current_interval = first_interval_;
  while (current_interval != NULL) {
    // Intervals overlap if the start of one is contained in the other.
    if (current_interval->Contains(target->start()) ||
        target->Contains(current_interval->start())) {
      return true;
    }
    current_interval = current_interval->next();
  }
  return false;
}


#endif

LiveRange::LiveRange(int id, Zone* zone)
    : id_(id),
      spilled_(false),
      kind_(UNALLOCATED_REGISTERS),
      assigned_register_(kInvalidAssignment),
      last_interval_(NULL),
      first_interval_(NULL),
      first_pos_(NULL),
      parent_(NULL),
      next_(NULL),
      current_interval_(NULL),
      last_processed_use_(NULL),
      current_hint_operand_(NULL),
      spill_operand_(new(zone) LOperand()),
      spill_start_index_(kMaxInt) { }

void LiveRange::set_assigned_register(int reg, Zone* zone) {
  ASSERT(!HasRegisterAssigned() && !IsSpilled());
  assigned_register_ = reg;
  ConvertOperands(zone);
}

void LiveRange::MakeSpilled(Zone* zone) {
  ASSERT(!IsSpilled());
  ASSERT(TopLevel()->HasAllocatedSpillOperand());
  spilled_ = true;
  assigned_register_ = kInvalidAssignment;
  ConvertOperands(zone);
}

bool LiveRange::HasAllocatedSpillOperand() const {
  ASSERT(spill_operand_ != NULL);
  return !spill_operand_->IsIgnored();
}

void LiveRange::SetSpillOperand(LOperand* operand) {
  ASSERT(!operand->IsUnallocated());
  ASSERT(spill_operand_ != NULL);
  ASSERT(spill_operand_->IsIgnored());
  spill_operand_->ConvertTo(operand->kind(), operand->index());
}

UsePosition* LiveRange::NextUsePosition(LifetimePosition start) {
  UsePosition* use_pos = last_processed_use_;
  if (use_pos == NULL) use_pos = first_pos();
  while (use_pos != NULL && use_pos->pos().Value() < start.Value()) {
    use_pos = use_pos->next();
  }
  last_processed_use_ = use_pos;
  return use_pos;
}

UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
    LifetimePosition start) {
  UsePosition* pos = NextUsePosition(start);
  while (pos != NULL && !pos->RegisterIsBeneficial()) {
    pos = pos->next();
  }
  return pos;
}

UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial(
    LifetimePosition start) {
  UsePosition* pos = first_pos();
  UsePosition* prev = NULL;
  while (pos != NULL && pos->pos().Value() < start.Value()) {
    if (pos->RegisterIsBeneficial()) prev = pos;
    pos = pos->next();
  }
  return prev;
}

UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) {
  UsePosition* pos = NextUsePosition(start);
  while (pos != NULL && !pos->RequiresRegister()) {
    pos = pos->next();
  }
  return pos;
}

bool LiveRange::CanBeSpilled(LifetimePosition pos) {
  // We cannot spill a live range that has a use requiring a register
  // at the current or the immediate next position.
  UsePosition* use_pos = NextRegisterPosition(pos);
  if (use_pos == NULL) return true;
  return
      use_pos->pos().Value() > pos.NextInstruction().InstructionEnd().Value();
}

LOperand* LiveRange::CreateAssignedOperand(Zone* zone) {
  LOperand* op = NULL;
  if (HasRegisterAssigned()) {
    ASSERT(!IsSpilled());
    switch (Kind()) {
      case GENERAL_REGISTERS:
        op = LRegister::Create(assigned_register(), zone);
        break;
      case DOUBLE_REGISTERS:
        op = LDoubleRegister::Create(assigned_register(), zone);
        break;
      case FLOAT32x4_REGISTERS:
        op = LFloat32x4Register::Create(assigned_register(), zone);
        break;
      case INT32x4_REGISTERS:
        op = LInt32x4Register::Create(assigned_register(), zone);
        break;
      default:
        UNREACHABLE();
    }
  } else if (IsSpilled()) {
    ASSERT(!HasRegisterAssigned());
    op = TopLevel()->GetSpillOperand();
    ASSERT(!op->IsUnallocated());
  } else {
    LUnallocated* unalloc = new(zone) LUnallocated(LUnallocated::NONE);
    unalloc->set_virtual_register(id_);
    op = unalloc;
  }
  return op;
}

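// The two helpers below treat current_interval_ as a one-entry cache:
// queries that advance monotonically through the range resume scanning from
// the cached interval instead of from the head of the interval chain.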
UseInterval* LiveRange::FirstSearchIntervalForPosition(
    LifetimePosition position) const {
  if (current_interval_ == NULL) return first_interval_;
  if (current_interval_->start().Value() > position.Value()) {
    current_interval_ = NULL;
    return first_interval_;
  }
  return current_interval_;
}

void LiveRange::AdvanceLastProcessedMarker(
    UseInterval* to_start_of, LifetimePosition but_not_past) const {
  if (to_start_of == NULL) return;
  if (to_start_of->start().Value() > but_not_past.Value()) return;
  LifetimePosition start =
      current_interval_ == NULL ? LifetimePosition::Invalid()
                                : current_interval_->start();
  if (to_start_of->start().Value() > start.Value()) {
    current_interval_ = to_start_of;
  }
}

void LiveRange::SplitAt(LifetimePosition position,
                        LiveRange* result,
                        Zone* zone) {
  ASSERT(Start().Value() < position.Value());
  ASSERT(result->IsEmpty());
  // Find the last interval that ends before the position. If the
  // position is contained in one of the intervals in the chain, we
  // split that interval and use the first part.
  UseInterval* current = FirstSearchIntervalForPosition(position);

  // If the split position coincides with the beginning of a use interval
  // we need to split use positions in a special way.
  bool split_at_start = false;

  if (current->start().Value() == position.Value()) {
    // When splitting at start we need to locate the previous use interval.
    current = first_interval_;
  }

  while (current != NULL) {
    if (current->Contains(position)) {
      current->SplitAt(position, zone);
      break;
    }
    UseInterval* next = current->next();
    if (next->start().Value() >= position.Value()) {
      split_at_start = (next->start().Value() == position.Value());
      break;
    }
    current = next;
  }

  // Partition original use intervals to the two live ranges.
  UseInterval* before = current;
  UseInterval* after = before->next();
  result->last_interval_ = (last_interval_ == before)
      ? after            // Only interval in the range after split.
      : last_interval_;  // Last interval of the original range.
  result->first_interval_ = after;
  last_interval_ = before;

  // Find the last use position before the split and the first use
  // position after it.
  UsePosition* use_after = first_pos_;
  UsePosition* use_before = NULL;
  if (split_at_start) {
    // The split position coincides with the beginning of a use interval (the
    // end of a lifetime hole). A use at this position should be attributed to
    // the split child, because the split child owns the use interval covering
    // it.
    while (use_after != NULL && use_after->pos().Value() < position.Value()) {
      use_before = use_after;
      use_after = use_after->next();
    }
  } else {
    while (use_after != NULL && use_after->pos().Value() <= position.Value()) {
      use_before = use_after;
      use_after = use_after->next();
    }
  }

  // Partition original use positions to the two live ranges.
  if (use_before != NULL) {
    use_before->next_ = NULL;
  } else {
    first_pos_ = NULL;
  }
  result->first_pos_ = use_after;

  // Discard cached iteration state. It might be pointing
  // to the use that no longer belongs to this live range.
  last_processed_use_ = NULL;
  current_interval_ = NULL;

  // Link the new live range in the chain before any of the other
  // ranges linked from the range before the split.
  result->parent_ = (parent_ == NULL) ? this : parent_;
  result->kind_ = result->parent_->kind_;
  result->next_ = next_;
  next_ = result;

#ifdef DEBUG
  Verify();
  result->Verify();
#endif
}

// This implements an ordering on live ranges so that they are ordered by their
// start positions. This is needed for the correctness of the register
// allocation algorithm. If two live ranges start at the same offset then there
// is a tie breaker based on where the value is first used. This part of the
// ordering is merely a heuristic.
bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
  LifetimePosition start = Start();
  LifetimePosition other_start = other->Start();
  if (start.Value() == other_start.Value()) {
    UsePosition* pos = first_pos();
    if (pos == NULL) return false;
    UsePosition* other_pos = other->first_pos();
    if (other_pos == NULL) return true;
    return pos->pos().Value() < other_pos->pos().Value();
  }
  return start.Value() < other_start.Value();
}

void LiveRange::ShortenTo(LifetimePosition start) {
  LAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value());
  ASSERT(first_interval_ != NULL);
  ASSERT(first_interval_->start().Value() <= start.Value());
  ASSERT(start.Value() < first_interval_->end().Value());
  first_interval_->set_start(start);
}

void LiveRange::EnsureInterval(LifetimePosition start,
                               LifetimePosition end,
                               Zone* zone) {
  LAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n",
                         id_,
                         start.Value(),
                         end.Value());
  LifetimePosition new_end = end;
  while (first_interval_ != NULL &&
         first_interval_->start().Value() <= end.Value()) {
    if (first_interval_->end().Value() > end.Value()) {
      new_end = first_interval_->end();
    }
    first_interval_ = first_interval_->next();
  }

  UseInterval* new_interval = new(zone) UseInterval(start, new_end);
  new_interval->next_ = first_interval_;
  first_interval_ = new_interval;
  if (new_interval->next() == NULL) {
    last_interval_ = new_interval;
  }
}

void LiveRange::AddUseInterval(LifetimePosition start,
                               LifetimePosition end,
                               Zone* zone) {
  LAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n",
                         id_,
                         start.Value(),
                         end.Value());
  if (first_interval_ == NULL) {
    UseInterval* interval = new(zone) UseInterval(start, end);
    first_interval_ = interval;
    last_interval_ = interval;
  } else {
    if (end.Value() == first_interval_->start().Value()) {
      first_interval_->set_start(start);
    } else if (end.Value() < first_interval_->start().Value()) {
      UseInterval* interval = new(zone) UseInterval(start, end);
      interval->set_next(first_interval_);
      first_interval_ = interval;
    } else {
      // The order of instruction processing (see ProcessInstructions)
      // guarantees that each new use interval either precedes or intersects
      // with the last added interval.
      ASSERT(start.Value() < first_interval_->end().Value());
      first_interval_->start_ = Min(start, first_interval_->start_);
      first_interval_->end_ = Max(end, first_interval_->end_);
    }
  }
}

void LiveRange::AddUsePosition(LifetimePosition pos,
                               LOperand* operand,
                               LOperand* hint,
                               Zone* zone) {
  LAllocator::TraceAlloc("Add to live range %d use position %d\n",
                         id_,
                         pos.Value());
  UsePosition* use_pos = new(zone) UsePosition(pos, operand, hint);
  UsePosition* prev_hint = NULL;
  UsePosition* prev = NULL;
  UsePosition* current = first_pos_;
  while (current != NULL && current->pos().Value() < pos.Value()) {
    prev_hint = current->HasHint() ? current : prev_hint;
    prev = current;
    current = current->next();
  }

  if (prev == NULL) {
    use_pos->set_next(first_pos_);
    first_pos_ = use_pos;
  } else {
    use_pos->next_ = prev->next_;
    prev->next_ = use_pos;
  }

  if (prev_hint == NULL && use_pos->HasHint()) {
    current_hint_operand_ = hint;
  }
}

void LiveRange::ConvertOperands(Zone* zone) {
  LOperand* op = CreateAssignedOperand(zone);
  UsePosition* use_pos = first_pos();
  while (use_pos != NULL) {
    ASSERT(Start().Value() <= use_pos->pos().Value() &&
           use_pos->pos().Value() <= End().Value());

    if (use_pos->HasOperand()) {
      ASSERT(op->IsRegister() || op->IsDoubleRegister() ||
             op->IsSIMD128Register() || !use_pos->RequiresRegister());
      use_pos->operand()->ConvertTo(op->kind(), op->index());
    }
    use_pos = use_pos->next();
  }
}

bool LiveRange::CanCover(LifetimePosition position) const {
  if (IsEmpty()) return false;
  return Start().Value() <= position.Value() &&
         position.Value() < End().Value();
}

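// Unlike CanCover, which only checks the overall [Start, End) extent,
// Covers also rejects positions that fall into a lifetime hole between the
// range's use intervals.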
bool LiveRange::Covers(LifetimePosition position) {
  if (!CanCover(position)) return false;
  UseInterval* start_search = FirstSearchIntervalForPosition(position);
  for (UseInterval* interval = start_search;
       interval != NULL;
       interval = interval->next()) {
    ASSERT(interval->next() == NULL ||
           interval->next()->start().Value() >= interval->start().Value());
    AdvanceLastProcessedMarker(interval, position);
    if (interval->Contains(position)) return true;
    if (interval->start().Value() > position.Value()) return false;
  }
  return false;
}

LifetimePosition LiveRange::FirstIntersection(LiveRange* other) {
  UseInterval* b = other->first_interval();
  if (b == NULL) return LifetimePosition::Invalid();
  LifetimePosition advance_last_processed_up_to = b->start();
  UseInterval* a = FirstSearchIntervalForPosition(b->start());
  while (a != NULL && b != NULL) {
    if (a->start().Value() > other->End().Value()) break;
    if (b->start().Value() > End().Value()) break;
    LifetimePosition cur_intersection = a->Intersect(b);
    if (cur_intersection.IsValid()) {
      return cur_intersection;
    }
    if (a->start().Value() < b->start().Value()) {
      a = a->next();
      if (a == NULL || a->start().Value() > other->End().Value()) break;
      AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
    } else {
      b = b->next();
    }
  }
  return LifetimePosition::Invalid();
}

LAllocator::LAllocator(int num_values, HGraph* graph)
    : zone_(graph->isolate()),
      chunk_(NULL),
      live_in_sets_(graph->blocks()->length(), zone()),
      live_ranges_(num_values * 2, zone()),
      fixed_live_ranges_(NULL),
      fixed_double_live_ranges_(NULL),
      unhandled_live_ranges_(num_values * 2, zone()),
      active_live_ranges_(8, zone()),
      inactive_live_ranges_(8, zone()),
      reusable_slots_(8, zone()),
      reusable_simd128_slots_(8, zone()),
      next_virtual_register_(num_values),
      first_artificial_register_(num_values),
      mode_(UNALLOCATED_REGISTERS),
      num_registers_(-1),
      graph_(graph),
      has_osr_entry_(false),
      allocation_ok_(true) { }

void LAllocator::InitializeLivenessAnalysis() {
  // Initialize the live_in sets for each block to NULL.
  int block_count = graph_->blocks()->length();
  live_in_sets_.Initialize(block_count, zone());
  live_in_sets_.AddBlock(NULL, block_count, zone());
}

BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) {
  // Compute live out for the given block, except not including backward
  // successor edges.
  BitVector* live_out = new(zone()) BitVector(next_virtual_register_, zone());

  // Process all successor blocks.
  for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
    // Add values live on entry to the successor. Note the successor's
    // live_in will not be computed yet for backwards edges.
    HBasicBlock* successor = it.Current();
    BitVector* live_in = live_in_sets_[successor->block_id()];
    if (live_in != NULL) live_out->Union(*live_in);

    // All phi input operands corresponding to this successor edge are live
    // out from this block.
    int index = successor->PredecessorIndexOf(block);
    const ZoneList<HPhi*>* phis = successor->phis();
    for (int i = 0; i < phis->length(); ++i) {
      HPhi* phi = phis->at(i);
      if (!phi->OperandAt(index)->IsConstant()) {
        live_out->Add(phi->OperandAt(index)->id());
      }
    }
  }

  return live_out;
}

void LAllocator::AddInitialIntervals(HBasicBlock* block,
                                     BitVector* live_out) {
  // Add an interval that includes the entire block to the live range for
  // each live_out value.
  LifetimePosition start = LifetimePosition::FromInstructionIndex(
      block->first_instruction_index());
  LifetimePosition end = LifetimePosition::FromInstructionIndex(
      block->last_instruction_index()).NextInstruction();
  BitVector::Iterator iterator(live_out);
  while (!iterator.Done()) {
    int operand_index = iterator.Current();
    LiveRange* range = LiveRangeFor(operand_index);
    range->AddUseInterval(start, end, zone());
    iterator.Advance();
  }
}

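// Fixed (precolored) live ranges are keyed by negative ids (note the
// "-index - 1" encoding below), so they can never collide with the
// non-negative virtual register ids of ordinary live ranges.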
int LAllocator::FixedDoubleLiveRangeID(int index) {
  return -index - 1 - Register::kMaxNumAllocatableRegisters;
}

LOperand* LAllocator::AllocateFixed(LUnallocated* operand,
                                    int pos,
                                    bool is_tagged) {
  TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register());
  ASSERT(operand->HasFixedPolicy());
  if (operand->HasFixedSlotPolicy()) {
    operand->ConvertTo(LOperand::STACK_SLOT, operand->fixed_slot_index());
  } else if (operand->HasFixedRegisterPolicy()) {
    int reg_index = operand->fixed_register_index();
    operand->ConvertTo(LOperand::REGISTER, reg_index);
  } else if (operand->HasFixedDoubleRegisterPolicy()) {
    int reg_index = operand->fixed_register_index();
    operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index);
  } else {
    UNREACHABLE();
  }
  if (is_tagged) {
    TraceAlloc("Fixed reg is tagged at %d\n", pos);
    LInstruction* instr = InstructionAt(pos);
    if (instr->HasPointerMap()) {
      instr->pointer_map()->RecordPointer(operand, chunk()->zone());
    }
  }
  return operand;
}

LiveRange* LAllocator::FixedLiveRangeFor(int index) {
  ASSERT(index < Register::kMaxNumAllocatableRegisters);
  LiveRange* result = fixed_live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(FixedLiveRangeID(index), chunk()->zone());
    ASSERT(result->IsFixed());
    result->kind_ = GENERAL_REGISTERS;
    SetLiveRangeAssignedRegister(result, index);
    fixed_live_ranges_[index] = result;
  }
  return result;
}

LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
  ASSERT(index < DoubleRegister::NumAllocatableRegisters());
  LiveRange* result = fixed_double_live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(FixedDoubleLiveRangeID(index),
                                   chunk()->zone());
    ASSERT(result->IsFixed());
    result->kind_ = DOUBLE_REGISTERS;
    SetLiveRangeAssignedRegister(result, index);
    fixed_double_live_ranges_[index] = result;
  }
  return result;
}

LiveRange* LAllocator::LiveRangeFor(int index) {
  if (index >= live_ranges_.length()) {
    live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1, zone());
  }
  LiveRange* result = live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(index, chunk()->zone());
    live_ranges_[index] = result;
  }
  return result;
}

LGap* LAllocator::GetLastGap(HBasicBlock* block) {
  int last_instruction = block->last_instruction_index();
  int index = chunk_->NearestGapPos(last_instruction);
  return GapAt(index);
}

HPhi* LAllocator::LookupPhi(LOperand* operand) const {
  if (!operand->IsUnallocated()) return NULL;
  int index = LUnallocated::cast(operand)->virtual_register();
  HValue* instr = graph_->LookupValue(index);
  if (instr != NULL && instr->IsPhi()) {
    return HPhi::cast(instr);
  }
  return NULL;
}

LiveRange* LAllocator::LiveRangeFor(LOperand* operand) {
  if (operand->IsUnallocated()) {
    return LiveRangeFor(LUnallocated::cast(operand)->virtual_register());
  } else if (operand->IsRegister()) {
    return FixedLiveRangeFor(operand->index());
  } else if (operand->IsDoubleRegister()) {
    return FixedDoubleLiveRangeFor(operand->index());
  } else {
    return NULL;
  }
}

void LAllocator::Define(LifetimePosition position,
                        LOperand* operand,
                        LOperand* hint) {
  LiveRange* range = LiveRangeFor(operand);
  if (range == NULL) return;

  if (range->IsEmpty() || range->Start().Value() > position.Value()) {
    // Can happen if there is a definition without use.
    range->AddUseInterval(position, position.NextInstruction(), zone());
    range->AddUsePosition(position.NextInstruction(), NULL, NULL, zone());
  } else {
    range->ShortenTo(position);
  }

  if (operand->IsUnallocated()) {
    LUnallocated* unalloc_operand = LUnallocated::cast(operand);
    range->AddUsePosition(position, unalloc_operand, hint, zone());
  }
}

void LAllocator::Use(LifetimePosition block_start,
                     LifetimePosition position,
                     LOperand* operand,
                     LOperand* hint) {
  LiveRange* range = LiveRangeFor(operand);
  if (range == NULL) return;
  if (operand->IsUnallocated()) {
    LUnallocated* unalloc_operand = LUnallocated::cast(operand);
    range->AddUsePosition(position, unalloc_operand, hint, zone());
  }
  range->AddUseInterval(block_start, position, zone());
}

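// Note: if the gap at 'index' already contains a move that reads the same
// virtual register as 'from', the new move below copies that move's source
// instead, so both destinations receive the value from a single operand.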
void LAllocator::AddConstraintsGapMove(int index,
                                       LOperand* from,
                                       LOperand* to) {
  LGap* gap = GapAt(index);
  LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
                                                     chunk()->zone());
  if (from->IsUnallocated()) {
    const ZoneList<LMoveOperands>* move_operands = move->move_operands();
    for (int i = 0; i < move_operands->length(); ++i) {
      LMoveOperands cur = move_operands->at(i);
      LOperand* cur_to = cur.destination();
      if (cur_to->IsUnallocated()) {
        if (LUnallocated::cast(cur_to)->virtual_register() ==
            LUnallocated::cast(from)->virtual_register()) {
          move->AddMove(cur.source(), to, chunk()->zone());
          return;
        }
      }
    }
  }
  move->AddMove(from, to, chunk()->zone());
}

void LAllocator::MeetRegisterConstraints(HBasicBlock* block) {
  int start = block->first_instruction_index();
  int end = block->last_instruction_index();
  if (start == -1) return;
  for (int i = start; i <= end; ++i) {
    if (IsGapAt(i)) {
      LInstruction* instr = NULL;
      LInstruction* prev_instr = NULL;
      if (i < end) instr = InstructionAt(i + 1);
      if (i > start) prev_instr = InstructionAt(i - 1);
      MeetConstraintsBetween(prev_instr, instr, i);
      if (!AllocationOk()) return;
    }
  }
}

void LAllocator::MeetConstraintsBetween(LInstruction* first,
                                        LInstruction* second,
                                        int gap_index) {
  // Handle fixed temporaries.
  if (first != NULL) {
    for (TempIterator it(first); !it.Done(); it.Advance()) {
      LUnallocated* temp = LUnallocated::cast(it.Current());
      if (temp->HasFixedPolicy()) {
        AllocateFixed(temp, gap_index - 1, false);
      }
    }
  }

  // Handle fixed output operand.
  if (first != NULL && first->Output() != NULL) {
    LUnallocated* first_output = LUnallocated::cast(first->Output());
    LiveRange* range = LiveRangeFor(first_output->virtual_register());
    bool assigned = false;
    if (first_output->HasFixedPolicy()) {
      LUnallocated* output_copy = first_output->CopyUnconstrained(
          chunk()->zone());
      bool is_tagged = HasTaggedValue(first_output->virtual_register());
      AllocateFixed(first_output, gap_index, is_tagged);

      // This value is produced on the stack; we never need to spill it.
      if (first_output->IsStackSlot()) {
        range->SetSpillOperand(first_output);
        range->SetSpillStartIndex(gap_index - 1);
        assigned = true;
      }
      chunk_->AddGapMove(gap_index, first_output, output_copy);
    }

    if (!assigned) {
      range->SetSpillStartIndex(gap_index);

      // This move to the spill operand is not a real use. Liveness analysis
      // and splitting of live ranges do not account for it.
      // Thus it should be inserted at a lifetime position corresponding to
      // the instruction end.
      LGap* gap = GapAt(gap_index);
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE,
                                                         chunk()->zone());
      move->AddMove(first_output, range->GetSpillOperand(),
                    chunk()->zone());
    }
  }

  // Handle fixed input operands of second instruction.
  if (second != NULL) {
    for (UseIterator it(second); !it.Done(); it.Advance()) {
      LUnallocated* cur_input = LUnallocated::cast(it.Current());
      if (cur_input->HasFixedPolicy()) {
        LUnallocated* input_copy = cur_input->CopyUnconstrained(
            chunk()->zone());
        bool is_tagged = HasTaggedValue(cur_input->virtual_register());
        AllocateFixed(cur_input, gap_index + 1, is_tagged);
        AddConstraintsGapMove(gap_index, input_copy, cur_input);
      } else if (cur_input->HasWritableRegisterPolicy()) {
        // The live range of writable input registers always goes until the end
        // of the instruction.
        ASSERT(!cur_input->IsUsedAtStart());

        LUnallocated* input_copy = cur_input->CopyUnconstrained(
            chunk()->zone());
        int vreg = GetVirtualRegister();
        if (!AllocationOk()) return;
        cur_input->set_virtual_register(vreg);

        if (RequiredRegisterKind(input_copy->virtual_register()) ==
            DOUBLE_REGISTERS) {
          double_artificial_registers_.Add(
              cur_input->virtual_register() - first_artificial_register_,
              zone());
        } else if (RequiredRegisterKind(input_copy->virtual_register()) ==
            FLOAT32x4_REGISTERS) {
          float32x4_artificial_registers_.Add(
              cur_input->virtual_register() - first_artificial_register_,
              zone());
        } else if (RequiredRegisterKind(input_copy->virtual_register()) ==
            INT32x4_REGISTERS) {
          int32x4_artificial_registers_.Add(
              cur_input->virtual_register() - first_artificial_register_,
              zone());
        }

        AddConstraintsGapMove(gap_index, input_copy, cur_input);
      }
    }
  }

  // Handle "output same as input" for second instruction.
  if (second != NULL && second->Output() != NULL) {
    LUnallocated* second_output = LUnallocated::cast(second->Output());
    if (second_output->HasSameAsInputPolicy()) {
      LUnallocated* cur_input = LUnallocated::cast(second->FirstInput());
      int output_vreg = second_output->virtual_register();
      int input_vreg = cur_input->virtual_register();

      LUnallocated* input_copy = cur_input->CopyUnconstrained(
          chunk()->zone());
      cur_input->set_virtual_register(second_output->virtual_register());
      AddConstraintsGapMove(gap_index, input_copy, cur_input);

      if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
        int index = gap_index + 1;
        LInstruction* instr = InstructionAt(index);
        if (instr->HasPointerMap()) {
          instr->pointer_map()->RecordPointer(input_copy, chunk()->zone());
        }
      } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
        // The input is assumed to immediately have a tagged representation,
        // before the pointer map can be used. I.e. the pointer map at the
        // instruction will include the output operand (whose value at the
        // beginning of the instruction is equal to the input operand). If
        // this is not desired, then the pointer map at this instruction needs
        // to be adjusted manually.
      }
    }
  }
}

void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
  int block_start = block->first_instruction_index();
  int index = block->last_instruction_index();

  LifetimePosition block_start_position =
      LifetimePosition::FromInstructionIndex(block_start);

  while (index >= block_start) {
    LifetimePosition curr_position =
        LifetimePosition::FromInstructionIndex(index);

    if (IsGapAt(index)) {
      // We have a gap at this position.
      LGap* gap = GapAt(index);
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
                                                         chunk()->zone());
      const ZoneList<LMoveOperands>* move_operands = move->move_operands();
      for (int i = 0; i < move_operands->length(); ++i) {
        LMoveOperands* cur = &move_operands->at(i);
        if (cur->IsIgnored()) continue;
        LOperand* from = cur->source();
        LOperand* to = cur->destination();
        HPhi* phi = LookupPhi(to);
        LOperand* hint = to;
        if (phi != NULL) {
          // This is a phi resolving move.
          if (!phi->block()->IsLoopHeader()) {
            hint = LiveRangeFor(phi->id())->current_hint_operand();
          }
        } else {
          if (to->IsUnallocated()) {
            if (live->Contains(LUnallocated::cast(to)->virtual_register())) {
              Define(curr_position, to, from);
              live->Remove(LUnallocated::cast(to)->virtual_register());
            } else {
              cur->Eliminate();
              continue;
            }
          } else {
            Define(curr_position, to, from);
          }
        }
        Use(block_start_position, curr_position, from, hint);
        if (from->IsUnallocated()) {
          live->Add(LUnallocated::cast(from)->virtual_register());
        }
      }
    } else {
      ASSERT(!IsGapAt(index));
      LInstruction* instr = InstructionAt(index);

      if (instr != NULL) {
        LOperand* output = instr->Output();
        if (output != NULL) {
          if (output->IsUnallocated()) {
            live->Remove(LUnallocated::cast(output)->virtual_register());
          }
          Define(curr_position, output, NULL);
        }

        if (instr->ClobbersRegisters()) {
          for (int i = 0; i < Register::kMaxNumAllocatableRegisters; ++i) {
            if (output == NULL || !output->IsRegister() ||
                output->index() != i) {
              LiveRange* range = FixedLiveRangeFor(i);
              range->AddUseInterval(curr_position,
                                    curr_position.InstructionEnd(),
                                    zone());
            }
          }
        }

        if (instr->ClobbersDoubleRegisters()) {
          for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); ++i) {
            if (output == NULL || !output->IsDoubleRegister() ||
                output->index() != i) {
              LiveRange* range = FixedDoubleLiveRangeFor(i);
              range->AddUseInterval(curr_position,
                                    curr_position.InstructionEnd(),
                                    zone());
            }
          }
        }

        for (UseIterator it(instr); !it.Done(); it.Advance()) {
          LOperand* input = it.Current();

          LifetimePosition use_pos;
          if (input->IsUnallocated() &&
              LUnallocated::cast(input)->IsUsedAtStart()) {
            use_pos = curr_position;
          } else {
            use_pos = curr_position.InstructionEnd();
          }

          Use(block_start_position, use_pos, input, NULL);
          if (input->IsUnallocated()) {
            live->Add(LUnallocated::cast(input)->virtual_register());
          }
        }

        for (TempIterator it(instr); !it.Done(); it.Advance()) {
          LOperand* temp = it.Current();
          if (instr->ClobbersTemps()) {
            if (temp->IsRegister()) continue;
            if (temp->IsUnallocated()) {
              LUnallocated* temp_unalloc = LUnallocated::cast(temp);
              if (temp_unalloc->HasFixedPolicy()) {
                continue;
              }
            }
          }
          Use(block_start_position, curr_position.InstructionEnd(), temp, NULL);
          Define(curr_position, temp, NULL);
        }
      }
    }

    index = index - 1;
  }
}

void LAllocator::ResolvePhis(HBasicBlock* block) {
  const ZoneList<HPhi*>* phis = block->phis();
  for (int i = 0; i < phis->length(); ++i) {
    HPhi* phi = phis->at(i);
    LUnallocated* phi_operand =
        new(chunk()->zone()) LUnallocated(LUnallocated::NONE);
    phi_operand->set_virtual_register(phi->id());
    for (int j = 0; j < phi->OperandCount(); ++j) {
      HValue* op = phi->OperandAt(j);
      LOperand* operand = NULL;
      if (op->IsConstant() && op->EmitAtUses()) {
        HConstant* constant = HConstant::cast(op);
        operand = chunk_->DefineConstantOperand(constant);
      } else {
        ASSERT(!op->EmitAtUses());
        LUnallocated* unalloc =
            new(chunk()->zone()) LUnallocated(LUnallocated::ANY);
        unalloc->set_virtual_register(op->id());
        operand = unalloc;
      }
      HBasicBlock* cur_block = block->predecessors()->at(j);
      // The gap move must be added without any special processing as in
      // AddConstraintsGapMove.
      chunk_->AddGapMove(cur_block->last_instruction_index() - 1,
                         operand,
                         phi_operand);

      // We are going to insert a move before the branch instruction.
      // Some branch instructions (e.g. loops' back edges)
      // can potentially cause a GC so they have a pointer map.
      // By inserting a move we essentially create a copy of a
      // value which is invisible to PopulatePointerMaps(), because we store
      // it into a location different from the operand of a live range
      // covering a branch instruction.
      // Thus we need to manually record a pointer.
      LInstruction* branch =
          InstructionAt(cur_block->last_instruction_index());
      if (branch->HasPointerMap()) {
        if (phi->representation().IsTagged() && !phi->type().IsSmi()) {
          branch->pointer_map()->RecordPointer(phi_operand, chunk()->zone());
        } else if (!phi->representation().IsDouble()) {
          branch->pointer_map()->RecordUntagged(phi_operand, chunk()->zone());
        }
      }
    }

    LiveRange* live_range = LiveRangeFor(phi->id());
    LLabel* label = chunk_->GetLabel(phi->block()->block_id());
    label->GetOrCreateParallelMove(LGap::START, chunk()->zone())->
        AddMove(phi_operand, live_range->GetSpillOperand(), chunk()->zone());
    live_range->SetSpillStartIndex(phi->block()->first_instruction_index());
  }
}

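// Top-level driver: meets register constraints, resolves phis, builds live
// ranges, runs one allocation pass per register kind, populates pointer
// maps, and finally inserts the moves that connect split ranges and resolve
// control flow. Returns false if any phase fails (!AllocationOk()).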
bool LAllocator::Allocate(LChunk* chunk) {
  ASSERT(chunk_ == NULL);
  chunk_ = static_cast<LPlatformChunk*>(chunk);
  assigned_registers_ =
      new(chunk->zone()) BitVector(Register::NumAllocatableRegisters(),
                                   chunk->zone());
  assigned_double_registers_ =
      new(chunk->zone()) BitVector(DoubleRegister::NumAllocatableRegisters(),
                                   chunk->zone());
  MeetRegisterConstraints();
  if (!AllocationOk()) return false;
  ResolvePhis();
  BuildLiveRanges();
  AllocateGeneralRegisters();
  if (!AllocationOk()) return false;
  AllocateDoubleRegisters();
  if (!AllocationOk()) return false;
  PopulatePointerMaps();
  ConnectRanges();
  ResolveControlFlow();
  return true;
}

void LAllocator::MeetRegisterConstraints() {
  LAllocatorPhase phase("L_Register constraints", this);
  first_artificial_register_ = next_virtual_register_;
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int i = 0; i < blocks->length(); ++i) {
    HBasicBlock* block = blocks->at(i);
    MeetRegisterConstraints(block);
    if (!AllocationOk()) return;
  }
}

void LAllocator::ResolvePhis() {
  LAllocatorPhase phase("L_Resolve phis", this);

  // Process the blocks in reverse order.
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
    HBasicBlock* block = blocks->at(block_id);
    ResolvePhis(block);
  }
}

void LAllocator::ResolveControlFlow(LiveRange* range,
                                    HBasicBlock* block,
                                    HBasicBlock* pred) {
  LifetimePosition pred_end =
      LifetimePosition::FromInstructionIndex(pred->last_instruction_index());
  LifetimePosition cur_start =
      LifetimePosition::FromInstructionIndex(block->first_instruction_index());
  LiveRange* pred_cover = NULL;
  LiveRange* cur_cover = NULL;
  LiveRange* cur_range = range;
  while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) {
    if (cur_range->CanCover(cur_start)) {
      ASSERT(cur_cover == NULL);
      cur_cover = cur_range;
    }
    if (cur_range->CanCover(pred_end)) {
      ASSERT(pred_cover == NULL);
      pred_cover = cur_range;
    }
    cur_range = cur_range->next();
  }

  if (cur_cover->IsSpilled()) return;
  ASSERT(pred_cover != NULL && cur_cover != NULL);
  if (pred_cover != cur_cover) {
    LOperand* pred_op = pred_cover->CreateAssignedOperand(chunk()->zone());
    LOperand* cur_op = cur_cover->CreateAssignedOperand(chunk()->zone());
    if (!pred_op->Equals(cur_op)) {
      LGap* gap = NULL;
      if (block->predecessors()->length() == 1) {
        gap = GapAt(block->first_instruction_index());
      } else {
        ASSERT(pred->end()->SecondSuccessor() == NULL);
        gap = GetLastGap(pred);

        // We are going to insert a move before the branch instruction.
        // Some branch instructions (e.g. loops' back edges)
        // can potentially cause a GC so they have a pointer map.
        // By inserting a move we essentially create a copy of a
        // value which is invisible to PopulatePointerMaps(), because we store
        // it into a location different from the operand of a live range
        // covering a branch instruction.
        // Thus we need to manually record a pointer.
        LInstruction* branch = InstructionAt(pred->last_instruction_index());
        if (branch->HasPointerMap()) {
          if (HasTaggedValue(range->id())) {
            branch->pointer_map()->RecordPointer(cur_op, chunk()->zone());
          } else if (!cur_op->IsDoubleStackSlot() &&
                     !cur_op->IsDoubleRegister() &&
                     !cur_op->IsSIMD128StackSlot() &&
                     !cur_op->IsSIMD128Register()) {
            branch->pointer_map()->RemovePointer(cur_op);
          }
        }
      }
      gap->GetOrCreateParallelMove(
          LGap::START, chunk()->zone())->AddMove(pred_op, cur_op,
                                                 chunk()->zone());
    }
  }
}

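// Returns the parallel move sitting immediately around 'pos': the START or
// END move of the gap at 'pos' when 'pos' is itself a gap position,
// otherwise the AFTER move of the preceding gap or the BEFORE move of the
// following one.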
LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) {
  int index = pos.InstructionIndex();
  if (IsGapAt(index)) {
    LGap* gap = GapAt(index);
    return gap->GetOrCreateParallelMove(
        pos.IsInstructionStart() ? LGap::START : LGap::END, chunk()->zone());
  }
  int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
  return GapAt(gap_pos)->GetOrCreateParallelMove(
      (gap_pos < index) ? LGap::AFTER : LGap::BEFORE, chunk()->zone());
}

HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) {
  LGap* gap = GapAt(chunk_->NearestGapPos(pos.InstructionIndex()));
  return gap->block();
}

void LAllocator::ConnectRanges() {
  LAllocatorPhase phase("L_Connect ranges", this);
  for (int i = 0; i < live_ranges()->length(); ++i) {
    LiveRange* first_range = live_ranges()->at(i);
    if (first_range == NULL || first_range->parent() != NULL) continue;

    LiveRange* second_range = first_range->next();
    while (second_range != NULL) {
      LifetimePosition pos = second_range->Start();

      if (!second_range->IsSpilled()) {
        // Add a gap move if the two live ranges touch and there is no block
        // boundary between them.
        if (first_range->End().Value() == pos.Value()) {
          bool should_insert = true;
          if (IsBlockBoundary(pos)) {
            should_insert = CanEagerlyResolveControlFlow(GetBlock(pos));
          }
          if (should_insert) {
            LParallelMove* move = GetConnectingParallelMove(pos);
            LOperand* prev_operand = first_range->CreateAssignedOperand(
                chunk()->zone());
            LOperand* cur_operand = second_range->CreateAssignedOperand(
                chunk()->zone());
            move->AddMove(prev_operand, cur_operand,
                          chunk()->zone());
          }
        }
      }

      first_range = second_range;
      second_range = second_range->next();
    }
  }
}

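// Control flow into a block can be resolved "eagerly" (with a plain gap
// move inserted by ConnectRanges) only when the block has a single
// predecessor that falls through, i.e. immediately precedes it in the
// block order.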
bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const {
  if (block->predecessors()->length() != 1) return false;
  return block->predecessors()->first()->block_id() == block->block_id() - 1;
}

void LAllocator::ResolveControlFlow() {
  LAllocatorPhase phase("L_Resolve control flow", this);
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = 1; block_id < blocks->length(); ++block_id) {
    HBasicBlock* block = blocks->at(block_id);
    if (CanEagerlyResolveControlFlow(block)) continue;
    BitVector* live = live_in_sets_[block->block_id()];
    BitVector::Iterator iterator(live);
    while (!iterator.Done()) {
      int operand_index = iterator.Current();
      for (int i = 0; i < block->predecessors()->length(); ++i) {
        HBasicBlock* cur = block->predecessors()->at(i);
        LiveRange* cur_range = LiveRangeFor(operand_index);
        ResolveControlFlow(cur_range, block, cur);
      }
      iterator.Advance();
    }
  }
}

void LAllocator::BuildLiveRanges() {
  LAllocatorPhase phase("L_Build live ranges", this);
  InitializeLivenessAnalysis();
  // Process the blocks in reverse order.
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
    HBasicBlock* block = blocks->at(block_id);
    BitVector* live = ComputeLiveOut(block);
    // Initially consider all live_out values live for the entire block. We
    // will shorten these intervals if necessary.
    AddInitialIntervals(block, live);

    // Process the instructions in reverse order, generating and killing
    // live values.
    ProcessInstructions(block, live);
    // All phi output operands are killed by this block.
    const ZoneList<HPhi*>* phis = block->phis();
    for (int i = 0; i < phis->length(); ++i) {
      // The live range interval already ends at the first instruction of the
      // block.
      HPhi* phi = phis->at(i);
      live->Remove(phi->id());

      LOperand* hint = NULL;
      LOperand* phi_operand = NULL;
      LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
                                                         chunk()->zone());
      for (int j = 0; j < move->move_operands()->length(); ++j) {
        LOperand* to = move->move_operands()->at(j).destination();
        if (to->IsUnallocated() &&
            LUnallocated::cast(to)->virtual_register() == phi->id()) {
          hint = move->move_operands()->at(j).source();
          phi_operand = to;
          break;
        }
      }
      ASSERT(hint != NULL);

      LifetimePosition block_start = LifetimePosition::FromInstructionIndex(
          block->first_instruction_index());
      Define(block_start, phi_operand, hint);
    }

    // Now live is live_in for this block except not including values live
    // out on backward successor edges.
    live_in_sets_[block_id] = live;

    // If this block is a loop header go back and patch up the necessary
    // predecessor blocks.
    if (block->IsLoopHeader()) {
      // TODO(kmillikin): Need to be able to get the last block of the loop
      // in the loop information. Add a live range stretching from the first
      // loop instruction to the last for each value live on entry to the
      // header.
      HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
      BitVector::Iterator iterator(live);
      LifetimePosition start = LifetimePosition::FromInstructionIndex(
          block->first_instruction_index());
      LifetimePosition end = LifetimePosition::FromInstructionIndex(
          back_edge->last_instruction_index()).NextInstruction();
      while (!iterator.Done()) {
        int operand_index = iterator.Current();
        LiveRange* range = LiveRangeFor(operand_index);
        range->EnsureInterval(start, end, zone());
        iterator.Advance();
      }

      for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) {
        live_in_sets_[i]->Union(*live);
      }
    }

#ifdef DEBUG
    if (block_id == 0) {
      BitVector::Iterator iterator(live);
      bool found = false;
      while (!iterator.Done()) {
        found = true;
        int operand_index = iterator.Current();
        if (chunk_->info()->IsStub()) {
          CodeStub::Major major_key = chunk_->info()->code_stub()->MajorKey();
          PrintF("Function: %s\n", CodeStub::MajorName(major_key, false));
        } else {
          ASSERT(chunk_->info()->IsOptimizing());
          AllowHandleDereference allow_deref;
          PrintF("Function: %s\n",
                 chunk_->info()->function()->debug_name()->ToCString().get());
        }
        PrintF("Value %d used before first definition!\n", operand_index);
        LiveRange* range = LiveRangeFor(operand_index);
        PrintF("First use is at %d\n", range->first_pos()->pos().Value());
        iterator.Advance();
      }
      ASSERT(!found);
    }
#endif
  }

  for (int i = 0; i < live_ranges_.length(); ++i) {
    if (live_ranges_[i] != NULL) {
      live_ranges_[i]->kind_ = RequiredRegisterKind(live_ranges_[i]->id());
    }
  }
}

bool LAllocator::SafePointsAreInOrder() const {
  const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
  int safe_point = 0;
  for (int i = 0; i < pointer_maps->length(); ++i) {
    LPointerMap* map = pointer_maps->at(i);
    if (safe_point > map->lithium_position()) return false;
    safe_point = map->lithium_position();
  }
  return true;
}

void LAllocator::PopulatePointerMaps() {
  LAllocatorPhase phase("L_Populate pointer maps", this);
  const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();

  ASSERT(SafePointsAreInOrder());

  // Iterate over all safe point positions and record a pointer
  // for all spilled live ranges at this point.
  int first_safe_point_index = 0;
  int last_range_start = 0;
  for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) {
    LiveRange* range = live_ranges()->at(range_idx);
    if (range == NULL) continue;
    // Iterate over the first parts of multi-part live ranges.
    if (range->parent() != NULL) continue;
    // Skip non-pointer values.
    if (!HasTaggedValue(range->id())) continue;
    // Skip empty live ranges.
    if (range->IsEmpty()) continue;

    // Find the extent of the range and its children.
    int start = range->Start().InstructionIndex();
    int end = 0;
    for (LiveRange* cur = range; cur != NULL; cur = cur->next()) {
      LifetimePosition this_end = cur->End();
      if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex();
      ASSERT(cur->Start().InstructionIndex() >= start);
    }

    // Most of the ranges are in order, but not all. Keep an eye on when
    // they step backwards and reset the first_safe_point_index so we don't
    // miss any safe points.
    if (start < last_range_start) {
      first_safe_point_index = 0;
    }
    last_range_start = start;

    // Step across all the safe points that are before the start of this range,
    // recording how far we step in order to save doing this for the next range.
    while (first_safe_point_index < pointer_maps->length()) {
      LPointerMap* map = pointer_maps->at(first_safe_point_index);
      int safe_point = map->lithium_position();
      if (safe_point >= start) break;
      first_safe_point_index++;
    }

    // Step through the safe points to see whether they are in the range.
    for (int safe_point_index = first_safe_point_index;
         safe_point_index < pointer_maps->length();
         ++safe_point_index) {
      LPointerMap* map = pointer_maps->at(safe_point_index);
      int safe_point = map->lithium_position();

      // The safe points are sorted so we can stop searching here.
      if (safe_point - 1 > end) break;

      // Advance to the next active range that covers the current
      // safe point position.
      LifetimePosition safe_point_pos =
          LifetimePosition::FromInstructionIndex(safe_point);
      LiveRange* cur = range;
      while (cur != NULL && !cur->Covers(safe_point_pos)) {
        cur = cur->next();
      }
      if (cur == NULL) continue;

      // Check if the live range is spilled and the safe point is after
      // the spill position.
      if (range->HasAllocatedSpillOperand() &&
          safe_point >= range->spill_start_index()) {
        TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
                   range->id(), range->spill_start_index(), safe_point);
        map->RecordPointer(range->GetSpillOperand(), chunk()->zone());
      }

      if (!cur->IsSpilled()) {
        TraceAlloc("Pointer in register for range %d (start at %d) "
                   "at safe point %d\n",
                   cur->id(), cur->Start().Value(), safe_point);
        LOperand* operand = cur->CreateAssignedOperand(chunk()->zone());
        ASSERT(!operand->IsStackSlot());
        map->RecordPointer(operand, chunk()->zone());
      }
    }
  }
}

void LAllocator::AllocateGeneralRegisters() {
  LAllocatorPhase phase("L_Allocate general registers", this);
  num_registers_ = Register::NumAllocatableRegisters();
  mode_ = GENERAL_REGISTERS;
  AllocateRegisters();
}

void LAllocator::AllocateDoubleRegisters() {
  LAllocatorPhase phase("L_Allocate double registers", this);
  num_registers_ = DoubleRegister::NumAllocatableRegisters();
  mode_ = DOUBLE_REGISTERS;
  AllocateRegisters();
}

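// Core linear-scan loop: unhandled ranges are processed in order of
// increasing start position; ranges that have ended are retired, ranges
// with lifetime holes migrate between the active and inactive sets, and
// the current range either receives a free register or triggers a
// spill/split.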
void LAllocator::AllocateRegisters() {
  ASSERT(unhandled_live_ranges_.is_empty());

  for (int i = 0; i < live_ranges_.length(); ++i) {
    if (live_ranges_[i] != NULL) {
      if (live_ranges_[i]->Kind() == mode_) {
        AddToUnhandledUnsorted(live_ranges_[i]);
      } else if (mode_ == DOUBLE_REGISTERS &&
                 IsSIMD128RegisterKind(live_ranges_[i]->Kind())) {
        AddToUnhandledUnsorted(live_ranges_[i]);
      }
    }
  }
  SortUnhandled();
  ASSERT(UnhandledIsSorted());

  ASSERT(reusable_slots_.is_empty());
  ASSERT(reusable_simd128_slots_.is_empty());
  ASSERT(active_live_ranges_.is_empty());
  ASSERT(inactive_live_ranges_.is_empty());

  if (mode_ == DOUBLE_REGISTERS) {
    for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); ++i) {
      LiveRange* current = fixed_double_live_ranges_.at(i);
      if (current != NULL) {
        AddToInactive(current);
      }
    }
  } else {
    ASSERT(mode_ == GENERAL_REGISTERS);
    for (int i = 0; i < fixed_live_ranges_.length(); ++i) {
      LiveRange* current = fixed_live_ranges_.at(i);
      if (current != NULL) {
        AddToInactive(current);
      }
    }
  }

  while (!unhandled_live_ranges_.is_empty()) {
    ASSERT(UnhandledIsSorted());
    LiveRange* current = unhandled_live_ranges_.RemoveLast();
    ASSERT(UnhandledIsSorted());
    LifetimePosition position = current->Start();
#ifdef DEBUG
    allocation_finger_ = position;
#endif
    TraceAlloc("Processing interval %d start=%d\n",
               current->id(),
               position.Value());

    if (current->HasAllocatedSpillOperand()) {
      TraceAlloc("Live range %d already has a spill operand\n", current->id());
      LifetimePosition next_pos = position;
      if (IsGapAt(next_pos.InstructionIndex())) {
        next_pos = next_pos.NextInstruction();
      }
      UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos);
      // If the range already has a spill operand and it doesn't need a
      // register immediately, split it and spill the first part of the range.
      if (pos == NULL) {
        Spill(current);
        continue;
      } else if (pos->pos().Value() >
                 current->Start().NextInstruction().Value()) {
        // Do not spill the live range eagerly if the use position that can
        // benefit from the register is too close to the start of the live
        // range.
        SpillBetween(current, current->Start(), pos->pos());
        if (!AllocationOk()) return;
        ASSERT(UnhandledIsSorted());
        continue;
      }
    }

    for (int i = 0; i < active_live_ranges_.length(); ++i) {
      LiveRange* cur_active = active_live_ranges_.at(i);
      if (cur_active->End().Value() <= position.Value()) {
        ActiveToHandled(cur_active);
        --i;  // The live range was removed from the list of active live ranges.
      } else if (!cur_active->Covers(position)) {
        ActiveToInactive(cur_active);
        --i;  // The live range was removed from the list of active live ranges.
      }
    }

    for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
      LiveRange* cur_inactive = inactive_live_ranges_.at(i);
      if (cur_inactive->End().Value() <= position.Value()) {
        InactiveToHandled(cur_inactive);
        --i;  // Live range was removed from the list of inactive live ranges.
      } else if (cur_inactive->Covers(position)) {
        InactiveToActive(cur_inactive);
        --i;  // Live range was removed from the list of inactive live ranges.
      }
    }

    ASSERT(!current->HasRegisterAssigned() && !current->IsSpilled());

    bool result = TryAllocateFreeReg(current);
    if (!AllocationOk()) return;

    if (!result) AllocateBlockedReg(current);
    if (!AllocationOk()) return;

    if (current->HasRegisterAssigned()) {
      AddToActive(current);
    }
  }

  reusable_slots_.Rewind(0);
  reusable_simd128_slots_.Rewind(0);
  active_live_ranges_.Rewind(0);
  inactive_live_ranges_.Rewind(0);
}

const char* LAllocator::RegisterName(int allocation_index) {
  if (mode_ == GENERAL_REGISTERS) {
    return Register::AllocationIndexToString(allocation_index);
  } else {
    return DoubleRegister::AllocationIndexToString(allocation_index);
  }
}

void LAllocator::TraceAlloc(const char* msg, ...) {
  if (FLAG_trace_alloc) {
    va_list arguments;
    va_start(arguments, msg);
    OS::VPrint(msg, arguments);
    va_end(arguments);
  }
}

bool LAllocator::HasTaggedValue(int virtual_register) const {
  HValue* value = graph_->LookupValue(virtual_register);
  if (value == NULL) return false;
  return value->representation().IsTagged() && !value->type().IsSmi();
}

RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const {
  if (virtual_register < first_artificial_register_) {
    HValue* value = graph_->LookupValue(virtual_register);
    if (value != NULL && value->representation().IsDouble()) {
      return DOUBLE_REGISTERS;
    } else if (value != NULL && (value->representation().IsFloat32x4())) {
      return FLOAT32x4_REGISTERS;
    } else if (value != NULL && (value->representation().IsInt32x4())) {
      return INT32x4_REGISTERS;
    }
  } else if (double_artificial_registers_.Contains(
      virtual_register - first_artificial_register_)) {
    return DOUBLE_REGISTERS;
  } else if (float32x4_artificial_registers_.Contains(
      virtual_register - first_artificial_register_)) {
    return FLOAT32x4_REGISTERS;
  } else if (int32x4_artificial_registers_.Contains(
      virtual_register - first_artificial_register_)) {
    return INT32x4_REGISTERS;
  }

  return GENERAL_REGISTERS;
}

void LAllocator::AddToActive(LiveRange* range) {
  TraceAlloc("Add live range %d to active\n", range->id());
  active_live_ranges_.Add(range, zone());
}

void LAllocator::AddToInactive(LiveRange* range) {
  TraceAlloc("Add live range %d to inactive\n", range->id());
  inactive_live_ranges_.Add(range, zone());
}

void LAllocator::AddToUnhandledSorted(LiveRange* range) {
  if (range == NULL || range->IsEmpty()) return;
  ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
  ASSERT(allocation_finger_.Value() <= range->Start().Value());
  for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) {
    LiveRange* cur_range = unhandled_live_ranges_.at(i);
    if (range->ShouldBeAllocatedBefore(cur_range)) {
      TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
      unhandled_live_ranges_.InsertAt(i + 1, range, zone());
      ASSERT(UnhandledIsSorted());
      return;
    }
  }
  TraceAlloc("Add live range %d to unhandled at start\n", range->id());
  unhandled_live_ranges_.InsertAt(0, range, zone());
  ASSERT(UnhandledIsSorted());
}

void LAllocator::AddToUnhandledUnsorted(LiveRange* range) {
  if (range == NULL || range->IsEmpty()) return;
  ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
  TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
  unhandled_live_ranges_.Add(range, zone());
}

static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) {
  ASSERT(!(*a)->ShouldBeAllocatedBefore(*b) ||
         !(*b)->ShouldBeAllocatedBefore(*a));
  if ((*a)->ShouldBeAllocatedBefore(*b)) return 1;
  if ((*b)->ShouldBeAllocatedBefore(*a)) return -1;
  return (*a)->id() - (*b)->id();
}

// Sort the unhandled live ranges so that the ranges to be processed first are
// at the end of the array list. This is convenient for the register allocation
// algorithm because it is efficient to remove elements from the end.
void LAllocator::SortUnhandled() {
  TraceAlloc("Sort unhandled\n");
  unhandled_live_ranges_.Sort(&UnhandledSortHelper);
}

bool LAllocator::UnhandledIsSorted() {
  int len = unhandled_live_ranges_.length();
  for (int i = 1; i < len; i++) {
    LiveRange* a = unhandled_live_ranges_.at(i - 1);
    LiveRange* b = unhandled_live_ranges_.at(i);
    if (a->Start().Value() < b->Start().Value()) return false;
  }
  return true;
}

void LAllocator::FreeSpillSlot(LiveRange* range) {
  // Check that we are the last range.
  if (range->next() != NULL) return;

  if (!range->TopLevel()->HasAllocatedSpillOperand()) return;

  int index = range->TopLevel()->GetSpillOperand()->index();
  if (index >= 0) {
    if (IsSIMD128RegisterKind(range->Kind())) {
      reusable_simd128_slots_.Add(range, zone());
    } else {
      reusable_slots_.Add(range, zone());
    }
  }
}

LOperand* LAllocator::TryReuseSpillSlot(LiveRange* range) {
  ZoneList<LiveRange*>* reusable_slots = IsSIMD128RegisterKind(range->Kind())
      ? &reusable_simd128_slots_
      : &reusable_slots_;
  if (reusable_slots->is_empty()) return NULL;
  if (reusable_slots->first()->End().Value() >
      range->TopLevel()->Start().Value()) {
    return NULL;
  }
  LOperand* result = reusable_slots->first()->TopLevel()->GetSpillOperand();
  reusable_slots->Remove(0);
  return result;
}

void LAllocator::ActiveToHandled(LiveRange* range) {
  ASSERT(active_live_ranges_.Contains(range));
  active_live_ranges_.RemoveElement(range);
  TraceAlloc("Moving live range %d from active to handled\n", range->id());
  FreeSpillSlot(range);
}

void LAllocator::ActiveToInactive(LiveRange* range) {
  ASSERT(active_live_ranges_.Contains(range));
  active_live_ranges_.RemoveElement(range);
  inactive_live_ranges_.Add(range, zone());
  TraceAlloc("Moving live range %d from active to inactive\n", range->id());
}

void LAllocator::InactiveToHandled(LiveRange* range) {
  ASSERT(inactive_live_ranges_.Contains(range));
  inactive_live_ranges_.RemoveElement(range);
  TraceAlloc("Moving live range %d from inactive to handled\n", range->id());
  FreeSpillSlot(range);
}

void LAllocator::InactiveToActive(LiveRange* range) {
  ASSERT(inactive_live_ranges_.Contains(range));
  inactive_live_ranges_.RemoveElement(range);
  active_live_ranges_.Add(range, zone());
  TraceAlloc("Moving live range %d from inactive to active\n", range->id());
}

// TryAllocateFreeReg and AllocateBlockedReg assume this
// when allocating local arrays.
STATIC_ASSERT(DoubleRegister::kMaxNumAllocatableRegisters >=
              Register::kMaxNumAllocatableRegisters);

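// Computes, for each register, how long it stays free starting at the
// beginning of 'current'. The hinted register is taken if it is free for
// the whole range; otherwise the register that stays free longest is
// chosen, and 'current' is split where that register becomes blocked.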
bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
  LifetimePosition free_until_pos[DoubleRegister::kMaxNumAllocatableRegisters];

  for (int i = 0; i < num_registers_; i++) {
    free_until_pos[i] = LifetimePosition::MaxPosition();
  }

  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* cur_active = active_live_ranges_.at(i);
    free_until_pos[cur_active->assigned_register()] =
        LifetimePosition::FromInstructionIndex(0);
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* cur_inactive = inactive_live_ranges_.at(i);
    ASSERT(cur_inactive->End().Value() > current->Start().Value());
    LifetimePosition next_intersection =
        cur_inactive->FirstIntersection(current);
    if (!next_intersection.IsValid()) continue;
    int cur_reg = cur_inactive->assigned_register();
    free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
  }

  LOperand* hint = current->FirstHint();
  if (hint != NULL && (hint->IsRegister() || hint->IsDoubleRegister() ||
                       hint->IsSIMD128Register())) {
    int register_index = hint->index();
    TraceAlloc(
        "Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
        RegisterName(register_index),
        free_until_pos[register_index].Value(),
        current->id(),
        current->End().Value());

    // The desired register is free until the end of the current live range.
    if (free_until_pos[register_index].Value() >= current->End().Value()) {
      TraceAlloc("Assigning preferred reg %s to live range %d\n",
                 RegisterName(register_index),
                 current->id());
      SetLiveRangeAssignedRegister(current, register_index);
      return true;
    }
  }

  // Find the register which stays free for the longest time.
  int reg = 0;
  for (int i = 1; i < RegisterCount(); ++i) {
    if (free_until_pos[i].Value() > free_until_pos[reg].Value()) {
      reg = i;
    }
  }

  LifetimePosition pos = free_until_pos[reg];

  if (pos.Value() <= current->Start().Value()) {
    // All registers are blocked.
    return false;
  }

  if (pos.Value() < current->End().Value()) {
    // Register reg is available at the range start but becomes blocked before
    // the range end. Split current at the position where it becomes blocked.
    LiveRange* tail = SplitRangeAt(current, pos);
    if (!AllocationOk()) return false;
    AddToUnhandledSorted(tail);
  }

  // Register reg is available at the range start and is free until
  // the range end.
  ASSERT(pos.Value() >= current->End().Value());
  TraceAlloc("Assigning free reg %s to live range %d\n",
             RegisterName(reg),
             current->id());
  SetLiveRangeAssignedRegister(current, reg);

  return true;
}


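// All registers are occupied for part of |current|, so allocation must
// evict something. use_pos[r] is the next position where one of register
// r's current holders actually needs it; block_pos[r] is where r is blocked
// by a fixed (unspillable) range. The register whose next real use is
// farthest away is taken, and its holders are split and spilled around
// |current|.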
void LAllocator::AllocateBlockedReg(LiveRange* current) {
  UsePosition* register_use = current->NextRegisterPosition(current->Start());
  if (register_use == NULL) {
    // There is no use in the current live range that requires a register.
    // We can just spill it.
    Spill(current);
    return;
  }

  LifetimePosition use_pos[DoubleRegister::kMaxNumAllocatableRegisters];
  LifetimePosition block_pos[DoubleRegister::kMaxNumAllocatableRegisters];

  for (int i = 0; i < num_registers_; i++) {
    use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
  }

  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* range = active_live_ranges_[i];
    int cur_reg = range->assigned_register();
    if (range->IsFixed() || !range->CanBeSpilled(current->Start())) {
      block_pos[cur_reg] = use_pos[cur_reg] =
          LifetimePosition::FromInstructionIndex(0);
    } else {
      UsePosition* next_use = range->NextUsePositionRegisterIsBeneficial(
          current->Start());
      if (next_use == NULL) {
        use_pos[cur_reg] = range->End();
      } else {
        use_pos[cur_reg] = next_use->pos();
      }
    }
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* range = inactive_live_ranges_.at(i);
    ASSERT(range->End().Value() > current->Start().Value());
    LifetimePosition next_intersection = range->FirstIntersection(current);
    if (!next_intersection.IsValid()) continue;
    int cur_reg = range->assigned_register();
    if (range->IsFixed()) {
      block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
      use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
    } else {
      use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
    }
  }

  int reg = 0;
  for (int i = 1; i < RegisterCount(); ++i) {
    if (use_pos[i].Value() > use_pos[reg].Value()) {
      reg = i;
    }
  }

  LifetimePosition pos = use_pos[reg];

  if (pos.Value() < register_use->pos().Value()) {
    // All registers are blocked before the first use that requires a register.
    // Spill the starting part of the live range up to that use.
    SpillBetween(current, current->Start(), register_use->pos());
    return;
  }

  if (block_pos[reg].Value() < current->End().Value()) {
    // Register becomes blocked before the current range end. Split before
    // that position.
    LiveRange* tail = SplitBetween(current,
                                   current->Start(),
                                   block_pos[reg].InstructionStart());
    if (!AllocationOk()) return;
    AddToUnhandledSorted(tail);
  }

  // Register reg is not blocked for the whole range.
  ASSERT(block_pos[reg].Value() >= current->End().Value());
  TraceAlloc("Assigning blocked reg %s to live range %d\n",
             RegisterName(reg),
             current->id());
  SetLiveRangeAssignedRegister(current, reg);

  // This register was not free. Thus we need to find and spill
  // parts of active and inactive live ranges that use the same register
  // at the same lifetime positions as current.
  SplitAndSpillIntersecting(current);
}


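// If a spill position falls inside a loop, moving it up to the header of
// that loop (or of an enclosing loop) lets the store happen once before the
// loop instead of on every iteration's back edge.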
LifetimePosition LAllocator::FindOptimalSpillingPos(LiveRange* range,
                                                    LifetimePosition pos) {
  HBasicBlock* block = GetBlock(pos.InstructionStart());
  HBasicBlock* loop_header =
      block->IsLoopHeader() ? block : block->parent_loop_header();

  if (loop_header == NULL) return pos;

  UsePosition* prev_use =
      range->PreviousUsePositionRegisterIsBeneficial(pos);

  while (loop_header != NULL) {
    // We are going to spill the live range inside the loop.
    // If possible, try to move the spilling position backwards to the loop
    // header. This will reduce the number of memory moves on the back edge.
    LifetimePosition loop_start = LifetimePosition::FromInstructionIndex(
        loop_header->first_instruction_index());

    if (range->Covers(loop_start)) {
      if (prev_use == NULL || prev_use->pos().Value() < loop_start.Value()) {
        // No register-beneficial use inside the loop before the pos.
        pos = loop_start;
      }
    }

    // Try hoisting out to an outer loop.
    loop_header = loop_header->parent_loop_header();
  }

  return pos;
}


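// |current| was assigned a register that other active/inactive ranges also
// hold. Split those ranges and spill the parts that overlap |current| so
// the register belongs to |current| alone over its lifetime.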
void LAllocator::SplitAndSpillIntersecting(LiveRange* current) {
  ASSERT(current->HasRegisterAssigned());
  int reg = current->assigned_register();
  LifetimePosition split_pos = current->Start();
  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* range = active_live_ranges_[i];
    if (range->assigned_register() == reg) {
      UsePosition* next_pos = range->NextRegisterPosition(current->Start());
      LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos);
      if (next_pos == NULL) {
        SpillAfter(range, spill_pos);
      } else {
        // When spilling between spill_pos and next_pos ensure that the range
        // remains spilled at least until the start of the current live range.
        // This guarantees that we will not introduce new unhandled ranges that
        // start before the current range, as this violates the allocation
        // invariant and will lead to an inconsistent state of active and
        // inactive live ranges: ranges are allocated in order of their start
        // positions, and ranges are retired from active/inactive when the
        // start of the current live range is larger than their end.
        SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos());
      }
      if (!AllocationOk()) return;
      ActiveToHandled(range);
      --i;
    }
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* range = inactive_live_ranges_[i];
    ASSERT(range->End().Value() > current->Start().Value());
    if (range->assigned_register() == reg && !range->IsFixed()) {
      LifetimePosition next_intersection = range->FirstIntersection(current);
      if (next_intersection.IsValid()) {
        UsePosition* next_pos = range->NextRegisterPosition(current->Start());
        if (next_pos == NULL) {
          SpillAfter(range, split_pos);
        } else {
          next_intersection = Min(next_intersection, next_pos->pos());
          SpillBetween(range, split_pos, next_intersection);
        }
        if (!AllocationOk()) return;
        InactiveToHandled(range);
        --i;
      }
    }
  }
}


bool LAllocator::IsBlockBoundary(LifetimePosition pos) {
  return pos.IsInstructionStart() &&
         InstructionAt(pos.InstructionIndex())->IsLabel();
}


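// Splitting produces a new child live range, carrying a fresh virtual
// register, that covers the part of |range| from |pos| onwards; the
// original keeps the part before |pos|.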
LiveRange* LAllocator::SplitRangeAt(LiveRange* range, LifetimePosition pos) {
  ASSERT(!range->IsFixed());
  TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());

  if (pos.Value() <= range->Start().Value()) return range;

  // We can't properly connect live ranges if the split occurred at the end
  // of a control instruction.
  ASSERT(pos.IsInstructionStart() ||
         !chunk_->instructions()->at(pos.InstructionIndex())->IsControl());

  int vreg = GetVirtualRegister();
  if (!AllocationOk()) return NULL;
  LiveRange* result = LiveRangeFor(vreg);
  range->SplitAt(pos, result, zone());
  return result;
}


LiveRange* LAllocator::SplitBetween(LiveRange* range,
                                    LifetimePosition start,
                                    LifetimePosition end) {
  ASSERT(!range->IsFixed());
  TraceAlloc("Splitting live range %d in position between [%d, %d]\n",
             range->id(),
             start.Value(),
             end.Value());

  LifetimePosition split_pos = FindOptimalSplitPos(start, end);
  ASSERT(split_pos.Value() >= start.Value());
  return SplitRangeAt(range, split_pos);
}


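// Pick a split position inside [start, end] that keeps the split (and any
// reload it implies) out of loops: prefer the first instruction of the
// outermost loop header contained in the interval.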
LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start,
                                                 LifetimePosition end) {
  int start_instr = start.InstructionIndex();
  int end_instr = end.InstructionIndex();
  ASSERT(start_instr <= end_instr);

  // We have no choice.
  if (start_instr == end_instr) return end;

  HBasicBlock* start_block = GetBlock(start);
  HBasicBlock* end_block = GetBlock(end);

  if (end_block == start_block) {
    // The interval is split in the same basic block. Split at the latest
    // possible position.
    return end;
  }

  HBasicBlock* block = end_block;
  // Find header of outermost loop.
  while (block->parent_loop_header() != NULL &&
         block->parent_loop_header()->block_id() > start_block->block_id()) {
    block = block->parent_loop_header();
  }

  // We did not find any suitable outer loop. Split at the latest possible
  // position unless end_block is a loop header itself.
  if (block == end_block && !end_block->IsLoopHeader()) return end;

  return LifetimePosition::FromInstructionIndex(
      block->first_instruction_index());
}


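// The spill helpers below differ in which part of the range ends up in a
// stack slot: SpillAfter spills everything from |pos| onwards, while
// SpillBetween/SpillBetweenUntil spill only the middle piece that overlaps
// [start, end[ and requeue the remainder for another allocation attempt.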
void LAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
  LiveRange* second_part = SplitRangeAt(range, pos);
  if (!AllocationOk()) return;
  Spill(second_part);
}


void LAllocator::SpillBetween(LiveRange* range,
                              LifetimePosition start,
                              LifetimePosition end) {
  SpillBetweenUntil(range, start, start, end);
}


void LAllocator::SpillBetweenUntil(LiveRange* range,
                                   LifetimePosition start,
                                   LifetimePosition until,
                                   LifetimePosition end) {
  CHECK(start.Value() < end.Value());
  LiveRange* second_part = SplitRangeAt(range, start);
  if (!AllocationOk()) return;

  if (second_part->Start().Value() < end.Value()) {
    // The split result intersects with [start, end[.
    // Split it at a position between ]start+1, end[, spill the middle part
    // and put the rest into the unhandled list.
    LiveRange* third_part = SplitBetween(
        second_part,
        Max(second_part->Start().InstructionEnd(), until),
        end.PrevInstruction().InstructionEnd());
    if (!AllocationOk()) return;

    ASSERT(third_part != second_part);

    Spill(second_part);
    AddToUnhandledSorted(third_part);
  } else {
    // The split result does not intersect with [start, end[.
    // Nothing to spill. Just put it into the unhandled list as a whole.
    AddToUnhandledSorted(second_part);
  }
}


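// Assign a stack slot to the whole family of a split live range: the spill
// operand lives on the top-level range and is shared by all of its
// children. A reused slot may first need conversion to the matching SIMD128
// stack-slot kind.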
void LAllocator::Spill(LiveRange* range) {
  ASSERT(!range->IsSpilled());
  TraceAlloc("Spilling live range %d\n", range->id());
  LiveRange* first = range->TopLevel();

  if (!first->HasAllocatedSpillOperand()) {
    LOperand* op = TryReuseSpillSlot(range);
    if (op == NULL) {
      op = chunk_->GetNextSpillSlot(range->Kind());
    } else if (range->Kind() == FLOAT32x4_REGISTERS &&
               op->kind() != LOperand::FLOAT32x4_STACK_SLOT) {
      // Convert to Float32x4StackSlot.
      op = LFloat32x4StackSlot::Create(op->index(), zone());
    } else if (range->Kind() == INT32x4_REGISTERS &&
               op->kind() != LOperand::INT32x4_STACK_SLOT) {
      // Convert to Int32x4StackSlot.
      op = LInt32x4StackSlot::Create(op->index(), zone());
    }
    first->SetSpillOperand(op);
  }
  range->MakeSpilled(chunk()->zone());
}


int LAllocator::RegisterCount() const {
  return num_registers_;
}


#ifdef DEBUG


void LAllocator::Verify() const {
  for (int i = 0; i < live_ranges()->length(); ++i) {
    LiveRange* current = live_ranges()->at(i);
    if (current != NULL) current->Verify();
  }
}


#endif


LAllocatorPhase::LAllocatorPhase(const char* name, LAllocator* allocator)
    : CompilationPhase(name, allocator->graph()->info()),
      allocator_(allocator) {
  if (FLAG_hydrogen_stats) {
    allocator_zone_start_allocation_size_ =
        allocator->zone()->allocation_size();
  }
}


LAllocatorPhase::~LAllocatorPhase() {
  if (FLAG_hydrogen_stats) {
    unsigned size = allocator_->zone()->allocation_size() -
                    allocator_zone_start_allocation_size_;
    isolate()->GetHStatistics()->SaveTiming(name(), TimeDelta(), size);
  }

  if (ShouldProduceTraceOutput()) {
    isolate()->GetHTracer()->TraceLithium(name(), allocator_->chunk());
    isolate()->GetHTracer()->TraceLiveRanges(name(), allocator_);
  }

#ifdef DEBUG
  if (allocator_ != NULL) allocator_->Verify();
#endif
}


2271 } } // namespace v8::internal