// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/hydrogen.h"
#include "src/lithium-inl.h"
#include "src/lithium-allocator-inl.h"
#include "src/string-stream.h"

namespace v8 {
namespace internal {

static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
  return a.Value() < b.Value() ? a : b;
}


static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
  return a.Value() > b.Value() ? a : b;
}


UsePosition::UsePosition(LifetimePosition pos,
                         LOperand* operand,
                         LOperand* hint)
    : operand_(operand),
      hint_(hint),
      pos_(pos),
      next_(NULL),
      requires_reg_(false),
      register_beneficial_(true) {
  if (operand_ != NULL && operand_->IsUnallocated()) {
    LUnallocated* unalloc = LUnallocated::cast(operand_);
    requires_reg_ = unalloc->HasRegisterPolicy() ||
        unalloc->HasDoubleRegisterPolicy();
    register_beneficial_ = !unalloc->HasAnyPolicy();
  }
  DCHECK(pos_.IsValid());
}


bool UsePosition::HasHint() const {
  return hint_ != NULL && !hint_->IsUnallocated();
}


bool UsePosition::RequiresRegister() const {
  return requires_reg_;
}


bool UsePosition::RegisterIsBeneficial() const {
  return register_beneficial_;
}


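// Splits this interval at |pos|: this interval is truncated to [start, pos[
// and a new interval covering [pos, end[ is linked in right after it.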
void UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
  DCHECK(Contains(pos) && pos.Value() != start().Value());
  UseInterval* after = new(zone) UseInterval(pos, end_);
  after->next_ = next_;
  next_ = after;
  end_ = pos;
}


#ifdef DEBUG


void LiveRange::Verify() const {
  UsePosition* cur = first_pos_;
  while (cur != NULL) {
    DCHECK(Start().Value() <= cur->pos().Value() &&
           cur->pos().Value() <= End().Value());
    cur = cur->next();
  }
}


bool LiveRange::HasOverlap(UseInterval* target) const {
  UseInterval* current_interval = first_interval_;
  while (current_interval != NULL) {
    // Intervals overlap if the start of one is contained in the other.
    if (current_interval->Contains(target->start()) ||
        target->Contains(current_interval->start())) {
      return true;
    }
    current_interval = current_interval->next();
  }
  return false;
}


#endif


LiveRange::LiveRange(int id, Zone* zone)
    : id_(id),
      spilled_(false),
      kind_(UNALLOCATED_REGISTERS),
      assigned_register_(kInvalidAssignment),
      last_interval_(NULL),
      first_interval_(NULL),
      first_pos_(NULL),
      parent_(NULL),
      next_(NULL),
      current_interval_(NULL),
      last_processed_use_(NULL),
      current_hint_operand_(NULL),
      spill_operand_(new (zone) LOperand()),
      spill_start_index_(kMaxInt) {}


void LiveRange::set_assigned_register(int reg, Zone* zone) {
  DCHECK(!HasRegisterAssigned() && !IsSpilled());
  assigned_register_ = reg;
  ConvertOperands(zone);
}


void LiveRange::MakeSpilled(Zone* zone) {
  DCHECK(!IsSpilled());
  DCHECK(TopLevel()->HasAllocatedSpillOperand());
  spilled_ = true;
  assigned_register_ = kInvalidAssignment;
  ConvertOperands(zone);
}


bool LiveRange::HasAllocatedSpillOperand() const {
  DCHECK(spill_operand_ != NULL);
  return !spill_operand_->IsIgnored();
}


void LiveRange::SetSpillOperand(LOperand* operand) {
  DCHECK(!operand->IsUnallocated());
  DCHECK(spill_operand_ != NULL);
  DCHECK(spill_operand_->IsIgnored());
  spill_operand_->ConvertTo(operand->kind(), operand->index());
}


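// Returns the first use position at or after |start|. The walk resumes at
// the cached last_processed_use_, which is only ever advanced, so lookups
// are expected to proceed in non-decreasing order of |start|.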
UsePosition* LiveRange::NextUsePosition(LifetimePosition start) {
  UsePosition* use_pos = last_processed_use_;
  if (use_pos == NULL) use_pos = first_pos();
  while (use_pos != NULL && use_pos->pos().Value() < start.Value()) {
    use_pos = use_pos->next();
  }
  last_processed_use_ = use_pos;
  return use_pos;
}


UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
    LifetimePosition start) {
  UsePosition* pos = NextUsePosition(start);
  while (pos != NULL && !pos->RegisterIsBeneficial()) {
    pos = pos->next();
  }
  return pos;
}


UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial(
    LifetimePosition start) {
  UsePosition* pos = first_pos();
  UsePosition* prev = NULL;
  while (pos != NULL && pos->pos().Value() < start.Value()) {
    if (pos->RegisterIsBeneficial()) prev = pos;
    pos = pos->next();
  }
  return prev;
}


UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) {
  UsePosition* pos = NextUsePosition(start);
  while (pos != NULL && !pos->RequiresRegister()) {
    pos = pos->next();
  }
  return pos;
}


bool LiveRange::CanBeSpilled(LifetimePosition pos) {
  // We cannot spill a live range that has a use requiring a register
  // at the current or the immediate next position.
  UsePosition* use_pos = NextRegisterPosition(pos);
  if (use_pos == NULL) return true;
  return
      use_pos->pos().Value() > pos.NextInstruction().InstructionEnd().Value();
}


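// Materializes the operand this range has been allocated to: the assigned
// register, the top-level range's spill slot, or an unallocated placeholder
// when allocation has not yet decided.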
LOperand* LiveRange::CreateAssignedOperand(Zone* zone) {
  LOperand* op = NULL;
  if (HasRegisterAssigned()) {
    DCHECK(!IsSpilled());
    switch (Kind()) {
      case GENERAL_REGISTERS:
        op = LRegister::Create(assigned_register(), zone);
        break;
      case DOUBLE_REGISTERS:
        op = LDoubleRegister::Create(assigned_register(), zone);
        break;
      case FLOAT32x4_REGISTERS:
        op = LFloat32x4Register::Create(assigned_register(), zone);
        break;
      case FLOAT64x2_REGISTERS:
        op = LFloat64x2Register::Create(assigned_register(), zone);
        break;
      case INT32x4_REGISTERS:
        op = LInt32x4Register::Create(assigned_register(), zone);
        break;
      default:
        UNREACHABLE();
    }
  } else if (IsSpilled()) {
    DCHECK(!HasRegisterAssigned());
    op = TopLevel()->GetSpillOperand();
    DCHECK(!op->IsUnallocated());
  } else {
    LUnallocated* unalloc = new(zone) LUnallocated(LUnallocated::NONE);
    unalloc->set_virtual_register(id_);
    op = unalloc;
  }
  return op;
}


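// Returns the interval from which to start a search for |position|: the
// cached current_interval_ when it does not start after |position|,
// otherwise the first interval of the range.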
UseInterval* LiveRange::FirstSearchIntervalForPosition(
    LifetimePosition position) const {
  if (current_interval_ == NULL) return first_interval_;
  if (current_interval_->start().Value() > position.Value()) {
    current_interval_ = NULL;
    return first_interval_;
  }
  return current_interval_;
}


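// Caches |to_start_of| as the interval to resume searches from, provided it
// does not start past |but_not_past| and lies beyond the currently cached
// interval.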
void LiveRange::AdvanceLastProcessedMarker(
    UseInterval* to_start_of, LifetimePosition but_not_past) const {
  if (to_start_of == NULL) return;
  if (to_start_of->start().Value() > but_not_past.Value()) return;
  LifetimePosition start =
      current_interval_ == NULL ? LifetimePosition::Invalid()
                                : current_interval_->start();
  if (to_start_of->start().Value() > start.Value()) {
    current_interval_ = to_start_of;
  }
}


void LiveRange::SplitAt(LifetimePosition position,
                        LiveRange* result,
                        Zone* zone) {
  DCHECK(Start().Value() < position.Value());
  DCHECK(result->IsEmpty());
  // Find the last interval that ends before the position. If the
  // position is contained in one of the intervals in the chain, we
  // split that interval and use the first part.
  UseInterval* current = FirstSearchIntervalForPosition(position);

  // If the split position coincides with the beginning of a use interval
  // we need to split use positions in a special way.
  bool split_at_start = false;

  if (current->start().Value() == position.Value()) {
    // When splitting at start we need to locate the previous use interval.
    current = first_interval_;
  }

  while (current != NULL) {
    if (current->Contains(position)) {
      current->SplitAt(position, zone);
      break;
    }
    UseInterval* next = current->next();
    if (next->start().Value() >= position.Value()) {
      split_at_start = (next->start().Value() == position.Value());
      break;
    }
    current = next;
  }

  // Partition original use intervals to the two live ranges.
  UseInterval* before = current;
  UseInterval* after = before->next();
  result->last_interval_ = (last_interval_ == before)
      ? after            // Only interval in the range after split.
      : last_interval_;  // Last interval of the original range.
  result->first_interval_ = after;
  last_interval_ = before;

  // Find the last use position before the split and the first use
  // position after it.
  UsePosition* use_after = first_pos_;
  UsePosition* use_before = NULL;
  if (split_at_start) {
    // The split position coincides with the beginning of a use interval (the
    // end of a lifetime hole). A use at this position should be attributed
    // to the split child, which owns the use interval covering it.
    while (use_after != NULL && use_after->pos().Value() < position.Value()) {
      use_before = use_after;
      use_after = use_after->next();
    }
  } else {
    while (use_after != NULL && use_after->pos().Value() <= position.Value()) {
      use_before = use_after;
      use_after = use_after->next();
    }
  }

  // Partition original use positions to the two live ranges.
  if (use_before != NULL) {
    use_before->next_ = NULL;
  } else {
    first_pos_ = NULL;
  }
  result->first_pos_ = use_after;

  // Discard cached iteration state. It might be pointing
  // to the use that no longer belongs to this live range.
  last_processed_use_ = NULL;
  current_interval_ = NULL;

  // Link the new live range in the chain before any of the other
  // ranges linked from the range before the split.
  result->parent_ = (parent_ == NULL) ? this : parent_;
  result->kind_ = result->parent_->kind_;
  result->next_ = next_;
  next_ = result;

#ifdef DEBUG
  Verify();
  result->Verify();
#endif
}


// This implements an ordering on live ranges so that they are ordered by their
// start positions.  This is needed for the correctness of the register
// allocation algorithm.  If two live ranges start at the same offset then there
// is a tie breaker based on where the value is first used.  This part of the
// ordering is merely a heuristic.
bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
  LifetimePosition start = Start();
  LifetimePosition other_start = other->Start();
  if (start.Value() == other_start.Value()) {
    UsePosition* pos = first_pos();
    if (pos == NULL) return false;
    UsePosition* other_pos = other->first_pos();
    if (other_pos == NULL) return true;
    return pos->pos().Value() < other_pos->pos().Value();
  }
  return start.Value() < other_start.Value();
}


void LiveRange::ShortenTo(LifetimePosition start) {
  LAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value());
  DCHECK(first_interval_ != NULL);
  DCHECK(first_interval_->start().Value() <= start.Value());
  DCHECK(start.Value() < first_interval_->end().Value());
  first_interval_->set_start(start);
}


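// Guarantees that [start, end[ is covered by this range: every existing
// interval that begins inside the new one is absorbed into a single interval
// prepended to the chain, extending |end| to the furthest absorbed end.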
void LiveRange::EnsureInterval(LifetimePosition start,
                               LifetimePosition end,
                               Zone* zone) {
  LAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n",
                         id_,
                         start.Value(),
                         end.Value());
  LifetimePosition new_end = end;
  while (first_interval_ != NULL &&
         first_interval_->start().Value() <= end.Value()) {
    if (first_interval_->end().Value() > end.Value()) {
      new_end = first_interval_->end();
    }
    first_interval_ = first_interval_->next();
  }

  UseInterval* new_interval = new(zone) UseInterval(start, new_end);
  new_interval->next_ = first_interval_;
  first_interval_ = new_interval;
  if (new_interval->next() == NULL) {
    last_interval_ = new_interval;
  }
}


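// Prepends [start, end[ to the interval chain, merging it with the current
// first interval when the two touch or overlap.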
void LiveRange::AddUseInterval(LifetimePosition start,
                               LifetimePosition end,
                               Zone* zone) {
  LAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n",
                         id_,
                         start.Value(),
                         end.Value());
  if (first_interval_ == NULL) {
    UseInterval* interval = new(zone) UseInterval(start, end);
    first_interval_ = interval;
    last_interval_ = interval;
  } else {
    if (end.Value() == first_interval_->start().Value()) {
      first_interval_->set_start(start);
    } else if (end.Value() < first_interval_->start().Value()) {
      UseInterval* interval = new(zone) UseInterval(start, end);
      interval->set_next(first_interval_);
      first_interval_ = interval;
    } else {
      // The order of instruction processing (see ProcessInstructions)
      // guarantees that each new use interval either precedes or intersects
      // with the last added interval.
      DCHECK(start.Value() < first_interval_->end().Value());
      first_interval_->start_ = Min(start, first_interval_->start_);
      first_interval_->end_ = Max(end, first_interval_->end_);
    }
  }
}


void LiveRange::AddUsePosition(LifetimePosition pos,
                               LOperand* operand,
                               LOperand* hint,
                               Zone* zone) {
  LAllocator::TraceAlloc("Add to live range %d use position %d\n",
                         id_,
                         pos.Value());
  UsePosition* use_pos = new(zone) UsePosition(pos, operand, hint);
  UsePosition* prev_hint = NULL;
  UsePosition* prev = NULL;
  UsePosition* current = first_pos_;
  while (current != NULL && current->pos().Value() < pos.Value()) {
    prev_hint = current->HasHint() ? current : prev_hint;
    prev = current;
    current = current->next();
  }

  if (prev == NULL) {
    use_pos->set_next(first_pos_);
    first_pos_ = use_pos;
  } else {
    use_pos->next_ = prev->next_;
    prev->next_ = use_pos;
  }

  if (prev_hint == NULL && use_pos->HasHint()) {
    current_hint_operand_ = hint;
  }
}


void LiveRange::ConvertOperands(Zone* zone) {
  LOperand* op = CreateAssignedOperand(zone);
  UsePosition* use_pos = first_pos();
  while (use_pos != NULL) {
    DCHECK(Start().Value() <= use_pos->pos().Value() &&
           use_pos->pos().Value() <= End().Value());

    if (use_pos->HasOperand()) {
      DCHECK(op->IsRegister() || op->IsDoubleRegister() ||
             op->IsSIMD128Register() || !use_pos->RequiresRegister());
      use_pos->operand()->ConvertTo(op->kind(), op->index());
    }
    use_pos = use_pos->next();
  }
}


bool LiveRange::CanCover(LifetimePosition position) const {
  if (IsEmpty()) return false;
  return Start().Value() <= position.Value() &&
         position.Value() < End().Value();
}


bool LiveRange::Covers(LifetimePosition position) {
  if (!CanCover(position)) return false;
  UseInterval* start_search = FirstSearchIntervalForPosition(position);
  for (UseInterval* interval = start_search;
       interval != NULL;
       interval = interval->next()) {
    DCHECK(interval->next() == NULL ||
           interval->next()->start().Value() >= interval->start().Value());
    AdvanceLastProcessedMarker(interval, position);
    if (interval->Contains(position)) return true;
    if (interval->start().Value() > position.Value()) return false;
  }
  return false;
}


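// Walks the two interval chains in parallel and returns the first position
// at which this range and |other| are simultaneously live, or an invalid
// position if they never intersect.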
LifetimePosition LiveRange::FirstIntersection(LiveRange* other) {
  UseInterval* b = other->first_interval();
  if (b == NULL) return LifetimePosition::Invalid();
  LifetimePosition advance_last_processed_up_to = b->start();
  UseInterval* a = FirstSearchIntervalForPosition(b->start());
  while (a != NULL && b != NULL) {
    if (a->start().Value() > other->End().Value()) break;
    if (b->start().Value() > End().Value()) break;
    LifetimePosition cur_intersection = a->Intersect(b);
    if (cur_intersection.IsValid()) {
      return cur_intersection;
    }
    if (a->start().Value() < b->start().Value()) {
      a = a->next();
      if (a == NULL || a->start().Value() > other->End().Value()) break;
      AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
    } else {
      b = b->next();
    }
  }
  return LifetimePosition::Invalid();
}


LAllocator::LAllocator(int num_values, HGraph* graph)
    : zone_(graph->isolate()),
      chunk_(NULL),
      live_in_sets_(graph->blocks()->length(), zone()),
      live_ranges_(num_values * 2, zone()),
      fixed_live_ranges_(NULL),
      fixed_double_live_ranges_(NULL),
      unhandled_live_ranges_(num_values * 2, zone()),
      active_live_ranges_(8, zone()),
      inactive_live_ranges_(8, zone()),
      reusable_slots_(8, zone()),
      reusable_simd128_slots_(8, zone()),
      next_virtual_register_(num_values),
      first_artificial_register_(num_values),
      mode_(UNALLOCATED_REGISTERS),
      num_registers_(-1),
      graph_(graph),
      has_osr_entry_(false),
      allocation_ok_(true) {}


void LAllocator::InitializeLivenessAnalysis() {
  // Initialize the live_in sets for each block to NULL.
  int block_count = graph_->blocks()->length();
  live_in_sets_.Initialize(block_count, zone());
  live_in_sets_.AddBlock(NULL, block_count, zone());
}


BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) {
  // Compute live out for the given block, excluding liveness that flows in
  // through backward successor edges.
  BitVector* live_out = new(zone()) BitVector(next_virtual_register_, zone());

  // Process all successor blocks.
  for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
    // Add values live on entry to the successor. Note the successor's
    // live_in will not be computed yet for backwards edges.
    HBasicBlock* successor = it.Current();
    BitVector* live_in = live_in_sets_[successor->block_id()];
    if (live_in != NULL) live_out->Union(*live_in);

    // All phi input operands corresponding to this successor edge are live
    // out from this block.
    int index = successor->PredecessorIndexOf(block);
    const ZoneList<HPhi*>* phis = successor->phis();
    for (int i = 0; i < phis->length(); ++i) {
      HPhi* phi = phis->at(i);
      if (!phi->OperandAt(index)->IsConstant()) {
        live_out->Add(phi->OperandAt(index)->id());
      }
    }
  }

  return live_out;
}


void LAllocator::AddInitialIntervals(HBasicBlock* block,
                                     BitVector* live_out) {
  // Add an interval that includes the entire block to the live range for
  // each live_out value.
  LifetimePosition start = LifetimePosition::FromInstructionIndex(
      block->first_instruction_index());
  LifetimePosition end = LifetimePosition::FromInstructionIndex(
      block->last_instruction_index()).NextInstruction();
  BitVector::Iterator iterator(live_out);
  while (!iterator.Done()) {
    int operand_index = iterator.Current();
    LiveRange* range = LiveRangeFor(operand_index);
    range->AddUseInterval(start, end, zone());
    iterator.Advance();
  }
}


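// Fixed double registers get negative live range ids placed below the ids
// reserved for fixed general registers.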
int LAllocator::FixedDoubleLiveRangeID(int index) {
  return -index - 1 - Register::kMaxNumAllocatableRegisters;
}


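// Converts |operand| to its fixed location (stack slot or register). If the
// value is tagged, it is also recorded in the pointer map of the instruction
// at |pos|, when that instruction has one.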
LOperand* LAllocator::AllocateFixed(LUnallocated* operand,
                                    int pos,
                                    bool is_tagged) {
  TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register());
  DCHECK(operand->HasFixedPolicy());
  if (operand->HasFixedSlotPolicy()) {
    operand->ConvertTo(LOperand::STACK_SLOT, operand->fixed_slot_index());
  } else if (operand->HasFixedRegisterPolicy()) {
    int reg_index = operand->fixed_register_index();
    operand->ConvertTo(LOperand::REGISTER, reg_index);
  } else if (operand->HasFixedDoubleRegisterPolicy()) {
    int reg_index = operand->fixed_register_index();
    operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index);
  } else {
    UNREACHABLE();
  }
  if (is_tagged) {
    TraceAlloc("Fixed reg is tagged at %d\n", pos);
    LInstruction* instr = InstructionAt(pos);
    if (instr->HasPointerMap()) {
      instr->pointer_map()->RecordPointer(operand, chunk()->zone());
    }
  }
  return operand;
}


LiveRange* LAllocator::FixedLiveRangeFor(int index) {
  DCHECK(index < Register::kMaxNumAllocatableRegisters);
  LiveRange* result = fixed_live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(FixedLiveRangeID(index), chunk()->zone());
    DCHECK(result->IsFixed());
    result->kind_ = GENERAL_REGISTERS;
    SetLiveRangeAssignedRegister(result, index);
    fixed_live_ranges_[index] = result;
  }
  return result;
}


LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
  DCHECK(index < DoubleRegister::NumAllocatableRegisters());
  LiveRange* result = fixed_double_live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(FixedDoubleLiveRangeID(index),
                                   chunk()->zone());
    DCHECK(result->IsFixed());
    result->kind_ = DOUBLE_REGISTERS;
    SetLiveRangeAssignedRegister(result, index);
    fixed_double_live_ranges_[index] = result;
  }
  return result;
}


LiveRange* LAllocator::LiveRangeFor(int index) {
  if (index >= live_ranges_.length()) {
    live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1, zone());
  }
  LiveRange* result = live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(index, chunk()->zone());
    live_ranges_[index] = result;
  }
  return result;
}


LGap* LAllocator::GetLastGap(HBasicBlock* block) {
  int last_instruction = block->last_instruction_index();
  int index = chunk_->NearestGapPos(last_instruction);
  return GapAt(index);
}


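// Returns the phi that defines the virtual register of |operand|, or NULL
// when the operand is not an unallocated use of a phi.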
HPhi* LAllocator::LookupPhi(LOperand* operand) const {
  if (!operand->IsUnallocated()) return NULL;
  int index = LUnallocated::cast(operand)->virtual_register();
  HValue* instr = graph_->LookupValue(index);
  if (instr != NULL && instr->IsPhi()) {
    return HPhi::cast(instr);
  }
  return NULL;
}


LiveRange* LAllocator::LiveRangeFor(LOperand* operand) {
  if (operand->IsUnallocated()) {
    return LiveRangeFor(LUnallocated::cast(operand)->virtual_register());
  } else if (operand->IsRegister()) {
    return FixedLiveRangeFor(operand->index());
  } else if (operand->IsDoubleRegister()) {
    return FixedDoubleLiveRangeFor(operand->index());
  } else {
    return NULL;
  }
}


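// Records a definition of |operand| at |position|, shortening the interval
// that backward liveness processing created for it (or adding a minimal
// interval for a definition without uses).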
void LAllocator::Define(LifetimePosition position,
                        LOperand* operand,
                        LOperand* hint) {
  LiveRange* range = LiveRangeFor(operand);
  if (range == NULL) return;

  if (range->IsEmpty() || range->Start().Value() > position.Value()) {
    // This can happen if there is a definition without a use.
    range->AddUseInterval(position, position.NextInstruction(), zone());
    range->AddUsePosition(position.NextInstruction(), NULL, NULL, zone());
  } else {
    range->ShortenTo(position);
  }

  if (operand->IsUnallocated()) {
    LUnallocated* unalloc_operand = LUnallocated::cast(operand);
    range->AddUsePosition(position, unalloc_operand, hint, zone());
  }
}


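// Records a use of |operand| at |position|. Since instructions are processed
// backwards, the live range is conservatively extended back to |block_start|
// and later shortened when the definition is seen.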
void LAllocator::Use(LifetimePosition block_start,
                     LifetimePosition position,
                     LOperand* operand,
                     LOperand* hint) {
  LiveRange* range = LiveRangeFor(operand);
  if (range == NULL) return;
  if (operand->IsUnallocated()) {
    LUnallocated* unalloc_operand = LUnallocated::cast(operand);
    range->AddUsePosition(position, unalloc_operand, hint, zone());
  }
  range->AddUseInterval(block_start, position, zone());
}


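// Adds a move from |from| to |to| in the start parallel move of the gap at
// |index|. If |from| is already the destination of a move in that gap, the
// new move is chained from that move's source instead, so the parallel move
// stays well-formed.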
void LAllocator::AddConstraintsGapMove(int index,
                                       LOperand* from,
                                       LOperand* to) {
  LGap* gap = GapAt(index);
  LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
                                                     chunk()->zone());
  if (from->IsUnallocated()) {
    const ZoneList<LMoveOperands>* move_operands = move->move_operands();
    for (int i = 0; i < move_operands->length(); ++i) {
      LMoveOperands cur = move_operands->at(i);
      LOperand* cur_to = cur.destination();
      if (cur_to->IsUnallocated()) {
        if (LUnallocated::cast(cur_to)->virtual_register() ==
            LUnallocated::cast(from)->virtual_register()) {
          move->AddMove(cur.source(), to, chunk()->zone());
          return;
        }
      }
    }
  }
  move->AddMove(from, to, chunk()->zone());
}


void LAllocator::MeetRegisterConstraints(HBasicBlock* block) {
  int start = block->first_instruction_index();
  int end = block->last_instruction_index();
  if (start == -1) return;
  for (int i = start; i <= end; ++i) {
    if (IsGapAt(i)) {
      LInstruction* instr = NULL;
      LInstruction* prev_instr = NULL;
      if (i < end) instr = InstructionAt(i + 1);
      if (i > start) prev_instr = InstructionAt(i - 1);
      MeetConstraintsBetween(prev_instr, instr, i);
      if (!AllocationOk()) return;
    }
  }
}


void LAllocator::MeetConstraintsBetween(LInstruction* first,
                                        LInstruction* second,
                                        int gap_index) {
  // Handle fixed temporaries.
  if (first != NULL) {
    for (TempIterator it(first); !it.Done(); it.Advance()) {
      LUnallocated* temp = LUnallocated::cast(it.Current());
      if (temp->HasFixedPolicy()) {
        AllocateFixed(temp, gap_index - 1, false);
      }
    }
  }

  // Handle fixed output operand.
  if (first != NULL && first->Output() != NULL) {
    LUnallocated* first_output = LUnallocated::cast(first->Output());
    LiveRange* range = LiveRangeFor(first_output->virtual_register());
    bool assigned = false;
    if (first_output->HasFixedPolicy()) {
      LUnallocated* output_copy = first_output->CopyUnconstrained(
          chunk()->zone());
      bool is_tagged = HasTaggedValue(first_output->virtual_register());
      AllocateFixed(first_output, gap_index, is_tagged);

      // This value is produced on the stack; we never need to spill it.
      if (first_output->IsStackSlot()) {
        range->SetSpillOperand(first_output);
        range->SetSpillStartIndex(gap_index - 1);
        assigned = true;
      }
      chunk_->AddGapMove(gap_index, first_output, output_copy);
    }

    if (!assigned) {
      range->SetSpillStartIndex(gap_index);

      // This move to the spill operand is not a real use. Liveness analysis
      // and splitting of live ranges do not account for it.
      // Thus it should be inserted at a lifetime position corresponding to
      // the instruction end.
      LGap* gap = GapAt(gap_index);
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE,
                                                         chunk()->zone());
      move->AddMove(first_output, range->GetSpillOperand(),
                    chunk()->zone());
    }
  }

  // Handle fixed input operands of second instruction.
  if (second != NULL) {
    for (UseIterator it(second); !it.Done(); it.Advance()) {
      LUnallocated* cur_input = LUnallocated::cast(it.Current());
      if (cur_input->HasFixedPolicy()) {
        LUnallocated* input_copy = cur_input->CopyUnconstrained(
            chunk()->zone());
        bool is_tagged = HasTaggedValue(cur_input->virtual_register());
        AllocateFixed(cur_input, gap_index + 1, is_tagged);
        AddConstraintsGapMove(gap_index, input_copy, cur_input);
      } else if (cur_input->HasWritableRegisterPolicy()) {
        // The live range of writable input registers always goes until the end
        // of the instruction.
        DCHECK(!cur_input->IsUsedAtStart());

        LUnallocated* input_copy = cur_input->CopyUnconstrained(
            chunk()->zone());
        int vreg = GetVirtualRegister();
        if (!AllocationOk()) return;
        cur_input->set_virtual_register(vreg);

        if (RequiredRegisterKind(input_copy->virtual_register()) ==
            DOUBLE_REGISTERS) {
          double_artificial_registers_.Add(
              cur_input->virtual_register() - first_artificial_register_,
              zone());
        } else if (RequiredRegisterKind(input_copy->virtual_register()) ==
            FLOAT32x4_REGISTERS) {
          float32x4_artificial_registers_.Add(
              cur_input->virtual_register() - first_artificial_register_,
              zone());
        } else if (RequiredRegisterKind(input_copy->virtual_register()) ==
            FLOAT64x2_REGISTERS) {
          float64x2_artificial_registers_.Add(
              cur_input->virtual_register() - first_artificial_register_,
              zone());
        } else if (RequiredRegisterKind(input_copy->virtual_register()) ==
            INT32x4_REGISTERS) {
          int32x4_artificial_registers_.Add(
              cur_input->virtual_register() - first_artificial_register_,
              zone());
        }

        AddConstraintsGapMove(gap_index, input_copy, cur_input);
      }
    }
  }

  // Handle "output same as input" for second instruction.
  if (second != NULL && second->Output() != NULL) {
    LUnallocated* second_output = LUnallocated::cast(second->Output());
    if (second_output->HasSameAsInputPolicy()) {
      LUnallocated* cur_input = LUnallocated::cast(second->FirstInput());
      int output_vreg = second_output->virtual_register();
      int input_vreg = cur_input->virtual_register();

      LUnallocated* input_copy = cur_input->CopyUnconstrained(
          chunk()->zone());
      cur_input->set_virtual_register(second_output->virtual_register());
      AddConstraintsGapMove(gap_index, input_copy, cur_input);

      if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
        int index = gap_index + 1;
        LInstruction* instr = InstructionAt(index);
        if (instr->HasPointerMap()) {
          instr->pointer_map()->RecordPointer(input_copy, chunk()->zone());
        }
      } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
        // The input is assumed to immediately have a tagged representation,
        // before the pointer map can be used. I.e. the pointer map at the
        // instruction will include the output operand (whose value at the
        // beginning of the instruction is equal to the input operand). If
        // this is not desired, then the pointer map at this instruction needs
        // to be adjusted manually.
      }
    }
  }
}


void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
  int block_start = block->first_instruction_index();
  int index = block->last_instruction_index();

  LifetimePosition block_start_position =
      LifetimePosition::FromInstructionIndex(block_start);

  while (index >= block_start) {
    LifetimePosition curr_position =
        LifetimePosition::FromInstructionIndex(index);

    if (IsGapAt(index)) {
      // We have a gap at this position.
      LGap* gap = GapAt(index);
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
                                                         chunk()->zone());
      const ZoneList<LMoveOperands>* move_operands = move->move_operands();
      for (int i = 0; i < move_operands->length(); ++i) {
        LMoveOperands* cur = &move_operands->at(i);
        if (cur->IsIgnored()) continue;
        LOperand* from = cur->source();
        LOperand* to = cur->destination();
        HPhi* phi = LookupPhi(to);
        LOperand* hint = to;
        if (phi != NULL) {
          // This is a phi resolving move.
          if (!phi->block()->IsLoopHeader()) {
            hint = LiveRangeFor(phi->id())->current_hint_operand();
          }
        } else {
          if (to->IsUnallocated()) {
            if (live->Contains(LUnallocated::cast(to)->virtual_register())) {
              Define(curr_position, to, from);
              live->Remove(LUnallocated::cast(to)->virtual_register());
            } else {
              cur->Eliminate();
              continue;
            }
          } else {
            Define(curr_position, to, from);
          }
        }
        Use(block_start_position, curr_position, from, hint);
        if (from->IsUnallocated()) {
          live->Add(LUnallocated::cast(from)->virtual_register());
        }
      }
    } else {
      DCHECK(!IsGapAt(index));
      LInstruction* instr = InstructionAt(index);

      if (instr != NULL) {
        LOperand* output = instr->Output();
        if (output != NULL) {
          if (output->IsUnallocated()) {
            live->Remove(LUnallocated::cast(output)->virtual_register());
          }
          Define(curr_position, output, NULL);
        }

        if (instr->ClobbersRegisters()) {
          for (int i = 0; i < Register::kMaxNumAllocatableRegisters; ++i) {
            if (output == NULL || !output->IsRegister() ||
                output->index() != i) {
              LiveRange* range = FixedLiveRangeFor(i);
              range->AddUseInterval(curr_position,
                                    curr_position.InstructionEnd(),
                                    zone());
            }
          }
        }

        if (instr->ClobbersDoubleRegisters(isolate())) {
          for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); ++i) {
            if (output == NULL || !output->IsDoubleRegister() ||
                output->index() != i) {
              LiveRange* range = FixedDoubleLiveRangeFor(i);
              range->AddUseInterval(curr_position,
                                    curr_position.InstructionEnd(),
                                    zone());
            }
          }
        }

        for (UseIterator it(instr); !it.Done(); it.Advance()) {
          LOperand* input = it.Current();

          LifetimePosition use_pos;
          if (input->IsUnallocated() &&
              LUnallocated::cast(input)->IsUsedAtStart()) {
            use_pos = curr_position;
          } else {
            use_pos = curr_position.InstructionEnd();
          }

          Use(block_start_position, use_pos, input, NULL);
          if (input->IsUnallocated()) {
            live->Add(LUnallocated::cast(input)->virtual_register());
          }
        }

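        // Process temporaries. A temp of an instruction that clobbers temps
        // needs no interval when it is a plain register or has a fixed
        // policy; any other temp is treated as used until the instruction
        // end and defined at the instruction.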
        for (TempIterator it(instr); !it.Done(); it.Advance()) {
          LOperand* temp = it.Current();
          if (instr->ClobbersTemps()) {
            if (temp->IsRegister()) continue;
            if (temp->IsUnallocated()) {
              LUnallocated* temp_unalloc = LUnallocated::cast(temp);
              if (temp_unalloc->HasFixedPolicy()) {
                continue;
              }
            }
          }
          Use(block_start_position, curr_position.InstructionEnd(), temp, NULL);
          Define(curr_position, temp, NULL);

          if (temp->IsUnallocated()) {
            LUnallocated* temp_unalloc = LUnallocated::cast(temp);
            if (temp_unalloc->HasDoubleRegisterPolicy()) {
              double_artificial_registers_.Add(
                  temp_unalloc->virtual_register() - first_artificial_register_,
                  zone());
            }
          }
        }
      }
    }

    index = index - 1;
  }
}


void LAllocator::ResolvePhis(HBasicBlock* block) {
  const ZoneList<HPhi*>* phis = block->phis();
  for (int i = 0; i < phis->length(); ++i) {
    HPhi* phi = phis->at(i);
    LUnallocated* phi_operand =
        new (chunk()->zone()) LUnallocated(LUnallocated::NONE);
    phi_operand->set_virtual_register(phi->id());
    for (int j = 0; j < phi->OperandCount(); ++j) {
      HValue* op = phi->OperandAt(j);
      LOperand* operand = NULL;
      if (op->IsConstant() && op->EmitAtUses()) {
        HConstant* constant = HConstant::cast(op);
        operand = chunk_->DefineConstantOperand(constant);
      } else {
        DCHECK(!op->EmitAtUses());
        LUnallocated* unalloc =
            new(chunk()->zone()) LUnallocated(LUnallocated::ANY);
        unalloc->set_virtual_register(op->id());
        operand = unalloc;
      }
      HBasicBlock* cur_block = block->predecessors()->at(j);
      // The gap move must be added without the special processing done in
      // AddConstraintsGapMove.
      chunk_->AddGapMove(cur_block->last_instruction_index() - 1,
                         operand,
                         phi_operand);

      // We are going to insert a move before the branch instruction.
      // Some branch instructions (e.g. loops' back edges)
      // can potentially cause a GC so they have a pointer map.
      // By inserting a move we essentially create a copy of a
      // value which is invisible to PopulatePointerMaps(), because we store
      // it into a location different from the operand of a live range
      // covering a branch instruction.
      // Thus we need to manually record a pointer.
      LInstruction* branch =
          InstructionAt(cur_block->last_instruction_index());
      if (branch->HasPointerMap()) {
        if (phi->representation().IsTagged() && !phi->type().IsSmi()) {
          branch->pointer_map()->RecordPointer(phi_operand, chunk()->zone());
        } else if (!phi->representation().IsDouble()) {
          branch->pointer_map()->RecordUntagged(phi_operand, chunk()->zone());
        }
      }
    }

    LiveRange* live_range = LiveRangeFor(phi->id());
    LLabel* label = chunk_->GetLabel(phi->block()->block_id());
    label->GetOrCreateParallelMove(LGap::START, chunk()->zone())->
        AddMove(phi_operand, live_range->GetSpillOperand(), chunk()->zone());
    live_range->SetSpillStartIndex(phi->block()->first_instruction_index());
  }
}


bool LAllocator::Allocate(LChunk* chunk) {
  DCHECK(chunk_ == NULL);
  chunk_ = static_cast<LPlatformChunk*>(chunk);
  assigned_registers_ =
      new(chunk->zone()) BitVector(Register::NumAllocatableRegisters(),
                                   chunk->zone());
  assigned_double_registers_ =
      new(chunk->zone()) BitVector(DoubleRegister::NumAllocatableRegisters(),
                                   chunk->zone());
  MeetRegisterConstraints();
  if (!AllocationOk()) return false;
  ResolvePhis();
  BuildLiveRanges();
  AllocateGeneralRegisters();
  if (!AllocationOk()) return false;
  AllocateDoubleRegisters();
  if (!AllocationOk()) return false;
  PopulatePointerMaps();
  ConnectRanges();
  ResolveControlFlow();
  return true;
}


void LAllocator::MeetRegisterConstraints() {
  LAllocatorPhase phase("L_Register constraints", this);
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int i = 0; i < blocks->length(); ++i) {
    HBasicBlock* block = blocks->at(i);
    MeetRegisterConstraints(block);
    if (!AllocationOk()) return;
  }
}


void LAllocator::ResolvePhis() {
  LAllocatorPhase phase("L_Resolve phis", this);

  // Process the blocks in reverse order.
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
    HBasicBlock* block = blocks->at(block_id);
    ResolvePhis(block);
  }
}


void LAllocator::ResolveControlFlow(LiveRange* range,
                                    HBasicBlock* block,
                                    HBasicBlock* pred) {
  LifetimePosition pred_end =
      LifetimePosition::FromInstructionIndex(pred->last_instruction_index());
  LifetimePosition cur_start =
      LifetimePosition::FromInstructionIndex(block->first_instruction_index());
  LiveRange* pred_cover = NULL;
  LiveRange* cur_cover = NULL;
  LiveRange* cur_range = range;
  while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) {
    if (cur_range->CanCover(cur_start)) {
      DCHECK(cur_cover == NULL);
      cur_cover = cur_range;
    }
    if (cur_range->CanCover(pred_end)) {
      DCHECK(pred_cover == NULL);
      pred_cover = cur_range;
    }
    cur_range = cur_range->next();
  }

  if (cur_cover->IsSpilled()) return;
  DCHECK(pred_cover != NULL && cur_cover != NULL);
  if (pred_cover != cur_cover) {
    LOperand* pred_op = pred_cover->CreateAssignedOperand(chunk()->zone());
    LOperand* cur_op = cur_cover->CreateAssignedOperand(chunk()->zone());
    if (!pred_op->Equals(cur_op)) {
      LGap* gap = NULL;
      if (block->predecessors()->length() == 1) {
        gap = GapAt(block->first_instruction_index());
      } else {
        DCHECK(pred->end()->SecondSuccessor() == NULL);
        gap = GetLastGap(pred);

        // We are going to insert a move before the branch instruction.
        // Some branch instructions (e.g. loops' back edges)
        // can potentially cause a GC so they have a pointer map.
        // By inserting a move we essentially create a copy of a
        // value which is invisible to PopulatePointerMaps(), because we store
        // it into a location different from the operand of a live range
        // covering a branch instruction.
        // Thus we need to manually record a pointer.
        LInstruction* branch = InstructionAt(pred->last_instruction_index());
        if (branch->HasPointerMap()) {
          if (HasTaggedValue(range->id())) {
            branch->pointer_map()->RecordPointer(cur_op, chunk()->zone());
          } else if (!cur_op->IsDoubleStackSlot()  &&
                     !cur_op->IsDoubleRegister()   &&
                     !cur_op->IsSIMD128StackSlot() &&
                     !cur_op->IsSIMD128Register()) {
            branch->pointer_map()->RemovePointer(cur_op);
          }
        }
      }
      gap->GetOrCreateParallelMove(
          LGap::START, chunk()->zone())->AddMove(pred_op, cur_op,
                                                 chunk()->zone());
    }
  }
}


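// Returns the parallel move into which a move at |pos| should be inserted:
// the START or END move of the gap at |pos| itself, or the AFTER/BEFORE move
// of the nearest bracketing gap when |pos| falls on an instruction.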
LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) {
  int index = pos.InstructionIndex();
  if (IsGapAt(index)) {
    LGap* gap = GapAt(index);
    return gap->GetOrCreateParallelMove(
        pos.IsInstructionStart() ? LGap::START : LGap::END, chunk()->zone());
  }
  int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
  return GapAt(gap_pos)->GetOrCreateParallelMove(
      (gap_pos < index) ? LGap::AFTER : LGap::BEFORE, chunk()->zone());
}


HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) {
  LGap* gap = GapAt(chunk_->NearestGapPos(pos.InstructionIndex()));
  return gap->block();
}


void LAllocator::ConnectRanges() {
  LAllocatorPhase phase("L_Connect ranges", this);
  for (int i = 0; i < live_ranges()->length(); ++i) {
    LiveRange* first_range = live_ranges()->at(i);
    if (first_range == NULL || first_range->parent() != NULL) continue;

    LiveRange* second_range = first_range->next();
    while (second_range != NULL) {
      LifetimePosition pos = second_range->Start();

      if (!second_range->IsSpilled()) {
        // Add gap move if the two live ranges touch and there is no block
        // boundary.
        if (first_range->End().Value() == pos.Value()) {
          bool should_insert = true;
          if (IsBlockBoundary(pos)) {
            should_insert = CanEagerlyResolveControlFlow(GetBlock(pos));
          }
          if (should_insert) {
            LParallelMove* move = GetConnectingParallelMove(pos);
            LOperand* prev_operand = first_range->CreateAssignedOperand(
                chunk()->zone());
            LOperand* cur_operand = second_range->CreateAssignedOperand(
                chunk()->zone());
            move->AddMove(prev_operand, cur_operand,
                          chunk()->zone());
          }
        }
      }

      first_range = second_range;
      second_range = second_range->next();
    }
  }
}


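// Control flow into a block can be resolved by the eager gap moves inserted
// in ConnectRanges only when the block has a single predecessor that is
// emitted immediately before it.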
bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const {
  if (block->predecessors()->length() != 1) return false;
  return block->predecessors()->first()->block_id() == block->block_id() - 1;
}


void LAllocator::ResolveControlFlow() {
  LAllocatorPhase phase("L_Resolve control flow", this);
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = 1; block_id < blocks->length(); ++block_id) {
    HBasicBlock* block = blocks->at(block_id);
    if (CanEagerlyResolveControlFlow(block)) continue;
    BitVector* live = live_in_sets_[block->block_id()];
    BitVector::Iterator iterator(live);
    while (!iterator.Done()) {
      int operand_index = iterator.Current();
      for (int i = 0; i < block->predecessors()->length(); ++i) {
        HBasicBlock* cur = block->predecessors()->at(i);
        LiveRange* cur_range = LiveRangeFor(operand_index);
        ResolveControlFlow(cur_range, block, cur);
      }
      iterator.Advance();
    }
  }
}


void LAllocator::BuildLiveRanges() {
  LAllocatorPhase phase("L_Build live ranges", this);
  InitializeLivenessAnalysis();
  // Process the blocks in reverse order.
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
    HBasicBlock* block = blocks->at(block_id);
    BitVector* live = ComputeLiveOut(block);
    // Initially consider all live_out values live for the entire block. We
    // will shorten these intervals if necessary.
    AddInitialIntervals(block, live);

    // Process the instructions in reverse order, generating and killing
    // live values.
    ProcessInstructions(block, live);
    // All phi output operands are killed by this block.
    const ZoneList<HPhi*>* phis = block->phis();
    for (int i = 0; i < phis->length(); ++i) {
      // The live range interval already ends at the first instruction of the
      // block.
      HPhi* phi = phis->at(i);
      live->Remove(phi->id());

      LOperand* hint = NULL;
      LOperand* phi_operand = NULL;
      LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
                                                         chunk()->zone());
      for (int j = 0; j < move->move_operands()->length(); ++j) {
        LOperand* to = move->move_operands()->at(j).destination();
        if (to->IsUnallocated() &&
            LUnallocated::cast(to)->virtual_register() == phi->id()) {
          hint = move->move_operands()->at(j).source();
          phi_operand = to;
          break;
        }
      }
      DCHECK(hint != NULL);

      LifetimePosition block_start = LifetimePosition::FromInstructionIndex(
              block->first_instruction_index());
      Define(block_start, phi_operand, hint);
    }

    // Now live is live_in for this block, except that it does not include
    // values live out on backward successor edges.
1333     live_in_sets_[block_id] = live;
1334
1335     // If this block is a loop header go back and patch up the necessary
1336     // predecessor blocks.
1337     if (block->IsLoopHeader()) {
1338       // TODO(kmillikin): Need to be able to get the last block of the loop
1339       // in the loop information. Add a live range stretching from the first
1340       // loop instruction to the last for each value live on entry to the
1341       // header.
1342       HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
1343       BitVector::Iterator iterator(live);
1344       LifetimePosition start = LifetimePosition::FromInstructionIndex(
1345           block->first_instruction_index());
1346       LifetimePosition end = LifetimePosition::FromInstructionIndex(
1347           back_edge->last_instruction_index()).NextInstruction();
1348       while (!iterator.Done()) {
1349         int operand_index = iterator.Current();
1350         LiveRange* range = LiveRangeFor(operand_index);
1351         range->EnsureInterval(start, end, zone());
1352         iterator.Advance();
1353       }
1354
1355       for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) {
1356         live_in_sets_[i]->Union(*live);
1357       }
1358     }
1359
1360 #ifdef DEBUG
1361     if (block_id == 0) {
1362       BitVector::Iterator iterator(live);
1363       bool found = false;
1364       while (!iterator.Done()) {
1365         found = true;
1366         int operand_index = iterator.Current();
1367         if (chunk_->info()->IsStub()) {
1368           CodeStub::Major major_key = chunk_->info()->code_stub()->MajorKey();
1369           PrintF("Function: %s\n", CodeStub::MajorName(major_key, false));
1370         } else {
1371           DCHECK(chunk_->info()->IsOptimizing());
1372           AllowHandleDereference allow_deref;
1373           PrintF("Function: %s\n",
1374                  chunk_->info()->function()->debug_name()->ToCString().get());
1375         }
1376         PrintF("Value %d used before first definition!\n", operand_index);
1377         LiveRange* range = LiveRangeFor(operand_index);
1378         PrintF("First use is at %d\n", range->first_pos()->pos().Value());
1379         iterator.Advance();
1380       }
1381       DCHECK(!found);
1382     }
1383 #endif
1384   }
1385
1386   for (int i = 0; i < live_ranges_.length(); ++i) {
1387     if (live_ranges_[i] != NULL) {
1388       live_ranges_[i]->kind_ = RequiredRegisterKind(live_ranges_[i]->id());
1389     }
1390   }
1391 }
1392
1393
1394 bool LAllocator::SafePointsAreInOrder() const {
1395   const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
1396   int safe_point = 0;
1397   for (int i = 0; i < pointer_maps->length(); ++i) {
1398     LPointerMap* map = pointer_maps->at(i);
1399     if (safe_point > map->lithium_position()) return false;
1400     safe_point = map->lithium_position();
1401   }
1402   return true;
1403 }
1404
1405
void LAllocator::PopulatePointerMaps() {
  LAllocatorPhase phase("L_Populate pointer maps", this);
  const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();

  DCHECK(SafePointsAreInOrder());

  // Iterate over all safe point positions and record a pointer
  // for all spilled live ranges at this point.
  int first_safe_point_index = 0;
  int last_range_start = 0;
  for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) {
    LiveRange* range = live_ranges()->at(range_idx);
    if (range == NULL) continue;
    // Iterate over the first parts of multi-part live ranges.
    if (range->parent() != NULL) continue;
    // Skip non-pointer values.
    if (!HasTaggedValue(range->id())) continue;
    // Skip empty live ranges.
    if (range->IsEmpty()) continue;

    // Find the extent of the range and its children.
    int start = range->Start().InstructionIndex();
    int end = 0;
    for (LiveRange* cur = range; cur != NULL; cur = cur->next()) {
      LifetimePosition this_end = cur->End();
      if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex();
      DCHECK(cur->Start().InstructionIndex() >= start);
    }

    // Most of the ranges are in order, but not all.  Keep an eye on when
    // they step backwards and reset the first_safe_point_index so we don't
    // miss any safe points.
    if (start < last_range_start) {
      first_safe_point_index = 0;
    }
    last_range_start = start;

    // Step across all the safe points that are before the start of this range,
    // recording how far we step in order to save doing this for the next range.
    while (first_safe_point_index < pointer_maps->length()) {
      LPointerMap* map = pointer_maps->at(first_safe_point_index);
      int safe_point = map->lithium_position();
      if (safe_point >= start) break;
      first_safe_point_index++;
    }

    // Step through the safe points to see whether they are in the range.
    for (int safe_point_index = first_safe_point_index;
         safe_point_index < pointer_maps->length();
         ++safe_point_index) {
      LPointerMap* map = pointer_maps->at(safe_point_index);
      int safe_point = map->lithium_position();

      // The safe points are sorted so we can stop searching here.
      if (safe_point - 1 > end) break;

      // Advance to the next active range that covers the current
      // safe point position.
      LifetimePosition safe_point_pos =
          LifetimePosition::FromInstructionIndex(safe_point);
      LiveRange* cur = range;
      while (cur != NULL && !cur->Covers(safe_point_pos)) {
        cur = cur->next();
      }
      if (cur == NULL) continue;

      // Check if the live range is spilled and the safe point is after
      // the spill position.
      if (range->HasAllocatedSpillOperand() &&
          safe_point >= range->spill_start_index()) {
        TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
                   range->id(), range->spill_start_index(), safe_point);
        map->RecordPointer(range->GetSpillOperand(), chunk()->zone());
      }

      if (!cur->IsSpilled()) {
        TraceAlloc("Pointer in register for range %d (start at %d) "
                   "at safe point %d\n",
                   cur->id(), cur->Start().Value(), safe_point);
        LOperand* operand = cur->CreateAssignedOperand(chunk()->zone());
        DCHECK(!operand->IsStackSlot());
        map->RecordPointer(operand, chunk()->zone());
      }
    }
  }
}


void LAllocator::AllocateGeneralRegisters() {
  LAllocatorPhase phase("L_Allocate general registers", this);
  num_registers_ = Register::NumAllocatableRegisters();
  mode_ = GENERAL_REGISTERS;
  AllocateRegisters();
}


void LAllocator::AllocateDoubleRegisters() {
  LAllocatorPhase phase("L_Allocate double registers", this);
  num_registers_ = DoubleRegister::NumAllocatableRegisters();
  mode_ = DOUBLE_REGISTERS;
  AllocateRegisters();
}


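// Main loop of the linear-scan allocator for the current register kind.
// Unhandled ranges are kept sorted by descending start position, so
// RemoveLast() always yields the unprocessed range with the smallest start.
// At each step, active ranges are those covering the current position and
// inactive ranges are those that start earlier but have a lifetime hole at it.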
void LAllocator::AllocateRegisters() {
  DCHECK(unhandled_live_ranges_.is_empty());

  for (int i = 0; i < live_ranges_.length(); ++i) {
    if (live_ranges_[i] != NULL) {
      if (live_ranges_[i]->Kind() == mode_) {
        AddToUnhandledUnsorted(live_ranges_[i]);
      } else if (mode_ == DOUBLE_REGISTERS &&
                 IsSIMD128RegisterKind(live_ranges_[i]->Kind())) {
        AddToUnhandledUnsorted(live_ranges_[i]);
      }
    }
  }
  SortUnhandled();
  DCHECK(UnhandledIsSorted());

  DCHECK(reusable_slots_.is_empty());
  DCHECK(reusable_simd128_slots_.is_empty());
  DCHECK(active_live_ranges_.is_empty());
  DCHECK(inactive_live_ranges_.is_empty());

  if (mode_ == DOUBLE_REGISTERS) {
    for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); ++i) {
      LiveRange* current = fixed_double_live_ranges_.at(i);
      if (current != NULL) {
        AddToInactive(current);
      }
    }
  } else {
    DCHECK(mode_ == GENERAL_REGISTERS);
    for (int i = 0; i < fixed_live_ranges_.length(); ++i) {
      LiveRange* current = fixed_live_ranges_.at(i);
      if (current != NULL) {
        AddToInactive(current);
      }
    }
  }

  while (!unhandled_live_ranges_.is_empty()) {
    DCHECK(UnhandledIsSorted());
    LiveRange* current = unhandled_live_ranges_.RemoveLast();
    DCHECK(UnhandledIsSorted());
    LifetimePosition position = current->Start();
#ifdef DEBUG
    allocation_finger_ = position;
#endif
    TraceAlloc("Processing interval %d start=%d\n",
               current->id(),
               position.Value());

    if (current->HasAllocatedSpillOperand()) {
      TraceAlloc("Live range %d already has a spill operand\n", current->id());
      LifetimePosition next_pos = position;
      if (IsGapAt(next_pos.InstructionIndex())) {
        next_pos = next_pos.NextInstruction();
      }
      UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos);
      // If the range already has a spill operand and it doesn't need a
      // register immediately, split it and spill the first part of the range.
      if (pos == NULL) {
        Spill(current);
        continue;
      } else if (pos->pos().Value() >
                 current->Start().NextInstruction().Value()) {
        // Do not spill the live range eagerly if the use position that can
        // benefit from the register is too close to the start of the live
        // range.
        SpillBetween(current, current->Start(), pos->pos());
        if (!AllocationOk()) return;
        DCHECK(UnhandledIsSorted());
        continue;
      }
    }

    for (int i = 0; i < active_live_ranges_.length(); ++i) {
      LiveRange* cur_active = active_live_ranges_.at(i);
      if (cur_active->End().Value() <= position.Value()) {
        ActiveToHandled(cur_active);
        --i;  // The live range was removed from the list of active live ranges.
      } else if (!cur_active->Covers(position)) {
        ActiveToInactive(cur_active);
        --i;  // The live range was removed from the list of active live ranges.
      }
    }

    for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
      LiveRange* cur_inactive = inactive_live_ranges_.at(i);
      if (cur_inactive->End().Value() <= position.Value()) {
        InactiveToHandled(cur_inactive);
        --i;  // Live range was removed from the list of inactive live ranges.
      } else if (cur_inactive->Covers(position)) {
        InactiveToActive(cur_inactive);
        --i;  // Live range was removed from the list of inactive live ranges.
      }
    }

    DCHECK(!current->HasRegisterAssigned() && !current->IsSpilled());

    bool result = TryAllocateFreeReg(current);
    if (!AllocationOk()) return;

    if (!result) AllocateBlockedReg(current);
    if (!AllocationOk()) return;

    if (current->HasRegisterAssigned()) {
      AddToActive(current);
    }
  }

  reusable_slots_.Rewind(0);
  reusable_simd128_slots_.Rewind(0);
  active_live_ranges_.Rewind(0);
  inactive_live_ranges_.Rewind(0);
}


const char* LAllocator::RegisterName(int allocation_index) {
  if (mode_ == GENERAL_REGISTERS) {
    return Register::AllocationIndexToString(allocation_index);
  } else {
    return DoubleRegister::AllocationIndexToString(allocation_index);
  }
}


void LAllocator::TraceAlloc(const char* msg, ...) {
  if (FLAG_trace_alloc) {
    va_list arguments;
    va_start(arguments, msg);
    base::OS::VPrint(msg, arguments);
    va_end(arguments);
  }
}


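// Only tagged values that are not known to be Smis need pointer map entries:
// Smis are immediate integers that the GC never has to relocate.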
bool LAllocator::HasTaggedValue(int virtual_register) const {
  HValue* value = graph_->LookupValue(virtual_register);
  if (value == NULL) return false;
  return value->representation().IsTagged() && !value->type().IsSmi();
}


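// Virtual registers below first_artificial_register_ correspond to HValues
// and take their register kind from the value's representation; artificial
// registers introduced later by the allocator are looked up in the per-kind
// bit vectors instead.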
RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const {
  if (virtual_register < first_artificial_register_) {
    HValue* value = graph_->LookupValue(virtual_register);
    if (value != NULL && value->representation().IsDouble()) {
      return DOUBLE_REGISTERS;
    } else if (value != NULL && value->representation().IsFloat32x4()) {
      return FLOAT32x4_REGISTERS;
    } else if (value != NULL && value->representation().IsFloat64x2()) {
      return FLOAT64x2_REGISTERS;
    } else if (value != NULL && value->representation().IsInt32x4()) {
      return INT32x4_REGISTERS;
    }
  } else if (double_artificial_registers_.Contains(
      virtual_register - first_artificial_register_)) {
    return DOUBLE_REGISTERS;
  } else if (float32x4_artificial_registers_.Contains(
      virtual_register - first_artificial_register_)) {
    return FLOAT32x4_REGISTERS;
  } else if (float64x2_artificial_registers_.Contains(
      virtual_register - first_artificial_register_)) {
    return FLOAT64x2_REGISTERS;
  } else if (int32x4_artificial_registers_.Contains(
      virtual_register - first_artificial_register_)) {
    return INT32x4_REGISTERS;
  }

  return GENERAL_REGISTERS;
}


void LAllocator::AddToActive(LiveRange* range) {
  TraceAlloc("Add live range %d to active\n", range->id());
  active_live_ranges_.Add(range, zone());
}


void LAllocator::AddToInactive(LiveRange* range) {
  TraceAlloc("Add live range %d to inactive\n", range->id());
  inactive_live_ranges_.Add(range, zone());
}


void LAllocator::AddToUnhandledSorted(LiveRange* range) {
  if (range == NULL || range->IsEmpty()) return;
  DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled());
  DCHECK(allocation_finger_.Value() <= range->Start().Value());
  for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) {
    LiveRange* cur_range = unhandled_live_ranges_.at(i);
    if (range->ShouldBeAllocatedBefore(cur_range)) {
      TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
      unhandled_live_ranges_.InsertAt(i + 1, range, zone());
      DCHECK(UnhandledIsSorted());
      return;
    }
  }
  TraceAlloc("Add live range %d to unhandled at start\n", range->id());
  unhandled_live_ranges_.InsertAt(0, range, zone());
  DCHECK(UnhandledIsSorted());
}


void LAllocator::AddToUnhandledUnsorted(LiveRange* range) {
  if (range == NULL || range->IsEmpty()) return;
  DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled());
  TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
  unhandled_live_ranges_.Add(range, zone());
}


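// Comparator for SortUnhandled below: a range that should be allocated before
// another compares as greater, so it ends up closer to the end of the list.
// E.g. a range starting at position 4 sorts after a range starting at
// position 10 and is therefore popped off first by RemoveLast().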
static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) {
  DCHECK(!(*a)->ShouldBeAllocatedBefore(*b) ||
         !(*b)->ShouldBeAllocatedBefore(*a));
  if ((*a)->ShouldBeAllocatedBefore(*b)) return 1;
  if ((*b)->ShouldBeAllocatedBefore(*a)) return -1;
  return (*a)->id() - (*b)->id();
}


// Sort the unhandled live ranges so that the ranges to be processed first are
// at the end of the array list.  This is convenient for the register allocation
// algorithm because it is efficient to remove elements from the end.
void LAllocator::SortUnhandled() {
  TraceAlloc("Sort unhandled\n");
  unhandled_live_ranges_.Sort(&UnhandledSortHelper);
}


bool LAllocator::UnhandledIsSorted() {
  int len = unhandled_live_ranges_.length();
  for (int i = 1; i < len; i++) {
    LiveRange* a = unhandled_live_ranges_.at(i - 1);
    LiveRange* b = unhandled_live_ranges_.at(i);
    if (a->Start().Value() < b->Start().Value()) return false;
  }
  return true;
}


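// Spill slots are recycled: when the last part of a live range is retired,
// FreeSpillSlot parks its slot on a free list (128-bit SIMD slots are kept on
// a separate list), and TryReuseSpillSlot hands a slot out again only if the
// range that previously owned it ended before the new range starts.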
void LAllocator::FreeSpillSlot(LiveRange* range) {
  // Check that we are the last range.
  if (range->next() != NULL) return;

  if (!range->TopLevel()->HasAllocatedSpillOperand()) return;

  int index = range->TopLevel()->GetSpillOperand()->index();
  if (index >= 0) {
    if (IsSIMD128RegisterKind(range->Kind())) {
      reusable_simd128_slots_.Add(range, zone());
    } else {
      reusable_slots_.Add(range, zone());
    }
  }
}


LOperand* LAllocator::TryReuseSpillSlot(LiveRange* range) {
  ZoneList<LiveRange*>* reusable_slots = IsSIMD128RegisterKind(range->Kind())
      ? &reusable_simd128_slots_
      : &reusable_slots_;
  if (reusable_slots->is_empty()) return NULL;
  if (reusable_slots->first()->End().Value() >
      range->TopLevel()->Start().Value()) {
    return NULL;
  }
  LOperand* result = reusable_slots->first()->TopLevel()->GetSpillOperand();
  reusable_slots->Remove(0);
  return result;
}


void LAllocator::ActiveToHandled(LiveRange* range) {
  DCHECK(active_live_ranges_.Contains(range));
  active_live_ranges_.RemoveElement(range);
  TraceAlloc("Moving live range %d from active to handled\n", range->id());
  FreeSpillSlot(range);
}


void LAllocator::ActiveToInactive(LiveRange* range) {
  DCHECK(active_live_ranges_.Contains(range));
  active_live_ranges_.RemoveElement(range);
  inactive_live_ranges_.Add(range, zone());
  TraceAlloc("Moving live range %d from active to inactive\n", range->id());
}


void LAllocator::InactiveToHandled(LiveRange* range) {
  DCHECK(inactive_live_ranges_.Contains(range));
  inactive_live_ranges_.RemoveElement(range);
  TraceAlloc("Moving live range %d from inactive to handled\n", range->id());
  FreeSpillSlot(range);
}


void LAllocator::InactiveToActive(LiveRange* range) {
  DCHECK(inactive_live_ranges_.Contains(range));
  inactive_live_ranges_.RemoveElement(range);
  active_live_ranges_.Add(range, zone());
  TraceAlloc("Moving live range %d from inactive to active\n", range->id());
}


// TryAllocateFreeReg and AllocateBlockedReg assume this
// when allocating local arrays.
STATIC_ASSERT(DoubleRegister::kMaxNumAllocatableRegisters >=
              Register::kMaxNumAllocatableRegisters);


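// Try to find a register that is free for the entire lifetime of 'current'.
// free_until_pos[reg] holds the position at which reg ceases to be free:
// position 0 for registers taken by an active range, the first point of
// intersection for registers taken by an inactive range, and MaxPosition for
// registers nobody is using. If the best candidate is only free for a prefix
// of 'current', the range is split there and only the head gets the register.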
bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
  LifetimePosition free_until_pos[DoubleRegister::kMaxNumAllocatableRegisters];

  for (int i = 0; i < num_registers_; i++) {
    free_until_pos[i] = LifetimePosition::MaxPosition();
  }

  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* cur_active = active_live_ranges_.at(i);
    free_until_pos[cur_active->assigned_register()] =
        LifetimePosition::FromInstructionIndex(0);
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* cur_inactive = inactive_live_ranges_.at(i);
    DCHECK(cur_inactive->End().Value() > current->Start().Value());
    LifetimePosition next_intersection =
        cur_inactive->FirstIntersection(current);
    if (!next_intersection.IsValid()) continue;
    int cur_reg = cur_inactive->assigned_register();
    free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
  }

  LOperand* hint = current->FirstHint();
  if (hint != NULL && (hint->IsRegister() || hint->IsDoubleRegister() ||
      hint->IsSIMD128Register())) {
    int register_index = hint->index();
    TraceAlloc(
        "Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
        RegisterName(register_index),
        free_until_pos[register_index].Value(),
        current->id(),
        current->End().Value());

    // The desired register is free until the end of the current live range.
    if (free_until_pos[register_index].Value() >= current->End().Value()) {
      TraceAlloc("Assigning preferred reg %s to live range %d\n",
                 RegisterName(register_index),
                 current->id());
      SetLiveRangeAssignedRegister(current, register_index);
      return true;
    }
  }

  // Find the register which stays free for the longest time.
  int reg = 0;
  for (int i = 1; i < RegisterCount(); ++i) {
    if (free_until_pos[i].Value() > free_until_pos[reg].Value()) {
      reg = i;
    }
  }

  LifetimePosition pos = free_until_pos[reg];

  if (pos.Value() <= current->Start().Value()) {
    // All registers are blocked.
    return false;
  }

  if (pos.Value() < current->End().Value()) {
    // Register reg is available at the range start but becomes blocked before
    // the range end. Split current at position where it becomes blocked.
    LiveRange* tail = SplitRangeAt(current, pos);
    if (!AllocationOk()) return false;
    AddToUnhandledSorted(tail);
  }

  // Register reg is available at the range start and is free until
  // the range end.
  DCHECK(pos.Value() >= current->End().Value());
  TraceAlloc("Assigning free reg %s to live range %d\n",
             RegisterName(reg),
             current->id());
  SetLiveRangeAssignedRegister(current, reg);

  return true;
}


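// Called when no register is free for the whole of 'current'. Pick the
// register whose next beneficial use (use_pos) is farthest away, assign it to
// 'current', and split/spill the intersecting ranges that held it. block_pos
// records where a register is claimed by a fixed range and thus cannot be
// taken over at all.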
void LAllocator::AllocateBlockedReg(LiveRange* current) {
  UsePosition* register_use = current->NextRegisterPosition(current->Start());
  if (register_use == NULL) {
    // There is no use in the current live range that requires a register.
    // We can just spill it.
    Spill(current);
    return;
  }

  LifetimePosition use_pos[DoubleRegister::kMaxNumAllocatableRegisters];
  LifetimePosition block_pos[DoubleRegister::kMaxNumAllocatableRegisters];

  for (int i = 0; i < num_registers_; i++) {
    use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
  }

  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* range = active_live_ranges_[i];
    int cur_reg = range->assigned_register();
    if (range->IsFixed() || !range->CanBeSpilled(current->Start())) {
      block_pos[cur_reg] = use_pos[cur_reg] =
          LifetimePosition::FromInstructionIndex(0);
    } else {
      UsePosition* next_use = range->NextUsePositionRegisterIsBeneficial(
          current->Start());
      if (next_use == NULL) {
        use_pos[cur_reg] = range->End();
      } else {
        use_pos[cur_reg] = next_use->pos();
      }
    }
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* range = inactive_live_ranges_.at(i);
    DCHECK(range->End().Value() > current->Start().Value());
    LifetimePosition next_intersection = range->FirstIntersection(current);
    if (!next_intersection.IsValid()) continue;
    int cur_reg = range->assigned_register();
    if (range->IsFixed()) {
      block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
      use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
    } else {
      use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
    }
  }

  int reg = 0;
  for (int i = 1; i < RegisterCount(); ++i) {
    if (use_pos[i].Value() > use_pos[reg].Value()) {
      reg = i;
    }
  }

  LifetimePosition pos = use_pos[reg];

  if (pos.Value() < register_use->pos().Value()) {
    // All registers are blocked before the first use that requires a register.
    // Spill the starting part of the live range up to that use.
    SpillBetween(current, current->Start(), register_use->pos());
    return;
  }

  if (block_pos[reg].Value() < current->End().Value()) {
    // Register becomes blocked before the current range end. Split before that
    // position.
    LiveRange* tail = SplitBetween(current,
                                   current->Start(),
                                   block_pos[reg].InstructionStart());
    if (!AllocationOk()) return;
    AddToUnhandledSorted(tail);
  }

  // Register reg is not blocked for the whole range.
  DCHECK(block_pos[reg].Value() >= current->End().Value());
  TraceAlloc("Assigning blocked reg %s to live range %d\n",
             RegisterName(reg),
             current->id());
  SetLiveRangeAssignedRegister(current, reg);

  // This register was not free. Thus we need to find and spill
  // parts of active and inactive live ranges that use the same register
  // at the same lifetime positions as current.
  SplitAndSpillIntersecting(current);
}


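// If pos lies inside a loop, try to hoist the spill up to the header of the
// outermost enclosing loop that the range still covers, as long as there is
// no register-beneficial use between the header and pos. Spilling at the
// header replaces a memory move on every back-edge iteration with a single
// move before the loop.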
LifetimePosition LAllocator::FindOptimalSpillingPos(LiveRange* range,
                                                    LifetimePosition pos) {
  HBasicBlock* block = GetBlock(pos.InstructionStart());
  HBasicBlock* loop_header =
      block->IsLoopHeader() ? block : block->parent_loop_header();

  if (loop_header == NULL) return pos;

  UsePosition* prev_use =
      range->PreviousUsePositionRegisterIsBeneficial(pos);

  while (loop_header != NULL) {
    // We are going to spill the live range inside the loop.  If possible,
    // try to move the spilling position backwards to the loop header, which
    // reduces the number of memory moves on the back edge.
    LifetimePosition loop_start = LifetimePosition::FromInstructionIndex(
        loop_header->first_instruction_index());

    if (range->Covers(loop_start)) {
      if (prev_use == NULL || prev_use->pos().Value() < loop_start.Value()) {
        // No register-beneficial use inside the loop before this position.
        pos = loop_start;
      }
    }

    // Try hoisting out to an outer loop.
    loop_header = loop_header->parent_loop_header();
  }

  return pos;
}


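// 'current' has just been assigned a register that other active or inactive
// ranges are still using. Split those ranges around the conflict, spill the
// overlapping pieces, and retire the affected ranges from the active and
// inactive lists.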
void LAllocator::SplitAndSpillIntersecting(LiveRange* current) {
  DCHECK(current->HasRegisterAssigned());
  int reg = current->assigned_register();
  LifetimePosition split_pos = current->Start();
  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* range = active_live_ranges_[i];
    if (range->assigned_register() == reg) {
      UsePosition* next_pos = range->NextRegisterPosition(current->Start());
      LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos);
      if (next_pos == NULL) {
        SpillAfter(range, spill_pos);
      } else {
        // When spilling between spill_pos and next_pos, ensure that the range
        // remains spilled at least until the start of the current live range.
        // This guarantees that we will not introduce new unhandled ranges that
        // start before the current range, as that would violate the allocation
        // invariant and lead to an inconsistent state of the active and
        // inactive live ranges: ranges are allocated in order of their start
        // positions, and ranges are retired from active/inactive when the
        // start of the current live range is larger than their end.
        SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos());
      }
      if (!AllocationOk()) return;
      ActiveToHandled(range);
      --i;
    }
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* range = inactive_live_ranges_[i];
    DCHECK(range->End().Value() > current->Start().Value());
    if (range->assigned_register() == reg && !range->IsFixed()) {
      LifetimePosition next_intersection = range->FirstIntersection(current);
      if (next_intersection.IsValid()) {
        UsePosition* next_pos = range->NextRegisterPosition(current->Start());
        if (next_pos == NULL) {
          SpillAfter(range, split_pos);
        } else {
          next_intersection = Min(next_intersection, next_pos->pos());
          SpillBetween(range, split_pos, next_intersection);
        }
        if (!AllocationOk()) return;
        InactiveToHandled(range);
        --i;
      }
    }
  }
}


bool LAllocator::IsBlockBoundary(LifetimePosition pos) {
  return pos.IsInstructionStart() &&
      InstructionAt(pos.InstructionIndex())->IsLabel();
}


LiveRange* LAllocator::SplitRangeAt(LiveRange* range, LifetimePosition pos) {
  DCHECK(!range->IsFixed());
  TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());

  if (pos.Value() <= range->Start().Value()) return range;

  // We can't properly connect live ranges if the split occurred at the end
  // of a control instruction.
  DCHECK(pos.IsInstructionStart() ||
         !chunk_->instructions()->at(pos.InstructionIndex())->IsControl());

  int vreg = GetVirtualRegister();
  if (!AllocationOk()) return NULL;
  LiveRange* result = LiveRangeFor(vreg);
  range->SplitAt(pos, result, zone());
  return result;
}


LiveRange* LAllocator::SplitBetween(LiveRange* range,
                                    LifetimePosition start,
                                    LifetimePosition end) {
  DCHECK(!range->IsFixed());
  TraceAlloc("Splitting live range %d in position between [%d, %d]\n",
             range->id(),
             start.Value(),
             end.Value());

  LifetimePosition split_pos = FindOptimalSplitPos(start, end);
  DCHECK(split_pos.Value() >= start.Value());
  return SplitRangeAt(range, split_pos);
}


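// Pick a split position inside [start, end]. Prefer the first instruction of
// the outermost loop header lying between the two positions, so that the move
// inserted at the split lands outside the loop body; otherwise split as late
// as possible.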
LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start,
                                                 LifetimePosition end) {
  int start_instr = start.InstructionIndex();
  int end_instr = end.InstructionIndex();
  DCHECK(start_instr <= end_instr);

  // We have no choice.
  if (start_instr == end_instr) return end;

  HBasicBlock* start_block = GetBlock(start);
  HBasicBlock* end_block = GetBlock(end);

  if (end_block == start_block) {
    // The interval is split in the same basic block. Split at the latest
    // possible position.
    return end;
  }

  HBasicBlock* block = end_block;
  // Find the header of the outermost loop.
  while (block->parent_loop_header() != NULL &&
         block->parent_loop_header()->block_id() > start_block->block_id()) {
    block = block->parent_loop_header();
  }

  // We did not find any suitable outer loop. Split at the latest possible
  // position unless end_block is a loop header itself.
  if (block == end_block && !end_block->IsLoopHeader()) return end;

  return LifetimePosition::FromInstructionIndex(
      block->first_instruction_index());
}


void LAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
  LiveRange* second_part = SplitRangeAt(range, pos);
  if (!AllocationOk()) return;
  Spill(second_part);
}


void LAllocator::SpillBetween(LiveRange* range,
                              LifetimePosition start,
                              LifetimePosition end) {
  SpillBetweenUntil(range, start, start, end);
}


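// Split 'range' at 'start'. If the tail still overlaps [start, end[, split it
// again at or after 'until' (but before 'end'), spill only the middle piece,
// and return the last piece to the unhandled list so it can compete for a
// register again; otherwise re-queue the whole tail without spilling it.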
void LAllocator::SpillBetweenUntil(LiveRange* range,
                                   LifetimePosition start,
                                   LifetimePosition until,
                                   LifetimePosition end) {
  CHECK(start.Value() < end.Value());
  LiveRange* second_part = SplitRangeAt(range, start);
  if (!AllocationOk()) return;

  if (second_part->Start().Value() < end.Value()) {
    // The split result intersects with [start, end[.
    // Split it at a position between ]start+1, end[, spill the middle part,
    // and put the rest on the unhandled list.
    LiveRange* third_part = SplitBetween(
        second_part,
        Max(second_part->Start().InstructionEnd(), until),
        end.PrevInstruction().InstructionEnd());
    if (!AllocationOk()) return;

    DCHECK(third_part != second_part);

    Spill(second_part);
    AddToUnhandledSorted(third_part);
  } else {
    // The split result does not intersect with [start, end[.
    // There is nothing to spill; put the whole result on the unhandled list.
    AddToUnhandledSorted(second_part);
  }
}


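// Mark 'range' as spilled, first assigning a slot to its top-level range if
// it has none: a recycled slot when one is available, else a fresh one. A
// recycled 128-bit slot may have been created for a different SIMD kind, in
// which case it is re-wrapped as the stack-slot kind this range requires.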
void LAllocator::Spill(LiveRange* range) {
  DCHECK(!range->IsSpilled());
  TraceAlloc("Spilling live range %d\n", range->id());
  LiveRange* first = range->TopLevel();

  if (!first->HasAllocatedSpillOperand()) {
    LOperand* op = TryReuseSpillSlot(range);
    if (op == NULL) {
      op = chunk_->GetNextSpillSlot(range->Kind());
    } else if (range->Kind() == FLOAT32x4_REGISTERS &&
               op->kind() != LOperand::FLOAT32x4_STACK_SLOT) {
      // Convert to Float32x4StackSlot.
      op = LFloat32x4StackSlot::Create(op->index(), zone());
    } else if (range->Kind() == FLOAT64x2_REGISTERS &&
               op->kind() != LOperand::FLOAT64x2_STACK_SLOT) {
      // Convert to Float64x2StackSlot.
      op = LFloat64x2StackSlot::Create(op->index(), zone());
    } else if (range->Kind() == INT32x4_REGISTERS &&
               op->kind() != LOperand::INT32x4_STACK_SLOT) {
      // Convert to Int32x4StackSlot.
      op = LInt32x4StackSlot::Create(op->index(), zone());
    }
    first->SetSpillOperand(op);
  }
  range->MakeSpilled(chunk()->zone());
}


int LAllocator::RegisterCount() const {
  return num_registers_;
}


#ifdef DEBUG


void LAllocator::Verify() const {
  for (int i = 0; i < live_ranges()->length(); ++i) {
    LiveRange* current = live_ranges()->at(i);
    if (current != NULL) current->Verify();
  }
}


#endif


LAllocatorPhase::LAllocatorPhase(const char* name, LAllocator* allocator)
    : CompilationPhase(name, allocator->graph()->info()),
      allocator_(allocator) {
  if (FLAG_hydrogen_stats) {
    allocator_zone_start_allocation_size_ =
        allocator->zone()->allocation_size();
  }
}


LAllocatorPhase::~LAllocatorPhase() {
  if (FLAG_hydrogen_stats) {
    unsigned size = allocator_->zone()->allocation_size() -
                    allocator_zone_start_allocation_size_;
    isolate()->GetHStatistics()->SaveTiming(name(), base::TimeDelta(), size);
  }

  if (ShouldProduceTraceOutput()) {
    isolate()->GetHTracer()->TraceLithium(name(), allocator_->chunk());
    isolate()->GetHTracer()->TraceLiveRanges(name(), allocator_);
  }

#ifdef DEBUG
  if (allocator_ != NULL) allocator_->Verify();
#endif
}


} }  // namespace v8::internal