deps: update v8 to 4.3.61.21
deps/v8/src/compiler/register-allocator-verifier.cc
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/bit-vector.h"
#include "src/compiler/instruction.h"
#include "src/compiler/register-allocator-verifier.h"

namespace v8 {
namespace internal {
namespace compiler {

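// The verifier works in two phases. At construction time, before the
// allocator has touched anything, it records an OperandConstraint for every
// operand of every instruction. After allocation, VerifyAssignment() checks
// each assigned operand against its recorded constraint, and VerifyGapMoves()
// checks that the parallel moves inserted into gaps preserve the flow of
// values between virtual registers.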
static size_t OperandCount(const Instruction* instr) {
  return instr->InputCount() + instr->OutputCount() + instr->TempCount();
}


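// Freshly selected code carries empty gaps; it is the register allocator
// that fills them with parallel moves, so at construction time every inner
// position of every gap must still be unpopulated.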
static void VerifyGapEmpty(const GapInstruction* gap) {
  for (int i = GapInstruction::FIRST_INNER_POSITION;
       i <= GapInstruction::LAST_INNER_POSITION; i++) {
    GapInstruction::InnerPosition inner_pos =
        static_cast<GapInstruction::InnerPosition>(i);
    CHECK(!gap->GetParallelMove(inner_pos));
  }
}


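// Sanity checks on freshly built constraints. Inputs may be immediates
// (which carry no virtual register), but kSameAsFirst is an output-only
// policy; temps must be real registers or slots, never immediates or
// constants; outputs always name a virtual register.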
void RegisterAllocatorVerifier::VerifyInput(
    const OperandConstraint& constraint) {
  CHECK_NE(kSameAsFirst, constraint.type_);
  if (constraint.type_ != kImmediate) {
    CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
             constraint.virtual_register_);
  }
}


void RegisterAllocatorVerifier::VerifyTemp(
    const OperandConstraint& constraint) {
  CHECK_NE(kSameAsFirst, constraint.type_);
  CHECK_NE(kImmediate, constraint.type_);
  CHECK_NE(kConstant, constraint.type_);
}


void RegisterAllocatorVerifier::VerifyOutput(
    const OperandConstraint& constraint) {
  CHECK_NE(kImmediate, constraint.type_);
  CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
           constraint.virtual_register_);
}


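// Snapshot the constraints while every operand is still unallocated; the
// allocator later rewrites operands in place, so construction is the only
// point at which the original policies are visible. A typical (sketched)
// use by a compilation pipeline, assuming it owns the zone, config and
// sequence:
//
//   RegisterAllocatorVerifier verifier(zone, config, sequence);
//   RunRegisterAllocation(sequence);  // hypothetical allocation step
//   verifier.VerifyAssignment();
//   verifier.VerifyGapMoves();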
RegisterAllocatorVerifier::RegisterAllocatorVerifier(
    Zone* zone, const RegisterConfiguration* config,
    const InstructionSequence* sequence)
    : zone_(zone), config_(config), sequence_(sequence), constraints_(zone) {
  constraints_.reserve(sequence->instructions().size());
  // TODO(dcarney): model unique constraints.
  // Construct OperandConstraints for all InstructionOperands, eliminating
  // kSameAsFirst along the way.
  for (const auto* instr : sequence->instructions()) {
    const size_t operand_count = OperandCount(instr);
    auto* op_constraints = zone->NewArray<OperandConstraint>(operand_count);
    size_t count = 0;
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      BuildConstraint(instr->InputAt(i), &op_constraints[count]);
      VerifyInput(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      BuildConstraint(instr->TempAt(i), &op_constraints[count]);
      VerifyTemp(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      BuildConstraint(instr->OutputAt(i), &op_constraints[count]);
      if (op_constraints[count].type_ == kSameAsFirst) {
        CHECK(instr->InputCount() > 0);
        op_constraints[count].type_ = op_constraints[0].type_;
        op_constraints[count].value_ = op_constraints[0].value_;
      }
      VerifyOutput(op_constraints[count]);
    }
    // All gaps should be totally unallocated at this point.
    if (instr->IsGapMoves()) {
      CHECK(operand_count == 0);
      VerifyGapEmpty(GapInstruction::cast(instr));
    }
    InstructionConstraint instr_constraint = {instr, operand_count,
                                              op_constraints};
    constraints()->push_back(instr_constraint);
  }
}


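// Post-allocation check: walk the instruction stream in lockstep with the
// recorded constraints and verify that every operand the allocator produced
// satisfies the policy captured for it at construction time.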
void RegisterAllocatorVerifier::VerifyAssignment() {
  CHECK(sequence()->instructions().size() == constraints()->size());
  auto instr_it = sequence()->begin();
  for (const auto& instr_constraint : *constraints()) {
    const auto* instr = instr_constraint.instruction_;
    const size_t operand_count = instr_constraint.operand_constaints_size_;
    const auto* op_constraints = instr_constraint.operand_constraints_;
    CHECK_EQ(instr, *instr_it);
    CHECK(operand_count == OperandCount(instr));
    size_t count = 0;
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      CheckConstraint(instr->InputAt(i), &op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      CheckConstraint(instr->TempAt(i), &op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      CheckConstraint(instr->OutputAt(i), &op_constraints[count]);
    }
    ++instr_it;
  }
}


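// Translate one InstructionOperand into an OperandConstraint. Constants and
// immediates record their pool index; unallocated operands record the
// virtual register plus a constraint type derived from the operand's
// allocation policy. ANY is not expected to reach this point.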
void RegisterAllocatorVerifier::BuildConstraint(const InstructionOperand* op,
                                                OperandConstraint* constraint) {
  constraint->value_ = kMinInt;
  constraint->virtual_register_ = InstructionOperand::kInvalidVirtualRegister;
  if (op->IsConstant()) {
    constraint->type_ = kConstant;
    constraint->value_ = ConstantOperand::cast(op)->index();
    constraint->virtual_register_ = constraint->value_;
  } else if (op->IsImmediate()) {
    constraint->type_ = kImmediate;
    constraint->value_ = ImmediateOperand::cast(op)->index();
  } else {
    CHECK(op->IsUnallocated());
    const auto* unallocated = UnallocatedOperand::cast(op);
    int vreg = unallocated->virtual_register();
    constraint->virtual_register_ = vreg;
    if (unallocated->basic_policy() == UnallocatedOperand::FIXED_SLOT) {
      constraint->type_ = kFixedSlot;
      constraint->value_ = unallocated->fixed_slot_index();
    } else {
      switch (unallocated->extended_policy()) {
        case UnallocatedOperand::ANY:
          CHECK(false);
          break;
        case UnallocatedOperand::NONE:
          if (sequence()->IsDouble(vreg)) {
            constraint->type_ = kNoneDouble;
          } else {
            constraint->type_ = kNone;
          }
          break;
        case UnallocatedOperand::FIXED_REGISTER:
          constraint->type_ = kFixedRegister;
          constraint->value_ = unallocated->fixed_register_index();
          break;
        case UnallocatedOperand::FIXED_DOUBLE_REGISTER:
          constraint->type_ = kFixedDoubleRegister;
          constraint->value_ = unallocated->fixed_register_index();
          break;
        case UnallocatedOperand::MUST_HAVE_REGISTER:
          if (sequence()->IsDouble(vreg)) {
            constraint->type_ = kDoubleRegister;
          } else {
            constraint->type_ = kRegister;
          }
          break;
        case UnallocatedOperand::MUST_HAVE_SLOT:
          if (sequence()->IsDouble(vreg)) {
            constraint->type_ = kDoubleSlot;
          } else {
            constraint->type_ = kSlot;
          }
          break;
        case UnallocatedOperand::SAME_AS_FIRST_INPUT:
          constraint->type_ = kSameAsFirst;
          break;
      }
    }
  }
}


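// The dual of BuildConstraint: given the now-allocated operand, assert that
// its concrete location matches the recorded constraint. kSameAsFirst can
// never reach here because the constructor rewrites it to the first input's
// constraint.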
void RegisterAllocatorVerifier::CheckConstraint(
    const InstructionOperand* op, const OperandConstraint* constraint) {
  switch (constraint->type_) {
    case kConstant:
      CHECK(op->IsConstant());
      CHECK_EQ(op->index(), constraint->value_);
      return;
    case kImmediate:
      CHECK(op->IsImmediate());
      CHECK_EQ(op->index(), constraint->value_);
      return;
    case kRegister:
      CHECK(op->IsRegister());
      return;
    case kFixedRegister:
      CHECK(op->IsRegister());
      CHECK_EQ(op->index(), constraint->value_);
      return;
    case kDoubleRegister:
      CHECK(op->IsDoubleRegister());
      return;
    case kFixedDoubleRegister:
      CHECK(op->IsDoubleRegister());
      CHECK_EQ(op->index(), constraint->value_);
      return;
    case kFixedSlot:
      CHECK(op->IsStackSlot());
      CHECK_EQ(op->index(), constraint->value_);
      return;
    case kSlot:
      CHECK(op->IsStackSlot());
      return;
    case kDoubleSlot:
      CHECK(op->IsDoubleStackSlot());
      return;
    case kNone:
      CHECK(op->IsRegister() || op->IsStackSlot());
      return;
    case kNoneDouble:
      CHECK(op->IsDoubleRegister() || op->IsDoubleStackSlot());
      return;
    case kSameAsFirst:
      CHECK(false);
      return;
  }
}

namespace {

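// Machinery for VerifyGapMoves. The idea is to model, per block, a map from
// machine locations (registers and stack slots) to the virtual registers
// they hold, and to simulate the allocator's parallel moves against that
// map. Definitions populate the map, uses check it, and a backwards
// propagation pass connects uses to definitions across block boundaries.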
typedef RpoNumber Rpo;

static const int kInvalidVreg = InstructionOperand::kInvalidVirtualRegister;

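// Per-phi bookkeeping: where the phi is defined, its own virtual register,
// its operand vregs, and the vreg arriving along the first predecessor,
// with any chain of phis feeding that position collapsed to a non-phi vreg.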
struct PhiData : public ZoneObject {
  PhiData(Rpo definition_rpo, const PhiInstruction* phi, int first_pred_vreg,
          const PhiData* first_pred_phi, Zone* zone)
      : definition_rpo(definition_rpo),
        virtual_register(phi->virtual_register()),
        first_pred_vreg(first_pred_vreg),
        first_pred_phi(first_pred_phi),
        operands(zone) {
    operands.reserve(phi->operands().size());
    operands.insert(operands.begin(), phi->operands().begin(),
                    phi->operands().end());
  }
  const Rpo definition_rpo;
  const int virtual_register;
  const int first_pred_vreg;
  const PhiData* first_pred_phi;
  IntVector operands;
};

class PhiMap : public ZoneMap<int, PhiData*>, public ZoneObject {
 public:
  explicit PhiMap(Zone* zone) : ZoneMap<int, PhiData*>(zone) {}
};

struct OperandLess {
  bool operator()(const InstructionOperand* a,
                  const InstructionOperand* b) const {
    return *a < *b;
  }
};

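// Tracks which virtual register each machine operand holds within a block.
// MapValue forms a chain through `incoming` back to the value's state in the
// first predecessor, which Use()/UsePhi() walk on the initial pass.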
class OperandMap : public ZoneObject {
 public:
  struct MapValue : public ZoneObject {
    MapValue()
        : incoming(nullptr),
          define_vreg(kInvalidVreg),
          use_vreg(kInvalidVreg),
          succ_vreg(kInvalidVreg) {}
    MapValue* incoming;  // value from first predecessor block.
    int define_vreg;     // valid if this value was defined in this block.
    int use_vreg;        // valid if this value was used in this block.
    int succ_vreg;       // valid if propagated back from successor block.
  };

  class Map
      : public ZoneMap<const InstructionOperand*, MapValue*, OperandLess> {
   public:
    explicit Map(Zone* zone)
        : ZoneMap<const InstructionOperand*, MapValue*, OperandLess>(zone) {}

    // Remove all entries with keys not in other.
    void Intersect(const Map& other) {
      if (this->empty()) return;
      auto it = this->begin();
      OperandLess less;
      for (const auto& o : other) {
        while (less(it->first, o.first)) {
          this->erase(it++);
          if (it == this->end()) return;
        }
        if (it->first->Equals(o.first)) {
          ++it;
          if (it == this->end()) return;
        } else {
          CHECK(less(o.first, it->first));
        }
      }
    }
  };

  explicit OperandMap(Zone* zone) : map_(zone) {}

  Map& map() { return map_; }

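  // Parallel-move semantics: all sources are read before any destination is
  // written. For example, a swap {r0 -> r1, r1 -> r0} must leave each
  // register holding the other's old vreg, which is why the new bindings are
  // computed into a scratch map first, the destinations are then cleared,
  // and only afterwards are the new bindings installed. The injectivity
  // check rejects two moves targeting the same destination.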
  void RunParallelMoves(Zone* zone, const ParallelMove* move) {
    // Compute outgoing mappings.
    Map to_insert(zone);
    auto moves = move->move_operands();
    for (auto i = moves->begin(); i != moves->end(); ++i) {
      if (i->IsEliminated()) continue;
      auto cur = map().find(i->source());
      CHECK(cur != map().end());
      auto res =
          to_insert.insert(std::make_pair(i->destination(), cur->second));
      // Ensure injectivity of moves.
      CHECK(res.second);
    }
    // Drop current mappings.
    for (auto i = moves->begin(); i != moves->end(); ++i) {
      if (i->IsEliminated()) continue;
      auto cur = map().find(i->destination());
      if (cur != map().end()) map().erase(cur);
    }
    // Insert new values.
    map().insert(to_insert.begin(), to_insert.end());
  }

  void RunGapInstruction(Zone* zone, const GapInstruction* gap) {
    for (int i = GapInstruction::FIRST_INNER_POSITION;
         i <= GapInstruction::LAST_INNER_POSITION; i++) {
      auto inner_pos = static_cast<GapInstruction::InnerPosition>(i);
      auto move = gap->GetParallelMove(inner_pos);
      if (move == nullptr) continue;
      RunParallelMoves(zone, move);
    }
  }

  void Drop(const InstructionOperand* op) {
    auto it = map().find(op);
    if (it != map().end()) map().erase(it);
  }

  void DropRegisters(const RegisterConfiguration* config) {
    for (int i = 0; i < config->num_general_registers(); ++i) {
      InstructionOperand op(InstructionOperand::REGISTER, i);
      Drop(&op);
    }
    for (int i = 0; i < config->num_double_registers(); ++i) {
      InstructionOperand op(InstructionOperand::DOUBLE_REGISTER, i);
      Drop(&op);
    }
  }

  void Define(Zone* zone, const InstructionOperand* op, int virtual_register) {
    auto value = new (zone) MapValue();
    value->define_vreg = virtual_register;
    auto res = map().insert(std::make_pair(op, value));
    if (!res.second) res.first->second = value;
  }

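  // Check that reading `op` as `use_vreg` is consistent with what this map
  // knows. On the initial pass the incoming chain is walked back through
  // first predecessors; on the second pass, succ_vreg carries the globally
  // propagated answer instead.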
  void Use(const InstructionOperand* op, int use_vreg, bool initial_pass) {
    auto it = map().find(op);
    CHECK(it != map().end());
    auto v = it->second;
    if (v->define_vreg != kInvalidVreg) {
      CHECK_EQ(v->define_vreg, use_vreg);
    }
    // Already used this vreg in this block.
    if (v->use_vreg != kInvalidVreg) {
      CHECK_EQ(v->use_vreg, use_vreg);
      return;
    }
    if (!initial_pass) {
      // A value may be defined and used in this block or the use must have
      // propagated up.
      if (v->succ_vreg != kInvalidVreg) {
        CHECK_EQ(v->succ_vreg, use_vreg);
      } else {
        CHECK_EQ(v->define_vreg, use_vreg);
      }
      // Mark the use.
      it->second->use_vreg = use_vreg;
      return;
    }
    // Go up block list and ensure the correct definition is reached.
    for (; v != nullptr; v = v->incoming) {
      // Value unused in block.
      if (v->define_vreg == kInvalidVreg && v->use_vreg == kInvalidVreg) {
        continue;
      }
      // Found correct definition or use.
      CHECK(v->define_vreg == use_vreg || v->use_vreg == use_vreg);
      // Mark the use.
      it->second->use_vreg = use_vreg;
      return;
    }
    // Use of a non-phi value without definition.
    CHECK(false);
  }

  void UsePhi(const InstructionOperand* op, const PhiData* phi,
              bool initial_pass) {
    auto it = map().find(op);
    CHECK(it != map().end());
    auto v = it->second;
    int use_vreg = phi->virtual_register;
    // Phis are not defined.
    CHECK_EQ(kInvalidVreg, v->define_vreg);
    // Already used this vreg in this block.
    if (v->use_vreg != kInvalidVreg) {
      CHECK_EQ(v->use_vreg, use_vreg);
      return;
    }
    if (!initial_pass) {
      // A used phi must have propagated its use to a predecessor.
      CHECK_EQ(v->succ_vreg, use_vreg);
      // Mark the use.
      v->use_vreg = use_vreg;
      return;
    }
    // Go up the block list starting at the first predecessor and ensure this
    // phi has a correct use or definition.
    for (v = v->incoming; v != nullptr; v = v->incoming) {
      // Value unused in block.
      if (v->define_vreg == kInvalidVreg && v->use_vreg == kInvalidVreg) {
        continue;
      }
      // Found correct definition or use.
      if (v->define_vreg != kInvalidVreg) {
        CHECK(v->define_vreg == phi->first_pred_vreg);
      } else if (v->use_vreg != phi->first_pred_vreg) {
        // Walk the phi chain, hunting for a matching phi use.
        auto p = phi;
        for (; p != nullptr; p = p->first_pred_phi) {
          if (p->virtual_register == v->use_vreg) break;
        }
        CHECK(p);
      }
      // Mark the use.
      it->second->use_vreg = use_vreg;
      return;
    }
    // Use of a phi value without definition.
    UNREACHABLE();
  }

 private:
  Map map_;
  DISALLOW_COPY_AND_ASSIGN(OperandMap);
};

}  // namespace


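// Per-block state for VerifyGapMoves: an incoming and an outgoing OperandMap
// for every block, plus the phi table. PropagateUsesBackwards() runs a
// fixpoint that pushes observed uses back to the blocks that defined the
// values, translating through phis at their definition blocks.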
class RegisterAllocatorVerifier::BlockMaps {
 public:
  BlockMaps(Zone* zone, const InstructionSequence* sequence)
      : zone_(zone),
        sequence_(sequence),
        phi_map_guard_(sequence->VirtualRegisterCount(), zone),
        phi_map_(zone),
        incoming_maps_(zone),
        outgoing_maps_(zone) {
    InitializePhis();
    InitializeOperandMaps();
  }

  bool IsPhi(int virtual_register) {
    return phi_map_guard_.Contains(virtual_register);
  }

  const PhiData* GetPhi(int virtual_register) {
    auto it = phi_map_.find(virtual_register);
    CHECK(it != phi_map_.end());
    return it->second;
  }

  OperandMap* InitializeIncoming(size_t block_index, bool initial_pass) {
    return initial_pass ? InitializeFromFirstPredecessor(block_index)
                        : InitializeFromIntersection(block_index);
  }

  void PropagateUsesBackwards() {
    typedef std::set<size_t, std::greater<size_t>, zone_allocator<size_t>>
        BlockIds;
    BlockIds block_ids((BlockIds::key_compare()),
                       zone_allocator<size_t>(zone()));
    // First ensure that incoming contains only keys in all predecessors.
    for (auto block : sequence()->instruction_blocks()) {
      size_t index = block->rpo_number().ToSize();
      block_ids.insert(index);
      auto& succ_map = incoming_maps_[index]->map();
      for (size_t i = 0; i < block->PredecessorCount(); ++i) {
        auto pred_rpo = block->predecessors()[i];
        succ_map.Intersect(outgoing_maps_[pred_rpo.ToSize()]->map());
      }
    }
    // Back propagation fixpoint.
    while (!block_ids.empty()) {
      // Pop highest block_id.
      auto block_id_it = block_ids.begin();
      const size_t succ_index = *block_id_it;
      block_ids.erase(block_id_it);
      // Propagate uses back to their definition blocks using succ_vreg.
      auto block = sequence()->instruction_blocks()[succ_index];
      auto& succ_map = incoming_maps_[succ_index]->map();
      for (size_t i = 0; i < block->PredecessorCount(); ++i) {
        for (auto& succ_val : succ_map) {
          // An incoming map contains no defines.
          CHECK_EQ(kInvalidVreg, succ_val.second->define_vreg);
          // Compute succ_vreg.
          int succ_vreg = succ_val.second->succ_vreg;
          if (succ_vreg == kInvalidVreg) {
            succ_vreg = succ_val.second->use_vreg;
            // Initialize succ_vreg in back propagation chain.
            succ_val.second->succ_vreg = succ_vreg;
          }
          if (succ_vreg == kInvalidVreg) continue;
          // May need to transition phi.
          if (IsPhi(succ_vreg)) {
            auto phi = GetPhi(succ_vreg);
            if (phi->definition_rpo.ToSize() == succ_index) {
              // phi definition block, transition to pred value.
              succ_vreg = phi->operands[i];
            }
          }
          // Push succ_vreg up to all predecessors.
          auto pred_rpo = block->predecessors()[i];
          auto& pred_map = outgoing_maps_[pred_rpo.ToSize()]->map();
          auto& pred_val = *pred_map.find(succ_val.first);
          if (pred_val.second->use_vreg != kInvalidVreg) {
            CHECK_EQ(succ_vreg, pred_val.second->use_vreg);
          }
          if (pred_val.second->define_vreg != kInvalidVreg) {
            CHECK_EQ(succ_vreg, pred_val.second->define_vreg);
          }
          if (pred_val.second->succ_vreg != kInvalidVreg) {
            CHECK_EQ(succ_vreg, pred_val.second->succ_vreg);
          } else {
            pred_val.second->succ_vreg = succ_vreg;
            block_ids.insert(pred_rpo.ToSize());
          }
        }
      }
    }
    // Clear uses and back links for second pass.
    for (auto operand_map : incoming_maps_) {
      for (auto& succ_val : operand_map->map()) {
        succ_val.second->incoming = nullptr;
        succ_val.second->use_vreg = kInvalidVreg;
      }
    }
  }

 private:
  OperandMap* InitializeFromFirstPredecessor(size_t block_index) {
    auto to_init = outgoing_maps_[block_index];
    CHECK(to_init->map().empty());
    auto block = sequence()->instruction_blocks()[block_index];
    if (block->predecessors().empty()) return to_init;
    size_t predecessor_index = block->predecessors()[0].ToSize();
    // Ensure not a backedge.
    CHECK(predecessor_index < block->rpo_number().ToSize());
    auto incoming = outgoing_maps_[predecessor_index];
    // Copy map and replace values.
    to_init->map() = incoming->map();
    for (auto& it : to_init->map()) {
      auto incoming = it.second;
      it.second = new (zone()) OperandMap::MapValue();
      it.second->incoming = incoming;
    }
    // Copy to incoming map for second pass.
    incoming_maps_[block_index]->map() = to_init->map();
    return to_init;
  }

  OperandMap* InitializeFromIntersection(size_t block_index) {
    return incoming_maps_[block_index];
  }

  void InitializeOperandMaps() {
    size_t block_count = sequence()->instruction_blocks().size();
    incoming_maps_.reserve(block_count);
    outgoing_maps_.reserve(block_count);
    for (size_t i = 0; i < block_count; ++i) {
      incoming_maps_.push_back(new (zone()) OperandMap(zone()));
      outgoing_maps_.push_back(new (zone()) OperandMap(zone()));
    }
  }

  void InitializePhis() {
    const size_t block_count = sequence()->instruction_blocks().size();
    for (size_t block_index = 0; block_index < block_count; ++block_index) {
      const auto block = sequence()->instruction_blocks()[block_index];
      for (auto phi : block->phis()) {
        int first_pred_vreg = phi->operands()[0];
        const PhiData* first_pred_phi = nullptr;
        if (IsPhi(first_pred_vreg)) {
          first_pred_phi = GetPhi(first_pred_vreg);
          first_pred_vreg = first_pred_phi->first_pred_vreg;
        }
        CHECK(!IsPhi(first_pred_vreg));
        auto phi_data = new (zone()) PhiData(
            block->rpo_number(), phi, first_pred_vreg, first_pred_phi, zone());
        auto res =
            phi_map_.insert(std::make_pair(phi->virtual_register(), phi_data));
        CHECK(res.second);
        phi_map_guard_.Add(phi->virtual_register());
      }
    }
  }

  typedef ZoneVector<OperandMap*> OperandMaps;
  typedef ZoneVector<PhiData*> PhiVector;

  Zone* zone() const { return zone_; }
  const InstructionSequence* sequence() const { return sequence_; }

  Zone* const zone_;
  const InstructionSequence* const sequence_;
  BitVector phi_map_guard_;
  PhiMap phi_map_;
  OperandMaps incoming_maps_;
  OperandMaps outgoing_maps_;
};


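// Two-pass driver. The initial pass seeds each block from its first
// predecessor and checks uses against that chain; PropagateUsesBackwards()
// then flows use information to all predecessors, and the second pass
// re-checks every block against the intersected, fully propagated maps.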
void RegisterAllocatorVerifier::VerifyGapMoves() {
  BlockMaps block_maps(zone(), sequence());
  VerifyGapMoves(&block_maps, true);
  block_maps.PropagateUsesBackwards();
  VerifyGapMoves(&block_maps, false);
}


// Compute and verify outgoing values for every block.
void RegisterAllocatorVerifier::VerifyGapMoves(BlockMaps* block_maps,
                                               bool initial_pass) {
  const size_t block_count = sequence()->instruction_blocks().size();
  for (size_t block_index = 0; block_index < block_count; ++block_index) {
    auto current = block_maps->InitializeIncoming(block_index, initial_pass);
    const auto block = sequence()->instruction_blocks()[block_index];
    for (int instr_index = block->code_start(); instr_index < block->code_end();
         ++instr_index) {
      const auto& instr_constraint = constraints_[instr_index];
      const auto instr = instr_constraint.instruction_;
      if (instr->IsSourcePosition()) continue;
      if (instr->IsGapMoves()) {
        current->RunGapInstruction(zone(), GapInstruction::cast(instr));
        continue;
      }
      const auto op_constraints = instr_constraint.operand_constraints_;
      size_t count = 0;
      for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
        if (op_constraints[count].type_ == kImmediate) continue;
        int virtual_register = op_constraints[count].virtual_register_;
        auto op = instr->InputAt(i);
        if (!block_maps->IsPhi(virtual_register)) {
          current->Use(op, virtual_register, initial_pass);
        } else {
          auto phi = block_maps->GetPhi(virtual_register);
          current->UsePhi(op, phi, initial_pass);
        }
      }
      for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
        current->Drop(instr->TempAt(i));
      }
      if (instr->IsCall()) {
        current->DropRegisters(config());
      }
      for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
        int virtual_register = op_constraints[count].virtual_register_;
        current->Define(zone(), instr->OutputAt(i), virtual_register);
      }
    }
  }
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8