// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/bit-vector.h"
#include "src/compiler/instruction.h"
#include "src/compiler/register-allocator-verifier.h"

namespace v8 {
namespace internal {
namespace compiler {

static size_t OperandCount(const Instruction* instr) {
  return instr->InputCount() + instr->OutputCount() + instr->TempCount();
}

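// Constraints are collected before the allocator has inserted any moves, so
// every inner position of every gap must still be empty.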
static void VerifyGapEmpty(const GapInstruction* gap) {
  for (int i = GapInstruction::FIRST_INNER_POSITION;
       i <= GapInstruction::LAST_INNER_POSITION; i++) {
    GapInstruction::InnerPosition inner_pos =
        static_cast<GapInstruction::InnerPosition>(i);
    CHECK(!gap->GetParallelMove(inner_pos));
  }
}

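// Inputs may never be kSameAsFirst, and everything except an immediate must
// carry a valid virtual register.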
void RegisterAllocatorVerifier::VerifyInput(
    const OperandConstraint& constraint) {
  CHECK_NE(kSameAsFirst, constraint.type_);
  if (constraint.type_ != kImmediate) {
    CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
             constraint.virtual_register_);
  }
}

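// Temps are scratch locations; they cannot be immediates, constants, or tied
// to the first input.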
void RegisterAllocatorVerifier::VerifyTemp(
    const OperandConstraint& constraint) {
  CHECK_NE(kSameAsFirst, constraint.type_);
  CHECK_NE(kImmediate, constraint.type_);
  CHECK_NE(kConstant, constraint.type_);
}


void RegisterAllocatorVerifier::VerifyOutput(
    const OperandConstraint& constraint) {
  CHECK_NE(kImmediate, constraint.type_);
  CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
           constraint.virtual_register_);
}

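// The constructor snapshots an OperandConstraint for every operand while the
// code is still unallocated; VerifyAssignment() and VerifyGapMoves() later
// check the allocator's output against that snapshot.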
RegisterAllocatorVerifier::RegisterAllocatorVerifier(
    Zone* zone, const RegisterConfiguration* config,
    const InstructionSequence* sequence)
    : zone_(zone), config_(config), sequence_(sequence), constraints_(zone) {
  constraints_.reserve(sequence->instructions().size());
  // TODO(dcarney): model unique constraints.
  // Construct OperandConstraints for all InstructionOperands, eliminating
  // kSameAsFirst along the way.
  for (const auto* instr : sequence->instructions()) {
    const size_t operand_count = OperandCount(instr);
    auto* op_constraints = zone->NewArray<OperandConstraint>(operand_count);
    size_t count = 0;
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      BuildConstraint(instr->InputAt(i), &op_constraints[count]);
      VerifyInput(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      BuildConstraint(instr->TempAt(i), &op_constraints[count]);
      VerifyTemp(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      BuildConstraint(instr->OutputAt(i), &op_constraints[count]);
      if (op_constraints[count].type_ == kSameAsFirst) {
        CHECK(instr->InputCount() > 0);
        op_constraints[count].type_ = op_constraints[0].type_;
        op_constraints[count].value_ = op_constraints[0].value_;
      }
      VerifyOutput(op_constraints[count]);
    }
    // All gaps should be totally unallocated at this point.
    if (instr->IsGapMoves()) {
      CHECK(operand_count == 0);
      VerifyGapEmpty(GapInstruction::cast(instr));
    }
    InstructionConstraint instr_constraint = {instr, operand_count,
                                              op_constraints};
    constraints()->push_back(instr_constraint);
  }
}

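// Walk the allocated sequence in lockstep with the recorded constraints and
// check every operand of every instruction.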
void RegisterAllocatorVerifier::VerifyAssignment() {
  CHECK(sequence()->instructions().size() == constraints()->size());
  auto instr_it = sequence()->begin();
  for (const auto& instr_constraint : *constraints()) {
    const auto* instr = instr_constraint.instruction_;
    const size_t operand_count = instr_constraint.operand_constaints_size_;
    const auto* op_constraints = instr_constraint.operand_constraints_;
    CHECK_EQ(instr, *instr_it);
    CHECK(operand_count == OperandCount(instr));
    size_t count = 0;
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      CheckConstraint(instr->InputAt(i), &op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      CheckConstraint(instr->TempAt(i), &op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      CheckConstraint(instr->OutputAt(i), &op_constraints[count]);
    }
    ++instr_it;
  }
}

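// Translate an InstructionOperand into an OperandConstraint. Constants and
// immediates record their index as the constraint value; unallocated operands
// record their allocation policy and virtual register.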
void RegisterAllocatorVerifier::BuildConstraint(const InstructionOperand* op,
                                                OperandConstraint* constraint) {
  constraint->value_ = kMinInt;
  constraint->virtual_register_ = InstructionOperand::kInvalidVirtualRegister;
  if (op->IsConstant()) {
    constraint->type_ = kConstant;
    constraint->value_ = ConstantOperand::cast(op)->index();
    constraint->virtual_register_ = constraint->value_;
  } else if (op->IsImmediate()) {
    constraint->type_ = kImmediate;
    constraint->value_ = ImmediateOperand::cast(op)->index();
  } else {
    CHECK(op->IsUnallocated());
    const auto* unallocated = UnallocatedOperand::cast(op);
    int vreg = unallocated->virtual_register();
    constraint->virtual_register_ = vreg;
    if (unallocated->basic_policy() == UnallocatedOperand::FIXED_SLOT) {
      constraint->type_ = kFixedSlot;
      constraint->value_ = unallocated->fixed_slot_index();
    } else {
      switch (unallocated->extended_policy()) {
        case UnallocatedOperand::ANY:
          CHECK(false);
          break;
        case UnallocatedOperand::NONE:
          if (sequence()->IsDouble(vreg)) {
            constraint->type_ = kNoneDouble;
          } else {
            constraint->type_ = kNone;
          }
          break;
        case UnallocatedOperand::FIXED_REGISTER:
          constraint->type_ = kFixedRegister;
          constraint->value_ = unallocated->fixed_register_index();
          break;
        case UnallocatedOperand::FIXED_DOUBLE_REGISTER:
          constraint->type_ = kFixedDoubleRegister;
          constraint->value_ = unallocated->fixed_register_index();
          break;
        case UnallocatedOperand::MUST_HAVE_REGISTER:
          if (sequence()->IsDouble(vreg)) {
            constraint->type_ = kDoubleRegister;
          } else {
            constraint->type_ = kRegister;
          }
          break;
        case UnallocatedOperand::SAME_AS_FIRST_INPUT:
          constraint->type_ = kSameAsFirst;
          break;
      }
    }
  }
}

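// Check one allocated operand against its recorded constraint. kSameAsFirst
// never reaches here because the constructor rewrote it to the first input's
// constraint.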
void RegisterAllocatorVerifier::CheckConstraint(
    const InstructionOperand* op, const OperandConstraint* constraint) {
  switch (constraint->type_) {
    case kConstant:
      CHECK(op->IsConstant());
      CHECK_EQ(op->index(), constraint->value_);
      return;
    case kImmediate:
      CHECK(op->IsImmediate());
      CHECK_EQ(op->index(), constraint->value_);
      return;
    case kRegister:
      CHECK(op->IsRegister());
      return;
    case kFixedRegister:
      CHECK(op->IsRegister());
      CHECK_EQ(op->index(), constraint->value_);
      return;
    case kDoubleRegister:
      CHECK(op->IsDoubleRegister());
      return;
    case kFixedDoubleRegister:
      CHECK(op->IsDoubleRegister());
      CHECK_EQ(op->index(), constraint->value_);
      return;
    case kFixedSlot:
      CHECK(op->IsStackSlot());
      CHECK_EQ(op->index(), constraint->value_);
      return;
    case kNone:
      CHECK(op->IsRegister() || op->IsStackSlot());
      return;
    case kNoneDouble:
      CHECK(op->IsDoubleRegister() || op->IsDoubleStackSlot());
      return;
    case kSameAsFirst:
      CHECK(false);
      return;
  }
}

namespace {

typedef BasicBlock::RpoNumber Rpo;

static const int kInvalidVreg = InstructionOperand::kInvalidVirtualRegister;

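// Bookkeeping for one phi: the block it is defined in, the virtual register
// it produces, and the (transitively resolved) non-phi vreg arriving from the
// first predecessor.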
struct PhiData : public ZoneObject {
  PhiData(Rpo definition_rpo, const PhiInstruction* phi, int first_pred_vreg,
          const PhiData* first_pred_phi, Zone* zone)
      : definition_rpo(definition_rpo),
        virtual_register(phi->virtual_register()),
        first_pred_vreg(first_pred_vreg),
        first_pred_phi(first_pred_phi),
        operands(zone) {
    operands.reserve(phi->operands().size());
    operands.insert(operands.begin(), phi->operands().begin(),
                    phi->operands().end());
  }
  const Rpo definition_rpo;
  const int virtual_register;
  const int first_pred_vreg;
  const PhiData* first_pred_phi;
  IntVector operands;
};

class PhiMap : public ZoneMap<int, PhiData*>, public ZoneObject {
 public:
  explicit PhiMap(Zone* zone) : ZoneMap<int, PhiData*>(zone) {}
};

struct OperandLess {
  bool operator()(const InstructionOperand* a,
                  const InstructionOperand* b) const {
    if (a->kind() == b->kind()) return a->index() < b->index();
    return a->kind() < b->kind();
  }
};

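// Tracks, for every location (register or stack slot), which virtual
// registers it has held within a block. Parallel moves permute the map's
// entries exactly as they permute values at run time.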
class OperandMap : public ZoneObject {
 public:
  struct MapValue : public ZoneObject {
    MapValue()
        : incoming(nullptr),
          define_vreg(kInvalidVreg),
          use_vreg(kInvalidVreg),
          succ_vreg(kInvalidVreg) {}
    MapValue* incoming;  // value from first predecessor block.
    int define_vreg;     // valid if this value was defined in this block.
    int use_vreg;        // valid if this value was used in this block.
    int succ_vreg;       // valid if propagated back from successor block.
  };

  class Map
      : public ZoneMap<const InstructionOperand*, MapValue*, OperandLess> {
   public:
    explicit Map(Zone* zone)
        : ZoneMap<const InstructionOperand*, MapValue*, OperandLess>(zone) {}

    // Remove all entries with keys not in other.
    void Intersect(const Map& other) {
      if (this->empty()) return;
      auto it = this->begin();
      OperandLess less;
      for (const auto& o : other) {
        while (less(it->first, o.first)) {
          this->erase(it++);
          if (it == this->end()) return;
        }
        if (it->first->Equals(o.first)) {
          ++it;
          if (it == this->end()) return;
        } else {
          CHECK(less(o.first, it->first));
        }
      }
    }
  };

  explicit OperandMap(Zone* zone) : map_(zone) {}

  Map& map() { return map_; }

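  // Apply a parallel move to the map. All sources are read before any
  // destination is overwritten, matching the all-at-once semantics of a
  // parallel move.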
  void RunParallelMoves(Zone* zone, const ParallelMove* move) {
    // Compute outgoing mappings.
    Map to_insert(zone);
    auto moves = move->move_operands();
    for (auto i = moves->begin(); i != moves->end(); ++i) {
      if (i->IsEliminated()) continue;
      auto cur = map().find(i->source());
      CHECK(cur != map().end());
      to_insert.insert(std::make_pair(i->destination(), cur->second));
    }
    // Drop current mappings.
    for (auto i = moves->begin(); i != moves->end(); ++i) {
      if (i->IsEliminated()) continue;
      auto cur = map().find(i->destination());
      if (cur != map().end()) map().erase(cur);
    }
    // Insert new values.
    map().insert(to_insert.begin(), to_insert.end());
  }

  void RunGapInstruction(Zone* zone, const GapInstruction* gap) {
    for (int i = GapInstruction::FIRST_INNER_POSITION;
         i <= GapInstruction::LAST_INNER_POSITION; i++) {
      auto inner_pos = static_cast<GapInstruction::InnerPosition>(i);
      auto move = gap->GetParallelMove(inner_pos);
      if (move == nullptr) continue;
      RunParallelMoves(zone, move);
    }
  }

  void Drop(const InstructionOperand* op) {
    auto it = map().find(op);
    if (it != map().end()) map().erase(it);
  }

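  // Calls clobber every allocatable register, so forget any value currently
  // mapped to one.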
  void DropRegisters(const RegisterConfiguration* config) {
    for (int i = 0; i < config->num_general_registers(); ++i) {
      InstructionOperand op(InstructionOperand::REGISTER, i);
      Drop(&op);
    }
    for (int i = 0; i < config->num_double_registers(); ++i) {
      InstructionOperand op(InstructionOperand::DOUBLE_REGISTER, i);
      Drop(&op);
    }
  }

  void Define(Zone* zone, const InstructionOperand* op, int virtual_register) {
    auto value = new (zone) MapValue();
    value->define_vreg = virtual_register;
    auto res = map().insert(std::make_pair(op, value));
    if (!res.second) res.first->second = value;
  }

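  // Record a use of |use_vreg| at |op|. On the initial pass, uses are checked
  // against definitions reachable through the first-predecessor chain; on the
  // second pass they must agree with the values propagated back from
  // successors.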
  void Use(const InstructionOperand* op, int use_vreg, bool initial_pass) {
    auto it = map().find(op);
    CHECK(it != map().end());
    auto v = it->second;
    if (v->define_vreg != kInvalidVreg) {
      CHECK_EQ(v->define_vreg, use_vreg);
    }
    // Already used this vreg in this block.
    if (v->use_vreg != kInvalidVreg) {
      CHECK_EQ(v->use_vreg, use_vreg);
      return;
    }
    if (!initial_pass) {
      // A value may be defined and used in this block or the use must have
      // propagated up.
      if (v->succ_vreg != kInvalidVreg) {
        CHECK_EQ(v->succ_vreg, use_vreg);
      } else {
        CHECK_EQ(v->define_vreg, use_vreg);
      }
      // Mark the use.
      it->second->use_vreg = use_vreg;
      return;
    }
    // Go up block list and ensure the correct definition is reached.
    for (; v != nullptr; v = v->incoming) {
      // Value unused in block.
      if (v->define_vreg == kInvalidVreg && v->use_vreg == kInvalidVreg) {
        continue;
      }
      // Found correct definition or use.
      CHECK(v->define_vreg == use_vreg || v->use_vreg == use_vreg);
      // Mark the use.
      it->second->use_vreg = use_vreg;
      return;
    }
    // Use of a non-phi value without definition.
    CHECK(false);
  }

  void UsePhi(const InstructionOperand* op, const PhiData* phi,
              bool initial_pass) {
    auto it = map().find(op);
    CHECK(it != map().end());
    auto v = it->second;
    int use_vreg = phi->virtual_register;
    // Phis are not defined.
    CHECK_EQ(kInvalidVreg, v->define_vreg);
    // Already used this vreg in this block.
    if (v->use_vreg != kInvalidVreg) {
      CHECK_EQ(v->use_vreg, use_vreg);
      return;
    }
    if (!initial_pass) {
      // A used phi must have propagated its use to a predecessor.
      CHECK_EQ(v->succ_vreg, use_vreg);
      // Mark the use.
      v->use_vreg = use_vreg;
      return;
    }
    // Go up the block list starting at the first predecessor and ensure this
    // phi has a correct use or definition.
    for (v = v->incoming; v != nullptr; v = v->incoming) {
      // Value unused in block.
      if (v->define_vreg == kInvalidVreg && v->use_vreg == kInvalidVreg) {
        continue;
      }
      // Found correct definition or use.
      if (v->define_vreg != kInvalidVreg) {
        CHECK(v->define_vreg == phi->first_pred_vreg);
      } else if (v->use_vreg != phi->first_pred_vreg) {
        // Walk the phi chain, hunting for a matching phi use.
        auto p = phi;
        for (; p != nullptr; p = p->first_pred_phi) {
          if (p->virtual_register == v->use_vreg) break;
        }
        CHECK(p);
      }
      // Mark the use.
      it->second->use_vreg = use_vreg;
      return;
    }
    // Use of a phi value without definition.
    UNREACHABLE();
  }

 private:
  Map map_;
  DISALLOW_COPY_AND_ASSIGN(OperandMap);
};

}  // namespace

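// Per-block operand state: outgoing_maps_ hold each block's state at exit;
// incoming_maps_ start as a copy of the first predecessor's exit state and
// are later narrowed to the intersection of all predecessors.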
class RegisterAllocatorVerifier::BlockMaps {
 public:
  BlockMaps(Zone* zone, const InstructionSequence* sequence)
      : zone_(zone),
        sequence_(sequence),
        phi_map_guard_(sequence->VirtualRegisterCount(), zone),
        phi_map_(zone),
        incoming_maps_(zone),
        outgoing_maps_(zone) {
    InitializePhis();
    InitializeOperandMaps();
  }

  bool IsPhi(int virtual_register) {
    return phi_map_guard_.Contains(virtual_register);
  }

  const PhiData* GetPhi(int virtual_register) {
    auto it = phi_map_.find(virtual_register);
    CHECK(it != phi_map_.end());
    return it->second;
  }

  OperandMap* InitializeIncoming(size_t block_index, bool initial_pass) {
    return initial_pass ? InitializeFromFirstPredecessor(block_index)
                        : InitializeFromIntersection(block_index);
  }

  void PropagateUsesBackwards() {
    typedef std::set<size_t, std::greater<size_t>, zone_allocator<size_t>>
        BlockIds;
    BlockIds block_ids((BlockIds::key_compare()),
                       zone_allocator<size_t>(zone()));
    // First ensure that incoming contains only keys in all predecessors.
    for (auto block : sequence()->instruction_blocks()) {
      size_t index = block->rpo_number().ToSize();
      block_ids.insert(index);
      auto& succ_map = incoming_maps_[index]->map();
      for (size_t i = 0; i < block->PredecessorCount(); ++i) {
        auto pred_rpo = block->predecessors()[i];
        succ_map.Intersect(outgoing_maps_[pred_rpo.ToSize()]->map());
      }
    }
    // Back propagation fixpoint.
    while (!block_ids.empty()) {
      // Pop highest block_id.
      auto block_id_it = block_ids.begin();
      const size_t succ_index = *block_id_it;
      block_ids.erase(block_id_it);
      // Propagate uses back to their definition blocks using succ_vreg.
      auto block = sequence()->instruction_blocks()[succ_index];
      auto& succ_map = incoming_maps_[succ_index]->map();
      for (size_t i = 0; i < block->PredecessorCount(); ++i) {
        for (auto& succ_val : succ_map) {
          // An incoming map contains no defines.
          CHECK_EQ(kInvalidVreg, succ_val.second->define_vreg);
          // Compute succ_vreg.
          int succ_vreg = succ_val.second->succ_vreg;
          if (succ_vreg == kInvalidVreg) {
            succ_vreg = succ_val.second->use_vreg;
            // Initialize succ_vreg in back propagation chain.
            succ_val.second->succ_vreg = succ_vreg;
          }
          if (succ_vreg == kInvalidVreg) continue;
          // May need to transition phi.
          if (IsPhi(succ_vreg)) {
            auto phi = GetPhi(succ_vreg);
            if (phi->definition_rpo.ToSize() == succ_index) {
              // phi definition block, transition to pred value.
              succ_vreg = phi->operands[i];
            }
          }
          // Push succ_vreg up to all predecessors.
          auto pred_rpo = block->predecessors()[i];
          auto& pred_map = outgoing_maps_[pred_rpo.ToSize()]->map();
          auto& pred_val = *pred_map.find(succ_val.first);
          if (pred_val.second->use_vreg != kInvalidVreg) {
            CHECK_EQ(succ_vreg, pred_val.second->use_vreg);
          }
          if (pred_val.second->define_vreg != kInvalidVreg) {
            CHECK_EQ(succ_vreg, pred_val.second->define_vreg);
          }
          if (pred_val.second->succ_vreg != kInvalidVreg) {
            CHECK_EQ(succ_vreg, pred_val.second->succ_vreg);
          } else {
            pred_val.second->succ_vreg = succ_vreg;
            block_ids.insert(pred_rpo.ToSize());
          }
        }
      }
    }
    // Clear uses and back links for second pass.
    for (auto operand_map : incoming_maps_) {
      for (auto& succ_val : operand_map->map()) {
        succ_val.second->incoming = nullptr;
        succ_val.second->use_vreg = kInvalidVreg;
      }
    }
  }

 private:
  OperandMap* InitializeFromFirstPredecessor(size_t block_index) {
    auto to_init = outgoing_maps_[block_index];
    CHECK(to_init->map().empty());
    auto block = sequence()->instruction_blocks()[block_index];
    if (block->predecessors().empty()) return to_init;
    size_t predecessor_index = block->predecessors()[0].ToSize();
    // Ensure not a backedge.
    CHECK(predecessor_index < block->rpo_number().ToSize());
    auto incoming = outgoing_maps_[predecessor_index];
    // Copy map and replace values.
    to_init->map() = incoming->map();
    for (auto& it : to_init->map()) {
      auto incoming = it.second;
      it.second = new (zone()) OperandMap::MapValue();
      it.second->incoming = incoming;
    }
    // Copy to incoming map for second pass.
    incoming_maps_[block_index]->map() = to_init->map();
    return to_init;
  }

  OperandMap* InitializeFromIntersection(size_t block_index) {
    return incoming_maps_[block_index];
  }

  void InitializeOperandMaps() {
    size_t block_count = sequence()->instruction_blocks().size();
    incoming_maps_.reserve(block_count);
    outgoing_maps_.reserve(block_count);
    for (size_t i = 0; i < block_count; ++i) {
      incoming_maps_.push_back(new (zone()) OperandMap(zone()));
      outgoing_maps_.push_back(new (zone()) OperandMap(zone()));
    }
  }

  void InitializePhis() {
    const size_t block_count = sequence()->instruction_blocks().size();
    for (size_t block_index = 0; block_index < block_count; ++block_index) {
      const auto block = sequence()->instruction_blocks()[block_index];
      for (auto phi : block->phis()) {
        int first_pred_vreg = phi->operands()[0];
        const PhiData* first_pred_phi = nullptr;
        if (IsPhi(first_pred_vreg)) {
          first_pred_phi = GetPhi(first_pred_vreg);
          first_pred_vreg = first_pred_phi->first_pred_vreg;
        }
        CHECK(!IsPhi(first_pred_vreg));
        auto phi_data = new (zone()) PhiData(
            block->rpo_number(), phi, first_pred_vreg, first_pred_phi, zone());
        auto res =
            phi_map_.insert(std::make_pair(phi->virtual_register(), phi_data));
        CHECK(res.second);
        phi_map_guard_.Add(phi->virtual_register());
      }
    }
  }

  typedef ZoneVector<OperandMap*> OperandMaps;
  typedef ZoneVector<PhiData*> PhiVector;

  Zone* zone() const { return zone_; }
  const InstructionSequence* sequence() const { return sequence_; }

  Zone* const zone_;
  const InstructionSequence* const sequence_;
  BitVector phi_map_guard_;
  PhiMap phi_map_;
  OperandMaps incoming_maps_;
  OperandMaps outgoing_maps_;
};

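// Verify gap moves in two passes: the first pass tracks values along
// first-predecessor chains, PropagateUsesBackwards() then pushes observed
// uses back to the blocks that supply them, and the second pass re-checks
// every use against the propagated values.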
void RegisterAllocatorVerifier::VerifyGapMoves() {
  BlockMaps block_maps(zone(), sequence());
  VerifyGapMoves(&block_maps, true);
  block_maps.PropagateUsesBackwards();
  VerifyGapMoves(&block_maps, false);
}


// Compute and verify outgoing values for every block.
void RegisterAllocatorVerifier::VerifyGapMoves(BlockMaps* block_maps,
                                               bool initial_pass) {
  const size_t block_count = sequence()->instruction_blocks().size();
  for (size_t block_index = 0; block_index < block_count; ++block_index) {
    auto current = block_maps->InitializeIncoming(block_index, initial_pass);
    const auto block = sequence()->instruction_blocks()[block_index];
    for (int instr_index = block->code_start(); instr_index < block->code_end();
         ++instr_index) {
      const auto& instr_constraint = constraints_[instr_index];
      const auto instr = instr_constraint.instruction_;
      if (instr->IsSourcePosition()) continue;
      if (instr->IsGapMoves()) {
        current->RunGapInstruction(zone(), GapInstruction::cast(instr));
        continue;
      }
      const auto op_constraints = instr_constraint.operand_constraints_;
      size_t count = 0;
      for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
        if (op_constraints[count].type_ == kImmediate) continue;
        int virtual_register = op_constraints[count].virtual_register_;
        auto op = instr->InputAt(i);
        if (!block_maps->IsPhi(virtual_register)) {
          current->Use(op, virtual_register, initial_pass);
        } else {
          auto phi = block_maps->GetPhi(virtual_register);
          current->UsePhi(op, phi, initial_pass);
        }
      }
      for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
        current->Drop(instr->TempAt(i));
      }
      if (instr->IsCall()) {
        current->DropRegisters(config());
      }
      for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
        int virtual_register = op_constraints[count].virtual_register_;
        current->Define(zone(), instr->OutputAt(i), virtual_register);
      }
    }
  }
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8