1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
10 #include "src/allocation.h"
11 #include "src/hydrogen.h"
12 #include "src/safepoint-table.h"
13 #include "src/zone-allocator.h"
// X-macro enumerating every concrete LOperand subkind as
// V(name, KIND_ENUM_VALUE, number-of-cached-operands).  It stamps out
// the Is##name() predicates in LOperand and the L##name typedefs of
// LSubKindOperand below; the third column sizes each subkind's
// statically allocated operand cache (see LSubKindOperand::Create).
18 #define LITHIUM_OPERAND_LIST(V) \
19 V(ConstantOperand, CONSTANT_OPERAND, 128) \
20 V(StackSlot, STACK_SLOT, 128) \
21 V(DoubleStackSlot, DOUBLE_STACK_SLOT, 128) \
22 V(Float32x4StackSlot, FLOAT32x4_STACK_SLOT, 128) \
23 V(Float64x2StackSlot, FLOAT64x2_STACK_SLOT, 128) \
24 V(Int32x4StackSlot, INT32x4_STACK_SLOT, 128) \
25 V(Register, REGISTER, 16) \
26 V(DoubleRegister, DOUBLE_REGISTER, 16) \
27 V(Float32x4Register, FLOAT32x4_REGISTER, 16) \
28 V(Float64x2Register, FLOAT64x2_REGISTER, 16) \
29 V(Int32x4Register, INT32x4_REGISTER, 16)
// Base class for all Lithium operands.  The operand's Kind and index
// are packed into a single bit field (value_): the low kKindFieldWidth
// bits hold the Kind, the remaining (signed) high bits hold the index.
// NOTE(review): elided view -- the Kind enum and several closing braces
// fall on lines not shown here.
31 class LOperand : public ZoneObject {
// Default-constructed operand is INVALID with index 0.
49 LOperand() : value_(KindField::encode(INVALID)) { }
51 Kind kind() const { return KindField::decode(value_); }
// Arithmetic right shift preserves the sign, so indices may be negative
// (e.g. fixed stack slots).
52 int index() const { return static_cast<int>(value_) >> kKindFieldWidth; }
// Generates one Is##name() predicate per LITHIUM_OPERAND_LIST entry,
// plus IsUnallocated()/IsIgnored() below.
53 #define LITHIUM_OPERAND_PREDICATE(name, type, number) \
54 bool Is##name() const { return kind() == type; }
55 LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_PREDICATE)
56 LITHIUM_OPERAND_PREDICATE(Unallocated, UNALLOCATED, 0)
57 LITHIUM_OPERAND_PREDICATE(Ignored, INVALID, 0)
58 #undef LITHIUM_OPERAND_PREDICATE
// True for any of the three 128-bit SIMD register kinds.
59 bool IsSIMD128Register() const {
60 return kind() == FLOAT32x4_REGISTER || kind() == FLOAT64x2_REGISTER ||
61 kind() == INT32x4_REGISTER;
// True for any of the three 128-bit SIMD stack-slot kinds.
63 bool IsSIMD128StackSlot() const {
64 return kind() == FLOAT32x4_STACK_SLOT || kind() == FLOAT64x2_STACK_SLOT ||
65 kind() == INT32x4_STACK_SLOT;
// Equal when encodings match exactly, or when both operands are SIMD128
// registers (or both SIMD128 stack slots) with the same index -- the
// three SIMD kinds are treated as interchangeable at a given index.
67 bool Equals(LOperand* other) const {
68 return value_ == other->value_ || (index() == other->index() &&
69 ((IsSIMD128Register() && other->IsSIMD128Register()) ||
70 (IsSIMD128StackSlot() && other->IsSIMD128StackSlot())));
73 void PrintTo(StringStream* stream);
// Re-encodes this operand in place as (kind, index).
74 void ConvertTo(Kind kind, int index) {
75 if (kind == REGISTER) DCHECK(index >= 0);
76 value_ = KindField::encode(kind);
77 value_ |= index << kKindFieldWidth;
78 DCHECK(this->index() == index);
81 // Calls SetUpCache()/TearDownCache() for each subclass.
82 static void SetUpCaches();
83 static void TearDownCaches();
86 static const int kKindFieldWidth = 4;
87 class KindField : public BitField<Kind, 0, kKindFieldWidth> { };
89 LOperand(Kind kind, int index) { ConvertTo(kind, index); }
// An operand the register allocator has not yet assigned.  Carries the
// allocation constraints: a BasicPolicy (FIXED_SLOT vs. extended), an
// optional ExtendedPolicy, the operand's lifetime within its
// instruction, and its virtual register id.  See the bit-encoding
// diagram below.
95 class LUnallocated : public LOperand {
102 enum ExtendedPolicy {
106 FIXED_DOUBLE_REGISTER,
108 MUST_HAVE_DOUBLE_REGISTER,
113 // Lifetime of operand inside the instruction.
115 // USED_AT_START operand is guaranteed to be live only at
116 // instruction start. Register allocator is free to assign the same register
117 // to some other operand used inside instruction (i.e. temporary or
121 // USED_AT_END operand is treated as live until the end of
122 // instruction. This means that register allocator will not reuse its
123 // register for any other operand inside instruction.
// Extended policy with the default USED_AT_END lifetime.
127 explicit LUnallocated(ExtendedPolicy policy) : LOperand(UNALLOCATED, 0) {
128 value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
129 value_ |= ExtendedPolicyField::encode(policy);
130 value_ |= LifetimeField::encode(USED_AT_END);
// FIXED_SLOT policy with a (possibly negative) slot index, written
// directly via kShift because BitField cannot hold signed values.
133 LUnallocated(BasicPolicy policy, int index) : LOperand(UNALLOCATED, 0) {
134 DCHECK(policy == FIXED_SLOT);
135 value_ |= BasicPolicyField::encode(policy);
136 value_ |= index << FixedSlotIndexField::kShift;
137 DCHECK(this->fixed_slot_index() == index);
// FIXED_REGISTER / FIXED_DOUBLE_REGISTER policy with the register's
// index; lifetime defaults to USED_AT_END.
140 LUnallocated(ExtendedPolicy policy, int index) : LOperand(UNALLOCATED, 0) {
141 DCHECK(policy == FIXED_REGISTER || policy == FIXED_DOUBLE_REGISTER);
142 value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
143 value_ |= ExtendedPolicyField::encode(policy);
144 value_ |= LifetimeField::encode(USED_AT_END);
145 value_ |= FixedRegisterField::encode(index);
// Extended policy with an explicit lifetime.
148 LUnallocated(ExtendedPolicy policy, Lifetime lifetime)
149 : LOperand(UNALLOCATED, 0) {
150 value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
151 value_ |= ExtendedPolicyField::encode(policy);
152 value_ |= LifetimeField::encode(lifetime);
// Copy with the policy relaxed to ANY but the same virtual register.
155 LUnallocated* CopyUnconstrained(Zone* zone) {
156 LUnallocated* result = new(zone) LUnallocated(ANY);
157 result->set_virtual_register(virtual_register());
// Checked downcast; valid only for UNALLOCATED operands.
161 static LUnallocated* cast(LOperand* op) {
162 DCHECK(op->IsUnallocated());
163 return reinterpret_cast<LUnallocated*>(op);
166 // The encoding used for LUnallocated operands depends on the policy that is
167 // stored within the operand. The FIXED_SLOT policy uses a compact encoding
168 // because it accommodates a larger payload.
170 // For FIXED_SLOT policy:
171 // +-------------------------------------------+
172 // | slot_index | vreg | 0 | 0001 |
173 // +-------------------------------------------+
175 // For all other (extended) policies:
176 // +-------------------------------------------+
177 // | reg_index | L | PPP | vreg | 1 | 0001 | L ... Lifetime
178 // +-------------------------------------------+ P ... Policy
180 // The slot index is a signed value which requires us to decode it manually
181 // instead of using the BitField utility class.
183 // The superclass has a KindField.
184 STATIC_ASSERT(kKindFieldWidth == 4);
186 // BitFields for all unallocated operands.
187 class BasicPolicyField : public BitField<BasicPolicy, 4, 1> {};
188 class VirtualRegisterField : public BitField<unsigned, 5, 18> {};
190 // BitFields specific to BasicPolicy::FIXED_SLOT.
191 class FixedSlotIndexField : public BitField<int, 23, 9> {};
193 // BitFields specific to BasicPolicy::EXTENDED_POLICY.
194 class ExtendedPolicyField : public BitField<ExtendedPolicy, 23, 3> {};
195 class LifetimeField : public BitField<Lifetime, 26, 1> {};
196 class FixedRegisterField : public BitField<int, 27, 5> {};
198 static const int kMaxVirtualRegisters = VirtualRegisterField::kMax + 1;
199 static const int kFixedSlotIndexWidth = FixedSlotIndexField::kSize;
// Slot indices are signed 9-bit values: [-256, 255].
200 static const int kMaxFixedSlotIndex = (1 << (kFixedSlotIndexWidth - 1)) - 1;
201 static const int kMinFixedSlotIndex = -(1 << (kFixedSlotIndexWidth - 1));
203 // Predicates for the operand policy.
204 bool HasAnyPolicy() const {
205 return basic_policy() == EXTENDED_POLICY &&
206 extended_policy() == ANY;
208 bool HasFixedPolicy() const {
209 return basic_policy() == FIXED_SLOT ||
210 extended_policy() == FIXED_REGISTER ||
211 extended_policy() == FIXED_DOUBLE_REGISTER;
213 bool HasRegisterPolicy() const {
214 return basic_policy() == EXTENDED_POLICY && (
215 extended_policy() == WRITABLE_REGISTER ||
216 extended_policy() == MUST_HAVE_REGISTER);
218 bool HasDoubleRegisterPolicy() const {
219 return basic_policy() == EXTENDED_POLICY &&
220 extended_policy() == MUST_HAVE_DOUBLE_REGISTER;
222 bool HasSameAsInputPolicy() const {
223 return basic_policy() == EXTENDED_POLICY &&
224 extended_policy() == SAME_AS_FIRST_INPUT;
226 bool HasFixedSlotPolicy() const {
227 return basic_policy() == FIXED_SLOT;
229 bool HasFixedRegisterPolicy() const {
230 return basic_policy() == EXTENDED_POLICY &&
231 extended_policy() == FIXED_REGISTER;
233 bool HasFixedDoubleRegisterPolicy() const {
234 return basic_policy() == EXTENDED_POLICY &&
235 extended_policy() == FIXED_DOUBLE_REGISTER;
237 bool HasWritableRegisterPolicy() const {
238 return basic_policy() == EXTENDED_POLICY &&
239 extended_policy() == WRITABLE_REGISTER;
242 // [basic_policy]: Distinguish between FIXED_SLOT and all other policies.
243 BasicPolicy basic_policy() const {
244 return BasicPolicyField::decode(value_);
247 // [extended_policy]: Only for non-FIXED_SLOT. The finer-grained policy.
248 ExtendedPolicy extended_policy() const {
249 DCHECK(basic_policy() == EXTENDED_POLICY);
250 return ExtendedPolicyField::decode(value_);
253 // [fixed_slot_index]: Only for FIXED_SLOT.
// Manual arithmetic shift preserves the index's sign (see note above).
254 int fixed_slot_index() const {
255 DCHECK(HasFixedSlotPolicy());
256 return static_cast<int>(value_) >> FixedSlotIndexField::kShift;
259 // [fixed_register_index]: Only for FIXED_REGISTER or FIXED_DOUBLE_REGISTER.
260 int fixed_register_index() const {
261 DCHECK(HasFixedRegisterPolicy() || HasFixedDoubleRegisterPolicy());
262 return FixedRegisterField::decode(value_);
265 // [virtual_register]: The virtual register ID for this operand.
266 int virtual_register() const {
267 return VirtualRegisterField::decode(value_);
269 void set_virtual_register(unsigned id) {
270 value_ = VirtualRegisterField::update(value_, id);
273 // [lifetime]: Only for non-FIXED_SLOT.
274 bool IsUsedAtStart() {
275 DCHECK(basic_policy() == EXTENDED_POLICY);
276 return LifetimeField::decode(value_) == USED_AT_START;
// A single (source -> destination) move; the element type of
// LParallelMove, manipulated by the gap resolver.
281 class LMoveOperands V8_FINAL BASE_EMBEDDED {
283 LMoveOperands(LOperand* source, LOperand* destination)
284 : source_(source), destination_(destination) {
287 LOperand* source() const { return source_; }
288 void set_source(LOperand* operand) { source_ = operand; }
290 LOperand* destination() const { return destination_; }
291 void set_destination(LOperand* operand) { destination_ = operand; }
293 // The gap resolver marks moves as "in-progress" by clearing the
294 // destination (but not the source).
295 bool IsPending() const {
296 return destination_ == NULL && source_ != NULL;
299 // True if this move has not been eliminated and its source equals the
// given operand -- i.e. this move must happen before that operand may
// be overwritten.
300 bool Blocks(LOperand* operand) const {
301 return !IsEliminated() && source()->Equals(operand);
304 // A move is redundant if it's been eliminated, if its source and
305 // destination are the same, or if its destination is unneeded or constant.
306 bool IsRedundant() const {
307 return IsEliminated() || source_->Equals(destination_) || IsIgnored() ||
308 (destination_ != NULL && destination_->IsConstantOperand());
311 bool IsIgnored() const {
312 return destination_ != NULL && destination_->IsIgnored();
315 // We clear both operands to indicate move that's been eliminated.
316 void Eliminate() { source_ = destination_ = NULL; }
// Invariant: a NULL source implies a NULL destination.
317 bool IsEliminated() const {
318 DCHECK(source_ != NULL || destination_ == NULL);
319 return source_ == NULL;
324 LOperand* destination_;
// Concrete operand class for a single Kind.  The first
// kNumCachedOperands indices are served from a statically allocated
// cache (populated by SetUpCache()); higher indices are zone-allocated.
328 template<LOperand::Kind kOperandKind, int kNumCachedOperands>
329 class LSubKindOperand V8_FINAL : public LOperand {
331 static LSubKindOperand* Create(int index, Zone* zone) {
333 if (index < kNumCachedOperands) return &cache[index];
334 return new(zone) LSubKindOperand(index);
// Checked downcast; valid only when op->kind() == kOperandKind.
337 static LSubKindOperand* cast(LOperand* op) {
338 DCHECK(op->kind() == kOperandKind);
339 return reinterpret_cast<LSubKindOperand*>(op);
// Allocate / release the static cache (driven by LOperand::SetUpCaches).
342 static void SetUpCache();
343 static void TearDownCache();
346 static LSubKindOperand* cache;
348 LSubKindOperand() : LOperand() { }
349 explicit LSubKindOperand(int index) : LOperand(kOperandKind, index) { }
// Instantiates a typedef L<name> (e.g. LConstantOperand, LRegister) for
// each operand subkind listed in LITHIUM_OPERAND_LIST.
353 #define LITHIUM_TYPEDEF_SUBKIND_OPERAND_CLASS(name, type, number) \
354 typedef LSubKindOperand<LOperand::type, number> L##name;
355 LITHIUM_OPERAND_LIST(LITHIUM_TYPEDEF_SUBKIND_OPERAND_CLASS)
356 #undef LITHIUM_TYPEDEF_SUBKIND_OPERAND_CLASS
// A set of moves to be performed together at a gap position; resolved
// into a sequential order by the gap resolver.
359 class LParallelMove V8_FINAL : public ZoneObject {
361 explicit LParallelMove(Zone* zone) : move_operands_(4, zone) { }
363 void AddMove(LOperand* from, LOperand* to, Zone* zone) {
364 move_operands_.Add(LMoveOperands(from, to), zone);
// True if every contained move is redundant (defined out of line).
367 bool IsRedundant() const;
369 ZoneList<LMoveOperands>* move_operands() { return &move_operands_; }
371 void PrintDataTo(StringStream* stream) const;
374 ZoneList<LMoveOperands> move_operands_;
// Records which operands hold tagged pointers at a given lithium
// position (for GC safepoints), plus operands later discovered to be
// untagged and pending removal.
378 class LPointerMap V8_FINAL : public ZoneObject {
380 explicit LPointerMap(Zone* zone)
381 : pointer_operands_(8, zone),
382 untagged_operands_(0, zone),
383 lithium_position_(-1) { }
// Removes every recorded untagged operand from the pointer list, then
// returns the remaining (normalized) pointer operands.
385 const ZoneList<LOperand*>* GetNormalizedOperands() {
386 for (int i = 0; i < untagged_operands_.length(); ++i) {
387 RemovePointer(untagged_operands_[i]);
389 untagged_operands_.Clear();
390 return &pointer_operands_;
392 int lithium_position() const { return lithium_position_; }
// May be set only once (initial value is -1).
394 void set_lithium_position(int pos) {
395 DCHECK(lithium_position_ == -1);
396 lithium_position_ = pos;
399 void RecordPointer(LOperand* op, Zone* zone);
400 void RemovePointer(LOperand* op);
401 void RecordUntagged(LOperand* op, Zone* zone);
402 void PrintTo(StringStream* stream);
405 ZoneList<LOperand*> pointer_operands_;
406 ZoneList<LOperand*> untagged_operands_;
407 int lithium_position_;
// Deoptimization environment: captures everything needed to rebuild an
// unoptimized frame (closure, frame type, parameters, locals,
// expression stack, de-materialized objects) if execution deoptimizes
// at this point.  Inlined frames chain through outer().
411 class LEnvironment V8_FINAL : public ZoneObject {
413 LEnvironment(Handle<JSFunction> closure,
414 FrameType frame_type,
420 HEnterInlined* entry,
423 frame_type_(frame_type),
424 arguments_stack_height_(argument_count),
// Not part of the deopt table until Register() is called.
425 deoptimization_index_(Safepoint::kNoDeoptimizationIndex),
426 translation_index_(-1),
428 translation_size_(value_count),
429 parameter_count_(parameter_count),
431 values_(value_count, zone),
432 is_tagged_(value_count, zone),
433 is_uint32_(value_count, zone),
434 object_mapping_(0, zone),
438 has_been_used_(false) { }
440 Handle<JSFunction> closure() const { return closure_; }
441 FrameType frame_type() const { return frame_type_; }
442 int arguments_stack_height() const { return arguments_stack_height_; }
443 int deoptimization_index() const { return deoptimization_index_; }
444 int translation_index() const { return translation_index_; }
445 BailoutId ast_id() const { return ast_id_; }
446 int translation_size() const { return translation_size_; }
447 int parameter_count() const { return parameter_count_; }
448 int pc_offset() const { return pc_offset_; }
449 const ZoneList<LOperand*>* values() const { return &values_; }
450 LEnvironment* outer() const { return outer_; }
451 HEnterInlined* entry() { return entry_; }
452 Zone* zone() const { return zone_; }
454 bool has_been_used() const { return has_been_used_; }
455 void set_has_been_used() { has_been_used_ = true; }
// Appends a value, remembering by position whether it is smi/tagged or
// uint32 so the translation can encode it correctly.
457 void AddValue(LOperand* operand,
458 Representation representation,
460 values_.Add(operand, zone());
461 if (representation.IsSmiOrTagged()) {
463 is_tagged_.Add(values_.length() - 1, zone());
467 is_uint32_.Add(values_.length() - 1, zone());
471 bool HasTaggedValueAt(int index) const {
472 return is_tagged_.Contains(index);
475 bool HasUint32ValueAt(int index) const {
476 return is_uint32_.Contains(index);
// Records a de-materialized object of the given length; see the
// LengthOrDupeField / IsArgumentsField / IsDuplicateField encoding.
479 void AddNewObject(int length, bool is_arguments) {
480 uint32_t encoded = LengthOrDupeField::encode(length) |
481 IsArgumentsField::encode(is_arguments) |
482 IsDuplicateField::encode(false);
483 object_mapping_.Add(encoded, zone());
// Records that an object duplicates the object at index dupe_of (the
// length field is reused to store the duplicated index).
486 void AddDuplicateObject(int dupe_of) {
487 uint32_t encoded = LengthOrDupeField::encode(dupe_of) |
488 IsDuplicateField::encode(true);
489 object_mapping_.Add(encoded, zone());
492 int ObjectDuplicateOfAt(int index) {
493 DCHECK(ObjectIsDuplicateAt(index));
494 return LengthOrDupeField::decode(object_mapping_[index]);
497 int ObjectLengthAt(int index) {
498 DCHECK(!ObjectIsDuplicateAt(index));
499 return LengthOrDupeField::decode(object_mapping_[index]);
502 bool ObjectIsArgumentsAt(int index) {
503 DCHECK(!ObjectIsDuplicateAt(index));
504 return IsArgumentsField::decode(object_mapping_[index]);
507 bool ObjectIsDuplicateAt(int index) {
508 return IsDuplicateField::decode(object_mapping_[index]);
// Fills in this environment's deoptimization-table entry; must be
// called at most once (checked via HasBeenRegistered()).
511 void Register(int deoptimization_index,
512 int translation_index,
514 DCHECK(!HasBeenRegistered());
515 deoptimization_index_ = deoptimization_index;
516 translation_index_ = translation_index;
517 pc_offset_ = pc_offset;
519 bool HasBeenRegistered() const {
520 return deoptimization_index_ != Safepoint::kNoDeoptimizationIndex;
523 void PrintTo(StringStream* stream);
525 // Marker value indicating a de-materialized object.
526 static LOperand* materialization_marker() { return NULL; }
528 // Encoding used for the object_mapping map below.
529 class LengthOrDupeField : public BitField<int, 0, 30> { };
530 class IsArgumentsField : public BitField<bool, 30, 1> { };
531 class IsDuplicateField : public BitField<bool, 31, 1> { };
534 Handle<JSFunction> closure_;
535 FrameType frame_type_;
536 int arguments_stack_height_;
537 int deoptimization_index_;
538 int translation_index_;
540 int translation_size_;
541 int parameter_count_;
544 // Value array: [parameters] [locals] [expression stack] [de-materialized].
545 // |>--------- translation_size ---------<|
546 ZoneList<LOperand*> values_;
547 GrowableBitVector is_tagged_;
548 GrowableBitVector is_uint32_;
550 // Map with encoded information about materialization_marker operands.
551 ZoneList<uint32_t> object_mapping_;
553 LEnvironment* outer_;
554 HEnterInlined* entry_;
560 // Iterates over the non-null, non-constant operands in an environment.
561 class ShallowIterator V8_FINAL BASE_EMBEDDED {
// A NULL environment yields an immediately-done iterator (limit_ 0).
563 explicit ShallowIterator(LEnvironment* env)
565 limit_(env != NULL ? env->values()->length() : 0),
570 bool Done() { return current_ >= limit_; }
572 LOperand* Current() {
574 DCHECK(env_->values()->at(current_) != NULL);
575 return env_->values()->at(current_);
584 LEnvironment* env() { return env_; }
// NULL and constant operands are uninteresting and skipped.
587 bool ShouldSkip(LOperand* op) {
588 return op == NULL || op->IsConstantOperand();
591 // Skip until something interesting, beginning with and including current_.
592 void SkipUninteresting() {
593 while (current_ < limit_ && ShouldSkip(env_->values()->at(current_))) {
604 // Iterator for non-null, non-constant operands incl. outer environments.
605 class DeepIterator V8_FINAL BASE_EMBEDDED {
607 explicit DeepIterator(LEnvironment* env)
608 : current_iterator_(env) {
612 bool Done() { return current_iterator_.Done(); }
614 LOperand* Current() {
615 DCHECK(!current_iterator_.Done());
616 DCHECK(current_iterator_.Current() != NULL);
617 return current_iterator_.Current();
621 current_iterator_.Advance();
// When the current environment is exhausted, hop to its outer
// environment (if any) and continue iterating there.
626 void SkipUninteresting() {
627 while (current_iterator_.env() != NULL && current_iterator_.Done()) {
628 current_iterator_ = ShallowIterator(current_iterator_.env()->outer());
632 ShallowIterator current_iterator_;
636 class LPlatformChunk;
640 // Superclass providing data and behavior common to all the
641 // arch-specific LPlatformChunk classes.
642 class LChunk : public ZoneObject {
// Factory: builds the platform-specific chunk from a hydrogen graph.
644 static LChunk* NewChunk(HGraph* graph);
646 void AddInstruction(LInstruction* instruction, HBasicBlock* block);
647 LConstantOperand* DefineConstantOperand(HConstant* constant);
648 HConstant* LookupConstant(LConstantOperand* operand) const;
649 Representation LookupLiteralRepresentation(LConstantOperand* operand) const;
651 int ParameterAt(int index);
652 int GetParameterStackSlot(int index) const;
653 int spill_slot_count() const { return spill_slot_count_; }
654 CompilationInfo* info() const { return info_; }
655 HGraph* graph() const { return graph_; }
656 Isolate* isolate() const { return graph_->isolate(); }
657 const ZoneList<LInstruction*>* instructions() const { return &instructions_; }
// Gap-instruction helpers (gaps hold the parallel moves between
// instructions); declared here, defined out of line.
658 void AddGapMove(int index, LOperand* from, LOperand* to);
659 LGap* GetGapAt(int index) const;
660 bool IsGapAt(int index) const;
661 int NearestGapPos(int index) const;
662 void MarkEmptyBlocks();
663 const ZoneList<LPointerMap*>* pointer_maps() const { return &pointer_maps_; }
664 LLabel* GetLabel(int block_id) const;
665 int LookupDestination(int block_id) const;
666 Label* GetAssemblyLabel(int block_id) const;
668 const ZoneList<Handle<JSFunction> >* inlined_closures() const {
669 return &inlined_closures_;
672 void AddInlinedClosure(Handle<JSFunction> closure) {
673 inlined_closures_.Add(closure, zone());
// Track a map whose deprecation must invalidate the generated code.
// Maps that can never be deprecated need not be tracked.
676 void AddDeprecationDependency(Handle<Map> map) {
677 DCHECK(!map->is_deprecated());
678 if (!map->CanBeDeprecated()) return;
679 DCHECK(!info_->IsStub());
680 deprecation_dependencies_.insert(map);
// Track a stable map whose transition must invalidate the generated
// code.  Maps that cannot transition need not be tracked.
683 void AddStabilityDependency(Handle<Map> map) {
684 DCHECK(map->is_stable());
685 if (!map->CanTransition()) return;
686 DCHECK(!info_->IsStub());
687 stability_dependencies_.insert(map);
690 Zone* zone() const { return info_->zone(); }
692 Handle<Code> Codegen();
694 void set_allocated_double_registers(BitVector* allocated_registers);
695 BitVector* allocated_double_registers() {
696 return allocated_double_registers_;
700 LChunk(CompilationInfo* info, HGraph* graph);
702 int spill_slot_count_;
// Zone-backed std::set of map handles for the dependency sets below;
// consumed by CommitDependencies().
705 typedef std::less<Handle<Map> > MapLess;
706 typedef zone_allocator<Handle<Map> > MapAllocator;
707 typedef std::set<Handle<Map>, MapLess, MapAllocator> MapSet;
709 void CommitDependencies(Handle<Code> code) const;
711 CompilationInfo* info_;
712 HGraph* const graph_;
713 BitVector* allocated_double_registers_;
714 ZoneList<LInstruction*> instructions_;
715 ZoneList<LPointerMap*> pointer_maps_;
716 ZoneList<Handle<JSFunction> > inlined_closures_;
717 MapSet deprecation_dependencies_;
718 MapSet stability_dependencies_;
// Shared base for the arch-specific LChunkBuilder classes; provides the
// environment-building helpers used for deoptimization support.
722 class LChunkBuilderBase BASE_EMBEDDED {
724 explicit LChunkBuilderBase(Zone* zone)
725 : argument_count_(0),
728 virtual ~LChunkBuilderBase() { }
731 // An input operand in register, stack slot or a constant operand.
732 // Will not be moved to a register even if one is freely available.
733 virtual MUST_USE_RESULT LOperand* UseAny(HValue* value) = 0;
// Translates a hydrogen environment (and, via outer chains, its
// enclosing environments) into an LEnvironment.
735 LEnvironment* CreateEnvironment(HEnvironment* hydrogen_env,
736 int* argument_index_accumulator,
737 ZoneList<HValue*>* objects_to_materialize);
738 void AddObjectToMaterialize(HValue* value,
739 ZoneList<HValue*>* objects_to_materialize,
740 LEnvironment* result);
742 Zone* zone() const { return zone_; }
// NOTE(review): defined elsewhere; presumably maps a (spill) slot index
// to its stack-frame offset -- confirm against the .cc file.
751 int StackSlotOffset(int index);
// Classifies a tagged value about to be untagged to a number: known to
// be a smi, or possibly any tagged (heap-number) value.
753 enum NumberUntagDMode {
754 NUMBER_CANDIDATE_IS_SMI,
755 NUMBER_CANDIDATE_IS_ANY_TAGGED
// Scoped compilation-phase helper for lithium passes, tied to the
// chunk's CompilationInfo via the CompilationPhase base.
759 class LPhase : public CompilationPhase {
761 LPhase(const char* name, LChunk* chunk)
762 : CompilationPhase(name, chunk->info()),
769 DISALLOW_COPY_AND_ASSIGN(LPhase);
773 // A register-allocator view of a Lithium instruction. It contains the id of
774 // the output operand and a list of input operand uses.
// NOTE(review): enumerator of an enum whose opening declaration falls
// outside this view -- confirm against the full header.
776 UNALLOCATED_REGISTERS,
784 // Iterator for non-null temp operands.
785 class TempIterator BASE_EMBEDDED {
787 inline explicit TempIterator(LInstruction* instr);
789 inline LOperand* Current();
790 inline void Advance();
// Advances past NULL temps; implementation lives in lithium-inl.
793 inline void SkipUninteresting();
794 LInstruction* instr_;
800 // Iterator for non-constant input operands.
801 class InputIterator BASE_EMBEDDED {
803 inline explicit InputIterator(LInstruction* instr);
805 inline LOperand* Current();
806 inline void Advance();
// Skips uninteresting (constant) inputs; defined out of line.
809 inline void SkipUninteresting();
810 LInstruction* instr_;
// Iterates all operand uses of an instruction by combining an
// InputIterator (inputs) with a DeepIterator (environment values).
816 class UseIterator BASE_EMBEDDED {
818 inline explicit UseIterator(LInstruction* instr);
820 inline LOperand* Current();
821 inline void Advance();
824 InputIterator input_iterator_;
825 DeepIterator env_iterator_;
830 } } // namespace v8::internal
832 #endif // V8_LITHIUM_H_