1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
10 #include "src/allocation.h"
11 #include "src/bailout-reason.h"
12 #include "src/hydrogen.h"
13 #include "src/safepoint-table.h"
14 #include "src/zone-allocator.h"
// X-macro enumerating the allocated LOperand subkinds as
// V(class-name suffix, Kind enum value, number of statically cached operands).
// Expanded below to generate the Is##name() predicates on LOperand and the
// L##name typedefs of LSubKindOperand.
19 #define LITHIUM_OPERAND_LIST(V) \
20 V(ConstantOperand, CONSTANT_OPERAND, 128) \
21 V(StackSlot, STACK_SLOT, 128) \
22 V(DoubleStackSlot, DOUBLE_STACK_SLOT, 128) \
23 V(Register, REGISTER, 16) \
24 V(DoubleRegister, DOUBLE_REGISTER, 16)
// Base class for all Lithium operands. The operand's state is packed into a
// single word (value_): the Kind lives in the low kKindFieldWidth bits and
// the (signed) index occupies the remaining high bits.
26 class LOperand : public ZoneObject {
  // Default constructor yields an INVALID operand with index 0.
38 LOperand() : value_(KindField::encode(INVALID)) { }
40 Kind kind() const { return KindField::decode(value_); }
  // Arithmetic shift recovers the signed index stored above the kind bits.
41 int index() const { return static_cast<int>(value_) >> kKindFieldWidth; }
  // Generates IsConstantOperand(), IsStackSlot(), IsRegister(), etc., one
  // predicate per subkind, plus IsUnallocated() and IsIgnored() (the latter
  // tests the INVALID kind).
42 #define LITHIUM_OPERAND_PREDICATE(name, type, number) \
43 bool Is##name() const { return kind() == type; }
44 LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_PREDICATE)
45 LITHIUM_OPERAND_PREDICATE(Unallocated, UNALLOCATED, 0)
46 LITHIUM_OPERAND_PREDICATE(Ignored, INVALID, 0)
47 #undef LITHIUM_OPERAND_PREDICATE
  // Two operands are equal iff their packed kind+index words are identical.
48 bool Equals(LOperand* other) const { return value_ == other->value_; }
50 void PrintTo(StringStream* stream);
  // Re-encodes this operand in place with a new kind and index. Register
  // indices must be non-negative; the trailing DCHECK verifies the index
  // round-trips through the encoding.
51 void ConvertTo(Kind kind, int index) {
52 if (kind == REGISTER) DCHECK(index >= 0);
53 value_ = KindField::encode(kind);
54 value_ |= index << kKindFieldWidth;
55 DCHECK(this->index() == index);
58 // Calls SetUpCache()/TearDownCache() for each subclass.
59 static void SetUpCaches();
60 static void TearDownCaches();
63 static const int kKindFieldWidth = 3;
64 class KindField : public BitField<Kind, 0, kKindFieldWidth> { };
  // Convenience constructor used by subclasses: encodes kind+index directly.
66 LOperand(Kind kind, int index) { ConvertTo(kind, index); }
// An operand that has not yet been assigned a location by the register
// allocator. Carries an allocation policy (basic + extended), a virtual
// register id, and a lifetime, all bit-packed into the inherited value_.
72 class LUnallocated : public LOperand {
83 FIXED_DOUBLE_REGISTER,
85 MUST_HAVE_DOUBLE_REGISTER,
90 // Lifetime of operand inside the instruction.
92 // USED_AT_START operand is guaranteed to be live only at
93 // instruction start. Register allocator is free to assign the same register
94 // to some other operand used inside instruction (i.e. temporary or
98 // USED_AT_END operand is treated as live until the end of
99 // instruction. This means that register allocator will not reuse its
100 // register for any other operand inside instruction.
  // Extended policy with default USED_AT_END lifetime.
104 explicit LUnallocated(ExtendedPolicy policy) : LOperand(UNALLOCATED, 0) {
105 value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
106 value_ |= ExtendedPolicyField::encode(policy);
107 value_ |= LifetimeField::encode(USED_AT_END);
  // Basic policy with a fixed slot index; only FIXED_SLOT is accepted.
  // The index is stored with a raw shift (not a BitField) because it is
  // signed — see the encoding comment below.
110 LUnallocated(BasicPolicy policy, int index) : LOperand(UNALLOCATED, 0) {
111 DCHECK(policy == FIXED_SLOT);
112 value_ |= BasicPolicyField::encode(policy);
113 value_ |= index << FixedSlotIndexField::kShift;
114 DCHECK(this->fixed_slot_index() == index);
  // Extended policy pinned to a specific (double) register index, with
  // default USED_AT_END lifetime.
117 LUnallocated(ExtendedPolicy policy, int index) : LOperand(UNALLOCATED, 0) {
118 DCHECK(policy == FIXED_REGISTER || policy == FIXED_DOUBLE_REGISTER);
119 value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
120 value_ |= ExtendedPolicyField::encode(policy);
121 value_ |= LifetimeField::encode(USED_AT_END);
122 value_ |= FixedRegisterField::encode(index);
  // Extended policy with an explicit lifetime.
125 LUnallocated(ExtendedPolicy policy, Lifetime lifetime)
126 : LOperand(UNALLOCATED, 0) {
127 value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
128 value_ |= ExtendedPolicyField::encode(policy);
129 value_ |= LifetimeField::encode(lifetime);
  // Makes a fresh ANY-policy copy that keeps only this operand's virtual
  // register id, dropping all other constraints.
132 LUnallocated* CopyUnconstrained(Zone* zone) {
133 LUnallocated* result = new(zone) LUnallocated(ANY);
134 result->set_virtual_register(virtual_register());
  // Checked downcast: asserts the operand really is UNALLOCATED.
138 static LUnallocated* cast(LOperand* op) {
139 DCHECK(op->IsUnallocated());
140 return reinterpret_cast<LUnallocated*>(op);
143 // The encoding used for LUnallocated operands depends on the policy that is
144 // stored within the operand. The FIXED_SLOT policy uses a compact encoding
145 // because it accommodates a larger pay-load.
147 // For FIXED_SLOT policy:
148 // +------------------------------------------+
149 // | slot_index | vreg | 0 | 001 |
150 // +------------------------------------------+
152 // For all other (extended) policies:
153 // +------------------------------------------+
154 // | reg_index | L | PPP | vreg | 1 | 001 | L ... Lifetime
155 // +------------------------------------------+ P ... Policy
157 // The slot index is a signed value which requires us to decode it manually
158 // instead of using the BitField utility class.
160 // The superclass has a KindField.
161 STATIC_ASSERT(kKindFieldWidth == 3);
163 // BitFields for all unallocated operands.
164 class BasicPolicyField : public BitField<BasicPolicy, 3, 1> {};
165 class VirtualRegisterField : public BitField<unsigned, 4, 18> {};
167 // BitFields specific to BasicPolicy::FIXED_SLOT.
168 class FixedSlotIndexField : public BitField<int, 22, 10> {};
170 // BitFields specific to BasicPolicy::EXTENDED_POLICY.
171 class ExtendedPolicyField : public BitField<ExtendedPolicy, 22, 3> {};
172 class LifetimeField : public BitField<Lifetime, 25, 1> {};
173 class FixedRegisterField : public BitField<int, 26, 6> {};
175 static const int kMaxVirtualRegisters = VirtualRegisterField::kMax + 1;
176 static const int kFixedSlotIndexWidth = FixedSlotIndexField::kSize;
// Fixed slot indices are signed, hence the asymmetric two's-complement range.
177 static const int kMaxFixedSlotIndex = (1 << (kFixedSlotIndexWidth - 1)) - 1;
178 static const int kMinFixedSlotIndex = -(1 << (kFixedSlotIndexWidth - 1));
180 // Predicates for the operand policy.
181 bool HasAnyPolicy() const {
182 return basic_policy() == EXTENDED_POLICY &&
183 extended_policy() == ANY;
  // Note: relies on || short-circuit — extended_policy() is only read when
  // basic_policy() is EXTENDED_POLICY (its DCHECK would fire otherwise).
185 bool HasFixedPolicy() const {
186 return basic_policy() == FIXED_SLOT ||
187 extended_policy() == FIXED_REGISTER ||
188 extended_policy() == FIXED_DOUBLE_REGISTER;
190 bool HasRegisterPolicy() const {
191 return basic_policy() == EXTENDED_POLICY && (
192 extended_policy() == WRITABLE_REGISTER ||
193 extended_policy() == MUST_HAVE_REGISTER);
195 bool HasDoubleRegisterPolicy() const {
196 return basic_policy() == EXTENDED_POLICY &&
197 extended_policy() == MUST_HAVE_DOUBLE_REGISTER;
199 bool HasSameAsInputPolicy() const {
200 return basic_policy() == EXTENDED_POLICY &&
201 extended_policy() == SAME_AS_FIRST_INPUT;
203 bool HasFixedSlotPolicy() const {
204 return basic_policy() == FIXED_SLOT;
206 bool HasFixedRegisterPolicy() const {
207 return basic_policy() == EXTENDED_POLICY &&
208 extended_policy() == FIXED_REGISTER;
210 bool HasFixedDoubleRegisterPolicy() const {
211 return basic_policy() == EXTENDED_POLICY &&
212 extended_policy() == FIXED_DOUBLE_REGISTER;
214 bool HasWritableRegisterPolicy() const {
215 return basic_policy() == EXTENDED_POLICY &&
216 extended_policy() == WRITABLE_REGISTER;
219 // [basic_policy]: Distinguish between FIXED_SLOT and all other policies.
220 BasicPolicy basic_policy() const {
221 return BasicPolicyField::decode(value_);
224 // [extended_policy]: Only for non-FIXED_SLOT. The finer-grained policy.
225 ExtendedPolicy extended_policy() const {
226 DCHECK(basic_policy() == EXTENDED_POLICY);
227 return ExtendedPolicyField::decode(value_);
230 // [fixed_slot_index]: Only for FIXED_SLOT.
  // Decoded with an arithmetic shift (not the BitField) to preserve the sign.
231 int fixed_slot_index() const {
232 DCHECK(HasFixedSlotPolicy());
233 return static_cast<int>(value_) >> FixedSlotIndexField::kShift;
236 // [fixed_register_index]: Only for FIXED_REGISTER or FIXED_DOUBLE_REGISTER.
237 int fixed_register_index() const {
238 DCHECK(HasFixedRegisterPolicy() || HasFixedDoubleRegisterPolicy());
239 return FixedRegisterField::decode(value_);
242 // [virtual_register]: The virtual register ID for this operand.
243 int virtual_register() const {
244 return VirtualRegisterField::decode(value_);
246 void set_virtual_register(unsigned id) {
247 value_ = VirtualRegisterField::update(value_, id);
250 // [lifetime]: Only for non-FIXED_SLOT.
251 bool IsUsedAtStart() {
252 DCHECK(basic_policy() == EXTENDED_POLICY);
253 return LifetimeField::decode(value_) == USED_AT_START;
  // The +1 accounts for the receiver being counted alongside the declared
  // parameters; the limit is derived from the signed fixed-slot index range.
256 static bool TooManyParameters(int num_parameters) {
257 const int parameter_limit = -LUnallocated::kMinFixedSlotIndex;
258 return num_parameters + 1 > parameter_limit;
261 static bool TooManyParametersOrStackSlots(int num_parameters,
262 int num_stack_slots) {
263 const int locals_limit = LUnallocated::kMaxFixedSlotIndex;
264 return num_parameters + 1 + num_stack_slots > locals_limit;
// A single source->destination move, as manipulated by the gap resolver.
// Special states: destination_ == NULL marks an in-progress ("pending")
// move; both NULL marks an eliminated move.
269 class LMoveOperands final BASE_EMBEDDED {
271 LMoveOperands(LOperand* source, LOperand* destination)
272 : source_(source), destination_(destination) {
275 LOperand* source() const { return source_; }
276 void set_source(LOperand* operand) { source_ = operand; }
278 LOperand* destination() const { return destination_; }
279 void set_destination(LOperand* operand) { destination_ = operand; }
281 // The gap resolver marks moves as "in-progress" by clearing the
282 // destination (but not the source).
283 bool IsPending() const {
284 return destination_ == NULL && source_ != NULL;
287 // True if this move still reads |operand| as its source: the gap resolver
288 // must not overwrite |operand| (i.e. perform a move into it) until this
289 // move has been performed.
288 bool Blocks(LOperand* operand) const {
289 return !IsEliminated() && source()->Equals(operand);
292 // A move is redundant if it's been eliminated, if its source and
293 // destination are the same, or if its destination is unneeded or constant.
294 bool IsRedundant() const {
295 return IsEliminated() || source_->Equals(destination_) || IsIgnored() ||
296 (destination_ != NULL && destination_->IsConstantOperand());
299 bool IsIgnored() const {
300 return destination_ != NULL && destination_->IsIgnored();
303 // We clear both operands to indicate a move that's been eliminated.
304 void Eliminate() { source_ = destination_ = NULL; }
  // The DCHECK enforces the state invariant: a cleared source implies a
  // cleared destination (never destination-only).
305 bool IsEliminated() const {
306 DCHECK(source_ != NULL || destination_ == NULL);
307 return source_ == NULL;
312 LOperand* destination_;
// Concrete operand of a single Kind. Instances with small indices
// (< kNumCachedOperands) are served from a shared static cache; larger
// indices are zone-allocated on demand.
316 template <LOperand::Kind kOperandKind, int kNumCachedOperands>
317 class LSubKindOperand final : public LOperand {
319 static LSubKindOperand* Create(int index, Zone* zone) {
321 if (index < kNumCachedOperands) return &cache[index];
322 return new(zone) LSubKindOperand(index);
  // Checked downcast: asserts the operand has this template's kind.
325 static LSubKindOperand* cast(LOperand* op) {
326 DCHECK(op->kind() == kOperandKind);
327 return reinterpret_cast<LSubKindOperand*>(op);
330 static void SetUpCache();
331 static void TearDownCache();
334 static LSubKindOperand* cache;
336 LSubKindOperand() : LOperand() { }
337 explicit LSubKindOperand(int index) : LOperand(kOperandKind, index) { }
// Generates LConstantOperand, LStackSlot, LDoubleStackSlot, LRegister and
// LDoubleRegister as typedefs of the appropriate LSubKindOperand
// instantiation (kind + cache size taken from LITHIUM_OPERAND_LIST).
341 #define LITHIUM_TYPEDEF_SUBKIND_OPERAND_CLASS(name, type, number) \
342 typedef LSubKindOperand<LOperand::type, number> L##name;
343 LITHIUM_OPERAND_LIST(LITHIUM_TYPEDEF_SUBKIND_OPERAND_CLASS)
344 #undef LITHIUM_TYPEDEF_SUBKIND_OPERAND_CLASS
// An ordered collection of LMoveOperands to be performed as one parallel
// move (e.g. at a gap position).
347 class LParallelMove final : public ZoneObject {
  // Initial capacity of 4 moves; grows in the given zone.
349 explicit LParallelMove(Zone* zone) : move_operands_(4, zone) { }
351 void AddMove(LOperand* from, LOperand* to, Zone* zone) {
352 move_operands_.Add(LMoveOperands(from, to), zone);
355 bool IsRedundant() const;
357 ZoneList<LMoveOperands>* move_operands() { return &move_operands_; }
359 void PrintDataTo(StringStream* stream) const;
362 ZoneList<LMoveOperands> move_operands_;
// Records which operands hold tagged pointers (for GC safepoints) at a
// given lithium position, plus operands explicitly marked as untagged.
366 class LPointerMap final : public ZoneObject {
368 explicit LPointerMap(Zone* zone)
369 : pointer_operands_(8, zone),
370 untagged_operands_(0, zone),
371 lithium_position_(-1) { }
  // Removes every recorded untagged operand from the pointer set, clears the
  // untagged list, and returns the remaining (normalized) pointer operands.
373 const ZoneList<LOperand*>* GetNormalizedOperands() {
374 for (int i = 0; i < untagged_operands_.length(); ++i) {
375 RemovePointer(untagged_operands_[i]);
377 untagged_operands_.Clear();
378 return &pointer_operands_;
380 int lithium_position() const { return lithium_position_; }
  // The position may be set exactly once (-1 means "not yet set").
382 void set_lithium_position(int pos) {
383 DCHECK(lithium_position_ == -1);
384 lithium_position_ = pos;
387 void RecordPointer(LOperand* op, Zone* zone);
388 void RemovePointer(LOperand* op);
389 void RecordUntagged(LOperand* op, Zone* zone);
390 void PrintTo(StringStream* stream);
393 ZoneList<LOperand*> pointer_operands_;
394 ZoneList<LOperand*> untagged_operands_;
395 int lithium_position_;
// Describes the full deoptimization state at a point in the code: the
// closure, frame type, and the operand values for parameters, locals and
// the expression stack, together with per-value tagged/uint32 flags and an
// encoding of objects to re-materialize on deopt.
399 class LEnvironment final : public ZoneObject {
401 LEnvironment(Handle<JSFunction> closure,
402 FrameType frame_type,
408 HEnterInlined* entry,
411 frame_type_(frame_type),
412 arguments_stack_height_(argument_count),
413 deoptimization_index_(Safepoint::kNoDeoptimizationIndex),
414 translation_index_(-1),
416 translation_size_(value_count),
417 parameter_count_(parameter_count),
419 values_(value_count, zone),
420 is_tagged_(value_count, zone),
421 is_uint32_(value_count, zone),
422 object_mapping_(0, zone),
426 has_been_used_(false) { }
428 Handle<JSFunction> closure() const { return closure_; }
429 FrameType frame_type() const { return frame_type_; }
430 int arguments_stack_height() const { return arguments_stack_height_; }
431 int deoptimization_index() const { return deoptimization_index_; }
432 int translation_index() const { return translation_index_; }
433 BailoutId ast_id() const { return ast_id_; }
434 int translation_size() const { return translation_size_; }
435 int parameter_count() const { return parameter_count_; }
436 int pc_offset() const { return pc_offset_; }
437 const ZoneList<LOperand*>* values() const { return &values_; }
438 LEnvironment* outer() const { return outer_; }
439 HEnterInlined* entry() { return entry_; }
440 Zone* zone() const { return zone_; }
442 bool has_been_used() const { return has_been_used_; }
443 void set_has_been_used() { has_been_used_ = true; }
  // Appends a value and records its representation: the new value's index is
  // added to is_tagged_ for smi/tagged values, to is_uint32_ for uint32s.
445 void AddValue(LOperand* operand,
446 Representation representation,
448 values_.Add(operand, zone());
449 if (representation.IsSmiOrTagged()) {
451 is_tagged_.Add(values_.length() - 1, zone());
455 is_uint32_.Add(values_.length() - 1, zone());
459 bool HasTaggedValueAt(int index) const {
460 return is_tagged_.Contains(index);
463 bool HasUint32ValueAt(int index) const {
464 return is_uint32_.Contains(index);
  // Records a fresh object to materialize: its length and whether it is an
  // arguments object, encoded into one uint32 (see the BitFields below).
467 void AddNewObject(int length, bool is_arguments) {
468 uint32_t encoded = LengthOrDupeField::encode(length) |
469 IsArgumentsField::encode(is_arguments) |
470 IsDuplicateField::encode(false);
471 object_mapping_.Add(encoded, zone());
  // Records an object that duplicates an earlier one; LengthOrDupeField then
  // holds the index of the duplicated entry instead of a length.
474 void AddDuplicateObject(int dupe_of) {
475 uint32_t encoded = LengthOrDupeField::encode(dupe_of) |
476 IsDuplicateField::encode(true);
477 object_mapping_.Add(encoded, zone());
480 int ObjectDuplicateOfAt(int index) {
481 DCHECK(ObjectIsDuplicateAt(index));
482 return LengthOrDupeField::decode(object_mapping_[index]);
485 int ObjectLengthAt(int index) {
486 DCHECK(!ObjectIsDuplicateAt(index));
487 return LengthOrDupeField::decode(object_mapping_[index]);
490 bool ObjectIsArgumentsAt(int index) {
491 DCHECK(!ObjectIsDuplicateAt(index));
492 return IsArgumentsField::decode(object_mapping_[index]);
495 bool ObjectIsDuplicateAt(int index) {
496 return IsDuplicateField::decode(object_mapping_[index]);
  // Fills in the deopt data assigned by code generation; may only be called
  // once (see HasBeenRegistered()).
499 void Register(int deoptimization_index,
500 int translation_index,
502 DCHECK(!HasBeenRegistered());
503 deoptimization_index_ = deoptimization_index;
504 translation_index_ = translation_index;
505 pc_offset_ = pc_offset;
507 bool HasBeenRegistered() const {
508 return deoptimization_index_ != Safepoint::kNoDeoptimizationIndex;
511 void PrintTo(StringStream* stream);
513 // Marker value indicating a de-materialized object.
514 static LOperand* materialization_marker() { return NULL; }
516 // Encoding used for the object_mapping map below.
517 class LengthOrDupeField : public BitField<int, 0, 30> { };
518 class IsArgumentsField : public BitField<bool, 30, 1> { };
519 class IsDuplicateField : public BitField<bool, 31, 1> { };
522 Handle<JSFunction> closure_;
523 FrameType frame_type_;
524 int arguments_stack_height_;
525 int deoptimization_index_;
526 int translation_index_;
528 int translation_size_;
529 int parameter_count_;
532 // Value array: [parameters] [locals] [expression stack] [de-materialized].
533 // |>--------- translation_size ---------<|
534 ZoneList<LOperand*> values_;
535 GrowableBitVector is_tagged_;
536 GrowableBitVector is_uint32_;
538 // Map with encoded information about materialization_marker operands.
539 ZoneList<uint32_t> object_mapping_;
541 LEnvironment* outer_;
542 HEnterInlined* entry_;
548 // Iterates over the non-null, non-constant operands in an environment.
549 class ShallowIterator final BASE_EMBEDDED {
  // Tolerates a NULL environment: limit_ becomes 0, so the iterator starts
  // out Done().
551 explicit ShallowIterator(LEnvironment* env)
553 limit_(env != NULL ? env->values()->length() : 0),
558 bool Done() { return current_ >= limit_; }
560 LOperand* Current() {
562 DCHECK(env_->values()->at(current_) != NULL);
563 return env_->values()->at(current_);
572 LEnvironment* env() { return env_; }
  // NULL and constant operands are transparent to this iterator.
575 bool ShouldSkip(LOperand* op) {
576 return op == NULL || op->IsConstantOperand();
579 // Skip until something interesting, beginning with and including current_.
580 void SkipUninteresting() {
581 while (current_ < limit_ && ShouldSkip(env_->values()->at(current_))) {
592 // Iterator for non-null, non-constant operands incl. outer environments.
593 class DeepIterator final BASE_EMBEDDED {
595 explicit DeepIterator(LEnvironment* env)
596 : current_iterator_(env) {
600 bool Done() { return current_iterator_.Done(); }
602 LOperand* Current() {
603 DCHECK(!current_iterator_.Done());
604 DCHECK(current_iterator_.Current() != NULL);
605 return current_iterator_.Current();
609 current_iterator_.Advance();
  // When the current environment is exhausted, hop to its outer environment
  // (repeatedly, until an environment with operands is found or none remain).
614 void SkipUninteresting() {
615 while (current_iterator_.env() != NULL && current_iterator_.Done()) {
616 current_iterator_ = ShallowIterator(current_iterator_.env()->outer());
620 ShallowIterator current_iterator_;
// Forward declaration; the arch-specific subclass is defined elsewhere.
624 class LPlatformChunk;
628 // Superclass providing data and behavior common to all the
629 // arch-specific LPlatformChunk classes.
630 class LChunk : public ZoneObject {
632 static LChunk* NewChunk(HGraph* graph);
634 void AddInstruction(LInstruction* instruction, HBasicBlock* block);
635 LConstantOperand* DefineConstantOperand(HConstant* constant);
636 HConstant* LookupConstant(LConstantOperand* operand) const;
637 Representation LookupLiteralRepresentation(LConstantOperand* operand) const;
639 int ParameterAt(int index);
640 int GetParameterStackSlot(int index) const;
641 int spill_slot_count() const { return spill_slot_count_; }
642 CompilationInfo* info() const { return info_; }
643 HGraph* graph() const { return graph_; }
644 Isolate* isolate() const { return graph_->isolate(); }
645 const ZoneList<LInstruction*>* instructions() const { return &instructions_; }
646 void AddGapMove(int index, LOperand* from, LOperand* to);
647 LGap* GetGapAt(int index) const;
648 bool IsGapAt(int index) const;
649 int NearestGapPos(int index) const;
650 void MarkEmptyBlocks();
651 const ZoneList<LPointerMap*>* pointer_maps() const { return &pointer_maps_; }
652 LLabel* GetLabel(int block_id) const;
653 int LookupDestination(int block_id) const;
654 Label* GetAssemblyLabel(int block_id) const;
656 const ZoneList<Handle<SharedFunctionInfo>>& inlined_functions() const {
657 return inlined_functions_;
660 void AddInlinedFunction(Handle<SharedFunctionInfo> closure) {
661 inlined_functions_.Add(closure, zone());
  // Records a dependency on |map| not being deprecated. Maps that can never
  // be deprecated are skipped; stubs must not record dependencies.
664 void AddDeprecationDependency(Handle<Map> map) {
665 DCHECK(!map->is_deprecated());
666 if (!map->CanBeDeprecated()) return;
667 DCHECK(!info_->IsStub());
668 deprecation_dependencies_.Add(map, zone());
  // Records a dependency on |map| staying stable. Maps that cannot
  // transition are skipped; stubs must not record dependencies.
671 void AddStabilityDependency(Handle<Map> map) {
672 DCHECK(map->is_stable());
673 if (!map->CanTransition()) return;
674 DCHECK(!info_->IsStub());
675 stability_dependencies_.Add(map, zone());
678 Zone* zone() const { return info_->zone(); }
680 Handle<Code> Codegen();
682 void set_allocated_double_registers(BitVector* allocated_registers);
683 BitVector* allocated_double_registers() {
684 return allocated_double_registers_;
688 LChunk(CompilationInfo* info, HGraph* graph);
690 int spill_slot_count_;
693 void RegisterWeakObjectsInOptimizedCode(Handle<Code> code) const;
694 void CommitDependencies(Handle<Code> code) const;
696 CompilationInfo* info_;
697 HGraph* const graph_;
698 BitVector* allocated_double_registers_;
699 ZoneList<LInstruction*> instructions_;
700 ZoneList<LPointerMap*> pointer_maps_;
701 ZoneList<Handle<SharedFunctionInfo>> inlined_functions_;
702 ZoneList<Handle<Map>> deprecation_dependencies_;
703 ZoneList<Handle<Map>> stability_dependencies_;
// Common state and helpers shared by the arch-specific LChunkBuilder
// classes that translate the Hydrogen graph into a Lithium chunk.
707 class LChunkBuilderBase BASE_EMBEDDED {
709 explicit LChunkBuilderBase(CompilationInfo* info, HGraph* graph)
710 : argument_count_(0),
715 zone_(graph->zone()) {}
717 virtual ~LChunkBuilderBase() { }
719 void Abort(BailoutReason reason);
720 void Retry(BailoutReason reason);
  // Builder lifecycle state, queried by the is_*() predicates below.
723 enum Status { UNUSED, BUILDING, DONE, ABORTED };
725 LPlatformChunk* chunk() const { return chunk_; }
726 CompilationInfo* info() const { return info_; }
727 HGraph* graph() const { return graph_; }
728 int argument_count() const { return argument_count_; }
729 Isolate* isolate() const { return graph_->isolate(); }
730 Heap* heap() const { return isolate()->heap(); }
732 bool is_unused() const { return status_ == UNUSED; }
733 bool is_building() const { return status_ == BUILDING; }
734 bool is_done() const { return status_ == DONE; }
735 bool is_aborted() const { return status_ == ABORTED; }
737 // An input operand in register, stack slot or a constant operand.
738 // Will not be moved to a register even if one is freely available.
739 virtual MUST_USE_RESULT LOperand* UseAny(HValue* value) = 0;
741 LEnvironment* CreateEnvironment(HEnvironment* hydrogen_env,
742 int* argument_index_accumulator,
743 ZoneList<HValue*>* objects_to_materialize);
744 void AddObjectToMaterialize(HValue* value,
745 ZoneList<HValue*>* objects_to_materialize,
746 LEnvironment* result);
748 Zone* zone() const { return zone_; }
751 LPlatformChunk* chunk_;
752 CompilationInfo* info_;
753 HGraph* const graph_;
// Maps a (possibly negative, for parameters) slot index to a frame offset.
761 int StackSlotOffset(int index);
// Describes what a number-untagging operation may encounter: a value known
// to be a smi, or any tagged value (requiring the heap-number path).
763 enum NumberUntagDMode {
764 NUMBER_CANDIDATE_IS_SMI,
765 NUMBER_CANDIDATE_IS_ANY_TAGGED
// Compilation phase wrapper for Lithium passes; forwards the chunk's
// CompilationInfo to the CompilationPhase base.
769 class LPhase : public CompilationPhase {
771 LPhase(const char* name, LChunk* chunk)
772 : CompilationPhase(name, chunk->info()),
779 DISALLOW_COPY_AND_ASSIGN(LPhase);
783 // A register-allocator view of a Lithium instruction. It contains the id of
784 // the output operand and a list of input operand uses.
787 UNALLOCATED_REGISTERS,
792 // Iterator for non-null temp operands.
793 class TempIterator BASE_EMBEDDED {
795 inline explicit TempIterator(LInstruction* instr);
797 inline LOperand* Current();
798 inline void Advance();
  // Advances past NULL temps; definitions are inline elsewhere.
801 inline void SkipUninteresting();
802 LInstruction* instr_;
808 // Iterator for non-constant input operands.
809 class InputIterator BASE_EMBEDDED {
811 inline explicit InputIterator(LInstruction* instr);
813 inline LOperand* Current();
814 inline void Advance();
  // Advances past constant inputs; definitions are inline elsewhere.
817 inline void SkipUninteresting();
818 LInstruction* instr_;
// Iterates over all operand uses of an instruction: its inputs followed by
// the operands of its (possibly nested) environments.
824 class UseIterator BASE_EMBEDDED {
826 inline explicit UseIterator(LInstruction* instr);
828 inline LOperand* Current();
829 inline void Advance();
832 InputIterator input_iterator_;
833 DeepIterator env_iterator_;
838 } // namespace internal
841 #endif // V8_LITHIUM_H_