1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
10 #include "src/allocation.h"
11 #include "src/hydrogen.h"
12 #include "src/safepoint-table.h"
13 #include "src/zone-allocator.h"
// X-macro list of the concrete (allocated) LOperand kinds.
// Each entry is V(class-name-suffix, Kind-enum-value, cache-size), where
// cache-size is the number of pre-built instances kept in the per-kind
// static cache (see LSubKindOperand::Create below).
18 #define LITHIUM_OPERAND_LIST(V) \
19 V(ConstantOperand, CONSTANT_OPERAND, 128) \
20 V(StackSlot, STACK_SLOT, 128) \
21 V(DoubleStackSlot, DOUBLE_STACK_SLOT, 128) \
22 V(Register, REGISTER, 16) \
23 V(DoubleRegister, DOUBLE_REGISTER, 16)
// Base class for all Lithium operands. The entire state is a single tagged
// word (value_): the low kKindFieldWidth bits hold the Kind, the remaining
// high bits hold a (possibly signed) index.
25 class LOperand : public ZoneObject {
// Default-constructed operands are INVALID with index 0.
37 LOperand() : value_(KindField::encode(INVALID)) { }
39 Kind kind() const { return KindField::decode(value_); }
// Signed arithmetic shift so negative indices (e.g. fixed slot indices)
// decode correctly. This bypasses the BitField helper on purpose.
40 int index() const { return static_cast<int>(value_) >> kKindFieldWidth; }
// Expands to one Is<Kind>() predicate per operand kind.
41 #define LITHIUM_OPERAND_PREDICATE(name, type, number) \
42 bool Is##name() const { return kind() == type; }
43 LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_PREDICATE)
44 LITHIUM_OPERAND_PREDICATE(Unallocated, UNALLOCATED, 0)
45 LITHIUM_OPERAND_PREDICATE(Ignored, INVALID, 0)
46 #undef LITHIUM_OPERAND_PREDICATE
// Two operands are equal iff their full encodings (kind and index) match.
47 bool Equals(LOperand* other) const { return value_ == other->value_; }
49 void PrintTo(StringStream* stream);
// Re-encodes this operand in place with the given kind and index.
50 void ConvertTo(Kind kind, int index) {
51 if (kind == REGISTER) DCHECK(index >= 0);
52 value_ = KindField::encode(kind);
53 value_ |= index << kKindFieldWidth;
// Round-trip check: decoding must reproduce the index we just encoded
// (guards against indices too wide for the available bits).
54 DCHECK(this->index() == index);
57 // Calls SetUpCache()/TearDownCache() for each subclass.
58 static void SetUpCaches();
59 static void TearDownCaches();
62 static const int kKindFieldWidth = 3;
63 class KindField : public BitField<Kind, 0, kKindFieldWidth> { };
// Protected-style constructor used by subclasses to encode kind + index.
65 LOperand(Kind kind, int index) { ConvertTo(kind, index); }
// An operand that has not yet been assigned a register or stack slot by the
// register allocator. Carries an allocation policy, a virtual register id,
// and (for extended policies) a lifetime, all bit-packed into value_.
71 class LUnallocated : public LOperand {
82 FIXED_DOUBLE_REGISTER,
84 MUST_HAVE_DOUBLE_REGISTER,
89 // Lifetime of operand inside the instruction.
91 // USED_AT_START operand is guaranteed to be live only at
92 // instruction start. Register allocator is free to assign the same register
93 // to some other operand used inside instruction (i.e. temporary or
97 // USED_AT_END operand is treated as live until the end of
98 // instruction. This means that register allocator will not reuse its
99 // register for any other operand inside instruction.
// Extended policy with no index; defaults to USED_AT_END lifetime.
103 explicit LUnallocated(ExtendedPolicy policy) : LOperand(UNALLOCATED, 0) {
104 value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
105 value_ |= ExtendedPolicyField::encode(policy);
106 value_ |= LifetimeField::encode(USED_AT_END);
// FIXED_SLOT only: the slot index is stored as a signed value in the
// topmost bits (see the encoding diagram below).
109 LUnallocated(BasicPolicy policy, int index) : LOperand(UNALLOCATED, 0) {
110 DCHECK(policy == FIXED_SLOT);
111 value_ |= BasicPolicyField::encode(policy);
112 value_ |= index << FixedSlotIndexField::kShift;
113 DCHECK(this->fixed_slot_index() == index);
// Fixed register / fixed double register with explicit register index.
116 LUnallocated(ExtendedPolicy policy, int index) : LOperand(UNALLOCATED, 0) {
117 DCHECK(policy == FIXED_REGISTER || policy == FIXED_DOUBLE_REGISTER);
118 value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
119 value_ |= ExtendedPolicyField::encode(policy);
120 value_ |= LifetimeField::encode(USED_AT_END);
121 value_ |= FixedRegisterField::encode(index);
// Extended policy with explicit lifetime.
124 LUnallocated(ExtendedPolicy policy, Lifetime lifetime)
125 : LOperand(UNALLOCATED, 0) {
126 value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
127 value_ |= ExtendedPolicyField::encode(policy);
128 value_ |= LifetimeField::encode(lifetime);
// Makes an ANY-policy copy that keeps only this operand's virtual register.
131 LUnallocated* CopyUnconstrained(Zone* zone) {
132 LUnallocated* result = new(zone) LUnallocated(ANY);
133 result->set_virtual_register(virtual_register());
// Checked downcast; only valid when op->IsUnallocated().
137 static LUnallocated* cast(LOperand* op) {
138 DCHECK(op->IsUnallocated());
139 return reinterpret_cast<LUnallocated*>(op);
142 // The encoding used for LUnallocated operands depends on the policy that is
143 // stored within the operand. The FIXED_SLOT policy uses a compact encoding
144 // because it accommodates a larger payload.
146 // For FIXED_SLOT policy:
147 // +------------------------------------------+
148 // | slot_index | vreg | 0 | 001 |
149 // +------------------------------------------+
151 // For all other (extended) policies:
152 // +------------------------------------------+
153 // | reg_index | L | PPP | vreg | 1 | 001 | L ... Lifetime
154 // +------------------------------------------+ P ... Policy
156 // The slot index is a signed value which requires us to decode it manually
157 // instead of using the BitField utility class.
159 // The superclass has a KindField.
160 STATIC_ASSERT(kKindFieldWidth == 3);
162 // BitFields for all unallocated operands.
163 class BasicPolicyField : public BitField<BasicPolicy, 3, 1> {};
164 class VirtualRegisterField : public BitField<unsigned, 4, 18> {};
166 // BitFields specific to BasicPolicy::FIXED_SLOT.
167 class FixedSlotIndexField : public BitField<int, 22, 10> {};
169 // BitFields specific to BasicPolicy::EXTENDED_POLICY.
170 class ExtendedPolicyField : public BitField<ExtendedPolicy, 22, 3> {};
171 class LifetimeField : public BitField<Lifetime, 25, 1> {};
172 class FixedRegisterField : public BitField<int, 26, 6> {};
174 static const int kMaxVirtualRegisters = VirtualRegisterField::kMax + 1;
175 static const int kFixedSlotIndexWidth = FixedSlotIndexField::kSize;
// Fixed slot indices are signed: the usable range is symmetric around 0.
176 static const int kMaxFixedSlotIndex = (1 << (kFixedSlotIndexWidth - 1)) - 1;
177 static const int kMinFixedSlotIndex = -(1 << (kFixedSlotIndexWidth - 1));
179 // Predicates for the operand policy.
180 bool HasAnyPolicy() const {
181 return basic_policy() == EXTENDED_POLICY &&
182 extended_policy() == ANY;
184 bool HasFixedPolicy() const {
185 return basic_policy() == FIXED_SLOT ||
186 extended_policy() == FIXED_REGISTER ||
187 extended_policy() == FIXED_DOUBLE_REGISTER;
189 bool HasRegisterPolicy() const {
190 return basic_policy() == EXTENDED_POLICY && (
191 extended_policy() == WRITABLE_REGISTER ||
192 extended_policy() == MUST_HAVE_REGISTER);
194 bool HasDoubleRegisterPolicy() const {
195 return basic_policy() == EXTENDED_POLICY &&
196 extended_policy() == MUST_HAVE_DOUBLE_REGISTER;
198 bool HasSameAsInputPolicy() const {
199 return basic_policy() == EXTENDED_POLICY &&
200 extended_policy() == SAME_AS_FIRST_INPUT;
202 bool HasFixedSlotPolicy() const {
203 return basic_policy() == FIXED_SLOT;
205 bool HasFixedRegisterPolicy() const {
206 return basic_policy() == EXTENDED_POLICY &&
207 extended_policy() == FIXED_REGISTER;
209 bool HasFixedDoubleRegisterPolicy() const {
210 return basic_policy() == EXTENDED_POLICY &&
211 extended_policy() == FIXED_DOUBLE_REGISTER;
213 bool HasWritableRegisterPolicy() const {
214 return basic_policy() == EXTENDED_POLICY &&
215 extended_policy() == WRITABLE_REGISTER;
218 // [basic_policy]: Distinguish between FIXED_SLOT and all other policies.
219 BasicPolicy basic_policy() const {
220 return BasicPolicyField::decode(value_);
223 // [extended_policy]: Only for non-FIXED_SLOT. The finer-grained policy.
224 ExtendedPolicy extended_policy() const {
225 DCHECK(basic_policy() == EXTENDED_POLICY);
226 return ExtendedPolicyField::decode(value_);
229 // [fixed_slot_index]: Only for FIXED_SLOT.
230 int fixed_slot_index() const {
231 DCHECK(HasFixedSlotPolicy());
// Signed arithmetic shift: preserves the sign of negative slot indices.
232 return static_cast<int>(value_) >> FixedSlotIndexField::kShift;
235 // [fixed_register_index]: Only for FIXED_REGISTER or FIXED_DOUBLE_REGISTER.
236 int fixed_register_index() const {
237 DCHECK(HasFixedRegisterPolicy() || HasFixedDoubleRegisterPolicy());
238 return FixedRegisterField::decode(value_);
241 // [virtual_register]: The virtual register ID for this operand.
242 int virtual_register() const {
243 return VirtualRegisterField::decode(value_);
245 void set_virtual_register(unsigned id) {
246 value_ = VirtualRegisterField::update(value_, id);
249 // [lifetime]: Only for non-FIXED_SLOT.
250 bool IsUsedAtStart() {
251 DCHECK(basic_policy() == EXTENDED_POLICY);
252 return LifetimeField::decode(value_) == USED_AT_START;
// A single source->destination move, as used by the gap resolver when
// lowering parallel moves into a sequence of simple moves.
257 class LMoveOperands V8_FINAL BASE_EMBEDDED {
259 LMoveOperands(LOperand* source, LOperand* destination)
260 : source_(source), destination_(destination) {
263 LOperand* source() const { return source_; }
264 void set_source(LOperand* operand) { source_ = operand; }
266 LOperand* destination() const { return destination_; }
267 void set_destination(LOperand* operand) { destination_ = operand; }
269 // The gap resolver marks moves as "in-progress" by clearing the
270 // destination (but not the source).
271 bool IsPending() const {
272 return destination_ == NULL && source_ != NULL;
275 // True if this move reads from the given operand, i.e. it is not
275 // eliminated and its source equals |operand|. (Such a move "blocks"
275 // any other move that wants to write |operand| first.)
276 bool Blocks(LOperand* operand) const {
277 return !IsEliminated() && source()->Equals(operand);
280 // A move is redundant if it's been eliminated, if its source and
281 // destination are the same, or if its destination is unneeded or constant.
282 bool IsRedundant() const {
283 return IsEliminated() || source_->Equals(destination_) || IsIgnored() ||
284 (destination_ != NULL && destination_->IsConstantOperand());
// "Ignored" means the destination exists but is an INVALID operand.
287 bool IsIgnored() const {
288 return destination_ != NULL && destination_->IsIgnored();
291 // We clear both operands to indicate move that's been eliminated.
292 void Eliminate() { source_ = destination_ = NULL; }
293 bool IsEliminated() const {
// Invariant: a null source implies a null destination (never pending).
294 DCHECK(source_ != NULL || destination_ == NULL);
295 return source_ == NULL;
300 LOperand* destination_;
// Concrete operand of a single Kind. Small indices are served from a static
// per-kind cache (kNumCachedOperands entries); larger ones are zone-allocated.
304 template<LOperand::Kind kOperandKind, int kNumCachedOperands>
305 class LSubKindOperand V8_FINAL : public LOperand {
307 static LSubKindOperand* Create(int index, Zone* zone) {
309 if (index < kNumCachedOperands) return &cache[index];
310 return new(zone) LSubKindOperand(index);
// Checked downcast; only valid when op has exactly kOperandKind.
313 static LSubKindOperand* cast(LOperand* op) {
314 DCHECK(op->kind() == kOperandKind);
315 return reinterpret_cast<LSubKindOperand*>(op);
// Cache lifecycle; driven by LOperand::SetUpCaches()/TearDownCaches().
318 static void SetUpCache();
319 static void TearDownCache();
322 static LSubKindOperand* cache;
324 LSubKindOperand() : LOperand() { }
325 explicit LSubKindOperand(int index) : LOperand(kOperandKind, index) { }
// Instantiates one LSubKindOperand typedef per operand kind, e.g.
// LConstantOperand, LStackSlot, LDoubleStackSlot, LRegister, LDoubleRegister.
329 #define LITHIUM_TYPEDEF_SUBKIND_OPERAND_CLASS(name, type, number) \
330 typedef LSubKindOperand<LOperand::type, number> L##name;
331 LITHIUM_OPERAND_LIST(LITHIUM_TYPEDEF_SUBKIND_OPERAND_CLASS)
332 #undef LITHIUM_TYPEDEF_SUBKIND_OPERAND_CLASS
// A set of moves that are conceptually performed simultaneously (e.g. at a
// gap position); resolved into an ordered sequence by the gap resolver.
335 class LParallelMove V8_FINAL : public ZoneObject {
// Initial capacity of 4 moves; grows in the given zone.
337 explicit LParallelMove(Zone* zone) : move_operands_(4, zone) { }
339 void AddMove(LOperand* from, LOperand* to, Zone* zone) {
340 move_operands_.Add(LMoveOperands(from, to), zone);
// True when every contained move is redundant (defined out of line).
343 bool IsRedundant() const;
345 ZoneList<LMoveOperands>* move_operands() { return &move_operands_; }
347 void PrintDataTo(StringStream* stream) const;
350 ZoneList<LMoveOperands> move_operands_;
// Records which operands hold tagged pointers at a given lithium position,
// for use by the GC via safepoint tables. Untagged operands are tracked
// separately and subtracted lazily in GetNormalizedOperands().
354 class LPointerMap V8_FINAL : public ZoneObject {
356 explicit LPointerMap(Zone* zone)
357 : pointer_operands_(8, zone),
358 untagged_operands_(0, zone),
// -1 means "position not yet assigned"; see set_lithium_position().
359 lithium_position_(-1) { }
// Removes all recorded untagged operands from the pointer set, then
// returns the remaining (genuinely tagged) operands.
361 const ZoneList<LOperand*>* GetNormalizedOperands() {
362 for (int i = 0; i < untagged_operands_.length(); ++i) {
363 RemovePointer(untagged_operands_[i]);
365 untagged_operands_.Clear();
366 return &pointer_operands_;
368 int lithium_position() const { return lithium_position_; }
// The position may only be set once.
370 void set_lithium_position(int pos) {
371 DCHECK(lithium_position_ == -1);
372 lithium_position_ = pos;
375 void RecordPointer(LOperand* op, Zone* zone);
376 void RemovePointer(LOperand* op);
377 void RecordUntagged(LOperand* op, Zone* zone);
378 void PrintTo(StringStream* stream);
381 ZoneList<LOperand*> pointer_operands_;
382 ZoneList<LOperand*> untagged_operands_;
383 int lithium_position_;
// Describes the full-codegen state (closure, frame, live values) needed to
// rebuild an unoptimized frame when deoptimizing at a given point. Inlined
// frames chain through outer().
387 class LEnvironment V8_FINAL : public ZoneObject {
389 LEnvironment(Handle<JSFunction> closure,
390 FrameType frame_type,
396 HEnterInlined* entry,
399 frame_type_(frame_type),
400 arguments_stack_height_(argument_count),
// Not registered for deopt yet; see Register()/HasBeenRegistered().
401 deoptimization_index_(Safepoint::kNoDeoptimizationIndex),
402 translation_index_(-1),
404 translation_size_(value_count),
405 parameter_count_(parameter_count),
407 values_(value_count, zone),
408 is_tagged_(value_count, zone),
409 is_uint32_(value_count, zone),
410 object_mapping_(0, zone),
414 has_been_used_(false) { }
416 Handle<JSFunction> closure() const { return closure_; }
417 FrameType frame_type() const { return frame_type_; }
418 int arguments_stack_height() const { return arguments_stack_height_; }
419 int deoptimization_index() const { return deoptimization_index_; }
420 int translation_index() const { return translation_index_; }
421 BailoutId ast_id() const { return ast_id_; }
422 int translation_size() const { return translation_size_; }
423 int parameter_count() const { return parameter_count_; }
424 int pc_offset() const { return pc_offset_; }
425 const ZoneList<LOperand*>* values() const { return &values_; }
426 LEnvironment* outer() const { return outer_; }
427 HEnterInlined* entry() { return entry_; }
428 Zone* zone() const { return zone_; }
430 bool has_been_used() const { return has_been_used_; }
431 void set_has_been_used() { has_been_used_ = true; }
// Appends a value; tagged/uint32 representations are remembered in the
// parallel bit vectors keyed by the value's index in values_.
433 void AddValue(LOperand* operand,
434 Representation representation,
436 values_.Add(operand, zone());
437 if (representation.IsSmiOrTagged()) {
439 is_tagged_.Add(values_.length() - 1, zone());
443 is_uint32_.Add(values_.length() - 1, zone());
447 bool HasTaggedValueAt(int index) const {
448 return is_tagged_.Contains(index);
451 bool HasUint32ValueAt(int index) const {
452 return is_uint32_.Contains(index);
// Records a to-be-materialized object with the given field count; the
// encoding packs length/dupe-index, is-arguments and is-duplicate flags
// into one uint32_t (see the BitFields below).
455 void AddNewObject(int length, bool is_arguments) {
456 uint32_t encoded = LengthOrDupeField::encode(length) |
457 IsArgumentsField::encode(is_arguments) |
458 IsDuplicateField::encode(false);
459 object_mapping_.Add(encoded, zone());
// Records an object that duplicates a previously recorded one.
462 void AddDuplicateObject(int dupe_of) {
463 uint32_t encoded = LengthOrDupeField::encode(dupe_of) |
464 IsDuplicateField::encode(true);
465 object_mapping_.Add(encoded, zone());
// For duplicates the shared field holds the index of the original object.
468 int ObjectDuplicateOfAt(int index) {
469 DCHECK(ObjectIsDuplicateAt(index));
470 return LengthOrDupeField::decode(object_mapping_[index]);
// For non-duplicates the shared field holds the object's length.
473 int ObjectLengthAt(int index) {
474 DCHECK(!ObjectIsDuplicateAt(index));
475 return LengthOrDupeField::decode(object_mapping_[index]);
478 bool ObjectIsArgumentsAt(int index) {
479 DCHECK(!ObjectIsDuplicateAt(index));
480 return IsArgumentsField::decode(object_mapping_[index]);
483 bool ObjectIsDuplicateAt(int index) {
484 return IsDuplicateField::decode(object_mapping_[index]);
// Finalizes this environment for deoptimization; may only happen once.
487 void Register(int deoptimization_index,
488 int translation_index,
490 DCHECK(!HasBeenRegistered());
491 deoptimization_index_ = deoptimization_index;
492 translation_index_ = translation_index;
493 pc_offset_ = pc_offset;
495 bool HasBeenRegistered() const {
496 return deoptimization_index_ != Safepoint::kNoDeoptimizationIndex;
499 void PrintTo(StringStream* stream);
501 // Marker value indicating a de-materialized object.
502 static LOperand* materialization_marker() { return NULL; }
504 // Encoding used for the object_mapping map below.
505 class LengthOrDupeField : public BitField<int, 0, 30> { };
506 class IsArgumentsField : public BitField<bool, 30, 1> { };
507 class IsDuplicateField : public BitField<bool, 31, 1> { };
510 Handle<JSFunction> closure_;
511 FrameType frame_type_;
512 int arguments_stack_height_;
513 int deoptimization_index_;
514 int translation_index_;
516 int translation_size_;
517 int parameter_count_;
520 // Value array: [parameters] [locals] [expression stack] [de-materialized].
521 // |>--------- translation_size ---------<|
522 ZoneList<LOperand*> values_;
523 GrowableBitVector is_tagged_;
524 GrowableBitVector is_uint32_;
526 // Map with encoded information about materialization_marker operands.
527 ZoneList<uint32_t> object_mapping_;
529 LEnvironment* outer_;
530 HEnterInlined* entry_;
536 // Iterates over the non-null, non-constant operands in an environment.
537 class ShallowIterator V8_FINAL BASE_EMBEDDED {
// A NULL environment yields an immediately-done iterator (limit_ == 0).
539 explicit ShallowIterator(LEnvironment* env)
541 limit_(env != NULL ? env->values()->length() : 0),
546 bool Done() { return current_ >= limit_; }
548 LOperand* Current() {
550 DCHECK(env_->values()->at(current_) != NULL);
551 return env_->values()->at(current_);
560 LEnvironment* env() { return env_; }
// Null and constant operands are not interesting to the caller.
563 bool ShouldSkip(LOperand* op) {
564 return op == NULL || op->IsConstantOperand();
567 // Skip until something interesting, beginning with and including current_.
568 void SkipUninteresting() {
569 while (current_ < limit_ && ShouldSkip(env_->values()->at(current_))) {
580 // Iterator for non-null, non-constant operands incl. outer environments.
583 explicit DeepIterator(LEnvironment* env)
584 : current_iterator_(env) {
588 bool Done() { return current_iterator_.Done(); }
590 LOperand* Current() {
591 DCHECK(!current_iterator_.Done());
592 DCHECK(current_iterator_.Current() != NULL);
593 return current_iterator_.Current();
597 current_iterator_.Advance();
// When the current environment is exhausted, hop to its outer environment
// (repeating until an environment with remaining operands, or NULL, is hit).
602 void SkipUninteresting() {
603 while (current_iterator_.env() != NULL && current_iterator_.Done()) {
604 current_iterator_ = ShallowIterator(current_iterator_.env()->outer());
608 ShallowIterator current_iterator_;
612 class LPlatformChunk;
616 // Superclass providing data and behavior common to all the
617 // arch-specific LPlatformChunk classes.
618 class LChunk : public ZoneObject {
// Factory: builds a (platform-specific) chunk from the hydrogen graph.
620 static LChunk* NewChunk(HGraph* graph);
622 void AddInstruction(LInstruction* instruction, HBasicBlock* block);
623 LConstantOperand* DefineConstantOperand(HConstant* constant);
624 HConstant* LookupConstant(LConstantOperand* operand) const;
625 Representation LookupLiteralRepresentation(LConstantOperand* operand) const;
627 int ParameterAt(int index);
628 int GetParameterStackSlot(int index) const;
629 int spill_slot_count() const { return spill_slot_count_; }
630 CompilationInfo* info() const { return info_; }
631 HGraph* graph() const { return graph_; }
632 Isolate* isolate() const { return graph_->isolate(); }
633 const ZoneList<LInstruction*>* instructions() const { return &instructions_; }
// Gap-instruction helpers: gaps hold the parallel moves between
// instructions at the given instruction index.
634 void AddGapMove(int index, LOperand* from, LOperand* to);
635 LGap* GetGapAt(int index) const;
636 bool IsGapAt(int index) const;
637 int NearestGapPos(int index) const;
638 void MarkEmptyBlocks();
639 const ZoneList<LPointerMap*>* pointer_maps() const { return &pointer_maps_; }
640 LLabel* GetLabel(int block_id) const;
641 int LookupDestination(int block_id) const;
642 Label* GetAssemblyLabel(int block_id) const;
644 const ZoneList<Handle<JSFunction> >* inlined_closures() const {
645 return &inlined_closures_;
648 void AddInlinedClosure(Handle<JSFunction> closure) {
649 inlined_closures_.Add(closure, zone());
// Registers a code dependency that deoptimizes if |map| is deprecated.
// Maps that can never be deprecated need no dependency.
652 void AddDeprecationDependency(Handle<Map> map) {
653 DCHECK(!map->is_deprecated());
654 if (!map->CanBeDeprecated()) return;
655 DCHECK(!info_->IsStub());
656 deprecation_dependencies_.insert(map);
// Registers a code dependency that deoptimizes if |map| transitions.
// Maps that can never transition need no dependency.
659 void AddStabilityDependency(Handle<Map> map) {
660 DCHECK(map->is_stable());
661 if (!map->CanTransition()) return;
662 DCHECK(!info_->IsStub());
663 stability_dependencies_.insert(map);
666 Zone* zone() const { return info_->zone(); }
668 Handle<Code> Codegen();
670 void set_allocated_double_registers(BitVector* allocated_registers);
671 BitVector* allocated_double_registers() {
672 return allocated_double_registers_;
676 LChunk(CompilationInfo* info, HGraph* graph);
678 int spill_slot_count_;
// Map sets live in the compilation zone via zone_allocator.
681 typedef std::less<Handle<Map> > MapLess;
682 typedef zone_allocator<Handle<Map> > MapAllocator;
683 typedef std::set<Handle<Map>, MapLess, MapAllocator> MapSet;
// Installs the collected map dependencies on the generated code object.
685 void CommitDependencies(Handle<Code> code) const;
687 CompilationInfo* info_;
688 HGraph* const graph_;
689 BitVector* allocated_double_registers_;
690 ZoneList<LInstruction*> instructions_;
691 ZoneList<LPointerMap*> pointer_maps_;
692 ZoneList<Handle<JSFunction> > inlined_closures_;
693 MapSet deprecation_dependencies_;
694 MapSet stability_dependencies_;
// Common base for the per-architecture LChunkBuilder classes: shared state
// and the environment/materialization plumbing they all need.
698 class LChunkBuilderBase BASE_EMBEDDED {
700 explicit LChunkBuilderBase(Zone* zone)
701 : argument_count_(0),
704 virtual ~LChunkBuilderBase() { }
707 // An input operand in register, stack slot or a constant operand.
708 // Will not be moved to a register even if one is freely available.
709 virtual MUST_USE_RESULT LOperand* UseAny(HValue* value) = 0;
// Builds the LEnvironment chain for |hydrogen_env|, accumulating argument
// indices and collecting objects that must be materialized on deopt.
711 LEnvironment* CreateEnvironment(HEnvironment* hydrogen_env,
712 int* argument_index_accumulator,
713 ZoneList<HValue*>* objects_to_materialize);
714 void AddObjectToMaterialize(HValue* value,
715 ZoneList<HValue*>* objects_to_materialize,
716 LEnvironment* result);
718 Zone* zone() const { return zone_; }
// Maps a (possibly negative) spill slot index to its frame offset.
727 int StackSlotOffset(int index);
// How much checking a number-untag (tagged -> double) operation needs:
// the input is statically known to be a Smi, or may be any tagged value.
729 enum NumberUntagDMode {
730 NUMBER_CANDIDATE_IS_SMI,
731 NUMBER_CANDIDATE_IS_ANY_TAGGED
// RAII compilation-phase scope for lithium passes (timing/tracing is
// handled by the CompilationPhase base class).
735 class LPhase : public CompilationPhase {
737 LPhase(const char* name, LChunk* chunk)
738 : CompilationPhase(name, chunk->info()),
745 DISALLOW_COPY_AND_ASSIGN(LPhase);
749 // A register-allocator view of a Lithium instruction. It contains the id of
750 // the output operand and a list of input operand uses.
// NOTE(review): enumerator of an enum whose declaration is elided from this
// view — presumably distinguishing allocated vs. unallocated register modes;
// confirm against the full header.
753 UNALLOCATED_REGISTERS,
758 // Iterator for non-null temp operands.
759 class TempIterator BASE_EMBEDDED {
761 inline explicit TempIterator(LInstruction* instr);
763 inline LOperand* Current();
764 inline void Advance();
// Skips over null temps; shares the Done/Current/Advance iterator protocol
// used by the other operand iterators in this file.
767 inline void SkipUninteresting();
768 LInstruction* instr_;
774 // Iterator for non-constant input operands.
775 class InputIterator BASE_EMBEDDED {
777 inline explicit InputIterator(LInstruction* instr);
779 inline LOperand* Current();
780 inline void Advance();
// Skips constant inputs; same iterator protocol as TempIterator.
783 inline void SkipUninteresting();
784 LInstruction* instr_;
// Iterates over all operand uses of an instruction: its inputs followed by
// the values of its environment chain (via DeepIterator).
790 class UseIterator BASE_EMBEDDED {
792 inline explicit UseIterator(LInstruction* instr);
794 inline LOperand* Current();
795 inline void Advance();
798 InputIterator input_iterator_;
799 DeepIterator env_iterator_;
804 } } // namespace v8::internal
806 #endif // V8_LITHIUM_H_