1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 #include "allocation.h"
33 #include "safepoint-table.h"
// X-macro listing the allocated operand kinds as V(class-name suffix,
// Kind enum value).  Expanded below to generate the per-kind Is##name()
// predicates on LOperand; UNALLOCATED, ARGUMENT and INVALID are handled
// separately because they do not follow the same naming pattern.
38 #define LITHIUM_OPERAND_LIST(V) \
39 V(ConstantOperand, CONSTANT_OPERAND) \
40 V(StackSlot, STACK_SLOT) \
41 V(DoubleStackSlot, DOUBLE_STACK_SLOT) \
42 V(Register, REGISTER) \
43 V(DoubleRegister, DOUBLE_REGISTER)
// Base class for all Lithium (low-level IR) operands.  The Kind tag and
// an integer index are packed into the single word value_: the kind sits
// in the low kKindFieldWidth bits (via KindField) and the index occupies
// the remaining upper bits.
46 class LOperand: public ZoneObject {
// Default-constructed operands are INVALID (index 0).
59 LOperand() : value_(KindField::encode(INVALID)) { }
61 Kind kind() const { return KindField::decode(value_); }
// NOTE(review): arithmetic right shift — presumably the index may be
// negative (cf. LUnallocated's signed fixed index); confirm at callers.
62 int index() const { return static_cast<int>(value_) >> kKindFieldWidth; }
63 #define LITHIUM_OPERAND_PREDICATE(name, type) \
64 bool Is##name() const { return kind() == type; }
// Generates IsConstantOperand(), IsStackSlot(), IsDoubleStackSlot(),
// IsRegister() and IsDoubleRegister(), plus the three extras below.
65 LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_PREDICATE)
66 LITHIUM_OPERAND_PREDICATE(Argument, ARGUMENT)
67 LITHIUM_OPERAND_PREDICATE(Unallocated, UNALLOCATED)
68 LITHIUM_OPERAND_PREDICATE(Ignored, INVALID)
69 #undef LITHIUM_OPERAND_PREDICATE
// Whole-word comparison: equal only if both kind and index match.
70 bool Equals(LOperand* other) const { return value_ == other->value_; }
72 void PrintTo(StringStream* stream);
// Overwrites both fields of value_ with the new kind and index.
73 void ConvertTo(Kind kind, int index) {
74 value_ = KindField::encode(kind);
75 value_ |= index << kKindFieldWidth;
// Round-trip check: the index must fit in the bits above the kind field.
76 ASSERT(this->index() == index);
79 // Calls SetUpCache()/TearDownCache() for each subclass.
80 static void SetUpCaches();
81 static void TearDownCaches();
84 static const int kKindFieldWidth = 3;
85 class KindField : public BitField<Kind, 0, kKindFieldWidth> { };
87 LOperand(Kind kind, int index) { ConvertTo(kind, index); }
// An operand the register allocator has not yet assigned a location to.
// Reuses LOperand::value_ to additionally encode an allocation Policy,
// a Lifetime bit, a virtual register id, and — for fixed policies — a
// signed fixed index in the topmost bits.
93 class LUnallocated: public LOperand {
99 FIXED_DOUBLE_REGISTER,
106 // Lifetime of operand inside the instruction.
108 // USED_AT_START operand is guaranteed to be live only at
109 // instruction start. Register allocator is free to assign the same register
110 // to some other operand used inside instruction (i.e. temporary or
114 // USED_AT_END operand is treated as live until the end of
115 // instruction. This means that register allocator will not reuse its
116 // register for any other operand inside instruction.
// Convenience constructors; all start from a zeroed UNALLOCATED operand
// and OR the policy/lifetime/fixed-index fields in via Initialize().
120 explicit LUnallocated(Policy policy) : LOperand(UNALLOCATED, 0) {
121 Initialize(policy, 0, USED_AT_END);
124 LUnallocated(Policy policy, int fixed_index) : LOperand(UNALLOCATED, 0) {
125 Initialize(policy, fixed_index, USED_AT_END);
128 LUnallocated(Policy policy, Lifetime lifetime) : LOperand(UNALLOCATED, 0) {
129 Initialize(policy, 0, lifetime);
132 // The superclass has a KindField. Some policies have a signed fixed
133 // index in the upper bits.
134 static const int kPolicyWidth = 3;
135 static const int kLifetimeWidth = 1;
136 static const int kVirtualRegisterWidth = 18;
// Bit layout (low to high): kind | policy | lifetime | vreg | fixed index.
138 static const int kPolicyShift = kKindFieldWidth;
139 static const int kLifetimeShift = kPolicyShift + kPolicyWidth;
140 static const int kVirtualRegisterShift = kLifetimeShift + kLifetimeWidth;
141 static const int kFixedIndexShift =
142 kVirtualRegisterShift + kVirtualRegisterWidth;
144 class PolicyField : public BitField<Policy, kPolicyShift, kPolicyWidth> { };
147 : public BitField<Lifetime, kLifetimeShift, kLifetimeWidth> {
150 class VirtualRegisterField
151 : public BitField<unsigned,
152 kVirtualRegisterShift,
153 kVirtualRegisterWidth> {
156 static const int kMaxVirtualRegisters = 1 << kVirtualRegisterWidth;
// Fixed index is stored in the remaining high bits, hence the +/-2^6 range.
157 static const int kMaxFixedIndex = 63;
158 static const int kMinFixedIndex = -64;
// Policy predicates used by the allocator to classify constraints.
160 bool HasAnyPolicy() const {
161 return policy() == ANY;
163 bool HasFixedPolicy() const {
164 return policy() == FIXED_REGISTER ||
165 policy() == FIXED_DOUBLE_REGISTER ||
166 policy() == FIXED_SLOT;
168 bool HasRegisterPolicy() const {
169 return policy() == WRITABLE_REGISTER || policy() == MUST_HAVE_REGISTER;
171 bool HasSameAsInputPolicy() const {
172 return policy() == SAME_AS_FIRST_INPUT;
174 Policy policy() const { return PolicyField::decode(value_); }
175 void set_policy(Policy policy) {
176 value_ = PolicyField::update(value_, policy);
// Arithmetic right shift recovers the signed fixed index from the top bits.
178 int fixed_index() const {
179 return static_cast<int>(value_) >> kFixedIndexShift;
182 int virtual_register() const {
183 return VirtualRegisterField::decode(value_);
186 void set_virtual_register(unsigned id) {
187 value_ = VirtualRegisterField::update(value_, id);
// Fresh ANY-policy copy that keeps only the virtual register id.
190 LUnallocated* CopyUnconstrained() {
191 LUnallocated* result = new LUnallocated(ANY);
192 result->set_virtual_register(virtual_register());
// Checked downcast; asserts the operand really is UNALLOCATED.
196 static LUnallocated* cast(LOperand* op) {
197 ASSERT(op->IsUnallocated());
198 return reinterpret_cast<LUnallocated*>(op);
201 bool IsUsedAtStart() {
202 return LifetimeField::decode(value_) == USED_AT_START;
// ORs the field encodings into value_; assumes those bits are still zero
// (true when called from the constructors above).
206 void Initialize(Policy policy, int fixed_index, Lifetime lifetime) {
207 value_ |= PolicyField::encode(policy);
208 value_ |= LifetimeField::encode(lifetime);
209 value_ |= fixed_index << kFixedIndexShift;
// Round-trip check: the fixed index must fit in the remaining bits.
210 ASSERT(this->fixed_index() == fixed_index);
// A source/destination operand pair describing a single move, as handled
// by the gap resolver.  The NULL-ness of source_/destination_ encodes the
// move's state: pending (destination cleared) or eliminated (both cleared).
215 class LMoveOperands BASE_EMBEDDED {
217 LMoveOperands(LOperand* source, LOperand* destination)
218 : source_(source), destination_(destination) {
221 LOperand* source() const { return source_; }
222 void set_source(LOperand* operand) { source_ = operand; }
224 LOperand* destination() const { return destination_; }
225 void set_destination(LOperand* operand) { destination_ = operand; }
227 // The gap resolver marks moves as "in-progress" by clearing the
228 // destination (but not the source).
229 bool IsPending() const {
230 return destination_ == NULL && source_ != NULL;
// True if this move's SOURCE is the given operand — i.e. writing to that
// operand is blocked until this move has been performed.
234 bool Blocks(LOperand* operand) const {
235 return !IsEliminated() && source()->Equals(operand);
238 // A move is redundant if it's been eliminated, if its source and
239 // destination are the same, or if its destination is unneeded.
240 bool IsRedundant() const {
241 return IsEliminated() || source_->Equals(destination_) || IsIgnored();
// "Unneeded" means the destination exists but is an INVALID operand.
244 bool IsIgnored() const {
245 return destination_ != NULL && destination_->IsIgnored();
248 // We clear both operands to indicate move that's been eliminated.
249 void Eliminate() { source_ = destination_ = NULL; }
250 bool IsEliminated() const {
// Invariant: a cleared source implies a cleared destination.
251 ASSERT(source_ != NULL || destination_ == NULL);
252 return source_ == NULL;
257 LOperand* destination_;
// Operand referring to a constant by index into the constant pool.  The
// first kNumCachedOperands indices share preallocated instances from a
// static cache (see SetUpCache()/TearDownCache()).
261 class LConstantOperand: public LOperand {
263 static LConstantOperand* Create(int index) {
// Small indices return the shared cached instance; larger ones allocate.
265 if (index < kNumCachedOperands) return &cache[index];
266 return new LConstantOperand(index);
// Checked downcast from LOperand.
269 static LConstantOperand* cast(LOperand* op) {
270 ASSERT(op->IsConstantOperand());
271 return reinterpret_cast<LConstantOperand*>(op);
274 static void SetUpCache();
275 static void TearDownCache();
278 static const int kNumCachedOperands = 128;
279 static LConstantOperand* cache;
281 LConstantOperand() : LOperand() { }
282 explicit LConstantOperand(int index) : LOperand(CONSTANT_OPERAND, index) { }
// Operand referring to an argument slot by index.  No static cache,
// unlike the other concrete operand classes.
286 class LArgument: public LOperand {
288 explicit LArgument(int index) : LOperand(ARGUMENT, index) { }
// Checked downcast from LOperand.
290 static LArgument* cast(LOperand* op) {
291 ASSERT(op->IsArgument());
292 return reinterpret_cast<LArgument*>(op);
// Operand referring to a (tagged/word-sized) stack slot by index.  The
// first kNumCachedOperands indices are served from a static cache.
297 class LStackSlot: public LOperand {
299 static LStackSlot* Create(int index) {
// Small indices return the shared cached instance; larger ones allocate.
301 if (index < kNumCachedOperands) return &cache[index];
302 return new LStackSlot(index);
// Checked downcast from LOperand.
305 static LStackSlot* cast(LOperand* op) {
306 ASSERT(op->IsStackSlot());
307 return reinterpret_cast<LStackSlot*>(op);
310 static void SetUpCache();
311 static void TearDownCache();
314 static const int kNumCachedOperands = 128;
315 static LStackSlot* cache;
317 LStackSlot() : LOperand() { }
318 explicit LStackSlot(int index) : LOperand(STACK_SLOT, index) { }
// Operand referring to a double-width stack slot by index.  The first
// kNumCachedOperands indices are served from a static cache, mirroring
// LStackSlot/LConstantOperand.
322 class LDoubleStackSlot: public LOperand {
324 static LDoubleStackSlot* Create(int index) {
// Small indices return the shared cached instance; larger ones allocate.
326 if (index < kNumCachedOperands) return &cache[index];
327 return new LDoubleStackSlot(index);
// Checked downcast from LOperand.
330 static LDoubleStackSlot* cast(LOperand* op) {
// FIX: was ASSERT(op->IsStackSlot()), which let a plain STACK_SLOT
// operand pass this checked cast; every sibling class asserts its own
// kind predicate.
331 ASSERT(op->IsDoubleStackSlot());
332 return reinterpret_cast<LDoubleStackSlot*>(op);
335 static void SetUpCache();
336 static void TearDownCache();
339 static const int kNumCachedOperands = 128;
340 static LDoubleStackSlot* cache;
342 LDoubleStackSlot() : LOperand() { }
343 explicit LDoubleStackSlot(int index) : LOperand(DOUBLE_STACK_SLOT, index) { }
// Operand referring to a general-purpose register by allocation index.
// The first kNumCachedOperands indices are served from a static cache.
347 class LRegister: public LOperand {
349 static LRegister* Create(int index) {
// Small indices return the shared cached instance; larger ones allocate.
351 if (index < kNumCachedOperands) return &cache[index];
352 return new LRegister(index);
// Checked downcast from LOperand.
355 static LRegister* cast(LOperand* op) {
356 ASSERT(op->IsRegister());
357 return reinterpret_cast<LRegister*>(op);
360 static void SetUpCache();
361 static void TearDownCache();
// Smaller cache than the stack-slot classes: register files are small.
364 static const int kNumCachedOperands = 16;
365 static LRegister* cache;
367 LRegister() : LOperand() { }
368 explicit LRegister(int index) : LOperand(REGISTER, index) { }
// Operand referring to a double (floating-point) register by allocation
// index.  The first kNumCachedOperands indices are served from a cache.
372 class LDoubleRegister: public LOperand {
374 static LDoubleRegister* Create(int index) {
// Small indices return the shared cached instance; larger ones allocate.
376 if (index < kNumCachedOperands) return &cache[index];
377 return new LDoubleRegister(index);
// Checked downcast from LOperand.
380 static LDoubleRegister* cast(LOperand* op) {
381 ASSERT(op->IsDoubleRegister());
382 return reinterpret_cast<LDoubleRegister*>(op);
385 static void SetUpCache();
386 static void TearDownCache();
389 static const int kNumCachedOperands = 16;
390 static LDoubleRegister* cache;
392 LDoubleRegister() : LOperand() { }
393 explicit LDoubleRegister(int index) : LOperand(DOUBLE_REGISTER, index) { }
// A set of moves that conceptually happen in parallel (e.g. at an
// instruction gap); consumed by the gap resolver via move_operands().
397 class LParallelMove : public ZoneObject {
// Initial list capacity of 4 moves.
399 LParallelMove() : move_operands_(4) { }
401 void AddMove(LOperand* from, LOperand* to) {
402 move_operands_.Add(LMoveOperands(from, to));
// True if every contained move is redundant (defined in lithium.cc).
405 bool IsRedundant() const;
407 const ZoneList<LMoveOperands>* move_operands() const {
408 return &move_operands_;
411 void PrintDataTo(StringStream* stream) const;
414 ZoneList<LMoveOperands> move_operands_;
// Records which operands hold tagged pointers at a given code position,
// for the GC.  Operands later found to be untagged are queued in
// untagged_operands_ and subtracted lazily in GetNormalizedOperands().
418 class LPointerMap: public ZoneObject {
// Initial capacities: 8 pointer operands, 0 untagged.
420 explicit LPointerMap(int position)
421 : pointer_operands_(8),
422 untagged_operands_(0),
424 lithium_position_(-1) { }
// Removes all queued untagged operands from pointer_operands_, empties
// the queue, and returns the normalized pointer list.
426 const ZoneList<LOperand*>* GetNormalizedOperands() {
427 for (int i = 0; i < untagged_operands_.length(); ++i) {
428 RemovePointer(untagged_operands_[i]);
430 untagged_operands_.Clear();
431 return &pointer_operands_;
433 int position() const { return position_; }
434 int lithium_position() const { return lithium_position_; }
// Write-once: -1 means "not yet set".
436 void set_lithium_position(int pos) {
437 ASSERT(lithium_position_ == -1);
438 lithium_position_ = pos;
441 void RecordPointer(LOperand* op);
442 void RemovePointer(LOperand* op);
443 void RecordUntagged(LOperand* op);
444 void PrintTo(StringStream* stream);
447 ZoneList<LOperand*> pointer_operands_;
448 ZoneList<LOperand*> untagged_operands_;
450 int lithium_position_;
// Describes the abstract-interpretation environment (closure, frame
// values, argument count, ...) needed to materialize a frame when
// deoptimizing at a point in optimized code.  Environments form a chain
// through outer() for inlined frames.
454 class LEnvironment: public ZoneObject {
456 LEnvironment(Handle<JSFunction> closure,
457 FrameType frame_type,
464 frame_type_(frame_type),
465 arguments_stack_height_(argument_count),
// Not registered with the deoptimizer yet; see Register() below.
466 deoptimization_index_(Safepoint::kNoDeoptimizationIndex),
467 translation_index_(-1),
469 parameter_count_(parameter_count),
471 values_(value_count),
// BitVector sized to value_count, allocated in the isolate's zone;
// tracks which value slots hold tagged values.
472 is_tagged_(value_count, closure->GetHeap()->isolate()->zone()),
473 spilled_registers_(NULL),
474 spilled_double_registers_(NULL),
477 Handle<JSFunction> closure() const { return closure_; }
478 FrameType frame_type() const { return frame_type_; }
479 int arguments_stack_height() const { return arguments_stack_height_; }
480 int deoptimization_index() const { return deoptimization_index_; }
481 int translation_index() const { return translation_index_; }
482 int ast_id() const { return ast_id_; }
483 int parameter_count() const { return parameter_count_; }
484 int pc_offset() const { return pc_offset_; }
485 LOperand** spilled_registers() const { return spilled_registers_; }
486 LOperand** spilled_double_registers() const {
487 return spilled_double_registers_;
489 const ZoneList<LOperand*>* values() const { return &values_; }
490 LEnvironment* outer() const { return outer_; }
// Appends a value; tagged values have their slot index recorded in the
// is_tagged_ bit vector for later HasTaggedValueAt() queries.
492 void AddValue(LOperand* operand, Representation representation) {
493 values_.Add(operand);
494 if (representation.IsTagged()) {
495 is_tagged_.Add(values_.length() - 1);
499 bool HasTaggedValueAt(int index) const {
500 return is_tagged_.Contains(index);
// One-time registration of the deoptimization data for this environment
// (asserted via HasBeenRegistered()).
503 void Register(int deoptimization_index,
504 int translation_index,
506 ASSERT(!HasBeenRegistered());
507 deoptimization_index_ = deoptimization_index;
508 translation_index_ = translation_index;
509 pc_offset_ = pc_offset;
511 bool HasBeenRegistered() const {
512 return deoptimization_index_ != Safepoint::kNoDeoptimizationIndex;
515 void SetSpilledRegisters(LOperand** registers,
516 LOperand** double_registers) {
517 spilled_registers_ = registers;
518 spilled_double_registers_ = double_registers;
521 void PrintTo(StringStream* stream);
524 Handle<JSFunction> closure_;
525 FrameType frame_type_;
526 int arguments_stack_height_;
527 int deoptimization_index_;
528 int translation_index_;
530 int parameter_count_;
532 ZoneList<LOperand*> values_;
533 BitVector is_tagged_;
535 // Allocation index indexed arrays of spill slot operands for registers
536 // that are also in spill slots at an OSR entry. NULL for environments
537 // that do not correspond to an OSR entry.
538 LOperand** spilled_registers_;
539 LOperand** spilled_double_registers_;
// Environment of the enclosing (inlining) frame, or NULL.
541 LEnvironment* outer_;
545 // Iterates over the non-null, non-constant operands in an environment.
// Skips NULL operands, constants and arguments (see ShouldSkip); does
// not descend into outer environments — DeepIterator does that.
546 class ShallowIterator BASE_EMBEDDED {
// A NULL environment yields an immediately-Done iterator (limit 0).
548 explicit ShallowIterator(LEnvironment* env)
550 limit_(env != NULL ? env->values()->length() : 0),
555 bool Done() { return current_ >= limit_; }
557 LOperand* Current() {
559 return env_->values()->at(current_);
568 LEnvironment* env() { return env_; }
// Operands with no location, constants and arguments are uninteresting
// to callers of this iterator.
571 bool ShouldSkip(LOperand* op) {
572 return op == NULL || op->IsConstantOperand() || op->IsArgument();
575 // Skip until something interesting, beginning with and including current_.
576 void SkipUninteresting() {
577 while (current_ < limit_ && ShouldSkip(env_->values()->at(current_))) {
588 // Iterator for non-null, non-constant operands incl. outer environments.
// Wraps a ShallowIterator and, when it is exhausted, re-seats it on the
// next outer environment in the chain.
589 class DeepIterator BASE_EMBEDDED {
591 explicit DeepIterator(LEnvironment* env)
592 : current_iterator_(env) {
// Done only when the innermost remaining iterator is done (after outer
// environments have been exhausted too — see SkipUninteresting()).
596 bool Done() { return current_iterator_.Done(); }
598 LOperand* Current() {
599 ASSERT(!current_iterator_.Done());
600 return current_iterator_.Current();
604 current_iterator_.Advance();
// Advance to the next outer environment whenever the current shallow
// iterator runs out; stops at a NULL env (end of the chain).
609 void SkipUninteresting() {
610 while (current_iterator_.env() != NULL && current_iterator_.Done()) {
611 current_iterator_ = ShallowIterator(current_iterator_.env()->outer());
615 ShallowIterator current_iterator_;
// Presumably returns log2 of the element size in bytes for the given
// elements kind (used as an address shift) — implementation not in this
// header; confirm in lithium.cc.
619 int ElementsKindToShiftSize(ElementsKind elements_kind);
622 } } // namespace v8::internal
624 #endif // V8_LITHIUM_H_