1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 #include "allocation.h"
33 #include "safepoint-table.h"
// X-macro list of the concrete, allocated LOperand kinds:
//   V(class-name suffix, LOperand::Kind enum value, cache size)
// Expanded below to declare the Is##name() predicates on LOperand and the
// LSubKindOperand typedefs (LConstantOperand, LStackSlot, ...). The third
// argument is the number of statically cached instances per kind.
38 #define LITHIUM_OPERAND_LIST(V) \
39 V(ConstantOperand, CONSTANT_OPERAND, 128) \
40 V(StackSlot, STACK_SLOT, 128) \
41 V(DoubleStackSlot, DOUBLE_STACK_SLOT, 128) \
42 V(Float32x4StackSlot, FLOAT32x4_STACK_SLOT, 128) \
43 V(Int32x4StackSlot, INT32x4_STACK_SLOT, 128) \
44 V(Register, REGISTER, 16) \
45 V(DoubleRegister, DOUBLE_REGISTER, 16) \
46 V(Float32x4Register, FLOAT32x4_REGISTER, 16) \
47 V(Int32x4Register, INT32x4_REGISTER, 16)
// Base class of all Lithium (low-level IR) operands. The whole state is
// packed into the single |value_| word: a Kind tag in the low
// kKindFieldWidth bits and a kind-specific index in the remaining bits.
50 class LOperand : public ZoneObject {
67 LOperand() : value_(KindField::encode(INVALID)) { }
69 Kind kind() const { return KindField::decode(value_); }
// Index lives in the bits above the kind tag; the signed (arithmetic)
// shift preserves negative indices.
70 int index() const { return static_cast<int>(value_) >> kKindFieldWidth; }
// Generates IsConstantOperand(), IsStackSlot(), ... for every kind in
// LITHIUM_OPERAND_LIST, plus the three extra predicates below. The
// |number| argument is unused here.
71 #define LITHIUM_OPERAND_PREDICATE(name, type, number) \
72 bool Is##name() const { return kind() == type; }
73 LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_PREDICATE)
74 LITHIUM_OPERAND_PREDICATE(Argument, ARGUMENT, 0)
75 LITHIUM_OPERAND_PREDICATE(Unallocated, UNALLOCATED, 0)
76 LITHIUM_OPERAND_PREDICATE(Ignored, INVALID, 0)
77 #undef LITHIUM_OPERAND_PREDICATE
78 bool IsSIMD128Register() const {
79 return kind() == FLOAT32x4_REGISTER || kind() == INT32x4_REGISTER;
81 bool IsSIMD128StackSlot() const {
82 return kind() == FLOAT32x4_STACK_SLOT || kind() == INT32x4_STACK_SLOT;
// Equal if the encodings match exactly, or if both are SIMD128 operands of
// the same flavor (registers resp. stack slots) with the same index --
// i.e. a FLOAT32x4 and an INT32x4 operand naming the same location compare
// equal even though their kinds differ.
84 bool Equals(LOperand* other) const {
85 return value_ == other->value_ || (index() == other->index() &&
86 ((IsSIMD128Register() && other->IsSIMD128Register()) ||
87 (IsSIMD128StackSlot() && other->IsSIMD128StackSlot())));
90 void PrintTo(StringStream* stream);
// Re-tags this operand in place; the ASSERT catches index values that do
// not survive the shift into the upper bits.
91 void ConvertTo(Kind kind, int index) {
92 value_ = KindField::encode(kind);
93 value_ |= index << kKindFieldWidth;
94 ASSERT(this->index() == index);
97 // Calls SetUpCache()/TearDownCache() for each subclass.
98 static void SetUpCaches();
99 static void TearDownCaches();
102 static const int kKindFieldWidth = 4;
103 class KindField : public BitField<Kind, 0, kKindFieldWidth> { };
105 LOperand(Kind kind, int index) { ConvertTo(kind, index); }
// An operand the register allocator has not yet assigned a location to.
// Carries an allocation policy, a virtual register id and (for extended
// policies) a lifetime, all packed into |value_| -- see the encoding
// diagram further down.
111 class LUnallocated : public LOperand {
118 enum ExtendedPolicy {
122 FIXED_DOUBLE_REGISTER,
128 // Lifetime of operand inside the instruction.
130 // USED_AT_START operand is guaranteed to be live only at
131 // instruction start. Register allocator is free to assign the same register
132 // to some other operand used inside instruction (i.e. temporary or
136 // USED_AT_END operand is treated as live until the end of
137 // instruction. This means that register allocator will not reuse its
138 // register for any other operand inside instruction.
// Extended policy with the default USED_AT_END lifetime.
142 explicit LUnallocated(ExtendedPolicy policy) : LOperand(UNALLOCATED, 0) {
143 value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
144 value_ |= ExtendedPolicyField::encode(policy);
145 value_ |= LifetimeField::encode(USED_AT_END);
// FIXED_SLOT policy with the given (signed) spill slot index; the second
// ASSERT verifies the index fits the manual shifted encoding.
148 LUnallocated(BasicPolicy policy, int index) : LOperand(UNALLOCATED, 0) {
149 ASSERT(policy == FIXED_SLOT);
150 value_ |= BasicPolicyField::encode(policy);
151 value_ |= index << FixedSlotIndexField::kShift;
152 ASSERT(this->fixed_slot_index() == index);
// Fixed (double) register policy pinned to register |index|, USED_AT_END.
155 LUnallocated(ExtendedPolicy policy, int index) : LOperand(UNALLOCATED, 0) {
156 ASSERT(policy == FIXED_REGISTER || policy == FIXED_DOUBLE_REGISTER);
157 value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
158 value_ |= ExtendedPolicyField::encode(policy);
159 value_ |= LifetimeField::encode(USED_AT_END);
160 value_ |= FixedRegisterField::encode(index);
// Extended policy with an explicitly chosen lifetime.
163 LUnallocated(ExtendedPolicy policy, Lifetime lifetime)
164 : LOperand(UNALLOCATED, 0) {
165 value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
166 value_ |= ExtendedPolicyField::encode(policy);
167 value_ |= LifetimeField::encode(lifetime);
// Clone with the same virtual register but the unconstrained ANY policy.
170 LUnallocated* CopyUnconstrained(Zone* zone) {
171 LUnallocated* result = new(zone) LUnallocated(ANY);
172 result->set_virtual_register(virtual_register());
176 static LUnallocated* cast(LOperand* op) {
177 ASSERT(op->IsUnallocated());
178 return reinterpret_cast<LUnallocated*>(op);
181 // The encoding used for LUnallocated operands depends on the policy that is
182 // stored within the operand. The FIXED_SLOT policy uses a compact encoding
183 // because it accommodates a larger payload.
185 // For FIXED_SLOT policy:
186 // +-------------------------------------------+
187 // | slot_index | vreg | 0 | 0001 |
188 // +-------------------------------------------+
190 // For all other (extended) policies:
191 // +-------------------------------------------+
192 // | reg_index | L | PPP | vreg | 1 | 0001 | L ... Lifetime
193 // +-------------------------------------------+ P ... Policy
195 // The slot index is a signed value which requires us to decode it manually
196 // instead of using the BitField utility class.
198 // The superclass has a KindField.
199 STATIC_ASSERT(kKindFieldWidth == 4);
201 // BitFields for all unallocated operands.
202 class BasicPolicyField : public BitField<BasicPolicy, 4, 1> {};
203 class VirtualRegisterField : public BitField<unsigned, 5, 18> {};
205 // BitFields specific to BasicPolicy::FIXED_SLOT.
206 class FixedSlotIndexField : public BitField<int, 23, 9> {};
208 // BitFields specific to BasicPolicy::EXTENDED_POLICY.
209 class ExtendedPolicyField : public BitField<ExtendedPolicy, 23, 3> {};
210 class LifetimeField : public BitField<Lifetime, 26, 1> {};
211 class FixedRegisterField : public BitField<int, 27, 5> {};
213 static const int kMaxVirtualRegisters = VirtualRegisterField::kMax + 1;
214 static const int kFixedSlotIndexWidth = FixedSlotIndexField::kSize;
215 static const int kMaxFixedSlotIndex = (1 << (kFixedSlotIndexWidth - 1)) - 1;
216 static const int kMinFixedSlotIndex = -(1 << (kFixedSlotIndexWidth - 1));
218 // Predicates for the operand policy.
// Note: relies on short-circuiting -- extended_policy() ASSERTs that the
// basic policy is EXTENDED_POLICY, which holds whenever it is not
// FIXED_SLOT (BasicPolicyField is a single bit).
219 bool HasAnyPolicy() const {
220 return basic_policy() == EXTENDED_POLICY &&
221 extended_policy() == ANY;
223 bool HasFixedPolicy() const {
224 return basic_policy() == FIXED_SLOT ||
225 extended_policy() == FIXED_REGISTER ||
226 extended_policy() == FIXED_DOUBLE_REGISTER;
228 bool HasRegisterPolicy() const {
229 return basic_policy() == EXTENDED_POLICY && (
230 extended_policy() == WRITABLE_REGISTER ||
231 extended_policy() == MUST_HAVE_REGISTER);
233 bool HasSameAsInputPolicy() const {
234 return basic_policy() == EXTENDED_POLICY &&
235 extended_policy() == SAME_AS_FIRST_INPUT;
237 bool HasFixedSlotPolicy() const {
238 return basic_policy() == FIXED_SLOT;
240 bool HasFixedRegisterPolicy() const {
241 return basic_policy() == EXTENDED_POLICY &&
242 extended_policy() == FIXED_REGISTER;
244 bool HasFixedDoubleRegisterPolicy() const {
245 return basic_policy() == EXTENDED_POLICY &&
246 extended_policy() == FIXED_DOUBLE_REGISTER;
248 bool HasWritableRegisterPolicy() const {
249 return basic_policy() == EXTENDED_POLICY &&
250 extended_policy() == WRITABLE_REGISTER;
253 // [basic_policy]: Distinguish between FIXED_SLOT and all other policies.
254 BasicPolicy basic_policy() const {
255 return BasicPolicyField::decode(value_);
258 // [extended_policy]: Only for non-FIXED_SLOT. The finer-grained policy.
259 ExtendedPolicy extended_policy() const {
260 ASSERT(basic_policy() == EXTENDED_POLICY);
261 return ExtendedPolicyField::decode(value_);
264 // [fixed_slot_index]: Only for FIXED_SLOT.
// Decoded with an arithmetic shift (not BitField) because the index is
// signed; see the comment above the encoding diagram.
265 int fixed_slot_index() const {
266 ASSERT(HasFixedSlotPolicy());
267 return static_cast<int>(value_) >> FixedSlotIndexField::kShift;
270 // [fixed_register_index]: Only for FIXED_REGISTER or FIXED_DOUBLE_REGISTER.
271 int fixed_register_index() const {
272 ASSERT(HasFixedRegisterPolicy() || HasFixedDoubleRegisterPolicy());
273 return FixedRegisterField::decode(value_);
276 // [virtual_register]: The virtual register ID for this operand.
277 int virtual_register() const {
278 return VirtualRegisterField::decode(value_);
280 void set_virtual_register(unsigned id) {
281 value_ = VirtualRegisterField::update(value_, id);
284 // [lifetime]: Only for non-FIXED_SLOT.
285 bool IsUsedAtStart() {
286 ASSERT(basic_policy() == EXTENDED_POLICY);
287 return LifetimeField::decode(value_) == USED_AT_START;
// One move of a value from a source operand to a destination operand, as
// scheduled by the gap resolver inside an LParallelMove.
292 class LMoveOperands V8_FINAL BASE_EMBEDDED {
294 LMoveOperands(LOperand* source, LOperand* destination)
295 : source_(source), destination_(destination) {
298 LOperand* source() const { return source_; }
299 void set_source(LOperand* operand) { source_ = operand; }
301 LOperand* destination() const { return destination_; }
302 void set_destination(LOperand* operand) { destination_ = operand; }
304 // The gap resolver marks moves as "in-progress" by clearing the
305 // destination (but not the source).
306 bool IsPending() const {
307 return destination_ == NULL && source_ != NULL;
310 // True if this (non-eliminated) move reads the given operand as its
310 // source, i.e. writes to |operand| must wait for this move.
311 bool Blocks(LOperand* operand) const {
312 return !IsEliminated() && source()->Equals(operand);
315 // A move is redundant if it's been eliminated, if its source and
316 // destination are the same, or if its destination is unneeded.
317 bool IsRedundant() const {
318 return IsEliminated() || source_->Equals(destination_) || IsIgnored();
321 bool IsIgnored() const {
322 return destination_ != NULL && destination_->IsIgnored();
325 // We clear both operands to indicate move that's been eliminated.
326 void Eliminate() { source_ = destination_ = NULL; }
327 bool IsEliminated() const {
328 ASSERT(source_ != NULL || destination_ == NULL);
329 return source_ == NULL;
334 LOperand* destination_;
// Concrete operand class for one particular LOperand::Kind. The first
// kNumCachedOperands indices share statically cached instances (set up by
// SetUpCache()); larger indices are freshly allocated in the zone.
338 template<LOperand::Kind kOperandKind, int kNumCachedOperands>
339 class LSubKindOperand V8_FINAL : public LOperand {
341 static LSubKindOperand* Create(int index, Zone* zone) {
343 if (index < kNumCachedOperands) return &cache[index];
344 return new(zone) LSubKindOperand(index);
347 static LSubKindOperand* cast(LOperand* op) {
348 ASSERT(op->kind() == kOperandKind);
349 return reinterpret_cast<LSubKindOperand*>(op);
352 static void SetUpCache();
353 static void TearDownCache();
// Per-instantiation static array of the cached operands.
356 static LSubKindOperand* cache;
358 LSubKindOperand() : LOperand() { }
359 explicit LSubKindOperand(int index) : LOperand(kOperandKind, index) { }
// Declares LConstantOperand, LStackSlot, LDoubleStackSlot, LRegister, etc.
// as typedefs of the matching LSubKindOperand instantiation, using the
// kind and cache size from LITHIUM_OPERAND_LIST.
363 #define LITHIUM_TYPEDEF_SUBKIND_OPERAND_CLASS(name, type, number) \
364 typedef LSubKindOperand<LOperand::type, number> L##name;
365 LITHIUM_OPERAND_LIST(LITHIUM_TYPEDEF_SUBKIND_OPERAND_CLASS)
366 #undef LITHIUM_TYPEDEF_SUBKIND_OPERAND_CLASS
// Operand naming a function argument by its index (ARGUMENT kind);
// arguments are not cached, unlike the LSubKindOperand kinds.
369 class LArgument V8_FINAL : public LOperand {
371 explicit LArgument(int index) : LOperand(ARGUMENT, index) { }
373 static LArgument* cast(LOperand* op) {
374 ASSERT(op->IsArgument());
375 return reinterpret_cast<LArgument*>(op);
// An ordered collection of LMoveOperands that are to be performed as one
// parallel (simultaneous) move, e.g. at a gap position.
380 class LParallelMove V8_FINAL : public ZoneObject {
382 explicit LParallelMove(Zone* zone) : move_operands_(4, zone) { }
384 void AddMove(LOperand* from, LOperand* to, Zone* zone) {
385 move_operands_.Add(LMoveOperands(from, to), zone);
// True when the whole parallel move has no effect (defined in lithium.cc).
388 bool IsRedundant() const;
390 const ZoneList<LMoveOperands>* move_operands() const {
391 return &move_operands_;
394 void PrintDataTo(StringStream* stream) const;
397 ZoneList<LMoveOperands> move_operands_;
// Records which operands may hold tagged pointers at one position in the
// lithium code. Operands can also be recorded as untagged; normalization
// subtracts those from the pointer list again.
// NOTE(review): presumably consumed when emitting safepoint information
// (safepoint-table.h is included above) -- confirm against the codegen.
401 class LPointerMap V8_FINAL : public ZoneObject {
403 explicit LPointerMap(Zone* zone)
404 : pointer_operands_(8, zone),
405 untagged_operands_(0, zone),
406 lithium_position_(-1) { }
// Removes every recorded untagged operand from the pointer list, clears
// the untagged list, and returns the remaining pointer operands.
408 const ZoneList<LOperand*>* GetNormalizedOperands() {
409 for (int i = 0; i < untagged_operands_.length(); ++i) {
410 RemovePointer(untagged_operands_[i]);
412 untagged_operands_.Clear();
413 return &pointer_operands_;
415 int lithium_position() const { return lithium_position_; }
// Position of this map in the lithium instruction stream; set exactly
// once (guarded by the ASSERT on the -1 sentinel).
417 void set_lithium_position(int pos) {
418 ASSERT(lithium_position_ == -1);
419 lithium_position_ = pos;
422 void RecordPointer(LOperand* op, Zone* zone);
423 void RemovePointer(LOperand* op);
424 void RecordUntagged(LOperand* op, Zone* zone);
425 void PrintTo(StringStream* stream);
428 ZoneList<LOperand*> pointer_operands_;
429 ZoneList<LOperand*> untagged_operands_;
430 int lithium_position_;
// Snapshot of the abstract execution state (closure, frame type, and the
// operands holding each parameter/local/expression-stack value) at a
// lithium instruction. Used to build deoptimization translations; an
// inlined frame's environment links to its caller's via outer().
434 class LEnvironment V8_FINAL : public ZoneObject {
436 LEnvironment(Handle<JSFunction> closure,
437 FrameType frame_type,
443 HEnterInlined* entry,
446 frame_type_(frame_type),
447 arguments_stack_height_(argument_count),
448 deoptimization_index_(Safepoint::kNoDeoptimizationIndex),
449 translation_index_(-1),
451 translation_size_(value_count),
452 parameter_count_(parameter_count),
454 values_(value_count, zone),
455 is_tagged_(value_count, zone),
456 is_uint32_(value_count, zone),
457 object_mapping_(0, zone),
462 Handle<JSFunction> closure() const { return closure_; }
463 FrameType frame_type() const { return frame_type_; }
464 int arguments_stack_height() const { return arguments_stack_height_; }
465 int deoptimization_index() const { return deoptimization_index_; }
466 int translation_index() const { return translation_index_; }
467 BailoutId ast_id() const { return ast_id_; }
468 int translation_size() const { return translation_size_; }
469 int parameter_count() const { return parameter_count_; }
470 int pc_offset() const { return pc_offset_; }
471 const ZoneList<LOperand*>* values() const { return &values_; }
472 LEnvironment* outer() const { return outer_; }
473 HEnterInlined* entry() { return entry_; }
474 Zone* zone() const { return zone_; }
// Appends |operand|, remembering in the bit vectors whether the value is
// smi/tagged or uint32 so HasTaggedValueAt()/HasUint32ValueAt() can
// report it later.
476 void AddValue(LOperand* operand,
477 Representation representation,
479 values_.Add(operand, zone());
480 if (representation.IsSmiOrTagged()) {
482 is_tagged_.Add(values_.length() - 1, zone());
486 is_uint32_.Add(values_.length() - 1, zone());
490 bool HasTaggedValueAt(int index) const {
491 return is_tagged_.Contains(index);
494 bool HasUint32ValueAt(int index) const {
495 return is_uint32_.Contains(index);
// Appends a fresh (non-duplicate) object of |length| slots to the object
// mapping; |is_arguments| distinguishes arguments objects.
498 void AddNewObject(int length, bool is_arguments) {
499 uint32_t encoded = LengthOrDupeField::encode(length) |
500 IsArgumentsField::encode(is_arguments) |
501 IsDuplicateField::encode(false);
502 object_mapping_.Add(encoded, zone());
// Appends an entry marking a duplicate of the object at index |dupe_of|.
505 void AddDuplicateObject(int dupe_of) {
506 uint32_t encoded = LengthOrDupeField::encode(dupe_of) |
507 IsDuplicateField::encode(true);
508 object_mapping_.Add(encoded, zone());
// The LengthOrDupeField is overloaded: duplicate-of index for duplicate
// entries, slot count for fresh objects -- hence the paired ASSERTs.
511 int ObjectDuplicateOfAt(int index) {
512 ASSERT(ObjectIsDuplicateAt(index));
513 return LengthOrDupeField::decode(object_mapping_[index]);
516 int ObjectLengthAt(int index) {
517 ASSERT(!ObjectIsDuplicateAt(index));
518 return LengthOrDupeField::decode(object_mapping_[index]);
521 bool ObjectIsArgumentsAt(int index) {
522 ASSERT(!ObjectIsDuplicateAt(index));
523 return IsArgumentsField::decode(object_mapping_[index]);
526 bool ObjectIsDuplicateAt(int index) {
527 return IsDuplicateField::decode(object_mapping_[index]);
// Records where this environment's deoptimization entry ended up. May be
// called at most once; HasBeenRegistered() keys off the index sentinel.
530 void Register(int deoptimization_index,
531 int translation_index,
533 ASSERT(!HasBeenRegistered());
534 deoptimization_index_ = deoptimization_index;
535 translation_index_ = translation_index;
536 pc_offset_ = pc_offset;
538 bool HasBeenRegistered() const {
539 return deoptimization_index_ != Safepoint::kNoDeoptimizationIndex;
542 void PrintTo(StringStream* stream);
544 // Marker value indicating a de-materialized object.
545 static LOperand* materialization_marker() { return NULL; }
547 // Encoding used for the object_mapping map below.
548 class LengthOrDupeField : public BitField<int, 0, 30> { };
549 class IsArgumentsField : public BitField<bool, 30, 1> { };
550 class IsDuplicateField : public BitField<bool, 31, 1> { };
553 Handle<JSFunction> closure_;
554 FrameType frame_type_;
555 int arguments_stack_height_;
556 int deoptimization_index_;
557 int translation_index_;
559 int translation_size_;
560 int parameter_count_;
563 // Value array: [parameters] [locals] [expression stack] [de-materialized].
564 // |>--------- translation_size ---------<|
565 ZoneList<LOperand*> values_;
566 GrowableBitVector is_tagged_;
567 GrowableBitVector is_uint32_;
569 // Map with encoded information about materialization_marker operands.
570 ZoneList<uint32_t> object_mapping_;
572 LEnvironment* outer_;
573 HEnterInlined* entry_;
578 // Iterates over the non-null, non-constant operands in an environment.
579 class ShallowIterator V8_FINAL BASE_EMBEDDED {
581 explicit ShallowIterator(LEnvironment* env)
583 limit_(env != NULL ? env->values()->length() : 0),
588 bool Done() { return current_ >= limit_; }
// Current operand; only valid while !Done().
590 LOperand* Current() {
592 ASSERT(env_->values()->at(current_) != NULL);
593 return env_->values()->at(current_);
602 LEnvironment* env() { return env_; }
// Operands the iterator hides: holes, constants and arguments.
605 bool ShouldSkip(LOperand* op) {
606 return op == NULL || op->IsConstantOperand() || op->IsArgument();
609 // Skip until something interesting, beginning with and including current_.
610 void SkipUninteresting() {
611 while (current_ < limit_ && ShouldSkip(env_->values()->at(current_))) {
622 // Iterator for non-null, non-constant operands incl. outer environments.
623 class DeepIterator V8_FINAL BASE_EMBEDDED {
625 explicit DeepIterator(LEnvironment* env)
626 : current_iterator_(env) {
630 bool Done() { return current_iterator_.Done(); }
632 LOperand* Current() {
633 ASSERT(!current_iterator_.Done());
634 ASSERT(current_iterator_.Current() != NULL);
635 return current_iterator_.Current();
639 current_iterator_.Advance();
// When the current environment is exhausted, hop outward along the
// outer() chain until an environment with remaining operands is found or
// the chain ends.
644 void SkipUninteresting() {
645 while (current_iterator_.env() != NULL && current_iterator_.Done()) {
646 current_iterator_ = ShallowIterator(current_iterator_.env()->outer());
650 ShallowIterator current_iterator_;
654 class LPlatformChunk;
658 // Superclass providing data and behavior common to all the
659 // arch-specific LPlatformChunk classes. Owns the lithium instruction
// stream, pointer maps, spill-slot bookkeeping and inlined closures for
// one compiled function.
660 class LChunk : public ZoneObject {
// Builds and returns the chunk for |graph|.
662 static LChunk* NewChunk(HGraph* graph);
664 void AddInstruction(LInstruction* instruction, HBasicBlock* block);
665 LConstantOperand* DefineConstantOperand(HConstant* constant);
666 HConstant* LookupConstant(LConstantOperand* operand) const;
667 Representation LookupLiteralRepresentation(LConstantOperand* operand) const;
669 int ParameterAt(int index);
670 int GetParameterStackSlot(int index) const;
671 int spill_slot_count() const { return spill_slot_count_; }
672 CompilationInfo* info() const { return info_; }
673 HGraph* graph() const { return graph_; }
674 Isolate* isolate() const { return graph_->isolate(); }
675 const ZoneList<LInstruction*>* instructions() const { return &instructions_; }
// Gap (parallel move) handling at instruction positions.
676 void AddGapMove(int index, LOperand* from, LOperand* to);
677 LGap* GetGapAt(int index) const;
678 bool IsGapAt(int index) const;
679 int NearestGapPos(int index) const;
680 void MarkEmptyBlocks();
681 const ZoneList<LPointerMap*>* pointer_maps() const { return &pointer_maps_; }
// Block labels and branch-target resolution.
682 LLabel* GetLabel(int block_id) const;
683 int LookupDestination(int block_id) const;
684 Label* GetAssemblyLabel(int block_id) const;
686 const ZoneList<Handle<JSFunction> >* inlined_closures() const {
687 return &inlined_closures_;
690 void AddInlinedClosure(Handle<JSFunction> closure) {
691 inlined_closures_.Add(closure, zone());
694 Zone* zone() const { return info_->zone(); }
// Runs code generation for this chunk and returns the resulting code.
696 Handle<Code> Codegen();
698 void set_allocated_double_registers(BitVector* allocated_registers);
699 BitVector* allocated_double_registers() {
700 return allocated_double_registers_;
704 LChunk(CompilationInfo* info, HGraph* graph);
706 int spill_slot_count_;
709 CompilationInfo* info_;
710 HGraph* const graph_;
711 BitVector* allocated_double_registers_;
712 ZoneList<LInstruction*> instructions_;
713 ZoneList<LPointerMap*> pointer_maps_;
714 ZoneList<Handle<JSFunction> > inlined_closures_;
// Shared base for the arch-specific LChunkBuilder classes that lower the
// hydrogen graph into lithium instructions.
718 class LChunkBuilderBase BASE_EMBEDDED {
720 explicit LChunkBuilderBase(Zone* zone)
721 : argument_count_(0),
724 virtual ~LChunkBuilderBase() { }
727 // An input operand in register, stack slot or a constant operand.
728 // Will not be moved to a register even if one is freely available.
729 virtual MUST_USE_RESULT LOperand* UseAny(HValue* value) = 0;
// Translates |hydrogen_env| (and, via its chain, the outer environments)
// into an LEnvironment; |objects_to_materialize| collects the HValues
// whose objects must be rebuilt on deoptimization.
731 LEnvironment* CreateEnvironment(HEnvironment* hydrogen_env,
732 int* argument_index_accumulator,
733 ZoneList<HValue*>* objects_to_materialize);
734 void AddObjectToMaterialize(HValue* value,
735 ZoneList<HValue*>* objects_to_materialize,
736 LEnvironment* result);
738 Zone* zone() const { return zone_; }
// Frame offset for the spill slot with the given index.
// NOTE(review): defined out of line; units/sign convention are
// architecture-dependent -- confirm against the implementation.
747 int StackSlotOffset(int index);
// Hint for number untagging: the candidate value is statically known to be
// a smi, or may be any tagged number.
749 enum NumberUntagDMode {
750 NUMBER_CANDIDATE_IS_SMI,
751 NUMBER_CANDIDATE_IS_ANY_TAGGED
// CompilationPhase specialization for lithium passes; ties the named phase
// to the CompilationInfo of the chunk being processed.
755 class LPhase : public CompilationPhase {
757 LPhase(const char* name, LChunk* chunk)
758 : CompilationPhase(name, chunk->info()),
765 DISALLOW_COPY_AND_ASSIGN(LPhase);
769 } } // namespace v8::internal
771 #endif // V8_LITHIUM_H_