1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 #include "lithium-allocator-inl.h"
31 #include "arm64/lithium-arm64.h"
32 #include "arm64/lithium-codegen-arm64.h"
33 #include "hydrogen-osr.h"
39 #define DEFINE_COMPILE(type) \
40 void L##type::CompileToNative(LCodeGen* generator) { \
41 generator->Do##type(this); \
43 LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
47 void LInstruction::VerifyCall() {
48 // Call instructions can use only fixed registers as temporaries and
49 // outputs because all registers are blocked by the calling convention.
50 // Inputs operands must use a fixed register or use-at-start policy or
51 // a non-register policy.
52 ASSERT(Output() == NULL ||
53 LUnallocated::cast(Output())->HasFixedPolicy() ||
54 !LUnallocated::cast(Output())->HasRegisterPolicy());
55 for (UseIterator it(this); !it.Done(); it.Advance()) {
56 LUnallocated* operand = LUnallocated::cast(it.Current());
57 ASSERT(operand->HasFixedPolicy() ||
58 operand->IsUsedAtStart());
60 for (TempIterator it(this); !it.Done(); it.Advance()) {
61 LUnallocated* operand = LUnallocated::cast(it.Current());
62 ASSERT(operand->HasFixedPolicy() ||!operand->HasRegisterPolicy());
68 void LLabel::PrintDataTo(StringStream* stream) {
69 LGap::PrintDataTo(stream);
70 LLabel* rep = replacement();
72 stream->Add(" Dead block replaced with B%d", rep->block_id());
77 void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
78 arguments()->PrintTo(stream);
79 stream->Add(" length ");
80 length()->PrintTo(stream);
81 stream->Add(" index ");
82 index()->PrintTo(stream);
86 void LBranch::PrintDataTo(StringStream* stream) {
87 stream->Add("B%d | B%d on ", true_block_id(), false_block_id());
88 value()->PrintTo(stream);
92 void LCallJSFunction::PrintDataTo(StringStream* stream) {
94 function()->PrintTo(stream);
95 stream->Add("#%d / ", arity());
99 void LCallWithDescriptor::PrintDataTo(StringStream* stream) {
100 for (int i = 0; i < InputCount(); i++) {
101 InputAt(i)->PrintTo(stream);
104 stream->Add("#%d / ", arity());
108 void LCallNew::PrintDataTo(StringStream* stream) {
110 constructor()->PrintTo(stream);
111 stream->Add(" #%d / ", arity());
115 void LCallNewArray::PrintDataTo(StringStream* stream) {
117 constructor()->PrintTo(stream);
118 stream->Add(" #%d / ", arity());
119 ElementsKind kind = hydrogen()->elements_kind();
120 stream->Add(" (%s) ", ElementsKindToString(kind));
124 void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
125 stream->Add("if class_of_test(");
126 value()->PrintTo(stream);
127 stream->Add(", \"%o\") then B%d else B%d",
128 *hydrogen()->class_name(),
134 void LCompareNumericAndBranch::PrintDataTo(StringStream* stream) {
136 left()->PrintTo(stream);
137 stream->Add(" %s ", Token::String(op()));
138 right()->PrintTo(stream);
139 stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
143 void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) {
144 stream->Add("if has_cached_array_index(");
145 value()->PrintTo(stream);
146 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
150 bool LGoto::HasInterestingComment(LCodeGen* gen) const {
151 return !gen->IsNextEmittedBlock(block_id());
155 void LGoto::PrintDataTo(StringStream* stream) {
156 stream->Add("B%d", block_id());
160 void LInnerAllocatedObject::PrintDataTo(StringStream* stream) {
162 base_object()->PrintTo(stream);
164 offset()->PrintTo(stream);
168 void LInvokeFunction::PrintDataTo(StringStream* stream) {
170 function()->PrintTo(stream);
171 stream->Add(" #%d / ", arity());
175 void LInstruction::PrintTo(StringStream* stream) {
176 stream->Add("%s ", this->Mnemonic());
178 PrintOutputOperandTo(stream);
182 if (HasEnvironment()) {
184 environment()->PrintTo(stream);
187 if (HasPointerMap()) {
189 pointer_map()->PrintTo(stream);
194 void LInstruction::PrintDataTo(StringStream* stream) {
196 for (int i = 0; i < InputCount(); i++) {
197 if (i > 0) stream->Add(" ");
198 if (InputAt(i) == NULL) {
201 InputAt(i)->PrintTo(stream);
207 void LInstruction::PrintOutputOperandTo(StringStream* stream) {
208 if (HasResult()) result()->PrintTo(stream);
212 void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
213 stream->Add("if has_instance_type(");
214 value()->PrintTo(stream);
215 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
219 void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
220 stream->Add("if is_object(");
221 value()->PrintTo(stream);
222 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
226 void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
227 stream->Add("if is_string(");
228 value()->PrintTo(stream);
229 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
233 void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
234 stream->Add("if is_smi(");
235 value()->PrintTo(stream);
236 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
240 void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
241 stream->Add("if typeof ");
242 value()->PrintTo(stream);
243 stream->Add(" == \"%s\" then B%d else B%d",
244 hydrogen()->type_literal()->ToCString().get(),
245 true_block_id(), false_block_id());
249 void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
250 stream->Add("if is_undetectable(");
251 value()->PrintTo(stream);
252 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
256 bool LGap::IsRedundant() const {
257 for (int i = 0; i < 4; i++) {
258 if ((parallel_moves_[i] != NULL) && !parallel_moves_[i]->IsRedundant()) {
267 void LGap::PrintDataTo(StringStream* stream) {
268 for (int i = 0; i < 4; i++) {
270 if (parallel_moves_[i] != NULL) {
271 parallel_moves_[i]->PrintDataTo(stream);
278 void LLoadContextSlot::PrintDataTo(StringStream* stream) {
279 context()->PrintTo(stream);
280 stream->Add("[%d]", slot_index());
284 void LStoreCodeEntry::PrintDataTo(StringStream* stream) {
286 function()->PrintTo(stream);
287 stream->Add(".code_entry = ");
288 code_object()->PrintTo(stream);
292 void LStoreContextSlot::PrintDataTo(StringStream* stream) {
293 context()->PrintTo(stream);
294 stream->Add("[%d] <- ", slot_index());
295 value()->PrintTo(stream);
299 void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
300 object()->PrintTo(stream);
302 key()->PrintTo(stream);
303 stream->Add("] <- ");
304 value()->PrintTo(stream);
308 void LStoreNamedField::PrintDataTo(StringStream* stream) {
309 object()->PrintTo(stream);
310 hydrogen()->access().PrintTo(stream);
312 value()->PrintTo(stream);
316 void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
317 object()->PrintTo(stream);
319 stream->Add(String::cast(*name())->ToCString().get());
321 value()->PrintTo(stream);
325 void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
326 stream->Add("if string_compare(");
327 left()->PrintTo(stream);
328 right()->PrintTo(stream);
329 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
333 void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
334 object()->PrintTo(stream);
335 stream->Add("%p -> %p", *original_map(), *transitioned_map());
340 void LUnaryMathOperation<T>::PrintDataTo(StringStream* stream) {
341 value()->PrintTo(stream);
345 const char* LArithmeticD::Mnemonic() const {
347 case Token::ADD: return "add-d";
348 case Token::SUB: return "sub-d";
349 case Token::MUL: return "mul-d";
350 case Token::DIV: return "div-d";
351 case Token::MOD: return "mod-d";
359 const char* LArithmeticT::Mnemonic() const {
361 case Token::ADD: return "add-t";
362 case Token::SUB: return "sub-t";
363 case Token::MUL: return "mul-t";
364 case Token::MOD: return "mod-t";
365 case Token::DIV: return "div-t";
366 case Token::BIT_AND: return "bit-and-t";
367 case Token::BIT_OR: return "bit-or-t";
368 case Token::BIT_XOR: return "bit-xor-t";
369 case Token::ROR: return "ror-t";
370 case Token::SHL: return "shl-t";
371 case Token::SAR: return "sar-t";
372 case Token::SHR: return "shr-t";
380 void LChunkBuilder::Abort(BailoutReason reason) {
381 info()->set_bailout_reason(reason);
386 LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
387 return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
388 Register::ToAllocationIndex(reg));
392 LUnallocated* LChunkBuilder::ToUnallocated(DoubleRegister reg) {
393 return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
394 DoubleRegister::ToAllocationIndex(reg));
398 LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
399 if (value->EmitAtUses()) {
400 HInstruction* instr = HInstruction::cast(value);
401 VisitInstruction(instr);
403 operand->set_virtual_register(value->id());
408 LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) {
409 return Use(value, ToUnallocated(fixed_register));
413 LOperand* LChunkBuilder::UseFixedDouble(HValue* value,
414 DoubleRegister fixed_register) {
415 return Use(value, ToUnallocated(fixed_register));
419 LOperand* LChunkBuilder::UseRegister(HValue* value) {
420 return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
424 LOperand* LChunkBuilder::UseRegisterAndClobber(HValue* value) {
425 return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));
429 LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
431 new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
432 LUnallocated::USED_AT_START));
436 LOperand* LChunkBuilder::UseRegisterOrConstant(HValue* value) {
437 return value->IsConstant() ? UseConstant(value) : UseRegister(value);
441 LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
442 return value->IsConstant() ? UseConstant(value) : UseRegisterAtStart(value);
446 LConstantOperand* LChunkBuilder::UseConstant(HValue* value) {
447 return chunk_->DefineConstantOperand(HConstant::cast(value));
451 LOperand* LChunkBuilder::UseAny(HValue* value) {
452 return value->IsConstant()
454 : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
458 LInstruction* LChunkBuilder::Define(LTemplateResultInstruction<1>* instr,
459 LUnallocated* result) {
460 result->set_virtual_register(current_instruction_->id());
461 instr->set_result(result);
466 LInstruction* LChunkBuilder::DefineAsRegister(
467 LTemplateResultInstruction<1>* instr) {
469 new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
473 LInstruction* LChunkBuilder::DefineAsSpilled(
474 LTemplateResultInstruction<1>* instr, int index) {
476 new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));
480 LInstruction* LChunkBuilder::DefineSameAsFirst(
481 LTemplateResultInstruction<1>* instr) {
483 new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
487 LInstruction* LChunkBuilder::DefineFixed(
488 LTemplateResultInstruction<1>* instr, Register reg) {
489 return Define(instr, ToUnallocated(reg));
493 LInstruction* LChunkBuilder::DefineFixedDouble(
494 LTemplateResultInstruction<1>* instr, DoubleRegister reg) {
495 return Define(instr, ToUnallocated(reg));
499 LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
500 HInstruction* hinstr,
501 CanDeoptimize can_deoptimize) {
502 info()->MarkAsNonDeferredCalling();
507 instr = AssignPointerMap(instr);
509 // If instruction does not have side-effects lazy deoptimization
510 // after the call will try to deoptimize to the point before the call.
511 // Thus we still need to attach environment to this call even if
512 // call sequence can not deoptimize eagerly.
513 bool needs_environment =
514 (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) ||
515 !hinstr->HasObservableSideEffects();
516 if (needs_environment && !instr->HasEnvironment()) {
517 instr = AssignEnvironment(instr);
524 LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
525 ASSERT(!instr->HasPointerMap());
526 instr->set_pointer_map(new(zone()) LPointerMap(zone()));
531 LUnallocated* LChunkBuilder::TempRegister() {
532 LUnallocated* operand =
533 new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
534 int vreg = allocator_->GetVirtualRegister();
535 if (!allocator_->AllocationOk()) {
536 Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
539 operand->set_virtual_register(vreg);
544 int LPlatformChunk::GetNextSpillIndex() {
545 return spill_slot_count_++;
549 LOperand* LPlatformChunk::GetNextSpillSlot(RegisterKind kind) {
550 int index = GetNextSpillIndex();
551 if (kind == DOUBLE_REGISTERS) {
552 return LDoubleStackSlot::Create(index, zone());
554 ASSERT(kind == GENERAL_REGISTERS);
555 return LStackSlot::Create(index, zone());
560 LOperand* LChunkBuilder::FixedTemp(DoubleRegister reg) {
561 LUnallocated* operand = ToUnallocated(reg);
562 ASSERT(operand->HasFixedPolicy());
567 LPlatformChunk* LChunkBuilder::Build() {
569 chunk_ = new(zone()) LPlatformChunk(info_, graph_);
570 LPhase phase("L_Building chunk", chunk_);
573 // If compiling for OSR, reserve space for the unoptimized frame,
574 // which will be subsumed into this frame.
575 if (graph()->has_osr()) {
576 // TODO(all): GetNextSpillIndex just increments a field. It has no other
577 // side effects, so we should get rid of this loop.
578 for (int i = graph()->osr()->UnoptimizedFrameSlots(); i > 0; i--) {
579 chunk_->GetNextSpillIndex();
583 const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
584 for (int i = 0; i < blocks->length(); i++) {
585 DoBasicBlock(blocks->at(i));
586 if (is_aborted()) return NULL;
593 void LChunkBuilder::DoBasicBlock(HBasicBlock* block) {
594 ASSERT(is_building());
595 current_block_ = block;
597 if (block->IsStartBlock()) {
598 block->UpdateEnvironment(graph_->start_environment());
600 } else if (block->predecessors()->length() == 1) {
601 // We have a single predecessor => copy environment and outgoing
602 // argument count from the predecessor.
603 ASSERT(block->phis()->length() == 0);
604 HBasicBlock* pred = block->predecessors()->at(0);
605 HEnvironment* last_environment = pred->last_environment();
606 ASSERT(last_environment != NULL);
608 // Only copy the environment, if it is later used again.
609 if (pred->end()->SecondSuccessor() == NULL) {
610 ASSERT(pred->end()->FirstSuccessor() == block);
612 if ((pred->end()->FirstSuccessor()->block_id() > block->block_id()) ||
613 (pred->end()->SecondSuccessor()->block_id() > block->block_id())) {
614 last_environment = last_environment->Copy();
617 block->UpdateEnvironment(last_environment);
618 ASSERT(pred->argument_count() >= 0);
619 argument_count_ = pred->argument_count();
621 // We are at a state join => process phis.
622 HBasicBlock* pred = block->predecessors()->at(0);
623 // No need to copy the environment, it cannot be used later.
624 HEnvironment* last_environment = pred->last_environment();
625 for (int i = 0; i < block->phis()->length(); ++i) {
626 HPhi* phi = block->phis()->at(i);
627 if (phi->HasMergedIndex()) {
628 last_environment->SetValueAt(phi->merged_index(), phi);
631 for (int i = 0; i < block->deleted_phis()->length(); ++i) {
632 if (block->deleted_phis()->at(i) < last_environment->length()) {
633 last_environment->SetValueAt(block->deleted_phis()->at(i),
634 graph_->GetConstantUndefined());
637 block->UpdateEnvironment(last_environment);
638 // Pick up the outgoing argument count of one of the predecessors.
639 argument_count_ = pred->argument_count();
642 // Translate hydrogen instructions to lithium ones for the current block.
643 HInstruction* current = block->first();
644 int start = chunk_->instructions()->length();
645 while ((current != NULL) && !is_aborted()) {
646 // Code for constants in registers is generated lazily.
647 if (!current->EmitAtUses()) {
648 VisitInstruction(current);
650 current = current->next();
652 int end = chunk_->instructions()->length() - 1;
654 block->set_first_instruction_index(start);
655 block->set_last_instruction_index(end);
657 block->set_argument_count(argument_count_);
658 current_block_ = NULL;
662 void LChunkBuilder::VisitInstruction(HInstruction* current) {
663 HInstruction* old_current = current_instruction_;
664 current_instruction_ = current;
666 LInstruction* instr = NULL;
667 if (current->CanReplaceWithDummyUses()) {
668 if (current->OperandCount() == 0) {
669 instr = DefineAsRegister(new(zone()) LDummy());
671 ASSERT(!current->OperandAt(0)->IsControlInstruction());
672 instr = DefineAsRegister(new(zone())
673 LDummyUse(UseAny(current->OperandAt(0))));
675 for (int i = 1; i < current->OperandCount(); ++i) {
676 if (current->OperandAt(i)->IsControlInstruction()) continue;
677 LInstruction* dummy =
678 new(zone()) LDummyUse(UseAny(current->OperandAt(i)));
679 dummy->set_hydrogen_value(current);
680 chunk_->AddInstruction(dummy, current_block_);
683 instr = current->CompileToLithium(this);
686 argument_count_ += current->argument_delta();
687 ASSERT(argument_count_ >= 0);
690 // Associate the hydrogen instruction first, since we may need it for
691 // the ClobbersRegisters() or ClobbersDoubleRegisters() calls below.
692 instr->set_hydrogen_value(current);
695 // Make sure that the lithium instruction has either no fixed register
696 // constraints in temps or the result OR no uses that are only used at
697 // start. If this invariant doesn't hold, the register allocator can decide
698 // to insert a split of a range immediately before the instruction due to an
699 // already allocated register needing to be used for the instruction's fixed
700 // register constraint. In this case, the register allocator won't see an
701 // interference between the split child and the use-at-start (it would if
702 // the it was just a plain use), so it is free to move the split child into
703 // the same register that is used for the use-at-start.
704 // See https://code.google.com/p/chromium/issues/detail?id=201590
705 if (!(instr->ClobbersRegisters() && instr->ClobbersDoubleRegisters())) {
707 int used_at_start = 0;
708 for (UseIterator it(instr); !it.Done(); it.Advance()) {
709 LUnallocated* operand = LUnallocated::cast(it.Current());
710 if (operand->IsUsedAtStart()) ++used_at_start;
712 if (instr->Output() != NULL) {
713 if (LUnallocated::cast(instr->Output())->HasFixedPolicy()) ++fixed;
715 for (TempIterator it(instr); !it.Done(); it.Advance()) {
716 LUnallocated* operand = LUnallocated::cast(it.Current());
717 if (operand->HasFixedPolicy()) ++fixed;
719 ASSERT(fixed == 0 || used_at_start == 0);
723 if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) {
724 instr = AssignPointerMap(instr);
726 if (FLAG_stress_environments && !instr->HasEnvironment()) {
727 instr = AssignEnvironment(instr);
729 chunk_->AddInstruction(instr, current_block_);
731 if (instr->IsCall()) {
732 HValue* hydrogen_value_for_lazy_bailout = current;
733 LInstruction* instruction_needing_environment = NULL;
734 if (current->HasObservableSideEffects()) {
735 HSimulate* sim = HSimulate::cast(current->next());
736 instruction_needing_environment = instr;
737 sim->ReplayEnvironment(current_block_->last_environment());
738 hydrogen_value_for_lazy_bailout = sim;
740 LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
741 bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
742 chunk_->AddInstruction(bailout, current_block_);
743 if (instruction_needing_environment != NULL) {
744 // Store the lazy deopt environment with the instruction if needed.
745 // Right now it is only used for LInstanceOfKnownGlobal.
746 instruction_needing_environment->
747 SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
751 current_instruction_ = old_current;
755 LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
756 HEnvironment* hydrogen_env = current_block_->last_environment();
757 int argument_index_accumulator = 0;
758 ZoneList<HValue*> objects_to_materialize(0, zone());
759 instr->set_environment(CreateEnvironment(hydrogen_env,
760 &argument_index_accumulator,
761 &objects_to_materialize));
766 LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
767 // The control instruction marking the end of a block that completed
768 // abruptly (e.g., threw an exception). There is nothing specific to do.
773 LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
774 HArithmeticBinaryOperation* instr) {
775 ASSERT(instr->representation().IsDouble());
776 ASSERT(instr->left()->representation().IsDouble());
777 ASSERT(instr->right()->representation().IsDouble());
779 if (op == Token::MOD) {
780 LOperand* left = UseFixedDouble(instr->left(), d0);
781 LOperand* right = UseFixedDouble(instr->right(), d1);
782 LArithmeticD* result = new(zone()) LArithmeticD(Token::MOD, left, right);
783 return MarkAsCall(DefineFixedDouble(result, d0), instr);
785 LOperand* left = UseRegisterAtStart(instr->left());
786 LOperand* right = UseRegisterAtStart(instr->right());
787 LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
788 return DefineAsRegister(result);
793 LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
794 HBinaryOperation* instr) {
795 ASSERT((op == Token::ADD) || (op == Token::SUB) || (op == Token::MUL) ||
796 (op == Token::DIV) || (op == Token::MOD) || (op == Token::SHR) ||
797 (op == Token::SHL) || (op == Token::SAR) || (op == Token::ROR) ||
798 (op == Token::BIT_OR) || (op == Token::BIT_AND) ||
799 (op == Token::BIT_XOR));
800 HValue* left = instr->left();
801 HValue* right = instr->right();
803 // TODO(jbramley): Once we've implemented smi support for all arithmetic
804 // operations, these assertions should check IsTagged().
805 ASSERT(instr->representation().IsSmiOrTagged());
806 ASSERT(left->representation().IsSmiOrTagged());
807 ASSERT(right->representation().IsSmiOrTagged());
809 LOperand* context = UseFixed(instr->context(), cp);
810 LOperand* left_operand = UseFixed(left, x1);
811 LOperand* right_operand = UseFixed(right, x0);
812 LArithmeticT* result =
813 new(zone()) LArithmeticT(op, context, left_operand, right_operand);
814 return MarkAsCall(DefineFixed(result, x0), instr);
818 LInstruction* LChunkBuilder::DoBoundsCheckBaseIndexInformation(
819 HBoundsCheckBaseIndexInformation* instr) {
825 LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
826 info()->MarkAsRequiresFrame();
827 LOperand* args = NULL;
828 LOperand* length = NULL;
829 LOperand* index = NULL;
831 if (instr->length()->IsConstant() && instr->index()->IsConstant()) {
832 args = UseRegisterAtStart(instr->arguments());
833 length = UseConstant(instr->length());
834 index = UseConstant(instr->index());
836 args = UseRegister(instr->arguments());
837 length = UseRegisterAtStart(instr->length());
838 index = UseRegisterOrConstantAtStart(instr->index());
841 return DefineAsRegister(new(zone()) LAccessArgumentsAt(args, length, index));
845 LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
846 if (instr->representation().IsSmiOrInteger32()) {
847 ASSERT(instr->left()->representation().Equals(instr->representation()));
848 ASSERT(instr->right()->representation().Equals(instr->representation()));
849 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
851 UseRegisterOrConstantAtStart(instr->BetterRightOperand());
852 LInstruction* result = instr->representation().IsSmi() ?
853 DefineAsRegister(new(zone()) LAddS(left, right)) :
854 DefineAsRegister(new(zone()) LAddI(left, right));
855 if (instr->CheckFlag(HValue::kCanOverflow)) {
856 result = AssignEnvironment(result);
859 } else if (instr->representation().IsExternal()) {
860 ASSERT(instr->left()->representation().IsExternal());
861 ASSERT(instr->right()->representation().IsInteger32());
862 ASSERT(!instr->CheckFlag(HValue::kCanOverflow));
863 LOperand* left = UseRegisterAtStart(instr->left());
864 LOperand* right = UseRegisterOrConstantAtStart(instr->right());
865 return DefineAsRegister(new(zone()) LAddE(left, right));
866 } else if (instr->representation().IsDouble()) {
867 return DoArithmeticD(Token::ADD, instr);
869 ASSERT(instr->representation().IsTagged());
870 return DoArithmeticT(Token::ADD, instr);
875 LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
876 info()->MarkAsDeferredCalling();
877 LOperand* context = UseAny(instr->context());
878 LOperand* size = UseRegisterOrConstant(instr->size());
879 LOperand* temp1 = TempRegister();
880 LOperand* temp2 = TempRegister();
881 LOperand* temp3 = instr->MustPrefillWithFiller() ? TempRegister() : NULL;
882 LAllocate* result = new(zone()) LAllocate(context, size, temp1, temp2, temp3);
883 return AssignPointerMap(DefineAsRegister(result));
887 LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
888 LOperand* function = UseFixed(instr->function(), x1);
889 LOperand* receiver = UseFixed(instr->receiver(), x0);
890 LOperand* length = UseFixed(instr->length(), x2);
891 LOperand* elements = UseFixed(instr->elements(), x3);
892 LApplyArguments* result = new(zone()) LApplyArguments(function,
896 return MarkAsCall(DefineFixed(result, x0), instr, CAN_DEOPTIMIZE_EAGERLY);
900 LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* instr) {
901 info()->MarkAsRequiresFrame();
902 LOperand* temp = instr->from_inlined() ? NULL : TempRegister();
903 return DefineAsRegister(new(zone()) LArgumentsElements(temp));
907 LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* instr) {
908 info()->MarkAsRequiresFrame();
909 LOperand* value = UseRegisterAtStart(instr->value());
910 return DefineAsRegister(new(zone()) LArgumentsLength(value));
914 LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
915 // There are no real uses of the arguments object.
916 // arguments.length and element access are supported directly on
917 // stack arguments, and any real arguments object use causes a bailout.
918 // So this value is never used.
923 LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
924 if (instr->representation().IsSmiOrInteger32()) {
925 ASSERT(instr->left()->representation().Equals(instr->representation()));
926 ASSERT(instr->right()->representation().Equals(instr->representation()));
927 ASSERT(instr->CheckFlag(HValue::kTruncatingToInt32));
929 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
931 UseRegisterOrConstantAtStart(instr->BetterRightOperand());
932 return instr->representation().IsSmi() ?
933 DefineAsRegister(new(zone()) LBitS(left, right)) :
934 DefineAsRegister(new(zone()) LBitI(left, right));
936 return DoArithmeticT(instr->op(), instr);
941 LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
942 // V8 expects a label to be generated for each basic block.
943 // This is used in some places like LAllocator::IsBlockBoundary
944 // in lithium-allocator.cc
945 return new(zone()) LLabel(instr->block());
949 LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
950 LOperand* value = UseRegisterOrConstantAtStart(instr->index());
951 LOperand* length = UseRegister(instr->length());
952 return AssignEnvironment(new(zone()) LBoundsCheck(value, length));
956 LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
957 LInstruction* goto_instr = CheckElideControlInstruction(instr);
958 if (goto_instr != NULL) return goto_instr;
960 HValue* value = instr->value();
961 Representation r = value->representation();
962 HType type = value->type();
964 if (r.IsInteger32() || r.IsSmi() || r.IsDouble()) {
965 // These representations have simple checks that cannot deoptimize.
966 return new(zone()) LBranch(UseRegister(value), NULL, NULL);
968 ASSERT(r.IsTagged());
969 if (type.IsBoolean() || type.IsSmi() || type.IsJSArray() ||
970 type.IsHeapNumber()) {
971 // These types have simple checks that cannot deoptimize.
972 return new(zone()) LBranch(UseRegister(value), NULL, NULL);
975 if (type.IsString()) {
976 // This type cannot deoptimize, but needs a scratch register.
977 return new(zone()) LBranch(UseRegister(value), TempRegister(), NULL);
980 ToBooleanStub::Types expected = instr->expected_input_types();
981 bool needs_temps = expected.NeedsMap() || expected.IsEmpty();
982 LOperand* temp1 = needs_temps ? TempRegister() : NULL;
983 LOperand* temp2 = needs_temps ? TempRegister() : NULL;
985 if (expected.IsGeneric() || expected.IsEmpty()) {
986 // The generic case cannot deoptimize because it already supports every
987 // possible input type.
989 return new(zone()) LBranch(UseRegister(value), temp1, temp2);
991 return AssignEnvironment(
992 new(zone()) LBranch(UseRegister(value), temp1, temp2));
998 LInstruction* LChunkBuilder::DoCallJSFunction(
999 HCallJSFunction* instr) {
1000 LOperand* function = UseFixed(instr->function(), x1);
1002 LCallJSFunction* result = new(zone()) LCallJSFunction(function);
1004 return MarkAsCall(DefineFixed(result, x0), instr);
1008 LInstruction* LChunkBuilder::DoCallWithDescriptor(
1009 HCallWithDescriptor* instr) {
1010 const CallInterfaceDescriptor* descriptor = instr->descriptor();
1012 LOperand* target = UseRegisterOrConstantAtStart(instr->target());
1013 ZoneList<LOperand*> ops(instr->OperandCount(), zone());
1014 ops.Add(target, zone());
1015 for (int i = 1; i < instr->OperandCount(); i++) {
1016 LOperand* op = UseFixed(instr->OperandAt(i),
1017 descriptor->GetParameterRegister(i - 1));
1018 ops.Add(op, zone());
1021 LCallWithDescriptor* result = new(zone()) LCallWithDescriptor(descriptor,
1024 return MarkAsCall(DefineFixed(result, x0), instr);
// Call builders. Common ARM64 calling pattern in this file: context fixed in
// cp, callee/constructor fixed in x1, result fixed in x0, and MarkAsCall so
// the register allocator treats the instruction as clobbering.
// NOTE(review): closing braces between functions were dropped by the
// extraction (see gaps in the embedded line numbers).
1028 LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
1029 LOperand* context = UseFixed(instr->context(), cp);
1030 LOperand* function = UseFixed(instr->function(), x1);
1031 LCallFunction* call = new(zone()) LCallFunction(context, function);
1032 return MarkAsCall(DefineFixed(call, x0), instr);
1036 LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
1037 LOperand* context = UseFixed(instr->context(), cp);
1038 // The call to CallConstructStub will expect the constructor to be in x1.
1039 LOperand* constructor = UseFixed(instr->constructor(), x1);
1040 LCallNew* result = new(zone()) LCallNew(context, constructor);
1041 return MarkAsCall(DefineFixed(result, x0), instr);
1045 LInstruction* LChunkBuilder::DoCallNewArray(HCallNewArray* instr) {
1046 LOperand* context = UseFixed(instr->context(), cp);
1047 // The call to ArrayConstructCode will expect the constructor to be in x1.
1048 LOperand* constructor = UseFixed(instr->constructor(), x1);
1049 LCallNewArray* result = new(zone()) LCallNewArray(context, constructor);
1050 return MarkAsCall(DefineFixed(result, x0), instr);
// Runtime and stub calls only need the context; the result lands in x0.
1054 LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
1055 LOperand* context = UseFixed(instr->context(), cp);
1056 return MarkAsCall(DefineFixed(new(zone()) LCallRuntime(context), x0), instr);
1060 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
1061 LOperand* context = UseFixed(instr->context(), cp);
1062 return MarkAsCall(DefineFixed(new(zone()) LCallStub(context), x0), instr);
// Captured objects only replay their environment; they generate no Lithium
// instruction of their own.
// NOTE(review): the trailing statement (presumably `return NULL;`) and the
// closing brace were dropped by the extraction -- confirm against upstream.
1066 LInstruction* LChunkBuilder::DoCapturedObject(HCapturedObject* instr) {
1067 instr->ReplayEnvironment(current_block_->last_environment());
1069 // There are no real uses of a captured object.
// Lowers HChange (representation change) to the appropriate tag/untag/
// convert instruction, keyed on the (from, to) representation pair:
// tagged<->double, tagged<->smi, tagged->int32, double<->int32/smi,
// int32->tagged/smi/double.
// NOTE(review): this copy has dropped lines throughout (closing braces,
// blank lines, and at least one enclosing guard around 1079-1083 per the
// numbering gaps) -- do not edit logic here without the upstream file.
1074 LInstruction* LChunkBuilder::DoChange(HChange* instr) {
1075 Representation from = instr->from();
1076 Representation to = instr->to();
// Smi source: tagging a smi is a no-op (a smi is already tagged), so a
// LDummyUse suffices; otherwise treat the smi as tagged for the cases below.
1079 if (to.IsTagged()) {
1080 LOperand* value = UseRegister(instr->value());
1081 return DefineSameAsFirst(new(zone()) LDummyUse(value));
1083 from = Representation::Tagged();
1086 if (from.IsTagged()) {
// tagged -> double: untag via LNumberUntagD; may deopt (AssignEnvironment).
1087 if (to.IsDouble()) {
1088 LOperand* value = UseRegister(instr->value());
1089 LOperand* temp = TempRegister();
1090 LNumberUntagD* res = new(zone()) LNumberUntagD(value, temp);
1091 return AssignEnvironment(DefineAsRegister(res));
// tagged -> smi: free if the input is statically a smi, else a checked smi.
1092 } else if (to.IsSmi()) {
1093 LOperand* value = UseRegister(instr->value());
1094 if (instr->value()->type().IsSmi()) {
1095 return DefineSameAsFirst(new(zone()) LDummyUse(value));
1097 return AssignEnvironment(DefineSameAsFirst(new(zone()) LCheckSmi(value)));
// tagged -> int32: plain untag for known smis, otherwise LTaggedToI which
// needs a double temp (d24) unless the value may be truncated.
1099 ASSERT(to.IsInteger32());
1100 LInstruction* res = NULL;
1102 if (instr->value()->type().IsSmi() ||
1103 instr->value()->representation().IsSmi()) {
1104 LOperand* value = UseRegisterAtStart(instr->value());
1105 res = DefineAsRegister(new(zone()) LSmiUntag(value, false));
1107 LOperand* value = UseRegister(instr->value());
1108 LOperand* temp1 = TempRegister();
1109 LOperand* temp2 = instr->CanTruncateToInt32() ? NULL : FixedTemp(d24);
1110 res = DefineAsRegister(new(zone()) LTaggedToI(value, temp1, temp2));
1111 res = AssignEnvironment(res);
1116 } else if (from.IsDouble()) {
// double -> tagged: heap-number allocation may GC, hence deferred call +
// pointer map.
1117 if (to.IsTagged()) {
1118 info()->MarkAsDeferredCalling();
1119 LOperand* value = UseRegister(instr->value());
1120 LOperand* temp1 = TempRegister();
1121 LOperand* temp2 = TempRegister();
1123 LNumberTagD* result = new(zone()) LNumberTagD(value, temp1, temp2);
1124 return AssignPointerMap(DefineAsRegister(result));
// double -> int/smi: truncating conversion never deopts; exact conversion
// needs an environment.
1126 ASSERT(to.IsSmi() || to.IsInteger32());
1127 LOperand* value = UseRegister(instr->value());
1129 if (instr->CanTruncateToInt32()) {
1130 LTruncateDoubleToIntOrSmi* result =
1131 new(zone()) LTruncateDoubleToIntOrSmi(value);
1132 return DefineAsRegister(result);
1134 LDoubleToIntOrSmi* result = new(zone()) LDoubleToIntOrSmi(value);
1135 return AssignEnvironment(DefineAsRegister(result));
1138 } else if (from.IsInteger32()) {
1139 info()->MarkAsDeferredCalling();
// int32 -> tagged: uint32 values may not fit a smi, so LNumberTagU can
// allocate (pointer map) and deopt (environment); signed int32 always fits
// a smi on arm64 (see STATIC_ASSERT), so a plain LSmiTag suffices.
1140 if (to.IsTagged()) {
1141 if (instr->value()->CheckFlag(HInstruction::kUint32)) {
1142 LOperand* value = UseRegister(instr->value());
1143 LNumberTagU* result = new(zone()) LNumberTagU(value,
1146 return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
1148 STATIC_ASSERT((kMinInt == Smi::kMinValue) &&
1149 (kMaxInt == Smi::kMaxValue));
1150 LOperand* value = UseRegisterAtStart(instr->value());
1151 return DefineAsRegister(new(zone()) LSmiTag(value));
// int32 -> smi: only uint32 inputs can be out of smi range and deopt.
1153 } else if (to.IsSmi()) {
1154 LOperand* value = UseRegisterAtStart(instr->value());
1155 LInstruction* result = DefineAsRegister(new(zone()) LSmiTag(value));
1156 if (instr->value()->CheckFlag(HInstruction::kUint32)) {
1157 result = AssignEnvironment(result);
// int32 -> double: distinct instructions for unsigned vs signed sources.
1161 ASSERT(to.IsDouble());
1162 if (instr->value()->CheckFlag(HInstruction::kUint32)) {
1163 return DefineAsRegister(
1164 new(zone()) LUint32ToDouble(UseRegisterAtStart(instr->value())));
1166 return DefineAsRegister(
1167 new(zone()) LInteger32ToDouble(UseRegisterAtStart(instr->value())));
// Check builders: each emits a checking instruction that can deoptimize,
// hence the AssignEnvironment on every path that performs a real check.
// NOTE(review): closing braces between functions were dropped by the
// extraction (see gaps in the embedded line numbers).
1177 LInstruction* LChunkBuilder::DoCheckValue(HCheckValue* instr) {
1178 LOperand* value = UseRegisterAtStart(instr->value());
1179 return AssignEnvironment(new(zone()) LCheckValue(value));
1183 LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
1184 LOperand* value = UseRegisterAtStart(instr->value());
1185 LOperand* temp = TempRegister();
1186 LInstruction* result = new(zone()) LCheckInstanceType(value, temp);
1187 return AssignEnvironment(result);
1191 LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
1192 if (instr->CanOmitMapChecks()) {
1193 // LCheckMaps does nothing in this case.
1194 return new(zone()) LCheckMaps(NULL);
1196 LOperand* value = UseRegisterAtStart(instr->value());
1197 LOperand* temp = TempRegister();
// A migration target means the check may call into the runtime to migrate
// the object, so it needs a pointer map in addition to the deopt
// environment.
1199 if (instr->has_migration_target()) {
1200 info()->MarkAsDeferredCalling();
1201 LInstruction* result = new(zone()) LCheckMaps(value, temp);
1202 return AssignPointerMap(AssignEnvironment(result));
1204 return AssignEnvironment(new(zone()) LCheckMaps(value, temp));
// HCheckHeapObject lowers to LCheckNonSmi (a heap object is anything that
// is not a smi).
1210 LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) {
1211 LOperand* value = UseRegisterAtStart(instr->value());
1212 return AssignEnvironment(new(zone()) LCheckNonSmi(value));
1216 LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
1217 LOperand* value = UseRegisterAtStart(instr->value());
1218 return AssignEnvironment(new(zone()) LCheckSmi(value));
// Clamp-to-uint8 dispatches on the input representation; only the tagged
// variant can deoptimize (environment assigned).
// NOTE(review): the numbering gap after 1233 shows the tail of the
// LClampTToUint8 argument list was dropped by the extraction.
1222 LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
1223 HValue* value = instr->value();
1224 Representation input_rep = value->representation();
1225 LOperand* reg = UseRegister(value);
1226 if (input_rep.IsDouble()) {
1227 return DefineAsRegister(new(zone()) LClampDToUint8(reg));
1228 } else if (input_rep.IsInteger32()) {
1229 return DefineAsRegister(new(zone()) LClampIToUint8(reg));
1231 ASSERT(input_rep.IsSmiOrTagged());
1232 return AssignEnvironment(
1233 DefineAsRegister(new(zone()) LClampTToUint8(reg,
// NOTE(review): the trailing argument list of LClampOfTestAndBranch below is
// also truncated in this copy.
1240 LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
1241 HClassOfTestAndBranch* instr) {
1242 ASSERT(instr->value()->representation().IsTagged());
1243 LOperand* value = UseRegisterAtStart(instr->value());
1244 return new(zone()) LClassOfTestAndBranch(value,
// Numeric compare-and-branch: smi/int32 and double inputs both take
// register-or-constant operands; branches never deoptimize so no
// environment is assigned.
1250 LInstruction* LChunkBuilder::DoCompareNumericAndBranch(
1251 HCompareNumericAndBranch* instr) {
1252 Representation r = instr->representation();
1254 if (r.IsSmiOrInteger32()) {
1255 ASSERT(instr->left()->representation().Equals(r));
1256 ASSERT(instr->right()->representation().Equals(r));
1257 LOperand* left = UseRegisterOrConstantAtStart(instr->left());
1258 LOperand* right = UseRegisterOrConstantAtStart(instr->right());
1259 return new(zone()) LCompareNumericAndBranch(left, right);
1261 ASSERT(r.IsDouble());
1262 ASSERT(instr->left()->representation().IsDouble());
1263 ASSERT(instr->right()->representation().IsDouble());
1264 // TODO(all): In fact the only case that we can handle more efficiently is
1265 // when one of the operand is the constant 0. Currently the MacroAssembler
1266 // will be able to cope with any constant by loading it into an internal
1267 // scratch register. This means that if the constant is used more that once,
1268 // it will be loaded multiple times. Unfortunatly crankshaft already
1269 // duplicates constant loads, but we should modify the code below once this
1270 // issue has been addressed in crankshaft.
1271 LOperand* left = UseRegisterOrConstantAtStart(instr->left());
1272 LOperand* right = UseRegisterOrConstantAtStart(instr->right());
1273 return new(zone()) LCompareNumericAndBranch(left, right);
// Generic (tagged) comparison: a full call with left/right pinned to x1/x0
// and the result in x0.
1278 LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
1279 ASSERT(instr->left()->representation().IsTagged());
1280 ASSERT(instr->right()->representation().IsTagged());
1281 LOperand* context = UseFixed(instr->context(), cp);
1282 LOperand* left = UseFixed(instr->left(), x1);
1283 LOperand* right = UseFixed(instr->right(), x0);
1284 LCmpT* result = new(zone()) LCmpT(context, left, right);
1285 return MarkAsCall(DefineFixed(result, x0), instr);
// Hole/object-equality/map compare-and-branch builders. The ObjectEq and
// Map variants first try CheckElideControlInstruction, which replaces a
// branch with a direct goto when the outcome is statically known.
1289 LInstruction* LChunkBuilder::DoCompareHoleAndBranch(
1290 HCompareHoleAndBranch* instr) {
1291 LOperand* value = UseRegister(instr->value());
// Tagged values compare against the hole constant; doubles need a temp to
// inspect the NaN payload (hole NaN).
1292 if (instr->representation().IsTagged()) {
1293 return new(zone()) LCmpHoleAndBranchT(value);
1295 LOperand* temp = TempRegister();
1296 return new(zone()) LCmpHoleAndBranchD(value, temp);
1301 LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
1302 HCompareObjectEqAndBranch* instr) {
1303 LInstruction* goto_instr = CheckElideControlInstruction(instr);
1304 if (goto_instr != NULL) return goto_instr;
1306 LOperand* left = UseRegisterAtStart(instr->left());
1307 LOperand* right = UseRegisterAtStart(instr->right());
1308 return new(zone()) LCmpObjectEqAndBranch(left, right);
1312 LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
1313 LInstruction* goto_instr = CheckElideControlInstruction(instr);
1314 if (goto_instr != NULL) return goto_instr;
1316 ASSERT(instr->value()->representation().IsTagged());
1317 LOperand* value = UseRegisterAtStart(instr->value());
1318 LOperand* temp = TempRegister();
1319 return new(zone()) LCmpMapAndBranch(value, temp);
// Constants: one LConstant* flavour per representation (Smi, Integer32,
// Double, External, Tagged), each defined into a register.
// NOTE(review): the numbering gap 1324->1326 shows the first `if (r.IsSmi())`
// line was dropped by the extraction, and the trailing else/UNREACHABLE
// branch after 1334 is missing -- confirm against upstream.
1323 LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
1324 Representation r = instr->representation();
1326 return DefineAsRegister(new(zone()) LConstantS);
1327 } else if (r.IsInteger32()) {
1328 return DefineAsRegister(new(zone()) LConstantI);
1329 } else if (r.IsDouble()) {
1330 return DefineAsRegister(new(zone()) LConstantD);
1331 } else if (r.IsExternal()) {
1332 return DefineAsRegister(new(zone()) LConstantE);
1333 } else if (r.IsTagged()) {
1334 return DefineAsRegister(new(zone()) LConstantT);
// Small builders: context access, date field, debug break, global
// declarations, and deoptimize.
1342 LInstruction* LChunkBuilder::DoContext(HContext* instr) {
1343 if (instr->HasNoUses()) return NULL;
// Stubs receive the context in cp; full compiles let the allocator choose.
1345 if (info()->IsStub()) {
1346 return DefineFixed(new(zone()) LContext, cp);
1349 return DefineAsRegister(new(zone()) LContext);
// DateField calls into the runtime; object pinned to x0, may deopt eagerly.
1353 LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
1354 LOperand* object = UseFixed(instr->value(), x0);
1355 LDateField* result = new(zone()) LDateField(object, instr->index());
1356 return MarkAsCall(DefineFixed(result, x0), instr, CAN_DEOPTIMIZE_EAGERLY);
1360 LInstruction* LChunkBuilder::DoDebugBreak(HDebugBreak* instr) {
1361 return new(zone()) LDebugBreak();
1365 LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
1366 LOperand* context = UseFixed(instr->context(), cp);
1367 return MarkAsCall(new(zone()) LDeclareGlobals(context), instr);
1371 LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
1372 return AssignEnvironment(new(zone()) LDeoptimize);
// Integer division family. DoDiv dispatches: power-of-two divisor ->
// DoDivByPowerOf2I, other constant divisor -> DoDivByConstI, otherwise
// DoDivI; double -> arithmetic D, tagged -> generic arithmetic T.
// NOTE(review): `return result;` lines and closing braces were dropped by
// the extraction (numbering gaps after 1388, 1407) -- confirm upstream.
1376 LInstruction* LChunkBuilder::DoDivByPowerOf2I(HDiv* instr) {
1377 ASSERT(instr->representation().IsInteger32());
1378 ASSERT(instr->left()->representation().Equals(instr->representation()));
1379 ASSERT(instr->right()->representation().Equals(instr->representation()));
1380 LOperand* dividend = UseRegister(instr->left());
1381 int32_t divisor = instr->right()->GetInteger32Constant();
1382 LInstruction* result = DefineAsRegister(new(zone()) LDivByPowerOf2I(
1383 dividend, divisor));
// Deopt needed for: -0 result (negative divisor), kMinInt / -1 overflow,
// or a remainder when the uses do not truncate.
1384 if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1385 (instr->CheckFlag(HValue::kCanOverflow) && divisor == -1) ||
1386 (!instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) &&
1387 divisor != 1 && divisor != -1)) {
1388 result = AssignEnvironment(result);
1394 LInstruction* LChunkBuilder::DoDivByConstI(HDiv* instr) {
1395 ASSERT(instr->representation().IsInteger32());
1396 ASSERT(instr->left()->representation().Equals(instr->representation()));
1397 ASSERT(instr->right()->representation().Equals(instr->representation()));
1398 LOperand* dividend = UseRegister(instr->left());
1399 int32_t divisor = instr->right()->GetInteger32Constant();
// The temp is only needed for the exactness check when uses don't truncate.
1400 LOperand* temp = instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)
1401 ? NULL : TempRegister();
1402 LInstruction* result = DefineAsRegister(new(zone()) LDivByConstI(
1403 dividend, divisor, temp));
// NOTE(review): the first condition of this deopt test (line 1404,
// presumably the divisor == 0 check) was dropped by the extraction.
1405 (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1406 !instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) {
1407 result = AssignEnvironment(result);
// General register-divisor division; always carries an environment.
1413 LInstruction* LChunkBuilder::DoDivI(HBinaryOperation* instr) {
1414 ASSERT(instr->representation().IsSmiOrInteger32());
1415 ASSERT(instr->left()->representation().Equals(instr->representation()));
1416 ASSERT(instr->right()->representation().Equals(instr->representation()));
1417 LOperand* dividend = UseRegister(instr->left());
1418 LOperand* divisor = UseRegister(instr->right());
1419 LOperand* temp = instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)
1420 ? NULL : TempRegister();
1421 LDivI* div = new(zone()) LDivI(dividend, divisor, temp);
1422 return AssignEnvironment(DefineAsRegister(div));
1426 LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
1427 if (instr->representation().IsSmiOrInteger32()) {
1428 if (instr->RightIsPowerOf2()) {
1429 return DoDivByPowerOf2I(instr);
1430 } else if (instr->right()->IsConstant()) {
1431 return DoDivByConstI(instr);
1433 return DoDivI(instr);
1435 } else if (instr->representation().IsDouble()) {
1436 return DoArithmeticD(Token::DIV, instr);
1438 return DoArithmeticT(Token::DIV, instr);
// DoDummyUse keeps a value alive; DoEnterInlined builds the inlined
// function's environment by copying the outer one.
// NOTE(review): the CopyForInlining argument list is truncated in this copy
// (numbering gap 1452->1455), as is the function tail after 1463.
1443 LInstruction* LChunkBuilder::DoDummyUse(HDummyUse* instr) {
1444 return DefineAsRegister(new(zone()) LDummyUse(UseAny(instr->value())));
1448 LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
1449 HEnvironment* outer = current_block_->last_environment();
1450 HConstant* undefined = graph()->GetConstantUndefined();
1451 HEnvironment* inner = outer->CopyForInlining(instr->closure(),
1452 instr->arguments_count(),
1455 instr->inlining_kind());
1456 // Only replay binding of arguments object if it wasn't removed from graph.
1457 if ((instr->arguments_var() != NULL) &&
1458 instr->arguments_object()->IsLinked()) {
1459 inner->Bind(instr->arguments_var(), instr->arguments_object());
1461 inner->set_entry(instr);
1462 current_block_->UpdateEnvironment(inner);
1463 chunk_->AddInlinedClosure(instr->closure());
// NOTE(review): the bodies of both functions below (their return statements
// and closing braces) were dropped by the extraction -- only the signatures
// and a comment survive. Restore from upstream before compiling.
1468 LInstruction* LChunkBuilder::DoEnvironmentMarker(HEnvironmentMarker* instr) {
1474 LInstruction* LChunkBuilder::DoForceRepresentation(
1475 HForceRepresentation* instr) {
1476 // All HForceRepresentation instructions should be eliminated in the
1477 // representation change phase of Hydrogen.
// Misc builders: function literals (runtime call), cached array index,
// goto, instance-type tests, and inner allocated objects.
1483 LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
1484 LOperand* context = UseFixed(instr->context(), cp);
// NOTE(review): the `return MarkAsCall(` opener (line 1485) was dropped by
// the extraction.
1486 DefineFixed(new(zone()) LFunctionLiteral(context), x0), instr);
1490 LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
1491 HGetCachedArrayIndex* instr) {
1492 ASSERT(instr->value()->representation().IsTagged());
1493 LOperand* value = UseRegisterAtStart(instr->value());
1494 return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
1498 LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
1499 return new(zone()) LGoto(instr->FirstSuccessor());
1503 LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
1504 HHasCachedArrayIndexAndBranch* instr) {
1505 ASSERT(instr->value()->representation().IsTagged());
1506 return new(zone()) LHasCachedArrayIndexAndBranch(
1507 UseRegisterAtStart(instr->value()), TempRegister());
1511 LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
1512 HHasInstanceTypeAndBranch* instr) {
1513 ASSERT(instr->value()->representation().IsTagged());
1514 LOperand* value = UseRegisterAtStart(instr->value());
1515 return new(zone()) LHasInstanceTypeAndBranch(value, TempRegister());
1519 LInstruction* LChunkBuilder::DoInnerAllocatedObject(
1520 HInnerAllocatedObject* instr) {
1521 LOperand* base_object = UseRegisterAtStart(instr->base_object());
1522 LOperand* offset = UseRegisterOrConstantAtStart(instr->offset());
1523 return DefineAsRegister(
1524 new(zone()) LInnerAllocatedObject(base_object, offset));
// instanceof and invocation builders: operands pinned to the registers the
// InstanceofStub / InvokeFunction machinery expects; results in x0.
1528 LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
1529 LOperand* context = UseFixed(instr->context(), cp);
1530 LInstanceOf* result = new(zone()) LInstanceOf(
1532 UseFixed(instr->left(), InstanceofStub::left()),
1533 UseFixed(instr->right(), InstanceofStub::right()));
1534 return MarkAsCall(DefineFixed(result, x0), instr);
1538 LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
1539 HInstanceOfKnownGlobal* instr) {
1540 LInstanceOfKnownGlobal* result = new(zone()) LInstanceOfKnownGlobal(
1541 UseFixed(instr->context(), cp),
1542 UseFixed(instr->left(), InstanceofStub::left()));
1543 return MarkAsCall(DefineFixed(result, x0), instr);
1547 LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
1548 LOperand* context = UseFixed(instr->context(), cp);
1549 // The function is required (by MacroAssembler::InvokeFunction) to be in x1.
1550 LOperand* function = UseFixed(instr->function(), x1);
1551 LInvokeFunction* result = new(zone()) LInvokeFunction(context, function);
1552 return MarkAsCall(DefineFixed(result, x0), instr, CANNOT_DEOPTIMIZE_EAGERLY);
1556 LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
1557 HIsConstructCallAndBranch* instr) {
1558 return new(zone()) LIsConstructCallAndBranch(TempRegister(), TempRegister());
// Type-test branch builders: each reads a tagged value (plus temps where
// the test needs scratch registers) and emits a non-deopting branch.
1562 LInstruction* LChunkBuilder::DoCompareMinusZeroAndBranch(
1563 HCompareMinusZeroAndBranch* instr) {
1564 LInstruction* goto_instr = CheckElideControlInstruction(instr);
1565 if (goto_instr != NULL) return goto_instr;
1566 LOperand* value = UseRegister(instr->value());
1567 LOperand* scratch = TempRegister();
1568 return new(zone()) LCompareMinusZeroAndBranch(value, scratch);
1572 LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
1573 ASSERT(instr->value()->representation().IsTagged());
1574 LOperand* value = UseRegisterAtStart(instr->value());
1575 LOperand* temp1 = TempRegister();
1576 LOperand* temp2 = TempRegister();
1577 return new(zone()) LIsObjectAndBranch(value, temp1, temp2);
1581 LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
1582 ASSERT(instr->value()->representation().IsTagged());
1583 LOperand* value = UseRegisterAtStart(instr->value());
1584 LOperand* temp = TempRegister();
1585 return new(zone()) LIsStringAndBranch(value, temp);
1589 LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
1590 ASSERT(instr->value()->representation().IsTagged());
1591 return new(zone()) LIsSmiAndBranch(UseRegisterAtStart(instr->value()));
1595 LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
1596 HIsUndetectableAndBranch* instr) {
1597 ASSERT(instr->value()->representation().IsTagged());
1598 LOperand* value = UseRegisterAtStart(instr->value());
1599 return new(zone()) LIsUndetectableAndBranch(value, TempRegister());
// Leaving an inlined frame: optionally drop the pushed arguments, then
// discard the inlined environment and restore the outer one.
// NOTE(review): the function tail (likely `return pop;`) was dropped by the
// extraction (numbering gap after 1615).
1603 LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
1604 LInstruction* pop = NULL;
1605 HEnvironment* env = current_block_->last_environment();
1607 if (env->entry()->arguments_pushed()) {
1608 int argument_count = env->arguments_environment()->parameter_count();
1609 pop = new(zone()) LDrop(argument_count);
1610 ASSERT(instr->argument_delta() == -argument_count);
1613 HEnvironment* outer =
1614 current_block_->last_environment()->DiscardInlined(false);
1615 current_block_->UpdateEnvironment(outer);
// Load builders: context slot, function prototype, global cell, and the
// generic global load (a full call). Hole checks add a deopt environment.
1621 LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
1622 LOperand* context = UseRegisterAtStart(instr->value());
1623 LInstruction* result =
1624 DefineAsRegister(new(zone()) LLoadContextSlot(context));
1625 return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
1629 LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
1630 HLoadFunctionPrototype* instr) {
1631 LOperand* function = UseRegister(instr->function());
1632 LOperand* temp = TempRegister();
1633 return AssignEnvironment(DefineAsRegister(
1634 new(zone()) LLoadFunctionPrototype(function, temp)));
1638 LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
1639 LLoadGlobalCell* result = new(zone()) LLoadGlobalCell();
1640 return instr->RequiresHoleCheck()
1641 ? AssignEnvironment(DefineAsRegister(result))
1642 : DefineAsRegister(result);
1646 LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
1647 LOperand* context = UseFixed(instr->context(), cp);
1648 LOperand* global_object = UseFixed(instr->global_object(), x0);
1649 LLoadGlobalGeneric* result =
1650 new(zone()) LLoadGlobalGeneric(context, global_object);
1651 return MarkAsCall(DefineFixed(result, x0), instr);
// Keyed load: three cases -- fixed double array, fixed (smi/tagged/int32)
// array, and external/typed-array elements. Temps are only needed when the
// key is not a constant (address computation) or a hole check is required.
// NOTE(review): the numbering gap 1664->1668 shows the ternary that picks
// the temp for the double case was truncated by the extraction.
1655 LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) {
1656 ASSERT(instr->key()->representation().IsSmiOrInteger32());
1657 ElementsKind elements_kind = instr->elements_kind();
1658 LOperand* elements = UseRegister(instr->elements());
1659 LOperand* key = UseRegisterOrConstantAtStart(instr->key());
1661 if (!instr->is_typed_elements()) {
1662 if (instr->representation().IsDouble()) {
1663 LOperand* temp = (!instr->key()->IsConstant() ||
1664 instr->RequiresHoleCheck())
1668 LLoadKeyedFixedDouble* result =
1669 new(zone()) LLoadKeyedFixedDouble(elements, key, temp);
1670 return instr->RequiresHoleCheck()
1671 ? AssignEnvironment(DefineAsRegister(result))
1672 : DefineAsRegister(result);
1674 ASSERT(instr->representation().IsSmiOrTagged() ||
1675 instr->representation().IsInteger32());
1676 LOperand* temp = instr->key()->IsConstant() ? NULL : TempRegister();
1677 LLoadKeyedFixed* result =
1678 new(zone()) LLoadKeyedFixed(elements, key, temp);
1679 return instr->RequiresHoleCheck()
1680 ? AssignEnvironment(DefineAsRegister(result))
1681 : DefineAsRegister(result);
// External/typed elements: representation must match the elements kind.
1684 ASSERT((instr->representation().IsInteger32() &&
1685 !IsDoubleOrFloatElementsKind(instr->elements_kind())) ||
1686 (instr->representation().IsDouble() &&
1687 IsDoubleOrFloatElementsKind(instr->elements_kind())));
1689 LOperand* temp = instr->key()->IsConstant() ? NULL : TempRegister();
1690 LLoadKeyedExternal* result =
1691 new(zone()) LLoadKeyedExternal(elements, key, temp);
1692 // An unsigned int array load might overflow and cause a deopt. Make sure it
1693 // has an environment.
1694 if (instr->RequiresHoleCheck() ||
1695 elements_kind == EXTERNAL_UINT32_ELEMENTS ||
1696 elements_kind == UINT32_ELEMENTS) {
1697 return AssignEnvironment(DefineAsRegister(result));
1699 return DefineAsRegister(result);
// Generic keyed/named loads are full IC calls (object in x1/x0, key in x0,
// result in x0); LoadRoot and MapEnumLength are simple register defines.
1705 LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
1706 LOperand* context = UseFixed(instr->context(), cp);
1707 LOperand* object = UseFixed(instr->object(), x1);
1708 LOperand* key = UseFixed(instr->key(), x0);
1710 LInstruction* result =
1711 DefineFixed(new(zone()) LLoadKeyedGeneric(context, object, key), x0);
1712 return MarkAsCall(result, instr);
1716 LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
1717 LOperand* object = UseRegisterAtStart(instr->object());
1718 return DefineAsRegister(new(zone()) LLoadNamedField(object));
1722 LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
1723 LOperand* context = UseFixed(instr->context(), cp);
1724 LOperand* object = UseFixed(instr->object(), x0);
1725 LInstruction* result =
1726 DefineFixed(new(zone()) LLoadNamedGeneric(context, object), x0);
1727 return MarkAsCall(result, instr);
1731 LInstruction* LChunkBuilder::DoLoadRoot(HLoadRoot* instr) {
1732 return DefineAsRegister(new(zone()) LLoadRoot);
1736 LInstruction* LChunkBuilder::DoMapEnumLength(HMapEnumLength* instr) {
1737 LOperand* map = UseRegisterAtStart(instr->value());
1738 return DefineAsRegister(new(zone()) LMapEnumLength(map));
// Flooring division family (Math.floor(a / b)), mirroring the DoDiv*
// dispatch: power-of-two -> shifts, other constants -> multiply-by-
// reciprocal, general case -> LFlooringDivI with a remainder temp.
// NOTE(review): `return result;` lines, closing braces, and parts of two
// conditions were dropped by the extraction (gaps after 1752, 1763->1765,
// 1769->1771) -- confirm against upstream.
1742 LInstruction* LChunkBuilder::DoFlooringDivByPowerOf2I(HMathFloorOfDiv* instr) {
1743 ASSERT(instr->representation().IsInteger32());
1744 ASSERT(instr->left()->representation().Equals(instr->representation()));
1745 ASSERT(instr->right()->representation().Equals(instr->representation()));
1746 LOperand* dividend = UseRegisterAtStart(instr->left());
1747 int32_t divisor = instr->right()->GetInteger32Constant();
1748 LInstruction* result = DefineAsRegister(new(zone()) LFlooringDivByPowerOf2I(
1749 dividend, divisor));
1750 if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1751 (instr->CheckFlag(HValue::kLeftCanBeMinInt) && divisor == -1)) {
1752 result = AssignEnvironment(result);
1758 LInstruction* LChunkBuilder::DoFlooringDivByConstI(HMathFloorOfDiv* instr) {
1759 ASSERT(instr->representation().IsInteger32());
1760 ASSERT(instr->left()->representation().Equals(instr->representation()));
1761 ASSERT(instr->right()->representation().Equals(instr->representation()));
1762 LOperand* dividend = UseRegister(instr->left());
1763 int32_t divisor = instr->right()->GetInteger32Constant();
// Temp is skipped when the dividend's sign is known to match the divisor's,
// per the flag tests below.
1765 ((divisor > 0 && !instr->CheckFlag(HValue::kLeftCanBeNegative)) ||
1766 (divisor < 0 && !instr->CheckFlag(HValue::kLeftCanBePositive))) ?
1767 NULL : TempRegister();
1768 LInstruction* result = DefineAsRegister(
1769 new(zone()) LFlooringDivByConstI(dividend, divisor, temp));
1771 (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0)) {
1772 result = AssignEnvironment(result);
1778 LInstruction* LChunkBuilder::DoFlooringDivI(HMathFloorOfDiv* instr) {
1779 LOperand* dividend = UseRegister(instr->left());
1780 LOperand* divisor = UseRegister(instr->right());
1781 LOperand* remainder = TempRegister();
1782 LInstruction* result =
1783 DefineAsRegister(new(zone()) LFlooringDivI(dividend, divisor, remainder));
1784 return AssignEnvironment(result);
1788 LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
1789 if (instr->RightIsPowerOf2()) {
1790 return DoFlooringDivByPowerOf2I(instr);
1791 } else if (instr->right()->IsConstant()) {
1792 return DoFlooringDivByConstI(instr);
1794 return DoFlooringDivI(instr);
// Min/max: int paths may take a constant right operand; double paths need
// both operands in registers. Never deoptimizes.
1799 LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) {
1800 LOperand* left = NULL;
1801 LOperand* right = NULL;
1802 if (instr->representation().IsSmiOrInteger32()) {
1803 ASSERT(instr->left()->representation().Equals(instr->representation()));
1804 ASSERT(instr->right()->representation().Equals(instr->representation()));
// BetterLeft/RightOperand order commutative operands so a constant, if
// any, ends up on the right.
1805 left = UseRegisterAtStart(instr->BetterLeftOperand());
1806 right = UseRegisterOrConstantAtStart(instr->BetterRightOperand());
1808 ASSERT(instr->representation().IsDouble());
1809 ASSERT(instr->left()->representation().IsDouble());
1810 ASSERT(instr->right()->representation().IsDouble());
1811 left = UseRegisterAtStart(instr->left());
1812 right = UseRegisterAtStart(instr->right());
1814 return DefineAsRegister(new(zone()) LMathMinMax(left, right));
// Modulus family, dispatched like DoDiv: power-of-two divisor, other
// constant divisor, then the general register case; double -> arithmetic D,
// tagged -> generic arithmetic T.
// NOTE(review): `return result;` lines and closing braces were dropped by
// the extraction (numbering gaps after 1827, 1843, 1858).
1818 LInstruction* LChunkBuilder::DoModByPowerOf2I(HMod* instr) {
1819 ASSERT(instr->representation().IsInteger32());
1820 ASSERT(instr->left()->representation().Equals(instr->representation()));
1821 ASSERT(instr->right()->representation().Equals(instr->representation()));
1822 LOperand* dividend = UseRegisterAtStart(instr->left());
1823 int32_t divisor = instr->right()->GetInteger32Constant();
1824 LInstruction* result = DefineSameAsFirst(new(zone()) LModByPowerOf2I(
1825 dividend, divisor));
// Only a -0 result can force a deopt here.
1826 if (instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1827 result = AssignEnvironment(result);
1833 LInstruction* LChunkBuilder::DoModByConstI(HMod* instr) {
1834 ASSERT(instr->representation().IsInteger32());
1835 ASSERT(instr->left()->representation().Equals(instr->representation()));
1836 ASSERT(instr->right()->representation().Equals(instr->representation()));
1837 LOperand* dividend = UseRegister(instr->left());
1838 int32_t divisor = instr->right()->GetInteger32Constant();
1839 LOperand* temp = TempRegister();
1840 LInstruction* result = DefineAsRegister(new(zone()) LModByConstI(
1841 dividend, divisor, temp));
1842 if (divisor == 0 || instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1843 result = AssignEnvironment(result);
1849 LInstruction* LChunkBuilder::DoModI(HMod* instr) {
1850 ASSERT(instr->representation().IsSmiOrInteger32());
1851 ASSERT(instr->left()->representation().Equals(instr->representation()));
1852 ASSERT(instr->right()->representation().Equals(instr->representation()));
1853 LOperand* dividend = UseRegister(instr->left());
1854 LOperand* divisor = UseRegister(instr->right());
1855 LInstruction* result = DefineAsRegister(new(zone()) LModI(dividend, divisor));
1856 if (instr->CheckFlag(HValue::kCanBeDivByZero) ||
1857 instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1858 result = AssignEnvironment(result);
1864 LInstruction* LChunkBuilder::DoMod(HMod* instr) {
1865 if (instr->representation().IsSmiOrInteger32()) {
1866 if (instr->RightIsPowerOf2()) {
1867 return DoModByPowerOf2I(instr);
1868 } else if (instr->right()->IsConstant()) {
1869 return DoModByConstI(instr);
1871 return DoModI(instr);
1873 } else if (instr->representation().IsDouble()) {
1874 return DoArithmeticD(Token::MOD, instr);
1876 return DoArithmeticT(Token::MOD, instr);
// Multiply: constants from a restricted set are strength-reduced via
// LMulConstIS (shifts/adds); everything else goes through LMulS/LMulI.
// NOTE(review): several lines were dropped by the extraction here (e.g.
// 1885, 1892-1894, part of the condition at 1908, declaration of `left`,
// and the trailing `return result;`) -- confirm against upstream before
// touching this logic.
1881 LInstruction* LChunkBuilder::DoMul(HMul* instr) {
1882 if (instr->representation().IsSmiOrInteger32()) {
1883 ASSERT(instr->left()->representation().Equals(instr->representation()));
1884 ASSERT(instr->right()->representation().Equals(instr->representation()));
1886 bool can_overflow = instr->CheckFlag(HValue::kCanOverflow);
1887 bool bailout_on_minus_zero = instr->CheckFlag(HValue::kBailoutOnMinusZero);
1888 bool needs_environment = can_overflow || bailout_on_minus_zero;
1890 HValue* least_const = instr->BetterLeftOperand();
1891 HValue* most_const = instr->BetterRightOperand();
1895 // LMulConstI can handle a subset of constants:
1896 // With support for overflow detection:
1899 // Without support for overflow detection:
1900 // 2^n + 1, -(2^n - 1)
1901 if (most_const->IsConstant()) {
1902 int32_t constant = HConstant::cast(most_const)->Integer32Value();
1903 bool small_constant = (constant >= -1) && (constant <= 2);
1904 bool end_range_constant = (constant <= -kMaxInt) || (constant == kMaxInt);
1905 int32_t constant_abs = Abs(constant);
1907 if (!end_range_constant &&
1909 (IsPowerOf2(constant_abs)) ||
1910 (!can_overflow && (IsPowerOf2(constant_abs + 1) ||
1911 IsPowerOf2(constant_abs - 1))))) {
1912 LConstantOperand* right = UseConstant(most_const);
1913 bool need_register = IsPowerOf2(constant_abs) && !small_constant;
1914 left = need_register ? UseRegister(least_const)
1915 : UseRegisterAtStart(least_const);
1916 LMulConstIS* mul = new(zone()) LMulConstIS(left, right);
1917 if (needs_environment) AssignEnvironment(mul);
1918 return DefineAsRegister(mul);
1922 left = UseRegisterAtStart(least_const);
1923 // LMulI/S can handle all cases, but it requires that a register is
1924 // allocated for the second operand.
1925 LInstruction* result;
1926 if (instr->representation().IsSmi()) {
1927 LOperand* right = UseRegisterAtStart(most_const);
1928 result = DefineAsRegister(new(zone()) LMulS(left, right));
1930 LOperand* right = UseRegisterAtStart(most_const);
1931 result = DefineAsRegister(new(zone()) LMulI(left, right));
1933 if (needs_environment) AssignEnvironment(result);
1935 } else if (instr->representation().IsDouble()) {
1936 return DoArithmeticD(Token::MUL, instr);
1938 return DoArithmeticT(Token::MUL, instr);
// OSR entry and parameter binding. Stack parameters get spill slots; stub
// parameters are bound to the registers named by the stub's interface
// descriptor.
1943 LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
1944 ASSERT(argument_count_ == 0);
1945 allocator_->MarkAsOsrEntry();
1946 current_block_->last_environment()->set_ast_id(instr->ast_id());
1947 return AssignEnvironment(new(zone()) LOsrEntry);
1951 LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
1952 LParameter* result = new(zone()) LParameter;
1953 if (instr->kind() == HParameter::STACK_PARAMETER) {
1954 int spill_index = chunk_->GetParameterStackSlot(instr->index());
1955 return DefineAsSpilled(result, spill_index);
1957 ASSERT(info()->IsStub());
1958 CodeStubInterfaceDescriptor* descriptor =
1959 info()->code_stub()->GetInterfaceDescriptor(info()->isolate());
1960 int index = static_cast<int>(instr->index());
1961 Register reg = descriptor->GetParameterRegister(index);
1962 return DefineFixed(result, reg);
// Math.pow: lowered to a C-function call, so all operands are forced into
// fixed registers (base in d0; exponent in x12/d1/x11 depending on its
// representation) and the double result comes back in d0.
1967 LInstruction* LChunkBuilder::DoPower(HPower* instr) {
1968 ASSERT(instr->representation().IsDouble());
1969 // We call a C function for double power. It can't trigger a GC.
1970 // We need to use fixed result register for the call.
1971 Representation exponent_type = instr->right()->representation();
1972 ASSERT(instr->left()->representation().IsDouble());
1973 LOperand* left = UseFixedDouble(instr->left(), d0);
1974 LOperand* right = exponent_type.IsInteger32()
1975 ? UseFixed(instr->right(), x12)
1976 : exponent_type.IsDouble()
1977 ? UseFixedDouble(instr->right(), d1)
1978 : UseFixed(instr->right(), x11);
1979 LPower* result = new(zone()) LPower(left, right);
1980 return MarkAsCall(DefineFixedDouble(result, d0),
// NOTE(review): the "instr," argument line (original 1981) is elided here.
1982 CAN_DEOPTIMIZE_EAGERLY);
// Pushes a call argument; the value only needs to live in some register.
1986 LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
1987 LOperand* argument = UseRegister(instr->argument());
1988 return new(zone()) LPushArgument(argument);
// RegExp literal materialization is a runtime call: context in cp, result
// fixed in x0.
1992 LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
1993 LOperand* context = UseFixed(instr->context(), cp);
// NOTE(review): the "return MarkAsCall(" line (original 1994) is elided here.
1995 DefineFixed(new(zone()) LRegExpLiteral(context), x0), instr);
// Extracts the raw bit pattern of a double value into a general register.
1999 LInstruction* LChunkBuilder::DoDoubleBits(HDoubleBits* instr) {
2000 HValue* value = instr->value();
2001 ASSERT(value->representation().IsDouble());
2002 return DefineAsRegister(new(zone()) LDoubleBits(UseRegister(value)));
// Assembles a double from its high and low 32-bit halves; needs one scratch
// register for the combination.
2006 LInstruction* LChunkBuilder::DoConstructDouble(HConstructDouble* instr) {
2007 LOperand* lo = UseRegister(instr->lo());
2008 LOperand* hi = UseRegister(instr->hi());
2009 LOperand* temp = TempRegister();
2010 return DefineAsRegister(new(zone()) LConstructDouble(hi, lo, temp));
// Function return: the value is fixed in x0; stubs additionally keep the
// context in cp, and the (possibly dynamic) parameter count rides along for
// stack cleanup.
2014 LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
2015 LOperand* context = info()->IsStub()
2016 ? UseFixed(instr->context(), cp)
// NOTE(review): the ": NULL;" alternative (original 2017) is elided here.
2018 LOperand* parameter_count = UseRegisterOrConstant(instr->parameter_count());
2019 return new(zone()) LReturn(UseFixed(instr->value(), x0), context,
// Reads one character from a sequential string; index may be a constant, and
// a scratch register is used for address computation.
2024 LInstruction* LChunkBuilder::DoSeqStringGetChar(HSeqStringGetChar* instr) {
2025 LOperand* string = UseRegisterAtStart(instr->string());
2026 LOperand* index = UseRegisterOrConstantAtStart(instr->index());
2027 LOperand* temp = TempRegister();
2028 LSeqStringGetChar* result =
2029 new(zone()) LSeqStringGetChar(string, index, temp);
2030 return DefineAsRegister(result);
// Writes one character into a sequential string. Under --debug-code the
// codegen performs extra checks, which require the index in a register and
// the context in cp; otherwise the context operand is unused (NULL).
2034 LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) {
2035 LOperand* string = UseRegister(instr->string());
2036 LOperand* index = FLAG_debug_code
2037 ? UseRegister(instr->index())
2038 : UseRegisterOrConstant(instr->index());
2039 LOperand* value = UseRegister(instr->value());
2040 LOperand* context = FLAG_debug_code ? UseFixed(instr->context(), cp) : NULL;
2041 LOperand* temp = TempRegister();
2042 LSeqStringSetChar* result =
2043 new(zone()) LSeqStringSetChar(context, string, index, value, temp);
2044 return DefineAsRegister(result);
// Common lowering for ROR/SAR/SHL/SHR. Tagged operands go through the generic
// binary-op stub; Smi/Integer32 operands get LShiftS/LShiftI. A constant
// shift amount is masked to 0..31; a logical shift right by 0 can deoptimize
// when the result cannot be treated as a truncated int32 (it would produce an
// unrepresentable uint32).
// NOTE(review): brace-only and "} else {" lines are elided from this listing
// (gaps in the embedded numbering, e.g. 2052-2053, 2071, 2075-2077, 2084).
2048 LInstruction* LChunkBuilder::DoShift(Token::Value op,
2049 HBitwiseBinaryOperation* instr) {
2050 if (instr->representation().IsTagged()) {
2051 return DoArithmeticT(op, instr);
2054 ASSERT(instr->representation().IsInteger32() ||
2055 instr->representation().IsSmi());
2056 ASSERT(instr->left()->representation().Equals(instr->representation()));
2057 ASSERT(instr->right()->representation().Equals(instr->representation()));
2059 LOperand* left = instr->representation().IsSmi()
2060 ? UseRegister(instr->left())
2061 : UseRegisterAtStart(instr->left());
2063 HValue* right_value = instr->right();
2064 LOperand* right = NULL;
2065 LOperand* temp = NULL;
2066 int constant_value = 0;
2067 if (right_value->IsConstant()) {
2068 right = UseConstant(right_value);
2069 HConstant* constant = HConstant::cast(right_value);
2070 constant_value = constant->Integer32Value() & 0x1f;
2072 right = UseRegisterAtStart(right_value);
2073 if (op == Token::ROR) {
2074 temp = TempRegister();
2078 // Shift operations can only deoptimize if we do a logical shift by 0 and the
2079 // result cannot be truncated to int32.
2080 bool does_deopt = false;
2081 if ((op == Token::SHR) && (constant_value == 0)) {
2082 if (FLAG_opt_safe_uint32_operations) {
2083 does_deopt = !instr->CheckFlag(HInstruction::kUint32);
2085 does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToInt32);
2089 LInstruction* result;
2090 if (instr->representation().IsInteger32()) {
2091 result = DefineAsRegister(new(zone()) LShiftI(op, left, right, does_deopt));
2093 ASSERT(instr->representation().IsSmi());
2094 result = DefineAsRegister(
2095 new(zone()) LShiftS(op, left, right, temp, does_deopt));
2098 return does_deopt ? AssignEnvironment(result) : result;
// Rotate-right: dispatches to the common shift lowering.
2102 LInstruction* LChunkBuilder::DoRor(HRor* instr) {
2103 return DoShift(Token::ROR, instr);
// Arithmetic shift right: dispatches to the common shift lowering.
2107 LInstruction* LChunkBuilder::DoSar(HSar* instr) {
2108 return DoShift(Token::SAR, instr);
// Shift left: dispatches to the common shift lowering.
2112 LInstruction* LChunkBuilder::DoShl(HShl* instr) {
2113 return DoShift(Token::SHL, instr);
// Logical shift right: dispatches to the common shift lowering.
2117 LInstruction* LChunkBuilder::DoShr(HShr* instr) {
2118 return DoShift(Token::SHR, instr);
// Replays the simulate's effects into the block's last environment; emits no
// instruction.
// NOTE(review): the function's return statement (original 2124 area) is
// elided from this listing — verify against upstream.
2122 LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
2123 instr->ReplayEnvironment(current_block_->last_environment());
// Stack-overflow check. At function entry it is a call (context fixed in cp);
// on a backwards branch it only needs an environment and pointer map so the
// interrupt stub can deopt/lazily bail out.
2128 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
2129 if (instr->is_function_entry()) {
2130 LOperand* context = UseFixed(instr->context(), cp);
2131 return MarkAsCall(new(zone()) LStackCheck(context), instr);
// NOTE(review): a "} else {" line (original 2132) is elided in this listing.
2133 ASSERT(instr->is_backwards_branch());
2134 LOperand* context = UseAny(instr->context());
2135 return AssignEnvironment(
2136 AssignPointerMap(new(zone()) LStackCheck(context)));
// Stores a code object's entry address into a function; needs one scratch
// register.
2141 LInstruction* LChunkBuilder::DoStoreCodeEntry(HStoreCodeEntry* instr) {
2142 LOperand* function = UseRegister(instr->function());
2143 LOperand* code_object = UseRegisterAtStart(instr->code_object());
2144 LOperand* temp = TempRegister();
2145 return new(zone()) LStoreCodeEntry(function, code_object, temp);
// Stores a value into a context slot. If a write barrier is needed, both
// context and value must be clobberable (RecordWrite clobbers registers);
// a hole check adds a deopt environment.
// NOTE(review): the declarations of the local "context" and "value" operands
// (original 2151-2152) and the "} else {" (2158) are elided in this listing.
2149 LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
2150 LOperand* temp = TempRegister();
2153 if (instr->NeedsWriteBarrier()) {
2154 // TODO(all): Replace these constraints when RecordWriteStub has been
2156 context = UseRegisterAndClobber(instr->context());
2157 value = UseRegisterAndClobber(instr->value());
2159 context = UseRegister(instr->context());
2160 value = UseRegister(instr->value());
2162 LInstruction* result = new(zone()) LStoreContextSlot(context, value, temp);
2163 return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
// Stores into a global property cell; when the cell requires a hole check,
// the store gets a deopt environment.
// NOTE(review): the temp-operand argument lines of the first branch (original
// 2171-2173) are elided in this listing.
2167 LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
2168 LOperand* value = UseRegister(instr->value());
2169 if (instr->RequiresHoleCheck()) {
2170 return AssignEnvironment(new(zone()) LStoreGlobalCell(value,
2174 return new(zone()) LStoreGlobalCell(value, TempRegister(), NULL);
// Keyed store dispatch: typed/external arrays use LStoreKeyedExternal, double
// arrays LStoreKeyedFixedDouble, everything else LStoreKeyedFixed. A tagged
// store needing a write barrier forces clobberable operands because
// RecordWrite() clobbers all registers.
2179 LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
2180 LOperand* temp = NULL;
2181 LOperand* elements = NULL;
2182 LOperand* val = NULL;
2183 LOperand* key = UseRegisterOrConstantAtStart(instr->key());
2185 if (!instr->is_typed_elements() &&
2186 instr->value()->representation().IsTagged() &&
2187 instr->NeedsWriteBarrier()) {
2188 // RecordWrite() will clobber all registers.
2189 elements = UseRegisterAndClobber(instr->elements());
2190 val = UseRegisterAndClobber(instr->value());
2191 temp = TempRegister();
// NOTE(review): a "} else {" line (original 2192) is elided in this listing.
2193 elements = UseRegister(instr->elements());
2194 val = UseRegister(instr->value());
// A constant key needs no scratch register for address computation.
2195 temp = instr->key()->IsConstant() ? NULL : TempRegister();
2198 if (instr->is_typed_elements()) {
2199 ASSERT((instr->value()->representation().IsInteger32() &&
2200 !IsDoubleOrFloatElementsKind(instr->elements_kind())) ||
2201 (instr->value()->representation().IsDouble() &&
2202 IsDoubleOrFloatElementsKind(instr->elements_kind())));
2203 ASSERT((instr->is_fixed_typed_array() &&
2204 instr->elements()->representation().IsTagged()) ||
2205 (instr->is_external() &&
2206 instr->elements()->representation().IsExternal()));
2207 return new(zone()) LStoreKeyedExternal(elements, key, val, temp);
2209 } else if (instr->value()->representation().IsDouble()) {
2210 ASSERT(instr->elements()->representation().IsTagged());
2211 return new(zone()) LStoreKeyedFixedDouble(elements, key, val, temp);
2214 ASSERT(instr->elements()->representation().IsTagged());
2215 ASSERT(instr->value()->representation().IsSmiOrTagged() ||
2216 instr->value()->representation().IsInteger32());
2217 return new(zone()) LStoreKeyedFixed(elements, key, val, temp);
// Generic keyed store (IC call): operands are pinned to the IC's calling
// convention — context in cp, object x2, key x1, value x0.
2222 LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
2223 LOperand* context = UseFixed(instr->context(), cp);
2224 LOperand* object = UseFixed(instr->object(), x2);
2225 LOperand* key = UseFixed(instr->key(), x1);
2226 LOperand* value = UseFixed(instr->value(), x0);
2228 ASSERT(instr->object()->representation().IsTagged());
2229 ASSERT(instr->key()->representation().IsTagged());
2230 ASSERT(instr->value()->representation().IsTagged());
// NOTE(review): the "return MarkAsCall(" line (original 2232) is elided here.
2233 new(zone()) LStoreKeyedGeneric(context, object, key, value), instr);
// Named in-object/external field store. Operand constraints depend on which
// write barrier (value vs. map) is needed; a heap-object field representation
// whose value type is not statically known to be a heap object needs an
// environment for the type-check deopt.
2237 LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
2238 // TODO(jbramley): It might be beneficial to allow value to be a constant in
2239 // some cases. x64 makes use of this with FLAG_track_fields, for example.
2241 LOperand* object = UseRegister(instr->object());
// NOTE(review): the "LOperand* value = NULL;" declaration (original 2242)
// appears elided in this listing.
2243 LOperand* temp0 = NULL;
2244 LOperand* temp1 = NULL;
2246 if (instr->access().IsExternalMemory() ||
2247 instr->field_representation().IsDouble()) {
2248 value = UseRegister(instr->value());
2249 } else if (instr->NeedsWriteBarrier()) {
// The value write barrier clobbers the value register.
2250 value = UseRegisterAndClobber(instr->value());
2251 temp0 = TempRegister();
2252 temp1 = TempRegister();
2253 } else if (instr->NeedsWriteBarrierForMap()) {
2254 value = UseRegister(instr->value());
2255 temp0 = TempRegister();
2256 temp1 = TempRegister();
// NOTE(review): a "} else {" line (original 2257) is elided in this listing.
2258 value = UseRegister(instr->value());
2259 temp0 = TempRegister();
2262 LStoreNamedField* result =
2263 new(zone()) LStoreNamedField(object, value, temp0, temp1);
2264 if (instr->field_representation().IsHeapObject() &&
2265 !instr->value()->type().IsHeapObject()) {
2266 return AssignEnvironment(result);
// Generic named store (IC call): context in cp, receiver x1, value x0.
2272 LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
2273 LOperand* context = UseFixed(instr->context(), cp);
2274 LOperand* object = UseFixed(instr->object(), x1);
2275 LOperand* value = UseFixed(instr->value(), x0);
2276 LInstruction* result = new(zone()) LStoreNamedGeneric(context, object, value);
2277 return MarkAsCall(result, instr);
// String concatenation via the StringAdd stub: left x1, right x0, result x0.
2281 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
2282 LOperand* context = UseFixed(instr->context(), cp);
2283 LOperand* left = UseFixed(instr->left(), x1);
2284 LOperand* right = UseFixed(instr->right(), x0);
2286 LStringAdd* result = new(zone()) LStringAdd(context, left, right);
2287 return MarkAsCall(DefineFixed(result, x0), instr);
// charCodeAt: string and index registers are clobbered by the code sequence;
// may call the runtime (pointer map) and can deoptimize (environment).
2291 LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
2292 LOperand* string = UseRegisterAndClobber(instr->string());
2293 LOperand* index = UseRegisterAndClobber(instr->index());
2294 LOperand* context = UseAny(instr->context());
2295 LStringCharCodeAt* result =
2296 new(zone()) LStringCharCodeAt(context, string, index);
2297 return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
// String.fromCharCode for a single code unit; may allocate (pointer map) but
// does not deoptimize.
2301 LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
2302 LOperand* char_code = UseRegister(instr->value());
2303 LOperand* context = UseAny(instr->context());
2304 LStringCharFromCode* result =
2305 new(zone()) LStringCharFromCode(context, char_code);
2306 return AssignPointerMap(DefineAsRegister(result));
// String comparison branch, implemented as a call: left x1, right x0,
// context cp; both inputs must be tagged.
2310 LInstruction* LChunkBuilder::DoStringCompareAndBranch(
2311 HStringCompareAndBranch* instr) {
2312 ASSERT(instr->left()->representation().IsTagged());
2313 ASSERT(instr->right()->representation().IsTagged());
2314 LOperand* context = UseFixed(instr->context(), cp);
2315 LOperand* left = UseFixed(instr->left(), x1);
2316 LOperand* right = UseFixed(instr->right(), x0);
2317 LStringCompareAndBranch* result =
2318 new(zone()) LStringCompareAndBranch(context, left, right);
2319 return MarkAsCall(result, instr);
// Subtraction: Smi/Integer32 get LSubS/LSubI (with a deopt environment when
// overflow is possible); a constant-zero left operand is kept as a constant
// so codegen can emit a negate. Doubles and tagged values go to the
// arithmetic helpers.
2323 LInstruction* LChunkBuilder::DoSub(HSub* instr) {
2324 if (instr->representation().IsSmiOrInteger32()) {
2325 ASSERT(instr->left()->representation().Equals(instr->representation()));
2326 ASSERT(instr->right()->representation().Equals(instr->representation()));
// NOTE(review): the "LOperand* left;" declaration (original 2327) appears
// elided in this listing.
2328 if (instr->left()->IsConstant() &&
2329 (HConstant::cast(instr->left())->Integer32Value() == 0)) {
2330 left = UseConstant(instr->left());
// NOTE(review): a "} else {" line (original 2331) is elided in this listing.
2332 left = UseRegisterAtStart(instr->left());
2334 LOperand* right = UseRegisterOrConstantAtStart(instr->right());
2335 LInstruction* result = instr->representation().IsSmi() ?
2336 DefineAsRegister(new(zone()) LSubS(left, right)) :
2337 DefineAsRegister(new(zone()) LSubI(left, right));
2338 if (instr->CheckFlag(HValue::kCanOverflow)) {
2339 result = AssignEnvironment(result);
2342 } else if (instr->representation().IsDouble()) {
2343 return DoArithmeticD(Token::SUB, instr);
2345 return DoArithmeticT(Token::SUB, instr);
// Materializes the current function object in a register when it is used.
// NOTE(review): the body of the HasNoUses() branch (original 2352-2353) is
// elided in this listing — verify against upstream.
2350 LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
2351 if (instr->HasNoUses()) {
2354 return DefineAsRegister(new(zone()) LThisFunction);
// Converts an object to fast-properties mode via a call; object and result
// both fixed in x0.
2359 LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
2360 LOperand* object = UseFixed(instr->value(), x0);
2361 LToFastProperties* result = new(zone()) LToFastProperties(object);
2362 return MarkAsCall(DefineFixed(result, x0), instr);
// Elements-kind transition: a simple map change just swaps the map (two
// scratch registers, no context); a full transition needs the context in cp
// and may call the runtime (pointer map).
2366 LInstruction* LChunkBuilder::DoTransitionElementsKind(
2367 HTransitionElementsKind* instr) {
2368 LOperand* object = UseRegister(instr->object());
2369 if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) {
2370 LTransitionElementsKind* result =
2371 new(zone()) LTransitionElementsKind(object, NULL,
2372 TempRegister(), TempRegister());
// NOTE(review): this branch's return statement and the "} else {" (original
// 2373-2374) are elided in this listing.
2375 LOperand* context = UseFixed(instr->context(), cp);
2376 LTransitionElementsKind* result =
2377 new(zone()) LTransitionElementsKind(object, context, TempRegister());
2378 return AssignPointerMap(result);
// Deopt trap if the object is followed by an AllocationMemento; needs two
// scratch registers and a deopt environment.
2383 LInstruction* LChunkBuilder::DoTrapAllocationMemento(
2384 HTrapAllocationMemento* instr) {
2385 LOperand* object = UseRegister(instr->object());
2386 LOperand* temp1 = TempRegister();
2387 LOperand* temp2 = TempRegister();
2388 LTrapAllocationMemento* result =
2389 new(zone()) LTrapAllocationMemento(object, temp1, temp2);
2390 return AssignEnvironment(result);
// typeof: implemented as a runtime call; the value only needs some register
// (see TODO), result fixed in x0.
2394 LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
2395 LOperand* context = UseFixed(instr->context(), cp);
2396 // TODO(jbramley): In ARM, this uses UseFixed to force the input to x0.
2397 // However, LCodeGen::DoTypeof just pushes it to the stack (for CallRuntime)
2398 // anyway, so the input doesn't have to be in x0. We might be able to improve
2399 // the ARM back-end a little by relaxing this restriction.
// NOTE(review): the "LTypeof* result =" line (original 2400) is elided here.
2401 new(zone()) LTypeof(context, UseRegisterAtStart(instr->value()));
2402 return MarkAsCall(DefineFixed(result, x0), instr);
// typeof-is branch: elided entirely when the control instruction can be
// folded to a goto; otherwise conservatively reserves two scratch registers
// (the type-literal handle cannot be dereferenced at this point).
2406 LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
2407 LInstruction* goto_instr = CheckElideControlInstruction(instr);
2408 if (goto_instr != NULL) return goto_instr;
2410 // We only need temp registers in some cases, but we can't dereference the
2411 // instr->type_literal() handle to test that here.
2412 LOperand* temp1 = TempRegister();
2413 LOperand* temp2 = TempRegister();
2415 return new(zone()) LTypeofIsAndBranch(
2416 UseRegister(instr->value()), temp1, temp2);
// Lowers the unary math operations (abs, exp, floor, log, pow-half, round,
// sqrt, clz32) according to the operation and representation. Tagged abs
// may allocate a HeapNumber; floor/round can deoptimize when the double
// result does not fit an int32; log is a C call with d0 in/out.
// NOTE(review): several "case kMath...:" labels, closing braces, and the
// trailing default/UNREACHABLE lines are elided from this listing (gaps in
// the embedded numbering, e.g. 2422, 2424, 2435, 2438, 2441, 2446-2449) —
// only "case kMathPowHalf:" survives below; verify against upstream.
2420 LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
2421 switch (instr->op()) {
2423 Representation r = instr->representation();
2425 // The tagged case might need to allocate a HeapNumber for the result,
2426 // so it is handled by a separate LInstruction.
2427 LOperand* context = UseFixed(instr->context(), cp);
2428 LOperand* input = UseRegister(instr->value());
2429 LOperand* temp1 = TempRegister();
2430 LOperand* temp2 = TempRegister();
2431 LOperand* temp3 = TempRegister();
2432 LMathAbsTagged* result =
2433 new(zone()) LMathAbsTagged(context, input, temp1, temp2, temp3);
2434 return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
2436 LOperand* input = UseRegisterAtStart(instr->value());
2437 LMathAbs* result = new(zone()) LMathAbs(input);
2439 // The Double case can never fail so it doesn't need an environment.
2440 return DefineAsRegister(result);
2442 ASSERT(r.IsInteger32() || r.IsSmi());
2443 // The Integer32 and Smi cases need an environment because they can
2444 // deoptimize on minimum representable number.
2445 return AssignEnvironment(DefineAsRegister(result));
2450 ASSERT(instr->representation().IsDouble());
2451 ASSERT(instr->value()->representation().IsDouble());
2452 LOperand* input = UseRegister(instr->value());
2453 // TODO(all): Implement TempFPRegister.
2454 LOperand* double_temp1 = FixedTemp(d24); // This was chosen arbitrarily.
2455 LOperand* temp1 = TempRegister();
2456 LOperand* temp2 = TempRegister();
2457 LOperand* temp3 = TempRegister();
2458 LMathExp* result = new(zone()) LMathExp(input, double_temp1,
2459 temp1, temp2, temp3);
2460 return DefineAsRegister(result);
2463 ASSERT(instr->representation().IsInteger32());
2464 ASSERT(instr->value()->representation().IsDouble());
2465 // TODO(jbramley): ARM64 can easily handle a double argument with frintm,
2466 // but we're never asked for it here. At the moment, we fall back to the
2467 // runtime if the result doesn't fit, like the other architectures.
2468 LOperand* input = UseRegisterAtStart(instr->value());
2469 LMathFloor* result = new(zone()) LMathFloor(input);
2470 return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
2473 ASSERT(instr->representation().IsDouble());
2474 ASSERT(instr->value()->representation().IsDouble());
2475 LOperand* input = UseFixedDouble(instr->value(), d0);
2476 LMathLog* result = new(zone()) LMathLog(input);
2477 return MarkAsCall(DefineFixedDouble(result, d0), instr);
2479 case kMathPowHalf: {
2480 ASSERT(instr->representation().IsDouble());
2481 ASSERT(instr->value()->representation().IsDouble());
2482 LOperand* input = UseRegister(instr->value());
2483 return DefineAsRegister(new(zone()) LMathPowHalf(input));
2486 ASSERT(instr->representation().IsInteger32());
2487 ASSERT(instr->value()->representation().IsDouble());
2488 // TODO(jbramley): As with kMathFloor, we can probably handle double
2489 // results fairly easily, but we are never asked for them.
2490 LOperand* input = UseRegister(instr->value());
2491 LOperand* temp = FixedTemp(d24); // Choosen arbitrarily.
2492 LMathRound* result = new(zone()) LMathRound(input, temp);
2493 return AssignEnvironment(DefineAsRegister(result));
2496 ASSERT(instr->representation().IsDouble());
2497 ASSERT(instr->value()->representation().IsDouble());
2498 LOperand* input = UseRegisterAtStart(instr->value());
2499 return DefineAsRegister(new(zone()) LMathSqrt(input));
2502 ASSERT(instr->representation().IsInteger32());
2503 ASSERT(instr->value()->representation().IsInteger32());
2504 LOperand* input = UseRegisterAtStart(instr->value());
2505 return DefineAsRegister(new(zone()) LMathClz32(input));
// Maps an unknown OSR value to the spill slot the unoptimized frame used:
// parameters use the chunk's parameter slots, locals use their offset from
// the first local; aborts if the slot index exceeds the allocator's limit.
2514 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
2515 // Use an index that corresponds to the location in the unoptimized frame,
2516 // which the optimized frame will subsume.
2517 int env_index = instr->index();
2518 int spill_index = 0;
2519 if (instr->environment()->is_parameter_index(env_index)) {
2520 spill_index = chunk_->GetParameterStackSlot(env_index);
// NOTE(review): a "} else {" line (original 2521) is elided in this listing.
2522 spill_index = env_index - instr->environment()->first_local_index();
2523 if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
2524 Abort(kTooManySpillSlotsNeededForOSR);
2528 return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
// HUseConst produces no Lithium instruction.
// NOTE(review): the function body (original 2533 area) is elided from this
// listing — verify against upstream.
2532 LInstruction* LChunkBuilder::DoUseConst(HUseConst* instr) {
// for-in preparation: a runtime call that can deoptimize eagerly; enumerable
// fixed in x0 (distinct from the other registers the stub uses), result x0.
2537 LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
2538 LOperand* context = UseFixed(instr->context(), cp);
2539 // Assign object to a fixed register different from those already used in
2540 // LForInPrepareMap.
2541 LOperand* object = UseFixed(instr->enumerable(), x0);
2542 LForInPrepareMap* result = new(zone()) LForInPrepareMap(context, object);
2543 return MarkAsCall(DefineFixed(result, x0), instr, CAN_DEOPTIMIZE_EAGERLY);
// Loads the enum cache array from a map for for-in; can deoptimize, so an
// environment is attached.
2547 LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
2548 LOperand* map = UseRegister(instr->map());
2549 return AssignEnvironment(DefineAsRegister(new(zone()) LForInCacheArray(map)));
// Deopt check that the value's map equals the expected map (for-in fast
// path); one scratch register for the loaded map.
2553 LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
2554 LOperand* value = UseRegisterAtStart(instr->value());
2555 LOperand* map = UseRegister(instr->map());
2556 LOperand* temp = TempRegister();
2557 return AssignEnvironment(new(zone()) LCheckMapValue(value, map, temp));
// Loads an object field addressed by a dynamic index (for-in body).
2561 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
2562 LOperand* object = UseRegisterAtStart(instr->object());
2563 LOperand* index = UseRegister(instr->index());
2564 return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index));
// Wraps a receiver for a non-strict callee (global/undefined receivers get
// boxed); can deoptimize, so an environment is attached.
2568 LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
2569 LOperand* receiver = UseRegister(instr->receiver());
2570 LOperand* function = UseRegister(instr->function());
2571 LWrapReceiver* result = new(zone()) LWrapReceiver(receiver, function);
2572 return AssignEnvironment(DefineAsRegister(result));
2576 } } // namespace v8::internal