1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
9 #include "src/arm64/lithium-codegen-arm64.h"
10 #include "src/hydrogen-osr.h"
11 #include "src/lithium-inl.h"
// Generates L<type>::CompileToNative for every concrete lithium instruction;
// each generated method simply dispatches to the matching LCodeGen::Do<type>
// visitor.
// NOTE(review): this copy of the file is missing interior lines (the macro's
// closing "}" / backslash line, for instance) -- comments in this excerpt
// annotate only what is visible.
16 #define DEFINE_COMPILE(type) \
17 void L##type::CompileToNative(LCodeGen* generator) { \
18 generator->Do##type(this); \
20 LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
// Debug-only sanity check for call instructions: asserts the register
// constraints the calling convention imposes on the output, inputs and
// temps of this instruction (rationale in the comments below).
24 void LInstruction::VerifyCall() {
25 // Call instructions can use only fixed registers as temporaries and
26 // outputs because all registers are blocked by the calling convention.
27 // Inputs operands must use a fixed register or use-at-start policy or
28 // a non-register policy.
29 DCHECK(Output() == NULL ||
30 LUnallocated::cast(Output())->HasFixedPolicy() ||
31 !LUnallocated::cast(Output())->HasRegisterPolicy());
// Inputs: each must be fixed or used-at-start.
32 for (UseIterator it(this); !it.Done(); it.Advance()) {
33 LUnallocated* operand = LUnallocated::cast(it.Current());
34 DCHECK(operand->HasFixedPolicy() ||
35 operand->IsUsedAtStart());
// Temps: each must be fixed or carry a non-register policy.
37 for (TempIterator it(this); !it.Done(); it.Advance()) {
38 LUnallocated* operand = LUnallocated::cast(it.Current());
39 DCHECK(operand->HasFixedPolicy() ||!operand->HasRegisterPolicy());
// Debug printing for LLabel: base gap data, plus a note when this label's
// block was replaced by another during dead-block elimination.
45 void LLabel::PrintDataTo(StringStream* stream) {
46 LGap::PrintDataTo(stream);
47 LLabel* rep = replacement();
// NOTE(review): the guard testing `rep` for NULL appears to be missing
// from this truncated copy -- confirm against the full source.
49 stream->Add(" Dead block replaced with B%d", rep->block_id());

// Prints "<arguments> length <length> index <index>".
54 void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
55 arguments()->PrintTo(stream);
56 stream->Add(" length ");
57 length()->PrintTo(stream);
58 stream->Add(" index ");
59 index()->PrintTo(stream);

// Prints the true/false target block ids followed by the branch condition.
63 void LBranch::PrintDataTo(StringStream* stream) {
64 stream->Add("B%d | B%d on ", true_block_id(), false_block_id());
65 value()->PrintTo(stream);

// Prints the callee followed by "#<arity> / ".
69 void LCallJSFunction::PrintDataTo(StringStream* stream) {
71 function()->PrintTo(stream);
72 stream->Add("#%d / ", arity());

// Prints every input operand, then "#<arity> / ".
76 void LCallWithDescriptor::PrintDataTo(StringStream* stream) {
77 for (int i = 0; i < InputCount(); i++) {
78 InputAt(i)->PrintTo(stream);
81 stream->Add("#%d / ", arity());

// Prints the constructor operand and arity for a `new` call.
85 void LCallNew::PrintDataTo(StringStream* stream) {
87 constructor()->PrintTo(stream);
88 stream->Add(" #%d / ", arity());

// Like LCallNew, but additionally prints the elements kind of the array
// being constructed.
92 void LCallNewArray::PrintDataTo(StringStream* stream) {
94 constructor()->PrintTo(stream);
95 stream->Add(" #%d / ", arity());
96 ElementsKind kind = hydrogen()->elements_kind();
97 stream->Add(" (%s) ", ElementsKindToString(kind));

// Prints "if class_of_test(<value>, "<class>") then B<t> else B<f>".
101 void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
102 stream->Add("if class_of_test(");
103 value()->PrintTo(stream);
104 stream->Add(", \"%o\") then B%d else B%d",
105 *hydrogen()->class_name(),
// Prints "<left> <op> <right> then B<t> else B<f>" for numeric compares.
111 void LCompareNumericAndBranch::PrintDataTo(StringStream* stream) {
113 left()->PrintTo(stream);
114 stream->Add(" %s ", Token::String(op()));
115 right()->PrintTo(stream);
116 stream->Add(" then B%d else B%d", true_block_id(), false_block_id());

// Prints "if has_cached_array_index(<value>) then B<t> else B<f>".
120 void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) {
121 stream->Add("if has_cached_array_index(");
122 value()->PrintTo(stream);
123 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());

// A goto is only worth a disassembly comment when it does not fall
// through to the next emitted block.
127 bool LGoto::HasInterestingComment(LCodeGen* gen) const {
128 return !gen->IsNextEmittedBlock(block_id());

// Prints the jump target block id.
132 void LGoto::PrintDataTo(StringStream* stream) {
133 stream->Add("B%d", block_id());

// Prints the base object and the offset into it.
137 void LInnerAllocatedObject::PrintDataTo(StringStream* stream) {
139 base_object()->PrintTo(stream);
141 offset()->PrintTo(stream);

// Prints the invoked function and its arity.
145 void LInvokeFunction::PrintDataTo(StringStream* stream) {
147 function()->PrintTo(stream);
148 stream->Add(" #%d / ", arity());
// Generic instruction printer: mnemonic, output operand, then (if present)
// the deopt environment and the pointer map.
152 void LInstruction::PrintTo(StringStream* stream) {
153 stream->Add("%s ", this->Mnemonic());
155 PrintOutputOperandTo(stream);
159 if (HasEnvironment()) {
161 environment()->PrintTo(stream);
164 if (HasPointerMap()) {
166 pointer_map()->PrintTo(stream);

// Default data printer: all input operands, space-separated.
// NOTE(review): the else-branch handling for a NULL input appears to be
// missing from this truncated copy.
171 void LInstruction::PrintDataTo(StringStream* stream) {
173 for (int i = 0; i < InputCount(); i++) {
174 if (i > 0) stream->Add(" ");
175 if (InputAt(i) == NULL) {
178 InputAt(i)->PrintTo(stream);

// Prints the result operand, if the instruction produces one.
184 void LInstruction::PrintOutputOperandTo(StringStream* stream) {
185 if (HasResult()) result()->PrintTo(stream);
// The following PrintDataTo overrides all share one shape:
// "if <predicate>(<value>) then B<true> else B<false>".
189 void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
190 stream->Add("if has_instance_type(");
191 value()->PrintTo(stream);
192 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());

196 void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
197 stream->Add("if is_object(");
198 value()->PrintTo(stream);
199 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());

203 void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
204 stream->Add("if is_string(");
205 value()->PrintTo(stream);
206 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());

210 void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
211 stream->Add("if is_smi(");
212 value()->PrintTo(stream);
213 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());

// typeof comparison: prints the literal being compared against as well.
217 void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
218 stream->Add("if typeof ");
219 value()->PrintTo(stream);
220 stream->Add(" == \"%s\" then B%d else B%d",
221 hydrogen()->type_literal()->ToCString().get(),
222 true_block_id(), false_block_id());

226 void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
227 stream->Add("if is_undetectable(");
228 value()->PrintTo(stream);
229 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
// A gap is redundant when all four of its parallel-move positions are
// either absent or themselves redundant.
233 bool LGap::IsRedundant() const {
234 for (int i = 0; i < 4; i++) {
235 if ((parallel_moves_[i] != NULL) && !parallel_moves_[i]->IsRedundant()) {

// Prints each of the four parallel-move positions that is present.
244 void LGap::PrintDataTo(StringStream* stream) {
245 for (int i = 0; i < 4; i++) {
247 if (parallel_moves_[i] != NULL) {
248 parallel_moves_[i]->PrintDataTo(stream);
// Prints "<context>[<slot>]".
255 void LLoadContextSlot::PrintDataTo(StringStream* stream) {
256 context()->PrintTo(stream);
257 stream->Add("[%d]", slot_index());

// Prints "<function>.code_entry = <code object>".
261 void LStoreCodeEntry::PrintDataTo(StringStream* stream) {
263 function()->PrintTo(stream);
264 stream->Add(".code_entry = ");
265 code_object()->PrintTo(stream);

// Prints "<context>[<slot>] <- <value>".
269 void LStoreContextSlot::PrintDataTo(StringStream* stream) {
270 context()->PrintTo(stream);
271 stream->Add("[%d] <- ", slot_index());
272 value()->PrintTo(stream);

// Prints "<object>[<key>] <- <value>".
276 void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
277 object()->PrintTo(stream);
279 key()->PrintTo(stream);
280 stream->Add("] <- ");
281 value()->PrintTo(stream);

// Prints the object, the field access description (streamed from the
// hydrogen access), then the stored value.
285 void LStoreNamedField::PrintDataTo(StringStream* stream) {
286 object()->PrintTo(stream);
287 std::ostringstream os;
288 os << hydrogen()->access();
289 stream->Add(os.str().c_str());
291 value()->PrintTo(stream);

// Prints the object, the property name, then the stored value.
295 void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
296 object()->PrintTo(stream);
298 stream->Add(String::cast(*name())->ToCString().get());
300 value()->PrintTo(stream);

// Prints "if string_compare(<left><right>) then B<t> else B<f>".
304 void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
305 stream->Add("if string_compare(");
306 left()->PrintTo(stream);
307 right()->PrintTo(stream);
308 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());

// Prints the object and the map transition "<old map> -> <new map>".
312 void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
313 object()->PrintTo(stream);
314 stream->Add("%p -> %p", *original_map(), *transitioned_map());

// Prints the operand of a unary math operation.
// NOTE(review): the `template<class T>` header line for this member
// appears to be missing from this truncated copy.
319 void LUnaryMathOperation<T>::PrintDataTo(StringStream* stream) {
320 value()->PrintTo(stream);
// Mnemonic for double arithmetic ("-d" suffix), keyed on the token op.
// NOTE(review): the `switch (op())` header and default/UNREACHABLE lines
// appear to be missing from this truncated copy.
324 const char* LArithmeticD::Mnemonic() const {
326 case Token::ADD: return "add-d";
327 case Token::SUB: return "sub-d";
328 case Token::MUL: return "mul-d";
329 case Token::DIV: return "div-d";
330 case Token::MOD: return "mod-d";

// Mnemonic for tagged (generic) arithmetic ("-t" suffix); covers the
// bitwise and shift operators as well.
338 const char* LArithmeticT::Mnemonic() const {
340 case Token::ADD: return "add-t";
341 case Token::SUB: return "sub-t";
342 case Token::MUL: return "mul-t";
343 case Token::MOD: return "mod-t";
344 case Token::DIV: return "div-t";
345 case Token::BIT_AND: return "bit-and-t";
346 case Token::BIT_OR: return "bit-or-t";
347 case Token::BIT_XOR: return "bit-xor-t";
348 case Token::ROR: return "ror-t";
349 case Token::SHL: return "shl-t";
350 case Token::SAR: return "sar-t";
351 case Token::SHR: return "shr-t";
// Wraps a fixed general-purpose register in an LUnallocated operand.
359 LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
360 return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
361 Register::ToAllocationIndex(reg));

// Wraps a fixed double register in an LUnallocated operand.
365 LUnallocated* LChunkBuilder::ToUnallocated(DoubleRegister reg) {
366 return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
367 DoubleRegister::ToAllocationIndex(reg));

// Core use-helper: if the value is emitted lazily at its uses, visit it
// now; then tag the operand with the value's virtual register id.
371 LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
372 if (value->EmitAtUses()) {
373 HInstruction* instr = HInstruction::cast(value);
374 VisitInstruction(instr);
376 operand->set_virtual_register(value->id());

// Use in a specific general-purpose register.
381 LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) {
382 return Use(value, ToUnallocated(fixed_register));

// Use in a specific double register.
386 LOperand* LChunkBuilder::UseFixedDouble(HValue* value,
387 DoubleRegister fixed_register) {
388 return Use(value, ToUnallocated(fixed_register));

// Use in any register; register survives the instruction.
392 LOperand* LChunkBuilder::UseRegister(HValue* value) {
393 return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));

// Use in any register; the instruction is free to clobber it.
397 LOperand* LChunkBuilder::UseRegisterAndClobber(HValue* value) {
398 return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));

// Use in any register, but only guaranteed live at the start of the
// instruction (may alias the output).
402 LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
404 new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
405 LUnallocated::USED_AT_START));

// Constants become constant operands; everything else gets a register.
409 LOperand* LChunkBuilder::UseRegisterOrConstant(HValue* value) {
410 return value->IsConstant() ? UseConstant(value) : UseRegister(value);

414 LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
415 return value->IsConstant() ? UseConstant(value) : UseRegisterAtStart(value);

// Materialize a constant operand in the chunk's constant pool.
419 LConstantOperand* LChunkBuilder::UseConstant(HValue* value) {
420 return chunk_->DefineConstantOperand(HConstant::cast(value));

// Weakest policy: constant, register or stack slot -- whatever is cheapest.
424 LOperand* LChunkBuilder::UseAny(HValue* value) {
425 return value->IsConstant()
427 : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
// Core define-helper: attaches `result` as the instruction's output and
// tags it with the current hydrogen instruction's virtual register id.
431 LInstruction* LChunkBuilder::Define(LTemplateResultInstruction<1>* instr,
432 LUnallocated* result) {
433 result->set_virtual_register(current_instruction_->id());
434 instr->set_result(result);

// Result must live in some register.
439 LInstruction* LChunkBuilder::DefineAsRegister(
440 LTemplateResultInstruction<1>* instr) {
442 new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));

// Result lives in a specific stack slot.
446 LInstruction* LChunkBuilder::DefineAsSpilled(
447 LTemplateResultInstruction<1>* instr, int index) {
449 new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));

// Result shares the register of the first input.
453 LInstruction* LChunkBuilder::DefineSameAsFirst(
454 LTemplateResultInstruction<1>* instr) {
456 new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));

// Result lives in a specific general-purpose register.
460 LInstruction* LChunkBuilder::DefineFixed(
461 LTemplateResultInstruction<1>* instr, Register reg) {
462 return Define(instr, ToUnallocated(reg));

// Result lives in a specific double register.
466 LInstruction* LChunkBuilder::DefineFixedDouble(
467 LTemplateResultInstruction<1>* instr, DoubleRegister reg) {
468 return Define(instr, ToUnallocated(reg));
// Marks `instr` as a call site: records that the function makes
// (non-deferred) calls, attaches a pointer map, and attaches a lazy-deopt
// environment when one may be needed (see comment below).
472 LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
473 HInstruction* hinstr,
474 CanDeoptimize can_deoptimize) {
475 info()->MarkAsNonDeferredCalling();
480 instr = AssignPointerMap(instr);
482 // If instruction does not have side-effects lazy deoptimization
483 // after the call will try to deoptimize to the point before the call.
484 // Thus we still need to attach environment to this call even if
485 // call sequence can not deoptimize eagerly.
486 bool needs_environment =
487 (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) ||
488 !hinstr->HasObservableSideEffects();
489 if (needs_environment && !instr->HasEnvironment()) {
490 instr = AssignEnvironment(instr);
491 // We can't really figure out if the environment is needed or not.
492 instr->environment()->set_has_been_used();

// Attaches a fresh (empty) pointer map; asserts none is present yet.
499 LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
500 DCHECK(!instr->HasPointerMap());
501 instr->set_pointer_map(new(zone()) LPointerMap(zone()));
// Allocates a temporary general-purpose register operand with a fresh
// virtual register; aborts compilation if virtual registers run out.
506 LUnallocated* LChunkBuilder::TempRegister() {
507 LUnallocated* operand =
508 new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
509 int vreg = allocator_->GetVirtualRegister();
510 if (!allocator_->AllocationOk()) {
511 Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
514 operand->set_virtual_register(vreg);

// Same as TempRegister(), but for a double register.
519 LUnallocated* LChunkBuilder::TempDoubleRegister() {
520 LUnallocated* operand =
521 new(zone()) LUnallocated(LUnallocated::MUST_HAVE_DOUBLE_REGISTER);
522 int vreg = allocator_->GetVirtualRegister();
523 if (!allocator_->AllocationOk()) {
524 Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
527 operand->set_virtual_register(vreg);

// Hands out spill slot indices sequentially.
532 int LPlatformChunk::GetNextSpillIndex() {
533 return spill_slot_count_++;

// Creates the stack-slot operand matching the requested register kind.
537 LOperand* LPlatformChunk::GetNextSpillSlot(RegisterKind kind) {
538 int index = GetNextSpillIndex();
539 if (kind == DOUBLE_REGISTERS) {
540 return LDoubleStackSlot::Create(index, zone());
542 DCHECK(kind == GENERAL_REGISTERS);
543 return LStackSlot::Create(index, zone());

// Fixed-register temp operands (general-purpose and double variants).
548 LOperand* LChunkBuilder::FixedTemp(Register reg) {
549 LUnallocated* operand = ToUnallocated(reg);
550 DCHECK(operand->HasFixedPolicy());

555 LOperand* LChunkBuilder::FixedTemp(DoubleRegister reg) {
556 LUnallocated* operand = ToUnallocated(reg);
557 DCHECK(operand->HasFixedPolicy());
// Entry point of the builder: creates the platform chunk, reserves OSR
// spill slots if needed, then lowers every basic block in order.
// Returns NULL if lowering aborted part-way through.
562 LPlatformChunk* LChunkBuilder::Build() {
564 chunk_ = new(zone()) LPlatformChunk(info_, graph_);
565 LPhase phase("L_Building chunk", chunk_);
568 // If compiling for OSR, reserve space for the unoptimized frame,
569 // which will be subsumed into this frame.
570 if (graph()->has_osr()) {
571 // TODO(all): GetNextSpillIndex just increments a field. It has no other
572 // side effects, so we should get rid of this loop.
573 for (int i = graph()->osr()->UnoptimizedFrameSlots(); i > 0; i--) {
574 chunk_->GetNextSpillIndex();
// Lower each block; bail out as soon as any block aborts.
578 const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
579 for (int i = 0; i < blocks->length(); i++) {
580 DoBasicBlock(blocks->at(i));
581 if (is_aborted()) return NULL;
// Lowers one hydrogen basic block to lithium. First establishes the
// block's incoming environment (start block / single predecessor /
// state join with phis), then translates each instruction in order and
// records the block's first/last lithium instruction indices.
588 void LChunkBuilder::DoBasicBlock(HBasicBlock* block) {
589 DCHECK(is_building());
590 current_block_ = block;
// Case 1: graph entry -- take the graph's start environment.
592 if (block->IsStartBlock()) {
593 block->UpdateEnvironment(graph_->start_environment());
595 } else if (block->predecessors()->length() == 1) {
596 // We have a single predecessor => copy environment and outgoing
597 // argument count from the predecessor.
598 DCHECK(block->phis()->length() == 0);
599 HBasicBlock* pred = block->predecessors()->at(0);
600 HEnvironment* last_environment = pred->last_environment();
601 DCHECK(last_environment != NULL);
603 // Only copy the environment, if it is later used again.
604 if (pred->end()->SecondSuccessor() == NULL) {
605 DCHECK(pred->end()->FirstSuccessor() == block);
// NOTE(review): the else-branch lines between the DCHECK above and the
// condition below appear to be missing from this truncated copy.
607 if ((pred->end()->FirstSuccessor()->block_id() > block->block_id()) ||
608 (pred->end()->SecondSuccessor()->block_id() > block->block_id())) {
609 last_environment = last_environment->Copy();
612 block->UpdateEnvironment(last_environment);
613 DCHECK(pred->argument_count() >= 0);
614 argument_count_ = pred->argument_count();
616 // We are at a state join => process phis.
617 HBasicBlock* pred = block->predecessors()->at(0);
618 // No need to copy the environment, it cannot be used later.
619 HEnvironment* last_environment = pred->last_environment();
// Install live phis at their merged environment slots.
620 for (int i = 0; i < block->phis()->length(); ++i) {
621 HPhi* phi = block->phis()->at(i);
622 if (phi->HasMergedIndex()) {
623 last_environment->SetValueAt(phi->merged_index(), phi);
// Deleted phis become undefined in the environment.
626 for (int i = 0; i < block->deleted_phis()->length(); ++i) {
627 if (block->deleted_phis()->at(i) < last_environment->length()) {
628 last_environment->SetValueAt(block->deleted_phis()->at(i),
629 graph_->GetConstantUndefined());
632 block->UpdateEnvironment(last_environment);
633 // Pick up the outgoing argument count of one of the predecessors.
634 argument_count_ = pred->argument_count();
637 // Translate hydrogen instructions to lithium ones for the current block.
638 HInstruction* current = block->first();
639 int start = chunk_->instructions()->length();
640 while ((current != NULL) && !is_aborted()) {
641 // Code for constants in registers is generated lazily.
642 if (!current->EmitAtUses()) {
643 VisitInstruction(current);
645 current = current->next();
647 int end = chunk_->instructions()->length() - 1;
649 block->set_first_instruction_index(start);
650 block->set_last_instruction_index(end);
652 block->set_argument_count(argument_count_);
653 current_block_ = NULL;
// Lowers a single hydrogen instruction. Instructions whose value is dead
// are replaced by dummies (keeping operand uses alive); control
// instructions with a statically known successor become plain gotos;
// everything else dispatches to its CompileToLithium implementation.
657 void LChunkBuilder::VisitInstruction(HInstruction* current) {
// Save/restore current_instruction_ so that nested visits (EmitAtUses
// operands) do not corrupt it.
658 HInstruction* old_current = current_instruction_;
659 current_instruction_ = current;
661 LInstruction* instr = NULL;
662 if (current->CanReplaceWithDummyUses()) {
663 if (current->OperandCount() == 0) {
664 instr = DefineAsRegister(new(zone()) LDummy());
666 DCHECK(!current->OperandAt(0)->IsControlInstruction());
667 instr = DefineAsRegister(new(zone())
668 LDummyUse(UseAny(current->OperandAt(0))));
// Remaining operands still need uses so their values stay alive.
670 for (int i = 1; i < current->OperandCount(); ++i) {
671 if (current->OperandAt(i)->IsControlInstruction()) continue;
672 LInstruction* dummy =
673 new(zone()) LDummyUse(UseAny(current->OperandAt(i)));
674 dummy->set_hydrogen_value(current);
675 chunk_->AddInstruction(dummy, current_block_);
// NOTE(review): part of the condition below (the third clause at original
// line 681) appears to be missing from this truncated copy.
678 HBasicBlock* successor;
679 if (current->IsControlInstruction() &&
680 HControlInstruction::cast(current)->KnownSuccessorBlock(&successor) &&
682 instr = new(zone()) LGoto(successor);
684 instr = current->CompileToLithium(this);
// Track the simulated argument stack depth across instructions.
688 argument_count_ += current->argument_delta();
689 DCHECK(argument_count_ >= 0);
692 AddInstruction(instr, current);
695 current_instruction_ = old_current;
// Appends a lowered instruction to the chunk: links it to its hydrogen
// value, verifies register-allocator invariants in debug mode, applies
// stress-testing flags, and emits an LLazyBailout after calls so lazy
// deoptimization has an environment to target.
699 void LChunkBuilder::AddInstruction(LInstruction* instr,
700 HInstruction* hydrogen_val) {
701 // Associate the hydrogen instruction first, since we may need it for
702 // the ClobbersRegisters() or ClobbersDoubleRegisters() calls below.
703 instr->set_hydrogen_value(hydrogen_val);
706 // Make sure that the lithium instruction has either no fixed register
707 // constraints in temps or the result OR no uses that are only used at
708 // start. If this invariant doesn't hold, the register allocator can decide
709 // to insert a split of a range immediately before the instruction due to an
710 // already allocated register needing to be used for the instruction's fixed
711 // register constraint. In this case, the register allocator won't see an
712 // interference between the split child and the use-at-start (it would if
713 // the it was just a plain use), so it is free to move the split child into
714 // the same register that is used for the use-at-start.
715 // See https://code.google.com/p/chromium/issues/detail?id=201590
716 if (!(instr->ClobbersRegisters() &&
717 instr->ClobbersDoubleRegisters(isolate()))) {
// NOTE(review): the declaration of `fixed` (original line 718) appears to
// be missing from this truncated copy.
719 int used_at_start = 0;
720 for (UseIterator it(instr); !it.Done(); it.Advance()) {
721 LUnallocated* operand = LUnallocated::cast(it.Current());
722 if (operand->IsUsedAtStart()) ++used_at_start;
724 if (instr->Output() != NULL) {
725 if (LUnallocated::cast(instr->Output())->HasFixedPolicy()) ++fixed;
727 for (TempIterator it(instr); !it.Done(); it.Advance()) {
728 LUnallocated* operand = LUnallocated::cast(it.Current());
729 if (operand->HasFixedPolicy()) ++fixed;
731 DCHECK(fixed == 0 || used_at_start == 0);
// Stress flags force pointer maps / environments onto every instruction.
735 if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) {
736 instr = AssignPointerMap(instr);
738 if (FLAG_stress_environments && !instr->HasEnvironment()) {
739 instr = AssignEnvironment(instr);
741 chunk_->AddInstruction(instr, current_block_);
// Calls need a lazy-bailout point immediately after them; if the call has
// observable side effects, the bailout targets the following simulate.
743 if (instr->IsCall()) {
744 HValue* hydrogen_value_for_lazy_bailout = hydrogen_val;
745 LInstruction* instruction_needing_environment = NULL;
746 if (hydrogen_val->HasObservableSideEffects()) {
747 HSimulate* sim = HSimulate::cast(hydrogen_val->next());
748 instruction_needing_environment = instr;
749 sim->ReplayEnvironment(current_block_->last_environment());
750 hydrogen_value_for_lazy_bailout = sim;
752 LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
753 bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
754 chunk_->AddInstruction(bailout, current_block_);
755 if (instruction_needing_environment != NULL) {
756 // Store the lazy deopt environment with the instruction if needed.
757 // Right now it is only used for LInstanceOfKnownGlobal.
758 instruction_needing_environment->
759 SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
// Builds a deoptimization environment from the current block's last
// hydrogen environment and attaches it to the instruction.
765 LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
766 HEnvironment* hydrogen_env = current_block_->last_environment();
767 int argument_index_accumulator = 0;
768 ZoneList<HValue*> objects_to_materialize(0, zone());
769 instr->set_environment(CreateEnvironment(hydrogen_env,
770 &argument_index_accumulator,
771 &objects_to_materialize));

// Abnormal exits (e.g. throws) need no lithium counterpart.
776 LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
777 // The control instruction marking the end of a block that completed
778 // abruptly (e.g., threw an exception). There is nothing specific to do.
// Lowers a double arithmetic operation. MOD has no ARM64 instruction and
// is lowered as a call with fixed d0/d1 argument registers; everything
// else becomes a plain register-to-register LArithmeticD.
783 LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
784 HArithmeticBinaryOperation* instr) {
785 DCHECK(instr->representation().IsDouble());
786 DCHECK(instr->left()->representation().IsDouble());
787 DCHECK(instr->right()->representation().IsDouble());
789 if (op == Token::MOD) {
790 LOperand* left = UseFixedDouble(instr->left(), d0);
791 LOperand* right = UseFixedDouble(instr->right(), d1);
792 LArithmeticD* result = new(zone()) LArithmeticD(Token::MOD, left, right);
793 return MarkAsCall(DefineFixedDouble(result, d0), instr);
795 LOperand* left = UseRegisterAtStart(instr->left());
796 LOperand* right = UseRegisterAtStart(instr->right());
797 LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
798 return DefineAsRegister(result);

// Lowers a generic (tagged) binary operation as a runtime/stub call with
// the standard x1/x0 argument registers, result in x0.
803 LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
804 HBinaryOperation* instr) {
805 DCHECK((op == Token::ADD) || (op == Token::SUB) || (op == Token::MUL) ||
806 (op == Token::DIV) || (op == Token::MOD) || (op == Token::SHR) ||
807 (op == Token::SHL) || (op == Token::SAR) || (op == Token::ROR) ||
808 (op == Token::BIT_OR) || (op == Token::BIT_AND) ||
809 (op == Token::BIT_XOR));
810 HValue* left = instr->left();
811 HValue* right = instr->right();
813 // TODO(jbramley): Once we've implemented smi support for all arithmetic
814 // operations, these assertions should check IsTagged().
815 DCHECK(instr->representation().IsSmiOrTagged());
816 DCHECK(left->representation().IsSmiOrTagged());
817 DCHECK(right->representation().IsSmiOrTagged());
819 LOperand* context = UseFixed(instr->context(), cp);
820 LOperand* left_operand = UseFixed(left, x1);
821 LOperand* right_operand = UseFixed(right, x0);
822 LArithmeticT* result =
823 new(zone()) LArithmeticT(op, context, left_operand, right_operand);
824 return MarkAsCall(DefineFixed(result, x0), instr);
// Bounds-check base-index information needs no lithium counterpart.
828 LInstruction* LChunkBuilder::DoBoundsCheckBaseIndexInformation(
829 HBoundsCheckBaseIndexInformation* instr) {

// Lowers arguments-object element access; requires a frame. When both
// length and index are constants the arguments pointer only needs to be
// live at the start; otherwise it must survive the whole instruction.
835 LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
836 info()->MarkAsRequiresFrame();
837 LOperand* args = NULL;
838 LOperand* length = NULL;
839 LOperand* index = NULL;
841 if (instr->length()->IsConstant() && instr->index()->IsConstant()) {
842 args = UseRegisterAtStart(instr->arguments());
843 length = UseConstant(instr->length());
844 index = UseConstant(instr->index());
846 args = UseRegister(instr->arguments());
847 length = UseRegisterAtStart(instr->length());
848 index = UseRegisterOrConstantAtStart(instr->index());
851 return DefineAsRegister(new(zone()) LAccessArgumentsAt(args, length, index));
// Lowers HAdd by representation: smi/int32 additions try a fused
// shifted-operand form first and take an environment when overflow can
// deopt; external (pointer) additions become LAddE; doubles and tagged
// values fall through to the generic arithmetic lowerers.
855 LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
856 if (instr->representation().IsSmiOrInteger32()) {
857 DCHECK(instr->left()->representation().Equals(instr->representation()));
858 DCHECK(instr->right()->representation().Equals(instr->representation()));
// Prefer an add with a shifted right operand when one is available.
860 LInstruction* shifted_operation = TryDoOpWithShiftedRightOperand(instr);
861 if (shifted_operation != NULL) {
862 return shifted_operation;
865 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
867 UseRegisterOrConstantAtStart(instr->BetterRightOperand());
868 LInstruction* result = instr->representation().IsSmi() ?
869 DefineAsRegister(new(zone()) LAddS(left, right)) :
870 DefineAsRegister(new(zone()) LAddI(left, right));
871 if (instr->CheckFlag(HValue::kCanOverflow)) {
872 result = AssignEnvironment(result);
875 } else if (instr->representation().IsExternal()) {
876 DCHECK(instr->left()->representation().IsExternal());
877 DCHECK(instr->right()->representation().IsInteger32());
878 DCHECK(!instr->CheckFlag(HValue::kCanOverflow));
879 LOperand* left = UseRegisterAtStart(instr->left());
880 LOperand* right = UseRegisterOrConstantAtStart(instr->right());
881 return DefineAsRegister(new(zone()) LAddE(left, right));
882 } else if (instr->representation().IsDouble()) {
883 return DoArithmeticD(Token::ADD, instr);
885 DCHECK(instr->representation().IsTagged());
886 return DoArithmeticT(Token::ADD, instr);

// Lowers HAllocate; may call into the runtime on slow path, hence the
// deferred-calling mark and pointer map. temp3 is only needed when the
// allocation must be pre-filled with filler values.
891 LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
892 info()->MarkAsDeferredCalling();
893 LOperand* context = UseAny(instr->context());
894 LOperand* size = UseRegisterOrConstant(instr->size());
895 LOperand* temp1 = TempRegister();
896 LOperand* temp2 = TempRegister();
897 LOperand* temp3 = instr->MustPrefillWithFiller() ? TempRegister() : NULL;
898 LAllocate* result = new(zone()) LAllocate(context, size, temp1, temp2, temp3);
899 return AssignPointerMap(DefineAsRegister(result));
// Lowers Function.prototype.apply with the platform calling convention:
// function in x1, receiver in x0, length in x2, elements in x3. May
// deoptimize eagerly.
903 LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
904 LOperand* function = UseFixed(instr->function(), x1);
905 LOperand* receiver = UseFixed(instr->receiver(), x0);
906 LOperand* length = UseFixed(instr->length(), x2);
907 LOperand* elements = UseFixed(instr->elements(), x3);
908 LApplyArguments* result = new(zone()) LApplyArguments(function,
912 return MarkAsCall(DefineFixed(result, x0), instr, CAN_DEOPTIMIZE_EAGERLY);

// Materializes the arguments backing store; no temp needed when the
// arguments were inlined.
916 LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* instr) {
917 info()->MarkAsRequiresFrame();
918 LOperand* temp = instr->from_inlined() ? NULL : TempRegister();
919 return DefineAsRegister(new(zone()) LArgumentsElements(temp));

// arguments.length; requires a frame.
923 LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* instr) {
924 info()->MarkAsRequiresFrame();
925 LOperand* value = UseRegisterAtStart(instr->value());
926 return DefineAsRegister(new(zone()) LArgumentsLength(value));

// The arguments object itself is never materialized (see comment below).
930 LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
931 // There are no real uses of the arguments object.
932 // arguments.length and element access are supported directly on
933 // stack arguments, and any real arguments object use causes a bailout.
934 // So this value is never used.
// Lowers bitwise AND/OR/XOR. Smi/int32 operands try the fused
// shifted-operand form first, then become LBitS/LBitI; tagged operands
// go through the generic call-based lowering.
939 LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
940 if (instr->representation().IsSmiOrInteger32()) {
941 DCHECK(instr->left()->representation().Equals(instr->representation()));
942 DCHECK(instr->right()->representation().Equals(instr->representation()));
943 DCHECK(instr->CheckFlag(HValue::kTruncatingToInt32));
945 LInstruction* shifted_operation = TryDoOpWithShiftedRightOperand(instr);
946 if (shifted_operation != NULL) {
947 return shifted_operation;
950 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
952 UseRegisterOrConstantAtStart(instr->BetterRightOperand());
953 return instr->representation().IsSmi() ?
954 DefineAsRegister(new(zone()) LBitS(left, right)) :
955 DefineAsRegister(new(zone()) LBitI(left, right));
957 return DoArithmeticT(instr->op(), instr);

// Every basic block gets a label instruction (used by the allocator to
// detect block boundaries).
962 LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
963 // V8 expects a label to be generated for each basic block.
964 // This is used in some places like LAllocator::IsBlockBoundary
965 // in lithium-allocator.cc
966 return new(zone()) LLabel(instr->block());

// Lowers an array bounds check. Skippable checks are dropped entirely
// unless --debug-code keeps them for verification (in which case they
// must not deopt, hence no environment).
970 LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
971 if (!FLAG_debug_code && instr->skip_check()) return NULL;
972 LOperand* index = UseRegisterOrConstantAtStart(instr->index());
// If the index is constant, the length must be in a register so the
// comparison still has one register operand.
973 LOperand* length = !index->IsConstantOperand()
974 ? UseRegisterOrConstantAtStart(instr->length())
975 : UseRegisterAtStart(instr->length());
976 LInstruction* result = new(zone()) LBoundsCheck(index, length);
977 if (!FLAG_debug_code || !instr->skip_check()) {
978 result = AssignEnvironment(result);
// Lowers HBranch based on how much is statically known about the value:
// numeric representations and simple known tagged types need no deopt
// and no temps; strings need one scratch register; the general tagged
// case may need two temps and an environment for deoptimization.
984 LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
985 HValue* value = instr->value();
986 Representation r = value->representation();
987 HType type = value->type();
989 if (r.IsInteger32() || r.IsSmi() || r.IsDouble()) {
990 // These representations have simple checks that cannot deoptimize.
991 return new(zone()) LBranch(UseRegister(value), NULL, NULL);
993 DCHECK(r.IsTagged());
994 if (type.IsBoolean() || type.IsSmi() || type.IsJSArray() ||
995 type.IsHeapNumber()) {
996 // These types have simple checks that cannot deoptimize.
997 return new(zone()) LBranch(UseRegister(value), NULL, NULL);
1000 if (type.IsString()) {
1001 // This type cannot deoptimize, but needs a scratch register.
1002 return new(zone()) LBranch(UseRegister(value), TempRegister(), NULL);
// General tagged case: temps are needed when a map check is involved.
1005 ToBooleanStub::Types expected = instr->expected_input_types();
1006 bool needs_temps = expected.NeedsMap() || expected.IsEmpty();
1007 LOperand* temp1 = needs_temps ? TempRegister() : NULL;
1008 LOperand* temp2 = needs_temps ? TempRegister() : NULL;
1010 if (expected.IsGeneric() || expected.IsEmpty()) {
1011 // The generic case cannot deoptimize because it already supports every
1012 // possible input type.
1013 DCHECK(needs_temps);
1014 return new(zone()) LBranch(UseRegister(value), temp1, temp2);
1016 return AssignEnvironment(
1017 new(zone()) LBranch(UseRegister(value), temp1, temp2));
// The lowerings below all follow the platform calling convention:
// context in cp, callee/constructor in x1, result in x0; each is marked
// as a call via MarkAsCall.
1023 LInstruction* LChunkBuilder::DoCallJSFunction(
1024 HCallJSFunction* instr) {
1025 LOperand* function = UseFixed(instr->function(), x1);
1027 LCallJSFunction* result = new(zone()) LCallJSFunction(function);
1029 return MarkAsCall(DefineFixed(result, x0), instr);

// Call through an interface descriptor: the target plus one fixed
// register per descriptor parameter.
1033 LInstruction* LChunkBuilder::DoCallWithDescriptor(
1034 HCallWithDescriptor* instr) {
1035 CallInterfaceDescriptor descriptor = instr->descriptor();
1037 LOperand* target = UseRegisterOrConstantAtStart(instr->target());
1038 ZoneList<LOperand*> ops(instr->OperandCount(), zone());
1039 ops.Add(target, zone());
// Operand 0 is the target; the rest map to descriptor parameters.
1040 for (int i = 1; i < instr->OperandCount(); i++) {
1042 UseFixed(instr->OperandAt(i), descriptor.GetParameterRegister(i - 1));
1043 ops.Add(op, zone());
1046 LCallWithDescriptor* result = new(zone()) LCallWithDescriptor(descriptor,
1049 return MarkAsCall(DefineFixed(result, x0), instr);

1053 LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
1054 LOperand* context = UseFixed(instr->context(), cp);
1055 LOperand* function = UseFixed(instr->function(), x1);
1056 LCallFunction* call = new(zone()) LCallFunction(context, function);
1057 return MarkAsCall(DefineFixed(call, x0), instr);

1061 LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
1062 LOperand* context = UseFixed(instr->context(), cp);
1063 // The call to CallConstructStub will expect the constructor to be in x1.
1064 LOperand* constructor = UseFixed(instr->constructor(), x1);
1065 LCallNew* result = new(zone()) LCallNew(context, constructor);
1066 return MarkAsCall(DefineFixed(result, x0), instr);

1070 LInstruction* LChunkBuilder::DoCallNewArray(HCallNewArray* instr) {
1071 LOperand* context = UseFixed(instr->context(), cp);
1072 // The call to ArrayConstructCode will expect the constructor to be in x1.
1073 LOperand* constructor = UseFixed(instr->constructor(), x1);
1074 LCallNewArray* result = new(zone()) LCallNewArray(context, constructor);
1075 return MarkAsCall(DefineFixed(result, x0), instr);

1079 LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
1080 LOperand* context = UseFixed(instr->context(), cp);
1081 return MarkAsCall(DefineFixed(new(zone()) LCallRuntime(context), x0), instr);

1085 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
1086 LOperand* context = UseFixed(instr->context(), cp);
1087 return MarkAsCall(DefineFixed(new(zone()) LCallStub(context), x0), instr);

// Captured objects only replay their environment; no code is emitted.
1091 LInstruction* LChunkBuilder::DoCapturedObject(HCapturedObject* instr) {
1092 instr->ReplayEnvironment(current_block_->last_environment());
1094 // There are no real uses of a captured object.
// Lowers HChange, the generic representation-change instruction. Each
// (from, to) pair maps to a dedicated Lithium conversion; conversions that
// can fail (untagging a non-number, narrowing to smi/int32) get an
// environment attached so they can deoptimize.
1099 LInstruction* LChunkBuilder::DoChange(HChange* instr) {
1100 Representation from = instr->from();
1101 Representation to = instr->to();
1102 HValue* val = instr->value();
// NOTE(review): the assignment to `from` below suggests this branch sits
// under a smi-source guard — a smi is already tagged, so tagging is just a
// dummy use and the value is treated as tagged afterwards; confirm against
// the full file.
1104 if (to.IsTagged()) {
1105 LOperand* value = UseRegister(val);
1106 return DefineSameAsFirst(new(zone()) LDummyUse(value));
1108 from = Representation::Tagged();
// Tagged source: untag to double, smi or int32. The untagging deoptimizes
// unless the input is statically known to be a smi.
1110 if (from.IsTagged()) {
1111 if (to.IsDouble()) {
1112 LOperand* value = UseRegister(val);
1113 LOperand* temp = TempRegister();
1114 LInstruction* result =
1115 DefineAsRegister(new(zone()) LNumberUntagD(value, temp));
1116 if (!val->representation().IsSmi()) result = AssignEnvironment(result);
1118 } else if (to.IsSmi()) {
1119 LOperand* value = UseRegister(val);
1120 if (val->type().IsSmi()) {
1121 return DefineSameAsFirst(new(zone()) LDummyUse(value));
1123 return AssignEnvironment(DefineSameAsFirst(new(zone()) LCheckSmi(value)));
1125 DCHECK(to.IsInteger32());
// Smi-typed values untag without a deopt point; other tagged values go
// through LTaggedToI, which needs a double scratch unless it may truncate.
1126 if (val->type().IsSmi() || val->representation().IsSmi()) {
1127 LOperand* value = UseRegisterAtStart(val);
1128 return DefineAsRegister(new(zone()) LSmiUntag(value, false));
1130 LOperand* value = UseRegister(val);
1131 LOperand* temp1 = TempRegister();
1132 LOperand* temp2 = instr->CanTruncateToInt32()
1133 ? NULL : TempDoubleRegister();
1134 LInstruction* result =
1135 DefineAsRegister(new(zone()) LTaggedToI(value, temp1, temp2));
1136 if (!val->representation().IsSmi()) result = AssignEnvironment(result);
// Double source: boxing allocates a heap number (deferred call with pointer
// map); narrowing deopts unless all uses truncate.
1140 } else if (from.IsDouble()) {
1141 if (to.IsTagged()) {
1142 info()->MarkAsDeferredCalling();
1143 LOperand* value = UseRegister(val);
1144 LOperand* temp1 = TempRegister();
1145 LOperand* temp2 = TempRegister();
1146 LNumberTagD* result = new(zone()) LNumberTagD(value, temp1, temp2);
1147 return AssignPointerMap(DefineAsRegister(result));
1149 DCHECK(to.IsSmi() || to.IsInteger32());
1150 if (instr->CanTruncateToInt32()) {
1151 LOperand* value = UseRegister(val);
1152 return DefineAsRegister(new(zone()) LTruncateDoubleToIntOrSmi(value));
1154 LOperand* value = UseRegister(val);
1155 LDoubleToIntOrSmi* result = new(zone()) LDoubleToIntOrSmi(value);
1156 return AssignEnvironment(DefineAsRegister(result));
// Int32 source: tagging a uint32 may allocate a heap number, while any
// signed int32 fits in a smi on arm64 (see STATIC_ASSERT below).
1159 } else if (from.IsInteger32()) {
1160 info()->MarkAsDeferredCalling();
1161 if (to.IsTagged()) {
1162 if (val->CheckFlag(HInstruction::kUint32)) {
1163 LOperand* value = UseRegister(val);
1164 LNumberTagU* result =
1165 new(zone()) LNumberTagU(value, TempRegister(), TempRegister());
1166 return AssignPointerMap(DefineAsRegister(result));
1168 STATIC_ASSERT((kMinInt == Smi::kMinValue) &&
1169 (kMaxInt == Smi::kMaxValue));
1170 LOperand* value = UseRegisterAtStart(val);
1171 return DefineAsRegister(new(zone()) LSmiTag(value));
1173 } else if (to.IsSmi()) {
1174 LOperand* value = UseRegisterAtStart(val);
1175 LInstruction* result = DefineAsRegister(new(zone()) LSmiTag(value));
// Tagging a uint32 as a smi can overflow, hence the deopt point.
1176 if (val->CheckFlag(HInstruction::kUint32)) {
1177 result = AssignEnvironment(result);
1181 DCHECK(to.IsDouble());
1182 if (val->CheckFlag(HInstruction::kUint32)) {
1183 return DefineAsRegister(
1184 new(zone()) LUint32ToDouble(UseRegisterAtStart(val)));
1186 return DefineAsRegister(
1187 new(zone()) LInteger32ToDouble(UseRegisterAtStart(val)));
1196 LInstruction* LChunkBuilder::DoCheckValue(HCheckValue* instr) {
1197 LOperand* value = UseRegisterAtStart(instr->value());
1198 return AssignEnvironment(new(zone()) LCheckValue(value));
1202 LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
1203 LOperand* value = UseRegisterAtStart(instr->value());
1204 LOperand* temp = TempRegister();
1205 LInstruction* result = new(zone()) LCheckInstanceType(value, temp);
1206 return AssignEnvironment(result);
1210 LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
1211 if (instr->IsStabilityCheck()) return new(zone()) LCheckMaps;
1212 LOperand* value = UseRegisterAtStart(instr->value());
1213 LOperand* temp = TempRegister();
1214 LInstruction* result = AssignEnvironment(new(zone()) LCheckMaps(value, temp));
1215 if (instr->HasMigrationTarget()) {
1216 info()->MarkAsDeferredCalling();
1217 result = AssignPointerMap(result);
1223 LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) {
1224 LOperand* value = UseRegisterAtStart(instr->value());
1225 LInstruction* result = new(zone()) LCheckNonSmi(value);
1226 if (!instr->value()->type().IsHeapObject()) {
1227 result = AssignEnvironment(result);
1233 LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
1234 LOperand* value = UseRegisterAtStart(instr->value());
1235 return AssignEnvironment(new(zone()) LCheckSmi(value));
1239 LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
1240 HValue* value = instr->value();
1241 Representation input_rep = value->representation();
1242 LOperand* reg = UseRegister(value);
1243 if (input_rep.IsDouble()) {
1244 return DefineAsRegister(new(zone()) LClampDToUint8(reg));
1245 } else if (input_rep.IsInteger32()) {
1246 return DefineAsRegister(new(zone()) LClampIToUint8(reg));
1248 DCHECK(input_rep.IsSmiOrTagged());
1249 return AssignEnvironment(
1250 DefineAsRegister(new(zone()) LClampTToUint8(reg,
1251 TempDoubleRegister())));
1256 LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
1257 HClassOfTestAndBranch* instr) {
1258 DCHECK(instr->value()->representation().IsTagged());
1259 LOperand* value = UseRegisterAtStart(instr->value());
1260 return new(zone()) LClassOfTestAndBranch(value,
1266 LInstruction* LChunkBuilder::DoCompareNumericAndBranch(
1267 HCompareNumericAndBranch* instr) {
1268 Representation r = instr->representation();
1269 if (r.IsSmiOrInteger32()) {
1270 DCHECK(instr->left()->representation().Equals(r));
1271 DCHECK(instr->right()->representation().Equals(r));
1272 LOperand* left = UseRegisterOrConstantAtStart(instr->left());
1273 LOperand* right = UseRegisterOrConstantAtStart(instr->right());
1274 return new(zone()) LCompareNumericAndBranch(left, right);
1276 DCHECK(r.IsDouble());
1277 DCHECK(instr->left()->representation().IsDouble());
1278 DCHECK(instr->right()->representation().IsDouble());
1279 if (instr->left()->IsConstant() && instr->right()->IsConstant()) {
1280 LOperand* left = UseConstant(instr->left());
1281 LOperand* right = UseConstant(instr->right());
1282 return new(zone()) LCompareNumericAndBranch(left, right);
1284 LOperand* left = UseRegisterAtStart(instr->left());
1285 LOperand* right = UseRegisterAtStart(instr->right());
1286 return new(zone()) LCompareNumericAndBranch(left, right);
1291 LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
1292 DCHECK(instr->left()->representation().IsTagged());
1293 DCHECK(instr->right()->representation().IsTagged());
1294 LOperand* context = UseFixed(instr->context(), cp);
1295 LOperand* left = UseFixed(instr->left(), x1);
1296 LOperand* right = UseFixed(instr->right(), x0);
1297 LCmpT* result = new(zone()) LCmpT(context, left, right);
1298 return MarkAsCall(DefineFixed(result, x0), instr);
1302 LInstruction* LChunkBuilder::DoCompareHoleAndBranch(
1303 HCompareHoleAndBranch* instr) {
1304 LOperand* value = UseRegister(instr->value());
1305 if (instr->representation().IsTagged()) {
1306 return new(zone()) LCmpHoleAndBranchT(value);
1308 LOperand* temp = TempRegister();
1309 return new(zone()) LCmpHoleAndBranchD(value, temp);
1314 LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
1315 HCompareObjectEqAndBranch* instr) {
1316 LOperand* left = UseRegisterAtStart(instr->left());
1317 LOperand* right = UseRegisterAtStart(instr->right());
1318 return new(zone()) LCmpObjectEqAndBranch(left, right);
1322 LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
1323 DCHECK(instr->value()->representation().IsTagged());
1324 LOperand* value = UseRegisterAtStart(instr->value());
1325 LOperand* temp = TempRegister();
1326 return new(zone()) LCmpMapAndBranch(value, temp);
1330 LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
1331 Representation r = instr->representation();
1333 return DefineAsRegister(new(zone()) LConstantS);
1334 } else if (r.IsInteger32()) {
1335 return DefineAsRegister(new(zone()) LConstantI);
1336 } else if (r.IsDouble()) {
1337 return DefineAsRegister(new(zone()) LConstantD);
1338 } else if (r.IsExternal()) {
1339 return DefineAsRegister(new(zone()) LConstantE);
1340 } else if (r.IsTagged()) {
1341 return DefineAsRegister(new(zone()) LConstantT);
1349 LInstruction* LChunkBuilder::DoContext(HContext* instr) {
1350 if (instr->HasNoUses()) return NULL;
1352 if (info()->IsStub()) {
1353 return DefineFixed(new(zone()) LContext, cp);
1356 return DefineAsRegister(new(zone()) LContext);
1360 LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
1361 LOperand* object = UseFixed(instr->value(), x0);
1362 LDateField* result = new(zone()) LDateField(object, instr->index());
1363 return MarkAsCall(DefineFixed(result, x0), instr, CAN_DEOPTIMIZE_EAGERLY);
1367 LInstruction* LChunkBuilder::DoDebugBreak(HDebugBreak* instr) {
1368 return new(zone()) LDebugBreak();
1372 LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
1373 LOperand* context = UseFixed(instr->context(), cp);
1374 return MarkAsCall(new(zone()) LDeclareGlobals(context), instr);
1378 LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
1379 return AssignEnvironment(new(zone()) LDeoptimize);
1383 LInstruction* LChunkBuilder::DoDivByPowerOf2I(HDiv* instr) {
1384 DCHECK(instr->representation().IsInteger32());
1385 DCHECK(instr->left()->representation().Equals(instr->representation()));
1386 DCHECK(instr->right()->representation().Equals(instr->representation()));
1387 LOperand* dividend = UseRegister(instr->left());
1388 int32_t divisor = instr->right()->GetInteger32Constant();
1389 LInstruction* result = DefineAsRegister(new(zone()) LDivByPowerOf2I(
1390 dividend, divisor));
1391 if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1392 (instr->CheckFlag(HValue::kCanOverflow) && divisor == -1) ||
1393 (!instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) &&
1394 divisor != 1 && divisor != -1)) {
1395 result = AssignEnvironment(result);
1401 LInstruction* LChunkBuilder::DoDivByConstI(HDiv* instr) {
1402 DCHECK(instr->representation().IsInteger32());
1403 DCHECK(instr->left()->representation().Equals(instr->representation()));
1404 DCHECK(instr->right()->representation().Equals(instr->representation()));
1405 LOperand* dividend = UseRegister(instr->left());
1406 int32_t divisor = instr->right()->GetInteger32Constant();
1407 LOperand* temp = instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)
1408 ? NULL : TempRegister();
1409 LInstruction* result = DefineAsRegister(new(zone()) LDivByConstI(
1410 dividend, divisor, temp));
1412 (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1413 !instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) {
1414 result = AssignEnvironment(result);
1420 LInstruction* LChunkBuilder::DoDivI(HBinaryOperation* instr) {
1421 DCHECK(instr->representation().IsSmiOrInteger32());
1422 DCHECK(instr->left()->representation().Equals(instr->representation()));
1423 DCHECK(instr->right()->representation().Equals(instr->representation()));
1424 LOperand* dividend = UseRegister(instr->left());
1425 LOperand* divisor = UseRegister(instr->right());
1426 LOperand* temp = instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)
1427 ? NULL : TempRegister();
1428 LInstruction* result =
1429 DefineAsRegister(new(zone()) LDivI(dividend, divisor, temp));
1430 if (!instr->CheckFlag(HValue::kAllUsesTruncatingToInt32)) {
1431 result = AssignEnvironment(result);
1437 LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
1438 if (instr->representation().IsSmiOrInteger32()) {
1439 if (instr->RightIsPowerOf2()) {
1440 return DoDivByPowerOf2I(instr);
1441 } else if (instr->right()->IsConstant()) {
1442 return DoDivByConstI(instr);
1444 return DoDivI(instr);
1446 } else if (instr->representation().IsDouble()) {
1447 return DoArithmeticD(Token::DIV, instr);
1449 return DoArithmeticT(Token::DIV, instr);
1454 LInstruction* LChunkBuilder::DoDummyUse(HDummyUse* instr) {
1455 return DefineAsRegister(new(zone()) LDummyUse(UseAny(instr->value())));
1459 LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
1460 HEnvironment* outer = current_block_->last_environment();
1461 outer->set_ast_id(instr->ReturnId());
1462 HConstant* undefined = graph()->GetConstantUndefined();
1463 HEnvironment* inner = outer->CopyForInlining(instr->closure(),
1464 instr->arguments_count(),
1467 instr->inlining_kind());
1468 // Only replay binding of arguments object if it wasn't removed from graph.
1469 if ((instr->arguments_var() != NULL) &&
1470 instr->arguments_object()->IsLinked()) {
1471 inner->Bind(instr->arguments_var(), instr->arguments_object());
1473 inner->BindContext(instr->closure_context());
1474 inner->set_entry(instr);
1475 current_block_->UpdateEnvironment(inner);
1476 chunk_->AddInlinedClosure(instr->closure());
1481 LInstruction* LChunkBuilder::DoEnvironmentMarker(HEnvironmentMarker* instr) {
1487 LInstruction* LChunkBuilder::DoForceRepresentation(
1488 HForceRepresentation* instr) {
1489 // All HForceRepresentation instructions should be eliminated in the
1490 // representation change phase of Hydrogen.
1496 LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
1497 LOperand* context = UseFixed(instr->context(), cp);
1499 DefineFixed(new(zone()) LFunctionLiteral(context), x0), instr);
1503 LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
1504 HGetCachedArrayIndex* instr) {
1505 DCHECK(instr->value()->representation().IsTagged());
1506 LOperand* value = UseRegisterAtStart(instr->value());
1507 return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
1511 LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
1512 return new(zone()) LGoto(instr->FirstSuccessor());
1516 LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
1517 HHasCachedArrayIndexAndBranch* instr) {
1518 DCHECK(instr->value()->representation().IsTagged());
1519 return new(zone()) LHasCachedArrayIndexAndBranch(
1520 UseRegisterAtStart(instr->value()), TempRegister());
1524 LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
1525 HHasInstanceTypeAndBranch* instr) {
1526 DCHECK(instr->value()->representation().IsTagged());
1527 LOperand* value = UseRegisterAtStart(instr->value());
1528 return new(zone()) LHasInstanceTypeAndBranch(value, TempRegister());
1532 LInstruction* LChunkBuilder::DoInnerAllocatedObject(
1533 HInnerAllocatedObject* instr) {
1534 LOperand* base_object = UseRegisterAtStart(instr->base_object());
1535 LOperand* offset = UseRegisterOrConstantAtStart(instr->offset());
1536 return DefineAsRegister(
1537 new(zone()) LInnerAllocatedObject(base_object, offset));
1541 LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
1542 LOperand* context = UseFixed(instr->context(), cp);
1543 LInstanceOf* result = new(zone()) LInstanceOf(
1545 UseFixed(instr->left(), InstanceofStub::left()),
1546 UseFixed(instr->right(), InstanceofStub::right()));
1547 return MarkAsCall(DefineFixed(result, x0), instr);
1551 LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
1552 HInstanceOfKnownGlobal* instr) {
1553 LInstanceOfKnownGlobal* result = new(zone()) LInstanceOfKnownGlobal(
1554 UseFixed(instr->context(), cp),
1555 UseFixed(instr->left(), InstanceofStub::left()));
1556 return MarkAsCall(DefineFixed(result, x0), instr);
1560 LInstruction* LChunkBuilder::DoTailCallThroughMegamorphicCache(
1561 HTailCallThroughMegamorphicCache* instr) {
1562 LOperand* context = UseFixed(instr->context(), cp);
1563 LOperand* receiver_register =
1564 UseFixed(instr->receiver(), LoadDescriptor::ReceiverRegister());
1565 LOperand* name_register =
1566 UseFixed(instr->name(), LoadDescriptor::NameRegister());
1567 // Not marked as call. It can't deoptimize, and it never returns.
1568 return new (zone()) LTailCallThroughMegamorphicCache(
1569 context, receiver_register, name_register);
1573 LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
1574 LOperand* context = UseFixed(instr->context(), cp);
1575 // The function is required (by MacroAssembler::InvokeFunction) to be in x1.
1576 LOperand* function = UseFixed(instr->function(), x1);
1577 LInvokeFunction* result = new(zone()) LInvokeFunction(context, function);
1578 return MarkAsCall(DefineFixed(result, x0), instr, CANNOT_DEOPTIMIZE_EAGERLY);
1582 LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
1583 HIsConstructCallAndBranch* instr) {
1584 return new(zone()) LIsConstructCallAndBranch(TempRegister(), TempRegister());
1588 LInstruction* LChunkBuilder::DoCompareMinusZeroAndBranch(
1589 HCompareMinusZeroAndBranch* instr) {
1590 LOperand* value = UseRegister(instr->value());
1591 LOperand* scratch = TempRegister();
1592 return new(zone()) LCompareMinusZeroAndBranch(value, scratch);
1596 LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
1597 DCHECK(instr->value()->representation().IsTagged());
1598 LOperand* value = UseRegisterAtStart(instr->value());
1599 LOperand* temp1 = TempRegister();
1600 LOperand* temp2 = TempRegister();
1601 return new(zone()) LIsObjectAndBranch(value, temp1, temp2);
1605 LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
1606 DCHECK(instr->value()->representation().IsTagged());
1607 LOperand* value = UseRegisterAtStart(instr->value());
1608 LOperand* temp = TempRegister();
1609 return new(zone()) LIsStringAndBranch(value, temp);
1613 LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
1614 DCHECK(instr->value()->representation().IsTagged());
1615 return new(zone()) LIsSmiAndBranch(UseRegisterAtStart(instr->value()));
1619 LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
1620 HIsUndetectableAndBranch* instr) {
1621 DCHECK(instr->value()->representation().IsTagged());
1622 LOperand* value = UseRegisterAtStart(instr->value());
1623 return new(zone()) LIsUndetectableAndBranch(value, TempRegister());
1627 LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
1628 LInstruction* pop = NULL;
1629 HEnvironment* env = current_block_->last_environment();
1631 if (env->entry()->arguments_pushed()) {
1632 int argument_count = env->arguments_environment()->parameter_count();
1633 pop = new(zone()) LDrop(argument_count);
1634 DCHECK(instr->argument_delta() == -argument_count);
1637 HEnvironment* outer =
1638 current_block_->last_environment()->DiscardInlined(false);
1639 current_block_->UpdateEnvironment(outer);
1645 LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
1646 LOperand* context = UseRegisterAtStart(instr->value());
1647 LInstruction* result =
1648 DefineAsRegister(new(zone()) LLoadContextSlot(context));
1649 if (instr->RequiresHoleCheck() && instr->DeoptimizesOnHole()) {
1650 result = AssignEnvironment(result);
1656 LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
1657 HLoadFunctionPrototype* instr) {
1658 LOperand* function = UseRegister(instr->function());
1659 LOperand* temp = TempRegister();
1660 return AssignEnvironment(DefineAsRegister(
1661 new(zone()) LLoadFunctionPrototype(function, temp)));
1665 LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
1666 LLoadGlobalCell* result = new(zone()) LLoadGlobalCell();
1667 return instr->RequiresHoleCheck()
1668 ? AssignEnvironment(DefineAsRegister(result))
1669 : DefineAsRegister(result);
1673 LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
1674 LOperand* context = UseFixed(instr->context(), cp);
1675 LOperand* global_object =
1676 UseFixed(instr->global_object(), LoadDescriptor::ReceiverRegister());
1677 LOperand* vector = NULL;
1678 if (FLAG_vector_ics) {
1679 vector = FixedTemp(VectorLoadICDescriptor::VectorRegister());
1682 LLoadGlobalGeneric* result =
1683 new(zone()) LLoadGlobalGeneric(context, global_object, vector);
1684 return MarkAsCall(DefineFixed(result, x0), instr);
// Lowers HLoadKeyed for fixed arrays (tagged or double) and for
// external/typed-array backing stores. A required hole check, or a uint32
// result whose uses do not truncate, attaches an environment so the load
// can deoptimize.
1688 LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) {
1689 DCHECK(instr->key()->representation().IsSmiOrInteger32());
1690 ElementsKind elements_kind = instr->elements_kind();
1691 LOperand* elements = UseRegister(instr->elements());
1692 LOperand* key = UseRegisterOrConstant(instr->key());
1694 if (!instr->is_typed_elements()) {
1695 if (instr->representation().IsDouble()) {
// NOTE(review): this ternary's true/false arms are not visible here —
// presumably a TempRegister() / NULL pair; confirm against the full file.
1696 LOperand* temp = (!instr->key()->IsConstant() ||
1697 instr->RequiresHoleCheck())
1701 LLoadKeyedFixedDouble* result =
1702 new(zone()) LLoadKeyedFixedDouble(elements, key, temp);
1703 return instr->RequiresHoleCheck()
1704 ? AssignEnvironment(DefineAsRegister(result))
1705 : DefineAsRegister(result);
1707 DCHECK(instr->representation().IsSmiOrTagged() ||
1708 instr->representation().IsInteger32());
// Non-constant keys need a scratch register for address computation.
1709 LOperand* temp = instr->key()->IsConstant() ? NULL : TempRegister();
1710 LLoadKeyedFixed* result =
1711 new(zone()) LLoadKeyedFixed(elements, key, temp);
1712 return instr->RequiresHoleCheck()
1713 ? AssignEnvironment(DefineAsRegister(result))
1714 : DefineAsRegister(result);
// External/typed elements: the representation must agree with the
// elements kind (double for float kinds, int32 otherwise).
1717 DCHECK((instr->representation().IsInteger32() &&
1718 !IsDoubleOrFloatElementsKind(instr->elements_kind())) ||
1719 (instr->representation().IsDouble() &&
1720 IsDoubleOrFloatElementsKind(instr->elements_kind())));
1722 LOperand* temp = instr->key()->IsConstant() ? NULL : TempRegister();
1723 LInstruction* result = DefineAsRegister(
1724 new(zone()) LLoadKeyedExternal(elements, key, temp));
// A uint32 load deopts when the value doesn't fit in an int32 and its
// uses are not known to truncate.
1725 if ((elements_kind == EXTERNAL_UINT32_ELEMENTS ||
1726 elements_kind == UINT32_ELEMENTS) &&
1727 !instr->CheckFlag(HInstruction::kUint32)) {
1728 result = AssignEnvironment(result);
1735 LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
1736 LOperand* context = UseFixed(instr->context(), cp);
1738 UseFixed(instr->object(), LoadDescriptor::ReceiverRegister());
1739 LOperand* key = UseFixed(instr->key(), LoadDescriptor::NameRegister());
1740 LOperand* vector = NULL;
1741 if (FLAG_vector_ics) {
1742 vector = FixedTemp(VectorLoadICDescriptor::VectorRegister());
1745 LInstruction* result =
1746 DefineFixed(new(zone()) LLoadKeyedGeneric(context, object, key, vector),
1748 return MarkAsCall(result, instr);
1752 LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
1753 LOperand* object = UseRegisterAtStart(instr->object());
1754 return DefineAsRegister(new(zone()) LLoadNamedField(object));
1758 LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
1759 LOperand* context = UseFixed(instr->context(), cp);
1761 UseFixed(instr->object(), LoadDescriptor::ReceiverRegister());
1762 LOperand* vector = NULL;
1763 if (FLAG_vector_ics) {
1764 vector = FixedTemp(VectorLoadICDescriptor::VectorRegister());
1767 LInstruction* result =
1768 DefineFixed(new(zone()) LLoadNamedGeneric(context, object, vector), x0);
1769 return MarkAsCall(result, instr);
1773 LInstruction* LChunkBuilder::DoLoadRoot(HLoadRoot* instr) {
1774 return DefineAsRegister(new(zone()) LLoadRoot);
1778 LInstruction* LChunkBuilder::DoMapEnumLength(HMapEnumLength* instr) {
1779 LOperand* map = UseRegisterAtStart(instr->value());
1780 return DefineAsRegister(new(zone()) LMapEnumLength(map));
1784 LInstruction* LChunkBuilder::DoFlooringDivByPowerOf2I(HMathFloorOfDiv* instr) {
1785 DCHECK(instr->representation().IsInteger32());
1786 DCHECK(instr->left()->representation().Equals(instr->representation()));
1787 DCHECK(instr->right()->representation().Equals(instr->representation()));
1788 LOperand* dividend = UseRegisterAtStart(instr->left());
1789 int32_t divisor = instr->right()->GetInteger32Constant();
1790 LInstruction* result = DefineAsRegister(new(zone()) LFlooringDivByPowerOf2I(
1791 dividend, divisor));
1792 if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1793 (instr->CheckFlag(HValue::kLeftCanBeMinInt) && divisor == -1)) {
1794 result = AssignEnvironment(result);
1800 LInstruction* LChunkBuilder::DoFlooringDivByConstI(HMathFloorOfDiv* instr) {
1801 DCHECK(instr->representation().IsInteger32());
1802 DCHECK(instr->left()->representation().Equals(instr->representation()));
1803 DCHECK(instr->right()->representation().Equals(instr->representation()));
1804 LOperand* dividend = UseRegister(instr->left());
1805 int32_t divisor = instr->right()->GetInteger32Constant();
1807 ((divisor > 0 && !instr->CheckFlag(HValue::kLeftCanBeNegative)) ||
1808 (divisor < 0 && !instr->CheckFlag(HValue::kLeftCanBePositive))) ?
1809 NULL : TempRegister();
1810 LInstruction* result = DefineAsRegister(
1811 new(zone()) LFlooringDivByConstI(dividend, divisor, temp));
1813 (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0)) {
1814 result = AssignEnvironment(result);
1820 LInstruction* LChunkBuilder::DoFlooringDivI(HMathFloorOfDiv* instr) {
1821 LOperand* dividend = UseRegister(instr->left());
1822 LOperand* divisor = UseRegister(instr->right());
1823 LOperand* remainder = TempRegister();
1824 LInstruction* result =
1825 DefineAsRegister(new(zone()) LFlooringDivI(dividend, divisor, remainder));
1826 return AssignEnvironment(result);
1830 LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
1831 if (instr->RightIsPowerOf2()) {
1832 return DoFlooringDivByPowerOf2I(instr);
1833 } else if (instr->right()->IsConstant()) {
1834 return DoFlooringDivByConstI(instr);
1836 return DoFlooringDivI(instr);
1841 LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) {
1842 LOperand* left = NULL;
1843 LOperand* right = NULL;
1844 if (instr->representation().IsSmiOrInteger32()) {
1845 DCHECK(instr->left()->representation().Equals(instr->representation()));
1846 DCHECK(instr->right()->representation().Equals(instr->representation()));
1847 left = UseRegisterAtStart(instr->BetterLeftOperand());
1848 right = UseRegisterOrConstantAtStart(instr->BetterRightOperand());
1850 DCHECK(instr->representation().IsDouble());
1851 DCHECK(instr->left()->representation().IsDouble());
1852 DCHECK(instr->right()->representation().IsDouble());
1853 left = UseRegisterAtStart(instr->left());
1854 right = UseRegisterAtStart(instr->right());
1856 return DefineAsRegister(new(zone()) LMathMinMax(left, right));
1860 LInstruction* LChunkBuilder::DoModByPowerOf2I(HMod* instr) {
1861 DCHECK(instr->representation().IsInteger32());
1862 DCHECK(instr->left()->representation().Equals(instr->representation()));
1863 DCHECK(instr->right()->representation().Equals(instr->representation()));
1864 LOperand* dividend = UseRegisterAtStart(instr->left());
1865 int32_t divisor = instr->right()->GetInteger32Constant();
1866 LInstruction* result = DefineSameAsFirst(new(zone()) LModByPowerOf2I(
1867 dividend, divisor));
1868 if (instr->CheckFlag(HValue::kLeftCanBeNegative) &&
1869 instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1870 result = AssignEnvironment(result);
1876 LInstruction* LChunkBuilder::DoModByConstI(HMod* instr) {
1877 DCHECK(instr->representation().IsInteger32());
1878 DCHECK(instr->left()->representation().Equals(instr->representation()));
1879 DCHECK(instr->right()->representation().Equals(instr->representation()));
1880 LOperand* dividend = UseRegister(instr->left());
1881 int32_t divisor = instr->right()->GetInteger32Constant();
1882 LOperand* temp = TempRegister();
1883 LInstruction* result = DefineAsRegister(new(zone()) LModByConstI(
1884 dividend, divisor, temp));
1885 if (divisor == 0 || instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1886 result = AssignEnvironment(result);
1892 LInstruction* LChunkBuilder::DoModI(HMod* instr) {
1893 DCHECK(instr->representation().IsSmiOrInteger32());
1894 DCHECK(instr->left()->representation().Equals(instr->representation()));
1895 DCHECK(instr->right()->representation().Equals(instr->representation()));
1896 LOperand* dividend = UseRegister(instr->left());
1897 LOperand* divisor = UseRegister(instr->right());
1898 LInstruction* result = DefineAsRegister(new(zone()) LModI(dividend, divisor));
1899 if (instr->CheckFlag(HValue::kCanBeDivByZero) ||
1900 instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1901 result = AssignEnvironment(result);
1907 LInstruction* LChunkBuilder::DoMod(HMod* instr) {
1908 if (instr->representation().IsSmiOrInteger32()) {
1909 if (instr->RightIsPowerOf2()) {
1910 return DoModByPowerOf2I(instr);
1911 } else if (instr->right()->IsConstant()) {
1912 return DoModByConstI(instr);
1914 return DoModI(instr);
1916 } else if (instr->representation().IsDouble()) {
1917 return DoArithmeticD(Token::MOD, instr);
1919 return DoArithmeticT(Token::MOD, instr);
// Lowers HMul for smi/int32 values. Multiplications by a constant that fits
// the shift/add patterns recognized below use LMulConstIS; everything else
// uses LMulI/LMulS with both operands in registers. Doubles and tagged
// values go through the generic arithmetic paths.
1924 LInstruction* LChunkBuilder::DoMul(HMul* instr) {
1925 if (instr->representation().IsSmiOrInteger32()) {
1926 DCHECK(instr->left()->representation().Equals(instr->representation()));
1927 DCHECK(instr->right()->representation().Equals(instr->representation()));
1929 bool can_overflow = instr->CheckFlag(HValue::kCanOverflow);
1930 bool bailout_on_minus_zero = instr->CheckFlag(HValue::kBailoutOnMinusZero);
1932 HValue* least_const = instr->BetterLeftOperand();
1933 HValue* most_const = instr->BetterRightOperand();
1935 // LMulConstI can handle a subset of constants:
1936 // With support for overflow detection:
1939 // Without support for overflow detection:
1940 // 2^n + 1, -(2^n - 1)
1941 if (most_const->IsConstant()) {
1942 int32_t constant = HConstant::cast(most_const)->Integer32Value();
1943 bool small_constant = (constant >= -1) && (constant <= 2);
// kMinInt (== -kMaxInt - 1 and below via <=) and kMaxInt are excluded:
// their absolute value cannot be represented for the Abs() below.
1944 bool end_range_constant = (constant <= -kMaxInt) || (constant == kMaxInt);
1945 int32_t constant_abs = Abs(constant);
1947 if (!end_range_constant &&
1948 (small_constant || (base::bits::IsPowerOfTwo32(constant_abs)) ||
1949 (!can_overflow && (base::bits::IsPowerOfTwo32(constant_abs + 1) ||
1950 base::bits::IsPowerOfTwo32(constant_abs - 1))))) {
1951 LConstantOperand* right = UseConstant(most_const);
// Power-of-two multiplies are lowered to shifts, which cannot reuse the
// input register at-start.
1952 bool need_register =
1953 base::bits::IsPowerOfTwo32(constant_abs) && !small_constant;
1954 LOperand* left = need_register ? UseRegister(least_const)
1955 : UseRegisterAtStart(least_const);
1956 LInstruction* result =
1957 DefineAsRegister(new(zone()) LMulConstIS(left, right));
1958 if ((bailout_on_minus_zero && constant <= 0) || can_overflow) {
1959 result = AssignEnvironment(result);
1965 // LMulI/S can handle all cases, but it requires that a register is
1966 // allocated for the second operand.
1967 LOperand* left = UseRegisterAtStart(least_const);
1968 LOperand* right = UseRegisterAtStart(most_const);
1969 LInstruction* result = instr->representation().IsSmi()
1970 ? DefineAsRegister(new(zone()) LMulS(left, right))
1971 : DefineAsRegister(new(zone()) LMulI(left, right));
// x * x can never be -0, so equal operands need no minus-zero bailout.
1972 if ((bailout_on_minus_zero && least_const != most_const) || can_overflow) {
1973 result = AssignEnvironment(result);
1976 } else if (instr->representation().IsDouble()) {
1977 return DoArithmeticD(Token::MUL, instr);
1979 return DoArithmeticT(Token::MUL, instr);
1984 LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
1985 DCHECK(argument_count_ == 0);
1986 allocator_->MarkAsOsrEntry();
1987 current_block_->last_environment()->set_ast_id(instr->ast_id());
1988 return AssignEnvironment(new(zone()) LOsrEntry);
// Lowers a function parameter. Stack parameters are defined as spilled in
// their caller-frame slot; otherwise (stub parameters) the value lives in the
// fixed register named by the stub's call interface descriptor.
1992 LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
1993 LParameter* result = new(zone()) LParameter;
1994 if (instr->kind() == HParameter::STACK_PARAMETER) {
1995 int spill_index = chunk_->GetParameterStackSlot(instr->index());
1996 return DefineAsSpilled(result, spill_index);
1997 DCHECK(info()->IsStub());
1998 CallInterfaceDescriptor descriptor =
1999 info()->code_stub()->GetCallInterfaceDescriptor();
2000 int index = static_cast<int>(instr->index());
2001 Register reg = descriptor.GetEnvironmentParameterRegister(index);
2002 return DefineFixed(result, reg);
// Lowers Math.pow. The operation is implemented by a C call, so both operands
// and the result must be in the fixed registers the stub/C function expects:
// base in d0, and the exponent register chosen by its representation.
2008 LInstruction* LChunkBuilder::DoPower(HPower* instr) {
2009 DCHECK(instr->representation().IsDouble());
2010 // We call a C function for double power. It can't trigger a GC.
2011 // We need to use fixed result register for the call.
2012 Representation exponent_type = instr->right()->representation();
2013 DCHECK(instr->left()->representation().IsDouble());
2014 LOperand* left = UseFixedDouble(instr->left(), d0);
2016 if (exponent_type.IsInteger32()) {
2017 right = UseFixed(instr->right(), MathPowIntegerDescriptor::exponent());
2018 } else if (exponent_type.IsDouble()) {
2019 right = UseFixedDouble(instr->right(), d1);
// Tagged exponent: the tagged-pow stub reads it from its descriptor register.
2021 right = UseFixed(instr->right(), MathPowTaggedDescriptor::exponent());
2023 LPower* result = new(zone()) LPower(left, right);
// Result in d0; the tagged path can deoptimize eagerly if the exponent is
// not a heap number.
2024 return MarkAsCall(DefineFixedDouble(result, d0),
2026 CAN_DEOPTIMIZE_EAGERLY);
// Lowers pushing of call arguments. First emits LPreparePushArguments for the
// whole argument count, then batches the pushes: whenever the current
// LPushArguments decides to split, it is emitted and a fresh one is started.
2030 LInstruction* LChunkBuilder::DoPushArguments(HPushArguments* instr) {
2031 int argc = instr->OperandCount();
2032 AddInstruction(new(zone()) LPreparePushArguments(argc), instr);
2034 LPushArguments* push_args = new(zone()) LPushArguments(zone());
2036 for (int i = 0; i < argc; ++i) {
2037 if (push_args->ShouldSplitPush()) {
2038 AddInstruction(push_args, instr);
2039 push_args = new(zone()) LPushArguments(zone());
2041 push_args->AddArgument(UseRegister(instr->argument(i)));
// Lowers a regexp literal: a runtime call with the context fixed in cp and the
// result fixed in x0.
2048 LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
2049 LOperand* context = UseFixed(instr->context(), cp);
2051 DefineFixed(new(zone()) LRegExpLiteral(context), x0), instr);
// Extracts the raw bit pattern of a double into a general-purpose register.
2055 LInstruction* LChunkBuilder::DoDoubleBits(HDoubleBits* instr) {
2056 HValue* value = instr->value();
2057 DCHECK(value->representation().IsDouble());
2058 return DefineAsRegister(new(zone()) LDoubleBits(UseRegister(value)));
// Builds a double from its hi/lo 32-bit halves. The lo operand is clobbered
// by the code generator, so it must not be reused after this instruction.
2062 LInstruction* LChunkBuilder::DoConstructDouble(HConstructDouble* instr) {
2063 LOperand* lo = UseRegisterAndClobber(instr->lo());
2064 LOperand* hi = UseRegister(instr->hi());
2065 return DefineAsRegister(new(zone()) LConstructDouble(hi, lo));
// Lowers a function return. Stubs need the context in cp to restore it; the
// returned value is fixed in x0 per the calling convention.
2069 LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
2070 LOperand* context = info()->IsStub()
2071 ? UseFixed(instr->context(), cp)
2073 LOperand* parameter_count = UseRegisterOrConstant(instr->parameter_count());
2074 return new(zone()) LReturn(UseFixed(instr->value(), x0), context,
// Reads a character from a sequential string; needs a temp register for the
// address computation.
2079 LInstruction* LChunkBuilder::DoSeqStringGetChar(HSeqStringGetChar* instr) {
2080 LOperand* string = UseRegisterAtStart(instr->string());
2081 LOperand* index = UseRegisterOrConstantAtStart(instr->index());
2082 LOperand* temp = TempRegister();
2083 LSeqStringGetChar* result =
2084 new(zone()) LSeqStringGetChar(string, index, temp);
2085 return DefineAsRegister(result);
// Writes a character into a sequential string. Under --debug-code the
// generated code performs extra checks, which need the index in a register
// and the context in cp (for the runtime call the checks can make).
2089 LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) {
2090 LOperand* string = UseRegister(instr->string());
2091 LOperand* index = FLAG_debug_code
2092 ? UseRegister(instr->index())
2093 : UseRegisterOrConstant(instr->index());
2094 LOperand* value = UseRegister(instr->value());
2095 LOperand* context = FLAG_debug_code ? UseFixed(instr->context(), cp) : NULL;
2096 LOperand* temp = TempRegister();
2097 LSeqStringSetChar* result =
2098 new(zone()) LSeqStringSetChar(context, string, index, value, temp);
2099 return DefineAsRegister(result);
// Checks whether `val` (an int32 bitwise/add/sub) can be emitted as a single
// ARM64 instruction with a shifted register operand (e.g. add x0, x1, x2 LSL
// #imm). Returns the shift-by-constant operation that would be merged, or
// NULL when no such transformation applies.
2103 HBitwiseBinaryOperation* LChunkBuilder::CanTransformToShiftedOp(HValue* val,
2105 if (!val->representation().IsInteger32()) return NULL;
2106 if (!(val->IsBitwise() || val->IsAdd() || val->IsSub())) return NULL;
2108 HBinaryOperation* hinstr = HBinaryOperation::cast(val);
2109 HValue* hleft = hinstr->left();
2110 HValue* hright = hinstr->right();
2111 DCHECK(hleft->representation().Equals(hinstr->representation()));
2112 DCHECK(hright->representation().Equals(hinstr->representation()));
// If one operand is a constant that will likely fit the instruction's
// immediate field, prefer the immediate form over a shifted register.
2114 if ((hright->IsConstant() &&
2115 LikelyFitsImmField(hinstr, HConstant::cast(hright)->Integer32Value())) ||
2116 (hinstr->IsCommutative() && hleft->IsConstant() &&
2117 LikelyFitsImmField(hinstr, HConstant::cast(hleft)->Integer32Value()))) {
2118 // The constant operand will likely fit in the immediate field. We are
2121 // add x0, x8, #imm2
2124 // add x0, x16, x9 LSL #imm
// Look for a shift-by-constant feeding the right operand (or, for
// commutative operations, the left operand).
2128 HBitwiseBinaryOperation* shift = NULL;
2129 // TODO(aleram): We will miss situations where a shift operation is used by
2130 // different instructions both as a left and right operands.
2131 if (hright->IsBitwiseBinaryShift() &&
2132 HBitwiseBinaryOperation::cast(hright)->right()->IsConstant()) {
2133 shift = HBitwiseBinaryOperation::cast(hright);
2137 } else if (hinstr->IsCommutative() &&
2138 hleft->IsBitwiseBinaryShift() &&
2139 HBitwiseBinaryOperation::cast(hleft)->right()->IsConstant()) {
2140 shift = HBitwiseBinaryOperation::cast(hleft);
// A logical shift right by zero can deoptimize (negative input produces a
// uint32 result), so it must not be folded away.
2148 if ((JSShiftAmountFromHConstant(shift->right()) == 0) && shift->IsShr()) {
2149 // Shifts right by zero can deoptimize.
// Returns whether a shift instruction never needs its own code: true only if
// it is int32 and every one of its uses can absorb it as a shifted operand
// (CanTransformToShiftedOp reports this exact shift for each use).
2157 bool LChunkBuilder::ShiftCanBeOptimizedAway(HBitwiseBinaryOperation* shift) {
2158 if (!shift->representation().IsInteger32()) {
2161 for (HUseIterator it(shift->uses()); !it.Done(); it.Advance()) {
2162 if (shift != CanTransformToShiftedOp(it.value())) {
// Attempts to lower `instr` with its shift operand folded into the
// instruction (shifted-register form). Returns the lowered instruction, or
// falls through (NULL path elided here) when the transformation is not valid.
2170 LInstruction* LChunkBuilder::TryDoOpWithShiftedRightOperand(
2171 HBinaryOperation* instr) {
2173 HBitwiseBinaryOperation* shift = CanTransformToShiftedOp(instr, &left);
2175 if ((shift != NULL) && ShiftCanBeOptimizedAway(shift)) {
2176 return DoShiftedBinaryOp(instr, left, shift);
// Emits a bitwise/add/sub operation whose right operand is a register shifted
// by a constant amount, mapping the Hydrogen shift opcode onto the ARM64
// shift type (LSL/LSR/ASR).
2182 LInstruction* LChunkBuilder::DoShiftedBinaryOp(
2183 HBinaryOperation* hinstr, HValue* hleft, HBitwiseBinaryOperation* hshift) {
// Shr by zero would need a deopt and must have been rejected earlier.
2184 DCHECK(hshift->IsBitwiseBinaryShift());
2185 DCHECK(!hshift->IsShr() || (JSShiftAmountFromHConstant(hshift->right()) > 0));
2187 LTemplateResultInstruction<1>* res;
2188 LOperand* left = UseRegisterAtStart(hleft);
2189 LOperand* right = UseRegisterAtStart(hshift->left());
2190 LOperand* shift_amount = UseConstant(hshift->right());
2192 switch (hshift->opcode()) {
2193 case HValue::kShl: shift_op = LSL; break;
2194 case HValue::kShr: shift_op = LSR; break;
2195 case HValue::kSar: shift_op = ASR; break;
2196 default: UNREACHABLE(); shift_op = NO_SHIFT;
2199 if (hinstr->IsBitwise()) {
2200 res = new(zone()) LBitI(left, right, shift_op, shift_amount);
2201 } else if (hinstr->IsAdd()) {
2202 res = new(zone()) LAddI(left, right, shift_op, shift_amount);
2204 DCHECK(hinstr->IsSub());
2205 res = new(zone()) LSubI(left, right, shift_op, shift_amount);
// Overflowing add/sub needs an environment to deoptimize into.
2207 if (hinstr->CheckFlag(HValue::kCanOverflow)) {
2208 AssignEnvironment(res);
2210 return DefineAsRegister(res);
// Common lowering for all shift operations (SHL/SHR/SAR/ROR). Tagged inputs
// go through the generic binary-op stub; Smi/int32 inputs get LShiftS/LShiftI.
2214 LInstruction* LChunkBuilder::DoShift(Token::Value op,
2215 HBitwiseBinaryOperation* instr) {
2216 if (instr->representation().IsTagged()) {
2217 return DoArithmeticT(op, instr);
2220 DCHECK(instr->representation().IsSmiOrInteger32());
2221 DCHECK(instr->left()->representation().Equals(instr->representation()));
2222 DCHECK(instr->right()->representation().Equals(instr->representation()));
// If every use folds this shift into a shifted-operand instruction, emit
// nothing for the shift itself.
2224 if (ShiftCanBeOptimizedAway(instr)) {
2228 LOperand* left = instr->representation().IsSmi()
2229 ? UseRegister(instr->left())
2230 : UseRegisterAtStart(instr->left());
2231 LOperand* right = UseRegisterOrConstantAtStart(instr->right());
2233 // The only shift that can deoptimize is `left >>> 0`, where left is negative.
2234 // In these cases, the result is a uint32 that is too large for an int32.
2235 bool right_can_be_zero = !instr->right()->IsConstant() ||
2236 (JSShiftAmountFromHConstant(instr->right()) == 0);
2237 bool can_deopt = false;
2238 if ((op == Token::SHR) && right_can_be_zero) {
2239 can_deopt = !instr->CheckFlag(HInstruction::kUint32);
2242 LInstruction* result;
2243 if (instr->representation().IsInteger32()) {
2244 result = DefineAsRegister(new (zone()) LShiftI(op, left, right, can_deopt));
2246 DCHECK(instr->representation().IsSmi());
2247 result = DefineAsRegister(new (zone()) LShiftS(op, left, right, can_deopt));
2250 return can_deopt ? AssignEnvironment(result) : result;
// Rotate right: dispatched through the common shift lowering.
2254 LInstruction* LChunkBuilder::DoRor(HRor* instr) {
2255 return DoShift(Token::ROR, instr);
// Arithmetic shift right: dispatched through the common shift lowering.
2259 LInstruction* LChunkBuilder::DoSar(HSar* instr) {
2260 return DoShift(Token::SAR, instr);
// Shift left: dispatched through the common shift lowering.
2264 LInstruction* LChunkBuilder::DoShl(HShl* instr) {
2265 return DoShift(Token::SHL, instr);
// Logical shift right: dispatched through the common shift lowering (the one
// shift that can deoptimize, see DoShift).
2269 LInstruction* LChunkBuilder::DoShr(HShr* instr) {
2270 return DoShift(Token::SHR, instr);
// A simulate only updates the tracked environment; it emits no code.
2274 LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
2275 instr->ReplayEnvironment(current_block_->last_environment());
// Stack checks: at function entry this is a call (stack-overflow runtime
// entry, context fixed in cp); on backwards branches it is an interrupt check
// needing a pointer map and environment for lazy deopt.
2280 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
2281 if (instr->is_function_entry()) {
2282 LOperand* context = UseFixed(instr->context(), cp);
2283 return MarkAsCall(new(zone()) LStackCheck(context), instr);
2285 DCHECK(instr->is_backwards_branch());
2286 LOperand* context = UseAny(instr->context());
2287 return AssignEnvironment(
2288 AssignPointerMap(new(zone()) LStackCheck(context)));
// Stores a code object's entry address into a JSFunction.
2293 LInstruction* LChunkBuilder::DoStoreCodeEntry(HStoreCodeEntry* instr) {
2294 LOperand* function = UseRegister(instr->function());
2295 LOperand* code_object = UseRegisterAtStart(instr->code_object());
2296 LOperand* temp = TempRegister();
2297 return new(zone()) LStoreCodeEntry(function, code_object, temp);
// Stores into a context slot. When a write barrier is needed, RecordWriteStub
// clobbers its inputs, so both operands must be marked as clobbered. A hole
// check that deoptimizes requires an environment.
2301 LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
2302 LOperand* temp = TempRegister();
2305 if (instr->NeedsWriteBarrier()) {
2306 // TODO(all): Replace these constraints when RecordWriteStub has been
2308 context = UseRegisterAndClobber(instr->context());
2309 value = UseRegisterAndClobber(instr->value());
2311 context = UseRegister(instr->context());
2312 value = UseRegister(instr->value());
2314 LInstruction* result = new(zone()) LStoreContextSlot(context, value, temp);
2315 if (instr->RequiresHoleCheck() && instr->DeoptimizesOnHole()) {
2316 result = AssignEnvironment(result);
// Stores into a global property cell; a required hole check can deoptimize,
// so that variant carries an environment.
2322 LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
2323 LOperand* value = UseRegister(instr->value());
2324 if (instr->RequiresHoleCheck()) {
2325 return AssignEnvironment(new(zone()) LStoreGlobalCell(value,
2329 return new(zone()) LStoreGlobalCell(value, TempRegister(), NULL);
// Lowers a keyed store, picking one of three instructions by element kind:
// LStoreKeyedExternal (typed arrays / external arrays), LStoreKeyedFixedDouble
// (double elements), or LStoreKeyedFixed (tagged/Smi elements).
2334 LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
2335 LOperand* key = UseRegisterOrConstant(instr->key());
2336 LOperand* temp = NULL;
2337 LOperand* elements = NULL;
2338 LOperand* val = NULL;
// Tagged stores that need a write barrier: RecordWrite clobbers its inputs.
2340 if (!instr->is_typed_elements() &&
2341 instr->value()->representation().IsTagged() &&
2342 instr->NeedsWriteBarrier()) {
2343 // RecordWrite() will clobber all registers.
2344 elements = UseRegisterAndClobber(instr->elements());
2345 val = UseRegisterAndClobber(instr->value());
2346 temp = TempRegister();
2348 elements = UseRegister(instr->elements());
2349 val = UseRegister(instr->value());
// A constant key folds into the addressing mode; otherwise the address
// computation needs a scratch register.
2350 temp = instr->key()->IsConstant() ? NULL : TempRegister();
2353 if (instr->is_typed_elements()) {
2354 DCHECK((instr->value()->representation().IsInteger32() &&
2355 !IsDoubleOrFloatElementsKind(instr->elements_kind())) ||
2356 (instr->value()->representation().IsDouble() &&
2357 IsDoubleOrFloatElementsKind(instr->elements_kind())));
2358 DCHECK((instr->is_fixed_typed_array() &&
2359 instr->elements()->representation().IsTagged()) ||
2360 (instr->is_external() &&
2361 instr->elements()->representation().IsExternal()));
2362 return new(zone()) LStoreKeyedExternal(elements, key, val, temp);
2364 } else if (instr->value()->representation().IsDouble()) {
2365 DCHECK(instr->elements()->representation().IsTagged());
2366 return new(zone()) LStoreKeyedFixedDouble(elements, key, val, temp);
2369 DCHECK(instr->elements()->representation().IsTagged());
2370 DCHECK(instr->value()->representation().IsSmiOrTagged() ||
2371 instr->value()->representation().IsInteger32());
2372 return new(zone()) LStoreKeyedFixed(elements, key, val, temp);
// Generic (IC-based) keyed store: all operands fixed in the registers the
// store IC's call descriptor dictates; lowered as a call.
2377 LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
2378 LOperand* context = UseFixed(instr->context(), cp);
2380 UseFixed(instr->object(), StoreDescriptor::ReceiverRegister());
2381 LOperand* key = UseFixed(instr->key(), StoreDescriptor::NameRegister());
2382 LOperand* value = UseFixed(instr->value(), StoreDescriptor::ValueRegister());
2384 DCHECK(instr->object()->representation().IsTagged());
2385 DCHECK(instr->key()->representation().IsTagged());
2386 DCHECK(instr->value()->representation().IsTagged());
2389 new(zone()) LStoreKeyedGeneric(context, object, key, value), instr);
// Stores into a named in-object/backing-store field. Register constraints and
// temp count depend on whether a write barrier (for the value or for a map
// transition) is required.
2393 LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
2394 // TODO(jbramley): It might be beneficial to allow value to be a constant in
2395 // some cases. x64 makes use of this with FLAG_track_fields, for example.
2397 LOperand* object = UseRegister(instr->object());
2399 LOperand* temp0 = NULL;
2400 LOperand* temp1 = NULL;
2402 if (instr->access().IsExternalMemory() ||
2403 instr->field_representation().IsDouble()) {
// External/double stores need no write barrier and no temps.
2404 value = UseRegister(instr->value());
2405 } else if (instr->NeedsWriteBarrier()) {
// RecordWrite clobbers the value; two temps for the barrier sequence.
2406 value = UseRegisterAndClobber(instr->value());
2407 temp0 = TempRegister();
2408 temp1 = TempRegister();
2409 } else if (instr->NeedsWriteBarrierForMap()) {
2410 value = UseRegister(instr->value());
2411 temp0 = TempRegister();
2412 temp1 = TempRegister();
2414 value = UseRegister(instr->value());
2415 temp0 = TempRegister();
2418 return new(zone()) LStoreNamedField(object, value, temp0, temp1);
// Generic (IC-based) named store: operands fixed per the store descriptor,
// lowered as a call.
2422 LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
2423 LOperand* context = UseFixed(instr->context(), cp);
2425 UseFixed(instr->object(), StoreDescriptor::ReceiverRegister());
2426 LOperand* value = UseFixed(instr->value(), StoreDescriptor::ValueRegister());
2428 LInstruction* result = new(zone()) LStoreNamedGeneric(context, object, value);
2429 return MarkAsCall(result, instr);
// String concatenation via the StringAdd stub: operands in x1/x0, result x0.
2433 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
2434 LOperand* context = UseFixed(instr->context(), cp);
2435 LOperand* left = UseFixed(instr->left(), x1);
2436 LOperand* right = UseFixed(instr->right(), x0);
2438 LStringAdd* result = new(zone()) LStringAdd(context, left, right);
2439 return MarkAsCall(DefineFixed(result, x0), instr);
// Loads the char code at an index. Inputs are clobbered by the generated
// code; a pointer map is needed for the deferred runtime slow path.
2443 LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
2444 LOperand* string = UseRegisterAndClobber(instr->string());
2445 LOperand* index = UseRegisterAndClobber(instr->index());
2446 LOperand* context = UseAny(instr->context());
2447 LStringCharCodeAt* result =
2448 new(zone()) LStringCharCodeAt(context, string, index);
2449 return AssignPointerMap(DefineAsRegister(result));
// Converts a char code to a one-character string; pointer map is for the
// deferred slow path that may allocate.
2453 LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
2454 LOperand* char_code = UseRegister(instr->value());
2455 LOperand* context = UseAny(instr->context());
2456 LStringCharFromCode* result =
2457 new(zone()) LStringCharFromCode(context, char_code);
2458 return AssignPointerMap(DefineAsRegister(result));
// String comparison with branch: performed by a call with fixed operand
// registers (x1/x0), branching on the call's result.
2462 LInstruction* LChunkBuilder::DoStringCompareAndBranch(
2463 HStringCompareAndBranch* instr) {
2464 DCHECK(instr->left()->representation().IsTagged());
2465 DCHECK(instr->right()->representation().IsTagged());
2466 LOperand* context = UseFixed(instr->context(), cp);
2467 LOperand* left = UseFixed(instr->left(), x1);
2468 LOperand* right = UseFixed(instr->right(), x0);
2469 LStringCompareAndBranch* result =
2470 new(zone()) LStringCompareAndBranch(context, left, right);
2471 return MarkAsCall(result, instr);
// Lowers subtraction. Smi/int32: tries the shifted-right-operand fusion
// first, special-cases 0 - x (constant left), and adds an environment when
// overflow can deoptimize. Doubles and tagged values go to the generic
// arithmetic paths.
2475 LInstruction* LChunkBuilder::DoSub(HSub* instr) {
2476 if (instr->representation().IsSmiOrInteger32()) {
2477 DCHECK(instr->left()->representation().Equals(instr->representation()));
2478 DCHECK(instr->right()->representation().Equals(instr->representation()));
2480 LInstruction* shifted_operation = TryDoOpWithShiftedRightOperand(instr);
2481 if (shifted_operation != NULL) {
2482 return shifted_operation;
// 0 - x can use the immediate-zero form (neg).
2486 if (instr->left()->IsConstant() &&
2487 (HConstant::cast(instr->left())->Integer32Value() == 0)) {
2488 left = UseConstant(instr->left());
2490 left = UseRegisterAtStart(instr->left());
2492 LOperand* right = UseRegisterOrConstantAtStart(instr->right());
2493 LInstruction* result = instr->representation().IsSmi() ?
2494 DefineAsRegister(new(zone()) LSubS(left, right)) :
2495 DefineAsRegister(new(zone()) LSubI(left, right));
2496 if (instr->CheckFlag(HValue::kCanOverflow)) {
2497 result = AssignEnvironment(result);
2500 } else if (instr->representation().IsDouble()) {
2501 return DoArithmeticD(Token::SUB, instr);
2503 return DoArithmeticT(Token::SUB, instr);
// Materializes the current JSFunction in a register; emits nothing if unused.
2508 LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
2509 if (instr->HasNoUses()) {
2512 return DefineAsRegister(new(zone()) LThisFunction);
// Converts an object to fast-properties mode via a runtime call (x0 in/out).
2517 LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
2518 LOperand* object = UseFixed(instr->value(), x0);
2519 LToFastProperties* result = new(zone()) LToFastProperties(object);
2520 return MarkAsCall(DefineFixed(result, x0), instr);
// Transitions an object's elements kind. Simple map changes are done inline
// with two temps; anything else becomes a runtime call with fixed registers.
2524 LInstruction* LChunkBuilder::DoTransitionElementsKind(
2525 HTransitionElementsKind* instr) {
2526 if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) {
2527 LOperand* object = UseRegister(instr->object());
2528 LTransitionElementsKind* result =
2529 new(zone()) LTransitionElementsKind(object, NULL,
2530 TempRegister(), TempRegister());
2533 LOperand* object = UseFixed(instr->object(), x0);
2534 LOperand* context = UseFixed(instr->context(), cp);
2535 LTransitionElementsKind* result =
2536 new(zone()) LTransitionElementsKind(object, context, NULL, NULL);
2537 return MarkAsCall(result, instr);
// Deoptimizes if the object is followed by an allocation memento; always
// carries an environment since it can only trigger a deopt.
2542 LInstruction* LChunkBuilder::DoTrapAllocationMemento(
2543 HTrapAllocationMemento* instr) {
2544 LOperand* object = UseRegister(instr->object());
2545 LOperand* temp1 = TempRegister();
2546 LOperand* temp2 = TempRegister();
2547 LTrapAllocationMemento* result =
2548 new(zone()) LTrapAllocationMemento(object, temp1, temp2);
2549 return AssignEnvironment(result);
// typeof: a runtime call; the input only needs to be in some register since
// the code generator pushes it for CallRuntime.
2553 LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
2554 LOperand* context = UseFixed(instr->context(), cp);
2555 // TODO(jbramley): In ARM, this uses UseFixed to force the input to x0.
2556 // However, LCodeGen::DoTypeof just pushes it to the stack (for CallRuntime)
2557 // anyway, so the input doesn't have to be in x0. We might be able to improve
2558 // the ARM back-end a little by relaxing this restriction.
2560 new(zone()) LTypeof(context, UseRegisterAtStart(instr->value()));
2561 return MarkAsCall(DefineFixed(result, x0), instr);
// Branch on `typeof x == "literal"`. Temps are allocated unconditionally
// because the literal handle cannot be inspected at this point.
2565 LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
2566 // We only need temp registers in some cases, but we can't dereference the
2567 // instr->type_literal() handle to test that here.
2568 LOperand* temp1 = TempRegister();
2569 LOperand* temp2 = TempRegister();
2571 return new(zone()) LTypeofIsAndBranch(
2572 UseRegister(instr->value()), temp1, temp2);
// Dispatches the unary math intrinsics (abs, exp, floor, log, pow-half,
// round, fround, sqrt, clz32) to their arm64 Lithium instructions. Register
// and temp constraints vary per operation and per representation.
2576 LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
2577 switch (instr->op()) {
2579 Representation r = instr->representation();
2581 // The tagged case might need to allocate a HeapNumber for the result,
2582 // so it is handled by a separate LInstruction.
2583 LOperand* context = UseFixed(instr->context(), cp);
2584 LOperand* input = UseRegister(instr->value());
2585 LOperand* temp1 = TempRegister();
2586 LOperand* temp2 = TempRegister();
2587 LOperand* temp3 = TempRegister();
2588 LInstruction* result = DefineAsRegister(
2589 new(zone()) LMathAbsTagged(context, input, temp1, temp2, temp3));
2590 return AssignEnvironment(AssignPointerMap(result));
// Non-tagged abs: only the non-double forms can deoptimize (INT_MIN).
2592 LOperand* input = UseRegisterAtStart(instr->value());
2593 LInstruction* result = DefineAsRegister(new(zone()) LMathAbs(input));
2594 if (!r.IsDouble()) result = AssignEnvironment(result);
// exp: needs one double temp and three integer temps.
2599 DCHECK(instr->representation().IsDouble());
2600 DCHECK(instr->value()->representation().IsDouble());
2601 LOperand* input = UseRegister(instr->value());
2602 LOperand* double_temp1 = TempDoubleRegister();
2603 LOperand* temp1 = TempRegister();
2604 LOperand* temp2 = TempRegister();
2605 LOperand* temp3 = TempRegister();
2606 LMathExp* result = new(zone()) LMathExp(input, double_temp1,
2607 temp1, temp2, temp3);
2608 return DefineAsRegister(result);
// floor: the int32-result variant can deoptimize; double variant cannot.
2611 DCHECK(instr->value()->representation().IsDouble());
2612 LOperand* input = UseRegisterAtStart(instr->value());
2613 if (instr->representation().IsInteger32()) {
2614 LMathFloorI* result = new(zone()) LMathFloorI(input);
2615 return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
2617 DCHECK(instr->representation().IsDouble());
2618 LMathFloorD* result = new(zone()) LMathFloorD(input);
2619 return DefineAsRegister(result);
// log: implemented via a C call with fixed d0 in/out.
2623 DCHECK(instr->representation().IsDouble());
2624 DCHECK(instr->value()->representation().IsDouble());
2625 LOperand* input = UseFixedDouble(instr->value(), d0);
2626 LMathLog* result = new(zone()) LMathLog(input);
2627 return MarkAsCall(DefineFixedDouble(result, d0), instr);
2629 case kMathPowHalf: {
2630 DCHECK(instr->representation().IsDouble());
2631 DCHECK(instr->value()->representation().IsDouble());
2632 LOperand* input = UseRegister(instr->value());
2633 return DefineAsRegister(new(zone()) LMathPowHalf(input));
// round: the int32-result variant needs a double temp and can deoptimize.
2636 DCHECK(instr->value()->representation().IsDouble());
2637 LOperand* input = UseRegister(instr->value());
2638 if (instr->representation().IsInteger32()) {
2639 LOperand* temp = TempDoubleRegister();
2640 LMathRoundI* result = new(zone()) LMathRoundI(input, temp);
2641 return AssignEnvironment(DefineAsRegister(result));
2643 DCHECK(instr->representation().IsDouble());
2644 LMathRoundD* result = new(zone()) LMathRoundD(input);
2645 return DefineAsRegister(result);
// fround: double -> float32 -> double rounding.
2649 DCHECK(instr->value()->representation().IsDouble());
2650 LOperand* input = UseRegister(instr->value());
2651 LMathFround* result = new (zone()) LMathFround(input);
2652 return DefineAsRegister(result);
2655 DCHECK(instr->representation().IsDouble());
2656 DCHECK(instr->value()->representation().IsDouble());
2657 LOperand* input = UseRegisterAtStart(instr->value());
2658 return DefineAsRegister(new(zone()) LMathSqrt(input));
// clz32: count leading zeros, int32 -> int32.
2661 DCHECK(instr->representation().IsInteger32());
2662 DCHECK(instr->value()->representation().IsInteger32());
2663 LOperand* input = UseRegisterAtStart(instr->value());
2664 return DefineAsRegister(new(zone()) LMathClz32(input));
// Defines an OSR value as spilled at the slot it occupies in the unoptimized
// frame, so the optimized frame can take it over in place. Bails out of
// optimization if the slot index exceeds what LUnallocated can encode.
2673 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
2674 // Use an index that corresponds to the location in the unoptimized frame,
2675 // which the optimized frame will subsume.
2676 int env_index = instr->index();
2677 int spill_index = 0;
2678 if (instr->environment()->is_parameter_index(env_index)) {
2679 spill_index = chunk_->GetParameterStackSlot(env_index);
2681 spill_index = env_index - instr->environment()->first_local_index();
2682 if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
2683 Retry(kTooManySpillSlotsNeededForOSR);
2687 return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
// HUseConst keeps a constant alive; body not visible in this listing —
// presumably emits no code (returns NULL). TODO(review): confirm.
2691 LInstruction* LChunkBuilder::DoUseConst(HUseConst* instr) {
// for-in preparation: a runtime call that can deoptimize eagerly; the
// enumerable is fixed in x0 and the result comes back in x0.
2696 LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
2697 LOperand* context = UseFixed(instr->context(), cp);
2698 // Assign object to a fixed register different from those already used in
2699 // LForInPrepareMap.
2700 LOperand* object = UseFixed(instr->enumerable(), x0);
2701 LForInPrepareMap* result = new(zone()) LForInPrepareMap(context, object);
2702 return MarkAsCall(DefineFixed(result, x0), instr, CAN_DEOPTIMIZE_EAGERLY);
// Loads the for-in enum cache from a map; can deoptimize, hence environment.
2706 LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
2707 LOperand* map = UseRegister(instr->map());
2708 return AssignEnvironment(DefineAsRegister(new(zone()) LForInCacheArray(map)));
// Deoptimizes if the value's map is not the expected one (for-in fast path).
2712 LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
2713 LOperand* value = UseRegisterAtStart(instr->value());
2714 LOperand* map = UseRegister(instr->map());
2715 LOperand* temp = TempRegister();
2716 return AssignEnvironment(new(zone()) LCheckMapValue(value, map, temp));
// Loads a field by dynamic index (for-in). The index register is clobbered;
// the result is defined same-as-first and a pointer map covers the deferred
// slow path.
2720 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
2721 LOperand* object = UseRegisterAtStart(instr->object());
2722 LOperand* index = UseRegisterAndClobber(instr->index());
2723 LLoadFieldByIndex* load = new(zone()) LLoadFieldByIndex(object, index);
2724 LInstruction* result = DefineSameAsFirst(load);
2725 return AssignPointerMap(result);
// Wraps the receiver for non-strict callees; can deoptimize, hence the
// environment.
2729 LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
2730 LOperand* receiver = UseRegister(instr->receiver());
2731 LOperand* function = UseRegister(instr->function());
2732 LWrapReceiver* result = new(zone()) LWrapReceiver(receiver, function);
2733 return AssignEnvironment(DefineAsRegister(result));
// Writes a new context into the frame's context slot.
2737 LInstruction* LChunkBuilder::DoStoreFrameContext(HStoreFrameContext* instr) {
2738 LOperand* context = UseRegisterAtStart(instr->context());
2739 return new(zone()) LStoreFrameContext(context);
// Allocates a block context via a call; the new context is returned in cp.
2743 LInstruction* LChunkBuilder::DoAllocateBlockContext(
2744 HAllocateBlockContext* instr) {
2745 LOperand* context = UseFixed(instr->context(), cp);
2746 LOperand* function = UseRegisterAtStart(instr->function());
2747 LAllocateBlockContext* result =
2748 new(zone()) LAllocateBlockContext(context, function);
2749 return MarkAsCall(DefineFixed(result, cp), instr);
// SIMD builder stub; body not visible in this listing — presumably
// unimplemented/unreachable on this backend. TODO(review): confirm.
2753 LInstruction* LChunkBuilder::DoNullarySIMDOperation(
2754 HNullarySIMDOperation* instr) {
// SIMD builder stub; body not visible in this listing — presumably
// unimplemented/unreachable on this backend. TODO(review): confirm.
2760 LInstruction* LChunkBuilder::DoUnarySIMDOperation(
2761 HUnarySIMDOperation* instr) {
// SIMD builder stub; body not visible in this listing — presumably
// unimplemented/unreachable on this backend. TODO(review): confirm.
2767 LInstruction* LChunkBuilder::DoBinarySIMDOperation(
2768 HBinarySIMDOperation* instr) {
// SIMD builder stub; body not visible in this listing — presumably
// unimplemented/unreachable on this backend. TODO(review): confirm.
2774 LInstruction* LChunkBuilder::DoTernarySIMDOperation(
2775 HTernarySIMDOperation* instr) {
// SIMD builder stub; body not visible in this listing — presumably
// unimplemented/unreachable on this backend. TODO(review): confirm.
2781 LInstruction* LChunkBuilder::DoQuarternarySIMDOperation(
2782 HQuarternarySIMDOperation* instr) {
2788 } } // namespace v8::internal