1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #include "lithium-allocator-inl.h"
8 #include "arm64/lithium-arm64.h"
9 #include "arm64/lithium-codegen-arm64.h"
10 #include "hydrogen-osr.h"
// Generates the trivial CompileToNative override for every concrete lithium
// instruction: each one simply dispatches to the matching LCodeGen::Do<Type>.
#define DEFINE_COMPILE(type)                           \
  void L##type::CompileToNative(LCodeGen* generator) { \
    generator->Do##type(this);                         \
  }
LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
#undef DEFINE_COMPILE
// Debug-only sanity check of the operand policies on an instruction that has
// been marked as a call.
void LInstruction::VerifyCall() {
  // Call instructions can use only fixed registers as temporaries and
  // outputs because all registers are blocked by the calling convention.
  // Inputs operands must use a fixed register or use-at-start policy or
  // a non-register policy.
  ASSERT(Output() == NULL ||
         LUnallocated::cast(Output())->HasFixedPolicy() ||
         !LUnallocated::cast(Output())->HasRegisterPolicy());
  for (UseIterator it(this); !it.Done(); it.Advance()) {
    LUnallocated* operand = LUnallocated::cast(it.Current());
    ASSERT(operand->HasFixedPolicy() ||
           operand->IsUsedAtStart());
  }
  for (TempIterator it(this); !it.Done(); it.Advance()) {
    LUnallocated* operand = LUnallocated::cast(it.Current());
    ASSERT(operand->HasFixedPolicy() || !operand->HasRegisterPolicy());
  }
}
// Debug printing: emits the gap moves, plus a note when this label belongs to
// a dead block that was replaced by another.
void LLabel::PrintDataTo(StringStream* stream) {
  LGap::PrintDataTo(stream);
  LLabel* rep = replacement();
  if (rep != NULL) {
    stream->Add(" Dead block replaced with B%d", rep->block_id());
  }
}
// Debug printing of the arguments-object access: arguments, length, index.
void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
  arguments()->PrintTo(stream);
  stream->Add(" length ");
  length()->PrintTo(stream);
  stream->Add(" index ");
  index()->PrintTo(stream);
}
// Debug printing: target blocks and the value being branched on.
void LBranch::PrintDataTo(StringStream* stream) {
  stream->Add("B%d | B%d on ", true_block_id(), false_block_id());
  value()->PrintTo(stream);
}
// Debug printing: the callee and the call arity.
void LCallJSFunction::PrintDataTo(StringStream* stream) {
  function()->PrintTo(stream);
  stream->Add("#%d / ", arity());
}
// Debug printing: every input operand, then the call arity.
void LCallWithDescriptor::PrintDataTo(StringStream* stream) {
  for (int i = 0; i < InputCount(); i++) {
    InputAt(i)->PrintTo(stream);
  }
  stream->Add("#%d / ", arity());
}
// Debug printing: the constructor and the call arity.
void LCallNew::PrintDataTo(StringStream* stream) {
  constructor()->PrintTo(stream);
  stream->Add(" #%d / ", arity());
}
// Debug printing: constructor, arity, and the elements kind of the array
// being constructed.
void LCallNewArray::PrintDataTo(StringStream* stream) {
  constructor()->PrintTo(stream);
  stream->Add(" #%d / ", arity());
  ElementsKind kind = hydrogen()->elements_kind();
  stream->Add(" (%s) ", ElementsKindToString(kind));
}
// Debug printing of the class-of test and its branch targets.
void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if class_of_test(");
  value()->PrintTo(stream);
  stream->Add(", \"%o\") then B%d else B%d",
              *hydrogen()->class_name(),
              true_block_id(), false_block_id());
}
// Debug printing: left <op> right and the branch targets.
void LCompareNumericAndBranch::PrintDataTo(StringStream* stream) {
  left()->PrintTo(stream);
  stream->Add(" %s ", Token::String(op()));
  right()->PrintTo(stream);
  stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
}
// Debug printing of the cached-array-index test and its branch targets.
void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if has_cached_array_index(");
  value()->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
// A goto is only worth a disassembly comment when it does not fall through
// to the next emitted block.
bool LGoto::HasInterestingComment(LCodeGen* gen) const {
  return !gen->IsNextEmittedBlock(block_id());
}
// Debug printing: the jump target block.
void LGoto::PrintDataTo(StringStream* stream) {
  stream->Add("B%d", block_id());
}
// Debug printing: base object and offset of the inner allocation.
void LInnerAllocatedObject::PrintDataTo(StringStream* stream) {
  base_object()->PrintTo(stream);
  offset()->PrintTo(stream);
}
// Debug printing: the invoked function and the call arity.
void LInvokeFunction::PrintDataTo(StringStream* stream) {
  function()->PrintTo(stream);
  stream->Add(" #%d / ", arity());
}
// Debug printing of a whole instruction: mnemonic, output operand, then any
// attached environment and pointer map.
void LInstruction::PrintTo(StringStream* stream) {
  stream->Add("%s ", this->Mnemonic());

  PrintOutputOperandTo(stream);

  if (HasEnvironment()) {
    environment()->PrintTo(stream);
  }

  if (HasPointerMap()) {
    pointer_map()->PrintTo(stream);
  }
}
// Default debug printing of an instruction's inputs, space separated.
void LInstruction::PrintDataTo(StringStream* stream) {
  for (int i = 0; i < InputCount(); i++) {
    if (i > 0) stream->Add(" ");
    if (InputAt(i) == NULL) {
      stream->Add("NULL");
    } else {
      InputAt(i)->PrintTo(stream);
    }
  }
}
// Debug printing of the result operand, if the instruction produces one.
void LInstruction::PrintOutputOperandTo(StringStream* stream) {
  if (HasResult()) result()->PrintTo(stream);
}
// Debug printing of the instance-type test and its branch targets.
void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if has_instance_type(");
  value()->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
// Debug printing of the is-object test and its branch targets.
void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if is_object(");
  value()->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
// Debug printing of the is-string test and its branch targets.
void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if is_string(");
  value()->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
// Debug printing of the is-smi test and its branch targets.
void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if is_smi(");
  value()->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
// Debug printing of the typeof test and its branch targets.
void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if typeof ");
  value()->PrintTo(stream);
  stream->Add(" == \"%s\" then B%d else B%d",
              hydrogen()->type_literal()->ToCString().get(),
              true_block_id(), false_block_id());
}
// Debug printing of the is-undetectable test and its branch targets.
void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if is_undetectable(");
  value()->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
// A gap is redundant iff each of its four parallel-move positions is either
// absent or itself redundant.
bool LGap::IsRedundant() const {
  for (int i = 0; i < 4; i++) {
    if ((parallel_moves_[i] != NULL) && !parallel_moves_[i]->IsRedundant()) {
      return false;
    }
  }
  return true;
}
// Debug printing of the gap's four parallel-move positions.
void LGap::PrintDataTo(StringStream* stream) {
  for (int i = 0; i < 4; i++) {
    if (parallel_moves_[i] != NULL) {
      parallel_moves_[i]->PrintDataTo(stream);
    }
  }
}
// Debug printing: context[slot] being loaded.
void LLoadContextSlot::PrintDataTo(StringStream* stream) {
  context()->PrintTo(stream);
  stream->Add("[%d]", slot_index());
}
// Debug printing: function.code_entry = code_object.
void LStoreCodeEntry::PrintDataTo(StringStream* stream) {
  function()->PrintTo(stream);
  stream->Add(".code_entry = ");
  code_object()->PrintTo(stream);
}
// Debug printing: context[slot] <- value.
void LStoreContextSlot::PrintDataTo(StringStream* stream) {
  context()->PrintTo(stream);
  stream->Add("[%d] <- ", slot_index());
  value()->PrintTo(stream);
}
// Debug printing: object[key] <- value for a generic keyed store.
void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
  object()->PrintTo(stream);
  key()->PrintTo(stream);
  stream->Add("] <- ");
  value()->PrintTo(stream);
}
// Debug printing: object.field <- value, using the hydrogen field access.
void LStoreNamedField::PrintDataTo(StringStream* stream) {
  object()->PrintTo(stream);
  hydrogen()->access().PrintTo(stream);
  value()->PrintTo(stream);
}
// Debug printing: object.name <- value for a generic named store.
void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
  object()->PrintTo(stream);
  stream->Add(String::cast(*name())->ToCString().get());
  value()->PrintTo(stream);
}
// Debug printing of the string comparison and its branch targets.
void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if string_compare(");
  left()->PrintTo(stream);
  right()->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
// Debug printing: the object and its map transition (old map -> new map).
void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
  object()->PrintTo(stream);
  stream->Add("%p -> %p", *original_map(), *transitioned_map());
}
// Debug printing: the single input of a unary math operation.
template<int T>
void LUnaryMathOperation<T>::PrintDataTo(StringStream* stream) {
  value()->PrintTo(stream);
}
// Mnemonic for a double-precision arithmetic instruction, keyed by token.
const char* LArithmeticD::Mnemonic() const {
  switch (op()) {
    case Token::ADD: return "add-d";
    case Token::SUB: return "sub-d";
    case Token::MUL: return "mul-d";
    case Token::DIV: return "div-d";
    case Token::MOD: return "mod-d";
    default:
      UNREACHABLE();
      return NULL;
  }
}
// Mnemonic for a tagged (generic) arithmetic instruction, keyed by token.
const char* LArithmeticT::Mnemonic() const {
  switch (op()) {
    case Token::ADD: return "add-t";
    case Token::SUB: return "sub-t";
    case Token::MUL: return "mul-t";
    case Token::MOD: return "mod-t";
    case Token::DIV: return "div-t";
    case Token::BIT_AND: return "bit-and-t";
    case Token::BIT_OR: return "bit-or-t";
    case Token::BIT_XOR: return "bit-xor-t";
    case Token::ROR: return "ror-t";
    case Token::SHL: return "shl-t";
    case Token::SAR: return "sar-t";
    case Token::SHR: return "shr-t";
    default:
      UNREACHABLE();
      return NULL;
  }
}
// Records the bailout reason and aborts the current chunk build.
void LChunkBuilder::Abort(BailoutReason reason) {
  info()->set_bailout_reason(reason);
  status_ = ABORTED;
}
// Wraps a fixed general-purpose register as an unallocated operand.
LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
  return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
                                  Register::ToAllocationIndex(reg));
}
// Wraps a fixed double register as an unallocated operand.
LUnallocated* LChunkBuilder::ToUnallocated(DoubleRegister reg) {
  return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
                                  DoubleRegister::ToAllocationIndex(reg));
}
// Creates a use of |value| with the given policy. Values flagged to be
// emitted at their uses are compiled here, on first use.
LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
  if (value->EmitAtUses()) {
    HInstruction* instr = HInstruction::cast(value);
    VisitInstruction(instr);
  }
  operand->set_virtual_register(value->id());
  return operand;
}
// Use of |value| pinned to a specific general-purpose register.
LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) {
  return Use(value, ToUnallocated(fixed_register));
}
// Use of |value| pinned to a specific double register.
LOperand* LChunkBuilder::UseFixedDouble(HValue* value,
                                        DoubleRegister fixed_register) {
  return Use(value, ToUnallocated(fixed_register));
}
// Use of |value| in any register, live through the whole instruction.
LOperand* LChunkBuilder::UseRegister(HValue* value) {
  return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
}
// Use of |value| in a register that the instruction may overwrite.
LOperand* LChunkBuilder::UseRegisterAndClobber(HValue* value) {
  return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));
}
// Use of |value| in a register, needed only at the start of the instruction
// (so it may share a register with the output).
LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
  return Use(value,
             new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
                                      LUnallocated::USED_AT_START));
}
// Constants stay as constant operands; everything else gets a register.
LOperand* LChunkBuilder::UseRegisterOrConstant(HValue* value) {
  return value->IsConstant() ? UseConstant(value) : UseRegister(value);
}
// Like UseRegisterOrConstant, but register uses are at-start only.
LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
  return value->IsConstant() ? UseConstant(value) : UseRegisterAtStart(value);
}
// Materializes |value| as a chunk-level constant operand.
LConstantOperand* LChunkBuilder::UseConstant(HValue* value) {
  return chunk_->DefineConstantOperand(HConstant::cast(value));
}
// Use of |value| with no placement constraint: constant, register, or slot.
LOperand* LChunkBuilder::UseAny(HValue* value) {
  return value->IsConstant()
      ? UseConstant(value)
      : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
}
// Attaches |result| as the output of |instr|, tagged with the virtual
// register of the hydrogen instruction currently being translated.
LInstruction* LChunkBuilder::Define(LTemplateResultInstruction<1>* instr,
                                    LUnallocated* result) {
  result->set_virtual_register(current_instruction_->id());
  instr->set_result(result);
  return instr;
}
// Output must live in some register.
LInstruction* LChunkBuilder::DefineAsRegister(
    LTemplateResultInstruction<1>* instr) {
  return Define(instr,
                new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
}
// Output lives in the given fixed stack slot.
LInstruction* LChunkBuilder::DefineAsSpilled(
    LTemplateResultInstruction<1>* instr, int index) {
  return Define(instr,
                new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));
}
// Output reuses the register of the first input.
LInstruction* LChunkBuilder::DefineSameAsFirst(
    LTemplateResultInstruction<1>* instr) {
  return Define(instr,
                new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
}
// Output pinned to a specific general-purpose register.
LInstruction* LChunkBuilder::DefineFixed(
    LTemplateResultInstruction<1>* instr, Register reg) {
  return Define(instr, ToUnallocated(reg));
}
// Output pinned to a specific double register.
LInstruction* LChunkBuilder::DefineFixedDouble(
    LTemplateResultInstruction<1>* instr, DoubleRegister reg) {
  return Define(instr, ToUnallocated(reg));
}
// Marks |instr| as a call: attaches a pointer map and, when lazy bailout is
// possible, an environment for deoptimization after the call.
LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
                                        HInstruction* hinstr,
                                        CanDeoptimize can_deoptimize) {
  info()->MarkAsNonDeferredCalling();
#ifdef DEBUG
  instr->VerifyCall();
#endif
  instr->MarkAsCall();
  instr = AssignPointerMap(instr);

  // If instruction does not have side-effects lazy deoptimization
  // after the call will try to deoptimize to the point before the call.
  // Thus we still need to attach environment to this call even if
  // call sequence can not deoptimize eagerly.
  bool needs_environment =
      (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) ||
      !hinstr->HasObservableSideEffects();
  if (needs_environment && !instr->HasEnvironment()) {
    instr = AssignEnvironment(instr);
    // We can't really figure out if the environment is needed or not.
    instr->environment()->set_has_been_used();
  }

  return instr;
}
// Attaches a fresh (empty) pointer map for GC root tracking at this
// instruction; must not already have one.
LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
  ASSERT(!instr->HasPointerMap());
  instr->set_pointer_map(new(zone()) LPointerMap(zone()));
  return instr;
}
// Allocates a fresh temporary register operand with its own virtual register.
// On virtual-register exhaustion the build is aborted and vreg 0 is used as
// a placeholder.
LUnallocated* LChunkBuilder::TempRegister() {
  LUnallocated* operand =
      new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
  int vreg = allocator_->GetVirtualRegister();
  if (!allocator_->AllocationOk()) {
    Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
    vreg = 0;
  }
  operand->set_virtual_register(vreg);
  return operand;
}
// Hands out spill slot indices sequentially.
int LPlatformChunk::GetNextSpillIndex() {
  return spill_slot_count_++;
}
// Creates the next spill slot operand of the requested register kind.
LOperand* LPlatformChunk::GetNextSpillSlot(RegisterKind kind) {
  int index = GetNextSpillIndex();
  if (kind == DOUBLE_REGISTERS) {
    return LDoubleStackSlot::Create(index, zone());
  } else {
    ASSERT(kind == GENERAL_REGISTERS);
    return LStackSlot::Create(index, zone());
  }
}
// Temporary pinned to a specific double register.
LOperand* LChunkBuilder::FixedTemp(DoubleRegister reg) {
  LUnallocated* operand = ToUnallocated(reg);
  ASSERT(operand->HasFixedPolicy());
  return operand;
}
// Top-level driver: translates the whole hydrogen graph into a lithium
// chunk, block by block. Returns NULL if the build was aborted.
LPlatformChunk* LChunkBuilder::Build() {
  chunk_ = new(zone()) LPlatformChunk(info_, graph_);
  LPhase phase("L_Building chunk", chunk_);
  status_ = BUILDING;

  // If compiling for OSR, reserve space for the unoptimized frame,
  // which will be subsumed into this frame.
  if (graph()->has_osr()) {
    // TODO(all): GetNextSpillIndex just increments a field. It has no other
    // side effects, so we should get rid of this loop.
    for (int i = graph()->osr()->UnoptimizedFrameSlots(); i > 0; i--) {
      chunk_->GetNextSpillIndex();
    }
  }

  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int i = 0; i < blocks->length(); i++) {
    DoBasicBlock(blocks->at(i));
    if (is_aborted()) return NULL;
  }
  status_ = DONE;
  return chunk_;
}
// Translates one hydrogen basic block: first establishes the block's
// incoming environment (graph start, single predecessor, or phi join), then
// compiles each instruction in order.
void LChunkBuilder::DoBasicBlock(HBasicBlock* block) {
  ASSERT(is_building());
  current_block_ = block;

  if (block->IsStartBlock()) {
    // Entry block: begin from the graph's initial environment.
    block->UpdateEnvironment(graph_->start_environment());
    argument_count_ = 0;
  } else if (block->predecessors()->length() == 1) {
    // We have a single predecessor => copy environment and outgoing
    // argument count from the predecessor.
    ASSERT(block->phis()->length() == 0);
    HBasicBlock* pred = block->predecessors()->at(0);
    HEnvironment* last_environment = pred->last_environment();
    ASSERT(last_environment != NULL);

    // Only copy the environment, if it is later used again.
    if (pred->end()->SecondSuccessor() == NULL) {
      ASSERT(pred->end()->FirstSuccessor() == block);
    } else {
      if ((pred->end()->FirstSuccessor()->block_id() > block->block_id()) ||
          (pred->end()->SecondSuccessor()->block_id() > block->block_id())) {
        last_environment = last_environment->Copy();
      }
    }
    block->UpdateEnvironment(last_environment);
    ASSERT(pred->argument_count() >= 0);
    argument_count_ = pred->argument_count();
  } else {
    // We are at a state join => process phis.
    HBasicBlock* pred = block->predecessors()->at(0);
    // No need to copy the environment, it cannot be used later.
    HEnvironment* last_environment = pred->last_environment();
    for (int i = 0; i < block->phis()->length(); ++i) {
      HPhi* phi = block->phis()->at(i);
      if (phi->HasMergedIndex()) {
        last_environment->SetValueAt(phi->merged_index(), phi);
      }
    }
    // Deleted phis leave their environment slot holding undefined.
    for (int i = 0; i < block->deleted_phis()->length(); ++i) {
      if (block->deleted_phis()->at(i) < last_environment->length()) {
        last_environment->SetValueAt(block->deleted_phis()->at(i),
                                     graph_->GetConstantUndefined());
      }
    }
    block->UpdateEnvironment(last_environment);
    // Pick up the outgoing argument count of one of the predecessors.
    argument_count_ = pred->argument_count();
  }

  // Translate hydrogen instructions to lithium ones for the current block.
  HInstruction* current = block->first();
  int start = chunk_->instructions()->length();
  while ((current != NULL) && !is_aborted()) {
    // Code for constants in registers is generated lazily.
    if (!current->EmitAtUses()) {
      VisitInstruction(current);
    }
    current = current->next();
  }
  int end = chunk_->instructions()->length() - 1;
  if (end >= start) {
    block->set_first_instruction_index(start);
    block->set_last_instruction_index(end);
  }
  block->set_argument_count(argument_count_);
  current_block_ = NULL;
}
// Translates a single hydrogen instruction into lithium. Handles dummy-use
// replacement, debug invariant checking, stress flags, and the lazy-bailout
// point that follows every call.
void LChunkBuilder::VisitInstruction(HInstruction* current) {
  HInstruction* old_current = current_instruction_;
  current_instruction_ = current;

  LInstruction* instr = NULL;
  if (current->CanReplaceWithDummyUses()) {
    if (current->OperandCount() == 0) {
      instr = DefineAsRegister(new(zone()) LDummy());
    } else {
      ASSERT(!current->OperandAt(0)->IsControlInstruction());
      instr = DefineAsRegister(new(zone())
          LDummyUse(UseAny(current->OperandAt(0))));
    }
    // Remaining operands become stand-alone dummy uses.
    for (int i = 1; i < current->OperandCount(); ++i) {
      if (current->OperandAt(i)->IsControlInstruction()) continue;
      LInstruction* dummy =
          new(zone()) LDummyUse(UseAny(current->OperandAt(i)));
      dummy->set_hydrogen_value(current);
      chunk_->AddInstruction(dummy, current_block_);
    }
  } else {
    instr = current->CompileToLithium(this);
  }

  argument_count_ += current->argument_delta();
  ASSERT(argument_count_ >= 0);

  if (instr != NULL) {
    // Associate the hydrogen instruction first, since we may need it for
    // the ClobbersRegisters() or ClobbersDoubleRegisters() calls below.
    instr->set_hydrogen_value(current);

#if DEBUG
    // Make sure that the lithium instruction has either no fixed register
    // constraints in temps or the result OR no uses that are only used at
    // start. If this invariant doesn't hold, the register allocator can decide
    // to insert a split of a range immediately before the instruction due to an
    // already allocated register needing to be used for the instruction's fixed
    // register constraint. In this case, the register allocator won't see an
    // interference between the split child and the use-at-start (it would if
    // the it was just a plain use), so it is free to move the split child into
    // the same register that is used for the use-at-start.
    // See https://code.google.com/p/chromium/issues/detail?id=201590
    if (!(instr->ClobbersRegisters() &&
          instr->ClobbersDoubleRegisters(isolate()))) {
      int fixed = 0;
      int used_at_start = 0;
      for (UseIterator it(instr); !it.Done(); it.Advance()) {
        LUnallocated* operand = LUnallocated::cast(it.Current());
        if (operand->IsUsedAtStart()) ++used_at_start;
      }
      if (instr->Output() != NULL) {
        if (LUnallocated::cast(instr->Output())->HasFixedPolicy()) ++fixed;
      }
      for (TempIterator it(instr); !it.Done(); it.Advance()) {
        LUnallocated* operand = LUnallocated::cast(it.Current());
        if (operand->HasFixedPolicy()) ++fixed;
      }
      ASSERT(fixed == 0 || used_at_start == 0);
    }
#endif

    if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) {
      instr = AssignPointerMap(instr);
    }
    if (FLAG_stress_environments && !instr->HasEnvironment()) {
      instr = AssignEnvironment(instr);
    }
    chunk_->AddInstruction(instr, current_block_);

    if (instr->IsCall()) {
      // Every call is followed by a lazy-bailout point. If the call has
      // observable side effects, the bailout uses the simulate that follows
      // it so deoptimization resumes *after* the call.
      HValue* hydrogen_value_for_lazy_bailout = current;
      LInstruction* instruction_needing_environment = NULL;
      if (current->HasObservableSideEffects()) {
        HSimulate* sim = HSimulate::cast(current->next());
        instruction_needing_environment = instr;
        sim->ReplayEnvironment(current_block_->last_environment());
        hydrogen_value_for_lazy_bailout = sim;
      }
      LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
      bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
      chunk_->AddInstruction(bailout, current_block_);
      if (instruction_needing_environment != NULL) {
        // Store the lazy deopt environment with the instruction if needed.
        // Right now it is only used for LInstanceOfKnownGlobal.
        instruction_needing_environment->
            SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
      }
    }
  }
  current_instruction_ = old_current;
}
// Attaches a deoptimization environment, built from the current block's
// last hydrogen environment, to |instr|.
LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
  HEnvironment* hydrogen_env = current_block_->last_environment();
  int argument_index_accumulator = 0;
  ZoneList<HValue*> objects_to_materialize(0, zone());
  instr->set_environment(CreateEnvironment(hydrogen_env,
                                           &argument_index_accumulator,
                                           &objects_to_materialize));
  return instr;
}
LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
  // The control instruction marking the end of a block that completed
  // abruptly (e.g., threw an exception). There is nothing specific to do.
  return NULL;
}
// Double arithmetic. MOD has no hardware instruction and is lowered to a
// runtime call with fixed d0/d1 registers; all other ops stay in registers.
LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
                                           HArithmeticBinaryOperation* instr) {
  ASSERT(instr->representation().IsDouble());
  ASSERT(instr->left()->representation().IsDouble());
  ASSERT(instr->right()->representation().IsDouble());

  if (op == Token::MOD) {
    LOperand* left = UseFixedDouble(instr->left(), d0);
    LOperand* right = UseFixedDouble(instr->right(), d1);
    LArithmeticD* result = new(zone()) LArithmeticD(Token::MOD, left, right);
    return MarkAsCall(DefineFixedDouble(result, d0), instr);
  } else {
    LOperand* left = UseRegisterAtStart(instr->left());
    LOperand* right = UseRegisterAtStart(instr->right());
    LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
    return DefineAsRegister(result);
  }
}
// Tagged (generic) arithmetic: always a stub call with the standard x1/x0
// argument registers and the result in x0.
LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
                                           HBinaryOperation* instr) {
  ASSERT((op == Token::ADD) || (op == Token::SUB) || (op == Token::MUL) ||
         (op == Token::DIV) || (op == Token::MOD) || (op == Token::SHR) ||
         (op == Token::SHL) || (op == Token::SAR) || (op == Token::ROR) ||
         (op == Token::BIT_OR) || (op == Token::BIT_AND) ||
         (op == Token::BIT_XOR));
  HValue* left = instr->left();
  HValue* right = instr->right();

  // TODO(jbramley): Once we've implemented smi support for all arithmetic
  // operations, these assertions should check IsTagged().
  ASSERT(instr->representation().IsSmiOrTagged());
  ASSERT(left->representation().IsSmiOrTagged());
  ASSERT(right->representation().IsSmiOrTagged());

  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* left_operand = UseFixed(left, x1);
  LOperand* right_operand = UseFixed(right, x0);
  LArithmeticT* result =
      new(zone()) LArithmeticT(op, context, left_operand, right_operand);
  return MarkAsCall(DefineFixed(result, x0), instr);
}
// Pure analysis bookkeeping on the hydrogen side; never reaches lithium.
LInstruction* LChunkBuilder::DoBoundsCheckBaseIndexInformation(
    HBoundsCheckBaseIndexInformation* instr) {
  UNREACHABLE();
  return NULL;
}
// Access into the (stack-materialized) arguments object. Constant
// length+index can both stay constant; otherwise they take registers.
LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
  info()->MarkAsRequiresFrame();
  LOperand* args = NULL;
  LOperand* length = NULL;
  LOperand* index = NULL;

  if (instr->length()->IsConstant() && instr->index()->IsConstant()) {
    args = UseRegisterAtStart(instr->arguments());
    length = UseConstant(instr->length());
    index = UseConstant(instr->index());
  } else {
    args = UseRegister(instr->arguments());
    length = UseRegisterAtStart(instr->length());
    index = UseRegisterOrConstantAtStart(instr->index());
  }

  return DefineAsRegister(new(zone()) LAccessArgumentsAt(args, length, index));
}
// Addition, dispatched on representation: smi/int32 (with overflow bailout
// when needed), external pointer + int32, double, or generic tagged.
LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
  if (instr->representation().IsSmiOrInteger32()) {
    ASSERT(instr->left()->representation().Equals(instr->representation()));
    ASSERT(instr->right()->representation().Equals(instr->representation()));
    LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
    LOperand* right =
        UseRegisterOrConstantAtStart(instr->BetterRightOperand());
    LInstruction* result = instr->representation().IsSmi() ?
        DefineAsRegister(new(zone()) LAddS(left, right)) :
        DefineAsRegister(new(zone()) LAddI(left, right));
    if (instr->CheckFlag(HValue::kCanOverflow)) {
      // Overflow deoptimizes back to unoptimized code.
      result = AssignEnvironment(result);
    }
    return result;
  } else if (instr->representation().IsExternal()) {
    ASSERT(instr->left()->representation().IsExternal());
    ASSERT(instr->right()->representation().IsInteger32());
    ASSERT(!instr->CheckFlag(HValue::kCanOverflow));
    LOperand* left = UseRegisterAtStart(instr->left());
    LOperand* right = UseRegisterOrConstantAtStart(instr->right());
    return DefineAsRegister(new(zone()) LAddE(left, right));
  } else if (instr->representation().IsDouble()) {
    return DoArithmeticD(Token::ADD, instr);
  } else {
    ASSERT(instr->representation().IsTagged());
    return DoArithmeticT(Token::ADD, instr);
  }
}
// Inline allocation; may defer to a runtime call, hence the pointer map.
LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
  info()->MarkAsDeferredCalling();
  LOperand* context = UseAny(instr->context());
  LOperand* size = UseRegisterOrConstant(instr->size());
  LOperand* temp1 = TempRegister();
  LOperand* temp2 = TempRegister();
  // A third temp is only needed when the new space must be pre-filled.
  LOperand* temp3 = instr->MustPrefillWithFiller() ? TempRegister() : NULL;
  LAllocate* result = new(zone()) LAllocate(context, size, temp1, temp2, temp3);
  return AssignPointerMap(DefineAsRegister(result));
}
// Function.prototype.apply with an arguments object; fixed registers per
// the calling convention, and may deoptimize eagerly.
LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
  LOperand* function = UseFixed(instr->function(), x1);
  LOperand* receiver = UseFixed(instr->receiver(), x0);
  LOperand* length = UseFixed(instr->length(), x2);
  LOperand* elements = UseFixed(instr->elements(), x3);
  LApplyArguments* result = new(zone()) LApplyArguments(function,
                                                        receiver,
                                                        length,
                                                        elements);
  return MarkAsCall(DefineFixed(result, x0), instr, CAN_DEOPTIMIZE_EAGERLY);
}
// Materializes the arguments elements; no temp needed for inlined frames.
LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* instr) {
  info()->MarkAsRequiresFrame();
  LOperand* temp = instr->from_inlined() ? NULL : TempRegister();
  return DefineAsRegister(new(zone()) LArgumentsElements(temp));
}
// Reads the arguments count from the frame.
LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* instr) {
  info()->MarkAsRequiresFrame();
  LOperand* value = UseRegisterAtStart(instr->value());
  return DefineAsRegister(new(zone()) LArgumentsLength(value));
}
LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
  // There are no real uses of the arguments object.
  // arguments.length and element access are supported directly on
  // stack arguments, and any real arguments object use causes a bailout.
  // So this value is never used.
  return NULL;
}
// Bitwise ops: smi/int32 forms stay in registers; anything else falls back
// to the generic tagged path.
LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
  if (instr->representation().IsSmiOrInteger32()) {
    ASSERT(instr->left()->representation().Equals(instr->representation()));
    ASSERT(instr->right()->representation().Equals(instr->representation()));
    ASSERT(instr->CheckFlag(HValue::kTruncatingToInt32));

    LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
    LOperand* right =
        UseRegisterOrConstantAtStart(instr->BetterRightOperand());
    return instr->representation().IsSmi() ?
        DefineAsRegister(new(zone()) LBitS(left, right)) :
        DefineAsRegister(new(zone()) LBitI(left, right));
  } else {
    return DoArithmeticT(instr->op(), instr);
  }
}
LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
  // V8 expects a label to be generated for each basic block.
  // This is used in some places like LAllocator::IsBlockBoundary
  // in lithium-allocator.cc
  return new(zone()) LLabel(instr->block());
}
// Array bounds check. Elidable checks are dropped entirely unless
// --debug-code keeps them (without a deopt environment).
LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
  if (!FLAG_debug_code && instr->skip_check()) return NULL;
  LOperand* index = UseRegisterOrConstantAtStart(instr->index());
  // At least one of index/length must end up in a register.
  LOperand* length = !index->IsConstantOperand()
      ? UseRegisterOrConstantAtStart(instr->length())
      : UseRegisterAtStart(instr->length());
  LInstruction* result = new(zone()) LBoundsCheck(index, length);
  if (!FLAG_debug_code || !instr->skip_check()) {
    result = AssignEnvironment(result);
  }
  return result;
}
// Generic branch on a value's truthiness. The cheaper the statically-known
// type, the fewer temps and deopt points the lithium branch needs.
LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
  LInstruction* goto_instr = CheckElideControlInstruction(instr);
  if (goto_instr != NULL) return goto_instr;

  HValue* value = instr->value();
  Representation r = value->representation();
  HType type = value->type();

  if (r.IsInteger32() || r.IsSmi() || r.IsDouble()) {
    // These representations have simple checks that cannot deoptimize.
    return new(zone()) LBranch(UseRegister(value), NULL, NULL);
  } else {
    ASSERT(r.IsTagged());
    if (type.IsBoolean() || type.IsSmi() || type.IsJSArray() ||
        type.IsHeapNumber()) {
      // These types have simple checks that cannot deoptimize.
      return new(zone()) LBranch(UseRegister(value), NULL, NULL);
    }

    if (type.IsString()) {
      // This type cannot deoptimize, but needs a scratch register.
      return new(zone()) LBranch(UseRegister(value), TempRegister(), NULL);
    }

    ToBooleanStub::Types expected = instr->expected_input_types();
    bool needs_temps = expected.NeedsMap() || expected.IsEmpty();
    LOperand* temp1 = needs_temps ? TempRegister() : NULL;
    LOperand* temp2 = needs_temps ? TempRegister() : NULL;

    if (expected.IsGeneric() || expected.IsEmpty()) {
      // The generic case cannot deoptimize because it already supports every
      // possible input type.
      return new(zone()) LBranch(UseRegister(value), temp1, temp2);
    } else {
      return AssignEnvironment(
          new(zone()) LBranch(UseRegister(value), temp1, temp2));
    }
  }
}
// Direct JS function call: callee fixed in x1, result in x0.
LInstruction* LChunkBuilder::DoCallJSFunction(
    HCallJSFunction* instr) {
  LOperand* function = UseFixed(instr->function(), x1);

  LCallJSFunction* result = new(zone()) LCallJSFunction(function);

  return MarkAsCall(DefineFixed(result, x0), instr);
}
// Call through an interface descriptor: each argument is pinned to the
// register the descriptor prescribes; operand 0 is the call target.
LInstruction* LChunkBuilder::DoCallWithDescriptor(
    HCallWithDescriptor* instr) {
  const CallInterfaceDescriptor* descriptor = instr->descriptor();

  LOperand* target = UseRegisterOrConstantAtStart(instr->target());
  ZoneList<LOperand*> ops(instr->OperandCount(), zone());
  ops.Add(target, zone());
  for (int i = 1; i < instr->OperandCount(); i++) {
    LOperand* op = UseFixed(instr->OperandAt(i),
                            descriptor->GetParameterRegister(i - 1));
    ops.Add(op, zone());
  }

  LCallWithDescriptor* result = new(zone()) LCallWithDescriptor(descriptor,
                                                                ops,
                                                                zone());
  return MarkAsCall(DefineFixed(result, x0), instr);
}
// Generic function call: context in cp, callee in x1, result in x0.
LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* function = UseFixed(instr->function(), x1);
  LCallFunction* call = new(zone()) LCallFunction(context, function);
  return MarkAsCall(DefineFixed(call, x0), instr);
}
LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  // The call to CallConstructStub will expect the constructor to be in x1.
  LOperand* constructor = UseFixed(instr->constructor(), x1);
  LCallNew* result = new(zone()) LCallNew(context, constructor);
  return MarkAsCall(DefineFixed(result, x0), instr);
}
LInstruction* LChunkBuilder::DoCallNewArray(HCallNewArray* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  // The call to ArrayConstructCode will expect the constructor to be in x1.
  LOperand* constructor = UseFixed(instr->constructor(), x1);
  LCallNewArray* result = new(zone()) LCallNewArray(context, constructor);
  return MarkAsCall(DefineFixed(result, x0), instr);
}
// Call into the runtime; result fixed in x0.
LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  return MarkAsCall(DefineFixed(new(zone()) LCallRuntime(context), x0), instr);
}
// Call a code stub; result fixed in x0.
LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  return MarkAsCall(DefineFixed(new(zone()) LCallStub(context), x0), instr);
}
LInstruction* LChunkBuilder::DoCapturedObject(HCapturedObject* instr) {
  // Replay the captured object's state into the current environment so
  // deoptimization can rematerialize it.
  instr->ReplayEnvironment(current_block_->last_environment());

  // There are no real uses of a captured object.
  return NULL;
}
// Lowers HChange, the generic representation-change instruction. Dispatches
// on (from, to) pairs: tagged<->double, tagged<->smi/int32, int32<->double.
// NOTE(review): several guard lines are elided in this listing (numbering
// gaps, e.g. 1065/1069/1071) — presumably an `if (from.IsSmi())` wrapper
// around the first branch; verify against the full file before relying on
// the control flow shown here.
1061 LInstruction* LChunkBuilder::DoChange(HChange* instr) {
1062 Representation from = instr->from();
1063 Representation to = instr->to();
1064 HValue* val = instr->value();
// Smi -> tagged is a no-op (smis are valid tagged values): just keep the
// register; then treat the value as tagged for any further conversion.
1066 if (to.IsTagged()) {
1067 LOperand* value = UseRegister(val);
1068 return DefineSameAsFirst(new(zone()) LDummyUse(value));
1070 from = Representation::Tagged();
1072 if (from.IsTagged()) {
1073 if (to.IsDouble()) {
// Tagged -> double may deopt unless the input is statically a smi.
1074 LOperand* value = UseRegister(val);
1075 LOperand* temp = TempRegister();
1076 LInstruction* result =
1077 DefineAsRegister(new(zone()) LNumberUntagD(value, temp));
1078 if (!val->representation().IsSmi()) result = AssignEnvironment(result);
1080 } else if (to.IsSmi()) {
1081 LOperand* value = UseRegister(val);
1082 if (val->type().IsSmi()) {
1083 return DefineSameAsFirst(new(zone()) LDummyUse(value));
// Otherwise a runtime smi check is needed; deopt on failure.
1085 return AssignEnvironment(DefineSameAsFirst(new(zone()) LCheckSmi(value)));
1087 ASSERT(to.IsInteger32());
// Known-smi inputs untag without any possibility of deopt.
1088 if (val->type().IsSmi() || val->representation().IsSmi()) {
1089 LOperand* value = UseRegisterAtStart(val);
1090 return DefineAsRegister(new(zone()) LSmiUntag(value, false));
// General tagged -> int32: needs a scratch register, and a double temp
// (d24) only when truncation is not allowed.
1092 LOperand* value = UseRegister(val);
1093 LOperand* temp1 = TempRegister();
1094 LOperand* temp2 = instr->CanTruncateToInt32() ? NULL : FixedTemp(d24);
1095 LInstruction* result =
1096 DefineAsRegister(new(zone()) LTaggedToI(value, temp1, temp2));
1097 if (!val->representation().IsSmi()) result = AssignEnvironment(result);
1101 } else if (from.IsDouble()) {
1102 if (to.IsTagged()) {
// Boxing a double may allocate -> deferred call + pointer map.
1103 info()->MarkAsDeferredCalling();
1104 LOperand* value = UseRegister(val);
1105 LOperand* temp1 = TempRegister();
1106 LOperand* temp2 = TempRegister();
1107 LNumberTagD* result = new(zone()) LNumberTagD(value, temp1, temp2);
1108 return AssignPointerMap(DefineAsRegister(result));
1110 ASSERT(to.IsSmi() || to.IsInteger32());
1111 if (instr->CanTruncateToInt32()) {
1112 LOperand* value = UseRegister(val);
1113 return DefineAsRegister(new(zone()) LTruncateDoubleToIntOrSmi(value));
// Non-truncating double -> int/smi deopts on lossy conversion.
1115 LOperand* value = UseRegister(val);
1116 LDoubleToIntOrSmi* result = new(zone()) LDoubleToIntOrSmi(value);
1117 return AssignEnvironment(DefineAsRegister(result));
1120 } else if (from.IsInteger32()) {
1121 info()->MarkAsDeferredCalling();
1122 if (to.IsTagged()) {
// uint32 values may not fit in a smi: box via LNumberTagU (may allocate).
1123 if (val->CheckFlag(HInstruction::kUint32)) {
1124 LOperand* value = UseRegister(val);
1125 LNumberTagU* result =
1126 new(zone()) LNumberTagU(value, TempRegister(), TempRegister());
1127 return AssignPointerMap(DefineAsRegister(result));
// On arm64 every int32 fits in a smi, so plain tagging cannot fail.
1129 STATIC_ASSERT((kMinInt == Smi::kMinValue) &&
1130 (kMaxInt == Smi::kMaxValue));
1131 LOperand* value = UseRegisterAtStart(val);
1132 return DefineAsRegister(new(zone()) LSmiTag(value));
1134 } else if (to.IsSmi()) {
1135 LOperand* value = UseRegisterAtStart(val);
1136 LInstruction* result = DefineAsRegister(new(zone()) LSmiTag(value));
// uint32 -> smi can overflow the smi range, so it may deopt.
1137 if (val->CheckFlag(HInstruction::kUint32)) {
1138 result = AssignEnvironment(result);
1142 ASSERT(to.IsDouble());
// int32/uint32 -> double is always exact; pick signed/unsigned convert.
1143 if (val->CheckFlag(HInstruction::kUint32)) {
1144 return DefineAsRegister(
1145 new(zone()) LUint32ToDouble(UseRegisterAtStart(val)));
1147 return DefineAsRegister(
1148 new(zone()) LInteger32ToDouble(UseRegisterAtStart(val)));
// Check instructions: each compares the input against an expectation and
// deoptimizes (AssignEnvironment) on mismatch.
1157 LInstruction* LChunkBuilder::DoCheckValue(HCheckValue* instr) {
1158 LOperand* value = UseRegisterAtStart(instr->value());
1159 return AssignEnvironment(new(zone()) LCheckValue(value));
// Instance-type check needs one scratch register for the map/type load.
1163 LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
1164 LOperand* value = UseRegisterAtStart(instr->value());
1165 LOperand* temp = TempRegister();
1166 LInstruction* result = new(zone()) LCheckInstanceType(value, temp);
1167 return AssignEnvironment(result);
// Map check: stability checks emit no code; a migration target means the
// check may call the migration runtime (deferred call + pointer map).
1171 LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
1172 if (instr->IsStabilityCheck()) return new(zone()) LCheckMaps;
1173 LOperand* value = UseRegisterAtStart(instr->value());
1174 LOperand* temp = TempRegister();
1175 LInstruction* result = AssignEnvironment(new(zone()) LCheckMaps(value, temp));
1176 if (instr->HasMigrationTarget()) {
1177 info()->MarkAsDeferredCalling();
1178 result = AssignPointerMap(result);
// Non-smi check; skipped entirely when the input is statically a heap object.
1184 LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) {
1185 LOperand* value = UseRegisterAtStart(instr->value());
1186 LInstruction* result = new(zone()) LCheckNonSmi(value);
1187 if (!instr->value()->IsHeapObject()) result = AssignEnvironment(result);
// Smi check: always deopts on non-smi input.
1192 LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
1193 LOperand* value = UseRegisterAtStart(instr->value());
1194 return AssignEnvironment(new(zone()) LCheckSmi(value));
// Clamp to [0, 255]: picks the lithium variant by input representation.
// Only the tagged variant can deopt (on non-number heap objects).
// NOTE(review): the tail argument lines of both calls below (the temp
// operand of LClampTToUint8 and the second/third args of
// LClassOfTestAndBranch) are elided in this listing.
1198 LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
1199 HValue* value = instr->value();
1200 Representation input_rep = value->representation();
1201 LOperand* reg = UseRegister(value);
1202 if (input_rep.IsDouble()) {
1203 return DefineAsRegister(new(zone()) LClampDToUint8(reg));
1204 } else if (input_rep.IsInteger32()) {
1205 return DefineAsRegister(new(zone()) LClampIToUint8(reg));
1207 ASSERT(input_rep.IsSmiOrTagged());
1208 return AssignEnvironment(
1209 DefineAsRegister(new(zone()) LClampTToUint8(reg,
// Class-of-test branch on a tagged value.
1216 LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
1217 HClassOfTestAndBranch* instr) {
1218 ASSERT(instr->value()->representation().IsTagged());
1219 LOperand* value = UseRegisterAtStart(instr->value());
1220 return new(zone()) LClassOfTestAndBranch(value,
// Numeric compare-and-branch: may be elided entirely when the branch target
// is statically known (CheckElideControlInstruction). Handles both the
// smi/int32 and the double case with register-or-constant operands.
1226 LInstruction* LChunkBuilder::DoCompareNumericAndBranch(
1227 HCompareNumericAndBranch* instr) {
1228 LInstruction* goto_instr = CheckElideControlInstruction(instr);
1229 if (goto_instr != NULL) return goto_instr;
1230 Representation r = instr->representation();
1231 if (r.IsSmiOrInteger32()) {
1232 ASSERT(instr->left()->representation().Equals(r));
1233 ASSERT(instr->right()->representation().Equals(r));
1234 LOperand* left = UseRegisterOrConstantAtStart(instr->left());
1235 LOperand* right = UseRegisterOrConstantAtStart(instr->right());
1236 return new(zone()) LCompareNumericAndBranch(left, right);
1238 ASSERT(r.IsDouble());
1239 ASSERT(instr->left()->representation().IsDouble());
1240 ASSERT(instr->right()->representation().IsDouble());
1241 // TODO(all): In fact the only case that we can handle more efficiently is
1242 // when one of the operand is the constant 0. Currently the MacroAssembler
1243 // will be able to cope with any constant by loading it into an internal
1244 // scratch register. This means that if the constant is used more that once,
1245 // it will be loaded multiple times. Unfortunatly crankshaft already
1246 // duplicates constant loads, but we should modify the code below once this
1247 // issue has been addressed in crankshaft.
1248 LOperand* left = UseRegisterOrConstantAtStart(instr->left());
1249 LOperand* right = UseRegisterOrConstantAtStart(instr->right());
1250 return new(zone()) LCompareNumericAndBranch(left, right);
// Generic (tagged) comparison goes through the CompareIC: fixed registers
// x1/x0 per the IC calling convention, result in x0.
1255 LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
1256 ASSERT(instr->left()->representation().IsTagged());
1257 ASSERT(instr->right()->representation().IsTagged());
1258 LOperand* context = UseFixed(instr->context(), cp);
1259 LOperand* left = UseFixed(instr->left(), x1);
1260 LOperand* right = UseFixed(instr->right(), x0);
1261 LCmpT* result = new(zone()) LCmpT(context, left, right);
1262 return MarkAsCall(DefineFixed(result, x0), instr);
// Hole check branch: tagged values compare against the hole object, doubles
// against the hole NaN bit pattern (needs a scratch register).
1266 LInstruction* LChunkBuilder::DoCompareHoleAndBranch(
1267 HCompareHoleAndBranch* instr) {
1268 LOperand* value = UseRegister(instr->value());
1269 if (instr->representation().IsTagged()) {
1270 return new(zone()) LCmpHoleAndBranchT(value);
1272 LOperand* temp = TempRegister();
1273 return new(zone()) LCmpHoleAndBranchD(value, temp);
// Reference-equality branch; may be elided like other control instructions.
1278 LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
1279 HCompareObjectEqAndBranch* instr) {
1280 LInstruction* goto_instr = CheckElideControlInstruction(instr);
1281 if (goto_instr != NULL) return goto_instr;
1283 LOperand* left = UseRegisterAtStart(instr->left());
1284 LOperand* right = UseRegisterAtStart(instr->right());
1285 return new(zone()) LCmpObjectEqAndBranch(left, right);
// Map-equality branch; temp register holds the loaded map.
1289 LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
1290 LInstruction* goto_instr = CheckElideControlInstruction(instr);
1291 if (goto_instr != NULL) return goto_instr;
1293 ASSERT(instr->value()->representation().IsTagged());
1294 LOperand* value = UseRegisterAtStart(instr->value());
1295 LOperand* temp = TempRegister();
1296 return new(zone()) LCmpMapAndBranch(value, temp);
// Constant materialization: one lithium class per representation.
// NOTE(review): the first branch condition (presumably `if (r.IsSmi())`,
// original line 1302) is elided in this listing.
1300 LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
1301 Representation r = instr->representation();
1303 return DefineAsRegister(new(zone()) LConstantS);
1304 } else if (r.IsInteger32()) {
1305 return DefineAsRegister(new(zone()) LConstantI);
1306 } else if (r.IsDouble()) {
1307 return DefineAsRegister(new(zone()) LConstantD);
1308 } else if (r.IsExternal()) {
1309 return DefineAsRegister(new(zone()) LConstantE);
1310 } else if (r.IsTagged()) {
1311 return DefineAsRegister(new(zone()) LConstantT);
// Context access: stubs must keep the context in cp; optimized functions
// can hold it in any register.
1319 LInstruction* LChunkBuilder::DoContext(HContext* instr) {
1320 if (instr->HasNoUses()) return NULL;
1322 if (info()->IsStub()) {
1323 return DefineFixed(new(zone()) LContext, cp);
1326 return DefineAsRegister(new(zone()) LContext);
// Date field access calls the runtime and may deoptimize eagerly.
1330 LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
1331 LOperand* object = UseFixed(instr->value(), x0);
1332 LDateField* result = new(zone()) LDateField(object, instr->index());
1333 return MarkAsCall(DefineFixed(result, x0), instr, CAN_DEOPTIMIZE_EAGERLY);
1337 LInstruction* LChunkBuilder::DoDebugBreak(HDebugBreak* instr) {
1338 return new(zone()) LDebugBreak();
1342 LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
1343 LOperand* context = UseFixed(instr->context(), cp);
1344 return MarkAsCall(new(zone()) LDeclareGlobals(context), instr);
// Unconditional deopt always carries an environment.
1348 LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
1349 return AssignEnvironment(new(zone()) LDeoptimize);
// Division lowering, split by divisor kind (power of two / other constant /
// variable); DoDiv dispatches. Environments are attached only when the
// operation can actually deopt (-0, overflow, or inexact result).
1353 LInstruction* LChunkBuilder::DoDivByPowerOf2I(HDiv* instr) {
1354 ASSERT(instr->representation().IsInteger32());
1355 ASSERT(instr->left()->representation().Equals(instr->representation()));
1356 ASSERT(instr->right()->representation().Equals(instr->representation()));
1357 LOperand* dividend = UseRegister(instr->left());
1358 int32_t divisor = instr->right()->GetInteger32Constant();
1359 LInstruction* result = DefineAsRegister(new(zone()) LDivByPowerOf2I(
1360 dividend, divisor));
// Deopt cases: -0 with negative divisor, kMinInt / -1 overflow, or an
// inexact division whose result is actually observed (not truncated).
1361 if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1362 (instr->CheckFlag(HValue::kCanOverflow) && divisor == -1) ||
1363 (!instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) &&
1364 divisor != 1 && divisor != -1)) {
1365 result = AssignEnvironment(result);
// Constant (non power of two) divisor: magic-number multiply; the temp is
// only needed to verify exactness when uses do not truncate.
1371 LInstruction* LChunkBuilder::DoDivByConstI(HDiv* instr) {
1372 ASSERT(instr->representation().IsInteger32());
1373 ASSERT(instr->left()->representation().Equals(instr->representation()));
1374 ASSERT(instr->right()->representation().Equals(instr->representation()));
1375 LOperand* dividend = UseRegister(instr->left());
1376 int32_t divisor = instr->right()->GetInteger32Constant();
1377 LOperand* temp = instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)
1378 ? NULL : TempRegister();
1379 LInstruction* result = DefineAsRegister(new(zone()) LDivByConstI(
1380 dividend, divisor, temp));
// NOTE(review): the first condition line of this if (original 1381,
// presumably `if (divisor == 0 ||`) is elided in this listing.
1382 (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1383 !instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) {
1384 result = AssignEnvironment(result);
// Variable divisor: hardware sdiv; temp checks exactness unless truncating.
1390 LInstruction* LChunkBuilder::DoDivI(HBinaryOperation* instr) {
1391 ASSERT(instr->representation().IsSmiOrInteger32());
1392 ASSERT(instr->left()->representation().Equals(instr->representation()));
1393 ASSERT(instr->right()->representation().Equals(instr->representation()));
1394 LOperand* dividend = UseRegister(instr->left());
1395 LOperand* divisor = UseRegister(instr->right());
1396 LOperand* temp = instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)
1397 ? NULL : TempRegister();
1398 LInstruction* result =
1399 DefineAsRegister(new(zone()) LDivI(dividend, divisor, temp));
1400 if (!instr->CheckFlag(HValue::kAllUsesTruncatingToInt32)) {
1401 result = AssignEnvironment(result);
// Dispatcher: integer divides pick the cheapest specialization; doubles use
// fdiv; anything else falls back to the generic (tagged) binary op.
1407 LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
1408 if (instr->representation().IsSmiOrInteger32()) {
1409 if (instr->RightIsPowerOf2()) {
1410 return DoDivByPowerOf2I(instr);
1411 } else if (instr->right()->IsConstant()) {
1412 return DoDivByConstI(instr);
1414 return DoDivI(instr);
1416 } else if (instr->representation().IsDouble()) {
1417 return DoArithmeticD(Token::DIV, instr);
1419 return DoArithmeticT(Token::DIV, instr);
// Dummy use keeps a value alive without constraining its location.
1424 LInstruction* LChunkBuilder::DoDummyUse(HDummyUse* instr) {
1425 return DefineAsRegister(new(zone()) LDummyUse(UseAny(instr->value())));
// Entering an inlined function: build the inner environment by copying the
// outer one, optionally re-binding the arguments object.
1429 LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
1430 HEnvironment* outer = current_block_->last_environment();
1431 outer->set_ast_id(instr->ReturnId());
1432 HConstant* undefined = graph()->GetConstantUndefined();
1433 HEnvironment* inner = outer->CopyForInlining(instr->closure(),
1434 instr->arguments_count(),
1437 instr->inlining_kind());
1438 // Only replay binding of arguments object if it wasn't removed from graph.
1439 if ((instr->arguments_var() != NULL) &&
1440 instr->arguments_object()->IsLinked()) {
1441 inner->Bind(instr->arguments_var(), instr->arguments_object());
1443 inner->set_entry(instr);
1444 current_block_->UpdateEnvironment(inner);
1445 chunk_->AddInlinedClosure(instr->closure());
// Environment markers generate no code (body elided in this listing).
1450 LInstruction* LChunkBuilder::DoEnvironmentMarker(HEnvironmentMarker* instr) {
1456 LInstruction* LChunkBuilder::DoForceRepresentation(
1457 HForceRepresentation* instr) {
1458 // All HForceRepresentation instructions should be eliminated in the
1459 // representation change phase of Hydrogen.
// Function literal: runtime/stub call, context in cp, result in x0.
1465 LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
1466 LOperand* context = UseFixed(instr->context(), cp);
1468 DefineFixed(new(zone()) LFunctionLiteral(context), x0), instr);
// Simple register-in/register-out lowerings for branches and type tests.
1472 LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
1473 HGetCachedArrayIndex* instr) {
1474 ASSERT(instr->value()->representation().IsTagged());
1475 LOperand* value = UseRegisterAtStart(instr->value());
1476 return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
1480 LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
1481 return new(zone()) LGoto(instr->FirstSuccessor());
1485 LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
1486 HHasCachedArrayIndexAndBranch* instr) {
1487 ASSERT(instr->value()->representation().IsTagged());
1488 return new(zone()) LHasCachedArrayIndexAndBranch(
1489 UseRegisterAtStart(instr->value()), TempRegister());
1493 LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
1494 HHasInstanceTypeAndBranch* instr) {
1495 ASSERT(instr->value()->representation().IsTagged());
1496 LOperand* value = UseRegisterAtStart(instr->value());
1497 return new(zone()) LHasInstanceTypeAndBranch(value, TempRegister());
// Interior pointer: base + offset, offset may be a constant.
1501 LInstruction* LChunkBuilder::DoInnerAllocatedObject(
1502 HInnerAllocatedObject* instr) {
1503 LOperand* base_object = UseRegisterAtStart(instr->base_object());
1504 LOperand* offset = UseRegisterOrConstantAtStart(instr->offset());
1505 return DefineAsRegister(
1506 new(zone()) LInnerAllocatedObject(base_object, offset));
// instanceof goes through InstanceofStub; operand registers are dictated by
// the stub's interface (InstanceofStub::left()/right()).
1510 LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
1511 LOperand* context = UseFixed(instr->context(), cp);
1512 LInstanceOf* result = new(zone()) LInstanceOf(
1514 UseFixed(instr->left(), InstanceofStub::left()),
1515 UseFixed(instr->right(), InstanceofStub::right()));
1516 return MarkAsCall(DefineFixed(result, x0), instr);
1520 LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
1521 HInstanceOfKnownGlobal* instr) {
1522 LInstanceOfKnownGlobal* result = new(zone()) LInstanceOfKnownGlobal(
1523 UseFixed(instr->context(), cp),
1524 UseFixed(instr->left(), InstanceofStub::left()));
1525 return MarkAsCall(DefineFixed(result, x0), instr);
1529 LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
1530 LOperand* context = UseFixed(instr->context(), cp);
1531 // The function is required (by MacroAssembler::InvokeFunction) to be in x1.
1532 LOperand* function = UseFixed(instr->function(), x1);
1533 LInvokeFunction* result = new(zone()) LInvokeFunction(context, function);
1534 return MarkAsCall(DefineFixed(result, x0), instr, CANNOT_DEOPTIMIZE_EAGERLY);
1538 LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
1539 HIsConstructCallAndBranch* instr) {
1540 return new(zone()) LIsConstructCallAndBranch(TempRegister(), TempRegister());
1544 LInstruction* LChunkBuilder::DoCompareMinusZeroAndBranch(
1545 HCompareMinusZeroAndBranch* instr) {
1546 LInstruction* goto_instr = CheckElideControlInstruction(instr);
1547 if (goto_instr != NULL) return goto_instr;
1548 LOperand* value = UseRegister(instr->value());
1549 LOperand* scratch = TempRegister();
1550 return new(zone()) LCompareMinusZeroAndBranch(value, scratch);
1554 LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
1555 ASSERT(instr->value()->representation().IsTagged());
1556 LOperand* value = UseRegisterAtStart(instr->value());
1557 LOperand* temp1 = TempRegister();
1558 LOperand* temp2 = TempRegister();
1559 return new(zone()) LIsObjectAndBranch(value, temp1, temp2);
1563 LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
1564 ASSERT(instr->value()->representation().IsTagged());
1565 LOperand* value = UseRegisterAtStart(instr->value());
1566 LOperand* temp = TempRegister();
1567 return new(zone()) LIsStringAndBranch(value, temp);
1571 LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
1572 ASSERT(instr->value()->representation().IsTagged());
1573 return new(zone()) LIsSmiAndBranch(UseRegisterAtStart(instr->value()));
1577 LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
1578 HIsUndetectableAndBranch* instr) {
1579 ASSERT(instr->value()->representation().IsTagged());
1580 LOperand* value = UseRegisterAtStart(instr->value());
1581 return new(zone()) LIsUndetectableAndBranch(value, TempRegister());
// Leaving an inlined function: pop any materialized arguments and restore
// the outer environment.
1585 LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
1586 LInstruction* pop = NULL;
1587 HEnvironment* env = current_block_->last_environment();
1589 if (env->entry()->arguments_pushed()) {
1590 int argument_count = env->arguments_environment()->parameter_count();
1591 pop = new(zone()) LDrop(argument_count);
1592 ASSERT(instr->argument_delta() == -argument_count);
1595 HEnvironment* outer =
1596 current_block_->last_environment()->DiscardInlined(false);
1597 current_block_->UpdateEnvironment(outer);
// Context-slot load: only deopts when a hole must be checked and the
// semantics require deoptimization rather than conversion.
1603 LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
1604 LOperand* context = UseRegisterAtStart(instr->value());
1605 LInstruction* result =
1606 DefineAsRegister(new(zone()) LLoadContextSlot(context));
1607 if (instr->RequiresHoleCheck() && instr->DeoptimizesOnHole()) {
1608 result = AssignEnvironment(result);
// Prototype load can deopt (e.g. on non-instantiated functions).
1614 LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
1615 HLoadFunctionPrototype* instr) {
1616 LOperand* function = UseRegister(instr->function());
1617 LOperand* temp = TempRegister();
1618 return AssignEnvironment(DefineAsRegister(
1619 new(zone()) LLoadFunctionPrototype(function, temp)));
// Global cell load: environment only if the hole must be checked.
1623 LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
1624 LLoadGlobalCell* result = new(zone()) LLoadGlobalCell();
1625 return instr->RequiresHoleCheck()
1626 ? AssignEnvironment(DefineAsRegister(result))
1627 : DefineAsRegister(result);
// Generic global load goes through the load IC: receiver in x0, result x0.
1631 LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
1632 LOperand* context = UseFixed(instr->context(), cp);
1633 LOperand* global_object = UseFixed(instr->global_object(), x0);
1634 LLoadGlobalGeneric* result =
1635 new(zone()) LLoadGlobalGeneric(context, global_object);
1636 return MarkAsCall(DefineFixed(result, x0), instr);
// Keyed load: three shapes — fixed double array, fixed (tagged/smi/int32)
// array, and external/typed-array backing stores. Temps are needed when the
// key is non-constant (address computation) or a hole check is required.
1640 LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) {
1641 ASSERT(instr->key()->representation().IsSmiOrInteger32());
1642 ElementsKind elements_kind = instr->elements_kind();
1643 LOperand* elements = UseRegister(instr->elements());
1645 if (!instr->is_typed_elements()) {
1646 LOperand* key = UseRegisterOrConstantAtStart(instr->key());
1648 if (instr->representation().IsDouble()) {
// NOTE(review): the tail of this ?: (original 1651-1653, the TempRegister
// / NULL arms) is elided in this listing.
1649 LOperand* temp = (!instr->key()->IsConstant() ||
1650 instr->RequiresHoleCheck())
1654 LLoadKeyedFixedDouble* result =
1655 new(zone()) LLoadKeyedFixedDouble(elements, key, temp);
1656 return instr->RequiresHoleCheck()
1657 ? AssignEnvironment(DefineAsRegister(result))
1658 : DefineAsRegister(result);
1660 ASSERT(instr->representation().IsSmiOrTagged() ||
1661 instr->representation().IsInteger32());
1662 LOperand* temp = instr->key()->IsConstant() ? NULL : TempRegister();
1663 LLoadKeyedFixed* result =
1664 new(zone()) LLoadKeyedFixed(elements, key, temp);
1665 return instr->RequiresHoleCheck()
1666 ? AssignEnvironment(DefineAsRegister(result))
1667 : DefineAsRegister(result);
1670 ASSERT((instr->representation().IsInteger32() &&
1671 !IsDoubleOrFloatElementsKind(instr->elements_kind())) ||
1672 (instr->representation().IsDouble() &&
1673 IsDoubleOrFloatElementsKind(instr->elements_kind())));
1675 LOperand* key = UseRegisterOrConstant(instr->key());
1676 LOperand* temp = instr->key()->IsConstant() ? NULL : TempRegister();
1677 LInstruction* result = DefineAsRegister(
1678 new(zone()) LLoadKeyedExternal(elements, key, temp));
// uint32 loads deopt when the value does not fit in int32 and the uses
// cannot treat it as uint32.
1679 if ((elements_kind == EXTERNAL_UINT32_ELEMENTS ||
1680 elements_kind == UINT32_ELEMENTS) &&
1681 !instr->CheckFlag(HInstruction::kUint32)) {
1682 result = AssignEnvironment(result);
// Generic keyed load via the keyed-load IC: receiver x1, key x0.
1689 LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
1690 LOperand* context = UseFixed(instr->context(), cp);
1691 LOperand* object = UseFixed(instr->object(), x1);
1692 LOperand* key = UseFixed(instr->key(), x0);
1694 LInstruction* result =
1695 DefineFixed(new(zone()) LLoadKeyedGeneric(context, object, key), x0);
1696 return MarkAsCall(result, instr);
// In-object / field load needs no checks at this level.
1700 LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
1701 LOperand* object = UseRegisterAtStart(instr->object());
1702 return DefineAsRegister(new(zone()) LLoadNamedField(object));
// Generic named load via the load IC: receiver x0, result x0.
1706 LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
1707 LOperand* context = UseFixed(instr->context(), cp);
1708 LOperand* object = UseFixed(instr->object(), x0);
1709 LInstruction* result =
1710 DefineFixed(new(zone()) LLoadNamedGeneric(context, object), x0);
1711 return MarkAsCall(result, instr);
1715 LInstruction* LChunkBuilder::DoLoadRoot(HLoadRoot* instr) {
1716 return DefineAsRegister(new(zone()) LLoadRoot);
1720 LInstruction* LChunkBuilder::DoMapEnumLength(HMapEnumLength* instr) {
1721 LOperand* map = UseRegisterAtStart(instr->value());
1722 return DefineAsRegister(new(zone()) LMapEnumLength(map));
// Flooring division (Math.floor(a / b)) lowering, mirrors the DoDiv* family
// but rounds toward negative infinity, so the deopt conditions differ.
1726 LInstruction* LChunkBuilder::DoFlooringDivByPowerOf2I(HMathFloorOfDiv* instr) {
1727 ASSERT(instr->representation().IsInteger32());
1728 ASSERT(instr->left()->representation().Equals(instr->representation()));
1729 ASSERT(instr->right()->representation().Equals(instr->representation()));
1730 LOperand* dividend = UseRegisterAtStart(instr->left());
1731 int32_t divisor = instr->right()->GetInteger32Constant();
1732 LInstruction* result = DefineAsRegister(new(zone()) LFlooringDivByPowerOf2I(
1733 dividend, divisor));
1734 if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1735 (instr->CheckFlag(HValue::kLeftCanBeMinInt) && divisor == -1)) {
1736 result = AssignEnvironment(result);
// Constant divisor: temp needed only when the dividend's sign can disagree
// with the divisor's (flooring correction path).
1742 LInstruction* LChunkBuilder::DoFlooringDivByConstI(HMathFloorOfDiv* instr) {
1743 ASSERT(instr->representation().IsInteger32());
1744 ASSERT(instr->left()->representation().Equals(instr->representation()));
1745 ASSERT(instr->right()->representation().Equals(instr->representation()));
1746 LOperand* dividend = UseRegister(instr->left());
1747 int32_t divisor = instr->right()->GetInteger32Constant();
// NOTE(review): the `LOperand* temp =` opener (original 1748) and the first
// condition of the later if (original 1754, presumably `if (divisor == 0 ||`)
// are elided in this listing.
1749 ((divisor > 0 && !instr->CheckFlag(HValue::kLeftCanBeNegative)) ||
1750 (divisor < 0 && !instr->CheckFlag(HValue::kLeftCanBePositive))) ?
1751 NULL : TempRegister();
1752 LInstruction* result = DefineAsRegister(
1753 new(zone()) LFlooringDivByConstI(dividend, divisor, temp));
1755 (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0)) {
1756 result = AssignEnvironment(result);
// Variable divisor: always carries an environment (div-by-zero etc.).
1762 LInstruction* LChunkBuilder::DoFlooringDivI(HMathFloorOfDiv* instr) {
1763 LOperand* dividend = UseRegister(instr->left());
1764 LOperand* divisor = UseRegister(instr->right());
1765 LOperand* remainder = TempRegister();
1766 LInstruction* result =
1767 DefineAsRegister(new(zone()) LFlooringDivI(dividend, divisor, remainder));
1768 return AssignEnvironment(result);
// Dispatcher over the three flooring-division specializations.
1772 LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
1773 if (instr->RightIsPowerOf2()) {
1774 return DoFlooringDivByPowerOf2I(instr);
1775 } else if (instr->right()->IsConstant()) {
1776 return DoFlooringDivByConstI(instr);
1778 return DoFlooringDivI(instr);
// Min/max: int path allows a constant right operand; the double path keeps
// both operands in registers.
1783 LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) {
1784 LOperand* left = NULL;
1785 LOperand* right = NULL;
1786 if (instr->representation().IsSmiOrInteger32()) {
1787 ASSERT(instr->left()->representation().Equals(instr->representation()));
1788 ASSERT(instr->right()->representation().Equals(instr->representation()));
1789 left = UseRegisterAtStart(instr->BetterLeftOperand());
1790 right = UseRegisterOrConstantAtStart(instr->BetterRightOperand());
1792 ASSERT(instr->representation().IsDouble());
1793 ASSERT(instr->left()->representation().IsDouble());
1794 ASSERT(instr->right()->representation().IsDouble());
1795 left = UseRegisterAtStart(instr->left());
1796 right = UseRegisterAtStart(instr->right());
1798 return DefineAsRegister(new(zone()) LMathMinMax(left, right));
// Modulo lowering, split like division; DoMod dispatches. Only the -0 and
// div-by-zero cases can deoptimize.
1802 LInstruction* LChunkBuilder::DoModByPowerOf2I(HMod* instr) {
1803 ASSERT(instr->representation().IsInteger32());
1804 ASSERT(instr->left()->representation().Equals(instr->representation()));
1805 ASSERT(instr->right()->representation().Equals(instr->representation()));
1806 LOperand* dividend = UseRegisterAtStart(instr->left());
1807 int32_t divisor = instr->right()->GetInteger32Constant();
// Mask-based modulo writes the result over the dividend register.
1808 LInstruction* result = DefineSameAsFirst(new(zone()) LModByPowerOf2I(
1809 dividend, divisor));
1810 if (instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1811 result = AssignEnvironment(result);
// Constant divisor: magic-number multiply; divisor 0 always deopts.
1817 LInstruction* LChunkBuilder::DoModByConstI(HMod* instr) {
1818 ASSERT(instr->representation().IsInteger32());
1819 ASSERT(instr->left()->representation().Equals(instr->representation()));
1820 ASSERT(instr->right()->representation().Equals(instr->representation()));
1821 LOperand* dividend = UseRegister(instr->left());
1822 int32_t divisor = instr->right()->GetInteger32Constant();
1823 LOperand* temp = TempRegister();
1824 LInstruction* result = DefineAsRegister(new(zone()) LModByConstI(
1825 dividend, divisor, temp));
1826 if (divisor == 0 || instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1827 result = AssignEnvironment(result);
// Variable divisor: deopts on zero divisor or a -0 result.
1833 LInstruction* LChunkBuilder::DoModI(HMod* instr) {
1834 ASSERT(instr->representation().IsSmiOrInteger32());
1835 ASSERT(instr->left()->representation().Equals(instr->representation()));
1836 ASSERT(instr->right()->representation().Equals(instr->representation()));
1837 LOperand* dividend = UseRegister(instr->left());
1838 LOperand* divisor = UseRegister(instr->right());
1839 LInstruction* result = DefineAsRegister(new(zone()) LModI(dividend, divisor));
1840 if (instr->CheckFlag(HValue::kCanBeDivByZero) ||
1841 instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1842 result = AssignEnvironment(result);
// Dispatcher: integer specializations, double fmod, or the generic op.
1848 LInstruction* LChunkBuilder::DoMod(HMod* instr) {
1849 if (instr->representation().IsSmiOrInteger32()) {
1850 if (instr->RightIsPowerOf2()) {
1851 return DoModByPowerOf2I(instr);
1852 } else if (instr->right()->IsConstant()) {
1853 return DoModByConstI(instr);
1855 return DoModI(instr);
1857 } else if (instr->representation().IsDouble()) {
1858 return DoArithmeticD(Token::MOD, instr);
1860 return DoArithmeticT(Token::MOD, instr);
// Multiplication: constants that LMulConstIS can strength-reduce (powers of
// two and, without overflow checks, 2^n +/- 1) get a cheaper form; everything
// else uses the general LMulI/LMulS, then doubles/tagged fall through.
1865 LInstruction* LChunkBuilder::DoMul(HMul* instr) {
1866 if (instr->representation().IsSmiOrInteger32()) {
1867 ASSERT(instr->left()->representation().Equals(instr->representation()));
1868 ASSERT(instr->right()->representation().Equals(instr->representation()));
1870 bool can_overflow = instr->CheckFlag(HValue::kCanOverflow);
1871 bool bailout_on_minus_zero = instr->CheckFlag(HValue::kBailoutOnMinusZero);
1873 HValue* least_const = instr->BetterLeftOperand();
1874 HValue* most_const = instr->BetterRightOperand();
1876 // LMulConstI can handle a subset of constants:
1877 // With support for overflow detection:
1880 // Without support for overflow detection:
1881 // 2^n + 1, -(2^n - 1)
1882 if (most_const->IsConstant()) {
1883 int32_t constant = HConstant::cast(most_const)->Integer32Value();
1884 bool small_constant = (constant >= -1) && (constant <= 2);
1885 bool end_range_constant = (constant <= -kMaxInt) || (constant == kMaxInt);
1886 int32_t constant_abs = Abs(constant);
// NOTE(review): the opening of this condition (original 1889, presumably
// `(small_constant ||` or similar) is elided in this listing.
1888 if (!end_range_constant &&
1890 (IsPowerOf2(constant_abs)) ||
1891 (!can_overflow && (IsPowerOf2(constant_abs + 1) ||
1892 IsPowerOf2(constant_abs - 1))))) {
1893 LConstantOperand* right = UseConstant(most_const);
1894 bool need_register = IsPowerOf2(constant_abs) && !small_constant;
1895 LOperand* left = need_register ? UseRegister(least_const)
1896 : UseRegisterAtStart(least_const);
1897 LInstruction* result =
1898 DefineAsRegister(new(zone()) LMulConstIS(left, right));
1899 if ((bailout_on_minus_zero && constant <= 0) || can_overflow) {
1900 result = AssignEnvironment(result);
1906 // LMulI/S can handle all cases, but it requires that a register is
1907 // allocated for the second operand.
1908 LOperand* left = UseRegisterAtStart(least_const);
1909 LOperand* right = UseRegisterAtStart(most_const);
1910 LInstruction* result = instr->representation().IsSmi()
1911 ? DefineAsRegister(new(zone()) LMulS(left, right))
1912 : DefineAsRegister(new(zone()) LMulI(left, right));
// -0 is only possible when the operands differ (x * x >= 0).
1913 if ((bailout_on_minus_zero && least_const != most_const) || can_overflow) {
1914 result = AssignEnvironment(result);
1917 } else if (instr->representation().IsDouble()) {
1918 return DoArithmeticD(Token::MUL, instr);
1920 return DoArithmeticT(Token::MUL, instr);
// OSR entry: record the entry point for the register allocator and carry an
// environment describing the unoptimized frame state.
1925 LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
1926 ASSERT(argument_count_ == 0);
1927 allocator_->MarkAsOsrEntry();
1928 current_block_->last_environment()->set_ast_id(instr->ast_id());
1929 return AssignEnvironment(new(zone()) LOsrEntry);
// Parameters live either in stack slots (normal functions) or in the fixed
// registers dictated by a code stub's interface descriptor.
1933 LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
1934 LParameter* result = new(zone()) LParameter;
1935 if (instr->kind() == HParameter::STACK_PARAMETER) {
1936 int spill_index = chunk_->GetParameterStackSlot(instr->index());
1937 return DefineAsSpilled(result, spill_index);
1939 ASSERT(info()->IsStub());
1940 CodeStubInterfaceDescriptor* descriptor =
1941 info()->code_stub()->GetInterfaceDescriptor();
1942 int index = static_cast<int>(instr->index());
1943 Register reg = descriptor->GetParameterRegister(index);
1944 return DefineFixed(result, reg);
// Math.pow: C-call convention — base in d0, exponent in d1/x12/x11 depending
// on its representation, result back in d0.
1949 LInstruction* LChunkBuilder::DoPower(HPower* instr) {
1950 ASSERT(instr->representation().IsDouble());
1951 // We call a C function for double power. It can't trigger a GC.
1952 // We need to use fixed result register for the call.
1953 Representation exponent_type = instr->right()->representation();
1954 ASSERT(instr->left()->representation().IsDouble());
1955 LOperand* left = UseFixedDouble(instr->left(), d0);
1956 LOperand* right = exponent_type.IsInteger32()
1957 ? UseFixed(instr->right(), x12)
1958 : exponent_type.IsDouble()
1959 ? UseFixedDouble(instr->right(), d1)
1960 : UseFixed(instr->right(), x11);
1961 LPower* result = new(zone()) LPower(left, right);
1962 return MarkAsCall(DefineFixedDouble(result, d0),
1964 CAN_DEOPTIMIZE_EAGERLY);
// Push a call argument onto the stack; the value only needs a register.
1968 LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
1969 LOperand* argument = UseRegister(instr->argument());
1970 return new(zone()) LPushArgument(argument);
// RegExp literal materialization is a runtime call; result in x0.
1974 LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
1975 LOperand* context = UseFixed(instr->context(), cp);
1977 DefineFixed(new(zone()) LRegExpLiteral(context), x0), instr);
// Reinterpret a double's bits as an integer value.
1981 LInstruction* LChunkBuilder::DoDoubleBits(HDoubleBits* instr) {
1982 HValue* value = instr->value();
1983 ASSERT(value->representation().IsDouble());
1984 return DefineAsRegister(new(zone()) LDoubleBits(UseRegister(value)));
// Assemble a double from its hi/lo 32-bit halves; needs one scratch.
1988 LInstruction* LChunkBuilder::DoConstructDouble(HConstructDouble* instr) {
1989 LOperand* lo = UseRegister(instr->lo());
1990 LOperand* hi = UseRegister(instr->hi());
1991 LOperand* temp = TempRegister();
1992 return DefineAsRegister(new(zone()) LConstructDouble(hi, lo, temp));
// Return: value fixed in x0; stubs also keep the context in cp.
// NOTE(review): the non-stub arm of the ?: (original 1999) and the final
// argument line of the LReturn constructor are elided in this listing.
1996 LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
1997 LOperand* context = info()->IsStub()
1998 ? UseFixed(instr->context(), cp)
2000 LOperand* parameter_count = UseRegisterOrConstant(instr->parameter_count());
2001 return new(zone()) LReturn(UseFixed(instr->value(), x0), context,
// Sequential-string character read; temp for the address computation.
2006 LInstruction* LChunkBuilder::DoSeqStringGetChar(HSeqStringGetChar* instr) {
2007 LOperand* string = UseRegisterAtStart(instr->string());
2008 LOperand* index = UseRegisterOrConstantAtStart(instr->index());
2009 LOperand* temp = TempRegister();
2010 LSeqStringGetChar* result =
2011 new(zone()) LSeqStringGetChar(string, index, temp);
2012 return DefineAsRegister(result);
// Sequential-string character write; under --debug-code the index and a
// context are kept available for the checking code path.
2016 LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) {
2017 LOperand* string = UseRegister(instr->string());
2018 LOperand* index = FLAG_debug_code
2019 ? UseRegister(instr->index())
2020 : UseRegisterOrConstant(instr->index());
2021 LOperand* value = UseRegister(instr->value());
2022 LOperand* context = FLAG_debug_code ? UseFixed(instr->context(), cp) : NULL;
2023 LOperand* temp = TempRegister();
2024 LSeqStringSetChar* result =
2025 new(zone()) LSeqStringSetChar(context, string, index, value, temp);
2026 return DefineAsRegister(result);
2030 LInstruction* LChunkBuilder::DoShift(Token::Value op,
2031 HBitwiseBinaryOperation* instr) {
2032 if (instr->representation().IsTagged()) {
2033 return DoArithmeticT(op, instr);
2036 ASSERT(instr->representation().IsInteger32() ||
2037 instr->representation().IsSmi());
2038 ASSERT(instr->left()->representation().Equals(instr->representation()));
2039 ASSERT(instr->right()->representation().Equals(instr->representation()));
2041 LOperand* left = instr->representation().IsSmi()
2042 ? UseRegister(instr->left())
2043 : UseRegisterAtStart(instr->left());
2045 HValue* right_value = instr->right();
2046 LOperand* right = NULL;
2047 LOperand* temp = NULL;
2048 int constant_value = 0;
2049 if (right_value->IsConstant()) {
2050 right = UseConstant(right_value);
2051 HConstant* constant = HConstant::cast(right_value);
2052 constant_value = constant->Integer32Value() & 0x1f;
2054 right = UseRegisterAtStart(right_value);
2055 if (op == Token::ROR) {
2056 temp = TempRegister();
2060 // Shift operations can only deoptimize if we do a logical shift by 0 and the
2061 // result cannot be truncated to int32.
2062 bool does_deopt = false;
2063 if ((op == Token::SHR) && (constant_value == 0)) {
2064 if (FLAG_opt_safe_uint32_operations) {
2065 does_deopt = !instr->CheckFlag(HInstruction::kUint32);
2067 does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToInt32);
2071 LInstruction* result;
2072 if (instr->representation().IsInteger32()) {
2073 result = DefineAsRegister(new(zone()) LShiftI(op, left, right, does_deopt));
2075 ASSERT(instr->representation().IsSmi());
2076 result = DefineAsRegister(
2077 new(zone()) LShiftS(op, left, right, temp, does_deopt));
2080 return does_deopt ? AssignEnvironment(result) : result;
2084 LInstruction* LChunkBuilder::DoRor(HRor* instr) {
2085 return DoShift(Token::ROR, instr);
2089 LInstruction* LChunkBuilder::DoSar(HSar* instr) {
2090 return DoShift(Token::SAR, instr);
2094 LInstruction* LChunkBuilder::DoShl(HShl* instr) {
2095 return DoShift(Token::SHL, instr);
2099 LInstruction* LChunkBuilder::DoShr(HShr* instr) {
2100 return DoShift(Token::SHR, instr);
2104 LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
2105 instr->ReplayEnvironment(current_block_->last_environment());
2110 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
2111 if (instr->is_function_entry()) {
2112 LOperand* context = UseFixed(instr->context(), cp);
2113 return MarkAsCall(new(zone()) LStackCheck(context), instr);
2115 ASSERT(instr->is_backwards_branch());
2116 LOperand* context = UseAny(instr->context());
2117 return AssignEnvironment(
2118 AssignPointerMap(new(zone()) LStackCheck(context)));
2123 LInstruction* LChunkBuilder::DoStoreCodeEntry(HStoreCodeEntry* instr) {
2124 LOperand* function = UseRegister(instr->function());
2125 LOperand* code_object = UseRegisterAtStart(instr->code_object());
2126 LOperand* temp = TempRegister();
2127 return new(zone()) LStoreCodeEntry(function, code_object, temp);
2131 LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
2132 LOperand* temp = TempRegister();
2135 if (instr->NeedsWriteBarrier()) {
2136 // TODO(all): Replace these constraints when RecordWriteStub has been
2138 context = UseRegisterAndClobber(instr->context());
2139 value = UseRegisterAndClobber(instr->value());
2141 context = UseRegister(instr->context());
2142 value = UseRegister(instr->value());
2144 LInstruction* result = new(zone()) LStoreContextSlot(context, value, temp);
2145 if (instr->RequiresHoleCheck() && instr->DeoptimizesOnHole()) {
2146 result = AssignEnvironment(result);
2152 LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
2153 LOperand* value = UseRegister(instr->value());
2154 if (instr->RequiresHoleCheck()) {
2155 return AssignEnvironment(new(zone()) LStoreGlobalCell(value,
2159 return new(zone()) LStoreGlobalCell(value, TempRegister(), NULL);
2164 LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
2165 LOperand* temp = NULL;
2166 LOperand* elements = NULL;
2167 LOperand* val = NULL;
2169 if (!instr->is_typed_elements() &&
2170 instr->value()->representation().IsTagged() &&
2171 instr->NeedsWriteBarrier()) {
2172 // RecordWrite() will clobber all registers.
2173 elements = UseRegisterAndClobber(instr->elements());
2174 val = UseRegisterAndClobber(instr->value());
2175 temp = TempRegister();
2177 elements = UseRegister(instr->elements());
2178 val = UseRegister(instr->value());
2179 temp = instr->key()->IsConstant() ? NULL : TempRegister();
2182 if (instr->is_typed_elements()) {
2183 ASSERT((instr->value()->representation().IsInteger32() &&
2184 !IsDoubleOrFloatElementsKind(instr->elements_kind())) ||
2185 (instr->value()->representation().IsDouble() &&
2186 IsDoubleOrFloatElementsKind(instr->elements_kind())));
2187 ASSERT((instr->is_fixed_typed_array() &&
2188 instr->elements()->representation().IsTagged()) ||
2189 (instr->is_external() &&
2190 instr->elements()->representation().IsExternal()));
2191 LOperand* key = UseRegisterOrConstant(instr->key());
2192 return new(zone()) LStoreKeyedExternal(elements, key, val, temp);
2194 } else if (instr->value()->representation().IsDouble()) {
2195 ASSERT(instr->elements()->representation().IsTagged());
2196 LOperand* key = UseRegisterOrConstantAtStart(instr->key());
2197 return new(zone()) LStoreKeyedFixedDouble(elements, key, val, temp);
2200 ASSERT(instr->elements()->representation().IsTagged());
2201 ASSERT(instr->value()->representation().IsSmiOrTagged() ||
2202 instr->value()->representation().IsInteger32());
2203 LOperand* key = UseRegisterOrConstantAtStart(instr->key());
2204 return new(zone()) LStoreKeyedFixed(elements, key, val, temp);
2209 LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
2210 LOperand* context = UseFixed(instr->context(), cp);
2211 LOperand* object = UseFixed(instr->object(), x2);
2212 LOperand* key = UseFixed(instr->key(), x1);
2213 LOperand* value = UseFixed(instr->value(), x0);
2215 ASSERT(instr->object()->representation().IsTagged());
2216 ASSERT(instr->key()->representation().IsTagged());
2217 ASSERT(instr->value()->representation().IsTagged());
2220 new(zone()) LStoreKeyedGeneric(context, object, key, value), instr);
2224 LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
2225 // TODO(jbramley): It might be beneficial to allow value to be a constant in
2226 // some cases. x64 makes use of this with FLAG_track_fields, for example.
2228 LOperand* object = UseRegister(instr->object());
2230 LOperand* temp0 = NULL;
2231 LOperand* temp1 = NULL;
2233 if (instr->access().IsExternalMemory() ||
2234 instr->field_representation().IsDouble()) {
2235 value = UseRegister(instr->value());
2236 } else if (instr->NeedsWriteBarrier()) {
2237 value = UseRegisterAndClobber(instr->value());
2238 temp0 = TempRegister();
2239 temp1 = TempRegister();
2240 } else if (instr->NeedsWriteBarrierForMap()) {
2241 value = UseRegister(instr->value());
2242 temp0 = TempRegister();
2243 temp1 = TempRegister();
2245 value = UseRegister(instr->value());
2246 temp0 = TempRegister();
2249 LStoreNamedField* result =
2250 new(zone()) LStoreNamedField(object, value, temp0, temp1);
2251 if (instr->field_representation().IsHeapObject() &&
2252 !instr->value()->type().IsHeapObject()) {
2253 return AssignEnvironment(result);
2259 LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
2260 LOperand* context = UseFixed(instr->context(), cp);
2261 LOperand* object = UseFixed(instr->object(), x1);
2262 LOperand* value = UseFixed(instr->value(), x0);
2263 LInstruction* result = new(zone()) LStoreNamedGeneric(context, object, value);
2264 return MarkAsCall(result, instr);
2268 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
2269 LOperand* context = UseFixed(instr->context(), cp);
2270 LOperand* left = UseFixed(instr->left(), x1);
2271 LOperand* right = UseFixed(instr->right(), x0);
2273 LStringAdd* result = new(zone()) LStringAdd(context, left, right);
2274 return MarkAsCall(DefineFixed(result, x0), instr);
2278 LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
2279 LOperand* string = UseRegisterAndClobber(instr->string());
2280 LOperand* index = UseRegisterAndClobber(instr->index());
2281 LOperand* context = UseAny(instr->context());
2282 LStringCharCodeAt* result =
2283 new(zone()) LStringCharCodeAt(context, string, index);
2284 return AssignPointerMap(DefineAsRegister(result));
2288 LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
2289 LOperand* char_code = UseRegister(instr->value());
2290 LOperand* context = UseAny(instr->context());
2291 LStringCharFromCode* result =
2292 new(zone()) LStringCharFromCode(context, char_code);
2293 return AssignPointerMap(DefineAsRegister(result));
2297 LInstruction* LChunkBuilder::DoStringCompareAndBranch(
2298 HStringCompareAndBranch* instr) {
2299 ASSERT(instr->left()->representation().IsTagged());
2300 ASSERT(instr->right()->representation().IsTagged());
2301 LOperand* context = UseFixed(instr->context(), cp);
2302 LOperand* left = UseFixed(instr->left(), x1);
2303 LOperand* right = UseFixed(instr->right(), x0);
2304 LStringCompareAndBranch* result =
2305 new(zone()) LStringCompareAndBranch(context, left, right);
2306 return MarkAsCall(result, instr);
2310 LInstruction* LChunkBuilder::DoSub(HSub* instr) {
2311 if (instr->representation().IsSmiOrInteger32()) {
2312 ASSERT(instr->left()->representation().Equals(instr->representation()));
2313 ASSERT(instr->right()->representation().Equals(instr->representation()));
2315 if (instr->left()->IsConstant() &&
2316 (HConstant::cast(instr->left())->Integer32Value() == 0)) {
2317 left = UseConstant(instr->left());
2319 left = UseRegisterAtStart(instr->left());
2321 LOperand* right = UseRegisterOrConstantAtStart(instr->right());
2322 LInstruction* result = instr->representation().IsSmi() ?
2323 DefineAsRegister(new(zone()) LSubS(left, right)) :
2324 DefineAsRegister(new(zone()) LSubI(left, right));
2325 if (instr->CheckFlag(HValue::kCanOverflow)) {
2326 result = AssignEnvironment(result);
2329 } else if (instr->representation().IsDouble()) {
2330 return DoArithmeticD(Token::SUB, instr);
2332 return DoArithmeticT(Token::SUB, instr);
2337 LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
2338 if (instr->HasNoUses()) {
2341 return DefineAsRegister(new(zone()) LThisFunction);
2346 LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
2347 LOperand* object = UseFixed(instr->value(), x0);
2348 LToFastProperties* result = new(zone()) LToFastProperties(object);
2349 return MarkAsCall(DefineFixed(result, x0), instr);
2353 LInstruction* LChunkBuilder::DoTransitionElementsKind(
2354 HTransitionElementsKind* instr) {
2355 if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) {
2356 LOperand* object = UseRegister(instr->object());
2357 LTransitionElementsKind* result =
2358 new(zone()) LTransitionElementsKind(object, NULL,
2359 TempRegister(), TempRegister());
2362 LOperand* object = UseFixed(instr->object(), x0);
2363 LOperand* context = UseFixed(instr->context(), cp);
2364 LTransitionElementsKind* result =
2365 new(zone()) LTransitionElementsKind(object, context, NULL, NULL);
2366 return MarkAsCall(result, instr);
2371 LInstruction* LChunkBuilder::DoTrapAllocationMemento(
2372 HTrapAllocationMemento* instr) {
2373 LOperand* object = UseRegister(instr->object());
2374 LOperand* temp1 = TempRegister();
2375 LOperand* temp2 = TempRegister();
2376 LTrapAllocationMemento* result =
2377 new(zone()) LTrapAllocationMemento(object, temp1, temp2);
2378 return AssignEnvironment(result);
2382 LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
2383 LOperand* context = UseFixed(instr->context(), cp);
2384 // TODO(jbramley): In ARM, this uses UseFixed to force the input to x0.
2385 // However, LCodeGen::DoTypeof just pushes it to the stack (for CallRuntime)
2386 // anyway, so the input doesn't have to be in x0. We might be able to improve
2387 // the ARM back-end a little by relaxing this restriction.
2389 new(zone()) LTypeof(context, UseRegisterAtStart(instr->value()));
2390 return MarkAsCall(DefineFixed(result, x0), instr);
2394 LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
2395 LInstruction* goto_instr = CheckElideControlInstruction(instr);
2396 if (goto_instr != NULL) return goto_instr;
2398 // We only need temp registers in some cases, but we can't dereference the
2399 // instr->type_literal() handle to test that here.
2400 LOperand* temp1 = TempRegister();
2401 LOperand* temp2 = TempRegister();
2403 return new(zone()) LTypeofIsAndBranch(
2404 UseRegister(instr->value()), temp1, temp2);
2408 LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
2409 switch (instr->op()) {
2411 Representation r = instr->representation();
2413 // The tagged case might need to allocate a HeapNumber for the result,
2414 // so it is handled by a separate LInstruction.
2415 LOperand* context = UseFixed(instr->context(), cp);
2416 LOperand* input = UseRegister(instr->value());
2417 LOperand* temp1 = TempRegister();
2418 LOperand* temp2 = TempRegister();
2419 LOperand* temp3 = TempRegister();
2420 LInstruction* result = DefineAsRegister(
2421 new(zone()) LMathAbsTagged(context, input, temp1, temp2, temp3));
2422 return AssignEnvironment(AssignPointerMap(result));
2424 LOperand* input = UseRegisterAtStart(instr->value());
2425 LInstruction* result = DefineAsRegister(new(zone()) LMathAbs(input));
2426 if (!r.IsDouble()) result = AssignEnvironment(result);
2431 ASSERT(instr->representation().IsDouble());
2432 ASSERT(instr->value()->representation().IsDouble());
2433 LOperand* input = UseRegister(instr->value());
2434 // TODO(all): Implement TempFPRegister.
2435 LOperand* double_temp1 = FixedTemp(d24); // This was chosen arbitrarily.
2436 LOperand* temp1 = TempRegister();
2437 LOperand* temp2 = TempRegister();
2438 LOperand* temp3 = TempRegister();
2439 LMathExp* result = new(zone()) LMathExp(input, double_temp1,
2440 temp1, temp2, temp3);
2441 return DefineAsRegister(result);
2444 ASSERT(instr->value()->representation().IsDouble());
2445 LOperand* input = UseRegisterAtStart(instr->value());
2446 if (instr->representation().IsInteger32()) {
2447 LMathFloorI* result = new(zone()) LMathFloorI(input);
2448 return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
2450 ASSERT(instr->representation().IsDouble());
2451 LMathFloorD* result = new(zone()) LMathFloorD(input);
2452 return DefineAsRegister(result);
2456 ASSERT(instr->representation().IsDouble());
2457 ASSERT(instr->value()->representation().IsDouble());
2458 LOperand* input = UseFixedDouble(instr->value(), d0);
2459 LMathLog* result = new(zone()) LMathLog(input);
2460 return MarkAsCall(DefineFixedDouble(result, d0), instr);
2462 case kMathPowHalf: {
2463 ASSERT(instr->representation().IsDouble());
2464 ASSERT(instr->value()->representation().IsDouble());
2465 LOperand* input = UseRegister(instr->value());
2466 return DefineAsRegister(new(zone()) LMathPowHalf(input));
2469 ASSERT(instr->value()->representation().IsDouble());
2470 LOperand* input = UseRegister(instr->value());
2471 if (instr->representation().IsInteger32()) {
2472 LMathRoundI* result = new(zone()) LMathRoundI(input, FixedTemp(d24));
2473 return AssignEnvironment(DefineAsRegister(result));
2475 ASSERT(instr->representation().IsDouble());
2476 LMathRoundD* result = new(zone()) LMathRoundD(input);
2477 return DefineAsRegister(result);
2481 ASSERT(instr->representation().IsDouble());
2482 ASSERT(instr->value()->representation().IsDouble());
2483 LOperand* input = UseRegisterAtStart(instr->value());
2484 return DefineAsRegister(new(zone()) LMathSqrt(input));
2487 ASSERT(instr->representation().IsInteger32());
2488 ASSERT(instr->value()->representation().IsInteger32());
2489 LOperand* input = UseRegisterAtStart(instr->value());
2490 return DefineAsRegister(new(zone()) LMathClz32(input));
2499 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
2500 // Use an index that corresponds to the location in the unoptimized frame,
2501 // which the optimized frame will subsume.
2502 int env_index = instr->index();
2503 int spill_index = 0;
2504 if (instr->environment()->is_parameter_index(env_index)) {
2505 spill_index = chunk_->GetParameterStackSlot(env_index);
2507 spill_index = env_index - instr->environment()->first_local_index();
2508 if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
2509 Abort(kTooManySpillSlotsNeededForOSR);
2513 return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
2517 LInstruction* LChunkBuilder::DoUseConst(HUseConst* instr) {
2522 LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
2523 LOperand* context = UseFixed(instr->context(), cp);
2524 // Assign object to a fixed register different from those already used in
2525 // LForInPrepareMap.
2526 LOperand* object = UseFixed(instr->enumerable(), x0);
2527 LForInPrepareMap* result = new(zone()) LForInPrepareMap(context, object);
2528 return MarkAsCall(DefineFixed(result, x0), instr, CAN_DEOPTIMIZE_EAGERLY);
2532 LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
2533 LOperand* map = UseRegister(instr->map());
2534 return AssignEnvironment(DefineAsRegister(new(zone()) LForInCacheArray(map)));
2538 LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
2539 LOperand* value = UseRegisterAtStart(instr->value());
2540 LOperand* map = UseRegister(instr->map());
2541 LOperand* temp = TempRegister();
2542 return AssignEnvironment(new(zone()) LCheckMapValue(value, map, temp));
2546 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
2547 LOperand* object = UseRegisterAtStart(instr->object());
2548 LOperand* index = UseRegisterAndClobber(instr->index());
2549 LLoadFieldByIndex* load = new(zone()) LLoadFieldByIndex(object, index);
2550 LInstruction* result = DefineSameAsFirst(load);
2551 return AssignPointerMap(result);
2555 LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
2556 LOperand* receiver = UseRegister(instr->receiver());
2557 LOperand* function = UseRegister(instr->function());
2558 LWrapReceiver* result = new(zone()) LWrapReceiver(receiver, function);
2559 return AssignEnvironment(DefineAsRegister(result));
2563 } } // namespace v8::internal