1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 #if V8_TARGET_ARCH_X64
32 #include "lithium-allocator-inl.h"
33 #include "x64/lithium-x64.h"
34 #include "x64/lithium-codegen-x64.h"
35 #include "hydrogen-osr.h"
// NOTE(review): this chunk is an elided excerpt — interior lines (closing
// braces, returns) are missing throughout, and each line carries its original
// line number; code is kept byte-identical here, with comments only added.
// Generates LXxx::CompileToNative for every concrete lithium instruction:
// each one simply dispatches to the matching LCodeGen::DoXxx visitor.
40 #define DEFINE_COMPILE(type) \
41 void L##type::CompileToNative(LCodeGen* generator) { \
42 generator->Do##type(this); \
44 LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
// Debug-only sanity check (ASSERT-based) for instructions marked as calls:
// the output must be fixed or non-register, inputs must be fixed or
// used-at-start, and temps must be fixed or non-register, because a call
// clobbers all allocatable registers.
49 void LInstruction::VerifyCall() {
50 // Call instructions can use only fixed registers as temporaries and
51 // outputs because all registers are blocked by the calling convention.
52 // Inputs operands must use a fixed register or use-at-start policy or
53 // a non-register policy.
54 ASSERT(Output() == NULL ||
55 LUnallocated::cast(Output())->HasFixedPolicy() ||
56 !LUnallocated::cast(Output())->HasRegisterPolicy());
57 for (UseIterator it(this); !it.Done(); it.Advance()) {
58 LUnallocated* operand = LUnallocated::cast(it.Current());
59 ASSERT(operand->HasFixedPolicy() ||
60 operand->IsUsedAtStart());
// Temps get the same fixed-or-non-register constraint as the output.
62 for (TempIterator it(this); !it.Done(); it.Advance()) {
63 LUnallocated* operand = LUnallocated::cast(it.Current());
64 ASSERT(operand->HasFixedPolicy() ||!operand->HasRegisterPolicy());
// Prints the instruction: mnemonic, output operand, data operands, then the
// attached environment and pointer map if present.
70 void LInstruction::PrintTo(StringStream* stream) {
71 stream->Add("%s ", this->Mnemonic());
73 PrintOutputOperandTo(stream);
77 if (HasEnvironment()) {
79 environment()->PrintTo(stream);
82 if (HasPointerMap()) {
84 pointer_map()->PrintTo(stream);
// Default data printer: the inputs, space-separated (NULL inputs handled
// in an elided branch).
89 void LInstruction::PrintDataTo(StringStream* stream) {
91 for (int i = 0; i < InputCount(); i++) {
92 if (i > 0) stream->Add(" ");
93 if (InputAt(i) == NULL) {
96 InputAt(i)->PrintTo(stream);
// Prints the result operand, if the instruction defines one.
102 void LInstruction::PrintOutputOperandTo(StringStream* stream) {
103 if (HasResult()) result()->PrintTo(stream);
// Labels print as gaps, plus a note when the label replaces a dead block.
107 void LLabel::PrintDataTo(StringStream* stream) {
108 LGap::PrintDataTo(stream);
109 LLabel* rep = replacement();
111 stream->Add(" Dead block replaced with B%d", rep->block_id());
// A gap is redundant iff all four of its parallel-move positions are
// absent or themselves redundant.
116 bool LGap::IsRedundant() const {
117 for (int i = 0; i < 4; i++) {
118 if (parallel_moves_[i] != NULL && !parallel_moves_[i]->IsRedundant()) {
// Prints whichever of the four parallel moves are present.
127 void LGap::PrintDataTo(StringStream* stream) {
128 for (int i = 0; i < 4; i++) {
130 if (parallel_moves_[i] != NULL) {
131 parallel_moves_[i]->PrintDataTo(stream);
// Mnemonic for double (untagged) arithmetic; "-d" suffix marks the
// double-typed variant.
138 const char* LArithmeticD::Mnemonic() const {
140 case Token::ADD: return "add-d";
141 case Token::SUB: return "sub-d";
142 case Token::MUL: return "mul-d";
143 case Token::DIV: return "div-d";
144 case Token::MOD: return "mod-d";
// Mnemonic for tagged (generic) arithmetic; "-t" suffix marks the tagged
// variant. Note SHL deliberately prints as "sal-t" (x86 arithmetic-shift
// spelling for the same left-shift instruction).
152 const char* LArithmeticT::Mnemonic() const {
154 case Token::ADD: return "add-t";
155 case Token::SUB: return "sub-t";
156 case Token::MUL: return "mul-t";
157 case Token::MOD: return "mod-t";
158 case Token::DIV: return "div-t";
159 case Token::BIT_AND: return "bit-and-t";
160 case Token::BIT_OR: return "bit-or-t";
161 case Token::BIT_XOR: return "bit-xor-t";
162 case Token::ROR: return "ror-t";
163 case Token::SHL: return "sal-t";
164 case Token::SAR: return "sar-t";
165 case Token::SHR: return "shr-t";
// A goto only merits a comment in the disassembly when it does not fall
// through to the next emitted block.
173 bool LGoto::HasInterestingComment(LCodeGen* gen) const {
174 return !gen->IsNextEmittedBlock(block_id());
// The PrintDataTo overrides below all follow the same pattern: print the
// condition operand(s) followed by "then B<true> else B<false>" targets.
178 void LGoto::PrintDataTo(StringStream* stream) {
179 stream->Add("B%d", block_id());
183 void LBranch::PrintDataTo(StringStream* stream) {
184 stream->Add("B%d | B%d on ", true_block_id(), false_block_id());
185 value()->PrintTo(stream);
189 void LCompareNumericAndBranch::PrintDataTo(StringStream* stream) {
191 left()->PrintTo(stream);
192 stream->Add(" %s ", Token::String(op()));
193 right()->PrintTo(stream);
194 stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
198 void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
199 stream->Add("if is_object(");
200 value()->PrintTo(stream);
201 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
205 void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
206 stream->Add("if is_string(");
207 value()->PrintTo(stream);
208 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
212 void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
213 stream->Add("if is_smi(");
214 value()->PrintTo(stream);
215 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
219 void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
220 stream->Add("if is_undetectable(");
221 value()->PrintTo(stream);
222 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
// NOTE(review): left and right are printed with no separator between them.
226 void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
227 stream->Add("if string_compare(");
228 left()->PrintTo(stream);
229 right()->PrintTo(stream);
230 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
234 void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
235 stream->Add("if has_instance_type(");
236 value()->PrintTo(stream);
237 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
241 void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) {
242 stream->Add("if has_cached_array_index(");
243 value()->PrintTo(stream);
244 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
// "%o" is StringStream's object-print directive for the class-name object.
248 void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
249 stream->Add("if class_of_test(");
250 value()->PrintTo(stream);
251 stream->Add(", \"%o\") then B%d else B%d",
252 *hydrogen()->class_name(),
// Prints the typeof literal as a C string for comparison display.
258 void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
259 stream->Add("if typeof ");
260 value()->PrintTo(stream);
261 stream->Add(" == \"%s\" then B%d else B%d",
262 hydrogen()->type_literal()->ToCString().get(),
263 true_block_id(), false_block_id());
// PrintDataTo overrides for call/context/argument instructions; each prints
// its operands in source order, with arity where the instruction has one.
267 void LStoreCodeEntry::PrintDataTo(StringStream* stream) {
269 function()->PrintTo(stream);
270 stream->Add(".code_entry = ");
271 code_object()->PrintTo(stream);
275 void LInnerAllocatedObject::PrintDataTo(StringStream* stream) {
277 base_object()->PrintTo(stream);
279 offset()->PrintTo(stream);
283 void LCallJSFunction::PrintDataTo(StringStream* stream) {
285 function()->PrintTo(stream);
286 stream->Add("#%d / ", arity());
290 void LCallWithDescriptor::PrintDataTo(StringStream* stream) {
291 for (int i = 0; i < InputCount(); i++) {
292 InputAt(i)->PrintTo(stream);
295 stream->Add("#%d / ", arity());
// Context slots print as context[slot_index], with "<- value" for stores.
299 void LLoadContextSlot::PrintDataTo(StringStream* stream) {
300 context()->PrintTo(stream);
301 stream->Add("[%d]", slot_index());
305 void LStoreContextSlot::PrintDataTo(StringStream* stream) {
306 context()->PrintTo(stream);
307 stream->Add("[%d] <- ", slot_index());
308 value()->PrintTo(stream);
312 void LInvokeFunction::PrintDataTo(StringStream* stream) {
314 function()->PrintTo(stream);
315 stream->Add(" #%d / ", arity());
319 void LCallNew::PrintDataTo(StringStream* stream) {
321 constructor()->PrintTo(stream);
322 stream->Add(" #%d / ", arity());
// CallNewArray additionally prints the elements kind of the array to build.
326 void LCallNewArray::PrintDataTo(StringStream* stream) {
328 constructor()->PrintTo(stream);
329 stream->Add(" #%d / ", arity());
330 ElementsKind kind = hydrogen()->elements_kind();
331 stream->Add(" (%s) ", ElementsKindToString(kind));
335 void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
336 arguments()->PrintTo(stream);
338 stream->Add(" length ");
339 length()->PrintTo(stream);
341 stream->Add(" index ");
342 index()->PrintTo(stream);
// Allocates the next spill slot index; on x64 every slot is the same size,
// so the register kind is ignored here.
346 int LPlatformChunk::GetNextSpillIndex(RegisterKind kind) {
347 return spill_slot_count_++;
// Wraps the next spill index in a typed stack-slot operand: double slots
// for DOUBLE_REGISTERS, plain slots for GENERAL_REGISTERS.
351 LOperand* LPlatformChunk::GetNextSpillSlot(RegisterKind kind) {
352 // All stack slots are Double stack slots on x64.
353 // Alternatively, at some point, start using half-size
354 // stack slots for int32 values.
355 int index = GetNextSpillIndex(kind);
356 if (kind == DOUBLE_REGISTERS) {
357 return LDoubleStackSlot::Create(index, zone());
359 ASSERT(kind == GENERAL_REGISTERS);
360 return LStackSlot::Create(index, zone());
// PrintDataTo overrides for field and keyed loads/stores.
365 void LStoreNamedField::PrintDataTo(StringStream* stream) {
366 object()->PrintTo(stream);
367 hydrogen()->access().PrintTo(stream);
369 value()->PrintTo(stream);
373 void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
374 object()->PrintTo(stream);
376 stream->Add(String::cast(*name())->ToCString().get());
378 value()->PrintTo(stream);
// Dehoisted keyed accesses print the constant offset folded into the key.
382 void LLoadKeyed::PrintDataTo(StringStream* stream) {
383 elements()->PrintTo(stream);
385 key()->PrintTo(stream);
386 if (hydrogen()->IsDehoisted()) {
387 stream->Add(" + %d]", additional_index());
394 void LStoreKeyed::PrintDataTo(StringStream* stream) {
395 elements()->PrintTo(stream);
397 key()->PrintTo(stream);
398 if (hydrogen()->IsDehoisted()) {
399 stream->Add(" + %d] <-", additional_index());
401 stream->Add("] <- ");
// A NULL value operand is only legal for a constant hole store of a
// double-represented value (asserted below); it prints as the NaN hole.
404 if (value() == NULL) {
405 ASSERT(hydrogen()->IsConstantHoleStore() &&
406 hydrogen()->value()->representation().IsDouble());
407 stream->Add("<the hole(nan)>");
409 value()->PrintTo(stream);
414 void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
415 object()->PrintTo(stream);
417 key()->PrintTo(stream);
418 stream->Add("] <- ");
419 value()->PrintTo(stream);
// Prints the source and destination map pointers of the transition.
423 void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
424 object()->PrintTo(stream);
425 stream->Add(" %p -> %p", *original_map(), *transitioned_map());
// Entry point of instruction selection: creates the LPlatformChunk,
// reserves OSR frame slots if needed, then lowers every basic block in
// order, bailing out (returning NULL) if any block aborts.
429 LPlatformChunk* LChunkBuilder::Build() {
431 chunk_ = new(zone()) LPlatformChunk(info(), graph());
432 LPhase phase("L_Building chunk", chunk_);
435 // If compiling for OSR, reserve space for the unoptimized frame,
436 // which will be subsumed into this frame.
437 if (graph()->has_osr()) {
438 for (int i = graph()->osr()->UnoptimizedFrameSlots(); i > 0; i--) {
439 chunk_->GetNextSpillIndex(GENERAL_REGISTERS);
// Each block is lowered with a peek at its successor so control
// instructions can elide jumps to the next emitted block.
443 const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
444 for (int i = 0; i < blocks->length(); i++) {
445 HBasicBlock* next = NULL;
446 if (i < blocks->length() - 1) next = blocks->at(i + 1);
447 DoBasicBlock(blocks->at(i), next);
448 if (is_aborted()) return NULL;
// Records the bailout reason on the CompilationInfo; the abort status
// itself is propagated elsewhere (not visible in this excerpt).
455 void LCodeGen::Abort(BailoutReason reason) {
456 info()->set_bailout_reason(reason);
// ---- Operand-use factories -------------------------------------------------
// These helpers wrap an HValue in an LUnallocated operand carrying the
// register-allocation policy the instruction needs (fixed register, any
// register, constant, temp, used-at-start, ...).
461 LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
462 return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
463 Register::ToAllocationIndex(reg));
467 LUnallocated* LChunkBuilder::ToUnallocated(XMMRegister reg) {
468 return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
469 XMMRegister::ToAllocationIndex(reg));
// Value must live in a specific general-purpose / XMM register.
473 LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) {
474 return Use(value, ToUnallocated(fixed_register));
478 LOperand* LChunkBuilder::UseFixedDouble(HValue* value, XMMRegister reg) {
479 return Use(value, ToUnallocated(reg));
// Value must be in some register (allocator's choice).
483 LOperand* LChunkBuilder::UseRegister(HValue* value) {
484 return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
// Same, but the register is only needed at the start of the instruction,
// so it may be reused for the output.
488 LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
490 new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
491 LUnallocated::USED_AT_START));
// Writable register: the instruction may clobber it.
495 LOperand* LChunkBuilder::UseTempRegister(HValue* value) {
496 return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));
500 LOperand* LChunkBuilder::UseTempRegisterOrConstant(HValue* value) {
501 return value->IsConstant()
502 ? chunk_->DefineConstantOperand(HConstant::cast(value))
503 : UseTempRegister(value);
// No policy: register, stack slot, or constant are all acceptable.
507 LOperand* LChunkBuilder::Use(HValue* value) {
508 return Use(value, new(zone()) LUnallocated(LUnallocated::NONE));
512 LOperand* LChunkBuilder::UseAtStart(HValue* value) {
513 return Use(value, new(zone()) LUnallocated(LUnallocated::NONE,
514 LUnallocated::USED_AT_START));
// The *OrConstant variants short-circuit to a constant operand when the
// value is an HConstant (elided else-branches pick the non-constant policy).
518 LOperand* LChunkBuilder::UseOrConstant(HValue* value) {
519 return value->IsConstant()
520 ? chunk_->DefineConstantOperand(HConstant::cast(value))
525 LOperand* LChunkBuilder::UseOrConstantAtStart(HValue* value) {
526 return value->IsConstant()
527 ? chunk_->DefineConstantOperand(HConstant::cast(value))
532 LOperand* LChunkBuilder::UseRegisterOrConstant(HValue* value) {
533 return value->IsConstant()
534 ? chunk_->DefineConstantOperand(HConstant::cast(value))
535 : UseRegister(value);
539 LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
540 return value->IsConstant()
541 ? chunk_->DefineConstantOperand(HConstant::cast(value))
542 : UseRegisterAtStart(value);
546 LOperand* LChunkBuilder::UseConstant(HValue* value) {
547 return chunk_->DefineConstantOperand(HConstant::cast(value));
551 LOperand* LChunkBuilder::UseAny(HValue* value) {
552 return value->IsConstant()
553 ? chunk_->DefineConstantOperand(HConstant::cast(value))
554 : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
// Core Use: lazily lowers emit-at-uses values first, then tags the operand
// with the value's virtual register.
558 LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
559 if (value->EmitAtUses()) {
560 HInstruction* instr = HInstruction::cast(value);
561 VisitInstruction(instr);
563 operand->set_virtual_register(value->id());
// ---- Result-definition helpers ---------------------------------------------
// Attach a result operand (with the given allocation policy) to a
// single-result instruction, tagged with the current hydrogen value's id.
568 LInstruction* LChunkBuilder::Define(LTemplateResultInstruction<1>* instr,
569 LUnallocated* result) {
570 result->set_virtual_register(current_instruction_->id());
571 instr->set_result(result);
// Result in any register.
576 LInstruction* LChunkBuilder::DefineAsRegister(
577 LTemplateResultInstruction<1>* instr) {
579 new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
// Result in a specific stack slot.
583 LInstruction* LChunkBuilder::DefineAsSpilled(
584 LTemplateResultInstruction<1>* instr,
587 new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));
// Result shares the register of the first input (x64 two-operand form).
591 LInstruction* LChunkBuilder::DefineSameAsFirst(
592 LTemplateResultInstruction<1>* instr) {
594 new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
// Result in a specific general-purpose / XMM register.
598 LInstruction* LChunkBuilder::DefineFixed(LTemplateResultInstruction<1>* instr,
600 return Define(instr, ToUnallocated(reg));
604 LInstruction* LChunkBuilder::DefineFixedDouble(
605 LTemplateResultInstruction<1>* instr,
607 return Define(instr, ToUnallocated(reg));
// Snapshots the current block's last hydrogen environment onto the
// instruction so it can deoptimize.
611 LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
612 HEnvironment* hydrogen_env = current_block_->last_environment();
613 int argument_index_accumulator = 0;
614 ZoneList<HValue*> objects_to_materialize(0, zone());
615 instr->set_environment(CreateEnvironment(hydrogen_env,
616 &argument_index_accumulator,
617 &objects_to_materialize));
// Marks an instruction as a call: assigns a pointer map, records a pending
// lazy-deopt environment when the hydrogen instruction has observable side
// effects (taken from the following HSimulate), and attaches an eager-deopt
// environment when required.
622 LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
623 HInstruction* hinstr,
624 CanDeoptimize can_deoptimize) {
625 info()->MarkAsNonDeferredCalling();
631 instr = AssignPointerMap(instr);
633 if (hinstr->HasObservableSideEffects()) {
634 ASSERT(hinstr->next()->IsSimulate());
635 HSimulate* sim = HSimulate::cast(hinstr->next());
636 ASSERT(instruction_pending_deoptimization_environment_ == NULL);
637 ASSERT(pending_deoptimization_ast_id_.IsNone());
638 instruction_pending_deoptimization_environment_ = instr;
639 pending_deoptimization_ast_id_ = sim->ast_id();
642 // If instruction does not have side-effects lazy deoptimization
643 // after the call will try to deoptimize to the point before the call.
644 // Thus we still need to attach environment to this call even if
645 // call sequence can not deoptimize eagerly.
646 bool needs_environment =
647 (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) ||
648 !hinstr->HasObservableSideEffects();
649 if (needs_environment && !instr->HasEnvironment()) {
650 instr = AssignEnvironment(instr);
// Attaches a fresh (empty) pointer map; an instruction may have only one.
657 LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
658 ASSERT(!instr->HasPointerMap());
659 instr->set_pointer_map(new(zone()) LPointerMap(zone()));
// Creates an unconstrained temp register operand with a fresh virtual
// register; aborts compilation when virtual registers are exhausted.
664 LUnallocated* LChunkBuilder::TempRegister() {
665 LUnallocated* operand =
666 new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
667 int vreg = allocator_->GetVirtualRegister();
668 if (!allocator_->AllocationOk()) {
669 Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
672 operand->set_virtual_register(vreg);
// Fixed-register temps (GP and XMM variants).
677 LOperand* LChunkBuilder::FixedTemp(Register reg) {
678 LUnallocated* operand = ToUnallocated(reg);
679 ASSERT(operand->HasFixedPolicy());
684 LOperand* LChunkBuilder::FixedTemp(XMMRegister reg) {
685 LUnallocated* operand = ToUnallocated(reg);
686 ASSERT(operand->HasFixedPolicy());
// Block entries lower to labels.
691 LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
692 return new(zone()) LLabel(instr->block());
// A dummy use keeps the value alive for the allocator.
696 LInstruction* LChunkBuilder::DoDummyUse(HDummyUse* instr) {
697 return DefineAsRegister(new(zone()) LDummyUse(UseAny(instr->value())));
// Body not visible in this excerpt — presumably emits nothing; confirm
// against the full source.
701 LInstruction* LChunkBuilder::DoEnvironmentMarker(HEnvironmentMarker* instr) {
// Deoptimize needs only an environment to bail out to.
707 LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
708 return AssignEnvironment(new(zone()) LDeoptimize);
// Lowers a shift. Smi/int32 shifts take the left operand at-start and the
// shift count either as a constant (masked to 5 bits) or fixed in rcx (the
// x64 variable-shift register); tagged shifts fall through to the generic
// binary-op path.
712 LInstruction* LChunkBuilder::DoShift(Token::Value op,
713 HBitwiseBinaryOperation* instr) {
714 if (instr->representation().IsSmiOrInteger32()) {
715 ASSERT(instr->left()->representation().Equals(instr->representation()));
716 ASSERT(instr->right()->representation().Equals(instr->representation()));
717 LOperand* left = UseRegisterAtStart(instr->left());
719 HValue* right_value = instr->right();
720 LOperand* right = NULL;
721 int constant_value = 0;
722 if (right_value->IsConstant()) {
723 HConstant* constant = HConstant::cast(right_value);
724 right = chunk_->DefineConstantOperand(constant);
725 constant_value = constant->Integer32Value() & 0x1f;
727 right = UseFixed(right_value, rcx);
730 // Shift operations can only deoptimize if we do a logical shift by 0 and
731 // the result cannot be truncated to int32.
732 bool does_deopt = false;
733 if (op == Token::SHR && constant_value == 0) {
734 if (FLAG_opt_safe_uint32_operations) {
735 does_deopt = !instr->CheckFlag(HInstruction::kUint32);
737 does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToInt32);
741 LInstruction* result =
742 DefineSameAsFirst(new(zone()) LShiftI(op, left, right, does_deopt));
743 return does_deopt ? AssignEnvironment(result) : result;
745 return DoArithmeticT(op, instr);
// Lowers double arithmetic. MOD is a runtime call (right fixed in xmm1,
// result same-as-first); other ops use registers at-start with the result
// aliasing the first input (x64 two-operand SSE form).
750 LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
751 HArithmeticBinaryOperation* instr) {
752 ASSERT(instr->representation().IsDouble());
753 ASSERT(instr->left()->representation().IsDouble());
754 ASSERT(instr->right()->representation().IsDouble());
755 if (op == Token::MOD) {
756 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
757 LOperand* right = UseFixedDouble(instr->BetterRightOperand(), xmm1);
758 LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
759 return MarkAsCall(DefineSameAsFirst(result), instr);
761 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
762 LOperand* right = UseRegisterAtStart(instr->BetterRightOperand());
763 LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
764 return DefineSameAsFirst(result);
// Lowers tagged (generic) binary ops as a stub call: context in rsi, left
// in rdx, right in rax, result fixed in rax — the generic binary-op stub's
// calling convention.
769 LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
770 HBinaryOperation* instr) {
771 HValue* left = instr->left();
772 HValue* right = instr->right();
773 ASSERT(left->representation().IsTagged());
774 ASSERT(right->representation().IsTagged());
775 LOperand* context = UseFixed(instr->context(), rsi);
776 LOperand* left_operand = UseFixed(left, rdx);
777 LOperand* right_operand = UseFixed(right, rax);
778 LArithmeticT* result =
779 new(zone()) LArithmeticT(op, context, left_operand, right_operand);
780 return MarkAsCall(DefineFixed(result, rax), instr);
// Lowers one basic block: establishes the block's hydrogen environment
// (start env / copied predecessor env / phi-merged join env), then visits
// every instruction that is not emitted lazily at its uses, and records the
// block's first/last lithium instruction indices.
784 void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) {
785 ASSERT(is_building());
786 current_block_ = block;
787 next_block_ = next_block;
788 if (block->IsStartBlock()) {
789 block->UpdateEnvironment(graph_->start_environment());
791 } else if (block->predecessors()->length() == 1) {
792 // We have a single predecessor => copy environment and outgoing
793 // argument count from the predecessor.
794 ASSERT(block->phis()->length() == 0);
795 HBasicBlock* pred = block->predecessors()->at(0);
796 HEnvironment* last_environment = pred->last_environment();
797 ASSERT(last_environment != NULL);
798 // Only copy the environment, if it is later used again.
799 if (pred->end()->SecondSuccessor() == NULL) {
800 ASSERT(pred->end()->FirstSuccessor() == block);
// Copy when either successor appears later than this block, i.e. the
// predecessor's environment is still needed afterwards.
802 if (pred->end()->FirstSuccessor()->block_id() > block->block_id() ||
803 pred->end()->SecondSuccessor()->block_id() > block->block_id()) {
804 last_environment = last_environment->Copy();
807 block->UpdateEnvironment(last_environment);
808 ASSERT(pred->argument_count() >= 0);
809 argument_count_ = pred->argument_count();
811 // We are at a state join => process phis.
812 HBasicBlock* pred = block->predecessors()->at(0);
813 // No need to copy the environment, it cannot be used later.
814 HEnvironment* last_environment = pred->last_environment();
815 for (int i = 0; i < block->phis()->length(); ++i) {
816 HPhi* phi = block->phis()->at(i);
817 if (phi->HasMergedIndex()) {
818 last_environment->SetValueAt(phi->merged_index(), phi);
// Deleted phis leave undefined in their environment slots.
821 for (int i = 0; i < block->deleted_phis()->length(); ++i) {
822 if (block->deleted_phis()->at(i) < last_environment->length()) {
823 last_environment->SetValueAt(block->deleted_phis()->at(i),
824 graph_->GetConstantUndefined());
827 block->UpdateEnvironment(last_environment);
828 // Pick up the outgoing argument count of one of the predecessors.
829 argument_count_ = pred->argument_count();
831 HInstruction* current = block->first();
832 int start = chunk_->instructions()->length();
833 while (current != NULL && !is_aborted()) {
834 // Code for constants in registers is generated lazily.
835 if (!current->EmitAtUses()) {
836 VisitInstruction(current);
838 current = current->next();
840 int end = chunk_->instructions()->length() - 1;
842 block->set_first_instruction_index(start);
843 block->set_last_instruction_index(end);
845 block->set_argument_count(argument_count_);
847 current_block_ = NULL;
// Lowers a single hydrogen instruction: replaces dead instructions with
// dummy uses, otherwise dispatches to CompileToLithium; then tracks the
// outgoing argument count, verifies allocator invariants, and appends the
// lithium instruction (plus stress-mode pointer maps/environments).
851 void LChunkBuilder::VisitInstruction(HInstruction* current) {
852 HInstruction* old_current = current_instruction_;
853 current_instruction_ = current;
855 LInstruction* instr = NULL;
856 if (current->CanReplaceWithDummyUses()) {
857 if (current->OperandCount() == 0) {
858 instr = DefineAsRegister(new(zone()) LDummy());
860 instr = DefineAsRegister(new(zone())
861 LDummyUse(UseAny(current->OperandAt(0))));
// Remaining operands get standalone dummy uses appended directly.
863 for (int i = 1; i < current->OperandCount(); ++i) {
864 LInstruction* dummy =
865 new(zone()) LDummyUse(UseAny(current->OperandAt(i)));
866 dummy->set_hydrogen_value(current);
867 chunk_->AddInstruction(dummy, current_block_);
870 instr = current->CompileToLithium(this);
873 argument_count_ += current->argument_delta();
874 ASSERT(argument_count_ >= 0);
877 // Associate the hydrogen instruction first, since we may need it for
878 // the ClobbersRegisters() or ClobbersDoubleRegisters() calls below.
879 instr->set_hydrogen_value(current);
882 // Make sure that the lithium instruction has either no fixed register
883 // constraints in temps or the result OR no uses that are only used at
884 // start. If this invariant doesn't hold, the register allocator can decide
885 // to insert a split of a range immediately before the instruction due to an
886 // already allocated register needing to be used for the instruction's fixed
887 // register constraint. In this case, The register allocator won't see an
888 // interference between the split child and the use-at-start (it would if
889 // the it was just a plain use), so it is free to move the split child into
890 // the same register that is used for the use-at-start.
891 // See https://code.google.com/p/chromium/issues/detail?id=201590
892 if (!(instr->ClobbersRegisters() && instr->ClobbersDoubleRegisters())) {
894 int used_at_start = 0;
895 for (UseIterator it(instr); !it.Done(); it.Advance()) {
896 LUnallocated* operand = LUnallocated::cast(it.Current());
897 if (operand->IsUsedAtStart()) ++used_at_start;
899 if (instr->Output() != NULL) {
900 if (LUnallocated::cast(instr->Output())->HasFixedPolicy()) ++fixed;
902 for (TempIterator it(instr); !it.Done(); it.Advance()) {
903 LUnallocated* operand = LUnallocated::cast(it.Current());
904 if (operand->HasFixedPolicy()) ++fixed;
906 ASSERT(fixed == 0 || used_at_start == 0);
// Stress flags force extra pointer maps / environments for testing.
910 if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) {
911 instr = AssignPointerMap(instr);
913 if (FLAG_stress_environments && !instr->HasEnvironment()) {
914 instr = AssignEnvironment(instr);
916 chunk_->AddInstruction(instr, current_block_);
918 current_instruction_ = old_current;
// Unconditional jump to the first successor.
922 LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
923 return new(zone()) LGoto(instr->FirstSuccessor());
927 LInstruction* LChunkBuilder::DoDebugBreak(HDebugBreak* instr) {
928 return new(zone()) LDebugBreak();
// Lowers a branch; may collapse into a goto when the control instruction
// can be elided. Tagged inputs that aren't known smi/boolean (and aren't
// handled generically) need an environment to deoptimize.
932 LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
933 LInstruction* goto_instr = CheckElideControlInstruction(instr);
934 if (goto_instr != NULL) return goto_instr;
936 HValue* value = instr->value();
937 LBranch* result = new(zone()) LBranch(UseRegister(value));
938 // Tagged values that are not known smis or booleans require a
939 // deoptimization environment. If the instruction is generic no
940 // environment is needed since all cases are handled.
941 ToBooleanStub::Types expected = instr->expected_input_types();
942 Representation rep = value->representation();
943 HType type = value->type();
944 if (rep.IsTagged() && !type.IsSmi() && !type.IsBoolean() &&
945 !expected.IsGeneric()) {
946 return AssignEnvironment(result);
952 LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
953 ASSERT(instr->value()->representation().IsTagged());
954 LOperand* value = UseRegisterAtStart(instr->value());
955 return new(zone()) LCmpMapAndBranch(value);
// Arguments accessors need a frame.
959 LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* length) {
960 info()->MarkAsRequiresFrame();
961 return DefineAsRegister(new(zone()) LArgumentsLength(Use(length->value())));
965 LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
966 info()->MarkAsRequiresFrame();
967 return DefineAsRegister(new(zone()) LArgumentsElements);
// instanceof is a stub call: left in rax, right in rdx, context in rsi.
971 LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
972 LOperand* left = UseFixed(instr->left(), rax);
973 LOperand* right = UseFixed(instr->right(), rdx);
974 LOperand* context = UseFixed(instr->context(), rsi);
975 LInstanceOf* result = new(zone()) LInstanceOf(context, left, right);
976 return MarkAsCall(DefineFixed(result, rax), instr);
// instanceof against a known global: stub call with context in rsi and
// left in rax (remaining operand elided in this excerpt).
980 LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
981 HInstanceOfKnownGlobal* instr) {
982 LInstanceOfKnownGlobal* result =
983 new(zone()) LInstanceOfKnownGlobal(UseFixed(instr->context(), rsi),
984 UseFixed(instr->left(), rax),
986 return MarkAsCall(DefineFixed(result, rax), instr);
// Receiver wrapping may deoptimize; result aliases the receiver register.
990 LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
991 LOperand* receiver = UseRegister(instr->receiver());
992 LOperand* function = UseRegisterAtStart(instr->function());
993 LWrapReceiver* result = new(zone()) LWrapReceiver(receiver, function);
994 return AssignEnvironment(DefineSameAsFirst(result));
// Function.prototype.apply: all operands in fixed registers, can
// deoptimize eagerly.
998 LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
999 LOperand* function = UseFixed(instr->function(), rdi);
1000 LOperand* receiver = UseFixed(instr->receiver(), rax);
1001 LOperand* length = UseFixed(instr->length(), rbx);
1002 LOperand* elements = UseFixed(instr->elements(), rcx);
1003 LApplyArguments* result = new(zone()) LApplyArguments(function,
1007 return MarkAsCall(DefineFixed(result, rax), instr, CAN_DEOPTIMIZE_EAGERLY);
1011 LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
1012 LOperand* argument = UseOrConstant(instr->argument());
1013 return new(zone()) LPushArgument(argument);
// code_object is a temp because the store may clobber it.
1017 LInstruction* LChunkBuilder::DoStoreCodeEntry(
1018 HStoreCodeEntry* store_code_entry) {
1019 LOperand* function = UseRegister(store_code_entry->function());
1020 LOperand* code_object = UseTempRegister(store_code_entry->code_object());
1021 return new(zone()) LStoreCodeEntry(function, code_object);
1025 LInstruction* LChunkBuilder::DoInnerAllocatedObject(
1026 HInnerAllocatedObject* instr) {
1027 LOperand* base_object = UseRegisterAtStart(instr->base_object());
1028 LOperand* offset = UseRegisterOrConstantAtStart(instr->offset());
1029 return DefineAsRegister(
1030 new(zone()) LInnerAllocatedObject(base_object, offset));
// Unused this-function references produce no code (elided branch).
1034 LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
1035 return instr->HasNoUses()
1037 : DefineAsRegister(new(zone()) LThisFunction);
// Context: stubs get it fixed in rsi; regular code gets any register.
1041 LInstruction* LChunkBuilder::DoContext(HContext* instr) {
1042 if (instr->HasNoUses()) return NULL;
1044 if (info()->IsStub()) {
1045 return DefineFixed(new(zone()) LContext, rsi);
1048 return DefineAsRegister(new(zone()) LContext);
1052 LInstruction* LChunkBuilder::DoOuterContext(HOuterContext* instr) {
1053 LOperand* context = UseRegisterAtStart(instr->value());
1054 return DefineAsRegister(new(zone()) LOuterContext(context));
1058 LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
1059 LOperand* context = UseFixed(instr->context(), rsi);
1060 return MarkAsCall(new(zone()) LDeclareGlobals(context), instr);
1064 LInstruction* LChunkBuilder::DoGlobalObject(HGlobalObject* instr) {
1065 LOperand* context = UseRegisterAtStart(instr->value());
1066 return DefineAsRegister(new(zone()) LGlobalObject(context));
1070 LInstruction* LChunkBuilder::DoGlobalReceiver(HGlobalReceiver* instr) {
1071 LOperand* global_object = UseRegisterAtStart(instr->value());
1072 return DefineAsRegister(new(zone()) LGlobalReceiver(global_object));
// Direct JS call: callee fixed in rdi, result in rax.
1076 LInstruction* LChunkBuilder::DoCallJSFunction(
1077 HCallJSFunction* instr) {
1078 LOperand* function = UseFixed(instr->function(), rdi);
1080 LCallJSFunction* result = new(zone()) LCallJSFunction(function);
1082 return MarkAsCall(DefineFixed(result, rax), instr);
// Descriptor-driven call: the target plus each remaining operand pinned to
// the register the CallInterfaceDescriptor prescribes for that parameter.
1086 LInstruction* LChunkBuilder::DoCallWithDescriptor(
1087 HCallWithDescriptor* instr) {
1088 const CallInterfaceDescriptor* descriptor = instr->descriptor();
1090 LOperand* target = UseRegisterOrConstantAtStart(instr->target());
1091 ZoneList<LOperand*> ops(instr->OperandCount(), zone());
1092 ops.Add(target, zone());
1093 for (int i = 1; i < instr->OperandCount(); i++) {
1094 LOperand* op = UseFixed(instr->OperandAt(i),
1095 descriptor->GetParameterRegister(i - 1));
1096 ops.Add(op, zone());
1099 LCallWithDescriptor* result = new(zone()) LCallWithDescriptor(
1100 descriptor, ops, zone());
1101 return MarkAsCall(DefineFixed(result, rax), instr);
1105 LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
1106 LOperand* context = UseFixed(instr->context(), rsi);
1107 LOperand* function = UseFixed(instr->function(), rdi);
1108 LInvokeFunction* result = new(zone()) LInvokeFunction(context, function);
1109 return MarkAsCall(DefineFixed(result, rax), instr, CANNOT_DEOPTIMIZE_EAGERLY);
// Dispatches a unary math operation to its dedicated lowering routine.
1113 LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
1114 switch (instr->op()) {
1115 case kMathFloor: return DoMathFloor(instr);
1116 case kMathRound: return DoMathRound(instr);
1117 case kMathAbs: return DoMathAbs(instr);
1118 case kMathLog: return DoMathLog(instr);
1119 case kMathExp: return DoMathExp(instr);
1120 case kMathSqrt: return DoMathSqrt(instr);
1121 case kMathPowHalf: return DoMathPowHalf(instr);
// Floor and round may deoptimize, hence the environment.
1129 LInstruction* LChunkBuilder::DoMathFloor(HUnaryMathOperation* instr) {
1130 LOperand* input = UseRegisterAtStart(instr->value());
1131 LMathFloor* result = new(zone()) LMathFloor(input);
1132 return AssignEnvironment(DefineAsRegister(result));
1136 LInstruction* LChunkBuilder::DoMathRound(HUnaryMathOperation* instr) {
1137 LOperand* input = UseRegisterAtStart(instr->value());
1138 LMathRound* result = new(zone()) LMathRound(input);
1139 return AssignEnvironment(DefineAsRegister(result));
// Abs needs a pointer map and environment (it may call out / deoptimize).
1143 LInstruction* LChunkBuilder::DoMathAbs(HUnaryMathOperation* instr) {
1144 LOperand* context = UseAny(instr->context());
1145 LOperand* input = UseRegisterAtStart(instr->value());
1146 LMathAbs* result = new(zone()) LMathAbs(context, input);
1147 return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
// Log is a runtime call on doubles.
1151 LInstruction* LChunkBuilder::DoMathLog(HUnaryMathOperation* instr) {
1152 ASSERT(instr->representation().IsDouble());
1153 ASSERT(instr->value()->representation().IsDouble());
1154 LOperand* input = UseRegisterAtStart(instr->value());
1155 return MarkAsCall(DefineSameAsFirst(new(zone()) LMathLog(input)), instr);
// Exp is computed inline with two scratch registers; input is clobbered.
1159 LInstruction* LChunkBuilder::DoMathExp(HUnaryMathOperation* instr) {
1160 ASSERT(instr->representation().IsDouble());
1161 ASSERT(instr->value()->representation().IsDouble());
1162 LOperand* value = UseTempRegister(instr->value());
1163 LOperand* temp1 = TempRegister();
1164 LOperand* temp2 = TempRegister();
1165 LMathExp* result = new(zone()) LMathExp(value, temp1, temp2);
1166 return DefineAsRegister(result);
1170 LInstruction* LChunkBuilder::DoMathSqrt(HUnaryMathOperation* instr) {
1171 LOperand* input = UseRegisterAtStart(instr->value());
1172 LMathSqrt* result = new(zone()) LMathSqrt(input);
1173 return DefineSameAsFirst(result);
1177 LInstruction* LChunkBuilder::DoMathPowHalf(HUnaryMathOperation* instr) {
1178 LOperand* input = UseRegisterAtStart(instr->value());
1179 LMathPowHalf* result = new(zone()) LMathPowHalf(input);
1180 return DefineSameAsFirst(result);
1184 LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
1185 LOperand* context = UseFixed(instr->context(), rsi);
1186 LOperand* constructor = UseFixed(instr->constructor(), rdi);
1187 LCallNew* result = new(zone()) LCallNew(context, constructor);
1188 return MarkAsCall(DefineFixed(result, rax), instr);
1192 LInstruction* LChunkBuilder::DoCallNewArray(HCallNewArray* instr) {
1193 LOperand* context = UseFixed(instr->context(), rsi);
1194 LOperand* constructor = UseFixed(instr->constructor(), rdi);
1195 LCallNewArray* result = new(zone()) LCallNewArray(context, constructor);
1196 return MarkAsCall(DefineFixed(result, rax), instr);
1200 LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
1201 LOperand* context = UseFixed(instr->context(), rsi);
1202 LOperand* function = UseFixed(instr->function(), rdi);
1203 LCallFunction* call = new(zone()) LCallFunction(context, function);
1204 LInstruction* result = DefineFixed(call, rax);
1205 if (instr->IsTailCall()) return result;
1206 return MarkAsCall(result, instr);
1210 LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
1211 LOperand* context = UseFixed(instr->context(), rsi);
1212 LCallRuntime* result = new(zone()) LCallRuntime(context);
1213 return MarkAsCall(DefineFixed(result, rax), instr);
1217 LInstruction* LChunkBuilder::DoRor(HRor* instr) {
1218 return DoShift(Token::ROR, instr);
1222 LInstruction* LChunkBuilder::DoShr(HShr* instr) {
1223 return DoShift(Token::SHR, instr);
1227 LInstruction* LChunkBuilder::DoSar(HSar* instr) {
1228 return DoShift(Token::SAR, instr);
1232 LInstruction* LChunkBuilder::DoShl(HShl* instr) {
1233 return DoShift(Token::SHL, instr);
1237 LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
1238 if (instr->representation().IsSmiOrInteger32()) {
1239 ASSERT(instr->left()->representation().Equals(instr->representation()));
1240 ASSERT(instr->right()->representation().Equals(instr->representation()));
1241 ASSERT(instr->CheckFlag(HValue::kTruncatingToInt32));
1243 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
1244 LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand());
1245 return DefineSameAsFirst(new(zone()) LBitI(left, right));
1247 return DoArithmeticT(instr->op(), instr);
1252 LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
1253 if (instr->representation().IsSmiOrInteger32()) {
1254 ASSERT(instr->left()->representation().Equals(instr->representation()));
1255 ASSERT(instr->right()->representation().Equals(instr->representation()));
1256 if (instr->RightIsPowerOf2()) {
1257 ASSERT(!instr->CheckFlag(HValue::kCanBeDivByZero));
1258 LOperand* value = UseRegisterAtStart(instr->left());
1260 new(zone()) LDivI(value, UseOrConstant(instr->right()), NULL);
1261 return AssignEnvironment(DefineSameAsFirst(div));
1263 // The temporary operand is necessary to ensure that right is not allocated
1265 LOperand* temp = FixedTemp(rdx);
1266 LOperand* dividend = UseFixed(instr->left(), rax);
1267 LOperand* divisor = UseRegister(instr->right());
1268 LDivI* result = new(zone()) LDivI(dividend, divisor, temp);
1269 return AssignEnvironment(DefineFixed(result, rax));
1270 } else if (instr->representation().IsDouble()) {
1271 return DoArithmeticD(Token::DIV, instr);
1273 return DoArithmeticT(Token::DIV, instr);
1278 LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
1279 HValue* right = instr->right();
1280 if (!right->IsConstant()) {
1281 ASSERT(right->representation().IsInteger32());
1282 // The temporary operand is necessary to ensure that right is not allocated
1284 LOperand* temp = FixedTemp(rdx);
1285 LOperand* dividend = UseFixed(instr->left(), rax);
1286 LOperand* divisor = UseRegister(instr->right());
1287 LDivI* flooring_div = new(zone()) LDivI(dividend, divisor, temp);
1288 return AssignEnvironment(DefineFixed(flooring_div, rax));
1291 ASSERT(right->IsConstant() && HConstant::cast(right)->HasInteger32Value());
1292 LOperand* divisor = chunk_->DefineConstantOperand(HConstant::cast(right));
1293 int32_t divisor_si = HConstant::cast(right)->Integer32Value();
1294 if (divisor_si == 0) {
1295 LOperand* dividend = UseRegister(instr->left());
1296 return AssignEnvironment(DefineAsRegister(
1297 new(zone()) LMathFloorOfDiv(dividend, divisor, NULL)));
1298 } else if (IsPowerOf2(abs(divisor_si))) {
1299 LOperand* dividend = UseRegisterAtStart(instr->left());
1300 LInstruction* result = DefineAsRegister(
1301 new(zone()) LMathFloorOfDiv(dividend, divisor, NULL));
1302 return divisor_si < 0 ? AssignEnvironment(result) : result;
1305 LOperand* dividend = UseRegisterAtStart(instr->left());
1306 LOperand* temp = TempRegister();
1307 LInstruction* result = DefineAsRegister(
1308 new(zone()) LMathFloorOfDiv(dividend, divisor, temp));
1309 return divisor_si < 0 ? AssignEnvironment(result) : result;
1314 LInstruction* LChunkBuilder::DoMod(HMod* instr) {
1315 HValue* left = instr->left();
1316 HValue* right = instr->right();
1317 if (instr->representation().IsSmiOrInteger32()) {
1318 ASSERT(left->representation().Equals(instr->representation()));
1319 ASSERT(right->representation().Equals(instr->representation()));
1320 if (instr->RightIsPowerOf2()) {
1321 ASSERT(!right->CanBeZero());
1322 LModI* mod = new(zone()) LModI(UseRegisterAtStart(left),
1323 UseOrConstant(right),
1325 LInstruction* result = DefineSameAsFirst(mod);
1326 return (left->CanBeNegative() &&
1327 instr->CheckFlag(HValue::kBailoutOnMinusZero))
1328 ? AssignEnvironment(result)
1331 // The temporary operand is necessary to ensure that right is not
1332 // allocated into edx.
1333 LModI* mod = new(zone()) LModI(UseFixed(left, rax),
1336 LInstruction* result = DefineFixed(mod, rdx);
1337 return (right->CanBeZero() ||
1338 (left->RangeCanInclude(kMinInt) &&
1339 right->RangeCanInclude(-1) &&
1340 instr->CheckFlag(HValue::kBailoutOnMinusZero)) ||
1341 (left->CanBeNegative() &&
1342 instr->CanBeZero() &&
1343 instr->CheckFlag(HValue::kBailoutOnMinusZero)))
1344 ? AssignEnvironment(result)
1347 } else if (instr->representation().IsDouble()) {
1348 return DoArithmeticD(Token::MOD, instr);
1350 return DoArithmeticT(Token::MOD, instr);
1355 LInstruction* LChunkBuilder::DoMul(HMul* instr) {
1356 if (instr->representation().IsSmiOrInteger32()) {
1357 ASSERT(instr->left()->representation().Equals(instr->representation()));
1358 ASSERT(instr->right()->representation().Equals(instr->representation()));
1359 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
1360 LOperand* right = UseOrConstant(instr->BetterRightOperand());
1361 LMulI* mul = new(zone()) LMulI(left, right);
1362 if (instr->CheckFlag(HValue::kCanOverflow) ||
1363 instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1364 AssignEnvironment(mul);
1366 return DefineSameAsFirst(mul);
1367 } else if (instr->representation().IsDouble()) {
1368 return DoArithmeticD(Token::MUL, instr);
1370 return DoArithmeticT(Token::MUL, instr);
1375 LInstruction* LChunkBuilder::DoSub(HSub* instr) {
1376 if (instr->representation().IsSmiOrInteger32()) {
1377 ASSERT(instr->left()->representation().Equals(instr->representation()));
1378 ASSERT(instr->right()->representation().Equals(instr->representation()));
1379 LOperand* left = UseRegisterAtStart(instr->left());
1380 LOperand* right = UseOrConstantAtStart(instr->right());
1381 LSubI* sub = new(zone()) LSubI(left, right);
1382 LInstruction* result = DefineSameAsFirst(sub);
1383 if (instr->CheckFlag(HValue::kCanOverflow)) {
1384 result = AssignEnvironment(result);
1387 } else if (instr->representation().IsDouble()) {
1388 return DoArithmeticD(Token::SUB, instr);
1390 return DoArithmeticT(Token::SUB, instr);
1395 LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
1396 if (instr->representation().IsSmiOrInteger32()) {
1397 // Check to see if it would be advantageous to use an lea instruction rather
1398 // than an add. This is the case when no overflow check is needed and there
1399 // are multiple uses of the add's inputs, so using a 3-register add will
1400 // preserve all input values for later uses.
1401 bool use_lea = LAddI::UseLea(instr);
1402 ASSERT(instr->left()->representation().Equals(instr->representation()));
1403 ASSERT(instr->right()->representation().Equals(instr->representation()));
1404 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
1405 HValue* right_candidate = instr->BetterRightOperand();
1406 LOperand* right = use_lea
1407 ? UseRegisterOrConstantAtStart(right_candidate)
1408 : UseOrConstantAtStart(right_candidate);
1409 LAddI* add = new(zone()) LAddI(left, right);
1410 bool can_overflow = instr->CheckFlag(HValue::kCanOverflow);
1411 LInstruction* result = use_lea
1412 ? DefineAsRegister(add)
1413 : DefineSameAsFirst(add);
1415 result = AssignEnvironment(result);
1418 } else if (instr->representation().IsExternal()) {
1419 ASSERT(instr->left()->representation().IsExternal());
1420 ASSERT(instr->right()->representation().IsInteger32());
1421 ASSERT(!instr->CheckFlag(HValue::kCanOverflow));
1422 bool use_lea = LAddI::UseLea(instr);
1423 LOperand* left = UseRegisterAtStart(instr->left());
1424 HValue* right_candidate = instr->right();
1425 LOperand* right = use_lea
1426 ? UseRegisterOrConstantAtStart(right_candidate)
1427 : UseOrConstantAtStart(right_candidate);
1428 LAddI* add = new(zone()) LAddI(left, right);
1429 LInstruction* result = use_lea
1430 ? DefineAsRegister(add)
1431 : DefineSameAsFirst(add);
1433 } else if (instr->representation().IsDouble()) {
1434 return DoArithmeticD(Token::ADD, instr);
1436 return DoArithmeticT(Token::ADD, instr);
1442 LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) {
1443 LOperand* left = NULL;
1444 LOperand* right = NULL;
1445 if (instr->representation().IsSmiOrInteger32()) {
1446 ASSERT(instr->left()->representation().Equals(instr->representation()));
1447 ASSERT(instr->right()->representation().Equals(instr->representation()));
1448 left = UseRegisterAtStart(instr->BetterLeftOperand());
1449 right = UseOrConstantAtStart(instr->BetterRightOperand());
1451 ASSERT(instr->representation().IsDouble());
1452 ASSERT(instr->left()->representation().IsDouble());
1453 ASSERT(instr->right()->representation().IsDouble());
1454 left = UseRegisterAtStart(instr->left());
1455 right = UseRegisterAtStart(instr->right());
1457 LMathMinMax* minmax = new(zone()) LMathMinMax(left, right);
1458 return DefineSameAsFirst(minmax);
1462 LInstruction* LChunkBuilder::DoPower(HPower* instr) {
1463 ASSERT(instr->representation().IsDouble());
1464 // We call a C function for double power. It can't trigger a GC.
1465 // We need to use fixed result register for the call.
1466 Representation exponent_type = instr->right()->representation();
1467 ASSERT(instr->left()->representation().IsDouble());
1468 LOperand* left = UseFixedDouble(instr->left(), xmm2);
1469 LOperand* right = exponent_type.IsDouble() ?
1470 UseFixedDouble(instr->right(), xmm1) : UseFixed(instr->right(), rdx);
1471 LPower* result = new(zone()) LPower(left, right);
1472 return MarkAsCall(DefineFixedDouble(result, xmm3), instr,
1473 CAN_DEOPTIMIZE_EAGERLY);
1477 LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
1478 ASSERT(instr->left()->representation().IsTagged());
1479 ASSERT(instr->right()->representation().IsTagged());
1480 LOperand* context = UseFixed(instr->context(), rsi);
1481 LOperand* left = UseFixed(instr->left(), rdx);
1482 LOperand* right = UseFixed(instr->right(), rax);
1483 LCmpT* result = new(zone()) LCmpT(context, left, right);
1484 return MarkAsCall(DefineFixed(result, rax), instr);
1488 LInstruction* LChunkBuilder::DoCompareNumericAndBranch(
1489 HCompareNumericAndBranch* instr) {
1490 Representation r = instr->representation();
1491 if (r.IsSmiOrInteger32()) {
1492 ASSERT(instr->left()->representation().Equals(r));
1493 ASSERT(instr->right()->representation().Equals(r));
1494 LOperand* left = UseRegisterOrConstantAtStart(instr->left());
1495 LOperand* right = UseOrConstantAtStart(instr->right());
1496 return new(zone()) LCompareNumericAndBranch(left, right);
1498 ASSERT(r.IsDouble());
1499 ASSERT(instr->left()->representation().IsDouble());
1500 ASSERT(instr->right()->representation().IsDouble());
1503 if (instr->left()->IsConstant() && instr->right()->IsConstant()) {
1504 left = UseRegisterOrConstantAtStart(instr->left());
1505 right = UseRegisterOrConstantAtStart(instr->right());
1507 left = UseRegisterAtStart(instr->left());
1508 right = UseRegisterAtStart(instr->right());
1510 return new(zone()) LCompareNumericAndBranch(left, right);
1515 LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
1516 HCompareObjectEqAndBranch* instr) {
1517 LInstruction* goto_instr = CheckElideControlInstruction(instr);
1518 if (goto_instr != NULL) return goto_instr;
1519 LOperand* left = UseRegisterAtStart(instr->left());
1520 LOperand* right = UseRegisterOrConstantAtStart(instr->right());
1521 return new(zone()) LCmpObjectEqAndBranch(left, right);
1525 LInstruction* LChunkBuilder::DoCompareHoleAndBranch(
1526 HCompareHoleAndBranch* instr) {
1527 LOperand* value = UseRegisterAtStart(instr->value());
1528 return new(zone()) LCmpHoleAndBranch(value);
1532 LInstruction* LChunkBuilder::DoCompareMinusZeroAndBranch(
1533 HCompareMinusZeroAndBranch* instr) {
1534 LInstruction* goto_instr = CheckElideControlInstruction(instr);
1535 if (goto_instr != NULL) return goto_instr;
1536 LOperand* value = UseRegister(instr->value());
1537 return new(zone()) LCompareMinusZeroAndBranch(value);
1541 LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
1542 ASSERT(instr->value()->representation().IsTagged());
1543 return new(zone()) LIsObjectAndBranch(UseRegisterAtStart(instr->value()));
1547 LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
1548 ASSERT(instr->value()->representation().IsTagged());
1549 LOperand* value = UseRegisterAtStart(instr->value());
1550 LOperand* temp = TempRegister();
1551 return new(zone()) LIsStringAndBranch(value, temp);
1555 LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
1556 ASSERT(instr->value()->representation().IsTagged());
1557 return new(zone()) LIsSmiAndBranch(Use(instr->value()));
1561 LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
1562 HIsUndetectableAndBranch* instr) {
1563 ASSERT(instr->value()->representation().IsTagged());
1564 LOperand* value = UseRegisterAtStart(instr->value());
1565 LOperand* temp = TempRegister();
1566 return new(zone()) LIsUndetectableAndBranch(value, temp);
1570 LInstruction* LChunkBuilder::DoStringCompareAndBranch(
1571 HStringCompareAndBranch* instr) {
1573 ASSERT(instr->left()->representation().IsTagged());
1574 ASSERT(instr->right()->representation().IsTagged());
1575 LOperand* context = UseFixed(instr->context(), rsi);
1576 LOperand* left = UseFixed(instr->left(), rdx);
1577 LOperand* right = UseFixed(instr->right(), rax);
1578 LStringCompareAndBranch* result =
1579 new(zone()) LStringCompareAndBranch(context, left, right);
1581 return MarkAsCall(result, instr);
1585 LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
1586 HHasInstanceTypeAndBranch* instr) {
1587 ASSERT(instr->value()->representation().IsTagged());
1588 LOperand* value = UseRegisterAtStart(instr->value());
1589 return new(zone()) LHasInstanceTypeAndBranch(value);
1593 LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
1594 HGetCachedArrayIndex* instr) {
1595 ASSERT(instr->value()->representation().IsTagged());
1596 LOperand* value = UseRegisterAtStart(instr->value());
1598 return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
1602 LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
1603 HHasCachedArrayIndexAndBranch* instr) {
1604 ASSERT(instr->value()->representation().IsTagged());
1605 LOperand* value = UseRegisterAtStart(instr->value());
1606 return new(zone()) LHasCachedArrayIndexAndBranch(value);
1610 LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
1611 HClassOfTestAndBranch* instr) {
1612 LOperand* value = UseRegister(instr->value());
1613 return new(zone()) LClassOfTestAndBranch(value,
1619 LInstruction* LChunkBuilder::DoMapEnumLength(HMapEnumLength* instr) {
1620 LOperand* map = UseRegisterAtStart(instr->value());
1621 return DefineAsRegister(new(zone()) LMapEnumLength(map));
1625 LInstruction* LChunkBuilder::DoElementsKind(HElementsKind* instr) {
1626 LOperand* object = UseRegisterAtStart(instr->value());
1627 return DefineAsRegister(new(zone()) LElementsKind(object));
1631 LInstruction* LChunkBuilder::DoValueOf(HValueOf* instr) {
1632 LOperand* object = UseRegister(instr->value());
1633 LValueOf* result = new(zone()) LValueOf(object);
1634 return DefineSameAsFirst(result);
1638 LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
1639 LOperand* object = UseFixed(instr->value(), rax);
1640 LDateField* result = new(zone()) LDateField(object, instr->index());
1641 return MarkAsCall(DefineFixed(result, rax), instr, CAN_DEOPTIMIZE_EAGERLY);
1645 LInstruction* LChunkBuilder::DoSeqStringGetChar(HSeqStringGetChar* instr) {
1646 LOperand* string = UseRegisterAtStart(instr->string());
1647 LOperand* index = UseRegisterOrConstantAtStart(instr->index());
1648 return DefineAsRegister(new(zone()) LSeqStringGetChar(string, index));
1652 LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) {
1653 LOperand* string = UseRegisterAtStart(instr->string());
1654 LOperand* index = FLAG_debug_code
1655 ? UseRegisterAtStart(instr->index())
1656 : UseRegisterOrConstantAtStart(instr->index());
1657 LOperand* value = FLAG_debug_code
1658 ? UseRegisterAtStart(instr->value())
1659 : UseRegisterOrConstantAtStart(instr->value());
1660 LOperand* context = FLAG_debug_code ? UseFixed(instr->context(), rsi) : NULL;
1661 LInstruction* result = new(zone()) LSeqStringSetChar(context, string,
1663 if (FLAG_debug_code) {
1664 result = MarkAsCall(result, instr);
1670 LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
1671 LOperand* value = UseRegisterOrConstantAtStart(instr->index());
1672 LOperand* length = Use(instr->length());
1673 return AssignEnvironment(new(zone()) LBoundsCheck(value, length));
1677 LInstruction* LChunkBuilder::DoBoundsCheckBaseIndexInformation(
1678 HBoundsCheckBaseIndexInformation* instr) {
1684 LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
1685 // The control instruction marking the end of a block that completed
1686 // abruptly (e.g., threw an exception). There is nothing specific to do.
1691 LInstruction* LChunkBuilder::DoThrow(HThrow* instr) {
1692 LOperand* context = UseFixed(instr->context(), rsi);
1693 LOperand* value = UseFixed(instr->value(), rax);
1694 return MarkAsCall(new(zone()) LThrow(context, value), instr);
1698 LInstruction* LChunkBuilder::DoUseConst(HUseConst* instr) {
1703 LInstruction* LChunkBuilder::DoForceRepresentation(HForceRepresentation* bad) {
1704 // All HForceRepresentation instructions should be eliminated in the
1705 // representation change phase of Hydrogen.
1711 LInstruction* LChunkBuilder::DoChange(HChange* instr) {
1712 Representation from = instr->from();
1713 Representation to = instr->to();
1715 if (to.IsTagged()) {
1716 LOperand* value = UseRegister(instr->value());
1717 return DefineSameAsFirst(new(zone()) LDummyUse(value));
1719 from = Representation::Tagged();
1721 // Only mark conversions that might need to allocate as calling rather than
1722 // all changes. This makes simple, non-allocating conversion not have to force
1723 // building a stack frame.
1724 if (from.IsTagged()) {
1725 if (to.IsDouble()) {
1726 LOperand* value = UseRegister(instr->value());
1727 LNumberUntagD* res = new(zone()) LNumberUntagD(value);
1728 return AssignEnvironment(DefineAsRegister(res));
1729 } else if (to.IsSmi()) {
1730 HValue* val = instr->value();
1731 LOperand* value = UseRegister(val);
1732 if (val->type().IsSmi()) {
1733 return DefineSameAsFirst(new(zone()) LDummyUse(value));
1735 return AssignEnvironment(DefineSameAsFirst(new(zone()) LCheckSmi(value)));
1737 ASSERT(to.IsInteger32());
1738 HValue* val = instr->value();
1739 LOperand* value = UseRegister(val);
1740 if (val->type().IsSmi() || val->representation().IsSmi()) {
1741 return DefineSameAsFirst(new(zone()) LSmiUntag(value, false));
1743 bool truncating = instr->CanTruncateToInt32();
1744 LOperand* xmm_temp = truncating ? NULL : FixedTemp(xmm1);
1745 LTaggedToI* res = new(zone()) LTaggedToI(value, xmm_temp);
1746 return AssignEnvironment(DefineSameAsFirst(res));
1749 } else if (from.IsDouble()) {
1750 if (to.IsTagged()) {
1751 info()->MarkAsDeferredCalling();
1752 LOperand* value = UseRegister(instr->value());
1753 LOperand* temp = TempRegister();
1755 // Make sure that temp and result_temp are different registers.
1756 LUnallocated* result_temp = TempRegister();
1757 LNumberTagD* result = new(zone()) LNumberTagD(value, temp);
1758 return AssignPointerMap(Define(result, result_temp));
1759 } else if (to.IsSmi()) {
1760 LOperand* value = UseRegister(instr->value());
1761 return AssignEnvironment(
1762 DefineAsRegister(new(zone()) LDoubleToSmi(value)));
1764 ASSERT(to.IsInteger32());
1765 LOperand* value = UseRegister(instr->value());
1766 return AssignEnvironment(
1767 DefineAsRegister(new(zone()) LDoubleToI(value)));
1769 } else if (from.IsInteger32()) {
1770 info()->MarkAsDeferredCalling();
1771 if (to.IsTagged()) {
1772 HValue* val = instr->value();
1773 LOperand* value = UseRegister(val);
1774 if (val->CheckFlag(HInstruction::kUint32)) {
1775 LOperand* temp = FixedTemp(xmm1);
1776 LNumberTagU* result = new(zone()) LNumberTagU(value, temp);
1777 return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
1778 } else if (val->HasRange() && val->range()->IsInSmiRange()) {
1779 return DefineSameAsFirst(new(zone()) LSmiTag(value));
1781 LNumberTagI* result = new(zone()) LNumberTagI(value);
1782 return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
1784 } else if (to.IsSmi()) {
1785 HValue* val = instr->value();
1786 LOperand* value = UseRegister(val);
1787 LInstruction* result = NULL;
1788 if (val->CheckFlag(HInstruction::kUint32)) {
1789 result = DefineAsRegister(new(zone()) LUint32ToSmi(value));
1790 if (val->HasRange() && val->range()->IsInSmiRange() &&
1791 val->range()->upper() != kMaxInt) {
1795 result = DefineAsRegister(new(zone()) LInteger32ToSmi(value));
1796 if (val->HasRange() && val->range()->IsInSmiRange()) {
1800 return AssignEnvironment(result);
1802 if (instr->value()->CheckFlag(HInstruction::kUint32)) {
1803 LOperand* temp = FixedTemp(xmm1);
1804 return DefineAsRegister(
1805 new(zone()) LUint32ToDouble(UseRegister(instr->value()), temp));
1807 ASSERT(to.IsDouble());
1808 LOperand* value = Use(instr->value());
1809 return DefineAsRegister(new(zone()) LInteger32ToDouble(value));
1818 LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) {
1819 LOperand* value = UseRegisterAtStart(instr->value());
1820 return AssignEnvironment(new(zone()) LCheckNonSmi(value));
1824 LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
1825 LOperand* value = UseRegisterAtStart(instr->value());
1826 return AssignEnvironment(new(zone()) LCheckSmi(value));
1830 LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
1831 LOperand* value = UseRegisterAtStart(instr->value());
1832 LCheckInstanceType* result = new(zone()) LCheckInstanceType(value);
1833 return AssignEnvironment(result);
1837 LInstruction* LChunkBuilder::DoCheckValue(HCheckValue* instr) {
1838 LOperand* value = UseRegisterAtStart(instr->value());
1839 return AssignEnvironment(new(zone()) LCheckValue(value));
1843 LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
1844 LOperand* value = NULL;
1845 if (!instr->CanOmitMapChecks()) {
1846 value = UseRegisterAtStart(instr->value());
1847 if (instr->has_migration_target()) info()->MarkAsDeferredCalling();
1849 LCheckMaps* result = new(zone()) LCheckMaps(value);
1850 if (!instr->CanOmitMapChecks()) {
1851 AssignEnvironment(result);
1852 if (instr->has_migration_target()) return AssignPointerMap(result);
1858 LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
1859 HValue* value = instr->value();
1860 Representation input_rep = value->representation();
1861 LOperand* reg = UseRegister(value);
1862 if (input_rep.IsDouble()) {
1863 return DefineAsRegister(new(zone()) LClampDToUint8(reg));
1864 } else if (input_rep.IsInteger32()) {
1865 return DefineSameAsFirst(new(zone()) LClampIToUint8(reg));
1867 ASSERT(input_rep.IsSmiOrTagged());
1868 // Register allocator doesn't (yet) support allocation of double
1869 // temps. Reserve xmm1 explicitly.
1870 LClampTToUint8* result = new(zone()) LClampTToUint8(reg,
1872 return AssignEnvironment(DefineSameAsFirst(result));
1877 LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
1878 LOperand* context = info()->IsStub() ? UseFixed(instr->context(), rsi) : NULL;
1879 LOperand* parameter_count = UseRegisterOrConstant(instr->parameter_count());
1880 return new(zone()) LReturn(
1881 UseFixed(instr->value(), rax), context, parameter_count);
1885 LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
1886 Representation r = instr->representation();
1888 return DefineAsRegister(new(zone()) LConstantS);
1889 } else if (r.IsInteger32()) {
1890 return DefineAsRegister(new(zone()) LConstantI);
1891 } else if (r.IsDouble()) {
1892 LOperand* temp = TempRegister();
1893 return DefineAsRegister(new(zone()) LConstantD(temp));
1894 } else if (r.IsExternal()) {
1895 return DefineAsRegister(new(zone()) LConstantE);
1896 } else if (r.IsTagged()) {
1897 return DefineAsRegister(new(zone()) LConstantT);
1905 LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
1906 LLoadGlobalCell* result = new(zone()) LLoadGlobalCell;
1907 return instr->RequiresHoleCheck()
1908 ? AssignEnvironment(DefineAsRegister(result))
1909 : DefineAsRegister(result);
1913 LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
1914 LOperand* context = UseFixed(instr->context(), rsi);
1915 LOperand* global_object = UseFixed(instr->global_object(), rax);
1916 LLoadGlobalGeneric* result =
1917 new(zone()) LLoadGlobalGeneric(context, global_object);
1918 return MarkAsCall(DefineFixed(result, rax), instr);
1922 LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
1923 LOperand* value = UseRegister(instr->value());
1924 // Use a temp to avoid reloading the cell value address in the case where
1925 // we perform a hole check.
1926 return instr->RequiresHoleCheck()
1927 ? AssignEnvironment(new(zone()) LStoreGlobalCell(value, TempRegister()))
1928 : new(zone()) LStoreGlobalCell(value, NULL);
1932 LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
1933 LOperand* context = UseRegisterAtStart(instr->value());
1934 LInstruction* result =
1935 DefineAsRegister(new(zone()) LLoadContextSlot(context));
1936 return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
1940 LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
1944 context = UseRegister(instr->context());
1945 if (instr->NeedsWriteBarrier()) {
1946 value = UseTempRegister(instr->value());
1947 temp = TempRegister();
1949 value = UseRegister(instr->value());
1952 LInstruction* result = new(zone()) LStoreContextSlot(context, value, temp);
1953 return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
1957 LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
1958 // Use the special mov rax, moffs64 encoding for external
1959 // memory accesses with 64-bit word-sized values.
1960 if (instr->access().IsExternalMemory() &&
1961 instr->access().offset() == 0 &&
1962 (instr->access().representation().IsSmi() ||
1963 instr->access().representation().IsTagged() ||
1964 instr->access().representation().IsHeapObject() ||
1965 instr->access().representation().IsExternal())) {
1966 LOperand* obj = UseRegisterOrConstantAtStart(instr->object());
1967 return DefineFixed(new(zone()) LLoadNamedField(obj), rax);
1969 LOperand* obj = UseRegisterAtStart(instr->object());
1970 return DefineAsRegister(new(zone()) LLoadNamedField(obj));
1974 LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
1975 LOperand* context = UseFixed(instr->context(), rsi);
1976 LOperand* object = UseFixed(instr->object(), rax);
1977 LLoadNamedGeneric* result = new(zone()) LLoadNamedGeneric(context, object);
1978 return MarkAsCall(DefineFixed(result, rax), instr);
1982 LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
1983 HLoadFunctionPrototype* instr) {
1984 return AssignEnvironment(DefineAsRegister(
1985 new(zone()) LLoadFunctionPrototype(UseRegister(instr->function()))));
1989 LInstruction* LChunkBuilder::DoLoadRoot(HLoadRoot* instr) {
1990 return DefineAsRegister(new(zone()) LLoadRoot);
1994 LInstruction* LChunkBuilder::DoLoadExternalArrayPointer(
1995 HLoadExternalArrayPointer* instr) {
1996 LOperand* input = UseRegisterAtStart(instr->value());
1997 return DefineAsRegister(new(zone()) LLoadExternalArrayPointer(input));
2001 LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) {
2002 ASSERT(instr->key()->representation().IsInteger32());
2003 ElementsKind elements_kind = instr->elements_kind();
2004 LOperand* key = UseRegisterOrConstantAtStart(instr->key());
2005 LLoadKeyed* result = NULL;
2007 if (!instr->is_typed_elements()) {
2008 LOperand* obj = UseRegisterAtStart(instr->elements());
2009 result = new(zone()) LLoadKeyed(obj, key);
2012 (instr->representation().IsInteger32() &&
2013 !(IsDoubleOrFloatElementsKind(instr->elements_kind()))) ||
2014 (instr->representation().IsDouble() &&
2015 (IsDoubleOrFloatElementsKind(instr->elements_kind()))));
2016 LOperand* backing_store = UseRegister(instr->elements());
2017 result = new(zone()) LLoadKeyed(backing_store, key);
2020 DefineAsRegister(result);
2021 bool can_deoptimize = instr->RequiresHoleCheck() ||
2022 (elements_kind == EXTERNAL_UINT32_ELEMENTS) ||
2023 (elements_kind == UINT32_ELEMENTS);
2024 // An unsigned int array load might overflow and cause a deopt, make sure it
2025 // has an environment.
2026 return can_deoptimize ? AssignEnvironment(result) : result;
2030 LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
2031 LOperand* context = UseFixed(instr->context(), rsi);
2032 LOperand* object = UseFixed(instr->object(), rdx);
2033 LOperand* key = UseFixed(instr->key(), rax);
2035 LLoadKeyedGeneric* result =
2036 new(zone()) LLoadKeyedGeneric(context, object, key);
2037 return MarkAsCall(DefineFixed(result, rax), instr);
2041 LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
2042 ElementsKind elements_kind = instr->elements_kind();
2044 if (!instr->is_typed_elements()) {
2045 ASSERT(instr->elements()->representation().IsTagged());
2046 bool needs_write_barrier = instr->NeedsWriteBarrier();
2047 LOperand* object = NULL;
2048 LOperand* key = NULL;
2049 LOperand* val = NULL;
2051 Representation value_representation = instr->value()->representation();
2052 if (value_representation.IsDouble()) {
2053 object = UseRegisterAtStart(instr->elements());
2054 val = UseTempRegister(instr->value());
2055 key = UseRegisterOrConstantAtStart(instr->key());
2057 ASSERT(value_representation.IsSmiOrTagged() ||
2058 value_representation.IsInteger32());
2059 if (needs_write_barrier) {
2060 object = UseTempRegister(instr->elements());
2061 val = UseTempRegister(instr->value());
2062 key = UseTempRegister(instr->key());
2064 object = UseRegisterAtStart(instr->elements());
2065 val = UseRegisterOrConstantAtStart(instr->value());
2066 key = UseRegisterOrConstantAtStart(instr->key());
2070 return new(zone()) LStoreKeyed(object, key, val);
2074 (instr->value()->representation().IsInteger32() &&
2075 !IsDoubleOrFloatElementsKind(elements_kind)) ||
2076 (instr->value()->representation().IsDouble() &&
2077 IsDoubleOrFloatElementsKind(elements_kind)));
2078 ASSERT((instr->is_fixed_typed_array() &&
2079 instr->elements()->representation().IsTagged()) ||
2080 (instr->is_external() &&
2081 instr->elements()->representation().IsExternal()));
2082 bool val_is_temp_register =
2083 elements_kind == EXTERNAL_UINT8_CLAMPED_ELEMENTS ||
2084 elements_kind == EXTERNAL_FLOAT32_ELEMENTS ||
2085 elements_kind == FLOAT32_ELEMENTS;
2086 LOperand* val = val_is_temp_register ? UseTempRegister(instr->value())
2087 : UseRegister(instr->value());
2088 LOperand* key = UseRegisterOrConstantAtStart(instr->key());
2089 LOperand* backing_store = UseRegister(instr->elements());
2090 return new(zone()) LStoreKeyed(backing_store, key, val);
2094 LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
2095 LOperand* context = UseFixed(instr->context(), rsi);
2096 LOperand* object = UseFixed(instr->object(), rdx);
2097 LOperand* key = UseFixed(instr->key(), rcx);
2098 LOperand* value = UseFixed(instr->value(), rax);
2100 ASSERT(instr->object()->representation().IsTagged());
2101 ASSERT(instr->key()->representation().IsTagged());
2102 ASSERT(instr->value()->representation().IsTagged());
2104 LStoreKeyedGeneric* result =
2105 new(zone()) LStoreKeyedGeneric(context, object, key, value);
2106 return MarkAsCall(result, instr);
2110 LInstruction* LChunkBuilder::DoTransitionElementsKind(
2111 HTransitionElementsKind* instr) {
2112 LOperand* object = UseRegister(instr->object());
2113 if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) {
2114 LOperand* object = UseRegister(instr->object());
2115 LOperand* new_map_reg = TempRegister();
2116 LOperand* temp_reg = TempRegister();
2117 LTransitionElementsKind* result = new(zone()) LTransitionElementsKind(
2118 object, NULL, new_map_reg, temp_reg);
2121 LOperand* context = UseFixed(instr->context(), rsi);
2122 LTransitionElementsKind* result =
2123 new(zone()) LTransitionElementsKind(object, context, NULL, NULL);
2124 return AssignPointerMap(result);
2129 LInstruction* LChunkBuilder::DoTrapAllocationMemento(
2130 HTrapAllocationMemento* instr) {
2131 LOperand* object = UseRegister(instr->object());
2132 LOperand* temp = TempRegister();
2133 LTrapAllocationMemento* result =
2134 new(zone()) LTrapAllocationMemento(object, temp);
2135 return AssignEnvironment(result);
2139 LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
2140 bool is_in_object = instr->access().IsInobject();
2141 bool is_external_location = instr->access().IsExternalMemory() &&
2142 instr->access().offset() == 0;
2143 bool needs_write_barrier = instr->NeedsWriteBarrier();
2144 bool needs_write_barrier_for_map = instr->has_transition() &&
2145 instr->NeedsWriteBarrierForMap();
2148 if (needs_write_barrier) {
2150 ? UseRegister(instr->object())
2151 : UseTempRegister(instr->object());
2152 } else if (is_external_location) {
2153 ASSERT(!is_in_object);
2154 ASSERT(!needs_write_barrier);
2155 ASSERT(!needs_write_barrier_for_map);
2156 obj = UseRegisterOrConstant(instr->object());
2158 obj = needs_write_barrier_for_map
2159 ? UseRegister(instr->object())
2160 : UseRegisterAtStart(instr->object());
2163 bool can_be_constant = instr->value()->IsConstant() &&
2164 HConstant::cast(instr->value())->NotInNewSpace() &&
2165 !(FLAG_track_double_fields && instr->field_representation().IsDouble());
2168 if (needs_write_barrier) {
2169 val = UseTempRegister(instr->value());
2170 } else if (is_external_location) {
2171 val = UseFixed(instr->value(), rax);
2172 } else if (can_be_constant) {
2173 val = UseRegisterOrConstant(instr->value());
2174 } else if (FLAG_track_fields && instr->field_representation().IsSmi()) {
2175 val = UseRegister(instr->value());
2176 } else if (FLAG_track_double_fields &&
2177 instr->field_representation().IsDouble()) {
2178 val = UseRegisterAtStart(instr->value());
2180 val = UseRegister(instr->value());
2183 // We only need a scratch register if we have a write barrier or we
2184 // have a store into the properties array (not in-object-property).
2185 LOperand* temp = (!is_in_object || needs_write_barrier ||
2186 needs_write_barrier_for_map) ? TempRegister() : NULL;
2188 LStoreNamedField* result = new(zone()) LStoreNamedField(obj, val, temp);
2189 if (FLAG_track_heap_object_fields &&
2190 instr->field_representation().IsHeapObject()) {
2191 if (!instr->value()->type().IsHeapObject()) {
2192 return AssignEnvironment(result);
2199 LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
2200 LOperand* context = UseFixed(instr->context(), rsi);
2201 LOperand* object = UseFixed(instr->object(), rdx);
2202 LOperand* value = UseFixed(instr->value(), rax);
2204 LStoreNamedGeneric* result =
2205 new(zone()) LStoreNamedGeneric(context, object, value);
2206 return MarkAsCall(result, instr);
2210 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
2211 LOperand* context = UseFixed(instr->context(), rsi);
2212 LOperand* left = UseFixed(instr->left(), rdx);
2213 LOperand* right = UseFixed(instr->right(), rax);
2215 DefineFixed(new(zone()) LStringAdd(context, left, right), rax), instr);
2219 LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
2220 LOperand* string = UseTempRegister(instr->string());
2221 LOperand* index = UseTempRegister(instr->index());
2222 LOperand* context = UseAny(instr->context());
2223 LStringCharCodeAt* result =
2224 new(zone()) LStringCharCodeAt(context, string, index);
2225 return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
2229 LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
2230 LOperand* char_code = UseRegister(instr->value());
2231 LOperand* context = UseAny(instr->context());
2232 LStringCharFromCode* result =
2233 new(zone()) LStringCharFromCode(context, char_code);
2234 return AssignPointerMap(DefineAsRegister(result));
2238 LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
2239 info()->MarkAsDeferredCalling();
2240 LOperand* context = UseAny(instr->context());
2241 LOperand* size = instr->size()->IsConstant()
2242 ? UseConstant(instr->size())
2243 : UseTempRegister(instr->size());
2244 LOperand* temp = TempRegister();
2245 LAllocate* result = new(zone()) LAllocate(context, size, temp);
2246 return AssignPointerMap(DefineAsRegister(result));
2250 LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
2251 LOperand* context = UseFixed(instr->context(), rsi);
2252 LRegExpLiteral* result = new(zone()) LRegExpLiteral(context);
2253 return MarkAsCall(DefineFixed(result, rax), instr);
2257 LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
2258 LOperand* context = UseFixed(instr->context(), rsi);
2259 LFunctionLiteral* result = new(zone()) LFunctionLiteral(context);
2260 return MarkAsCall(DefineFixed(result, rax), instr);
2264 LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
2265 ASSERT(argument_count_ == 0);
2266 allocator_->MarkAsOsrEntry();
2267 current_block_->last_environment()->set_ast_id(instr->ast_id());
2268 return AssignEnvironment(new(zone()) LOsrEntry);
2272 LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
2273 LParameter* result = new(zone()) LParameter;
2274 if (instr->kind() == HParameter::STACK_PARAMETER) {
2275 int spill_index = chunk()->GetParameterStackSlot(instr->index());
2276 return DefineAsSpilled(result, spill_index);
2278 ASSERT(info()->IsStub());
2279 CodeStubInterfaceDescriptor* descriptor =
2280 info()->code_stub()->GetInterfaceDescriptor(info()->isolate());
2281 int index = static_cast<int>(instr->index());
2282 Register reg = descriptor->GetParameterRegister(index);
2283 return DefineFixed(result, reg);
2288 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
2289 // Use an index that corresponds to the location in the unoptimized frame,
2290 // which the optimized frame will subsume.
2291 int env_index = instr->index();
2292 int spill_index = 0;
2293 if (instr->environment()->is_parameter_index(env_index)) {
2294 spill_index = chunk()->GetParameterStackSlot(env_index);
2296 spill_index = env_index - instr->environment()->first_local_index();
2297 if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
2298 Abort(kTooManySpillSlotsNeededForOSR);
2302 return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
2306 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
2307 LOperand* context = UseFixed(instr->context(), rsi);
2308 LCallStub* result = new(zone()) LCallStub(context);
2309 return MarkAsCall(DefineFixed(result, rax), instr);
2313 LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
2314 // There are no real uses of the arguments object.
2315 // arguments.length and element access are supported directly on
2316 // stack arguments, and any real arguments object use causes a bailout.
2317 // So this value is never used.
2322 LInstruction* LChunkBuilder::DoCapturedObject(HCapturedObject* instr) {
2323 instr->ReplayEnvironment(current_block_->last_environment());
2325 // There are no real uses of a captured object.
2330 LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
2331 info()->MarkAsRequiresFrame();
2332 LOperand* args = UseRegister(instr->arguments());
2335 if (instr->length()->IsConstant() && instr->index()->IsConstant()) {
2336 length = UseRegisterOrConstant(instr->length());
2337 index = UseOrConstant(instr->index());
2339 length = UseTempRegister(instr->length());
2340 index = Use(instr->index());
2342 return DefineAsRegister(new(zone()) LAccessArgumentsAt(args, length, index));
2346 LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
2347 LOperand* object = UseFixed(instr->value(), rax);
2348 LToFastProperties* result = new(zone()) LToFastProperties(object);
2349 return MarkAsCall(DefineFixed(result, rax), instr);
2353 LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
2354 LOperand* context = UseFixed(instr->context(), rsi);
2355 LOperand* value = UseAtStart(instr->value());
2356 LTypeof* result = new(zone()) LTypeof(context, value);
2357 return MarkAsCall(DefineFixed(result, rax), instr);
2361 LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
2362 LInstruction* goto_instr = CheckElideControlInstruction(instr);
2363 if (goto_instr != NULL) return goto_instr;
2365 return new(zone()) LTypeofIsAndBranch(UseTempRegister(instr->value()));
2369 LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
2370 HIsConstructCallAndBranch* instr) {
2371 return new(zone()) LIsConstructCallAndBranch(TempRegister());
2375 LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
2376 instr->ReplayEnvironment(current_block_->last_environment());
2378 // If there is an instruction pending deoptimization environment create a
2379 // lazy bailout instruction to capture the environment.
2380 if (pending_deoptimization_ast_id_ == instr->ast_id()) {
2381 LLazyBailout* lazy_bailout = new(zone()) LLazyBailout;
2382 LInstruction* result = AssignEnvironment(lazy_bailout);
2383 // Store the lazy deopt environment with the instruction if needed. Right
2384 // now it is only used for LInstanceOfKnownGlobal.
2385 instruction_pending_deoptimization_environment_->
2386 SetDeferredLazyDeoptimizationEnvironment(result->environment());
2387 instruction_pending_deoptimization_environment_ = NULL;
2388 pending_deoptimization_ast_id_ = BailoutId::None();
2396 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
2397 info()->MarkAsDeferredCalling();
2398 if (instr->is_function_entry()) {
2399 LOperand* context = UseFixed(instr->context(), rsi);
2400 return MarkAsCall(new(zone()) LStackCheck(context), instr);
2402 ASSERT(instr->is_backwards_branch());
2403 LOperand* context = UseAny(instr->context());
2404 return AssignEnvironment(
2405 AssignPointerMap(new(zone()) LStackCheck(context)));
2410 LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
2411 HEnvironment* outer = current_block_->last_environment();
2412 HConstant* undefined = graph()->GetConstantUndefined();
2413 HEnvironment* inner = outer->CopyForInlining(instr->closure(),
2414 instr->arguments_count(),
2417 instr->inlining_kind());
2418 // Only replay binding of arguments object if it wasn't removed from graph.
2419 if (instr->arguments_var() != NULL && instr->arguments_object()->IsLinked()) {
2420 inner->Bind(instr->arguments_var(), instr->arguments_object());
2422 inner->set_entry(instr);
2423 current_block_->UpdateEnvironment(inner);
2424 chunk_->AddInlinedClosure(instr->closure());
2429 LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
2430 LInstruction* pop = NULL;
2432 HEnvironment* env = current_block_->last_environment();
2434 if (env->entry()->arguments_pushed()) {
2435 int argument_count = env->arguments_environment()->parameter_count();
2436 pop = new(zone()) LDrop(argument_count);
2437 ASSERT(instr->argument_delta() == -argument_count);
2440 HEnvironment* outer = current_block_->last_environment()->
2441 DiscardInlined(false);
2442 current_block_->UpdateEnvironment(outer);
2448 LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
2449 LOperand* context = UseFixed(instr->context(), rsi);
2450 LOperand* object = UseFixed(instr->enumerable(), rax);
2451 LForInPrepareMap* result = new(zone()) LForInPrepareMap(context, object);
2452 return MarkAsCall(DefineFixed(result, rax), instr, CAN_DEOPTIMIZE_EAGERLY);
2456 LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
2457 LOperand* map = UseRegister(instr->map());
2458 return AssignEnvironment(DefineAsRegister(
2459 new(zone()) LForInCacheArray(map)));
2463 LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
2464 LOperand* value = UseRegisterAtStart(instr->value());
2465 LOperand* map = UseRegisterAtStart(instr->map());
2466 return AssignEnvironment(new(zone()) LCheckMapValue(value, map));
2470 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
2471 LOperand* object = UseRegister(instr->object());
2472 LOperand* index = UseTempRegister(instr->index());
2473 return DefineSameAsFirst(new(zone()) LLoadFieldByIndex(object, index));
2477 } } // namespace v8::internal
2479 #endif // V8_TARGET_ARCH_X64