1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 #if V8_TARGET_ARCH_IA32
32 #include "lithium-allocator-inl.h"
33 #include "ia32/lithium-ia32.h"
34 #include "ia32/lithium-codegen-ia32.h"
35 #include "hydrogen-osr.h"
// Generates one CompileToNative stub per concrete Lithium instruction type;
// each stub simply dispatches to the matching LCodeGen::Do<Type> visitor.
40 #define DEFINE_COMPILE(type) \
41 void L##type::CompileToNative(LCodeGen* generator) { \
42 generator->Do##type(this); \
44 LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
// Debug-mode sanity check for call-marked instructions: asserts that the
// output, every use, and every temp obey the register policies a call site
// requires (all allocatable registers are clobbered by the call).
49 void LInstruction::VerifyCall() {
50 // Call instructions can use only fixed registers as temporaries and
51 // outputs because all registers are blocked by the calling convention.
52 // Input operands must use a fixed register or use-at-start policy or
53 // a non-register policy.
54 ASSERT(Output() == NULL ||
55 LUnallocated::cast(Output())->HasFixedPolicy() ||
56 !LUnallocated::cast(Output())->HasRegisterPolicy());
57 for (UseIterator it(this); !it.Done(); it.Advance()) {
58 LUnallocated* operand = LUnallocated::cast(it.Current());
59 ASSERT(operand->HasFixedPolicy() ||
60 operand->IsUsedAtStart());
// Temps must be fixed or non-register for the same reason as the output.
62 for (TempIterator it(this); !it.Done(); it.Advance()) {
63 LUnallocated* operand = LUnallocated::cast(it.Current());
64 ASSERT(operand->HasFixedPolicy() ||!operand->HasRegisterPolicy());
// True if this instruction produces its result in a double register.
70 bool LInstruction::HasDoubleRegisterResult() {
71 return HasResult() && result()->IsDoubleRegister();
// True if any input operand of this instruction is a double register.
75 bool LInstruction::HasDoubleRegisterInput() {
76 for (int i = 0; i < InputCount(); i++) {
77 LOperand* op = InputAt(i);
78 if (op != NULL && op->IsDoubleRegister()) {
// True if the given x87 register is used by one of this instruction's
// double-register inputs (resolved through the code generator's mapping).
86 bool LInstruction::IsDoubleInput(X87Register reg, LCodeGen* cgen) {
87 for (int i = 0; i < InputCount(); i++) {
88 LOperand* op = InputAt(i);
89 if (op != NULL && op->IsDoubleRegister()) {
90 if (cgen->ToX87Register(op).is(reg)) return true;
// Prints a full textual form of the instruction: mnemonic, output operand,
// then (if present) the deopt environment and the pointer map.
97 void LInstruction::PrintTo(StringStream* stream) {
98 stream->Add("%s ", this->Mnemonic());
100 PrintOutputOperandTo(stream);
104 if (HasEnvironment()) {
106 environment()->PrintTo(stream);
109 if (HasPointerMap()) {
111 pointer_map()->PrintTo(stream);
// Default operand printer: emits each input, space-separated; NULL inputs
// are handled by the branch below (the missing-lines gap hides its output).
116 void LInstruction::PrintDataTo(StringStream* stream) {
118 for (int i = 0; i < InputCount(); i++) {
119 if (i > 0) stream->Add(" ");
120 if (InputAt(i) == NULL) {
123 InputAt(i)->PrintTo(stream);
// Prints the result operand, if the instruction has one.
129 void LInstruction::PrintOutputOperandTo(StringStream* stream) {
130 if (HasResult()) result()->PrintTo(stream);
// Prints the gap moves plus, for dead blocks, the replacement block id.
134 void LLabel::PrintDataTo(StringStream* stream) {
135 LGap::PrintDataTo(stream);
136 LLabel* rep = replacement();
138 stream->Add(" Dead block replaced with B%d", rep->block_id());
// A gap is redundant when all four of its parallel-move positions are
// either absent or themselves redundant (no actual moves to emit).
143 bool LGap::IsRedundant() const {
144 for (int i = 0; i < 4; i++) {
145 if (parallel_moves_[i] != NULL && !parallel_moves_[i]->IsRedundant()) {
// Prints the parallel moves of each of the four inner positions.
154 void LGap::PrintDataTo(StringStream* stream) {
155 for (int i = 0; i < 4; i++) {
157 if (parallel_moves_[i] != NULL) {
158 parallel_moves_[i]->PrintDataTo(stream);
// Mnemonic for double (untagged) arithmetic, switched on the token operator.
165 const char* LArithmeticD::Mnemonic() const {
167 case Token::ADD: return "add-d";
168 case Token::SUB: return "sub-d";
169 case Token::MUL: return "mul-d";
170 case Token::DIV: return "div-d";
171 case Token::MOD: return "mod-d";
// Mnemonic for tagged (generic) arithmetic and bitwise/shift operations.
// Note SHL deliberately prints as "sal-t" (arithmetic-left on ia32).
179 const char* LArithmeticT::Mnemonic() const {
181 case Token::ADD: return "add-t";
182 case Token::SUB: return "sub-t";
183 case Token::MUL: return "mul-t";
184 case Token::MOD: return "mod-t";
185 case Token::DIV: return "div-t";
186 case Token::BIT_AND: return "bit-and-t";
187 case Token::BIT_OR: return "bit-or-t";
188 case Token::BIT_XOR: return "bit-xor-t";
189 case Token::ROR: return "ror-t";
190 case Token::SHL: return "sal-t";
191 case Token::SAR: return "sar-t";
192 case Token::SHR: return "shr-t";
// A goto is only worth a disassembly comment when it actually jumps,
// i.e. the target is not the next block to be emitted.
200 bool LGoto::HasInterestingComment(LCodeGen* gen) const {
201 return !gen->IsNextEmittedBlock(block_id());
// Prints the unconditional jump target block.
205 void LGoto::PrintDataTo(StringStream* stream) {
206 stream->Add("B%d", block_id());
// Prints "true-block | false-block on <value>".
210 void LBranch::PrintDataTo(StringStream* stream) {
211 stream->Add("B%d | B%d on ", true_block_id(), false_block_id());
212 value()->PrintTo(stream);
// Prints "<left> <op> <right> then B.. else B..".
216 void LCompareNumericAndBranch::PrintDataTo(StringStream* stream) {
218 left()->PrintTo(stream);
219 stream->Add(" %s ", Token::String(op()));
220 right()->PrintTo(stream);
221 stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
// The PrintDataTo overrides below all follow the same pattern: print the
// predicate name, the tested operand(s), and the two successor block ids.
225 void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
226 stream->Add("if is_object(");
227 value()->PrintTo(stream);
228 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
232 void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
233 stream->Add("if is_string(");
234 value()->PrintTo(stream);
235 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
239 void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
240 stream->Add("if is_smi(");
241 value()->PrintTo(stream);
242 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
246 void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
247 stream->Add("if is_undetectable(");
248 value()->PrintTo(stream);
249 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
253 void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
254 stream->Add("if string_compare(");
255 left()->PrintTo(stream);
256 right()->PrintTo(stream);
257 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
261 void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
262 stream->Add("if has_instance_type(");
263 value()->PrintTo(stream);
264 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
268 void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) {
269 stream->Add("if has_cached_array_index(");
270 value()->PrintTo(stream);
271 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
275 void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
276 stream->Add("if class_of_test(");
277 value()->PrintTo(stream);
278 stream->Add(", \"%o\") then B%d else B%d",
279 *hydrogen()->class_name(),
// Prints the typeof test using the literal type name from the hydrogen node.
285 void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
286 stream->Add("if typeof ");
287 value()->PrintTo(stream);
288 stream->Add(" == \"%s\" then B%d else B%d",
289 hydrogen()->type_literal()->ToCString().get(),
290 true_block_id(), false_block_id());
// Prints "<function>.code_entry = <code object>".
294 void LStoreCodeEntry::PrintDataTo(StringStream* stream) {
296 function()->PrintTo(stream);
297 stream->Add(".code_entry = ");
298 code_object()->PrintTo(stream);
// Prints the base object followed by the offset operand.
302 void LInnerAllocatedObject::PrintDataTo(StringStream* stream) {
304 base_object()->PrintTo(stream);
306 offset()->PrintTo(stream);
// Prints the callee and the call arity ("#<n> / ").
310 void LCallJSFunction::PrintDataTo(StringStream* stream) {
312 function()->PrintTo(stream);
313 stream->Add("#%d / ", arity());
// Prints every operand of the descriptor-based call, then the arity.
317 void LCallWithDescriptor::PrintDataTo(StringStream* stream) {
318 for (int i = 0; i < InputCount(); i++) {
319 InputAt(i)->PrintTo(stream);
322 stream->Add("#%d / ", arity());
// Prints "<context>[<slot>]" for a context slot load.
326 void LLoadContextSlot::PrintDataTo(StringStream* stream) {
327 context()->PrintTo(stream);
328 stream->Add("[%d]", slot_index());
// Prints "<context>[<slot>] <- <value>" for a context slot store.
332 void LStoreContextSlot::PrintDataTo(StringStream* stream) {
333 context()->PrintTo(stream);
334 stream->Add("[%d] <- ", slot_index());
335 value()->PrintTo(stream);
// Prints context, function and arity for an invoke.
339 void LInvokeFunction::PrintDataTo(StringStream* stream) {
341 context()->PrintTo(stream);
343 function()->PrintTo(stream);
344 stream->Add(" #%d / ", arity());
// Prints context, constructor and arity for "new".
348 void LCallNew::PrintDataTo(StringStream* stream) {
350 context()->PrintTo(stream);
352 constructor()->PrintTo(stream);
353 stream->Add(" #%d / ", arity());
// Same as LCallNew but also prints the target array's elements kind.
357 void LCallNewArray::PrintDataTo(StringStream* stream) {
359 context()->PrintTo(stream);
361 constructor()->PrintTo(stream);
362 stream->Add(" #%d / ", arity());
363 ElementsKind kind = hydrogen()->elements_kind();
364 stream->Add(" (%s) ", ElementsKindToString(kind));
// Prints arguments object, length and index operands.
368 void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
369 arguments()->PrintTo(stream);
371 stream->Add(" length ");
372 length()->PrintTo(stream);
374 stream->Add(" index ");
375 index()->PrintTo(stream);
// Reserves the next stack spill slot for a value of the given register
// kind, keeping double-width slots 2-aligned and quad-width (SIMD) slots
// 4-aligned by skipping filler slots.
379 int LPlatformChunk::GetNextSpillIndex(RegisterKind kind) {
381 case GENERAL_REGISTERS: return spill_slot_count_++;
382 case DOUBLE_REGISTERS: {
383 // Skip a slot if necessary so the double-width slot is aligned.
385 spill_slot_count_ |= 1;
387 return spill_slot_count_++;
389 case FLOAT32x4_REGISTERS:
390 case INT32x4_REGISTERS: {
391 // Skip three slots so the quad-width (128-bit) slot is aligned.
392 spill_slot_count_ += 3;
393 num_double_slots_ += 2; // for dynamic frame alignment
394 return spill_slot_count_++;
// Wraps GetNextSpillIndex and materializes the matching LOperand flavor.
403 LOperand* LPlatformChunk::GetNextSpillSlot(RegisterKind kind) {
404 int index = GetNextSpillIndex(kind);
406 case GENERAL_REGISTERS: return LStackSlot::Create(index, zone());
407 case DOUBLE_REGISTERS: return LDoubleStackSlot::Create(index, zone());
408 case FLOAT32x4_REGISTERS: return LFloat32x4StackSlot::Create(index, zone());
409 case INT32x4_REGISTERS: return LInt32x4StackSlot::Create(index, zone());
// Prints "<object>.<access> <- <value>" for a monomorphic named store.
417 void LStoreNamedField::PrintDataTo(StringStream* stream) {
418 object()->PrintTo(stream);
419 hydrogen()->access().PrintTo(stream);
421 value()->PrintTo(stream);
// Prints the generic named store: object, property name, then the value.
425 void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
426 object()->PrintTo(stream);
428 stream->Add(String::cast(*name())->ToCString().get());
430 value()->PrintTo(stream);
// Prints "<elements>[<key>(+ <hoisted offset>)]" for a keyed load.
434 void LLoadKeyed::PrintDataTo(StringStream* stream) {
435 elements()->PrintTo(stream);
437 key()->PrintTo(stream);
438 if (hydrogen()->IsDehoisted()) {
439 stream->Add(" + %d]", additional_index());
// Prints the keyed store; a NULL value operand means the store writes the
// double-hole constant (NaN) directly, so print a placeholder instead.
446 void LStoreKeyed::PrintDataTo(StringStream* stream) {
447 elements()->PrintTo(stream);
449 key()->PrintTo(stream);
450 if (hydrogen()->IsDehoisted()) {
451 stream->Add(" + %d] <-", additional_index());
453 stream->Add("] <- ");
456 if (value() == NULL) {
457 ASSERT(hydrogen()->IsConstantHoleStore() &&
458 hydrogen()->value()->representation().IsDouble());
459 stream->Add("<the hole(nan)>");
461 value()->PrintTo(stream);
// Prints the generic keyed store: object[key] <- value.
466 void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
467 object()->PrintTo(stream);
469 key()->PrintTo(stream);
470 stream->Add("] <- ");
471 value()->PrintTo(stream);
// Prints the object plus the map transition "old -> new" (raw map pointers).
475 void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
476 object()->PrintTo(stream);
477 stream->Add(" %p -> %p", *original_map(), *transitioned_map());
// Builds the Lithium chunk for the whole Hydrogen graph: reserves the
// fixed leading spill slots, then lowers every basic block in order.
// Returns NULL if lowering aborted (bailout reason recorded on the info).
481 LPlatformChunk* LChunkBuilder::Build() {
483 chunk_ = new(zone()) LPlatformChunk(info(), graph());
484 LPhase phase("L_Building chunk", chunk_);
487 // Reserve the first spill slot for the state of dynamic alignment.
488 if (info()->IsOptimizing()) {
489 int alignment_state_index = chunk_->GetNextSpillIndex(GENERAL_REGISTERS);
490 ASSERT_EQ(alignment_state_index, 0);
491 USE(alignment_state_index);
494 // If compiling for OSR, reserve space for the unoptimized frame,
495 // which will be subsumed into this frame.
496 if (graph()->has_osr()) {
497 for (int i = graph()->osr()->UnoptimizedFrameSlots(); i > 0; i--) {
498 chunk_->GetNextSpillIndex(GENERAL_REGISTERS);
502 const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
503 for (int i = 0; i < blocks->length(); i++) {
504 HBasicBlock* next = NULL;
505 if (i < blocks->length() - 1) next = blocks->at(i + 1);
506 DoBasicBlock(blocks->at(i), next);
507 if (is_aborted()) return NULL;
// Records the bailout reason; lowering is then considered aborted.
514 void LChunkBuilder::Abort(BailoutReason reason) {
515 info()->set_bailout_reason(reason);
// Creates an unallocated operand pinned to a specific general register.
520 LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
521 return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
522 Register::ToAllocationIndex(reg));
// Creates an unallocated operand pinned to a specific XMM double register.
526 LUnallocated* LChunkBuilder::ToUnallocated(XMMRegister reg) {
527 return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
528 XMMRegister::ToAllocationIndex(reg));
// The Use* helpers below wrap a Hydrogen value in an LOperand with a
// particular register-allocation policy.
// Use in a caller-chosen fixed general register.
532 LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) {
533 return Use(value, ToUnallocated(fixed_register));
// Use in a caller-chosen fixed XMM register.
537 LOperand* LChunkBuilder::UseFixedDouble(HValue* value, XMMRegister reg) {
538 return Use(value, ToUnallocated(reg));
// Use in any register, live through the whole instruction.
542 LOperand* LChunkBuilder::UseRegister(HValue* value) {
543 return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
// Use in any register, but only guaranteed at the start of the instruction
// (may be reused for the output).
547 LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
549 new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
550 LUnallocated::USED_AT_START));
// Use in a register the instruction is allowed to clobber.
554 LOperand* LChunkBuilder::UseTempRegister(HValue* value) {
555 return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));
// Use with no policy: register or stack slot, allocator's choice.
559 LOperand* LChunkBuilder::Use(HValue* value) {
560 return Use(value, new(zone()) LUnallocated(LUnallocated::NONE));
// Same as Use but only needed at the start of the instruction.
564 LOperand* LChunkBuilder::UseAtStart(HValue* value) {
565 return Use(value, new(zone()) LUnallocated(LUnallocated::NONE,
566 LUnallocated::USED_AT_START));
// A constant can be encoded as an immediate only when it is not in new
// space (new-space objects may move, so they need a register + reloc info).
570 static inline bool CanBeImmediateConstant(HValue* value) {
571 return value->IsConstant() && HConstant::cast(value)->NotInNewSpace();
// The *OrConstant variants fold immediate-safe constants into the
// instruction instead of consuming a register/slot.
575 LOperand* LChunkBuilder::UseOrConstant(HValue* value) {
576 return CanBeImmediateConstant(value)
577 ? chunk_->DefineConstantOperand(HConstant::cast(value))
582 LOperand* LChunkBuilder::UseOrConstantAtStart(HValue* value) {
583 return CanBeImmediateConstant(value)
584 ? chunk_->DefineConstantOperand(HConstant::cast(value))
589 LOperand* LChunkBuilder::UseFixedOrConstant(HValue* value,
590 Register fixed_register) {
591 return CanBeImmediateConstant(value)
592 ? chunk_->DefineConstantOperand(HConstant::cast(value))
593 : UseFixed(value, fixed_register);
597 LOperand* LChunkBuilder::UseRegisterOrConstant(HValue* value) {
598 return CanBeImmediateConstant(value)
599 ? chunk_->DefineConstantOperand(HConstant::cast(value))
600 : UseRegister(value);
604 LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
605 return CanBeImmediateConstant(value)
606 ? chunk_->DefineConstantOperand(HConstant::cast(value))
607 : UseRegisterAtStart(value);
// Caller guarantees the value is a constant.
611 LOperand* LChunkBuilder::UseConstant(HValue* value) {
612 return chunk_->DefineConstantOperand(HConstant::cast(value));
// Accepts anything: constant, register, or stack slot.
616 LOperand* LChunkBuilder::UseAny(HValue* value) {
617 return value->IsConstant()
618 ? chunk_->DefineConstantOperand(HConstant::cast(value))
619 : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
// Core Use: lazily emits values flagged EmitAtUses at this use site,
// then tags the operand with the value's virtual register id.
623 LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
624 if (value->EmitAtUses()) {
625 HInstruction* instr = HInstruction::cast(value);
626 VisitInstruction(instr);
628 operand->set_virtual_register(value->id());
// Core Define: binds the result operand to the current instruction's
// virtual register; the Define* variants below choose the result policy.
633 LInstruction* LChunkBuilder::Define(LTemplateResultInstruction<1>* instr,
634 LUnallocated* result) {
635 result->set_virtual_register(current_instruction_->id());
636 instr->set_result(result);
// Result in any register.
641 LInstruction* LChunkBuilder::DefineAsRegister(
642 LTemplateResultInstruction<1>* instr) {
644 new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
// Result in a specific stack slot.
648 LInstruction* LChunkBuilder::DefineAsSpilled(
649 LTemplateResultInstruction<1>* instr,
652 new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));
// Result shares the register of the first input (two-address ia32 form).
656 LInstruction* LChunkBuilder::DefineSameAsFirst(
657 LTemplateResultInstruction<1>* instr) {
659 new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
// Result in a specific general register.
663 LInstruction* LChunkBuilder::DefineFixed(LTemplateResultInstruction<1>* instr,
665 return Define(instr, ToUnallocated(reg));
// Result in a specific XMM register.
669 LInstruction* LChunkBuilder::DefineFixedDouble(
670 LTemplateResultInstruction<1>* instr,
672 return Define(instr, ToUnallocated(reg));
// Attaches a deoptimization environment built from the current block's
// last Hydrogen environment to the instruction.
676 LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
677 HEnvironment* hydrogen_env = current_block_->last_environment();
678 int argument_index_accumulator = 0;
679 ZoneList<HValue*> objects_to_materialize(0, zone());
680 instr->set_environment(CreateEnvironment(hydrogen_env,
681 &argument_index_accumulator,
682 &objects_to_materialize));
// Marks the instruction as a call site: assigns a pointer map for GC and,
// when lazy deopt could land before the call's side effects are observable,
// also attaches a deopt environment.
687 LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
688 HInstruction* hinstr,
689 CanDeoptimize can_deoptimize) {
690 info()->MarkAsNonDeferredCalling();
696 instr = AssignPointerMap(instr);
698 // If instruction does not have side-effects lazy deoptimization
699 // after the call will try to deoptimize to the point before the call.
700 // Thus we still need to attach environment to this call even if
701 // call sequence can not deoptimize eagerly.
702 bool needs_environment =
703 (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) ||
704 !hinstr->HasObservableSideEffects();
705 if (needs_environment && !instr->HasEnvironment()) {
706 instr = AssignEnvironment(instr);
// Attaches a fresh (empty) pointer map; must not already have one.
713 LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
714 ASSERT(!instr->HasPointerMap());
715 instr->set_pointer_map(new(zone()) LPointerMap(zone()));
// Allocates a scratch register operand with a fresh virtual register;
// aborts lowering if the allocator has run out of virtual registers.
720 LUnallocated* LChunkBuilder::TempRegister() {
721 LUnallocated* operand =
722 new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
723 int vreg = allocator_->GetVirtualRegister();
724 if (!allocator_->AllocationOk()) {
725 Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
728 operand->set_virtual_register(vreg);
// Scratch operand pinned to a specific general register.
733 LOperand* LChunkBuilder::FixedTemp(Register reg) {
734 LUnallocated* operand = ToUnallocated(reg);
735 ASSERT(operand->HasFixedPolicy());
// Scratch operand pinned to a specific XMM register.
740 LOperand* LChunkBuilder::FixedTemp(XMMRegister reg) {
741 LUnallocated* operand = ToUnallocated(reg);
742 ASSERT(operand->HasFixedPolicy());
// Block entries lower to a label carrying the basic block.
747 LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
748 return new(zone()) LLabel(instr->block());
// Dummy uses keep values alive; accept the operand anywhere.
752 LInstruction* LChunkBuilder::DoDummyUse(HDummyUse* instr) {
753 return DefineAsRegister(new(zone()) LDummyUse(UseAny(instr->value())));
// Environment markers generate no Lithium code.
757 LInstruction* LChunkBuilder::DoEnvironmentMarker(HEnvironmentMarker* instr) {
// An unconditional deopt needs only an environment to deopt into.
763 LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
764 return AssignEnvironment(new(zone()) LDeoptimize);
// Lowers SHL/SAR/SHR/ROR. Integer/smi shifts take the left operand in a
// register and the count either as an immediate (masked to 5 bits, matching
// ia32 shift semantics) or fixed in ecx (the only hardware shift-count
// register). Tagged shifts fall through to the generic stub path.
768 LInstruction* LChunkBuilder::DoShift(Token::Value op,
769 HBitwiseBinaryOperation* instr) {
770 if (instr->representation().IsSmiOrInteger32()) {
771 ASSERT(instr->left()->representation().Equals(instr->representation()));
772 ASSERT(instr->right()->representation().Equals(instr->representation()));
773 LOperand* left = UseRegisterAtStart(instr->left());
775 HValue* right_value = instr->right();
776 LOperand* right = NULL;
777 int constant_value = 0;
778 bool does_deopt = false;
779 if (right_value->IsConstant()) {
780 HConstant* constant = HConstant::cast(right_value);
781 right = chunk_->DefineConstantOperand(constant);
782 constant_value = constant->Integer32Value() & 0x1f;
783 // Left shifts can deoptimize if we shift by > 0 and the result cannot be
785 if (instr->representation().IsSmi() && constant_value > 0) {
786 does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToSmi);
789 right = UseFixed(right_value, ecx);
792 // Shift operations can only deoptimize if we do a logical shift by 0 and
793 // the result cannot be truncated to int32.
794 if (op == Token::SHR && constant_value == 0) {
795 if (FLAG_opt_safe_uint32_operations) {
796 does_deopt = !instr->CheckFlag(HInstruction::kUint32);
798 does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToInt32);
802 LInstruction* result =
803 DefineSameAsFirst(new(zone()) LShiftI(op, left, right, does_deopt));
804 return does_deopt ? AssignEnvironment(result) : result;
806 return DoArithmeticT(op, instr);
// Lowers double arithmetic. MOD has no SSE instruction, so it is marked as
// a call (runtime/fmod); other operators emit the two-address SSE form with
// the result sharing the first input's register.
811 LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
812 HArithmeticBinaryOperation* instr) {
813 ASSERT(instr->representation().IsDouble());
814 ASSERT(instr->left()->representation().IsDouble());
815 ASSERT(instr->right()->representation().IsDouble());
816 if (op == Token::MOD) {
817 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
818 LOperand* right = UseRegisterAtStart(instr->BetterRightOperand());
819 LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
820 return MarkAsCall(DefineSameAsFirst(result), instr);
822 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
823 LOperand* right = UseRegisterAtStart(instr->BetterRightOperand());
824 LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
825 return DefineSameAsFirst(result);
// Lowers tagged (generic) binary operations as a stub call with the ia32
// binary-op calling convention: context in esi, left in edx, right in eax,
// result in eax.
830 LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
831 HBinaryOperation* instr) {
832 HValue* left = instr->left();
833 HValue* right = instr->right();
834 ASSERT(left->representation().IsTagged());
835 ASSERT(right->representation().IsTagged());
836 LOperand* context = UseFixed(instr->context(), esi);
837 LOperand* left_operand = UseFixed(left, edx);
838 LOperand* right_operand = UseFixed(right, eax);
839 LArithmeticT* result =
840 new(zone()) LArithmeticT(op, context, left_operand, right_operand);
841 return MarkAsCall(DefineFixed(result, eax), instr);
// Lowers one Hydrogen basic block: establishes the block's incoming
// environment (start env, copied predecessor env, or phi-merged env),
// then visits each instruction, and records the block's Lithium
// instruction index range and outgoing argument count.
845 void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) {
846 ASSERT(is_building());
847 current_block_ = block;
848 next_block_ = next_block;
849 if (block->IsStartBlock()) {
850 block->UpdateEnvironment(graph_->start_environment());
852 } else if (block->predecessors()->length() == 1) {
853 // We have a single predecessor => copy environment and outgoing
854 // argument count from the predecessor.
855 ASSERT(block->phis()->length() == 0);
856 HBasicBlock* pred = block->predecessors()->at(0);
857 HEnvironment* last_environment = pred->last_environment();
858 ASSERT(last_environment != NULL);
859 // Only copy the environment, if it is later used again.
860 if (pred->end()->SecondSuccessor() == NULL) {
861 ASSERT(pred->end()->FirstSuccessor() == block);
// A forward successor may still need the predecessor's environment,
// so clone it instead of sharing.
863 if (pred->end()->FirstSuccessor()->block_id() > block->block_id() ||
864 pred->end()->SecondSuccessor()->block_id() > block->block_id()) {
865 last_environment = last_environment->Copy();
868 block->UpdateEnvironment(last_environment);
869 ASSERT(pred->argument_count() >= 0);
870 argument_count_ = pred->argument_count();
872 // We are at a state join => process phis.
873 HBasicBlock* pred = block->predecessors()->at(0);
874 // No need to copy the environment, it cannot be used later.
875 HEnvironment* last_environment = pred->last_environment();
876 for (int i = 0; i < block->phis()->length(); ++i) {
877 HPhi* phi = block->phis()->at(i);
878 if (phi->HasMergedIndex()) {
879 last_environment->SetValueAt(phi->merged_index(), phi);
// Deleted phi slots are refilled with undefined so environment
// lengths stay consistent.
882 for (int i = 0; i < block->deleted_phis()->length(); ++i) {
883 if (block->deleted_phis()->at(i) < last_environment->length()) {
884 last_environment->SetValueAt(block->deleted_phis()->at(i),
885 graph_->GetConstantUndefined());
888 block->UpdateEnvironment(last_environment);
889 // Pick up the outgoing argument count of one of the predecessors.
890 argument_count_ = pred->argument_count();
892 HInstruction* current = block->first();
893 int start = chunk_->instructions()->length();
894 while (current != NULL && !is_aborted()) {
895 // Code for constants in registers is generated lazily.
896 if (!current->EmitAtUses()) {
897 VisitInstruction(current);
899 current = current->next();
901 int end = chunk_->instructions()->length() - 1;
903 block->set_first_instruction_index(start);
904 block->set_last_instruction_index(end);
906 block->set_argument_count(argument_count_);
908 current_block_ = NULL;
// Lowers a single Hydrogen instruction to Lithium: either replaces it with
// dummy uses (dead/replaceable instructions) or dispatches to its
// CompileToLithium visitor, then applies bookkeeping — argument-count
// tracking, the fixed-register/use-at-start invariant check, stress-mode
// pointer maps/environments, the x87 clobber barrier before joins, and a
// lazy-bailout point after calls.
912 void LChunkBuilder::VisitInstruction(HInstruction* current) {
913 HInstruction* old_current = current_instruction_;
914 current_instruction_ = current;
916 LInstruction* instr = NULL;
917 if (current->CanReplaceWithDummyUses()) {
918 if (current->OperandCount() == 0) {
919 instr = DefineAsRegister(new(zone()) LDummy());
921 ASSERT(!current->OperandAt(0)->IsControlInstruction());
922 instr = DefineAsRegister(new(zone())
923 LDummyUse(UseAny(current->OperandAt(0))));
// Remaining operands become standalone dummy uses so they stay live.
925 for (int i = 1; i < current->OperandCount(); ++i) {
926 if (current->OperandAt(i)->IsControlInstruction()) continue;
927 LInstruction* dummy =
928 new(zone()) LDummyUse(UseAny(current->OperandAt(i)));
929 dummy->set_hydrogen_value(current);
930 chunk_->AddInstruction(dummy, current_block_);
933 instr = current->CompileToLithium(this);
936 argument_count_ += current->argument_delta();
937 ASSERT(argument_count_ >= 0);
940 // Associate the hydrogen instruction first, since we may need it for
941 // the ClobbersRegisters() or ClobbersDoubleRegisters() calls below.
942 instr->set_hydrogen_value(current);
945 // Make sure that the lithium instruction has either no fixed register
946 // constraints in temps or the result OR no uses that are only used at
947 // start. If this invariant doesn't hold, the register allocator can decide
948 // to insert a split of a range immediately before the instruction due to an
949 // already allocated register needing to be used for the instruction's fixed
950 // register constraint. In this case, The register allocator won't see an
951 // interference between the split child and the use-at-start (it would if
952 // the it was just a plain use), so it is free to move the split child into
953 // the same register that is used for the use-at-start.
954 // See https://code.google.com/p/chromium/issues/detail?id=201590
955 if (!(instr->ClobbersRegisters() && instr->ClobbersDoubleRegisters())) {
957 int used_at_start = 0;
958 for (UseIterator it(instr); !it.Done(); it.Advance()) {
959 LUnallocated* operand = LUnallocated::cast(it.Current());
960 if (operand->IsUsedAtStart()) ++used_at_start;
962 if (instr->Output() != NULL) {
963 if (LUnallocated::cast(instr->Output())->HasFixedPolicy()) ++fixed;
965 for (TempIterator it(instr); !it.Done(); it.Advance()) {
966 LUnallocated* operand = LUnallocated::cast(it.Current());
967 if (operand->HasFixedPolicy()) ++fixed;
969 ASSERT(fixed == 0 || used_at_start == 0);
// Stress flags force extra pointer maps / environments for testing.
973 if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) {
974 instr = AssignPointerMap(instr);
976 if (FLAG_stress_environments && !instr->HasEnvironment()) {
977 instr = AssignEnvironment(instr);
979 if (!CpuFeatures::IsSafeForSnapshot(SSE2) && instr->IsGoto() &&
980 LGoto::cast(instr)->jumps_to_join()) {
981 // TODO(olivf) Since phis of spilled values are joined as registers
982 // (not in the stack slot), we need to allow the goto gaps to keep one
983 // x87 register alive. To ensure all other values are still spilled, we
984 // insert a fpu register barrier right before.
985 LClobberDoubles* clobber = new(zone()) LClobberDoubles();
986 clobber->set_hydrogen_value(current);
987 chunk_->AddInstruction(clobber, current_block_);
989 chunk_->AddInstruction(instr, current_block_);
// Calls get a lazy-bailout point; if the call has observable side
// effects, deopt targets the following HSimulate instead of the call.
991 if (instr->IsCall()) {
992 HValue* hydrogen_value_for_lazy_bailout = current;
993 LInstruction* instruction_needing_environment = NULL;
994 if (current->HasObservableSideEffects()) {
995 HSimulate* sim = HSimulate::cast(current->next());
996 instruction_needing_environment = instr;
997 sim->ReplayEnvironment(current_block_->last_environment());
998 hydrogen_value_for_lazy_bailout = sim;
1000 LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
1001 bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
1002 chunk_->AddInstruction(bailout, current_block_);
1003 if (instruction_needing_environment != NULL) {
1004 // Store the lazy deopt environment with the instruction if needed.
1005 // Right now it is only used for LInstanceOfKnownGlobal.
1006 instruction_needing_environment->
1007 SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
1011 current_instruction_ = old_current;
// Unconditional jump to the single successor.
1015 LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
1016 return new(zone()) LGoto(instr->FirstSuccessor());
// Lowers a conditional branch. Elidable control flow becomes a goto; the
// general case tests the value (with a map temp when the expected types
// need a map check) and deopts when the type feedback doesn't cover the
// tagged value's actual type.
1020 LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
1021 LInstruction* goto_instr = CheckElideControlInstruction(instr);
1022 if (goto_instr != NULL) return goto_instr;
1024 HValue* value = instr->value();
1025 Representation r = value->representation();
1026 HType type = value->type();
1027 ToBooleanStub::Types expected = instr->expected_input_types();
1028 if (expected.IsEmpty()) expected = ToBooleanStub::Types::Generic();
1030 bool easy_case = !r.IsTagged() || type.IsBoolean() || type.IsSmi() ||
1031 type.IsJSArray() || type.IsHeapNumber() || type.IsString();
1032 LOperand* temp = !easy_case && expected.NeedsMap() ? TempRegister() : NULL;
1033 LInstruction* branch = new(zone()) LBranch(UseRegister(value), temp);
1035 ((!expected.Contains(ToBooleanStub::SMI) && expected.NeedsMap()) ||
1036 !expected.IsGeneric())) {
1037 branch = AssignEnvironment(branch);
// Debug break lowers to a dedicated instruction with no operands.
1043 LInstruction* LChunkBuilder::DoDebugBreak(HDebugBreak* instr) {
1044 return new(zone()) LDebugBreak();
// Map comparison branch; may elide to a goto like other control ops.
1048 LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
1049 LInstruction* goto_instr = CheckElideControlInstruction(instr);
1050 if (goto_instr != NULL) return goto_instr;
1052 ASSERT(instr->value()->representation().IsTagged());
1053 LOperand* value = UseRegisterAtStart(instr->value());
1054 return new(zone()) LCmpMapAndBranch(value);
// Reading the arguments length requires a frame to be materialized.
1058 LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* length) {
1059 info()->MarkAsRequiresFrame();
1060 return DefineAsRegister(new(zone()) LArgumentsLength(Use(length->value())));
// Likewise the arguments elements backing store.
1064 LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
1065 info()->MarkAsRequiresFrame();
1066 return DefineAsRegister(new(zone()) LArgumentsElements);
// instanceof is a stub call; operands use the InstanceofStub's fixed
// register convention, context in esi, result in eax.
1070 LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
1071 LOperand* left = UseFixed(instr->left(), InstanceofStub::left());
1072 LOperand* right = UseFixed(instr->right(), InstanceofStub::right());
1073 LOperand* context = UseFixed(instr->context(), esi);
1074 LInstanceOf* result = new(zone()) LInstanceOf(context, left, right);
1075 return MarkAsCall(DefineFixed(result, eax), instr);
// instanceof against a known global function — still a stub call, but
// with the right-hand side baked into the stub.
1079 LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
1080 HInstanceOfKnownGlobal* instr) {
1081 LInstanceOfKnownGlobal* result =
1082 new(zone()) LInstanceOfKnownGlobal(
1083 UseFixed(instr->context(), esi),
1084 UseFixed(instr->left(), InstanceofStub::left()),
1086 return MarkAsCall(DefineFixed(result, eax), instr);
// Wraps the receiver for sloppy-mode calls; can deopt, result aliases
// the receiver register.
1090 LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
1091 LOperand* receiver = UseRegister(instr->receiver());
1092 LOperand* function = UseRegister(instr->function());
1093 LOperand* temp = TempRegister();
1094 LWrapReceiver* result =
1095 new(zone()) LWrapReceiver(receiver, function, temp);
1096 return AssignEnvironment(DefineSameAsFirst(result));
// Function.prototype.apply support; fixed registers per its stub
// convention, and it may deoptimize eagerly.
1100 LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
1101 LOperand* function = UseFixed(instr->function(), edi);
1102 LOperand* receiver = UseFixed(instr->receiver(), eax);
1103 LOperand* length = UseFixed(instr->length(), ebx);
1104 LOperand* elements = UseFixed(instr->elements(), ecx);
1105 LApplyArguments* result = new(zone()) LApplyArguments(function,
1109 return MarkAsCall(DefineFixed(result, eax), instr, CAN_DEOPTIMIZE_EAGERLY);
// Pushing a call argument accepts the operand in any form.
1113 LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
1114 LOperand* argument = UseAny(instr->argument());
1115 return new(zone()) LPushArgument(argument);
1119 LInstruction* LChunkBuilder::DoStoreCodeEntry(
1120 HStoreCodeEntry* store_code_entry) {
1121 LOperand* function = UseRegister(store_code_entry->function());
1122 LOperand* code_object = UseTempRegister(store_code_entry->code_object());
1123 return new(zone()) LStoreCodeEntry(function, code_object);
1127 LInstruction* LChunkBuilder::DoInnerAllocatedObject(
1128 HInnerAllocatedObject* instr) {
1129 LOperand* base_object = UseRegisterAtStart(instr->base_object());
1130 LOperand* offset = UseRegisterOrConstantAtStart(instr->offset());
1131 return DefineAsRegister(
1132 new(zone()) LInnerAllocatedObject(base_object, offset));
// Materializes the current JSFunction; produces nothing when unused.
1136 LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
1137 return instr->HasNoUses()
// NOTE(review): the true-arm of the conditional is on a line not visible
// in this listing.
1139 : DefineAsRegister(new(zone()) LThisFunction);
1143 LInstruction* LChunkBuilder::DoContext(HContext* instr) {
1144 if (instr->HasNoUses()) return NULL;
1146 if (info()->IsStub()) {
1147 return DefineFixed(new(zone()) LContext, esi);
1150 return DefineAsRegister(new(zone()) LContext);
1154 LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
1155 LOperand* context = UseFixed(instr->context(), esi);
1156 return MarkAsCall(new(zone()) LDeclareGlobals(context), instr);
1160 LInstruction* LChunkBuilder::DoCallJSFunction(
1161 HCallJSFunction* instr) {
1162 LOperand* function = UseFixed(instr->function(), edi);
1164 LCallJSFunction* result = new(zone()) LCallJSFunction(function);
1166 return MarkAsCall(DefineFixed(result, eax), instr, CANNOT_DEOPTIMIZE_EAGERLY);
1170 LInstruction* LChunkBuilder::DoCallWithDescriptor(
1171 HCallWithDescriptor* instr) {
1172 const CallInterfaceDescriptor* descriptor = instr->descriptor();
1174 LOperand* target = UseRegisterOrConstantAtStart(instr->target());
1175 ZoneList<LOperand*> ops(instr->OperandCount(), zone());
1176 ops.Add(target, zone());
1177 for (int i = 1; i < instr->OperandCount(); i++) {
1178 LOperand* op = UseFixed(instr->OperandAt(i),
1179 descriptor->GetParameterRegister(i - 1));
1180 ops.Add(op, zone());
1183 LCallWithDescriptor* result = new(zone()) LCallWithDescriptor(
1184 descriptor, ops, zone());
1185 return MarkAsCall(DefineFixed(result, eax), instr, CANNOT_DEOPTIMIZE_EAGERLY);
1189 LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
1190 LOperand* context = UseFixed(instr->context(), esi);
1191 LOperand* function = UseFixed(instr->function(), edi);
1192 LInvokeFunction* result = new(zone()) LInvokeFunction(context, function);
1193 return MarkAsCall(DefineFixed(result, eax), instr, CANNOT_DEOPTIMIZE_EAGERLY);
// Dispatches a unary math operation to its specialized lowering routine.
1197 LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
1198 switch (instr->op()) {
1199 case kMathFloor: return DoMathFloor(instr);
1200 case kMathRound: return DoMathRound(instr);
1201 case kMathAbs: return DoMathAbs(instr);
1202 case kMathLog: return DoMathLog(instr);
1203 case kMathExp: return DoMathExp(instr);
1204 case kMathSqrt: return DoMathSqrt(instr);
1205 case kMathPowHalf: return DoMathPowHalf(instr);
1206 case kMathClz32: return DoMathClz32(instr);
// NOTE(review): the default case and closing braces are on lines not
// visible in this listing.
1214 LInstruction* LChunkBuilder::DoMathFloor(HUnaryMathOperation* instr) {
1215 LOperand* input = UseRegisterAtStart(instr->value());
1216 LMathFloor* result = new(zone()) LMathFloor(input);
1217 return AssignEnvironment(DefineAsRegister(result));
1221 LInstruction* LChunkBuilder::DoMathRound(HUnaryMathOperation* instr) {
1222 LOperand* input = UseRegister(instr->value());
1223 LOperand* temp = FixedTemp(xmm4);
1224 LMathRound* result = new(zone()) LMathRound(input, temp);
1225 return AssignEnvironment(DefineAsRegister(result));
// Math.abs: the result aliases the input register. Tagged inputs may
// allocate (pointer map) and non-double inputs can deoptimize.
1229 LInstruction* LChunkBuilder::DoMathAbs(HUnaryMathOperation* instr) {
1230 LOperand* context = UseAny(instr->context()); // Deferred use.
1231 LOperand* input = UseRegisterAtStart(instr->value());
1232 LInstruction* result =
1233 DefineSameAsFirst(new(zone()) LMathAbs(context, input));
1234 Representation r = instr->value()->representation();
1235 if (!r.IsDouble() && !r.IsSmiOrInteger32()) result = AssignPointerMap(result);
1236 if (!r.IsDouble()) result = AssignEnvironment(result);
// NOTE(review): the trailing "return result;" is on a line not visible in
// this listing.
1241 LInstruction* LChunkBuilder::DoMathLog(HUnaryMathOperation* instr) {
1242 ASSERT(instr->representation().IsDouble());
1243 ASSERT(instr->value()->representation().IsDouble());
1244 LOperand* input = UseRegisterAtStart(instr->value());
1245 return MarkAsCall(DefineSameAsFirst(new(zone()) LMathLog(input)), instr);
1249 LInstruction* LChunkBuilder::DoMathClz32(HUnaryMathOperation* instr) {
1250 LOperand* input = UseRegisterAtStart(instr->value());
1251 LMathClz32* result = new(zone()) LMathClz32(input);
1252 return DefineAsRegister(result);
1256 LInstruction* LChunkBuilder::DoMathExp(HUnaryMathOperation* instr) {
1257 ASSERT(instr->representation().IsDouble());
1258 ASSERT(instr->value()->representation().IsDouble());
1259 LOperand* value = UseTempRegister(instr->value());
1260 LOperand* temp1 = TempRegister();
1261 LOperand* temp2 = TempRegister();
1262 LMathExp* result = new(zone()) LMathExp(value, temp1, temp2);
1263 return DefineAsRegister(result);
1267 LInstruction* LChunkBuilder::DoMathSqrt(HUnaryMathOperation* instr) {
1268 LOperand* input = UseRegisterAtStart(instr->value());
1269 LMathSqrt* result = new(zone()) LMathSqrt(input);
1270 return DefineSameAsFirst(result);
1274 LInstruction* LChunkBuilder::DoMathPowHalf(HUnaryMathOperation* instr) {
1275 LOperand* input = UseRegisterAtStart(instr->value());
1276 LOperand* temp = TempRegister();
1277 LMathPowHalf* result = new(zone()) LMathPowHalf(input, temp);
1278 return DefineSameAsFirst(result);
1282 LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
1283 LOperand* context = UseFixed(instr->context(), esi);
1284 LOperand* constructor = UseFixed(instr->constructor(), edi);
1285 LCallNew* result = new(zone()) LCallNew(context, constructor);
1286 return MarkAsCall(DefineFixed(result, eax), instr);
1290 LInstruction* LChunkBuilder::DoCallNewArray(HCallNewArray* instr) {
1291 LOperand* context = UseFixed(instr->context(), esi);
1292 LOperand* constructor = UseFixed(instr->constructor(), edi);
1293 LCallNewArray* result = new(zone()) LCallNewArray(context, constructor);
1294 return MarkAsCall(DefineFixed(result, eax), instr);
1298 LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
1299 LOperand* context = UseFixed(instr->context(), esi);
1300 LOperand* function = UseFixed(instr->function(), edi);
1301 LCallFunction* call = new(zone()) LCallFunction(context, function);
1302 return MarkAsCall(DefineFixed(call, eax), instr);
1306 LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
1307 LOperand* context = UseFixed(instr->context(), esi);
1308 return MarkAsCall(DefineFixed(new(zone()) LCallRuntime(context), eax), instr);
1312 LInstruction* LChunkBuilder::DoRor(HRor* instr) {
1313 return DoShift(Token::ROR, instr);
1317 LInstruction* LChunkBuilder::DoShr(HShr* instr) {
1318 return DoShift(Token::SHR, instr);
1322 LInstruction* LChunkBuilder::DoSar(HSar* instr) {
1323 return DoShift(Token::SAR, instr);
1327 LInstruction* LChunkBuilder::DoShl(HShl* instr) {
1328 return DoShift(Token::SHL, instr);
1332 LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
1333 if (instr->representation().IsSmiOrInteger32()) {
1334 ASSERT(instr->left()->representation().Equals(instr->representation()));
1335 ASSERT(instr->right()->representation().Equals(instr->representation()));
1336 ASSERT(instr->CheckFlag(HValue::kTruncatingToInt32));
1338 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
1339 LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand());
1340 return DefineSameAsFirst(new(zone()) LBitI(left, right));
1342 return DoArithmeticT(instr->op(), instr);
// Division by a power-of-two constant. An environment is attached when a
// minus-zero result, kMinInt / -1 overflow, or a non-exact quotient with
// non-truncating uses could require a deopt.
1347 LInstruction* LChunkBuilder::DoDivByPowerOf2I(HDiv* instr) {
1348 ASSERT(instr->representation().IsSmiOrInteger32());
1349 ASSERT(instr->left()->representation().Equals(instr->representation()));
1350 ASSERT(instr->right()->representation().Equals(instr->representation()));
1351 LOperand* dividend = UseRegister(instr->left());
1352 int32_t divisor = instr->right()->GetInteger32Constant();
1353 LInstruction* result = DefineAsRegister(new(zone()) LDivByPowerOf2I(
1354 dividend, divisor));
1355 if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1356 (instr->CheckFlag(HValue::kCanOverflow) && divisor == -1) ||
1357 (!instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) &&
1358 divisor != 1 && divisor != -1)) {
1359 result = AssignEnvironment(result);
// NOTE(review): the closing brace and "return result;" are on lines not
// visible in this listing.
// Division by a general constant, via the multiply-high trick: requires
// the fixed eax/edx pair and produces its result in edx.
1365 LInstruction* LChunkBuilder::DoDivByConstI(HDiv* instr) {
1366 ASSERT(instr->representation().IsInteger32());
1367 ASSERT(instr->left()->representation().Equals(instr->representation()));
1368 ASSERT(instr->right()->representation().Equals(instr->representation()));
1369 LOperand* dividend = UseRegister(instr->left());
1370 int32_t divisor = instr->right()->GetInteger32Constant();
1371 LOperand* temp1 = FixedTemp(eax);
1372 LOperand* temp2 = FixedTemp(edx);
1373 LInstruction* result = DefineFixed(new(zone()) LDivByConstI(
1374 dividend, divisor, temp1, temp2), edx);
// NOTE(review): the head of the deopt condition (likely a divisor == 0
// check) is on a line not visible in this listing.
1376 (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1377 !instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) {
1378 result = AssignEnvironment(result);
// NOTE(review): the closing brace and "return result;" are on lines not
// visible in this listing.
// General register division: ia32 idiv requires the dividend in eax and
// clobbers edx; the quotient is defined fixed in eax.
1384 LInstruction* LChunkBuilder::DoDivI(HBinaryOperation* instr) {
1385 ASSERT(instr->representation().IsSmiOrInteger32());
1386 ASSERT(instr->left()->representation().Equals(instr->representation()));
1387 ASSERT(instr->right()->representation().Equals(instr->representation()));
1388 LOperand* dividend = UseFixed(instr->left(), eax);
1389 LOperand* divisor = UseRegister(instr->right());
1390 LOperand* temp = FixedTemp(edx);
1391 LInstruction* result = DefineFixed(new(zone()) LDivI(
1392 dividend, divisor, temp), eax);
1393 if (instr->CheckFlag(HValue::kCanBeDivByZero) ||
1394 instr->CheckFlag(HValue::kBailoutOnMinusZero) ||
1395 instr->CheckFlag(HValue::kCanOverflow) ||
1396 (!instr->IsMathFloorOfDiv() &&
1397 !instr->CheckFlag(HValue::kAllUsesTruncatingToInt32))) {
1398 result = AssignEnvironment(result);
// NOTE(review): the closing brace and "return result;" are on lines not
// visible in this listing.
1404 LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
1405 if (instr->representation().IsSmiOrInteger32()) {
1406 if (instr->RightIsPowerOf2()) {
1407 return DoDivByPowerOf2I(instr);
1408 } else if (instr->right()->IsConstant()) {
1409 return DoDivByConstI(instr);
1411 return DoDivI(instr);
1413 } else if (instr->representation().IsDouble()) {
1414 return DoArithmeticD(Token::DIV, instr);
1416 return DoArithmeticT(Token::DIV, instr);
// Flooring division by a power-of-two constant; the result reuses the
// dividend's register. Deopts for -0 or kMinInt / -1.
1421 LInstruction* LChunkBuilder::DoFlooringDivByPowerOf2I(HMathFloorOfDiv* instr) {
1422 LOperand* dividend = UseRegisterAtStart(instr->left());
1423 int32_t divisor = instr->right()->GetInteger32Constant();
1424 LInstruction* result = DefineSameAsFirst(new(zone()) LFlooringDivByPowerOf2I(
1425 dividend, divisor));
1426 if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1427 (instr->CheckFlag(HValue::kLeftCanBeMinInt) && divisor == -1)) {
1428 result = AssignEnvironment(result);
// NOTE(review): the closing brace and "return result;" are on lines not
// visible in this listing.
// Flooring division by a general constant: multiply-high lowering with
// the fixed eax/edx pair; an extra temp is only needed when the dividend
// sign cannot be proven to match the divisor's.
1434 LInstruction* LChunkBuilder::DoFlooringDivByConstI(HMathFloorOfDiv* instr) {
1435 ASSERT(instr->representation().IsInteger32());
1436 ASSERT(instr->left()->representation().Equals(instr->representation()));
1437 ASSERT(instr->right()->representation().Equals(instr->representation()));
1438 LOperand* dividend = UseRegister(instr->left());
1439 int32_t divisor = instr->right()->GetInteger32Constant();
1440 LOperand* temp1 = FixedTemp(eax);
1441 LOperand* temp2 = FixedTemp(edx);
// NOTE(review): the declaration that this conditional initializes (an
// additional temp operand) is on a line not visible in this listing.
1443 ((divisor > 0 && !instr->CheckFlag(HValue::kLeftCanBeNegative)) ||
1444 (divisor < 0 && !instr->CheckFlag(HValue::kLeftCanBePositive))) ?
1445 NULL : TempRegister();
1446 LInstruction* result =
1447 DefineFixed(new(zone()) LFlooringDivByConstI(dividend,
// NOTE(review): the remaining constructor arguments and the head of the
// deopt condition are on lines not visible in this listing.
1454 (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0)) {
1455 result = AssignEnvironment(result);
// NOTE(review): the closing brace and "return result;" are on lines not
// visible in this listing.
1461 LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
1462 if (instr->RightIsPowerOf2()) {
1463 return DoFlooringDivByPowerOf2I(instr);
1464 } else if (instr->right()->IsConstant()) {
1465 return DoFlooringDivByConstI(instr);
1467 return DoDivI(instr);
// Modulus by a power-of-two constant; the result reuses the dividend's
// register and only the minus-zero case can deoptimize.
1472 LInstruction* LChunkBuilder::DoModByPowerOf2I(HMod* instr) {
1473 ASSERT(instr->representation().IsSmiOrInteger32());
1474 ASSERT(instr->left()->representation().Equals(instr->representation()));
1475 ASSERT(instr->right()->representation().Equals(instr->representation()));
1476 LOperand* dividend = UseRegisterAtStart(instr->left());
1477 int32_t divisor = instr->right()->GetInteger32Constant();
1478 LInstruction* result = DefineSameAsFirst(new(zone()) LModByPowerOf2I(
1479 dividend, divisor));
1480 if (instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1481 result = AssignEnvironment(result);
// NOTE(review): the closing brace and "return result;" are on lines not
// visible in this listing.
// Modulus by a general constant via multiply-high: needs the fixed
// eax/edx pair; the remainder is defined fixed in eax.
1487 LInstruction* LChunkBuilder::DoModByConstI(HMod* instr) {
1488 ASSERT(instr->representation().IsSmiOrInteger32());
1489 ASSERT(instr->left()->representation().Equals(instr->representation()));
1490 ASSERT(instr->right()->representation().Equals(instr->representation()));
1491 LOperand* dividend = UseRegister(instr->left());
1492 int32_t divisor = instr->right()->GetInteger32Constant();
1493 LOperand* temp1 = FixedTemp(eax);
1494 LOperand* temp2 = FixedTemp(edx);
1495 LInstruction* result = DefineFixed(new(zone()) LModByConstI(
1496 dividend, divisor, temp1, temp2), eax);
1497 if (divisor == 0 || instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1498 result = AssignEnvironment(result);
// NOTE(review): the closing brace and "return result;" are on lines not
// visible in this listing.
// General register modulus: ia32 idiv needs the dividend in eax; the
// remainder comes back in edx, which is both temp and result here.
1504 LInstruction* LChunkBuilder::DoModI(HMod* instr) {
1505 ASSERT(instr->representation().IsSmiOrInteger32());
1506 ASSERT(instr->left()->representation().Equals(instr->representation()));
1507 ASSERT(instr->right()->representation().Equals(instr->representation()));
1508 LOperand* dividend = UseFixed(instr->left(), eax);
1509 LOperand* divisor = UseRegister(instr->right());
1510 LOperand* temp = FixedTemp(edx);
1511 LInstruction* result = DefineFixed(new(zone()) LModI(
1512 dividend, divisor, temp), edx);
1513 if (instr->CheckFlag(HValue::kCanBeDivByZero) ||
1514 instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1515 result = AssignEnvironment(result);
// NOTE(review): the closing brace and "return result;" are on lines not
// visible in this listing.
1521 LInstruction* LChunkBuilder::DoMod(HMod* instr) {
1522 if (instr->representation().IsSmiOrInteger32()) {
1523 if (instr->RightIsPowerOf2()) {
1524 return DoModByPowerOf2I(instr);
1525 } else if (instr->right()->IsConstant()) {
1526 return DoModByConstI(instr);
1528 return DoModI(instr);
1530 } else if (instr->representation().IsDouble()) {
1531 return DoArithmeticD(Token::MOD, instr);
1533 return DoArithmeticT(Token::MOD, instr);
1538 LInstruction* LChunkBuilder::DoMul(HMul* instr) {
1539 if (instr->representation().IsSmiOrInteger32()) {
1540 ASSERT(instr->left()->representation().Equals(instr->representation()));
1541 ASSERT(instr->right()->representation().Equals(instr->representation()));
1542 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
1543 LOperand* right = UseOrConstant(instr->BetterRightOperand());
1544 LOperand* temp = NULL;
1545 if (instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1546 temp = TempRegister();
1548 LMulI* mul = new(zone()) LMulI(left, right, temp);
1549 if (instr->CheckFlag(HValue::kCanOverflow) ||
1550 instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1551 AssignEnvironment(mul);
1553 return DefineSameAsFirst(mul);
1554 } else if (instr->representation().IsDouble()) {
1555 return DoArithmeticD(Token::MUL, instr);
1557 return DoArithmeticT(Token::MUL, instr);
// Subtraction: integer subtract reuses the left register and deopts on
// overflow; doubles and tagged values take the other arithmetic paths.
1562 LInstruction* LChunkBuilder::DoSub(HSub* instr) {
1563 if (instr->representation().IsSmiOrInteger32()) {
1564 ASSERT(instr->left()->representation().Equals(instr->representation()));
1565 ASSERT(instr->right()->representation().Equals(instr->representation()));
1566 LOperand* left = UseRegisterAtStart(instr->left());
1567 LOperand* right = UseOrConstantAtStart(instr->right());
1568 LSubI* sub = new(zone()) LSubI(left, right);
1569 LInstruction* result = DefineSameAsFirst(sub);
1570 if (instr->CheckFlag(HValue::kCanOverflow)) {
1571 result = AssignEnvironment(result);
// NOTE(review): a closing brace and the "return result;" for this branch
// are on lines not visible in this listing.
1574 } else if (instr->representation().IsDouble()) {
1575 return DoArithmeticD(Token::SUB, instr);
1577 return DoArithmeticT(Token::SUB, instr);
// Addition: integer adds may be lowered with lea (3-operand form) when
// that preserves inputs for other uses; external (pointer) adds reuse the
// same lea trick; doubles and tagged values take the arithmetic paths.
1582 LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
1583 if (instr->representation().IsSmiOrInteger32()) {
1584 ASSERT(instr->left()->representation().Equals(instr->representation()));
1585 ASSERT(instr->right()->representation().Equals(instr->representation()));
1586 // Check to see if it would be advantageous to use an lea instruction rather
1587 // than an add. This is the case when no overflow check is needed and there
1588 // are multiple uses of the add's inputs, so using a 3-register add will
1589 // preserve all input values for later uses.
1590 bool use_lea = LAddI::UseLea(instr);
1591 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
1592 HValue* right_candidate = instr->BetterRightOperand();
1593 LOperand* right = use_lea
1594 ? UseRegisterOrConstantAtStart(right_candidate)
1595 : UseOrConstantAtStart(right_candidate);
1596 LAddI* add = new(zone()) LAddI(left, right);
1597 bool can_overflow = instr->CheckFlag(HValue::kCanOverflow);
1598 LInstruction* result = use_lea
1599 ? DefineAsRegister(add)
1600 : DefineSameAsFirst(add);
// NOTE(review): the "if (can_overflow) {" guard around this assignment is
// on a line not visible in this listing.
1602 result = AssignEnvironment(result);
// NOTE(review): a closing brace and the "return result;" for this branch
// are on lines not visible in this listing.
1605 } else if (instr->representation().IsDouble()) {
1606 return DoArithmeticD(Token::ADD, instr);
1607 } else if (instr->representation().IsExternal()) {
1608 ASSERT(instr->left()->representation().IsExternal());
1609 ASSERT(instr->right()->representation().IsInteger32());
1610 ASSERT(!instr->CheckFlag(HValue::kCanOverflow));
1611 bool use_lea = LAddI::UseLea(instr);
1612 LOperand* left = UseRegisterAtStart(instr->left());
1613 HValue* right_candidate = instr->right();
1614 LOperand* right = use_lea
1615 ? UseRegisterOrConstantAtStart(right_candidate)
1616 : UseOrConstantAtStart(right_candidate);
1617 LAddI* add = new(zone()) LAddI(left, right);
1618 LInstruction* result = use_lea
1619 ? DefineAsRegister(add)
1620 : DefineSameAsFirst(add);
// NOTE(review): the "return result;" for the external branch is on lines
// not visible in this listing.
1623 return DoArithmeticT(Token::ADD, instr);
1628 LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) {
1629 LOperand* left = NULL;
1630 LOperand* right = NULL;
1631 if (instr->representation().IsSmiOrInteger32()) {
1632 ASSERT(instr->left()->representation().Equals(instr->representation()));
1633 ASSERT(instr->right()->representation().Equals(instr->representation()));
1634 left = UseRegisterAtStart(instr->BetterLeftOperand());
1635 right = UseOrConstantAtStart(instr->BetterRightOperand());
1637 ASSERT(instr->representation().IsDouble());
1638 ASSERT(instr->left()->representation().IsDouble());
1639 ASSERT(instr->right()->representation().IsDouble());
1640 left = UseRegisterAtStart(instr->left());
1641 right = UseRegisterAtStart(instr->right());
1643 LMathMinMax* minmax = new(zone()) LMathMinMax(left, right);
1644 return DefineSameAsFirst(minmax);
1648 LInstruction* LChunkBuilder::DoPower(HPower* instr) {
1649 ASSERT(instr->representation().IsDouble());
1650 // We call a C function for double power. It can't trigger a GC.
1651 // We need to use fixed result register for the call.
1652 Representation exponent_type = instr->right()->representation();
1653 ASSERT(instr->left()->representation().IsDouble());
1654 LOperand* left = UseFixedDouble(instr->left(), xmm2);
1655 LOperand* right = exponent_type.IsDouble() ?
1656 UseFixedDouble(instr->right(), xmm1) :
1657 UseFixed(instr->right(), eax);
1658 LPower* result = new(zone()) LPower(left, right);
1659 return MarkAsCall(DefineFixedDouble(result, xmm3), instr,
1660 CAN_DEOPTIMIZE_EAGERLY);
1664 LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
1665 ASSERT(instr->left()->representation().IsSmiOrTagged());
1666 ASSERT(instr->right()->representation().IsSmiOrTagged());
1667 LOperand* context = UseFixed(instr->context(), esi);
1668 LOperand* left = UseFixed(instr->left(), edx);
1669 LOperand* right = UseFixed(instr->right(), eax);
1670 LCmpT* result = new(zone()) LCmpT(context, left, right);
1671 return MarkAsCall(DefineFixed(result, eax), instr);
// Numeric comparison branch. Integer comparisons allow constant operands;
// double comparisons require both inputs constant or both in registers.
1675 LInstruction* LChunkBuilder::DoCompareNumericAndBranch(
1676 HCompareNumericAndBranch* instr) {
1677 Representation r = instr->representation();
1678 if (r.IsSmiOrInteger32()) {
1679 ASSERT(instr->left()->representation().Equals(r));
1680 ASSERT(instr->right()->representation().Equals(r));
1681 LOperand* left = UseRegisterOrConstantAtStart(instr->left());
1682 LOperand* right = UseOrConstantAtStart(instr->right());
1683 return new(zone()) LCompareNumericAndBranch(left, right);
1685 ASSERT(r.IsDouble());
1686 ASSERT(instr->left()->representation().IsDouble());
1687 ASSERT(instr->right()->representation().IsDouble());
// NOTE(review): the declarations of the double-path left/right operands
// are on lines not visible in this listing.
1690 if (CanBeImmediateConstant(instr->left()) &&
1691 CanBeImmediateConstant(instr->right())) {
1692 // The code generator requires either both inputs to be constant
1693 // operands, or neither.
1694 left = UseConstant(instr->left());
1695 right = UseConstant(instr->right());
// NOTE(review): the "} else {" separating the constant and register cases
// is on a line not visible in this listing.
1697 left = UseRegisterAtStart(instr->left());
1698 right = UseRegisterAtStart(instr->right());
1700 return new(zone()) LCompareNumericAndBranch(left, right);
1705 LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
1706 HCompareObjectEqAndBranch* instr) {
1707 LInstruction* goto_instr = CheckElideControlInstruction(instr);
1708 if (goto_instr != NULL) return goto_instr;
1709 LOperand* left = UseRegisterAtStart(instr->left());
1710 LOperand* right = UseOrConstantAtStart(instr->right());
1711 return new(zone()) LCmpObjectEqAndBranch(left, right);
1715 LInstruction* LChunkBuilder::DoCompareHoleAndBranch(
1716 HCompareHoleAndBranch* instr) {
1717 LOperand* value = UseRegisterAtStart(instr->value());
1718 return new(zone()) LCmpHoleAndBranch(value);
1722 LInstruction* LChunkBuilder::DoCompareMinusZeroAndBranch(
1723 HCompareMinusZeroAndBranch* instr) {
1724 LInstruction* goto_instr = CheckElideControlInstruction(instr);
1725 if (goto_instr != NULL) return goto_instr;
1726 LOperand* value = UseRegister(instr->value());
1727 LOperand* scratch = TempRegister();
1728 return new(zone()) LCompareMinusZeroAndBranch(value, scratch);
1732 LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
1733 ASSERT(instr->value()->representation().IsSmiOrTagged());
1734 LOperand* temp = TempRegister();
1735 return new(zone()) LIsObjectAndBranch(UseRegister(instr->value()), temp);
1739 LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
1740 ASSERT(instr->value()->representation().IsTagged());
1741 LOperand* temp = TempRegister();
1742 return new(zone()) LIsStringAndBranch(UseRegister(instr->value()), temp);
1746 LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
1747 ASSERT(instr->value()->representation().IsTagged());
1748 return new(zone()) LIsSmiAndBranch(Use(instr->value()));
1752 LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
1753 HIsUndetectableAndBranch* instr) {
1754 ASSERT(instr->value()->representation().IsTagged());
1755 return new(zone()) LIsUndetectableAndBranch(
1756 UseRegisterAtStart(instr->value()), TempRegister());
1760 LInstruction* LChunkBuilder::DoStringCompareAndBranch(
1761 HStringCompareAndBranch* instr) {
1762 ASSERT(instr->left()->representation().IsTagged());
1763 ASSERT(instr->right()->representation().IsTagged());
1764 LOperand* context = UseFixed(instr->context(), esi);
1765 LOperand* left = UseFixed(instr->left(), edx);
1766 LOperand* right = UseFixed(instr->right(), eax);
1768 LStringCompareAndBranch* result = new(zone())
1769 LStringCompareAndBranch(context, left, right);
1771 return MarkAsCall(result, instr);
// Branches on the instance type of the value's map.
1775 LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
1776 HHasInstanceTypeAndBranch* instr) {
1777 ASSERT(instr->value()->representation().IsTagged());
1778 return new(zone()) LHasInstanceTypeAndBranch(
1779 UseRegisterAtStart(instr->value()),
// NOTE(review): the constructor's final argument (likely a temp register)
// is on a line not visible in this listing.
1784 LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
1785 HGetCachedArrayIndex* instr) {
1786 ASSERT(instr->value()->representation().IsTagged());
1787 LOperand* value = UseRegisterAtStart(instr->value());
1789 return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
1793 LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
1794 HHasCachedArrayIndexAndBranch* instr) {
1795 ASSERT(instr->value()->representation().IsTagged());
1796 return new(zone()) LHasCachedArrayIndexAndBranch(
1797 UseRegisterAtStart(instr->value()));
// Branches on the class name of the value.
1801 LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
1802 HClassOfTestAndBranch* instr) {
1803 ASSERT(instr->value()->representation().IsTagged());
1804 return new(zone()) LClassOfTestAndBranch(UseRegister(instr->value()),
// NOTE(review): the constructor's remaining temp-register arguments are
// on lines not visible in this listing.
1810 LInstruction* LChunkBuilder::DoMapEnumLength(HMapEnumLength* instr) {
1811 LOperand* map = UseRegisterAtStart(instr->value());
1812 return DefineAsRegister(new(zone()) LMapEnumLength(map));
1816 LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
1817 LOperand* date = UseFixed(instr->value(), eax);
1818 LDateField* result =
1819 new(zone()) LDateField(date, FixedTemp(ecx), instr->index());
1820 return MarkAsCall(DefineFixed(result, eax), instr, CAN_DEOPTIMIZE_EAGERLY);
1824 LInstruction* LChunkBuilder::DoSeqStringGetChar(HSeqStringGetChar* instr) {
1825 LOperand* string = UseRegisterAtStart(instr->string());
1826 LOperand* index = UseRegisterOrConstantAtStart(instr->index());
1827 return DefineAsRegister(new(zone()) LSeqStringGetChar(string, index));
1831 LOperand* LChunkBuilder::GetSeqStringSetCharOperand(HSeqStringSetChar* instr) {
1832 if (instr->encoding() == String::ONE_BYTE_ENCODING) {
1833 if (FLAG_debug_code) {
1834 return UseFixed(instr->value(), eax);
1836 return UseFixedOrConstant(instr->value(), eax);
1839 if (FLAG_debug_code) {
1840 return UseRegisterAtStart(instr->value());
1842 return UseRegisterOrConstantAtStart(instr->value());
// Stores a character into a sequential string. With --debug-code the
// store verifies its inputs and is lowered as a call (needs the context).
1848 LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) {
1849 LOperand* string = UseRegisterAtStart(instr->string());
1850 LOperand* index = FLAG_debug_code
1851 ? UseRegisterAtStart(instr->index())
1852 : UseRegisterOrConstantAtStart(instr->index());
1853 LOperand* value = GetSeqStringSetCharOperand(instr);
1854 LOperand* context = FLAG_debug_code ? UseFixed(instr->context(), esi) : NULL;
1855 LInstruction* result = new(zone()) LSeqStringSetChar(context, string,
// NOTE(review): the remaining constructor arguments (index, value) are on
// a line not visible in this listing.
1857 if (FLAG_debug_code) {
1858 result = MarkAsCall(result, instr);
// NOTE(review): the closing brace and "return result;" are on lines not
// visible in this listing.
1864 LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
1865 return AssignEnvironment(new(zone()) LBoundsCheck(
1866 UseRegisterOrConstantAtStart(instr->index()),
1867 UseAtStart(instr->length())));
// Bounds-check base/index decomposition info — handled during hydrogen
// analysis; the body is on lines not visible in this listing.
1871 LInstruction* LChunkBuilder::DoBoundsCheckBaseIndexInformation(
1872 HBoundsCheckBaseIndexInformation* instr) {
1878 LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
1879 // The control instruction marking the end of a block that completed
1880 // abruptly (e.g., threw an exception). There is nothing specific to do.
// NOTE(review): the body's return statement is on a line not visible in
// this listing.
// HUseConst keeps a constant alive; its body is on lines not visible in
// this listing.
1885 LInstruction* LChunkBuilder::DoUseConst(HUseConst* instr) {
1890 LInstruction* LChunkBuilder::DoForceRepresentation(HForceRepresentation* bad) {
1891 // All HForceRepresentation instructions should be eliminated in the
1892 // representation change phase of Hydrogen.
// NOTE(review): the body (expected to be unreachable) is on lines not
// visible in this listing.
// Lowers representation changes (HChange) between tagged, smi, int32,
// double and SIMD128 values. Conversions that may allocate are flagged as
// deferred-calling; conversions that may fail get an environment. Several
// closing braces / "return result;" lines of the original are not visible
// in this listing — NOTE(review) markers flag each gap.
1898 LInstruction* LChunkBuilder::DoChange(HChange* instr) {
1899 Representation from = instr->from();
1900 Representation to = instr->to();
// NOTE(review): the guard opening this branch (handling a smi source) is
// on a line not visible in this listing.
1902 if (to.IsTagged()) {
1903 LOperand* value = UseRegister(instr->value());
1904 return DefineSameAsFirst(new(zone()) LDummyUse(value));
// A smi source is handled through the tagged paths below.
1906 from = Representation::Tagged();
1908 // Only mark conversions that might need to allocate as calling rather than
1909 // all changes. This makes simple, non-allocating conversion not have to force
1910 // building a stack frame.
1911 if (from.IsTagged()) {
1912 if (to.IsDouble()) {
1913 LOperand* value = UseRegister(instr->value());
1914 // Temp register only necessary for minus zero check.
1915 LOperand* temp = TempRegister();
1916 LInstruction* result = DefineAsRegister(
1917 new(zone()) LNumberUntagD(value, temp));
1918 if (!instr->value()->representation().IsSmi()) {
1919 result = AssignEnvironment(result);
// NOTE(review): the closing brace and "return result;" for the
// tagged->double case are on lines not visible in this listing.
1922 } else if (to.IsSIMD128()) {
1923 LOperand* value = UseRegister(instr->value());
1924 LOperand* temp = TempRegister();
1925 LTaggedToSIMD128* res = new(zone()) LTaggedToSIMD128(value, temp, to);
1926 return AssignEnvironment(DefineAsRegister(res));
1927 } else if (to.IsSmi()) {
1928 HValue* val = instr->value();
1929 LOperand* value = UseRegister(val);
1930 if (val->type().IsSmi()) {
1931 return DefineSameAsFirst(new(zone()) LDummyUse(value));
// NOTE(review): a closing brace is on a line not visible in this listing.
1933 return AssignEnvironment(DefineSameAsFirst(new(zone()) LCheckSmi(value)));
// NOTE(review): the "} else {" introducing the int32 case is on a line
// not visible in this listing.
1935 ASSERT(to.IsInteger32());
1936 HValue* val = instr->value();
1937 if (val->type().IsSmi() || val->representation().IsSmi()) {
1938 LOperand* value = UseRegister(val);
1939 return DefineSameAsFirst(new(zone()) LSmiUntag(value, false));
// NOTE(review): the "} else {" for the non-smi case is on a line not
// visible in this listing.
1941 bool truncating = instr->CanTruncateToInt32();
1942 LOperand* xmm_temp =
1943 (CpuFeatures::IsSafeForSnapshot(SSE2) && !truncating)
1944 ? FixedTemp(xmm1) : NULL;
1945 LInstruction* result = DefineSameAsFirst(
1946 new(zone()) LTaggedToI(UseRegister(val), xmm_temp));
1947 if (!instr->value()->representation().IsSmi()) {
1948 // Note: Only deopts in deferred code.
1949 result = AssignEnvironment(result);
// NOTE(review): closing braces and the "return result;" are on lines not
// visible in this listing.
1954 } else if (from.IsDouble()) {
1955 if (to.IsTagged()) {
1956 info()->MarkAsDeferredCalling();
1957 LOperand* value = UseRegisterAtStart(instr->value());
1958 LOperand* temp = FLAG_inline_new ? TempRegister() : NULL;
1960 // Make sure that temp and result_temp are different registers.
1961 LUnallocated* result_temp = TempRegister();
1962 LNumberTagD* result = new(zone()) LNumberTagD(value, temp);
1963 return AssignPointerMap(Define(result, result_temp));
1964 } else if (to.IsSmi()) {
1965 LOperand* value = UseRegister(instr->value());
1966 return AssignEnvironment(
1967 DefineAsRegister(new(zone()) LDoubleToSmi(value)));
// NOTE(review): the "} else {" introducing the int32 case is on a line
// not visible in this listing.
1969 ASSERT(to.IsInteger32());
1970 bool truncating = instr->CanTruncateToInt32();
1971 bool needs_temp = CpuFeatures::IsSafeForSnapshot(SSE2) && !truncating;
1972 LOperand* value = needs_temp ?
1973 UseTempRegister(instr->value()) : UseRegister(instr->value());
1974 LOperand* temp = needs_temp ? TempRegister() : NULL;
1975 LInstruction* result =
1976 DefineAsRegister(new(zone()) LDoubleToI(value, temp));
1977 if (!truncating) result = AssignEnvironment(result);
// NOTE(review): the "return result;" and closing brace are on lines not
// visible in this listing.
1980 } else if (from.IsInteger32()) {
1981 info()->MarkAsDeferredCalling();
1982 if (to.IsTagged()) {
1983 HValue* val = instr->value();
1984 LOperand* value = UseRegister(val);
1985 if (!instr->CheckFlag(HValue::kCanOverflow)) {
1986 return DefineSameAsFirst(new(zone()) LSmiTag(value));
1987 } else if (val->CheckFlag(HInstruction::kUint32)) {
1988 LOperand* temp1 = TempRegister();
1989 LOperand* temp2 = CpuFeatures::IsSupported(SSE2) ? FixedTemp(xmm1)
// NOTE(review): the ": NULL" arm of this conditional is on a line not
// visible in this listing.
1991 LNumberTagU* result = new(zone()) LNumberTagU(value, temp1, temp2);
1992 return AssignPointerMap(DefineSameAsFirst(result));
// NOTE(review): the "} else {" for the signed NumberTagI case is on a
// line not visible in this listing.
1994 LOperand* temp = TempRegister();
1995 LNumberTagI* result = new(zone()) LNumberTagI(value, temp);
1996 return AssignPointerMap(DefineSameAsFirst(result));
// NOTE(review): a closing brace is on a line not visible in this listing.
1998 } else if (to.IsSmi()) {
1999 HValue* val = instr->value();
2000 LOperand* value = UseRegister(val);
2001 LInstruction* result = DefineSameAsFirst(new(zone()) LSmiTag(value));
2002 if (instr->CheckFlag(HValue::kCanOverflow)) {
2003 result = AssignEnvironment(result);
// NOTE(review): the closing brace, "return result;" and the "} else {"
// introducing the double case are on lines not visible in this listing.
2007 ASSERT(to.IsDouble());
2008 if (instr->value()->CheckFlag(HInstruction::kUint32)) {
2009 LOperand* temp = FixedTemp(xmm1);
2010 return DefineAsRegister(
2011 new(zone()) LUint32ToDouble(UseRegister(instr->value()), temp));
// NOTE(review): the "} else {" for the signed int32->double case is on a
// line not visible in this listing.
2013 return DefineAsRegister(
2014 new(zone()) LInteger32ToDouble(Use(instr->value())));
// NOTE(review): closing braces are on lines not visible in this listing.
2017 } else if (from.IsSIMD128()) {
2018 ASSERT(to.IsTagged());
2019 info()->MarkAsDeferredCalling();
2020 LOperand* value = UseRegister(instr->value());
2021 LOperand* temp = TempRegister();
2023 // Make sure that temp and result_temp are different registers.
2024 LUnallocated* result_temp = TempRegister();
2025 LSIMD128ToTagged* result = new(zone()) LSIMD128ToTagged(value, temp);
2026 return AssignPointerMap(Define(result, result_temp));
// NOTE(review): the function's fall-through tail (expected unreachable)
// is on lines not visible in this listing.
2033 LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) {
2034 LOperand* value = UseAtStart(instr->value());
2035 return AssignEnvironment(new(zone()) LCheckNonSmi(value));
2039 LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
2040 LOperand* value = UseRegisterAtStart(instr->value());
2041 return AssignEnvironment(new(zone()) LCheckSmi(value));
2045 LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
2046 LOperand* value = UseRegisterAtStart(instr->value());
2047 LOperand* temp = TempRegister();
2048 LCheckInstanceType* result = new(zone()) LCheckInstanceType(value, temp);
2049 return AssignEnvironment(result);
2053 LInstruction* LChunkBuilder::DoCheckValue(HCheckValue* instr) {
2054 // If the object is in new space, we'll emit a global cell compare and so
2055 // want the value in a register. If the object gets promoted before we
2056 // emit code, we will still get the register but will do an immediate
2057 // compare instead of the cell compare. This is safe.
2058 LOperand* value = instr->object_in_new_space()
2059 ? UseRegisterAtStart(instr->value()) : UseAtStart(instr->value());
2060 return AssignEnvironment(new(zone()) LCheckValue(value));
2064 LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
2065 LOperand* value = NULL;
2066 if (!instr->CanOmitMapChecks()) {
2067 value = UseRegisterAtStart(instr->value());
2068 if (instr->has_migration_target()) info()->MarkAsDeferredCalling();
2070 LCheckMaps* result = new(zone()) LCheckMaps(value);
2071 if (!instr->CanOmitMapChecks()) {
2072 // Note: Only deopts in deferred code.
2073 AssignEnvironment(result);
2074 if (instr->has_migration_target()) return AssignPointerMap(result);
2080 LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
2081 HValue* value = instr->value();
2082 Representation input_rep = value->representation();
2083 if (input_rep.IsDouble()) {
2084 LOperand* reg = UseRegister(value);
2085 return DefineFixed(new(zone()) LClampDToUint8(reg), eax);
2086 } else if (input_rep.IsInteger32()) {
2087 LOperand* reg = UseFixed(value, eax);
2088 return DefineFixed(new(zone()) LClampIToUint8(reg), eax);
2090 ASSERT(input_rep.IsSmiOrTagged());
2091 if (CpuFeatures::IsSupported(SSE2)) {
2092 LOperand* reg = UseFixed(value, eax);
2093 // Register allocator doesn't (yet) support allocation of double
2094 // temps. Reserve xmm1 explicitly.
2095 LOperand* temp = FixedTemp(xmm1);
2096 LClampTToUint8* result = new(zone()) LClampTToUint8(reg, temp);
2097 return AssignEnvironment(DefineFixed(result, eax));
2099 LOperand* value = UseRegister(instr->value());
2100 LClampTToUint8NoSSE2* res =
2101 new(zone()) LClampTToUint8NoSSE2(value, TempRegister(),
2102 TempRegister(), TempRegister());
2103 return AssignEnvironment(DefineFixed(res, ecx));
2109 LInstruction* LChunkBuilder::DoDoubleBits(HDoubleBits* instr) {
2110 HValue* value = instr->value();
2111 ASSERT(value->representation().IsDouble());
2112 return DefineAsRegister(new(zone()) LDoubleBits(UseRegister(value)));
2116 LInstruction* LChunkBuilder::DoConstructDouble(HConstructDouble* instr) {
2117 LOperand* lo = UseRegister(instr->lo());
2118 LOperand* hi = UseRegister(instr->hi());
2119 return DefineAsRegister(new(zone()) LConstructDouble(hi, lo));
2123 LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
2124 LOperand* context = info()->IsStub() ? UseFixed(instr->context(), esi) : NULL;
2125 LOperand* parameter_count = UseRegisterOrConstant(instr->parameter_count());
2126 return new(zone()) LReturn(
2127 UseFixed(instr->value(), eax), context, parameter_count);
2131 LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
2132 Representation r = instr->representation();
2134 return DefineAsRegister(new(zone()) LConstantS);
2135 } else if (r.IsInteger32()) {
2136 return DefineAsRegister(new(zone()) LConstantI);
2137 } else if (r.IsDouble()) {
2138 double value = instr->DoubleValue();
2139 bool value_is_zero = BitCast<uint64_t, double>(value) == 0;
2140 LOperand* temp = value_is_zero ? NULL : TempRegister();
2141 return DefineAsRegister(new(zone()) LConstantD(temp));
2142 } else if (r.IsExternal()) {
2143 return DefineAsRegister(new(zone()) LConstantE);
2144 } else if (r.IsTagged()) {
2145 return DefineAsRegister(new(zone()) LConstantT);
2153 LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
2154 LLoadGlobalCell* result = new(zone()) LLoadGlobalCell;
2155 return instr->RequiresHoleCheck()
2156 ? AssignEnvironment(DefineAsRegister(result))
2157 : DefineAsRegister(result);
2161 LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
2162 LOperand* context = UseFixed(instr->context(), esi);
2163 LOperand* global_object = UseFixed(instr->global_object(), edx);
2164 LLoadGlobalGeneric* result =
2165 new(zone()) LLoadGlobalGeneric(context, global_object);
2166 return MarkAsCall(DefineFixed(result, eax), instr);
2170 LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
2171 LStoreGlobalCell* result =
2172 new(zone()) LStoreGlobalCell(UseRegister(instr->value()));
2173 return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
2177 LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
2178 LOperand* context = UseRegisterAtStart(instr->value());
2179 LInstruction* result =
2180 DefineAsRegister(new(zone()) LLoadContextSlot(context));
2181 if (instr->RequiresHoleCheck() && instr->DeoptimizesOnHole()) {
2182 result = AssignEnvironment(result);
2188 LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
2191 LOperand* context = UseRegister(instr->context());
2192 if (instr->NeedsWriteBarrier()) {
2193 value = UseTempRegister(instr->value());
2194 temp = TempRegister();
2196 value = UseRegister(instr->value());
2199 LInstruction* result = new(zone()) LStoreContextSlot(context, value, temp);
2200 if (instr->RequiresHoleCheck() && instr->DeoptimizesOnHole()) {
2201 result = AssignEnvironment(result);
2207 LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
2208 LOperand* obj = (instr->access().IsExternalMemory() &&
2209 instr->access().offset() == 0)
2210 ? UseRegisterOrConstantAtStart(instr->object())
2211 : UseRegisterAtStart(instr->object());
2212 return DefineAsRegister(new(zone()) LLoadNamedField(obj));
2216 LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
2217 LOperand* context = UseFixed(instr->context(), esi);
2218 LOperand* object = UseFixed(instr->object(), edx);
2219 LLoadNamedGeneric* result = new(zone()) LLoadNamedGeneric(context, object);
2220 return MarkAsCall(DefineFixed(result, eax), instr);
2224 LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
2225 HLoadFunctionPrototype* instr) {
2226 return AssignEnvironment(DefineAsRegister(
2227 new(zone()) LLoadFunctionPrototype(UseRegister(instr->function()),
2232 LInstruction* LChunkBuilder::DoLoadRoot(HLoadRoot* instr) {
2233 return DefineAsRegister(new(zone()) LLoadRoot);
2237 LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) {
2238 ASSERT(instr->key()->representation().IsSmiOrInteger32());
2239 ElementsKind elements_kind = instr->elements_kind();
2240 bool clobbers_key = ExternalArrayOpRequiresTemp(
2241 instr->key()->representation(), elements_kind);
2242 LOperand* key = clobbers_key
2243 ? UseTempRegister(instr->key())
2244 : UseRegisterOrConstantAtStart(instr->key());
2245 LInstruction* result = NULL;
2247 bool load_128bits_without_sse2 = IsSIMD128ElementsKind(elements_kind) &&
2248 !CPU::SupportsSIMD128InCrankshaft();
2249 if (!instr->is_typed_elements()) {
2250 LOperand* obj = UseRegisterAtStart(instr->elements());
2251 result = DefineAsRegister(new(zone()) LLoadKeyed(obj, key, NULL));
2254 (instr->representation().IsInteger32() &&
2255 !(IsDoubleOrFloatElementsKind(instr->elements_kind()))) ||
2256 (instr->representation().IsDouble() &&
2257 (IsDoubleOrFloatElementsKind(instr->elements_kind()))) ||
2258 (CPU::SupportsSIMD128InCrankshaft()
2259 ? instr->representation().IsFloat32x4()
2260 : instr->representation().IsTagged() &&
2261 (IsFloat32x4ElementsKind(instr->elements_kind()))) ||
2262 (CPU::SupportsSIMD128InCrankshaft()
2263 ? instr->representation().IsInt32x4()
2264 : instr->representation().IsTagged() &&
2265 (IsInt32x4ElementsKind(instr->elements_kind()))));
2266 LOperand* backing_store = UseRegister(instr->elements());
2267 result = DefineAsRegister(new(zone()) LLoadKeyed(backing_store, key,
2268 load_128bits_without_sse2 ? TempRegister() : NULL));
2269 if (load_128bits_without_sse2) {
2270 info()->MarkAsDeferredCalling();
2271 AssignPointerMap(result);
2275 if ((instr->is_external() || instr->is_fixed_typed_array()) ?
2276 // see LCodeGen::DoLoadKeyedExternalArray
2277 ((instr->elements_kind() == EXTERNAL_UINT32_ELEMENTS ||
2278 instr->elements_kind() == UINT32_ELEMENTS) &&
2279 !instr->CheckFlag(HInstruction::kUint32)) :
2280 // see LCodeGen::DoLoadKeyedFixedDoubleArray and
2281 // LCodeGen::DoLoadKeyedFixedArray
2282 instr->RequiresHoleCheck()) {
2283 result = AssignEnvironment(result);
2289 LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
2290 LOperand* context = UseFixed(instr->context(), esi);
2291 LOperand* object = UseFixed(instr->object(), edx);
2292 LOperand* key = UseFixed(instr->key(), ecx);
2294 LLoadKeyedGeneric* result =
2295 new(zone()) LLoadKeyedGeneric(context, object, key);
2296 return MarkAsCall(DefineFixed(result, eax), instr);
2300 LOperand* LChunkBuilder::GetStoreKeyedValueOperand(HStoreKeyed* instr) {
2301 ElementsKind elements_kind = instr->elements_kind();
2303 // Determine if we need a byte register in this case for the value.
2304 bool val_is_fixed_register =
2305 elements_kind == EXTERNAL_INT8_ELEMENTS ||
2306 elements_kind == EXTERNAL_UINT8_ELEMENTS ||
2307 elements_kind == EXTERNAL_UINT8_CLAMPED_ELEMENTS ||
2308 elements_kind == UINT8_ELEMENTS ||
2309 elements_kind == INT8_ELEMENTS ||
2310 elements_kind == UINT8_CLAMPED_ELEMENTS;
2311 if (val_is_fixed_register) {
2312 return UseFixed(instr->value(), eax);
2315 if (!CpuFeatures::IsSafeForSnapshot(SSE2) &&
2316 IsDoubleOrFloatElementsKind(elements_kind)) {
2317 return UseRegisterAtStart(instr->value());
2320 return UseRegister(instr->value());
2324 LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
2325 if (!instr->is_typed_elements()) {
2326 ASSERT(instr->elements()->representation().IsTagged());
2327 ASSERT(instr->key()->representation().IsInteger32() ||
2328 instr->key()->representation().IsSmi());
2330 if (instr->value()->representation().IsDouble()) {
2331 LOperand* object = UseRegisterAtStart(instr->elements());
2332 LOperand* val = NULL;
2333 val = UseRegisterAtStart(instr->value());
2334 LOperand* key = UseRegisterOrConstantAtStart(instr->key());
2335 return new(zone()) LStoreKeyed(object, key, val, NULL);
2337 ASSERT(instr->value()->representation().IsSmiOrTagged());
2338 bool needs_write_barrier = instr->NeedsWriteBarrier();
2340 LOperand* obj = UseRegister(instr->elements());
2343 if (needs_write_barrier) {
2344 val = UseTempRegister(instr->value());
2345 key = UseTempRegister(instr->key());
2347 val = UseRegisterOrConstantAtStart(instr->value());
2348 key = UseRegisterOrConstantAtStart(instr->key());
2350 return new(zone()) LStoreKeyed(obj, key, val, NULL);
2354 ElementsKind elements_kind = instr->elements_kind();
2356 (instr->value()->representation().IsInteger32() &&
2357 !IsDoubleOrFloatElementsKind(elements_kind)) ||
2358 (instr->value()->representation().IsDouble() &&
2359 IsDoubleOrFloatElementsKind(elements_kind)) ||
2360 (CPU::SupportsSIMD128InCrankshaft()
2361 ? instr->value()->representation().IsFloat32x4()
2362 : instr->value()->representation().IsTagged() &&
2363 IsFloat32x4ElementsKind(elements_kind)) ||
2364 (CPU::SupportsSIMD128InCrankshaft()
2365 ? instr->value()->representation().IsInt32x4()
2366 : instr->value()->representation().IsTagged() &&
2367 IsInt32x4ElementsKind(elements_kind)));
2368 ASSERT((instr->is_fixed_typed_array() &&
2369 instr->elements()->representation().IsTagged()) ||
2370 (instr->is_external() &&
2371 instr->elements()->representation().IsExternal()));
2373 LOperand* backing_store = UseRegister(instr->elements());
2374 LOperand* val = GetStoreKeyedValueOperand(instr);
2375 bool clobbers_key = ExternalArrayOpRequiresTemp(
2376 instr->key()->representation(), elements_kind);
2377 LOperand* key = clobbers_key
2378 ? UseTempRegister(instr->key())
2379 : UseRegisterOrConstantAtStart(instr->key());
2380 bool store_128bits_without_sse2 = IsSIMD128ElementsKind(elements_kind) &&
2381 !CPU::SupportsSIMD128InCrankshaft();
2382 LStoreKeyed* result =
2383 new(zone()) LStoreKeyed(backing_store, key, val,
2384 store_128bits_without_sse2 ? TempRegister() : NULL);
2385 return store_128bits_without_sse2 ? AssignEnvironment(result) : result;
2389 LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
2390 LOperand* context = UseFixed(instr->context(), esi);
2391 LOperand* object = UseFixed(instr->object(), edx);
2392 LOperand* key = UseFixed(instr->key(), ecx);
2393 LOperand* value = UseFixed(instr->value(), eax);
2395 ASSERT(instr->object()->representation().IsTagged());
2396 ASSERT(instr->key()->representation().IsTagged());
2397 ASSERT(instr->value()->representation().IsTagged());
2399 LStoreKeyedGeneric* result =
2400 new(zone()) LStoreKeyedGeneric(context, object, key, value);
2401 return MarkAsCall(result, instr);
2405 LInstruction* LChunkBuilder::DoTransitionElementsKind(
2406 HTransitionElementsKind* instr) {
2407 LOperand* object = UseRegister(instr->object());
2408 if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) {
2409 LOperand* object = UseRegister(instr->object());
2410 LOperand* new_map_reg = TempRegister();
2411 LOperand* temp_reg = TempRegister();
2412 LTransitionElementsKind* result =
2413 new(zone()) LTransitionElementsKind(object, NULL,
2414 new_map_reg, temp_reg);
2417 LOperand* context = UseFixed(instr->context(), esi);
2418 LTransitionElementsKind* result =
2419 new(zone()) LTransitionElementsKind(object, context, NULL, NULL);
2420 return AssignPointerMap(result);
2425 LInstruction* LChunkBuilder::DoTrapAllocationMemento(
2426 HTrapAllocationMemento* instr) {
2427 LOperand* object = UseRegister(instr->object());
2428 LOperand* temp = TempRegister();
2429 LTrapAllocationMemento* result =
2430 new(zone()) LTrapAllocationMemento(object, temp);
2431 return AssignEnvironment(result);
2435 LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
2436 bool is_in_object = instr->access().IsInobject();
2437 bool is_external_location = instr->access().IsExternalMemory() &&
2438 instr->access().offset() == 0;
2439 bool needs_write_barrier = instr->NeedsWriteBarrier();
2440 bool needs_write_barrier_for_map = instr->has_transition() &&
2441 instr->NeedsWriteBarrierForMap();
2444 if (needs_write_barrier) {
2446 ? UseRegister(instr->object())
2447 : UseTempRegister(instr->object());
2448 } else if (is_external_location) {
2449 ASSERT(!is_in_object);
2450 ASSERT(!needs_write_barrier);
2451 ASSERT(!needs_write_barrier_for_map);
2452 obj = UseRegisterOrConstant(instr->object());
2454 obj = needs_write_barrier_for_map
2455 ? UseRegister(instr->object())
2456 : UseRegisterAtStart(instr->object());
2459 bool can_be_constant = instr->value()->IsConstant() &&
2460 HConstant::cast(instr->value())->NotInNewSpace() &&
2461 !instr->field_representation().IsDouble();
2464 if (instr->field_representation().IsInteger8() ||
2465 instr->field_representation().IsUInteger8()) {
2466 // mov_b requires a byte register (i.e. any of eax, ebx, ecx, edx).
2467 // Just force the value to be in eax and we're safe here.
2468 val = UseFixed(instr->value(), eax);
2469 } else if (needs_write_barrier) {
2470 val = UseTempRegister(instr->value());
2471 } else if (can_be_constant) {
2472 val = UseRegisterOrConstant(instr->value());
2473 } else if (instr->field_representation().IsSmi()) {
2474 val = UseTempRegister(instr->value());
2475 } else if (instr->field_representation().IsDouble()) {
2476 val = UseRegisterAtStart(instr->value());
2478 val = UseRegister(instr->value());
2481 // We only need a scratch register if we have a write barrier or we
2482 // have a store into the properties array (not in-object-property).
2483 LOperand* temp = (!is_in_object || needs_write_barrier ||
2484 needs_write_barrier_for_map) ? TempRegister() : NULL;
2486 // We need a temporary register for write barrier of the map field.
2487 LOperand* temp_map = needs_write_barrier_for_map ? TempRegister() : NULL;
2489 LInstruction* result =
2490 new(zone()) LStoreNamedField(obj, val, temp, temp_map);
2491 if (!instr->access().IsExternalMemory() &&
2492 instr->field_representation().IsHeapObject() &&
2493 (val->IsConstantOperand()
2494 ? HConstant::cast(instr->value())->HasSmiValue()
2495 : !instr->value()->type().IsHeapObject())) {
2496 result = AssignEnvironment(result);
2502 LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
2503 LOperand* context = UseFixed(instr->context(), esi);
2504 LOperand* object = UseFixed(instr->object(), edx);
2505 LOperand* value = UseFixed(instr->value(), eax);
2507 LStoreNamedGeneric* result =
2508 new(zone()) LStoreNamedGeneric(context, object, value);
2509 return MarkAsCall(result, instr);
2513 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
2514 LOperand* context = UseFixed(instr->context(), esi);
2515 LOperand* left = UseFixed(instr->left(), edx);
2516 LOperand* right = UseFixed(instr->right(), eax);
2517 LStringAdd* string_add = new(zone()) LStringAdd(context, left, right);
2518 return MarkAsCall(DefineFixed(string_add, eax), instr);
2522 LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
2523 LOperand* string = UseTempRegister(instr->string());
2524 LOperand* index = UseTempRegister(instr->index());
2525 LOperand* context = UseAny(instr->context());
2526 LStringCharCodeAt* result =
2527 new(zone()) LStringCharCodeAt(context, string, index);
2528 return AssignPointerMap(DefineAsRegister(result));
2532 LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
2533 LOperand* char_code = UseRegister(instr->value());
2534 LOperand* context = UseAny(instr->context());
2535 LStringCharFromCode* result =
2536 new(zone()) LStringCharFromCode(context, char_code);
2537 return AssignPointerMap(DefineAsRegister(result));
2541 LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
2542 info()->MarkAsDeferredCalling();
2543 LOperand* context = UseAny(instr->context());
2544 LOperand* size = instr->size()->IsConstant()
2545 ? UseConstant(instr->size())
2546 : UseTempRegister(instr->size());
2547 LOperand* temp = TempRegister();
2548 LAllocate* result = new(zone()) LAllocate(context, size, temp);
2549 return AssignPointerMap(DefineAsRegister(result));
2553 LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
2554 LOperand* context = UseFixed(instr->context(), esi);
2556 DefineFixed(new(zone()) LRegExpLiteral(context), eax), instr);
2560 LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
2561 LOperand* context = UseFixed(instr->context(), esi);
2563 DefineFixed(new(zone()) LFunctionLiteral(context), eax), instr);
2567 LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
2568 ASSERT(argument_count_ == 0);
2569 allocator_->MarkAsOsrEntry();
2570 current_block_->last_environment()->set_ast_id(instr->ast_id());
2571 return AssignEnvironment(new(zone()) LOsrEntry);
2575 LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
2576 LParameter* result = new(zone()) LParameter;
2577 if (instr->kind() == HParameter::STACK_PARAMETER) {
2578 int spill_index = chunk()->GetParameterStackSlot(instr->index());
2579 return DefineAsSpilled(result, spill_index);
2581 ASSERT(info()->IsStub());
2582 CodeStubInterfaceDescriptor* descriptor =
2583 info()->code_stub()->GetInterfaceDescriptor(info()->isolate());
2584 int index = static_cast<int>(instr->index());
2585 Register reg = descriptor->GetParameterRegister(index);
2586 return DefineFixed(result, reg);
2591 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
2592 // Use an index that corresponds to the location in the unoptimized frame,
2593 // which the optimized frame will subsume.
2594 int env_index = instr->index();
2595 int spill_index = 0;
2596 if (instr->environment()->is_parameter_index(env_index)) {
2597 spill_index = chunk()->GetParameterStackSlot(env_index);
2599 spill_index = env_index - instr->environment()->first_local_index();
2600 if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
2601 Abort(kNotEnoughSpillSlotsForOsr);
2604 if (spill_index == 0) {
2605 // The dynamic frame alignment state overwrites the first local.
2606 // The first local is saved at the end of the unoptimized frame.
2607 spill_index = graph()->osr()->UnoptimizedFrameSlots();
2610 return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
2614 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
2615 LOperand* context = UseFixed(instr->context(), esi);
2616 LCallStub* result = new(zone()) LCallStub(context);
2617 return MarkAsCall(DefineFixed(result, eax), instr);
2621 LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
2622 // There are no real uses of the arguments object.
2623 // arguments.length and element access are supported directly on
2624 // stack arguments, and any real arguments object use causes a bailout.
2625 // So this value is never used.
2630 LInstruction* LChunkBuilder::DoCapturedObject(HCapturedObject* instr) {
2631 instr->ReplayEnvironment(current_block_->last_environment());
2633 // There are no real uses of a captured object.
2638 LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
2639 info()->MarkAsRequiresFrame();
2640 LOperand* args = UseRegister(instr->arguments());
2643 if (instr->length()->IsConstant() && instr->index()->IsConstant()) {
2644 length = UseRegisterOrConstant(instr->length());
2645 index = UseOrConstant(instr->index());
2647 length = UseTempRegister(instr->length());
2648 index = Use(instr->index());
2650 return DefineAsRegister(new(zone()) LAccessArgumentsAt(args, length, index));
2654 LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
2655 LOperand* object = UseFixed(instr->value(), eax);
2656 LToFastProperties* result = new(zone()) LToFastProperties(object);
2657 return MarkAsCall(DefineFixed(result, eax), instr);
2661 LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
2662 LOperand* context = UseFixed(instr->context(), esi);
2663 LOperand* value = UseAtStart(instr->value());
2664 LTypeof* result = new(zone()) LTypeof(context, value);
2665 return MarkAsCall(DefineFixed(result, eax), instr);
2669 LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
2670 LInstruction* goto_instr = CheckElideControlInstruction(instr);
2671 if (goto_instr != NULL) return goto_instr;
2672 return new(zone()) LTypeofIsAndBranch(UseTempRegister(instr->value()));
2676 LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
2677 HIsConstructCallAndBranch* instr) {
2678 return new(zone()) LIsConstructCallAndBranch(TempRegister());
2682 LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
2683 instr->ReplayEnvironment(current_block_->last_environment());
2688 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
2689 info()->MarkAsDeferredCalling();
2690 if (instr->is_function_entry()) {
2691 LOperand* context = UseFixed(instr->context(), esi);
2692 return MarkAsCall(new(zone()) LStackCheck(context), instr);
2694 ASSERT(instr->is_backwards_branch());
2695 LOperand* context = UseAny(instr->context());
2696 return AssignEnvironment(
2697 AssignPointerMap(new(zone()) LStackCheck(context)));
2702 LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
2703 HEnvironment* outer = current_block_->last_environment();
2704 HConstant* undefined = graph()->GetConstantUndefined();
2705 HEnvironment* inner = outer->CopyForInlining(instr->closure(),
2706 instr->arguments_count(),
2709 instr->inlining_kind());
2710 // Only replay binding of arguments object if it wasn't removed from graph.
2711 if (instr->arguments_var() != NULL && instr->arguments_object()->IsLinked()) {
2712 inner->Bind(instr->arguments_var(), instr->arguments_object());
2714 inner->set_entry(instr);
2715 current_block_->UpdateEnvironment(inner);
2716 chunk_->AddInlinedClosure(instr->closure());
2721 LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
2722 LInstruction* pop = NULL;
2724 HEnvironment* env = current_block_->last_environment();
2726 if (env->entry()->arguments_pushed()) {
2727 int argument_count = env->arguments_environment()->parameter_count();
2728 pop = new(zone()) LDrop(argument_count);
2729 ASSERT(instr->argument_delta() == -argument_count);
2732 HEnvironment* outer = current_block_->last_environment()->
2733 DiscardInlined(false);
2734 current_block_->UpdateEnvironment(outer);
2739 LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
2740 LOperand* context = UseFixed(instr->context(), esi);
2741 LOperand* object = UseFixed(instr->enumerable(), eax);
2742 LForInPrepareMap* result = new(zone()) LForInPrepareMap(context, object);
2743 return MarkAsCall(DefineFixed(result, eax), instr, CAN_DEOPTIMIZE_EAGERLY);
2747 LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
2748 LOperand* map = UseRegister(instr->map());
2749 return AssignEnvironment(DefineAsRegister(
2750 new(zone()) LForInCacheArray(map)));
2754 LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
2755 LOperand* value = UseRegisterAtStart(instr->value());
2756 LOperand* map = UseRegisterAtStart(instr->map());
2757 return AssignEnvironment(new(zone()) LCheckMapValue(value, map));
2761 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
2762 LOperand* object = UseRegister(instr->object());
2763 LOperand* index = UseTempRegister(instr->index());
2764 return DefineSameAsFirst(new(zone()) LLoadFieldByIndex(object, index));
2768 const char* LNullarySIMDOperation::Mnemonic() const {
2770 #define SIMD_NULLARY_OPERATION_CASE_ITEM(module, function, name, p4) \
2772 return #module "-" #function;
2773 SIMD_NULLARY_OPERATIONS(SIMD_NULLARY_OPERATION_CASE_ITEM)
2774 #undef SIMD_NULLARY_OPERATION_CASE_ITEM
2782 LInstruction* LChunkBuilder::DoNullarySIMDOperation(
2783 HNullarySIMDOperation* instr) {
2784 LNullarySIMDOperation* result =
2785 new(zone()) LNullarySIMDOperation(instr->op());
2786 switch (instr->op()) {
2787 #define SIMD_NULLARY_OPERATION_CASE_ITEM(module, function, name, p4) \
2789 SIMD_NULLARY_OPERATIONS(SIMD_NULLARY_OPERATION_CASE_ITEM)
2790 #undef SIMD_NULLARY_OPERATION_CASE_ITEM
2791 return DefineAsRegister(result);
2799 const char* LUnarySIMDOperation::Mnemonic() const {
2801 case kSIMD128Change: return "SIMD128-change";
2802 #define SIMD_UNARY_OPERATION_CASE_ITEM(module, function, name, p4, p5) \
2804 return #module "-" #function;
2805 SIMD_UNARY_OPERATIONS(SIMD_UNARY_OPERATION_CASE_ITEM)
2806 SIMD_UNARY_OPERATIONS_FOR_PROPERTY_ACCESS(SIMD_UNARY_OPERATION_CASE_ITEM)
2807 #undef SIMD_UNARY_OPERATION_CASE_ITEM
2815 LInstruction* LChunkBuilder::DoUnarySIMDOperation(HUnarySIMDOperation* instr) {
2816 LOperand* input = UseRegisterAtStart(instr->value());
2817 LUnarySIMDOperation* result =
2818 new(zone()) LUnarySIMDOperation(input, instr->op());
2819 switch (instr->op()) {
2820 case kSIMD128Change:
2821 return AssignEnvironment(DefineAsRegister(result));
2824 case kFloat32x4Reciprocal:
2825 case kFloat32x4ReciprocalSqrt:
2826 case kFloat32x4Sqrt:
2829 return DefineSameAsFirst(result);
2830 case kFloat32x4BitsToInt32x4:
2831 case kFloat32x4ToInt32x4:
2832 case kInt32x4BitsToFloat32x4:
2833 case kInt32x4ToFloat32x4:
2834 case kFloat32x4Splat:
2836 case kFloat32x4GetSignMask:
2837 case kFloat32x4GetX:
2838 case kFloat32x4GetY:
2839 case kFloat32x4GetZ:
2840 case kFloat32x4GetW:
2841 case kInt32x4GetSignMask:
2846 case kInt32x4GetFlagX:
2847 case kInt32x4GetFlagY:
2848 case kInt32x4GetFlagZ:
2849 case kInt32x4GetFlagW:
2850 return DefineAsRegister(result);
2858 const char* LBinarySIMDOperation::Mnemonic() const {
2860 #define SIMD_BINARY_OPERATION_CASE_ITEM(module, function, name, p4, p5, p6) \
2862 return #module "-" #function;
2863 SIMD_BINARY_OPERATIONS(SIMD_BINARY_OPERATION_CASE_ITEM)
2864 #undef SIMD_BINARY_OPERATION_CASE_ITEM
2872 LInstruction* LChunkBuilder::DoBinarySIMDOperation(
2873 HBinarySIMDOperation* instr) {
2874 switch (instr->op()) {
2881 case kFloat32x4Scale:
2882 case kFloat32x4WithX:
2883 case kFloat32x4WithY:
2884 case kFloat32x4WithZ:
2885 case kFloat32x4WithW:
2896 case kInt32x4WithFlagX:
2897 case kInt32x4WithFlagY:
2898 case kInt32x4WithFlagZ:
2899 case kInt32x4WithFlagW:
2900 case kInt32x4GreaterThan:
2902 case kInt32x4LessThan: {
2903 LOperand* left = UseRegisterAtStart(instr->left());
2904 LOperand* right = UseRegisterAtStart(instr->right());
2905 LBinarySIMDOperation* result =
2906 new(zone()) LBinarySIMDOperation(left, right, instr->op());
2907 if (instr->op() == kInt32x4WithFlagX ||
2908 instr->op() == kInt32x4WithFlagY ||
2909 instr->op() == kInt32x4WithFlagZ ||
2910 instr->op() == kInt32x4WithFlagW) {
2911 return AssignEnvironment(DefineSameAsFirst(result));
2913 return DefineSameAsFirst(result);
2916 case kFloat32x4Shuffle:
2917 case kInt32x4Shuffle:
2918 case kInt32x4ShiftLeft:
2919 case kInt32x4ShiftRight:
2920 case kInt32x4ShiftRightArithmetic: {
2921 LOperand* left = UseRegisterAtStart(instr->left());
2922 LOperand* right = UseOrConstant(instr->right());
2923 LBinarySIMDOperation* result =
2924 new(zone()) LBinarySIMDOperation(left, right, instr->op());
2925 return AssignEnvironment(DefineSameAsFirst(result));
2927 case kFloat32x4LessThan:
2928 case kFloat32x4LessThanOrEqual:
2929 case kFloat32x4Equal:
2930 case kFloat32x4NotEqual:
2931 case kFloat32x4GreaterThanOrEqual:
2932 case kFloat32x4GreaterThan: {
2933 LOperand* left = UseRegisterAtStart(instr->left());
2934 LOperand* right = UseRegisterAtStart(instr->right());
2935 LBinarySIMDOperation* result =
2936 new(zone()) LBinarySIMDOperation(left, right, instr->op());
2937 return DefineAsRegister(result);
2946 const char* LTernarySIMDOperation::Mnemonic() const {
2948 #define SIMD_TERNARY_OPERATION_CASE_ITEM(module, function, name, p4, p5, p6, \
2951 return #module "-" #function;
2952 SIMD_TERNARY_OPERATIONS(SIMD_TERNARY_OPERATION_CASE_ITEM)
2953 #undef SIMD_TERNARY_OPERATION_CASE_ITEM
2961 LInstruction* LChunkBuilder::DoTernarySIMDOperation(
2962 HTernarySIMDOperation* instr) {
2963 LOperand* first = UseRegisterAtStart(instr->first());
2964 LOperand* second = UseRegisterAtStart(instr->second());
2965 LOperand* third = instr->op() == kFloat32x4ShuffleMix
2966 ? UseOrConstant(instr->third())
2967 : UseRegisterAtStart(instr->third());
2968 LTernarySIMDOperation* result =
2969 new(zone()) LTernarySIMDOperation(first, second, third, instr->op());
2970 switch (instr->op()) {
2971 case kInt32x4Select: {
2972 return DefineAsRegister(result);
2974 case kFloat32x4ShuffleMix: {
2975 return AssignEnvironment(DefineSameAsFirst(result));
2977 case kFloat32x4Clamp: {
2978 return DefineSameAsFirst(result);
2987 const char* LQuarternarySIMDOperation::Mnemonic() const {
2989 #define SIMD_QUARTERNARY_OPERATION_CASE_ITEM(module, function, name, p4, p5, \
2992 return #module "-" #function;
2993 SIMD_QUARTERNARY_OPERATIONS(SIMD_QUARTERNARY_OPERATION_CASE_ITEM)
2994 #undef SIMD_QUARTERNARY_OPERATION_CASE_ITEM
3002 LInstruction* LChunkBuilder::DoQuarternarySIMDOperation(
3003 HQuarternarySIMDOperation* instr) {
3004 LOperand* x = UseRegisterAtStart(instr->x());
3005 LOperand* y = UseRegisterAtStart(instr->y());
3006 LOperand* z = UseRegisterAtStart(instr->z());
3007 LOperand* w = UseRegisterAtStart(instr->w());
3008 LQuarternarySIMDOperation* result =
3009 new(zone()) LQuarternarySIMDOperation(x, y, z, w, instr->op());
3010 if (instr->op() == kInt32x4Bool) {
3011 return AssignEnvironment(DefineAsRegister(result));
3013 return DefineAsRegister(result);
3018 } } // namespace v8::internal
3020 #endif // V8_TARGET_ARCH_IA32