1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 #if V8_TARGET_ARCH_IA32
32 #include "lithium-allocator-inl.h"
33 #include "ia32/lithium-ia32.h"
34 #include "ia32/lithium-codegen-ia32.h"
35 #include "hydrogen-osr.h"
// Generates one CompileToNative override per concrete Lithium instruction;
// each override just dispatches to the matching LCodeGen::Do<type> visitor.
// NOTE(review): the macro's closing "}" line and the #undef are elided in
// this excerpt — confirm against the full file.
40 #define DEFINE_COMPILE(type) \
41 void L##type::CompileToNative(LCodeGen* generator) { \
42 generator->Do##type(this); \
44 LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
// Debug-only sanity check for call-like instructions: because a call
// clobbers every register, the output and temps must use fixed registers
// (or no register at all), and each input must be fixed, used-at-start,
// or a non-register operand.
49 void LInstruction::VerifyCall() {
50 // Call instructions can use only fixed registers as temporaries and
51 // outputs because all registers are blocked by the calling convention.
52 // Inputs operands must use a fixed register or use-at-start policy or
53 // a non-register policy.
54 ASSERT(Output() == NULL ||
55 LUnallocated::cast(Output())->HasFixedPolicy() ||
56 !LUnallocated::cast(Output())->HasRegisterPolicy());
// Every use operand: fixed register or consumed at instruction start.
57 for (UseIterator it(this); !it.Done(); it.Advance()) {
58 LUnallocated* operand = LUnallocated::cast(it.Current());
59 ASSERT(operand->HasFixedPolicy() ||
60 operand->IsUsedAtStart());
// Every temp operand: fixed register or not a register at all.
62 for (TempIterator it(this); !it.Done(); it.Advance()) {
63 LUnallocated* operand = LUnallocated::cast(it.Current());
64 ASSERT(operand->HasFixedPolicy() ||!operand->HasRegisterPolicy());
// True when the instruction produces its result in a double register.
70 bool LInstruction::HasDoubleRegisterResult() {
71 return HasResult() && result()->IsDoubleRegister();
// True when any input operand lives in a double register.
75 bool LInstruction::HasDoubleRegisterInput() {
76 for (int i = 0; i < InputCount(); i++) {
77 LOperand* op = InputAt(i);
78 if (op != NULL && op->IsDoubleRegister()) {
// True when some double-register input maps to the given x87 register
// (used by the non-SSE2 x87 codegen path).
86 bool LInstruction::IsDoubleInput(X87Register reg, LCodeGen* cgen) {
87 for (int i = 0; i < InputCount(); i++) {
88 LOperand* op = InputAt(i);
89 if (op != NULL && op->IsDoubleRegister()) {
90 if (cgen->ToX87Register(op).is(reg)) return true;
// Pretty-prints the instruction: mnemonic, output operand, then (if
// present) its deopt environment and pointer map.
97 void LInstruction::PrintTo(StringStream* stream) {
98 stream->Add("%s ", this->Mnemonic());
100 PrintOutputOperandTo(stream);
104 if (HasEnvironment()) {
106 environment()->PrintTo(stream);
109 if (HasPointerMap()) {
111 pointer_map()->PrintTo(stream);
// Default operand printer: all inputs, space-separated; NULL inputs are
// handled by the elided branch (original line 120 ff.).
116 void LInstruction::PrintDataTo(StringStream* stream) {
118 for (int i = 0; i < InputCount(); i++) {
119 if (i > 0) stream->Add(" ");
120 if (InputAt(i) == NULL) {
123 InputAt(i)->PrintTo(stream);
// Prints the result operand, if the instruction has one.
129 void LInstruction::PrintOutputOperandTo(StringStream* stream) {
130 if (HasResult()) result()->PrintTo(stream);
// Label printer: gap moves first, then a note when this label stands in
// for a dead (replaced) block.
134 void LLabel::PrintDataTo(StringStream* stream) {
135 LGap::PrintDataTo(stream);
136 LLabel* rep = replacement();
138 stream->Add(" Dead block replaced with B%d", rep->block_id());
// A gap is redundant iff all four of its parallel-move positions are
// empty or themselves redundant.
143 bool LGap::IsRedundant() const {
144 for (int i = 0; i < 4; i++) {
145 if (parallel_moves_[i] != NULL && !parallel_moves_[i]->IsRedundant()) {
// Prints the non-empty parallel moves of the gap's four positions.
154 void LGap::PrintDataTo(StringStream* stream) {
155 for (int i = 0; i < 4; i++) {
157 if (parallel_moves_[i] != NULL) {
158 parallel_moves_[i]->PrintDataTo(stream);
// Printable mnemonic for a double-precision arithmetic op
// (switch header and default case elided in this excerpt).
165 const char* LArithmeticD::Mnemonic() const {
167 case Token::ADD: return "add-d";
168 case Token::SUB: return "sub-d";
169 case Token::MUL: return "mul-d";
170 case Token::DIV: return "div-d";
171 case Token::MOD: return "mod-d";
// Printable mnemonic for a tagged (generic) binary op. Note SHL prints
// as "sal-t" — the IA-32 arithmetic-shift-left spelling.
179 const char* LArithmeticT::Mnemonic() const {
181 case Token::ADD: return "add-t";
182 case Token::SUB: return "sub-t";
183 case Token::MUL: return "mul-t";
184 case Token::MOD: return "mod-t";
185 case Token::DIV: return "div-t";
186 case Token::BIT_AND: return "bit-and-t";
187 case Token::BIT_OR: return "bit-or-t";
188 case Token::BIT_XOR: return "bit-xor-t";
189 case Token::ROR: return "ror-t";
190 case Token::SHL: return "sal-t";
191 case Token::SAR: return "sar-t";
192 case Token::SHR: return "shr-t";
// A goto is worth a disassembly comment only if it actually jumps
// (i.e. the target is not the next emitted block).
200 bool LGoto::HasInterestingComment(LCodeGen* gen) const {
201 return !gen->IsNextEmittedBlock(block_id());
// The printers below all follow the same pattern: condition text, the
// tested value, then the true/false successor block ids.
205 void LGoto::PrintDataTo(StringStream* stream) {
206 stream->Add("B%d", block_id());
210 void LBranch::PrintDataTo(StringStream* stream) {
211 stream->Add("B%d | B%d on ", true_block_id(), false_block_id());
212 value()->PrintTo(stream);
216 void LCompareNumericAndBranch::PrintDataTo(StringStream* stream) {
218 left()->PrintTo(stream);
219 stream->Add(" %s ", Token::String(op()));
220 right()->PrintTo(stream);
221 stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
225 void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
226 stream->Add("if is_object(");
227 value()->PrintTo(stream);
228 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
232 void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
233 stream->Add("if is_string(");
234 value()->PrintTo(stream);
235 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
239 void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
240 stream->Add("if is_smi(");
241 value()->PrintTo(stream);
242 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
246 void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
247 stream->Add("if is_undetectable(");
248 value()->PrintTo(stream);
249 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
253 void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
254 stream->Add("if string_compare(");
255 left()->PrintTo(stream);
256 right()->PrintTo(stream);
257 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
// More condition-and-branch printers, same "if cond(value) then Bt else Bf"
// shape as the group above.
261 void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
262 stream->Add("if has_instance_type(");
263 value()->PrintTo(stream);
264 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
268 void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) {
269 stream->Add("if has_cached_array_index(");
270 value()->PrintTo(stream);
271 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
275 void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
276 stream->Add("if class_of_test(");
277 value()->PrintTo(stream);
278 stream->Add(", \"%o\") then B%d else B%d",
279 *hydrogen()->class_name(),
285 void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
286 stream->Add("if typeof ");
287 value()->PrintTo(stream);
288 stream->Add(" == \"%s\" then B%d else B%d",
289 hydrogen()->type_literal()->ToCString().get(),
290 true_block_id(), false_block_id());
// Prints "function.code_entry = code_object".
294 void LStoreCodeEntry::PrintDataTo(StringStream* stream) {
296 function()->PrintTo(stream);
297 stream->Add(".code_entry = ");
298 code_object()->PrintTo(stream);
// Data printers for allocation, call, and context-slot instructions.
// Call printers end with "#<arity> /" (argument count).
302 void LInnerAllocatedObject::PrintDataTo(StringStream* stream) {
304 base_object()->PrintTo(stream);
306 offset()->PrintTo(stream);
310 void LCallJSFunction::PrintDataTo(StringStream* stream) {
312 function()->PrintTo(stream);
313 stream->Add("#%d / ", arity());
317 void LCallWithDescriptor::PrintDataTo(StringStream* stream) {
318 for (int i = 0; i < InputCount(); i++) {
319 InputAt(i)->PrintTo(stream);
322 stream->Add("#%d / ", arity());
326 void LLoadContextSlot::PrintDataTo(StringStream* stream) {
327 context()->PrintTo(stream);
328 stream->Add("[%d]", slot_index());
332 void LStoreContextSlot::PrintDataTo(StringStream* stream) {
333 context()->PrintTo(stream);
334 stream->Add("[%d] <- ", slot_index());
335 value()->PrintTo(stream);
339 void LInvokeFunction::PrintDataTo(StringStream* stream) {
341 context()->PrintTo(stream);
343 function()->PrintTo(stream);
344 stream->Add(" #%d / ", arity());
348 void LCallNew::PrintDataTo(StringStream* stream) {
350 context()->PrintTo(stream);
352 constructor()->PrintTo(stream);
353 stream->Add(" #%d / ", arity());
// LCallNewArray additionally prints the elements kind of the array.
357 void LCallNewArray::PrintDataTo(StringStream* stream) {
359 context()->PrintTo(stream);
361 constructor()->PrintTo(stream);
362 stream->Add(" #%d / ", arity());
363 ElementsKind kind = hydrogen()->elements_kind();
364 stream->Add(" (%s) ", ElementsKindToString(kind));
368 void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
369 arguments()->PrintTo(stream);
371 stream->Add(" length ");
372 length()->PrintTo(stream);
374 stream->Add(" index ");
375 index()->PrintTo(stream);
// Hands out the next stack spill slot index for the given register kind.
// Doubles are 2 slots wide and must be even-aligned; SIMD (float32x4 /
// int32x4) values are 4 slots wide, hence the extra slot skipping.
379 int LPlatformChunk::GetNextSpillIndex(RegisterKind kind) {
381 case GENERAL_REGISTERS: return spill_slot_count_++;
382 case DOUBLE_REGISTERS: {
383 // Skip a slot if for a double-width slot.
// |= 1 rounds the count up to an odd value so the returned index is
// the second (aligned) half of a double-width pair.
385 spill_slot_count_ |= 1;
387 return spill_slot_count_++;
389 case FLOAT32x4_REGISTERS:
390 case INT32x4_REGISTERS: {
391 // Skip three slots if for a quad-width slot.
392 spill_slot_count_ += 3;
393 num_double_slots_ += 2; // for dynamic frame alignment
394 return spill_slot_count_++;
// Same as above, but wraps the index in the kind-appropriate LOperand.
403 LOperand* LPlatformChunk::GetNextSpillSlot(RegisterKind kind) {
404 int index = GetNextSpillIndex(kind);
406 case GENERAL_REGISTERS: return LStackSlot::Create(index, zone());
407 case DOUBLE_REGISTERS: return LDoubleStackSlot::Create(index, zone());
408 case FLOAT32x4_REGISTERS: return LFloat32x4StackSlot::Create(index, zone());
409 case INT32x4_REGISTERS: return LInt32x4StackSlot::Create(index, zone());
// Printers for field / keyed loads and stores.
417 void LStoreNamedField::PrintDataTo(StringStream* stream) {
418 object()->PrintTo(stream);
419 hydrogen()->access().PrintTo(stream);
421 value()->PrintTo(stream);
425 void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
426 object()->PrintTo(stream);
428 stream->Add(String::cast(*name())->ToCString().get());
430 value()->PrintTo(stream);
// Dehoisted keyed accesses carry a constant additional index that was
// folded out of the key; it is printed as "+ N" inside the brackets.
434 void LLoadKeyed::PrintDataTo(StringStream* stream) {
435 elements()->PrintTo(stream);
437 key()->PrintTo(stream);
438 if (hydrogen()->IsDehoisted()) {
439 stream->Add(" + %d]", additional_index());
446 void LStoreKeyed::PrintDataTo(StringStream* stream) {
447 elements()->PrintTo(stream);
449 key()->PrintTo(stream);
450 if (hydrogen()->IsDehoisted()) {
451 stream->Add(" + %d] <-", additional_index());
453 stream->Add("] <- ");
// A NULL value operand is only legal for a constant hole store into a
// double array; print the hole NaN marker instead.
456 if (value() == NULL) {
457 ASSERT(hydrogen()->IsConstantHoleStore() &&
458 hydrogen()->value()->representation().IsDouble());
459 stream->Add("<the hole(nan)>");
461 value()->PrintTo(stream);
466 void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
467 object()->PrintTo(stream);
469 key()->PrintTo(stream);
470 stream->Add("] <- ");
471 value()->PrintTo(stream);
475 void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
476 object()->PrintTo(stream);
477 stream->Add(" %p -> %p", *original_map(), *transitioned_map());
// Top-level driver: creates the LPlatformChunk, reserves special spill
// slots (dynamic-alignment state, OSR frame), then lowers every basic
// block in order. Returns NULL if lowering aborted.
481 LPlatformChunk* LChunkBuilder::Build() {
483 chunk_ = new(zone()) LPlatformChunk(info(), graph());
484 LPhase phase("L_Building chunk", chunk_);
487 // Reserve the first spill slot for the state of dynamic alignment.
488 if (info()->IsOptimizing()) {
489 int alignment_state_index = chunk_->GetNextSpillIndex(GENERAL_REGISTERS);
490 ASSERT_EQ(alignment_state_index, 0);
491 USE(alignment_state_index);
494 // If compiling for OSR, reserve space for the unoptimized frame,
495 // which will be subsumed into this frame.
496 if (graph()->has_osr()) {
497 for (int i = graph()->osr()->UnoptimizedFrameSlots(); i > 0; i--) {
498 chunk_->GetNextSpillIndex(GENERAL_REGISTERS);
// Lower each block; pass the lexically-next block so gotos to it can be
// elided by the codegen.
502 const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
503 for (int i = 0; i < blocks->length(); i++) {
504 HBasicBlock* next = NULL;
505 if (i < blocks->length() - 1) next = blocks->at(i + 1);
506 DoBasicBlock(blocks->at(i), next);
507 if (is_aborted()) return NULL;
// Records the bailout reason; is_aborted() becomes true afterwards
// (status update elided in this excerpt).
514 void LChunkBuilder::Abort(BailoutReason reason) {
515 info()->set_bailout_reason(reason);
// --- Operand-policy helpers -------------------------------------------
// Each Use* builds an LUnallocated describing how the register allocator
// may place the HValue: fixed register, any register, stack, or an
// inline immediate constant.
520 LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
521 return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
522 Register::ToAllocationIndex(reg));
526 LUnallocated* LChunkBuilder::ToUnallocated(XMMRegister reg) {
527 return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
528 XMMRegister::ToAllocationIndex(reg));
532 LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) {
533 return Use(value, ToUnallocated(fixed_register));
537 LOperand* LChunkBuilder::UseFixedDouble(HValue* value, XMMRegister reg) {
538 return Use(value, ToUnallocated(reg));
// Value must live in some register for the whole instruction.
542 LOperand* LChunkBuilder::UseRegister(HValue* value) {
543 return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
// Register needed only at the start — may alias the output afterwards.
547 LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
549 new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
550 LUnallocated::USED_AT_START));
// Writable register: instruction may clobber the input.
554 LOperand* LChunkBuilder::UseTempRegister(HValue* value) {
555 return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));
// No constraint: register or stack slot, allocator's choice.
559 LOperand* LChunkBuilder::Use(HValue* value) {
560 return Use(value, new(zone()) LUnallocated(LUnallocated::NONE));
564 LOperand* LChunkBuilder::UseAtStart(HValue* value) {
565 return Use(value, new(zone()) LUnallocated(LUnallocated::NONE,
566 LUnallocated::USED_AT_START));
// A constant may be used as an immediate only if it is not a new-space
// object (new-space addresses can move under GC).
570 static inline bool CanBeImmediateConstant(HValue* value) {
571 return value->IsConstant() && HConstant::cast(value)->NotInNewSpace();
575 LOperand* LChunkBuilder::UseOrConstant(HValue* value) {
576 return CanBeImmediateConstant(value)
577 ? chunk_->DefineConstantOperand(HConstant::cast(value))
582 LOperand* LChunkBuilder::UseOrConstantAtStart(HValue* value) {
583 return CanBeImmediateConstant(value)
584 ? chunk_->DefineConstantOperand(HConstant::cast(value))
589 LOperand* LChunkBuilder::UseFixedOrConstant(HValue* value,
590 Register fixed_register) {
591 return CanBeImmediateConstant(value)
592 ? chunk_->DefineConstantOperand(HConstant::cast(value))
593 : UseFixed(value, fixed_register);
597 LOperand* LChunkBuilder::UseRegisterOrConstant(HValue* value) {
598 return CanBeImmediateConstant(value)
599 ? chunk_->DefineConstantOperand(HConstant::cast(value))
600 : UseRegister(value);
604 LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
605 return CanBeImmediateConstant(value)
606 ? chunk_->DefineConstantOperand(HConstant::cast(value))
607 : UseRegisterAtStart(value);
// Caller guarantees the value is a constant (no immediate-safety check).
611 LOperand* LChunkBuilder::UseConstant(HValue* value) {
612 return chunk_->DefineConstantOperand(HConstant::cast(value));
616 LOperand* LChunkBuilder::UseAny(HValue* value) {
617 return value->IsConstant()
618 ? chunk_->DefineConstantOperand(HConstant::cast(value))
619 : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
// Core Use: lazily lowers EmitAtUses values at their use site, then tags
// the operand with the value's virtual register.
623 LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
624 if (value->EmitAtUses()) {
625 HInstruction* instr = HInstruction::cast(value);
626 VisitInstruction(instr);
628 operand->set_virtual_register(value->id());
// Define* helpers attach a result operand (with the given allocation
// policy) to a single-result instruction.
633 LInstruction* LChunkBuilder::Define(LTemplateResultInstruction<1>* instr,
634 LUnallocated* result) {
635 result->set_virtual_register(current_instruction_->id());
636 instr->set_result(result);
641 LInstruction* LChunkBuilder::DefineAsRegister(
642 LTemplateResultInstruction<1>* instr) {
644 new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
648 LInstruction* LChunkBuilder::DefineAsSpilled(
649 LTemplateResultInstruction<1>* instr,
652 new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));
656 LInstruction* LChunkBuilder::DefineSameAsFirst(
657 LTemplateResultInstruction<1>* instr) {
659 new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
663 LInstruction* LChunkBuilder::DefineFixed(LTemplateResultInstruction<1>* instr,
665 return Define(instr, ToUnallocated(reg));
669 LInstruction* LChunkBuilder::DefineFixedDouble(
670 LTemplateResultInstruction<1>* instr,
672 return Define(instr, ToUnallocated(reg));
// Attaches a deopt environment built from the current block's last
// hydrogen environment.
676 LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
677 HEnvironment* hydrogen_env = current_block_->last_environment();
678 int argument_index_accumulator = 0;
679 ZoneList<HValue*> objects_to_materialize(0, zone());
680 instr->set_environment(CreateEnvironment(hydrogen_env,
681 &argument_index_accumulator,
682 &objects_to_materialize));
// Marks an instruction as a call: assigns a pointer map, defers the
// lazy-deopt environment to the trailing HSimulate when the call has
// observable side effects, and attaches an eager-deopt environment
// otherwise (see comment at original lines 707-710).
687 LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
688 HInstruction* hinstr,
689 CanDeoptimize can_deoptimize) {
690 info()->MarkAsNonDeferredCalling();
696 instr = AssignPointerMap(instr);
698 if (hinstr->HasObservableSideEffects()) {
699 ASSERT(hinstr->next()->IsSimulate());
700 HSimulate* sim = HSimulate::cast(hinstr->next());
// Only one pending deopt environment may be outstanding at a time.
701 ASSERT(instruction_pending_deoptimization_environment_ == NULL);
702 ASSERT(pending_deoptimization_ast_id_.IsNone());
703 instruction_pending_deoptimization_environment_ = instr;
704 pending_deoptimization_ast_id_ = sim->ast_id();
707 // If instruction does not have side-effects lazy deoptimization
708 // after the call will try to deoptimize to the point before the call.
709 // Thus we still need to attach environment to this call even if
710 // call sequence can not deoptimize eagerly.
711 bool needs_environment =
712 (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) ||
713 !hinstr->HasObservableSideEffects();
714 if (needs_environment && !instr->HasEnvironment()) {
715 instr = AssignEnvironment(instr);
// Gives the instruction a fresh (empty) pointer map for GC root scanning.
722 LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
723 ASSERT(!instr->HasPointerMap());
724 instr->set_pointer_map(new(zone()) LPointerMap(zone()));
// Allocates an unconstrained temp register with its own virtual register;
// aborts lowering if the allocator ran out of virtual registers.
729 LUnallocated* LChunkBuilder::TempRegister() {
730 LUnallocated* operand =
731 new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
732 int vreg = allocator_->GetVirtualRegister();
733 if (!allocator_->AllocationOk()) {
734 Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
737 operand->set_virtual_register(vreg);
// Fixed-register temps (general-purpose and XMM variants).
742 LOperand* LChunkBuilder::FixedTemp(Register reg) {
743 LUnallocated* operand = ToUnallocated(reg);
744 ASSERT(operand->HasFixedPolicy());
749 LOperand* LChunkBuilder::FixedTemp(XMMRegister reg) {
750 LUnallocated* operand = ToUnallocated(reg);
751 ASSERT(operand->HasFixedPolicy());
// Trivial lowerings: block entry becomes a label, a dummy use keeps its
// operand alive, and HDeoptimize becomes LDeoptimize with an environment.
756 LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
757 return new(zone()) LLabel(instr->block());
761 LInstruction* LChunkBuilder::DoDummyUse(HDummyUse* instr) {
762 return DefineAsRegister(new(zone()) LDummyUse(UseAny(instr->value())));
// Environment markers produce no Lithium code (body elided here).
766 LInstruction* LChunkBuilder::DoEnvironmentMarker(HEnvironmentMarker* instr) {
772 LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
773 return AssignEnvironment(new(zone()) LDeoptimize);
// Lowers SHL/SAR/SHR/ROR. Integer/smi path: left in a register, shift
// amount either an immediate (masked to 0..31) or fixed in ecx (the only
// legal variable shift-count register on IA-32). Tagged path falls back
// to the generic stub via DoArithmeticT.
777 LInstruction* LChunkBuilder::DoShift(Token::Value op,
778 HBitwiseBinaryOperation* instr) {
779 if (instr->representation().IsSmiOrInteger32()) {
780 ASSERT(instr->left()->representation().Equals(instr->representation()));
781 ASSERT(instr->right()->representation().Equals(instr->representation()));
782 LOperand* left = UseRegisterAtStart(instr->left());
784 HValue* right_value = instr->right();
785 LOperand* right = NULL;
786 int constant_value = 0;
787 bool does_deopt = false;
788 if (right_value->IsConstant()) {
789 HConstant* constant = HConstant::cast(right_value);
790 right = chunk_->DefineConstantOperand(constant);
791 constant_value = constant->Integer32Value() & 0x1f;
792 // Left shifts can deoptimize if we shift by > 0 and the result cannot be
794 if (instr->representation().IsSmi() && constant_value > 0) {
795 does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToSmi);
798 right = UseFixed(right_value, ecx);
801 // Shift operations can only deoptimize if we do a logical shift by 0 and
802 // the result cannot be truncated to int32.
803 if (op == Token::SHR && constant_value == 0) {
804 if (FLAG_opt_safe_uint32_operations) {
805 does_deopt = !instr->CheckFlag(HInstruction::kUint32);
807 does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToInt32);
811 LInstruction* result =
812 DefineSameAsFirst(new(zone()) LShiftI(op, left, right, does_deopt));
813 return does_deopt ? AssignEnvironment(result) : result;
815 return DoArithmeticT(op, instr);
// Double arithmetic: MOD goes through a runtime call (MarkAsCall),
// everything else is a plain two-operand SSE op with result aliasing
// the first input.
820 LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
821 HArithmeticBinaryOperation* instr) {
822 ASSERT(instr->representation().IsDouble());
823 ASSERT(instr->left()->representation().IsDouble());
824 ASSERT(instr->right()->representation().IsDouble());
825 if (op == Token::MOD) {
826 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
827 LOperand* right = UseRegisterAtStart(instr->BetterRightOperand());
828 LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
829 return MarkAsCall(DefineSameAsFirst(result), instr);
831 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
832 LOperand* right = UseRegisterAtStart(instr->BetterRightOperand());
833 LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
834 return DefineSameAsFirst(result);
// Tagged (generic) binary op: calls the binary-op stub with the standard
// IA-32 calling convention — left in edx, right in eax, context in esi,
// result in eax.
839 LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
840 HBinaryOperation* instr) {
841 HValue* left = instr->left();
842 HValue* right = instr->right();
843 ASSERT(left->representation().IsTagged());
844 ASSERT(right->representation().IsTagged());
845 LOperand* context = UseFixed(instr->context(), esi);
846 LOperand* left_operand = UseFixed(left, edx);
847 LOperand* right_operand = UseFixed(right, eax);
848 LArithmeticT* result =
849 new(zone()) LArithmeticT(op, context, left_operand, right_operand);
850 return MarkAsCall(DefineFixed(result, eax), instr);
// Lowers one hydrogen basic block: sets up the incoming environment
// (start block, single-predecessor copy, or phi join), then visits each
// instruction and records the block's instruction index range.
854 void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) {
855 ASSERT(is_building());
856 current_block_ = block;
857 next_block_ = next_block;
858 if (block->IsStartBlock()) {
859 block->UpdateEnvironment(graph_->start_environment());
861 } else if (block->predecessors()->length() == 1) {
862 // We have a single predecessor => copy environment and outgoing
863 // argument count from the predecessor.
864 ASSERT(block->phis()->length() == 0);
865 HBasicBlock* pred = block->predecessors()->at(0);
866 HEnvironment* last_environment = pred->last_environment();
867 ASSERT(last_environment != NULL);
868 // Only copy the environment, if it is later used again.
869 if (pred->end()->SecondSuccessor() == NULL) {
870 ASSERT(pred->end()->FirstSuccessor() == block);
// A successor with a higher block id is processed later, so the
// predecessor's environment must be copied rather than shared.
872 if (pred->end()->FirstSuccessor()->block_id() > block->block_id() ||
873 pred->end()->SecondSuccessor()->block_id() > block->block_id()) {
874 last_environment = last_environment->Copy();
877 block->UpdateEnvironment(last_environment);
878 ASSERT(pred->argument_count() >= 0);
879 argument_count_ = pred->argument_count();
881 // We are at a state join => process phis.
882 HBasicBlock* pred = block->predecessors()->at(0);
883 // No need to copy the environment, it cannot be used later.
884 HEnvironment* last_environment = pred->last_environment();
885 for (int i = 0; i < block->phis()->length(); ++i) {
886 HPhi* phi = block->phis()->at(i);
887 if (phi->HasMergedIndex()) {
888 last_environment->SetValueAt(phi->merged_index(), phi);
// Deleted phi slots are replaced by undefined so the environment
// stays the right length.
891 for (int i = 0; i < block->deleted_phis()->length(); ++i) {
892 if (block->deleted_phis()->at(i) < last_environment->length()) {
893 last_environment->SetValueAt(block->deleted_phis()->at(i),
894 graph_->GetConstantUndefined());
897 block->UpdateEnvironment(last_environment);
898 // Pick up the outgoing argument count of one of the predecessors.
899 argument_count_ = pred->argument_count();
901 HInstruction* current = block->first();
902 int start = chunk_->instructions()->length();
903 while (current != NULL && !is_aborted()) {
904 // Code for constants in registers is generated lazily.
905 if (!current->EmitAtUses()) {
906 VisitInstruction(current);
908 current = current->next();
910 int end = chunk_->instructions()->length() - 1;
912 block->set_first_instruction_index(start);
913 block->set_last_instruction_index(end);
915 block->set_argument_count(argument_count_);
917 current_block_ = NULL;
// Lowers one hydrogen instruction to Lithium: either replaces it with
// dummy uses (when its value is dead), or dispatches to the per-opcode
// CompileToLithium. Also enforces the fixed-vs-used-at-start invariant
// (chromium bug 201590), applies stress flags, and inserts an x87
// clobber barrier before joining gotos when SSE2 is unavailable.
921 void LChunkBuilder::VisitInstruction(HInstruction* current) {
922 HInstruction* old_current = current_instruction_;
923 current_instruction_ = current;
925 LInstruction* instr = NULL;
926 if (current->CanReplaceWithDummyUses()) {
927 if (current->OperandCount() == 0) {
928 instr = DefineAsRegister(new(zone()) LDummy());
930 ASSERT(!current->OperandAt(0)->IsControlInstruction());
931 instr = DefineAsRegister(new(zone())
932 LDummyUse(UseAny(current->OperandAt(0))));
// Remaining operands become stand-alone dummy uses so their values
// stay live.
934 for (int i = 1; i < current->OperandCount(); ++i) {
935 if (current->OperandAt(i)->IsControlInstruction()) continue;
936 LInstruction* dummy =
937 new(zone()) LDummyUse(UseAny(current->OperandAt(i)));
938 dummy->set_hydrogen_value(current);
939 chunk_->AddInstruction(dummy, current_block_);
942 instr = current->CompileToLithium(this);
945 argument_count_ += current->argument_delta();
946 ASSERT(argument_count_ >= 0);
949 // Associate the hydrogen instruction first, since we may need it for
950 // the ClobbersRegisters() or ClobbersDoubleRegisters() calls below.
951 instr->set_hydrogen_value(current);
954 // Make sure that the lithium instruction has either no fixed register
955 // constraints in temps or the result OR no uses that are only used at
956 // start. If this invariant doesn't hold, the register allocator can decide
957 // to insert a split of a range immediately before the instruction due to an
958 // already allocated register needing to be used for the instruction's fixed
959 // register constraint. In this case, The register allocator won't see an
960 // interference between the split child and the use-at-start (it would if
961 // the it was just a plain use), so it is free to move the split child into
962 // the same register that is used for the use-at-start.
963 // See https://code.google.com/p/chromium/issues/detail?id=201590
964 if (!(instr->ClobbersRegisters() && instr->ClobbersDoubleRegisters())) {
966 int used_at_start = 0;
967 for (UseIterator it(instr); !it.Done(); it.Advance()) {
968 LUnallocated* operand = LUnallocated::cast(it.Current());
969 if (operand->IsUsedAtStart()) ++used_at_start;
971 if (instr->Output() != NULL) {
972 if (LUnallocated::cast(instr->Output())->HasFixedPolicy()) ++fixed;
974 for (TempIterator it(instr); !it.Done(); it.Advance()) {
975 LUnallocated* operand = LUnallocated::cast(it.Current());
976 if (operand->HasFixedPolicy()) ++fixed;
978 ASSERT(fixed == 0 || used_at_start == 0);
982 if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) {
983 instr = AssignPointerMap(instr);
985 if (FLAG_stress_environments && !instr->HasEnvironment()) {
986 instr = AssignEnvironment(instr);
988 if (!CpuFeatures::IsSafeForSnapshot(SSE2) && instr->IsGoto() &&
989 LGoto::cast(instr)->jumps_to_join()) {
990 // TODO(olivf) Since phis of spilled values are joined as registers
991 // (not in the stack slot), we need to allow the goto gaps to keep one
992 // x87 register alive. To ensure all other values are still spilled, we
993 // insert a fpu register barrier right before.
994 LClobberDoubles* clobber = new(zone()) LClobberDoubles();
995 clobber->set_hydrogen_value(current);
996 chunk_->AddInstruction(clobber, current_block_);
998 chunk_->AddInstruction(instr, current_block_);
1000 current_instruction_ = old_current;
// Unconditional jump to the (single) successor block.
1004 LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
1005 return new(zone()) LGoto(instr->FirstSuccessor());
// Lowers a generic boolean branch. The branch only needs a deopt
// environment when the value is tagged and the type-feedback set of
// expected input types is neither empty-handled nor fully generic.
1009 LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
// A branch on a compile-time-known condition collapses to a goto.
1010 LInstruction* goto_instr = CheckElideControlInstruction(instr);
1011 if (goto_instr != NULL) return goto_instr;
1013 ToBooleanStub::Types expected = instr->expected_input_types();
1015 // Tagged values that are not known smis or booleans require a
1016 // deoptimization environment. If the instruction is generic no
1017 // environment is needed since all cases are handled.
1018 HValue* value = instr->value();
1019 Representation rep = value->representation();
1020 HType type = value->type();
1021 if (!rep.IsTagged() || type.IsSmi() || type.IsBoolean()) {
1022 return new(zone()) LBranch(UseRegister(value), NULL);
1025 bool needs_temp = expected.NeedsMap() || expected.IsEmpty();
1026 LOperand* temp = needs_temp ? TempRegister() : NULL;
1028 // The Generic stub does not have a deopt, so we need no environment.
1029 if (expected.IsGeneric()) {
1030 return new(zone()) LBranch(UseRegister(value), temp);
1033 // We need a temporary register when we have to access the map *or* we have
1034 // no type info yet, in which case we handle all cases (including the ones
1036 return AssignEnvironment(new(zone()) LBranch(UseRegister(value), temp));
// DebugBreak lowers to a dedicated instruction with no operands.
1040 LInstruction* LChunkBuilder::DoDebugBreak(HDebugBreak* instr) {
1041 return new(zone()) LDebugBreak();
// Map comparison branch; elided to a goto when statically decidable.
1045 LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
1046 LInstruction* goto_instr = CheckElideControlInstruction(instr);
1047 if (goto_instr != NULL) return goto_instr;
1049 ASSERT(instr->value()->representation().IsTagged());
1050 LOperand* value = UseRegisterAtStart(instr->value());
1051 return new(zone()) LCmpMapAndBranch(value);
// Arguments introspection requires a real frame.
1055 LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* length) {
1056 info()->MarkAsRequiresFrame();
1057 return DefineAsRegister(new(zone()) LArgumentsLength(Use(length->value())));
1061 LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
1062 info()->MarkAsRequiresFrame();
1063 return DefineAsRegister(new(zone()) LArgumentsElements);
// instanceof goes through InstanceofStub: operands in the stub's fixed
// registers, context in esi, result in eax.
1067 LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
1068 LOperand* left = UseFixed(instr->left(), InstanceofStub::left());
1069 LOperand* right = UseFixed(instr->right(), InstanceofStub::right());
1070 LOperand* context = UseFixed(instr->context(), esi);
1071 LInstanceOf* result = new(zone()) LInstanceOf(context, left, right);
1072 return MarkAsCall(DefineFixed(result, eax), instr);
1076 LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
1077 HInstanceOfKnownGlobal* instr) {
1078 LInstanceOfKnownGlobal* result =
1079 new(zone()) LInstanceOfKnownGlobal(
1080 UseFixed(instr->context(), esi),
1081 UseFixed(instr->left(), InstanceofStub::left()),
1083 return MarkAsCall(DefineFixed(result, eax), instr);
// Receiver wrapping can deopt (e.g. for strict-mode/global receivers),
// hence the environment.
1087 LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
1088 LOperand* receiver = UseRegister(instr->receiver());
1089 LOperand* function = UseRegister(instr->function());
1090 LOperand* temp = TempRegister();
1091 LWrapReceiver* result =
1092 new(zone()) LWrapReceiver(receiver, function, temp);
1093 return AssignEnvironment(DefineSameAsFirst(result));
// Function.prototype.apply: all four operands pinned to the registers
// the stub/trampoline expects; may deoptimize eagerly.
1097 LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
1098 LOperand* function = UseFixed(instr->function(), edi);
1099 LOperand* receiver = UseFixed(instr->receiver(), eax);
1100 LOperand* length = UseFixed(instr->length(), ebx);
1101 LOperand* elements = UseFixed(instr->elements(), ecx);
1102 LApplyArguments* result = new(zone()) LApplyArguments(function,
1106 return MarkAsCall(DefineFixed(result, eax), instr, CAN_DEOPTIMIZE_EAGERLY);
// Pushes a call argument.  UseAny: register, stack slot or constant are all
// acceptable for a push.
1110 LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
1111 LOperand* argument = UseAny(instr->argument());
1112 return new(zone()) LPushArgument(argument);
// Stores a code entry pointer into a function; the code object needs its own
// temp-usable register since it is consumed while writing.
1116 LInstruction* LChunkBuilder::DoStoreCodeEntry(
1117 HStoreCodeEntry* store_code_entry) {
1118 LOperand* function = UseRegister(store_code_entry->function());
1119 LOperand* code_object = UseTempRegister(store_code_entry->code_object());
1120 return new(zone()) LStoreCodeEntry(function, code_object);
// Computes base_object + offset (offset may be a constant) into a fresh
// register.
1124 LInstruction* LChunkBuilder::DoInnerAllocatedObject(
1125 HInnerAllocatedObject* instr) {
1126 LOperand* base_object = UseRegisterAtStart(instr->base_object());
1127 LOperand* offset = UseRegisterOrConstantAtStart(instr->offset());
1128 return DefineAsRegister(
1129 new(zone()) LInnerAllocatedObject(base_object, offset));
// Materializes the current function if it is actually used.
// NOTE(review): the branch taken when HasNoUses() is true (upstream line
// 1135) is elided from this excerpt.
1133 LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
1134 return instr->HasNoUses()
1136 : DefineAsRegister(new(zone()) LThisFunction);
// Materializes the context.  Stubs must keep it in the fixed esi register;
// regular code can use any register.  Unused contexts produce no instruction.
1140 LInstruction* LChunkBuilder::DoContext(HContext* instr) {
1141 if (instr->HasNoUses()) return NULL;
1143 if (info()->IsStub()) {
1144 return DefineFixed(new(zone()) LContext, esi);
1147 return DefineAsRegister(new(zone()) LContext);
// Declares globals via a runtime call; context fixed in esi.
1151 LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
1152 LOperand* context = UseFixed(instr->context(), esi);
1153 return MarkAsCall(new(zone()) LDeclareGlobals(context), instr);
// --- calls ------------------------------------------------------------------
// Direct call to a known JS function: callee fixed in edi, result in eax.
// NOTE(review): gaps in the inline numbering mark lines elided from this
// excerpt; restore from upstream before compiling.
1157 LInstruction* LChunkBuilder::DoCallJSFunction(
1158 HCallJSFunction* instr) {
1159 LOperand* function = UseFixed(instr->function(), edi);
1161 LCallJSFunction* result = new(zone()) LCallJSFunction(function);
1163 return MarkAsCall(DefineFixed(result, eax), instr, CANNOT_DEOPTIMIZE_EAGERLY);
// Call through an interface descriptor: operand 0 is the call target
// (register or constant); operands 1..n-1 are pinned to the registers the
// descriptor prescribes.
1167 LInstruction* LChunkBuilder::DoCallWithDescriptor(
1168 HCallWithDescriptor* instr) {
1169 const CallInterfaceDescriptor* descriptor = instr->descriptor();
1171 LOperand* target = UseRegisterOrConstantAtStart(instr->target());
1172 ZoneList<LOperand*> ops(instr->OperandCount(), zone());
1173 ops.Add(target, zone());
1174 for (int i = 1; i < instr->OperandCount(); i++) {
1175 LOperand* op = UseFixed(instr->OperandAt(i),
1176 descriptor->GetParameterRegister(i - 1));
1177 ops.Add(op, zone());
1180 LCallWithDescriptor* result = new(zone()) LCallWithDescriptor(
1181 descriptor, ops, zone());
1182 return MarkAsCall(DefineFixed(result, eax), instr, CANNOT_DEOPTIMIZE_EAGERLY);
// Invoke with dynamic argument adaptation: context in esi, callee in edi.
1186 LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
1187 LOperand* context = UseFixed(instr->context(), esi);
1188 LOperand* function = UseFixed(instr->function(), edi);
1189 LInvokeFunction* result = new(zone()) LInvokeFunction(context, function);
1190 return MarkAsCall(DefineFixed(result, eax), instr, CANNOT_DEOPTIMIZE_EAGERLY);
// --- unary math -------------------------------------------------------------
// Dispatches each Math.* builtin to its dedicated lowering below.
// NOTE(review): upstream lines 1203-1209 (remaining cases / default of the
// switch) are elided from this excerpt.
1194 LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
1195 switch (instr->op()) {
1196 case kMathFloor: return DoMathFloor(instr);
1197 case kMathRound: return DoMathRound(instr);
1198 case kMathAbs: return DoMathAbs(instr);
1199 case kMathLog: return DoMathLog(instr);
1200 case kMathExp: return DoMathExp(instr);
1201 case kMathSqrt: return DoMathSqrt(instr);
1202 case kMathPowHalf: return DoMathPowHalf(instr);
// Floor can deoptimize (AssignEnvironment) e.g. on inputs it cannot
// represent as an int32.
1210 LInstruction* LChunkBuilder::DoMathFloor(HUnaryMathOperation* instr) {
1211 LOperand* input = UseRegisterAtStart(instr->value());
1212 LMathFloor* result = new(zone()) LMathFloor(input);
1213 return AssignEnvironment(DefineAsRegister(result));
// Round needs a fixed xmm scratch register (xmm4) and may deoptimize.
1217 LInstruction* LChunkBuilder::DoMathRound(HUnaryMathOperation* instr) {
1218 LOperand* input = UseRegister(instr->value());
1219 LOperand* temp = FixedTemp(xmm4);
1220 LMathRound* result = new(zone()) LMathRound(input, temp);
1221 return AssignEnvironment(DefineAsRegister(result));
// Abs may allocate a heap number on overflow, hence the deferred context use,
// the pointer map, and the environment for deoptimization.
1225 LInstruction* LChunkBuilder::DoMathAbs(HUnaryMathOperation* instr) {
1226 LOperand* context = UseAny(instr->context()); // Deferred use.
1227 LOperand* input = UseRegisterAtStart(instr->value());
1228 LMathAbs* result = new(zone()) LMathAbs(context, input);
1229 return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
// Log is lowered to a call; double in, double out, result reuses the input.
1233 LInstruction* LChunkBuilder::DoMathLog(HUnaryMathOperation* instr) {
1234 ASSERT(instr->representation().IsDouble());
1235 ASSERT(instr->value()->representation().IsDouble());
1236 LOperand* input = UseRegisterAtStart(instr->value());
1237 return MarkAsCall(DefineSameAsFirst(new(zone()) LMathLog(input)), instr);
// Exp is computed inline; needs two integer temps and clobbers the input
// (UseTempRegister).
1241 LInstruction* LChunkBuilder::DoMathExp(HUnaryMathOperation* instr) {
1242 ASSERT(instr->representation().IsDouble());
1243 ASSERT(instr->value()->representation().IsDouble());
1244 LOperand* value = UseTempRegister(instr->value());
1245 LOperand* temp1 = TempRegister();
1246 LOperand* temp2 = TempRegister();
1247 LMathExp* result = new(zone()) LMathExp(value, temp1, temp2);
1248 return DefineAsRegister(result);
// Sqrt: single instruction, result reuses the input register.
1252 LInstruction* LChunkBuilder::DoMathSqrt(HUnaryMathOperation* instr) {
1253 LOperand* input = UseRegisterAtStart(instr->value());
1254 LMathSqrt* result = new(zone()) LMathSqrt(input);
1255 return DefineSameAsFirst(result);
// pow(x, 0.5): needs one temp, result reuses the input register.
1259 LInstruction* LChunkBuilder::DoMathPowHalf(HUnaryMathOperation* instr) {
1260 LOperand* input = UseRegisterAtStart(instr->value());
1261 LOperand* temp = TempRegister();
1262 LMathPowHalf* result = new(zone()) LMathPowHalf(input, temp);
1263 return DefineSameAsFirst(result);
// Constructor calls and generic calls: context fixed in esi, callee/
// constructor fixed in edi, result fixed in eax, all marked as calls.
1267 LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
1268 LOperand* context = UseFixed(instr->context(), esi);
1269 LOperand* constructor = UseFixed(instr->constructor(), edi);
1270 LCallNew* result = new(zone()) LCallNew(context, constructor);
1271 return MarkAsCall(DefineFixed(result, eax), instr);
// Array constructor variant of the above.
1275 LInstruction* LChunkBuilder::DoCallNewArray(HCallNewArray* instr) {
1276 LOperand* context = UseFixed(instr->context(), esi);
1277 LOperand* constructor = UseFixed(instr->constructor(), edi);
1278 LCallNewArray* result = new(zone()) LCallNewArray(context, constructor);
1279 return MarkAsCall(DefineFixed(result, eax), instr);
// Generic function call.
1283 LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
1284 LOperand* context = UseFixed(instr->context(), esi);
1285 LOperand* function = UseFixed(instr->function(), edi);
1286 LCallFunction* call = new(zone()) LCallFunction(context, function);
1287 return MarkAsCall(DefineFixed(call, eax), instr);
// Runtime-function call; only the context is an explicit operand.
1291 LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
1292 LOperand* context = UseFixed(instr->context(), esi);
1293 return MarkAsCall(DefineFixed(new(zone()) LCallRuntime(context), eax), instr);
// --- shifts and bitwise ops -------------------------------------------------
// All four shift variants share DoShift (defined elsewhere in this file),
// parameterized by the token that selects the machine shift.
1297 LInstruction* LChunkBuilder::DoRor(HRor* instr) {
1298 return DoShift(Token::ROR, instr);
1302 LInstruction* LChunkBuilder::DoShr(HShr* instr) {
1303 return DoShift(Token::SHR, instr);
1307 LInstruction* LChunkBuilder::DoSar(HSar* instr) {
1308 return DoShift(Token::SAR, instr);
1312 LInstruction* LChunkBuilder::DoShl(HShl* instr) {
1313 return DoShift(Token::SHL, instr);
// Bitwise and/or/xor: int32/Smi inputs go to LBitI (two-operand x86 form, so
// the result reuses the left input); anything else falls back to the generic
// binary-op stub via DoArithmeticT.
1317 LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
1318 if (instr->representation().IsSmiOrInteger32()) {
1319 ASSERT(instr->left()->representation().Equals(instr->representation()));
1320 ASSERT(instr->right()->representation().Equals(instr->representation()));
1321 ASSERT(instr->CheckFlag(HValue::kTruncatingToInt32));
1323 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
1324 LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand());
1325 return DefineSameAsFirst(new(zone()) LBitI(left, right));
1327 return DoArithmeticT(instr->op(), instr);
// --- division ---------------------------------------------------------------
// Integer division: power-of-two divisors are strength-reduced (LDivI with a
// constant right and no temp); the general case uses the x86 idiv convention
// with the dividend fixed in eax and edx reserved as a temp so the divisor
// cannot be allocated there.  Doubles and taggeds fall back to DoArithmeticD/T.
// NOTE(review): gaps in the inline numbering mark lines elided from this
// excerpt (e.g. upstream 1339); restore from upstream before compiling.
1332 LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
1333 if (instr->representation().IsSmiOrInteger32()) {
1334 ASSERT(instr->left()->representation().Equals(instr->representation()));
1335 ASSERT(instr->right()->representation().Equals(instr->representation()));
1336 if (instr->RightIsPowerOf2()) {
1337 ASSERT(!instr->CheckFlag(HValue::kCanBeDivByZero));
1338 LOperand* value = UseRegisterAtStart(instr->left());
1340 new(zone()) LDivI(value, UseOrConstant(instr->right()), NULL);
1341 return AssignEnvironment(DefineSameAsFirst(div));
1343 // The temporary operand is necessary to ensure that right is not allocated
1345 LOperand* temp = FixedTemp(edx);
1346 LOperand* dividend = UseFixed(instr->left(), eax);
1347 LOperand* divisor = UseRegister(instr->right());
1348 LDivI* result = new(zone()) LDivI(dividend, divisor, temp);
1349 return AssignEnvironment(DefineFixed(result, eax));
1350 } else if (instr->representation().IsDouble()) {
1351 return DoArithmeticD(Token::DIV, instr);
1353 return DoArithmeticT(Token::DIV, instr);
// Math.floor(a / b).  Non-constant divisors reuse the LDivI idiv lowering
// (eax dividend, edx temp).  Constant divisors split three ways: zero
// (always deopts), power of two (shift-based; deopt only for negative
// divisors), and the general constant (multiply-high trick needing edx:eax
// plus a temp, result in edx).
1358 LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
1359 HValue* right = instr->right();
1360 if (!right->IsConstant()) {
1361 ASSERT(right->representation().IsInteger32());
1362 // The temporary operand is necessary to ensure that right is not allocated
1364 LOperand* temp = FixedTemp(edx);
1365 LOperand* dividend = UseFixed(instr->left(), eax);
1366 LOperand* divisor = UseRegister(instr->right());
1367 LDivI* flooring_div = new(zone()) LDivI(dividend, divisor, temp);
1368 return AssignEnvironment(DefineFixed(flooring_div, eax));
1371 ASSERT(right->IsConstant() && HConstant::cast(right)->HasInteger32Value());
1372 LOperand* divisor = chunk_->DefineConstantOperand(HConstant::cast(right));
1373 int32_t divisor_si = HConstant::cast(right)->Integer32Value();
1374 if (divisor_si == 0) {
1375 LOperand* dividend = UseRegister(instr->left());
1376 return AssignEnvironment(DefineAsRegister(
1377 new(zone()) LMathFloorOfDiv(dividend, divisor, NULL)));
1378 } else if (IsPowerOf2(abs(divisor_si))) {
1379 // use dividend as temp if divisor < 0 && divisor != -1
1380 LOperand* dividend = divisor_si < -1 ? UseTempRegister(instr->left()) :
1381 UseRegisterAtStart(instr->left());
1382 LInstruction* result = DefineAsRegister(
1383 new(zone()) LMathFloorOfDiv(dividend, divisor, NULL));
1384 return divisor_si < 0 ? AssignEnvironment(result) : result;
1386 // needs edx:eax, plus a temp
1387 LOperand* dividend = UseFixed(instr->left(), eax);
1388 LOperand* temp = TempRegister();
1389 LInstruction* result = DefineFixed(
1390 new(zone()) LMathFloorOfDiv(dividend, divisor, temp), edx);
1391 return divisor_si < 0 ? AssignEnvironment(result) : result;
// Modulo: power-of-two right operands get a mask-based LModI (deopt only when
// a negative left could produce -0); the general case uses idiv with left
// fixed in eax and the result in edx, deopting when the divisor can be zero,
// on kMinInt % -1, or when -0 must be caught.  Doubles/taggeds fall back to
// the arithmetic helpers.
// NOTE(review): gaps in the inline numbering mark lines elided from this
// excerpt (e.g. upstream 1407, 1412, 1418-1419, 1429-1430).
1396 LInstruction* LChunkBuilder::DoMod(HMod* instr) {
1397 HValue* left = instr->left();
1398 HValue* right = instr->right();
1399 if (instr->representation().IsSmiOrInteger32()) {
1400 ASSERT(instr->left()->representation().Equals(instr->representation()));
1401 ASSERT(instr->right()->representation().Equals(instr->representation()));
1403 if (instr->RightIsPowerOf2()) {
1404 ASSERT(!right->CanBeZero());
1405 LModI* mod = new(zone()) LModI(UseRegisterAtStart(left),
1406 UseOrConstant(right),
1408 LInstruction* result = DefineSameAsFirst(mod);
1409 return (left->CanBeNegative() &&
1410 instr->CheckFlag(HValue::kBailoutOnMinusZero))
1411 ? AssignEnvironment(result)
1413 return AssignEnvironment(DefineSameAsFirst(mod));
1415 // The temporary operand is necessary to ensure that right is not
1416 // allocated into edx.
1417 LModI* mod = new(zone()) LModI(UseFixed(left, eax),
1420 LInstruction* result = DefineFixed(mod, edx);
1421 return (right->CanBeZero() ||
1422 (left->RangeCanInclude(kMinInt) &&
1423 right->RangeCanInclude(-1) &&
1424 instr->CheckFlag(HValue::kBailoutOnMinusZero)) ||
1425 (left->CanBeNegative() &&
1426 instr->CanBeZero() &&
1427 instr->CheckFlag(HValue::kBailoutOnMinusZero)))
1428 ? AssignEnvironment(result)
1431 } else if (instr->representation().IsDouble()) {
1432 return DoArithmeticD(Token::MOD, instr);
1434 return DoArithmeticT(Token::MOD, instr);
// Multiplication: two-operand imul, so the result reuses the left input.  A
// temp register is only needed for the -0 check; overflow or -0 checks
// attach an environment for deoptimization.
1439 LInstruction* LChunkBuilder::DoMul(HMul* instr) {
1440 if (instr->representation().IsSmiOrInteger32()) {
1441 ASSERT(instr->left()->representation().Equals(instr->representation()));
1442 ASSERT(instr->right()->representation().Equals(instr->representation()));
1443 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
1444 LOperand* right = UseOrConstant(instr->BetterRightOperand());
1445 LOperand* temp = NULL;
1446 if (instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1447 temp = TempRegister();
1449 LMulI* mul = new(zone()) LMulI(left, right, temp);
1450 if (instr->CheckFlag(HValue::kCanOverflow) ||
1451 instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1452 AssignEnvironment(mul);
1454 return DefineSameAsFirst(mul);
1455 } else if (instr->representation().IsDouble()) {
1456 return DoArithmeticD(Token::MUL, instr);
1458 return DoArithmeticT(Token::MUL, instr);
// Subtraction: two-operand sub reusing the left input; attaches an
// environment when overflow can deoptimize.
// NOTE(review): gaps in the inline numbering mark lines elided from this
// excerpt (e.g. upstream 1473-1474, 1502, 1504-1505, 1522-1523).
1463 LInstruction* LChunkBuilder::DoSub(HSub* instr) {
1464 if (instr->representation().IsSmiOrInteger32()) {
1465 ASSERT(instr->left()->representation().Equals(instr->representation()));
1466 ASSERT(instr->right()->representation().Equals(instr->representation()));
1467 LOperand* left = UseRegisterAtStart(instr->left());
1468 LOperand* right = UseOrConstantAtStart(instr->right());
1469 LSubI* sub = new(zone()) LSubI(left, right);
1470 LInstruction* result = DefineSameAsFirst(sub);
1471 if (instr->CheckFlag(HValue::kCanOverflow)) {
1472 result = AssignEnvironment(result);
1475 } else if (instr->representation().IsDouble()) {
1476 return DoArithmeticD(Token::SUB, instr);
1478 return DoArithmeticT(Token::SUB, instr);
// Addition: when no overflow check is needed and the inputs have other uses,
// a three-operand lea preserves both inputs (fresh result register);
// otherwise a two-operand add reuses the left input.  External (pointer)
// addition uses the same lea/add choice; everything else goes to the generic
// stub.
1483 LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
1484 if (instr->representation().IsSmiOrInteger32()) {
1485 ASSERT(instr->left()->representation().Equals(instr->representation()));
1486 ASSERT(instr->right()->representation().Equals(instr->representation()));
1487 // Check to see if it would be advantageous to use an lea instruction rather
1488 // than an add. This is the case when no overflow check is needed and there
1489 // are multiple uses of the add's inputs, so using a 3-register add will
1490 // preserve all input values for later uses.
1491 bool use_lea = LAddI::UseLea(instr);
1492 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
1493 HValue* right_candidate = instr->BetterRightOperand();
1494 LOperand* right = use_lea
1495 ? UseRegisterOrConstantAtStart(right_candidate)
1496 : UseOrConstantAtStart(right_candidate);
1497 LAddI* add = new(zone()) LAddI(left, right);
1498 bool can_overflow = instr->CheckFlag(HValue::kCanOverflow);
1499 LInstruction* result = use_lea
1500 ? DefineAsRegister(add)
1501 : DefineSameAsFirst(add);
1503 result = AssignEnvironment(result);
1506 } else if (instr->representation().IsDouble()) {
1507 return DoArithmeticD(Token::ADD, instr);
1508 } else if (instr->representation().IsExternal()) {
1509 ASSERT(instr->left()->representation().IsExternal());
1510 ASSERT(instr->right()->representation().IsInteger32());
1511 ASSERT(!instr->CheckFlag(HValue::kCanOverflow));
1512 bool use_lea = LAddI::UseLea(instr);
1513 LOperand* left = UseRegisterAtStart(instr->left());
1514 HValue* right_candidate = instr->right();
1515 LOperand* right = use_lea
1516 ? UseRegisterOrConstantAtStart(right_candidate)
1517 : UseOrConstantAtStart(right_candidate);
1518 LAddI* add = new(zone()) LAddI(left, right);
1519 LInstruction* result = use_lea
1520 ? DefineAsRegister(add)
1521 : DefineSameAsFirst(add);
1524 return DoArithmeticT(Token::ADD, instr);
// Math.min / Math.max: integer form allows a constant right operand; double
// form requires both operands in registers.  Result reuses the left input.
// NOTE(review): upstream lines 1537 and 1543 (else branch / closing brace)
// are elided from this excerpt.
1529 LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) {
1530 LOperand* left = NULL;
1531 LOperand* right = NULL;
1532 if (instr->representation().IsSmiOrInteger32()) {
1533 ASSERT(instr->left()->representation().Equals(instr->representation()));
1534 ASSERT(instr->right()->representation().Equals(instr->representation()));
1535 left = UseRegisterAtStart(instr->BetterLeftOperand());
1536 right = UseOrConstantAtStart(instr->BetterRightOperand());
1538 ASSERT(instr->representation().IsDouble());
1539 ASSERT(instr->left()->representation().IsDouble());
1540 ASSERT(instr->right()->representation().IsDouble());
1541 left = UseRegisterAtStart(instr->left());
1542 right = UseRegisterAtStart(instr->right());
1544 LMathMinMax* minmax = new(zone()) LMathMinMax(left, right);
1545 return DefineSameAsFirst(minmax);
// Math.pow: lowered to a C call with fixed xmm registers (GC-free), exponent
// either double (xmm1) or tagged/int (eax); result fixed in xmm3.
1549 LInstruction* LChunkBuilder::DoPower(HPower* instr) {
1550 ASSERT(instr->representation().IsDouble());
1551 // We call a C function for double power. It can't trigger a GC.
1552 // We need to use fixed result register for the call.
1553 Representation exponent_type = instr->right()->representation();
1554 ASSERT(instr->left()->representation().IsDouble());
1555 LOperand* left = UseFixedDouble(instr->left(), xmm2);
1556 LOperand* right = exponent_type.IsDouble() ?
1557 UseFixedDouble(instr->right(), xmm1) :
1558 UseFixed(instr->right(), eax);
1559 LPower* result = new(zone()) LPower(left, right);
1560 return MarkAsCall(DefineFixedDouble(result, xmm3), instr,
1561 CAN_DEOPTIMIZE_EAGERLY);
// --- comparisons ------------------------------------------------------------
// Generic comparison: CompareIC call with edx/eax inputs, result in eax.
// NOTE(review): gaps in the inline numbering mark lines elided from this
// excerpt (e.g. upstream 1589-1590 declaring the double-path operands).
1565 LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
1566 ASSERT(instr->left()->representation().IsSmiOrTagged());
1567 ASSERT(instr->right()->representation().IsSmiOrTagged());
1568 LOperand* context = UseFixed(instr->context(), esi);
1569 LOperand* left = UseFixed(instr->left(), edx);
1570 LOperand* right = UseFixed(instr->right(), eax);
1571 LCmpT* result = new(zone()) LCmpT(context, left, right);
1572 return MarkAsCall(DefineFixed(result, eax), instr);
// Numeric compare-and-branch: int32/Smi path allows constants; the double
// path requires either both operands constant or both in registers, because
// the code generator cannot mix the two forms.
1576 LInstruction* LChunkBuilder::DoCompareNumericAndBranch(
1577 HCompareNumericAndBranch* instr) {
1578 Representation r = instr->representation();
1579 if (r.IsSmiOrInteger32()) {
1580 ASSERT(instr->left()->representation().Equals(r));
1581 ASSERT(instr->right()->representation().Equals(r));
1582 LOperand* left = UseRegisterOrConstantAtStart(instr->left());
1583 LOperand* right = UseOrConstantAtStart(instr->right());
1584 return new(zone()) LCompareNumericAndBranch(left, right);
1586 ASSERT(r.IsDouble());
1587 ASSERT(instr->left()->representation().IsDouble());
1588 ASSERT(instr->right()->representation().IsDouble());
1591 if (CanBeImmediateConstant(instr->left()) &&
1592 CanBeImmediateConstant(instr->right())) {
1593 // The code generator requires either both inputs to be constant
1594 // operands, or neither.
1595 left = UseConstant(instr->left());
1596 right = UseConstant(instr->right());
1598 left = UseRegisterAtStart(instr->left());
1599 right = UseRegisterAtStart(instr->right());
1601 return new(zone()) LCompareNumericAndBranch(left, right);
// Reference equality branch; may be elided entirely when both targets are
// the same block (CheckElideControlInstruction).
1606 LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
1607 HCompareObjectEqAndBranch* instr) {
1608 LInstruction* goto_instr = CheckElideControlInstruction(instr);
1609 if (goto_instr != NULL) return goto_instr;
1610 LOperand* left = UseRegisterAtStart(instr->left());
1611 LOperand* right = UseOrConstantAtStart(instr->right());
1612 return new(zone()) LCmpObjectEqAndBranch(left, right);
// Branch on whether a value is the hole.
1616 LInstruction* LChunkBuilder::DoCompareHoleAndBranch(
1617 HCompareHoleAndBranch* instr) {
1618 LOperand* value = UseRegisterAtStart(instr->value());
1619 return new(zone()) LCmpHoleAndBranch(value);
// Branch on whether a value is -0; needs a scratch register and may also be
// elided.
1623 LInstruction* LChunkBuilder::DoCompareMinusZeroAndBranch(
1624 HCompareMinusZeroAndBranch* instr) {
1625 LInstruction* goto_instr = CheckElideControlInstruction(instr);
1626 if (goto_instr != NULL) return goto_instr;
1627 LOperand* value = UseRegister(instr->value());
1628 LOperand* scratch = TempRegister();
1629 return new(zone()) LCompareMinusZeroAndBranch(value, scratch);
// --- type-test branches -----------------------------------------------------
// Each of these lowers an H*AndBranch type test; temps are scratch registers
// for the map/instance-type loads the codegen performs.
// NOTE(review): gaps in the inline numbering mark lines elided from this
// excerpt (e.g. upstream 1681-1682, 1706-1708 — trailing temp arguments).
1633 LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
1634 ASSERT(instr->value()->representation().IsSmiOrTagged());
1635 LOperand* temp = TempRegister();
1636 return new(zone()) LIsObjectAndBranch(UseRegister(instr->value()), temp);
1640 LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
1641 ASSERT(instr->value()->representation().IsTagged());
1642 LOperand* temp = TempRegister();
1643 return new(zone()) LIsStringAndBranch(UseRegister(instr->value()), temp);
// Smi check only inspects the tag bit, so the value may live anywhere (Use).
1647 LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
1648 ASSERT(instr->value()->representation().IsTagged());
1649 return new(zone()) LIsSmiAndBranch(Use(instr->value()));
1653 LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
1654 HIsUndetectableAndBranch* instr) {
1655 ASSERT(instr->value()->representation().IsTagged());
1656 return new(zone()) LIsUndetectableAndBranch(
1657 UseRegisterAtStart(instr->value()), TempRegister());
// String comparison needs the CompareIC, hence fixed edx/eax inputs and
// MarkAsCall.
1661 LInstruction* LChunkBuilder::DoStringCompareAndBranch(
1662 HStringCompareAndBranch* instr) {
1663 ASSERT(instr->left()->representation().IsTagged());
1664 ASSERT(instr->right()->representation().IsTagged());
1665 LOperand* context = UseFixed(instr->context(), esi);
1666 LOperand* left = UseFixed(instr->left(), edx);
1667 LOperand* right = UseFixed(instr->right(), eax);
1669 LStringCompareAndBranch* result = new(zone())
1670 LStringCompareAndBranch(context, left, right);
1672 return MarkAsCall(result, instr);
1676 LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
1677 HHasInstanceTypeAndBranch* instr) {
1678 ASSERT(instr->value()->representation().IsTagged());
1679 return new(zone()) LHasInstanceTypeAndBranch(
1680 UseRegisterAtStart(instr->value()),
1685 LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
1686 HGetCachedArrayIndex* instr) {
1687 ASSERT(instr->value()->representation().IsTagged());
1688 LOperand* value = UseRegisterAtStart(instr->value());
1690 return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
1694 LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
1695 HHasCachedArrayIndexAndBranch* instr) {
1696 ASSERT(instr->value()->representation().IsTagged());
1697 return new(zone()) LHasCachedArrayIndexAndBranch(
1698 UseRegisterAtStart(instr->value()));
1702 LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
1703 HClassOfTestAndBranch* instr) {
1704 ASSERT(instr->value()->representation().IsTagged());
1705 return new(zone()) LClassOfTestAndBranch(UseRegister(instr->value()),
// Reads the enum-cache length out of a map into a fresh register.
// NOTE(review): gaps in the inline numbering mark lines elided from this
// excerpt (e.g. upstream 1757, 1760-1761).
1711 LInstruction* LChunkBuilder::DoMapEnumLength(HMapEnumLength* instr) {
1712 LOperand* map = UseRegisterAtStart(instr->value());
1713 return DefineAsRegister(new(zone()) LMapEnumLength(map));
// Date field access goes through the runtime, hence the fixed eax input/
// result, the ecx temp, and eager-deopt marking.
1717 LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
1718 LOperand* date = UseFixed(instr->value(), eax);
1719 LDateField* result =
1720 new(zone()) LDateField(date, FixedTemp(ecx), instr->index());
1721 return MarkAsCall(DefineFixed(result, eax), instr, CAN_DEOPTIMIZE_EAGERLY);
// Loads a character from a sequential string.
1725 LInstruction* LChunkBuilder::DoSeqStringGetChar(HSeqStringGetChar* instr) {
1726 LOperand* string = UseRegisterAtStart(instr->string());
1727 LOperand* index = UseRegisterOrConstantAtStart(instr->index());
1728 return DefineAsRegister(new(zone()) LSeqStringGetChar(string, index));
// Chooses the operand policy for the value stored by SeqStringSetChar.  With
// --debug-code the codegen re-checks the value, which requires eax (one-byte)
// or a register (two-byte); otherwise constants are also allowed.
1732 LOperand* LChunkBuilder::GetSeqStringSetCharOperand(HSeqStringSetChar* instr) {
1733 if (instr->encoding() == String::ONE_BYTE_ENCODING) {
1734 if (FLAG_debug_code) {
1735 return UseFixed(instr->value(), eax);
1737 return UseFixedOrConstant(instr->value(), eax);
1740 if (FLAG_debug_code) {
1741 return UseRegisterAtStart(instr->value());
1743 return UseRegisterOrConstantAtStart(instr->value());
// Stores a character into a sequential string.  Under --debug-code the
// instruction calls out (Abort on check failure), so it needs the context
// and MarkAsCall.
1749 LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) {
1750 LOperand* string = UseRegisterAtStart(instr->string());
1751 LOperand* index = FLAG_debug_code
1752 ? UseRegisterAtStart(instr->index())
1753 : UseRegisterOrConstantAtStart(instr->index());
1754 LOperand* value = GetSeqStringSetCharOperand(instr);
1755 LOperand* context = FLAG_debug_code ? UseFixed(instr->context(), esi) : NULL;
1756 LInstruction* result = new(zone()) LSeqStringSetChar(context, string,
1758 if (FLAG_debug_code) {
1759 result = MarkAsCall(result, instr);
// Array bounds check; always carries an environment since failure deopts.
1765 LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
1766 return AssignEnvironment(new(zone()) LBoundsCheck(
1767 UseRegisterOrConstantAtStart(instr->index()),
1768 UseAtStart(instr->length())));
// The following Hydrogen instructions produce no Lithium code.
// NOTE(review): their `return NULL;` bodies are elided from this excerpt.
1772 LInstruction* LChunkBuilder::DoBoundsCheckBaseIndexInformation(
1773 HBoundsCheckBaseIndexInformation* instr) {
1779 LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
1780 // The control instruction marking the end of a block that completed
1781 // abruptly (e.g., threw an exception). There is nothing specific to do.
1786 LInstruction* LChunkBuilder::DoUseConst(HUseConst* instr) {
1791 LInstruction* LChunkBuilder::DoForceRepresentation(HForceRepresentation* bad) {
1792 // All HForceRepresentation instructions should be eliminated in the
1793 // representation change phase of Hydrogen.
// Lowers representation changes (HChange) — the heart of tagging/untagging.
// Dispatches on (from, to) representation pairs:
//   tagged  -> double / SIMD128 / smi / int32 (untag, may deopt)
//   double  -> tagged (may allocate; deferred call) / smi / int32
//   int32   -> tagged / smi / double
//   SIMD128 -> tagged (allocates; deferred call)
// NOTE(review): numerous upstream lines are elided from this excerpt
// (closing braces, else branches, e.g. 1802, 1829, 1844-1845, 1879,
// 1893-1894, 1917-1921); restore from upstream before compiling.
1799 LInstruction* LChunkBuilder::DoChange(HChange* instr) {
1800 Representation from = instr->from();
1801 Representation to = instr->to();
// (visible fragment: a tagged target here is a no-op, modeled as LDummyUse.)
1803 if (to.IsTagged()) {
1804 LOperand* value = UseRegister(instr->value());
1805 return DefineSameAsFirst(new(zone()) LDummyUse(value));
1807 from = Representation::Tagged();
1809 // Only mark conversions that might need to allocate as calling rather than
1810 // all changes. This makes simple, non-allocating conversion not have to force
1811 // building a stack frame.
1812 if (from.IsTagged()) {
1813 if (to.IsDouble()) {
1814 LOperand* value = UseRegister(instr->value());
1815 // Temp register only necessary for minus zero check.
1816 LOperand* temp = TempRegister();
1817 LNumberUntagD* res = new(zone()) LNumberUntagD(value, temp);
1818 return AssignEnvironment(DefineAsRegister(res));
1819 } else if (to.IsSIMD128()) {
1820 LOperand* value = UseRegister(instr->value());
1821 LOperand* temp = TempRegister();
1822 LTaggedToSIMD128* res = new(zone()) LTaggedToSIMD128(value, temp, to);
1823 return AssignEnvironment(DefineAsRegister(res));
1824 } else if (to.IsSmi()) {
1825 HValue* val = instr->value();
1826 LOperand* value = UseRegister(val);
// A value already known to be a Smi needs no check; otherwise LCheckSmi
// deopts on non-Smi input.
1827 if (val->type().IsSmi()) {
1828 return DefineSameAsFirst(new(zone()) LDummyUse(value));
1830 return AssignEnvironment(DefineSameAsFirst(new(zone()) LCheckSmi(value)));
1832 ASSERT(to.IsInteger32());
1833 HValue* val = instr->value();
1834 if (val->type().IsSmi() || val->representation().IsSmi()) {
1835 LOperand* value = UseRegister(val);
1836 return DefineSameAsFirst(new(zone()) LSmiUntag(value, false));
// Non-truncating tagged->int32 needs an xmm temp (when SSE2 is usable) to
// verify the double converts exactly.
1838 bool truncating = instr->CanTruncateToInt32();
1839 LOperand* xmm_temp =
1840 (CpuFeatures::IsSafeForSnapshot(SSE2) && !truncating)
1841 ? FixedTemp(xmm1) : NULL;
1842 LTaggedToI* res = new(zone()) LTaggedToI(UseRegister(val), xmm_temp);
1843 return AssignEnvironment(DefineSameAsFirst(res));
1846 } else if (from.IsDouble()) {
1847 if (to.IsTagged()) {
// Boxing a double may allocate a HeapNumber -> deferred call + pointer map.
1848 info()->MarkAsDeferredCalling();
1849 LOperand* value = UseRegisterAtStart(instr->value());
1850 LOperand* temp = FLAG_inline_new ? TempRegister() : NULL;
1852 // Make sure that temp and result_temp are different registers.
1853 LUnallocated* result_temp = TempRegister();
1854 LNumberTagD* result = new(zone()) LNumberTagD(value, temp);
1855 return AssignPointerMap(Define(result, result_temp));
1856 } else if (to.IsSmi()) {
1857 LOperand* value = UseRegister(instr->value());
1858 return AssignEnvironment(
1859 DefineAsRegister(new(zone()) LDoubleToSmi(value)));
1861 ASSERT(to.IsInteger32());
1862 bool truncating = instr->CanTruncateToInt32();
1863 bool needs_temp = CpuFeatures::IsSafeForSnapshot(SSE2) && !truncating;
1864 LOperand* value = needs_temp ?
1865 UseTempRegister(instr->value()) : UseRegister(instr->value());
1866 LOperand* temp = needs_temp ? TempRegister() : NULL;
1867 return AssignEnvironment(
1868 DefineAsRegister(new(zone()) LDoubleToI(value, temp)));
1870 } else if (from.IsInteger32()) {
1871 info()->MarkAsDeferredCalling();
1872 if (to.IsTagged()) {
1873 HValue* val = instr->value();
1874 LOperand* value = UseRegister(val);
// In-Smi-range ints tag without allocation; uint32 and general int32 may
// need a HeapNumber (pointer map + environment).
1875 if (val->HasRange() && val->range()->IsInSmiRange()) {
1876 return DefineSameAsFirst(new(zone()) LSmiTag(value));
1877 } else if (val->CheckFlag(HInstruction::kUint32)) {
1878 LOperand* temp = CpuFeatures::IsSupported(SSE2) ? FixedTemp(xmm1)
1880 LNumberTagU* result = new(zone()) LNumberTagU(value, temp);
1881 return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
1883 LNumberTagI* result = new(zone()) LNumberTagI(value);
1884 return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
1886 } else if (to.IsSmi()) {
1887 HValue* val = instr->value();
1888 LOperand* value = UseRegister(val);
1889 LInstruction* result = val->CheckFlag(HInstruction::kUint32)
1890 ? DefineSameAsFirst(new(zone()) LUint32ToSmi(value))
1891 : DefineSameAsFirst(new(zone()) LInteger32ToSmi(value))
1892 if (val->HasRange() && val->range()->IsInSmiRange()) {
1895 return AssignEnvironment(result);
1897 ASSERT(to.IsDouble());
1898 if (instr->value()->CheckFlag(HInstruction::kUint32)) {
1899 LOperand* temp = FixedTemp(xmm1);
1900 return DefineAsRegister(
1901 new(zone()) LUint32ToDouble(UseRegister(instr->value()), temp));
1903 return DefineAsRegister(
1904 new(zone()) LInteger32ToDouble(Use(instr->value())));
1907 } else if (from.IsSIMD128()) {
1908 ASSERT(to.IsTagged());
1909 info()->MarkAsDeferredCalling();
1910 LOperand* value = UseRegister(instr->value());
1911 LOperand* temp = TempRegister();
1913 // Make sure that temp and result_temp are different registers.
1914 LUnallocated* result_temp = TempRegister();
1915 LSIMD128ToTagged* result = new(zone()) LSIMD128ToTagged(value, temp);
1916 return AssignPointerMap(Define(result, result_temp));
// --- checks -----------------------------------------------------------------
// Each check deopts on failure (AssignEnvironment).
// NOTE(review): gaps in the inline numbering mark lines elided from this
// excerpt (e.g. upstream 1964-1966 in DoCheckMaps).
1923 LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) {
1924 LOperand* value = UseAtStart(instr->value());
1925 return AssignEnvironment(new(zone()) LCheckNonSmi(value));
1929 LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
1930 LOperand* value = UseRegisterAtStart(instr->value());
1931 return AssignEnvironment(new(zone()) LCheckSmi(value));
// Instance-type check reads the map, hence the temp register.
1935 LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
1936 LOperand* value = UseRegisterAtStart(instr->value());
1937 LOperand* temp = TempRegister();
1938 LCheckInstanceType* result = new(zone()) LCheckInstanceType(value, temp);
1939 return AssignEnvironment(result);
1943 LInstruction* LChunkBuilder::DoCheckValue(HCheckValue* instr) {
1944 // If the object is in new space, we'll emit a global cell compare and so
1945 // want the value in a register. If the object gets promoted before we
1946 // emit code, we will still get the register but will do an immediate
1947 // compare instead of the cell compare. This is safe.
1948 LOperand* value = instr->object_in_new_space()
1949 ? UseRegisterAtStart(instr->value()) : UseAtStart(instr->value());
1950 return AssignEnvironment(new(zone()) LCheckValue(value));
// Map check: skipped entirely when Hydrogen proved it redundant; a migration
// target turns the check into a potential call (pointer map needed).
1954 LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
1955 LOperand* value = NULL;
1956 if (!instr->CanOmitMapChecks()) {
1957 value = UseRegisterAtStart(instr->value());
1958 if (instr->has_migration_target()) info()->MarkAsDeferredCalling();
1960 LCheckMaps* result = new(zone()) LCheckMaps(value);
1961 if (!instr->CanOmitMapChecks()) {
1962 AssignEnvironment(result);
1963 if (instr->has_migration_target()) return AssignPointerMap(result);
// Clamps a value to the uint8 range [0, 255], dispatching on the input
// representation: double and int32 have dedicated lowerings; tagged input
// uses an SSE2 path (xmm1 double temp, eax fixed) or a three-temp no-SSE2
// fallback fixed in ecx.
// NOTE(review): gaps in the inline numbering mark lines elided from this
// excerpt (e.g. upstream 1987, 1993-1995).
1969 LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
1970 HValue* value = instr->value();
1971 Representation input_rep = value->representation();
1972 if (input_rep.IsDouble()) {
1973 LOperand* reg = UseRegister(value);
1974 return DefineFixed(new(zone()) LClampDToUint8(reg), eax);
1975 } else if (input_rep.IsInteger32()) {
1976 LOperand* reg = UseFixed(value, eax);
1977 return DefineFixed(new(zone()) LClampIToUint8(reg), eax);
1979 ASSERT(input_rep.IsSmiOrTagged());
1980 if (CpuFeatures::IsSupported(SSE2)) {
1981 LOperand* reg = UseFixed(value, eax);
1982 // Register allocator doesn't (yet) support allocation of double
1983 // temps. Reserve xmm1 explicitly.
1984 LOperand* temp = FixedTemp(xmm1);
1985 LClampTToUint8* result = new(zone()) LClampTToUint8(reg, temp);
1986 return AssignEnvironment(DefineFixed(result, eax));
1988 LOperand* value = UseRegister(instr->value());
1989 LClampTToUint8NoSSE2* res =
1990 new(zone()) LClampTToUint8NoSSE2(value, TempRegister(),
1991 TempRegister(), TempRegister());
1992 return AssignEnvironment(DefineFixed(res, ecx));
// Return: value fixed in eax; stubs additionally carry the context in esi;
// parameter count may be dynamic (register) or constant.
// NOTE(review): upstream line 2008 (the leading `if (r.IsSmi())` of
// DoConstant) and the trailing else/UNREACHABLE are elided from this excerpt.
1998 LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
1999 LOperand* context = info()->IsStub() ? UseFixed(instr->context(), esi) : NULL;
2000 LOperand* parameter_count = UseRegisterOrConstant(instr->parameter_count());
2001 return new(zone()) LReturn(
2002 UseFixed(instr->value(), eax), context, parameter_count);
// Materializes a constant per representation.  Doubles get a temp register
// unless the bit pattern is all zeros (which can be produced without one).
2006 LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
2007 Representation r = instr->representation();
2009 return DefineAsRegister(new(zone()) LConstantS);
2010 } else if (r.IsInteger32()) {
2011 return DefineAsRegister(new(zone()) LConstantI);
2012 } else if (r.IsDouble()) {
2013 double value = instr->DoubleValue();
2014 bool value_is_zero = BitCast<uint64_t, double>(value) == 0;
2015 LOperand* temp = value_is_zero ? NULL : TempRegister();
2016 return DefineAsRegister(new(zone()) LConstantD(temp));
2017 } else if (r.IsExternal()) {
2018 return DefineAsRegister(new(zone()) LConstantE);
2019 } else if (r.IsTagged()) {
2020 return DefineAsRegister(new(zone()) LConstantT);
2028 LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
2029 LLoadGlobalCell* result = new(zone()) LLoadGlobalCell;
2030 return instr->RequiresHoleCheck()
2031 ? AssignEnvironment(DefineAsRegister(result))
2032 : DefineAsRegister(result);
2036 LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
2037 LOperand* context = UseFixed(instr->context(), esi);
2038 LOperand* global_object = UseFixed(instr->global_object(), edx);
2039 LLoadGlobalGeneric* result =
2040 new(zone()) LLoadGlobalGeneric(context, global_object);
2041 return MarkAsCall(DefineFixed(result, eax), instr);
2045 LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
2046 LStoreGlobalCell* result =
2047 new(zone()) LStoreGlobalCell(UseRegister(instr->value()));
2048 return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
2052 LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
2053 LOperand* context = UseRegisterAtStart(instr->value());
2054 LInstruction* result =
2055 DefineAsRegister(new(zone()) LLoadContextSlot(context));
2056 return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
2060 LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
2063 LOperand* context = UseRegister(instr->context());
2064 if (instr->NeedsWriteBarrier()) {
2065 value = UseTempRegister(instr->value());
2066 temp = TempRegister();
2068 value = UseRegister(instr->value());
2071 LInstruction* result = new(zone()) LStoreContextSlot(context, value, temp);
2072 return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
2076 LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
2077 LOperand* obj = (instr->access().IsExternalMemory() &&
2078 instr->access().offset() == 0)
2079 ? UseRegisterOrConstantAtStart(instr->object())
2080 : UseRegisterAtStart(instr->object());
2081 return DefineAsRegister(new(zone()) LLoadNamedField(obj));
2085 LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
2086 LOperand* context = UseFixed(instr->context(), esi);
2087 LOperand* object = UseFixed(instr->object(), edx);
2088 LLoadNamedGeneric* result = new(zone()) LLoadNamedGeneric(context, object);
2089 return MarkAsCall(DefineFixed(result, eax), instr);
2093 LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
2094 HLoadFunctionPrototype* instr) {
2095 return AssignEnvironment(DefineAsRegister(
2096 new(zone()) LLoadFunctionPrototype(UseRegister(instr->function()),
2101 LInstruction* LChunkBuilder::DoLoadRoot(HLoadRoot* instr) {
2102 return DefineAsRegister(new(zone()) LLoadRoot);
2106 LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) {
2107 ASSERT(instr->key()->representation().IsSmiOrInteger32());
2108 ElementsKind elements_kind = instr->elements_kind();
2109 bool clobbers_key = ExternalArrayOpRequiresTemp(
2110 instr->key()->representation(), elements_kind);
2111 LOperand* key = clobbers_key
2112 ? UseTempRegister(instr->key())
2113 : UseRegisterOrConstantAtStart(instr->key());
2114 LLoadKeyed* result = NULL;
2116 bool load_128bits_without_sse2 = IsSIMD128ElementsKind(elements_kind) &&
2117 !CPU::SupportsSIMD128InCrankshaft();
2118 if (!instr->is_typed_elements()) {
2119 LOperand* obj = UseRegisterAtStart(instr->elements());
2120 result = new(zone()) LLoadKeyed(obj, key, NULL);
2123 (instr->representation().IsInteger32() &&
2124 !(IsDoubleOrFloatElementsKind(instr->elements_kind()))) ||
2125 (instr->representation().IsDouble() &&
2126 (IsDoubleOrFloatElementsKind(instr->elements_kind()))) ||
2127 (CPU::SupportsSIMD128InCrankshaft()
2128 ? instr->representation().IsFloat32x4()
2129 : instr->representation().IsTagged() &&
2130 (IsFloat32x4ElementsKind(instr->elements_kind()))) ||
2131 (CPU::SupportsSIMD128InCrankshaft()
2132 ? instr->representation().IsInt32x4()
2133 : instr->representation().IsTagged() &&
2134 (IsInt32x4ElementsKind(instr->elements_kind()))));
2135 LOperand* backing_store = UseRegister(instr->elements());
2136 result = new(zone()) LLoadKeyed(backing_store, key,
2137 load_128bits_without_sse2 ? TempRegister() : NULL);
2138 if (load_128bits_without_sse2) {
2139 info()->MarkAsDeferredCalling();
2140 AssignPointerMap(result);
2144 DefineAsRegister(result);
2145 bool can_deoptimize = instr->RequiresHoleCheck() ||
2146 (elements_kind == EXTERNAL_UINT32_ELEMENTS);
2147 // An unsigned int array load might overflow and cause a deopt, make sure it
2148 // has an environment.
2149 return can_deoptimize ? AssignEnvironment(result) : result;
2153 LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
2154 LOperand* context = UseFixed(instr->context(), esi);
2155 LOperand* object = UseFixed(instr->object(), edx);
2156 LOperand* key = UseFixed(instr->key(), ecx);
2158 LLoadKeyedGeneric* result =
2159 new(zone()) LLoadKeyedGeneric(context, object, key);
2160 return MarkAsCall(DefineFixed(result, eax), instr);
2164 LOperand* LChunkBuilder::GetStoreKeyedValueOperand(HStoreKeyed* instr) {
2165 ElementsKind elements_kind = instr->elements_kind();
2167 // Determine if we need a byte register in this case for the value.
2168 bool val_is_fixed_register =
2169 elements_kind == EXTERNAL_INT8_ELEMENTS ||
2170 elements_kind == EXTERNAL_UINT8_ELEMENTS ||
2171 elements_kind == EXTERNAL_UINT8_CLAMPED_ELEMENTS ||
2172 elements_kind == UINT8_ELEMENTS ||
2173 elements_kind == INT8_ELEMENTS ||
2174 elements_kind == UINT8_CLAMPED_ELEMENTS;
2175 if (val_is_fixed_register) {
2176 return UseFixed(instr->value(), eax);
2179 if (!CpuFeatures::IsSafeForSnapshot(SSE2) &&
2180 IsDoubleOrFloatElementsKind(elements_kind)) {
2181 return UseRegisterAtStart(instr->value());
2184 return UseRegister(instr->value());
2188 LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
2189 if (!instr->is_typed_elements()) {
2190 ASSERT(instr->elements()->representation().IsTagged());
2191 ASSERT(instr->key()->representation().IsInteger32() ||
2192 instr->key()->representation().IsSmi());
2194 if (instr->value()->representation().IsDouble()) {
2195 LOperand* object = UseRegisterAtStart(instr->elements());
2196 LOperand* val = NULL;
2197 val = UseRegisterAtStart(instr->value());
2198 LOperand* key = UseRegisterOrConstantAtStart(instr->key());
2199 return new(zone()) LStoreKeyed(object, key, val, NULL);
2201 ASSERT(instr->value()->representation().IsSmiOrTagged());
2202 bool needs_write_barrier = instr->NeedsWriteBarrier();
2204 LOperand* obj = UseRegister(instr->elements());
2207 if (needs_write_barrier) {
2208 val = UseTempRegister(instr->value());
2209 key = UseTempRegister(instr->key());
2211 val = UseRegisterOrConstantAtStart(instr->value());
2212 key = UseRegisterOrConstantAtStart(instr->key());
2214 return new(zone()) LStoreKeyed(obj, key, val, NULL);
2218 ElementsKind elements_kind = instr->elements_kind();
2220 (instr->value()->representation().IsInteger32() &&
2221 !IsDoubleOrFloatElementsKind(elements_kind)) ||
2222 (instr->value()->representation().IsDouble() &&
2223 IsDoubleOrFloatElementsKind(elements_kind)) ||
2224 (CPU::SupportsSIMD128InCrankshaft()
2225 ? instr->value()->representation().IsFloat32x4()
2226 : instr->value()->representation().IsTagged() &&
2227 IsFloat32x4ElementsKind(elements_kind)) ||
2228 (CPU::SupportsSIMD128InCrankshaft()
2229 ? instr->value()->representation().IsInt32x4()
2230 : instr->value()->representation().IsTagged() &&
2231 IsInt32x4ElementsKind(elements_kind)));
2232 ASSERT((instr->is_fixed_typed_array() &&
2233 instr->elements()->representation().IsTagged()) ||
2234 (instr->is_external() &&
2235 instr->elements()->representation().IsExternal()));
2237 LOperand* backing_store = UseRegister(instr->elements());
2238 LOperand* val = GetStoreKeyedValueOperand(instr);
2239 bool clobbers_key = ExternalArrayOpRequiresTemp(
2240 instr->key()->representation(), elements_kind);
2241 LOperand* key = clobbers_key
2242 ? UseTempRegister(instr->key())
2243 : UseRegisterOrConstantAtStart(instr->key());
2244 bool store_128bits_without_sse2 = IsSIMD128ElementsKind(elements_kind) &&
2245 !CPU::SupportsSIMD128InCrankshaft();
2246 LStoreKeyed* result =
2247 new(zone()) LStoreKeyed(backing_store, key, val,
2248 store_128bits_without_sse2 ? TempRegister() : NULL);
2249 return store_128bits_without_sse2 ? AssignEnvironment(result) : result;
2253 LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
2254 LOperand* context = UseFixed(instr->context(), esi);
2255 LOperand* object = UseFixed(instr->object(), edx);
2256 LOperand* key = UseFixed(instr->key(), ecx);
2257 LOperand* value = UseFixed(instr->value(), eax);
2259 ASSERT(instr->object()->representation().IsTagged());
2260 ASSERT(instr->key()->representation().IsTagged());
2261 ASSERT(instr->value()->representation().IsTagged());
2263 LStoreKeyedGeneric* result =
2264 new(zone()) LStoreKeyedGeneric(context, object, key, value);
2265 return MarkAsCall(result, instr);
2269 LInstruction* LChunkBuilder::DoTransitionElementsKind(
2270 HTransitionElementsKind* instr) {
2271 LOperand* object = UseRegister(instr->object());
2272 if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) {
2273 LOperand* object = UseRegister(instr->object());
2274 LOperand* new_map_reg = TempRegister();
2275 LOperand* temp_reg = TempRegister();
2276 LTransitionElementsKind* result =
2277 new(zone()) LTransitionElementsKind(object, NULL,
2278 new_map_reg, temp_reg);
2281 LOperand* context = UseFixed(instr->context(), esi);
2282 LTransitionElementsKind* result =
2283 new(zone()) LTransitionElementsKind(object, context, NULL, NULL);
2284 return AssignPointerMap(result);
2289 LInstruction* LChunkBuilder::DoTrapAllocationMemento(
2290 HTrapAllocationMemento* instr) {
2291 LOperand* object = UseRegister(instr->object());
2292 LOperand* temp = TempRegister();
2293 LTrapAllocationMemento* result =
2294 new(zone()) LTrapAllocationMemento(object, temp);
2295 return AssignEnvironment(result);
2299 LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
2300 bool is_in_object = instr->access().IsInobject();
2301 bool is_external_location = instr->access().IsExternalMemory() &&
2302 instr->access().offset() == 0;
2303 bool needs_write_barrier = instr->NeedsWriteBarrier();
2304 bool needs_write_barrier_for_map = instr->has_transition() &&
2305 instr->NeedsWriteBarrierForMap();
2308 if (needs_write_barrier) {
2310 ? UseRegister(instr->object())
2311 : UseTempRegister(instr->object());
2312 } else if (is_external_location) {
2313 ASSERT(!is_in_object);
2314 ASSERT(!needs_write_barrier);
2315 ASSERT(!needs_write_barrier_for_map);
2316 obj = UseRegisterOrConstant(instr->object());
2318 obj = needs_write_barrier_for_map
2319 ? UseRegister(instr->object())
2320 : UseRegisterAtStart(instr->object());
2323 bool can_be_constant = instr->value()->IsConstant() &&
2324 HConstant::cast(instr->value())->NotInNewSpace() &&
2325 !(FLAG_track_double_fields && instr->field_representation().IsDouble());
2328 if (instr->field_representation().IsInteger8() ||
2329 instr->field_representation().IsUInteger8()) {
2330 // mov_b requires a byte register (i.e. any of eax, ebx, ecx, edx).
2331 // Just force the value to be in eax and we're safe here.
2332 val = UseFixed(instr->value(), eax);
2333 } else if (needs_write_barrier) {
2334 val = UseTempRegister(instr->value());
2335 } else if (can_be_constant) {
2336 val = UseRegisterOrConstant(instr->value());
2337 } else if (FLAG_track_fields && instr->field_representation().IsSmi()) {
2338 val = UseTempRegister(instr->value());
2339 } else if (FLAG_track_double_fields &&
2340 instr->field_representation().IsDouble()) {
2341 val = UseRegisterAtStart(instr->value());
2343 val = UseRegister(instr->value());
2346 // We only need a scratch register if we have a write barrier or we
2347 // have a store into the properties array (not in-object-property).
2348 LOperand* temp = (!is_in_object || needs_write_barrier ||
2349 needs_write_barrier_for_map) ? TempRegister() : NULL;
2351 // We need a temporary register for write barrier of the map field.
2352 LOperand* temp_map = needs_write_barrier_for_map ? TempRegister() : NULL;
2354 LStoreNamedField* result =
2355 new(zone()) LStoreNamedField(obj, val, temp, temp_map);
2356 if (FLAG_track_heap_object_fields &&
2357 instr->field_representation().IsHeapObject()) {
2358 if (!instr->value()->type().IsHeapObject()) {
2359 return AssignEnvironment(result);
2366 LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
2367 LOperand* context = UseFixed(instr->context(), esi);
2368 LOperand* object = UseFixed(instr->object(), edx);
2369 LOperand* value = UseFixed(instr->value(), eax);
2371 LStoreNamedGeneric* result =
2372 new(zone()) LStoreNamedGeneric(context, object, value);
2373 return MarkAsCall(result, instr);
2377 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
2378 LOperand* context = UseFixed(instr->context(), esi);
2379 LOperand* left = UseFixed(instr->left(), edx);
2380 LOperand* right = UseFixed(instr->right(), eax);
2381 LStringAdd* string_add = new(zone()) LStringAdd(context, left, right);
2382 return MarkAsCall(DefineFixed(string_add, eax), instr);
2386 LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
2387 LOperand* string = UseTempRegister(instr->string());
2388 LOperand* index = UseTempRegister(instr->index());
2389 LOperand* context = UseAny(instr->context());
2390 LStringCharCodeAt* result =
2391 new(zone()) LStringCharCodeAt(context, string, index);
2392 return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
2396 LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
2397 LOperand* char_code = UseRegister(instr->value());
2398 LOperand* context = UseAny(instr->context());
2399 LStringCharFromCode* result =
2400 new(zone()) LStringCharFromCode(context, char_code);
2401 return AssignPointerMap(DefineAsRegister(result));
2405 LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
2406 info()->MarkAsDeferredCalling();
2407 LOperand* context = UseAny(instr->context());
2408 LOperand* size = instr->size()->IsConstant()
2409 ? UseConstant(instr->size())
2410 : UseTempRegister(instr->size());
2411 LOperand* temp = TempRegister();
2412 LAllocate* result = new(zone()) LAllocate(context, size, temp);
2413 return AssignPointerMap(DefineAsRegister(result));
2417 LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
2418 LOperand* context = UseFixed(instr->context(), esi);
2420 DefineFixed(new(zone()) LRegExpLiteral(context), eax), instr);
2424 LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
2425 LOperand* context = UseFixed(instr->context(), esi);
2427 DefineFixed(new(zone()) LFunctionLiteral(context), eax), instr);
2431 LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
2432 ASSERT(argument_count_ == 0);
2433 allocator_->MarkAsOsrEntry();
2434 current_block_->last_environment()->set_ast_id(instr->ast_id());
2435 return AssignEnvironment(new(zone()) LOsrEntry);
2439 LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
2440 LParameter* result = new(zone()) LParameter;
2441 if (instr->kind() == HParameter::STACK_PARAMETER) {
2442 int spill_index = chunk()->GetParameterStackSlot(instr->index());
2443 return DefineAsSpilled(result, spill_index);
2445 ASSERT(info()->IsStub());
2446 CodeStubInterfaceDescriptor* descriptor =
2447 info()->code_stub()->GetInterfaceDescriptor(info()->isolate());
2448 int index = static_cast<int>(instr->index());
2449 Register reg = descriptor->GetParameterRegister(index);
2450 return DefineFixed(result, reg);
2455 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
2456 // Use an index that corresponds to the location in the unoptimized frame,
2457 // which the optimized frame will subsume.
2458 int env_index = instr->index();
2459 int spill_index = 0;
2460 if (instr->environment()->is_parameter_index(env_index)) {
2461 spill_index = chunk()->GetParameterStackSlot(env_index);
2463 spill_index = env_index - instr->environment()->first_local_index();
2464 if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
2465 Abort(kNotEnoughSpillSlotsForOsr);
2468 if (spill_index == 0) {
2469 // The dynamic frame alignment state overwrites the first local.
2470 // The first local is saved at the end of the unoptimized frame.
2471 spill_index = graph()->osr()->UnoptimizedFrameSlots();
2474 return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
2478 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
2479 LOperand* context = UseFixed(instr->context(), esi);
2480 LCallStub* result = new(zone()) LCallStub(context);
2481 return MarkAsCall(DefineFixed(result, eax), instr);
2485 LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
2486 // There are no real uses of the arguments object.
2487 // arguments.length and element access are supported directly on
2488 // stack arguments, and any real arguments object use causes a bailout.
2489 // So this value is never used.
2494 LInstruction* LChunkBuilder::DoCapturedObject(HCapturedObject* instr) {
2495 instr->ReplayEnvironment(current_block_->last_environment());
2497 // There are no real uses of a captured object.
2502 LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
2503 info()->MarkAsRequiresFrame();
2504 LOperand* args = UseRegister(instr->arguments());
2507 if (instr->length()->IsConstant() && instr->index()->IsConstant()) {
2508 length = UseRegisterOrConstant(instr->length());
2509 index = UseOrConstant(instr->index());
2511 length = UseTempRegister(instr->length());
2512 index = Use(instr->index());
2514 return DefineAsRegister(new(zone()) LAccessArgumentsAt(args, length, index));
2518 LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
2519 LOperand* object = UseFixed(instr->value(), eax);
2520 LToFastProperties* result = new(zone()) LToFastProperties(object);
2521 return MarkAsCall(DefineFixed(result, eax), instr);
2525 LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
2526 LOperand* context = UseFixed(instr->context(), esi);
2527 LOperand* value = UseAtStart(instr->value());
2528 LTypeof* result = new(zone()) LTypeof(context, value);
2529 return MarkAsCall(DefineFixed(result, eax), instr);
2533 LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
2534 LInstruction* goto_instr = CheckElideControlInstruction(instr);
2535 if (goto_instr != NULL) return goto_instr;
2536 return new(zone()) LTypeofIsAndBranch(UseTempRegister(instr->value()));
2540 LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
2541 HIsConstructCallAndBranch* instr) {
2542 return new(zone()) LIsConstructCallAndBranch(TempRegister());
2546 LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
2547 instr->ReplayEnvironment(current_block_->last_environment());
2549 // If there is an instruction pending deoptimization environment create a
2550 // lazy bailout instruction to capture the environment.
2551 if (!pending_deoptimization_ast_id_.IsNone()) {
2552 ASSERT(pending_deoptimization_ast_id_ == instr->ast_id());
2553 LLazyBailout* lazy_bailout = new(zone()) LLazyBailout;
2554 LInstruction* result = AssignEnvironment(lazy_bailout);
2555 // Store the lazy deopt environment with the instruction if needed. Right
2556 // now it is only used for LInstanceOfKnownGlobal.
2557 instruction_pending_deoptimization_environment_->
2558 SetDeferredLazyDeoptimizationEnvironment(result->environment());
2559 instruction_pending_deoptimization_environment_ = NULL;
2560 pending_deoptimization_ast_id_ = BailoutId::None();
2568 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
2569 info()->MarkAsDeferredCalling();
2570 if (instr->is_function_entry()) {
2571 LOperand* context = UseFixed(instr->context(), esi);
2572 return MarkAsCall(new(zone()) LStackCheck(context), instr);
2574 ASSERT(instr->is_backwards_branch());
2575 LOperand* context = UseAny(instr->context());
2576 return AssignEnvironment(
2577 AssignPointerMap(new(zone()) LStackCheck(context)));
2582 LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
2583 HEnvironment* outer = current_block_->last_environment();
2584 HConstant* undefined = graph()->GetConstantUndefined();
2585 HEnvironment* inner = outer->CopyForInlining(instr->closure(),
2586 instr->arguments_count(),
2589 instr->inlining_kind());
2590 // Only replay binding of arguments object if it wasn't removed from graph.
2591 if (instr->arguments_var() != NULL && instr->arguments_object()->IsLinked()) {
2592 inner->Bind(instr->arguments_var(), instr->arguments_object());
2594 inner->set_entry(instr);
2595 current_block_->UpdateEnvironment(inner);
2596 chunk_->AddInlinedClosure(instr->closure());
2601 LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
2602 LInstruction* pop = NULL;
2604 HEnvironment* env = current_block_->last_environment();
2606 if (env->entry()->arguments_pushed()) {
2607 int argument_count = env->arguments_environment()->parameter_count();
2608 pop = new(zone()) LDrop(argument_count);
2609 ASSERT(instr->argument_delta() == -argument_count);
2612 HEnvironment* outer = current_block_->last_environment()->
2613 DiscardInlined(false);
2614 current_block_->UpdateEnvironment(outer);
2619 LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
2620 LOperand* context = UseFixed(instr->context(), esi);
2621 LOperand* object = UseFixed(instr->enumerable(), eax);
2622 LForInPrepareMap* result = new(zone()) LForInPrepareMap(context, object);
2623 return MarkAsCall(DefineFixed(result, eax), instr, CAN_DEOPTIMIZE_EAGERLY);
2627 LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
2628 LOperand* map = UseRegister(instr->map());
2629 return AssignEnvironment(DefineAsRegister(
2630 new(zone()) LForInCacheArray(map)));
2634 LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
2635 LOperand* value = UseRegisterAtStart(instr->value());
2636 LOperand* map = UseRegisterAtStart(instr->map());
2637 return AssignEnvironment(new(zone()) LCheckMapValue(value, map));
2641 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
2642 LOperand* object = UseRegister(instr->object());
2643 LOperand* index = UseTempRegister(instr->index());
2644 return DefineSameAsFirst(new(zone()) LLoadFieldByIndex(object, index));
2648 const char* LNullarySIMDOperation::Mnemonic() const {
2650 #define SIMD_NULLARY_OPERATION_CASE_ITEM(module, function, name, p4) \
2652 return #module "-" #function;
2653 SIMD_NULLARY_OPERATIONS(SIMD_NULLARY_OPERATION_CASE_ITEM)
2654 #undef SIMD_NULLARY_OPERATION_CASE_ITEM
2662 LInstruction* LChunkBuilder::DoNullarySIMDOperation(
2663 HNullarySIMDOperation* instr) {
2664 LNullarySIMDOperation* result =
2665 new(zone()) LNullarySIMDOperation(instr->op());
2666 switch (instr->op()) {
2667 #define SIMD_NULLARY_OPERATION_CASE_ITEM(module, function, name, p4) \
2669 SIMD_NULLARY_OPERATIONS(SIMD_NULLARY_OPERATION_CASE_ITEM)
2670 #undef SIMD_NULLARY_OPERATION_CASE_ITEM
2671 return DefineAsRegister(result);
2679 const char* LUnarySIMDOperation::Mnemonic() const {
2681 case kSIMD128Change: return "SIMD128-change";
2682 #define SIMD_UNARY_OPERATION_CASE_ITEM(module, function, name, p4, p5) \
2684 return #module "-" #function;
2685 SIMD_UNARY_OPERATIONS(SIMD_UNARY_OPERATION_CASE_ITEM)
2686 SIMD_UNARY_OPERATIONS_FOR_PROPERTY_ACCESS(SIMD_UNARY_OPERATION_CASE_ITEM)
2687 #undef SIMD_UNARY_OPERATION_CASE_ITEM
// Chooses operand/result constraints for a unary SIMD operation.
// NOTE(review): this chunk appears to have lost several case labels to
// line-drop corruption (gaps exist after kFloat32x4Sqrt, after
// kFloat32x4Splat, and before kInt32x4GetFlagX) and the switch's tail
// (default/UNREACHABLE and closing braces) is missing; the surviving code
// is kept byte-identical below — restore the dropped lines from upstream.
LInstruction* LChunkBuilder::DoUnarySIMDOperation(HUnarySIMDOperation* instr) {
  LOperand* input = UseRegisterAtStart(instr->value());
  LUnarySIMDOperation* result =
      new(zone()) LUnarySIMDOperation(input, instr->op());
  switch (instr->op()) {
    case kSIMD128Change:
      // Representation change may deoptimize, hence the environment.
      return AssignEnvironment(DefineAsRegister(result));
    case kFloat32x4Reciprocal:
    case kFloat32x4ReciprocalSqrt:
    case kFloat32x4Sqrt:
      // Destructive SSE ops: result reuses the input register.
      return DefineSameAsFirst(result);
    case kFloat32x4BitsToInt32x4:
    case kFloat32x4ToInt32x4:
    case kInt32x4BitsToFloat32x4:
    case kInt32x4ToFloat32x4:
    case kFloat32x4Splat:
    case kFloat32x4GetSignMask:
    case kFloat32x4GetX:
    case kFloat32x4GetY:
    case kFloat32x4GetZ:
    case kFloat32x4GetW:
    case kInt32x4GetSignMask:
    case kInt32x4GetFlagX:
    case kInt32x4GetFlagY:
    case kInt32x4GetFlagZ:
    case kInt32x4GetFlagW:
      return DefineAsRegister(result);
2738 const char* LBinarySIMDOperation::Mnemonic() const {
2740 #define SIMD_BINARY_OPERATION_CASE_ITEM(module, function, name, p4, p5, p6) \
2742 return #module "-" #function;
2743 SIMD_BINARY_OPERATIONS(SIMD_BINARY_OPERATION_CASE_ITEM)
2744 #undef SIMD_BINARY_OPERATION_CASE_ITEM
// Chooses operand/result constraints for a binary SIMD operation.
// NOTE(review): several case labels and the switch's tail (default branch
// and closing braces) appear to have been lost to line-drop corruption
// (gaps before kFloat32x4Scale, between kFloat32x4WithW and
// kInt32x4WithFlagX, and after kInt32x4GreaterThan); surviving code is
// kept byte-identical — restore the dropped lines from upstream.
LInstruction* LChunkBuilder::DoBinarySIMDOperation(
    HBinarySIMDOperation* instr) {
  switch (instr->op()) {
    case kFloat32x4Scale:
    case kFloat32x4WithX:
    case kFloat32x4WithY:
    case kFloat32x4WithZ:
    case kFloat32x4WithW:
    case kInt32x4WithFlagX:
    case kInt32x4WithFlagY:
    case kInt32x4WithFlagZ:
    case kInt32x4WithFlagW:
    case kInt32x4GreaterThan:
    case kInt32x4LessThan: {
      LOperand* left = UseRegisterAtStart(instr->left());
      LOperand* right = UseRegisterAtStart(instr->right());
      LBinarySIMDOperation* result =
          new(zone()) LBinarySIMDOperation(left, right, instr->op());
      // WithFlag* can deoptimize on non-boolean flag input.
      if (instr->op() == kInt32x4WithFlagX ||
          instr->op() == kInt32x4WithFlagY ||
          instr->op() == kInt32x4WithFlagZ ||
          instr->op() == kInt32x4WithFlagW) {
        return AssignEnvironment(DefineSameAsFirst(result));
      }
      return DefineSameAsFirst(result);
    case kFloat32x4Shuffle:
    case kInt32x4Shuffle:
    case kInt32x4ShiftLeft:
    case kInt32x4ShiftRight:
    case kInt32x4ShiftRightArithmetic: {
      // Shuffle masks / shift counts may be immediates.
      LOperand* left = UseRegisterAtStart(instr->left());
      LOperand* right = UseOrConstant(instr->right());
      LBinarySIMDOperation* result =
          new(zone()) LBinarySIMDOperation(left, right, instr->op());
      return AssignEnvironment(DefineSameAsFirst(result));
    case kFloat32x4LessThan:
    case kFloat32x4LessThanOrEqual:
    case kFloat32x4Equal:
    case kFloat32x4NotEqual:
    case kFloat32x4GreaterThanOrEqual:
    case kFloat32x4GreaterThan: {
      LOperand* left = UseRegisterAtStart(instr->left());
      LOperand* right = UseRegisterAtStart(instr->right());
      LBinarySIMDOperation* result =
          new(zone()) LBinarySIMDOperation(left, right, instr->op());
      return DefineAsRegister(result);
2826 const char* LTernarySIMDOperation::Mnemonic() const {
2828 #define SIMD_TERNARY_OPERATION_CASE_ITEM(module, function, name, p4, p5, p6, \
2831 return #module "-" #function;
2832 SIMD_TERNARY_OPERATIONS(SIMD_TERNARY_OPERATION_CASE_ITEM)
2833 #undef SIMD_TERNARY_OPERATION_CASE_ITEM
2841 LInstruction* LChunkBuilder::DoTernarySIMDOperation(
2842 HTernarySIMDOperation* instr) {
2843 LOperand* first = UseRegisterAtStart(instr->first());
2844 LOperand* second = UseRegisterAtStart(instr->second());
2845 LOperand* third = instr->op() == kFloat32x4ShuffleMix
2846 ? UseOrConstant(instr->third())
2847 : UseRegisterAtStart(instr->third());
2848 LTernarySIMDOperation* result =
2849 new(zone()) LTernarySIMDOperation(first, second, third, instr->op());
2850 switch (instr->op()) {
2851 case kInt32x4Select: {
2852 return DefineAsRegister(result);
2854 case kFloat32x4ShuffleMix: {
2855 return AssignEnvironment(DefineSameAsFirst(result));
2857 case kFloat32x4Clamp: {
2858 return DefineSameAsFirst(result);
2867 const char* LQuarternarySIMDOperation::Mnemonic() const {
2869 #define SIMD_QUARTERNARY_OPERATION_CASE_ITEM(module, function, name, p4, p5, \
2872 return #module "-" #function;
2873 SIMD_QUARTERNARY_OPERATIONS(SIMD_QUARTERNARY_OPERATION_CASE_ITEM)
2874 #undef SIMD_QUARTERNARY_OPERATION_CASE_ITEM
2882 LInstruction* LChunkBuilder::DoQuarternarySIMDOperation(
2883 HQuarternarySIMDOperation* instr) {
2884 LOperand* x = UseRegisterAtStart(instr->x());
2885 LOperand* y = UseRegisterAtStart(instr->y());
2886 LOperand* z = UseRegisterAtStart(instr->z());
2887 LOperand* w = UseRegisterAtStart(instr->w());
2888 LQuarternarySIMDOperation* result =
2889 new(zone()) LQuarternarySIMDOperation(x, y, z, w, instr->op());
2890 if (instr->op() == kInt32x4Bool) {
2891 return AssignEnvironment(DefineAsRegister(result));
2893 return DefineAsRegister(result);
2898 } } // namespace v8::internal
2900 #endif // V8_TARGET_ARCH_IA32