1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 #include "lithium-allocator-inl.h"
31 #include "arm/lithium-arm.h"
32 #include "arm/lithium-codegen-arm.h"
33 #include "hydrogen-osr.h"
// Stamps out a trivial CompileToNative() for every concrete lithium
// instruction: each generated method just dispatches back to the matching
// LCodeGen::Do<type> visitor.
// NOTE(review): this dump is line-sampled; the macro's closing "}" line
// (and any trailing #undef DEFINE_COMPILE) is not visible here.
38 #define DEFINE_COMPILE(type) \
39 void L##type::CompileToNative(LCodeGen* generator) { \
40 generator->Do##type(this); \
42 LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
// Debug-only sanity check for instructions marked as calls: asserts the
// register-allocation policies of the output, inputs, and temps are
// compatible with a call clobbering all allocatable registers.
// NOTE(review): line-sampled dump — the closing braces of the two loops and
// of the function body are not visible here.
46 void LInstruction::VerifyCall() {
47 // Call instructions can use only fixed registers as temporaries and
48 // outputs because all registers are blocked by the calling convention.
49 // Inputs operands must use a fixed register or use-at-start policy or
50 // a non-register policy.
51 ASSERT(Output() == NULL ||
52 LUnallocated::cast(Output())->HasFixedPolicy() ||
53 !LUnallocated::cast(Output())->HasRegisterPolicy());
// Every input must be fixed or used-at-start (or have a non-register policy).
54 for (UseIterator it(this); !it.Done(); it.Advance()) {
55 LUnallocated* operand = LUnallocated::cast(it.Current());
56 ASSERT(operand->HasFixedPolicy() ||
57 operand->IsUsedAtStart());
// Temps must be fixed or must not require a register at all.
59 for (TempIterator it(this); !it.Done(); it.Advance()) {
60 LUnallocated* operand = LUnallocated::cast(it.Current());
61 ASSERT(operand->HasFixedPolicy() ||!operand->HasRegisterPolicy());
// Pretty-prints one instruction: mnemonic, output operand, operands
// (via the virtual PrintDataTo), then any attached environment and
// pointer map. Used for --print-lithium style debug output.
// NOTE(review): line-sampled dump — intervening stream->Add(...) separators
// and closing braces are not visible here.
67 void LInstruction::PrintTo(StringStream* stream) {
68 stream->Add("%s ", this->Mnemonic());
70 PrintOutputOperandTo(stream);
74 if (HasEnvironment()) {
76 environment()->PrintTo(stream);
79 if (HasPointerMap()) {
81 pointer_map()->PrintTo(stream);
// Default operand printer: space-separated inputs; a NULL input slot is
// handled specially (its branch body is not visible in this sampled dump).
86 void LInstruction::PrintDataTo(StringStream* stream) {
88 for (int i = 0; i < InputCount(); i++) {
89 if (i > 0) stream->Add(" ");
90 if (InputAt(i) == NULL) {
93 InputAt(i)->PrintTo(stream);
// Prints the result operand, if this instruction produces one.
99 void LInstruction::PrintOutputOperandTo(StringStream* stream) {
100 if (HasResult()) result()->PrintTo(stream);
// Label printer: gap moves first, then a note when this label stands in for
// a dead (replaced) block. The guard testing `rep` is not visible in this
// sampled dump.
104 void LLabel::PrintDataTo(StringStream* stream) {
105 LGap::PrintDataTo(stream);
106 LLabel* rep = replacement();
108 stream->Add(" Dead block replaced with B%d", rep->block_id());
// A gap is redundant iff all four inner positions hold no non-redundant
// parallel move. The early `return false;` / final `return true;` lines are
// not visible in this sampled dump.
113 bool LGap::IsRedundant() const {
114 for (int i = 0; i < 4; i++) {
115 if (parallel_moves_[i] != NULL && !parallel_moves_[i]->IsRedundant()) {
// Prints each of the gap's four parallel-move positions that is populated.
124 void LGap::PrintDataTo(StringStream* stream) {
125 for (int i = 0; i < 4; i++) {
127 if (parallel_moves_[i] != NULL) {
128 parallel_moves_[i]->PrintDataTo(stream);
// Mnemonic tables for double ("-d") and tagged ("-t") arithmetic.
// NOTE(review): line-sampled dump — the `switch (op()) {` headers, default
// cases (presumably UNREACHABLE()), and closing braces are not visible.
135 const char* LArithmeticD::Mnemonic() const {
137 case Token::ADD: return "add-d";
138 case Token::SUB: return "sub-d";
139 case Token::MUL: return "mul-d";
140 case Token::DIV: return "div-d";
141 case Token::MOD: return "mod-d";
// Tagged (generic) binary operations, including bitwise and shift forms.
149 const char* LArithmeticT::Mnemonic() const {
151 case Token::ADD: return "add-t";
152 case Token::SUB: return "sub-t";
153 case Token::MUL: return "mul-t";
154 case Token::MOD: return "mod-t";
155 case Token::DIV: return "div-t";
156 case Token::BIT_AND: return "bit-and-t";
157 case Token::BIT_OR: return "bit-or-t";
158 case Token::BIT_XOR: return "bit-xor-t";
159 case Token::ROR: return "ror-t";
160 case Token::SHL: return "shl-t";
161 case Token::SAR: return "sar-t";
162 case Token::SHR: return "shr-t";
// Debug printers for control-flow instructions. Each "...AndBranch" variant
// prints "if <predicate>(<value>) then B<true> else B<false>".
// NOTE(review): line-sampled dump — function-closing braces are not visible
// in any of these bodies.
//
// A goto is only worth a disassembly comment when it doesn't fall through.
170 bool LGoto::HasInterestingComment(LCodeGen* gen) const {
171 return !gen->IsNextEmittedBlock(block_id());
175 void LGoto::PrintDataTo(StringStream* stream) {
176 stream->Add("B%d", block_id());
180 void LBranch::PrintDataTo(StringStream* stream) {
181 stream->Add("B%d | B%d on ", true_block_id(), false_block_id());
182 value()->PrintTo(stream);
186 void LCompareNumericAndBranch::PrintDataTo(StringStream* stream) {
188 left()->PrintTo(stream);
189 stream->Add(" %s ", Token::String(op()));
190 right()->PrintTo(stream);
191 stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
195 void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
196 stream->Add("if is_object(");
197 value()->PrintTo(stream);
198 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
202 void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
203 stream->Add("if is_string(");
204 value()->PrintTo(stream);
205 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
209 void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
210 stream->Add("if is_smi(");
211 value()->PrintTo(stream);
212 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
216 void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
217 stream->Add("if is_undetectable(");
218 value()->PrintTo(stream);
219 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
223 void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
224 stream->Add("if string_compare(");
225 left()->PrintTo(stream);
226 right()->PrintTo(stream);
227 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
231 void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
232 stream->Add("if has_instance_type(");
233 value()->PrintTo(stream);
234 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
238 void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) {
239 stream->Add("if has_cached_array_index(");
240 value()->PrintTo(stream);
241 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
245 void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
246 stream->Add("if class_of_test(");
247 value()->PrintTo(stream);
248 stream->Add(", \"%o\") then B%d else B%d",
249 *hydrogen()->class_name(),
// NOTE(review): trailing true/false block-id arguments of this Add() call
// are not visible in the sampled dump.
255 void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
256 stream->Add("if typeof ");
257 value()->PrintTo(stream);
258 stream->Add(" == \"%s\" then B%d else B%d",
259 hydrogen()->type_literal()->ToCString().get(),
260 true_block_id(), false_block_id());
// Debug printers for call, context-slot, keyed/named load-store, and
// elements-transition instructions.
// NOTE(review): line-sampled dump — separator Add() calls, else-branches and
// closing braces are not visible in these bodies.
264 void LStoreCodeEntry::PrintDataTo(StringStream* stream) {
266 function()->PrintTo(stream);
267 stream->Add(".code_entry = ");
268 code_object()->PrintTo(stream);
272 void LInnerAllocatedObject::PrintDataTo(StringStream* stream) {
274 base_object()->PrintTo(stream);
276 offset()->PrintTo(stream);
280 void LCallJSFunction::PrintDataTo(StringStream* stream) {
282 function()->PrintTo(stream);
283 stream->Add("#%d / ", arity());
287 void LCallWithDescriptor::PrintDataTo(StringStream* stream) {
288 for (int i = 0; i < InputCount(); i++) {
289 InputAt(i)->PrintTo(stream);
292 stream->Add("#%d / ", arity());
296 void LLoadContextSlot::PrintDataTo(StringStream* stream) {
297 context()->PrintTo(stream);
298 stream->Add("[%d]", slot_index());
302 void LStoreContextSlot::PrintDataTo(StringStream* stream) {
303 context()->PrintTo(stream);
304 stream->Add("[%d] <- ", slot_index());
305 value()->PrintTo(stream);
309 void LInvokeFunction::PrintDataTo(StringStream* stream) {
311 function()->PrintTo(stream);
312 stream->Add(" #%d / ", arity());
316 void LCallNew::PrintDataTo(StringStream* stream) {
318 constructor()->PrintTo(stream);
319 stream->Add(" #%d / ", arity());
323 void LCallNewArray::PrintDataTo(StringStream* stream) {
325 constructor()->PrintTo(stream);
326 stream->Add(" #%d / ", arity());
327 ElementsKind kind = hydrogen()->elements_kind();
328 stream->Add(" (%s) ", ElementsKindToString(kind));
332 void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
333 arguments()->PrintTo(stream);
334 stream->Add(" length ");
335 length()->PrintTo(stream);
336 stream->Add(" index ");
337 index()->PrintTo(stream);
341 void LStoreNamedField::PrintDataTo(StringStream* stream) {
342 object()->PrintTo(stream);
343 hydrogen()->access().PrintTo(stream);
345 value()->PrintTo(stream);
349 void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
350 object()->PrintTo(stream);
352 stream->Add(String::cast(*name())->ToCString().get());
354 value()->PrintTo(stream);
// Keyed access: dehoisted loads/stores fold a constant offset into the key,
// so it is printed as "[key + offset]".
358 void LLoadKeyed::PrintDataTo(StringStream* stream) {
359 elements()->PrintTo(stream);
361 key()->PrintTo(stream);
362 if (hydrogen()->IsDehoisted()) {
363 stream->Add(" + %d]", additional_index());
370 void LStoreKeyed::PrintDataTo(StringStream* stream) {
371 elements()->PrintTo(stream);
373 key()->PrintTo(stream);
374 if (hydrogen()->IsDehoisted()) {
375 stream->Add(" + %d] <-", additional_index());
377 stream->Add("] <- ");
// A NULL value operand encodes a hole-NaN store into a double array.
380 if (value() == NULL) {
381 ASSERT(hydrogen()->IsConstantHoleStore() &&
382 hydrogen()->value()->representation().IsDouble());
383 stream->Add("<the hole(nan)>");
385 value()->PrintTo(stream);
390 void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
391 object()->PrintTo(stream);
393 key()->PrintTo(stream);
394 stream->Add("] <- ");
395 value()->PrintTo(stream);
399 void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
400 object()->PrintTo(stream);
401 stream->Add(" %p -> %p", *original_map(), *transitioned_map());
// Allocates the next stack spill index; a double-width spill consumes two
// consecutive slots (the first is skipped, the second is returned).
405 int LPlatformChunk::GetNextSpillIndex(RegisterKind kind) {
406 // Skip a slot if for a double-width slot.
407 if (kind == DOUBLE_REGISTERS) spill_slot_count_++;
408 return spill_slot_count_++;
// Wraps GetNextSpillIndex in the matching LOperand type (double vs general).
412 LOperand* LPlatformChunk::GetNextSpillSlot(RegisterKind kind) {
413 int index = GetNextSpillIndex(kind);
414 if (kind == DOUBLE_REGISTERS) {
415 return LDoubleStackSlot::Create(index, zone());
417 ASSERT(kind == GENERAL_REGISTERS);
418 return LStackSlot::Create(index, zone());
// Entry point of the hydrogen->lithium translation: creates the chunk,
// reserves OSR frame slots if needed, then lowers each basic block in order.
// Returns NULL when the build is aborted. (The final `return chunk_;` and
// status bookkeeping lines are not visible in this sampled dump.)
423 LPlatformChunk* LChunkBuilder::Build() {
425 chunk_ = new(zone()) LPlatformChunk(info(), graph());
426 LPhase phase("L_Building chunk", chunk_);
429 // If compiling for OSR, reserve space for the unoptimized frame,
430 // which will be subsumed into this frame.
431 if (graph()->has_osr()) {
432 for (int i = graph()->osr()->UnoptimizedFrameSlots(); i > 0; i--) {
433 chunk_->GetNextSpillIndex(GENERAL_REGISTERS);
437 const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
438 for (int i = 0; i < blocks->length(); i++) {
// Pass the successor block so fall-through gotos can be elided later.
439 HBasicBlock* next = NULL;
440 if (i < blocks->length() - 1) next = blocks->at(i + 1);
441 DoBasicBlock(blocks->at(i), next);
442 if (is_aborted()) return NULL;
// Records the bailout reason; builder status is flipped to aborted on a line
// not visible in this sampled dump.
449 void LChunkBuilder::Abort(BailoutReason reason) {
450 info()->set_bailout_reason(reason);
// Operand-construction helpers. Each Use* variant wraps a hydrogen value in
// an LUnallocated operand carrying a register-allocation policy:
//   Fixed*            - a specific register is required
//   Register          - any register
//   *AtStart          - value only needed at the start of the instruction
//   TempRegister      - writable register (input may be clobbered)
//   *OrConstant       - constants bypass allocation entirely
//   Any               - register or stack slot, allocator's choice
// NOTE(review): line-sampled dump — several `return Use(...)` heads,
// else-arms of the ?: chains, and closing braces are not visible.
455 LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
456 return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
457 Register::ToAllocationIndex(reg));
461 LUnallocated* LChunkBuilder::ToUnallocated(DoubleRegister reg) {
462 return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
463 DoubleRegister::ToAllocationIndex(reg));
467 LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) {
468 return Use(value, ToUnallocated(fixed_register));
472 LOperand* LChunkBuilder::UseFixedDouble(HValue* value, DoubleRegister reg) {
473 return Use(value, ToUnallocated(reg));
477 LOperand* LChunkBuilder::UseRegister(HValue* value) {
478 return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
482 LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
484 new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
485 LUnallocated::USED_AT_START));
489 LOperand* LChunkBuilder::UseTempRegister(HValue* value) {
490 return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));
494 LOperand* LChunkBuilder::Use(HValue* value) {
495 return Use(value, new(zone()) LUnallocated(LUnallocated::NONE));
499 LOperand* LChunkBuilder::UseAtStart(HValue* value) {
500 return Use(value, new(zone()) LUnallocated(LUnallocated::NONE,
501 LUnallocated::USED_AT_START));
505 LOperand* LChunkBuilder::UseOrConstant(HValue* value) {
506 return value->IsConstant()
507 ? chunk_->DefineConstantOperand(HConstant::cast(value))
512 LOperand* LChunkBuilder::UseOrConstantAtStart(HValue* value) {
513 return value->IsConstant()
514 ? chunk_->DefineConstantOperand(HConstant::cast(value))
519 LOperand* LChunkBuilder::UseRegisterOrConstant(HValue* value) {
520 return value->IsConstant()
521 ? chunk_->DefineConstantOperand(HConstant::cast(value))
522 : UseRegister(value);
526 LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
527 return value->IsConstant()
528 ? chunk_->DefineConstantOperand(HConstant::cast(value))
529 : UseRegisterAtStart(value);
533 LOperand* LChunkBuilder::UseConstant(HValue* value) {
534 return chunk_->DefineConstantOperand(HConstant::cast(value));
538 LOperand* LChunkBuilder::UseAny(HValue* value) {
539 return value->IsConstant()
540 ? chunk_->DefineConstantOperand(HConstant::cast(value))
541 : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
// Core Use(): values flagged EmitAtUses are lowered lazily right here, then
// the operand is tagged with the value's virtual register.
545 LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
546 if (value->EmitAtUses()) {
547 HInstruction* instr = HInstruction::cast(value);
548 VisitInstruction(instr);
550 operand->set_virtual_register(value->id());
// Result-definition helpers: attach an LUnallocated result (with the given
// policy) to an instruction that produces one value.
// NOTE(review): line-sampled dump — the `return Define(instr, ...` heads of
// the AsRegister/AsSpilled/SameAsFirst variants and closing braces are not
// visible.
555 LInstruction* LChunkBuilder::Define(LTemplateResultInstruction<1>* instr,
556 LUnallocated* result) {
557 result->set_virtual_register(current_instruction_->id());
558 instr->set_result(result);
563 LInstruction* LChunkBuilder::DefineAsRegister(
564 LTemplateResultInstruction<1>* instr) {
566 new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
570 LInstruction* LChunkBuilder::DefineAsSpilled(
571 LTemplateResultInstruction<1>* instr, int index) {
573 new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));
577 LInstruction* LChunkBuilder::DefineSameAsFirst(
578 LTemplateResultInstruction<1>* instr) {
580 new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
584 LInstruction* LChunkBuilder::DefineFixed(
585 LTemplateResultInstruction<1>* instr, Register reg) {
586 return Define(instr, ToUnallocated(reg));
590 LInstruction* LChunkBuilder::DefineFixedDouble(
591 LTemplateResultInstruction<1>* instr, DoubleRegister reg) {
592 return Define(instr, ToUnallocated(reg));
// Attaches a deoptimization environment (built from the current block's last
// hydrogen environment) to the instruction. The trailing `return instr;` is
// not visible in this sampled dump.
596 LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
597 HEnvironment* hydrogen_env = current_block_->last_environment();
598 int argument_index_accumulator = 0;
599 ZoneList<HValue*> objects_to_materialize(0, zone());
600 instr->set_environment(CreateEnvironment(hydrogen_env,
601 &argument_index_accumulator,
602 &objects_to_materialize));
// Marks an instruction as a call: assigns a pointer map, records any pending
// lazy-deopt environment (from the following HSimulate), and attaches an
// eager-deopt environment when required. The `instr->MarkAsCall()` line and
// the final `return instr;` are not visible in this sampled dump.
607 LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
608 HInstruction* hinstr,
609 CanDeoptimize can_deoptimize) {
610 info()->MarkAsNonDeferredCalling();
615 instr = AssignPointerMap(instr);
// Side-effecting calls defer their deopt environment to the HSimulate that
// must immediately follow them.
617 if (hinstr->HasObservableSideEffects()) {
618 ASSERT(hinstr->next()->IsSimulate());
619 HSimulate* sim = HSimulate::cast(hinstr->next());
620 ASSERT(instruction_pending_deoptimization_environment_ == NULL);
621 ASSERT(pending_deoptimization_ast_id_.IsNone());
622 instruction_pending_deoptimization_environment_ = instr;
623 pending_deoptimization_ast_id_ = sim->ast_id();
626 // If instruction does not have side-effects lazy deoptimization
627 // after the call will try to deoptimize to the point before the call.
628 // Thus we still need to attach environment to this call even if
629 // call sequence can not deoptimize eagerly.
630 bool needs_environment =
631 (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) ||
632 !hinstr->HasObservableSideEffects();
633 if (needs_environment && !instr->HasEnvironment()) {
634 instr = AssignEnvironment(instr);
// Pointer-map and temp-operand helpers.
// NOTE(review): line-sampled dump — the `return instr;`/`return operand;`
// lines and closing braces are not visible in these bodies.
641 LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
642 ASSERT(!instr->HasPointerMap());
643 instr->set_pointer_map(new(zone()) LPointerMap(zone()));
// Fresh any-register temp; aborts the build if virtual registers run out.
648 LUnallocated* LChunkBuilder::TempRegister() {
649 LUnallocated* operand =
650 new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
651 int vreg = allocator_->GetVirtualRegister();
652 if (!allocator_->AllocationOk()) {
653 Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
656 operand->set_virtual_register(vreg);
// Temps pinned to a specific general-purpose / double register.
661 LOperand* LChunkBuilder::FixedTemp(Register reg) {
662 LUnallocated* operand = ToUnallocated(reg);
663 ASSERT(operand->HasFixedPolicy());
668 LOperand* LChunkBuilder::FixedTemp(DoubleRegister reg) {
669 LUnallocated* operand = ToUnallocated(reg);
670 ASSERT(operand->HasFixedPolicy());
// Trivial hydrogen->lithium translations. DoEnvironmentMarker's body
// (presumably returning NULL) is not visible in this sampled dump.
675 LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
676 return new(zone()) LLabel(instr->block());
680 LInstruction* LChunkBuilder::DoDummyUse(HDummyUse* instr) {
681 return DefineAsRegister(new(zone()) LDummyUse(UseAny(instr->value())));
685 LInstruction* LChunkBuilder::DoEnvironmentMarker(HEnvironmentMarker* instr) {
// LDeoptimize needs an environment describing the state to deopt into.
691 LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
692 return AssignEnvironment(new(zone()) LDeoptimize);
// Lowers a shift (SHL/SAR/SHR/ROR). Integer/smi shifts become LShiftI with a
// constant or register shift amount; anything else falls back to the generic
// tagged path (DoArithmeticT). `does_deopt` tracks the two deopt sources:
// smi left-shifts that can overflow, and SHR by 0 producing an
// unrepresentable uint32.
696 LInstruction* LChunkBuilder::DoShift(Token::Value op,
697 HBitwiseBinaryOperation* instr) {
698 if (instr->representation().IsSmiOrInteger32()) {
699 ASSERT(instr->left()->representation().Equals(instr->representation()));
700 ASSERT(instr->right()->representation().Equals(instr->representation()));
701 LOperand* left = UseRegisterAtStart(instr->left());
703 HValue* right_value = instr->right();
704 LOperand* right = NULL;
705 int constant_value = 0;
706 bool does_deopt = false;
707 if (right_value->IsConstant()) {
708 HConstant* constant = HConstant::cast(right_value);
709 right = chunk_->DefineConstantOperand(constant);
// Only the low 5 bits of the shift count are meaningful on ARM.
710 constant_value = constant->Integer32Value() & 0x1f;
711 // Left shifts can deoptimize if we shift by > 0 and the result cannot be
713 if (instr->representation().IsSmi() && constant_value > 0) {
714 does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToSmi);
717 right = UseRegisterAtStart(right_value);
720 // Shift operations can only deoptimize if we do a logical shift
721 // by 0 and the result cannot be truncated to int32.
722 if (op == Token::SHR && constant_value == 0) {
723 if (FLAG_opt_safe_uint32_operations) {
724 does_deopt = !instr->CheckFlag(HInstruction::kUint32);
726 does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToInt32);
730 LInstruction* result =
731 DefineAsRegister(new(zone()) LShiftI(op, left, right, does_deopt));
732 return does_deopt ? AssignEnvironment(result) : result;
734 return DoArithmeticT(op, instr);
// Lowers a double-representation binary op. MOD has no ARM instruction, so
// it becomes a runtime call with fixed d0/d1 inputs and a d0 result; the
// other ops are plain register-to-register LArithmeticD instructions.
739 LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
740 HArithmeticBinaryOperation* instr) {
741 ASSERT(instr->representation().IsDouble());
742 ASSERT(instr->left()->representation().IsDouble());
743 ASSERT(instr->right()->representation().IsDouble());
744 if (op == Token::MOD) {
745 LOperand* left = UseFixedDouble(instr->left(), d0);
746 LOperand* right = UseFixedDouble(instr->right(), d1);
747 LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
748 return MarkAsCall(DefineFixedDouble(result, d0), instr);
750 LOperand* left = UseRegisterAtStart(instr->left());
751 LOperand* right = UseRegisterAtStart(instr->right());
752 LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
753 return DefineAsRegister(result);
// Generic (tagged) binary op: always a call into a stub, using the ARM
// JS-call convention — context in cp, left in r1, right in r0, result in r0.
758 LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
759 HBinaryOperation* instr) {
760 HValue* left = instr->left();
761 HValue* right = instr->right();
762 ASSERT(left->representation().IsTagged());
763 ASSERT(right->representation().IsTagged());
764 LOperand* context = UseFixed(instr->context(), cp);
765 LOperand* left_operand = UseFixed(left, r1);
766 LOperand* right_operand = UseFixed(right, r0);
767 LArithmeticT* result =
768 new(zone()) LArithmeticT(op, context, left_operand, right_operand);
769 return MarkAsCall(DefineFixed(result, r0), instr);
// Lowers one basic block: sets up its hydrogen environment (fresh for the
// start block, inherited from a single predecessor, or phi-merged at a join),
// then visits each instruction in order and records the resulting lithium
// instruction index range on the block.
// NOTE(review): line-sampled dump — some else/closing-brace lines between
// the three environment cases are not visible.
773 void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) {
774 ASSERT(is_building());
775 current_block_ = block;
776 next_block_ = next_block;
777 if (block->IsStartBlock()) {
778 block->UpdateEnvironment(graph_->start_environment());
780 } else if (block->predecessors()->length() == 1) {
781 // We have a single predecessor => copy environment and outgoing
782 // argument count from the predecessor.
783 ASSERT(block->phis()->length() == 0);
784 HBasicBlock* pred = block->predecessors()->at(0);
785 HEnvironment* last_environment = pred->last_environment();
786 ASSERT(last_environment != NULL);
787 // Only copy the environment, if it is later used again.
788 if (pred->end()->SecondSuccessor() == NULL) {
789 ASSERT(pred->end()->FirstSuccessor() == block);
791 if (pred->end()->FirstSuccessor()->block_id() > block->block_id() ||
792 pred->end()->SecondSuccessor()->block_id() > block->block_id()) {
793 last_environment = last_environment->Copy();
796 block->UpdateEnvironment(last_environment);
797 ASSERT(pred->argument_count() >= 0);
798 argument_count_ = pred->argument_count();
800 // We are at a state join => process phis.
801 HBasicBlock* pred = block->predecessors()->at(0);
802 // No need to copy the environment, it cannot be used later.
803 HEnvironment* last_environment = pred->last_environment();
// Install live phis into their merged environment slots...
804 for (int i = 0; i < block->phis()->length(); ++i) {
805 HPhi* phi = block->phis()->at(i);
806 if (phi->HasMergedIndex()) {
807 last_environment->SetValueAt(phi->merged_index(), phi);
// ...and replace deleted phis with undefined.
810 for (int i = 0; i < block->deleted_phis()->length(); ++i) {
811 if (block->deleted_phis()->at(i) < last_environment->length()) {
812 last_environment->SetValueAt(block->deleted_phis()->at(i),
813 graph_->GetConstantUndefined());
816 block->UpdateEnvironment(last_environment);
817 // Pick up the outgoing argument count of one of the predecessors.
818 argument_count_ = pred->argument_count();
// Lower the block's instructions; constants emitted at uses are skipped.
820 HInstruction* current = block->first();
821 int start = chunk_->instructions()->length();
822 while (current != NULL && !is_aborted()) {
823 // Code for constants in registers is generated lazily.
824 if (!current->EmitAtUses()) {
825 VisitInstruction(current);
827 current = current->next();
829 int end = chunk_->instructions()->length() - 1;
831 block->set_first_instruction_index(start);
832 block->set_last_instruction_index(end);
834 block->set_argument_count(argument_count_);
836 current_block_ = NULL;
840 void LChunkBuilder::VisitInstruction(HInstruction* current) {
841 HInstruction* old_current = current_instruction_;
842 current_instruction_ = current;
843 if (current->has_position()) position_ = current->position();
845 LInstruction* instr = NULL;
846 if (current->CanReplaceWithDummyUses()) {
847 if (current->OperandCount() == 0) {
848 instr = DefineAsRegister(new(zone()) LDummy());
850 instr = DefineAsRegister(new(zone())
851 LDummyUse(UseAny(current->OperandAt(0))));
853 for (int i = 1; i < current->OperandCount(); ++i) {
854 LInstruction* dummy =
855 new(zone()) LDummyUse(UseAny(current->OperandAt(i)));
856 dummy->set_hydrogen_value(current);
857 chunk_->AddInstruction(dummy, current_block_);
860 instr = current->CompileToLithium(this);
863 argument_count_ += current->argument_delta();
864 ASSERT(argument_count_ >= 0);
867 // Associate the hydrogen instruction first, since we may need it for
868 // the ClobbersRegisters() or ClobbersDoubleRegisters() calls below.
869 instr->set_hydrogen_value(current);
872 // Make sure that the lithium instruction has either no fixed register
873 // constraints in temps or the result OR no uses that are only used at
874 // start. If this invariant doesn't hold, the register allocator can decide
875 // to insert a split of a range immediately before the instruction due to an
876 // already allocated register needing to be used for the instruction's fixed
877 // register constraint. In this case, The register allocator won't see an
878 // interference between the split child and the use-at-start (it would if
879 // the it was just a plain use), so it is free to move the split child into
880 // the same register that is used for the use-at-start.
881 // See https://code.google.com/p/chromium/issues/detail?id=201590
882 if (!(instr->ClobbersRegisters() && instr->ClobbersDoubleRegisters())) {
884 int used_at_start = 0;
885 for (UseIterator it(instr); !it.Done(); it.Advance()) {
886 LUnallocated* operand = LUnallocated::cast(it.Current());
887 if (operand->IsUsedAtStart()) ++used_at_start;
889 if (instr->Output() != NULL) {
890 if (LUnallocated::cast(instr->Output())->HasFixedPolicy()) ++fixed;
892 for (TempIterator it(instr); !it.Done(); it.Advance()) {
893 LUnallocated* operand = LUnallocated::cast(it.Current());
894 if (operand->HasFixedPolicy()) ++fixed;
896 ASSERT(fixed == 0 || used_at_start == 0);
900 if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) {
901 instr = AssignPointerMap(instr);
903 if (FLAG_stress_environments && !instr->HasEnvironment()) {
904 instr = AssignEnvironment(instr);
906 chunk_->AddInstruction(instr, current_block_);
908 current_instruction_ = old_current;
912 LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
913 return new(zone()) LGoto(instr->FirstSuccessor());
// Lowers a branch; a branch on a known-constant condition is elided to a
// plain goto. The deopt environment covers tagged inputs whose type isn't
// statically known unless the ToBoolean stub already handles every case.
// The fall-through `return result;` is not visible in this sampled dump.
917 LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
918 LInstruction* goto_instr = CheckElideControlInstruction(instr);
919 if (goto_instr != NULL) return goto_instr;
921 HValue* value = instr->value();
922 LBranch* result = new(zone()) LBranch(UseRegister(value));
923 // Tagged values that are not known smis or booleans require a
924 // deoptimization environment. If the instruction is generic no
925 // environment is needed since all cases are handled.
926 Representation rep = value->representation();
927 HType type = value->type();
928 ToBooleanStub::Types expected = instr->expected_input_types();
929 if (rep.IsTagged() && !type.IsSmi() && !type.IsBoolean() &&
930 !expected.IsGeneric()) {
931 return AssignEnvironment(result);
// Simple translations for debug-break, map comparison, and arguments
// introspection. Arguments-object lowering marks the function as needing a
// real frame.
937 LInstruction* LChunkBuilder::DoDebugBreak(HDebugBreak* instr) {
938 return new(zone()) LDebugBreak();
942 LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
943 ASSERT(instr->value()->representation().IsTagged());
944 LOperand* value = UseRegisterAtStart(instr->value());
945 LOperand* temp = TempRegister();
946 return new(zone()) LCmpMapAndBranch(value, temp);
950 LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* instr) {
951 info()->MarkAsRequiresFrame();
952 LOperand* value = UseRegister(instr->value());
953 return DefineAsRegister(new(zone()) LArgumentsLength(value));
957 LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
958 info()->MarkAsRequiresFrame();
959 return DefineAsRegister(new(zone()) LArgumentsElements);
// instanceof lowering: both variants are stub calls with fixed registers
// (result in r0). The known-global form's temp operand line is not visible
// in this sampled dump.
963 LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
964 LOperand* context = UseFixed(instr->context(), cp);
965 LInstanceOf* result =
966 new(zone()) LInstanceOf(context, UseFixed(instr->left(), r0),
967 UseFixed(instr->right(), r1));
968 return MarkAsCall(DefineFixed(result, r0), instr);
972 LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
973 HInstanceOfKnownGlobal* instr) {
974 LInstanceOfKnownGlobal* result =
975 new(zone()) LInstanceOfKnownGlobal(
976 UseFixed(instr->context(), cp),
977 UseFixed(instr->left(), r0),
979 return MarkAsCall(DefineFixed(result, r0), instr);
// Receiver wrapping, Function.prototype.apply, and argument pushing.
983 LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
984 LOperand* receiver = UseRegisterAtStart(instr->receiver());
985 LOperand* function = UseRegisterAtStart(instr->function());
986 LWrapReceiver* result = new(zone()) LWrapReceiver(receiver, function);
987 return AssignEnvironment(DefineAsRegister(result));
// apply() is a call that can deoptimize eagerly; fixed registers follow the
// ARM convention. Trailing constructor arguments (receiver/length/elements)
// are on lines not visible in this sampled dump.
991 LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
992 LOperand* function = UseFixed(instr->function(), r1);
993 LOperand* receiver = UseFixed(instr->receiver(), r0);
994 LOperand* length = UseFixed(instr->length(), r2);
995 LOperand* elements = UseFixed(instr->elements(), r3);
996 LApplyArguments* result = new(zone()) LApplyArguments(function,
1000 return MarkAsCall(DefineFixed(result, r0), instr, CAN_DEOPTIMIZE_EAGERLY);
1004 LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
1005 LOperand* argument = Use(instr->argument());
1006 return new(zone()) LPushArgument(argument);
// Context- and global-object related lowerings. Most are simple
// register-in/register-out translations; HThisFunction/HContext with no
// uses produce no instruction at all.
1010 LInstruction* LChunkBuilder::DoStoreCodeEntry(
1011 HStoreCodeEntry* store_code_entry) {
1012 LOperand* function = UseRegister(store_code_entry->function());
1013 LOperand* code_object = UseTempRegister(store_code_entry->code_object());
1014 return new(zone()) LStoreCodeEntry(function, code_object);
1018 LInstruction* LChunkBuilder::DoInnerAllocatedObject(
1019 HInnerAllocatedObject* instr) {
1020 LOperand* base_object = UseRegisterAtStart(instr->base_object());
1021 LOperand* offset = UseRegisterOrConstantAtStart(instr->offset());
1022 return DefineAsRegister(
1023 new(zone()) LInnerAllocatedObject(base_object, offset));
// Dead HThisFunction lowers to nothing (the NULL arm of this ?: is on a
// line not visible in this sampled dump).
1027 LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
1028 return instr->HasNoUses()
1030 : DefineAsRegister(new(zone()) LThisFunction);
// Stubs must get the context in its fixed register (cp).
1034 LInstruction* LChunkBuilder::DoContext(HContext* instr) {
1035 if (instr->HasNoUses()) return NULL;
1037 if (info()->IsStub()) {
1038 return DefineFixed(new(zone()) LContext, cp);
1041 return DefineAsRegister(new(zone()) LContext);
1045 LInstruction* LChunkBuilder::DoOuterContext(HOuterContext* instr) {
1046 LOperand* context = UseRegisterAtStart(instr->value());
1047 return DefineAsRegister(new(zone()) LOuterContext(context));
1051 LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
1052 LOperand* context = UseFixed(instr->context(), cp);
1053 return MarkAsCall(new(zone()) LDeclareGlobals(context), instr);
1057 LInstruction* LChunkBuilder::DoGlobalObject(HGlobalObject* instr) {
1058 LOperand* context = UseRegisterAtStart(instr->value());
1059 return DefineAsRegister(new(zone()) LGlobalObject(context));
1063 LInstruction* LChunkBuilder::DoGlobalReceiver(HGlobalReceiver* instr) {
1064 LOperand* global_object = UseRegisterAtStart(instr->value());
1065 return DefineAsRegister(new(zone()) LGlobalReceiver(global_object));
// Call lowerings: every variant is a MarkAsCall with the result fixed in r0.
1069 LInstruction* LChunkBuilder::DoCallJSFunction(
1070 HCallJSFunction* instr) {
1071 LOperand* function = UseFixed(instr->function(), r1);
1073 LCallJSFunction* result = new(zone()) LCallJSFunction(function);
1075 return MarkAsCall(DefineFixed(result, r0), instr);
// Each operand after the target is pinned to the register dictated by the
// call descriptor.
1079 LInstruction* LChunkBuilder::DoCallWithDescriptor(
1080 HCallWithDescriptor* instr) {
1081 const CallInterfaceDescriptor* descriptor = instr->descriptor();
1083 LOperand* target = UseRegisterOrConstantAtStart(instr->target());
1084 ZoneList<LOperand*> ops(instr->OperandCount(), zone());
1085 ops.Add(target, zone());
1086 for (int i = 1; i < instr->OperandCount(); i++) {
1087 LOperand* op = UseFixed(instr->OperandAt(i),
1088 descriptor->GetParameterRegister(i - 1));
1089 ops.Add(op, zone());
1092 LCallWithDescriptor* result = new(zone()) LCallWithDescriptor(
1093 descriptor, ops, zone());
1094 return MarkAsCall(DefineFixed(result, r0), instr);
1098 LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
1099 LOperand* context = UseFixed(instr->context(), cp);
1100 LOperand* function = UseFixed(instr->function(), r1);
1101 LInvokeFunction* result = new(zone()) LInvokeFunction(context, function);
1102 return MarkAsCall(DefineFixed(result, r0), instr, CANNOT_DEOPTIMIZE_EAGERLY);
// Unary math dispatch plus the individual lowerings. Floor/round/abs can
// deoptimize (e.g. -0 / out-of-range results); log is a runtime call with
// fixed d0 in/out.
// NOTE(review): line-sampled dump — the switch default/UNREACHABLE arm of
// DoUnaryMathOperation and several closing braces are not visible.
1106 LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
1107 switch (instr->op()) {
1108 case kMathFloor: return DoMathFloor(instr);
1109 case kMathRound: return DoMathRound(instr);
1110 case kMathAbs: return DoMathAbs(instr);
1111 case kMathLog: return DoMathLog(instr);
1112 case kMathExp: return DoMathExp(instr);
1113 case kMathSqrt: return DoMathSqrt(instr);
1114 case kMathPowHalf: return DoMathPowHalf(instr);
1122 LInstruction* LChunkBuilder::DoMathFloor(HUnaryMathOperation* instr) {
1123 LOperand* input = UseRegister(instr->value());
1124 LMathFloor* result = new(zone()) LMathFloor(input);
1125 return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
// Round uses a fixed double scratch register (d3).
1129 LInstruction* LChunkBuilder::DoMathRound(HUnaryMathOperation* instr) {
1130 LOperand* input = UseRegister(instr->value());
1131 LOperand* temp = FixedTemp(d3);
1132 LMathRound* result = new(zone()) LMathRound(input, temp);
1133 return AssignEnvironment(DefineAsRegister(result));
// Abs on tagged values may call out, so it needs a real context; numeric
// representations don't (the no-context arm of this ?: is on a line not
// visible in this sampled dump).
1137 LInstruction* LChunkBuilder::DoMathAbs(HUnaryMathOperation* instr) {
1138 Representation r = instr->value()->representation();
1139 LOperand* context = (r.IsDouble() || r.IsSmiOrInteger32())
1141 : UseFixed(instr->context(), cp);
1142 LOperand* input = UseRegister(instr->value());
1143 LMathAbs* result = new(zone()) LMathAbs(context, input);
1144 return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
1148 LInstruction* LChunkBuilder::DoMathLog(HUnaryMathOperation* instr) {
1149 ASSERT(instr->representation().IsDouble());
1150 ASSERT(instr->value()->representation().IsDouble());
1151 LOperand* input = UseFixedDouble(instr->value(), d0);
1152 return MarkAsCall(DefineFixedDouble(new(zone()) LMathLog(input), d0), instr);
1156 LInstruction* LChunkBuilder::DoMathExp(HUnaryMathOperation* instr) {
1157 ASSERT(instr->representation().IsDouble());
1158 ASSERT(instr->value()->representation().IsDouble());
1159 LOperand* input = UseRegister(instr->value());
1160 LOperand* temp1 = TempRegister();
1161 LOperand* temp2 = TempRegister();
1162 LOperand* double_temp = FixedTemp(d3); // Chosen by fair dice roll.
1163 LMathExp* result = new(zone()) LMathExp(input, double_temp, temp1, temp2);
1164 return DefineAsRegister(result);
1168 LInstruction* LChunkBuilder::DoMathSqrt(HUnaryMathOperation* instr) {
1169 LOperand* input = UseRegisterAtStart(instr->value());
1170 LMathSqrt* result = new(zone()) LMathSqrt(input);
1171 return DefineAsRegister(result);
1175 LInstruction* LChunkBuilder::DoMathPowHalf(HUnaryMathOperation* instr) {
1176 LOperand* input = UseRegisterAtStart(instr->value());
1177 LMathPowHalf* result = new(zone()) LMathPowHalf(input);
1178 return DefineAsRegister(result);
1182 LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
1183 LOperand* context = UseFixed(instr->context(), cp);
1184 LOperand* constructor = UseFixed(instr->constructor(), r1);
1185 LCallNew* result = new(zone()) LCallNew(context, constructor);
1186 return MarkAsCall(DefineFixed(result, r0), instr);
1190 LInstruction* LChunkBuilder::DoCallNewArray(HCallNewArray* instr) {
1191 LOperand* context = UseFixed(instr->context(), cp);
1192 LOperand* constructor = UseFixed(instr->constructor(), r1);
1193 LCallNewArray* result = new(zone()) LCallNewArray(context, constructor);
1194 return MarkAsCall(DefineFixed(result, r0), instr);
1198 LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
1199 LOperand* context = UseFixed(instr->context(), cp);
1200 LOperand* function = UseFixed(instr->function(), r1);
1201 LCallFunction* call = new(zone()) LCallFunction(context, function);
1202 LInstruction* result = DefineFixed(call, r0);
1203 if (instr->IsTailCall()) return result;
1204 return MarkAsCall(result, instr);
1208 LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
1209 LOperand* context = UseFixed(instr->context(), cp);
1210 return MarkAsCall(DefineFixed(new(zone()) LCallRuntime(context), r0), instr);
1214 LInstruction* LChunkBuilder::DoRor(HRor* instr) {
1215 return DoShift(Token::ROR, instr);
1219 LInstruction* LChunkBuilder::DoShr(HShr* instr) {
1220 return DoShift(Token::SHR, instr);
1224 LInstruction* LChunkBuilder::DoSar(HSar* instr) {
1225 return DoShift(Token::SAR, instr);
1229 LInstruction* LChunkBuilder::DoShl(HShl* instr) {
1230 return DoShift(Token::SHL, instr);
1234 LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
1235 if (instr->representation().IsSmiOrInteger32()) {
1236 ASSERT(instr->left()->representation().Equals(instr->representation()));
1237 ASSERT(instr->right()->representation().Equals(instr->representation()));
1238 ASSERT(instr->CheckFlag(HValue::kTruncatingToInt32));
1240 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
1241 LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand());
1242 return DefineAsRegister(new(zone()) LBitI(left, right));
1244 return DoArithmeticT(instr->op(), instr);
1249 LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
1250 if (instr->representation().IsSmiOrInteger32()) {
1251 ASSERT(instr->left()->representation().Equals(instr->representation()));
1252 ASSERT(instr->right()->representation().Equals(instr->representation()));
1253 if (instr->RightIsPowerOf2()) {
1254 ASSERT(!instr->CheckFlag(HValue::kCanBeDivByZero));
1255 LOperand* value = UseRegisterAtStart(instr->left());
1256 LDivI* div = new(zone()) LDivI(value, UseConstant(instr->right()), NULL);
1257 return AssignEnvironment(DefineAsRegister(div));
1259 LOperand* dividend = UseRegister(instr->left());
1260 LOperand* divisor = UseRegister(instr->right());
1261 LOperand* temp = CpuFeatures::IsSupported(SUDIV) ? NULL : FixedTemp(d4);
1262 LDivI* div = new(zone()) LDivI(dividend, divisor, temp);
1263 return AssignEnvironment(DefineAsRegister(div));
1264 } else if (instr->representation().IsDouble()) {
1265 return DoArithmeticD(Token::DIV, instr);
1267 return DoArithmeticT(Token::DIV, instr);
1272 bool LChunkBuilder::HasMagicNumberForDivisor(int32_t divisor) {
1273 uint32_t divisor_abs = abs(divisor);
1274 // Dividing by 0, 1, and powers of 2 is easy.
1275 // Note that IsPowerOf2(0) returns true;
1276 ASSERT(IsPowerOf2(0) == true);
1277 if (IsPowerOf2(divisor_abs)) return true;
1279 // We have magic numbers for a few specific divisors.
1280 // Details and proofs can be found in:
1281 // - Hacker's Delight, Henry S. Warren, Jr.
1282 // - The PowerPC Compiler Writer’s Guide
1283 // and probably many others.
1286 // <divisor with magic numbers> * <power of 2>
1288 // <divisor with magic numbers> * <other divisor with magic numbers>
1289 int32_t power_of_2_factor =
1290 CompilerIntrinsics::CountTrailingZeros(divisor_abs);
1291 DivMagicNumbers magic_numbers =
1292 DivMagicNumberFor(divisor_abs >> power_of_2_factor);
1293 if (magic_numbers.M != InvalidDivMagicNumber.M) return true;
1299 LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
1300 // LMathFloorOfDiv can only handle a subset of divisors, so fall
1301 // back to a flooring division in all other cases.
1302 HValue* right = instr->right();
1303 if (!right->IsInteger32Constant() ||
1304 (!CpuFeatures::IsSupported(SUDIV) &&
1305 !HasMagicNumberForDivisor(HConstant::cast(right)->Integer32Value()))) {
1306 LOperand* dividend = UseRegister(instr->left());
1307 LOperand* divisor = UseRegister(right);
1308 LOperand* temp = CpuFeatures::IsSupported(SUDIV) ? NULL : FixedTemp(d4);
1309 LDivI* div = new(zone()) LDivI(dividend, divisor, temp);
1310 return AssignEnvironment(DefineAsRegister(div));
1313 LOperand* dividend = UseRegister(instr->left());
1314 LOperand* divisor = CpuFeatures::IsSupported(SUDIV)
1315 ? UseRegister(right)
1316 : UseOrConstant(right);
1317 LOperand* remainder = TempRegister();
1318 return AssignEnvironment(DefineAsRegister(
1319 new(zone()) LMathFloorOfDiv(dividend, divisor, remainder)));
1323 LInstruction* LChunkBuilder::DoMod(HMod* instr) {
1324 HValue* left = instr->left();
1325 HValue* right = instr->right();
1326 if (instr->representation().IsSmiOrInteger32()) {
1327 ASSERT(instr->left()->representation().Equals(instr->representation()));
1328 ASSERT(instr->right()->representation().Equals(instr->representation()));
1329 if (instr->RightIsPowerOf2()) {
1330 ASSERT(!right->CanBeZero());
1331 LModI* mod = new(zone()) LModI(UseRegisterAtStart(left),
1332 UseConstant(right));
1333 LInstruction* result = DefineAsRegister(mod);
1334 return (left->CanBeNegative() &&
1335 instr->CheckFlag(HValue::kBailoutOnMinusZero))
1336 ? AssignEnvironment(result)
1338 } else if (CpuFeatures::IsSupported(SUDIV)) {
1339 LModI* mod = new(zone()) LModI(UseRegister(left),
1340 UseRegister(right));
1341 LInstruction* result = DefineAsRegister(mod);
1342 return (right->CanBeZero() ||
1343 (left->RangeCanInclude(kMinInt) &&
1344 right->RangeCanInclude(-1) &&
1345 instr->CheckFlag(HValue::kBailoutOnMinusZero)) ||
1346 (left->CanBeNegative() &&
1347 instr->CanBeZero() &&
1348 instr->CheckFlag(HValue::kBailoutOnMinusZero)))
1349 ? AssignEnvironment(result)
1352 LModI* mod = new(zone()) LModI(UseRegister(left),
1356 LInstruction* result = DefineAsRegister(mod);
1357 return (right->CanBeZero() ||
1358 (left->CanBeNegative() &&
1359 instr->CanBeZero() &&
1360 instr->CheckFlag(HValue::kBailoutOnMinusZero)))
1361 ? AssignEnvironment(result)
1364 } else if (instr->representation().IsDouble()) {
1365 return DoArithmeticD(Token::MOD, instr);
1367 return DoArithmeticT(Token::MOD, instr);
1372 LInstruction* LChunkBuilder::DoMul(HMul* instr) {
1373 if (instr->representation().IsSmiOrInteger32()) {
1374 ASSERT(instr->left()->representation().Equals(instr->representation()));
1375 ASSERT(instr->right()->representation().Equals(instr->representation()));
1376 HValue* left = instr->BetterLeftOperand();
1377 HValue* right = instr->BetterRightOperand();
1380 bool can_overflow = instr->CheckFlag(HValue::kCanOverflow);
1381 bool bailout_on_minus_zero = instr->CheckFlag(HValue::kBailoutOnMinusZero);
1383 if (right->IsConstant()) {
1384 HConstant* constant = HConstant::cast(right);
1385 int32_t constant_value = constant->Integer32Value();
1386 // Constants -1, 0 and 1 can be optimized if the result can overflow.
1387 // For other constants, it can be optimized only without overflow.
1388 if (!can_overflow || ((constant_value >= -1) && (constant_value <= 1))) {
1389 left_op = UseRegisterAtStart(left);
1390 right_op = UseConstant(right);
1392 if (bailout_on_minus_zero) {
1393 left_op = UseRegister(left);
1395 left_op = UseRegisterAtStart(left);
1397 right_op = UseRegister(right);
1400 if (bailout_on_minus_zero) {
1401 left_op = UseRegister(left);
1403 left_op = UseRegisterAtStart(left);
1405 right_op = UseRegister(right);
1407 LMulI* mul = new(zone()) LMulI(left_op, right_op);
1408 if (can_overflow || bailout_on_minus_zero) {
1409 AssignEnvironment(mul);
1411 return DefineAsRegister(mul);
1413 } else if (instr->representation().IsDouble()) {
1414 if (instr->UseCount() == 1 && (instr->uses().value()->IsAdd() ||
1415 instr->uses().value()->IsSub())) {
1416 HBinaryOperation* use = HBinaryOperation::cast(instr->uses().value());
1418 if (use->IsAdd() && instr == use->left()) {
1419 // This mul is the lhs of an add. The add and mul will be folded into a
1420 // multiply-add in DoAdd.
1423 if (instr == use->right() && use->IsAdd() && !use->left()->IsMul()) {
1424 // This mul is the rhs of an add, where the lhs is not another mul.
1425 // The add and mul will be folded into a multiply-add in DoAdd.
1428 if (instr == use->right() && use->IsSub()) {
1429 // This mul is the rhs of a sub. The sub and mul will be folded into a
1430 // multiply-sub in DoSub.
1435 return DoArithmeticD(Token::MUL, instr);
1437 return DoArithmeticT(Token::MUL, instr);
1442 LInstruction* LChunkBuilder::DoSub(HSub* instr) {
1443 if (instr->representation().IsSmiOrInteger32()) {
1444 ASSERT(instr->left()->representation().Equals(instr->representation()));
1445 ASSERT(instr->right()->representation().Equals(instr->representation()));
1447 if (instr->left()->IsConstant()) {
1448 // If lhs is constant, do reverse subtraction instead.
1449 return DoRSub(instr);
1452 LOperand* left = UseRegisterAtStart(instr->left());
1453 LOperand* right = UseOrConstantAtStart(instr->right());
1454 LSubI* sub = new(zone()) LSubI(left, right);
1455 LInstruction* result = DefineAsRegister(sub);
1456 if (instr->CheckFlag(HValue::kCanOverflow)) {
1457 result = AssignEnvironment(result);
1460 } else if (instr->representation().IsDouble()) {
1461 if (instr->right()->IsMul()) {
1462 return DoMultiplySub(instr->left(), HMul::cast(instr->right()));
1465 return DoArithmeticD(Token::SUB, instr);
1467 return DoArithmeticT(Token::SUB, instr);
1472 LInstruction* LChunkBuilder::DoRSub(HSub* instr) {
1473 ASSERT(instr->representation().IsSmiOrInteger32());
1474 ASSERT(instr->left()->representation().Equals(instr->representation()));
1475 ASSERT(instr->right()->representation().Equals(instr->representation()));
1477 // Note: The lhs of the subtraction becomes the rhs of the
1478 // reverse-subtraction.
1479 LOperand* left = UseRegisterAtStart(instr->right());
1480 LOperand* right = UseOrConstantAtStart(instr->left());
1481 LRSubI* rsb = new(zone()) LRSubI(left, right);
1482 LInstruction* result = DefineAsRegister(rsb);
1483 if (instr->CheckFlag(HValue::kCanOverflow)) {
1484 result = AssignEnvironment(result);
1490 LInstruction* LChunkBuilder::DoMultiplyAdd(HMul* mul, HValue* addend) {
1491 LOperand* multiplier_op = UseRegisterAtStart(mul->left());
1492 LOperand* multiplicand_op = UseRegisterAtStart(mul->right());
1493 LOperand* addend_op = UseRegisterAtStart(addend);
1494 return DefineSameAsFirst(new(zone()) LMultiplyAddD(addend_op, multiplier_op,
1499 LInstruction* LChunkBuilder::DoMultiplySub(HValue* minuend, HMul* mul) {
1500 LOperand* minuend_op = UseRegisterAtStart(minuend);
1501 LOperand* multiplier_op = UseRegisterAtStart(mul->left());
1502 LOperand* multiplicand_op = UseRegisterAtStart(mul->right());
1504 return DefineSameAsFirst(new(zone()) LMultiplySubD(minuend_op,
1510 LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
1511 if (instr->representation().IsSmiOrInteger32()) {
1512 ASSERT(instr->left()->representation().Equals(instr->representation()));
1513 ASSERT(instr->right()->representation().Equals(instr->representation()));
1514 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
1515 LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand());
1516 LAddI* add = new(zone()) LAddI(left, right);
1517 LInstruction* result = DefineAsRegister(add);
1518 if (instr->CheckFlag(HValue::kCanOverflow)) {
1519 result = AssignEnvironment(result);
1522 } else if (instr->representation().IsExternal()) {
1523 ASSERT(instr->left()->representation().IsExternal());
1524 ASSERT(instr->right()->representation().IsInteger32());
1525 ASSERT(!instr->CheckFlag(HValue::kCanOverflow));
1526 LOperand* left = UseRegisterAtStart(instr->left());
1527 LOperand* right = UseOrConstantAtStart(instr->right());
1528 LAddI* add = new(zone()) LAddI(left, right);
1529 LInstruction* result = DefineAsRegister(add);
1531 } else if (instr->representation().IsDouble()) {
1532 if (instr->left()->IsMul()) {
1533 return DoMultiplyAdd(HMul::cast(instr->left()), instr->right());
1536 if (instr->right()->IsMul()) {
1537 ASSERT(!instr->left()->IsMul());
1538 return DoMultiplyAdd(HMul::cast(instr->right()), instr->left());
1541 return DoArithmeticD(Token::ADD, instr);
1543 return DoArithmeticT(Token::ADD, instr);
1548 LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) {
1549 LOperand* left = NULL;
1550 LOperand* right = NULL;
1551 if (instr->representation().IsSmiOrInteger32()) {
1552 ASSERT(instr->left()->representation().Equals(instr->representation()));
1553 ASSERT(instr->right()->representation().Equals(instr->representation()));
1554 left = UseRegisterAtStart(instr->BetterLeftOperand());
1555 right = UseOrConstantAtStart(instr->BetterRightOperand());
1557 ASSERT(instr->representation().IsDouble());
1558 ASSERT(instr->left()->representation().IsDouble());
1559 ASSERT(instr->right()->representation().IsDouble());
1560 left = UseRegisterAtStart(instr->left());
1561 right = UseRegisterAtStart(instr->right());
1563 return DefineAsRegister(new(zone()) LMathMinMax(left, right));
1567 LInstruction* LChunkBuilder::DoPower(HPower* instr) {
1568 ASSERT(instr->representation().IsDouble());
1569 // We call a C function for double power. It can't trigger a GC.
1570 // We need to use fixed result register for the call.
1571 Representation exponent_type = instr->right()->representation();
1572 ASSERT(instr->left()->representation().IsDouble());
1573 LOperand* left = UseFixedDouble(instr->left(), d0);
1574 LOperand* right = exponent_type.IsDouble() ?
1575 UseFixedDouble(instr->right(), d1) :
1576 UseFixed(instr->right(), r2);
1577 LPower* result = new(zone()) LPower(left, right);
1578 return MarkAsCall(DefineFixedDouble(result, d2),
1580 CAN_DEOPTIMIZE_EAGERLY);
1584 LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
1585 ASSERT(instr->left()->representation().IsTagged());
1586 ASSERT(instr->right()->representation().IsTagged());
1587 LOperand* context = UseFixed(instr->context(), cp);
1588 LOperand* left = UseFixed(instr->left(), r1);
1589 LOperand* right = UseFixed(instr->right(), r0);
1590 LCmpT* result = new(zone()) LCmpT(context, left, right);
1591 return MarkAsCall(DefineFixed(result, r0), instr);
1595 LInstruction* LChunkBuilder::DoCompareNumericAndBranch(
1596 HCompareNumericAndBranch* instr) {
1597 Representation r = instr->representation();
1598 if (r.IsSmiOrInteger32()) {
1599 ASSERT(instr->left()->representation().Equals(r));
1600 ASSERT(instr->right()->representation().Equals(r));
1601 LOperand* left = UseRegisterOrConstantAtStart(instr->left());
1602 LOperand* right = UseRegisterOrConstantAtStart(instr->right());
1603 return new(zone()) LCompareNumericAndBranch(left, right);
1605 ASSERT(r.IsDouble());
1606 ASSERT(instr->left()->representation().IsDouble());
1607 ASSERT(instr->right()->representation().IsDouble());
1608 LOperand* left = UseRegisterAtStart(instr->left());
1609 LOperand* right = UseRegisterAtStart(instr->right());
1610 return new(zone()) LCompareNumericAndBranch(left, right);
1615 LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
1616 HCompareObjectEqAndBranch* instr) {
1617 LInstruction* goto_instr = CheckElideControlInstruction(instr);
1618 if (goto_instr != NULL) return goto_instr;
1619 LOperand* left = UseRegisterAtStart(instr->left());
1620 LOperand* right = UseRegisterAtStart(instr->right());
1621 return new(zone()) LCmpObjectEqAndBranch(left, right);
1625 LInstruction* LChunkBuilder::DoCompareHoleAndBranch(
1626 HCompareHoleAndBranch* instr) {
1627 LOperand* value = UseRegisterAtStart(instr->value());
1628 return new(zone()) LCmpHoleAndBranch(value);
1632 LInstruction* LChunkBuilder::DoCompareMinusZeroAndBranch(
1633 HCompareMinusZeroAndBranch* instr) {
1634 LInstruction* goto_instr = CheckElideControlInstruction(instr);
1635 if (goto_instr != NULL) return goto_instr;
1636 LOperand* value = UseRegister(instr->value());
1637 LOperand* scratch = TempRegister();
1638 return new(zone()) LCompareMinusZeroAndBranch(value, scratch);
1642 LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
1643 ASSERT(instr->value()->representation().IsTagged());
1644 LOperand* value = UseRegisterAtStart(instr->value());
1645 LOperand* temp = TempRegister();
1646 return new(zone()) LIsObjectAndBranch(value, temp);
1650 LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
1651 ASSERT(instr->value()->representation().IsTagged());
1652 LOperand* value = UseRegisterAtStart(instr->value());
1653 LOperand* temp = TempRegister();
1654 return new(zone()) LIsStringAndBranch(value, temp);
1658 LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
1659 ASSERT(instr->value()->representation().IsTagged());
1660 return new(zone()) LIsSmiAndBranch(Use(instr->value()));
1664 LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
1665 HIsUndetectableAndBranch* instr) {
1666 ASSERT(instr->value()->representation().IsTagged());
1667 LOperand* value = UseRegisterAtStart(instr->value());
1668 return new(zone()) LIsUndetectableAndBranch(value, TempRegister());
1672 LInstruction* LChunkBuilder::DoStringCompareAndBranch(
1673 HStringCompareAndBranch* instr) {
1674 ASSERT(instr->left()->representation().IsTagged());
1675 ASSERT(instr->right()->representation().IsTagged());
1676 LOperand* context = UseFixed(instr->context(), cp);
1677 LOperand* left = UseFixed(instr->left(), r1);
1678 LOperand* right = UseFixed(instr->right(), r0);
1679 LStringCompareAndBranch* result =
1680 new(zone()) LStringCompareAndBranch(context, left, right);
1681 return MarkAsCall(result, instr);
1685 LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
1686 HHasInstanceTypeAndBranch* instr) {
1687 ASSERT(instr->value()->representation().IsTagged());
1688 LOperand* value = UseRegisterAtStart(instr->value());
1689 return new(zone()) LHasInstanceTypeAndBranch(value);
1693 LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
1694 HGetCachedArrayIndex* instr) {
1695 ASSERT(instr->value()->representation().IsTagged());
1696 LOperand* value = UseRegisterAtStart(instr->value());
1698 return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
1702 LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
1703 HHasCachedArrayIndexAndBranch* instr) {
1704 ASSERT(instr->value()->representation().IsTagged());
1705 return new(zone()) LHasCachedArrayIndexAndBranch(
1706 UseRegisterAtStart(instr->value()));
1710 LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
1711 HClassOfTestAndBranch* instr) {
1712 ASSERT(instr->value()->representation().IsTagged());
1713 LOperand* value = UseRegister(instr->value());
1714 return new(zone()) LClassOfTestAndBranch(value, TempRegister());
1718 LInstruction* LChunkBuilder::DoMapEnumLength(HMapEnumLength* instr) {
1719 LOperand* map = UseRegisterAtStart(instr->value());
1720 return DefineAsRegister(new(zone()) LMapEnumLength(map));
1724 LInstruction* LChunkBuilder::DoElementsKind(HElementsKind* instr) {
1725 LOperand* object = UseRegisterAtStart(instr->value());
1726 return DefineAsRegister(new(zone()) LElementsKind(object));
1730 LInstruction* LChunkBuilder::DoValueOf(HValueOf* instr) {
1731 LOperand* object = UseRegister(instr->value());
1732 LValueOf* result = new(zone()) LValueOf(object, TempRegister());
1733 return DefineAsRegister(result);
1737 LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
1738 LOperand* object = UseFixed(instr->value(), r0);
1739 LDateField* result =
1740 new(zone()) LDateField(object, FixedTemp(r1), instr->index());
1741 return MarkAsCall(DefineFixed(result, r0), instr, CAN_DEOPTIMIZE_EAGERLY);
1745 LInstruction* LChunkBuilder::DoSeqStringGetChar(HSeqStringGetChar* instr) {
1746 LOperand* string = UseRegisterAtStart(instr->string());
1747 LOperand* index = UseRegisterOrConstantAtStart(instr->index());
1748 return DefineAsRegister(new(zone()) LSeqStringGetChar(string, index));
1752 LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) {
1753 LOperand* string = UseRegisterAtStart(instr->string());
1754 LOperand* index = FLAG_debug_code
1755 ? UseRegisterAtStart(instr->index())
1756 : UseRegisterOrConstantAtStart(instr->index());
1757 LOperand* value = UseRegisterAtStart(instr->value());
1758 LOperand* context = FLAG_debug_code ? UseFixed(instr->context(), cp) : NULL;
1759 return new(zone()) LSeqStringSetChar(context, string, index, value);
1763 LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
1764 LOperand* value = UseRegisterOrConstantAtStart(instr->index());
1765 LOperand* length = UseRegister(instr->length());
1766 return AssignEnvironment(new(zone()) LBoundsCheck(value, length));
1770 LInstruction* LChunkBuilder::DoBoundsCheckBaseIndexInformation(
1771 HBoundsCheckBaseIndexInformation* instr) {
1777 LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
1778 // The control instruction marking the end of a block that completed
1779 // abruptly (e.g., threw an exception). There is nothing specific to do.
1784 LInstruction* LChunkBuilder::DoThrow(HThrow* instr) {
1785 LOperand* context = UseFixed(instr->context(), cp);
1786 LOperand* value = UseFixed(instr->value(), r0);
1787 return MarkAsCall(new(zone()) LThrow(context, value), instr);
1791 LInstruction* LChunkBuilder::DoUseConst(HUseConst* instr) {
1796 LInstruction* LChunkBuilder::DoForceRepresentation(HForceRepresentation* bad) {
1797 // All HForceRepresentation instructions should be eliminated in the
1798 // representation change phase of Hydrogen.
1804 LInstruction* LChunkBuilder::DoChange(HChange* instr) {
1805 Representation from = instr->from();
1806 Representation to = instr->to();
1808 if (to.IsTagged()) {
1809 LOperand* value = UseRegister(instr->value());
1810 return DefineSameAsFirst(new(zone()) LDummyUse(value));
1812 from = Representation::Tagged();
1814 if (from.IsTagged()) {
1815 if (to.IsDouble()) {
1816 LOperand* value = UseRegister(instr->value());
1817 LNumberUntagD* res = new(zone()) LNumberUntagD(value);
1818 return AssignEnvironment(DefineAsRegister(res));
1819 } else if (to.IsSmi()) {
1820 HValue* val = instr->value();
1821 LOperand* value = UseRegister(val);
1822 if (val->type().IsSmi()) {
1823 return DefineSameAsFirst(new(zone()) LDummyUse(value));
1825 return AssignEnvironment(DefineSameAsFirst(new(zone()) LCheckSmi(value)));
1827 ASSERT(to.IsInteger32());
1828 LOperand* value = NULL;
1829 LInstruction* res = NULL;
1830 HValue* val = instr->value();
1831 if (val->type().IsSmi() || val->representation().IsSmi()) {
1832 value = UseRegisterAtStart(val);
1833 res = DefineAsRegister(new(zone()) LSmiUntag(value, false));
1835 value = UseRegister(val);
1836 LOperand* temp1 = TempRegister();
1837 LOperand* temp2 = FixedTemp(d11);
1838 res = DefineSameAsFirst(new(zone()) LTaggedToI(value,
1841 res = AssignEnvironment(res);
1845 } else if (from.IsDouble()) {
1846 if (to.IsTagged()) {
1847 info()->MarkAsDeferredCalling();
1848 LOperand* value = UseRegister(instr->value());
1849 LOperand* temp1 = TempRegister();
1850 LOperand* temp2 = TempRegister();
1852 // Make sure that the temp and result_temp registers are
1854 LUnallocated* result_temp = TempRegister();
1855 LNumberTagD* result = new(zone()) LNumberTagD(value, temp1, temp2);
1856 Define(result, result_temp);
1857 return AssignPointerMap(result);
1858 } else if (to.IsSmi()) {
1859 LOperand* value = UseRegister(instr->value());
1860 return AssignEnvironment(
1861 DefineAsRegister(new(zone()) LDoubleToSmi(value)));
1863 ASSERT(to.IsInteger32());
1864 LOperand* value = UseRegister(instr->value());
1865 LDoubleToI* res = new(zone()) LDoubleToI(value);
1866 return AssignEnvironment(DefineAsRegister(res));
1868 } else if (from.IsInteger32()) {
1869 info()->MarkAsDeferredCalling();
1870 if (to.IsTagged()) {
1871 HValue* val = instr->value();
1872 LOperand* value = UseRegisterAtStart(val);
1873 if (val->CheckFlag(HInstruction::kUint32)) {
1874 LNumberTagU* result = new(zone()) LNumberTagU(value);
1875 return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
1876 } else if (val->HasRange() && val->range()->IsInSmiRange()) {
1877 return DefineAsRegister(new(zone()) LSmiTag(value));
1879 LNumberTagI* result = new(zone()) LNumberTagI(value);
1880 return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
1882 } else if (to.IsSmi()) {
1883 HValue* val = instr->value();
1884 LOperand* value = UseRegister(val);
1885 LInstruction* result = val->CheckFlag(HInstruction::kUint32)
1886 ? DefineAsRegister(new(zone()) LUint32ToSmi(value))
1887 : DefineAsRegister(new(zone()) LInteger32ToSmi(value));
1888 if (val->HasRange() && val->range()->IsInSmiRange()) {
1891 return AssignEnvironment(result);
1893 ASSERT(to.IsDouble());
1894 if (instr->value()->CheckFlag(HInstruction::kUint32)) {
1895 return DefineAsRegister(
1896 new(zone()) LUint32ToDouble(UseRegister(instr->value())));
1898 return DefineAsRegister(
1899 new(zone()) LInteger32ToDouble(Use(instr->value())));
1908 LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) {
1909 LOperand* value = UseRegisterAtStart(instr->value());
1910 return AssignEnvironment(new(zone()) LCheckNonSmi(value));
1914 LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
1915 LOperand* value = UseRegisterAtStart(instr->value());
1916 return AssignEnvironment(new(zone()) LCheckSmi(value));
1920 LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
1921 LOperand* value = UseRegisterAtStart(instr->value());
1922 LInstruction* result = new(zone()) LCheckInstanceType(value);
1923 return AssignEnvironment(result);
1927 LInstruction* LChunkBuilder::DoCheckValue(HCheckValue* instr) {
1928 LOperand* value = UseRegisterAtStart(instr->value());
1929 return AssignEnvironment(new(zone()) LCheckValue(value));
1933 LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
1934 LOperand* value = NULL;
1935 if (!instr->CanOmitMapChecks()) {
1936 value = UseRegisterAtStart(instr->value());
1937 if (instr->has_migration_target()) info()->MarkAsDeferredCalling();
1939 LCheckMaps* result = new(zone()) LCheckMaps(value);
1940 if (!instr->CanOmitMapChecks()) {
1941 AssignEnvironment(result);
1942 if (instr->has_migration_target()) return AssignPointerMap(result);
1948 LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
1949 HValue* value = instr->value();
1950 Representation input_rep = value->representation();
1951 LOperand* reg = UseRegister(value);
1952 if (input_rep.IsDouble()) {
1953 return DefineAsRegister(new(zone()) LClampDToUint8(reg));
1954 } else if (input_rep.IsInteger32()) {
1955 return DefineAsRegister(new(zone()) LClampIToUint8(reg));
1957 ASSERT(input_rep.IsSmiOrTagged());
1958 // Register allocator doesn't (yet) support allocation of double
1959 // temps. Reserve d1 explicitly.
1960 LClampTToUint8* result = new(zone()) LClampTToUint8(reg, FixedTemp(d11));
1961 return AssignEnvironment(DefineAsRegister(result));
1966 LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
1967 LOperand* context = info()->IsStub()
1968 ? UseFixed(instr->context(), cp)
1970 LOperand* parameter_count = UseRegisterOrConstant(instr->parameter_count());
1971 return new(zone()) LReturn(UseFixed(instr->value(), r0), context,
1976 LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
1977 Representation r = instr->representation();
1979 return DefineAsRegister(new(zone()) LConstantS);
1980 } else if (r.IsInteger32()) {
1981 return DefineAsRegister(new(zone()) LConstantI);
1982 } else if (r.IsDouble()) {
1983 return DefineAsRegister(new(zone()) LConstantD);
1984 } else if (r.IsExternal()) {
1985 return DefineAsRegister(new(zone()) LConstantE);
1986 } else if (r.IsTagged()) {
1987 return DefineAsRegister(new(zone()) LConstantT);
1995 LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
1996 LLoadGlobalCell* result = new(zone()) LLoadGlobalCell;
1997 return instr->RequiresHoleCheck()
1998 ? AssignEnvironment(DefineAsRegister(result))
1999 : DefineAsRegister(result);
2003 LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
2004 LOperand* context = UseFixed(instr->context(), cp);
2005 LOperand* global_object = UseFixed(instr->global_object(), r0);
2006 LLoadGlobalGeneric* result =
2007 new(zone()) LLoadGlobalGeneric(context, global_object);
2008 return MarkAsCall(DefineFixed(result, r0), instr);
2012 LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
2013 LOperand* value = UseRegister(instr->value());
2014 // Use a temp to check the value in the cell in the case where we perform
2016 return instr->RequiresHoleCheck()
2017 ? AssignEnvironment(new(zone()) LStoreGlobalCell(value, TempRegister()))
2018 : new(zone()) LStoreGlobalCell(value, NULL);
// Lowers HLoadContextSlot: loads a slot from the context held in a
// register; a required hole check makes the load deoptimizable.
2022 LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
2023 LOperand* context = UseRegisterAtStart(instr->value());
2024 LInstruction* result =
2025 DefineAsRegister(new(zone()) LLoadContextSlot(context));
2026 return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
// Lowers HStoreContextSlot.  When a write barrier is needed both operands
// are placed in temp registers — presumably so the barrier may clobber
// them (TODO confirm); otherwise plain register uses suffice.
// NOTE(review): the operand declarations and the "} else {" line are
// missing from this listing (numbers jump 2030->2033 and 2035->2037).
2030 LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
2033 if (instr->NeedsWriteBarrier()) {
2034 context = UseTempRegister(instr->context());
2035 value = UseTempRegister(instr->value());
2037 context = UseRegister(instr->context());
2038 value = UseRegister(instr->value());
2040 LInstruction* result = new(zone()) LStoreContextSlot(context, value);
2041 return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
// Lowers HLoadNamedField: a plain in-object/property load from a register
// operand into a register result; never deoptimizes here.
2045 LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
2046 LOperand* obj = UseRegisterAtStart(instr->object());
2047 return DefineAsRegister(new(zone()) LLoadNamedField(obj));
// Lowers HLoadNamedGeneric as an IC call: context in cp, receiver in r0,
// result in r0; marked as a call.
2051 LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
2052 LOperand* context = UseFixed(instr->context(), cp);
2053 LOperand* object = UseFixed(instr->object(), r0);
2054 LInstruction* result =
2055 DefineFixed(new(zone()) LLoadNamedGeneric(context, object), r0);
2056 return MarkAsCall(result, instr);
// Lowers HLoadFunctionPrototype.  The environment is attached because the
// load can deoptimize (e.g. on an unexpected function/prototype shape —
// exact condition lives in the codegen, not visible here).
2060 LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
2061 HLoadFunctionPrototype* instr) {
2062 return AssignEnvironment(DefineAsRegister(
2063 new(zone()) LLoadFunctionPrototype(UseRegister(instr->function()))));
// Lowers HLoadRoot: loads a value from the roots array into a register;
// the instruction takes no operands.
2067 LInstruction* LChunkBuilder::DoLoadRoot(HLoadRoot* instr) {
2068 return DefineAsRegister(new(zone()) LLoadRoot);
// Lowers HLoadExternalArrayPointer: extracts the backing-store pointer of
// an external array from the given register operand.
2072 LInstruction* LChunkBuilder::DoLoadExternalArrayPointer(
2073 HLoadExternalArrayPointer* instr) {
2074 LOperand* input = UseRegisterAtStart(instr->value());
2075 return DefineAsRegister(new(zone()) LLoadExternalArrayPointer(input));
// Lowers HLoadKeyed.  Two paths: in-heap elements (FixedArray /
// FixedDoubleArray) vs. typed/external backing stores.  The load gets an
// environment when it may deoptimize: a required hole check, or an
// external/typed UINT32 load whose value may not fit in an int32.
// NOTE(review): several interior lines ("} else {" heads, an ASSERT head,
// and the braces around the two paths) are missing from this listing.
2079 LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) {
2080 ASSERT(instr->key()->representation().IsSmiOrInteger32());
2081 ElementsKind elements_kind = instr->elements_kind();
2082 LOperand* key = UseRegisterOrConstantAtStart(instr->key());
2083 LLoadKeyed* result = NULL;
2085 if (!instr->is_typed_elements()) {
2086 LOperand* obj = NULL;
2087 if (instr->representation().IsDouble()) {
2088 obj = UseRegister(instr->elements());
2090 ASSERT(instr->representation().IsSmiOrTagged());
2091 obj = UseRegisterAtStart(instr->elements());
2093 result = new(zone()) LLoadKeyed(obj, key);
2096 (instr->representation().IsInteger32() &&
2097 !IsDoubleOrFloatElementsKind(instr->elements_kind())) ||
2098 (instr->representation().IsDouble() &&
2099 IsDoubleOrFloatElementsKind(instr->elements_kind())));
2100 LOperand* backing_store = UseRegister(instr->elements());
2101 result = new(zone()) LLoadKeyed(backing_store, key);
2104 DefineAsRegister(result);
2105 // An unsigned int array load might overflow and cause a deopt, make sure it
2106 // has an environment.
2107 bool can_deoptimize = instr->RequiresHoleCheck() ||
2108 elements_kind == EXTERNAL_UINT32_ELEMENTS ||
2109 elements_kind == UINT32_ELEMENTS;
2110 return can_deoptimize ? AssignEnvironment(result) : result;
// Lowers HLoadKeyedGeneric as an IC call: context in cp, receiver in r1,
// key in r0, result in r0; marked as a call.
2114 LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
2115 LOperand* context = UseFixed(instr->context(), cp);
2116 LOperand* object = UseFixed(instr->object(), r1);
2117 LOperand* key = UseFixed(instr->key(), r0);
2119 LInstruction* result =
2120 DefineFixed(new(zone()) LLoadKeyedGeneric(context, object, key), r0);
2121 return MarkAsCall(result, instr);
// Lowers HStoreKeyed.  In-heap elements path: double stores use plain
// registers; tagged stores needing a write barrier pin object/value/key in
// temp registers — presumably so the barrier may clobber them (TODO
// confirm).  Typed/external path: value and backing store in registers,
// key register-or-constant.  No environment: keyed stores here don't deopt.
// NOTE(review): "} else {" heads, closing braces, and an ASSERT head are
// missing from this listing (embedded numbers jump).
2125 LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
2126 if (!instr->is_typed_elements()) {
2127 ASSERT(instr->elements()->representation().IsTagged());
2128 bool needs_write_barrier = instr->NeedsWriteBarrier();
2129 LOperand* object = NULL;
2130 LOperand* key = NULL;
2131 LOperand* val = NULL;
2133 if (instr->value()->representation().IsDouble()) {
2134 object = UseRegisterAtStart(instr->elements());
2135 val = UseRegister(instr->value());
2136 key = UseRegisterOrConstantAtStart(instr->key());
2138 ASSERT(instr->value()->representation().IsSmiOrTagged());
2139 if (needs_write_barrier) {
2140 object = UseTempRegister(instr->elements());
2141 val = UseTempRegister(instr->value());
2142 key = UseTempRegister(instr->key());
2144 object = UseRegisterAtStart(instr->elements());
2145 val = UseRegisterAtStart(instr->value());
2146 key = UseRegisterOrConstantAtStart(instr->key());
2150 return new(zone()) LStoreKeyed(object, key, val);
2154 (instr->value()->representation().IsInteger32() &&
2155 !IsDoubleOrFloatElementsKind(instr->elements_kind())) ||
2156 (instr->value()->representation().IsDouble() &&
2157 IsDoubleOrFloatElementsKind(instr->elements_kind())));
2158 ASSERT((instr->is_fixed_typed_array() &&
2159 instr->elements()->representation().IsTagged()) ||
2160 (instr->is_external() &&
2161 instr->elements()->representation().IsExternal()));
2162 LOperand* val = UseRegister(instr->value());
2163 LOperand* key = UseRegisterOrConstantAtStart(instr->key());
2164 LOperand* backing_store = UseRegister(instr->elements());
2165 return new(zone()) LStoreKeyed(backing_store, key, val);
// Lowers HStoreKeyedGeneric as an IC call with all operands in fixed
// registers (context cp, receiver r2, key r1, value r0); all three inputs
// must be tagged.
2169 LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
2170 LOperand* context = UseFixed(instr->context(), cp);
2171 LOperand* obj = UseFixed(instr->object(), r2);
2172 LOperand* key = UseFixed(instr->key(), r1);
2173 LOperand* val = UseFixed(instr->value(), r0);
2175 ASSERT(instr->object()->representation().IsTagged());
2176 ASSERT(instr->key()->representation().IsTagged());
2177 ASSERT(instr->value()->representation().IsTagged());
2180 new(zone()) LStoreKeyedGeneric(context, obj, key, val), instr);
// Lowers HTransitionElementsKind.  A simple map change only needs a temp
// register for the new map; other transitions call out (context in cp) and
// therefore get a pointer map.
// NOTE(review): the return of the simple-map-change branch and the "} else
// {" line are missing from this listing (2190->2193).
2184 LInstruction* LChunkBuilder::DoTransitionElementsKind(
2185 HTransitionElementsKind* instr) {
2186 LOperand* object = UseRegister(instr->object());
2187 if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) {
2188 LOperand* new_map_reg = TempRegister();
2189 LTransitionElementsKind* result =
2190 new(zone()) LTransitionElementsKind(object, NULL, new_map_reg);
2193 LOperand* context = UseFixed(instr->context(), cp);
2194 LTransitionElementsKind* result =
2195 new(zone()) LTransitionElementsKind(object, context, NULL);
2196 return AssignPointerMap(result);
// Lowers HTrapAllocationMemento: checks the object for a trailing
// allocation memento using one temp register; deoptimizes when found, so
// an environment is attached.
2201 LInstruction* LChunkBuilder::DoTrapAllocationMemento(
2202 HTrapAllocationMemento* instr) {
2203 LOperand* object = UseRegister(instr->object());
2204 LOperand* temp = TempRegister();
2205 LTrapAllocationMemento* result =
2206 new(zone()) LTrapAllocationMemento(object, temp);
2207 return AssignEnvironment(result);
// Lowers HStoreNamedField.  Operand policies depend on whether the store
// needs a write barrier (for the value and/or for a map transition): temp
// registers when a barrier may clobber, otherwise plain register uses.
// A heap-object field store whose value is not statically a heap object
// gets an environment (deopt on Smi).
// NOTE(review): the object-operand declaration, "} else {" heads, and the
// function's tail are missing from this listing (embedded numbers jump).
2211 LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
2212 bool is_in_object = instr->access().IsInobject();
2213 bool needs_write_barrier = instr->NeedsWriteBarrier();
2214 bool needs_write_barrier_for_map = instr->has_transition() &&
2215 instr->NeedsWriteBarrierForMap();
2218 if (needs_write_barrier) {
2220 ? UseRegister(instr->object())
2221 : UseTempRegister(instr->object());
2223 obj = needs_write_barrier_for_map
2224 ? UseRegister(instr->object())
2225 : UseRegisterAtStart(instr->object());
2229 if (needs_write_barrier ||
2230 (FLAG_track_fields && instr->field_representation().IsSmi())) {
2231 val = UseTempRegister(instr->value());
2232 } else if (FLAG_track_double_fields &&
2233 instr->field_representation().IsDouble()) {
2234 val = UseRegisterAtStart(instr->value());
2236 val = UseRegister(instr->value());
2239 // We need a temporary register for write barrier of the map field.
2240 LOperand* temp = needs_write_barrier_for_map ? TempRegister() : NULL;
2242 LStoreNamedField* result = new(zone()) LStoreNamedField(obj, val, temp);
2243 if (FLAG_track_heap_object_fields &&
2244 instr->field_representation().IsHeapObject()) {
2245 if (!instr->value()->type().IsHeapObject()) {
2246 return AssignEnvironment(result);
// Lowers HStoreNamedGeneric as an IC call: context in cp, receiver in r1,
// value in r0; marked as a call.
2253 LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
2254 LOperand* context = UseFixed(instr->context(), cp);
2255 LOperand* obj = UseFixed(instr->object(), r1);
2256 LOperand* val = UseFixed(instr->value(), r0);
2258 LInstruction* result = new(zone()) LStoreNamedGeneric(context, obj, val);
2259 return MarkAsCall(result, instr);
// Lowers HStringAdd as a runtime/stub call: left in r1, right in r0,
// context in cp, result in r0.
// NOTE(review): the MarkAsCall head and the closing of this call are
// missing from this listing (2266->2268, tail absent).
2263 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
2264 LOperand* context = UseFixed(instr->context(), cp);
2265 LOperand* left = UseFixed(instr->left(), r1);
2266 LOperand* right = UseFixed(instr->right(), r0);
2268 DefineFixed(new(zone()) LStringAdd(context, left, right), r0),
// Lowers HStringCharCodeAt.  String and index live in temp registers; the
// context may be anywhere (UseAny) since it's only needed on the deferred
// path.  Gets both a pointer map (may call) and an environment (may deopt).
2273 LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
2274 LOperand* string = UseTempRegister(instr->string());
2275 LOperand* index = UseTempRegister(instr->index());
2276 LOperand* context = UseAny(instr->context());
2277 LStringCharCodeAt* result =
2278 new(zone()) LStringCharCodeAt(context, string, index);
2279 return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
// Lowers HStringCharFromCode: char code in a register, context anywhere;
// a pointer map is assigned because the slow path can call, but no
// environment — this instruction does not deoptimize.
2283 LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
2284 LOperand* char_code = UseRegister(instr->value());
2285 LOperand* context = UseAny(instr->context());
2286 LStringCharFromCode* result =
2287 new(zone()) LStringCharFromCode(context, char_code);
2288 return AssignPointerMap(DefineAsRegister(result));
// Lowers HAllocate.  Marks the function as making deferred calls (the
// slow-path allocation).  A constant size is used directly; otherwise the
// size occupies a temp register.  Two temps plus a pointer map for the
// deferred call.
2292 LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
2293 info()->MarkAsDeferredCalling();
2294 LOperand* context = UseAny(instr->context());
2295 LOperand* size = instr->size()->IsConstant()
2296 ? UseConstant(instr->size())
2297 : UseTempRegister(instr->size());
2298 LOperand* temp1 = TempRegister();
2299 LOperand* temp2 = TempRegister();
2300 LAllocate* result = new(zone()) LAllocate(context, size, temp1, temp2);
2301 return AssignPointerMap(DefineAsRegister(result));
// Lowers HRegExpLiteral as a call: context in cp, result in r0.
// NOTE(review): the MarkAsCall head line is missing from this listing
// (2306->2308).
2305 LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
2306 LOperand* context = UseFixed(instr->context(), cp);
2308 DefineFixed(new(zone()) LRegExpLiteral(context), r0), instr);
// Lowers HFunctionLiteral as a call: context in cp, result in r0.
// NOTE(review): the MarkAsCall head line is missing from this listing
// (2313->2315).
2312 LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
2313 LOperand* context = UseFixed(instr->context(), cp);
2315 DefineFixed(new(zone()) LFunctionLiteral(context), r0), instr);
// Lowers HOsrEntry: records the OSR entry with the register allocator,
// tags the current environment with the entry's AST id, and attaches that
// environment to the LOsrEntry.  No arguments may be pending on the stack.
2319 LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
2320 ASSERT(argument_count_ == 0);
2321 allocator_->MarkAsOsrEntry();
2322 current_block_->last_environment()->set_ast_id(instr->ast_id());
2323 return AssignEnvironment(new(zone()) LOsrEntry);
// Lowers HParameter.  Stack parameters are pinned to their caller-frame
// spill slot; stub parameters are pinned to the register dictated by the
// stub's interface descriptor.
// NOTE(review): the "} else {" between the two branches and the tail are
// missing from this listing (2331->2333).
2327 LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
2328 LParameter* result = new(zone()) LParameter;
2329 if (instr->kind() == HParameter::STACK_PARAMETER) {
2330 int spill_index = chunk()->GetParameterStackSlot(instr->index());
2331 return DefineAsSpilled(result, spill_index);
2333 ASSERT(info()->IsStub());
2334 CodeStubInterfaceDescriptor* descriptor =
2335 info()->code_stub()->GetInterfaceDescriptor(info()->isolate());
2336 int index = static_cast<int>(instr->index());
2337 Register reg = descriptor->GetParameterRegister(index);
2338 return DefineFixed(result, reg);
// Lowers HUnknownOSRValue: maps the environment index onto the matching
// spill slot of the unoptimized frame (parameter slot or local slot) so
// the optimized frame can subsume it.  Aborts if a local's slot index
// exceeds the allocator's fixed-slot limit.
// NOTE(review): the "} else {" and intermediate closing braces are missing
// from this listing (2349->2351, 2353->2357).
2343 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
2344 // Use an index that corresponds to the location in the unoptimized frame,
2345 // which the optimized frame will subsume.
2346 int env_index = instr->index();
2347 int spill_index = 0;
2348 if (instr->environment()->is_parameter_index(env_index)) {
2349 spill_index = chunk()->GetParameterStackSlot(env_index);
2351 spill_index = env_index - instr->environment()->first_local_index();
2352 if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
2353 Abort(kTooManySpillSlotsNeededForOSR);
2357 return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
// Lowers HCallStub as a call: context in cp, result in r0.
2361 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
2362 LOperand* context = UseFixed(instr->context(), cp);
2363 return MarkAsCall(DefineFixed(new(zone()) LCallStub(context), r0), instr);
// Lowers HArgumentsObject.  The value is never materialized (length and
// element accesses are handled directly on the stack arguments; any real
// use bails out), so no instruction is emitted here.
// NOTE(review): the actual return statement is missing from this listing
// (numbers jump 2371->2376) — presumably "return NULL;", TODO confirm.
2367 LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
2368 // There are no real uses of the arguments object.
2369 // arguments.length and element access are supported directly on
2370 // stack arguments, and any real arguments object use causes a bailout.
2371 // So this value is never used.
// Lowers HCapturedObject: only replays the captured object's values into
// the current environment; no instruction is produced since the object has
// no real uses.
// NOTE(review): the return statement is missing from this listing
// (2379->2384) — presumably "return NULL;", TODO confirm.
2376 LInstruction* LChunkBuilder::DoCapturedObject(HCapturedObject* instr) {
2377 instr->ReplayEnvironment(current_block_->last_environment());
2379 // There are no real uses of a captured object.
// Lowers HAccessArgumentsAt: reads a stack argument given the arguments
// pointer, length and index.  Requires a frame, hence MarkAsRequiresFrame.
2384 LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
2385 info()->MarkAsRequiresFrame();
2386 LOperand* args = UseRegister(instr->arguments());
2387 LOperand* length = UseRegisterOrConstantAtStart(instr->length());
2388 LOperand* index = UseRegisterOrConstantAtStart(instr->index());
2389 return DefineAsRegister(new(zone()) LAccessArgumentsAt(args, length, index));
// Lowers HToFastProperties as a call: object fixed in r0, result in r0.
2393 LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
2394 LOperand* object = UseFixed(instr->value(), r0);
2395 LToFastProperties* result = new(zone()) LToFastProperties(object);
2396 return MarkAsCall(DefineFixed(result, r0), instr);
// Lowers HTypeof as a call: context in cp, value in r0, result in r0.
2400 LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
2401 LOperand* context = UseFixed(instr->context(), cp);
2402 LTypeof* result = new(zone()) LTypeof(context, UseFixed(instr->value(), r0));
2403 return MarkAsCall(DefineFixed(result, r0), instr);
// Lowers HTypeofIsAndBranch.  If the control instruction can be elided
// (branch target known statically), emit the goto instead; otherwise test
// the value in a register.
2407 LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
2408 LInstruction* goto_instr = CheckElideControlInstruction(instr);
2409 if (goto_instr != NULL) return goto_instr;
2411 return new(zone()) LTypeofIsAndBranch(UseRegister(instr->value()));
// Lowers HIsConstructCallAndBranch: only needs a scratch register to
// inspect the frame; takes no value operand.
2415 LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
2416 HIsConstructCallAndBranch* instr) {
2417 return new(zone()) LIsConstructCallAndBranch(TempRegister());
// Lowers HSimulate: replays the simulate's values into the current
// environment.  If an instruction is awaiting a lazy deoptimization
// environment with this AST id, emit an LLazyBailout carrying that
// environment and hand it to the pending instruction
// (used for LInstanceOfKnownGlobal).
// NOTE(review): the tail (closing brace and final return — presumably
// returning the bailout or NULL, TODO confirm) is missing from this
// listing (numbers stop at 2434).
2421 LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
2422 instr->ReplayEnvironment(current_block_->last_environment());
2424 // If there is an instruction pending deoptimization environment create a
2425 // lazy bailout instruction to capture the environment.
2426 if (pending_deoptimization_ast_id_ == instr->ast_id()) {
2427 LInstruction* result = new(zone()) LLazyBailout;
2428 result = AssignEnvironment(result);
2429 // Store the lazy deopt environment with the instruction if needed. Right
2430 // now it is only used for LInstanceOfKnownGlobal.
2431 instruction_pending_deoptimization_environment_->
2432 SetDeferredLazyDeoptimizationEnvironment(result->environment());
2433 instruction_pending_deoptimization_environment_ = NULL;
2434 pending_deoptimization_ast_id_ = BailoutId::None();
// Lowers HStackCheck.  Function-entry checks are real calls (context in
// cp); backwards-branch checks may deopt to the unoptimized code, so they
// get a pointer map and an environment, with the context anywhere.
// NOTE(review): the "} else {" between the branches is missing from this
// listing (2445->2447).
2442 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
2443 if (instr->is_function_entry()) {
2444 LOperand* context = UseFixed(instr->context(), cp);
2445 return MarkAsCall(new(zone()) LStackCheck(context), instr);
2447 ASSERT(instr->is_backwards_branch());
2448 LOperand* context = UseAny(instr->context());
2449 return AssignEnvironment(
2450 AssignPointerMap(new(zone()) LStackCheck(context)));
// Lowers HEnterInlined: builds the inlined function's environment by
// copying the outer one for inlining, re-binds the arguments object if it
// is still live, installs the new environment on the block, and records
// the inlined closure on the chunk.
// NOTE(review): two CopyForInlining arguments and the function's tail
// (presumably "return NULL;", TODO confirm) are missing from this listing
// (2459->2462, stops at 2469).
2455 LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
2456 HEnvironment* outer = current_block_->last_environment();
2457 HConstant* undefined = graph()->GetConstantUndefined();
2458 HEnvironment* inner = outer->CopyForInlining(instr->closure(),
2459 instr->arguments_count(),
2462 instr->inlining_kind());
2463 // Only replay binding of arguments object if it wasn't removed from graph.
2464 if (instr->arguments_var() != NULL && instr->arguments_object()->IsLinked()) {
2465 inner->Bind(instr->arguments_var(), instr->arguments_object());
2467 inner->set_entry(instr);
2468 current_block_->UpdateEnvironment(inner);
2469 chunk_->AddInlinedClosure(instr->closure());
// Lowers HLeaveInlined: if the inlined call pushed its arguments, emit an
// LDrop for them; then discard the inlined environment and restore the
// outer one on the current block.
// NOTE(review): the final return (presumably "return pop;", TODO confirm)
// is missing from this listing (numbers stop at 2487).
2474 LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
2475 LInstruction* pop = NULL;
2477 HEnvironment* env = current_block_->last_environment();
2479 if (env->entry()->arguments_pushed()) {
2480 int argument_count = env->arguments_environment()->parameter_count();
2481 pop = new(zone()) LDrop(argument_count);
2482 ASSERT(instr->argument_delta() == -argument_count);
2485 HEnvironment* outer = current_block_->last_environment()->
2486 DiscardInlined(false);
2487 current_block_->UpdateEnvironment(outer);
// Lowers HForInPrepareMap as a call (context cp, enumerable r0, result
// r0) that can deoptimize eagerly, e.g. before the call completes.
2493 LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
2494 LOperand* context = UseFixed(instr->context(), cp);
2495 LOperand* object = UseFixed(instr->enumerable(), r0);
2496 LForInPrepareMap* result = new(zone()) LForInPrepareMap(context, object);
2497 return MarkAsCall(DefineFixed(result, r0), instr, CAN_DEOPTIMIZE_EAGERLY);
// Lowers HForInCacheArray: loads the enum cache from the map; carries an
// environment since it can deoptimize.
2501 LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
2502 LOperand* map = UseRegister(instr->map());
2503 return AssignEnvironment(DefineAsRegister(new(zone()) LForInCacheArray(map)));
// Lowers HCheckMapValue: compares the object's map against the expected
// map and deoptimizes on mismatch (environment attached; no result).
2507 LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
2508 LOperand* value = UseRegisterAtStart(instr->value());
2509 LOperand* map = UseRegisterAtStart(instr->map());
2510 return AssignEnvironment(new(zone()) LCheckMapValue(value, map));
// Lowers HLoadFieldByIndex (for-in fast path): loads a field of the object
// selected by a dynamic index, result in a register.
2514 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
2515 LOperand* object = UseRegister(instr->object());
2516 LOperand* index = UseRegister(instr->index());
2517 return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index));
2520 } } // namespace v8::internal