1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
// met:
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 #include "full-codegen.h"
34 #include "lithium-allocator.h"
36 #include "scopeinfo.h"
38 #include "stub-cache.h"
40 #if V8_TARGET_ARCH_IA32
41 #include "ia32/lithium-codegen-ia32.h"
42 #elif V8_TARGET_ARCH_X64
43 #include "x64/lithium-codegen-x64.h"
44 #elif V8_TARGET_ARCH_ARM
45 #include "arm/lithium-codegen-arm.h"
46 #elif V8_TARGET_ARCH_MIPS
47 #include "mips/lithium-codegen-mips.h"
49 #error Unsupported target architecture.
55 HBasicBlock::HBasicBlock(HGraph* graph)
56 : block_id_(graph->GetNextBlockID()),
62 loop_information_(NULL),
66 last_environment_(NULL),
68 first_instruction_index_(-1),
69 last_instruction_index_(-1),
71 parent_loop_header_(NULL),
72 is_inline_return_target_(false),
73 is_deoptimizing_(false),
74 dominates_loop_successors_(false) { }
77 void HBasicBlock::AttachLoopInformation() {
78 ASSERT(!IsLoopHeader());
79 loop_information_ = new(zone()) HLoopInformation(this);
83 void HBasicBlock::DetachLoopInformation() {
84 ASSERT(IsLoopHeader());
85 loop_information_ = NULL;
89 void HBasicBlock::AddPhi(HPhi* phi) {
90 ASSERT(!IsStartBlock());
96 void HBasicBlock::RemovePhi(HPhi* phi) {
97 ASSERT(phi->block() == this);
98 ASSERT(phis_.Contains(phi));
99 ASSERT(phi->HasNoUses() || !phi->is_live());
101 phis_.RemoveElement(phi);
106 void HBasicBlock::AddInstruction(HInstruction* instr) {
107 ASSERT(!IsStartBlock() || !IsFinished());
108 ASSERT(!instr->IsLinked());
109 ASSERT(!IsFinished());
110 if (first_ == NULL) {
111 HBlockEntry* entry = new(zone()) HBlockEntry();
112 entry->InitializeAsFirst(this);
113 first_ = last_ = entry;
115 instr->InsertAfter(last_);
119 HDeoptimize* HBasicBlock::CreateDeoptimize(
120 HDeoptimize::UseEnvironment has_uses) {
121 ASSERT(HasEnvironment());
122 if (has_uses == HDeoptimize::kNoUses) return new(zone()) HDeoptimize(0);
124 HEnvironment* environment = last_environment();
125 HDeoptimize* instr = new(zone()) HDeoptimize(environment->length());
126 for (int i = 0; i < environment->length(); i++) {
127 HValue* val = environment->values()->at(i);
128 instr->AddEnvironmentValue(val);
135 HSimulate* HBasicBlock::CreateSimulate(int ast_id) {
136 ASSERT(HasEnvironment());
137 HEnvironment* environment = last_environment();
138 ASSERT(ast_id == AstNode::kNoNumber ||
139 environment->closure()->shared()->VerifyBailoutId(ast_id));
141 int push_count = environment->push_count();
142 int pop_count = environment->pop_count();
144 HSimulate* instr = new(zone()) HSimulate(ast_id, pop_count);
145 for (int i = push_count - 1; i >= 0; --i) {
146 instr->AddPushedValue(environment->ExpressionStackAt(i));
148 for (int i = 0; i < environment->assigned_variables()->length(); ++i) {
149 int index = environment->assigned_variables()->at(i);
150 instr->AddAssignedValue(index, environment->Lookup(index));
152 environment->ClearHistory();
157 void HBasicBlock::Finish(HControlInstruction* end) {
158 ASSERT(!IsFinished());
161 for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
162 it.Current()->RegisterPredecessor(this);
167 void HBasicBlock::Goto(HBasicBlock* block, FunctionState* state) {
168 bool drop_extra = state != NULL && state->drop_extra();
169 bool arguments_pushed = state != NULL && state->arguments_pushed();
171 if (block->IsInlineReturnTarget()) {
172 AddInstruction(new(zone()) HLeaveInlined(arguments_pushed));
173 last_environment_ = last_environment()->DiscardInlined(drop_extra);
176 AddSimulate(AstNode::kNoNumber);
177 HGoto* instr = new(zone()) HGoto(block);
182 void HBasicBlock::AddLeaveInlined(HValue* return_value,
184 FunctionState* state) {
185 bool drop_extra = state != NULL && state->drop_extra();
186 bool arguments_pushed = state != NULL && state->arguments_pushed();
188 ASSERT(target->IsInlineReturnTarget());
189 ASSERT(return_value != NULL);
190 AddInstruction(new(zone()) HLeaveInlined(arguments_pushed));
191 last_environment_ = last_environment()->DiscardInlined(drop_extra);
192 last_environment()->Push(return_value);
193 AddSimulate(AstNode::kNoNumber);
194 HGoto* instr = new(zone()) HGoto(target);
199 void HBasicBlock::SetInitialEnvironment(HEnvironment* env) {
200 ASSERT(!HasEnvironment());
201 ASSERT(first() == NULL);
202 UpdateEnvironment(env);
206 void HBasicBlock::SetJoinId(int ast_id) {
207 int length = predecessors_.length();
209 for (int i = 0; i < length; i++) {
210 HBasicBlock* predecessor = predecessors_[i];
211 ASSERT(predecessor->end()->IsGoto());
212 HSimulate* simulate = HSimulate::cast(predecessor->end()->previous());
213 // We only need to verify the ID once.
215 predecessor->last_environment()->closure()->shared()
216 ->VerifyBailoutId(ast_id));
217 simulate->set_ast_id(ast_id);
222 bool HBasicBlock::Dominates(HBasicBlock* other) const {
223 HBasicBlock* current = other->dominator();
224 while (current != NULL) {
225 if (current == this) return true;
226 current = current->dominator();
232 int HBasicBlock::LoopNestingDepth() const {
233 const HBasicBlock* current = this;
234 int result = (current->IsLoopHeader()) ? 1 : 0;
235 while (current->parent_loop_header() != NULL) {
236 current = current->parent_loop_header();
243 void HBasicBlock::PostProcessLoopHeader(IterationStatement* stmt) {
244 ASSERT(IsLoopHeader());
246 SetJoinId(stmt->EntryId());
247 if (predecessors()->length() == 1) {
248 // This is a degenerated loop.
249 DetachLoopInformation();
253 // Only the first entry into the loop is from outside the loop. All other
254 // entries must be back edges.
255 for (int i = 1; i < predecessors()->length(); ++i) {
256 loop_information()->RegisterBackEdge(predecessors()->at(i));
261 void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
262 if (HasPredecessor()) {
263 // Only loop header blocks can have a predecessor added after
264 // instructions have been added to the block (they have phis for all
265 // values in the environment, these phis may be eliminated later).
266 ASSERT(IsLoopHeader() || first_ == NULL);
267 HEnvironment* incoming_env = pred->last_environment();
268 if (IsLoopHeader()) {
269 ASSERT(phis()->length() == incoming_env->length());
270 for (int i = 0; i < phis_.length(); ++i) {
271 phis_[i]->AddInput(incoming_env->values()->at(i));
274 last_environment()->AddIncomingEdge(this, pred->last_environment());
276 } else if (!HasEnvironment() && !IsFinished()) {
277 ASSERT(!IsLoopHeader());
278 SetInitialEnvironment(pred->last_environment()->Copy());
281 predecessors_.Add(pred);
285 void HBasicBlock::AddDominatedBlock(HBasicBlock* block) {
286 ASSERT(!dominated_blocks_.Contains(block));
287 // Keep the list of dominated blocks sorted such that if there is two
288 // succeeding block in this list, the predecessor is before the successor.
290 while (index < dominated_blocks_.length() &&
291 dominated_blocks_[index]->block_id() < block->block_id()) {
294 dominated_blocks_.InsertAt(index, block);
298 void HBasicBlock::AssignCommonDominator(HBasicBlock* other) {
299 if (dominator_ == NULL) {
301 other->AddDominatedBlock(this);
302 } else if (other->dominator() != NULL) {
303 HBasicBlock* first = dominator_;
304 HBasicBlock* second = other;
306 while (first != second) {
307 if (first->block_id() > second->block_id()) {
308 first = first->dominator();
310 second = second->dominator();
312 ASSERT(first != NULL && second != NULL);
315 if (dominator_ != first) {
316 ASSERT(dominator_->dominated_blocks_.Contains(this));
317 dominator_->dominated_blocks_.RemoveElement(this);
319 first->AddDominatedBlock(this);
325 void HBasicBlock::AssignLoopSuccessorDominators() {
326 // Mark blocks that dominate all subsequent reachable blocks inside their
327 // loop. Exploit the fact that blocks are sorted in reverse post order. When
328 // the loop is visited in increasing block id order, if the number of
329 // non-loop-exiting successor edges at the dominator_candidate block doesn't
330 // exceed the number of previously encountered predecessor edges, there is no
331 // path from the loop header to any block with higher id that doesn't go
332 // through the dominator_candidate block. In this case, the
333 // dominator_candidate block is guaranteed to dominate all blocks reachable
334 // from it with higher ids.
335 HBasicBlock* last = loop_information()->GetLastBackEdge();
336 int outstanding_successors = 1; // one edge from the pre-header
337 // Header always dominates everything.
338 MarkAsLoopSuccessorDominator();
339 for (int j = block_id(); j <= last->block_id(); ++j) {
340 HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
341 for (HPredecessorIterator it(dominator_candidate); !it.Done();
343 HBasicBlock* predecessor = it.Current();
344 // Don't count back edges.
345 if (predecessor->block_id() < dominator_candidate->block_id()) {
346 outstanding_successors--;
350 // If more successors than predecessors have been seen in the loop up to
351 // now, it's not possible to guarantee that the current block dominates
352 // all of the blocks with higher IDs. In this case, assume conservatively
353 // that those paths through loop that don't go through the current block
354 // contain all of the loop's dependencies. Also be careful to record
355 // dominator information about the current loop that's being processed,
356 // and not nested loops, which will be processed when
357 // AssignLoopSuccessorDominators gets called on their header.
358 ASSERT(outstanding_successors >= 0);
359 HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header();
360 if (outstanding_successors == 0 &&
361 (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
362 dominator_candidate->MarkAsLoopSuccessorDominator();
364 HControlInstruction* end = dominator_candidate->end();
365 for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
366 HBasicBlock* successor = it.Current();
367 // Only count successors that remain inside the loop and don't loop back
369 if (successor->block_id() > dominator_candidate->block_id() &&
370 successor->block_id() <= last->block_id()) {
371 // Backwards edges must land on loop headers.
372 ASSERT(successor->block_id() > dominator_candidate->block_id() ||
373 successor->IsLoopHeader());
374 outstanding_successors++;
381 int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
382 for (int i = 0; i < predecessors_.length(); ++i) {
383 if (predecessors_[i] == predecessor) return i;
391 void HBasicBlock::Verify() {
392 // Check that every block is finished.
393 ASSERT(IsFinished());
394 ASSERT(block_id() >= 0);
396 // Check that the incoming edges are in edge split form.
397 if (predecessors_.length() > 1) {
398 for (int i = 0; i < predecessors_.length(); ++i) {
399 ASSERT(predecessors_[i]->end()->SecondSuccessor() == NULL);
406 void HLoopInformation::RegisterBackEdge(HBasicBlock* block) {
407 this->back_edges_.Add(block);
412 HBasicBlock* HLoopInformation::GetLastBackEdge() const {
414 HBasicBlock* result = NULL;
415 for (int i = 0; i < back_edges_.length(); ++i) {
416 HBasicBlock* cur = back_edges_[i];
417 if (cur->block_id() > max_id) {
418 max_id = cur->block_id();
426 void HLoopInformation::AddBlock(HBasicBlock* block) {
427 if (block == loop_header()) return;
428 if (block->parent_loop_header() == loop_header()) return;
429 if (block->parent_loop_header() != NULL) {
430 AddBlock(block->parent_loop_header());
432 block->set_parent_loop_header(loop_header());
434 for (int i = 0; i < block->predecessors()->length(); ++i) {
435 AddBlock(block->predecessors()->at(i));
443 // Checks reachability of the blocks in this graph and stores a bit in
444 // the BitVector "reachable()" for every block that can be reached
445 // from the start block of the graph. If "dont_visit" is non-null, the given
446 // block is treated as if it would not be part of the graph. "visited_count()"
447 // returns the number of reachable blocks.
448 class ReachabilityAnalyzer BASE_EMBEDDED {
450 ReachabilityAnalyzer(HBasicBlock* entry_block,
452 HBasicBlock* dont_visit)
455 reachable_(block_count, ZONE),
456 dont_visit_(dont_visit) {
457 PushBlock(entry_block);
461 int visited_count() const { return visited_count_; }
462 const BitVector* reachable() const { return &reachable_; }
465 void PushBlock(HBasicBlock* block) {
466 if (block != NULL && block != dont_visit_ &&
467 !reachable_.Contains(block->block_id())) {
468 reachable_.Add(block->block_id());
475 while (!stack_.is_empty()) {
476 HControlInstruction* end = stack_.RemoveLast()->end();
477 for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
478 PushBlock(it.Current());
484 ZoneList<HBasicBlock*> stack_;
485 BitVector reachable_;
486 HBasicBlock* dont_visit_;
490 void HGraph::Verify(bool do_full_verify) const {
491 for (int i = 0; i < blocks_.length(); i++) {
492 HBasicBlock* block = blocks_.at(i);
496 // Check that every block contains at least one node and that only the last
497 // node is a control instruction.
498 HInstruction* current = block->first();
499 ASSERT(current != NULL && current->IsBlockEntry());
500 while (current != NULL) {
501 ASSERT((current->next() == NULL) == current->IsControlInstruction());
502 ASSERT(current->block() == block);
504 current = current->next();
507 // Check that successors are correctly set.
508 HBasicBlock* first = block->end()->FirstSuccessor();
509 HBasicBlock* second = block->end()->SecondSuccessor();
510 ASSERT(second == NULL || first != NULL);
512 // Check that the predecessor array is correct.
514 ASSERT(first->predecessors()->Contains(block));
515 if (second != NULL) {
516 ASSERT(second->predecessors()->Contains(block));
520 // Check that phis have correct arguments.
521 for (int j = 0; j < block->phis()->length(); j++) {
522 HPhi* phi = block->phis()->at(j);
526 // Check that all join blocks have predecessors that end with an
527 // unconditional goto and agree on their environment node id.
528 if (block->predecessors()->length() >= 2) {
529 int id = block->predecessors()->first()->last_environment()->ast_id();
530 for (int k = 0; k < block->predecessors()->length(); k++) {
531 HBasicBlock* predecessor = block->predecessors()->at(k);
532 ASSERT(predecessor->end()->IsGoto());
533 ASSERT(predecessor->last_environment()->ast_id() == id);
538 // Check special property of first block to have no predecessors.
539 ASSERT(blocks_.at(0)->predecessors()->is_empty());
541 if (do_full_verify) {
542 // Check that the graph is fully connected.
543 ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), NULL);
544 ASSERT(analyzer.visited_count() == blocks_.length());
546 // Check that entry block dominator is NULL.
547 ASSERT(entry_block_->dominator() == NULL);
550 for (int i = 0; i < blocks_.length(); ++i) {
551 HBasicBlock* block = blocks_.at(i);
552 if (block->dominator() == NULL) {
553 // Only start block may have no dominator assigned to.
556 // Assert that block is unreachable if dominator must not be visited.
557 ReachabilityAnalyzer dominator_analyzer(entry_block_,
560 ASSERT(!dominator_analyzer.reachable()->Contains(block->block_id()));
569 HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
571 if (!pointer->is_set()) {
572 HConstant* constant = new(zone()) HConstant(Handle<Object>(value),
573 Representation::Tagged());
574 constant->InsertAfter(GetConstantUndefined());
575 pointer->set(constant);
577 return pointer->get();
581 HConstant* HGraph::GetConstant1() {
582 return GetConstant(&constant_1_, Smi::FromInt(1));
586 HConstant* HGraph::GetConstantMinus1() {
587 return GetConstant(&constant_minus1_, Smi::FromInt(-1));
591 HConstant* HGraph::GetConstantTrue() {
592 return GetConstant(&constant_true_, isolate()->heap()->true_value());
596 HConstant* HGraph::GetConstantFalse() {
597 return GetConstant(&constant_false_, isolate()->heap()->false_value());
601 HConstant* HGraph::GetConstantHole() {
602 return GetConstant(&constant_hole_, isolate()->heap()->the_hole_value());
606 HGraphBuilder::HGraphBuilder(CompilationInfo* info,
607 TypeFeedbackOracle* oracle)
608 : function_state_(NULL),
609 initial_function_state_(this, info, oracle, NORMAL_RETURN),
613 current_block_(NULL),
616 zone_(info->isolate()->zone()),
617 inline_bailout_(false) {
618 // This is not initialized in the initializer list because the
619 // constructor for the initial state relies on function_state_ == NULL
620 // to know it's the initial state.
621 function_state_= &initial_function_state_;
624 HBasicBlock* HGraphBuilder::CreateJoin(HBasicBlock* first,
629 } else if (second == NULL) {
632 HBasicBlock* join_block = graph_->CreateBasicBlock();
633 first->Goto(join_block);
634 second->Goto(join_block);
635 join_block->SetJoinId(join_id);
641 HBasicBlock* HGraphBuilder::JoinContinue(IterationStatement* statement,
642 HBasicBlock* exit_block,
643 HBasicBlock* continue_block) {
644 if (continue_block != NULL) {
645 if (exit_block != NULL) exit_block->Goto(continue_block);
646 continue_block->SetJoinId(statement->ContinueId());
647 return continue_block;
653 HBasicBlock* HGraphBuilder::CreateLoop(IterationStatement* statement,
654 HBasicBlock* loop_entry,
655 HBasicBlock* body_exit,
656 HBasicBlock* loop_successor,
657 HBasicBlock* break_block) {
658 if (body_exit != NULL) body_exit->Goto(loop_entry);
659 loop_entry->PostProcessLoopHeader(statement);
660 if (break_block != NULL) {
661 if (loop_successor != NULL) loop_successor->Goto(break_block);
662 break_block->SetJoinId(statement->ExitId());
665 return loop_successor;
669 void HBasicBlock::FinishExit(HControlInstruction* instruction) {
675 HGraph::HGraph(CompilationInfo* info)
676 : isolate_(info->isolate()),
683 new(zone()) HEnvironment(NULL, info->scope(), info->closure());
684 start_environment_->set_ast_id(AstNode::kFunctionEntryId);
685 entry_block_ = CreateBasicBlock();
686 entry_block_->SetInitialEnvironment(start_environment_);
690 Handle<Code> HGraph::Compile(CompilationInfo* info) {
691 int values = GetMaximumValueID();
692 if (values > LUnallocated::kMaxVirtualRegisters) {
693 if (FLAG_trace_bailout) {
694 PrintF("Not enough virtual registers for (values).\n");
696 return Handle<Code>::null();
698 LAllocator allocator(values, this);
699 LChunkBuilder builder(info, this, &allocator);
700 LChunk* chunk = builder.Build();
701 if (chunk == NULL) return Handle<Code>::null();
703 if (!allocator.Allocate(chunk)) {
704 if (FLAG_trace_bailout) {
705 PrintF("Not enough virtual registers (regalloc).\n");
707 return Handle<Code>::null();
710 MacroAssembler assembler(info->isolate(), NULL, 0);
711 LCodeGen generator(chunk, &assembler, info);
713 chunk->MarkEmptyBlocks();
715 if (generator.GenerateCode()) {
716 if (FLAG_trace_codegen) {
717 PrintF("Crankshaft Compiler - ");
719 CodeGenerator::MakeCodePrologue(info);
720 Code::Flags flags = Code::ComputeFlags(Code::OPTIMIZED_FUNCTION);
722 CodeGenerator::MakeCodeEpilogue(&assembler, flags, info);
723 generator.FinishCode(code);
724 CodeGenerator::PrintCode(code, info);
727 return Handle<Code>::null();
731 HBasicBlock* HGraph::CreateBasicBlock() {
732 HBasicBlock* result = new(zone()) HBasicBlock(this);
738 void HGraph::Canonicalize() {
739 if (!FLAG_use_canonicalizing) return;
740 HPhase phase("H_Canonicalize", this);
741 for (int i = 0; i < blocks()->length(); ++i) {
742 HInstruction* instr = blocks()->at(i)->first();
743 while (instr != NULL) {
744 HValue* value = instr->Canonicalize();
745 if (value != instr) instr->DeleteAndReplaceWith(value);
746 instr = instr->next();
752 void HGraph::OrderBlocks() {
753 HPhase phase("H_Block ordering");
754 BitVector visited(blocks_.length(), zone());
756 ZoneList<HBasicBlock*> reverse_result(8);
757 HBasicBlock* start = blocks_[0];
758 Postorder(start, &visited, &reverse_result, NULL);
762 for (int i = reverse_result.length() - 1; i >= 0; --i) {
763 HBasicBlock* b = reverse_result[i];
765 b->set_block_id(index++);
770 void HGraph::PostorderLoopBlocks(HLoopInformation* loop,
772 ZoneList<HBasicBlock*>* order,
773 HBasicBlock* loop_header) {
774 for (int i = 0; i < loop->blocks()->length(); ++i) {
775 HBasicBlock* b = loop->blocks()->at(i);
776 for (HSuccessorIterator it(b->end()); !it.Done(); it.Advance()) {
777 Postorder(it.Current(), visited, order, loop_header);
779 if (b->IsLoopHeader() && b != loop->loop_header()) {
780 PostorderLoopBlocks(b->loop_information(), visited, order, loop_header);
786 void HGraph::Postorder(HBasicBlock* block,
788 ZoneList<HBasicBlock*>* order,
789 HBasicBlock* loop_header) {
790 if (block == NULL || visited->Contains(block->block_id())) return;
791 if (block->parent_loop_header() != loop_header) return;
792 visited->Add(block->block_id());
793 if (block->IsLoopHeader()) {
794 PostorderLoopBlocks(block->loop_information(), visited, order, loop_header);
795 for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
796 Postorder(it.Current(), visited, order, block);
799 ASSERT(block->IsFinished());
800 for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
801 Postorder(it.Current(), visited, order, loop_header);
804 ASSERT(block->end()->FirstSuccessor() == NULL ||
805 order->Contains(block->end()->FirstSuccessor()) ||
806 block->end()->FirstSuccessor()->IsLoopHeader());
807 ASSERT(block->end()->SecondSuccessor() == NULL ||
808 order->Contains(block->end()->SecondSuccessor()) ||
809 block->end()->SecondSuccessor()->IsLoopHeader());
814 void HGraph::AssignDominators() {
815 HPhase phase("H_Assign dominators", this);
816 for (int i = 0; i < blocks_.length(); ++i) {
817 HBasicBlock* block = blocks_[i];
818 if (block->IsLoopHeader()) {
819 // Only the first predecessor of a loop header is from outside the loop.
820 // All others are back edges, and thus cannot dominate the loop header.
821 block->AssignCommonDominator(block->predecessors()->first());
822 block->AssignLoopSuccessorDominators();
824 for (int j = blocks_[i]->predecessors()->length() - 1; j >= 0; --j) {
825 blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j));
831 // Mark all blocks that are dominated by an unconditional soft deoptimize to
832 // prevent code motion across those blocks.
833 void HGraph::PropagateDeoptimizingMark() {
834 HPhase phase("H_Propagate deoptimizing mark", this);
835 MarkAsDeoptimizingRecursively(entry_block());
838 void HGraph::MarkAsDeoptimizingRecursively(HBasicBlock* block) {
839 for (int i = 0; i < block->dominated_blocks()->length(); ++i) {
840 HBasicBlock* dominated = block->dominated_blocks()->at(i);
841 if (block->IsDeoptimizing()) dominated->MarkAsDeoptimizing();
842 MarkAsDeoptimizingRecursively(dominated);
846 void HGraph::EliminateRedundantPhis() {
847 HPhase phase("H_Redundant phi elimination", this);
849 // Worklist of phis that can potentially be eliminated. Initialized with
850 // all phi nodes. When elimination of a phi node modifies another phi node
851 // the modified phi node is added to the worklist.
852 ZoneList<HPhi*> worklist(blocks_.length());
853 for (int i = 0; i < blocks_.length(); ++i) {
854 worklist.AddAll(*blocks_[i]->phis());
857 while (!worklist.is_empty()) {
858 HPhi* phi = worklist.RemoveLast();
859 HBasicBlock* block = phi->block();
861 // Skip phi node if it was already replaced.
862 if (block == NULL) continue;
864 // Get replacement value if phi is redundant.
865 HValue* replacement = phi->GetRedundantReplacement();
867 if (replacement != NULL) {
868 // Iterate through the uses and replace them all.
869 for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) {
870 HValue* value = it.value();
871 value->SetOperandAt(it.index(), replacement);
872 if (value->IsPhi()) worklist.Add(HPhi::cast(value));
874 block->RemovePhi(phi);
880 void HGraph::EliminateUnreachablePhis() {
881 HPhase phase("H_Unreachable phi elimination", this);
883 // Initialize worklist.
884 ZoneList<HPhi*> phi_list(blocks_.length());
885 ZoneList<HPhi*> worklist(blocks_.length());
886 for (int i = 0; i < blocks_.length(); ++i) {
887 for (int j = 0; j < blocks_[i]->phis()->length(); j++) {
888 HPhi* phi = blocks_[i]->phis()->at(j);
890 // We can't eliminate phis in the receiver position in the environment
891 // because in case of throwing an error we need this value to
892 // construct a stack trace.
893 if (phi->HasRealUses() || phi->IsReceiver()) {
894 phi->set_is_live(true);
900 // Iteratively mark live phis.
901 while (!worklist.is_empty()) {
902 HPhi* phi = worklist.RemoveLast();
903 for (int i = 0; i < phi->OperandCount(); i++) {
904 HValue* operand = phi->OperandAt(i);
905 if (operand->IsPhi() && !HPhi::cast(operand)->is_live()) {
906 HPhi::cast(operand)->set_is_live(true);
907 worklist.Add(HPhi::cast(operand));
912 // Remove unreachable phis.
913 for (int i = 0; i < phi_list.length(); i++) {
914 HPhi* phi = phi_list[i];
915 if (!phi->is_live()) {
916 HBasicBlock* block = phi->block();
917 block->RemovePhi(phi);
918 block->RecordDeletedPhi(phi->merged_index());
924 bool HGraph::CheckArgumentsPhiUses() {
925 int block_count = blocks_.length();
926 for (int i = 0; i < block_count; ++i) {
927 for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
928 HPhi* phi = blocks_[i]->phis()->at(j);
929 // We don't support phi uses of arguments for now.
930 if (phi->CheckFlag(HValue::kIsArguments)) return false;
937 bool HGraph::CheckConstPhiUses() {
938 int block_count = blocks_.length();
939 for (int i = 0; i < block_count; ++i) {
940 for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
941 HPhi* phi = blocks_[i]->phis()->at(j);
942 // Check for the hole value (from an uninitialized const).
943 for (int k = 0; k < phi->OperandCount(); k++) {
944 if (phi->OperandAt(k) == GetConstantHole()) return false;
952 void HGraph::CollectPhis() {
953 int block_count = blocks_.length();
954 phi_list_ = new ZoneList<HPhi*>(block_count);
955 for (int i = 0; i < block_count; ++i) {
956 for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
957 HPhi* phi = blocks_[i]->phis()->at(j);
964 void HGraph::InferTypes(ZoneList<HValue*>* worklist) {
965 BitVector in_worklist(GetMaximumValueID(), zone());
966 for (int i = 0; i < worklist->length(); ++i) {
967 ASSERT(!in_worklist.Contains(worklist->at(i)->id()));
968 in_worklist.Add(worklist->at(i)->id());
971 while (!worklist->is_empty()) {
972 HValue* current = worklist->RemoveLast();
973 in_worklist.Remove(current->id());
974 if (current->UpdateInferredType()) {
975 for (HUseIterator it(current->uses()); !it.Done(); it.Advance()) {
976 HValue* use = it.value();
977 if (!in_worklist.Contains(use->id())) {
978 in_worklist.Add(use->id());
987 class HRangeAnalysis BASE_EMBEDDED {
989 explicit HRangeAnalysis(HGraph* graph) :
990 graph_(graph), zone_(graph->isolate()->zone()), changed_ranges_(16) { }
995 void TraceRange(const char* msg, ...);
996 void Analyze(HBasicBlock* block);
997 void InferControlFlowRange(HCompareIDAndBranch* test, HBasicBlock* dest);
998 void UpdateControlFlowRange(Token::Value op, HValue* value, HValue* other);
999 void InferRange(HValue* value);
1000 void RollBackTo(int index);
1001 void AddRange(HValue* value, Range* range);
1005 ZoneList<HValue*> changed_ranges_;
1009 void HRangeAnalysis::TraceRange(const char* msg, ...) {
1010 if (FLAG_trace_range) {
1012 va_start(arguments, msg);
1013 OS::VPrint(msg, arguments);
1019 void HRangeAnalysis::Analyze() {
1020 HPhase phase("H_Range analysis", graph_);
1021 Analyze(graph_->entry_block());
1025 void HRangeAnalysis::Analyze(HBasicBlock* block) {
1026 TraceRange("Analyzing block B%d\n", block->block_id());
1028 int last_changed_range = changed_ranges_.length() - 1;
1030 // Infer range based on control flow.
1031 if (block->predecessors()->length() == 1) {
1032 HBasicBlock* pred = block->predecessors()->first();
1033 if (pred->end()->IsCompareIDAndBranch()) {
1034 InferControlFlowRange(HCompareIDAndBranch::cast(pred->end()), block);
1038 // Process phi instructions.
1039 for (int i = 0; i < block->phis()->length(); ++i) {
1040 HPhi* phi = block->phis()->at(i);
1044 // Go through all instructions of the current block.
1045 HInstruction* instr = block->first();
1046 while (instr != block->end()) {
1048 instr = instr->next();
1051 // Continue analysis in all dominated blocks.
1052 for (int i = 0; i < block->dominated_blocks()->length(); ++i) {
1053 Analyze(block->dominated_blocks()->at(i));
1056 RollBackTo(last_changed_range);
1060 void HRangeAnalysis::InferControlFlowRange(HCompareIDAndBranch* test,
1061 HBasicBlock* dest) {
1062 ASSERT((test->FirstSuccessor() == dest) == (test->SecondSuccessor() != dest));
1063 if (test->GetInputRepresentation().IsInteger32()) {
1064 Token::Value op = test->token();
1065 if (test->SecondSuccessor() == dest) {
1066 op = Token::NegateCompareOp(op);
1068 Token::Value inverted_op = Token::InvertCompareOp(op);
1069 UpdateControlFlowRange(op, test->left(), test->right());
1070 UpdateControlFlowRange(inverted_op, test->right(), test->left());
1075 // We know that value [op] other. Use this information to update the range on
1077 void HRangeAnalysis::UpdateControlFlowRange(Token::Value op,
1081 Range* range = other->range() != NULL ? other->range() : &temp_range;
1082 Range* new_range = NULL;
1084 TraceRange("Control flow range infer %d %s %d\n",
1089 if (op == Token::EQ || op == Token::EQ_STRICT) {
1090 // The same range has to apply for value.
1091 new_range = range->Copy(zone_);
1092 } else if (op == Token::LT || op == Token::LTE) {
1093 new_range = range->CopyClearLower(zone_);
1094 if (op == Token::LT) {
1095 new_range->AddConstant(-1);
1097 } else if (op == Token::GT || op == Token::GTE) {
1098 new_range = range->CopyClearUpper(zone_);
1099 if (op == Token::GT) {
1100 new_range->AddConstant(1);
1104 if (new_range != NULL && !new_range->IsMostGeneric()) {
1105 AddRange(value, new_range);
1110 void HRangeAnalysis::InferRange(HValue* value) {
1111 ASSERT(!value->HasRange());
1112 if (!value->representation().IsNone()) {
1113 value->ComputeInitialRange(zone_);
1114 Range* range = value->range();
1115 TraceRange("Initial inferred range of %d (%s) set to [%d,%d]\n",
1124 void HRangeAnalysis::RollBackTo(int index) {
1125 for (int i = index + 1; i < changed_ranges_.length(); ++i) {
1126 changed_ranges_[i]->RemoveLastAddedRange();
1128 changed_ranges_.Rewind(index + 1);
// Attaches |range| as the new current range of |value| and remembers the
// value in changed_ranges_ so that RollBackTo() can undo the update when
// control-flow-derived information goes out of scope. Traces old and new
// ranges when range tracing is enabled.
1132 void HRangeAnalysis::AddRange(HValue* value, Range* range) {
1133 Range* original_range = value->range();
1134 value->AddNewRange(range, zone_);
1135 changed_ranges_.Add(value);
1136 Range* new_range = value->range();
1137 TraceRange("Updated range of %d set to [%d,%d]\n",
1140 new_range->upper());
1141 if (original_range != NULL) {
1142 TraceRange("Original range was [%d,%d]\n",
1143 original_range->lower(),
1144 original_range->upper());
1146 TraceRange("New information was [%d,%d]\n",
1152 void TraceGVN(const char* msg, ...) {
1154 va_start(arguments, msg);
1155 OS::VPrint(msg, arguments);
1159 // Wrap TraceGVN in macros to avoid the expense of evaluating its arguments when
1160 // --trace-gvn is off.
// Each TRACE_GVN_n macro takes a format string plus n arguments and calls
// TraceGVN only when FLAG_trace_gvn is set. (No comments are inserted
// between the definitions below: the bodies use line continuations.)
1161 #define TRACE_GVN_1(msg, a1) \
1162 if (FLAG_trace_gvn) { \
1163 TraceGVN(msg, a1); \
1166 #define TRACE_GVN_2(msg, a1, a2) \
1167 if (FLAG_trace_gvn) { \
1168 TraceGVN(msg, a1, a2); \
1171 #define TRACE_GVN_3(msg, a1, a2, a3) \
1172 if (FLAG_trace_gvn) { \
1173 TraceGVN(msg, a1, a2, a3); \
1176 #define TRACE_GVN_4(msg, a1, a2, a3, a4) \
1177 if (FLAG_trace_gvn) { \
1178 TraceGVN(msg, a1, a2, a3, a4); \
1181 #define TRACE_GVN_5(msg, a1, a2, a3, a4, a5) \
1182 if (FLAG_trace_gvn) { \
1183 TraceGVN(msg, a1, a2, a3, a4, a5); \
1187 HValueMap::HValueMap(Zone* zone, const HValueMap* other)
1188 : array_size_(other->array_size_),
1189 lists_size_(other->lists_size_),
1190 count_(other->count_),
1191 present_flags_(other->present_flags_),
1192 array_(zone->NewArray<HValueMapListElement>(other->array_size_)),
1193 lists_(zone->NewArray<HValueMapListElement>(other->lists_size_)),
1194 free_list_head_(other->free_list_head_) {
1195 memcpy(array_, other->array_, array_size_ * sizeof(HValueMapListElement));
1196 memcpy(lists_, other->lists_, lists_size_ * sizeof(HValueMapListElement));
// Removes from the map every value whose GVN flags depend on any side
// effect in |flags|. present_flags_ is cleared and rebuilt from the
// surviving entries; removed collision-list slots are returned to the
// free list. Exits early if nothing in the map can be affected.
1200 void HValueMap::Kill(GVNFlagSet flags) {
1201 GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(flags);
1202 if (!present_flags_.ContainsAnyOf(depends_flags)) return;
1203 present_flags_.RemoveAll();
1204 for (int i = 0; i < array_size_; ++i) {
1205 HValue* value = array_[i].value;
1206 if (value != NULL) {
1207 // Clear list of collisions first, so we know if it becomes empty.
1208 int kept = kNil; // List of kept elements.
1210 for (int current = array_[i].next; current != kNil; current = next) {
1211 next = lists_[current].next;
1212 HValue* value = lists_[current].value;
1213 if (value->gvn_flags().ContainsAnyOf(depends_flags)) {
// Drop this collision entry: thread it back onto the free list.
1216 lists_[current].next = free_list_head_;
1217 free_list_head_ = current;
// Otherwise keep it: move it to the "kept" list and re-record its flags.
1220 lists_[current].next = kept;
1222 present_flags_.Add(value->gvn_flags());
1225 array_[i].next = kept;
1227 // Now possibly drop directly indexed element.
1228 value = array_[i].value;
1229 if (value->gvn_flags().ContainsAnyOf(depends_flags)) { // Drop it.
1231 int head = array_[i].next;
// No collision list: the bucket becomes empty. Otherwise promote the
// head of the collision list into the direct slot and free its node.
1233 array_[i].value = NULL;
1235 array_[i].value = lists_[head].value;
1236 array_[i].next = lists_[head].next;
1237 lists_[head].next = free_list_head_;
1238 free_list_head_ = head;
1241 present_flags_.Add(value->gvn_flags()); // Keep it.
1248 HValue* HValueMap::Lookup(HValue* value) const {
1249 uint32_t hash = static_cast<uint32_t>(value->Hashcode());
1250 uint32_t pos = Bound(hash);
1251 if (array_[pos].value != NULL) {
1252 if (array_[pos].value->Equals(value)) return array_[pos].value;
1253 int next = array_[pos].next;
1254 while (next != kNil) {
1255 if (lists_[next].value->Equals(value)) return lists_[next].value;
1256 next = lists_[next].next;
// Grows the hash array to |new_size| and rehashes every element (both
// directly stored values and collision-list entries) into it. Collision
// nodes of the old table are recycled through the free list as they are
// re-inserted, so lists_ storage is reused rather than reallocated.
1263 void HValueMap::Resize(int new_size) {
1264 ASSERT(new_size > count_);
1265 // Hashing the values into the new array has no more collisions than in the
1266 // old hash map, so we can use the existing lists_ array, if we are careful.
1268 // Make sure we have at least one free element.
1269 if (free_list_head_ == kNil) {
1270 ResizeLists(lists_size_ << 1);
1273 HValueMapListElement* new_array =
1274 ZONE->NewArray<HValueMapListElement>(new_size);
1275 memset(new_array, 0, sizeof(HValueMapListElement) * new_size);
1277 HValueMapListElement* old_array = array_;
1278 int old_size = array_size_;
// old_count is only used to assert that rehashing preserved the count.
1280 int old_count = count_;
1282 // Do not modify present_flags_. It is currently correct.
1283 array_size_ = new_size;
1286 if (old_array != NULL) {
1287 // Iterate over all the elements in lists, rehashing them.
1288 for (int i = 0; i < old_size; ++i) {
1289 if (old_array[i].value != NULL) {
1290 int current = old_array[i].next;
1291 while (current != kNil) {
1292 Insert(lists_[current].value);
1293 int next = lists_[current].next;
// Return the old collision node to the free list before moving on.
1294 lists_[current].next = free_list_head_;
1295 free_list_head_ = current;
1298 // Rehash the directly stored value.
1299 Insert(old_array[i].value);
1304 ASSERT(count_ == old_count);
// Grows the collision-list backing store to |new_size| entries. Existing
// entries are copied over unchanged; every newly added tail slot is
// threaded onto the free list so Insert() can allocate from it.
1308 void HValueMap::ResizeLists(int new_size) {
1309 ASSERT(new_size > lists_size_);
1311 HValueMapListElement* new_lists =
1312 ZONE->NewArray<HValueMapListElement>(new_size);
1313 memset(new_lists, 0, sizeof(HValueMapListElement) * new_size);
1315 HValueMapListElement* old_lists = lists_;
1316 int old_size = lists_size_;
1318 lists_size_ = new_size;
1321 if (old_lists != NULL) {
1322 memcpy(lists_, old_lists, old_size * sizeof(HValueMapListElement));
// Chain the fresh slots onto the free list (newest slot becomes the head).
1324 for (int i = old_size; i < lists_size_; ++i) {
1325 lists_[i].next = free_list_head_;
1326 free_list_head_ = i;
// Inserts |value| into the bucket selected by its hash code. If the direct
// slot is free it is used; otherwise a node is taken from the free list
// and pushed onto the front of the bucket's collision list. Grows the
// table when it is half full, and the list storage when the free list is
// exhausted.
1331 void HValueMap::Insert(HValue* value) {
1332 ASSERT(value != NULL);
1333 // Resizing when half of the hashtable is filled up.
1334 if (count_ >= array_size_ >> 1) Resize(array_size_ << 1);
1335 ASSERT(count_ < array_size_);
1337 uint32_t pos = Bound(static_cast<uint32_t>(value->Hashcode()));
1338 if (array_[pos].value == NULL) {
1339 array_[pos].value = value;
1340 array_[pos].next = kNil;
// Bucket occupied: allocate a collision node from the free list.
1342 if (free_list_head_ == kNil) {
1343 ResizeLists(lists_size_ << 1);
1345 int new_element_pos = free_list_head_;
1346 ASSERT(new_element_pos != kNil);
1347 free_list_head_ = lists_[free_list_head_].next;
1348 lists_[new_element_pos].value = value;
1349 lists_[new_element_pos].next = array_[pos].next;
1350 ASSERT(array_[pos].next == kNil || lists_[array_[pos].next].value != NULL);
1351 array_[pos].next = new_element_pos;
1356 HSideEffectMap::HSideEffectMap() : count_(0) {
1357 memset(data_, 0, kNumberOfTrackedSideEffects * kPointerSize);
1361 HSideEffectMap::HSideEffectMap(HSideEffectMap* other) : count_(other->count_) {
1362 memcpy(data_, other->data_, kNumberOfTrackedSideEffects * kPointerSize);
// Forgets the dominating instruction for every tracked side effect
// contained in |flags|; count_ is decremented for each occupied slot.
// NOTE(review): the line that clears data_[i] itself appears to be elided
// from this excerpt — confirm against the full source.
1366 void HSideEffectMap::Kill(GVNFlagSet flags) {
1367 for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
1368 GVNFlag changes_flag = HValue::ChangesFlagFromInt(i);
1369 if (flags.Contains(changes_flag)) {
1370 if (data_[i] != NULL) count_--;
// Records |instr| as the dominating instruction for each tracked side
// effect present in |flags|; count_ is incremented when a previously empty
// slot becomes occupied. NOTE(review): the line storing instr into
// data_[i] appears to be elided from this excerpt.
1377 void HSideEffectMap::Store(GVNFlagSet flags, HInstruction* instr) {
1378 for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
1379 GVNFlag changes_flag = HValue::ChangesFlagFromInt(i);
1380 if (flags.Contains(changes_flag)) {
1381 if (data_[i] == NULL) count_++;
// Pass that removes redundant stack checks on loop back edges: a back
// edge dominated by a call does not need its own stack check.
1388 class HStackCheckEliminator BASE_EMBEDDED {
1390 explicit HStackCheckEliminator(HGraph* graph) : graph_(graph) { }
1399 void HStackCheckEliminator::Process() {
1400 // For each loop block walk the dominator tree from the backwards branch to
1401 // the loop header. If a call instruction is encountered the backwards branch
1402 // is dominated by a call and the stack check in the backwards branch can be
// ... eliminated (a call already performs a stack check).
1404 for (int i = 0; i < graph_->blocks()->length(); i++) {
1405 HBasicBlock* block = graph_->blocks()->at(i);
1406 if (block->IsLoopHeader()) {
1407 HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
1408 HBasicBlock* dominator = back_edge;
// Scan each dominator block's instructions for a call.
1410 HInstruction* instr = dominator->first();
1411 while (instr != NULL) {
1412 if (instr->IsCall()) {
1413 block->loop_information()->stack_check()->Eliminate();
1416 instr = instr->next();
1419 // Done when the loop header is processed.
1420 if (dominator == block) break;
1422 // Move up the dominator tree.
1423 dominator = dominator->dominator();
1430 // Simple sparse set with O(1) add, contains, and clear.
// Uses the classic dense/sparse array pair: dense_ lists the members,
// sparse_[n] points back into dense_, and membership is verified by the
// cross-check in Contains() — so sparse_ never needs re-initialization
// between Clear() calls.
1433 SparseSet(Zone* zone, int capacity)
1434 : capacity_(capacity),
1436 dense_(zone->NewArray<int>(capacity)),
1437 sparse_(zone->NewArray<int>(capacity)) {
1439 // Initialize the sparse array to make valgrind happy.
1440 memset(sparse_, 0, sizeof(sparse_[0]) * capacity);
1444 bool Contains(int n) const {
1445 ASSERT(0 <= n && n < capacity_);
// d is read from sparse_[n] (declaration elided in this excerpt) and is
// only trusted if dense_[d] points back at n.
1447 return 0 <= d && d < length_ && dense_[d] == n;
// Add returns false when n is already in the set.
1451 if (Contains(n)) return false;
1452 dense_[length_] = n;
1453 sparse_[n] = length_;
1458 void Clear() { length_ = 0; }
1466 DISALLOW_COPY_AND_ASSIGN(SparseSet);
// Performs global value numbering over a Hydrogen graph and, optionally,
// loop-invariant code motion. Side-effect sets are precomputed per block
// and per loop, then redundant instructions are eliminated along the
// dominator tree.
1470 class HGlobalValueNumberer BASE_EMBEDDED {
1472 explicit HGlobalValueNumberer(HGraph* graph, CompilationInfo* info)
1475 removed_side_effects_(false),
1476 block_side_effects_(graph->blocks()->length()),
1477 loop_side_effects_(graph->blocks()->length()),
1478 visited_on_paths_(graph->zone(), graph->blocks()->length()) {
// GVN must not allocate on the main heap; assert allocation is disabled
// for the lifetime of this object (re-enabled in the destructor).
1479 ASSERT(info->isolate()->heap()->allow_allocation(false));
1480 block_side_effects_.AddBlock(GVNFlagSet(), graph_->blocks()->length());
1481 loop_side_effects_.AddBlock(GVNFlagSet(), graph_->blocks()->length());
1483 ~HGlobalValueNumberer() {
1484 ASSERT(!info_->isolate()->heap()->allow_allocation(true));
1487 // Returns true if values with side effects are removed.
1491 GVNFlagSet CollectSideEffectsOnPathsToDominatedBlock(
1492 HBasicBlock* dominator,
1493 HBasicBlock* dominated);
1494 void AnalyzeBlock(HBasicBlock* block,
1496 HSideEffectMap* dominators);
1497 void ComputeBlockSideEffects();
1498 void LoopInvariantCodeMotion();
1499 void ProcessLoopBlock(HBasicBlock* block,
1500 HBasicBlock* before_loop,
1501 GVNFlagSet loop_kills,
1502 GVNFlagSet* accumulated_first_time_depends,
1503 GVNFlagSet* accumulated_first_time_changes);
1504 bool AllowCodeMotion();
1505 bool ShouldMove(HInstruction* instr, HBasicBlock* loop_header);
1507 HGraph* graph() { return graph_; }
1508 CompilationInfo* info() { return info_; }
1509 Zone* zone() { return graph_->zone(); }
1512 CompilationInfo* info_;
1513 bool removed_side_effects_;
1515 // A map of block IDs to their side effects.
1516 ZoneList<GVNFlagSet> block_side_effects_;
1518 // A map of loop header block IDs to their loop's side effects.
1519 ZoneList<GVNFlagSet> loop_side_effects_;
1521 // Used when collecting side effects on paths from dominator to
1523 SparseSet visited_on_paths_;
1527 bool HGlobalValueNumberer::Analyze() {
1528 removed_side_effects_ = false;
1529 ComputeBlockSideEffects();
1530 if (FLAG_loop_invariant_code_motion) {
1531 LoopInvariantCodeMotion();
1533 HValueMap* map = new(zone()) HValueMap();
1534 HSideEffectMap side_effect_dominators;
1535 AnalyzeBlock(graph_->entry_block(), map, &side_effect_dominators);
1536 return removed_side_effects_;
// Computes, for every block, the set of side effects its instructions
// cause, and accumulates those sets into the owning loop headers
// (innermost outward). A SoftDeoptimize wipes the effects seen so far in
// the block, since execution bails out there.
1540 void HGlobalValueNumberer::ComputeBlockSideEffects() {
1541 // The Analyze phase of GVN can be called multiple times. Clear loop side
1542 // effects before computing them to erase the contents from previous Analyze
1544 for (int i = 0; i < loop_side_effects_.length(); ++i) {
1545 loop_side_effects_[i].RemoveAll();
// Iterate blocks in reverse so inner-loop totals exist before they are
// folded into their parent loop headers.
1547 for (int i = graph_->blocks()->length() - 1; i >= 0; --i) {
1548 // Compute side effects for the block.
1549 HBasicBlock* block = graph_->blocks()->at(i);
1550 HInstruction* instr = block->first();
1551 int id = block->block_id();
1552 GVNFlagSet side_effects;
1553 while (instr != NULL) {
1554 side_effects.Add(instr->ChangesFlags());
1555 if (instr->IsSoftDeoptimize()) {
1556 block_side_effects_[id].RemoveAll();
1557 side_effects.RemoveAll();
1560 instr = instr->next();
1562 block_side_effects_[id].Add(side_effects);
1564 // Loop headers are part of their loop.
1565 if (block->IsLoopHeader()) {
1566 loop_side_effects_[id].Add(side_effects);
1569 // Propagate loop side effects upwards.
1570 if (block->HasParentLoopHeader()) {
1571 int header_id = block->parent_loop_header()->block_id();
// A nested loop header contributes its whole loop's effects; an ordinary
// block contributes only its own.
1572 loop_side_effects_[header_id].Add(block->IsLoopHeader()
1573 ? loop_side_effects_[id]
// Formats a GVN flag set as a human-readable string for tracing, e.g.
// "changes [Maps, ArrayElements]". When more than half the flags are set
// the complement is printed ("changes all except [...]"). The result is
// heap-allocated and owned by the returned SmartArrayPointer. (No comments
// are inserted near the DECLARE_FLAG macros: they use line continuations.)
1580 SmartArrayPointer<char> GetGVNFlagsString(GVNFlagSet flags) {
1581 char underlying_buffer[kLastFlag * 128];
1582 Vector<char> buffer(underlying_buffer, sizeof(underlying_buffer));
1585 const char* separator = "";
1586 const char* comma = ", ";
// First count how many changes/depends-on bits are set to decide between
// positive and "all except" formatting.
1588 uint32_t set_depends_on = 0;
1589 uint32_t set_changes = 0;
1590 for (int bit = 0; bit < kLastFlag; ++bit) {
1591 if ((flags.ToIntegral() & (1 << bit)) != 0) {
1599 bool positive_changes = set_changes < (kLastFlag / 2);
1600 bool positive_depends_on = set_depends_on < (kLastFlag / 2);
1601 if (set_changes > 0) {
1602 if (positive_changes) {
1603 offset += OS::SNPrintF(buffer + offset, "changes [");
1605 offset += OS::SNPrintF(buffer + offset, "changes all except [");
1607 for (int bit = 0; bit < kLastFlag; ++bit) {
1608 if (((flags.ToIntegral() & (1 << bit)) != 0) == positive_changes) {
1609 switch (static_cast<GVNFlag>(bit)) {
1610 #define DECLARE_FLAG(type) \
1611 case kChanges##type: \
1612 offset += OS::SNPrintF(buffer + offset, separator); \
1613 offset += OS::SNPrintF(buffer + offset, #type); \
1614 separator = comma; \
1616 GVN_TRACKED_FLAG_LIST(DECLARE_FLAG)
1617 GVN_UNTRACKED_FLAG_LIST(DECLARE_FLAG)
1624 offset += OS::SNPrintF(buffer + offset, "]");
1626 if (set_depends_on > 0) {
1628 if (set_changes > 0) {
1629 offset += OS::SNPrintF(buffer + offset, ", ");
1631 if (positive_depends_on) {
1632 offset += OS::SNPrintF(buffer + offset, "depends on [");
1634 offset += OS::SNPrintF(buffer + offset, "depends on all except [");
1636 for (int bit = 0; bit < kLastFlag; ++bit) {
1637 if (((flags.ToIntegral() & (1 << bit)) != 0) == positive_depends_on) {
1638 switch (static_cast<GVNFlag>(bit)) {
1639 #define DECLARE_FLAG(type) \
1640 case kDependsOn##type: \
1641 offset += OS::SNPrintF(buffer + offset, separator); \
1642 offset += OS::SNPrintF(buffer + offset, #type); \
1643 separator = comma; \
1645 GVN_TRACKED_FLAG_LIST(DECLARE_FLAG)
1646 GVN_UNTRACKED_FLAG_LIST(DECLARE_FLAG)
1653 offset += OS::SNPrintF(buffer + offset, "]");
// Fallback: print the raw bit pattern (taken when no flags are set).
1656 OS::SNPrintF(buffer, "0x%08X", flags.ToIntegral());
// Copy the stack buffer into a heap allocation the caller will own.
1658 size_t string_len = strlen(underlying_buffer) + 1;
1659 ASSERT(string_len <= sizeof(underlying_buffer));
1660 char* result = new char[strlen(underlying_buffer) + 1];
1661 memcpy(result, underlying_buffer, string_len);
1662 return SmartArrayPointer<char>(result);
// For every loop (header blocks, visited in reverse block order so inner
// loops are handled first), tries to hoist invariant instructions from
// each block of the loop body into the pre-header via ProcessLoopBlock.
// The accumulated_first_time_* sets collect effects of instructions left
// in the loop, which constrain later transition hoisting.
1666 void HGlobalValueNumberer::LoopInvariantCodeMotion() {
1667 for (int i = graph_->blocks()->length() - 1; i >= 0; --i) {
1668 HBasicBlock* block = graph_->blocks()->at(i);
1669 if (block->IsLoopHeader()) {
1670 GVNFlagSet side_effects = loop_side_effects_[block->block_id()];
1671 TRACE_GVN_2("Try loop invariant motion for block B%d %s\n",
1673 *GetGVNFlagsString(side_effects));
1675 GVNFlagSet accumulated_first_time_depends;
1676 GVNFlagSet accumulated_first_time_changes;
1677 HBasicBlock* last = block->loop_information()->GetLastBackEdge();
// Loop body blocks occupy the id range [header, last back edge].
1678 for (int j = block->block_id(); j <= last->block_id(); ++j) {
1679 ProcessLoopBlock(graph_->blocks()->at(j), block, side_effects,
1680 &accumulated_first_time_depends,
1681 &accumulated_first_time_changes);
// Examines each instruction of one loop-body block and hoists it into the
// loop pre-header when it is loop invariant: it must use GVN, not depend
// on anything the loop changes (|loop_kills|), and all of its operands
// must be defined before the pre-header. HTransitionElementsKind gets a
// special, stricter rule. Instructions left behind feed their effects
// into |first_time_depends|/|first_time_changes|.
1688 void HGlobalValueNumberer::ProcessLoopBlock(
1690 HBasicBlock* loop_header,
1691 GVNFlagSet loop_kills,
1692 GVNFlagSet* first_time_depends,
1693 GVNFlagSet* first_time_changes) {
// The pre-header is the loop header's first (non-back-edge) predecessor.
1694 HBasicBlock* pre_header = loop_header->predecessors()->at(0);
1695 GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(loop_kills);
1696 TRACE_GVN_2("Loop invariant motion for B%d %s\n",
1698 *GetGVNFlagsString(depends_flags));
1699 HInstruction* instr = block->first();
1700 while (instr != NULL) {
1701 HInstruction* next = instr->next();
1702 bool hoisted = false;
1703 if (instr->CheckFlag(HValue::kUseGVN)) {
1704 TRACE_GVN_4("Checking instruction %d (%s) %s. Loop %s\n",
1707 *GetGVNFlagsString(instr->gvn_flags()),
1708 *GetGVNFlagsString(loop_kills));
1709 bool can_hoist = !instr->gvn_flags().ContainsAnyOf(depends_flags);
1710 if (instr->IsTransitionElementsKind()) {
1711 // It's possible to hoist transitions out of a loop as long as the
1712 // hoisting wouldn't move the transition past a DependsOn of one of it's
1713 // changes or any instructions that might change an objects map or
1714 // elements contents.
1715 GVNFlagSet changes = instr->ChangesFlags();
1716 GVNFlagSet hoist_depends_blockers =
1717 HValue::ConvertChangesToDependsFlags(changes);
1718 // In addition to not hoisting transitions above other instructions that
1719 // change dependencies that the transition changes, it must not be
1720 // hoisted above map changes and stores to an elements backing store
1721 // that the transition might change.
1722 GVNFlagSet hoist_change_blockers = changes;
1723 hoist_change_blockers.Add(kChangesMaps);
1724 HTransitionElementsKind* trans = HTransitionElementsKind::cast(instr);
1725 if (trans->original_map()->has_fast_double_elements()) {
1726 hoist_change_blockers.Add(kChangesDoubleArrayElements);
1728 if (trans->transitioned_map()->has_fast_double_elements()) {
1729 hoist_change_blockers.Add(kChangesArrayElements);
1731 if (FLAG_trace_gvn) {
1732 GVNFlagSet hoist_blockers = hoist_depends_blockers;
1733 hoist_blockers.Add(hoist_change_blockers);
1734 GVNFlagSet first_time = *first_time_changes;
1735 first_time.Add(*first_time_depends);
1736 TRACE_GVN_4("Checking dependencies on HTransitionElementsKind "
1737 "%d (%s) hoist blockers: %s; "
1738 "first-time accumulated: %s\n",
1741 *GetGVNFlagsString(hoist_blockers),
1742 *GetGVNFlagsString(first_time));
1744 // It's possible to hoist transition from the current loop loop only if
1745 // they dominate all of the successor blocks in the same loop and there
1746 // are not any instructions that have Changes/DependsOn that intervene
1747 // between it and the beginning of the loop header.
1748 bool in_nested_loop = block != loop_header &&
1749 ((block->parent_loop_header() != loop_header) ||
1750 block->IsLoopHeader());
1751 can_hoist = !in_nested_loop &&
1752 block->IsLoopSuccessorDominator() &&
1753 !first_time_depends->ContainsAnyOf(hoist_depends_blockers) &&
1754 !first_time_changes->ContainsAnyOf(hoist_change_blockers);
// An instruction is only movable if every operand is loop invariant too.
1758 bool inputs_loop_invariant = true;
1759 for (int i = 0; i < instr->OperandCount(); ++i) {
1760 if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) {
1761 inputs_loop_invariant = false;
1765 if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
1766 TRACE_GVN_1("Hoisting loop invariant instruction %d\n", instr->id());
1767 // Move the instruction out of the loop.
1769 instr->InsertBefore(pre_header->end());
1770 if (instr->HasSideEffects()) removed_side_effects_ = true;
1776 // If an instruction is not hoisted, we have to account for its side
1777 // effects when hoisting later HTransitionElementsKind instructions.
1778 GVNFlagSet previous_depends = *first_time_depends;
1779 GVNFlagSet previous_changes = *first_time_changes;
1780 first_time_depends->Add(instr->DependsOnFlags());
1781 first_time_changes->Add(instr->ChangesFlags());
1782 if (!(previous_depends == *first_time_depends)) {
1783 TRACE_GVN_1("Updated first-time accumulated %s\n",
1784 *GetGVNFlagsString(*first_time_depends));
1786 if (!(previous_changes == *first_time_changes)) {
1787 TRACE_GVN_1("Updated first-time accumulated %s\n",
1788 *GetGVNFlagsString(*first_time_changes));
1796 bool HGlobalValueNumberer::AllowCodeMotion() {
1797 return info()->shared_info()->opt_count() + 1 < Compiler::kDefaultMaxOptCount;
1801 bool HGlobalValueNumberer::ShouldMove(HInstruction* instr,
1802 HBasicBlock* loop_header) {
1803 // If we've disabled code motion or we're in a block that unconditionally
1804 // deoptimizes, don't move any instructions.
1805 return AllowCodeMotion() && !instr->block()->IsDeoptimizing();
// Accumulates the side effects of every block that can lie on a path from
// |dominator| to |dominated| (block ids strictly between the two), by
// walking predecessors recursively. visited_on_paths_ guards against
// visiting a block twice; loop headers additionally contribute their
// whole loop's side effects.
1809 GVNFlagSet HGlobalValueNumberer::CollectSideEffectsOnPathsToDominatedBlock(
1810 HBasicBlock* dominator, HBasicBlock* dominated) {
1811 GVNFlagSet side_effects;
1812 for (int i = 0; i < dominated->predecessors()->length(); ++i) {
1813 HBasicBlock* block = dominated->predecessors()->at(i);
1814 if (dominator->block_id() < block->block_id() &&
1815 block->block_id() < dominated->block_id() &&
1816 visited_on_paths_.Add(block->block_id())) {
1817 side_effects.Add(block_side_effects_[block->block_id()]);
1818 if (block->IsLoopHeader()) {
1819 side_effects.Add(loop_side_effects_[block->block_id()]);
1821 side_effects.Add(CollectSideEffectsOnPathsToDominatedBlock(
1825 return side_effects;
// Core GVN walk over the dominator tree. For |block|: kill map entries
// invalidated by side effects, replace instructions that have an
// equivalent entry in |map|, record dominating side-effect instructions,
// then recurse into dominated blocks with (copies of) the map, killing
// anything that may change on paths between this block and the child.
1829 void HGlobalValueNumberer::AnalyzeBlock(HBasicBlock* block,
1831 HSideEffectMap* dominators) {
1832 TRACE_GVN_2("Analyzing block B%d%s\n",
1834 block->IsLoopHeader() ? " (loop header)" : "");
1836 // If this is a loop header kill everything killed by the loop.
1837 if (block->IsLoopHeader()) {
1838 map->Kill(loop_side_effects_[block->block_id()]);
1841 // Go through all instructions of the current block.
1842 HInstruction* instr = block->first();
1843 while (instr != NULL) {
1844 HInstruction* next = instr->next();
1845 GVNFlagSet flags = instr->ChangesFlags();
1846 if (!flags.IsEmpty()) {
1847 // Clear all instructions in the map that are affected by side effects.
1848 // Store instruction as the dominating one for tracked side effects.
1850 dominators->Store(flags, instr);
1851 TRACE_GVN_2("Instruction %d %s\n", instr->id(),
1852 *GetGVNFlagsString(flags));
1854 if (instr->CheckFlag(HValue::kUseGVN)) {
1855 ASSERT(!instr->HasObservableSideEffects());
1856 HValue* other = map->Lookup(instr);
1857 if (other != NULL) {
// Found an equivalent earlier value: replace this instruction with it.
1858 ASSERT(instr->Equals(other) && other->Equals(instr));
1859 TRACE_GVN_4("Replacing value %d (%s) with value %d (%s)\n",
1864 if (instr->HasSideEffects()) removed_side_effects_ = true;
1865 instr->DeleteAndReplaceWith(other);
// For instructions that track side-effect dominators, wire each relevant
// tracked side effect to the instruction currently dominating it.
1870 if (instr->CheckFlag(HValue::kTrackSideEffectDominators)) {
1871 for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
1872 HValue* other = dominators->at(i);
1873 GVNFlag changes_flag = HValue::ChangesFlagFromInt(i);
1874 GVNFlag depends_on_flag = HValue::DependsOnFlagFromInt(i);
1875 if (instr->DependsOnFlags().Contains(depends_on_flag) &&
1877 TRACE_GVN_5("Side-effect #%d in %d (%s) is dominated by %d (%s)\n",
1883 instr->SetSideEffectDominator(changes_flag, other);
1890 // Recursively continue analysis for all immediately dominated blocks.
1891 int length = block->dominated_blocks()->length();
1892 for (int i = 0; i < length; ++i) {
1893 HBasicBlock* dominated = block->dominated_blocks()->at(i);
1894 // No need to copy the map for the last child in the dominator tree.
1895 HValueMap* successor_map = (i == length - 1) ? map : map->Copy(zone());
1896 HSideEffectMap successor_dominators(dominators);
1898 // Kill everything killed on any path between this block and the
1899 // dominated block. We don't have to traverse these paths if the
1900 // value map and the dominators list is already empty. If the range
1901 // of block ids (block_id, dominated_id) is empty there are no such
1903 if ((!successor_map->IsEmpty() || !successor_dominators.IsEmpty()) &&
1904 block->block_id() + 1 < dominated->block_id()) {
1905 visited_on_paths_.Clear();
1906 GVNFlagSet side_effects_on_all_paths =
1907 CollectSideEffectsOnPathsToDominatedBlock(block, dominated);
1908 successor_map->Kill(side_effects_on_all_paths);
1909 successor_dominators.Kill(side_effects_on_all_paths);
1911 AnalyzeBlock(dominated, successor_map, &successor_dominators);
// Fixed-point pass that chooses concrete representations (tagged,
// integer32, double) for flexible values, based on their inputs and uses.
1916 class HInferRepresentation BASE_EMBEDDED {
1918 explicit HInferRepresentation(HGraph* graph)
// in_worklist_ is a bit vector keyed by value id, mirroring worklist_
// membership to avoid duplicate queue entries.
1921 in_worklist_(graph->GetMaximumValueID(), graph->zone()) { }
1926 Representation TryChange(HValue* current);
1927 void AddToWorklist(HValue* current);
1928 void InferBasedOnInputs(HValue* current);
1929 void AddDependantsToWorklist(HValue* current);
1930 void InferBasedOnUses(HValue* current);
1932 Zone* zone() { return graph_->zone(); }
1935 ZoneList<HValue*> worklist_;
1936 BitVector in_worklist_;
1940 void HInferRepresentation::AddToWorklist(HValue* current) {
1941 if (current->representation().IsSpecialization()) return;
1942 if (!current->CheckFlag(HValue::kFlexibleRepresentation)) return;
1943 if (in_worklist_.Contains(current->id())) return;
1944 worklist_.Add(current);
1945 in_worklist_.Add(current->id());
1949 // This method tries to specialize the representation type of the value
1950 // given as a parameter. The value is asked to infer its representation type
1951 // based on its inputs. If the inferred type is more specialized, then this
1952 // becomes the new representation type of the node.
1953 void HInferRepresentation::InferBasedOnInputs(HValue* current) {
1954 Representation r = current->representation();
// Already specialized values are final; nothing to do.
1955 if (r.IsSpecialization()) return;
1956 ASSERT(current->CheckFlag(HValue::kFlexibleRepresentation));
1957 Representation inferred = current->InferredRepresentation();
1958 if (inferred.IsSpecialization()) {
1959 if (FLAG_trace_representation) {
1960 PrintF("Changing #%d representation %s -> %s based on inputs\n",
1963 inferred.Mnemonic());
1965 current->ChangeRepresentation(inferred);
// A changed representation may affect both users and operands.
1966 AddDependantsToWorklist(current);
1971 void HInferRepresentation::AddDependantsToWorklist(HValue* value) {
1972 for (HUseIterator it(value->uses()); !it.Done(); it.Advance()) {
1973 AddToWorklist(it.value());
1975 for (int i = 0; i < value->OperandCount(); ++i) {
1976 AddToWorklist(value->OperandAt(i));
1981 // This method calculates whether specializing the representation of the value
1982 // given as the parameter has a benefit in terms of less necessary type
1983 // conversions. If there is a benefit, then the representation of the value is
// ... changed accordingly (via TryChange), and dependants are re-queued.
1985 void HInferRepresentation::InferBasedOnUses(HValue* value) {
1986 Representation r = value->representation();
// Skip values that are final or have nothing using them.
1987 if (r.IsSpecialization() || value->HasNoUses()) return;
1988 ASSERT(value->CheckFlag(HValue::kFlexibleRepresentation));
1989 Representation new_rep = TryChange(value);
1990 if (!new_rep.IsNone()) {
1991 if (!value->representation().Equals(new_rep)) {
1992 if (FLAG_trace_representation) {
1993 PrintF("Changing #%d representation %s -> %s based on uses\n",
1996 new_rep.Mnemonic());
1998 value->ChangeRepresentation(new_rep);
1999 AddDependantsToWorklist(value);
2005 Representation HInferRepresentation::TryChange(HValue* value) {
2006 // Array of use counts for each representation.
2007 int use_count[Representation::kNumRepresentations] = { 0 };
2009 for (HUseIterator it(value->uses()); !it.Done(); it.Advance()) {
2010 HValue* use = it.value();
2011 Representation rep = use->RequiredInputRepresentation(it.index());
2012 if (rep.IsNone()) continue;
2013 if (use->IsPhi()) HPhi::cast(use)->AddIndirectUsesTo(&use_count[0]);
2014 use_count[rep.kind()] += use->LoopWeight();
2016 int tagged_count = use_count[Representation::kTagged];
2017 int double_count = use_count[Representation::kDouble];
2018 int int32_count = use_count[Representation::kInteger32];
2019 int non_tagged_count = double_count + int32_count;
2021 // If a non-loop phi has tagged uses, don't convert it to untagged.
2022 if (value->IsPhi() && !value->block()->IsLoopHeader() && tagged_count > 0) {
2023 return Representation::None();
2026 // Prefer unboxing over boxing, the latter is more expensive.
2027 if (tagged_count > non_tagged_count) return Representation::None();
2029 // Prefer Integer32 over Double, if possible.
2030 if (int32_count > 0 && value->IsConvertibleToInteger()) {
2031 return Representation::Integer32();
2034 if (double_count > 0) return Representation::Double();
2036 return Representation::None();
// Drives representation inference: (1) count real uses per phi, (2) find
// connected phi components via a fixed point over the def-use relation,
// (3) aggregate use counts and convertibility across each component, then
// seed the worklist with all phis and instructions and iterate input/use
// based inference to a fixed point.
2040 void HInferRepresentation::Analyze() {
2041 HPhase phase("H_Infer representations", graph_);
2043 // (1) Initialize bit vectors and count real uses. Each phi gets a
2044 // bit-vector of length <number of phis>.
2045 const ZoneList<HPhi*>* phi_list = graph_->phi_list();
2046 int phi_count = phi_list->length();
2047 ZoneList<BitVector*> connected_phis(phi_count);
2048 for (int i = 0; i < phi_count; ++i) {
2049 phi_list->at(i)->InitRealUses(i);
2050 BitVector* connected_set = new(zone()) BitVector(phi_count, graph_->zone());
// Every phi starts connected only to itself.
2051 connected_set->Add(i);
2052 connected_phis.Add(connected_set);
2055 // (2) Do a fixed point iteration to find the set of connected phis. A
2056 // phi is connected to another phi if its value is used either directly or
2057 // indirectly through a transitive closure of the def-use relation.
2061 // We normally have far more "forward edges" than "backward edges",
2062 // so we terminate faster when we walk backwards.
2063 for (int i = phi_count - 1; i >= 0; --i) {
2064 HPhi* phi = phi_list->at(i);
2065 for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) {
2066 HValue* use = it.value();
// NOTE(review): a guard restricting this cast to phi uses appears to be
// elided here — confirm against the full source.
2068 int id = HPhi::cast(use)->phi_id();
2069 if (connected_phis[i]->UnionIsChanged(*connected_phis[id]))
2076 // (3) Use the phi reachability information from step 2 to
2077 // (a) sum up the non-phi use counts of all connected phis.
2078 // (b) push information about values which can't be converted to integer
2079 // without deoptimization through the phi use-def chains, avoiding
2080 // unnecessary deoptimizations later.
2081 for (int i = 0; i < phi_count; ++i) {
2082 HPhi* phi = phi_list->at(i);
2083 bool cti = phi->AllOperandsConvertibleToInteger();
2084 for (BitVector::Iterator it(connected_phis.at(i));
2087 int index = it.Current();
2088 HPhi* it_use = phi_list->at(it.Current());
2089 if (index != i) phi->AddNonPhiUsesFrom(it_use); // Don't count twice!
2090 if (!cti) it_use->set_is_convertible_to_integer(false);
2094 // Initialize work list
2095 for (int i = 0; i < graph_->blocks()->length(); ++i) {
2096 HBasicBlock* block = graph_->blocks()->at(i);
2097 const ZoneList<HPhi*>* phis = block->phis();
2098 for (int j = 0; j < phis->length(); ++j) {
2099 AddToWorklist(phis->at(j));
2102 HInstruction* current = block->first();
2103 while (current != NULL) {
2104 AddToWorklist(current);
2105 current = current->next();
2109 // Do a fixed point iteration, trying to improve representations
2110 while (!worklist_.is_empty()) {
2111 HValue* current = worklist_.RemoveLast();
2112 in_worklist_.Remove(current->id());
2113 InferBasedOnInputs(current);
2114 InferBasedOnUses(current);
2119 void HGraph::InitializeInferredTypes() {
2120 HPhase phase("H_Inferring types", this);
2121 InitializeInferredTypes(0, this->blocks_.length() - 1);
// Updates inferred types for all phis and instructions in blocks
// [from_inclusive, to_inclusive]. Loop bodies are processed by a
// recursive call first; afterwards the loop header's phis are re-inferred
// (via InferTypes) since their inputs from back edges are only known once
// the body is done.
2125 void HGraph::InitializeInferredTypes(int from_inclusive, int to_inclusive) {
2126 for (int i = from_inclusive; i <= to_inclusive; ++i) {
2127 HBasicBlock* block = blocks_[i];
2129 const ZoneList<HPhi*>* phis = block->phis();
2130 for (int j = 0; j < phis->length(); j++) {
2131 phis->at(j)->UpdateInferredType();
2134 HInstruction* current = block->first();
2135 while (current != NULL) {
2136 current->UpdateInferredType();
2137 current = current->next();
2140 if (block->IsLoopHeader()) {
2141 HBasicBlock* last_back_edge =
2142 block->loop_information()->GetLastBackEdge();
2143 InitializeInferredTypes(i + 1, last_back_edge->block_id());
2144 // Skip all blocks already processed by the recursive call.
2145 i = last_back_edge->block_id();
2146 // Update phis of the loop header now after the whole loop body is
2147 // guaranteed to be processed.
2148 ZoneList<HValue*> worklist(block->phis()->length());
2149 for (int j = 0; j < block->phis()->length(); ++j) {
2150 worklist.Add(block->phis()->at(j));
2152 InferTypes(&worklist);
// Propagates "cannot be minus zero" requirements backwards from |value|
// through phis, multiplications and divisions, inserting the necessary
// checks via EnsureAndPropagateNotMinusZero. |visited| prevents revisiting
// values (and guards against cycles through phis).
2158 void HGraph::PropagateMinusZeroChecks(HValue* value, BitVector* visited) {
2159 HValue* current = value;
2160 while (current != NULL) {
2161 if (visited->Contains(current->id())) return;
2163 // For phis, we must propagate the check to all of its inputs.
2164 if (current->IsPhi()) {
2165 visited->Add(current->id());
2166 HPhi* phi = HPhi::cast(current);
2167 for (int i = 0; i < phi->OperandCount(); ++i) {
2168 PropagateMinusZeroChecks(phi->OperandAt(i), visited);
2173 // For multiplication and division, we must propagate to the left and
// ... the right operand.
2175 if (current->IsMul()) {
2176 HMul* mul = HMul::cast(current);
2177 mul->EnsureAndPropagateNotMinusZero(visited);
2178 PropagateMinusZeroChecks(mul->left(), visited);
2179 PropagateMinusZeroChecks(mul->right(), visited);
2180 } else if (current->IsDiv()) {
2181 HDiv* div = HDiv::cast(current);
2182 div->EnsureAndPropagateNotMinusZero(visited);
2183 PropagateMinusZeroChecks(div->left(), visited);
2184 PropagateMinusZeroChecks(div->right(), visited);
// Continue along the chain this value designates (e.g. through a change
// instruction); EnsureAndPropagateNotMinusZero returns the next value.
2187 current = current->EnsureAndPropagateNotMinusZero(visited);
2192 void HGraph::InsertRepresentationChangeForUse(HValue* value,
2195 Representation to) {
2196 // Insert the representation change right before its use. For phi-uses we
2197 // insert at the end of the corresponding predecessor.
2198 HInstruction* next = NULL;
2199 if (use_value->IsPhi()) {
2200 next = use_value->block()->predecessors()->at(use_index)->end();
2202 next = HInstruction::cast(use_value);
2205 // For constants we try to make the representation change at compile
2206 // time. When a representation change is not possible without loss of
2207 // information we treat constants like normal instructions and insert the
2208 // change instructions for them.
2209 HInstruction* new_value = NULL;
2210 bool is_truncating = use_value->CheckFlag(HValue::kTruncatingToInt32);
2211 bool deoptimize_on_undefined =
2212 use_value->CheckFlag(HValue::kDeoptimizeOnUndefined);
2213 if (value->IsConstant()) {
2214 HConstant* constant = HConstant::cast(value);
2215 // Try to create a new copy of the constant with the new representation.
2216 new_value = is_truncating
2217 ? constant->CopyToTruncatedInt32()
2218 : constant->CopyToRepresentation(to);
2221 if (new_value == NULL) {
2222 new_value = new(zone()) HChange(value, to,
2223 is_truncating, deoptimize_on_undefined);
2226 new_value->InsertBefore(next);
2227 use_value->SetOperandAt(use_index, new_value);
2231 void HGraph::InsertRepresentationChangesForValue(HValue* value) {
2232 Representation r = value->representation();
2233 if (r.IsNone()) return;
2234 if (value->HasNoUses()) return;
2236 for (HUseIterator it(value->uses()); !it.Done(); it.Advance()) {
2237 HValue* use_value = it.value();
2238 int use_index = it.index();
2239 Representation req = use_value->RequiredInputRepresentation(use_index);
2240 if (req.IsNone() || req.Equals(r)) continue;
2241 InsertRepresentationChangeForUse(value, use_value, use_index, req);
2243 if (value->HasNoUses()) {
2244 ASSERT(value->IsConstant());
2245 value->DeleteAndReplaceWith(NULL);
2248 // The only purpose of a HForceRepresentation is to represent the value
2249 // after the (possible) HChange instruction. We make it disappear.
2250 if (value->IsForceRepresentation()) {
2251 value->DeleteAndReplaceWith(HForceRepresentation::cast(value)->value());
2256 void HGraph::InsertRepresentationChanges() {
2257 HPhase phase("H_Representation changes", this);
2259 // Compute truncation flag for phis: Initially assume that all
2260 // int32-phis allow truncation and iteratively remove the ones that
2261 // are used in an operation that does not allow a truncating
2263 // TODO(fschneider): Replace this with a worklist-based iteration.
2264 for (int i = 0; i < phi_list()->length(); i++) {
2265 HPhi* phi = phi_list()->at(i);
2266 if (phi->representation().IsInteger32()) {
2267 phi->SetFlag(HValue::kTruncatingToInt32);
2273 for (int i = 0; i < phi_list()->length(); i++) {
2274 HPhi* phi = phi_list()->at(i);
2275 if (!phi->CheckFlag(HValue::kTruncatingToInt32)) continue;
2276 if (!phi->CheckUsesForFlag(HValue::kTruncatingToInt32)) {
2277 phi->ClearFlag(HValue::kTruncatingToInt32);
2283 for (int i = 0; i < blocks_.length(); ++i) {
2284 // Process phi instructions first.
2285 const ZoneList<HPhi*>* phis = blocks_[i]->phis();
2286 for (int j = 0; j < phis->length(); j++) {
2287 InsertRepresentationChangesForValue(phis->at(j));
2290 // Process normal instructions.
2291 HInstruction* current = blocks_[i]->first();
2292 while (current != NULL) {
2293 InsertRepresentationChangesForValue(current);
2294 current = current->next();
2300 void HGraph::RecursivelyMarkPhiDeoptimizeOnUndefined(HPhi* phi) {
2301 if (phi->CheckFlag(HValue::kDeoptimizeOnUndefined)) return;
2302 phi->SetFlag(HValue::kDeoptimizeOnUndefined);
2303 for (int i = 0; i < phi->OperandCount(); ++i) {
2304 HValue* input = phi->OperandAt(i);
2305 if (input->IsPhi()) {
2306 RecursivelyMarkPhiDeoptimizeOnUndefined(HPhi::cast(input));
2312 void HGraph::MarkDeoptimizeOnUndefined() {
2313 HPhase phase("H_MarkDeoptimizeOnUndefined", this);
2314 // Compute DeoptimizeOnUndefined flag for phis.
2315 // Any phi that can reach a use with DeoptimizeOnUndefined set must
2316 // have DeoptimizeOnUndefined set. Currently only HCompareIDAndBranch, with
2317 // double input representation, has this flag set.
2318 // The flag is used by HChange tagged->double, which must deoptimize
2319 // if one of its uses has this flag set.
2320 for (int i = 0; i < phi_list()->length(); i++) {
2321 HPhi* phi = phi_list()->at(i);
2322 if (phi->representation().IsDouble()) {
2323 for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) {
2324 if (it.value()->CheckFlag(HValue::kDeoptimizeOnUndefined)) {
2325 RecursivelyMarkPhiDeoptimizeOnUndefined(phi);
2334 void HGraph::ComputeMinusZeroChecks() {
2335 BitVector visited(GetMaximumValueID(), zone());
2336 for (int i = 0; i < blocks_.length(); ++i) {
2337 for (HInstruction* current = blocks_[i]->first();
2339 current = current->next()) {
2340 if (current->IsChange()) {
2341 HChange* change = HChange::cast(current);
2342 // Propagate flags for negative zero checks upwards from conversions
2343 // int32-to-tagged and int32-to-double.
2344 Representation from = change->value()->representation();
2345 ASSERT(from.Equals(change->from()));
2346 if (from.IsInteger32()) {
2347 ASSERT(change->to().IsTagged() || change->to().IsDouble());
2348 ASSERT(visited.IsEmpty());
2349 PropagateMinusZeroChecks(change->value(), &visited);
2358 // Implementation of utility class to encapsulate the translation state for
2359 // a (possibly inlined) function.
// Per-(possibly inlined)-function translation state; constructing one pushes
// it onto the builder's state stack, the destructor pops it.
// NOTE(review): this paste has dropped lines — at least three initializer-list
// entries (around original lines 2364/2366/2371, presumably owner_, oracle_,
// and one more member), the "} else {" before 2386, and the closing braces of
// the body. Recover them from the upstream hydrogen.cc before compiling.
2360 FunctionState::FunctionState(HGraphBuilder* owner,
2361 CompilationInfo* info,
2362 TypeFeedbackOracle* oracle,
2363 ReturnHandlingFlag return_handling)
2365 compilation_info_(info),
2367 call_context_(NULL),
2368 return_handling_(return_handling),
2369 function_return_(NULL),
2370 test_context_(NULL),
2372 arguments_elements_(NULL),
2373 outer_(owner->function_state()) {
2374 if (outer_ != NULL) {
2375 // State for an inline function.
2376 if (owner->ast_context()->IsTest()) {
// Inlined call in a test context: returns jump to dedicated true/false
// blocks instead of a single return target.
2377 HBasicBlock* if_true = owner->graph()->CreateBasicBlock();
2378 HBasicBlock* if_false = owner->graph()->CreateBasicBlock();
2379 if_true->MarkAsInlineReturnTarget();
2380 if_false->MarkAsInlineReturnTarget();
2381 Expression* cond = TestContext::cast(owner->ast_context())->condition();
2382 // The AstContext constructor pushed on the context stack. This newed
2383 // instance is the reason that AstContext can't be BASE_EMBEDDED.
2384 test_context_ = new TestContext(owner, cond, if_true, if_false);
// Non-test inlined call: a single return-target block.
2386 function_return_ = owner->graph()->CreateBasicBlock();
2387 function_return()->MarkAsInlineReturnTarget();
2389 // Set this after possibly allocating a new TestContext above.
2390 call_context_ = owner->ast_context();
2393 // Push on the state stack.
2394 owner->set_function_state(this);
2398 FunctionState::~FunctionState() {
2399 delete test_context_;
2400 owner_->set_function_state(outer_);
2404 // Implementation of utility classes to represent an expression's context in
// the AST.
2406 AstContext::AstContext(HGraphBuilder* owner, Expression::Context kind)
2409 outer_(owner->ast_context()),
2410 for_typeof_(false) {
2411 owner->set_ast_context(this); // Push.
2413 ASSERT(owner->environment()->frame_type() == JS_FUNCTION);
2414 original_length_ = owner->environment()->length();
2419 AstContext::~AstContext() {
2420 owner_->set_ast_context(outer_); // Pop.
2424 EffectContext::~EffectContext() {
2425 ASSERT(owner()->HasStackOverflow() ||
2426 owner()->current_block() == NULL ||
2427 (owner()->environment()->length() == original_length_ &&
2428 owner()->environment()->frame_type() == JS_FUNCTION));
2432 ValueContext::~ValueContext() {
2433 ASSERT(owner()->HasStackOverflow() ||
2434 owner()->current_block() == NULL ||
2435 (owner()->environment()->length() == original_length_ + 1 &&
2436 owner()->environment()->frame_type() == JS_FUNCTION));
2440 void EffectContext::ReturnValue(HValue* value) {
2441 // The value is simply ignored.
2445 void ValueContext::ReturnValue(HValue* value) {
2446 // The value is tracked in the bailout environment, and communicated
2447 // through the environment as the result of the expression.
2448 if (!arguments_allowed() && value->CheckFlag(HValue::kIsArguments)) {
2449 owner()->Bailout("bad value context for arguments value");
2451 owner()->Push(value);
2455 void TestContext::ReturnValue(HValue* value) {
2460 void EffectContext::ReturnInstruction(HInstruction* instr, int ast_id) {
2461 ASSERT(!instr->IsControlInstruction());
2462 owner()->AddInstruction(instr);
2463 if (instr->HasObservableSideEffects()) owner()->AddSimulate(ast_id);
2467 void EffectContext::ReturnControl(HControlInstruction* instr, int ast_id) {
2468 ASSERT(!instr->HasObservableSideEffects());
2469 HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
2470 HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
2471 instr->SetSuccessorAt(0, empty_true);
2472 instr->SetSuccessorAt(1, empty_false);
2473 owner()->current_block()->Finish(instr);
2474 HBasicBlock* join = owner()->CreateJoin(empty_true, empty_false, ast_id);
2475 owner()->set_current_block(join);
2479 void ValueContext::ReturnInstruction(HInstruction* instr, int ast_id) {
2480 ASSERT(!instr->IsControlInstruction());
2481 if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
2482 return owner()->Bailout("bad value context for arguments object value");
2484 owner()->AddInstruction(instr);
2485 owner()->Push(instr);
2486 if (instr->HasObservableSideEffects()) owner()->AddSimulate(ast_id);
2490 void ValueContext::ReturnControl(HControlInstruction* instr, int ast_id) {
2491 ASSERT(!instr->HasObservableSideEffects());
2492 if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
2493 return owner()->Bailout("bad value context for arguments object value");
2495 HBasicBlock* materialize_false = owner()->graph()->CreateBasicBlock();
2496 HBasicBlock* materialize_true = owner()->graph()->CreateBasicBlock();
2497 instr->SetSuccessorAt(0, materialize_true);
2498 instr->SetSuccessorAt(1, materialize_false);
2499 owner()->current_block()->Finish(instr);
2500 owner()->set_current_block(materialize_true);
2501 owner()->Push(owner()->graph()->GetConstantTrue());
2502 owner()->set_current_block(materialize_false);
2503 owner()->Push(owner()->graph()->GetConstantFalse());
2505 owner()->CreateJoin(materialize_true, materialize_false, ast_id);
2506 owner()->set_current_block(join);
2510 void TestContext::ReturnInstruction(HInstruction* instr, int ast_id) {
2511 ASSERT(!instr->IsControlInstruction());
2512 HGraphBuilder* builder = owner();
2513 builder->AddInstruction(instr);
2514 // We expect a simulate after every expression with side effects, though
2515 // this one isn't actually needed (and wouldn't work if it were targeted).
2516 if (instr->HasObservableSideEffects()) {
2517 builder->Push(instr);
2518 builder->AddSimulate(ast_id);
2525 void TestContext::ReturnControl(HControlInstruction* instr, int ast_id) {
2526 ASSERT(!instr->HasObservableSideEffects());
2527 HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
2528 HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
2529 instr->SetSuccessorAt(0, empty_true);
2530 instr->SetSuccessorAt(1, empty_false);
2531 owner()->current_block()->Finish(instr);
2532 empty_true->Goto(if_true(), owner()->function_state());
2533 empty_false->Goto(if_false(), owner()->function_state());
2534 owner()->set_current_block(NULL);
2538 void TestContext::BuildBranch(HValue* value) {
2539 // We expect the graph to be in edge-split form: there is no edge that
2540 // connects a branch node to a join node. We conservatively ensure that
2541 // property by always adding an empty block on the outgoing edges of this
2543 HGraphBuilder* builder = owner();
2544 if (value != NULL && value->CheckFlag(HValue::kIsArguments)) {
2545 builder->Bailout("arguments object value in a test context");
2547 HBasicBlock* empty_true = builder->graph()->CreateBasicBlock();
2548 HBasicBlock* empty_false = builder->graph()->CreateBasicBlock();
2549 unsigned test_id = condition()->test_id();
2550 ToBooleanStub::Types expected(builder->oracle()->ToBooleanTypes(test_id));
2551 HBranch* test = new(zone()) HBranch(value, empty_true, empty_false, expected);
2552 builder->current_block()->Finish(test);
2554 empty_true->Goto(if_true(), owner()->function_state());
2555 empty_false->Goto(if_false(), owner()->function_state());
2556 builder->set_current_block(NULL);
// HGraphBuilder infrastructure for bailing out and checking bailouts.
// Wrapped in do/while(false) so the macros behave as single statements.
#define CHECK_BAILOUT(call)                     \
  do {                                          \
    call;                                       \
    if (HasStackOverflow()) return;             \
  } while (false)


#define CHECK_ALIVE(call)                                       \
  do {                                                          \
    call;                                                       \
    if (HasStackOverflow() || current_block() == NULL) return;  \
  } while (false)
2575 void HGraphBuilder::Bailout(const char* reason) {
2576 if (FLAG_trace_bailout) {
2577 SmartArrayPointer<char> name(
2578 info()->shared_info()->DebugName()->ToCString());
2579 PrintF("Bailout in HGraphBuilder: @\"%s\": %s\n", *name, reason);
2585 void HGraphBuilder::VisitForEffect(Expression* expr) {
2586 EffectContext for_effect(this);
2591 void HGraphBuilder::VisitForValue(Expression* expr, ArgumentsAllowedFlag flag) {
2592 ValueContext for_value(this, flag);
2597 void HGraphBuilder::VisitForTypeOf(Expression* expr) {
2598 ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
2599 for_value.set_for_typeof(true);
2605 void HGraphBuilder::VisitForControl(Expression* expr,
2606 HBasicBlock* true_block,
2607 HBasicBlock* false_block) {
2608 TestContext for_test(this, expr, true_block, false_block);
2613 HValue* HGraphBuilder::VisitArgument(Expression* expr) {
2614 VisitForValue(expr);
2615 if (HasStackOverflow() || current_block() == NULL) return NULL;
2616 HValue* value = Pop();
2617 Push(AddInstruction(new(zone()) HPushArgument(value)));
2622 void HGraphBuilder::VisitArgumentList(ZoneList<Expression*>* arguments) {
2623 for (int i = 0; i < arguments->length(); i++) {
2624 CHECK_ALIVE(VisitArgument(arguments->at(i)));
2629 void HGraphBuilder::VisitExpressions(ZoneList<Expression*>* exprs) {
2630 for (int i = 0; i < exprs->length(); ++i) {
2631 CHECK_ALIVE(VisitForValue(exprs->at(i)));
// Builds the Hydrogen graph for the current function, then runs the
// optimization pipeline over it (phi cleanup, representation inference,
// type inference, GVN, range analysis, bounds-check elimination, ...).
// Returns the graph, or NULL when compilation bailed out.
// NOTE(review): this paste drops lines throughout — e.g. the "{" opening the
// HPhase scopes, "return NULL;" after each Bailout, "SetUpScope(scope);",
// "AddInstruction(" before the HStackCheck, DEBUG guards around Verify, and
// the trailing "sce.Process(); return graph();" — restore from upstream
// before compiling.
2636 HGraph* HGraphBuilder::CreateGraph() {
2637 graph_ = new(zone()) HGraph(info());
2638 if (FLAG_hydrogen_stats) HStatistics::Instance()->Initialize(info());
2641 HPhase phase("H_Block building");
2642 current_block_ = graph()->entry_block();
2644 Scope* scope = info()->scope();
2645 if (scope->HasIllegalRedeclaration()) {
2646 Bailout("function with illegal redeclaration");
2649 if (scope->calls_eval()) {
2650 Bailout("function calls eval");
2655 // Add an edge to the body entry. This is warty: the graph's start
2656 // environment will be used by the Lithium translation as the initial
2657 // environment on graph entry, but it has now been mutated by the
2658 // Hydrogen translation of the instructions in the start block. This
2659 // environment uses values which have not been defined yet. These
2660 // Hydrogen instructions will then be replayed by the Lithium
2661 // translation, so they cannot have an environment effect. The edge to
2662 // the body's entry block (along with some special logic for the start
2663 // block in HInstruction::InsertAfter) seals the start block from
2664 // getting unwanted instructions inserted.
2666 // TODO(kmillikin): Fix this. Stop mutating the initial environment.
2667 // Make the Hydrogen instructions in the initial block into Hydrogen
2668 // values (but not instructions), present in the initial environment and
2669 // not replayed by the Lithium translation.
2670 HEnvironment* initial_env = environment()->CopyWithoutHistory();
2671 HBasicBlock* body_entry = CreateBasicBlock(initial_env);
2672 current_block()->Goto(body_entry);
2673 body_entry->SetJoinId(AstNode::kFunctionEntryId);
2674 set_current_block(body_entry);
2676 // Handle implicit declaration of the function name in named function
2677 // expressions before other declarations.
2678 if (scope->is_function_scope() && scope->function() != NULL) {
2679 VisitVariableDeclaration(scope->function());
2681 VisitDeclarations(scope->declarations());
2682 AddSimulate(AstNode::kDeclarationsId);
2684 HValue* context = environment()->LookupContext();
2686 new(zone()) HStackCheck(context, HStackCheck::kFunctionEntry));
2688 VisitStatements(info()->function()->body());
2689 if (HasStackOverflow()) return NULL;
2691 if (current_block() != NULL) {
2692 HReturn* instr = new(zone()) HReturn(graph()->GetConstantUndefined());
2693 current_block()->FinishExit(instr);
2694 set_current_block(NULL);
2698 graph()->OrderBlocks();
2699 graph()->AssignDominators();
2702 // Do a full verify after building the graph and computing dominators.
2703 graph()->Verify(true);
2706 graph()->PropagateDeoptimizingMark();
2707 if (!graph()->CheckConstPhiUses()) {
2708 Bailout("Unsupported phi use of const variable");
2711 graph()->EliminateRedundantPhis();
2712 if (!graph()->CheckArgumentsPhiUses()) {
2713 Bailout("Unsupported phi use of arguments");
2716 if (FLAG_eliminate_dead_phis) graph()->EliminateUnreachablePhis();
2717 graph()->CollectPhis();
// Wire OSR entry values to the phis that merge them.
2719 if (graph()->has_osr_loop_entry()) {
2720 const ZoneList<HPhi*>* phis = graph()->osr_loop_entry()->phis();
2721 for (int j = 0; j < phis->length(); j++) {
2722 HPhi* phi = phis->at(j);
2723 graph()->osr_values()->at(phi->merged_index())->set_incoming_value(phi);
2727 HInferRepresentation rep(graph());
2730 graph()->MarkDeoptimizeOnUndefined();
2731 graph()->InsertRepresentationChanges();
2733 graph()->InitializeInferredTypes();
2734 graph()->Canonicalize();
2736 // Perform common subexpression elimination and loop-invariant code motion.
2738 HPhase phase("H_Global value numbering", graph());
2739 HGlobalValueNumberer gvn(graph(), info());
2740 bool removed_side_effects = gvn.Analyze();
2741 // Trigger a second analysis pass to further eliminate duplicate values that
2742 // could only be discovered by removing side-effect-generating instructions
2743 // during the first pass.
2744 if (FLAG_smi_only_arrays && removed_side_effects) {
2745 removed_side_effects = gvn.Analyze();
2746 ASSERT(!removed_side_effects);
2750 if (FLAG_use_range) {
2751 HRangeAnalysis rangeAnalysis(graph());
2752 rangeAnalysis.Analyze();
2754 graph()->ComputeMinusZeroChecks();
2756 // Eliminate redundant stack checks on backwards branches.
2757 HStackCheckEliminator sce(graph());
2760 graph()->EliminateRedundantBoundsChecks();
2761 graph()->DehoistSimpleArrayIndexComputations();
2767 // We try to "factor up" HBoundsCheck instructions towards the root of the
2769 // For now we handle checks where the index is like "exp + int32value".
2770 // If in the dominator tree we check "exp + v1" and later (dominated)
2771 // "exp + v2", if v2 <= v1 we can safely remove the second check, and if
2772 // v2 > v1 we can use v2 in the 1st check and again remove the second.
2773 // To do so we keep a dictionary of all checks where the key is the pair
2775 // The class BoundsCheckKey represents this key.
2776 class BoundsCheckKey : public ZoneObject {
2778 HValue* IndexBase() const { return index_base_; }
2779 HValue* Length() const { return length_; }
2782 return static_cast<uint32_t>(index_base_->Hashcode() ^ length_->Hashcode());
2785 static BoundsCheckKey* Create(Zone* zone,
2786 HBoundsCheck* check,
2788 HValue* index_base = NULL;
2789 HConstant* constant = NULL;
2790 bool is_sub = false;
2792 if (check->index()->IsAdd()) {
2793 HAdd* index = HAdd::cast(check->index());
2794 if (index->left()->IsConstant()) {
2795 constant = HConstant::cast(index->left());
2796 index_base = index->right();
2797 } else if (index->right()->IsConstant()) {
2798 constant = HConstant::cast(index->right());
2799 index_base = index->left();
2801 } else if (check->index()->IsSub()) {
2802 HSub* index = HSub::cast(check->index());
2804 if (index->left()->IsConstant()) {
2805 constant = HConstant::cast(index->left());
2806 index_base = index->right();
2807 } else if (index->right()->IsConstant()) {
2808 constant = HConstant::cast(index->right());
2809 index_base = index->left();
2813 if (constant != NULL && constant->HasInteger32Value()) {
2814 *offset = is_sub ? - constant->Integer32Value()
2815 : constant->Integer32Value();
2818 index_base = check->index();
2821 return new(zone) BoundsCheckKey(index_base, check->length());
2825 BoundsCheckKey(HValue* index_base, HValue* length)
2826 : index_base_(index_base),
2829 HValue* index_base_;
2834 // Data about each HBoundsCheck that can be eliminated or moved.
2835 // It is the "value" in the dictionary indexed by "base-index, length"
2836 // (the key is BoundsCheckKey).
2837 // We scan the code with a dominator tree traversal.
2838 // Traversing the dominator tree we keep a stack (implemented as a singly
2839 // linked list) of "data" for each basic block that contains a relevant check
2840 // with the same key (the dictionary holds the head of the list).
2841 // We also keep all the "data" created for a given basic block in a list, and
2842 // use it to "clean up" the dictionary when backtracking in the dominator tree
2844 // This way each dictionary entry always directly points to the check that
2845 // is dominating the code being examined now.
2846 // We also track the current "offset" of the index expression and use it to
2847 // decide if any check is already "covered" (so it can be removed) or not.
2848 class BoundsCheckBbData: public ZoneObject {
2850 BoundsCheckKey* Key() const { return key_; }
2851 int32_t LowerOffset() const { return lower_offset_; }
2852 int32_t UpperOffset() const { return upper_offset_; }
2853 HBasicBlock* BasicBlock() const { return basic_block_; }
2854 HBoundsCheck* Check() const { return check_; }
2855 BoundsCheckBbData* NextInBasicBlock() const { return next_in_bb_; }
2856 BoundsCheckBbData* FatherInDominatorTree() const { return father_in_dt_; }
2858 bool OffsetIsCovered(int32_t offset) const {
2859 return offset >= LowerOffset() && offset <= UpperOffset();
2862 // This method removes new_check and modifies the current check so that it
2863 // also "covers" what new_check covered.
2864 // The obvious precondition is that new_check follows Check() in the
2865 // same basic block, and that new_offset is not covered (otherwise we
2866 // could simply remove new_check).
2867 // As a consequence LowerOffset() or UpperOffset() change (the covered
2870 // In the general case the check covering the current range should be like
2871 // these two checks:
2872 // 0 <= Key()->IndexBase() + LowerOffset()
2873 // Key()->IndexBase() + UpperOffset() < Key()->Length()
2875 // We can transform the second check like this:
2876 // Key()->IndexBase() + LowerOffset() <
2877 // Key()->Length() + (LowerOffset() - UpperOffset())
2878 // so we can handle both checks with a single unsigned comparison.
2880 // The bulk of this method changes Check()->index() and Check()->length()
2881 // replacing them with new HAdd instructions to perform the transformation
2883 void CoverCheck(HBoundsCheck* new_check,
2884 int32_t new_offset) {
2885 ASSERT(new_check->index()->representation().IsInteger32());
2887 if (new_offset > upper_offset_) {
2888 upper_offset_ = new_offset;
2889 } else if (new_offset < lower_offset_) {
2890 lower_offset_ = new_offset;
2895 BuildOffsetAdd(&added_index_,
2896 &added_index_offset_,
2898 new_check->index()->representation(),
2900 Check()->SetOperandAt(0, added_index_);
2901 BuildOffsetAdd(&added_length_,
2902 &added_length_offset_,
2904 new_check->length()->representation(),
2905 lower_offset_ - upper_offset_);
2906 Check()->SetOperandAt(1, added_length_);
2908 new_check->DeleteAndReplaceWith(NULL);
2911 void RemoveZeroOperations() {
2912 RemoveZeroAdd(&added_index_, &added_index_offset_);
2913 RemoveZeroAdd(&added_length_, &added_length_offset_);
2916 BoundsCheckBbData(BoundsCheckKey* key,
2917 int32_t lower_offset,
2918 int32_t upper_offset,
2920 HBoundsCheck* check,
2921 BoundsCheckBbData* next_in_bb,
2922 BoundsCheckBbData* father_in_dt)
2924 lower_offset_(lower_offset),
2925 upper_offset_(upper_offset),
2928 added_index_offset_(NULL),
2930 added_length_offset_(NULL),
2931 added_length_(NULL),
2932 next_in_bb_(next_in_bb),
2933 father_in_dt_(father_in_dt) { }
2936 BoundsCheckKey* key_;
2937 int32_t lower_offset_;
2938 int32_t upper_offset_;
2939 HBasicBlock* basic_block_;
2940 HBoundsCheck* check_;
2941 HConstant* added_index_offset_;
2943 HConstant* added_length_offset_;
2944 HAdd* added_length_;
2945 BoundsCheckBbData* next_in_bb_;
2946 BoundsCheckBbData* father_in_dt_;
2948 void BuildOffsetAdd(HAdd** add,
2949 HConstant** constant,
2950 HValue* original_value,
2951 Representation representation,
2952 int32_t new_offset) {
2953 HConstant* new_constant = new(BasicBlock()->zone())
2954 HConstant(Handle<Object>(Smi::FromInt(new_offset)),
2955 Representation::Integer32());
2957 new_constant->InsertBefore(Check());
2958 *add = new(BasicBlock()->zone()) HAdd(NULL,
2961 (*add)->AssumeRepresentation(representation);
2962 (*add)->InsertBefore(Check());
2964 new_constant->InsertBefore(*add);
2965 (*constant)->DeleteAndReplaceWith(new_constant);
2967 *constant = new_constant;
2970 void RemoveZeroAdd(HAdd** add, HConstant** constant) {
2971 if (*add != NULL && (*constant)->Integer32Value() == 0) {
2972 (*add)->DeleteAndReplaceWith((*add)->left());
2973 (*constant)->DeleteAndReplaceWith(NULL);
2979 static bool BoundsCheckKeyMatch(void* key1, void* key2) {
2980 BoundsCheckKey* k1 = static_cast<BoundsCheckKey*>(key1);
2981 BoundsCheckKey* k2 = static_cast<BoundsCheckKey*>(key2);
2982 return k1->IndexBase() == k2->IndexBase() && k1->Length() == k2->Length();
2986 class BoundsCheckTable : private ZoneHashMap {
2988 BoundsCheckBbData** LookupOrInsert(BoundsCheckKey* key) {
2989 return reinterpret_cast<BoundsCheckBbData**>(
2990 &(Lookup(key, key->Hash(), true)->value));
2993 void Insert(BoundsCheckKey* key, BoundsCheckBbData* data) {
2994 Lookup(key, key->Hash(), true)->value = data;
2997 void Delete(BoundsCheckKey* key) {
2998 Remove(key, key->Hash());
3001 BoundsCheckTable() : ZoneHashMap(BoundsCheckKeyMatch) { }
3005 // Eliminates checks in bb and recursively in the dominated blocks.
3006 // Also replace the results of check instructions with the original value, if
3007 // the result is used. This is safe now, since we don't do code motion after
3008 // this point. It enables better register allocation since the value produced
3009 // by check instructions is really a copy of the original value.
3010 void HGraph::EliminateRedundantBoundsChecks(HBasicBlock* bb,
3011 BoundsCheckTable* table) {
3012 BoundsCheckBbData* bb_data_list = NULL;
3014 for (HInstruction* i = bb->first(); i != NULL; i = i->next()) {
3015 if (!i->IsBoundsCheck()) continue;
3017 HBoundsCheck* check = HBoundsCheck::cast(i);
3018 check->ReplaceAllUsesWith(check->index());
3020 if (!FLAG_array_bounds_checks_elimination) continue;
3023 BoundsCheckKey* key =
3024 BoundsCheckKey::Create(bb->zone(), check, &offset);
3025 BoundsCheckBbData** data_p = table->LookupOrInsert(key);
3026 BoundsCheckBbData* data = *data_p;
3028 bb_data_list = new(zone()) BoundsCheckBbData(key,
3035 *data_p = bb_data_list;
3036 } else if (data->OffsetIsCovered(offset)) {
3037 check->DeleteAndReplaceWith(NULL);
3038 } else if (data->BasicBlock() == bb) {
3039 data->CoverCheck(check, offset);
3041 int32_t new_lower_offset = offset < data->LowerOffset()
3043 : data->LowerOffset();
3044 int32_t new_upper_offset = offset > data->UpperOffset()
3046 : data->UpperOffset();
3047 bb_data_list = new(bb->zone()) BoundsCheckBbData(key,
3054 table->Insert(key, bb_data_list);
3058 for (int i = 0; i < bb->dominated_blocks()->length(); ++i) {
3059 EliminateRedundantBoundsChecks(bb->dominated_blocks()->at(i), table);
3062 for (BoundsCheckBbData* data = bb_data_list;
3064 data = data->NextInBasicBlock()) {
3065 data->RemoveZeroOperations();
3066 if (data->FatherInDominatorTree()) {
3067 table->Insert(data->Key(), data->FatherInDominatorTree());
3069 table->Delete(data->Key());
3075 void HGraph::EliminateRedundantBoundsChecks() {
3076 HPhase phase("H_Eliminate bounds checks", this);
3077 AssertNoAllocation no_gc;
3078 BoundsCheckTable checks_table;
3079 EliminateRedundantBoundsChecks(entry_block(), &checks_table);
3083 static void DehoistArrayIndex(ArrayInstructionInterface* array_operation) {
3084 HValue* index = array_operation->GetKey();
3086 HConstant* constant;
3087 HValue* subexpression;
3089 if (index->IsAdd()) {
3091 HAdd* add = HAdd::cast(index);
3092 if (add->left()->IsConstant()) {
3093 subexpression = add->right();
3094 constant = HConstant::cast(add->left());
3095 } else if (add->right()->IsConstant()) {
3096 subexpression = add->left();
3097 constant = HConstant::cast(add->right());
3101 } else if (index->IsSub()) {
3103 HSub* sub = HSub::cast(index);
3104 if (sub->left()->IsConstant()) {
3105 subexpression = sub->right();
3106 constant = HConstant::cast(sub->left());
3107 } else if (sub->right()->IsConstant()) {
3108 subexpression = sub->left();
3109 constant = HConstant::cast(sub->right());
3115 if (!constant->HasInteger32Value()) return;
3116 int32_t value = constant->Integer32Value() * sign;
3117 // We limit offset values to 30 bits because we want to avoid the risk of
3118 // overflows when the offset is added to the object header size.
3119 if (value >= 1 << 30 || value < 0) return;
3120 array_operation->SetKey(subexpression);
3121 if (index->HasNoUses()) {
3122 index->DeleteAndReplaceWith(NULL);
3125 array_operation->SetIndexOffset(static_cast<uint32_t>(value));
3126 array_operation->SetDehoisted(true);
3130 void HGraph::DehoistSimpleArrayIndexComputations() {
3131 if (!FLAG_array_index_dehoisting) return;
3133 HPhase phase("H_Dehoist index computations", this);
3134 for (int i = 0; i < blocks()->length(); ++i) {
3135 for (HInstruction* instr = blocks()->at(i)->first();
3137 instr = instr->next()) {
3138 ArrayInstructionInterface* array_instruction = NULL;
3139 if (instr->IsLoadKeyedFastElement()) {
3140 HLoadKeyedFastElement* op = HLoadKeyedFastElement::cast(instr);
3141 array_instruction = static_cast<ArrayInstructionInterface*>(op);
3142 } else if (instr->IsLoadKeyedFastDoubleElement()) {
3143 HLoadKeyedFastDoubleElement* op =
3144 HLoadKeyedFastDoubleElement::cast(instr);
3145 array_instruction = static_cast<ArrayInstructionInterface*>(op);
3146 } else if (instr->IsLoadKeyedSpecializedArrayElement()) {
3147 HLoadKeyedSpecializedArrayElement* op =
3148 HLoadKeyedSpecializedArrayElement::cast(instr);
3149 array_instruction = static_cast<ArrayInstructionInterface*>(op);
3150 } else if (instr->IsStoreKeyedFastElement()) {
3151 HStoreKeyedFastElement* op = HStoreKeyedFastElement::cast(instr);
3152 array_instruction = static_cast<ArrayInstructionInterface*>(op);
3153 } else if (instr->IsStoreKeyedFastDoubleElement()) {
3154 HStoreKeyedFastDoubleElement* op =
3155 HStoreKeyedFastDoubleElement::cast(instr);
3156 array_instruction = static_cast<ArrayInstructionInterface*>(op);
3157 } else if (instr->IsStoreKeyedSpecializedArrayElement()) {
3158 HStoreKeyedSpecializedArrayElement* op =
3159 HStoreKeyedSpecializedArrayElement::cast(instr);
3160 array_instruction = static_cast<ArrayInstructionInterface*>(op);
3164 DehoistArrayIndex(array_instruction);
3170 HInstruction* HGraphBuilder::AddInstruction(HInstruction* instr) {
3171 ASSERT(current_block() != NULL);
3172 current_block()->AddInstruction(instr);
3177 void HGraphBuilder::AddSimulate(int ast_id) {
3178 ASSERT(current_block() != NULL);
3179 current_block()->AddSimulate(ast_id);
3183 void HGraphBuilder::AddPhi(HPhi* instr) {
3184 ASSERT(current_block() != NULL);
3185 current_block()->AddPhi(instr);
3189 void HGraphBuilder::PushAndAdd(HInstruction* instr) {
3191 AddInstruction(instr);
// Pops the call's arguments off the environment's expression stack (they
// were pushed left-to-right, so popping yields them in reverse) and
// re-emits them as HPushArgument instructions in original call order.
3195 template <class Instruction>
3196 HInstruction* HGraphBuilder::PreProcessCall(Instruction* call) {
3197 int count = call->argument_count();
3198 ZoneList<HValue*> arguments(count);
3199 for (int i = 0; i < count; ++i) {
3200 arguments.Add(Pop());
// RemoveLast() walks the list backwards, restoring left-to-right order.
3203 while (!arguments.is_empty()) {
3204 AddInstruction(new(zone()) HPushArgument(arguments.RemoveLast()));
// Initializes the builder's environment for the function's scope: binds
// parameters (including the receiver at index 0), the context, and sets
// all specials/locals to the shared undefined constant. Bails out if the
// arguments object would have to be context-allocated.
3210 void HGraphBuilder::SetUpScope(Scope* scope) {
3211 HConstant* undefined_constant = new(zone()) HConstant(
3212 isolate()->factory()->undefined_value(), Representation::Tagged());
3213 AddInstruction(undefined_constant);
3214 graph_->set_undefined_constant(undefined_constant);
// Lazily materialized arguments object shared by the whole graph.
3216 HArgumentsObject* object = new(zone()) HArgumentsObject;
3217 AddInstruction(object);
3218 graph()->SetArgumentsObject(object);
3220 // Set the initial values of parameters including "this". "This" has
3221 // parameter index 0.
3222 ASSERT_EQ(scope->num_parameters() + 1, environment()->parameter_count());
3224 for (int i = 0; i < environment()->parameter_count(); ++i) {
3225 HInstruction* parameter = AddInstruction(new(zone()) HParameter(i));
3226 environment()->Bind(i, parameter);
3229 // First special is HContext.
3230 HInstruction* context = AddInstruction(new(zone()) HContext);
3231 environment()->BindContext(context);
3233 // Initialize specials and locals to undefined.
3234 for (int i = environment()->parameter_count() + 1;
3235 i < environment()->length();
3237 environment()->Bind(i, undefined_constant);
3240 // Handle the arguments and arguments shadow variables specially (they do
3241 // not have declarations).
3242 if (scope->arguments() != NULL) {
3243 if (!scope->arguments()->IsStackAllocated()) {
3244 return Bailout("context-allocated arguments");
3247 environment()->Bind(scope->arguments(),
3248 graph()->GetArgumentsObject());
// Visits each statement in order; CHECK_ALIVE stops early once control
// flow has ended (current block cleared) or a bailout occurred.
3253 void HGraphBuilder::VisitStatements(ZoneList<Statement*>* statements) {
3254 for (int i = 0; i < statements->length(); i++) {
3255 CHECK_ALIVE(Visit(statements->at(i)));
// Creates a fresh basic block seeded with the given environment.
// (The return statement is elided from this listing.)
3260 HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
3261 HBasicBlock* b = graph()->CreateBasicBlock();
3262 b->SetInitialEnvironment(env);
// Creates a loop-header block: its initial environment is a loop-header
// copy of the current one (locals become phis) and loop metadata is
// attached so back edges can be registered.
3267 HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() {
3268 HBasicBlock* header = graph()->CreateBasicBlock();
3269 HEnvironment* entry_env = environment()->CopyAsLoopHeader(header);
3270 header->SetInitialEnvironment(entry_env);
3271 header->AttachLoopInformation();
// Translates a statement block. Blocks with their own scope (block-scoped
// declarations) are not supported and bail out. A break block is only
// materialized if some statement actually broke out of this block.
3276 void HGraphBuilder::VisitBlock(Block* stmt) {
3277 ASSERT(!HasStackOverflow());
3278 ASSERT(current_block() != NULL);
3279 ASSERT(current_block()->HasPredecessor());
3280 if (stmt->scope() != NULL) {
3281 return Bailout("ScopedBlock");
3283 BreakAndContinueInfo break_info(stmt);
3284 { BreakAndContinueScope push(&break_info, this);
3285 CHECK_BAILOUT(VisitStatements(stmt->statements()));
3287 HBasicBlock* break_block = break_info.break_block();
3288 if (break_block != NULL) {
// Fall-through control flow joins the break target, if any remains.
3289 if (current_block() != NULL) current_block()->Goto(break_block);
3290 break_block->SetJoinId(stmt->ExitId());
3291 set_current_block(break_block);
// An expression statement's value is discarded: visit for effect only.
3296 void HGraphBuilder::VisitExpressionStatement(ExpressionStatement* stmt) {
3297 ASSERT(!HasStackOverflow());
3298 ASSERT(current_block() != NULL);
3299 ASSERT(current_block()->HasPredecessor());
3300 VisitForEffect(stmt->expression());
// Empty statements generate no code; only the builder invariants hold.
3304 void HGraphBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
3305 ASSERT(!HasStackOverflow());
3306 ASSERT(current_block() != NULL);
3307 ASSERT(current_block()->HasPredecessor());
// Translates an if-statement. Conditions that statically evaluate to
// true/false emit only the taken arm (plus a simulate at its entry);
// otherwise both arms are built and rejoined. A branch whose target block
// ends up with no predecessor (condition folded away inside
// VisitForControl) is skipped entirely.
3311 void HGraphBuilder::VisitIfStatement(IfStatement* stmt) {
3312 ASSERT(!HasStackOverflow());
3313 ASSERT(current_block() != NULL);
3314 ASSERT(current_block()->HasPredecessor());
3315 if (stmt->condition()->ToBooleanIsTrue()) {
3316 AddSimulate(stmt->ThenId());
3317 Visit(stmt->then_statement());
3318 } else if (stmt->condition()->ToBooleanIsFalse()) {
3319 AddSimulate(stmt->ElseId());
3320 Visit(stmt->else_statement());
3322 HBasicBlock* cond_true = graph()->CreateBasicBlock();
3323 HBasicBlock* cond_false = graph()->CreateBasicBlock();
3324 CHECK_BAILOUT(VisitForControl(stmt->condition(), cond_true, cond_false));
3326 if (cond_true->HasPredecessor()) {
3327 cond_true->SetJoinId(stmt->ThenId());
3328 set_current_block(cond_true);
3329 CHECK_BAILOUT(Visit(stmt->then_statement()));
// Reload: visiting the arm may have moved the current block.
3330 cond_true = current_block();
3335 if (cond_false->HasPredecessor()) {
3336 cond_false->SetJoinId(stmt->ElseId());
3337 set_current_block(cond_false);
3338 CHECK_BAILOUT(Visit(stmt->else_statement()));
3339 cond_false = current_block();
3344 HBasicBlock* join = CreateJoin(cond_true, cond_false, stmt->IfId());
3345 set_current_block(join);
// Finds the break/continue target block for |stmt| by walking the chain
// of enclosing BreakAndContinueScopes, accumulating into |drop_extra| the
// number of expression-stack entries that must be dropped when jumping
// out of each skipped scope. The target block is created lazily on first
// use. (Some lines of this function are elided from this listing.)
3350 HBasicBlock* HGraphBuilder::BreakAndContinueScope::Get(
3351 BreakableStatement* stmt,
3355 BreakAndContinueScope* current = this;
3356 while (current != NULL && current->info()->target() != stmt) {
3357 *drop_extra += current->info()->drop_extra();
3358 current = current->next();
3360 ASSERT(current != NULL); // Always found (unless stack is malformed).
3362 if (type == BREAK) {
// Breaking also drops the target scope's own extra stack entries.
3363 *drop_extra += current->info()->drop_extra();
3366 HBasicBlock* block = NULL;
3369 block = current->info()->break_block();
3370 if (block == NULL) {
3371 block = current->owner()->graph()->CreateBasicBlock();
3372 current->info()->set_break_block(block);
3377 block = current->info()->continue_block();
3378 if (block == NULL) {
3379 block = current->owner()->graph()->CreateBasicBlock();
3380 current->info()->set_continue_block(block);
// Jumps to the continue block of the targeted loop and ends the current
// control flow (current block set to NULL).
3389 void HGraphBuilder::VisitContinueStatement(ContinueStatement* stmt) {
3390 ASSERT(!HasStackOverflow());
3391 ASSERT(current_block() != NULL);
3392 ASSERT(current_block()->HasPredecessor());
3394 HBasicBlock* continue_block = break_scope()->Get(stmt->target(),
3398 current_block()->Goto(continue_block);
3399 set_current_block(NULL);
// Jumps to the break block of the targeted statement and ends the current
// control flow (current block set to NULL).
3403 void HGraphBuilder::VisitBreakStatement(BreakStatement* stmt) {
3404 ASSERT(!HasStackOverflow());
3405 ASSERT(current_block() != NULL);
3406 ASSERT(current_block()->HasPredecessor());
3408 HBasicBlock* break_block = break_scope()->Get(stmt->target(),
3412 current_block()->Goto(break_block);
3413 set_current_block(NULL);
// Translates a return statement. Three cases:
//  1. Not inlined (no call context): emit an actual HReturn.
//  2. Inlined construct call: per JS semantics the construct result is
//     the returned value only if it is a spec object, otherwise the
//     receiver; a dynamic instance-type check picks between them.
//  3. Ordinary inlined call: evaluate the expression in the caller's
//     AST context (test/effect/value) and leave the inlined function.
// In all cases control flow ends here (current block set to NULL).
3417 void HGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
3418 ASSERT(!HasStackOverflow());
3419 ASSERT(current_block() != NULL);
3420 ASSERT(current_block()->HasPredecessor());
3421 AstContext* context = call_context();
3422 if (context == NULL) {
3423 // Not an inlined return, so an actual one.
3424 CHECK_ALIVE(VisitForValue(stmt->expression()));
3425 HValue* result = environment()->Pop();
3426 current_block()->FinishExit(new(zone()) HReturn(result));
3427 } else if (function_state()->is_construct()) {
3428 // Return from an inlined construct call. In a test context the return
3429 // value will always evaluate to true, in a value context the return value
3430 // needs to be a JSObject.
3431 if (context->IsTest()) {
3432 TestContext* test = TestContext::cast(context);
3433 CHECK_ALIVE(VisitForEffect(stmt->expression()));
3434 current_block()->Goto(test->if_true(), function_state());
3435 } else if (context->IsEffect()) {
3436 CHECK_ALIVE(VisitForEffect(stmt->expression()));
3437 current_block()->Goto(function_return(), function_state());
3439 ASSERT(context->IsValue());
3440 CHECK_ALIVE(VisitForValue(stmt->expression()));
3441 HValue* return_value = Pop();
// The receiver ("this") lives at environment slot 0.
3442 HValue* receiver = environment()->Lookup(0);
3443 HHasInstanceTypeAndBranch* typecheck =
3444 new(zone()) HHasInstanceTypeAndBranch(return_value,
3445 FIRST_SPEC_OBJECT_TYPE,
3446 LAST_SPEC_OBJECT_TYPE);
3447 HBasicBlock* if_spec_object = graph()->CreateBasicBlock();
3448 HBasicBlock* not_spec_object = graph()->CreateBasicBlock();
3449 typecheck->SetSuccessorAt(0, if_spec_object);
3450 typecheck->SetSuccessorAt(1, not_spec_object);
3451 current_block()->Finish(typecheck);
3452 if_spec_object->AddLeaveInlined(return_value,
3455 not_spec_object->AddLeaveInlined(receiver,
3460 // Return from an inlined function, visit the subexpression in the
3461 // expression context of the call.
3462 if (context->IsTest()) {
3463 TestContext* test = TestContext::cast(context);
3464 VisitForControl(stmt->expression(),
3467 } else if (context->IsEffect()) {
3468 CHECK_ALIVE(VisitForEffect(stmt->expression()));
3469 current_block()->Goto(function_return(), function_state());
3471 ASSERT(context->IsValue());
3472 CHECK_ALIVE(VisitForValue(stmt->expression()));
3473 HValue* return_value = Pop();
3474 current_block()->AddLeaveInlined(return_value,
3479 set_current_block(NULL);
// 'with' statements are not supported by the optimizing compiler; bail
// out to full codegen.
3483 void HGraphBuilder::VisitWithStatement(WithStatement* stmt) {
3484 ASSERT(!HasStackOverflow());
3485 ASSERT(current_block() != NULL);
3486 ASSERT(current_block()->HasPredecessor());
3487 return Bailout("WithStatement");
// Translates a switch statement. Only switches whose clause labels are
// all smi literals (smi compares) or all string literals are optimized,
// and only up to kCaseClauseLimit clauses; anything else bails out.
// Construction proceeds in three phases:
//  1. classify the clause labels (smi vs. string switch),
//  2. build the chain of compare-and-branch tests with dangling bodies,
//  3. walk clauses and test blocks in lockstep, wiring fall-through.
// Fix: the mixed-label bailout message previously read "SwitchStatemnt"
// (typo); corrected to "SwitchStatement" for consistency with the other
// bailout reasons.
3491 void HGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
3492 ASSERT(!HasStackOverflow());
3493 ASSERT(current_block() != NULL);
3494 ASSERT(current_block()->HasPredecessor());
3495 // We only optimize switch statements with smi-literal smi comparisons,
3496 // with a bounded number of clauses.
3497 const int kCaseClauseLimit = 128;
3498 ZoneList<CaseClause*>* clauses = stmt->cases();
3499 int clause_count = clauses->length();
3500 if (clause_count > kCaseClauseLimit) {
3501 return Bailout("SwitchStatement: too many clauses");
3504 HValue* context = environment()->LookupContext();
3506 CHECK_ALIVE(VisitForValue(stmt->tag()));
3507 AddSimulate(stmt->EntryId());
3508 HValue* tag_value = Pop();
3509 HBasicBlock* first_test_block = current_block();
3511 SwitchType switch_type = UNKNOWN_SWITCH;
3513 // 1. Extract clause type
3514 for (int i = 0; i < clause_count; ++i) {
3515 CaseClause* clause = clauses->at(i);
3516 if (clause->is_default()) continue;
3518 if (switch_type == UNKNOWN_SWITCH) {
3519 if (clause->label()->IsSmiLiteral()) {
3520 switch_type = SMI_SWITCH;
3521 } else if (clause->label()->IsStringLiteral()) {
3522 switch_type = STRING_SWITCH;
3524 return Bailout("SwitchStatement: non-literal switch label");
3526 } else if ((switch_type == STRING_SWITCH &&
3527 !clause->label()->IsStringLiteral()) ||
3528 (switch_type == SMI_SWITCH &&
3529 !clause->label()->IsSmiLiteral())) {
3530 return Bailout("SwitchStatement: mixed label types are not supported");
3534 HUnaryControlInstruction* string_check = NULL;
3535 HBasicBlock* not_string_block = NULL;
3537 // Test switch's tag value if all clauses are string literals
3538 if (switch_type == STRING_SWITCH) {
3539 string_check = new(zone()) HIsStringAndBranch(tag_value);
3540 first_test_block = graph()->CreateBasicBlock();
3541 not_string_block = graph()->CreateBasicBlock();
3543 string_check->SetSuccessorAt(0, first_test_block);
3544 string_check->SetSuccessorAt(1, not_string_block);
3545 current_block()->Finish(string_check);
3547 set_current_block(first_test_block);
3550 // 2. Build all the tests, with dangling true branches
3551 int default_id = AstNode::kNoNumber;
3552 for (int i = 0; i < clause_count; ++i) {
3553 CaseClause* clause = clauses->at(i);
3554 if (clause->is_default()) {
3555 default_id = clause->EntryId();
3558 if (switch_type == SMI_SWITCH) {
3559 clause->RecordTypeFeedback(oracle());
3562 // Generate a compare and branch.
3563 CHECK_ALIVE(VisitForValue(clause->label()));
3564 HValue* label_value = Pop();
3566 HBasicBlock* next_test_block = graph()->CreateBasicBlock();
3567 HBasicBlock* body_block = graph()->CreateBasicBlock();
3569 HControlInstruction* compare;
3571 if (switch_type == SMI_SWITCH) {
3572 if (!clause->IsSmiCompare()) {
3573 // Finish with deoptimize and add uses of enviroment values to
3574 // account for invisible uses.
3575 current_block()->FinishExitWithDeoptimization(HDeoptimize::kUseAll);
3576 set_current_block(NULL);
3580 HCompareIDAndBranch* compare_ =
3581 new(zone()) HCompareIDAndBranch(tag_value,
3584 compare_->SetInputRepresentation(Representation::Integer32());
3587 compare = new(zone()) HStringCompareAndBranch(context, tag_value,
3592 compare->SetSuccessorAt(0, body_block);
3593 compare->SetSuccessorAt(1, next_test_block);
3594 current_block()->Finish(compare);
3596 set_current_block(next_test_block);
3599 // Save the current block to use for the default or to join with the
3600 // exit. This block is NULL if we deoptimized.
3601 HBasicBlock* last_block = current_block();
3603 if (not_string_block != NULL) {
3604 int join_id = (default_id != AstNode::kNoNumber)
3607 last_block = CreateJoin(last_block, not_string_block, join_id);
3610 // 3. Loop over the clauses and the linked list of tests in lockstep,
3611 // translating the clause bodies.
3612 HBasicBlock* curr_test_block = first_test_block;
3613 HBasicBlock* fall_through_block = NULL;
3615 BreakAndContinueInfo break_info(stmt);
3616 { BreakAndContinueScope push(&break_info, this);
3617 for (int i = 0; i < clause_count; ++i) {
3618 CaseClause* clause = clauses->at(i);
3620 // Identify the block where normal (non-fall-through) control flow
3622 HBasicBlock* normal_block = NULL;
3623 if (clause->is_default()) {
3624 if (last_block != NULL) {
3625 normal_block = last_block;
3626 last_block = NULL; // Cleared to indicate we've handled it.
3628 } else if (!curr_test_block->end()->IsDeoptimize()) {
3629 normal_block = curr_test_block->end()->FirstSuccessor();
3630 curr_test_block = curr_test_block->end()->SecondSuccessor();
3633 // Identify a block to emit the body into.
3634 if (normal_block == NULL) {
3635 if (fall_through_block == NULL) {
3637 if (clause->is_default()) {
3638 continue; // Might still be reachable clause bodies.
3643 // (b) Reachable only as fall through.
3644 set_current_block(fall_through_block);
3646 } else if (fall_through_block == NULL) {
3647 // (c) Reachable only normally.
3648 set_current_block(normal_block);
3650 // (d) Reachable both ways.
3651 HBasicBlock* join = CreateJoin(fall_through_block,
3654 set_current_block(join);
3657 CHECK_BAILOUT(VisitStatements(clause->statements()));
3658 fall_through_block = current_block();
3662 // Create an up-to-3-way join. Use the break block if it exists since
3663 // it's already a join block.
3664 HBasicBlock* break_block = break_info.break_block();
3665 if (break_block == NULL) {
3666 set_current_block(CreateJoin(fall_through_block,
3670 if (fall_through_block != NULL) fall_through_block->Goto(break_block);
3671 if (last_block != NULL) last_block->Goto(break_block);
3672 break_block->SetJoinId(stmt->ExitId());
3673 set_current_block(break_block);
// True if this loop is the on-stack-replacement entry point requested
// for the current compilation.
3678 bool HGraphBuilder::HasOsrEntryAt(IterationStatement* statement) {
3679 return statement->OsrEntryId() == info()->osr_ast_id();
// If |statement| is the OSR entry loop, splits control flow into a normal
// (non-OSR) path and an OSR path whose environment slots are all rebound
// to HUnknownOSRValue placeholders, then merges both into a common loop
// predecessor. Returns false (doing nothing) when this loop is not the
// OSR entry. The branch condition is constant-true so the OSR arm is
// never taken at runtime; it exists only to give OSR a graph entry.
3683 bool HGraphBuilder::PreProcessOsrEntry(IterationStatement* statement) {
3684 if (!HasOsrEntryAt(statement)) return false;
3686 HBasicBlock* non_osr_entry = graph()->CreateBasicBlock();
3687 HBasicBlock* osr_entry = graph()->CreateBasicBlock();
3688 HValue* true_value = graph()->GetConstantTrue();
3689 HBranch* test = new(zone()) HBranch(true_value, non_osr_entry, osr_entry);
3690 current_block()->Finish(test);
3692 HBasicBlock* loop_predecessor = graph()->CreateBasicBlock();
3693 non_osr_entry->Goto(loop_predecessor);
3695 set_current_block(osr_entry);
3696 int osr_entry_id = statement->OsrEntryId();
3697 int first_expression_index = environment()->first_expression_index();
3698 int length = environment()->length();
3699 ZoneList<HUnknownOSRValue*>* osr_values =
3700 new(zone()) ZoneList<HUnknownOSRValue*>(length);
// Parameters, specials and locals come before the expression stack.
3702 for (int i = 0; i < first_expression_index; ++i) {
3703 HUnknownOSRValue* osr_value = new(zone()) HUnknownOSRValue;
3704 AddInstruction(osr_value);
3705 environment()->Bind(i, osr_value);
3706 osr_values->Add(osr_value);
// Rebuild the expression stack with fresh OSR placeholders as well.
3709 if (first_expression_index != length) {
3710 environment()->Drop(length - first_expression_index);
3711 for (int i = first_expression_index; i < length; ++i) {
3712 HUnknownOSRValue* osr_value = new(zone()) HUnknownOSRValue;
3713 AddInstruction(osr_value);
3714 environment()->Push(osr_value);
3715 osr_values->Add(osr_value);
3719 graph()->set_osr_values(osr_values);
3721 AddSimulate(osr_entry_id);
3722 AddInstruction(new(zone()) HOsrEntry(osr_entry_id));
3723 HContext* context = new(zone()) HContext;
3724 AddInstruction(context);
3725 environment()->BindContext(context);
3726 current_block()->Goto(loop_predecessor);
3727 loop_predecessor->SetJoinId(statement->EntryId());
3728 set_current_block(loop_predecessor);
// Translates a loop body: installs the break/continue scope, emits the
// back-edge stack check (attached to the loop header for later patching),
// then visits the body statements.
3733 void HGraphBuilder::VisitLoopBody(IterationStatement* stmt,
3734 HBasicBlock* loop_entry,
3735 BreakAndContinueInfo* break_info) {
3736 BreakAndContinueScope push(break_info, this);
3737 AddSimulate(stmt->StackCheckId());
3738 HValue* context = environment()->LookupContext();
3739 HStackCheck* stack_check =
3740 new(zone()) HStackCheck(context, HStackCheck::kBackwardsBranch);
3741 AddInstruction(stack_check);
3742 ASSERT(loop_entry->IsLoopHeader());
3743 loop_entry->loop_information()->set_stack_check(stack_check);
3744 CHECK_BAILOUT(Visit(stmt->body()));
// Translates a do-while loop: body first, then the condition test on the
// back edge (skipped entirely when the condition is constant-true).
// Fix: removed a duplicated ASSERT(current_block() != NULL) — the same
// assertion already appears two lines above, so the repeat was dead.
3748 void HGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
3749 ASSERT(!HasStackOverflow());
3750 ASSERT(current_block() != NULL);
3751 ASSERT(current_block()->HasPredecessor());
3753 bool osr_entry = PreProcessOsrEntry(stmt);
3754 HBasicBlock* loop_entry = CreateLoopHeaderBlock();
3755 current_block()->Goto(loop_entry);
3756 set_current_block(loop_entry);
3757 if (osr_entry) graph()->set_osr_loop_entry(loop_entry);
3759 BreakAndContinueInfo break_info(stmt);
3760 CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
3761 HBasicBlock* body_exit =
3762 JoinContinue(stmt, current_block(), break_info.continue_block());
3763 HBasicBlock* loop_successor = NULL;
3764 if (body_exit != NULL && !stmt->cond()->ToBooleanIsTrue()) {
3765 set_current_block(body_exit);
3766 // The block for a true condition, the actual predecessor block of the
3768 body_exit = graph()->CreateBasicBlock();
3769 loop_successor = graph()->CreateBasicBlock();
3770 CHECK_BAILOUT(VisitForControl(stmt->cond(), body_exit, loop_successor));
3771 if (body_exit->HasPredecessor()) {
3772 body_exit->SetJoinId(stmt->BackEdgeId());
3776 if (loop_successor->HasPredecessor()) {
3777 loop_successor->SetJoinId(stmt->ExitId());
3779 loop_successor = NULL;
3782 HBasicBlock* loop_exit = CreateLoop(stmt,
3786 break_info.break_block());
3787 set_current_block(loop_exit);
// Translates a while loop: condition test at the loop header, then the
// body. A constant-true condition emits no branch at all.
// Fix: removed a duplicated ASSERT(current_block() != NULL) — the same
// assertion already appears two lines above, so the repeat was dead.
3791 void HGraphBuilder::VisitWhileStatement(WhileStatement* stmt) {
3792 ASSERT(!HasStackOverflow());
3793 ASSERT(current_block() != NULL);
3794 ASSERT(current_block()->HasPredecessor());
3796 bool osr_entry = PreProcessOsrEntry(stmt);
3797 HBasicBlock* loop_entry = CreateLoopHeaderBlock();
3798 current_block()->Goto(loop_entry);
3799 set_current_block(loop_entry);
3800 if (osr_entry) graph()->set_osr_loop_entry(loop_entry);
3803 // If the condition is constant true, do not generate a branch.
3804 HBasicBlock* loop_successor = NULL;
3805 if (!stmt->cond()->ToBooleanIsTrue()) {
3806 HBasicBlock* body_entry = graph()->CreateBasicBlock();
3807 loop_successor = graph()->CreateBasicBlock();
3808 CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
3809 if (body_entry->HasPredecessor()) {
3810 body_entry->SetJoinId(stmt->BodyId());
3811 set_current_block(body_entry);
3813 if (loop_successor->HasPredecessor()) {
3814 loop_successor->SetJoinId(stmt->ExitId());
3816 loop_successor = NULL;
3820 BreakAndContinueInfo break_info(stmt);
3821 if (current_block() != NULL) {
3822 CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
3824 HBasicBlock* body_exit =
3825 JoinContinue(stmt, current_block(), break_info.continue_block());
3826 HBasicBlock* loop_exit = CreateLoop(stmt,
3830 break_info.break_block());
3831 set_current_block(loop_exit);
// Translates a for loop: init statement outside the loop, optional
// condition branch at the header, body, then the optional next-expression
// on the back edge.
3835 void HGraphBuilder::VisitForStatement(ForStatement* stmt) {
3836 ASSERT(!HasStackOverflow());
3837 ASSERT(current_block() != NULL);
3838 ASSERT(current_block()->HasPredecessor());
3839 if (stmt->init() != NULL) {
3840 CHECK_ALIVE(Visit(stmt->init()));
3842 ASSERT(current_block() != NULL);
3843 bool osr_entry = PreProcessOsrEntry(stmt);
3844 HBasicBlock* loop_entry = CreateLoopHeaderBlock();
3845 current_block()->Goto(loop_entry);
3846 set_current_block(loop_entry);
3847 if (osr_entry) graph()->set_osr_loop_entry(loop_entry);
// An absent condition behaves like constant-true: no branch is emitted.
3849 HBasicBlock* loop_successor = NULL;
3850 if (stmt->cond() != NULL) {
3851 HBasicBlock* body_entry = graph()->CreateBasicBlock();
3852 loop_successor = graph()->CreateBasicBlock();
3853 CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
3854 if (body_entry->HasPredecessor()) {
3855 body_entry->SetJoinId(stmt->BodyId());
3856 set_current_block(body_entry);
3858 if (loop_successor->HasPredecessor()) {
3859 loop_successor->SetJoinId(stmt->ExitId());
3861 loop_successor = NULL;
3865 BreakAndContinueInfo break_info(stmt);
3866 if (current_block() != NULL) {
3867 CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
3869 HBasicBlock* body_exit =
3870 JoinContinue(stmt, current_block(), break_info.continue_block());
// The next-expression only runs if the body exit is still reachable.
3872 if (stmt->next() != NULL && body_exit != NULL) {
3873 set_current_block(body_exit);
3874 CHECK_BAILOUT(Visit(stmt->next()));
3875 body_exit = current_block();
3878 HBasicBlock* loop_exit = CreateLoop(stmt,
3882 break_info.break_block());
3883 set_current_block(loop_exit);
// Translates a for-in loop in its fast case only: the enumerable's enum
// cache is materialized as an array, and the loop iterates an integer
// index over it, loading each key, re-checking the enumerable's map each
// iteration (deopt on mismatch), binding the key to the stack-local
// 'each' variable, then incrementing the index on the back edge. All
// other shapes (slow enumerables, non-local 'each', flag disabled) bail
// out. The loop state lives on the expression stack: index at slot 0,
// limit at 1, enum cache at 2, expected map at 3, enumerable at 4 —
// hence BreakAndContinueInfo's drop count of 5.
3887 void HGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
3888 ASSERT(!HasStackOverflow());
3889 ASSERT(current_block() != NULL);
3890 ASSERT(current_block()->HasPredecessor());
3892 if (!FLAG_optimize_for_in) {
3893 return Bailout("ForInStatement optimization is disabled");
3896 if (!oracle()->IsForInFastCase(stmt)) {
3897 return Bailout("ForInStatement is not fast case");
3900 if (!stmt->each()->IsVariableProxy() ||
3901 !stmt->each()->AsVariableProxy()->var()->IsStackLocal()) {
3902 return Bailout("ForInStatement with non-local each variable");
3905 Variable* each_var = stmt->each()->AsVariableProxy()->var();
3907 CHECK_ALIVE(VisitForValue(stmt->enumerable()));
3908 HValue* enumerable = Top(); // Leave enumerable at the top.
3910 HInstruction* map = AddInstruction(new(zone()) HForInPrepareMap(
3911 environment()->LookupContext(), enumerable));
3912 AddSimulate(stmt->PrepareId());
3914 HInstruction* array = AddInstruction(
3915 new(zone()) HForInCacheArray(
3918 DescriptorArray::kEnumCacheBridgeCacheIndex));
3920 HInstruction* array_length = AddInstruction(
3921 new(zone()) HFixedArrayBaseLength(array));
3923 HInstruction* start_index = AddInstruction(new(zone()) HConstant(
3924 Handle<Object>(Smi::FromInt(0)), Representation::Integer32()));
3931 HInstruction* index_cache = AddInstruction(
3932 new(zone()) HForInCacheArray(
3935 DescriptorArray::kEnumCacheBridgeIndicesCacheIndex));
3936 HForInCacheArray::cast(array)->set_index_cache(
3937 HForInCacheArray::cast(index_cache));
3939 bool osr_entry = PreProcessOsrEntry(stmt);
3940 HBasicBlock* loop_entry = CreateLoopHeaderBlock();
3941 current_block()->Goto(loop_entry);
3942 set_current_block(loop_entry);
3943 if (osr_entry) graph()->set_osr_loop_entry(loop_entry);
3945 HValue* index = environment()->ExpressionStackAt(0);
3946 HValue* limit = environment()->ExpressionStackAt(1);
3948 // Check that we still have more keys.
3949 HCompareIDAndBranch* compare_index =
3950 new(zone()) HCompareIDAndBranch(index, limit, Token::LT);
3951 compare_index->SetInputRepresentation(Representation::Integer32());
3953 HBasicBlock* loop_body = graph()->CreateBasicBlock();
3954 HBasicBlock* loop_successor = graph()->CreateBasicBlock();
3956 compare_index->SetSuccessorAt(0, loop_body);
3957 compare_index->SetSuccessorAt(1, loop_successor);
3958 current_block()->Finish(compare_index);
3960 set_current_block(loop_successor);
3963 set_current_block(loop_body);
3965 HValue* key = AddInstruction(
3966 new(zone()) HLoadKeyedFastElement(
3967 environment()->ExpressionStackAt(2), // Enum cache.
3968 environment()->ExpressionStackAt(0), // Iteration index.
3969 HLoadKeyedFastElement::OMIT_HOLE_CHECK));
3971 // Check if the expected map still matches that of the enumerable.
3972 // If not just deoptimize.
3973 AddInstruction(new(zone()) HCheckMapValue(
3974 environment()->ExpressionStackAt(4),
3975 environment()->ExpressionStackAt(3)));
3977 Bind(each_var, key);
3979 BreakAndContinueInfo break_info(stmt, 5);
3980 CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
3982 HBasicBlock* body_exit =
3983 JoinContinue(stmt, current_block(), break_info.continue_block());
3985 if (body_exit != NULL) {
3986 set_current_block(body_exit);
// Back edge: index = index + 1, kept in Integer32 representation.
3988 HValue* current_index = Pop();
3989 HInstruction* new_index = new(zone()) HAdd(environment()->LookupContext(),
3991 graph()->GetConstant1());
3992 new_index->AssumeRepresentation(Representation::Integer32());
3993 PushAndAdd(new_index);
3994 body_exit = current_block();
3997 HBasicBlock* loop_exit = CreateLoop(stmt,
4001 break_info.break_block());
4003 set_current_block(loop_exit);
// try/catch is not supported by the optimizing compiler; bail out.
4007 void HGraphBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
4008 ASSERT(!HasStackOverflow());
4009 ASSERT(current_block() != NULL);
4010 ASSERT(current_block()->HasPredecessor());
4011 return Bailout("TryCatchStatement");
// try/finally is not supported by the optimizing compiler; bail out.
4015 void HGraphBuilder::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
4016 ASSERT(!HasStackOverflow());
4017 ASSERT(current_block() != NULL);
4018 ASSERT(current_block()->HasPredecessor());
4019 return Bailout("TryFinallyStatement");
// 'debugger' statements are not supported by the optimizing compiler;
// bail out.
4023 void HGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
4024 ASSERT(!HasStackOverflow());
4025 ASSERT(current_block() != NULL);
4026 ASSERT(current_block()->HasPredecessor());
4027 return Bailout("DebuggerStatement");
// Scans the relocation info of |unoptimized_code| for an embedded
// SharedFunctionInfo whose start position matches |expr|, so an already
// compiled closure can be reused instead of recompiling the literal.
// Returns a null handle if none is found.
4031 static Handle<SharedFunctionInfo> SearchSharedFunctionInfo(
4032 Code* unoptimized_code, FunctionLiteral* expr) {
4033 int start_position = expr->start_position();
4034 RelocIterator it(unoptimized_code);
4035 for (;!it.done(); it.next()) {
4036 RelocInfo* rinfo = it.rinfo();
4037 if (rinfo->rmode() != RelocInfo::EMBEDDED_OBJECT) continue;
4038 Object* obj = rinfo->target_object();
4039 if (obj->IsSharedFunctionInfo()) {
4040 SharedFunctionInfo* shared = SharedFunctionInfo::cast(obj);
// Start position uniquely identifies the literal within the script.
4041 if (shared->start_position() == start_position) {
4042 return Handle<SharedFunctionInfo>(shared);
4047 return Handle<SharedFunctionInfo>();
// Translates a function literal into an HFunctionLiteral. The shared
// function info is looked up in the enclosing unoptimized code first;
// only if absent is the literal compiled (which may itself overflow the
// stack, checked below).
4051 void HGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
4052 ASSERT(!HasStackOverflow());
4053 ASSERT(current_block() != NULL);
4054 ASSERT(current_block()->HasPredecessor());
4055 Handle<SharedFunctionInfo> shared_info =
4056 SearchSharedFunctionInfo(info()->shared_info()->code(),
4058 if (shared_info.is_null()) {
4059 shared_info = Compiler::BuildFunctionInfo(expr, info()->script());
4061 // We also have a stack overflow if the recursive compilation did.
4062 if (HasStackOverflow()) return;
4063 HValue* context = environment()->LookupContext();
4064 HFunctionLiteral* instr =
4065 new(zone()) HFunctionLiteral(context, shared_info, expr->pretenure());
4066 return ast_context()->ReturnInstruction(instr, expr->id());
// SharedFunctionInfo literals (used for lazily compiled top-level code)
// are not supported by the optimizing compiler; bail out.
4070 void HGraphBuilder::VisitSharedFunctionInfoLiteral(
4071 SharedFunctionInfoLiteral* expr) {
4072 ASSERT(!HasStackOverflow());
4073 ASSERT(current_block() != NULL);
4074 ASSERT(current_block()->HasPredecessor());
4075 return Bailout("SharedFunctionInfoLiteral");
// Translates a conditional (?:) expression. Both arms are visited in the
// same AST context as the whole expression; arms whose entry block ends
// up without a predecessor (statically dead) are skipped. In a test
// context no explicit join/value is produced here.
4079 void HGraphBuilder::VisitConditional(Conditional* expr) {
4080 ASSERT(!HasStackOverflow());
4081 ASSERT(current_block() != NULL);
4082 ASSERT(current_block()->HasPredecessor());
4083 HBasicBlock* cond_true = graph()->CreateBasicBlock();
4084 HBasicBlock* cond_false = graph()->CreateBasicBlock();
4085 CHECK_BAILOUT(VisitForControl(expr->condition(), cond_true, cond_false));
4087 // Visit the true and false subexpressions in the same AST context as the
4088 // whole expression.
4089 if (cond_true->HasPredecessor()) {
4090 cond_true->SetJoinId(expr->ThenId());
4091 set_current_block(cond_true);
4092 CHECK_BAILOUT(Visit(expr->then_expression()));
4093 cond_true = current_block();
4098 if (cond_false->HasPredecessor()) {
4099 cond_false->SetJoinId(expr->ElseId());
4100 set_current_block(cond_false);
4101 CHECK_BAILOUT(Visit(expr->else_expression()));
4102 cond_false = current_block();
4107 if (!ast_context()->IsTest()) {
4108 HBasicBlock* join = CreateJoin(cond_true, cond_false, expr->id());
4109 set_current_block(join);
4110 if (join != NULL && !ast_context()->IsEffect()) {
4111 return ast_context()->ReturnValue(Pop());
// Classifies how a global variable should be accessed. 'this' references
// and compilations without a global object, as well as lookups that miss,
// are non-NORMAL, read-only stores, or resolve on a different holder,
// cannot use a property cell. NOTE(review): the return statements for
// each branch are elided from this listing; presumably the early cases
// return the generic access kind and the fall-through returns kUseCell —
// confirm against the full source.
4117 HGraphBuilder::GlobalPropertyAccess HGraphBuilder::LookupGlobalProperty(
4118 Variable* var, LookupResult* lookup, bool is_store) {
4119 if (var->is_this() || !info()->has_global_object()) {
4122 Handle<GlobalObject> global(info()->global_object());
4123 global->Lookup(*var->name(), lookup);
4124 if (!lookup->IsFound() ||
4125 lookup->type() != NORMAL ||
4126 (is_store && lookup->IsReadOnly()) ||
4127 lookup->holder() != *global) {
// Emits a chain of HOuterContext loads walking from the current context
// out to the context that holds |var| (distance computed from the scope
// chain). The final return of the reached context is elided from this
// listing.
4135 HValue* HGraphBuilder::BuildContextChainWalk(Variable* var) {
4136 ASSERT(var->IsContextSlot());
4137 HValue* context = environment()->LookupContext();
4138 int length = info()->scope()->ContextChainLength(var->scope());
4139 while (length-- > 0) {
4140 HInstruction* context_instruction = new(zone()) HOuterContext(context);
4141 AddInstruction(context_instruction);
4142 context = context_instruction;
// Translates a variable reference, dispatching on where the variable
// lives: unallocated (global — via property cell or generic IC load),
// parameter/local (environment slot), context slot (context chain walk),
// or dynamic lookup (bailout). Harmony-bound globals and reads of
// uninitialized (hole-valued) let/const bindings also bail out.
4148 void HGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
4149 ASSERT(!HasStackOverflow());
4150 ASSERT(current_block() != NULL);
4151 ASSERT(current_block()->HasPredecessor());
4152 Variable* variable = expr->var();
4153 switch (variable->location()) {
4154 case Variable::UNALLOCATED: {
4155 if (variable->mode() == LET || variable->mode() == CONST_HARMONY) {
4156 return Bailout("reference to global harmony declared variable");
4158 // Handle known global constants like 'undefined' specially to avoid a
4159 // load from a global cell for them.
4160 Handle<Object> constant_value =
4161 isolate()->factory()->GlobalConstantFor(variable->name());
4162 if (!constant_value.is_null()) {
4164 new(zone()) HConstant(constant_value, Representation::Tagged());
4165 return ast_context()->ReturnInstruction(instr, expr->id());
4168 LookupResult lookup(isolate());
4169 GlobalPropertyAccess type =
4170 LookupGlobalProperty(variable, &lookup, false);
// Access-checked globals cannot use the cell fast path.
4172 if (type == kUseCell &&
4173 info()->global_object()->IsAccessCheckNeeded()) {
4177 if (type == kUseCell) {
4178 Handle<GlobalObject> global(info()->global_object());
4179 Handle<JSGlobalPropertyCell> cell(global->GetPropertyCell(&lookup));
4180 HLoadGlobalCell* instr =
4181 new(zone()) HLoadGlobalCell(cell, lookup.GetPropertyDetails());
4182 return ast_context()->ReturnInstruction(instr, expr->id());
4184 HValue* context = environment()->LookupContext();
4185 HGlobalObject* global_object = new(zone()) HGlobalObject(context);
4186 if (variable->is_qml_global()) global_object->set_qml_global(true);
4187 AddInstruction(global_object);
4188 HLoadGlobalGeneric* instr =
4189 new(zone()) HLoadGlobalGeneric(context,
4192 ast_context()->is_for_typeof());
4193 instr->set_position(expr->position());
4194 return ast_context()->ReturnInstruction(instr, expr->id());
4198 case Variable::PARAMETER:
4199 case Variable::LOCAL: {
4200 HValue* value = environment()->Lookup(variable);
// The hole marks an uninitialized let/const binding (TDZ).
4201 if (value == graph()->GetConstantHole()) {
4202 ASSERT(variable->mode() == CONST ||
4203 variable->mode() == CONST_HARMONY ||
4204 variable->mode() == LET);
4205 return Bailout("reference to uninitialized variable");
4207 return ast_context()->ReturnValue(value);
4210 case Variable::CONTEXT: {
4211 HValue* context = BuildContextChainWalk(variable);
4212 HLoadContextSlot* instr = new(zone()) HLoadContextSlot(context, variable);
4213 return ast_context()->ReturnInstruction(instr, expr->id());
4216 case Variable::LOOKUP:
4217 return Bailout("reference to a variable which requires dynamic lookup");
// Translates a literal expression into a tagged HConstant.
4222 void HGraphBuilder::VisitLiteral(Literal* expr) {
4223 ASSERT(!HasStackOverflow());
4224 ASSERT(current_block() != NULL);
4225 ASSERT(current_block()->HasPredecessor());
4227 new(zone()) HConstant(expr->handle(), Representation::Tagged());
4228 return ast_context()->ReturnInstruction(instr, expr->id());
// Translates a regexp literal into an HRegExpLiteral instruction
// (pattern/flags arguments are elided from this listing).
4232 void HGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
4233 ASSERT(!HasStackOverflow());
4234 ASSERT(current_block() != NULL);
4235 ASSERT(current_block()->HasPredecessor());
4236 HValue* context = environment()->LookupContext();
4238 HRegExpLiteral* instr = new(zone()) HRegExpLiteral(context,
4241 expr->literal_index());
4242 return ast_context()->ReturnInstruction(instr, expr->id());
4246 // Determines whether the given array or object literal boilerplate satisfies
4247 // all limits to be considered for fast deep-copying and computes the total
4248 // size of all objects that are part of the graph.
// Recursive: descends into element values and in-object property values that
// are themselves JSObjects, charging each visited value against the shared
// *max_properties budget and each object's storage against *total_size.
// Returns false as soon as any limit (depth or property count) is exhausted.
// NOTE(review): the `int max_depth` and `int* total_size` parameter lines and
// several recursive-call argument lines are elided in this listing.
4249 static bool IsFastLiteral(Handle<JSObject> boilerplate,
4251 int* max_properties,
4253 ASSERT(max_depth >= 0 && *max_properties >= 0);
4254 if (max_depth == 0) return false;
// COW element stores are shared, not copied, so only non-COW backing stores
// contribute to the size estimate.
4256 Handle<FixedArrayBase> elements(boilerplate->elements());
4257 if (elements->length() > 0 &&
4258 elements->map() != boilerplate->GetHeap()->fixed_cow_array_map()) {
4259 if (boilerplate->HasFastDoubleElements()) {
4260 *total_size += FixedDoubleArray::SizeFor(elements->length());
4261 } else if (boilerplate->HasFastElements()) {
4262 Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
4263 int length = elements->length();
4264 for (int i = 0; i < length; i++) {
4265 if ((*max_properties)-- == 0) return false;
4266 Handle<Object> value(fast_elements->get(i));
4267 if (value->IsJSObject()) {
4268 Handle<JSObject> value_object = Handle<JSObject>::cast(value);
4269 if (!IsFastLiteral(value_object,
4277 *total_size += FixedArray::SizeFor(length);
// In-object named properties are walked the same way as elements.
4283 Handle<FixedArray> properties(boilerplate->properties());
4284 if (properties->length() > 0) {
4287 int nof = boilerplate->map()->inobject_properties();
4288 for (int i = 0; i < nof; i++) {
4289 if ((*max_properties)-- == 0) return false;
4290 Handle<Object> value(boilerplate->InObjectPropertyAt(i));
4291 if (value->IsJSObject()) {
4292 Handle<JSObject> value_object = Handle<JSObject>::cast(value);
4293 if (!IsFastLiteral(value_object,
4303 *total_size += boilerplate->map()->instance_size();
// Compiles an object literal. If the cached boilerplate qualifies for fast
// deep-copying (IsFastLiteral), emits HFastLiteral; otherwise falls back to
// HObjectLiteral (a runtime call). Non-compile-time property values are then
// stored individually into the freshly created object.
4308 void HGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
4309 ASSERT(!HasStackOverflow());
4310 ASSERT(current_block() != NULL);
4311 ASSERT(current_block()->HasPredecessor());
4312 Handle<JSFunction> closure = function_state()->compilation_info()->closure();
4313 HValue* context = environment()->LookupContext();
4314 HInstruction* literal;
4316 // Check whether to use fast or slow deep-copying for boilerplate.
4318 int max_properties = HFastLiteral::kMaxLiteralProperties;
4319 Handle<Object> boilerplate(closure->literals()->get(expr->literal_index()));
4320 if (boilerplate->IsJSObject() &&
4321 IsFastLiteral(Handle<JSObject>::cast(boilerplate),
4322 HFastLiteral::kMaxLiteralDepth,
4325 Handle<JSObject> boilerplate_object = Handle<JSObject>::cast(boilerplate);
4326 literal = new(zone()) HFastLiteral(context,
4329 expr->literal_index(),
// Slow path: constant properties are passed to the runtime to build the
// object from the boilerplate description.
4332 literal = new(zone()) HObjectLiteral(context,
4333 expr->constant_properties(),
4334 expr->fast_elements(),
4335 expr->literal_index(),
4337 expr->has_function());
4340 // The object is expected in the bailout environment during computation
4341 // of the property values and is the value of the entire expression.
4342 PushAndAdd(literal);
4344 expr->CalculateEmitStore();
4346 for (int i = 0; i < expr->properties()->length(); i++) {
4347 ObjectLiteral::Property* property = expr->properties()->at(i);
// Compile-time values were already baked into the boilerplate copy.
4348 if (property->IsCompileTimeValue()) continue;
4350 Literal* key = property->key();
4351 Expression* value = property->value();
4353 switch (property->kind()) {
4354 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
4355 ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
4357 case ObjectLiteral::Property::COMPUTED:
4358 if (key->handle()->IsSymbol()) {
4359 if (property->emit_store()) {
4360 property->RecordTypeFeedback(oracle());
4361 CHECK_ALIVE(VisitForValue(value));
4362 HValue* value = Pop();
4363 HInstruction* store = BuildStoreNamed(literal, value, property);
4364 AddInstruction(store);
4365 if (store->HasObservableSideEffects()) AddSimulate(key->id());
// Shadowed property: evaluate for side effects only, do not store.
4367 CHECK_ALIVE(VisitForEffect(value));
4372 case ObjectLiteral::Property::PROTOTYPE:
4373 case ObjectLiteral::Property::SETTER:
4374 case ObjectLiteral::Property::GETTER:
4375 return Bailout("Object literal with complex property");
4376 default: UNREACHABLE();
4380 if (expr->has_function()) {
4381 // Return the result of the transformation to fast properties
4382 // instead of the original since this operation changes the map
4383 // of the object. This makes sure that the original object won't
4384 // be used by other optimized code before it is transformed
4385 // (e.g. because of code motion).
4386 HToFastProperties* result = new(zone()) HToFastProperties(Pop());
4387 AddInstruction(result);
4388 return ast_context()->ReturnValue(result);
4390 return ast_context()->ReturnValue(Pop());
// Compiles an array literal. Creates (or reuses) the boilerplate array,
// chooses fast (HFastLiteral) vs. slow (HArrayLiteral) deep-copy, then stores
// each non-compile-time subexpression into the copy via a keyed store that
// matches the boilerplate's elements kind.
4395 void HGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
4396 ASSERT(!HasStackOverflow());
4397 ASSERT(current_block() != NULL);
4398 ASSERT(current_block()->HasPredecessor());
4399 ZoneList<Expression*>* subexprs = expr->values();
4400 int length = subexprs->length();
4401 HValue* context = environment()->LookupContext();
4402 HInstruction* literal;
4404 Handle<FixedArray> literals(environment()->closure()->literals());
4405 Handle<Object> raw_boilerplate(literals->get(expr->literal_index()));
// First execution of this literal site: build the boilerplate lazily and
// cache it in the closure's literals array.
4407 if (raw_boilerplate->IsUndefined()) {
4408 raw_boilerplate = Runtime::CreateArrayLiteralBoilerplate(
4409 isolate(), literals, expr->constant_elements());
4410 if (raw_boilerplate.is_null()) {
4411 return Bailout("array boilerplate creation failed");
4413 literals->set(expr->literal_index(), *raw_boilerplate);
4414 if (JSObject::cast(*raw_boilerplate)->elements()->map() ==
4415 isolate()->heap()->fixed_cow_array_map()) {
4416 isolate()->counters()->cow_arrays_created_runtime()->Increment();
4420 Handle<JSObject> boilerplate = Handle<JSObject>::cast(raw_boilerplate);
4421 ElementsKind boilerplate_elements_kind =
4422 Handle<JSObject>::cast(boilerplate)->GetElementsKind();
4424 // Check whether to use fast or slow deep-copying for boilerplate.
4426 int max_properties = HFastLiteral::kMaxLiteralProperties;
4427 if (IsFastLiteral(boilerplate,
4428 HFastLiteral::kMaxLiteralDepth,
4431 literal = new(zone()) HFastLiteral(context,
4434 expr->literal_index(),
4437 literal = new(zone()) HArrayLiteral(context,
4440 expr->literal_index(),
4444 // The array is expected in the bailout environment during computation
4445 // of the property values and is the value of the entire expression.
4446 PushAndAdd(literal);
4448 HLoadElements* elements = NULL;
4450 for (int i = 0; i < length; i++) {
4451 Expression* subexpr = subexprs->at(i);
4452 // If the subexpression is a literal or a simple materialized literal it
4453 // is already set in the cloned array.
4454 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
4456 CHECK_ALIVE(VisitForValue(subexpr));
4457 HValue* value = Pop();
4458 if (!Smi::IsValid(i)) return Bailout("Non-smi key in array literal");
// NOTE(review): the elements load is (re)emitted inside the loop here;
// elided lines may have guarded or explained this — confirm against the
// full source before assuming it is hoistable.
4460 elements = new(zone()) HLoadElements(literal);
4461 AddInstruction(elements);
4463 HValue* key = AddInstruction(
4464 new(zone()) HConstant(Handle<Object>(Smi::FromInt(i)),
4465 Representation::Integer32()));
4467 switch (boilerplate_elements_kind) {
4468 case FAST_SMI_ONLY_ELEMENTS:
4469 // Smi-only arrays need a smi check.
4470 AddInstruction(new(zone()) HCheckSmi(value));
4473 AddInstruction(new(zone()) HStoreKeyedFastElement(
4477 boilerplate_elements_kind));
4479 case FAST_DOUBLE_ELEMENTS:
4480 AddInstruction(new(zone()) HStoreKeyedFastDoubleElement(elements,
// Each element store gets its own simulate so deopt resumes at the
// correct element.
4489 AddSimulate(expr->GetIdForElement(i));
4491 return ast_context()->ReturnValue(Pop());
4495 // Sets the lookup result and returns true if the load/store can be inlined.
// A FIELD lookup is always inlinable; a MAP_TRANSITION additionally qualifies
// only for stores, and only if the map still has unused property slots to
// receive the new field.
4496 static bool ComputeLoadStoreField(Handle<Map> type,
4497 Handle<String> name,
4498 LookupResult* lookup,
4500 type->LookupInDescriptors(NULL, *name, lookup);
4501 if (!lookup->IsFound()) return false;
4502 if (lookup->type() == FIELD) return true;
4503 return is_store && (lookup->type() == MAP_TRANSITION) &&
4504 (type->unused_property_fields() > 0);
// Returns the property index for an inlinable field access. For an existing
// FIELD, the map's local field index; for a MAP_TRANSITION, the index the
// field will occupy in the transition target, rebased past the in-object
// properties (i.e. an index into the out-of-object properties backing store).
4508 static int ComputeLoadStoreFieldIndex(Handle<Map> type,
4509 Handle<String> name,
4510 LookupResult* lookup) {
4511 ASSERT(lookup->type() == FIELD || lookup->type() == MAP_TRANSITION);
4512 if (lookup->type() == FIELD) {
4513 return lookup->GetLocalFieldIndexFromMap(*type);
4515 Map* transition = lookup->GetTransitionMapFromMap(*type);
4516 return transition->PropertyIndexFor(*name) - type->inobject_properties();
// Builds an inlined monomorphic named-field store. Optionally emits the
// non-smi and map checks first; translates the lookup's field index into a
// byte offset (negative index => in-object, non-negative => properties
// array), and for map transitions attaches the transition map and marks the
// instruction as changing maps so GVN invalidates stale map checks.
4521 HInstruction* HGraphBuilder::BuildStoreNamedField(HValue* object,
4522 Handle<String> name,
4525 LookupResult* lookup,
4526 bool smi_and_map_check) {
4527 if (smi_and_map_check) {
4528 AddInstruction(new(zone()) HCheckNonSmi(object));
4529 AddInstruction(HCheckMaps::NewWithTransitions(object, type));
4532 int index = ComputeLoadStoreFieldIndex(type, name, lookup);
4533 bool is_in_object = index < 0;
4534 int offset = index * kPointerSize;
4536 // Negative property indices are in-object properties, indexed
4537 // from the end of the fixed part of the object.
4538 offset += type->instance_size();
4540 offset += FixedArray::kHeaderSize;
4542 HStoreNamedField* instr =
4543 new(zone()) HStoreNamedField(object, name, value, is_in_object, offset);
4544 if (lookup->type() == MAP_TRANSITION) {
4545 Handle<Map> transition(lookup->GetTransitionMapFromMap(*type));
4546 instr->set_transition(transition);
4547 // TODO(fschneider): Record the new map type of the object in the IR to
4548 // enable elimination of redundant checks after the transition store.
4549 instr->SetGVNFlag(kChangesMaps);
// Builds the generic (IC-based) named store, used when the receiver's shape
// is unknown or polymorphic beyond what can be inlined. Honors the function's
// strict-mode flag.
4555 HInstruction* HGraphBuilder::BuildStoreNamedGeneric(HValue* object,
4556 Handle<String> name,
4558 HValue* context = environment()->LookupContext();
4559 return new(zone()) HStoreNamedGeneric(
4564 function_strict_mode_flag());
// Builds a named store for an object-literal property: uses the recorded
// monomorphic receiver type to inline the field store when the lookup
// qualifies, otherwise falls back to the generic store IC.
4568 HInstruction* HGraphBuilder::BuildStoreNamed(HValue* object,
4570 ObjectLiteral::Property* prop) {
4571 Literal* key = prop->key()->AsLiteral();
4572 Handle<String> name = Handle<String>::cast(key->handle());
4573 ASSERT(!name.is_null());
4575 LookupResult lookup(isolate());
4576 Handle<Map> type = prop->GetReceiverType();
4577 bool is_monomorphic = prop->IsMonomorphic() &&
4578 ComputeLoadStoreField(type, name, &lookup, true);
4580 return is_monomorphic
4581 ? BuildStoreNamedField(object, name, value, type, &lookup,
4582 true) // Needs smi and map check.
4583 : BuildStoreNamedGeneric(object, name, value);
// Builds a named store for a Property or Assignment expression. Extracts the
// property (directly or from the assignment target), then inlines the field
// store when type feedback is monomorphic and the lookup qualifies; otherwise
// uses the generic store IC.
4587 HInstruction* HGraphBuilder::BuildStoreNamed(HValue* object,
4590 Property* prop = (expr->AsProperty() != NULL)
4591 ? expr->AsProperty()
4592 : expr->AsAssignment()->target()->AsProperty();
4593 Literal* key = prop->key()->AsLiteral();
4594 Handle<String> name = Handle<String>::cast(key->handle());
4595 ASSERT(!name.is_null());
4597 LookupResult lookup(isolate());
4598 SmallMapList* types = expr->GetReceiverTypes();
4599 bool is_monomorphic = expr->IsMonomorphic() &&
4600 ComputeLoadStoreField(types->first(), name, &lookup, true);
4602 return is_monomorphic
4603 ? BuildStoreNamedField(object, name, value, types->first(), &lookup,
4604 true) // Needs smi and map check.
4605 : BuildStoreNamedGeneric(object, name, value);
// Builds a load for a named property whose receiver has several recorded
// maps. If every map resolves the property to the same field offset (and the
// same in-object/out-of-object placement), the load is emitted as a single
// monomorphic field load guarded by a multi-map check; otherwise an
// HLoadNamedFieldPolymorphic handles the dispatch at runtime.
4609 void HGraphBuilder::HandlePolymorphicLoadNamedField(Property* expr,
4611 SmallMapList* types,
4612 Handle<String> name) {
4614 int previous_field_offset = 0;
4615 bool previous_field_is_in_object = false;
4616 bool is_monomorphic_field = true;
4618 LookupResult lookup(isolate());
4619 for (int i = 0; i < types->length() && count < kMaxLoadPolymorphism; ++i) {
4621 if (ComputeLoadStoreField(map, name, &lookup, false)) {
4622 int index = ComputeLoadStoreFieldIndex(map, name, &lookup);
4623 bool is_in_object = index < 0;
4624 int offset = index * kPointerSize;
4626 // Negative property indices are in-object properties, indexed
4627 // from the end of the fixed part of the object.
4628 offset += map->instance_size();
4630 offset += FixedArray::kHeaderSize;
// NOTE(review): elided lines likely distinguished the first iteration
// (recording the offset) from later ones (comparing against it).
4633 previous_field_offset = offset;
4634 previous_field_is_in_object = is_in_object;
4635 } else if (is_monomorphic_field) {
4636 is_monomorphic_field = (offset == previous_field_offset) &&
4637 (is_in_object == previous_field_is_in_object);
4643 // Use monomorphic load if property lookup results in the same field index
4644 // for all maps. Requires special map check on the set of all handled maps.
4645 HInstruction* instr;
4646 if (count == types->length() && is_monomorphic_field) {
4647 AddInstruction(new(zone()) HCheckMaps(object, types));
4648 instr = BuildLoadNamedField(object, expr, map, &lookup, false);
4650 HValue* context = environment()->LookupContext();
4651 instr = new(zone()) HLoadNamedFieldPolymorphic(context,
4657 instr->set_position(expr->position());
4658 return ast_context()->ReturnInstruction(instr, expr->id());
// Builds a store for a named property whose receiver has several recorded
// maps. Emits a chain of map-compare diamonds, one inlined field store per
// handled map, all merging into a single join block. The fall-through branch
// either deoptimizes (when every recorded map was handled) or performs a
// generic store.
4662 void HGraphBuilder::HandlePolymorphicStoreNamedField(Assignment* expr,
4665 SmallMapList* types,
4666 Handle<String> name) {
4667 // TODO(ager): We should recognize when the prototype chains for different
4668 // maps are identical. In that case we can avoid repeatedly generating the
4669 // same prototype map checks.
4671 HBasicBlock* join = NULL;
4672 for (int i = 0; i < types->length() && count < kMaxStorePolymorphism; ++i) {
4673 Handle<Map> map = types->at(i);
4674 LookupResult lookup(isolate());
4675 if (ComputeLoadStoreField(map, name, &lookup, true)) {
// First handled map: emit the shared non-smi check and create the join.
4677 AddInstruction(new(zone()) HCheckNonSmi(object)); // Only needed once.
4678 join = graph()->CreateBasicBlock();
4681 HBasicBlock* if_true = graph()->CreateBasicBlock();
4682 HBasicBlock* if_false = graph()->CreateBasicBlock();
4683 HCompareMap* compare =
4684 new(zone()) HCompareMap(object, map, if_true, if_false);
4685 current_block()->Finish(compare);
4687 set_current_block(if_true);
4688 HInstruction* instr =
4689 BuildStoreNamedField(object, name, value, map, &lookup, false);
4690 instr->set_position(expr->position());
4691 // Goto will add the HSimulate for the store.
4692 AddInstruction(instr);
4693 if (!ast_context()->IsEffect()) Push(value);
4694 current_block()->Goto(join);
// Continue matching remaining maps on the not-taken branch.
4696 set_current_block(if_false);
4700 // Finish up. Unconditionally deoptimize if we've handled all the maps we
4701 // know about and do not want to handle ones we've never seen. Otherwise
4702 // use a generic IC.
4703 if (count == types->length() && FLAG_deoptimize_uncommon_cases) {
4704 current_block()->FinishExitWithDeoptimization(HDeoptimize::kNoUses);
4706 HInstruction* instr = BuildStoreNamedGeneric(object, name, value);
4707 instr->set_position(expr->position());
4708 AddInstruction(instr);
4711 if (!ast_context()->IsEffect()) Push(value);
4712 current_block()->Goto(join);
4714 // The HSimulate for the store should not see the stored value in
4715 // effect contexts (it is not materialized at expr->id() in the
4716 // unoptimized code).
4717 if (instr->HasObservableSideEffects()) {
4718 if (ast_context()->IsEffect()) {
4719 AddSimulate(expr->id());
4722 AddSimulate(expr->id());
4726 return ast_context()->ReturnValue(value);
4730 ASSERT(join != NULL);
4731 join->SetJoinId(expr->id());
4732 set_current_block(join);
4733 if (!ast_context()->IsEffect()) return ast_context()->ReturnValue(Pop());
// Compiles a plain (non-compound) assignment whose target is a property
// access: named properties dispatch to monomorphic, polymorphic or generic
// named stores based on type feedback; keyed properties go through
// HandleKeyedElementAccess.
4737 void HGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
4738 Property* prop = expr->target()->AsProperty();
4739 ASSERT(prop != NULL);
4740 expr->RecordTypeFeedback(oracle());
4741 CHECK_ALIVE(VisitForValue(prop->obj()));
4743 HValue* value = NULL;
4744 HInstruction* instr = NULL;
4746 if (prop->key()->IsPropertyName()) {
4748 CHECK_ALIVE(VisitForValue(expr->value()));
4750 HValue* object = Pop();
4752 Literal* key = prop->key()->AsLiteral();
4753 Handle<String> name = Handle<String>::cast(key->handle());
4754 ASSERT(!name.is_null());
4756 SmallMapList* types = expr->GetReceiverTypes();
4757 LookupResult lookup(isolate());
4759 if (expr->IsMonomorphic()) {
4760 instr = BuildStoreNamed(object, value, expr);
// HandlePolymorphicStoreNamedField returns on its own, so no shared
// epilogue runs for this branch.
4762 } else if (types != NULL && types->length() > 1) {
4763 HandlePolymorphicStoreNamedField(expr, object, value, types, name);
4767 instr = BuildStoreNamedGeneric(object, name, value);
// Keyed assignment path.
4772 CHECK_ALIVE(VisitForValue(prop->key()));
4773 CHECK_ALIVE(VisitForValue(expr->value()));
4775 HValue* key = Pop();
4776 HValue* object = Pop();
4777 bool has_side_effects = false;
4778 HandleKeyedElementAccess(object, key, value, expr, expr->AssignmentId(),
4783 ASSERT(has_side_effects); // Stores always have side effects.
4784 AddSimulate(expr->AssignmentId());
4785 return ast_context()->ReturnValue(Pop());
4788 instr->set_position(expr->position());
4789 AddInstruction(instr);
4790 if (instr->HasObservableSideEffects()) AddSimulate(expr->AssignmentId());
4791 return ast_context()->ReturnValue(Pop());
4795 // Because not every expression has a position and there is not common
4796 // superclass of Assignment and CountOperation, we cannot just pass the
4797 // owning expression instead of position and ast_id separately.
// Stores to an unallocated (global) variable: a direct property-cell store
// when lookup permits (kUseCell), otherwise a generic store through the
// global object. QML globals are flagged so codegen fetches the QML global
// object instead of the JS one.
4798 void HGraphBuilder::HandleGlobalVariableAssignment(Variable* var,
4802 LookupResult lookup(isolate());
4803 GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, true);
4804 if (type == kUseCell) {
4805 Handle<GlobalObject> global(info()->global_object());
4806 Handle<JSGlobalPropertyCell> cell(global->GetPropertyCell(&lookup));
4807 HInstruction* instr =
4808 new(zone()) HStoreGlobalCell(value, cell, lookup.GetPropertyDetails());
4809 instr->set_position(position);
4810 AddInstruction(instr);
4811 if (instr->HasObservableSideEffects()) AddSimulate(ast_id);
4813 HValue* context = environment()->LookupContext();
4814 HGlobalObject* global_object = new(zone()) HGlobalObject(context);
4815 if (var->is_qml_global()) global_object->set_qml_global(true);
4816 AddInstruction(global_object);
4817 HStoreGlobalGeneric* instr =
4818 new(zone()) HStoreGlobalGeneric(context,
4822 function_strict_mode_flag());
4823 instr->set_position(position);
4824 AddInstruction(instr);
4825 ASSERT(instr->HasObservableSideEffects());
4826 if (instr->HasObservableSideEffects()) AddSimulate(ast_id);
// Compiles a compound assignment (x op= y). The target is either a variable
// proxy — handled per storage location (global, stack local, context slot,
// lookup) — or a property access, where the old value is loaded, combined
// with the RHS via BuildBinaryOperation, and stored back.
4831 void HGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
4832 Expression* target = expr->target();
4833 VariableProxy* proxy = target->AsVariableProxy();
4834 Property* prop = target->AsProperty();
4835 ASSERT(proxy == NULL || prop == NULL);
4837 // We have a second position recorded in the FullCodeGenerator to have
4838 // type feedback for the binary operation.
4839 BinaryOperation* operation = expr->binary_operation();
4841 if (proxy != NULL) {
4842 Variable* var = proxy->var();
4843 if (var->mode() == LET) {
4844 return Bailout("unsupported let compound assignment");
// The binary operation itself computes old-op-rhs; its value is left on
// the simulated stack for the store below.
4847 CHECK_ALIVE(VisitForValue(operation));
4849 switch (var->location()) {
4850 case Variable::UNALLOCATED:
4851 HandleGlobalVariableAssignment(var,
4854 expr->AssignmentId());
4857 case Variable::PARAMETER:
4858 case Variable::LOCAL:
4859 if (var->mode() == CONST) {
4860 return Bailout("unsupported const compound assignment");
4865 case Variable::CONTEXT: {
4866 // Bail out if we try to mutate a parameter value in a function
4867 // using the arguments object. We do not (yet) correctly handle the
4868 // arguments property of the function.
4869 if (info()->scope()->arguments() != NULL) {
4870 // Parameters will be allocated to context slots. We have no
4871 // direct way to detect that the variable is a parameter so we do
4872 // a linear search of the parameter variables.
4873 int count = info()->scope()->num_parameters();
4874 for (int i = 0; i < count; ++i) {
4875 if (var == info()->scope()->parameter(i)) {
4876 "assignment to parameter, function uses arguments object");
4882 HStoreContextSlot::Mode mode;
// NOTE(review): the case labels of this switch over var->mode() (LET,
// CONST, CONST_HARMONY, ...) are elided in this listing.
4884 switch (var->mode()) {
4886 mode = HStoreContextSlot::kCheckDeoptimize;
4889 return ast_context()->ReturnValue(Pop());
4891 // This case is checked statically so no need to
4892 // perform checks here
4895 mode = HStoreContextSlot::kNoCheck;
4898 HValue* context = BuildContextChainWalk(var);
4899 HStoreContextSlot* instr =
4900 new(zone()) HStoreContextSlot(context, var->index(), mode, Top());
4901 AddInstruction(instr);
4902 if (instr->HasObservableSideEffects()) {
4903 AddSimulate(expr->AssignmentId());
4908 case Variable::LOOKUP:
4909 return Bailout("compound assignment to lookup slot");
4911 return ast_context()->ReturnValue(Pop());
4913 } else if (prop != NULL) {
4914 prop->RecordTypeFeedback(oracle());
4916 if (prop->key()->IsPropertyName()) {
// Named property: load old value (monomorphic inline load when feedback
// allows), apply the operation, store the result back.
4918 CHECK_ALIVE(VisitForValue(prop->obj()));
4919 HValue* obj = Top();
4921 HInstruction* load = NULL;
4922 if (prop->IsMonomorphic()) {
4923 Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
4924 Handle<Map> map = prop->GetReceiverTypes()->first();
4925 load = BuildLoadNamed(obj, prop, map, name);
4927 load = BuildLoadNamedGeneric(obj, prop);
4930 if (load->HasObservableSideEffects()) AddSimulate(expr->CompoundLoadId());
4932 CHECK_ALIVE(VisitForValue(expr->value()));
4933 HValue* right = Pop();
4934 HValue* left = Pop();
4936 HInstruction* instr = BuildBinaryOperation(operation, left, right);
4938 if (instr->HasObservableSideEffects()) AddSimulate(operation->id());
4940 HInstruction* store = BuildStoreNamed(obj, instr, prop);
4941 AddInstruction(store);
4942 // Drop the simulated receiver and value. Return the value.
4945 if (store->HasObservableSideEffects()) AddSimulate(expr->AssignmentId());
4946 return ast_context()->ReturnValue(Pop());
// Keyed property: same load/op/store sequence through the keyed access
// helper, keeping receiver and key on the environment stack.
4950 CHECK_ALIVE(VisitForValue(prop->obj()));
4951 CHECK_ALIVE(VisitForValue(prop->key()));
4952 HValue* obj = environment()->ExpressionStackAt(1);
4953 HValue* key = environment()->ExpressionStackAt(0);
4955 bool has_side_effects = false;
4956 HValue* load = HandleKeyedElementAccess(
4957 obj, key, NULL, prop, expr->CompoundLoadId(), RelocInfo::kNoPosition,
4961 if (has_side_effects) AddSimulate(expr->CompoundLoadId());
4964 CHECK_ALIVE(VisitForValue(expr->value()));
4965 HValue* right = Pop();
4966 HValue* left = Pop();
4968 HInstruction* instr = BuildBinaryOperation(operation, left, right);
4970 if (instr->HasObservableSideEffects()) AddSimulate(operation->id());
4972 expr->RecordTypeFeedback(oracle());
4973 HandleKeyedElementAccess(obj, key, instr, expr, expr->AssignmentId(),
4974 RelocInfo::kNoPosition,
4978 // Drop the simulated receiver, key, and value. Return the value.
4981 ASSERT(has_side_effects); // Stores always have side effects.
4982 AddSimulate(expr->AssignmentId());
4983 return ast_context()->ReturnValue(Pop());
4987 return Bailout("invalid lhs in compound assignment");
// Compiles an assignment expression. Compound assignments and property
// targets are delegated to their handlers; variable-proxy targets are handled
// inline per mode (const/const-harmony/let checks) and per storage location
// (global, stack-allocated, context slot, dynamic lookup).
4992 void HGraphBuilder::VisitAssignment(Assignment* expr) {
4993 ASSERT(!HasStackOverflow());
4994 ASSERT(current_block() != NULL);
4995 ASSERT(current_block()->HasPredecessor());
4996 VariableProxy* proxy = expr->target()->AsVariableProxy();
4997 Property* prop = expr->target()->AsProperty();
4998 ASSERT(proxy == NULL || prop == NULL);
5000 if (expr->is_compound()) {
5001 HandleCompoundAssignment(expr);
5006 HandlePropertyAssignment(expr);
5007 } else if (proxy != NULL) {
5008 Variable* var = proxy->var();
// Non-initializing assignments to legacy const are no-ops that still
// evaluate the RHS for its value/side effects.
5010 if (var->mode() == CONST) {
5011 if (expr->op() != Token::INIT_CONST) {
5012 CHECK_ALIVE(VisitForValue(expr->value()));
5013 return ast_context()->ReturnValue(Pop());
5016 if (var->IsStackAllocated()) {
5017 // We insert a use of the old value to detect unsupported uses of const
5018 // variables (e.g. initialization inside a loop).
5019 HValue* old_value = environment()->Lookup(var);
5020 AddInstruction(new HUseConst(old_value));
5022 } else if (var->mode() == CONST_HARMONY) {
5023 if (expr->op() != Token::INIT_CONST_HARMONY) {
5024 return Bailout("non-initializer assignment to const");
5028 if (proxy->IsArguments()) return Bailout("assignment to arguments");
5030 // Handle the assignment.
5031 switch (var->location()) {
5032 case Variable::UNALLOCATED:
5033 CHECK_ALIVE(VisitForValue(expr->value()));
5034 HandleGlobalVariableAssignment(var,
5037 expr->AssignmentId());
5038 return ast_context()->ReturnValue(Pop());
5040 case Variable::PARAMETER:
5041 case Variable::LOCAL: {
5042 // Perform an initialization check for let declared variables
// The hole marks a let variable in its temporal dead zone.
5044 if (var->mode() == LET && expr->op() == Token::ASSIGN) {
5045 HValue* env_value = environment()->Lookup(var);
5046 if (env_value == graph()->GetConstantHole()) {
5047 return Bailout("assignment to let variable before initialization");
5050 // We do not allow the arguments object to occur in a context where it
5051 // may escape, but assignments to stack-allocated locals are
5053 CHECK_ALIVE(VisitForValue(expr->value(), ARGUMENTS_ALLOWED));
5054 HValue* value = Pop();
5056 return ast_context()->ReturnValue(value);
5059 case Variable::CONTEXT: {
5060 // Bail out if we try to mutate a parameter value in a function using
5061 // the arguments object. We do not (yet) correctly handle the
5062 // arguments property of the function.
5063 if (info()->scope()->arguments() != NULL) {
5064 // Parameters will rewrite to context slots. We have no direct way
5065 // to detect that the variable is a parameter.
5066 int count = info()->scope()->num_parameters();
5067 for (int i = 0; i < count; ++i) {
5068 if (var == info()->scope()->parameter(i)) {
5069 return Bailout("assignment to parameter in arguments object");
5074 CHECK_ALIVE(VisitForValue(expr->value()));
5075 HStoreContextSlot::Mode mode;
5076 if (expr->op() == Token::ASSIGN) {
// NOTE(review): the case labels of this switch over var->mode() are
// elided in this listing.
5077 switch (var->mode()) {
5079 mode = HStoreContextSlot::kCheckDeoptimize;
5082 return ast_context()->ReturnValue(Pop());
5084 // This case is checked statically so no need to
5085 // perform checks here
5088 mode = HStoreContextSlot::kNoCheck;
5090 } else if (expr->op() == Token::INIT_VAR ||
5091 expr->op() == Token::INIT_LET ||
5092 expr->op() == Token::INIT_CONST_HARMONY) {
5093 mode = HStoreContextSlot::kNoCheck;
5095 ASSERT(expr->op() == Token::INIT_CONST);
5097 mode = HStoreContextSlot::kCheckIgnoreAssignment;
5100 HValue* context = BuildContextChainWalk(var);
5101 HStoreContextSlot* instr = new(zone()) HStoreContextSlot(
5102 context, var->index(), mode, Top());
5103 AddInstruction(instr);
5104 if (instr->HasObservableSideEffects()) {
5105 AddSimulate(expr->AssignmentId());
5107 return ast_context()->ReturnValue(Pop());
5110 case Variable::LOOKUP:
5111 return Bailout("assignment to LOOKUP variable");
5114 return Bailout("invalid left-hand side in assignment");
// Compiles a throw expression: evaluates the exception value, emits HThrow,
// and terminates the current block with an abnormal exit since control never
// falls through a throw.
5119 void HGraphBuilder::VisitThrow(Throw* expr) {
5120 ASSERT(!HasStackOverflow());
5121 ASSERT(current_block() != NULL);
5122 ASSERT(current_block()->HasPredecessor());
5123 // We don't optimize functions with invalid left-hand sides in
5124 // assignments, count operations, or for-in. Consequently throw can
5125 // currently only occur in an effect context.
5126 ASSERT(ast_context()->IsEffect());
5127 CHECK_ALIVE(VisitForValue(expr->exception()));
5129 HValue* context = environment()->LookupContext();
5130 HValue* value = environment()->Pop();
5131 HThrow* instr = new(zone()) HThrow(context, value);
5132 instr->set_position(expr->position());
5133 AddInstruction(instr);
5134 AddSimulate(expr->id());
5135 current_block()->FinishExit(new(zone()) HAbnormalExit);
5136 set_current_block(NULL);
// Builds an inlined monomorphic field load. Optionally guards with non-smi
// and map checks; the lookup's local field index selects an in-object offset
// (negative index) or an offset into the properties array (non-negative).
5140 HLoadNamedField* HGraphBuilder::BuildLoadNamedField(HValue* object,
5143 LookupResult* lookup,
5144 bool smi_and_map_check) {
5145 if (smi_and_map_check) {
5146 AddInstruction(new(zone()) HCheckNonSmi(object));
5147 AddInstruction(HCheckMaps::NewWithTransitions(object, type));
5150 int index = lookup->GetLocalFieldIndexFromMap(*type);
5152 // Negative property indices are in-object properties, indexed
5153 // from the end of the fixed part of the object.
5154 int offset = (index * kPointerSize) + type->instance_size();
5155 return new(zone()) HLoadNamedField(object, true, offset);
5157 // Non-negative property indices are in the properties array.
5158 int offset = (index * kPointerSize) + FixedArray::kHeaderSize;
5159 return new(zone()) HLoadNamedField(object, false, offset);
// Builds the generic (IC-based) named load. If the load site has never
// executed in unoptimized code, emits a soft deopt first so the IC can
// collect feedback (skipped under --always-opt).
5164 HInstruction* HGraphBuilder::BuildLoadNamedGeneric(HValue* obj,
5166 if (expr->IsUninitialized() && !FLAG_always_opt) {
5167 AddInstruction(new(zone()) HSoftDeoptimize);
5168 current_block()->MarkAsDeoptimizing();
5170 ASSERT(expr->key()->IsPropertyName());
5171 Handle<Object> name = expr->key()->AsLiteral()->handle();
5172 HValue* context = environment()->LookupContext();
5173 return new(zone()) HLoadNamedGeneric(context, obj, name);
// Builds a monomorphic named load: an inlined field load for FIELD lookups,
// an embedded constant (guarded by non-smi + map checks) for
// CONSTANT_FUNCTION lookups, and the generic IC load otherwise.
5177 HInstruction* HGraphBuilder::BuildLoadNamed(HValue* obj,
5180 Handle<String> name) {
5181 LookupResult lookup(isolate());
5182 map->LookupInDescriptors(NULL, *name, &lookup);
5183 if (lookup.IsFound() && lookup.type() == FIELD) {
5184 return BuildLoadNamedField(obj,
5189 } else if (lookup.IsFound() && lookup.type() == CONSTANT_FUNCTION) {
5190 AddInstruction(new(zone()) HCheckNonSmi(obj));
5191 AddInstruction(HCheckMaps::NewWithTransitions(obj, map));
5192 Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*map));
5193 return new(zone()) HConstant(function, Representation::Tagged());
5195 return BuildLoadNamedGeneric(obj, expr);
// Builds the generic (IC-based) keyed load for object[key].
5200 HInstruction* HGraphBuilder::BuildLoadKeyedGeneric(HValue* object,
5202 HValue* context = environment()->LookupContext();
5203 return new(zone()) HLoadKeyedGeneric(context, object, key);
// Builds a load or store on an external (typed) array's backing store,
// given an already bounds-checked key. Stores first coerce the value to the
// array's element representation: clamp to uint8 for pixel arrays, truncate
// to int32 for the integer kinds; float/double kinds take the value as-is.
5207 HInstruction* HGraphBuilder::BuildExternalArrayElementAccess(
5208 HValue* external_elements,
5209 HValue* checked_key,
5211 ElementsKind elements_kind,
5214 ASSERT(val != NULL);
5215 switch (elements_kind) {
5216 case EXTERNAL_PIXEL_ELEMENTS: {
5217 val = AddInstruction(new(zone()) HClampToUint8(val));
5220 case EXTERNAL_BYTE_ELEMENTS:
5221 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
5222 case EXTERNAL_SHORT_ELEMENTS:
5223 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
5224 case EXTERNAL_INT_ELEMENTS:
5225 case EXTERNAL_UNSIGNED_INT_ELEMENTS: {
5226 if (!val->representation().IsInteger32()) {
5227 val = AddInstruction(new(zone()) HChange(
5229 Representation::Integer32(),
5230 true, // Truncate to int32.
5231 false)); // Don't deoptimize undefined (irrelevant here).
5235 case EXTERNAL_FLOAT_ELEMENTS:
5236 case EXTERNAL_DOUBLE_ELEMENTS:
// The non-external kinds are unreachable here.
5238 case FAST_SMI_ONLY_ELEMENTS:
5240 case FAST_DOUBLE_ELEMENTS:
5241 case DICTIONARY_ELEMENTS:
5242 case NON_STRICT_ARGUMENTS_ELEMENTS:
5246 return new(zone()) HStoreKeyedSpecializedArrayElement(
5247 external_elements, checked_key, val, elements_kind);
// Load path: no value coercion needed.
5249 ASSERT(val == NULL);
5250 return new(zone()) HLoadKeyedSpecializedArrayElement(
5251 external_elements, checked_key, elements_kind);
// Builds a load or store on a fast-elements backing store, given an already
// bounds-checked key. Double kinds use the unboxed-double instructions;
// smi-only stores are preceded by a smi check on the value.
5256 HInstruction* HGraphBuilder::BuildFastElementAccess(HValue* elements,
5257 HValue* checked_key,
5259 ElementsKind elements_kind,
5262 ASSERT(val != NULL);
5263 switch (elements_kind) {
5264 case FAST_DOUBLE_ELEMENTS:
5265 return new(zone()) HStoreKeyedFastDoubleElement(
5266 elements, checked_key, val);
5267 case FAST_SMI_ONLY_ELEMENTS:
5268 // Smi-only arrays need a smi check.
5269 AddInstruction(new(zone()) HCheckSmi(val));
5272 return new(zone()) HStoreKeyedFastElement(
5273 elements, checked_key, val, elements_kind);
5279 // It's an element load (!is_store).
5280 if (elements_kind == FAST_DOUBLE_ELEMENTS) {
5281 return new(zone()) HLoadKeyedFastDoubleElement(elements, checked_key);
5282 } else { // FAST_ELEMENTS or FAST_SMI_ONLY_ELEMENTS.
5283 return new(zone()) HLoadKeyedFastElement(elements, checked_key);
// Builds an element access when the receiver's map is known. Emits a map
// check and elements load, then dispatches to external-array or fast-element
// access with a bounds check against the appropriate length (JSArray length
// for JS arrays, backing-store length otherwise). Stores into fast elements
// also verify the backing store is a plain FixedArray (guarding against COW).
5288 HInstruction* HGraphBuilder::BuildMonomorphicElementAccess(HValue* object,
5293 HInstruction* mapcheck = AddInstruction(new(zone()) HCheckMaps(object, map));
5294 bool fast_smi_only_elements = map->has_fast_smi_only_elements();
5295 bool fast_elements = map->has_fast_elements();
5296 HInstruction* elements = AddInstruction(new(zone()) HLoadElements(object));
5297 if (is_store && (fast_elements || fast_smi_only_elements)) {
5298 AddInstruction(new(zone()) HCheckMaps(
5299 elements, isolate()->factory()->fixed_array_map()));
5301 HInstruction* length = NULL;
5302 HInstruction* checked_key = NULL;
5303 if (map->has_external_array_elements()) {
5304 length = AddInstruction(new(zone()) HFixedArrayBaseLength(elements));
5305 checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
5306 HLoadExternalArrayPointer* external_elements =
5307 new(zone()) HLoadExternalArrayPointer(elements);
5308 AddInstruction(external_elements);
5309 return BuildExternalArrayElementAccess(external_elements, checked_key,
5310 val, map->elements_kind(), is_store);
5312 ASSERT(fast_smi_only_elements ||
5314 map->has_fast_double_elements());
5315 if (map->instance_type() == JS_ARRAY_TYPE) {
5316 length = AddInstruction(new(zone()) HJSArrayLength(object, mapcheck));
5318 length = AddInstruction(new(zone()) HFixedArrayBaseLength(elements));
5320 checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
5321 return BuildFastElementAccess(elements, checked_key, val,
5322 map->elements_kind(), is_store);
// Builds a keyed element access for a receiver seen with several maps.
// Strategy: (1) where possible, emit HTransitionElementsKind instructions to
// canonicalize maps that have a known transition target; (2) if only one
// "untransitionable" map remains, fall back to the monomorphic path (or a
// generic keyed IC for slow-elements maps); (3) otherwise build a chain of
// compare-and-branch blocks on the receiver's elements kind, one arm per
// elements kind recorded in type_todo[], all joining in a single block, with
// a deoptimizing exit if no arm matched. Returns the loaded value for loads
// (popped from the environment at the join) or NULL for stores;
// *has_side_effects is OR-ed with the observable side effects of every
// access emitted.
// NOTE(review): the listing is non-contiguous throughout this function
// (e.g. original lines 5327-5332 parameter lines, 5343-5344, 5355-5356,
// 5373, 5377, 5380-5382 are missing), so several closing braces and else
// branches are not visible in this extract.
5326 HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object,
5333 bool* has_side_effects) {
5334 *has_side_effects = false;
5335 AddInstruction(new(zone()) HCheckNonSmi(object));
5336 SmallMapList* maps = prop->GetReceiverTypes();
5337 bool todo_external_array = false;
// One flag per elements kind: which kinds must get a dispatch arm below.
5339 static const int kNumElementTypes = kElementsKindCount;
5340 bool type_todo[kNumElementTypes];
5341 for (int i = 0; i < kNumElementTypes; ++i) {
5342 type_todo[i] = false;
5345 // Elements_kind transition support.
5346 MapHandleList transition_target(maps->length());
5347 // Collect possible transition targets.
5348 MapHandleList possible_transitioned_maps(maps->length());
5349 for (int i = 0; i < maps->length(); ++i) {
5350 Handle<Map> map = maps->at(i);
5351 ElementsKind elements_kind = map->elements_kind();
5352 if (elements_kind == FAST_DOUBLE_ELEMENTS ||
5353 elements_kind == FAST_ELEMENTS) {
5354 possible_transitioned_maps.Add(map);
5357 // Get transition target for each map (NULL == no transition).
5358 for (int i = 0; i < maps->length(); ++i) {
5359 Handle<Map> map = maps->at(i);
5360 Handle<Map> transitioned_map =
5361 map->FindTransitionedMap(&possible_transitioned_maps);
5362 transition_target.Add(transitioned_map);
5365 int num_untransitionable_maps = 0;
5366 Handle<Map> untransitionable_map;
5367 for (int i = 0; i < maps->length(); ++i) {
5368 Handle<Map> map = maps->at(i);
5369 ASSERT(map->IsMap());
5370 if (!transition_target.at(i).is_null()) {
// Map has a transition target: normalize the receiver up front instead of
// adding a dispatch arm for the source kind.
5371 AddInstruction(new(zone()) HTransitionElementsKind(
5372 object, map, transition_target.at(i)));
5374 type_todo[map->elements_kind()] = true;
5375 if (map->elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND) {
5376 todo_external_array = true;
5378 num_untransitionable_maps++;
5379 untransitionable_map = map;
5383 // If only one map is left after transitioning, handle this case
5385 if (num_untransitionable_maps == 1) {
5386 HInstruction* instr = NULL;
5387 if (untransitionable_map->has_slow_elements_kind()) {
5388 instr = AddInstruction(is_store ? BuildStoreKeyedGeneric(object, key, val)
5389 : BuildLoadKeyedGeneric(object, key));
5391 instr = AddInstruction(BuildMonomorphicElementAccess(
5392 object, key, val, untransitionable_map, is_store));
5394 *has_side_effects |= instr->HasObservableSideEffects();
5395 instr->set_position(position);
5396 return is_store ? NULL : instr;
// Truly polymorphic: build the per-elements-kind dispatch chain.
5399 AddInstruction(HCheckInstanceType::NewIsSpecObject(object));
5400 HBasicBlock* join = graph()->CreateBasicBlock();
5402 HInstruction* elements_kind_instr =
5403 AddInstruction(new(zone()) HElementsKind(object));
5404 HCompareConstantEqAndBranch* elements_kind_branch = NULL;
5405 HInstruction* elements = AddInstruction(new(zone()) HLoadElements(object));
5406 HLoadExternalArrayPointer* external_elements = NULL;
5407 HInstruction* checked_key = NULL;
5409 // Generated code assumes that FAST_SMI_ONLY_ELEMENTS, FAST_ELEMENTS,
5410 // FAST_DOUBLE_ELEMENTS and DICTIONARY_ELEMENTS are handled before external
5412 STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS < FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND);
5413 STATIC_ASSERT(FAST_ELEMENTS < FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND);
5414 STATIC_ASSERT(FAST_DOUBLE_ELEMENTS < FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND);
5415 STATIC_ASSERT(DICTIONARY_ELEMENTS < FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND);
5417 for (ElementsKind elements_kind = FIRST_ELEMENTS_KIND;
5418 elements_kind <= LAST_ELEMENTS_KIND;
5419 elements_kind = ElementsKind(elements_kind + 1)) {
5420 // After having handled FAST_ELEMENTS, FAST_SMI_ONLY_ELEMENTS,
5421 // FAST_DOUBLE_ELEMENTS and DICTIONARY_ELEMENTS, we need to add some code
5422 // that's executed for all external array cases.
5423 STATIC_ASSERT(LAST_EXTERNAL_ARRAY_ELEMENTS_KIND ==
5424 LAST_ELEMENTS_KIND);
5425 if (elements_kind == FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND
5426 && todo_external_array) {
// Bounds check and external-array pointer are shared by all external arms.
5427 HInstruction* length =
5428 AddInstruction(new(zone()) HFixedArrayBaseLength(elements));
5429 checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
5430 external_elements = new(zone()) HLoadExternalArrayPointer(elements);
5431 AddInstruction(external_elements);
5433 if (type_todo[elements_kind]) {
5434 HBasicBlock* if_true = graph()->CreateBasicBlock();
5435 HBasicBlock* if_false = graph()->CreateBasicBlock();
5436 elements_kind_branch = new(zone()) HCompareConstantEqAndBranch(
5437 elements_kind_instr, elements_kind, Token::EQ_STRICT);
5438 elements_kind_branch->SetSuccessorAt(0, if_true);
5439 elements_kind_branch->SetSuccessorAt(1, if_false);
5440 current_block()->Finish(elements_kind_branch);
5442 set_current_block(if_true);
5443 HInstruction* access;
5444 if (elements_kind == FAST_SMI_ONLY_ELEMENTS ||
5445 elements_kind == FAST_ELEMENTS ||
5446 elements_kind == FAST_DOUBLE_ELEMENTS) {
5447 if (is_store && elements_kind != FAST_DOUBLE_ELEMENTS) {
5448 AddInstruction(new(zone()) HCheckMaps(
5449 elements, isolate()->factory()->fixed_array_map(),
5450 elements_kind_branch));
5452 // TODO(jkummerow): The need for these two blocks could be avoided
5453 // in one of two ways:
5454 // (1) Introduce ElementsKinds for JSArrays that are distinct from
5455 // those for fast objects.
5456 // (2) Put the common instructions into a third "join" block. This
5457 // requires additional AST IDs that we can deopt to from inside
5458 // that join block. They must be added to the Property class (when
5459 // it's a keyed property) and registered in the full codegen.
5460 HBasicBlock* if_jsarray = graph()->CreateBasicBlock();
5461 HBasicBlock* if_fastobject = graph()->CreateBasicBlock();
5462 HHasInstanceTypeAndBranch* typecheck =
5463 new(zone()) HHasInstanceTypeAndBranch(object, JS_ARRAY_TYPE);
5464 typecheck->SetSuccessorAt(0, if_jsarray);
5465 typecheck->SetSuccessorAt(1, if_fastobject);
5466 current_block()->Finish(typecheck);
// JSArray arm: bounds-check against the array length.
5468 set_current_block(if_jsarray);
5469 HInstruction* length;
5470 length = AddInstruction(new(zone()) HJSArrayLength(object, typecheck));
5471 checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
5472 access = AddInstruction(BuildFastElementAccess(
5473 elements, checked_key, val, elements_kind, is_store));
5478 *has_side_effects |= access->HasObservableSideEffects();
5479 if (position != -1) {
5480 access->set_position(position);
5482 if_jsarray->Goto(join);
// Non-array fast-object arm: bounds-check against the backing-store length.
5484 set_current_block(if_fastobject);
5485 length = AddInstruction(new(zone()) HFixedArrayBaseLength(elements));
5486 checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
5487 access = AddInstruction(BuildFastElementAccess(
5488 elements, checked_key, val, elements_kind, is_store));
5489 } else if (elements_kind == DICTIONARY_ELEMENTS) {
5491 access = AddInstruction(BuildStoreKeyedGeneric(object, key, val));
5493 access = AddInstruction(BuildLoadKeyedGeneric(object, key));
5495 } else { // External array elements.
5496 access = AddInstruction(BuildExternalArrayElementAccess(
5497 external_elements, checked_key, val, elements_kind, is_store));
5499 *has_side_effects |= access->HasObservableSideEffects();
5500 access->set_position(position);
5504 current_block()->Goto(join);
5505 set_current_block(if_false);
5509 // Deopt if none of the cases matched.
5510 current_block()->FinishExitWithDeoptimization(HDeoptimize::kNoUses);
5511 join->SetJoinId(ast_id);
5512 set_current_block(join);
5513 return is_store ? NULL : Pop();
// Entry point for keyed (non-named) property loads and stores. Dispatches on
// the type feedback recorded for `expr`: monomorphic receivers go through the
// monomorphic builder (or a generic keyed IC for slow-elements maps),
// polymorphic receivers delegate to HandlePolymorphicElementAccess (which
// returns directly), and everything else falls back to the generic keyed
// load/store. *has_side_effects reports whether the emitted instruction has
// observable side effects, so the caller knows whether to add a simulate.
// NOTE(review): parameter lines 5518-5523 and several else/closing lines
// (5532, 5535, 5540-5546, 5550+) are missing from this listing.
5517 HValue* HGraphBuilder::HandleKeyedElementAccess(HValue* obj,
5524 bool* has_side_effects) {
5525 ASSERT(!expr->IsPropertyName());
5526 HInstruction* instr = NULL;
5527 if (expr->IsMonomorphic()) {
5528 Handle<Map> map = expr->GetMonomorphicReceiverType();
5529 if (map->has_slow_elements_kind()) {
5530 instr = is_store ? BuildStoreKeyedGeneric(obj, key, val)
5531 : BuildLoadKeyedGeneric(obj, key);
5533 AddInstruction(new(zone()) HCheckNonSmi(obj));
5534 instr = BuildMonomorphicElementAccess(obj, key, val, map, is_store);
5536 } else if (expr->GetReceiverTypes() != NULL &&
5537 !expr->GetReceiverTypes()->is_empty()) {
// Polymorphic path handles position/side-effect bookkeeping itself.
5538 return HandlePolymorphicElementAccess(
5539 obj, key, val, expr, ast_id, position, is_store, has_side_effects);
5542 instr = BuildStoreKeyedGeneric(obj, key, val);
5544 instr = BuildLoadKeyedGeneric(obj, key);
5547 instr->set_position(position);
5548 AddInstruction(instr);
5549 *has_side_effects = instr->HasObservableSideEffects();
// Builds a generic (IC-based) keyed store instruction, honoring the current
// function's strict-mode flag. Used as the fallback when no specialized
// elements access can be emitted.
// NOTE(review): lines 5555-5556 and 5559-5562 (remaining parameters and the
// HStoreKeyedGeneric argument list) are missing from this listing.
5554 HInstruction* HGraphBuilder::BuildStoreKeyedGeneric(HValue* object,
5557 HValue* context = environment()->LookupContext();
5558 return new(zone()) HStoreKeyedGeneric(
5563 function_strict_mode_flag());
// Lazily materializes the pushed-arguments sequence for an inlined function
// that accesses its arguments: inserts one HPushArgument per saved argument
// value directly after the function's HEnterInlined entry, followed by an
// HArgumentsElements marker. No-op for the outermost function (arguments are
// already on the stack) and when arguments were already pushed for this
// function state. GVN is cleared on the HArgumentsElements so it is not
// merged with other instances.
5567 void HGraphBuilder::EnsureArgumentsArePushedForAccess() {
5568 // Outermost function already has arguments on the stack.
5569 if (function_state()->outer() == NULL) return;
5571 if (function_state()->arguments_pushed()) return;
5573 // Push arguments when entering inlined function.
5574 HEnterInlined* entry = function_state()->entry();
5576 ZoneList<HValue*>* arguments_values = entry->arguments_values();
5578 HInstruction* insert_after = entry;
5579 for (int i = 0; i < arguments_values->length(); i++) {
5580 HValue* argument = arguments_values->at(i);
5581 HInstruction* push_argument = new(zone()) HPushArgument(argument);
5582 push_argument->InsertAfter(insert_after);
5583 insert_after = push_argument;
5586 HArgumentsElements* arguments_elements =
5587 new(zone()) HArgumentsElements(true);
5588 arguments_elements->ClearFlag(HValue::kUseGVN);
5589 arguments_elements->InsertAfter(insert_after);
5590 function_state()->set_arguments_elements(arguments_elements);
// Tries to compile a property access on the `arguments` object directly,
// without materializing the object. Handles two shapes: `arguments.length`
// (becomes HArgumentsLength, or a constant parameter count when inlined) and
// `arguments[i]` (becomes a bounds-checked HAccessArgumentsAt; when inlined,
// the arguments are first force-pushed via EnsureArgumentsArePushedForAccess
// and the length is a constant). Returns false if the receiver is not a
// stack-allocated arguments variable, true once the access has been emitted
// (result is delivered through the ast_context).
// NOTE(review): several lines are missing from this listing (e.g. 5599-5601
// after the CheckFlag test, 5606, 5611, 5618-5619, 5633, 5635, 5646-5647),
// including some else branches and closing braces.
5594 bool HGraphBuilder::TryArgumentsAccess(Property* expr) {
5595 VariableProxy* proxy = expr->obj()->AsVariableProxy();
5596 if (proxy == NULL) return false;
5597 if (!proxy->var()->IsStackAllocated()) return false;
5598 if (!environment()->Lookup(proxy->var())->CheckFlag(HValue::kIsArguments)) {
5602 HInstruction* result = NULL;
5603 if (expr->key()->IsPropertyName()) {
// Named access: only `arguments.length` is supported.
5604 Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
5605 if (!name->IsEqualTo(CStrVector("length"))) return false;
5607 if (function_state()->outer() == NULL) {
5608 HInstruction* elements = AddInstruction(
5609 new(zone()) HArgumentsElements(false));
5610 result = new(zone()) HArgumentsLength(elements);
5612 // Number of arguments without receiver.
5613 int argument_count = environment()->
5614 arguments_environment()->parameter_count() - 1;
5615 result = new(zone()) HConstant(
5616 Handle<Object>(Smi::FromInt(argument_count)),
5617 Representation::Integer32());
// Keyed access: evaluate the key, then emit a bounds-checked element read.
5620 Push(graph()->GetArgumentsObject());
5621 VisitForValue(expr->key());
5622 if (HasStackOverflow() || current_block() == NULL) return true;
5623 HValue* key = Pop();
5624 Drop(1); // Arguments object.
5625 if (function_state()->outer() == NULL) {
5626 HInstruction* elements = AddInstruction(
5627 new(zone()) HArgumentsElements(false));
5628 HInstruction* length = AddInstruction(
5629 new(zone()) HArgumentsLength(elements));
5630 HInstruction* checked_key =
5631 AddInstruction(new(zone()) HBoundsCheck(key, length));
5632 result = new(zone()) HAccessArgumentsAt(elements, length, checked_key);
5634 EnsureArgumentsArePushedForAccess();
5636 // Number of arguments without receiver.
5637 HInstruction* elements = function_state()->arguments_elements();
5638 int argument_count = environment()->
5639 arguments_environment()->parameter_count() - 1;
5640 HInstruction* length = AddInstruction(new(zone()) HConstant(
5641 Handle<Object>(Smi::FromInt(argument_count)),
5642 Representation::Integer32()));
5643 HInstruction* checked_key =
5644 AddInstruction(new(zone()) HBoundsCheck(key, length));
5645 result = new(zone()) HAccessArgumentsAt(elements, length, checked_key);
5648 ast_context()->ReturnInstruction(result, expr->id());
// AST visitor for property accesses. After recording type feedback and
// trying the direct-arguments fast path, it special-cases in order:
// array.length, string.length, string[i] (char access), Function.prototype,
// named loads (monomorphic / polymorphic / generic), and finally general
// keyed element access via HandleKeyedElementAccess, which needs explicit
// simulate handling because the keyed path can have observable side effects.
// NOTE(review): lines 5658, 5660, 5662, 5670, 5685, 5690, 5694, 5701-5706,
// 5708, 5711, 5715-5716, 5720-5725, 5727 are missing from this listing, so
// some branches and the simulate/else structure are incomplete here.
5653 void HGraphBuilder::VisitProperty(Property* expr) {
5654 ASSERT(!HasStackOverflow());
5655 ASSERT(current_block() != NULL);
5656 ASSERT(current_block()->HasPredecessor());
5657 expr->RecordTypeFeedback(oracle());
5659 if (TryArgumentsAccess(expr)) return;
5661 CHECK_ALIVE(VisitForValue(expr->obj()));
5663 HInstruction* instr = NULL;
5664 if (expr->AsProperty()->IsArrayLength()) {
5665 HValue* array = Pop();
5666 AddInstruction(new(zone()) HCheckNonSmi(array));
5667 HInstruction* mapcheck =
5668 AddInstruction(HCheckInstanceType::NewIsJSArray(array));
5669 instr = new(zone()) HJSArrayLength(array, mapcheck);
5671 } else if (expr->IsStringLength()) {
5672 HValue* string = Pop();
5673 AddInstruction(new(zone()) HCheckNonSmi(string));
5674 AddInstruction(HCheckInstanceType::NewIsString(string));
5675 instr = new(zone()) HStringLength(string);
5676 } else if (expr->IsStringAccess()) {
5677 CHECK_ALIVE(VisitForValue(expr->key()));
5678 HValue* index = Pop();
5679 HValue* string = Pop();
5680 HValue* context = environment()->LookupContext();
5681 HStringCharCodeAt* char_code =
5682 BuildStringCharCodeAt(context, string, index);
5683 AddInstruction(char_code);
5684 instr = new(zone()) HStringCharFromCode(context, char_code);
5686 } else if (expr->IsFunctionPrototype()) {
5687 HValue* function = Pop();
5688 AddInstruction(new(zone()) HCheckNonSmi(function));
5689 instr = new(zone()) HLoadFunctionPrototype(function);
5691 } else if (expr->key()->IsPropertyName()) {
// Named property load: pick monomorphic, polymorphic, or generic by feedback.
5692 Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
5693 SmallMapList* types = expr->GetReceiverTypes();
5695 HValue* obj = Pop();
5696 if (expr->IsMonomorphic()) {
5697 instr = BuildLoadNamed(obj, expr, types->first(), name);
5698 } else if (types != NULL && types->length() > 1) {
5699 AddInstruction(new(zone()) HCheckNonSmi(obj));
5700 HandlePolymorphicLoadNamedField(expr, obj, types, name);
5703 instr = BuildLoadNamedGeneric(obj, expr);
// General keyed element access.
5707 CHECK_ALIVE(VisitForValue(expr->key()));
5709 HValue* key = Pop();
5710 HValue* obj = Pop();
5712 bool has_side_effects = false;
5713 HValue* load = HandleKeyedElementAccess(
5714 obj, key, NULL, expr, expr->id(), expr->position(),
5717 if (has_side_effects) {
5718 if (ast_context()->IsEffect()) {
5719 AddSimulate(expr->id());
5722 AddSimulate(expr->id());
5726 return ast_context()->ReturnValue(load);
5728 instr->set_position(expr->position());
5729 return ast_context()->ReturnInstruction(instr, expr->id());
// Guards a call to a constant function: optionally emits a non-smi check and
// a map check (with transitions) on the receiver, then, if the call has a
// holder distinct from the receiver, checks the prototype chain starting at
// the receiver map's prototype.
// NOTE(review): lines 5734 (receiver parameter), 5739, 5743, and 5747+
// (the HCheckPrototypeMaps holder argument and closing braces) are missing
// from this listing.
5733 void HGraphBuilder::AddCheckConstantFunction(Call* expr,
5735 Handle<Map> receiver_map,
5736 bool smi_and_map_check) {
5737 // Constant functions have the nice property that the map will change if they
5738 // are overwritten. Therefore it is enough to check the map of the holder and
5740 if (smi_and_map_check) {
5741 AddInstruction(new(zone()) HCheckNonSmi(receiver));
5742 AddInstruction(HCheckMaps::NewWithTransitions(receiver, receiver_map));
5744 if (!expr->holder().is_null()) {
5745 AddInstruction(new(zone()) HCheckPrototypeMaps(
5746 Handle<JSObject>(JSObject::cast(receiver_map->prototype())),
// Sort key for polymorphic call targets: records a candidate's index in the
// receiver-types list together with its profiler tick count, AST size, and
// source size, so the candidates can be ordered by "hotness" before
// inlining decisions (see CompareHotness below).
// NOTE(review): lines 5753, 5756-5757, 5760, and 5765-5771 (the private
// member declarations index_/ticks_/ast_length_/src_length_ and the closing
// of the class) are missing from this listing.
5752 class FunctionSorter {
5754 FunctionSorter() : index_(0), ticks_(0), ast_length_(0), src_length_(0) { }
5755 FunctionSorter(int index, int ticks, int ast_length, int src_length)
5758 ast_length_(ast_length),
5759 src_length_(src_length) { }
5761 int index() const { return index_; }
5762 int ticks() const { return ticks_; }
5763 int ast_length() const { return ast_length_; }
5764 int src_length() const { return src_length_; }
// qsort comparator for FunctionSorter entries: hotter functions first.
// Orders by descending profiler ticks, then ascending AST length, then
// ascending source length (i.e. among equally-hot candidates, prefer the
// smaller function).
5774 static int CompareHotness(void const* a, void const* b) {
5775 FunctionSorter const* function1 = reinterpret_cast<FunctionSorter const*>(a);
5776 FunctionSorter const* function2 = reinterpret_cast<FunctionSorter const*>(b);
// Negated so that a higher tick count sorts earlier.
5777 int diff = function1->ticks() - function2->ticks();
5778 if (diff != 0) return -diff;
5779 diff = function1->ast_length() - function2->ast_length();
5780 if (diff != 0) return diff;
5781 return function1->src_length() - function2->src_length();
// Builds a polymorphic named call: collects up to kMaxCallPolymorphism
// candidate maps whose call target can be computed, sorts them by hotness
// (CompareHotness), and emits a chain of HCompareMap branches. Each matched
// arm either inlines the target (when polymorphic inlining is enabled and
// succeeds) or emits an HCallConstantFunction. The fall-through either
// deoptimizes (all known maps handled and FLAG_deoptimize_uncommon_cases) or
// falls back to a generic HCallNamed. All arms meet at a lazily created join
// block; in a test context the result handling differs (value pushed only
// when the context is not effect-only).
// NOTE(review): this listing is non-contiguous (e.g. original lines 5786,
// 5796, 5798, 5802, 5806-5813, 5817, 5821, 5827, 5836-5838, 5843, 5850-5851,
// 5854-5855, 5861, 5866-5867, 5871, 5873-5875, 5884+ are missing), so loop
// headers, qsort arguments, and several closing braces are incomplete here.
5785 void HGraphBuilder::HandlePolymorphicCallNamed(Call* expr,
5787 SmallMapList* types,
5788 Handle<String> name) {
5789 // TODO(ager): We should recognize when the prototype chains for different
5790 // maps are identical. In that case we can avoid repeatedly generating the
5791 // same prototype map checks.
5792 int argument_count = expr->arguments()->length() + 1; // Includes receiver.
5793 HBasicBlock* join = NULL;
5794 FunctionSorter order[kMaxCallPolymorphism];
5795 int ordered_functions = 0;
5797 i < types->length() && ordered_functions < kMaxCallPolymorphism;
5799 Handle<Map> map = types->at(i);
5800 if (expr->ComputeTarget(map, name)) {
5801 order[ordered_functions++] =
5803 expr->target()->shared()->profiler_ticks(),
5804 InliningAstSize(expr->target()),
5805 expr->target()->shared()->SourceSize());
// Sort candidates so the hottest targets are tried first.
5809 qsort(reinterpret_cast<void*>(&order[0]),
5814 for (int fn = 0; fn < ordered_functions; ++fn) {
5815 int i = order[fn].index();
5816 Handle<Map> map = types->at(i);
5818 // Only needed once.
5819 AddInstruction(new(zone()) HCheckNonSmi(receiver));
5820 join = graph()->CreateBasicBlock();
5822 HBasicBlock* if_true = graph()->CreateBasicBlock();
5823 HBasicBlock* if_false = graph()->CreateBasicBlock();
5824 HCompareMap* compare =
5825 new(zone()) HCompareMap(receiver, map, if_true, if_false);
5826 current_block()->Finish(compare);
5828 set_current_block(if_true);
// Re-establish the target for this map before checking/inlining it.
5829 expr->ComputeTarget(map, name);
5830 AddCheckConstantFunction(expr, receiver, map, false);
5831 if (FLAG_trace_inlining && FLAG_polymorphic_inlining) {
5832 Handle<JSFunction> caller = info()->closure();
5833 SmartArrayPointer<char> caller_name =
5834 caller->shared()->DebugName()->ToCString();
5835 PrintF("Trying to inline the polymorphic call to %s from %s\n",
5839 if (FLAG_polymorphic_inlining && TryInlineCall(expr)) {
5840 // Trying to inline will signal that we should bailout from the
5841 // entire compilation by setting stack overflow on the visitor.
5842 if (HasStackOverflow()) return;
5844 HCallConstantFunction* call =
5845 new(zone()) HCallConstantFunction(expr->target(), argument_count);
5846 call->set_position(expr->position());
5847 PreProcessCall(call);
5848 AddInstruction(call);
5849 if (!ast_context()->IsEffect()) Push(call);
5852 if (current_block() != NULL) current_block()->Goto(join);
5853 set_current_block(if_false);
5856 // Finish up. Unconditionally deoptimize if we've handled all the maps we
5857 // know about and do not want to handle ones we've never seen. Otherwise
5858 // use a generic IC.
5859 if (ordered_functions == types->length() && FLAG_deoptimize_uncommon_cases) {
5860 current_block()->FinishExitWithDeoptimization(HDeoptimize::kNoUses);
5862 HValue* context = environment()->LookupContext();
5863 HCallNamed* call = new(zone()) HCallNamed(context, name, argument_count);
5864 call->set_position(expr->position());
5865 PreProcessCall(call);
5868 AddInstruction(call);
5869 if (!ast_context()->IsEffect()) Push(call);
5870 current_block()->Goto(join);
5872 return ast_context()->ReturnInstruction(call, expr->id());
5876 // We assume that control flow is always live after an expression. So
5877 // even without predecessors to the join block, we set it as the exit
5878 // block and continue by adding instructions there.
5879 ASSERT(join != NULL);
5880 if (join->HasPredecessor()) {
5881 set_current_block(join);
5882 join->SetJoinId(expr->id());
5883 if (!ast_context()->IsEffect()) return ast_context()->ReturnValue(Pop());
5885 set_current_block(NULL);
// Logs an inlining decision when FLAG_trace_inlining is on. A NULL reason
// means the target was inlined; a non-NULL reason names why it was not.
// NOTE(review): the closing braces (original lines 5903+) are missing from
// this listing.
5890 void HGraphBuilder::TraceInline(Handle<JSFunction> target,
5891 Handle<JSFunction> caller,
5892 const char* reason) {
5893 if (FLAG_trace_inlining) {
5894 SmartArrayPointer<char> target_name =
5895 target->shared()->DebugName()->ToCString();
5896 SmartArrayPointer<char> caller_name =
5897 caller->shared()->DebugName()->ToCString();
5898 if (reason == NULL) {
5899 PrintF("Inlined %s called from %s.\n", *target_name, *caller_name);
5901 PrintF("Did not inline %s called from %s (%s).\n",
5902 *target_name, *caller_name, reason);
// Sentinel "AST size" returned by InliningAstSize for functions that must
// never be inlined; chosen far above any real node count.
5908 static const int kNotInlinable = 1000000000;
// Returns the target's AST node count if it is a plausible inlining
// candidate, or kNotInlinable when inlining is disabled, the source is too
// large, the target is not inlineable, or it contains syntax flagged as
// dont_inline/dont_optimize. Each rejection is traced via TraceInline.
// NOTE(review): lines 5913, 5918, 5925-5926, 5931, 5935-5936, and the final
// return (5938+) are missing from this listing.
5911 int HGraphBuilder::InliningAstSize(Handle<JSFunction> target) {
5912 if (!FLAG_use_inlining) return kNotInlinable;
5914 // Precondition: call is monomorphic and we have found a target with the
5915 // appropriate arity.
5916 Handle<JSFunction> caller = info()->closure();
5917 Handle<SharedFunctionInfo> target_shared(target->shared());
5919 // Do a quick check on source code length to avoid parsing large
5920 // inlining candidates.
5921 if (target_shared->SourceSize() >
5922 Min(FLAG_max_inlined_source_size, kUnlimitedMaxInlinedSourceSize)) {
5923 TraceInline(target, caller, "target text too big");
5924 return kNotInlinable;
5927 // Target must be inlineable.
5928 if (!target->IsInlineable()) {
5929 TraceInline(target, caller, "target not inlineable");
5930 return kNotInlinable;
5932 if (target_shared->dont_inline() || target_shared->dont_optimize()) {
5933 TraceInline(target, caller, "target contains unsupported syntax [early]");
5934 return kNotInlinable;
5937 int nodes_added = target_shared->ast_node_count();
// Attempts to inline `target` at the current call site. Phases:
// (1) eligibility checks — size limits, context compatibility (non-IA32),
// inlining depth, recursion, cumulative node budget;
// (2) parse and analyze the target, re-checking size/syntax/arguments-object
// constraints that lazy parsing may have hidden, and requiring all
// declarations to be inlineable;
// (3) ensure the unoptimized version has deoptimization support (compiling
// it with the full codegen if needed);
// (4) commit: set up a new FunctionState and type-feedback oracle, build the
// inlined environment (HEnterInlined, arguments materialization, IA32
// context rebinding), then visit the target's declarations and body;
// (5) wire up the function exits — value/effect/test call contexts each get
// their own join handling — and clean up the heap-allocated state.
// Returns false if any eligibility check fails (tracing the reason), true
// once the decision to inline has been made.
// NOTE(review): this listing is heavily non-contiguous (e.g. original lines
// 5945-5947, 5951, 5956-5958, 5968-5972, 5979-5986, 5989, 5993-5996,
// 6001-6003, 6010, 6012, 6014-6016, 6019-6020, 6028-6029, 6033-6035, 6038,
// 6042-6044, 6046-6047, 6049-6052, 6059-6062, 6071-6072, 6079, 6082-6085,
// 6096, 6101, 6106-6108, 6113, 6120-6121, 6124, 6126, 6135-6137, 6141-6142,
// 6145, 6148, 6154-6156, 6161, 6166-6168, 6171, 6173, 6181-6182, 6184-6185,
// 6189, 6196-6198, 6203, 6208, 6214, 6219, 6221-6222, 6226, 6228 are
// missing), so many early-return bodies, argument lists, #endif directives,
// and closing braces are not visible in this extract.
5942 bool HGraphBuilder::TryInline(CallKind call_kind,
5943 Handle<JSFunction> target,
5944 ZoneList<Expression*>* arguments,
5948 ReturnHandlingFlag return_handling) {
5949 int nodes_added = InliningAstSize(target);
5950 if (nodes_added == kNotInlinable) return false;
5952 Handle<JSFunction> caller = info()->closure();
5954 if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
5955 TraceInline(target, caller, "target AST is too large [early]");
5959 Handle<SharedFunctionInfo> target_shared(target->shared());
5961 #if !defined(V8_TARGET_ARCH_IA32)
5962 // Target must be able to use caller's context.
5963 CompilationInfo* outer_info = info();
5964 if (target->context() != outer_info->closure()->context() ||
5965 outer_info->scope()->contains_with() ||
5966 outer_info->scope()->num_heap_slots() > 0) {
5967 TraceInline(target, caller, "target requires context change");
5973 // Don't inline deeper than kMaxInliningLevels calls.
5974 HEnvironment* env = environment();
5975 int current_level = 1;
5976 while (env->outer() != NULL) {
5977 if (current_level == Compiler::kMaxInliningLevels) {
5978 TraceInline(target, caller, "inline depth limit reached");
5981 if (env->outer()->frame_type() == JS_FUNCTION) {
5987 // Don't inline recursive functions.
5988 for (FunctionState* state = function_state();
5990 state = state->outer()) {
5991 if (state->compilation_info()->closure()->shared() == *target_shared) {
5992 TraceInline(target, caller, "target is recursive");
5997 // We don't want to add more than a certain number of nodes from inlining.
5998 if (inlined_count_ > Min(FLAG_max_inlined_nodes_cumulative,
5999 kUnlimitedMaxInlinedNodesCumulative)) {
6000 TraceInline(target, caller, "cumulative AST node limit reached");
6004 // Parse and allocate variables.
6005 CompilationInfo target_info(target);
6006 if (!ParserApi::Parse(&target_info, kNoParsingFlags) ||
6007 !Scope::Analyze(&target_info)) {
6008 if (target_info.isolate()->has_pending_exception()) {
6009 // Parse or scope error, never optimize this function.
6011 target_shared->DisableOptimization();
6013 TraceInline(target, caller, "parse failure");
6017 if (target_info.scope()->num_heap_slots() > 0) {
6018 TraceInline(target, caller, "target has context-allocated variables");
6021 FunctionLiteral* function = target_info.function();
6023 // The following conditions must be checked again after re-parsing, because
6024 // earlier the information might not have been complete due to lazy parsing.
6025 nodes_added = function->ast_node_count();
6026 if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
6027 TraceInline(target, caller, "target AST is too large [late]");
6030 AstProperties::Flags* flags(function->flags());
6031 if (flags->Contains(kDontInline) || flags->Contains(kDontOptimize)) {
6032 TraceInline(target, caller, "target contains unsupported syntax [late]");
6036 // If the function uses the arguments object check that inlining of functions
6037 // with arguments object is enabled and the arguments-variable is
6039 if (function->scope()->arguments() != NULL) {
6040 if (!FLAG_inline_arguments) {
6041 TraceInline(target, caller, "target uses arguments object");
6045 if (!function->scope()->arguments()->IsStackAllocated()) {
6048 "target uses non-stackallocated arguments object");
6053 // All declarations must be inlineable.
6054 ZoneList<Declaration*>* decls = target_info.scope()->declarations();
6055 int decl_count = decls->length();
6056 for (int i = 0; i < decl_count; ++i) {
6057 if (!decls->at(i)->IsInlineable()) {
6058 TraceInline(target, caller, "target has non-trivial declaration");
6063 // Generate the deoptimization data for the unoptimized version of
6064 // the target function if we don't already have it.
6065 if (!target_shared->has_deoptimization_support()) {
6066 // Note that we compile here using the same AST that we will use for
6067 // generating the optimized inline code.
6068 target_info.EnableDeoptimizationSupport();
6069 if (!FullCodeGenerator::MakeCode(&target_info)) {
6070 TraceInline(target, caller, "could not generate deoptimization info");
6073 if (target_shared->scope_info() == ScopeInfo::Empty()) {
6074 // The scope info might not have been set if a lazily compiled
6075 // function is inlined before being called for the first time.
6076 Handle<ScopeInfo> target_scope_info =
6077 ScopeInfo::Create(target_info.scope());
6078 target_shared->set_scope_info(*target_scope_info);
6080 target_shared->EnableDeoptimizationSupport(*target_info.code());
6081 Compiler::RecordFunctionCompilation(Logger::FUNCTION_TAG,
6086 // ----------------------------------------------------------------
6087 // After this point, we've made a decision to inline this function (so
6088 // TryInline should always return true).
6090 // Save the pending call context and type feedback oracle. Set up new ones
6091 // for the inlined function.
6092 ASSERT(target_shared->has_deoptimization_support());
6093 TypeFeedbackOracle target_oracle(
6094 Handle<Code>(target_shared->code()),
6095 Handle<Context>(target->context()->global_context()),
6097 // The function state is new-allocated because we need to delete it
6098 // in two different places.
6099 FunctionState* target_state = new FunctionState(
6100 this, &target_info, &target_oracle, return_handling);
6102 HConstant* undefined = graph()->GetConstantUndefined();
6103 HEnvironment* inner_env =
6104 environment()->CopyForInlining(target,
6105 arguments->length(),
6109 function_state()->is_construct());
6110 #ifdef V8_TARGET_ARCH_IA32
6111 // IA32 only, overwrite the caller's context in the deoptimization
6112 // environment with the correct one.
6114 // TODO(kmillikin): implement the same inlining on other platforms so we
6115 // can remove the unsightly ifdefs in this function.
6116 HConstant* context = new HConstant(Handle<Context>(target->context()),
6117 Representation::Tagged());
6118 AddInstruction(context);
6119 inner_env->BindContext(context);
6122 AddSimulate(return_id);
6123 current_block()->UpdateEnvironment(inner_env);
6125 ZoneList<HValue*>* arguments_values = NULL;
6127 // If the function uses arguments copy current arguments values
6128 // to use them for materialization.
6129 if (function->scope()->arguments() != NULL) {
6130 HEnvironment* arguments_env = inner_env->arguments_environment();
6131 int arguments_count = arguments_env->parameter_count();
6132 arguments_values = new(zone()) ZoneList<HValue*>(arguments_count);
6133 for (int i = 0; i < arguments_count; i++) {
6134 arguments_values->Add(arguments_env->Lookup(i));
6138 HEnterInlined* enter_inlined =
6139 new(zone()) HEnterInlined(target,
6140 arguments->length(),
6143 function_state()->is_construct(),
6144 function->scope()->arguments(),
6146 function_state()->set_entry(enter_inlined);
6147 AddInstruction(enter_inlined);
6149 // If the function uses arguments object create and bind one.
6150 if (function->scope()->arguments() != NULL) {
6151 ASSERT(function->scope()->arguments()->IsStackAllocated());
6152 inner_env->Bind(function->scope()->arguments(),
6153 graph()->GetArgumentsObject());
6157 VisitDeclarations(target_info.scope()->declarations());
6158 VisitStatements(function->body());
6159 if (HasStackOverflow()) {
6160 // Bail out if the inline function did, as we cannot residualize a call
6162 TraceInline(target, caller, "inline graph construction failed");
6163 target_shared->DisableOptimization();
6164 inline_bailout_ = true;
6165 delete target_state;
6169 // Update inlined nodes count.
6170 inlined_count_ += nodes_added;
6172 TraceInline(target, caller, NULL);
6174 if (current_block() != NULL) {
6175 // Add default return value (i.e. undefined for normals calls or the newly
6176 // allocated receiver for construct calls) if control can fall off the
6177 // body. In a test context, undefined is false and any JSObject is true.
6178 if (call_context()->IsValue()) {
6179 ASSERT(function_return() != NULL);
6180 HValue* return_value = function_state()->is_construct()
6183 current_block()->AddLeaveInlined(return_value,
6186 } else if (call_context()->IsEffect()) {
6187 ASSERT(function_return() != NULL);
6188 current_block()->Goto(function_return(), function_state());
6190 ASSERT(call_context()->IsTest());
6191 ASSERT(inlined_test_context() != NULL);
6192 HBasicBlock* target = function_state()->is_construct()
6193 ? inlined_test_context()->if_true()
6194 : inlined_test_context()->if_false();
6195 current_block()->Goto(target, function_state());
6199 // Fix up the function exits.
6200 if (inlined_test_context() != NULL) {
6201 HBasicBlock* if_true = inlined_test_context()->if_true();
6202 HBasicBlock* if_false = inlined_test_context()->if_false();
6204 // Pop the return test context from the expression context stack.
6205 ASSERT(ast_context() == inlined_test_context());
6206 ClearInlinedTestContext();
6207 delete target_state;
6209 // Forward to the real test context.
6210 if (if_true->HasPredecessor()) {
6211 if_true->SetJoinId(ast_id);
6212 HBasicBlock* true_target = TestContext::cast(ast_context())->if_true();
6213 if_true->Goto(true_target, function_state());
6215 if (if_false->HasPredecessor()) {
6216 if_false->SetJoinId(ast_id);
6217 HBasicBlock* false_target = TestContext::cast(ast_context())->if_false();
6218 if_false->Goto(false_target, function_state());
6220 set_current_block(NULL);
6223 } else if (function_return()->HasPredecessor()) {
6224 function_return()->SetJoinId(ast_id);
6225 set_current_block(function_return());
6227 set_current_block(NULL);
6229 delete target_state;
// Inlines a regular call expression via TryInline, choosing CALL_AS_METHOD
// vs. CALL_AS_FUNCTION based on whether the callee is a property access, and
// mapping drop_extra onto the corresponding return-handling flag.
// NOTE(review): lines 6238-6240 and 6242-6246 (the call-kind else branch and
// most TryInline arguments) are missing from this listing.
6234 bool HGraphBuilder::TryInlineCall(Call* expr, bool drop_extra) {
6235 // The function call we are inlining is a method call if the call
6236 // is a property call.
6237 CallKind call_kind = (expr->expression()->AsProperty() == NULL)
6241 return TryInline(call_kind,
6247 drop_extra ? DROP_EXTRA_ON_RETURN : NORMAL_RETURN);
// Inlines a `new` expression via TryInline with construct-call return
// handling.
// NOTE(review): lines 6253-6257 (the remaining TryInline arguments, which
// presumably pass the target, arguments, and receiver — confirm against the
// full source) are missing from this listing.
6251 bool HGraphBuilder::TryInlineConstruct(CallNew* expr, HValue* receiver) {
6252 return TryInline(CALL_AS_FUNCTION,
6258 CONSTRUCT_CALL_RETURN);
// Tries to replace a direct call to a builtin function with an inline
// HUnaryMathOperation. Only applies when the target has a builtin function
// id; the visible case handles single-argument calls, popping the argument
// and receiver (and optionally the function when drop_extra is set) and
// returning the math operation through the ast_context.
// NOTE(review): lines 6265-6272 (the switch on the builtin id selecting
// which ids reach this path) and 6282-6285 are missing from this listing,
// so the set of builtin ids handled here is not visible.
6262 bool HGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr, bool drop_extra) {
6263 if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
6264 BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
6273 if (expr->arguments()->length() == 1) {
6274 HValue* argument = Pop();
6275 HValue* context = environment()->LookupContext();
6276 Drop(1); // Receiver.
6277 HUnaryMathOperation* op =
6278 new(zone()) HUnaryMathOperation(context, argument, id);
6279 op->set_position(expr->position());
6280 if (drop_extra) Drop(1); // Optionally drop the function.
6281 ast_context()->ReturnInstruction(op, expr->id());
6286 // Not supported for inlining yet.
// Attempts to replace a builtin *method* call (e.g. Math.*, String
// charCodeAt/fromCharCode) with dedicated Hydrogen instructions in the
// calling function. Requires a known BuiltinFunctionId on the target;
// each case below also guards on argument count and the receiver check
// type before committing.
// NOTE(review): this excerpt elides interior lines (case labels,
// closing braces and some argument lists) — the switch header and
// several 'break;'/'return true;' lines are not visible here.
6293 bool HGraphBuilder::TryInlineBuiltinMethodCall(Call* expr,
6295 Handle<Map> receiver_map,
6296 CheckType check_type) {
6297 ASSERT(check_type != RECEIVER_MAP_CHECK || !receiver_map.is_null());
6298 // Try to inline calls like Math.* as operations in the calling function.
6299 if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
6300 BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
6301 int argument_count = expr->arguments()->length() + 1; // Plus receiver.
// --- String.prototype.charCodeAt / charAt-style builtins ---
6303 case kStringCharCodeAt:
6305 if (argument_count == 2 && check_type == STRING_CHECK) {
6306 HValue* index = Pop();
6307 HValue* string = Pop();
6308 HValue* context = environment()->LookupContext();
6309 ASSERT(!expr->holder().is_null());
// Guard against the String.prototype chain being modified.
6310 AddInstruction(new(zone()) HCheckPrototypeMaps(
6311 oracle()->GetPrototypeForPrimitiveCheck(STRING_CHECK),
6313 HStringCharCodeAt* char_code =
6314 BuildStringCharCodeAt(context, string, index);
6315 if (id == kStringCharCodeAt) {
6316 ast_context()->ReturnInstruction(char_code, expr->id());
// Otherwise convert the char code back to a one-char string.
6319 AddInstruction(char_code);
6320 HStringCharFromCode* result =
6321 new(zone()) HStringCharFromCode(context, char_code);
6322 ast_context()->ReturnInstruction(result, expr->id());
// --- One-argument Math.* builtins (receiver + 1 arg) ---
6334 if (argument_count == 2 && check_type == RECEIVER_MAP_CHECK) {
6335 AddCheckConstantFunction(expr, receiver, receiver_map, true);
6336 HValue* argument = Pop();
6337 HValue* context = environment()->LookupContext();
6338 Drop(1); // Receiver.
6339 HUnaryMathOperation* op =
6340 new(zone()) HUnaryMathOperation(context, argument, id);
6341 op->set_position(expr->position());
6342 ast_context()->ReturnInstruction(op, expr->id());
// --- Math.pow: strength-reduce well-known constant exponents ---
6347 if (argument_count == 3 && check_type == RECEIVER_MAP_CHECK) {
6348 AddCheckConstantFunction(expr, receiver, receiver_map, true);
6349 HValue* right = Pop();
6350 HValue* left = Pop();
6351 Pop(); // Pop receiver.
6352 HValue* context = environment()->LookupContext();
6353 HInstruction* result = NULL;
6354 // Use sqrt() if exponent is 0.5 or -0.5.
6355 if (right->IsConstant() && HConstant::cast(right)->HasDoubleValue()) {
6356 double exponent = HConstant::cast(right)->DoubleValue();
6357 if (exponent == 0.5) {
6359 new(zone()) HUnaryMathOperation(context, left, kMathPowHalf);
6360 } else if (exponent == -0.5) {
// x^-0.5 is computed as 1 / sqrt(x).
6361 HConstant* double_one =
6362 new(zone()) HConstant(Handle<Object>(Smi::FromInt(1)),
6363 Representation::Double());
6364 AddInstruction(double_one);
6365 HUnaryMathOperation* square_root =
6366 new(zone()) HUnaryMathOperation(context, left, kMathPowHalf);
6367 AddInstruction(square_root);
6368 // MathPowHalf doesn't have side effects so there's no need for
6369 // an environment simulation here.
6370 ASSERT(!square_root->HasObservableSideEffects());
6371 result = new(zone()) HDiv(context, double_one, square_root);
6372 } else if (exponent == 2.0) {
// x^2 becomes a plain multiply.
6373 result = new(zone()) HMul(context, left, left);
6375 } else if (right->IsConstant() &&
6376 HConstant::cast(right)->HasInteger32Value() &&
6377 HConstant::cast(right)->Integer32Value() == 2) {
6378 result = new(zone()) HMul(context, left, left);
// No strength reduction applied: fall back to the generic HPower.
6381 if (result == NULL) {
6382 result = new(zone()) HPower(left, right);
6384 ast_context()->ReturnInstruction(result, expr->id());
// --- Math.random: no arguments beyond the receiver ---
6389 if (argument_count == 1 && check_type == RECEIVER_MAP_CHECK) {
6390 AddCheckConstantFunction(expr, receiver, receiver_map, true);
6391 Drop(1); // Receiver.
6392 HValue* context = environment()->LookupContext();
6393 HGlobalObject* global_object = new(zone()) HGlobalObject(context);
6394 AddInstruction(global_object);
6395 HRandom* result = new(zone()) HRandom(global_object);
6396 ast_context()->ReturnInstruction(result, expr->id());
// --- Math.min / Math.max with two arguments ---
6402 if (argument_count == 3 && check_type == RECEIVER_MAP_CHECK) {
6403 AddCheckConstantFunction(expr, receiver, receiver_map, true);
6404 HValue* right = Pop();
6405 HValue* left = Pop();
6406 Pop(); // Pop receiver.
6408 HValue* left_operand = left;
6409 HValue* right_operand = right;
6411 // If we do not have two integers, we convert to double for comparison.
6412 if (!left->representation().IsInteger32() ||
6413 !right->representation().IsInteger32()) {
6414 if (!left->representation().IsDouble()) {
6415 HChange* left_convert = new(zone()) HChange(
6417 Representation::Double(),
6418 false, // Do not truncate when converting to double.
6419 true); // Deoptimize for undefined.
6420 left_convert->SetFlag(HValue::kBailoutOnMinusZero);
6421 left_operand = AddInstruction(left_convert);
6423 if (!right->representation().IsDouble()) {
6424 HChange* right_convert = new(zone()) HChange(
6426 Representation::Double(),
6427 false, // Do not truncate when converting to double.
6428 true); // Deoptimize for undefined.
6429 right_convert->SetFlag(HValue::kBailoutOnMinusZero);
6430 right_operand = AddInstruction(right_convert);
6434 ASSERT(left_operand->representation().Equals(
6435 right_operand->representation()));
6436 ASSERT(!left_operand->representation().IsTagged());
// min selects via '<', max via '>'; ties fall to the right operand.
6438 Token::Value op = (id == kMathMin) ? Token::LT : Token::GT;
6440 HCompareIDAndBranch* compare =
6441 new(zone()) HCompareIDAndBranch(left_operand, right_operand, op);
6442 compare->SetInputRepresentation(left_operand->representation());
6444 HBasicBlock* return_left = graph()->CreateBasicBlock();
6445 HBasicBlock* return_right = graph()->CreateBasicBlock();
6447 compare->SetSuccessorAt(0, return_left);
6448 compare->SetSuccessorAt(1, return_right);
6449 current_block()->Finish(compare);
6451 set_current_block(return_left);
6453 set_current_block(return_right);
6454 // The branch above always returns the right operand if either of
6455 // them is NaN, but the spec requires that max/min(NaN, X) = NaN.
6456 // We add another branch that checks if the left operand is NaN or not.
6457 if (left_operand->representation().IsDouble()) {
6458 // If left_operand != left_operand then it is NaN.
6459 HCompareIDAndBranch* compare_nan = new(zone()) HCompareIDAndBranch(
6460 left_operand, left_operand, Token::EQ);
6461 compare_nan->SetInputRepresentation(left_operand->representation());
6462 HBasicBlock* left_is_number = graph()->CreateBasicBlock();
6463 HBasicBlock* left_is_nan = graph()->CreateBasicBlock();
6464 compare_nan->SetSuccessorAt(0, left_is_number);
6465 compare_nan->SetSuccessorAt(1, left_is_nan);
6466 current_block()->Finish(compare_nan);
6467 set_current_block(left_is_nan);
6469 set_current_block(left_is_number);
// Rejoin the NaN and number paths before the final join below.
6471 return_right = CreateJoin(left_is_number, left_is_nan, expr->id());
6476 HBasicBlock* join = CreateJoin(return_left, return_right, expr->id());
6477 set_current_block(join);
6478 ast_context()->ReturnValue(Pop());
6483 // Not yet supported for inlining.
// Recognizes the pattern f.apply(receiver, arguments) and compiles it
// without materializing the arguments object. Returns false when the
// pattern does not match (caller emits a normal call); returns true
// once it has started emitting code, even on stack overflow.
// NOTE(review): interior lines are elided in this excerpt (several
// 'return false;' bodies, argument lists and closing braces).
6490 bool HGraphBuilder::TryCallApply(Call* expr) {
6491 Expression* callee = expr->expression();
6492 Property* prop = callee->AsProperty();
6493 ASSERT(prop != NULL);
// Only a monomorphic call with a plain receiver-map check qualifies.
6495 if (!expr->IsMonomorphic() || expr->check_type() != RECEIVER_MAP_CHECK) {
6498 Handle<Map> function_map = expr->GetReceiverTypes()->first();
6499 if (function_map->instance_type() != JS_FUNCTION_TYPE ||
6500 !expr->target()->shared()->HasBuiltinFunctionId() ||
6501 expr->target()->shared()->builtin_function_id() != kFunctionApply) {
// The enclosing function must itself use 'arguments'.
6505 if (info()->scope()->arguments() == NULL) return false;
6507 ZoneList<Expression*>* args = expr->arguments();
6508 if (args->length() != 2) return false;
// The second argument must be the (stack-allocated) arguments object.
6510 VariableProxy* arg_two = args->at(1)->AsVariableProxy();
6511 if (arg_two == NULL || !arg_two->var()->IsStackAllocated()) return false;
6512 HValue* arg_two_value = environment()->Lookup(arg_two->var());
6513 if (!arg_two_value->CheckFlag(HValue::kIsArguments)) return false;
6515 // Found pattern f.apply(receiver, arguments).
6516 VisitForValue(prop->obj());
6517 if (HasStackOverflow() || current_block() == NULL) return true;
6518 HValue* function = Top();
6519 AddCheckConstantFunction(expr, function, function_map, true);
6522 VisitForValue(args->at(0));
6523 if (HasStackOverflow() || current_block() == NULL) return true;
6524 HValue* receiver = Pop();
// Not inside an inlined function: use HApplyArguments, which reads
// the actual arguments adaptor frame.
6526 if (function_state()->outer() == NULL) {
6527 HInstruction* elements = AddInstruction(
6528 new(zone()) HArgumentsElements(false));
6529 HInstruction* length =
6530 AddInstruction(new(zone()) HArgumentsLength(elements));
6531 HValue* wrapped_receiver =
6532 AddInstruction(new(zone()) HWrapReceiver(receiver, function));
6533 HInstruction* result =
6534 new(zone()) HApplyArguments(function,
6538 result->set_position(expr->position());
6539 ast_context()->ReturnInstruction(result, expr->id());
6542 // We are inside inlined function and we know exactly what is inside
6543 // arguments object.
6544 HValue* context = environment()->LookupContext();
6546 HValue* wrapped_receiver =
6547 AddInstruction(new(zone()) HWrapReceiver(receiver, function));
6548 PushAndAdd(new(zone()) HPushArgument(wrapped_receiver));
6550 HEnvironment* arguments_env = environment()->arguments_environment();
// Push each known argument explicitly (index 0 is the receiver slot).
6552 int parameter_count = arguments_env->parameter_count();
6553 for (int i = 1; i < arguments_env->parameter_count(); i++) {
6554 PushAndAdd(new(zone()) HPushArgument(arguments_env->Lookup(i)));
6557 HInvokeFunction* call = new(zone()) HInvokeFunction(
6561 Drop(parameter_count);
6562 call->set_position(expr->position());
6563 ast_context()->ReturnInstruction(call, expr->id());
// Compiles a Call expression. Dispatches on the callee's shape:
// keyed property call, named property call (with builtin/monomorphic/
// polymorphic inlining attempts), known global function call, other
// monomorphic call, and finally the fully generic call.
// NOTE(review): interior lines are elided in this excerpt (several
// closing braces, else-branches and argument lists) — the control-flow
// nesting is not fully visible here.
6569 void HGraphBuilder::VisitCall(Call* expr) {
6570 ASSERT(!HasStackOverflow());
6571 ASSERT(current_block() != NULL);
6572 ASSERT(current_block()->HasPredecessor());
6573 Expression* callee = expr->expression();
6574 int argument_count = expr->arguments()->length() + 1; // Plus receiver.
6575 HInstruction* call = NULL;
6577 Property* prop = callee->AsProperty();
// --- Keyed property call: obj[key](...) ---
6579 if (!prop->key()->IsPropertyName()) {
6580 // Keyed function call.
6581 CHECK_ALIVE(VisitArgument(prop->obj()));
6583 CHECK_ALIVE(VisitForValue(prop->key()));
6584 // Push receiver and key like the non-optimized code generator expects it.
6585 HValue* key = Pop();
6586 HValue* receiver = Pop();
6590 CHECK_ALIVE(VisitArgumentList(expr->arguments()));
6592 HValue* context = environment()->LookupContext();
6593 call = new(zone()) HCallKeyed(context, key, argument_count);
6594 call->set_position(expr->position());
6595 Drop(argument_count + 1); // 1 is the key.
6596 return ast_context()->ReturnInstruction(call, expr->id());
6599 // Named function call.
6600 expr->RecordTypeFeedback(oracle(), CALL_AS_METHOD);
// First try the f.apply(receiver, arguments) fast path.
6602 if (TryCallApply(expr)) return;
6604 CHECK_ALIVE(VisitForValue(prop->obj()));
6605 CHECK_ALIVE(VisitExpressions(expr->arguments()));
6607 Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
6609 SmallMapList* types = expr->GetReceiverTypes();
6612 environment()->ExpressionStackAt(expr->arguments()->length());
// --- Monomorphic named call: try builtin inlining, then user inlining ---
6613 if (expr->IsMonomorphic()) {
6614 Handle<Map> receiver_map = (types == NULL || types->is_empty())
6615 ? Handle<Map>::null()
6617 if (TryInlineBuiltinMethodCall(expr,
6620 expr->check_type())) {
6621 if (FLAG_trace_inlining) {
6622 PrintF("Inlining builtin ");
6623 expr->target()->ShortPrint();
6629 if (CallStubCompiler::HasCustomCallGenerator(expr->target()) ||
6630 expr->check_type() != RECEIVER_MAP_CHECK) {
6631 // When the target has a custom call IC generator, use the IC,
6632 // because it is likely to generate better code. Also use the IC
6633 // when a primitive receiver check is required.
6634 HValue* context = environment()->LookupContext();
6635 call = PreProcessCall(
6636 new(zone()) HCallNamed(context, name, argument_count));
6638 AddCheckConstantFunction(expr, receiver, receiver_map, true);
6640 if (TryInlineCall(expr)) return;
6641 call = PreProcessCall(
6642 new(zone()) HCallConstantFunction(expr->target(),
// --- Polymorphic named call ---
6645 } else if (types != NULL && types->length() > 1) {
6646 ASSERT(expr->check_type() == RECEIVER_MAP_CHECK);
6647 HandlePolymorphicCallNamed(expr, receiver, types, name);
// --- Megamorphic / no feedback: generic named call ---
6651 HValue* context = environment()->LookupContext();
6652 call = PreProcessCall(
6653 new(zone()) HCallNamed(context, name, argument_count));
// --- Not a property call: function expression or variable ---
6657 expr->RecordTypeFeedback(oracle(), CALL_AS_FUNCTION);
6658 VariableProxy* proxy = expr->expression()->AsVariableProxy();
6659 bool global_call = proxy != NULL && proxy->var()->IsUnallocated();
6661 if (proxy != NULL && proxy->var()->is_possibly_eval()) {
6662 return Bailout("possible direct call to eval");
6666 Variable* var = proxy->var();
6667 bool known_global_function = false;
6668 // If there is a global property cell for the name at compile time and
6669 // access check is not enabled we assume that the function will not change
6670 // and generate optimized code for calling the function.
6671 LookupResult lookup(isolate());
6672 GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, false);
6673 if (type == kUseCell &&
6674 !info()->global_object()->IsAccessCheckNeeded()) {
6675 Handle<GlobalObject> global(info()->global_object());
6676 known_global_function = expr->ComputeGlobalTarget(global, &lookup);
6678 if (known_global_function) {
6679 // Push the global object instead of the global receiver because
6680 // code generated by the full code generator expects it.
6681 HValue* context = environment()->LookupContext();
6682 HGlobalObject* global_object = new(zone()) HGlobalObject(context);
6683 PushAndAdd(global_object);
6684 CHECK_ALIVE(VisitExpressions(expr->arguments()));
6686 CHECK_ALIVE(VisitForValue(expr->expression()));
6687 HValue* function = Pop();
6688 AddInstruction(new(zone()) HCheckFunction(function, expr->target()));
6690 // Replace the global object with the global receiver.
6691 HGlobalReceiver* global_receiver =
6692 new(zone()) HGlobalReceiver(global_object);
6693 // Index of the receiver from the top of the expression stack.
6694 const int receiver_index = argument_count - 1;
6695 AddInstruction(global_receiver);
6696 ASSERT(environment()->ExpressionStackAt(receiver_index)->
6698 environment()->SetExpressionStackAt(receiver_index, global_receiver);
6700 if (TryInlineBuiltinFunctionCall(expr, false)) { // Nothing to drop.
6701 if (FLAG_trace_inlining) {
6702 PrintF("Inlining builtin ");
6703 expr->target()->ShortPrint();
6708 if (TryInlineCall(expr)) return;
6709 call = PreProcessCall(new(zone()) HCallKnownGlobal(expr->target(),
// Unknown global target: generic HCallGlobal (with QML-global
// propagation — this build carries the QML patches).
6712 HValue* context = environment()->LookupContext();
6713 HGlobalObject* receiver = new(zone()) HGlobalObject(context);
6714 if (var->is_qml_global()) receiver->set_qml_global(true);
6715 AddInstruction(receiver);
6716 PushAndAdd(new(zone()) HPushArgument(receiver));
6717 CHECK_ALIVE(VisitArgumentList(expr->arguments()));
6719 call = new(zone()) HCallGlobal(context, var->name(), argument_count);
6720 if (var->is_qml_global()) static_cast<HCallGlobal*>(call)->set_qml_global(true);
6721 Drop(argument_count);
// --- Monomorphic non-global call ---
6724 } else if (expr->IsMonomorphic()) {
6725 // The function is on the stack in the unoptimized code during
6726 // evaluation of the arguments.
6727 CHECK_ALIVE(VisitForValue(expr->expression()));
6728 HValue* function = Top();
6729 HValue* context = environment()->LookupContext();
6730 HGlobalObject* global = new(zone()) HGlobalObject(context);
6731 HGlobalReceiver* receiver = new(zone()) HGlobalReceiver(global);
6732 AddInstruction(global);
6733 PushAndAdd(receiver);
6734 CHECK_ALIVE(VisitExpressions(expr->arguments()));
6735 AddInstruction(new(zone()) HCheckFunction(function, expr->target()));
6737 if (TryInlineBuiltinFunctionCall(expr, true)) { // Drop the function.
6738 if (FLAG_trace_inlining) {
6739 PrintF("Inlining builtin ");
6740 expr->target()->ShortPrint();
6746 if (TryInlineCall(expr, true)) { // Drop function from environment.
6749 call = PreProcessCall(
6750 new(zone()) HInvokeFunction(context,
6754 Drop(1); // The function.
// --- Fully generic call ---
6758 CHECK_ALIVE(VisitForValue(expr->expression()));
6759 HValue* function = Top();
6760 HValue* context = environment()->LookupContext();
6761 HGlobalObject* global_object = new(zone()) HGlobalObject(context);
6762 HGlobalReceiver* receiver = new(zone()) HGlobalReceiver(global_object);
6763 AddInstruction(global_object);
6764 AddInstruction(receiver);
6765 PushAndAdd(new(zone()) HPushArgument(receiver));
6766 CHECK_ALIVE(VisitArgumentList(expr->arguments()));
6768 call = new(zone()) HCallFunction(context, function, argument_count);
6769 Drop(argument_count + 1);
6773 call->set_position(expr->position());
6774 return ast_context()->ReturnInstruction(call, expr->id());
6778 // Checks whether allocation using the given constructor can be inlined.
// Requires a finalized initial map, a plain JS object instance type,
// and an instance size below HAllocateObject's supported maximum.
6779 static bool IsAllocationInlineable(Handle<JSFunction> constructor) {
6780 return constructor->has_initial_map() &&
6781 constructor->initial_map()->instance_type() == JS_OBJECT_TYPE &&
6782 constructor->initial_map()->instance_size() < HAllocateObject::kMaxSize;
// Compiles a 'new' expression. When the target is monomorphic and the
// allocation is inlineable, allocates the receiver inline
// (HAllocateObject) and attempts to inline the constructor body;
// otherwise falls back to a generic HCallNew.
// NOTE(review): interior lines are elided in this excerpt (closing
// braces and the else separating the fast and generic paths).
6786 void HGraphBuilder::VisitCallNew(CallNew* expr) {
6787 ASSERT(!HasStackOverflow());
6788 ASSERT(current_block() != NULL);
6789 ASSERT(current_block()->HasPredecessor());
6790 expr->RecordTypeFeedback(oracle());
6791 int argument_count = expr->arguments()->length() + 1; // Plus constructor.
6792 HValue* context = environment()->LookupContext();
6794 if (FLAG_inline_construct &&
6795 expr->IsMonomorphic() &&
6796 IsAllocationInlineable(expr->target())) {
6797 // The constructor function is on the stack in the unoptimized code
6798 // during evaluation of the arguments.
6799 CHECK_ALIVE(VisitForValue(expr->expression()));
6800 HValue* function = Top();
6801 CHECK_ALIVE(VisitExpressions(expr->arguments()));
6802 Handle<JSFunction> constructor = expr->target();
6803 HValue* check = AddInstruction(
6804 new(zone()) HCheckFunction(function, constructor));
6806 // Force completion of inobject slack tracking before generating
6807 // allocation code to finalize instance size.
6808 if (constructor->shared()->IsInobjectSlackTrackingInProgress()) {
6809 constructor->shared()->CompleteInobjectSlackTracking();
6812 // Replace the constructor function with a newly allocated receiver.
6813 HInstruction* receiver = new(zone()) HAllocateObject(context, constructor);
6814 // Index of the receiver from the top of the expression stack.
6815 const int receiver_index = argument_count - 1;
6816 AddInstruction(receiver);
6817 ASSERT(environment()->ExpressionStackAt(receiver_index) == function);
6818 environment()->SetExpressionStackAt(receiver_index, receiver);
6820 if (TryInlineConstruct(expr, receiver)) return;
6822 // TODO(mstarzinger): For now we remove the previous HAllocateObject and
6823 // add HPushArgument for the arguments in case inlining failed. What we
6824 // actually should do is emit HInvokeFunction on the constructor instead
6825 // of using HCallNew as a fallback.
6826 receiver->DeleteAndReplaceWith(NULL);
6827 check->DeleteAndReplaceWith(NULL);
6828 environment()->SetExpressionStackAt(receiver_index, function);
6829 HInstruction* call = PreProcessCall(
6830 new(zone()) HCallNew(context, function, argument_count));
6831 call->set_position(expr->position());
6832 return ast_context()->ReturnInstruction(call, expr->id());
6834 // The constructor function is both an operand to the instruction and an
6835 // argument to the construct call.
6836 HValue* constructor = NULL;
6837 CHECK_ALIVE(constructor = VisitArgument(expr->expression()));
6838 CHECK_ALIVE(VisitArgumentList(expr->arguments()));
6839 HInstruction* call =
6840 new(zone()) HCallNew(context, constructor, argument_count);
6841 Drop(argument_count);
6842 call->set_position(expr->position());
6843 return ast_context()->ReturnInstruction(call, expr->id());
6848 // Support for generating inlined runtime functions.
6850 // Lookup table for generators for runtime calls that are generated inline.
6851 // Elements of the table are member pointers to functions of HGraphBuilder.
// The macro expands each list entry to &HGraphBuilder::Generate<Name>,
// so the array order follows the order of the two function lists.
6852 #define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize) \
6853 &HGraphBuilder::Generate##Name,
6855 const HGraphBuilder::InlineFunctionGenerator
6856 HGraphBuilder::kInlineFunctionGenerators[] = {
6857 INLINE_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
6858 INLINE_RUNTIME_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
// Undefine the helper so it cannot leak into later code.
6860 #undef INLINE_FUNCTION_GENERATOR_ADDRESS
// Compiles a %Runtime call. INLINE intrinsics (names starting with '_')
// dispatch through kInlineFunctionGenerators via pointer-to-member;
// everything else becomes a generic HCallRuntime. JS-runtime calls
// bail out of optimized compilation.
// NOTE(review): a few interior lines (closing braces / else) are
// elided in this excerpt.
6863 void HGraphBuilder::VisitCallRuntime(CallRuntime* expr) {
6864 ASSERT(!HasStackOverflow());
6865 ASSERT(current_block() != NULL);
6866 ASSERT(current_block()->HasPredecessor());
6867 if (expr->is_jsruntime()) {
6868 return Bailout("call to a JavaScript runtime function");
6871 const Runtime::Function* function = expr->function();
6872 ASSERT(function != NULL);
6873 if (function->intrinsic_type == Runtime::INLINE) {
6874 ASSERT(expr->name()->length() > 0);
6875 ASSERT(expr->name()->Get(0) == '_');
6876 // Call to an inline function.
// Map the runtime function id onto an index into the generator table.
6877 int lookup_index = static_cast<int>(function->function_id) -
6878 static_cast<int>(Runtime::kFirstInlineFunction);
6879 ASSERT(lookup_index >= 0);
6880 ASSERT(static_cast<size_t>(lookup_index) <
6881 ARRAY_SIZE(kInlineFunctionGenerators));
6882 InlineFunctionGenerator generator = kInlineFunctionGenerators[lookup_index];
6884 // Call the inline code generator using the pointer-to-member.
6885 (this->*generator)(expr);
6887 ASSERT(function->intrinsic_type == Runtime::RUNTIME);
6888 CHECK_ALIVE(VisitArgumentList(expr->arguments()));
6890 HValue* context = environment()->LookupContext();
6891 Handle<String> name = expr->name();
6892 int argument_count = expr->arguments()->length();
6893 HCallRuntime* call =
6894 new(zone()) HCallRuntime(context, name, function, argument_count);
6895 call->set_position(RelocInfo::kNoPosition);
6896 Drop(argument_count);
6897 return ast_context()->ReturnInstruction(call, expr->id());
// Compiles a unary operation by dispatching on the token to the
// dedicated Visit* helper for each operator.
6902 void HGraphBuilder::VisitUnaryOperation(UnaryOperation* expr) {
6903 ASSERT(!HasStackOverflow());
6904 ASSERT(current_block() != NULL);
6905 ASSERT(current_block()->HasPredecessor());
6906 switch (expr->op()) {
6907 case Token::DELETE: return VisitDelete(expr);
6908 case Token::VOID: return VisitVoid(expr);
6909 case Token::TYPEOF: return VisitTypeof(expr);
6910 case Token::ADD: return VisitAdd(expr);
6911 case Token::SUB: return VisitSub(expr);
6912 case Token::BIT_NOT: return VisitBitNot(expr);
6913 case Token::NOT: return VisitNot(expr);
6914 default: UNREACHABLE();
// Compiles 'delete expr'. Property deletes emit HDeleteProperty;
// deleting a global variable bails out; deleting a local/context
// variable yields a constant (true for 'this', false otherwise);
// any other operand is evaluated for effect and yields true.
// NOTE(review): the 'if (prop != NULL) {' guard line and some closing
// braces are elided in this excerpt.
6918 void HGraphBuilder::VisitDelete(UnaryOperation* expr) {
6919 Property* prop = expr->expression()->AsProperty();
6920 VariableProxy* proxy = expr->expression()->AsVariableProxy();
6922 CHECK_ALIVE(VisitForValue(prop->obj()));
6923 CHECK_ALIVE(VisitForValue(prop->key()));
6924 HValue* key = Pop();
6925 HValue* obj = Pop();
6926 HValue* context = environment()->LookupContext();
6927 HDeleteProperty* instr = new(zone()) HDeleteProperty(context, obj, key);
6928 return ast_context()->ReturnInstruction(instr, expr->id());
6929 } else if (proxy != NULL) {
6930 Variable* var = proxy->var();
6931 if (var->IsUnallocated()) {
6932 Bailout("delete with global variable");
6933 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
6934 // Result of deleting non-global variables is false. 'this' is not
6935 // really a variable, though we implement it as one. The
6936 // subexpression does not have side effects.
6937 HValue* value = var->is_this()
6938 ? graph()->GetConstantTrue()
6939 : graph()->GetConstantFalse();
6940 return ast_context()->ReturnValue(value);
6942 Bailout("delete with non-global variable");
6945 // Result of deleting non-property, non-variable reference is true.
6946 // Evaluate the subexpression for side effects.
6947 CHECK_ALIVE(VisitForEffect(expr->expression()));
6948 return ast_context()->ReturnValue(graph()->GetConstantTrue());
// Compiles 'void expr': evaluates the operand for side effects only
// and produces the undefined constant.
6953 void HGraphBuilder::VisitVoid(UnaryOperation* expr) {
6954 CHECK_ALIVE(VisitForEffect(expr->expression()));
6955 return ast_context()->ReturnValue(graph()->GetConstantUndefined());
// Compiles 'typeof expr' as an HTypeof instruction. The operand is
// visited in typeof mode (VisitForTypeOf), which tolerates undefined
// variable references.
6959 void HGraphBuilder::VisitTypeof(UnaryOperation* expr) {
6960 CHECK_ALIVE(VisitForTypeOf(expr->expression()));
6961 HValue* value = Pop();
6962 HValue* context = environment()->LookupContext();
6963 HInstruction* instr = new(zone()) HTypeof(context, value);
6964 return ast_context()->ReturnInstruction(instr, expr->id());
// Compiles unary '+expr' as a multiplication by the constant 1, which
// performs the required ToNumber conversion.
6968 void HGraphBuilder::VisitAdd(UnaryOperation* expr) {
6969 CHECK_ALIVE(VisitForValue(expr->expression()));
6970 HValue* value = Pop();
6971 HValue* context = environment()->LookupContext();
6972 HInstruction* instr =
6973 new(zone()) HMul(context, value, graph_->GetConstant1());
6974 return ast_context()->ReturnInstruction(instr, expr->id());
// Compiles unary '-expr' as a multiplication by the constant -1. Type
// feedback picks the representation; uninitialized feedback inserts a
// soft deopt so feedback can be collected in unoptimized code.
6978 void HGraphBuilder::VisitSub(UnaryOperation* expr) {
6979 CHECK_ALIVE(VisitForValue(expr->expression()));
6980 HValue* value = Pop();
6981 HValue* context = environment()->LookupContext();
6982 HInstruction* instr =
6983 new(zone()) HMul(context, value, graph_->GetConstantMinus1());
6984 TypeInfo info = oracle()->UnaryType(expr);
6985 if (info.IsUninitialized()) {
6986 AddInstruction(new(zone()) HSoftDeoptimize);
6987 current_block()->MarkAsDeoptimizing();
6988 info = TypeInfo::Unknown();
6990 Representation rep = ToRepresentation(info);
6991 TraceRepresentation(expr->op(), info, instr, rep);
6992 instr->AssumeRepresentation(rep);
6993 return ast_context()->ReturnInstruction(instr, expr->id());
// Compiles unary '~expr' as an HBitNot. Uninitialized type feedback
// inserts a soft deopt, as in VisitSub.
6997 void HGraphBuilder::VisitBitNot(UnaryOperation* expr) {
6998 CHECK_ALIVE(VisitForValue(expr->expression()));
6999 HValue* value = Pop();
7000 TypeInfo info = oracle()->UnaryType(expr);
7001 if (info.IsUninitialized()) {
7002 AddInstruction(new(zone()) HSoftDeoptimize);
7003 current_block()->MarkAsDeoptimizing();
7005 HInstruction* instr = new(zone()) HBitNot(value);
7006 return ast_context()->ReturnInstruction(instr, expr->id());
// Compiles '!expr'. In a test context the negation is free: just swap
// the true/false targets. In an effect context only the operand's side
// effects matter. In a value context, branch on the operand and
// materialize the inverted boolean constants at a join.
// NOTE(review): a few closing braces and the VisitForControl argument
// list are elided in this excerpt.
7010 void HGraphBuilder::VisitNot(UnaryOperation* expr) {
7011 if (ast_context()->IsTest()) {
7012 TestContext* context = TestContext::cast(ast_context());
7013 VisitForControl(expr->expression(),
7014 context->if_false(),
7015 context->if_true());
7019 if (ast_context()->IsEffect()) {
7020 VisitForEffect(expr->expression());
7024 ASSERT(ast_context()->IsValue());
7025 HBasicBlock* materialize_false = graph()->CreateBasicBlock();
7026 HBasicBlock* materialize_true = graph()->CreateBasicBlock();
7027 CHECK_BAILOUT(VisitForControl(expr->expression(),
// A branch target with no predecessor was never reached; drop it.
7031 if (materialize_false->HasPredecessor()) {
7032 materialize_false->SetJoinId(expr->MaterializeFalseId());
7033 set_current_block(materialize_false);
7034 Push(graph()->GetConstantFalse());
7036 materialize_false = NULL;
7039 if (materialize_true->HasPredecessor()) {
7040 materialize_true->SetJoinId(expr->MaterializeTrueId());
7041 set_current_block(materialize_true);
7042 Push(graph()->GetConstantTrue());
7044 materialize_true = NULL;
7048 CreateJoin(materialize_false, materialize_true, expr->id());
7049 set_current_block(join);
7050 if (join != NULL) return ast_context()->ReturnValue(Pop());
// Emits the add/subtract-by-one for a count operation (++/--). The
// operand is on top of the expression stack. Picks Integer32 when the
// feedback was tagged, forces a numeric representation of the input
// when the original value must also be returned (postfix in a value
// context), and emits Top() + (+1 | -1).
// NOTE(review): the tail of the function (after line 7084's elided
// region) is not visible in this excerpt.
7054 HInstruction* HGraphBuilder::BuildIncrement(bool returns_original_input,
7055 CountOperation* expr) {
7056 // The input to the count operation is on top of the expression stack.
7057 TypeInfo info = oracle()->IncrementType(expr);
7058 Representation rep = ToRepresentation(info);
7059 if (rep.IsTagged()) {
7060 rep = Representation::Integer32();
7063 if (returns_original_input) {
7064 // We need an explicit HValue representing ToNumber(input). The
7065 // actual HChange instruction we need is (sometimes) added in a later
7066 // phase, so it is not available now to be used as an input to HAdd and
7067 // as the return value.
7068 HInstruction* number_input = new(zone()) HForceRepresentation(Pop(), rep);
7069 AddInstruction(number_input);
7073 // The addition has no side effects, so we do not need
7074 // to simulate the expression stack after this instruction.
7075 // Any later failures deopt to the load of the input or earlier.
7076 HConstant* delta = (expr->op() == Token::INC)
7077 ? graph_->GetConstant1()
7078 : graph_->GetConstantMinus1();
7079 HValue* context = environment()->LookupContext();
7080 HInstruction* instr = new(zone()) HAdd(context, Top(), delta);
7081 TraceRepresentation(expr->op(), info, instr, rep);
7082 instr->AssumeRepresentation(rep);
7083 AddInstruction(instr);
// Compiles ++/-- (prefix and postfix) on a variable or property.
// Postfix in a non-effect context keeps an extra stack slot holding
// ToNumber(input) so the original value can be returned, mirroring the
// full code generator's stack layout. Variables dispatch on location
// (global / stack / context); properties split into named and keyed
// load-increment-store sequences.
// NOTE(review): interior lines are elided in this excerpt (case
// closers, 'break;' lines, some argument lists and closing braces).
7088 void HGraphBuilder::VisitCountOperation(CountOperation* expr) {
7089 ASSERT(!HasStackOverflow());
7090 ASSERT(current_block() != NULL);
7091 ASSERT(current_block()->HasPredecessor());
7092 Expression* target = expr->expression();
7093 VariableProxy* proxy = target->AsVariableProxy();
7094 Property* prop = target->AsProperty();
7095 if (proxy == NULL && prop == NULL) {
7096 return Bailout("invalid lhs in count operation");
7099 // Match the full code generator stack by simulating an extra stack
7100 // element for postfix operations in a non-effect context. The return
7101 // value is ToNumber(input).
7102 bool returns_original_input =
7103 expr->is_postfix() && !ast_context()->IsEffect();
7104 HValue* input = NULL; // ToNumber(original_input).
7105 HValue* after = NULL; // The result after incrementing or decrementing.
// --- Variable operand ---
7107 if (proxy != NULL) {
7108 Variable* var = proxy->var();
7109 if (var->mode() == CONST) {
7110 return Bailout("unsupported count operation with const");
7112 // Argument of the count operation is a variable, not a property.
7113 ASSERT(prop == NULL);
7114 CHECK_ALIVE(VisitForValue(target));
7116 after = BuildIncrement(returns_original_input, expr);
7117 input = returns_original_input ? Top() : Pop();
7120 switch (var->location()) {
7121 case Variable::UNALLOCATED:
7122 HandleGlobalVariableAssignment(var,
7125 expr->AssignmentId());
7128 case Variable::PARAMETER:
7129 case Variable::LOCAL:
7133 case Variable::CONTEXT: {
7134 // Bail out if we try to mutate a parameter value in a function
7135 // using the arguments object. We do not (yet) correctly handle the
7136 // arguments property of the function.
7137 if (info()->scope()->arguments() != NULL) {
7138 // Parameters will rewrite to context slots. We have no direct
7139 // way to detect that the variable is a parameter so we use a
7140 // linear search of the parameter list.
7141 int count = info()->scope()->num_parameters();
7142 for (int i = 0; i < count; ++i) {
7143 if (var == info()->scope()->parameter(i)) {
7144 return Bailout("assignment to parameter in arguments object");
7149 HValue* context = BuildContextChainWalk(var);
// Harmony let/const bindings need a hole check on store.
7150 HStoreContextSlot::Mode mode =
7151 (var->mode() == LET || var->mode() == CONST_HARMONY)
7152 ? HStoreContextSlot::kCheckDeoptimize : HStoreContextSlot::kNoCheck;
7153 HStoreContextSlot* instr =
7154 new(zone()) HStoreContextSlot(context, var->index(), mode, after);
7155 AddInstruction(instr);
7156 if (instr->HasObservableSideEffects()) {
7157 AddSimulate(expr->AssignmentId());
7162 case Variable::LOOKUP:
7163 return Bailout("lookup variable in count operation");
// --- Property operand ---
7167 // Argument of the count operation is a property.
7168 ASSERT(prop != NULL);
7169 prop->RecordTypeFeedback(oracle());
// Named property: load, increment, store back.
7171 if (prop->key()->IsPropertyName()) {
7173 if (returns_original_input) Push(graph_->GetConstantUndefined());
7175 CHECK_ALIVE(VisitForValue(prop->obj()));
7176 HValue* obj = Top();
7178 HInstruction* load = NULL;
7179 if (prop->IsMonomorphic()) {
7180 Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
7181 Handle<Map> map = prop->GetReceiverTypes()->first();
7182 load = BuildLoadNamed(obj, prop, map, name);
7184 load = BuildLoadNamedGeneric(obj, prop);
7187 if (load->HasObservableSideEffects()) AddSimulate(expr->CountId());
7189 after = BuildIncrement(returns_original_input, expr);
7192 HInstruction* store = BuildStoreNamed(obj, after, prop);
7193 AddInstruction(store);
7195 // Overwrite the receiver in the bailout environment with the result
7196 // of the operation, and the placeholder with the original value if
7198 environment()->SetExpressionStackAt(0, after);
7199 if (returns_original_input) environment()->SetExpressionStackAt(1, input);
7200 if (store->HasObservableSideEffects()) AddSimulate(expr->AssignmentId());
// Keyed property: load, increment, store back through the keyed path.
7204 if (returns_original_input) Push(graph_->GetConstantUndefined());
7206 CHECK_ALIVE(VisitForValue(prop->obj()));
7207 CHECK_ALIVE(VisitForValue(prop->key()));
7208 HValue* obj = environment()->ExpressionStackAt(1);
7209 HValue* key = environment()->ExpressionStackAt(0);
7211 bool has_side_effects = false;
7212 HValue* load = HandleKeyedElementAccess(
7213 obj, key, NULL, prop, expr->CountId(), RelocInfo::kNoPosition,
7217 if (has_side_effects) AddSimulate(expr->CountId());
7219 after = BuildIncrement(returns_original_input, expr);
7222 expr->RecordTypeFeedback(oracle());
7223 HandleKeyedElementAccess(obj, key, after, expr, expr->AssignmentId(),
7224 RelocInfo::kNoPosition,
7228 // Drop the key from the bailout environment. Overwrite the receiver
7229 // with the result of the operation, and the placeholder with the
7230 // original value if necessary.
7232 environment()->SetExpressionStackAt(0, after)
7233 if (returns_original_input) environment()->SetExpressionStackAt(1, input);
7234 ASSERT(has_side_effects); // Stores always have side effects.
7235 AddSimulate(expr->AssignmentId());
7239 Drop(returns_original_input ? 2 : 1);
7240 return ast_context()->ReturnValue(expr->is_postfix() ? input : after);
// Builds the graph for a charCodeAt-style access: check the receiver is a
// non-smi string, bounds-check the index against the string length, and
// return a (not yet added) HStringCharCodeAt instruction.
// NOTE(review): listing drops lines 7245-7246 — presumably the remaining
// 'string' and 'index' parameters of the signature; confirm against source.
7244 HStringCharCodeAt* HGraphBuilder::BuildStringCharCodeAt(HValue* context,
7247 AddInstruction(new(zone()) HCheckNonSmi(string));
7248 AddInstruction(HCheckInstanceType::NewIsString(string));
7249 HStringLength* length = new(zone()) HStringLength(string);
7250 AddInstruction(length);
7251 HInstruction* checked_index =
7252 AddInstruction(new(zone()) HBoundsCheck(index, length));
// Caller is responsible for adding the returned instruction to the graph.
7253 return new(zone()) HStringCharCodeAt(context, string, checked_index);
// Builds the Hydrogen instruction for a binary operation, specialized by the
// type feedback recorded for 'expr'. Uninitialized feedback means the op was
// never executed: emit a soft deopt and fall back to unknown type info.
// NOTE(review): listing drops the 'case Token::...' labels between the
// assignments below (gaps in the numbering), plus the trailing return.
7257 HInstruction* HGraphBuilder::BuildBinaryOperation(BinaryOperation* expr,
7260 HValue* context = environment()->LookupContext();
7261 TypeInfo info = oracle()->BinaryType(expr);
7262 if (info.IsUninitialized()) {
7263 AddInstruction(new(zone()) HSoftDeoptimize);
7264 current_block()->MarkAsDeoptimizing();
7265 info = TypeInfo::Unknown();
7267 HInstruction* instr = NULL;
7268 switch (expr->op()) {
// ADD with string feedback becomes a checked HStringAdd; otherwise numeric.
7270 if (info.IsString()) {
7271 AddInstruction(new(zone()) HCheckNonSmi(left));
7272 AddInstruction(HCheckInstanceType::NewIsString(left));
7273 AddInstruction(new(zone()) HCheckNonSmi(right));
7274 AddInstruction(HCheckInstanceType::NewIsString(right));
7275 instr = new(zone()) HStringAdd(context, left, right);
7277 instr = HAdd::NewHAdd(zone(), context, left, right);
7281 instr = HSub::NewHSub(zone(), context, left, right);
7284 instr = HMul::NewHMul(zone(), context, left, right);
7287 instr = HMod::NewHMod(zone(), context, left, right);
7290 instr = HDiv::NewHDiv(zone(), context, left, right);
7292 case Token::BIT_XOR:
7293 case Token::BIT_AND:
7295 instr = HBitwise::NewHBitwise(zone(), expr->op(), context, left, right);
7298 instr = HSar::NewHSar(zone(), context, left, right);
7301 instr = HShr::NewHShr(zone(), context, left, right);
7304 instr = HShl::NewHShl(zone(), context, left, right);
7310 // If we hit an uninitialized binary op stub we will get type info
7311 // for a smi operation. If one of the operands is a constant string
7312 // do not generate code assuming it is a smi operation.
7314 ((left->IsConstant() && HConstant::cast(left)->HasStringValue()) ||
7315 (right->IsConstant() && HConstant::cast(right)->HasStringValue()))) {
7318 Representation rep = ToRepresentation(info);
7319 // We only generate either int32 or generic tagged bitwise operations.
7320 if (instr->IsBitwiseBinaryOperation() && rep.IsDouble()) {
7321 rep = Representation::Integer32();
7323 TraceRepresentation(expr->op(), info, instr, rep);
7324 instr->AssumeRepresentation(rep);
7329 // Check for the form (%_ClassOf(foo) === 'BarClass').
// Returns true only when the left operand is a %_ClassOf runtime call and
// the right operand is a string literal, compared with ===.
// NOTE(review): the final 'return true;' (line 7339) is missing from this
// listing.
7330 static bool IsClassOfTest(CompareOperation* expr) {
7331 if (expr->op() != Token::EQ_STRICT) return false;
7332 CallRuntime* call = expr->left()->AsCallRuntime();
7333 if (call == NULL) return false;
7334 Literal* literal = expr->right()->AsLiteral();
7335 if (literal == NULL) return false;
7336 if (!literal->handle()->IsString()) return false;
7337 if (!call->name()->IsEqualTo(CStrVector("_ClassOf"))) return false;
7338 ASSERT(call->arguments()->length() == 1);
// Dispatches a binary operation to the comma, logical (&&/||), or arithmetic
// handler. NOTE(review): the 'case Token::...' labels between the returns
// are missing from this listing.
7343 void HGraphBuilder::VisitBinaryOperation(BinaryOperation* expr) {
7344 ASSERT(!HasStackOverflow());
7345 ASSERT(current_block() != NULL);
7346 ASSERT(current_block()->HasPredecessor());
7347 switch (expr->op()) {
7349 return VisitComma(expr);
7352 return VisitLogicalExpression(expr);
7354 return VisitArithmeticExpression(expr);
// Comma: evaluate the left side for effect only, then visit the right side
// in the same AST context as the whole expression (its value is the result).
7359 void HGraphBuilder::VisitComma(BinaryOperation* expr) {
7360 CHECK_ALIVE(VisitForEffect(expr->left()));
7361 // Visit the right subexpression in the same AST context as the entire
7363 Visit(expr->right());
// Translates && and || with short-circuit control flow. Three cases depending
// on the surrounding AST context: test (pure control flow), value (the result
// is pushed), and effect (only side effects matter).
7367 void HGraphBuilder::VisitLogicalExpression(BinaryOperation* expr) {
7368 bool is_logical_and = expr->op() == Token::AND;
7369 if (ast_context()->IsTest()) {
7370 TestContext* context = TestContext::cast(ast_context());
7371 // Translate left subexpression.
7372 HBasicBlock* eval_right = graph()->CreateBasicBlock();
// For &&, a false left operand jumps straight to the false target; for ||
// (the dropped else-branch, lines 7377-7380) a true left operand jumps to
// the true target.
7373 if (is_logical_and) {
7374 CHECK_BAILOUT(VisitForControl(expr->left(),
7376 context->if_false()));
7378 CHECK_BAILOUT(VisitForControl(expr->left(),
7383 // Translate right subexpression by visiting it in the same AST
7384 // context as the entire expression.
7385 if (eval_right->HasPredecessor()) {
7386 eval_right->SetJoinId(expr->RightId());
7387 set_current_block(eval_right);
7388 Visit(expr->right());
7391 } else if (ast_context()->IsValue()) {
7392 CHECK_ALIVE(VisitForValue(expr->left()));
7393 ASSERT(current_block() != NULL);
7395 // We need an extra block to maintain edge-split form.
7396 HBasicBlock* empty_block = graph()->CreateBasicBlock();
7397 HBasicBlock* eval_right = graph()->CreateBasicBlock();
// Branch on the truthiness of the left value, using recorded ToBoolean
// type feedback for the left subexpression.
7398 unsigned test_id = expr->left()->test_id();
7399 ToBooleanStub::Types expected(oracle()->ToBooleanTypes(test_id));
7400 HBranch* test = is_logical_and
7401 ? new(zone()) HBranch(Top(), eval_right, empty_block, expected)
7402 : new(zone()) HBranch(Top(), empty_block, eval_right, expected)
7403 current_block()->Finish(test);
7405 set_current_block(eval_right);
7406 Drop(1); // Value of the left subexpression.
7407 CHECK_BAILOUT(VisitForValue(expr->right()));
7409 HBasicBlock* join_block =
7410 CreateJoin(empty_block, current_block(), expr->id());
7411 set_current_block(join_block);
7412 return ast_context()->ReturnValue(Pop());
7415 ASSERT(ast_context()->IsEffect());
7416 // In an effect context, we don't need the value of the left subexpression,
7417 // only its control flow and side effects. We need an extra block to
7418 // maintain edge-split form.
7419 HBasicBlock* empty_block = graph()->CreateBasicBlock();
7420 HBasicBlock* right_block = graph()->CreateBasicBlock();
7421 if (is_logical_and) {
7422 CHECK_BAILOUT(VisitForControl(expr->left(), right_block, empty_block));
7424 CHECK_BAILOUT(VisitForControl(expr->left(), empty_block, right_block));
7427 // TODO(kmillikin): Find a way to fix this. It's ugly that there are
7428 // actually two empty blocks (one here and one inserted by
7429 // TestContext::BuildBranch, and that they both have an HSimulate though the
7430 // second one is not a merge node, and that we really have no good AST ID to
7431 // put on that first HSimulate.
7433 if (empty_block->HasPredecessor()) {
7434 empty_block->SetJoinId(expr->id());
7439 if (right_block->HasPredecessor()) {
7440 right_block->SetJoinId(expr->RightId());
7441 set_current_block(right_block);
7442 CHECK_BAILOUT(VisitForEffect(expr->right()));
7443 right_block = current_block();
7448 HBasicBlock* join_block =
7449 CreateJoin(empty_block, right_block, expr->id());
7450 set_current_block(join_block);
7451 // We did not materialize any value in the predecessor environments,
7452 // so there is no need to handle it here.
// Evaluates both operands for value, builds the type-feedback-specialized
// binary instruction, and returns it in the current AST context.
7457 void HGraphBuilder::VisitArithmeticExpression(BinaryOperation* expr) {
7458 CHECK_ALIVE(VisitForValue(expr->left()));
7459 CHECK_ALIVE(VisitForValue(expr->right()));
// Right was pushed last, so it is popped first.
7460 HValue* right = Pop();
7461 HValue* left = Pop();
7462 HInstruction* instr = BuildBinaryOperation(expr, left, right);
7463 instr->set_position(expr->position());
7464 return ast_context()->ReturnInstruction(instr, expr->id());
// Debug-only tracing of representation decisions, gated on
// --trace-representation. NOTE(review): listing drops intermediate parameter
// lines (7469-7470) and some PrintF arguments.
7468 void HGraphBuilder::TraceRepresentation(Token::Value op,
7471 Representation rep) {
7472 if (!FLAG_trace_representation) return;
7473 // TODO(svenpanne) Under which circumstances are we actually not flexible?
7474 // At first glance, this looks a bit weird...
7475 bool flexible = value->CheckFlag(HValue::kFlexibleRepresentation);
7476 PrintF("Operation %s has type info %s, %schange representation assumption "
7477 "for %s (ID %d) from %s to %s\n",
7480 flexible ? "" : " DO NOT ",
7482 graph_->GetMaximumValueID(),
7483 value->representation().Mnemonic(),
// Maps type feedback to a value representation. Smi and Integer32 feedback
// both become Integer32; Double and (heap-)Number become Double; everything
// else stays generic Tagged.
7488 Representation HGraphBuilder::ToRepresentation(TypeInfo info) {
7489 if (info.IsSmi()) return Representation::Integer32();
7490 if (info.IsInteger32()) return Representation::Integer32();
7491 if (info.IsDouble()) return Representation::Double();
7492 if (info.IsNumber()) return Representation::Double();
7493 return Representation::Tagged();
// Emits the specialized branch for (typeof x == 'string-literal') compares.
7497 void HGraphBuilder::HandleLiteralCompareTypeof(CompareOperation* expr,
7498 HTypeof* typeof_expr,
7499 Handle<String> check) {
7500 // Note: The HTypeof itself is removed during canonicalization, if possible.
7501 HValue* value = typeof_expr->value();
7502 HTypeofIsAndBranch* instr = new(zone()) HTypeofIsAndBranch(value, check);
7503 instr->set_position(expr->position());
7504 return ast_context()->ReturnControl(instr, expr->id());
// Matches one orientation of an equality compare against the literal nil
// value 'nil' (null/undefined). NOTE(review): listing drops the parameter
// lines after 'left' and the out-parameter assignment / return lines.
7508 static bool MatchLiteralCompareNil(HValue* left,
7513 if (left->IsConstant() &&
7514 HConstant::cast(left)->handle().is_identical_to(nil) &&
7515 Token::IsEqualityOp(op)) {
// Matches one orientation of (typeof x ==/=== 'literal'), extracting the
// HTypeof and the string to check against.
7523 static bool MatchLiteralCompareTypeof(HValue* left,
7526 HTypeof** typeof_expr,
7527 Handle<String>* check) {
7528 if (left->IsTypeof() &&
7529 Token::IsEqualityOp(op) &&
7530 right->IsConstant() &&
7531 HConstant::cast(right)->HasStringValue()) {
7532 *typeof_expr = HTypeof::cast(left);
7533 *check = Handle<String>::cast(HConstant::cast(right)->handle());
// Symmetric wrappers: try both operand orders for each pattern.
7540 static bool IsLiteralCompareTypeof(HValue* left,
7543 HTypeof** typeof_expr,
7544 Handle<String>* check) {
7545 return MatchLiteralCompareTypeof(left, op, right, typeof_expr, check) ||
7546 MatchLiteralCompareTypeof(right, op, left, typeof_expr, check);
7550 static bool IsLiteralCompareNil(HValue* left,
7555 return MatchLiteralCompareNil(left, op, right, nil, expr) ||
7556 MatchLiteralCompareNil(right, op, left, nil, expr);
// True when one side of a strict equality is a boolean constant; such
// compares reduce to object identity.
7560 static bool IsLiteralCompareBool(HValue* left,
7563 return op == Token::EQ_STRICT &&
7564 ((left->IsConstant() && HConstant::cast(left)->handle()->IsBoolean()) ||
7565 (right->IsConstant() && HConstant::cast(right)->handle()->IsBoolean()));
// Translates a compare operation, trying progressively less specialized
// forms: %_ClassOf test, typeof-literal, null/undefined literal, boolean
// literal, instanceof (possibly against a known global function), 'in',
// object-identity compare, symbol compare, and finally a numeric or generic
// compare chosen by type feedback.
7569 void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
7570 ASSERT(!HasStackOverflow());
7571 ASSERT(current_block() != NULL);
7572 ASSERT(current_block()->HasPredecessor());
// Special form (%_ClassOf(foo) === 'BarClass') detected by IsClassOfTest.
7573 if (IsClassOfTest(expr)) {
7574 CallRuntime* call = expr->left()->AsCallRuntime();
7575 ASSERT(call->arguments()->length() == 1);
7576 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
7577 HValue* value = Pop();
7578 Literal* literal = expr->right()->AsLiteral();
7579 Handle<String> rhs = Handle<String>::cast(literal->handle());
7580 HClassOfTestAndBranch* instr =
7581 new(zone()) HClassOfTestAndBranch(value, rhs);
7582 instr->set_position(expr->position());
7583 return ast_context()->ReturnControl(instr, expr->id());
7586 TypeInfo type_info = oracle()->CompareType(expr);
7587 // Check if this expression was ever executed according to type feedback.
7588 // Note that for the special typeof/null/undefined cases we get unknown here.
7589 if (type_info.IsUninitialized()) {
7590 AddInstruction(new(zone()) HSoftDeoptimize);
7591 current_block()->MarkAsDeoptimizing();
7592 type_info = TypeInfo::Unknown();
7595 CHECK_ALIVE(VisitForValue(expr->left()));
7596 CHECK_ALIVE(VisitForValue(expr->right()));
7598 HValue* context = environment()->LookupContext();
7599 HValue* right = Pop();
7600 HValue* left = Pop();
7601 Token::Value op = expr->op();
7603 HTypeof* typeof_expr = NULL;
7604 Handle<String> check;
7605 if (IsLiteralCompareTypeof(left, op, right, &typeof_expr, &check)) {
7606 return HandleLiteralCompareTypeof(expr, typeof_expr, check);
7608 HValue* sub_expr = NULL;
7609 Factory* f = graph()->isolate()->factory();
7610 if (IsLiteralCompareNil(left, op, right, f->undefined_value(), &sub_expr)) {
7611 return HandleLiteralCompareNil(expr, sub_expr, kUndefinedValue);
7613 if (IsLiteralCompareNil(left, op, right, f->null_value(), &sub_expr)) {
7614 return HandleLiteralCompareNil(expr, sub_expr, kNullValue);
// Strict equality against a boolean constant is pure object identity.
7616 if (IsLiteralCompareBool(left, op, right)) {
7617 HCompareObjectEqAndBranch* result =
7618 new(zone()) HCompareObjectEqAndBranch(left, right);
7619 result->set_position(expr->position());
7620 return ast_context()->ReturnControl(result, expr->id());
7623 if (op == Token::INSTANCEOF) {
7624 // Check to see if the rhs of the instanceof is a global function not
7625 // residing in new space. If it is we assume that the function will stay the
7627 Handle<JSFunction> target = Handle<JSFunction>::null();
7628 VariableProxy* proxy = expr->right()->AsVariableProxy();
7629 bool global_function = (proxy != NULL) && proxy->var()->IsUnallocated();
7630 if (global_function &&
7631 info()->has_global_object() &&
7632 !info()->global_object()->IsAccessCheckNeeded()) {
7633 Handle<String> name = proxy->name();
7634 Handle<GlobalObject> global(info()->global_object());
7635 LookupResult lookup(isolate());
7636 global->Lookup(*name, &lookup);
7637 if (lookup.IsFound() &&
7638 lookup.type() == NORMAL &&
7639 lookup.GetValue()->IsJSFunction()) {
7640 Handle<JSFunction> candidate(JSFunction::cast(lookup.GetValue()));
7641 // If the function is in new space we assume it's more likely to
7642 // change and thus prefer the general IC code.
7643 if (!isolate()->heap()->InNewSpace(*candidate)) {
// NOTE(review): the assignment 'target = candidate;' and closing braces
// (lines 7644-7648) are missing from this listing.
7649 // If the target is not null we have found a known global function that is
7650 // assumed to stay the same for this instanceof.
7651 if (target.is_null()) {
7652 HInstanceOf* result = new(zone()) HInstanceOf(context, left, right);
7653 result->set_position(expr->position());
7654 return ast_context()->ReturnInstruction(result, expr->id());
// Known global target: verify the rhs is still that function, then use the
// specialized instanceof-known-global instruction.
7656 AddInstruction(new(zone()) HCheckFunction(right, target));
7657 HInstanceOfKnownGlobal* result =
7658 new(zone()) HInstanceOfKnownGlobal(context, left, target);
7659 result->set_position(expr->position());
7660 return ast_context()->ReturnInstruction(result, expr->id());
7662 } else if (op == Token::IN) {
7663 HIn* result = new(zone()) HIn(context, left, right);
7664 result->set_position(expr->position());
7665 return ast_context()->ReturnInstruction(result, expr->id());
7666 } else if (type_info.IsNonPrimitive()) {
7669 case Token::EQ_STRICT: {
7670 // Can we get away with map check and not instance type check?
7671 Handle<Map> map = oracle()->GetCompareMap(expr);
7672 if (!map.is_null()) {
7673 AddInstruction(new(zone()) HCheckNonSmi(left));
7674 AddInstruction(HCheckMaps::NewWithTransitions(left, map));
7675 AddInstruction(new(zone()) HCheckNonSmi(right));
7676 AddInstruction(HCheckMaps::NewWithTransitions(right, map));
7677 HCompareObjectEqAndBranch* result =
7678 new(zone()) HCompareObjectEqAndBranch(left, right);
7679 result->set_position(expr->position());
7680 return ast_context()->ReturnControl(result, expr->id());
// No map from feedback: fall back to instance-type (spec-object) checks.
7682 AddInstruction(new(zone()) HCheckNonSmi(left));
7683 AddInstruction(HCheckInstanceType::NewIsSpecObject(left));
7684 AddInstruction(new(zone()) HCheckNonSmi(right));
7685 AddInstruction(HCheckInstanceType::NewIsSpecObject(right));
7686 HCompareObjectEqAndBranch* result =
7687 new(zone()) HCompareObjectEqAndBranch(left, right);
7688 result->set_position(expr->position());
7689 return ast_context()->ReturnControl(result, expr->id());
7693 return Bailout("Unsupported non-primitive compare");
// Symbol (interned string) equality also reduces to object identity.
7695 } else if (type_info.IsString() && oracle()->IsSymbolCompare(expr) &&
7696 (op == Token::EQ || op == Token::EQ_STRICT)) {
7697 AddInstruction(new(zone()) HCheckNonSmi(left));
7698 AddInstruction(HCheckInstanceType::NewIsSymbol(left));
7699 AddInstruction(new(zone()) HCheckNonSmi(right));
7700 AddInstruction(HCheckInstanceType::NewIsSymbol(right));
7701 HCompareObjectEqAndBranch* result =
7702 new(zone()) HCompareObjectEqAndBranch(left, right);
7703 result->set_position(expr->position());
7704 return ast_context()->ReturnControl(result, expr->id());
// Default: tagged representation gets the generic compare; int32/double
// feedback gets the typed compare-and-branch with that input representation.
7706 Representation r = ToRepresentation(type_info);
7708 HCompareGeneric* result =
7709 new(zone()) HCompareGeneric(context, left, right, op);
7710 result->set_position(expr->position());
7711 return ast_context()->ReturnInstruction(result, expr->id());
7713 HCompareIDAndBranch* result =
7714 new(zone()) HCompareIDAndBranch(left, right, op);
7715 result->set_position(expr->position());
7716 result->SetInputRepresentation(r);
7717 return ast_context()->ReturnControl(result, expr->id());
// Emits the specialized is-null/is-undefined branch for a compare against a
// nil literal. Strictness of the compare controls whether null and undefined
// are treated as equal to each other.
7723 void HGraphBuilder::HandleLiteralCompareNil(CompareOperation* expr,
7726 ASSERT(!HasStackOverflow());
7727 ASSERT(current_block() != NULL);
7728 ASSERT(current_block()->HasPredecessor());
7730 expr->op() == Token::EQ_STRICT ? kStrictEquality : kNonStrictEquality;
7731 HIsNilAndBranch* instr = new(zone()) HIsNilAndBranch(value, kind, nil);
7732 instr->set_position(expr->position());
7733 return ast_context()->ReturnControl(instr, expr->id());
// 'this function' evaluates to the closure being compiled (which, when
// inlined, is the inlined function's closure from the function state).
7737 void HGraphBuilder::VisitThisFunction(ThisFunction* expr) {
7738 ASSERT(!HasStackOverflow());
7739 ASSERT(current_block() != NULL);
7740 ASSERT(current_block()->HasPredecessor());
7741 HThisFunction* self = new(zone()) HThisFunction(
7742 function_state()->compilation_info()->closure());
7743 return ast_context()->ReturnInstruction(self, expr->id());
// Visits all declarations; the per-declaration visitors accumulate global
// declarations into globals_. If any were collected, pack them into a fixed
// array and emit one HDeclareGlobals to declare them all at runtime.
7747 void HGraphBuilder::VisitDeclarations(ZoneList<Declaration*>* declarations) {
7748 ASSERT(globals_.is_empty());
7749 AstVisitor::VisitDeclarations(declarations);
7750 if (!globals_.is_empty()) {
7751 Handle<FixedArray> array =
7752 isolate()->factory()->NewFixedArray(globals_.length(), TENURED);
7753 for (int i = 0; i < globals_.length(); ++i) array->set(i, *globals_.at(i));
// Flags tell the runtime whether this is eval/native code and which
// language mode applies to the declarations.
7754 int flags = DeclareGlobalsEvalFlag::encode(info()->is_eval()) |
7755 DeclareGlobalsNativeFlag::encode(info()->is_native()) |
7756 DeclareGlobalsLanguageMode::encode(info()->language_mode());
7757 HInstruction* result = new(zone()) HDeclareGlobals(
7758 environment()->LookupContext(), array, flags);
7759 AddInstruction(result);
// Declares a variable according to where it lives. Unallocated (global)
// variables are queued for HDeclareGlobals; stack and context slots that
// need hole-initialization (const / harmony const / let) get the hole bound
// or stored directly; dynamic-lookup slots bail out.
7765 void HGraphBuilder::VisitVariableDeclaration(VariableDeclaration* declaration) {
7766 VariableProxy* proxy = declaration->proxy();
7767 VariableMode mode = declaration->mode();
7768 Variable* variable = proxy->var();
7769 bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
7770 switch (variable->location()) {
7771 case Variable::UNALLOCATED:
// Each global contributes three entries: name, initial value (hole or
// undefined), and the QML-global flag (Qt fork extension — presumably
// consumed by the runtime's DeclareGlobals; verify against the fork).
7772 globals_.Add(variable->name());
7773 globals_.Add(variable->binding_needs_init()
7774 ? isolate()->factory()->the_hole_value()
7775 : isolate()->factory()->undefined_value());
7776 globals_.Add(isolate()->factory()->ToBoolean(variable->is_qml_global()));
7778 case Variable::PARAMETER:
7779 case Variable::LOCAL:
7781 HValue* value = graph()->GetConstantHole();
7782 environment()->Bind(variable, value);
7785 case Variable::CONTEXT:
7787 HValue* value = graph()->GetConstantHole();
7788 HValue* context = environment()->LookupContext();
7789 HStoreContextSlot* store = new HStoreContextSlot(
7790 context, variable->index(), HStoreContextSlot::kNoCheck, value);
7791 AddInstruction(store);
7792 if (store->HasObservableSideEffects()) AddSimulate(proxy->id());
7795 case Variable::LOOKUP:
7796 return Bailout("unsupported lookup slot in declaration");
// Same shape as VisitVariableDeclaration, but the initial value is the
// compiled function itself rather than the hole.
7801 void HGraphBuilder::VisitFunctionDeclaration(FunctionDeclaration* declaration) {
7802 VariableProxy* proxy = declaration->proxy();
7803 Variable* variable = proxy->var();
7804 switch (variable->location()) {
7805 case Variable::UNALLOCATED: {
7806 globals_.Add(variable->name());
7807 Handle<SharedFunctionInfo> function =
7808 Compiler::BuildFunctionInfo(declaration->fun(), info()->script());
7809 // Check for stack-overflow exception.
7810 if (function.is_null()) return SetStackOverflow();
7811 globals_.Add(function);
7812 globals_.Add(isolate()->factory()->ToBoolean(variable->is_qml_global()));
7815 case Variable::PARAMETER:
7816 case Variable::LOCAL: {
7817 CHECK_ALIVE(VisitForValue(declaration->fun()));
7818 HValue* value = Pop();
7819 environment()->Bind(variable, value);
7822 case Variable::CONTEXT: {
7823 CHECK_ALIVE(VisitForValue(declaration->fun()));
7824 HValue* value = Pop();
7825 HValue* context = environment()->LookupContext();
7826 HStoreContextSlot* store = new HStoreContextSlot(
7827 context, variable->index(), HStoreContextSlot::kNoCheck, value);
7828 AddInstruction(store);
7829 if (store->HasObservableSideEffects()) AddSimulate(proxy->id());
7832 case Variable::LOOKUP:
7833 return Bailout("unsupported lookup slot in declaration");
// Harmony-modules AST visitors. Bodies are missing from this listing
// (numbering gaps after each signature) — presumably UNREACHABLE() stubs,
// as modules were not supported by the optimizing compiler; confirm against
// the full source.
7838 void HGraphBuilder::VisitModuleDeclaration(ModuleDeclaration* declaration) {
7843 void HGraphBuilder::VisitImportDeclaration(ImportDeclaration* declaration) {
7848 void HGraphBuilder::VisitExportDeclaration(ExportDeclaration* declaration) {
7853 void HGraphBuilder::VisitModuleLiteral(ModuleLiteral* module) {
7858 void HGraphBuilder::VisitModuleVariable(ModuleVariable* module) {
7863 void HGraphBuilder::VisitModulePath(ModulePath* module) {
7868 void HGraphBuilder::VisitModuleUrl(ModuleUrl* module) {
7873 // Generators for inline runtime functions.
7874 // Support for types.
// Each generator evaluates the single argument for value and returns a
// branch instruction testing the corresponding type predicate in the
// current AST context.
7875 void HGraphBuilder::GenerateIsSmi(CallRuntime* call) {
7876 ASSERT(call->arguments()->length() == 1);
7877 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
7878 HValue* value = Pop();
7879 HIsSmiAndBranch* result = new(zone()) HIsSmiAndBranch(value);
7880 return ast_context()->ReturnControl(result, call->id());
// Instance-type range check covering all spec-object types.
7884 void HGraphBuilder::GenerateIsSpecObject(CallRuntime* call) {
7885 ASSERT(call->arguments()->length() == 1);
7886 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
7887 HValue* value = Pop();
7888 HHasInstanceTypeAndBranch* result =
7889 new(zone()) HHasInstanceTypeAndBranch(value,
7890 FIRST_SPEC_OBJECT_TYPE,
7891 LAST_SPEC_OBJECT_TYPE);
7892 return ast_context()->ReturnControl(result, call->id());
7896 void HGraphBuilder::GenerateIsFunction(CallRuntime* call) {
7897 ASSERT(call->arguments()->length() == 1);
7898 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
7899 HValue* value = Pop();
7900 HHasInstanceTypeAndBranch* result =
7901 new(zone()) HHasInstanceTypeAndBranch(value, JS_FUNCTION_TYPE);
7902 return ast_context()->ReturnControl(result, call->id());
7906 void HGraphBuilder::GenerateHasCachedArrayIndex(CallRuntime* call) {
7907 ASSERT(call->arguments()->length() == 1);
7908 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
7909 HValue* value = Pop();
7910 HHasCachedArrayIndexAndBranch* result =
7911 new(zone()) HHasCachedArrayIndexAndBranch(value);
7912 return ast_context()->ReturnControl(result, call->id());
7916 void HGraphBuilder::GenerateIsArray(CallRuntime* call) {
7917 ASSERT(call->arguments()->length() == 1);
7918 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
7919 HValue* value = Pop();
7920 HHasInstanceTypeAndBranch* result =
7921 new(zone()) HHasInstanceTypeAndBranch(value, JS_ARRAY_TYPE);
7922 return ast_context()->ReturnControl(result, call->id());
7926 void HGraphBuilder::GenerateIsRegExp(CallRuntime* call) {
7927 ASSERT(call->arguments()->length() == 1);
7928 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
7929 HValue* value = Pop();
7930 HHasInstanceTypeAndBranch* result =
7931 new(zone()) HHasInstanceTypeAndBranch(value, JS_REGEXP_TYPE);
7932 return ast_context()->ReturnControl(result, call->id());
7936 void HGraphBuilder::GenerateIsObject(CallRuntime* call) {
7937 ASSERT(call->arguments()->length() == 1);
7938 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
7939 HValue* value = Pop();
7940 HIsObjectAndBranch* result = new(zone()) HIsObjectAndBranch(value);
7941 return ast_context()->ReturnControl(result, call->id());
// Not implemented in optimized code: bail out to full codegen.
7945 void HGraphBuilder::GenerateIsNonNegativeSmi(CallRuntime* call) {
7946 return Bailout("inlined runtime function: IsNonNegativeSmi");
7950 void HGraphBuilder::GenerateIsUndetectableObject(CallRuntime* call) {
7951 ASSERT(call->arguments()->length() == 1);
7952 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
7953 HValue* value = Pop();
7954 HIsUndetectableAndBranch* result =
7955 new(zone()) HIsUndetectableAndBranch(value);
7956 return ast_context()->ReturnControl(result, call->id());
7960 void HGraphBuilder::GenerateIsStringWrapperSafeForDefaultValueOf(
7961 CallRuntime* call) {
7963 "inlined runtime function: IsStringWrapperSafeForDefaultValueOf");
7967 // Support for construct call checks.
// Inside an inlined function the answer is a compile-time constant taken
// from the function state; otherwise emit a runtime check of the frame.
7968 void HGraphBuilder::GenerateIsConstructCall(CallRuntime* call) {
7969 ASSERT(call->arguments()->length() == 0);
7970 if (function_state()->outer() != NULL) {
7971 // We are generating graph for inlined function.
7972 HValue* value = function_state()->is_construct()
7973 ? graph()->GetConstantTrue()
7974 : graph()->GetConstantFalse();
7975 return ast_context()->ReturnValue(value);
7977 return ast_context()->ReturnControl(new(zone()) HIsConstructCallAndBranch,
7983 // Support for arguments.length and arguments[?].
7984 void HGraphBuilder::GenerateArgumentsLength(CallRuntime* call) {
7985 // Our implementation of arguments (based on this stack frame or an
7986 // adapter below it) does not work for inlined functions. This runtime
7987 // function is blacklisted by AstNode::IsInlineable.
7988 ASSERT(function_state()->outer() == NULL);
7989 ASSERT(call->arguments()->length() == 0);
7990 HInstruction* elements = AddInstruction(
7991 new(zone()) HArgumentsElements(false));
7992 HArgumentsLength* result = new(zone()) HArgumentsLength(elements);
7993 return ast_context()->ReturnInstruction(result, call->id());
// arguments[index]: a bounds-checked access into the arguments elements
// of the current (non-inlined) frame.
7997 void HGraphBuilder::GenerateArguments(CallRuntime* call) {
7998 // Our implementation of arguments (based on this stack frame or an
7999 // adapter below it) does not work for inlined functions. This runtime
8000 // function is blacklisted by AstNode::IsInlineable.
8001 ASSERT(function_state()->outer() == NULL);
8002 ASSERT(call->arguments()->length() == 1);
8003 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8004 HValue* index = Pop();
8005 HInstruction* elements = AddInstruction(
8006 new(zone()) HArgumentsElements(false));
8007 HInstruction* length = AddInstruction(new(zone()) HArgumentsLength(elements));
8008 HAccessArgumentsAt* result =
8009 new(zone()) HAccessArgumentsAt(elements, length, index);
8010 return ast_context()->ReturnInstruction(result, call->id());
8014 // Support for accessing the class and value fields of an object.
8015 void HGraphBuilder::GenerateClassOf(CallRuntime* call) {
8016 // The special form detected by IsClassOfTest is detected before we get here
8017 // and does not cause a bailout.
8018 return Bailout("inlined runtime function: ClassOf");
// %_ValueOf: read the internal value field of a JSValue wrapper.
8022 void HGraphBuilder::GenerateValueOf(CallRuntime* call) {
8023 ASSERT(call->arguments()->length() == 1);
8024 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8025 HValue* value = Pop();
8026 HValueOf* result = new(zone()) HValueOf(value);
8027 return ast_context()->ReturnInstruction(result, call->id());
// %_DateField(date, index): the field index must be a literal smi, known
// at compile time.
8031 void HGraphBuilder::GenerateDateField(CallRuntime* call) {
8032 ASSERT(call->arguments()->length() == 2);
8033 ASSERT_NE(NULL, call->arguments()->at(1)->AsLiteral());
8034 Smi* index = Smi::cast(*(call->arguments()->at(1)->AsLiteral()->handle()));
8035 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8036 HValue* date = Pop();
8037 HDateField* result = new(zone()) HDateField(date, index);
8038 return ast_context()->ReturnInstruction(result, call->id());
// %_SetValueOf(object, value): builds explicit control flow — the store to
// JSValue::kValueOffset happens only when the object is a heap object of
// JS_VALUE_TYPE; in every case the result is 'value'.
8042 void HGraphBuilder::GenerateSetValueOf(CallRuntime* call) {
8043 ASSERT(call->arguments()->length() == 2);
8044 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8045 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
8046 HValue* value = Pop();
8047 HValue* object = Pop();
8048 // Check if object is a not a smi.
8049 HIsSmiAndBranch* smicheck = new(zone()) HIsSmiAndBranch(object);
8050 HBasicBlock* if_smi = graph()->CreateBasicBlock();
8051 HBasicBlock* if_heap_object = graph()->CreateBasicBlock();
8052 HBasicBlock* join = graph()->CreateBasicBlock();
8053 smicheck->SetSuccessorAt(0, if_smi);
8054 smicheck->SetSuccessorAt(1, if_heap_object);
8055 current_block()->Finish(smicheck);
8058 // Check if object is a JSValue.
8059 set_current_block(if_heap_object);
8060 HHasInstanceTypeAndBranch* typecheck =
8061 new(zone()) HHasInstanceTypeAndBranch(object, JS_VALUE_TYPE);
8062 HBasicBlock* if_js_value = graph()->CreateBasicBlock();
8063 HBasicBlock* not_js_value = graph()->CreateBasicBlock();
8064 typecheck->SetSuccessorAt(0, if_js_value);
8065 typecheck->SetSuccessorAt(1, not_js_value);
8066 current_block()->Finish(typecheck);
8067 not_js_value->Goto(join);
8069 // Create in-object property store to kValueOffset.
8070 set_current_block(if_js_value);
8071 Handle<String> name = isolate()->factory()->undefined_symbol();
// NOTE(review): listing drops HStoreNamedField arguments at lines
// 8073-8074 (presumably the name and value operands).
8072 AddInstruction(new HStoreNamedField(object,
8075 true, // in-object store.
8076 JSValue::kValueOffset));
8077 if_js_value->Goto(join);
8078 join->SetJoinId(call->id());
8079 set_current_block(join);
8080 return ast_context()->ReturnValue(value);
8084 // Fast support for charCodeAt(n).
8085 void HGraphBuilder::GenerateStringCharCodeAt(CallRuntime* call) {
8086 ASSERT(call->arguments()->length() == 2);
8087 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8088 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
8089 HValue* index = Pop();
8090 HValue* string = Pop();
8091 HValue* context = environment()->LookupContext();
8092 HStringCharCodeAt* result = BuildStringCharCodeAt(context, string, index);
8093 return ast_context()->ReturnInstruction(result, call->id());
8097 // Fast support for string.charAt(n) and string[n].
8098 void HGraphBuilder::GenerateStringCharFromCode(CallRuntime* call) {
8099 ASSERT(call->arguments()->length() == 1);
8100 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8101 HValue* char_code = Pop();
8102 HValue* context = environment()->LookupContext();
8103 HStringCharFromCode* result =
8104 new(zone()) HStringCharFromCode(context, char_code);
8105 return ast_context()->ReturnInstruction(result, call->id());
8109 // Fast support for string.charAt(n) and string[n].
// Composition of the two above: charCodeAt followed by charFromCode.
8110 void HGraphBuilder::GenerateStringCharAt(CallRuntime* call) {
8111 ASSERT(call->arguments()->length() == 2);
8112 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8113 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
8114 HValue* index = Pop();
8115 HValue* string = Pop();
8116 HValue* context = environment()->LookupContext();
8117 HStringCharCodeAt* char_code = BuildStringCharCodeAt(context, string, index);
8118 AddInstruction(char_code);
8119 HStringCharFromCode* result =
8120 new(zone()) HStringCharFromCode(context, char_code);
8121 return ast_context()->ReturnInstruction(result, call->id());
8125 // Fast support for object equality testing.
8126 void HGraphBuilder::GenerateObjectEquals(CallRuntime* call) {
8127 ASSERT(call->arguments()->length() == 2);
8128 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8129 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
8130 HValue* right = Pop();
8131 HValue* left = Pop();
8132 HCompareObjectEqAndBranch* result =
8133 new(zone()) HCompareObjectEqAndBranch(left, right);
8134 return ast_context()->ReturnControl(result, call->id());
8138 void HGraphBuilder::GenerateLog(CallRuntime* call) {
8139 // %_Log is ignored in optimized code.
8140 return ast_context()->ReturnValue(graph()->GetConstantUndefined());
8144 // Fast support for Math.random().
8145 void HGraphBuilder::GenerateRandomHeapNumber(CallRuntime* call) {
8146 HValue* context = environment()->LookupContext();
// HRandom needs the global object (per-context random state lives there).
8147 HGlobalObject* global_object = new(zone()) HGlobalObject(context);
8148 AddInstruction(global_object);
8149 HRandom* result = new(zone()) HRandom(global_object);
8150 return ast_context()->ReturnInstruction(result, call->id());
8154 // Fast support for StringAdd.
// Arguments are pushed as stub arguments; the StringAdd code stub does the
// actual concatenation.
8155 void HGraphBuilder::GenerateStringAdd(CallRuntime* call) {
8156 ASSERT_EQ(2, call->arguments()->length());
8157 CHECK_ALIVE(VisitArgumentList(call->arguments()));
8158 HValue* context = environment()->LookupContext();
8159 HCallStub* result = new(zone()) HCallStub(context, CodeStub::StringAdd, 2);
8161 return ast_context()->ReturnInstruction(result, call->id());
8165 // Fast support for SubString.
// Delegates to the SubString code stub with the three pushed arguments.
8166 void HGraphBuilder::GenerateSubString(CallRuntime* call) {
8167 ASSERT_EQ(3, call->arguments()->length());
8168 CHECK_ALIVE(VisitArgumentList(call->arguments()));
8169 HValue* context = environment()->LookupContext();
8170 HCallStub* result = new(zone()) HCallStub(context, CodeStub::SubString, 3);
8172 return ast_context()->ReturnInstruction(result, call->id());
8176 // Fast support for StringCompare.
// Delegates to the StringCompare code stub with the two pushed arguments.
8177 void HGraphBuilder::GenerateStringCompare(CallRuntime* call) {
8178 ASSERT_EQ(2, call->arguments()->length());
8179 CHECK_ALIVE(VisitArgumentList(call->arguments()));
8180 HValue* context = environment()->LookupContext();
// NOTE(review): the "HCallStub* result =" declaration line (original 8181)
// is elided from this listing.
8182 new(zone()) HCallStub(context, CodeStub::StringCompare, 2);
8184 return ast_context()->ReturnInstruction(result, call->id());
8188 // Support for direct calls from JavaScript to native RegExp code.
// Delegates to the RegExpExec code stub with the four pushed arguments.
8189 void HGraphBuilder::GenerateRegExpExec(CallRuntime* call) {
8190 ASSERT_EQ(4, call->arguments()->length());
8191 CHECK_ALIVE(VisitArgumentList(call->arguments()));
8192 HValue* context = environment()->LookupContext();
8193 HCallStub* result = new(zone()) HCallStub(context, CodeStub::RegExpExec, 4);
8195 return ast_context()->ReturnInstruction(result, call->id());
8199 // Construct a RegExp exec result with two in-object properties.
// Delegates to the RegExpConstructResult code stub.
8200 void HGraphBuilder::GenerateRegExpConstructResult(CallRuntime* call) {
8201 ASSERT_EQ(3, call->arguments()->length());
8202 CHECK_ALIVE(VisitArgumentList(call->arguments()));
8203 HValue* context = environment()->LookupContext();
// NOTE(review): the "HCallStub* result =" declaration line (original 8204)
// is elided from this listing.
8205 new(zone()) HCallStub(context, CodeStub::RegExpConstructResult, 3);
8207 return ast_context()->ReturnInstruction(result, call->id());
8211 // Support for fast native caches.
// Not supported in optimized code: deoptimize back to full codegen.
8212 void HGraphBuilder::GenerateGetFromCache(CallRuntime* call) {
8213 return Bailout("inlined runtime function: GetFromCache");
8217 // Fast support for number to string.
// Delegates to the NumberToString code stub with the one pushed argument.
8218 void HGraphBuilder::GenerateNumberToString(CallRuntime* call) {
8219 ASSERT_EQ(1, call->arguments()->length());
8220 CHECK_ALIVE(VisitArgumentList(call->arguments()));
8221 HValue* context = environment()->LookupContext();
// NOTE(review): the "HCallStub* result =" declaration line (original 8222)
// is elided from this listing.
8223 new(zone()) HCallStub(context, CodeStub::NumberToString, 1);
8225 return ast_context()->ReturnInstruction(result, call->id());
8229 // Fast call for custom callbacks.
// Inlined %_CallFunction(receiver, arg1, ..., function): pushes all but the
// last argument as stub/call arguments, evaluates the function expression,
// then branches on whether it is a plain JSFunction.  JSFunctions are
// invoked directly; proxies and other callables go through the generic
// CallFunction stub.  Both paths push their result and meet at a join.
// NOTE(review): several original lines are elided from this listing (e.g.
// Push of call_result on the non-function path, closing braces).
8230 void HGraphBuilder::GenerateCallFunction(CallRuntime* call) {
8231 // 1 ~ The function to call is not itself an argument to the call.
8232 int arg_count = call->arguments()->length() - 1;
8233 ASSERT(arg_count >= 1); // There's always at least a receiver.
8235 for (int i = 0; i < arg_count; ++i) {
8236 CHECK_ALIVE(VisitArgument(call->arguments()->at(i)));
// The last argument is the function itself; evaluated for value, not pushed
// as a call argument.
8238 CHECK_ALIVE(VisitForValue(call->arguments()->last()));
8240 HValue* function = Pop();
8241 HValue* context = environment()->LookupContext();
8243 // Branch for function proxies, or other non-functions.
8244 HHasInstanceTypeAndBranch* typecheck =
8245 new(zone()) HHasInstanceTypeAndBranch(function, JS_FUNCTION_TYPE);
8246 HBasicBlock* if_jsfunction = graph()->CreateBasicBlock();
8247 HBasicBlock* if_nonfunction = graph()->CreateBasicBlock();
8248 HBasicBlock* join = graph()->CreateBasicBlock();
8249 typecheck->SetSuccessorAt(0, if_jsfunction);
8250 typecheck->SetSuccessorAt(1, if_nonfunction);
8251 current_block()->Finish(typecheck);
// Fast path: direct invocation of a known JSFunction.
8253 set_current_block(if_jsfunction);
8254 HInstruction* invoke_result = AddInstruction(
8255 new(zone()) HInvokeFunction(context, function, arg_count));
8257 Push(invoke_result);
8258 if_jsfunction->Goto(join);
// Slow path: generic CallFunction stub handles proxies etc.
8260 set_current_block(if_nonfunction);
8261 HInstruction* call_result = AddInstruction(
8262 new(zone()) HCallFunction(context, function, arg_count));
8265 if_nonfunction->Goto(join);
8267 set_current_block(join);
8268 join->SetJoinId(call->id());
8269 return ast_context()->ReturnValue(Pop());
8273 // Fast call to math functions.
// Inlined %_MathPow: pops base/exponent and emits a dedicated HPower node.
8274 void HGraphBuilder::GenerateMathPow(CallRuntime* call) {
8275 ASSERT_EQ(2, call->arguments()->length());
8276 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8277 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
8278 HValue* right = Pop();
8279 HValue* left = Pop();
8280 HPower* result = new(zone()) HPower(left, right);
8281 return ast_context()->ReturnInstruction(result, call->id());
// Inlined %_MathSin: goes through the TranscendentalCache stub (SIN slot).
// NOTE(review): the "HCallStub* result =" declaration line (original 8289)
// is elided from this listing.
8285 void HGraphBuilder::GenerateMathSin(CallRuntime* call) {
8286 ASSERT_EQ(1, call->arguments()->length());
8287 CHECK_ALIVE(VisitArgumentList(call->arguments()));
8288 HValue* context = environment()->LookupContext();
8290 new(zone()) HCallStub(context, CodeStub::TranscendentalCache, 1);
8291 result->set_transcendental_type(TranscendentalCache::SIN);
8293 return ast_context()->ReturnInstruction(result, call->id());
// Inlined %_MathCos: goes through the TranscendentalCache stub (COS slot).
// NOTE(review): the "HCallStub* result =" declaration line (original 8301)
// is elided from this listing.
8297 void HGraphBuilder::GenerateMathCos(CallRuntime* call) {
8298 ASSERT_EQ(1, call->arguments()->length());
8299 CHECK_ALIVE(VisitArgumentList(call->arguments()));
8300 HValue* context = environment()->LookupContext();
8302 new(zone()) HCallStub(context, CodeStub::TranscendentalCache, 1);
8303 result->set_transcendental_type(TranscendentalCache::COS);
8305 return ast_context()->ReturnInstruction(result, call->id());
// Inlined %_MathTan: goes through the TranscendentalCache stub (TAN slot).
// NOTE(review): the "HCallStub* result =" declaration line (original 8313)
// is elided from this listing.
8309 void HGraphBuilder::GenerateMathTan(CallRuntime* call) {
8310 ASSERT_EQ(1, call->arguments()->length());
8311 CHECK_ALIVE(VisitArgumentList(call->arguments()));
8312 HValue* context = environment()->LookupContext();
8314 new(zone()) HCallStub(context, CodeStub::TranscendentalCache, 1);
8315 result->set_transcendental_type(TranscendentalCache::TAN);
8317 return ast_context()->ReturnInstruction(result, call->id());
// Inlined %_MathLog: goes through the TranscendentalCache stub (LOG slot).
// NOTE(review): the "HCallStub* result =" declaration line (original 8325)
// is elided from this listing.
8321 void HGraphBuilder::GenerateMathLog(CallRuntime* call) {
8322 ASSERT_EQ(1, call->arguments()->length());
8323 CHECK_ALIVE(VisitArgumentList(call->arguments()));
8324 HValue* context = environment()->LookupContext();
8326 new(zone()) HCallStub(context, CodeStub::TranscendentalCache, 1);
8327 result->set_transcendental_type(TranscendentalCache::LOG);
8329 return ast_context()->ReturnInstruction(result, call->id());
// Inlined %_MathSqrt is not supported here: bail out to full codegen.
8333 void HGraphBuilder::GenerateMathSqrt(CallRuntime* call) {
8334 return Bailout("inlined runtime function: MathSqrt");
8338 // Check whether two RegExps are equivalent
// Not supported in optimized code: bail out to full codegen.
8339 void HGraphBuilder::GenerateIsRegExpEquivalent(CallRuntime* call) {
8340 return Bailout("inlined runtime function: IsRegExpEquivalent");
// Inlined %_GetCachedArrayIndex: pops the value and emits the dedicated
// HGetCachedArrayIndex instruction.
8344 void HGraphBuilder::GenerateGetCachedArrayIndex(CallRuntime* call) {
8345 ASSERT(call->arguments()->length() == 1);
8346 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8347 HValue* value = Pop();
8348 HGetCachedArrayIndex* result = new(zone()) HGetCachedArrayIndex(value);
8349 return ast_context()->ReturnInstruction(result, call->id());
// Not supported in optimized code: bail out to full codegen.
8353 void HGraphBuilder::GenerateFastAsciiArrayJoin(CallRuntime* call) {
8354 return Bailout("inlined runtime function: FastAsciiArrayJoin");
// End of the inlined-runtime-function generators.
8358 #undef CHECK_BAILOUT
// HEnvironment constructor for an ordinary JS function frame: sizes the
// value list for parameters (+1 receiver), stack-allocated locals, and an
// initially empty expression stack.
// NOTE(review): several member-initializer lines (e.g. outer_, values_,
// scope param) are elided from this listing.
8362 HEnvironment::HEnvironment(HEnvironment* outer,
8364 Handle<JSFunction> closure)
8365 : closure_(closure),
8367 assigned_variables_(4),
8368 frame_type_(JS_FUNCTION),
8369 parameter_count_(0),
8375 ast_id_(AstNode::kNoNumber) {
8376 Initialize(scope->num_parameters() + 1, scope->num_stack_slots(), 0);
// Copy constructor: initializes empty containers, then (in the elided body)
// deep-copies the contents of 'other' via Initialize(other).
// NOTE(review): several initializer/body lines are elided from this listing.
8380 HEnvironment::HEnvironment(const HEnvironment* other)
8382 assigned_variables_(0),
8383 frame_type_(JS_FUNCTION),
8384 parameter_count_(0),
8390 ast_id_(other->ast_id()) {
// Constructor for artificial (stub) frames — arguments adaptor or
// construct-stub environments — where the frame type is caller-supplied and
// the parameter count equals the argument count.
// NOTE(review): several initializer lines are elided from this listing.
8395 HEnvironment::HEnvironment(HEnvironment* outer,
8396 Handle<JSFunction> closure,
8397 FrameType frame_type,
8399 : closure_(closure),
8401 assigned_variables_(0),
8402 frame_type_(frame_type),
8403 parameter_count_(arguments),
8408 ast_id_(AstNode::kNoNumber) {
// Sizes the environment: records parameter/local counts and pre-allocates
// the value list, filling all slots with NULL placeholders.
8412 void HEnvironment::Initialize(int parameter_count,
8415 parameter_count_ = parameter_count;
8416 local_count_ = local_count;
8418 // Avoid reallocating the temporaries' backing store on the first Push.
8419 int total = parameter_count + specials_count_ + local_count + stack_height;
8420 values_.Initialize(total + 4);
8421 for (int i = 0; i < total; ++i) values_.Add(NULL);
// Copies all state from 'other' into this environment; the outer chain is
// deep-copied so mutations here never leak into the source environment.
8425 void HEnvironment::Initialize(const HEnvironment* other) {
8426 closure_ = other->closure();
8427 values_.AddAll(other->values_);
8428 assigned_variables_.AddAll(other->assigned_variables_);
8429 frame_type_ = other->frame_type_;
8430 parameter_count_ = other->parameter_count_;
8431 local_count_ = other->local_count_;
8432 if (other->outer_ != NULL) outer_ = other->outer_->Copy(); // Deep copy.
8433 pop_count_ = other->pop_count_;
8434 push_count_ = other->push_count_;
8435 ast_id_ = other->ast_id_;
// Merges the values of 'other' (the environment flowing in along a new
// predecessor edge) into this block's environment, creating or extending
// phis per slot as needed.  Loop headers are handled elsewhere.
8439 void HEnvironment::AddIncomingEdge(HBasicBlock* block, HEnvironment* other) {
8440 ASSERT(!block->IsLoopHeader());
8441 ASSERT(values_.length() == other->values_.length());
8443 int length = values_.length();
8444 for (int i = 0; i < length; ++i) {
8445 HValue* value = values_[i];
8446 if (value != NULL && value->IsPhi() && value->block() == block) {
8447 // There is already a phi for the i'th value.
8448 HPhi* phi = HPhi::cast(value);
8449 // Assert index is correct and that we haven't missed an incoming edge.
8450 ASSERT(phi->merged_index() == i);
8451 ASSERT(phi->OperandCount() == block->predecessors()->length());
8452 phi->AddInput(other->values_[i]);
8453 } else if (values_[i] != other->values_[i]) {
8454 // There is a fresh value on the incoming edge, a phi is needed.
8455 ASSERT(values_[i] != NULL && other->values_[i] != NULL);
8456 HPhi* phi = new(block->zone()) HPhi(i);
8457 HValue* old_value = values_[i];
// Seed the new phi with the old value for every existing predecessor, then
// append the incoming edge's value.
8458 for (int j = 0; j < block->predecessors()->length(); j++) {
8459 phi->AddInput(old_value);
8461 phi->AddInput(other->values_[i]);
8462 this->values_[i] = phi;
// Binds slot 'index' to 'value' and records the index in the set of
// assigned variables (used for OSR / loop-assignment analysis).
8469 void HEnvironment::Bind(int index, HValue* value) {
8470 ASSERT(value != NULL);
8471 if (!assigned_variables_.Contains(index)) {
8472 assigned_variables_.Add(index);
8474 values_[index] = value;
// True when 'index' lies in the expression-stack region, i.e. past the
// parameters, specials, and locals.
8478 bool HEnvironment::HasExpressionAt(int index) const {
8479 return index >= parameter_count_ + specials_count_ + local_count_;
// True when no values have been pushed past the fixed part of the frame.
8483 bool HEnvironment::ExpressionStackIsEmpty() const {
8484 ASSERT(length() >= first_expression_index());
8485 return length() == first_expression_index();
// Overwrites the expression-stack slot 'index_from_top' elements below the
// top, adjusting push/pop history so simulates see the new value.
8489 void HEnvironment::SetExpressionStackAt(int index_from_top, HValue* value) {
8490 int count = index_from_top + 1;
8491 int index = values_.length() - count;
8492 ASSERT(HasExpressionAt(index));
8493 // The push count must include at least the element in question or else
8494 // the new value will not be included in this environment's history.
8495 if (push_count_ < count) {
8496 // This is the same effect as popping then re-pushing 'count' elements.
8497 pop_count_ += (count - push_count_);
8498 push_count_ = count;
8500 values_[index] = value;
// Pops 'count' values off the expression stack (loop body elided in this
// listing; presumably calls Pop() per iteration — confirm against source).
8504 void HEnvironment::Drop(int count) {
8505 for (int i = 0; i < count; ++i) {
// Returns a zone-allocated deep copy of this environment.
8511 HEnvironment* HEnvironment::Copy() const {
8512 return new(closure()->GetIsolate()->zone()) HEnvironment(this);
// Copy with push/pop history cleared — used when the copy starts a fresh
// simulate interval.
8516 HEnvironment* HEnvironment::CopyWithoutHistory() const {
8517 HEnvironment* result = Copy();
8518 result->ClearHistory();
// Copy for a loop header: every slot is replaced by a fresh phi seeded with
// the current value, and the phis are registered on the header block.
8523 HEnvironment* HEnvironment::CopyAsLoopHeader(HBasicBlock* loop_header) const {
8524 HEnvironment* new_env = Copy();
8525 for (int i = 0; i < values_.length(); ++i) {
8526 HPhi* phi = new(loop_header->zone()) HPhi(i);
8527 phi->AddInput(values_[i]);
8528 new_env->values_[i] = phi;
8529 loop_header->AddPhi(phi);
8531 new_env->ClearHistory();
// Builds an artificial stub frame (arguments adaptor / construct stub) on
// top of 'outer', pushing the receiver and arguments in frame order.
8536 HEnvironment* HEnvironment::CreateStubEnvironment(HEnvironment* outer,
8537 Handle<JSFunction> target,
8538 FrameType frame_type,
8539 int arguments) const {
8540 HEnvironment* new_env = new(closure()->GetIsolate()->zone())
8541 HEnvironment(outer, target, frame_type, arguments + 1)
8542 for (int i = 0; i <= arguments; ++i) { // Include receiver.
8543 new_env->Push(ExpressionStackAt(arguments - i));
8545 new_env->ClearHistory();
// Builds the environment for an inlined call to 'target': an outer copy
// with the call's arguments dropped, optional construct-stub and
// arguments-adaptor frames, and an inner JS_FUNCTION frame populated with
// the argument values (padded with undefined on arity mismatch).
// NOTE(review): this listing elides some lines (e.g. the is_construct guard
// around the construct-stub frame, the final return).
8550 HEnvironment* HEnvironment::CopyForInlining(
8551 Handle<JSFunction> target,
8553 FunctionLiteral* function,
8554 HConstant* undefined,
8556 bool is_construct) const {
8557 ASSERT(frame_type() == JS_FUNCTION);
8559 Zone* zone = closure()->GetIsolate()->zone();
8561 // Outer environment is a copy of this one without the arguments.
8562 int arity = function->scope()->num_parameters();
8564 HEnvironment* outer = Copy();
8565 outer->Drop(arguments + 1); // Including receiver.
8566 outer->ClearHistory();
8569 // Create artificial constructor stub environment. The receiver should
8570 // actually be the constructor function, but we pass the newly allocated
8571 // object instead, DoComputeConstructStubFrame() relies on that.
8572 outer = CreateStubEnvironment(outer, target, JS_CONSTRUCT, arguments);
8575 if (arity != arguments) {
8576 // Create artificial arguments adaptation environment.
8577 outer = CreateStubEnvironment(outer, target, ARGUMENTS_ADAPTOR, arguments);
8580 HEnvironment* inner =
8581 new(zone) HEnvironment(outer, function->scope(), target);
8582 // Get the argument values from the original environment.
8583 for (int i = 0; i <= arity; ++i) { // Include receiver.
8584 HValue* push = (i <= arguments) ?
8585 ExpressionStackAt(arguments - i) : undefined;
8586 inner->SetValueAt(i, push);
8588 // If the function we are inlining is a strict mode function or a
8589 // builtin function, pass undefined as the receiver for function
8590 // calls (instead of the global receiver).
8591 if ((target->shared()->native() || !function->is_classic_mode()) &&
8592 call_kind == CALL_AS_FUNCTION && !is_construct) {
8593 inner->SetValueAt(0, undefined);
// Slot arity+1 holds the context; remaining local slots start undefined.
8595 inner->SetValueAt(arity + 1, LookupContext());
8596 for (int i = arity + 2; i < inner->length(); ++i) {
8597 inner->SetValueAt(i, undefined);
8600 inner->set_ast_id(AstNode::kFunctionEntryId);
// Debug printer: dumps every slot, labelling the parameter / specials /
// locals / expression-stack regions as they begin.
8605 void HEnvironment::PrintTo(StringStream* stream) {
8606 for (int i = 0; i < length(); i++) {
8607 if (i == 0) stream->Add("parameters\n");
8608 if (i == parameter_count()) stream->Add("specials\n");
8609 if (i == parameter_count() + specials_count()) stream->Add("locals\n");
8610 if (i == parameter_count() + specials_count() + local_count()) {
8611 stream->Add("expressions\n");
8613 HValue* val = values_.at(i);
8614 stream->Add("%d: ", i);
// NULL slots are printed literally; the else-arm pairing is elided here.
8616 val->PrintNameTo(stream);
8618 stream->Add("NULL");
// Convenience wrapper: prints the environment to stdout via a heap-backed
// string stream.
8626 void HEnvironment::PrintToStd() {
8627 HeapStringAllocator string_allocator;
8628 StringStream trace(&string_allocator);
8630 PrintF("%s", *trace.ToCString());
// Emits the "compilation" tag of the hydrogen trace (consumed by the C1
// visualizer), recording the function name and a timestamp.
8634 void HTracer::TraceCompilation(FunctionLiteral* function) {
8635 Tag tag(this, "compilation");
8636 Handle<String> name = function->debug_name();
8637 PrintStringProperty("name", *name->ToCString());
8638 PrintStringProperty("method", *name->ToCString());
8639 PrintLongProperty("date", static_cast<int64_t>(OS::TimeCurrentMillis()));
// Traces a lithium chunk together with its hydrogen graph.
8643 void HTracer::TraceLithium(const char* name, LChunk* chunk) {
8644 Trace(name, chunk->graph(), chunk);
// Traces a hydrogen graph only (no lithium chunk available yet).
8648 void HTracer::TraceHydrogen(const char* name, HGraph* graph) {
8649 Trace(name, graph, NULL);
// Writes one "cfg" section of the trace: for each basic block, its
// predecessors/successors, flags, dominator, loop depth, phis, HIR
// instructions, and (when a chunk is given) the LIR instructions.
// NOTE(review): several lines (else-arms, closing braces, some Print calls)
// are elided from this listing.
8653 void HTracer::Trace(const char* name, HGraph* graph, LChunk* chunk) {
8654 Tag tag(this, "cfg");
8655 PrintStringProperty("name", name);
8656 const ZoneList<HBasicBlock*>* blocks = graph->blocks();
8657 for (int i = 0; i < blocks->length(); i++) {
8658 HBasicBlock* current = blocks->at(i);
8659 Tag block_tag(this, "block");
8660 PrintBlockProperty("name", current->block_id());
// bci values are not tracked per block; the visualizer expects the fields.
8661 PrintIntProperty("from_bci", -1);
8662 PrintIntProperty("to_bci", -1);
8664 if (!current->predecessors()->is_empty()) {
8666 trace_.Add("predecessors");
8667 for (int j = 0; j < current->predecessors()->length(); ++j) {
8668 trace_.Add(" \"B%d\"", current->predecessors()->at(j)->block_id());
8672 PrintEmptyProperty("predecessors");
8675 if (current->end()->SuccessorCount() == 0) {
8676 PrintEmptyProperty("successors");
8679 trace_.Add("successors");
8680 for (HSuccessorIterator it(current->end()); !it.Done(); it.Advance()) {
8681 trace_.Add(" \"B%d\"", it.Current()->block_id());
8686 PrintEmptyProperty("xhandlers");
8687 const char* flags = current->IsLoopSuccessorDominator()
8690 PrintStringProperty("flags", flags);
8692 if (current->dominator() != NULL) {
8693 PrintBlockProperty("dominator", current->dominator()->block_id());
8696 PrintIntProperty("loop_depth", current->LoopNestingDepth());
// Instruction index ranges are only meaningful once a chunk exists.
8698 if (chunk != NULL) {
8699 int first_index = current->first_instruction_index();
8700 int last_index = current->last_instruction_index();
8703 LifetimePosition::FromInstructionIndex(first_index).Value());
8706 LifetimePosition::FromInstructionIndex(last_index).Value());
// Phis are emitted under the states/locals tags.
8710 Tag states_tag(this, "states");
8711 Tag locals_tag(this, "locals");
8712 int total = current->phis()->length();
8713 PrintIntProperty("size", current->phis()->length());
8714 PrintStringProperty("method", "None");
8715 for (int j = 0; j < total; ++j) {
8716 HPhi* phi = current->phis()->at(j);
8718 trace_.Add("%d ", phi->merged_index());
8719 phi->PrintNameTo(&trace_);
8721 phi->PrintTo(&trace_);
// HIR: walk the instruction list of the block.
8727 Tag HIR_tag(this, "HIR");
8728 HInstruction* instruction = current->first();
8729 while (instruction != NULL) {
8731 int uses = instruction->UseCount();
8733 trace_.Add("%d %d ", bci, uses);
8734 instruction->PrintNameTo(&trace_);
8736 instruction->PrintTo(&trace_);
8737 trace_.Add(" <|@\n");
8738 instruction = instruction->next();
// LIR: print the chunk's instructions in this block's index range.
8743 if (chunk != NULL) {
8744 Tag LIR_tag(this, "LIR");
8745 int first_index = current->first_instruction_index();
8746 int last_index = current->last_instruction_index();
8747 if (first_index != -1 && last_index != -1) {
8748 const ZoneList<LInstruction*>* instructions = chunk->instructions();
8749 for (int i = first_index; i <= last_index; ++i) {
8750 LInstruction* linstr = instructions->at(i);
8751 if (linstr != NULL) {
8754 LifetimePosition::FromInstructionIndex(i).Value());
8755 linstr->PrintTo(&trace_);
8756 trace_.Add(" <|@\n");
// Writes the "intervals" trace section: fixed double registers, fixed
// general registers, then the virtual (object) live ranges.
8765 void HTracer::TraceLiveRanges(const char* name, LAllocator* allocator) {
8766 Tag tag(this, "intervals");
8767 PrintStringProperty("name", name);
8769 const Vector<LiveRange*>* fixed_d = allocator->fixed_double_live_ranges();
8770 for (int i = 0; i < fixed_d->length(); ++i) {
8771 TraceLiveRange(fixed_d->at(i), "fixed");
8774 const Vector<LiveRange*>* fixed = allocator->fixed_live_ranges();
8775 for (int i = 0; i < fixed->length(); ++i) {
8776 TraceLiveRange(fixed->at(i), "fixed");
8779 const ZoneList<LiveRange*>* live_ranges = allocator->live_ranges();
8780 for (int i = 0; i < live_ranges->length(); ++i) {
8781 TraceLiveRange(live_ranges->at(i), "object");
// Writes one live range line for the visualizer: id, type, assigned
// register or spill slot, parent/hint indices, covered use intervals, and
// register-beneficial use positions.
// NOTE(review): some lines (else-arms, closing braces) are elided here.
8786 void HTracer::TraceLiveRange(LiveRange* range, const char* type) {
8787 if (range != NULL && !range->IsEmpty()) {
8789 trace_.Add("%d %s", range->id(), type);
8790 if (range->HasRegisterAssigned()) {
8791 LOperand* op = range->CreateAssignedOperand(ZONE);
8792 int assigned_reg = op->index();
8793 if (op->IsDoubleRegister()) {
8794 trace_.Add(" \"%s\"",
8795 DoubleRegister::AllocationIndexToString(assigned_reg));
8797 ASSERT(op->IsRegister());
8798 trace_.Add(" \"%s\"", Register::AllocationIndexToString(assigned_reg));
8800 } else if (range->IsSpilled()) {
// Spilled ranges report their (double or tagged) stack slot instead.
8801 LOperand* op = range->TopLevel()->GetSpillOperand();
8802 if (op->IsDoubleStackSlot()) {
8803 trace_.Add(" \"double_stack:%d\"", op->index());
8805 ASSERT(op->IsStackSlot());
8806 trace_.Add(" \"stack:%d\"", op->index());
// Child ranges point back at their parent; top-level ranges at themselves.
8809 int parent_index = -1;
8810 if (range->IsChild()) {
8811 parent_index = range->parent()->id();
8813 parent_index = range->id();
8815 LOperand* op = range->FirstHint();
8816 int hint_index = -1;
8817 if (op != NULL && op->IsUnallocated()) {
8818 hint_index = LUnallocated::cast(op)->virtual_register();
8820 trace_.Add(" %d %d", parent_index, hint_index);
8821 UseInterval* cur_interval = range->first_interval();
8822 while (cur_interval != NULL && range->Covers(cur_interval->start())) {
8823 trace_.Add(" [%d, %d[",
8824 cur_interval->start().Value(),
8825 cur_interval->end().Value());
8826 cur_interval = cur_interval->next();
8829 UsePosition* current_pos = range->first_pos();
8830 while (current_pos != NULL) {
8831 if (current_pos->RegisterIsBeneficial() || FLAG_trace_all_uses) {
8832 trace_.Add(" %d M", current_pos->pos().Value());
8834 current_pos = current_pos->next();
8837 trace_.Add(" \"\"\n");
// Appends the accumulated trace buffer to the trace file.
8842 void HTracer::FlushToFile() {
8843 AppendChars(filename_, *trace_.ToCString(), trace_.length(), false);
// Accumulates the source size of the function being compiled; used later by
// Print() to normalize timings per KB of source.
8848 void HStatistics::Initialize(CompilationInfo* info) {
8849 source_size_ += info->shared_info()->SourceSize();
// Prints per-phase timing and size statistics, normalized totals per KB of
// source, and the slowdown factor relative to full codegen.
// NOTE(review): the loop body summing 'sum' over timing_ (original ~8857)
// is elided from this listing.
8853 void HStatistics::Print() {
8854 PrintF("Timing results:\n");
8856 for (int i = 0; i < timing_.length(); ++i) {
8860 for (int i = 0; i < names_.length(); ++i) {
8861 PrintF("%30s", names_[i]);
// Ticks are microseconds; convert to ms and a percentage of the total.
8862 double ms = static_cast<double>(timing_[i]) / 1000;
8863 double percent = static_cast<double>(timing_[i]) * 100 / sum;
8864 PrintF(" - %7.3f ms / %4.1f %% ", ms, percent);
8866 unsigned size = sizes_[i];
8867 double size_percent = static_cast<double>(size) * 100 / total_size_;
8868 PrintF(" %8u bytes / %4.1f %%\n", size, size_percent);
8870 double source_size_in_kb = static_cast<double>(source_size_) / 1024;
8871 double normalized_time = source_size_in_kb > 0
8872 ? (static_cast<double>(sum) / 1000) / source_size_in_kb
8874 double normalized_bytes = source_size_in_kb > 0
8875 ? total_size_ / source_size_in_kb
8877 PrintF("%30s - %7.3f ms %7.3f bytes\n", "Sum",
8878 normalized_time, normalized_bytes);
8879 PrintF("---------------------------------------------------------------\n");
8880 PrintF("%30s - %7.3f ms (%.1f times slower than full code gen)\n",
8882 static_cast<double>(total_) / 1000,
8883 static_cast<double>(total_) / full_code_gen_);
// Records one phase's ticks/size: the sentinel names kFullCodeGen/kTotal go
// to dedicated counters (pointer comparison is intentional — the names are
// interned constants); other phases accumulate into the per-name lists.
8887 void HStatistics::SaveTiming(const char* name, int64_t ticks, unsigned size) {
8888 if (name == HPhase::kFullCodeGen) {
8889 full_code_gen_ += ticks;
8890 } else if (name == HPhase::kTotal) {
8893 total_size_ += size;
8894 for (int i = 0; i < names_.length(); ++i) {
8895 if (names_[i] == name) {
8896 timing_[i] += ticks;
// Sentinel phase names; compared by pointer identity in SaveTiming().
8908 const char* const HPhase::kFullCodeGen = "Full code generator";
8909 const char* const HPhase::kTotal = "Total";
// Starts a compilation phase: remembers the traced entities, derives the
// chunk from the allocator if absent, and snapshots the start time and
// zone allocation size for statistics.
8912 void HPhase::Begin(const char* name,
8915 LAllocator* allocator) {
8919 allocator_ = allocator;
8920 if (allocator != NULL && chunk_ == NULL) {
8921 chunk_ = allocator->chunk();
8923 if (FLAG_hydrogen_stats) start_ = OS::Ticks();
8924 start_allocation_size_ = Zone::allocation_size_;
// Ends a phase: records timing/allocation deltas, emits trace output when
// the phase name matches FLAG_trace_phase, and (in the elided debug tail)
// verifies graph and allocator invariants.
8928 void HPhase::End() const {
8929 if (FLAG_hydrogen_stats) {
8930 int64_t end = OS::Ticks();
8931 unsigned size = Zone::allocation_size_ - start_allocation_size_;
8932 HStatistics::Instance()->SaveTiming(name_, end - start_, size);
8935 // Produce trace output if flag is set so that the first letter of the
8936 // phase name matches the command line parameter FLAG_trace_phase.
8937 if (FLAG_trace_hydrogen &&
8938 OS::StrChr(const_cast<char*>(FLAG_trace_phase), name_[0]) != NULL) {
8939 if (graph_ != NULL) HTracer::Instance()->TraceHydrogen(name_, graph_);
8940 if (chunk_ != NULL) HTracer::Instance()->TraceLithium(name_, chunk_);
8941 if (allocator_ != NULL) {
8942 HTracer::Instance()->TraceLiveRanges(name_, allocator_);
8947 if (graph_ != NULL) graph_->Verify(false); // No full verify.
8948 if (allocator_ != NULL) allocator_->Verify();
8952 } } // namespace v8::internal