// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/hydrogen.h"

#include <algorithm>

#include "src/v8.h"
#include "src/allocation-site-scopes.h"
#include "src/codegen.h"
#include "src/full-codegen.h"
#include "src/hashmap.h"
#include "src/hydrogen-bce.h"
#include "src/hydrogen-bch.h"
#include "src/hydrogen-canonicalize.h"
#include "src/hydrogen-check-elimination.h"
#include "src/hydrogen-dce.h"
#include "src/hydrogen-dehoist.h"
#include "src/hydrogen-environment-liveness.h"
#include "src/hydrogen-escape-analysis.h"
#include "src/hydrogen-infer-representation.h"
#include "src/hydrogen-infer-types.h"
#include "src/hydrogen-load-elimination.h"
#include "src/hydrogen-gvn.h"
#include "src/hydrogen-mark-deoptimize.h"
#include "src/hydrogen-mark-unreachable.h"
#include "src/hydrogen-osr.h"
#include "src/hydrogen-range-analysis.h"
#include "src/hydrogen-redundant-phi.h"
#include "src/hydrogen-removable-simulates.h"
#include "src/hydrogen-representation-changes.h"
#include "src/hydrogen-sce.h"
#include "src/hydrogen-store-elimination.h"
#include "src/hydrogen-uint32-analysis.h"
#include "src/lithium-allocator.h"
#include "src/parser.h"
#include "src/runtime.h"
#include "src/scopeinfo.h"
#include "src/scopes.h"
#include "src/stub-cache.h"
#include "src/typing.h"

#if V8_TARGET_ARCH_IA32
#include "src/ia32/lithium-codegen-ia32.h"
#elif V8_TARGET_ARCH_X64
#include "src/x64/lithium-codegen-x64.h"
#elif V8_TARGET_ARCH_ARM64
#include "src/arm64/lithium-codegen-arm64.h"
#elif V8_TARGET_ARCH_ARM
#include "src/arm/lithium-codegen-arm.h"
#elif V8_TARGET_ARCH_MIPS
#include "src/mips/lithium-codegen-mips.h"
#elif V8_TARGET_ARCH_X87
#include "src/x87/lithium-codegen-x87.h"
#else
#error Unsupported target architecture.
#endif

namespace v8 {
namespace internal {

HBasicBlock::HBasicBlock(HGraph* graph)
    : block_id_(graph->GetNextBlockID()),
      graph_(graph),
      phis_(4, graph->zone()),
      first_(NULL),
      last_(NULL),
      end_(NULL),
      loop_information_(NULL),
      predecessors_(2, graph->zone()),
      dominator_(NULL),
      dominated_blocks_(4, graph->zone()),
      last_environment_(NULL),
      argument_count_(-1),
      first_instruction_index_(-1),
      last_instruction_index_(-1),
      deleted_phis_(4, graph->zone()),
      parent_loop_header_(NULL),
      inlined_entry_block_(NULL),
      is_inline_return_target_(false),
      is_reachable_(true),
      dominates_loop_successors_(false),
      is_osr_entry_(false),
      is_ordered_(false) { }


Isolate* HBasicBlock::isolate() const {
  return graph_->isolate();
}


void HBasicBlock::MarkUnreachable() {
  is_reachable_ = false;
}


void HBasicBlock::AttachLoopInformation() {
  ASSERT(!IsLoopHeader());
  loop_information_ = new(zone()) HLoopInformation(this, zone());
}


void HBasicBlock::DetachLoopInformation() {
  ASSERT(IsLoopHeader());
  loop_information_ = NULL;
}


void HBasicBlock::AddPhi(HPhi* phi) {
  ASSERT(!IsStartBlock());
  phis_.Add(phi, zone());
  phi->SetBlock(this);
}


void HBasicBlock::RemovePhi(HPhi* phi) {
  ASSERT(phi->block() == this);
  ASSERT(phis_.Contains(phi));
  phi->Kill();
  phis_.RemoveElement(phi);
  phi->SetBlock(NULL);
}


void HBasicBlock::AddInstruction(HInstruction* instr,
                                 HSourcePosition position) {
  ASSERT(!IsStartBlock() || !IsFinished());
  ASSERT(!instr->IsLinked());
  ASSERT(!IsFinished());

  if (!position.IsUnknown()) {
    instr->set_position(position);
  }
  if (first_ == NULL) {
    ASSERT(last_environment() != NULL);
    ASSERT(!last_environment()->ast_id().IsNone());
    HBlockEntry* entry = new(zone()) HBlockEntry();
    entry->InitializeAsFirst(this);
    if (!position.IsUnknown()) {
      entry->set_position(position);
    } else {
      ASSERT(!FLAG_hydrogen_track_positions ||
             !graph()->info()->IsOptimizing());
    }
    first_ = last_ = entry;
  }
  instr->InsertAfter(last_);
}


HPhi* HBasicBlock::AddNewPhi(int merged_index) {
  if (graph()->IsInsideNoSideEffectsScope()) {
    merged_index = HPhi::kInvalidMergedIndex;
  }
  HPhi* phi = new(zone()) HPhi(merged_index, zone());
  AddPhi(phi);
  return phi;
}


HSimulate* HBasicBlock::CreateSimulate(BailoutId ast_id,
                                       RemovableSimulate removable) {
  ASSERT(HasEnvironment());
  HEnvironment* environment = last_environment();
  ASSERT(ast_id.IsNone() ||
         ast_id == BailoutId::StubEntry() ||
         environment->closure()->shared()->VerifyBailoutId(ast_id));

  int push_count = environment->push_count();
  int pop_count = environment->pop_count();

  HSimulate* instr =
      new(zone()) HSimulate(ast_id, pop_count, zone(), removable);
#ifdef DEBUG
  instr->set_closure(environment->closure());
#endif
  // Order of pushed values: newest (top of stack) first. This allows
  // HSimulate::MergeWith() to easily append additional pushed values
  // that are older (from further down the stack).
  for (int i = 0; i < push_count; ++i) {
    instr->AddPushedValue(environment->ExpressionStackAt(i));
  }
  for (GrowableBitVector::Iterator it(environment->assigned_variables(),
                                      zone());
       !it.Done();
       it.Advance()) {
    int index = it.Current();
    instr->AddAssignedValue(index, environment->Lookup(index));
  }
  environment->ClearHistory();
  return instr;
}


void HBasicBlock::Finish(HControlInstruction* end, HSourcePosition position) {
  ASSERT(!IsFinished());
  AddInstruction(end, position);
  end_ = end;
  for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
    it.Current()->RegisterPredecessor(this);
  }
}


void HBasicBlock::Goto(HBasicBlock* block,
                       HSourcePosition position,
                       FunctionState* state,
                       bool add_simulate) {
  bool drop_extra = state != NULL &&
      state->inlining_kind() == NORMAL_RETURN;

  if (block->IsInlineReturnTarget()) {
    HEnvironment* env = last_environment();
    int argument_count = env->arguments_environment()->parameter_count();
    AddInstruction(new(zone())
                   HLeaveInlined(state->entry(), argument_count),
                   position);
    UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
  }

  if (add_simulate) AddNewSimulate(BailoutId::None(), position);
  HGoto* instr = new(zone()) HGoto(block);
  Finish(instr, position);
}


void HBasicBlock::AddLeaveInlined(HValue* return_value,
                                  FunctionState* state,
                                  HSourcePosition position) {
  HBasicBlock* target = state->function_return();
  bool drop_extra = state->inlining_kind() == NORMAL_RETURN;

  ASSERT(target->IsInlineReturnTarget());
  ASSERT(return_value != NULL);
  HEnvironment* env = last_environment();
  int argument_count = env->arguments_environment()->parameter_count();
  AddInstruction(new(zone()) HLeaveInlined(state->entry(), argument_count),
                 position);
  UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
  last_environment()->Push(return_value);
  AddNewSimulate(BailoutId::None(), position);
  HGoto* instr = new(zone()) HGoto(target);
  Finish(instr, position);
}


void HBasicBlock::SetInitialEnvironment(HEnvironment* env) {
  ASSERT(!HasEnvironment());
  ASSERT(first() == NULL);
  UpdateEnvironment(env);
}


void HBasicBlock::UpdateEnvironment(HEnvironment* env) {
  last_environment_ = env;
  graph()->update_maximum_environment_size(env->first_expression_index());
}


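// Set |ast_id| as the bailout id on the simulate of every predecessor and on
// each predecessor's environment, so that all incoming edges agree on the
// bailout id at the join point.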
void HBasicBlock::SetJoinId(BailoutId ast_id) {
  int length = predecessors_.length();
  ASSERT(length > 0);
  for (int i = 0; i < length; i++) {
    HBasicBlock* predecessor = predecessors_[i];
    ASSERT(predecessor->end()->IsGoto());
    HSimulate* simulate = HSimulate::cast(predecessor->end()->previous());
    ASSERT(i != 0 ||
           (predecessor->last_environment()->closure().is_null() ||
            predecessor->last_environment()->closure()->shared()
              ->VerifyBailoutId(ast_id)));
    simulate->set_ast_id(ast_id);
    predecessor->last_environment()->set_ast_id(ast_id);
  }
}


bool HBasicBlock::Dominates(HBasicBlock* other) const {
  HBasicBlock* current = other->dominator();
  while (current != NULL) {
    if (current == this) return true;
    current = current->dominator();
  }
  return false;
}


bool HBasicBlock::EqualToOrDominates(HBasicBlock* other) const {
  if (this == other) return true;
  return Dominates(other);
}


int HBasicBlock::LoopNestingDepth() const {
  const HBasicBlock* current = this;
  int result = (current->IsLoopHeader()) ? 1 : 0;
  while (current->parent_loop_header() != NULL) {
    current = current->parent_loop_header();
    result++;
  }
  return result;
}


void HBasicBlock::PostProcessLoopHeader(IterationStatement* stmt) {
  ASSERT(IsLoopHeader());

  SetJoinId(stmt->EntryId());
  if (predecessors()->length() == 1) {
    // This is a degenerate loop.
    DetachLoopInformation();
    return;
  }

  // Only the first entry into the loop is from outside the loop. All other
  // entries must be back edges.
  for (int i = 1; i < predecessors()->length(); ++i) {
    loop_information()->RegisterBackEdge(predecessors()->at(i));
  }
}


void HBasicBlock::MarkSuccEdgeUnreachable(int succ) {
  ASSERT(IsFinished());
  HBasicBlock* succ_block = end()->SuccessorAt(succ);

  ASSERT(succ_block->predecessors()->length() == 1);
  succ_block->MarkUnreachable();
}


void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
  if (HasPredecessor()) {
    // Only loop header blocks can have a predecessor added after
    // instructions have been added to the block (they have phis for all
    // values in the environment; these phis may be eliminated later).
    ASSERT(IsLoopHeader() || first_ == NULL);
    HEnvironment* incoming_env = pred->last_environment();
    if (IsLoopHeader()) {
      ASSERT(phis()->length() == incoming_env->length());
      for (int i = 0; i < phis_.length(); ++i) {
        phis_[i]->AddInput(incoming_env->values()->at(i));
      }
    } else {
      last_environment()->AddIncomingEdge(this, pred->last_environment());
    }
  } else if (!HasEnvironment() && !IsFinished()) {
    ASSERT(!IsLoopHeader());
    SetInitialEnvironment(pred->last_environment()->Copy());
  }

  predecessors_.Add(pred, zone());
}


void HBasicBlock::AddDominatedBlock(HBasicBlock* block) {
  ASSERT(!dominated_blocks_.Contains(block));
  // Keep the list of dominated blocks sorted such that if there are two
  // consecutive blocks in this list, the predecessor comes before the
  // successor.
  int index = 0;
  while (index < dominated_blocks_.length() &&
         dominated_blocks_[index]->block_id() < block->block_id()) {
    ++index;
  }
  dominated_blocks_.InsertAt(index, block, zone());
}


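// Update this block's dominator to the nearest common dominator of the
// current dominator and |other| by walking up the dominator tree. Block ids
// follow reverse post order, so the block with the larger id is the deeper
// of the two.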
void HBasicBlock::AssignCommonDominator(HBasicBlock* other) {
  if (dominator_ == NULL) {
    dominator_ = other;
    other->AddDominatedBlock(this);
  } else if (other->dominator() != NULL) {
    HBasicBlock* first = dominator_;
    HBasicBlock* second = other;

    while (first != second) {
      if (first->block_id() > second->block_id()) {
        first = first->dominator();
      } else {
        second = second->dominator();
      }
      ASSERT(first != NULL && second != NULL);
    }

    if (dominator_ != first) {
      ASSERT(dominator_->dominated_blocks_.Contains(this));
      dominator_->dominated_blocks_.RemoveElement(this);
      dominator_ = first;
      first->AddDominatedBlock(this);
    }
  }
}


void HBasicBlock::AssignLoopSuccessorDominators() {
  // Mark blocks that dominate all subsequent reachable blocks inside their
  // loop. Exploit the fact that blocks are sorted in reverse post order. When
  // the loop is visited in increasing block id order, if the number of
  // non-loop-exiting successor edges at the dominator_candidate block doesn't
  // exceed the number of previously encountered predecessor edges, there is no
  // path from the loop header to any block with higher id that doesn't go
  // through the dominator_candidate block. In this case, the
  // dominator_candidate block is guaranteed to dominate all blocks reachable
  // from it with higher ids.
  HBasicBlock* last = loop_information()->GetLastBackEdge();
  int outstanding_successors = 1;  // one edge from the pre-header
  // Header always dominates everything.
  MarkAsLoopSuccessorDominator();
  for (int j = block_id(); j <= last->block_id(); ++j) {
    HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
    for (HPredecessorIterator it(dominator_candidate); !it.Done();
         it.Advance()) {
      HBasicBlock* predecessor = it.Current();
      // Don't count back edges.
      if (predecessor->block_id() < dominator_candidate->block_id()) {
        outstanding_successors--;
      }
    }

    // If more successors than predecessors have been seen in the loop up to
    // now, it's not possible to guarantee that the current block dominates
    // all of the blocks with higher IDs. In this case, assume conservatively
    // that those paths through the loop that don't go through the current
    // block contain all of the loop's dependencies. Also be careful to record
    // dominator information about the current loop that's being processed,
    // and not nested loops, which will be processed when
    // AssignLoopSuccessorDominators gets called on their header.
    ASSERT(outstanding_successors >= 0);
    HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header();
    if (outstanding_successors == 0 &&
        (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
      dominator_candidate->MarkAsLoopSuccessorDominator();
    }
    HControlInstruction* end = dominator_candidate->end();
    for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
      HBasicBlock* successor = it.Current();
      // Only count successors that remain inside the loop and don't loop back
      // to a loop header.
      if (successor->block_id() > dominator_candidate->block_id() &&
          successor->block_id() <= last->block_id()) {
        // Backwards edges must land on loop headers.
        ASSERT(successor->block_id() > dominator_candidate->block_id() ||
               successor->IsLoopHeader());
        outstanding_successors++;
      }
    }
  }
}


int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
  for (int i = 0; i < predecessors_.length(); ++i) {
    if (predecessors_[i] == predecessor) return i;
  }
  UNREACHABLE();
  return -1;
}


#ifdef DEBUG
void HBasicBlock::Verify() {
  // Check that every block is finished.
  ASSERT(IsFinished());
  ASSERT(block_id() >= 0);

  // Check that the incoming edges are in edge split form.
  if (predecessors_.length() > 1) {
    for (int i = 0; i < predecessors_.length(); ++i) {
      ASSERT(predecessors_[i]->end()->SecondSuccessor() == NULL);
    }
  }
}
#endif


void HLoopInformation::RegisterBackEdge(HBasicBlock* block) {
  this->back_edges_.Add(block, block->zone());
  AddBlock(block);
}


HBasicBlock* HLoopInformation::GetLastBackEdge() const {
  int max_id = -1;
  HBasicBlock* result = NULL;
  for (int i = 0; i < back_edges_.length(); ++i) {
    HBasicBlock* cur = back_edges_[i];
    if (cur->block_id() > max_id) {
      max_id = cur->block_id();
      result = cur;
    }
  }
  return result;
}


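// Add |block| and, transitively, its predecessors to this loop. Blocks that
// already belong to an inner loop are represented by that loop's header, and
// blocks already recorded for this loop are skipped.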
void HLoopInformation::AddBlock(HBasicBlock* block) {
  if (block == loop_header()) return;
  if (block->parent_loop_header() == loop_header()) return;
  if (block->parent_loop_header() != NULL) {
    AddBlock(block->parent_loop_header());
  } else {
    block->set_parent_loop_header(loop_header());
    blocks_.Add(block, block->zone());
    for (int i = 0; i < block->predecessors()->length(); ++i) {
      AddBlock(block->predecessors()->at(i));
    }
  }
}


#ifdef DEBUG

// Checks reachability of the blocks in this graph and stores a bit in
// the BitVector "reachable()" for every block that can be reached
// from the start block of the graph. If "dont_visit" is non-null, the given
// block is treated as if it were not part of the graph. "visited_count()"
// returns the number of reachable blocks.
class ReachabilityAnalyzer BASE_EMBEDDED {
 public:
  ReachabilityAnalyzer(HBasicBlock* entry_block,
                       int block_count,
                       HBasicBlock* dont_visit)
      : visited_count_(0),
        stack_(16, entry_block->zone()),
        reachable_(block_count, entry_block->zone()),
        dont_visit_(dont_visit) {
    PushBlock(entry_block);
    Analyze();
  }

  int visited_count() const { return visited_count_; }
  const BitVector* reachable() const { return &reachable_; }

 private:
  void PushBlock(HBasicBlock* block) {
    if (block != NULL && block != dont_visit_ &&
        !reachable_.Contains(block->block_id())) {
      reachable_.Add(block->block_id());
      stack_.Add(block, block->zone());
      visited_count_++;
    }
  }

  void Analyze() {
    while (!stack_.is_empty()) {
      HControlInstruction* end = stack_.RemoveLast()->end();
      for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
        PushBlock(it.Current());
      }
    }
  }

  int visited_count_;
  ZoneList<HBasicBlock*> stack_;
  BitVector reachable_;
  HBasicBlock* dont_visit_;
};


void HGraph::Verify(bool do_full_verify) const {
  Heap::RelocationLock relocation_lock(isolate()->heap());
  AllowHandleDereference allow_deref;
  AllowDeferredHandleDereference allow_deferred_deref;
  for (int i = 0; i < blocks_.length(); i++) {
    HBasicBlock* block = blocks_.at(i);

    block->Verify();

    // Check that every block contains at least one node and that only the last
    // node is a control instruction.
    HInstruction* current = block->first();
    ASSERT(current != NULL && current->IsBlockEntry());
    while (current != NULL) {
      ASSERT((current->next() == NULL) == current->IsControlInstruction());
      ASSERT(current->block() == block);
      current->Verify();
      current = current->next();
    }

    // Check that successors are correctly set.
    HBasicBlock* first = block->end()->FirstSuccessor();
    HBasicBlock* second = block->end()->SecondSuccessor();
    ASSERT(second == NULL || first != NULL);

    // Check that the predecessor array is correct.
    if (first != NULL) {
      ASSERT(first->predecessors()->Contains(block));
      if (second != NULL) {
        ASSERT(second->predecessors()->Contains(block));
      }
    }

    // Check that phis have correct arguments.
    for (int j = 0; j < block->phis()->length(); j++) {
      HPhi* phi = block->phis()->at(j);
      phi->Verify();
    }

    // Check that all join blocks have predecessors that end with an
    // unconditional goto and agree on their environment node id.
    if (block->predecessors()->length() >= 2) {
      BailoutId id =
          block->predecessors()->first()->last_environment()->ast_id();
      for (int k = 0; k < block->predecessors()->length(); k++) {
        HBasicBlock* predecessor = block->predecessors()->at(k);
        ASSERT(predecessor->end()->IsGoto() ||
               predecessor->end()->IsDeoptimize());
        ASSERT(predecessor->last_environment()->ast_id() == id);
      }
    }
  }

  // Check the special property of the first block: it has no predecessors.
  ASSERT(blocks_.at(0)->predecessors()->is_empty());

  if (do_full_verify) {
    // Check that the graph is fully connected.
    ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), NULL);
    ASSERT(analyzer.visited_count() == blocks_.length());

    // Check that the entry block's dominator is NULL.
    ASSERT(entry_block_->dominator() == NULL);

    // Check dominators.
    for (int i = 0; i < blocks_.length(); ++i) {
      HBasicBlock* block = blocks_.at(i);
      if (block->dominator() == NULL) {
        // Only the start block may have no dominator assigned.
        ASSERT(i == 0);
      } else {
        // Assert that the block is unreachable if its dominator must not be
        // visited.
        ReachabilityAnalyzer dominator_analyzer(entry_block_,
                                                blocks_.length(),
                                                block->dominator());
        ASSERT(!dominator_analyzer.reachable()->Contains(block->block_id()));
      }
    }
  }
}

#endif


HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
                               int32_t value) {
  if (!pointer->is_set()) {
    // Can't pass GetInvalidContext() to HConstant::New, because that will
    // recursively call GetConstant.
    HConstant* constant = HConstant::New(zone(), NULL, value);
    constant->InsertAfter(entry_block()->first());
    pointer->set(constant);
    return constant;
  }
  return ReinsertConstantIfNecessary(pointer->get());
}


HConstant* HGraph::ReinsertConstantIfNecessary(HConstant* constant) {
  if (!constant->IsLinked()) {
    // The constant was removed from the graph. Reinsert.
    constant->ClearFlag(HValue::kIsDead);
    constant->InsertAfter(entry_block()->first());
  }
  return constant;
}


HConstant* HGraph::GetConstant0() {
  return GetConstant(&constant_0_, 0);
}


HConstant* HGraph::GetConstant1() {
  return GetConstant(&constant_1_, 1);
}


HConstant* HGraph::GetConstantMinus1() {
  return GetConstant(&constant_minus1_, -1);
}


#define DEFINE_GET_CONSTANT(Name, name, type, htype, boolean_value)            \
HConstant* HGraph::GetConstant##Name() {                                       \
  if (!constant_##name##_.is_set()) {                                          \
    HConstant* constant = new(zone()) HConstant(                               \
        Unique<Object>::CreateImmovable(isolate()->factory()->name##_value()), \
        Unique<Map>::CreateImmovable(isolate()->factory()->type##_map()),      \
        false,                                                                 \
        Representation::Tagged(),                                              \
        htype,                                                                 \
        true,                                                                  \
        boolean_value,                                                         \
        false,                                                                 \
        ODDBALL_TYPE);                                                         \
    constant->InsertAfter(entry_block()->first());                             \
    constant_##name##_.set(constant);                                          \
  }                                                                            \
  return ReinsertConstantIfNecessary(constant_##name##_.get());                \
}


DEFINE_GET_CONSTANT(Undefined, undefined, undefined, HType::Undefined(), false)
DEFINE_GET_CONSTANT(True, true, boolean, HType::Boolean(), true)
DEFINE_GET_CONSTANT(False, false, boolean, HType::Boolean(), false)
DEFINE_GET_CONSTANT(Hole, the_hole, the_hole, HType::None(), false)
DEFINE_GET_CONSTANT(Null, null, null, HType::Null(), false)


#undef DEFINE_GET_CONSTANT

#define DEFINE_IS_CONSTANT(Name, name)                                         \
bool HGraph::IsConstant##Name(HConstant* constant) {                           \
  return constant_##name##_.is_set() && constant == constant_##name##_.get();  \
}
DEFINE_IS_CONSTANT(Undefined, undefined)
DEFINE_IS_CONSTANT(0, 0)
DEFINE_IS_CONSTANT(1, 1)
DEFINE_IS_CONSTANT(Minus1, minus1)
DEFINE_IS_CONSTANT(True, true)
DEFINE_IS_CONSTANT(False, false)
DEFINE_IS_CONSTANT(Hole, the_hole)
DEFINE_IS_CONSTANT(Null, null)

#undef DEFINE_IS_CONSTANT


HConstant* HGraph::GetInvalidContext() {
  return GetConstant(&constant_invalid_context_, 0xFFFFC0C7);
}


bool HGraph::IsStandardConstant(HConstant* constant) {
  if (IsConstantUndefined(constant)) return true;
  if (IsConstant0(constant)) return true;
  if (IsConstant1(constant)) return true;
  if (IsConstantMinus1(constant)) return true;
  if (IsConstantTrue(constant)) return true;
  if (IsConstantFalse(constant)) return true;
  if (IsConstantHole(constant)) return true;
  if (IsConstantNull(constant)) return true;
  return false;
}


HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder)
    : builder_(builder),
      finished_(false),
      did_then_(false),
      did_else_(false),
      did_else_if_(false),
      did_and_(false),
      did_or_(false),
      captured_(false),
      needs_compare_(true),
      pending_merge_block_(false),
      split_edge_merge_block_(NULL),
      merge_at_join_blocks_(NULL),
      normal_merge_at_join_block_count_(0),
      deopt_merge_at_join_block_count_(0) {
  HEnvironment* env = builder->environment();
  first_true_block_ = builder->CreateBasicBlock(env->Copy());
  first_false_block_ = builder->CreateBasicBlock(env->Copy());
}


HGraphBuilder::IfBuilder::IfBuilder(
    HGraphBuilder* builder,
    HIfContinuation* continuation)
    : builder_(builder),
      finished_(false),
      did_then_(false),
      did_else_(false),
      did_else_if_(false),
      did_and_(false),
      did_or_(false),
      captured_(false),
      needs_compare_(false),
      pending_merge_block_(false),
      first_true_block_(NULL),
      first_false_block_(NULL),
      split_edge_merge_block_(NULL),
      merge_at_join_blocks_(NULL),
      normal_merge_at_join_block_count_(0),
      deopt_merge_at_join_block_count_(0) {
  continuation->Continue(&first_true_block_,
                         &first_false_block_);
}


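// Finish the current condition block with |compare| as its control
// instruction, wiring its successors to the true/false blocks (or to a
// split-edge merge block when the condition is chained with Or()/And()).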
HControlInstruction* HGraphBuilder::IfBuilder::AddCompare(
    HControlInstruction* compare) {
  ASSERT(did_then_ == did_else_);
  if (did_else_) {
    // Handle if-then-elseif
    did_else_if_ = true;
    did_else_ = false;
    did_then_ = false;
    did_and_ = false;
    did_or_ = false;
    pending_merge_block_ = false;
    split_edge_merge_block_ = NULL;
    HEnvironment* env = builder_->environment();
    first_true_block_ = builder_->CreateBasicBlock(env->Copy());
    first_false_block_ = builder_->CreateBasicBlock(env->Copy());
  }
  if (split_edge_merge_block_ != NULL) {
    HEnvironment* env = first_false_block_->last_environment();
    HBasicBlock* split_edge =
        builder_->CreateBasicBlock(env->Copy());
    if (did_or_) {
      compare->SetSuccessorAt(0, split_edge);
      compare->SetSuccessorAt(1, first_false_block_);
    } else {
      compare->SetSuccessorAt(0, first_true_block_);
      compare->SetSuccessorAt(1, split_edge);
    }
    builder_->GotoNoSimulate(split_edge, split_edge_merge_block_);
  } else {
    compare->SetSuccessorAt(0, first_true_block_);
    compare->SetSuccessorAt(1, first_false_block_);
  }
  builder_->FinishCurrentBlock(compare);
  needs_compare_ = false;
  return compare;
}


void HGraphBuilder::IfBuilder::Or() {
  ASSERT(!needs_compare_);
  ASSERT(!did_and_);
  did_or_ = true;
  HEnvironment* env = first_false_block_->last_environment();
  if (split_edge_merge_block_ == NULL) {
    split_edge_merge_block_ =
        builder_->CreateBasicBlock(env->Copy());
    builder_->GotoNoSimulate(first_true_block_, split_edge_merge_block_);
    first_true_block_ = split_edge_merge_block_;
  }
  builder_->set_current_block(first_false_block_);
  first_false_block_ = builder_->CreateBasicBlock(env->Copy());
}


void HGraphBuilder::IfBuilder::And() {
  ASSERT(!needs_compare_);
  ASSERT(!did_or_);
  did_and_ = true;
  HEnvironment* env = first_false_block_->last_environment();
  if (split_edge_merge_block_ == NULL) {
    split_edge_merge_block_ = builder_->CreateBasicBlock(env->Copy());
    builder_->GotoNoSimulate(first_false_block_, split_edge_merge_block_);
    first_false_block_ = split_edge_merge_block_;
  }
  builder_->set_current_block(first_true_block_);
  first_true_block_ = builder_->CreateBasicBlock(env->Copy());
}


void HGraphBuilder::IfBuilder::CaptureContinuation(
    HIfContinuation* continuation) {
  ASSERT(!did_else_if_);
  ASSERT(!finished_);
  ASSERT(!captured_);

  HBasicBlock* true_block = NULL;
  HBasicBlock* false_block = NULL;
  Finish(&true_block, &false_block);
  ASSERT(true_block != NULL);
  ASSERT(false_block != NULL);
  continuation->Capture(true_block, false_block);
  captured_ = true;
  builder_->set_current_block(NULL);
  End();
}


void HGraphBuilder::IfBuilder::JoinContinuation(HIfContinuation* continuation) {
  ASSERT(!did_else_if_);
  ASSERT(!finished_);
  ASSERT(!captured_);
  HBasicBlock* true_block = NULL;
  HBasicBlock* false_block = NULL;
  Finish(&true_block, &false_block);
  merge_at_join_blocks_ = NULL;
  if (true_block != NULL && !true_block->IsFinished()) {
    ASSERT(continuation->IsTrueReachable());
    builder_->GotoNoSimulate(true_block, continuation->true_branch());
  }
  if (false_block != NULL && !false_block->IsFinished()) {
    ASSERT(continuation->IsFalseReachable());
    builder_->GotoNoSimulate(false_block, continuation->false_branch());
  }
  captured_ = true;
  End();
}


void HGraphBuilder::IfBuilder::Then() {
  ASSERT(!captured_);
  ASSERT(!finished_);
  did_then_ = true;
  if (needs_compare_) {
    // Handle ifs without any expressions; they jump directly to the "else"
    // branch. However, we must pretend that the "then" branch is reachable,
    // so that the graph builder visits it and sees any live-range-extending
    // constructs within it.
    HConstant* constant_false = builder_->graph()->GetConstantFalse();
    ToBooleanStub::Types boolean_type = ToBooleanStub::Types();
    boolean_type.Add(ToBooleanStub::BOOLEAN);
    HBranch* branch = builder()->New<HBranch>(
        constant_false, boolean_type, first_true_block_, first_false_block_);
    builder_->FinishCurrentBlock(branch);
  }
  builder_->set_current_block(first_true_block_);
  pending_merge_block_ = true;
}


void HGraphBuilder::IfBuilder::Else() {
  ASSERT(did_then_);
  ASSERT(!captured_);
  ASSERT(!finished_);
  AddMergeAtJoinBlock(false);
  builder_->set_current_block(first_false_block_);
  pending_merge_block_ = true;
  did_else_ = true;
}


void HGraphBuilder::IfBuilder::Deopt(const char* reason) {
  ASSERT(did_then_);
  builder_->Add<HDeoptimize>(reason, Deoptimizer::EAGER);
  AddMergeAtJoinBlock(true);
}


void HGraphBuilder::IfBuilder::Return(HValue* value) {
  HValue* parameter_count = builder_->graph()->GetConstantMinus1();
  builder_->FinishExitCurrentBlock(
      builder_->New<HReturn>(value, parameter_count));
  AddMergeAtJoinBlock(false);
}


void HGraphBuilder::IfBuilder::AddMergeAtJoinBlock(bool deopt) {
  if (!pending_merge_block_) return;
  HBasicBlock* block = builder_->current_block();
  ASSERT(block == NULL || !block->IsFinished());
  MergeAtJoinBlock* record =
      new(builder_->zone()) MergeAtJoinBlock(block, deopt,
                                             merge_at_join_blocks_);
  merge_at_join_blocks_ = record;
  if (block != NULL) {
    ASSERT(block->end() == NULL);
    if (deopt) {
      deopt_merge_at_join_block_count_++;
    } else {
      normal_merge_at_join_block_count_++;
    }
  }
  builder_->set_current_block(NULL);
  pending_merge_block_ = false;
}


void HGraphBuilder::IfBuilder::Finish() {
  ASSERT(!finished_);
  if (!did_then_) {
    Then();
  }
  AddMergeAtJoinBlock(false);
  if (!did_else_) {
    Else();
    AddMergeAtJoinBlock(false);
  }
  finished_ = true;
}


void HGraphBuilder::IfBuilder::Finish(HBasicBlock** then_continuation,
                                      HBasicBlock** else_continuation) {
  Finish();

  MergeAtJoinBlock* else_record = merge_at_join_blocks_;
  if (else_continuation != NULL) {
    *else_continuation = else_record->block_;
  }
  MergeAtJoinBlock* then_record = else_record->next_;
  if (then_continuation != NULL) {
    *then_continuation = then_record->block_;
  }
  ASSERT(then_record->next_ == NULL);
}


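// Join all recorded then/else exits. If more than one block survives, a new
// merge block is created and all surviving non-deopt blocks jump to it;
// deopting blocks are finished with an abnormal exit instead.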
void HGraphBuilder::IfBuilder::End() {
  if (captured_) return;
  Finish();

  int total_merged_blocks = normal_merge_at_join_block_count_ +
    deopt_merge_at_join_block_count_;
  ASSERT(total_merged_blocks >= 1);
  HBasicBlock* merge_block = total_merged_blocks == 1
      ? NULL : builder_->graph()->CreateBasicBlock();

  // Merge non-deopt blocks first to ensure the environment has the right
  // size for padding.
  MergeAtJoinBlock* current = merge_at_join_blocks_;
  while (current != NULL) {
    if (!current->deopt_ && current->block_ != NULL) {
      // If there is only one block that makes it through to the end of the
      // if, then just set it as the current block and continue rather than
      // creating an unnecessary merge block.
      if (total_merged_blocks == 1) {
        builder_->set_current_block(current->block_);
        return;
      }
      builder_->GotoNoSimulate(current->block_, merge_block);
    }
    current = current->next_;
  }

  // Merge deopt blocks, padding when necessary.
  current = merge_at_join_blocks_;
  while (current != NULL) {
    if (current->deopt_ && current->block_ != NULL) {
      current->block_->FinishExit(
          HAbnormalExit::New(builder_->zone(), NULL),
          HSourcePosition::Unknown());
    }
    current = current->next_;
  }
  builder_->set_current_block(merge_block);
}


HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder,
                                        HValue* context,
                                        LoopBuilder::Direction direction)
    : builder_(builder),
      context_(context),
      direction_(direction),
      finished_(false) {
  header_block_ = builder->CreateLoopHeaderBlock();
  body_block_ = NULL;
  exit_block_ = NULL;
  exit_trampoline_block_ = NULL;
  increment_amount_ = builder_->graph()->GetConstant1();
}


HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder,
                                        HValue* context,
                                        LoopBuilder::Direction direction,
                                        HValue* increment_amount)
    : builder_(builder),
      context_(context),
      direction_(direction),
      finished_(false) {
  header_block_ = builder->CreateLoopHeaderBlock();
  body_block_ = NULL;
  exit_block_ = NULL;
  exit_trampoline_block_ = NULL;
  increment_amount_ = increment_amount;
}


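// Create the loop phi for the induction variable, finish the header with the
// compare that selects between the body and the exit block, and return the
// value the body should use as the induction variable.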
HValue* HGraphBuilder::LoopBuilder::BeginBody(
    HValue* initial,
    HValue* terminating,
    Token::Value token) {
  HEnvironment* env = builder_->environment();
  phi_ = header_block_->AddNewPhi(env->values()->length());
  phi_->AddInput(initial);
  env->Push(initial);
  builder_->GotoNoSimulate(header_block_);

  HEnvironment* body_env = env->Copy();
  HEnvironment* exit_env = env->Copy();
  // Remove the phi from the expression stack.
  body_env->Pop();
  exit_env->Pop();
  body_block_ = builder_->CreateBasicBlock(body_env);
  exit_block_ = builder_->CreateBasicBlock(exit_env);

  builder_->set_current_block(header_block_);
  env->Pop();
  builder_->FinishCurrentBlock(builder_->New<HCompareNumericAndBranch>(
          phi_, terminating, token, body_block_, exit_block_));

  builder_->set_current_block(body_block_);
  if (direction_ == kPreIncrement || direction_ == kPreDecrement) {
    HValue* one = builder_->graph()->GetConstant1();
    if (direction_ == kPreIncrement) {
      increment_ = HAdd::New(zone(), context_, phi_, one);
    } else {
      increment_ = HSub::New(zone(), context_, phi_, one);
    }
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
    return increment_;
  } else {
    return phi_;
  }
}


void HGraphBuilder::LoopBuilder::Break() {
  if (exit_trampoline_block_ == NULL) {
    // It's the first time we saw a break.
    HEnvironment* env = exit_block_->last_environment()->Copy();
    exit_trampoline_block_ = builder_->CreateBasicBlock(env);
    builder_->GotoNoSimulate(exit_block_, exit_trampoline_block_);
  }

  builder_->GotoNoSimulate(exit_trampoline_block_);
  builder_->set_current_block(NULL);
}


void HGraphBuilder::LoopBuilder::EndBody() {
  ASSERT(!finished_);

  if (direction_ == kPostIncrement || direction_ == kPostDecrement) {
    if (direction_ == kPostIncrement) {
      increment_ = HAdd::New(zone(), context_, phi_, increment_amount_);
    } else {
      increment_ = HSub::New(zone(), context_, phi_, increment_amount_);
    }
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
  }

  // Push the new increment value on the expression stack to merge into
  // the phi.
  builder_->environment()->Push(increment_);
  HBasicBlock* last_block = builder_->current_block();
  builder_->GotoNoSimulate(last_block, header_block_);
  header_block_->loop_information()->RegisterBackEdge(last_block);

  if (exit_trampoline_block_ != NULL) {
    builder_->set_current_block(exit_trampoline_block_);
  } else {
    builder_->set_current_block(exit_block_);
  }
  finished_ = true;
}


HGraph* HGraphBuilder::CreateGraph() {
  graph_ = new(zone()) HGraph(info_);
  if (FLAG_hydrogen_stats) isolate()->GetHStatistics()->Initialize(info_);
  CompilationPhase phase("H_Block building", info_);
  set_current_block(graph()->entry_block());
  if (!BuildGraph()) return NULL;
  graph()->FinalizeUniqueness();
  return graph_;
}


HInstruction* HGraphBuilder::AddInstruction(HInstruction* instr) {
  ASSERT(current_block() != NULL);
  ASSERT(!FLAG_hydrogen_track_positions ||
         !position_.IsUnknown() ||
         !info_->IsOptimizing());
  current_block()->AddInstruction(instr, source_position());
  if (graph()->IsInsideNoSideEffectsScope()) {
    instr->SetFlag(HValue::kHasNoObservableSideEffects);
  }
  return instr;
}


void HGraphBuilder::FinishCurrentBlock(HControlInstruction* last) {
  ASSERT(!FLAG_hydrogen_track_positions ||
         !info_->IsOptimizing() ||
         !position_.IsUnknown());
  current_block()->Finish(last, source_position());
  if (last->IsReturn() || last->IsAbnormalExit()) {
    set_current_block(NULL);
  }
}


void HGraphBuilder::FinishExitCurrentBlock(HControlInstruction* instruction) {
  ASSERT(!FLAG_hydrogen_track_positions || !info_->IsOptimizing() ||
         !position_.IsUnknown());
  current_block()->FinishExit(instruction, source_position());
  if (instruction->IsReturn() || instruction->IsAbnormalExit()) {
    set_current_block(NULL);
  }
}


void HGraphBuilder::AddIncrementCounter(StatsCounter* counter) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    HValue* reference = Add<HConstant>(ExternalReference(counter));
    HValue* old_value = Add<HLoadNamedField>(
        reference, static_cast<HValue*>(NULL), HObjectAccess::ForCounter());
    HValue* new_value = AddUncasted<HAdd>(old_value, graph()->GetConstant1());
    new_value->ClearFlag(HValue::kCanOverflow);  // Ignore counter overflow.
    Add<HStoreNamedField>(reference, HObjectAccess::ForCounter(),
                          new_value, STORE_TO_INITIALIZED_ENTRY);
  }
}


void HGraphBuilder::AddSimulate(BailoutId id,
                                RemovableSimulate removable) {
  ASSERT(current_block() != NULL);
  ASSERT(!graph()->IsInsideNoSideEffectsScope());
  current_block()->AddNewSimulate(id, source_position(), removable);
}


HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
  HBasicBlock* b = graph()->CreateBasicBlock();
  b->SetInitialEnvironment(env);
  return b;
}


HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() {
  HBasicBlock* header = graph()->CreateBasicBlock();
  HEnvironment* entry_env = environment()->CopyAsLoopHeader(header);
  header->SetInitialEnvironment(entry_env);
  header->AttachLoopInformation();
  return header;
}


HValue* HGraphBuilder::BuildGetElementsKind(HValue* object) {
  HValue* map = Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
                                     HObjectAccess::ForMap());

  HValue* bit_field2 = Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
                                            HObjectAccess::ForMapBitField2());
  return BuildDecodeField<Map::ElementsKindBits>(bit_field2);
}


HValue* HGraphBuilder::BuildCheckHeapObject(HValue* obj) {
  if (obj->type().IsHeapObject()) return obj;
  return Add<HCheckHeapObject>(obj);
}


void HGraphBuilder::FinishExitWithHardDeoptimization(const char* reason) {
  Add<HDeoptimize>(reason, Deoptimizer::EAGER);
  FinishExitCurrentBlock(New<HAbnormalExit>());
}


HValue* HGraphBuilder::BuildCheckString(HValue* string) {
  if (!string->type().IsString()) {
    ASSERT(!string->IsConstant() ||
           !HConstant::cast(string)->HasStringValue());
    BuildCheckHeapObject(string);
    return Add<HCheckInstanceType>(string, HCheckInstanceType::IS_STRING);
  }
  return string;
}


HValue* HGraphBuilder::BuildWrapReceiver(HValue* object, HValue* function) {
  if (object->type().IsJSObject()) return object;
  if (function->IsConstant() &&
      HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
    Handle<JSFunction> f = Handle<JSFunction>::cast(
        HConstant::cast(function)->handle(isolate()));
    SharedFunctionInfo* shared = f->shared();
    if (shared->strict_mode() == STRICT || shared->native()) return object;
  }
  return Add<HWrapReceiver>(object, function);
}


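// Check whether |key| is beyond the current length of the array (>= for holey
// kinds, == for the append case otherwise). If it is, bounds-check it against
// the maximum allowed gap, grow the backing store when the key exceeds the
// current capacity and, for JSArrays, bump the length; otherwise just
// bounds-check the key against the length.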
1279 HValue* HGraphBuilder::BuildCheckForCapacityGrow(
1280     HValue* object,
1281     HValue* elements,
1282     ElementsKind kind,
1283     HValue* length,
1284     HValue* key,
1285     bool is_js_array,
1286     PropertyAccessType access_type) {
1287   IfBuilder length_checker(this);
1288
1289   Token::Value token = IsHoleyElementsKind(kind) ? Token::GTE : Token::EQ;
1290   length_checker.If<HCompareNumericAndBranch>(key, length, token);
1291
1292   length_checker.Then();
1293
1294   HValue* current_capacity = AddLoadFixedArrayLength(elements);
1295
1296   IfBuilder capacity_checker(this);
1297
1298   capacity_checker.If<HCompareNumericAndBranch>(key, current_capacity,
1299                                                 Token::GTE);
1300   capacity_checker.Then();
1301
1302   HValue* max_gap = Add<HConstant>(static_cast<int32_t>(JSObject::kMaxGap));
1303   HValue* max_capacity = AddUncasted<HAdd>(current_capacity, max_gap);
1304
1305   Add<HBoundsCheck>(key, max_capacity);
1306
1307   HValue* new_capacity = BuildNewElementsCapacity(key);
1308   HValue* new_elements = BuildGrowElementsCapacity(object, elements,
1309                                                    kind, kind, length,
1310                                                    new_capacity);
1311
1312   environment()->Push(new_elements);
1313   capacity_checker.Else();
1314
1315   environment()->Push(elements);
1316   capacity_checker.End();
1317
1318   if (is_js_array) {
1319     HValue* new_length = AddUncasted<HAdd>(key, graph_->GetConstant1());
1320     new_length->ClearFlag(HValue::kCanOverflow);
1321
1322     Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(kind),
1323                           new_length);
1324   }
1325
1326   if (access_type == STORE && kind == FAST_SMI_ELEMENTS) {
1327     HValue* checked_elements = environment()->Top();
1328
1329     // Write zero to ensure that the new element is initialized with some smi.
1330     Add<HStoreKeyed>(checked_elements, key, graph()->GetConstant0(), kind);
1331   }
1332
1333   length_checker.Else();
1334   Add<HBoundsCheck>(key, length);
1335
1336   environment()->Push(elements);
1337   length_checker.End();
1338
1339   return environment()->Pop();
1340 }
1341
1342
1343 HValue* HGraphBuilder::BuildCopyElementsOnWrite(HValue* object,
1344                                                 HValue* elements,
1345                                                 ElementsKind kind,
1346                                                 HValue* length) {
1347   Factory* factory = isolate()->factory();
1348
1349   IfBuilder cow_checker(this);
1350
1351   cow_checker.If<HCompareMap>(elements, factory->fixed_cow_array_map());
1352   cow_checker.Then();
1353
1354   HValue* capacity = AddLoadFixedArrayLength(elements);
1355
1356   HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind,
1357                                                    kind, length, capacity);
1358
1359   environment()->Push(new_elements);
1360
1361   cow_checker.Else();
1362
1363   environment()->Push(elements);
1364
1365   cow_checker.End();
1366
1367   return environment()->Pop();
1368 }
1369
1370
1371 void HGraphBuilder::BuildTransitionElementsKind(HValue* object,
1372                                                 HValue* map,
1373                                                 ElementsKind from_kind,
1374                                                 ElementsKind to_kind,
1375                                                 bool is_jsarray) {
1376   ASSERT(!IsFastHoleyElementsKind(from_kind) ||
1377          IsFastHoleyElementsKind(to_kind));
1378
1379   if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
1380     Add<HTrapAllocationMemento>(object);
1381   }
1382
1383   if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
1384     HInstruction* elements = AddLoadElements(object);
1385
1386     HInstruction* empty_fixed_array = Add<HConstant>(
1387         isolate()->factory()->empty_fixed_array());
1388
1389     IfBuilder if_builder(this);
1390
1391     if_builder.IfNot<HCompareObjectEqAndBranch>(elements, empty_fixed_array);
1392
1393     if_builder.Then();
1394
1395     HInstruction* elements_length = AddLoadFixedArrayLength(elements);
1396
1397     HInstruction* array_length = is_jsarray
1398         ? Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
1399                                HObjectAccess::ForArrayLength(from_kind))
1400         : elements_length;
1401
1402     BuildGrowElementsCapacity(object, elements, from_kind, to_kind,
1403                               array_length, elements_length);
1404
1405     if_builder.End();
1406   }
1407
1408   Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map);
1409 }
1410
1411
1412 void HGraphBuilder::BuildJSObjectCheck(HValue* receiver,
1413                                        int bit_field_mask) {
1414   // Check that the object isn't a smi.
1415   Add<HCheckHeapObject>(receiver);
1416
1417   // Get the map of the receiver.
1418   HValue* map = Add<HLoadNamedField>(receiver, static_cast<HValue*>(NULL),
1419                                      HObjectAccess::ForMap());
1420
1421   // Check the instance type and if an access check is needed, this can be
1422   // done with a single load, since both bytes are adjacent in the map.
1423   HObjectAccess access(HObjectAccess::ForMapInstanceTypeAndBitField());
1424   HValue* instance_type_and_bit_field =
1425       Add<HLoadNamedField>(map, static_cast<HValue*>(NULL), access);
1426
1427   HValue* mask = Add<HConstant>(0x00FF | (bit_field_mask << 8));
1428   HValue* and_result = AddUncasted<HBitwise>(Token::BIT_AND,
1429                                              instance_type_and_bit_field,
1430                                              mask);
1431   HValue* sub_result = AddUncasted<HSub>(and_result,
1432                                          Add<HConstant>(JS_OBJECT_TYPE));
1433   Add<HBoundsCheck>(sub_result, Add<HConstant>(0x100 - JS_OBJECT_TYPE));
1434 }
1435
1436
1437 void HGraphBuilder::BuildKeyedIndexCheck(HValue* key,
1438                                          HIfContinuation* join_continuation) {
1439   // The sometimes unintuitively backward ordering of the ifs below is
1440   // convoluted, but necessary.  All of the paths must guarantee that the
1441   // if-true of the continuation returns a smi element index and the if-false of
1442   // the continuation returns either a symbol or a unique string key. All other
1443   // object types cause a deopt to fall back to the runtime.
1444
1445   IfBuilder key_smi_if(this);
1446   key_smi_if.If<HIsSmiAndBranch>(key);
1447   key_smi_if.Then();
1448   {
1449     Push(key);  // Nothing to do, just continue to true of continuation.
1450   }
1451   key_smi_if.Else();
1452   {
1453     HValue* map = Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
1454                                        HObjectAccess::ForMap());
1455     HValue* instance_type =
1456         Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
1457                              HObjectAccess::ForMapInstanceType());
1458
1459     // Non-unique string, check for a string with a hash code that is actually
1460     // an index.
1461     STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
1462     IfBuilder not_string_or_name_if(this);
1463     not_string_or_name_if.If<HCompareNumericAndBranch>(
1464         instance_type,
1465         Add<HConstant>(LAST_UNIQUE_NAME_TYPE),
1466         Token::GT);
1467
1468     not_string_or_name_if.Then();
1469     {
1470       // Non-smi, non-Name, non-String: Try to convert to smi in case of
1471       // HeapNumber.
1472       // TODO(danno): This could call some variant of ToString
1473       Push(AddUncasted<HForceRepresentation>(key, Representation::Smi()));
1474     }
1475     not_string_or_name_if.Else();
1476     {
1477       // String or Name: check explicitly for Name, they can short-circuit
1478       // directly to unique non-index key path.
1479       IfBuilder not_symbol_if(this);
1480       not_symbol_if.If<HCompareNumericAndBranch>(
1481           instance_type,
1482           Add<HConstant>(SYMBOL_TYPE),
1483           Token::NE);
1484
1485       not_symbol_if.Then();
1486       {
1487         // String: check whether the String is a String of an index. If it is,
1488         // extract the index value from the hash.
1489         HValue* hash =
1490             Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
1491                                  HObjectAccess::ForNameHashField());
1492         HValue* not_index_mask = Add<HConstant>(static_cast<int>(
1493             String::kContainsCachedArrayIndexMask));
1494
1495         HValue* not_index_test = AddUncasted<HBitwise>(
1496             Token::BIT_AND, hash, not_index_mask);
1497
1498         IfBuilder string_index_if(this);
1499         string_index_if.If<HCompareNumericAndBranch>(not_index_test,
1500                                                      graph()->GetConstant0(),
1501                                                      Token::EQ);
1502         string_index_if.Then();
1503         {
1504           // String with index in hash: extract string and merge to index path.
1505           Push(BuildDecodeField<String::ArrayIndexValueBits>(hash));
1506         }
1507         string_index_if.Else();
1508         {
1509           // Key is a non-index String, check for uniqueness/internalization. If
1510           // it's not, deopt.
1511           HValue* not_internalized_bit = AddUncasted<HBitwise>(
1512               Token::BIT_AND,
1513               instance_type,
1514               Add<HConstant>(static_cast<int>(kIsNotInternalizedMask)));
1515           DeoptimizeIf<HCompareNumericAndBranch>(
1516               not_internalized_bit,
1517               graph()->GetConstant0(),
1518               Token::NE,
1519               "BuildKeyedIndexCheck: string isn't internalized");
1520           // Key guaranteed to be a unqiue string
1521           Push(key);
1522         }
1523         string_index_if.JoinContinuation(join_continuation);
1524       }
1525       not_symbol_if.Else();
1526       {
1527         Push(key);  // Key is symbol
1528       }
1529       not_symbol_if.JoinContinuation(join_continuation);
1530     }
1531     not_string_or_name_if.JoinContinuation(join_continuation);
1532   }
1533   key_smi_if.JoinContinuation(join_continuation);
1534 }
1535
1536
1537 void HGraphBuilder::BuildNonGlobalObjectCheck(HValue* receiver) {
1538   // Get the the instance type of the receiver, and make sure that it is
1539   // not one of the global object types.
1540   HValue* map = Add<HLoadNamedField>(receiver, static_cast<HValue*>(NULL),
1541                                      HObjectAccess::ForMap());
1542   HValue* instance_type =
1543     Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
1544                          HObjectAccess::ForMapInstanceType());
1545   STATIC_ASSERT(JS_BUILTINS_OBJECT_TYPE == JS_GLOBAL_OBJECT_TYPE + 1);
1546   HValue* min_global_type = Add<HConstant>(JS_GLOBAL_OBJECT_TYPE);
1547   HValue* max_global_type = Add<HConstant>(JS_BUILTINS_OBJECT_TYPE);
1548
1549   IfBuilder if_global_object(this);
1550   if_global_object.If<HCompareNumericAndBranch>(instance_type,
1551                                                 max_global_type,
1552                                                 Token::LTE);
1553   if_global_object.And();
1554   if_global_object.If<HCompareNumericAndBranch>(instance_type,
1555                                                 min_global_type,
1556                                                 Token::GTE);
1557   if_global_object.ThenDeopt("receiver was a global object");
1558   if_global_object.End();
1559 }
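
// The check above, as a scalar sketch (relying on the STATIC_ASSERT that the
// two global object types are adjacent); illustrative only:
//
//   if (instance_type >= JS_GLOBAL_OBJECT_TYPE &&
//       instance_type <= JS_BUILTINS_OBJECT_TYPE) {
//     Deoptimize("receiver was a global object");
//   }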
1560
1561
1562 void HGraphBuilder::BuildTestForDictionaryProperties(
1563     HValue* object,
1564     HIfContinuation* continuation) {
1565   HValue* properties = Add<HLoadNamedField>(
1566       object, static_cast<HValue*>(NULL),
1567       HObjectAccess::ForPropertiesPointer());
1568   HValue* properties_map =
1569       Add<HLoadNamedField>(properties, static_cast<HValue*>(NULL),
1570                            HObjectAccess::ForMap());
1571   HValue* hash_map = Add<HLoadRoot>(Heap::kHashTableMapRootIndex);
1572   IfBuilder builder(this);
1573   builder.If<HCompareObjectEqAndBranch>(properties_map, hash_map);
1574   builder.CaptureContinuation(continuation);
1575 }
1576
1577
1578 HValue* HGraphBuilder::BuildKeyedLookupCacheHash(HValue* object,
1579                                                  HValue* key) {
1580   // Load the map of the receiver, compute the keyed lookup cache hash
1581   // based on 32 bits of the map pointer and the string hash.
1582   HValue* object_map =
1583       Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
1584                            HObjectAccess::ForMapAsInteger32());
1585   HValue* shifted_map = AddUncasted<HShr>(
1586       object_map, Add<HConstant>(KeyedLookupCache::kMapHashShift));
1587   HValue* string_hash =
1588       Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
1589                            HObjectAccess::ForStringHashField());
1590   HValue* shifted_hash = AddUncasted<HShr>(
1591       string_hash, Add<HConstant>(String::kHashShift));
1592   HValue* xor_result = AddUncasted<HBitwise>(Token::BIT_XOR, shifted_map,
1593                                              shifted_hash);
1594   int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
1595   return AddUncasted<HBitwise>(Token::BIT_AND, xor_result,
1596                                Add<HConstant>(mask));
1597 }
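
// A rough scalar sketch of the hash computed above, assuming the
// KeyedLookupCache and String constants used in this file; illustrative only:
//
//   uint32_t map_bits = low 32 bits of the receiver's map pointer;
//   uint32_t hash     = (map_bits >> KeyedLookupCache::kMapHashShift) ^
//                       (hash_field >> String::kHashShift);
//   uint32_t index    = hash & (KeyedLookupCache::kCapacityMask &
//                               KeyedLookupCache::kHashMask);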
1598
1599
1600 HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoadHelper(
1601     HValue* elements,
1602     HValue* key,
1603     HValue* hash,
1604     HValue* mask,
1605     int current_probe) {
1606   if (current_probe == kNumberDictionaryProbes) {
1607     return NULL;
1608   }
1609
1610   int32_t offset = SeededNumberDictionary::GetProbeOffset(current_probe);
1611   HValue* raw_index = (current_probe == 0)
1612       ? hash
1613       : AddUncasted<HAdd>(hash, Add<HConstant>(offset));
1614   raw_index = AddUncasted<HBitwise>(Token::BIT_AND, raw_index, mask);
1615   int32_t entry_size = SeededNumberDictionary::kEntrySize;
1616   raw_index = AddUncasted<HMul>(raw_index, Add<HConstant>(entry_size));
1617   raw_index->ClearFlag(HValue::kCanOverflow);
1618
1619   int32_t base_offset = SeededNumberDictionary::kElementsStartIndex;
1620   HValue* key_index = AddUncasted<HAdd>(raw_index, Add<HConstant>(base_offset));
1621   key_index->ClearFlag(HValue::kCanOverflow);
1622
1623   HValue* candidate_key = Add<HLoadKeyed>(elements, key_index,
1624                                           static_cast<HValue*>(NULL),
1625                                           FAST_ELEMENTS);
1626
1627   IfBuilder key_compare(this);
1628   key_compare.IfNot<HCompareObjectEqAndBranch>(key, candidate_key);
1629   key_compare.Then();
1630   {
1631     // Key at the current probe doesn't match, try at the next probe.
1632     HValue* result = BuildUncheckedDictionaryElementLoadHelper(
1633         elements, key, hash, mask, current_probe + 1);
1634     if (result == NULL) {
1635       key_compare.Deopt("probes exhausted in keyed load dictionary lookup");
1636       result = graph()->GetConstantUndefined();
1637     } else {
1638       Push(result);
1639     }
1640   }
1641   key_compare.Else();
1642   {
1643     // Key at current probe matches. Details must be zero, otherwise the
1644     // dictionary element requires special handling.
1645     HValue* details_index = AddUncasted<HAdd>(
1646         raw_index, Add<HConstant>(base_offset + 2));
1647     details_index->ClearFlag(HValue::kCanOverflow);
1648
1649     HValue* details = Add<HLoadKeyed>(elements, details_index,
1650                                       static_cast<HValue*>(NULL),
1651                                       FAST_ELEMENTS);
1652     IfBuilder details_compare(this);
1653     details_compare.If<HCompareNumericAndBranch>(details,
1654                                                  graph()->GetConstant0(),
1655                                                  Token::NE);
1656     details_compare.ThenDeopt("keyed load dictionary element not fast case");
1657
1658     details_compare.Else();
1659     {
1660       // Key matches and details are zero --> fast case. Load and return the
1661       // value.
1662       HValue* result_index = AddUncasted<HAdd>(
1663           raw_index, Add<HConstant>(base_offset + 1));
1664       result_index->ClearFlag(HValue::kCanOverflow);
1665
1666       Push(Add<HLoadKeyed>(elements, result_index,
1667                            static_cast<HValue*>(NULL),
1668                            FAST_ELEMENTS));
1669     }
1670     details_compare.End();
1671   }
1672   key_compare.End();
1673
1674   return Pop();
1675 }
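
// A scalar sketch of the probe sequence that the recursive helper above
// unrolls kNumberDictionaryProbes times, assuming the SeededNumberDictionary
// layout referenced here (entries of kEntrySize words starting at
// kElementsStartIndex; key, value and details at offsets 0, 1 and 2 within
// an entry); illustrative only:
//
//   for (int probe = 0; probe < kNumberDictionaryProbes; probe++) {
//     int index = (probe == 0 ? hash
//                             : hash + GetProbeOffset(probe)) & mask;
//     int base = kElementsStartIndex + index * kEntrySize;
//     if (elements[base] == key) {
//       if (elements[base + 2] != 0) Deoptimize("element not fast case");
//       return elements[base + 1];
//     }
//   }
//   Deoptimize("probes exhausted");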
1676
1677
1678 HValue* HGraphBuilder::BuildElementIndexHash(HValue* index) {
1679   int32_t seed_value = static_cast<uint32_t>(isolate()->heap()->HashSeed());
1680   HValue* seed = Add<HConstant>(seed_value);
1681   HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, index, seed);
1682
1683   // hash = ~hash + (hash << 15);
1684   HValue* shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(15));
1685   HValue* not_hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash,
1686                                            graph()->GetConstantMinus1());
1687   hash = AddUncasted<HAdd>(shifted_hash, not_hash);
1688
1689   // hash = hash ^ (hash >> 12);
1690   shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(12));
1691   hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
1692
1693   // hash = hash + (hash << 2);
1694   shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(2));
1695   hash = AddUncasted<HAdd>(hash, shifted_hash);
1696
1697   // hash = hash ^ (hash >> 4);
1698   shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(4));
1699   hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
1700
1701   // hash = hash * 2057;
1702   hash = AddUncasted<HMul>(hash, Add<HConstant>(2057));
1703   hash->ClearFlag(HValue::kCanOverflow);
1704
1705   // hash = hash ^ (hash >> 16);
1706   shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(16));
1707   return AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
1708 }
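
// Collected into one place, the graph above computes the following, using
// 32-bit wrap-around arithmetic (illustrative only):
//
//   hash = index ^ seed;
//   hash = ~hash + (hash << 15);
//   hash = hash ^ (hash >> 12);
//   hash = hash + (hash << 2);
//   hash = hash ^ (hash >> 4);
//   hash = hash * 2057;
//   hash = hash ^ (hash >> 16);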
1709
1710
1711 HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoad(HValue* receiver,
1712                                                            HValue* elements,
1713                                                            HValue* key,
1714                                                            HValue* hash) {
1715   HValue* capacity = Add<HLoadKeyed>(
1716       elements,
1717       Add<HConstant>(NameDictionary::kCapacityIndex),
1718       static_cast<HValue*>(NULL),
1719       FAST_ELEMENTS);
1720
1721   HValue* mask = AddUncasted<HSub>(capacity, graph()->GetConstant1());
1722   mask->ChangeRepresentation(Representation::Integer32());
1723   mask->ClearFlag(HValue::kCanOverflow);
1724
1725   return BuildUncheckedDictionaryElementLoadHelper(elements, key,
1726                                                    hash, mask, 0);
1727 }
1728
1729
1730 HValue* HGraphBuilder::BuildRegExpConstructResult(HValue* length,
1731                                                   HValue* index,
1732                                                   HValue* input) {
1733   NoObservableSideEffectsScope scope(this);
1734   HConstant* max_length = Add<HConstant>(JSObject::kInitialMaxFastElementArray);
1735   Add<HBoundsCheck>(length, max_length);
1736
1737   // Generate size calculation code here in order to make it dominate
1738   // the JSRegExpResult allocation.
1739   ElementsKind elements_kind = FAST_ELEMENTS;
1740   HValue* size = BuildCalculateElementsSize(elements_kind, length);
1741
1742   // Allocate the JSRegExpResult and the FixedArray in one step.
1743   HValue* result = Add<HAllocate>(
1744       Add<HConstant>(JSRegExpResult::kSize), HType::JSArray(),
1745       NOT_TENURED, JS_ARRAY_TYPE);
1746
1747   // Initialize the JSRegExpResult header.
1748   HValue* global_object = Add<HLoadNamedField>(
1749       context(), static_cast<HValue*>(NULL),
1750       HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
1751   HValue* native_context = Add<HLoadNamedField>(
1752       global_object, static_cast<HValue*>(NULL),
1753       HObjectAccess::ForGlobalObjectNativeContext());
1754   Add<HStoreNamedField>(
1755       result, HObjectAccess::ForMap(),
1756       Add<HLoadNamedField>(
1757           native_context, static_cast<HValue*>(NULL),
1758           HObjectAccess::ForContextSlot(Context::REGEXP_RESULT_MAP_INDEX)));
1759   HConstant* empty_fixed_array =
1760       Add<HConstant>(isolate()->factory()->empty_fixed_array());
1761   Add<HStoreNamedField>(
1762       result, HObjectAccess::ForJSArrayOffset(JSArray::kPropertiesOffset),
1763       empty_fixed_array);
1764   Add<HStoreNamedField>(
1765       result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
1766       empty_fixed_array);
1767   Add<HStoreNamedField>(
1768       result, HObjectAccess::ForJSArrayOffset(JSArray::kLengthOffset), length);
1769
1770   // Initialize the additional fields.
1771   Add<HStoreNamedField>(
1772       result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kIndexOffset),
1773       index);
1774   Add<HStoreNamedField>(
1775       result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kInputOffset),
1776       input);
1777
1778   // Allocate and initialize the elements header.
1779   HAllocate* elements = BuildAllocateElements(elements_kind, size);
1780   BuildInitializeElementsHeader(elements, elements_kind, length);
1781
1782   HConstant* size_in_bytes_upper_bound = EstablishElementsAllocationSize(
1783       elements_kind, max_length->Integer32Value());
1784   elements->set_size_upper_bound(size_in_bytes_upper_bound);
1785
1786   Add<HStoreNamedField>(
1787       result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
1788       elements);
1789
1790   // Initialize the elements contents with undefined.
1791   BuildFillElementsWithValue(
1792       elements, elements_kind, graph()->GetConstant0(), length,
1793       graph()->GetConstantUndefined());
1794
1795   return result;
1796 }
1797
1798
1799 HValue* HGraphBuilder::BuildNumberToString(HValue* object, Type* type) {
1800   NoObservableSideEffectsScope scope(this);
1801
1802   // Convert constant numbers at compile time.
1803   if (object->IsConstant() && HConstant::cast(object)->HasNumberValue()) {
1804     Handle<Object> number = HConstant::cast(object)->handle(isolate());
1805     Handle<String> result = isolate()->factory()->NumberToString(number);
1806     return Add<HConstant>(result);
1807   }
1808
1809   // Create a joinable continuation.
1810   HIfContinuation found(graph()->CreateBasicBlock(),
1811                         graph()->CreateBasicBlock());
1812
1813   // Load the number string cache.
1814   HValue* number_string_cache =
1815       Add<HLoadRoot>(Heap::kNumberStringCacheRootIndex);
1816
1817   // Make the hash mask from the length of the number string cache. It
1818   // contains two elements (number and string) for each cache entry.
1819   HValue* mask = AddLoadFixedArrayLength(number_string_cache);
1820   mask->set_type(HType::Smi());
1821   mask = AddUncasted<HSar>(mask, graph()->GetConstant1());
1822   mask = AddUncasted<HSub>(mask, graph()->GetConstant1());
1823
1824   // Check whether object is a smi.
1825   IfBuilder if_objectissmi(this);
1826   if_objectissmi.If<HIsSmiAndBranch>(object);
1827   if_objectissmi.Then();
1828   {
1829     // Compute hash for smi similar to smi_get_hash().
1830     HValue* hash = AddUncasted<HBitwise>(Token::BIT_AND, object, mask);
1831
1832     // Load the key.
1833     HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
1834     HValue* key = Add<HLoadKeyed>(number_string_cache, key_index,
1835                                   static_cast<HValue*>(NULL),
1836                                   FAST_ELEMENTS, ALLOW_RETURN_HOLE);
1837
1838     // Check if object == key.
1839     IfBuilder if_objectiskey(this);
1840     if_objectiskey.If<HCompareObjectEqAndBranch>(object, key);
1841     if_objectiskey.Then();
1842     {
1843       // Make the key_index available.
1844       Push(key_index);
1845     }
1846     if_objectiskey.JoinContinuation(&found);
1847   }
1848   if_objectissmi.Else();
1849   {
1850     if (type->Is(Type::SignedSmall())) {
1851       if_objectissmi.Deopt("Expected smi");
1852     } else {
1853       // Check if the object is a heap number.
1854       IfBuilder if_objectisnumber(this);
1855       HValue* objectisnumber = if_objectisnumber.If<HCompareMap>(
1856           object, isolate()->factory()->heap_number_map());
1857       if_objectisnumber.Then();
1858       {
1859         // Compute hash for heap number similar to double_get_hash().
1860         HValue* low = Add<HLoadNamedField>(
1861             object, objectisnumber,
1862             HObjectAccess::ForHeapNumberValueLowestBits());
1863         HValue* high = Add<HLoadNamedField>(
1864             object, objectisnumber,
1865             HObjectAccess::ForHeapNumberValueHighestBits());
1866         HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, low, high);
1867         hash = AddUncasted<HBitwise>(Token::BIT_AND, hash, mask);
1868
1869         // Load the key.
1870         HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
1871         HValue* key = Add<HLoadKeyed>(number_string_cache, key_index,
1872                                       static_cast<HValue*>(NULL),
1873                                       FAST_ELEMENTS, ALLOW_RETURN_HOLE);
1874
1875         // Check if the key is a heap number and compare it with the object.
1876         IfBuilder if_keyisnotsmi(this);
1877         HValue* keyisnotsmi = if_keyisnotsmi.IfNot<HIsSmiAndBranch>(key);
1878         if_keyisnotsmi.Then();
1879         {
1880           IfBuilder if_keyisheapnumber(this);
1881           if_keyisheapnumber.If<HCompareMap>(
1882               key, isolate()->factory()->heap_number_map());
1883           if_keyisheapnumber.Then();
1884           {
1885             // Check if values of key and object match.
1886             IfBuilder if_keyeqobject(this);
1887             if_keyeqobject.If<HCompareNumericAndBranch>(
1888                 Add<HLoadNamedField>(key, keyisnotsmi,
1889                                      HObjectAccess::ForHeapNumberValue()),
1890                 Add<HLoadNamedField>(object, objectisnumber,
1891                                      HObjectAccess::ForHeapNumberValue()),
1892                 Token::EQ);
1893             if_keyeqobject.Then();
1894             {
1895               // Make the key_index available.
1896               Push(key_index);
1897             }
1898             if_keyeqobject.JoinContinuation(&found);
1899           }
1900           if_keyisheapnumber.JoinContinuation(&found);
1901         }
1902         if_keyisnotsmi.JoinContinuation(&found);
1903       }
1904       if_objectisnumber.Else();
1905       {
1906         if (type->Is(Type::Number())) {
1907           if_objectisnumber.Deopt("Expected heap number");
1908         }
1909       }
1910       if_objectisnumber.JoinContinuation(&found);
1911     }
1912   }
1913   if_objectissmi.JoinContinuation(&found);
1914
1915   // Check for cache hit.
1916   IfBuilder if_found(this, &found);
1917   if_found.Then();
1918   {
1919     // Count number to string operation in native code.
1920     AddIncrementCounter(isolate()->counters()->number_to_string_native());
1921
1922     // Load the value in case of cache hit.
1923     HValue* key_index = Pop();
1924     HValue* value_index = AddUncasted<HAdd>(key_index, graph()->GetConstant1());
1925     Push(Add<HLoadKeyed>(number_string_cache, value_index,
1926                          static_cast<HValue*>(NULL),
1927                          FAST_ELEMENTS, ALLOW_RETURN_HOLE));
1928   }
1929   if_found.Else();
1930   {
1931     // Cache miss, fallback to runtime.
1932     Add<HPushArguments>(object);
1933     Push(Add<HCallRuntime>(
1934             isolate()->factory()->empty_string(),
1935             Runtime::FunctionForId(Runtime::kHiddenNumberToStringSkipCache),
1936             1));
1937   }
1938   if_found.End();
1939
1940   return Pop();
1941 }
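
// A rough sketch of the cache probe built above, assuming the number string
// cache layout described in the comments (a FixedArray of number/string
// pairs, key at an even index, value at the following index); illustrative
// only:
//
//   mask = (cache_length >> 1) - 1;
//   hash = object is smi ? smi_value & mask : (low_bits ^ high_bits) & mask;
//   key_index = hash << 1;
//   if (cache[key_index] matches object)   // same smi, or equal HeapNumber
//     return cache[key_index + 1];
//   // otherwise fall back to %NumberToStringSkipCache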
1942
1943
1944 HAllocate* HGraphBuilder::BuildAllocate(
1945     HValue* object_size,
1946     HType type,
1947     InstanceType instance_type,
1948     HAllocationMode allocation_mode) {
1949   // Compute the effective allocation size.
1950   HValue* size = object_size;
1951   if (allocation_mode.CreateAllocationMementos()) {
1952     size = AddUncasted<HAdd>(size, Add<HConstant>(AllocationMemento::kSize));
1953     size->ClearFlag(HValue::kCanOverflow);
1954   }
1955
1956   // Perform the actual allocation.
1957   HAllocate* object = Add<HAllocate>(
1958       size, type, allocation_mode.GetPretenureMode(),
1959       instance_type, allocation_mode.feedback_site());
1960
1961   // Set up the allocation memento.
1962   if (allocation_mode.CreateAllocationMementos()) {
1963     BuildCreateAllocationMemento(
1964         object, object_size, allocation_mode.current_site());
1965   }
1966
1967   return object;
1968 }
1969
1970
1971 HValue* HGraphBuilder::BuildAddStringLengths(HValue* left_length,
1972                                              HValue* right_length) {
1973   // Compute the combined string length and check against max string length.
1974   HValue* length = AddUncasted<HAdd>(left_length, right_length);
1975   // Check that length <= kMaxLength <=> length < kMaxLength + 1.
1976   HValue* max_length = Add<HConstant>(String::kMaxLength + 1);
1977   Add<HBoundsCheck>(length, max_length);
1978   return length;
1979 }
1980
1981
1982 HValue* HGraphBuilder::BuildCreateConsString(
1983     HValue* length,
1984     HValue* left,
1985     HValue* right,
1986     HAllocationMode allocation_mode) {
1987   // Determine the string instance types.
1988   HInstruction* left_instance_type = AddLoadStringInstanceType(left);
1989   HInstruction* right_instance_type = AddLoadStringInstanceType(right);
1990
1991   // Allocate the cons string object. HAllocate does not care whether we
1992   // pass CONS_STRING_TYPE or CONS_ASCII_STRING_TYPE here, so we just use
1993   // CONS_STRING_TYPE. Below we decide whether the cons string is one-byte
1994   // or two-byte and set the appropriate map.
1995   ASSERT(HAllocate::CompatibleInstanceTypes(CONS_STRING_TYPE,
1996                                             CONS_ASCII_STRING_TYPE));
1997   HAllocate* result = BuildAllocate(Add<HConstant>(ConsString::kSize),
1998                                     HType::String(), CONS_STRING_TYPE,
1999                                     allocation_mode);
2000
2001   // Compute intersection and difference of instance types.
2002   HValue* anded_instance_types = AddUncasted<HBitwise>(
2003       Token::BIT_AND, left_instance_type, right_instance_type);
2004   HValue* xored_instance_types = AddUncasted<HBitwise>(
2005       Token::BIT_XOR, left_instance_type, right_instance_type);
2006
2007   // We create a one-byte cons string if
2008   // 1. both strings are one-byte, or
2009   // 2. at least one of the strings is two-byte, but happens to contain only
2010   //    one-byte characters.
2011   // To do this, we check
2012   // 1. if both strings are one-byte, or if the one-byte data hint is set in
2013   //    both strings, or
2014   // 2. if one of the strings has the one-byte data hint set and the other
2015   //    string is one-byte.
2016   IfBuilder if_onebyte(this);
2017   STATIC_ASSERT(kOneByteStringTag != 0);
2018   STATIC_ASSERT(kOneByteDataHintMask != 0);
2019   if_onebyte.If<HCompareNumericAndBranch>(
2020       AddUncasted<HBitwise>(
2021           Token::BIT_AND, anded_instance_types,
2022           Add<HConstant>(static_cast<int32_t>(
2023                   kStringEncodingMask | kOneByteDataHintMask))),
2024       graph()->GetConstant0(), Token::NE);
2025   if_onebyte.Or();
2026   STATIC_ASSERT(kOneByteStringTag != 0 &&
2027                 kOneByteDataHintTag != 0 &&
2028                 kOneByteDataHintTag != kOneByteStringTag);
2029   if_onebyte.If<HCompareNumericAndBranch>(
2030       AddUncasted<HBitwise>(
2031           Token::BIT_AND, xored_instance_types,
2032           Add<HConstant>(static_cast<int32_t>(
2033                   kOneByteStringTag | kOneByteDataHintTag))),
2034       Add<HConstant>(static_cast<int32_t>(
2035               kOneByteStringTag | kOneByteDataHintTag)), Token::EQ);
2036   if_onebyte.Then();
2037   {
2038     // We can safely skip the write barrier for storing the map here.
2039     Add<HStoreNamedField>(
2040         result, HObjectAccess::ForMap(),
2041         Add<HConstant>(isolate()->factory()->cons_ascii_string_map()));
2042   }
2043   if_onebyte.Else();
2044   {
2045     // We can safely skip the write barrier for storing the map here.
2046     Add<HStoreNamedField>(
2047         result, HObjectAccess::ForMap(),
2048         Add<HConstant>(isolate()->factory()->cons_string_map()));
2049   }
2050   if_onebyte.End();
2051
2052   // Initialize the cons string fields.
2053   Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
2054                         Add<HConstant>(String::kEmptyHashField));
2055   Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
2056   Add<HStoreNamedField>(result, HObjectAccess::ForConsStringFirst(), left);
2057   Add<HStoreNamedField>(result, HObjectAccess::ForConsStringSecond(), right);
2058
2059   // Count the native string addition.
2060   AddIncrementCounter(isolate()->counters()->string_add_native());
2061
2062   return result;
2063 }
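
// The one-byte decision above, as a scalar predicate over the two instance
// types (using the string tag and hint constants referenced here);
// illustrative only:
//
//   one_byte =
//       ((left_type & right_type) &
//        (kStringEncodingMask | kOneByteDataHintMask)) != 0 ||
//       ((left_type ^ right_type) &
//        (kOneByteStringTag | kOneByteDataHintTag)) ==
//           (kOneByteStringTag | kOneByteDataHintTag);
//   map = one_byte ? cons_ascii_string_map : cons_string_map;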
2064
2065
2066 void HGraphBuilder::BuildCopySeqStringChars(HValue* src,
2067                                             HValue* src_offset,
2068                                             String::Encoding src_encoding,
2069                                             HValue* dst,
2070                                             HValue* dst_offset,
2071                                             String::Encoding dst_encoding,
2072                                             HValue* length) {
2073   ASSERT(dst_encoding != String::ONE_BYTE_ENCODING ||
2074          src_encoding == String::ONE_BYTE_ENCODING);
2075   LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
2076   HValue* index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
2077   {
2078     HValue* src_index = AddUncasted<HAdd>(src_offset, index);
2079     HValue* value =
2080         AddUncasted<HSeqStringGetChar>(src_encoding, src, src_index);
2081     HValue* dst_index = AddUncasted<HAdd>(dst_offset, index);
2082     Add<HSeqStringSetChar>(dst_encoding, dst, dst_index, value);
2083   }
2084   loop.EndBody();
2085 }
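
// The loop above is equivalent to the following element-wise copy
// (illustrative only):
//
//   for (int i = 0; i < length; i++) {
//     dst[dst_offset + i] = src[src_offset + i];
//   }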
2086
2087
2088 HValue* HGraphBuilder::BuildObjectSizeAlignment(
2089     HValue* unaligned_size, int header_size) {
2090   ASSERT((header_size & kObjectAlignmentMask) == 0);
2091   HValue* size = AddUncasted<HAdd>(
2092       unaligned_size, Add<HConstant>(static_cast<int32_t>(
2093           header_size + kObjectAlignmentMask)));
2094   size->ClearFlag(HValue::kCanOverflow);
2095   return AddUncasted<HBitwise>(
2096       Token::BIT_AND, size, Add<HConstant>(static_cast<int32_t>(
2097           ~kObjectAlignmentMask)));
2098 }
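
// Equivalent scalar formula for the alignment computation above
// (illustrative only):
//
//   size = (unaligned_size + header_size + kObjectAlignmentMask) &
//          ~kObjectAlignmentMask;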
2099
2100
2101 HValue* HGraphBuilder::BuildUncheckedStringAdd(
2102     HValue* left,
2103     HValue* right,
2104     HAllocationMode allocation_mode) {
2105   // Determine the string lengths.
2106   HValue* left_length = AddLoadStringLength(left);
2107   HValue* right_length = AddLoadStringLength(right);
2108
2109   // Compute the combined string length.
2110   HValue* length = BuildAddStringLengths(left_length, right_length);
2111
2112   // Do some manual constant folding here.
2113   if (left_length->IsConstant()) {
2114     HConstant* c_left_length = HConstant::cast(left_length);
2115     ASSERT_NE(0, c_left_length->Integer32Value());
2116     if (c_left_length->Integer32Value() + 1 >= ConsString::kMinLength) {
2117       // The right string contains at least one character.
2118       return BuildCreateConsString(length, left, right, allocation_mode);
2119     }
2120   } else if (right_length->IsConstant()) {
2121     HConstant* c_right_length = HConstant::cast(right_length);
2122     ASSERT_NE(0, c_right_length->Integer32Value());
2123     if (c_right_length->Integer32Value() + 1 >= ConsString::kMinLength) {
2124       // The left string contains at least one character.
2125       return BuildCreateConsString(length, left, right, allocation_mode);
2126     }
2127   }
2128
2129   // Check if we should create a cons string.
2130   IfBuilder if_createcons(this);
2131   if_createcons.If<HCompareNumericAndBranch>(
2132       length, Add<HConstant>(ConsString::kMinLength), Token::GTE);
2133   if_createcons.Then();
2134   {
2135     // Create a cons string.
2136     Push(BuildCreateConsString(length, left, right, allocation_mode));
2137   }
2138   if_createcons.Else();
2139   {
2140     // Determine the string instance types.
2141     HValue* left_instance_type = AddLoadStringInstanceType(left);
2142     HValue* right_instance_type = AddLoadStringInstanceType(right);
2143
2144     // Compute union and difference of instance types.
2145     HValue* ored_instance_types = AddUncasted<HBitwise>(
2146         Token::BIT_OR, left_instance_type, right_instance_type);
2147     HValue* xored_instance_types = AddUncasted<HBitwise>(
2148         Token::BIT_XOR, left_instance_type, right_instance_type);
2149
2150     // Check if both strings have the same encoding and both are
2151     // sequential.
2152     IfBuilder if_sameencodingandsequential(this);
2153     if_sameencodingandsequential.If<HCompareNumericAndBranch>(
2154         AddUncasted<HBitwise>(
2155             Token::BIT_AND, xored_instance_types,
2156             Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
2157         graph()->GetConstant0(), Token::EQ);
2158     if_sameencodingandsequential.And();
2159     STATIC_ASSERT(kSeqStringTag == 0);
2160     if_sameencodingandsequential.If<HCompareNumericAndBranch>(
2161         AddUncasted<HBitwise>(
2162             Token::BIT_AND, ored_instance_types,
2163             Add<HConstant>(static_cast<int32_t>(kStringRepresentationMask))),
2164         graph()->GetConstant0(), Token::EQ);
2165     if_sameencodingandsequential.Then();
2166     {
2167       HConstant* string_map =
2168           Add<HConstant>(isolate()->factory()->string_map());
2169       HConstant* ascii_string_map =
2170           Add<HConstant>(isolate()->factory()->ascii_string_map());
2171
2172       // Determine map and size depending on whether result is one-byte string.
2173       IfBuilder if_onebyte(this);
2174       STATIC_ASSERT(kOneByteStringTag != 0);
2175       if_onebyte.If<HCompareNumericAndBranch>(
2176           AddUncasted<HBitwise>(
2177               Token::BIT_AND, ored_instance_types,
2178               Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
2179           graph()->GetConstant0(), Token::NE);
2180       if_onebyte.Then();
2181       {
2182         // Allocate sequential one-byte string object.
2183         Push(length);
2184         Push(ascii_string_map);
2185       }
2186       if_onebyte.Else();
2187       {
2188         // Allocate sequential two-byte string object.
2189         HValue* size = AddUncasted<HShl>(length, graph()->GetConstant1());
2190         size->ClearFlag(HValue::kCanOverflow);
2191         size->SetFlag(HValue::kUint32);
2192         Push(size);
2193         Push(string_map);
2194       }
2195       if_onebyte.End();
2196       HValue* map = Pop();
2197
2198       // Calculate the number of bytes needed for the characters in the
2199       // string while observing object alignment.
2200       STATIC_ASSERT((SeqString::kHeaderSize & kObjectAlignmentMask) == 0);
2201       HValue* size = BuildObjectSizeAlignment(Pop(), SeqString::kHeaderSize);
2202
2203       // Allocate the string object. HAllocate does not care whether we pass
2204       // STRING_TYPE or ASCII_STRING_TYPE here, so we just use STRING_TYPE.
2205       HAllocate* result = BuildAllocate(
2206           size, HType::String(), STRING_TYPE, allocation_mode);
2207       Add<HStoreNamedField>(result, HObjectAccess::ForMap(), map);
2208
2209       // Initialize the string fields.
2210       Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
2211                             Add<HConstant>(String::kEmptyHashField));
2212       Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
2213
2214       // Copy characters to the result string.
2215       IfBuilder if_twobyte(this);
2216       if_twobyte.If<HCompareObjectEqAndBranch>(map, string_map);
2217       if_twobyte.Then();
2218       {
2219         // Copy characters from the left string.
2220         BuildCopySeqStringChars(
2221             left, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
2222             result, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
2223             left_length);
2224
2225         // Copy characters from the right string.
2226         BuildCopySeqStringChars(
2227             right, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
2228             result, left_length, String::TWO_BYTE_ENCODING,
2229             right_length);
2230       }
2231       if_twobyte.Else();
2232       {
2233         // Copy characters from the left string.
2234         BuildCopySeqStringChars(
2235             left, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
2236             result, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
2237             left_length);
2238
2239         // Copy characters from the right string.
2240         BuildCopySeqStringChars(
2241             right, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
2242             result, left_length, String::ONE_BYTE_ENCODING,
2243             right_length);
2244       }
2245       if_twobyte.End();
2246
2247       // Count the native string addition.
2248       AddIncrementCounter(isolate()->counters()->string_add_native());
2249
2250       // Return the sequential string.
2251       Push(result);
2252     }
2253     if_sameencodingandsequential.Else();
2254     {
2255       // Fallback to the runtime to add the two strings.
2256       Add<HPushArguments>(left, right);
2257       Push(Add<HCallRuntime>(
2258             isolate()->factory()->empty_string(),
2259             Runtime::FunctionForId(Runtime::kHiddenStringAdd),
2260             2));
2261     }
2262     if_sameencodingandsequential.End();
2263   }
2264   if_createcons.End();
2265
2266   return Pop();
2267 }
2268
2269
2270 HValue* HGraphBuilder::BuildStringAdd(
2271     HValue* left,
2272     HValue* right,
2273     HAllocationMode allocation_mode) {
2274   NoObservableSideEffectsScope no_effects(this);
2275
2276   // Determine string lengths.
2277   HValue* left_length = AddLoadStringLength(left);
2278   HValue* right_length = AddLoadStringLength(right);
2279
2280   // Check if left string is empty.
2281   IfBuilder if_leftempty(this);
2282   if_leftempty.If<HCompareNumericAndBranch>(
2283       left_length, graph()->GetConstant0(), Token::EQ);
2284   if_leftempty.Then();
2285   {
2286     // Count the native string addition.
2287     AddIncrementCounter(isolate()->counters()->string_add_native());
2288
2289     // Just return the right string.
2290     Push(right);
2291   }
2292   if_leftempty.Else();
2293   {
2294     // Check if right string is empty.
2295     IfBuilder if_rightempty(this);
2296     if_rightempty.If<HCompareNumericAndBranch>(
2297         right_length, graph()->GetConstant0(), Token::EQ);
2298     if_rightempty.Then();
2299     {
2300       // Count the native string addition.
2301       AddIncrementCounter(isolate()->counters()->string_add_native());
2302
2303       // Just return the left string.
2304       Push(left);
2305     }
2306     if_rightempty.Else();
2307     {
2308       // Add the two non-empty strings.
2309       Push(BuildUncheckedStringAdd(left, right, allocation_mode));
2310     }
2311     if_rightempty.End();
2312   }
2313   if_leftempty.End();
2314
2315   return Pop();
2316 }
2317
2318
2319 HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
2320     HValue* checked_object,
2321     HValue* key,
2322     HValue* val,
2323     bool is_js_array,
2324     ElementsKind elements_kind,
2325     PropertyAccessType access_type,
2326     LoadKeyedHoleMode load_mode,
2327     KeyedAccessStoreMode store_mode) {
2328   ASSERT((!IsExternalArrayElementsKind(elements_kind) &&
2329               !IsFixedTypedArrayElementsKind(elements_kind)) ||
2330          !is_js_array);
2331   // No GVNFlag is necessary for ElementsKind if there is an explicit dependency
2332   // on an HElementsTransition instruction. The flag can also be removed if the
2333   // map to check has FAST_HOLEY_ELEMENTS, since there can be no further
2334   // ElementsKind transitions. Finally, the dependency can be removed for stores
2335   // to FAST_ELEMENTS, since a transition to HOLEY elements won't change the
2336   // generated store code.
2337   if ((elements_kind == FAST_HOLEY_ELEMENTS) ||
2338       (elements_kind == FAST_ELEMENTS && access_type == STORE)) {
2339     checked_object->ClearDependsOnFlag(kElementsKind);
2340   }
2341
2342   bool fast_smi_only_elements = IsFastSmiElementsKind(elements_kind);
2343   bool fast_elements = IsFastObjectElementsKind(elements_kind);
2344   HValue* elements = AddLoadElements(checked_object);
2345   if (access_type == STORE && (fast_elements || fast_smi_only_elements) &&
2346       store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
2347     HCheckMaps* check_cow_map = Add<HCheckMaps>(
2348         elements, isolate()->factory()->fixed_array_map());
2349     check_cow_map->ClearDependsOnFlag(kElementsKind);
2350   }
2351   HInstruction* length = NULL;
2352   if (is_js_array) {
2353     length = Add<HLoadNamedField>(
2354         checked_object->ActualValue(), checked_object,
2355         HObjectAccess::ForArrayLength(elements_kind));
2356   } else {
2357     length = AddLoadFixedArrayLength(elements);
2358   }
2359   length->set_type(HType::Smi());
2360   HValue* checked_key = NULL;
2361   if (IsExternalArrayElementsKind(elements_kind) ||
2362       IsFixedTypedArrayElementsKind(elements_kind)) {
2363     HValue* backing_store;
2364     if (IsExternalArrayElementsKind(elements_kind)) {
2365       backing_store = Add<HLoadNamedField>(
2366           elements, static_cast<HValue*>(NULL),
2367           HObjectAccess::ForExternalArrayExternalPointer());
2368     } else {
2369       backing_store = elements;
2370     }
2371     if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
2372       NoObservableSideEffectsScope no_effects(this);
2373       IfBuilder length_checker(this);
2374       length_checker.If<HCompareNumericAndBranch>(key, length, Token::LT);
2375       length_checker.Then();
2376       IfBuilder negative_checker(this);
2377       HValue* bounds_check = negative_checker.If<HCompareNumericAndBranch>(
2378           key, graph()->GetConstant0(), Token::GTE);
2379       negative_checker.Then();
2380       HInstruction* result = AddElementAccess(
2381           backing_store, key, val, bounds_check, elements_kind, access_type);
2382       negative_checker.ElseDeopt("Negative key encountered");
2383       negative_checker.End();
2384       length_checker.End();
2385       return result;
2386     } else {
2387       ASSERT(store_mode == STANDARD_STORE);
2388       checked_key = Add<HBoundsCheck>(key, length);
2389       return AddElementAccess(
2390           backing_store, checked_key, val,
2391           checked_object, elements_kind, access_type);
2392     }
2393   }
2394   ASSERT(fast_smi_only_elements ||
2395          fast_elements ||
2396          IsFastDoubleElementsKind(elements_kind));
2397
2398   // In case val is stored into a fast smi array, ensure that the value is a smi
2399   // before manipulating the backing store. Otherwise the actual store may
2400   // deopt, leaving the backing store in an invalid state.
2401   if (access_type == STORE && IsFastSmiElementsKind(elements_kind) &&
2402       !val->type().IsSmi()) {
2403     val = AddUncasted<HForceRepresentation>(val, Representation::Smi());
2404   }
2405
2406   if (IsGrowStoreMode(store_mode)) {
2407     NoObservableSideEffectsScope no_effects(this);
2408     elements = BuildCheckForCapacityGrow(checked_object, elements,
2409                                          elements_kind, length, key,
2410                                          is_js_array, access_type);
2411     checked_key = key;
2412   } else {
2413     checked_key = Add<HBoundsCheck>(key, length);
2414
2415     if (access_type == STORE && (fast_elements || fast_smi_only_elements)) {
2416       if (store_mode == STORE_NO_TRANSITION_HANDLE_COW) {
2417         NoObservableSideEffectsScope no_effects(this);
2418         elements = BuildCopyElementsOnWrite(checked_object, elements,
2419                                             elements_kind, length);
2420       } else {
2421         HCheckMaps* check_cow_map = Add<HCheckMaps>(
2422             elements, isolate()->factory()->fixed_array_map());
2423         check_cow_map->ClearDependsOnFlag(kElementsKind);
2424       }
2425     }
2426   }
2427   return AddElementAccess(elements, checked_key, val, checked_object,
2428                           elements_kind, access_type, load_mode);
2429 }
2430
2431
2432 HValue* HGraphBuilder::BuildAllocateArrayFromLength(
2433     JSArrayBuilder* array_builder,
2434     HValue* length_argument) {
2435   if (length_argument->IsConstant() &&
2436       HConstant::cast(length_argument)->HasSmiValue()) {
2437     int array_length = HConstant::cast(length_argument)->Integer32Value();
2438     if (array_length == 0) {
2439       return array_builder->AllocateEmptyArray();
2440     } else {
2441       return array_builder->AllocateArray(length_argument,
2442                                           array_length,
2443                                           length_argument);
2444     }
2445   }
2446
2447   HValue* constant_zero = graph()->GetConstant0();
2448   HConstant* max_alloc_length =
2449       Add<HConstant>(JSObject::kInitialMaxFastElementArray);
2450   HInstruction* checked_length = Add<HBoundsCheck>(length_argument,
2451                                                    max_alloc_length);
2452   IfBuilder if_builder(this);
2453   if_builder.If<HCompareNumericAndBranch>(checked_length, constant_zero,
2454                                           Token::EQ);
2455   if_builder.Then();
2456   const int initial_capacity = JSArray::kPreallocatedArrayElements;
2457   HConstant* initial_capacity_node = Add<HConstant>(initial_capacity);
2458   Push(initial_capacity_node);  // capacity
2459   Push(constant_zero);          // length
2460   if_builder.Else();
2461   if (!(top_info()->IsStub()) &&
2462       IsFastPackedElementsKind(array_builder->kind())) {
2463     // We'll come back later with better (holey) feedback.
2464     if_builder.Deopt("Holey array despite packed elements_kind feedback");
2465   } else {
2466     Push(checked_length);         // capacity
2467     Push(checked_length);         // length
2468   }
2469   if_builder.End();
2470
2471   // Figure out total size
2472   HValue* length = Pop();
2473   HValue* capacity = Pop();
2474   return array_builder->AllocateArray(capacity, max_alloc_length, length);
2475 }
2476
2477
2478 HValue* HGraphBuilder::BuildCalculateElementsSize(ElementsKind kind,
2479                                                   HValue* capacity) {
2480   int elements_size = IsFastDoubleElementsKind(kind)
2481       ? kDoubleSize
2482       : kPointerSize;
2483
2484   HConstant* elements_size_value = Add<HConstant>(elements_size);
2485   HInstruction* mul = HMul::NewImul(zone(), context(),
2486                                     capacity->ActualValue(),
2487                                     elements_size_value);
2488   AddInstruction(mul);
2489   mul->ClearFlag(HValue::kCanOverflow);
2490
2491   STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize);
2492
2493   HConstant* header_size = Add<HConstant>(FixedArray::kHeaderSize);
2494   HValue* total_size = AddUncasted<HAdd>(mul, header_size);
2495   total_size->ClearFlag(HValue::kCanOverflow);
2496   return total_size;
2497 }
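
// Equivalent scalar formula for the size computed above (illustrative only):
//
//   element_size = IsFastDoubleElementsKind(kind) ? kDoubleSize : kPointerSize;
//   total_size   = capacity * element_size + FixedArray::kHeaderSize;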
2498
2499
2500 HAllocate* HGraphBuilder::AllocateJSArrayObject(AllocationSiteMode mode) {
2501   int base_size = JSArray::kSize;
2502   if (mode == TRACK_ALLOCATION_SITE) {
2503     base_size += AllocationMemento::kSize;
2504   }
2505   HConstant* size_in_bytes = Add<HConstant>(base_size);
2506   return Add<HAllocate>(
2507       size_in_bytes, HType::JSArray(), NOT_TENURED, JS_OBJECT_TYPE);
2508 }
2509
2510
2511 HConstant* HGraphBuilder::EstablishElementsAllocationSize(
2512     ElementsKind kind,
2513     int capacity) {
2514   int base_size = IsFastDoubleElementsKind(kind)
2515       ? FixedDoubleArray::SizeFor(capacity)
2516       : FixedArray::SizeFor(capacity);
2517
2518   return Add<HConstant>(base_size);
2519 }
2520
2521
2522 HAllocate* HGraphBuilder::BuildAllocateElements(ElementsKind kind,
2523                                                 HValue* size_in_bytes) {
2524   InstanceType instance_type = IsFastDoubleElementsKind(kind)
2525       ? FIXED_DOUBLE_ARRAY_TYPE
2526       : FIXED_ARRAY_TYPE;
2527
2528   return Add<HAllocate>(size_in_bytes, HType::HeapObject(), NOT_TENURED,
2529                         instance_type);
2530 }
2531
2532
2533 void HGraphBuilder::BuildInitializeElementsHeader(HValue* elements,
2534                                                   ElementsKind kind,
2535                                                   HValue* capacity) {
2536   Factory* factory = isolate()->factory();
2537   Handle<Map> map = IsFastDoubleElementsKind(kind)
2538       ? factory->fixed_double_array_map()
2539       : factory->fixed_array_map();
2540
2541   Add<HStoreNamedField>(elements, HObjectAccess::ForMap(), Add<HConstant>(map));
2542   Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(),
2543                         capacity);
2544 }
2545
2546
2547 HValue* HGraphBuilder::BuildAllocateElementsAndInitializeElementsHeader(
2548     ElementsKind kind,
2549     HValue* capacity) {
2550   // The HForceRepresentation is to prevent possible deopt on int-smi
2551   // conversion after allocation but before the new object fields are set.
2552   capacity = AddUncasted<HForceRepresentation>(capacity, Representation::Smi());
2553   HValue* size_in_bytes = BuildCalculateElementsSize(kind, capacity);
2554   HValue* new_elements = BuildAllocateElements(kind, size_in_bytes);
2555   BuildInitializeElementsHeader(new_elements, kind, capacity);
2556   return new_elements;
2557 }
2558
2559
2560 void HGraphBuilder::BuildJSArrayHeader(HValue* array,
2561                                        HValue* array_map,
2562                                        HValue* elements,
2563                                        AllocationSiteMode mode,
2564                                        ElementsKind elements_kind,
2565                                        HValue* allocation_site_payload,
2566                                        HValue* length_field) {
2567   Add<HStoreNamedField>(array, HObjectAccess::ForMap(), array_map);
2568
2569   HConstant* empty_fixed_array =
2570     Add<HConstant>(isolate()->factory()->empty_fixed_array());
2571
2572   Add<HStoreNamedField>(
2573       array, HObjectAccess::ForPropertiesPointer(), empty_fixed_array);
2574
2575   Add<HStoreNamedField>(
2576       array, HObjectAccess::ForElementsPointer(),
2577       elements != NULL ? elements : empty_fixed_array);
2578
2579   Add<HStoreNamedField>(
2580       array, HObjectAccess::ForArrayLength(elements_kind), length_field);
2581
2582   if (mode == TRACK_ALLOCATION_SITE) {
2583     BuildCreateAllocationMemento(
2584         array, Add<HConstant>(JSArray::kSize), allocation_site_payload);
2585   }
2586 }
2587
2588
2589 HInstruction* HGraphBuilder::AddElementAccess(
2590     HValue* elements,
2591     HValue* checked_key,
2592     HValue* val,
2593     HValue* dependency,
2594     ElementsKind elements_kind,
2595     PropertyAccessType access_type,
2596     LoadKeyedHoleMode load_mode) {
2597   if (access_type == STORE) {
2598     ASSERT(val != NULL);
2599     if (elements_kind == EXTERNAL_UINT8_CLAMPED_ELEMENTS ||
2600         elements_kind == UINT8_CLAMPED_ELEMENTS) {
2601       val = Add<HClampToUint8>(val);
2602     }
2603     return Add<HStoreKeyed>(elements, checked_key, val, elements_kind,
2604                             elements_kind == FAST_SMI_ELEMENTS
2605                               ? STORE_TO_INITIALIZED_ENTRY
2606                               : INITIALIZING_STORE);
2607   }
2608
2609   ASSERT(access_type == LOAD);
2610   ASSERT(val == NULL);
2611   HLoadKeyed* load = Add<HLoadKeyed>(
2612       elements, checked_key, dependency, elements_kind, load_mode);
2613   if (FLAG_opt_safe_uint32_operations &&
2614       (elements_kind == EXTERNAL_UINT32_ELEMENTS ||
2615        elements_kind == UINT32_ELEMENTS)) {
2616     graph()->RecordUint32Instruction(load);
2617   }
2618   return load;
2619 }
2620
2621
2622 HLoadNamedField* HGraphBuilder::AddLoadMap(HValue* object,
2623                                            HValue* dependency) {
2624   return Add<HLoadNamedField>(object, dependency, HObjectAccess::ForMap());
2625 }
2626
2627
2628 HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object,
2629                                                 HValue* dependency) {
2630   return Add<HLoadNamedField>(
2631       object, dependency, HObjectAccess::ForElementsPointer());
2632 }
2633
2634
2635 HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(
2636     HValue* array,
2637     HValue* dependency) {
2638   return Add<HLoadNamedField>(
2639       array, dependency, HObjectAccess::ForFixedArrayLength());
2640 }
2641
2642
2643 HLoadNamedField* HGraphBuilder::AddLoadArrayLength(HValue* array,
2644                                                    ElementsKind kind,
2645                                                    HValue* dependency) {
2646   return Add<HLoadNamedField>(
2647       array, dependency, HObjectAccess::ForArrayLength(kind));
2648 }
2649
2650
2651 HValue* HGraphBuilder::BuildNewElementsCapacity(HValue* old_capacity) {
2652   HValue* half_old_capacity = AddUncasted<HShr>(old_capacity,
2653                                                 graph_->GetConstant1());
2654
2655   HValue* new_capacity = AddUncasted<HAdd>(half_old_capacity, old_capacity);
2656   new_capacity->ClearFlag(HValue::kCanOverflow);
2657
2658   HValue* min_growth = Add<HConstant>(16);
2659
2660   new_capacity = AddUncasted<HAdd>(new_capacity, min_growth);
2661   new_capacity->ClearFlag(HValue::kCanOverflow);
2662
2663   return new_capacity;
2664 }
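
// The growth policy above, as a scalar formula (illustrative only):
//
//   new_capacity = old_capacity + (old_capacity >> 1) + 16;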
2665
2666
2667 HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object,
2668                                                  HValue* elements,
2669                                                  ElementsKind kind,
2670                                                  ElementsKind new_kind,
2671                                                  HValue* length,
2672                                                  HValue* new_capacity) {
2673   Add<HBoundsCheck>(new_capacity, Add<HConstant>(
2674           (Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >>
2675           ElementsKindToShiftSize(kind)));
2676
2677   HValue* new_elements = BuildAllocateElementsAndInitializeElementsHeader(
2678       new_kind, new_capacity);
2679
2680   BuildCopyElements(elements, kind, new_elements,
2681                     new_kind, length, new_capacity);
2682
2683   Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
2684                         new_elements);
2685
2686   return new_elements;
2687 }
2688
2689
2690 void HGraphBuilder::BuildFillElementsWithValue(HValue* elements,
2691                                                ElementsKind elements_kind,
2692                                                HValue* from,
2693                                                HValue* to,
2694                                                HValue* value) {
2695   if (to == NULL) {
2696     to = AddLoadFixedArrayLength(elements);
2697   }
2698
2699   // Special loop unrolling case.
2700   STATIC_ASSERT(JSArray::kPreallocatedArrayElements <=
2701                 kElementLoopUnrollThreshold);
2702   int initial_capacity = -1;
2703   if (from->IsInteger32Constant() && to->IsInteger32Constant()) {
2704     int constant_from = from->GetInteger32Constant();
2705     int constant_to = to->GetInteger32Constant();
2706
2707     if (constant_from == 0 && constant_to <= kElementLoopUnrollThreshold) {
2708       initial_capacity = constant_to;
2709     }
2710   }
2711
2712   // Since we're about to store a hole value, the store instruction below must
2713   // assume an elements kind that supports heap object values.
2714   if (IsFastSmiOrObjectElementsKind(elements_kind)) {
2715     elements_kind = FAST_HOLEY_ELEMENTS;
2716   }
2717
2718   if (initial_capacity >= 0) {
2719     for (int i = 0; i < initial_capacity; i++) {
2720       HInstruction* key = Add<HConstant>(i);
2721       Add<HStoreKeyed>(elements, key, value, elements_kind);
2722     }
2723   } else {
2724     // Carefully loop backwards so that the "from" value remains live through
2725     // the loop rather than the "to" value. This often corresponds to keeping
2726     // length live rather than capacity, which helps register allocation, since
2727     // length is used more often than capacity after filling with holes.
2728     LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);
2729
2730     HValue* key = builder.BeginBody(to, from, Token::GT);
2731
2732     HValue* adjusted_key = AddUncasted<HSub>(key, graph()->GetConstant1());
2733     adjusted_key->ClearFlag(HValue::kCanOverflow);
2734
2735     Add<HStoreKeyed>(elements, adjusted_key, value, elements_kind);
2736
2737     builder.EndBody();
2738   }
2739 }
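
// In the non-constant case, the backwards fill loop above is equivalent to
// the following (illustrative only):
//
//   for (int i = to; i > from; i--) {
//     elements[i - 1] = value;
//   }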
2740
2741
2742 void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
2743                                               ElementsKind elements_kind,
2744                                               HValue* from,
2745                                               HValue* to) {
2746   // Fast elements kinds need to be initialized in case the statements below
2747   // cause a garbage collection.
2748   Factory* factory = isolate()->factory();
2749
2750   double nan_double = FixedDoubleArray::hole_nan_as_double();
2751   HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
2752       ? Add<HConstant>(factory->the_hole_value())
2753       : Add<HConstant>(nan_double);
2754
2755   BuildFillElementsWithValue(elements, elements_kind, from, to, hole);
2756 }
2757
2758
2759 void HGraphBuilder::BuildCopyElements(HValue* from_elements,
2760                                       ElementsKind from_elements_kind,
2761                                       HValue* to_elements,
2762                                       ElementsKind to_elements_kind,
2763                                       HValue* length,
2764                                       HValue* capacity) {
2765   int constant_capacity = -1;
2766   if (capacity != NULL &&
2767       capacity->IsConstant() &&
2768       HConstant::cast(capacity)->HasInteger32Value()) {
2769     int constant_candidate = HConstant::cast(capacity)->Integer32Value();
2770     if (constant_candidate <= kElementLoopUnrollThreshold) {
2771       constant_capacity = constant_candidate;
2772     }
2773   }
2774
2775   bool pre_fill_with_holes =
2776     IsFastDoubleElementsKind(from_elements_kind) &&
2777     IsFastObjectElementsKind(to_elements_kind);
2778   if (pre_fill_with_holes) {
2779     // If the copy might trigger a GC, pre-initialize the FixedArray with
2780     // holes so that it is always in a consistent state during and after
2781     // the copy.
2782     BuildFillElementsWithHole(to_elements, to_elements_kind,
2783                               graph()->GetConstant0(), NULL);
2784   }
2785
2786   if (constant_capacity != -1) {
2787     // Unroll the loop for small elements kinds.
2788     for (int i = 0; i < constant_capacity; i++) {
2789       HValue* key_constant = Add<HConstant>(i);
2790       HInstruction* value = Add<HLoadKeyed>(from_elements, key_constant,
2791                                             static_cast<HValue*>(NULL),
2792                                             from_elements_kind);
2793       Add<HStoreKeyed>(to_elements, key_constant, value, to_elements_kind);
2794     }
2795   } else {
2796     if (!pre_fill_with_holes &&
2797         (capacity == NULL || !length->Equals(capacity))) {
2798       BuildFillElementsWithHole(to_elements, to_elements_kind,
2799                                 length, NULL);
2800     }
2801
2802     if (capacity == NULL) {
2803       capacity = AddLoadFixedArrayLength(to_elements);
2804     }
2805
2806     LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);
2807
2808     HValue* key = builder.BeginBody(length, graph()->GetConstant0(),
2809                                     Token::GT);
2810
2811     key = AddUncasted<HSub>(key, graph()->GetConstant1());
2812     key->ClearFlag(HValue::kCanOverflow);
2813
2814     HValue* element = Add<HLoadKeyed>(from_elements, key,
2815                                       static_cast<HValue*>(NULL),
2816                                       from_elements_kind,
2817                                       ALLOW_RETURN_HOLE);
2818
2819     ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
2820                          IsFastSmiElementsKind(to_elements_kind))
2821       ? FAST_HOLEY_ELEMENTS : to_elements_kind;
2822
2823     if (IsHoleyElementsKind(from_elements_kind) &&
2824         from_elements_kind != to_elements_kind) {
2825       IfBuilder if_hole(this);
2826       if_hole.If<HCompareHoleAndBranch>(element);
2827       if_hole.Then();
2828       HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
2829         ? Add<HConstant>(FixedDoubleArray::hole_nan_as_double())
2830         : graph()->GetConstantHole();
2831       Add<HStoreKeyed>(to_elements, key, hole_constant, kind);
2832       if_hole.Else();
2833       HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
2834       store->SetFlag(HValue::kAllowUndefinedAsNaN);
2835       if_hole.End();
2836     } else {
2837       HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
2838       store->SetFlag(HValue::kAllowUndefinedAsNaN);
2839     }
2840
2841     builder.EndBody();
2842   }
2843
2844   Counters* counters = isolate()->counters();
2845   AddIncrementCounter(counters->inlined_copied_elements());
2846 }
2847
2848
2849 HValue* HGraphBuilder::BuildCloneShallowArrayCow(HValue* boilerplate,
2850                                                  HValue* allocation_site,
2851                                                  AllocationSiteMode mode,
2852                                                  ElementsKind kind) {
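       // A COW (copy-on-write) array can share its elements with the boilerplate,
       // so only a fresh JSArray header is allocated here.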
2853   HAllocate* array = AllocateJSArrayObject(mode);
2854
2855   HValue* map = AddLoadMap(boilerplate);
2856   HValue* elements = AddLoadElements(boilerplate);
2857   HValue* length = AddLoadArrayLength(boilerplate, kind);
2858
2859   BuildJSArrayHeader(array,
2860                      map,
2861                      elements,
2862                      mode,
2863                      FAST_ELEMENTS,
2864                      allocation_site,
2865                      length);
2866   return array;
2867 }
2868
2869
2870 HValue* HGraphBuilder::BuildCloneShallowArrayEmpty(HValue* boilerplate,
2871                                                    HValue* allocation_site,
2872                                                    AllocationSiteMode mode) {
2873   HAllocate* array = AllocateJSArrayObject(mode);
2874
2875   HValue* map = AddLoadMap(boilerplate);
2876
2877   BuildJSArrayHeader(array,
2878                      map,
2879                      NULL,  // set elements to empty fixed array
2880                      mode,
2881                      FAST_ELEMENTS,
2882                      allocation_site,
2883                      graph()->GetConstant0());
2884   return array;
2885 }
2886
2887
2888 HValue* HGraphBuilder::BuildCloneShallowArrayNonEmpty(HValue* boilerplate,
2889                                                       HValue* allocation_site,
2890                                                       AllocationSiteMode mode,
2891                                                       ElementsKind kind) {
2892   HValue* boilerplate_elements = AddLoadElements(boilerplate);
2893   HValue* capacity = AddLoadFixedArrayLength(boilerplate_elements);
2894
2895   // Generate size calculation code here in order to make it dominate
2896   // the JSArray allocation.
2897   HValue* elements_size = BuildCalculateElementsSize(kind, capacity);
2898
2899   // Create an empty JSArray object for now; store elimination should remove
2900   // redundant initialization of the elements and length fields, and at the
2901   // same time the object will be fully prepared for GC if one happens during
2902   // elements allocation.
2903   HValue* result = BuildCloneShallowArrayEmpty(
2904       boilerplate, allocation_site, mode);
2905
2906   HAllocate* elements = BuildAllocateElements(kind, elements_size);
2907
2908   // This function implicitly relies on the fact that the
2909   // FastCloneShallowArrayStub is called only for literals shorter than
2910   // JSObject::kInitialMaxFastElementArray.
2911   // Can't add HBoundsCheck here; otherwise the stub would need an eager frame.
2912   HConstant* size_upper_bound = EstablishElementsAllocationSize(
2913       kind, JSObject::kInitialMaxFastElementArray);
2914   elements->set_size_upper_bound(size_upper_bound);
2915
2916   Add<HStoreNamedField>(result, HObjectAccess::ForElementsPointer(), elements);
2917
2918   // The allocation for the cloned array above causes register pressure on
2919   // machines with low register counts. Force a reload of the boilerplate
2920   // elements here to free up a register for the allocation to avoid unnecessary
2921   // spillage.
2922   boilerplate_elements = AddLoadElements(boilerplate);
2923   boilerplate_elements->SetFlag(HValue::kCantBeReplaced);
2924
2925   // Copy the elements array header.
2926   for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
2927     HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
2928     Add<HStoreNamedField>(elements, access,
2929         Add<HLoadNamedField>(boilerplate_elements,
2930                              static_cast<HValue*>(NULL), access));
2931   }
2932
2933   // Copy the array length from the boilerplate into the result.
2934   HValue* length = AddLoadArrayLength(boilerplate, kind);
2935   Add<HStoreNamedField>(result, HObjectAccess::ForArrayLength(kind), length);
2936
2937   BuildCopyElements(boilerplate_elements, kind, elements,
2938                     kind, length, NULL);
2939   return result;
2940 }
2941
2942
2943 void HGraphBuilder::BuildCompareNil(
2944     HValue* value,
2945     Type* type,
2946     HIfContinuation* continuation) {
2947   IfBuilder if_nil(this);
2948   bool some_case_handled = false;
2949   bool some_case_missing = false;
2950
2951   if (type->Maybe(Type::Null())) {
2952     if (some_case_handled) if_nil.Or();
2953     if_nil.If<HCompareObjectEqAndBranch>(value, graph()->GetConstantNull());
2954     some_case_handled = true;
2955   } else {
2956     some_case_missing = true;
2957   }
2958
2959   if (type->Maybe(Type::Undefined())) {
2960     if (some_case_handled) if_nil.Or();
2961     if_nil.If<HCompareObjectEqAndBranch>(value,
2962                                          graph()->GetConstantUndefined());
2963     some_case_handled = true;
2964   } else {
2965     some_case_missing = true;
2966   }
2967
2968   if (type->Maybe(Type::Undetectable())) {
2969     if (some_case_handled) if_nil.Or();
2970     if_nil.If<HIsUndetectableAndBranch>(value);
2971     some_case_handled = true;
2972   } else {
2973     some_case_missing = true;
2974   }
2975
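       // The type may cover values that none of the checks above handle; on the
       // non-nil path require the single expected map or deoptimize.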
2976   if (some_case_missing) {
2977     if_nil.Then();
2978     if_nil.Else();
2979     if (type->NumClasses() == 1) {
2980       BuildCheckHeapObject(value);
2981       // For ICs, the map checked below is a sentinel map that gets replaced by
2982       // the monomorphic map when the code is used as a template to generate a
2983       // new IC. For optimized functions, there is no sentinel map, the map
2984       // emitted below is the actual monomorphic map.
2985       Add<HCheckMaps>(value, type->Classes().Current());
2986     } else {
2987       if_nil.Deopt("Too many undetectable types");
2988     }
2989   }
2990
2991   if_nil.CaptureContinuation(continuation);
2992 }
2993
2994
2995 void HGraphBuilder::BuildCreateAllocationMemento(
2996     HValue* previous_object,
2997     HValue* previous_object_size,
2998     HValue* allocation_site) {
2999   ASSERT(allocation_site != NULL);
3000   HInnerAllocatedObject* allocation_memento = Add<HInnerAllocatedObject>(
3001       previous_object, previous_object_size, HType::HeapObject());
3002   AddStoreMapConstant(
3003       allocation_memento, isolate()->factory()->allocation_memento_map());
3004   Add<HStoreNamedField>(
3005       allocation_memento,
3006       HObjectAccess::ForAllocationMementoSite(),
3007       allocation_site);
3008   if (FLAG_allocation_site_pretenuring) {
3009     HValue* memento_create_count = Add<HLoadNamedField>(
3010         allocation_site, static_cast<HValue*>(NULL),
3011         HObjectAccess::ForAllocationSiteOffset(
3012             AllocationSite::kPretenureCreateCountOffset));
3013     memento_create_count = AddUncasted<HAdd>(
3014         memento_create_count, graph()->GetConstant1());
3015     // This smi value is reset to zero after every GC; overflow isn't a problem
3016     // since the counter is bounded by the new space size.
3017     memento_create_count->ClearFlag(HValue::kCanOverflow);
3018     Add<HStoreNamedField>(
3019         allocation_site, HObjectAccess::ForAllocationSiteOffset(
3020             AllocationSite::kPretenureCreateCountOffset), memento_create_count);
3021   }
3022 }
3023
3024
3025 HInstruction* HGraphBuilder::BuildGetNativeContext(HValue* closure) {
3026   // Get the function context, the global object, and then the native context
3027   HInstruction* context =
3028       Add<HLoadNamedField>(closure, static_cast<HValue*>(NULL),
3029                            HObjectAccess::ForFunctionContextPointer());
3030   HInstruction* global_object = Add<HLoadNamedField>(
3031       context, static_cast<HValue*>(NULL),
3032       HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
3033   HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(
3034       GlobalObject::kNativeContextOffset);
3035   return Add<HLoadNamedField>(
3036       global_object, static_cast<HValue*>(NULL), access);
3037 }
3038
3039
3040 HInstruction* HGraphBuilder::BuildGetNativeContext() {
3041   // Get the global object, then the native context
3042   HValue* global_object = Add<HLoadNamedField>(
3043       context(), static_cast<HValue*>(NULL),
3044       HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
3045   return Add<HLoadNamedField>(
3046       global_object, static_cast<HValue*>(NULL),
3047       HObjectAccess::ForObservableJSObjectOffset(
3048           GlobalObject::kNativeContextOffset));
3049 }
3050
3051
3052 HInstruction* HGraphBuilder::BuildGetArrayFunction() {
3053   HInstruction* native_context = BuildGetNativeContext();
3054   HInstruction* index =
3055       Add<HConstant>(static_cast<int32_t>(Context::ARRAY_FUNCTION_INDEX));
3056   return Add<HLoadKeyed>(
3057       native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
3058 }
3059
3060
3061 HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
3062     ElementsKind kind,
3063     HValue* allocation_site_payload,
3064     HValue* constructor_function,
3065     AllocationSiteOverrideMode override_mode) :
3066         builder_(builder),
3067         kind_(kind),
3068         allocation_site_payload_(allocation_site_payload),
3069         constructor_function_(constructor_function) {
3070   ASSERT(!allocation_site_payload->IsConstant() ||
3071          HConstant::cast(allocation_site_payload)->handle(
3072              builder_->isolate())->IsAllocationSite());
3073   mode_ = override_mode == DISABLE_ALLOCATION_SITES
3074       ? DONT_TRACK_ALLOCATION_SITE
3075       : AllocationSite::GetMode(kind);
3076 }
3077
3078
3079 HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
3080                                               ElementsKind kind,
3081                                               HValue* constructor_function) :
3082     builder_(builder),
3083     kind_(kind),
3084     mode_(DONT_TRACK_ALLOCATION_SITE),
3085     allocation_site_payload_(NULL),
3086     constructor_function_(constructor_function) {
3087 }
3088
3089
3090 HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode() {
3091   if (!builder()->top_info()->IsStub()) {
3092     // A constant map is fine.
3093     Handle<Map> map(builder()->isolate()->get_initial_js_array_map(kind_),
3094                     builder()->isolate());
3095     return builder()->Add<HConstant>(map);
3096   }
3097
3098   if (constructor_function_ != NULL && kind_ == GetInitialFastElementsKind()) {
3099     // No need for a context lookup if the kind_ matches the initial
3100     // map, because we can just load the map in that case.
3101     HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
3102     return builder()->Add<HLoadNamedField>(
3103         constructor_function_, static_cast<HValue*>(NULL), access);
3104   }
3105
3106   // TODO(mvstanton): we should always have a constructor function if we
3107   // are creating a stub.
3108   HInstruction* native_context = constructor_function_ != NULL
3109       ? builder()->BuildGetNativeContext(constructor_function_)
3110       : builder()->BuildGetNativeContext();
3111
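       // Load the array of JSArray maps from the native context and pick the map
       // for this elements kind.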
3112   HInstruction* index = builder()->Add<HConstant>(
3113       static_cast<int32_t>(Context::JS_ARRAY_MAPS_INDEX));
3114
3115   HInstruction* map_array = builder()->Add<HLoadKeyed>(
3116       native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
3117
3118   HInstruction* kind_index = builder()->Add<HConstant>(kind_);
3119
3120   return builder()->Add<HLoadKeyed>(
3121       map_array, kind_index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
3122 }
3123
3124
3125 HValue* HGraphBuilder::JSArrayBuilder::EmitInternalMapCode() {
3126   // Load the initial map stored on the constructor function.
3127   HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
3128   return builder()->Add<HLoadNamedField>(
3129       constructor_function_, static_cast<HValue*>(NULL), access);
3130 }
3131
3132
3133 HAllocate* HGraphBuilder::JSArrayBuilder::AllocateEmptyArray() {
3134   HConstant* capacity = builder()->Add<HConstant>(initial_capacity());
3135   return AllocateArray(capacity,
3136                        capacity,
3137                        builder()->graph()->GetConstant0());
3138 }
3139
3140
3141 HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray(
3142     HValue* capacity,
3143     HConstant* capacity_upper_bound,
3144     HValue* length_field,
3145     FillMode fill_mode) {
3146   return AllocateArray(capacity,
3147                        capacity_upper_bound->GetInteger32Constant(),
3148                        length_field,
3149                        fill_mode);
3150 }
3151
3152
3153 HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray(
3154     HValue* capacity,
3155     int capacity_upper_bound,
3156     HValue* length_field,
3157     FillMode fill_mode) {
3158   HConstant* elements_size_upper_bound = capacity->IsInteger32Constant()
3159       ? HConstant::cast(capacity)
3160       : builder()->EstablishElementsAllocationSize(kind_, capacity_upper_bound);
3161
3162   HAllocate* array = AllocateArray(capacity, length_field, fill_mode);
3163   if (!elements_location_->has_size_upper_bound()) {
3164     elements_location_->set_size_upper_bound(elements_size_upper_bound);
3165   }
3166   return array;
3167 }
3168
3169
3170 HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray(
3171     HValue* capacity,
3172     HValue* length_field,
3173     FillMode fill_mode) {
3174   // These HForceRepresentations are because we store these as fields in the
3175   // objects we construct, and an int32-to-smi HChange could deopt. Accept
3176   // the deopt possibility now, before allocation occurs.
3177   capacity =
3178       builder()->AddUncasted<HForceRepresentation>(capacity,
3179                                                    Representation::Smi());
3180   length_field =
3181       builder()->AddUncasted<HForceRepresentation>(length_field,
3182                                                    Representation::Smi());
3183
3184   // Generate size calculation code here in order to make it dominate
3185   // the JSArray allocation.
3186   HValue* elements_size =
3187       builder()->BuildCalculateElementsSize(kind_, capacity);
3188
3189   // Allocate (dealing with failure appropriately)
3190   HAllocate* array_object = builder()->AllocateJSArrayObject(mode_);
3191
3192   // Fill in the fields: map, properties, length
3193   HValue* map;
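       // Without an allocation site payload, take the map straight from the
       // constructor function; otherwise emit the general map lookup.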
3194   if (allocation_site_payload_ == NULL) {
3195     map = EmitInternalMapCode();
3196   } else {
3197     map = EmitMapCode();
3198   }
3199
3200   builder()->BuildJSArrayHeader(array_object,
3201                                 map,
3202                                 NULL,  // set elements to empty fixed array
3203                                 mode_,
3204                                 kind_,
3205                                 allocation_site_payload_,
3206                                 length_field);
3207
3208   // Allocate and initialize the elements
3209   elements_location_ = builder()->BuildAllocateElements(kind_, elements_size);
3210
3211   builder()->BuildInitializeElementsHeader(elements_location_, kind_, capacity);
3212
3213   // Set the elements
3214   builder()->Add<HStoreNamedField>(
3215       array_object, HObjectAccess::ForElementsPointer(), elements_location_);
3216
3217   if (fill_mode == FILL_WITH_HOLE) {
3218     builder()->BuildFillElementsWithHole(elements_location_, kind_,
3219                                          graph()->GetConstant0(), capacity);
3220   }
3221
3222   return array_object;
3223 }
3224
3225
3226 HValue* HGraphBuilder::AddLoadJSBuiltin(Builtins::JavaScript builtin) {
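       // Load the builtins object from the global object, then the function for
       // the requested builtin id.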
3227   HValue* global_object = Add<HLoadNamedField>(
3228       context(), static_cast<HValue*>(NULL),
3229       HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
3230   HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(
3231       GlobalObject::kBuiltinsOffset);
3232   HValue* builtins = Add<HLoadNamedField>(
3233       global_object, static_cast<HValue*>(NULL), access);
3234   HObjectAccess function_access = HObjectAccess::ForObservableJSObjectOffset(
3235           JSBuiltinsObject::OffsetOfFunctionWithId(builtin));
3236   return Add<HLoadNamedField>(
3237       builtins, static_cast<HValue*>(NULL), function_access);
3238 }
3239
3240
3241 HOptimizedGraphBuilder::HOptimizedGraphBuilder(CompilationInfo* info)
3242     : HGraphBuilder(info),
3243       function_state_(NULL),
3244       initial_function_state_(this, info, NORMAL_RETURN, 0),
3245       ast_context_(NULL),
3246       break_scope_(NULL),
3247       inlined_count_(0),
3248       globals_(10, info->zone()),
3249       inline_bailout_(false),
3250       osr_(new(info->zone()) HOsrBuilder(this)) {
3251   // This is not initialized in the initializer list because the
3252   // constructor for the initial state relies on function_state_ == NULL
3253   // to know it's the initial state.
3254   function_state_ = &initial_function_state_;
3255   InitializeAstVisitor(info->zone());
3256   if (FLAG_hydrogen_track_positions) {
3257     SetSourcePosition(info->shared_info()->start_position());
3258   }
3259 }
3260
3261
3262 HBasicBlock* HOptimizedGraphBuilder::CreateJoin(HBasicBlock* first,
3263                                                 HBasicBlock* second,
3264                                                 BailoutId join_id) {
3265   if (first == NULL) {
3266     return second;
3267   } else if (second == NULL) {
3268     return first;
3269   } else {
3270     HBasicBlock* join_block = graph()->CreateBasicBlock();
3271     Goto(first, join_block);
3272     Goto(second, join_block);
3273     join_block->SetJoinId(join_id);
3274     return join_block;
3275   }
3276 }
3277
3278
3279 HBasicBlock* HOptimizedGraphBuilder::JoinContinue(IterationStatement* statement,
3280                                                   HBasicBlock* exit_block,
3281                                                   HBasicBlock* continue_block) {
3282   if (continue_block != NULL) {
3283     if (exit_block != NULL) Goto(exit_block, continue_block);
3284     continue_block->SetJoinId(statement->ContinueId());
3285     return continue_block;
3286   }
3287   return exit_block;
3288 }
3289
3290
3291 HBasicBlock* HOptimizedGraphBuilder::CreateLoop(IterationStatement* statement,
3292                                                 HBasicBlock* loop_entry,
3293                                                 HBasicBlock* body_exit,
3294                                                 HBasicBlock* loop_successor,
3295                                                 HBasicBlock* break_block) {
3296   if (body_exit != NULL) Goto(body_exit, loop_entry);
3297   loop_entry->PostProcessLoopHeader(statement);
3298   if (break_block != NULL) {
3299     if (loop_successor != NULL) Goto(loop_successor, break_block);
3300     break_block->SetJoinId(statement->ExitId());
3301     return break_block;
3302   }
3303   return loop_successor;
3304 }
3305
3306
3307 // Build a new loop header block and set it as the current block.
3308 HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry() {
3309   HBasicBlock* loop_entry = CreateLoopHeaderBlock();
3310   Goto(loop_entry);
3311   set_current_block(loop_entry);
3312   return loop_entry;
3313 }
3314
3315
3316 HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry(
3317     IterationStatement* statement) {
3318   HBasicBlock* loop_entry = osr()->HasOsrEntryAt(statement)
3319       ? osr()->BuildOsrLoopEntry(statement)
3320       : BuildLoopEntry();
3321   return loop_entry;
3322 }
3323
3324
3325 void HBasicBlock::FinishExit(HControlInstruction* instruction,
3326                              HSourcePosition position) {
3327   Finish(instruction, position);
3328   ClearEnvironment();
3329 }
3330
3331
3332 HGraph::HGraph(CompilationInfo* info)
3333     : isolate_(info->isolate()),
3334       next_block_id_(0),
3335       entry_block_(NULL),
3336       blocks_(8, info->zone()),
3337       values_(16, info->zone()),
3338       phi_list_(NULL),
3339       uint32_instructions_(NULL),
3340       osr_(NULL),
3341       info_(info),
3342       zone_(info->zone()),
3343       is_recursive_(false),
3344       use_optimistic_licm_(false),
3345       depends_on_empty_array_proto_elements_(false),
3346       type_change_checksum_(0),
3347       maximum_environment_size_(0),
3348       no_side_effects_scope_count_(0),
3349       disallow_adding_new_values_(false),
3350       next_inline_id_(0),
3351       inlined_functions_(5, info->zone()) {
3352   if (info->IsStub()) {
3353     HydrogenCodeStub* stub = info->code_stub();
3354     CodeStubInterfaceDescriptor* descriptor = stub->GetInterfaceDescriptor();
3355     start_environment_ =
3356         new(zone_) HEnvironment(zone_, descriptor->environment_length());
3357   } else {
3358     TraceInlinedFunction(info->shared_info(), HSourcePosition::Unknown());
3359     start_environment_ =
3360         new(zone_) HEnvironment(NULL, info->scope(), info->closure(), zone_);
3361   }
3362   start_environment_->set_ast_id(BailoutId::FunctionEntry());
3363   entry_block_ = CreateBasicBlock();
3364   entry_block_->SetInitialEnvironment(start_environment_);
3365 }
3366
3367
3368 HBasicBlock* HGraph::CreateBasicBlock() {
3369   HBasicBlock* result = new(zone()) HBasicBlock(this);
3370   blocks_.Add(result, zone());
3371   return result;
3372 }
3373
3374
3375 void HGraph::FinalizeUniqueness() {
3376   DisallowHeapAllocation no_gc;
3377   ASSERT(!OptimizingCompilerThread::IsOptimizerThread(isolate()));
3378   for (int i = 0; i < blocks()->length(); ++i) {
3379     for (HInstructionIterator it(blocks()->at(i)); !it.Done(); it.Advance()) {
3380       it.Current()->FinalizeUniqueness();
3381     }
3382   }
3383 }
3384
3385
3386 int HGraph::TraceInlinedFunction(
3387     Handle<SharedFunctionInfo> shared,
3388     HSourcePosition position) {
3389   if (!FLAG_hydrogen_track_positions) {
3390     return 0;
3391   }
3392
3393   int id = 0;
3394   for (; id < inlined_functions_.length(); id++) {
3395     if (inlined_functions_[id].shared().is_identical_to(shared)) {
3396       break;
3397     }
3398   }
3399
3400   if (id == inlined_functions_.length()) {
3401     inlined_functions_.Add(InlinedFunctionInfo(shared), zone());
3402
3403     if (!shared->script()->IsUndefined()) {
3404       Handle<Script> script(Script::cast(shared->script()));
3405       if (!script->source()->IsUndefined()) {
3406         CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
3407         PrintF(tracing_scope.file(),
3408                "--- FUNCTION SOURCE (%s) id{%d,%d} ---\n",
3409                shared->DebugName()->ToCString().get(),
3410                info()->optimization_id(),
3411                id);
3412
3413         {
3414           ConsStringIteratorOp op;
3415           StringCharacterStream stream(String::cast(script->source()),
3416                                        &op,
3417                                        shared->start_position());
3418           // shared->end_position() points to the last character in the stream. We
3419           // need to compensate by adding one to calculate the length.
3420           int source_len =
3421               shared->end_position() - shared->start_position() + 1;
3422           for (int i = 0; i < source_len; i++) {
3423             if (stream.HasMore()) {
3424               PrintF(tracing_scope.file(), "%c", stream.GetNext());
3425             }
3426           }
3427         }
3428
3429         PrintF(tracing_scope.file(), "\n--- END ---\n");
3430       }
3431     }
3432   }
3433
3434   int inline_id = next_inline_id_++;
3435
3436   if (inline_id != 0) {
3437     CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
3438     PrintF(tracing_scope.file(), "INLINE (%s) id{%d,%d} AS %d AT ",
3439            shared->DebugName()->ToCString().get(),
3440            info()->optimization_id(),
3441            id,
3442            inline_id);
3443     position.PrintTo(tracing_scope.file());
3444     PrintF(tracing_scope.file(), "\n");
3445   }
3446
3447   return inline_id;
3448 }
3449
3450
3451 int HGraph::SourcePositionToScriptPosition(HSourcePosition pos) {
3452   if (!FLAG_hydrogen_track_positions || pos.IsUnknown()) {
3453     return pos.raw();
3454   }
3455
3456   return inlined_functions_[pos.inlining_id()].start_position() +
3457       pos.position();
3458 }
3459
3460
3461 // Block ordering was implemented with two mutually recursive methods,
3462 // HGraph::Postorder and HGraph::PostorderLoopBlocks.
3463 // The recursion could lead to stack overflow so the algorithm has been
3464 // implemented iteratively.
3465 // At a high level the algorithm looks like this:
3466 //
3467 // Postorder(block, loop_header) : {
3468 //   if (block has already been visited or is of another loop) return;
3469 //   mark block as visited;
3470 //   if (block is a loop header) {
3471 //     VisitLoopMembers(block, loop_header);
3472 //     VisitSuccessorsOfLoopHeader(block);
3473 //   } else {
3474 //     VisitSuccessors(block, loop_header)
3475 //   }
3476 //   put block in result list;
3477 // }
3478 //
3479 // VisitLoopMembers(block, outer_loop_header) {
3480 //   foreach (block b in block loop members) {
3481 //     VisitSuccessorsOfLoopMember(b, outer_loop_header);
3482 //     if (b is loop header) VisitLoopMembers(b);
3483 //   }
3484 // }
3485 //
3486 // VisitSuccessorsOfLoopMember(block, outer_loop_header) {
3487 //   foreach (block b in block successors) Postorder(b, outer_loop_header)
3488 // }
3489 //
3490 // VisitSuccessorsOfLoopHeader(block) {
3491 //   foreach (block b in block successors) Postorder(b, block)
3492 // }
3493 //
3494 // VisitSuccessors(block, loop_header) {
3495 //   foreach (block b in block successors) Postorder(b, loop_header)
3496 // }
3497 //
3498 // The ordering is started by calling Postorder(entry, NULL).
3499 //
3500 // Each instance of PostorderProcessor represents the "stack frame" of the
3501 // recursion, and particularly keeps the state of the loop (iteration) of the
3502 // "Visit..." function it represents.
3503 // To recycle memory we keep all the frames in a doubly linked list, but
3504 // this means that we cannot use constructors to initialize the frames.
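     //
     // A small illustration: for a diamond CFG B0 -> {B1, B2} -> B3 with no
     // loops, Postorder(B0, NULL) emits roughly B3, B1, B2, B0 (the order of B1
     // and B2 depends on successor iteration), and OrderBlocks() below reverses
     // the list so that B0 comes first.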
3505 //
3506 class PostorderProcessor : public ZoneObject {
3507  public:
3508   // Back link (towards the stack bottom).
3509   PostorderProcessor* parent() { return father_; }
3510   // Forward link (towards the stack top).
3511   PostorderProcessor* child() { return child_; }
3512   HBasicBlock* block() { return block_; }
3513   HLoopInformation* loop() { return loop_; }
3514   HBasicBlock* loop_header() { return loop_header_; }
3515
3516   static PostorderProcessor* CreateEntryProcessor(Zone* zone,
3517                                                   HBasicBlock* block) {
3518     PostorderProcessor* result = new(zone) PostorderProcessor(NULL);
3519     return result->SetupSuccessors(zone, block, NULL);
3520   }
3521
3522   PostorderProcessor* PerformStep(Zone* zone,
3523                                   ZoneList<HBasicBlock*>* order) {
3524     PostorderProcessor* next =
3525         PerformNonBacktrackingStep(zone, order);
3526     if (next != NULL) {
3527       return next;
3528     } else {
3529       return Backtrack(zone, order);
3530     }
3531   }
3532
3533  private:
3534   explicit PostorderProcessor(PostorderProcessor* father)
3535       : father_(father), child_(NULL), successor_iterator(NULL) { }
3536
3537   // Each enum value identifies the cycle whose state is kept by this instance.
3538   enum LoopKind {
3539     NONE,
3540     SUCCESSORS,
3541     SUCCESSORS_OF_LOOP_HEADER,
3542     LOOP_MEMBERS,
3543     SUCCESSORS_OF_LOOP_MEMBER
3544   };
3545
3546   // Each "Setup..." method is like a constructor for a cycle state.
3547   PostorderProcessor* SetupSuccessors(Zone* zone,
3548                                       HBasicBlock* block,
3549                                       HBasicBlock* loop_header) {
3550     if (block == NULL || block->IsOrdered() ||
3551         block->parent_loop_header() != loop_header) {
3552       kind_ = NONE;
3553       block_ = NULL;
3554       loop_ = NULL;
3555       loop_header_ = NULL;
3556       return this;
3557     } else {
3558       block_ = block;
3559       loop_ = NULL;
3560       block->MarkAsOrdered();
3561
3562       if (block->IsLoopHeader()) {
3563         kind_ = SUCCESSORS_OF_LOOP_HEADER;
3564         loop_header_ = block;
3565         InitializeSuccessors();
3566         PostorderProcessor* result = Push(zone);
3567         return result->SetupLoopMembers(zone, block, block->loop_information(),
3568                                         loop_header);
3569       } else {
3570         ASSERT(block->IsFinished());
3571         kind_ = SUCCESSORS;
3572         loop_header_ = loop_header;
3573         InitializeSuccessors();
3574         return this;
3575       }
3576     }
3577   }
3578
3579   PostorderProcessor* SetupLoopMembers(Zone* zone,
3580                                        HBasicBlock* block,
3581                                        HLoopInformation* loop,
3582                                        HBasicBlock* loop_header) {
3583     kind_ = LOOP_MEMBERS;
3584     block_ = block;
3585     loop_ = loop;
3586     loop_header_ = loop_header;
3587     InitializeLoopMembers();
3588     return this;
3589   }
3590
3591   PostorderProcessor* SetupSuccessorsOfLoopMember(
3592       HBasicBlock* block,
3593       HLoopInformation* loop,
3594       HBasicBlock* loop_header) {
3595     kind_ = SUCCESSORS_OF_LOOP_MEMBER;
3596     block_ = block;
3597     loop_ = loop;
3598     loop_header_ = loop_header;
3599     InitializeSuccessors();
3600     return this;
3601   }
3602
3603   // This method "allocates" a new stack frame.
3604   PostorderProcessor* Push(Zone* zone) {
3605     if (child_ == NULL) {
3606       child_ = new(zone) PostorderProcessor(this);
3607     }
3608     return child_;
3609   }
3610
3611   void ClosePostorder(ZoneList<HBasicBlock*>* order, Zone* zone) {
3612     ASSERT(block_->end()->FirstSuccessor() == NULL ||
3613            order->Contains(block_->end()->FirstSuccessor()) ||
3614            block_->end()->FirstSuccessor()->IsLoopHeader());
3615     ASSERT(block_->end()->SecondSuccessor() == NULL ||
3616            order->Contains(block_->end()->SecondSuccessor()) ||
3617            block_->end()->SecondSuccessor()->IsLoopHeader());
3618     order->Add(block_, zone);
3619   }
3620
3621   // This method performs the basic step of walking up the stack.
3622   PostorderProcessor* Pop(Zone* zone,
3623                           ZoneList<HBasicBlock*>* order) {
3624     switch (kind_) {
3625       case SUCCESSORS:
3626       case SUCCESSORS_OF_LOOP_HEADER:
3627         ClosePostorder(order, zone);
3628         return father_;
3629       case LOOP_MEMBERS:
3630         return father_;
3631       case SUCCESSORS_OF_LOOP_MEMBER:
3632         if (block()->IsLoopHeader() && block() != loop_->loop_header()) {
3633           // In this case we need to perform a LOOP_MEMBERS cycle so we
3634           // initialize it and return this instead of father.
3635           return SetupLoopMembers(zone, block(),
3636                                   block()->loop_information(), loop_header_);
3637         } else {
3638           return father_;
3639         }
3640       case NONE:
3641         return father_;
3642     }
3643     UNREACHABLE();
3644     return NULL;
3645   }
3646
3647   // Walks up the stack.
3648   PostorderProcessor* Backtrack(Zone* zone,
3649                                 ZoneList<HBasicBlock*>* order) {
3650     PostorderProcessor* parent = Pop(zone, order);
3651     while (parent != NULL) {
3652       PostorderProcessor* next =
3653           parent->PerformNonBacktrackingStep(zone, order);
3654       if (next != NULL) {
3655         return next;
3656       } else {
3657         parent = parent->Pop(zone, order);
3658       }
3659     }
3660     return NULL;
3661   }
3662
3663   PostorderProcessor* PerformNonBacktrackingStep(
3664       Zone* zone,
3665       ZoneList<HBasicBlock*>* order) {
3666     HBasicBlock* next_block;
3667     switch (kind_) {
3668       case SUCCESSORS:
3669         next_block = AdvanceSuccessors();
3670         if (next_block != NULL) {
3671           PostorderProcessor* result = Push(zone);
3672           return result->SetupSuccessors(zone, next_block, loop_header_);
3673         }
3674         break;
3675       case SUCCESSORS_OF_LOOP_HEADER:
3676         next_block = AdvanceSuccessors();
3677         if (next_block != NULL) {
3678           PostorderProcessor* result = Push(zone);
3679           return result->SetupSuccessors(zone, next_block, block());
3680         }
3681         break;
3682       case LOOP_MEMBERS:
3683         next_block = AdvanceLoopMembers();
3684         if (next_block != NULL) {
3685           PostorderProcessor* result = Push(zone);
3686           return result->SetupSuccessorsOfLoopMember(next_block,
3687                                                      loop_, loop_header_);
3688         }
3689         break;
3690       case SUCCESSORS_OF_LOOP_MEMBER:
3691         next_block = AdvanceSuccessors();
3692         if (next_block != NULL) {
3693           PostorderProcessor* result = Push(zone);
3694           return result->SetupSuccessors(zone, next_block, loop_header_);
3695         }
3696         break;
3697       case NONE:
3698         return NULL;
3699     }
3700     return NULL;
3701   }
3702
3703   // The following two methods implement a "foreach b in successors" cycle.
3704   void InitializeSuccessors() {
3705     loop_index = 0;
3706     loop_length = 0;
3707     successor_iterator = HSuccessorIterator(block_->end());
3708   }
3709
3710   HBasicBlock* AdvanceSuccessors() {
3711     if (!successor_iterator.Done()) {
3712       HBasicBlock* result = successor_iterator.Current();
3713       successor_iterator.Advance();
3714       return result;
3715     }
3716     return NULL;
3717   }
3718
3719   // The following two methods implement a "foreach b in loop members" cycle.
3720   void InitializeLoopMembers() {
3721     loop_index = 0;
3722     loop_length = loop_->blocks()->length();
3723   }
3724
3725   HBasicBlock* AdvanceLoopMembers() {
3726     if (loop_index < loop_length) {
3727       HBasicBlock* result = loop_->blocks()->at(loop_index);
3728       loop_index++;
3729       return result;
3730     } else {
3731       return NULL;
3732     }
3733   }
3734
3735   LoopKind kind_;
3736   PostorderProcessor* father_;
3737   PostorderProcessor* child_;
3738   HLoopInformation* loop_;
3739   HBasicBlock* block_;
3740   HBasicBlock* loop_header_;
3741   int loop_index;
3742   int loop_length;
3743   HSuccessorIterator successor_iterator;
3744 };
3745
3746
3747 void HGraph::OrderBlocks() {
3748   CompilationPhase phase("H_Block ordering", info());
3749
3750 #ifdef DEBUG
3751   // Initially the blocks must not be ordered.
3752   for (int i = 0; i < blocks_.length(); ++i) {
3753     ASSERT(!blocks_[i]->IsOrdered());
3754   }
3755 #endif
3756
3757   PostorderProcessor* postorder =
3758       PostorderProcessor::CreateEntryProcessor(zone(), blocks_[0]);
3759   blocks_.Rewind(0);
3760   while (postorder) {
3761     postorder = postorder->PerformStep(zone(), &blocks_);
3762   }
3763
3764 #ifdef DEBUG
3765   // Now all blocks must be marked as ordered.
3766   for (int i = 0; i < blocks_.length(); ++i) {
3767     ASSERT(blocks_[i]->IsOrdered());
3768   }
3769 #endif
3770
3771   // Reverse block list and assign block IDs.
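       // (After this reversal each block follows its non-back-edge predecessors,
       // which AssignDominators() below relies on.)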
3772   for (int i = 0, j = blocks_.length(); --j >= i; ++i) {
3773     HBasicBlock* bi = blocks_[i];
3774     HBasicBlock* bj = blocks_[j];
3775     bi->set_block_id(j);
3776     bj->set_block_id(i);
3777     blocks_[i] = bj;
3778     blocks_[j] = bi;
3779   }
3780 }
3781
3782
3783 void HGraph::AssignDominators() {
3784   HPhase phase("H_Assign dominators", this);
3785   for (int i = 0; i < blocks_.length(); ++i) {
3786     HBasicBlock* block = blocks_[i];
3787     if (block->IsLoopHeader()) {
3788       // Only the first predecessor of a loop header is from outside the loop.
3789       // All others are back edges, and thus cannot dominate the loop header.
3790       block->AssignCommonDominator(block->predecessors()->first());
3791       block->AssignLoopSuccessorDominators();
3792     } else {
3793       for (int j = blocks_[i]->predecessors()->length() - 1; j >= 0; --j) {
3794         blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j));
3795       }
3796     }
3797   }
3798 }
3799
3800
3801 bool HGraph::CheckArgumentsPhiUses() {
3802   int block_count = blocks_.length();
3803   for (int i = 0; i < block_count; ++i) {
3804     for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
3805       HPhi* phi = blocks_[i]->phis()->at(j);
3806       // We don't support phi uses of arguments for now.
3807       if (phi->CheckFlag(HValue::kIsArguments)) return false;
3808     }
3809   }
3810   return true;
3811 }
3812
3813
3814 bool HGraph::CheckConstPhiUses() {
3815   int block_count = blocks_.length();
3816   for (int i = 0; i < block_count; ++i) {
3817     for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
3818       HPhi* phi = blocks_[i]->phis()->at(j);
3819       // Check for the hole value (from an uninitialized const).
3820       for (int k = 0; k < phi->OperandCount(); k++) {
3821         if (phi->OperandAt(k) == GetConstantHole()) return false;
3822       }
3823     }
3824   }
3825   return true;
3826 }
3827
3828
3829 void HGraph::CollectPhis() {
3830   int block_count = blocks_.length();
3831   phi_list_ = new(zone()) ZoneList<HPhi*>(block_count, zone());
3832   for (int i = 0; i < block_count; ++i) {
3833     for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
3834       HPhi* phi = blocks_[i]->phis()->at(j);
3835       phi_list_->Add(phi, zone());
3836     }
3837   }
3838 }
3839
3840
3841 // Implementation of utility class to encapsulate the translation state for
3842 // a (possibly inlined) function.
3843 FunctionState::FunctionState(HOptimizedGraphBuilder* owner,
3844                              CompilationInfo* info,
3845                              InliningKind inlining_kind,
3846                              int inlining_id)
3847     : owner_(owner),
3848       compilation_info_(info),
3849       call_context_(NULL),
3850       inlining_kind_(inlining_kind),
3851       function_return_(NULL),
3852       test_context_(NULL),
3853       entry_(NULL),
3854       arguments_object_(NULL),
3855       arguments_elements_(NULL),
3856       inlining_id_(inlining_id),
3857       outer_source_position_(HSourcePosition::Unknown()),
3858       outer_(owner->function_state()) {
3859   if (outer_ != NULL) {
3860     // State for an inline function.
3861     if (owner->ast_context()->IsTest()) {
3862       HBasicBlock* if_true = owner->graph()->CreateBasicBlock();
3863       HBasicBlock* if_false = owner->graph()->CreateBasicBlock();
3864       if_true->MarkAsInlineReturnTarget(owner->current_block());
3865       if_false->MarkAsInlineReturnTarget(owner->current_block());
3866       TestContext* outer_test_context = TestContext::cast(owner->ast_context());
3867       Expression* cond = outer_test_context->condition();
3868       // The AstContext constructor pushes itself on the context stack.  This
3869       // newed instance is the reason that AstContext can't be BASE_EMBEDDED.
3870       test_context_ = new TestContext(owner, cond, if_true, if_false);
3871     } else {
3872       function_return_ = owner->graph()->CreateBasicBlock();
3873       function_return()->MarkAsInlineReturnTarget(owner->current_block());
3874     }
3875     // Set this after possibly allocating a new TestContext above.
3876     call_context_ = owner->ast_context();
3877   }
3878
3879   // Push on the state stack.
3880   owner->set_function_state(this);
3881
3882   if (FLAG_hydrogen_track_positions) {
3883     outer_source_position_ = owner->source_position();
3884     owner->EnterInlinedSource(
3885       info->shared_info()->start_position(),
3886       inlining_id);
3887     owner->SetSourcePosition(info->shared_info()->start_position());
3888   }
3889 }
3890
3891
3892 FunctionState::~FunctionState() {
3893   delete test_context_;
3894   owner_->set_function_state(outer_);
3895
3896   if (FLAG_hydrogen_track_positions) {
3897     owner_->set_source_position(outer_source_position_);
3898     owner_->EnterInlinedSource(
3899       outer_->compilation_info()->shared_info()->start_position(),
3900       outer_->inlining_id());
3901   }
3902 }
3903
3904
3905 // Implementation of utility classes to represent an expression's context in
3906 // the AST.
3907 AstContext::AstContext(HOptimizedGraphBuilder* owner, Expression::Context kind)
3908     : owner_(owner),
3909       kind_(kind),
3910       outer_(owner->ast_context()),
3911       for_typeof_(false) {
3912   owner->set_ast_context(this);  // Push.
3913 #ifdef DEBUG
3914   ASSERT(owner->environment()->frame_type() == JS_FUNCTION);
3915   original_length_ = owner->environment()->length();
3916 #endif
3917 }
3918
3919
3920 AstContext::~AstContext() {
3921   owner_->set_ast_context(outer_);  // Pop.
3922 }
3923
3924
3925 EffectContext::~EffectContext() {
3926   ASSERT(owner()->HasStackOverflow() ||
3927          owner()->current_block() == NULL ||
3928          (owner()->environment()->length() == original_length_ &&
3929           owner()->environment()->frame_type() == JS_FUNCTION));
3930 }
3931
3932
3933 ValueContext::~ValueContext() {
3934   ASSERT(owner()->HasStackOverflow() ||
3935          owner()->current_block() == NULL ||
3936          (owner()->environment()->length() == original_length_ + 1 &&
3937           owner()->environment()->frame_type() == JS_FUNCTION));
3938 }
3939
3940
3941 void EffectContext::ReturnValue(HValue* value) {
3942   // The value is simply ignored.
3943 }
3944
3945
3946 void ValueContext::ReturnValue(HValue* value) {
3947   // The value is tracked in the bailout environment, and communicated
3948   // through the environment as the result of the expression.
3949   if (!arguments_allowed() && value->CheckFlag(HValue::kIsArguments)) {
3950     owner()->Bailout(kBadValueContextForArgumentsValue);
3951   }
3952   owner()->Push(value);
3953 }
3954
3955
3956 void TestContext::ReturnValue(HValue* value) {
3957   BuildBranch(value);
3958 }
3959
3960
3961 void EffectContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
3962   ASSERT(!instr->IsControlInstruction());
3963   owner()->AddInstruction(instr);
3964   if (instr->HasObservableSideEffects()) {
3965     owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
3966   }
3967 }
3968
3969
3970 void EffectContext::ReturnControl(HControlInstruction* instr,
3971                                   BailoutId ast_id) {
3972   ASSERT(!instr->HasObservableSideEffects());
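       // Add empty blocks on both outgoing edges to keep the graph in edge-split
       // form (a branch edge never flows directly into a join).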
3973   HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
3974   HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
3975   instr->SetSuccessorAt(0, empty_true);
3976   instr->SetSuccessorAt(1, empty_false);
3977   owner()->FinishCurrentBlock(instr);
3978   HBasicBlock* join = owner()->CreateJoin(empty_true, empty_false, ast_id);
3979   owner()->set_current_block(join);
3980 }
3981
3982
3983 void EffectContext::ReturnContinuation(HIfContinuation* continuation,
3984                                        BailoutId ast_id) {
3985   HBasicBlock* true_branch = NULL;
3986   HBasicBlock* false_branch = NULL;
3987   continuation->Continue(&true_branch, &false_branch);
3988   if (!continuation->IsTrueReachable()) {
3989     owner()->set_current_block(false_branch);
3990   } else if (!continuation->IsFalseReachable()) {
3991     owner()->set_current_block(true_branch);
3992   } else {
3993     HBasicBlock* join = owner()->CreateJoin(true_branch, false_branch, ast_id);
3994     owner()->set_current_block(join);
3995   }
3996 }
3997
3998
3999 void ValueContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
4000   ASSERT(!instr->IsControlInstruction());
4001   if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
4002     return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
4003   }
4004   owner()->AddInstruction(instr);
4005   owner()->Push(instr);
4006   if (instr->HasObservableSideEffects()) {
4007     owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
4008   }
4009 }
4010
4011
4012 void ValueContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
4013   ASSERT(!instr->HasObservableSideEffects());
4014   if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
4015     return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
4016   }
4017   HBasicBlock* materialize_false = owner()->graph()->CreateBasicBlock();
4018   HBasicBlock* materialize_true = owner()->graph()->CreateBasicBlock();
4019   instr->SetSuccessorAt(0, materialize_true);
4020   instr->SetSuccessorAt(1, materialize_false);
4021   owner()->FinishCurrentBlock(instr);
4022   owner()->set_current_block(materialize_true);
4023   owner()->Push(owner()->graph()->GetConstantTrue());
4024   owner()->set_current_block(materialize_false);
4025   owner()->Push(owner()->graph()->GetConstantFalse());
4026   HBasicBlock* join =
4027     owner()->CreateJoin(materialize_true, materialize_false, ast_id);
4028   owner()->set_current_block(join);
4029 }
4030
4031
4032 void ValueContext::ReturnContinuation(HIfContinuation* continuation,
4033                                       BailoutId ast_id) {
4034   HBasicBlock* materialize_true = NULL;
4035   HBasicBlock* materialize_false = NULL;
4036   continuation->Continue(&materialize_true, &materialize_false);
4037   if (continuation->IsTrueReachable()) {
4038     owner()->set_current_block(materialize_true);
4039     owner()->Push(owner()->graph()->GetConstantTrue());
4040     owner()->set_current_block(materialize_true);
4041   }
4042   if (continuation->IsFalseReachable()) {
4043     owner()->set_current_block(materialize_false);
4044     owner()->Push(owner()->graph()->GetConstantFalse());
4045     owner()->set_current_block(materialize_false);
4046   }
4047   if (continuation->TrueAndFalseReachable()) {
4048     HBasicBlock* join =
4049         owner()->CreateJoin(materialize_true, materialize_false, ast_id);
4050     owner()->set_current_block(join);
4051   }
4052 }
4053
4054
4055 void TestContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
4056   ASSERT(!instr->IsControlInstruction());
4057   HOptimizedGraphBuilder* builder = owner();
4058   builder->AddInstruction(instr);
4059   // We expect a simulate after every expression with side effects, though
4060   // this one isn't actually needed (and wouldn't work if it were targeted).
4061   if (instr->HasObservableSideEffects()) {
4062     builder->Push(instr);
4063     builder->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
4064     builder->Pop();
4065   }
4066   BuildBranch(instr);
4067 }
4068
4069
4070 void TestContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
4071   ASSERT(!instr->HasObservableSideEffects());
4072   HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
4073   HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
4074   instr->SetSuccessorAt(0, empty_true);
4075   instr->SetSuccessorAt(1, empty_false);
4076   owner()->FinishCurrentBlock(instr);
4077   owner()->Goto(empty_true, if_true(), owner()->function_state());
4078   owner()->Goto(empty_false, if_false(), owner()->function_state());
4079   owner()->set_current_block(NULL);
4080 }
4081
4082
4083 void TestContext::ReturnContinuation(HIfContinuation* continuation,
4084                                      BailoutId ast_id) {
4085   HBasicBlock* true_branch = NULL;
4086   HBasicBlock* false_branch = NULL;
4087   continuation->Continue(&true_branch, &false_branch);
4088   if (continuation->IsTrueReachable()) {
4089     owner()->Goto(true_branch, if_true(), owner()->function_state());
4090   }
4091   if (continuation->IsFalseReachable()) {
4092     owner()->Goto(false_branch, if_false(), owner()->function_state());
4093   }
4094   owner()->set_current_block(NULL);
4095 }
4096
4097
4098 void TestContext::BuildBranch(HValue* value) {
4099   // We expect the graph to be in edge-split form: there is no edge that
4100   // connects a branch node to a join node.  We conservatively ensure that
4101   // property by always adding an empty block on the outgoing edges of this
4102   // branch.
4103   HOptimizedGraphBuilder* builder = owner();
4104   if (value != NULL && value->CheckFlag(HValue::kIsArguments)) {
4105     builder->Bailout(kArgumentsObjectValueInATestContext);
4106   }
4107   ToBooleanStub::Types expected(condition()->to_boolean_types());
4108   ReturnControl(owner()->New<HBranch>(value, expected), BailoutId::None());
4109 }
4110
4111
4112 // HOptimizedGraphBuilder infrastructure for bailing out and checking bailouts.
4113 #define CHECK_BAILOUT(call)                     \
4114   do {                                          \
4115     call;                                       \
4116     if (HasStackOverflow()) return;             \
4117   } while (false)
4118
4119
4120 #define CHECK_ALIVE(call)                                       \
4121   do {                                                          \
4122     call;                                                       \
4123     if (HasStackOverflow() || current_block() == NULL) return;  \
4124   } while (false)
4125
4126
4127 #define CHECK_ALIVE_OR_RETURN(call, value)                            \
4128   do {                                                                \
4129     call;                                                             \
4130     if (HasStackOverflow() || current_block() == NULL) return value;  \
4131   } while (false)
4132
4133
4134 void HOptimizedGraphBuilder::Bailout(BailoutReason reason) {
4135   current_info()->set_bailout_reason(reason);
4136   SetStackOverflow();
4137 }
4138
4139
4140 void HOptimizedGraphBuilder::VisitForEffect(Expression* expr) {
4141   EffectContext for_effect(this);
4142   Visit(expr);
4143 }
4144
4145
4146 void HOptimizedGraphBuilder::VisitForValue(Expression* expr,
4147                                            ArgumentsAllowedFlag flag) {
4148   ValueContext for_value(this, flag);
4149   Visit(expr);
4150 }
4151
4152
4153 void HOptimizedGraphBuilder::VisitForTypeOf(Expression* expr) {
4154   ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
4155   for_value.set_for_typeof(true);
4156   Visit(expr);
4157 }
4158
4159
4160 void HOptimizedGraphBuilder::VisitForControl(Expression* expr,
4161                                              HBasicBlock* true_block,
4162                                              HBasicBlock* false_block) {
4163   TestContext for_test(this, expr, true_block, false_block);
4164   Visit(expr);
4165 }
4166
4167
4168 void HOptimizedGraphBuilder::VisitExpressions(
4169     ZoneList<Expression*>* exprs) {
4170   for (int i = 0; i < exprs->length(); ++i) {
4171     CHECK_ALIVE(VisitForValue(exprs->at(i)));
4172   }
4173 }
4174
4175
4176 bool HOptimizedGraphBuilder::BuildGraph() {
4177   if (current_info()->function()->is_generator()) {
4178     Bailout(kFunctionIsAGenerator);
4179     return false;
4180   }
4181   Scope* scope = current_info()->scope();
4182   if (scope->HasIllegalRedeclaration()) {
4183     Bailout(kFunctionWithIllegalRedeclaration);
4184     return false;
4185   }
4186   if (scope->calls_eval()) {
4187     Bailout(kFunctionCallsEval);
4188     return false;
4189   }
4190   SetUpScope(scope);
4191
4192   // Add an edge to the body entry.  This is warty: the graph's start
4193   // environment will be used by the Lithium translation as the initial
4194   // environment on graph entry, but it has now been mutated by the
4195   // Hydrogen translation of the instructions in the start block.  This
4196   // environment uses values which have not been defined yet.  These
4197   // Hydrogen instructions will then be replayed by the Lithium
4198   // translation, so they cannot have an environment effect.  The edge to
4199   // the body's entry block (along with some special logic for the start
4200   // block in HInstruction::InsertAfter) seals the start block from
4201   // getting unwanted instructions inserted.
4202   //
4203   // TODO(kmillikin): Fix this.  Stop mutating the initial environment.
4204   // Make the Hydrogen instructions in the initial block into Hydrogen
4205   // values (but not instructions), present in the initial environment and
4206   // not replayed by the Lithium translation.
4207   HEnvironment* initial_env = environment()->CopyWithoutHistory();
4208   HBasicBlock* body_entry = CreateBasicBlock(initial_env);
4209   Goto(body_entry);
4210   body_entry->SetJoinId(BailoutId::FunctionEntry());
4211   set_current_block(body_entry);
4212
4213   // Handle implicit declaration of the function name in named function
4214   // expressions before other declarations.
4215   if (scope->is_function_scope() && scope->function() != NULL) {
4216     VisitVariableDeclaration(scope->function());
4217   }
4218   VisitDeclarations(scope->declarations());
4219   Add<HSimulate>(BailoutId::Declarations());
4220
4221   Add<HStackCheck>(HStackCheck::kFunctionEntry);
4222
4223   VisitStatements(current_info()->function()->body());
4224   if (HasStackOverflow()) return false;
4225
4226   if (current_block() != NULL) {
4227     Add<HReturn>(graph()->GetConstantUndefined());
4228     set_current_block(NULL);
4229   }
4230
4231   // If the checksum of the number of type info changes is the same as the
4232   // last time this function was compiled, then this recompile is likely not
4233   // due to missing/inadequate type feedback, but rather too aggressive
4234   // optimization. Disable optimistic LICM in that case.
4235   Handle<Code> unoptimized_code(current_info()->shared_info()->code());
4236   ASSERT(unoptimized_code->kind() == Code::FUNCTION);
4237   Handle<TypeFeedbackInfo> type_info(
4238       TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
4239   int checksum = type_info->own_type_change_checksum();
4240   int composite_checksum = graph()->update_type_change_checksum(checksum);
4241   graph()->set_use_optimistic_licm(
4242       !type_info->matches_inlined_type_change_checksum(composite_checksum));
4243   type_info->set_inlined_type_change_checksum(composite_checksum);
4244
4245   // Perform any necessary OSR-specific cleanups or changes to the graph.
4246   osr()->FinishGraph();
4247
4248   return true;
4249 }
4250
4251
4252 bool HGraph::Optimize(BailoutReason* bailout_reason) {
4253   OrderBlocks();
4254   AssignDominators();
4255
4256   // We need to create a HConstant "zero" now so that GVN will fold every
4257   // zero-valued constant in the graph together.
4258   // The constant is needed to make idef-based bounds check work: the pass
4259   // evaluates relations with "zero" and that zero cannot be created after GVN.
4260   GetConstant0();
4261
4262 #ifdef DEBUG
4263   // Do a full verify after building the graph and computing dominators.
4264   Verify(true);
4265 #endif
4266
4267   if (FLAG_analyze_environment_liveness && maximum_environment_size() != 0) {
4268     Run<HEnvironmentLivenessAnalysisPhase>();
4269   }
4270
4271   if (!CheckConstPhiUses()) {
4272     *bailout_reason = kUnsupportedPhiUseOfConstVariable;
4273     return false;
4274   }
4275   Run<HRedundantPhiEliminationPhase>();
4276   if (!CheckArgumentsPhiUses()) {
4277     *bailout_reason = kUnsupportedPhiUseOfArguments;
4278     return false;
4279   }
4280
4281   // Find and mark unreachable code to simplify optimizations, especially gvn,
4282   // where unreachable code could unnecessarily defeat LICM.
4283   Run<HMarkUnreachableBlocksPhase>();
4284
4285   if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();
4286   if (FLAG_use_escape_analysis) Run<HEscapeAnalysisPhase>();
4287
4288   if (FLAG_load_elimination) Run<HLoadEliminationPhase>();
4289
4290   CollectPhis();
4291
4292   if (has_osr()) osr()->FinishOsrValues();
4293
4294   Run<HInferRepresentationPhase>();
4295
4296   // Remove HSimulate instructions that have turned out not to be needed
4297   // after all by folding them into the following HSimulate.
4298   // This must happen after inferring representations.
4299   Run<HMergeRemovableSimulatesPhase>();
4300
4301   Run<HMarkDeoptimizeOnUndefinedPhase>();
4302   Run<HRepresentationChangesPhase>();
4303
4304   Run<HInferTypesPhase>();
4305
4306   // Must be performed before canonicalization to ensure that Canonicalize
4307   // will not remove semantically meaningful ToInt32 operations, e.g. BIT_OR with
4308   // zero.
4309   if (FLAG_opt_safe_uint32_operations) Run<HUint32AnalysisPhase>();
4310
4311   if (FLAG_use_canonicalizing) Run<HCanonicalizePhase>();
4312
4313   if (FLAG_use_gvn) Run<HGlobalValueNumberingPhase>();
4314
4315   if (FLAG_check_elimination) Run<HCheckEliminationPhase>();
4316
4317   if (FLAG_store_elimination) Run<HStoreEliminationPhase>();
4318
4319   Run<HRangeAnalysisPhase>();
4320
4321   Run<HComputeChangeUndefinedToNaN>();
4322
4323   // Eliminate redundant stack checks on backwards branches.
4324   Run<HStackCheckEliminationPhase>();
4325
4326   if (FLAG_array_bounds_checks_elimination) Run<HBoundsCheckEliminationPhase>();
4327   if (FLAG_array_bounds_checks_hoisting) Run<HBoundsCheckHoistingPhase>();
4328   if (FLAG_array_index_dehoisting) Run<HDehoistIndexComputationsPhase>();
4329   if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();
4330
4331   RestoreActualValues();
4332
4333   // Find unreachable code a second time; GVN and other optimizations may have
4334   // made previously reachable blocks unreachable.
4335   Run<HMarkUnreachableBlocksPhase>();
4336
4337   return true;
4338 }
4339
4340
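// Rewrites the graph so that redefinitions do not survive to code generation:
// dead and purely informative definitions are deleted, and uses of the
// remaining informative definitions are redirected to their actual values.
// Called near the end of HGraph::Optimize().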
4341 void HGraph::RestoreActualValues() {
4342   HPhase phase("H_Restore actual values", this);
4343
4344   for (int block_index = 0; block_index < blocks()->length(); block_index++) {
4345     HBasicBlock* block = blocks()->at(block_index);
4346
4347 #ifdef DEBUG
4348     for (int i = 0; i < block->phis()->length(); i++) {
4349       HPhi* phi = block->phis()->at(i);
4350       ASSERT(phi->ActualValue() == phi);
4351     }
4352 #endif
4353
4354     for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
4355       HInstruction* instruction = it.Current();
4356       if (instruction->ActualValue() == instruction) continue;
4357       if (instruction->CheckFlag(HValue::kIsDead)) {
4358         // The instruction was marked as deleted but left in the graph
4359         // as a control flow dependency point for subsequent
4360         // instructions.
4361         instruction->DeleteAndReplaceWith(instruction->ActualValue());
4362       } else {
4363         ASSERT(instruction->IsInformativeDefinition());
4364         if (instruction->IsPurelyInformativeDefinition()) {
4365           instruction->DeleteAndReplaceWith(instruction->RedefinedOperand());
4366         } else {
4367           instruction->ReplaceAllUsesWith(instruction->ActualValue());
4368         }
4369       }
4370     }
4371   }
4372 }
4373
4374
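// Pops |count| values off the expression stack and materializes them as the
// inputs of a single HPushArguments instruction. The values come off the stack
// in reverse and are re-added via RemoveLast, so the instruction receives them
// in their original left-to-right order.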
4375 void HOptimizedGraphBuilder::PushArgumentsFromEnvironment(int count) {
4376   ZoneList<HValue*> arguments(count, zone());
4377   for (int i = 0; i < count; ++i) {
4378     arguments.Add(Pop(), zone());
4379   }
4380
4381   HPushArguments* push_args = New<HPushArguments>();
4382   while (!arguments.is_empty()) {
4383     push_args->AddInput(arguments.RemoveLast());
4384   }
4385   AddInstruction(push_args);
4386 }
4387
4388
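// Shared helper for call-like instructions: flushes the call's arguments from
// the simulated environment into an explicit HPushArguments before the call
// itself is added, and returns the call instruction unchanged.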
4389 template <class Instruction>
4390 HInstruction* HOptimizedGraphBuilder::PreProcessCall(Instruction* call) {
4391   PushArgumentsFromEnvironment(call->argument_count());
4392   return call;
4393 }
4394
4395
4396 void HOptimizedGraphBuilder::SetUpScope(Scope* scope) {
4397   // The first environment special is the HContext.
4398   HInstruction* context = Add<HContext>();
4399   environment()->BindContext(context);
4400
4401   // Create an arguments object containing the initial parameters. Set the
4402   // initial values of the parameters, including "this", which has index 0.
4403   ASSERT_EQ(scope->num_parameters() + 1, environment()->parameter_count());
4404   HArgumentsObject* arguments_object =
4405       New<HArgumentsObject>(environment()->parameter_count());
4406   for (int i = 0; i < environment()->parameter_count(); ++i) {
4407     HInstruction* parameter = Add<HParameter>(i);
4408     arguments_object->AddArgument(parameter, zone());
4409     environment()->Bind(i, parameter);
4410   }
4411   AddInstruction(arguments_object);
4412   graph()->SetArgumentsObject(arguments_object);
4413
4414   HConstant* undefined_constant = graph()->GetConstantUndefined();
4415   // Initialize specials and locals to undefined.
4416   for (int i = environment()->parameter_count() + 1;
4417        i < environment()->length();
4418        ++i) {
4419     environment()->Bind(i, undefined_constant);
4420   }
4421
4422   // Handle the arguments and arguments shadow variables specially (they do
4423   // not have declarations).
4424   if (scope->arguments() != NULL) {
4425     if (!scope->arguments()->IsStackAllocated()) {
4426       return Bailout(kContextAllocatedArguments);
4427     }
4428
4429     environment()->Bind(scope->arguments(),
4430                         graph()->GetArgumentsObject());
4431   }
4432 }
4433
4434
4435 void HOptimizedGraphBuilder::VisitStatements(ZoneList<Statement*>* statements) {
4436   for (int i = 0; i < statements->length(); i++) {
4437     Statement* stmt = statements->at(i);
4438     CHECK_ALIVE(Visit(stmt));
4439     if (stmt->IsJump()) break;
4440   }
4441 }
4442
4443
4444 void HOptimizedGraphBuilder::VisitBlock(Block* stmt) {
4445   ASSERT(!HasStackOverflow());
4446   ASSERT(current_block() != NULL);
4447   ASSERT(current_block()->HasPredecessor());
4448
4449   Scope* outer_scope = scope();
4450   Scope* scope = stmt->scope();
4451   BreakAndContinueInfo break_info(stmt, outer_scope);
4452
4453   { BreakAndContinueScope push(&break_info, this);
4454     if (scope != NULL) {
4455       // Load the function object.
4456       Scope* declaration_scope = scope->DeclarationScope();
4457       HInstruction* function;
4458       HValue* outer_context = environment()->context();
4459       if (declaration_scope->is_global_scope() ||
4460           declaration_scope->is_eval_scope()) {
4461         function = new(zone()) HLoadContextSlot(
4462             outer_context, Context::CLOSURE_INDEX, HLoadContextSlot::kNoCheck);
4463       } else {
4464         function = New<HThisFunction>();
4465       }
4466       AddInstruction(function);
4467       // Allocate a block context and store it to the stack frame.
4468       HInstruction* inner_context = Add<HAllocateBlockContext>(
4469           outer_context, function, scope->GetScopeInfo());
4470       HInstruction* instr = Add<HStoreFrameContext>(inner_context);
4471       if (instr->HasObservableSideEffects()) {
4472         AddSimulate(stmt->EntryId(), REMOVABLE_SIMULATE);
4473       }
4474       set_scope(scope);
4475       environment()->BindContext(inner_context);
4476       VisitDeclarations(scope->declarations());
4477       AddSimulate(stmt->DeclsId(), REMOVABLE_SIMULATE);
4478     }
4479     CHECK_BAILOUT(VisitStatements(stmt->statements()));
4480   }
4481   set_scope(outer_scope);
4482   if (scope != NULL && current_block() != NULL) {
4483     HValue* inner_context = environment()->context();
4484     HValue* outer_context = Add<HLoadNamedField>(
4485         inner_context, static_cast<HValue*>(NULL),
4486         HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
4487
4488     HInstruction* instr = Add<HStoreFrameContext>(outer_context);
4489     if (instr->HasObservableSideEffects()) {
4490       AddSimulate(stmt->ExitId(), REMOVABLE_SIMULATE);
4491     }
4492     environment()->BindContext(outer_context);
4493   }
4494   HBasicBlock* break_block = break_info.break_block();
4495   if (break_block != NULL) {
4496     if (current_block() != NULL) Goto(break_block);
4497     break_block->SetJoinId(stmt->ExitId());
4498     set_current_block(break_block);
4499   }
4500 }
4501
4502
4503 void HOptimizedGraphBuilder::VisitExpressionStatement(
4504     ExpressionStatement* stmt) {
4505   ASSERT(!HasStackOverflow());
4506   ASSERT(current_block() != NULL);
4507   ASSERT(current_block()->HasPredecessor());
4508   VisitForEffect(stmt->expression());
4509 }
4510
4511
4512 void HOptimizedGraphBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
4513   ASSERT(!HasStackOverflow());
4514   ASSERT(current_block() != NULL);
4515   ASSERT(current_block()->HasPredecessor());
4516 }
4517
4518
4519 void HOptimizedGraphBuilder::VisitIfStatement(IfStatement* stmt) {
4520   ASSERT(!HasStackOverflow());
4521   ASSERT(current_block() != NULL);
4522   ASSERT(current_block()->HasPredecessor());
4523   if (stmt->condition()->ToBooleanIsTrue()) {
4524     Add<HSimulate>(stmt->ThenId());
4525     Visit(stmt->then_statement());
4526   } else if (stmt->condition()->ToBooleanIsFalse()) {
4527     Add<HSimulate>(stmt->ElseId());
4528     Visit(stmt->else_statement());
4529   } else {
4530     HBasicBlock* cond_true = graph()->CreateBasicBlock();
4531     HBasicBlock* cond_false = graph()->CreateBasicBlock();
4532     CHECK_BAILOUT(VisitForControl(stmt->condition(), cond_true, cond_false));
4533
4534     if (cond_true->HasPredecessor()) {
4535       cond_true->SetJoinId(stmt->ThenId());
4536       set_current_block(cond_true);
4537       CHECK_BAILOUT(Visit(stmt->then_statement()));
4538       cond_true = current_block();
4539     } else {
4540       cond_true = NULL;
4541     }
4542
4543     if (cond_false->HasPredecessor()) {
4544       cond_false->SetJoinId(stmt->ElseId());
4545       set_current_block(cond_false);
4546       CHECK_BAILOUT(Visit(stmt->else_statement()));
4547       cond_false = current_block();
4548     } else {
4549       cond_false = NULL;
4550     }
4551
4552     HBasicBlock* join = CreateJoin(cond_true, cond_false, stmt->IfId());
4553     set_current_block(join);
4554   }
4555 }
4556
4557
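// Walks the chain of enclosing BreakAndContinueScopes until the one targeting
// |stmt| is found, accumulating in |drop_extra| how many expression-stack
// values must be dropped on the way out, and returns the (lazily created)
// break or continue join block for that target.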
4558 HBasicBlock* HOptimizedGraphBuilder::BreakAndContinueScope::Get(
4559     BreakableStatement* stmt,
4560     BreakType type,
4561     Scope** scope,
4562     int* drop_extra) {
4563   *drop_extra = 0;
4564   BreakAndContinueScope* current = this;
4565   while (current != NULL && current->info()->target() != stmt) {
4566     *drop_extra += current->info()->drop_extra();
4567     current = current->next();
4568   }
4569   ASSERT(current != NULL);  // Always found (unless stack is malformed).
4570   *scope = current->info()->scope();
4571
4572   if (type == BREAK) {
4573     *drop_extra += current->info()->drop_extra();
4574   }
4575
4576   HBasicBlock* block = NULL;
4577   switch (type) {
4578     case BREAK:
4579       block = current->info()->break_block();
4580       if (block == NULL) {
4581         block = current->owner()->graph()->CreateBasicBlock();
4582         current->info()->set_break_block(block);
4583       }
4584       break;
4585
4586     case CONTINUE:
4587       block = current->info()->continue_block();
4588       if (block == NULL) {
4589         block = current->owner()->graph()->CreateBasicBlock();
4590         current->info()->set_continue_block(block);
4591       }
4592       break;
4593   }
4594
4595   return block;
4596 }
4597
4598
4599 void HOptimizedGraphBuilder::VisitContinueStatement(
4600     ContinueStatement* stmt) {
4601   ASSERT(!HasStackOverflow());
4602   ASSERT(current_block() != NULL);
4603   ASSERT(current_block()->HasPredecessor());
4604   Scope* outer_scope = NULL;
4605   Scope* inner_scope = scope();
4606   int drop_extra = 0;
4607   HBasicBlock* continue_block = break_scope()->Get(
4608       stmt->target(), BreakAndContinueScope::CONTINUE,
4609       &outer_scope, &drop_extra);
4610   HValue* context = environment()->context();
4611   Drop(drop_extra);
4612   int context_pop_count = inner_scope->ContextChainLength(outer_scope);
4613   if (context_pop_count > 0) {
4614     while (context_pop_count-- > 0) {
4615       HInstruction* context_instruction = Add<HLoadNamedField>(
4616           context, static_cast<HValue*>(NULL),
4617           HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
4618       context = context_instruction;
4619     }
4620     HInstruction* instr = Add<HStoreFrameContext>(context);
4621     if (instr->HasObservableSideEffects()) {
4622       AddSimulate(stmt->target()->EntryId(), REMOVABLE_SIMULATE);
4623     }
4624     environment()->BindContext(context);
4625   }
4626
4627   Goto(continue_block);
4628   set_current_block(NULL);
4629 }
4630
4631
4632 void HOptimizedGraphBuilder::VisitBreakStatement(BreakStatement* stmt) {
4633   ASSERT(!HasStackOverflow());
4634   ASSERT(current_block() != NULL);
4635   ASSERT(current_block()->HasPredecessor());
4636   Scope* outer_scope = NULL;
4637   Scope* inner_scope = scope();
4638   int drop_extra = 0;
4639   HBasicBlock* break_block = break_scope()->Get(
4640       stmt->target(), BreakAndContinueScope::BREAK,
4641       &outer_scope, &drop_extra);
4642   HValue* context = environment()->context();
4643   Drop(drop_extra);
4644   int context_pop_count = inner_scope->ContextChainLength(outer_scope);
4645   if (context_pop_count > 0) {
4646     while (context_pop_count-- > 0) {
4647       HInstruction* context_instruction = Add<HLoadNamedField>(
4648           context, static_cast<HValue*>(NULL),
4649           HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
4650       context = context_instruction;
4651     }
4652     HInstruction* instr = Add<HStoreFrameContext>(context);
4653     if (instr->HasObservableSideEffects()) {
4654       AddSimulate(stmt->target()->ExitId(), REMOVABLE_SIMULATE);
4655     }
4656     environment()->BindContext(context);
4657   }
4658   Goto(break_block);
4659   set_current_block(NULL);
4660 }
4661
4662
4663 void HOptimizedGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
4664   ASSERT(!HasStackOverflow());
4665   ASSERT(current_block() != NULL);
4666   ASSERT(current_block()->HasPredecessor());
4667   FunctionState* state = function_state();
4668   AstContext* context = call_context();
4669   if (context == NULL) {
4670     // Not an inlined return, so an actual one.
4671     CHECK_ALIVE(VisitForValue(stmt->expression()));
4672     HValue* result = environment()->Pop();
4673     Add<HReturn>(result);
4674   } else if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
4675     // Return from an inlined construct call. In a test context the return value
4676     // will always evaluate to true; in a value context the return value needs
4677     // to be a JSObject.
4678     if (context->IsTest()) {
4679       TestContext* test = TestContext::cast(context);
4680       CHECK_ALIVE(VisitForEffect(stmt->expression()));
4681       Goto(test->if_true(), state);
4682     } else if (context->IsEffect()) {
4683       CHECK_ALIVE(VisitForEffect(stmt->expression()));
4684       Goto(function_return(), state);
4685     } else {
4686       ASSERT(context->IsValue());
4687       CHECK_ALIVE(VisitForValue(stmt->expression()));
4688       HValue* return_value = Pop();
4689       HValue* receiver = environment()->arguments_environment()->Lookup(0);
4690       HHasInstanceTypeAndBranch* typecheck =
4691           New<HHasInstanceTypeAndBranch>(return_value,
4692                                          FIRST_SPEC_OBJECT_TYPE,
4693                                          LAST_SPEC_OBJECT_TYPE);
4694       HBasicBlock* if_spec_object = graph()->CreateBasicBlock();
4695       HBasicBlock* not_spec_object = graph()->CreateBasicBlock();
4696       typecheck->SetSuccessorAt(0, if_spec_object);
4697       typecheck->SetSuccessorAt(1, not_spec_object);
4698       FinishCurrentBlock(typecheck);
4699       AddLeaveInlined(if_spec_object, return_value, state);
4700       AddLeaveInlined(not_spec_object, receiver, state);
4701     }
4702   } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
4703     // Return from an inlined setter call. The returned value is never used; the
4704     // value of an assignment is always the value of the RHS of the assignment.
4705     CHECK_ALIVE(VisitForEffect(stmt->expression()));
4706     if (context->IsTest()) {
4707       HValue* rhs = environment()->arguments_environment()->Lookup(1);
4708       context->ReturnValue(rhs);
4709     } else if (context->IsEffect()) {
4710       Goto(function_return(), state);
4711     } else {
4712       ASSERT(context->IsValue());
4713       HValue* rhs = environment()->arguments_environment()->Lookup(1);
4714       AddLeaveInlined(rhs, state);
4715     }
4716   } else {
4717     // Return from a normal inlined function. Visit the subexpression in the
4718     // expression context of the call.
4719     if (context->IsTest()) {
4720       TestContext* test = TestContext::cast(context);
4721       VisitForControl(stmt->expression(), test->if_true(), test->if_false());
4722     } else if (context->IsEffect()) {
4723       // Visit in value context and ignore the result. This is needed to keep
4724       // the environment in sync with full-codegen since some visitors (e.g.
4725       // VisitCountOperation) use the operand stack differently depending on
4726       // context.
4727       CHECK_ALIVE(VisitForValue(stmt->expression()));
4728       Pop();
4729       Goto(function_return(), state);
4730     } else {
4731       ASSERT(context->IsValue());
4732       CHECK_ALIVE(VisitForValue(stmt->expression()));
4733       AddLeaveInlined(Pop(), state);
4734     }
4735   }
4736   set_current_block(NULL);
4737 }
4738
4739
4740 void HOptimizedGraphBuilder::VisitWithStatement(WithStatement* stmt) {
4741   ASSERT(!HasStackOverflow());
4742   ASSERT(current_block() != NULL);
4743   ASSERT(current_block()->HasPredecessor());
4744   return Bailout(kWithStatement);
4745 }
4746
4747
4748 void HOptimizedGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
4749   ASSERT(!HasStackOverflow());
4750   ASSERT(current_block() != NULL);
4751   ASSERT(current_block()->HasPredecessor());
4752
4753   // We only optimize switch statements with a bounded number of clauses.
4754   const int kCaseClauseLimit = 128;
4755   ZoneList<CaseClause*>* clauses = stmt->cases();
4756   int clause_count = clauses->length();
4757   ZoneList<HBasicBlock*> body_blocks(clause_count, zone());
4758   if (clause_count > kCaseClauseLimit) {
4759     return Bailout(kSwitchStatementTooManyClauses);
4760   }
4761
4762   CHECK_ALIVE(VisitForValue(stmt->tag()));
4763   Add<HSimulate>(stmt->EntryId());
4764   HValue* tag_value = Top();
4765   Type* tag_type = stmt->tag()->bounds().lower;
4766
4767   // 1. Build all the tests, with dangling true branches.
4768   BailoutId default_id = BailoutId::None();
4769   for (int i = 0; i < clause_count; ++i) {
4770     CaseClause* clause = clauses->at(i);
4771     if (clause->is_default()) {
4772       body_blocks.Add(NULL, zone());
4773       if (default_id.IsNone()) default_id = clause->EntryId();
4774       continue;
4775     }
4776
4777     // Generate a compare and branch.
4778     CHECK_ALIVE(VisitForValue(clause->label()));
4779     HValue* label_value = Pop();
4780
4781     Type* label_type = clause->label()->bounds().lower;
4782     Type* combined_type = clause->compare_type();
4783     HControlInstruction* compare = BuildCompareInstruction(
4784         Token::EQ_STRICT, tag_value, label_value, tag_type, label_type,
4785         combined_type,
4786         ScriptPositionToSourcePosition(stmt->tag()->position()),
4787         ScriptPositionToSourcePosition(clause->label()->position()),
4788         PUSH_BEFORE_SIMULATE, clause->id());
4789
4790     HBasicBlock* next_test_block = graph()->CreateBasicBlock();
4791     HBasicBlock* body_block = graph()->CreateBasicBlock();
4792     body_blocks.Add(body_block, zone());
4793     compare->SetSuccessorAt(0, body_block);
4794     compare->SetSuccessorAt(1, next_test_block);
4795     FinishCurrentBlock(compare);
4796
4797     set_current_block(body_block);
4798     Drop(1);  // tag_value
4799
4800     set_current_block(next_test_block);
4801   }
4802
4803   // Save the current block to use for the default or to join with the
4804   // exit.
4805   HBasicBlock* last_block = current_block();
4806   Drop(1);  // tag_value
4807
4808   // 2. Loop over the clauses and the linked list of tests in lockstep,
4809   // translating the clause bodies.
4810   HBasicBlock* fall_through_block = NULL;
4811
4812   BreakAndContinueInfo break_info(stmt, scope());
4813   { BreakAndContinueScope push(&break_info, this);
4814     for (int i = 0; i < clause_count; ++i) {
4815       CaseClause* clause = clauses->at(i);
4816
4817       // Identify the block to which normal (non-fall-through) control flow
4818       // goes.
4819       HBasicBlock* normal_block = NULL;
4820       if (clause->is_default()) {
4821         if (last_block == NULL) continue;
4822         normal_block = last_block;
4823         last_block = NULL;  // Cleared to indicate we've handled it.
4824       } else {
4825         normal_block = body_blocks[i];
4826       }
4827
4828       if (fall_through_block == NULL) {
4829         set_current_block(normal_block);
4830       } else {
4831         HBasicBlock* join = CreateJoin(fall_through_block,
4832                                        normal_block,
4833                                        clause->EntryId());
4834         set_current_block(join);
4835       }
4836
4837       CHECK_BAILOUT(VisitStatements(clause->statements()));
4838       fall_through_block = current_block();
4839     }
4840   }
4841
4842   // Create an up-to-3-way join.  Use the break block if it exists since
4843   // it's already a join block.
4844   HBasicBlock* break_block = break_info.break_block();
4845   if (break_block == NULL) {
4846     set_current_block(CreateJoin(fall_through_block,
4847                                  last_block,
4848                                  stmt->ExitId()));
4849   } else {
4850     if (fall_through_block != NULL) Goto(fall_through_block, break_block);
4851     if (last_block != NULL) Goto(last_block, break_block);
4852     break_block->SetJoinId(stmt->ExitId());
4853     set_current_block(break_block);
4854   }
4855 }
4856
4857
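// Emits the per-iteration stack check at the entry of a loop body and records
// it on the loop header so that HStackCheckEliminationPhase can later remove
// it if it turns out to be redundant.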
4858 void HOptimizedGraphBuilder::VisitLoopBody(IterationStatement* stmt,
4859                                            HBasicBlock* loop_entry) {
4860   Add<HSimulate>(stmt->StackCheckId());
4861   HStackCheck* stack_check =
4862       HStackCheck::cast(Add<HStackCheck>(HStackCheck::kBackwardsBranch));
4863   ASSERT(loop_entry->IsLoopHeader());
4864   loop_entry->loop_information()->set_stack_check(stack_check);
4865   CHECK_BAILOUT(Visit(stmt->body()));
4866 }
4867
4868
4869 void HOptimizedGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
4870   ASSERT(!HasStackOverflow());
4871   ASSERT(current_block() != NULL);
4872   ASSERT(current_block()->HasPredecessor());
4873   ASSERT(current_block() != NULL);
4874   HBasicBlock* loop_entry = BuildLoopEntry(stmt);
4875
4876   BreakAndContinueInfo break_info(stmt, scope());
4877   {
4878     BreakAndContinueScope push(&break_info, this);
4879     CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
4880   }
4881   HBasicBlock* body_exit =
4882       JoinContinue(stmt, current_block(), break_info.continue_block());
4883   HBasicBlock* loop_successor = NULL;
4884   if (body_exit != NULL && !stmt->cond()->ToBooleanIsTrue()) {
4885     set_current_block(body_exit);
4886     loop_successor = graph()->CreateBasicBlock();
4887     if (stmt->cond()->ToBooleanIsFalse()) {
4888       loop_entry->loop_information()->stack_check()->Eliminate();
4889       Goto(loop_successor);
4890       body_exit = NULL;
4891     } else {
4892       // The block for a true condition, the actual predecessor block of the
4893       // back edge.
4894       body_exit = graph()->CreateBasicBlock();
4895       CHECK_BAILOUT(VisitForControl(stmt->cond(), body_exit, loop_successor));
4896     }
4897     if (body_exit != NULL && body_exit->HasPredecessor()) {
4898       body_exit->SetJoinId(stmt->BackEdgeId());
4899     } else {
4900       body_exit = NULL;
4901     }
4902     if (loop_successor->HasPredecessor()) {
4903       loop_successor->SetJoinId(stmt->ExitId());
4904     } else {
4905       loop_successor = NULL;
4906     }
4907   }
4908   HBasicBlock* loop_exit = CreateLoop(stmt,
4909                                       loop_entry,
4910                                       body_exit,
4911                                       loop_successor,
4912                                       break_info.break_block());
4913   set_current_block(loop_exit);
4914 }
4915
4916
4917 void HOptimizedGraphBuilder::VisitWhileStatement(WhileStatement* stmt) {
4918   ASSERT(!HasStackOverflow());
4919   ASSERT(current_block() != NULL);
4920   ASSERT(current_block()->HasPredecessor());
4921   ASSERT(current_block() != NULL);
4922   HBasicBlock* loop_entry = BuildLoopEntry(stmt);
4923
4924   // If the condition is constant true, do not generate a branch.
4925   HBasicBlock* loop_successor = NULL;
4926   if (!stmt->cond()->ToBooleanIsTrue()) {
4927     HBasicBlock* body_entry = graph()->CreateBasicBlock();
4928     loop_successor = graph()->CreateBasicBlock();
4929     CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
4930     if (body_entry->HasPredecessor()) {
4931       body_entry->SetJoinId(stmt->BodyId());
4932       set_current_block(body_entry);
4933     }
4934     if (loop_successor->HasPredecessor()) {
4935       loop_successor->SetJoinId(stmt->ExitId());
4936     } else {
4937       loop_successor = NULL;
4938     }
4939   }
4940
4941   BreakAndContinueInfo break_info(stmt, scope());
4942   if (current_block() != NULL) {
4943     BreakAndContinueScope push(&break_info, this);
4944     CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
4945   }
4946   HBasicBlock* body_exit =
4947       JoinContinue(stmt, current_block(), break_info.continue_block());
4948   HBasicBlock* loop_exit = CreateLoop(stmt,
4949                                       loop_entry,
4950                                       body_exit,
4951                                       loop_successor,
4952                                       break_info.break_block());
4953   set_current_block(loop_exit);
4954 }
4955
4956
4957 void HOptimizedGraphBuilder::VisitForStatement(ForStatement* stmt) {
4958   ASSERT(!HasStackOverflow());
4959   ASSERT(current_block() != NULL);
4960   ASSERT(current_block()->HasPredecessor());
4961   if (stmt->init() != NULL) {
4962     CHECK_ALIVE(Visit(stmt->init()));
4963   }
4964   ASSERT(current_block() != NULL);
4965   HBasicBlock* loop_entry = BuildLoopEntry(stmt);
4966
4967   HBasicBlock* loop_successor = NULL;
4968   if (stmt->cond() != NULL) {
4969     HBasicBlock* body_entry = graph()->CreateBasicBlock();
4970     loop_successor = graph()->CreateBasicBlock();
4971     CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
4972     if (body_entry->HasPredecessor()) {
4973       body_entry->SetJoinId(stmt->BodyId());
4974       set_current_block(body_entry);
4975     }
4976     if (loop_successor->HasPredecessor()) {
4977       loop_successor->SetJoinId(stmt->ExitId());
4978     } else {
4979       loop_successor = NULL;
4980     }
4981   }
4982
4983   BreakAndContinueInfo break_info(stmt, scope());
4984   if (current_block() != NULL) {
4985     BreakAndContinueScope push(&break_info, this);
4986     CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
4987   }
4988   HBasicBlock* body_exit =
4989       JoinContinue(stmt, current_block(), break_info.continue_block());
4990
4991   if (stmt->next() != NULL && body_exit != NULL) {
4992     set_current_block(body_exit);
4993     CHECK_BAILOUT(Visit(stmt->next()));
4994     body_exit = current_block();
4995   }
4996
4997   HBasicBlock* loop_exit = CreateLoop(stmt,
4998                                       loop_entry,
4999                                       body_exit,
5000                                       loop_successor,
5001                                       break_info.break_block());
5002   set_current_block(loop_exit);
5003 }
5004
5005
5006 void HOptimizedGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
5007   ASSERT(!HasStackOverflow());
5008   ASSERT(current_block() != NULL);
5009   ASSERT(current_block()->HasPredecessor());
5010
5011   if (!FLAG_optimize_for_in) {
5012     return Bailout(kForInStatementOptimizationIsDisabled);
5013   }
5014
5015   if (stmt->for_in_type() != ForInStatement::FAST_FOR_IN) {
5016     return Bailout(kForInStatementIsNotFastCase);
5017   }
5018
5019   if (!stmt->each()->IsVariableProxy() ||
5020       !stmt->each()->AsVariableProxy()->var()->IsStackLocal()) {
5021     return Bailout(kForInStatementWithNonLocalEachVariable);
5022   }
5023
5024   Variable* each_var = stmt->each()->AsVariableProxy()->var();
5025
5026   CHECK_ALIVE(VisitForValue(stmt->enumerable()));
5027   HValue* enumerable = Top();  // Leave enumerable at the top.
5028
5029   HInstruction* map = Add<HForInPrepareMap>(enumerable);
5030   Add<HSimulate>(stmt->PrepareId());
5031
5032   HInstruction* array = Add<HForInCacheArray>(
5033       enumerable, map, DescriptorArray::kEnumCacheBridgeCacheIndex);
5034
5035   HInstruction* enum_length = Add<HMapEnumLength>(map);
5036
5037   HInstruction* start_index = Add<HConstant>(0);
5038
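  // Keep the for-in state on the expression stack. After these pushes the
  // layout, from ExpressionStackAt(4) down to ExpressionStackAt(0), is:
  // enumerable, map, enum cache array, enum length (the loop limit) and the
  // running index.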
5039   Push(map);
5040   Push(array);
5041   Push(enum_length);
5042   Push(start_index);
5043
5044   HInstruction* index_cache = Add<HForInCacheArray>(
5045       enumerable, map, DescriptorArray::kEnumCacheBridgeIndicesCacheIndex);
5046   HForInCacheArray::cast(array)->set_index_cache(
5047       HForInCacheArray::cast(index_cache));
5048
5049   HBasicBlock* loop_entry = BuildLoopEntry(stmt);
5050
5051   HValue* index = environment()->ExpressionStackAt(0);
5052   HValue* limit = environment()->ExpressionStackAt(1);
5053
5054   // Check that we still have more keys.
5055   HCompareNumericAndBranch* compare_index =
5056       New<HCompareNumericAndBranch>(index, limit, Token::LT);
5057   compare_index->set_observed_input_representation(
5058       Representation::Smi(), Representation::Smi());
5059
5060   HBasicBlock* loop_body = graph()->CreateBasicBlock();
5061   HBasicBlock* loop_successor = graph()->CreateBasicBlock();
5062
5063   compare_index->SetSuccessorAt(0, loop_body);
5064   compare_index->SetSuccessorAt(1, loop_successor);
5065   FinishCurrentBlock(compare_index);
5066
5067   set_current_block(loop_successor);
5068   Drop(5);
5069
5070   set_current_block(loop_body);
5071
5072   HValue* key = Add<HLoadKeyed>(
5073       environment()->ExpressionStackAt(2),  // Enum cache.
5074       environment()->ExpressionStackAt(0),  // Iteration index.
5075       environment()->ExpressionStackAt(0),
5076       FAST_ELEMENTS);
5077
5078   // Check if the expected map still matches that of the enumerable.
5079   // If not, just deoptimize.
5080   Add<HCheckMapValue>(environment()->ExpressionStackAt(4),
5081                       environment()->ExpressionStackAt(3));
5082
5083   Bind(each_var, key);
5084
5085   BreakAndContinueInfo break_info(stmt, scope(), 5);
5086   {
5087     BreakAndContinueScope push(&break_info, this);
5088     CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
5089   }
5090
5091   HBasicBlock* body_exit =
5092       JoinContinue(stmt, current_block(), break_info.continue_block());
5093
5094   if (body_exit != NULL) {
5095     set_current_block(body_exit);
5096
5097     HValue* current_index = Pop();
5098     Push(AddUncasted<HAdd>(current_index, graph()->GetConstant1()));
5099     body_exit = current_block();
5100   }
5101
5102   HBasicBlock* loop_exit = CreateLoop(stmt,
5103                                       loop_entry,
5104                                       body_exit,
5105                                       loop_successor,
5106                                       break_info.break_block());
5107
5108   set_current_block(loop_exit);
5109 }
5110
5111
5112 void HOptimizedGraphBuilder::VisitForOfStatement(ForOfStatement* stmt) {
5113   ASSERT(!HasStackOverflow());
5114   ASSERT(current_block() != NULL);
5115   ASSERT(current_block()->HasPredecessor());
5116   return Bailout(kForOfStatement);
5117 }
5118
5119
5120 void HOptimizedGraphBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
5121   ASSERT(!HasStackOverflow());
5122   ASSERT(current_block() != NULL);
5123   ASSERT(current_block()->HasPredecessor());
5124   return Bailout(kTryCatchStatement);
5125 }
5126
5127
5128 void HOptimizedGraphBuilder::VisitTryFinallyStatement(
5129     TryFinallyStatement* stmt) {
5130   ASSERT(!HasStackOverflow());
5131   ASSERT(current_block() != NULL);
5132   ASSERT(current_block()->HasPredecessor());
5133   return Bailout(kTryFinallyStatement);
5134 }
5135
5136
5137 void HOptimizedGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
5138   ASSERT(!HasStackOverflow());
5139   ASSERT(current_block() != NULL);
5140   ASSERT(current_block()->HasPredecessor());
5141   return Bailout(kDebuggerStatement);
5142 }
5143
5144
5145 void HOptimizedGraphBuilder::VisitCaseClause(CaseClause* clause) {
5146   UNREACHABLE();
5147 }
5148
5149
5150 void HOptimizedGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
5151   ASSERT(!HasStackOverflow());
5152   ASSERT(current_block() != NULL);
5153   ASSERT(current_block()->HasPredecessor());
5154   Handle<SharedFunctionInfo> shared_info = expr->shared_info();
5155   if (shared_info.is_null()) {
5156     shared_info = Compiler::BuildFunctionInfo(expr, current_info()->script());
5157   }
5158   // We also have a stack overflow if the recursive compilation did.
5159   if (HasStackOverflow()) return;
5160   HFunctionLiteral* instr =
5161       New<HFunctionLiteral>(shared_info, expr->pretenure());
5162   return ast_context()->ReturnInstruction(instr, expr->id());
5163 }
5164
5165
5166 void HOptimizedGraphBuilder::VisitNativeFunctionLiteral(
5167     NativeFunctionLiteral* expr) {
5168   ASSERT(!HasStackOverflow());
5169   ASSERT(current_block() != NULL);
5170   ASSERT(current_block()->HasPredecessor());
5171   return Bailout(kNativeFunctionLiteral);
5172 }
5173
5174
5175 void HOptimizedGraphBuilder::VisitConditional(Conditional* expr) {
5176   ASSERT(!HasStackOverflow());
5177   ASSERT(current_block() != NULL);
5178   ASSERT(current_block()->HasPredecessor());
5179   HBasicBlock* cond_true = graph()->CreateBasicBlock();
5180   HBasicBlock* cond_false = graph()->CreateBasicBlock();
5181   CHECK_BAILOUT(VisitForControl(expr->condition(), cond_true, cond_false));
5182
5183   // Visit the true and false subexpressions in the same AST context as the
5184   // whole expression.
5185   if (cond_true->HasPredecessor()) {
5186     cond_true->SetJoinId(expr->ThenId());
5187     set_current_block(cond_true);
5188     CHECK_BAILOUT(Visit(expr->then_expression()));
5189     cond_true = current_block();
5190   } else {
5191     cond_true = NULL;
5192   }
5193
5194   if (cond_false->HasPredecessor()) {
5195     cond_false->SetJoinId(expr->ElseId());
5196     set_current_block(cond_false);
5197     CHECK_BAILOUT(Visit(expr->else_expression()));
5198     cond_false = current_block();
5199   } else {
5200     cond_false = NULL;
5201   }
5202
5203   if (!ast_context()->IsTest()) {
5204     HBasicBlock* join = CreateJoin(cond_true, cond_false, expr->id());
5205     set_current_block(join);
5206     if (join != NULL && !ast_context()->IsEffect()) {
5207       return ast_context()->ReturnValue(Pop());
5208     }
5209   }
5210 }
5211
5212
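// Decides whether a global variable access can be compiled as a direct load
// from or store to its property cell (kUseCell) or has to fall back to a
// generic access (kUseGeneric), e.g. for 'this', stores to read-only
// properties, or properties not held directly by the global object.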
5213 HOptimizedGraphBuilder::GlobalPropertyAccess
5214     HOptimizedGraphBuilder::LookupGlobalProperty(
5215         Variable* var, LookupResult* lookup, PropertyAccessType access_type) {
5216   if (var->is_this() || !current_info()->has_global_object()) {
5217     return kUseGeneric;
5218   }
5219   Handle<GlobalObject> global(current_info()->global_object());
5220   global->Lookup(var->name(), lookup);
5221   if (!lookup->IsNormal() ||
5222       (access_type == STORE && lookup->IsReadOnly()) ||
5223       lookup->holder() != *global) {
5224     return kUseGeneric;
5225   }
5226
5227   return kUseCell;
5228 }
5229
5230
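// Emits a chain of Context::PREVIOUS_INDEX loads that walks from the current
// context up to the context that actually holds the context-allocated
// variable |var|.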
5231 HValue* HOptimizedGraphBuilder::BuildContextChainWalk(Variable* var) {
5232   ASSERT(var->IsContextSlot());
5233   HValue* context = environment()->context();
5234   int length = scope()->ContextChainLength(var->scope());
5235   while (length-- > 0) {
5236     context = Add<HLoadNamedField>(
5237         context, static_cast<HValue*>(NULL),
5238         HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
5239   }
5240   return context;
5241 }
5242
5243
5244 void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
5245   if (expr->is_this()) {
5246     current_info()->set_this_has_uses(true);
5247   }
5248
5249   ASSERT(!HasStackOverflow());
5250   ASSERT(current_block() != NULL);
5251   ASSERT(current_block()->HasPredecessor());
5252   Variable* variable = expr->var();
5253   switch (variable->location()) {
5254     case Variable::UNALLOCATED: {
5255       if (IsLexicalVariableMode(variable->mode())) {
5256         // TODO(rossberg): should this be an ASSERT?
5257         return Bailout(kReferenceToGlobalLexicalVariable);
5258       }
5259       // Handle known global constants like 'undefined' specially to avoid a
5260       // load from a global cell for them.
5261       Handle<Object> constant_value =
5262           isolate()->factory()->GlobalConstantFor(variable->name());
5263       if (!constant_value.is_null()) {
5264         HConstant* instr = New<HConstant>(constant_value);
5265         return ast_context()->ReturnInstruction(instr, expr->id());
5266       }
5267
5268       LookupResult lookup(isolate());
5269       GlobalPropertyAccess type = LookupGlobalProperty(variable, &lookup, LOAD);
5270
5271       if (type == kUseCell &&
5272           current_info()->global_object()->IsAccessCheckNeeded()) {
5273         type = kUseGeneric;
5274       }
5275
5276       if (type == kUseCell) {
5277         Handle<GlobalObject> global(current_info()->global_object());
5278         Handle<PropertyCell> cell(global->GetPropertyCell(&lookup));
5279         if (cell->type()->IsConstant()) {
5280           PropertyCell::AddDependentCompilationInfo(cell, top_info());
5281           Handle<Object> constant_object = cell->type()->AsConstant()->Value();
5282           if (constant_object->IsConsString()) {
5283             constant_object =
5284                 String::Flatten(Handle<String>::cast(constant_object));
5285           }
5286           HConstant* constant = New<HConstant>(constant_object);
5287           return ast_context()->ReturnInstruction(constant, expr->id());
5288         } else {
5289           HLoadGlobalCell* instr =
5290               New<HLoadGlobalCell>(cell, lookup.GetPropertyDetails());
5291           return ast_context()->ReturnInstruction(instr, expr->id());
5292         }
5293       } else {
5294         HValue* global_object = Add<HLoadNamedField>(
5295             context(), static_cast<HValue*>(NULL),
5296             HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
5297         HLoadGlobalGeneric* instr =
5298             New<HLoadGlobalGeneric>(global_object,
5299                                     variable->name(),
5300                                     ast_context()->is_for_typeof());
5301         return ast_context()->ReturnInstruction(instr, expr->id());
5302       }
5303     }
5304
5305     case Variable::PARAMETER:
5306     case Variable::LOCAL: {
5307       HValue* value = LookupAndMakeLive(variable);
5308       if (value == graph()->GetConstantHole()) {
5309         ASSERT(IsDeclaredVariableMode(variable->mode()) &&
5310                variable->mode() != VAR);
5311         return Bailout(kReferenceToUninitializedVariable);
5312       }
5313       return ast_context()->ReturnValue(value);
5314     }
5315
5316     case Variable::CONTEXT: {
5317       HValue* context = BuildContextChainWalk(variable);
5318       HLoadContextSlot::Mode mode;
5319       switch (variable->mode()) {
5320         case LET:
5321         case CONST:
5322           mode = HLoadContextSlot::kCheckDeoptimize;
5323           break;
5324         case CONST_LEGACY:
5325           mode = HLoadContextSlot::kCheckReturnUndefined;
5326           break;
5327         default:
5328           mode = HLoadContextSlot::kNoCheck;
5329           break;
5330       }
5331       HLoadContextSlot* instr =
5332           new(zone()) HLoadContextSlot(context, variable->index(), mode);
5333       return ast_context()->ReturnInstruction(instr, expr->id());
5334     }
5335
5336     case Variable::LOOKUP:
5337       return Bailout(kReferenceToAVariableWhichRequiresDynamicLookup);
5338   }
5339 }
5340
5341
5342 void HOptimizedGraphBuilder::VisitLiteral(Literal* expr) {
5343   ASSERT(!HasStackOverflow());
5344   ASSERT(current_block() != NULL);
5345   ASSERT(current_block()->HasPredecessor());
5346   HConstant* instr = New<HConstant>(expr->value());
5347   return ast_context()->ReturnInstruction(instr, expr->id());
5348 }
5349
5350
5351 void HOptimizedGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
5352   ASSERT(!HasStackOverflow());
5353   ASSERT(current_block() != NULL);
5354   ASSERT(current_block()->HasPredecessor());
5355   Handle<JSFunction> closure = function_state()->compilation_info()->closure();
5356   Handle<FixedArray> literals(closure->literals());
5357   HRegExpLiteral* instr = New<HRegExpLiteral>(literals,
5358                                               expr->pattern(),
5359                                               expr->flags(),
5360                                               expr->literal_index());
5361   return ast_context()->ReturnInstruction(instr, expr->id());
5362 }
5363
5364
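// Property accesses can be inlined on number and string primitives, and on
// JSObject maps that are neither dictionary-mode nor equipped with a named
// interceptor.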
5365 static bool CanInlinePropertyAccess(Type* type) {
5366   if (type->Is(Type::NumberOrString())) return true;
5367   if (!type->IsClass()) return false;
5368   Handle<Map> map = type->AsClass()->Map();
5369   return map->IsJSObjectMap() &&
5370       !map->is_dictionary_map() &&
5371       !map->has_named_interceptor();
5372 }
5373
5374
5375 // Determines whether the given array or object literal boilerplate satisfies
5376 // all limits to be considered for fast deep-copying, counting the properties
5377 // and elements of all objects that are part of the boilerplate's object graph.
5378 static bool IsFastLiteral(Handle<JSObject> boilerplate,
5379                           int max_depth,
5380                           int* max_properties) {
5381   if (boilerplate->map()->is_deprecated() &&
5382       !JSObject::TryMigrateInstance(boilerplate)) {
5383     return false;
5384   }
5385
5386   ASSERT(max_depth >= 0 && *max_properties >= 0);
5387   if (max_depth == 0) return false;
5388
5389   Isolate* isolate = boilerplate->GetIsolate();
5390   Handle<FixedArrayBase> elements(boilerplate->elements());
5391   if (elements->length() > 0 &&
5392       elements->map() != isolate->heap()->fixed_cow_array_map()) {
5393     if (boilerplate->HasFastObjectElements()) {
5394       Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
5395       int length = elements->length();
5396       for (int i = 0; i < length; i++) {
5397         if ((*max_properties)-- == 0) return false;
5398         Handle<Object> value(fast_elements->get(i), isolate);
5399         if (value->IsJSObject()) {
5400           Handle<JSObject> value_object = Handle<JSObject>::cast(value);
5401           if (!IsFastLiteral(value_object,
5402                              max_depth - 1,
5403                              max_properties)) {
5404             return false;
5405           }
5406         }
5407       }
5408     } else if (!boilerplate->HasFastDoubleElements()) {
5409       return false;
5410     }
5411   }
5412
5413   Handle<FixedArray> properties(boilerplate->properties());
5414   if (properties->length() > 0) {
5415     return false;
5416   } else {
5417     Handle<DescriptorArray> descriptors(
5418         boilerplate->map()->instance_descriptors());
5419     int limit = boilerplate->map()->NumberOfOwnDescriptors();
5420     for (int i = 0; i < limit; i++) {
5421       PropertyDetails details = descriptors->GetDetails(i);
5422       if (details.type() != FIELD) continue;
5423       int index = descriptors->GetFieldIndex(i);
5424       if ((*max_properties)-- == 0) return false;
5425       Handle<Object> value(boilerplate->InObjectPropertyAt(index), isolate);
5426       if (value->IsJSObject()) {
5427         Handle<JSObject> value_object = Handle<JSObject>::cast(value);
5428         if (!IsFastLiteral(value_object,
5429                            max_depth - 1,
5430                            max_properties)) {
5431           return false;
5432         }
5433       }
5434     }
5435   }
5436   return true;
5437 }
5438
5439
5440 void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
5441   ASSERT(!HasStackOverflow());
5442   ASSERT(current_block() != NULL);
5443   ASSERT(current_block()->HasPredecessor());
5444   expr->BuildConstantProperties(isolate());
5445   Handle<JSFunction> closure = function_state()->compilation_info()->closure();
5446   HInstruction* literal;
5447
5448   // Check whether to use fast or slow deep-copying for boilerplate.
5449   int max_properties = kMaxFastLiteralProperties;
5450   Handle<Object> literals_cell(closure->literals()->get(expr->literal_index()),
5451                                isolate());
5452   Handle<AllocationSite> site;
5453   Handle<JSObject> boilerplate;
5454   if (!literals_cell->IsUndefined()) {
5455     // Retrieve the boilerplate from the allocation site.
5456     site = Handle<AllocationSite>::cast(literals_cell);
5457     boilerplate = Handle<JSObject>(JSObject::cast(site->transition_info()),
5458                                    isolate());
5459   }
5460
5461   if (!boilerplate.is_null() &&
5462       IsFastLiteral(boilerplate, kMaxFastLiteralDepth, &max_properties)) {
5463     AllocationSiteUsageContext usage_context(isolate(), site, false);
5464     usage_context.EnterNewScope();
5465     literal = BuildFastLiteral(boilerplate, &usage_context);
5466     usage_context.ExitScope(site, boilerplate);
5467   } else {
5468     NoObservableSideEffectsScope no_effects(this);
5469     Handle<FixedArray> closure_literals(closure->literals(), isolate());
5470     Handle<FixedArray> constant_properties = expr->constant_properties();
5471     int literal_index = expr->literal_index();
5472     int flags = expr->fast_elements()
5473         ? ObjectLiteral::kFastElements : ObjectLiteral::kNoFlags;
5474     flags |= expr->has_function()
5475         ? ObjectLiteral::kHasFunction : ObjectLiteral::kNoFlags;
5476
5477     Add<HPushArguments>(Add<HConstant>(closure_literals),
5478                         Add<HConstant>(literal_index),
5479                         Add<HConstant>(constant_properties),
5480                         Add<HConstant>(flags));
5481
5482     // TODO(mvstanton): Add a flag to turn off creation of any
5483     // AllocationMementos for this call: we are in crankshaft and should have
5484     // learned enough about transition behavior to stop emitting mementos.
5485     Runtime::FunctionId function_id = Runtime::kHiddenCreateObjectLiteral;
5486     literal = Add<HCallRuntime>(isolate()->factory()->empty_string(),
5487                                 Runtime::FunctionForId(function_id),
5488                                 4);
5489   }
5490
5491   // The object is expected in the bailout environment during computation
5492   // of the property values and is the value of the entire expression.
5493   Push(literal);
5494
5495   expr->CalculateEmitStore(zone());
5496
5497   for (int i = 0; i < expr->properties()->length(); i++) {
5498     ObjectLiteral::Property* property = expr->properties()->at(i);
5499     if (property->IsCompileTimeValue()) continue;
5500
5501     Literal* key = property->key();
5502     Expression* value = property->value();
5503
5504     switch (property->kind()) {
5505       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
5506         ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
5507         // Fall through.
5508       case ObjectLiteral::Property::COMPUTED:
5509         if (key->value()->IsInternalizedString()) {
5510           if (property->emit_store()) {
5511             CHECK_ALIVE(VisitForValue(value));
5512             HValue* value = Pop();
5513             Handle<Map> map = property->GetReceiverType();
5514             Handle<String> name = property->key()->AsPropertyName();
5515             HInstruction* store;
5516             if (map.is_null()) {
5517               // If we don't know the monomorphic type, do a generic store.
5518               CHECK_ALIVE(store = BuildNamedGeneric(
5519                   STORE, literal, name, value));
5520             } else {
5521               PropertyAccessInfo info(
5522                   this, STORE, ToType(map), name, map->instance_type());
5523               if (info.CanAccessMonomorphic()) {
5524                 HValue* checked_literal = Add<HCheckMaps>(literal, map);
5525                 ASSERT(!info.lookup()->IsPropertyCallbacks());
5526                 store = BuildMonomorphicAccess(
5527                     &info, literal, checked_literal, value,
5528                     BailoutId::None(), BailoutId::None());
5529               } else {
5530                 CHECK_ALIVE(store = BuildNamedGeneric(
5531                     STORE, literal, name, value));
5532               }
5533             }
5534             AddInstruction(store);
5535             if (store->HasObservableSideEffects()) {
5536               Add<HSimulate>(key->id(), REMOVABLE_SIMULATE);
5537             }
5538           } else {
5539             CHECK_ALIVE(VisitForEffect(value));
5540           }
5541           break;
5542         }
5543         // Fall through.
5544       case ObjectLiteral::Property::PROTOTYPE:
5545       case ObjectLiteral::Property::SETTER:
5546       case ObjectLiteral::Property::GETTER:
5547         return Bailout(kObjectLiteralWithComplexProperty);
5548       default: UNREACHABLE();
5549     }
5550   }
5551
5552   if (expr->has_function()) {
5553     // Return the result of the transformation to fast properties
5554     // instead of the original since this operation changes the map
5555     // of the object. This makes sure that the original object won't
5556     // be used by other optimized code before it is transformed
5557     // (e.g. because of code motion).
5558     HToFastProperties* result = Add<HToFastProperties>(Pop());
5559     return ast_context()->ReturnValue(result);
5560   } else {
5561     return ast_context()->ReturnValue(Pop());
5562   }
5563 }
5564
5565
5566 void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
5567   ASSERT(!HasStackOverflow());
5568   ASSERT(current_block() != NULL);
5569   ASSERT(current_block()->HasPredecessor());
5570   expr->BuildConstantElements(isolate());
5571   ZoneList<Expression*>* subexprs = expr->values();
5572   int length = subexprs->length();
5573   HInstruction* literal;
5574
5575   Handle<AllocationSite> site;
5576   Handle<FixedArray> literals(environment()->closure()->literals(), isolate());
5577   bool uninitialized = false;
5578   Handle<Object> literals_cell(literals->get(expr->literal_index()),
5579                                isolate());
5580   Handle<JSObject> boilerplate_object;
5581   if (literals_cell->IsUndefined()) {
5582     uninitialized = true;
5583     Handle<Object> raw_boilerplate;
5584     ASSIGN_RETURN_ON_EXCEPTION_VALUE(
5585         isolate(), raw_boilerplate,
5586         Runtime::CreateArrayLiteralBoilerplate(
5587             isolate(), literals, expr->constant_elements()),
5588         Bailout(kArrayBoilerplateCreationFailed));
5589
5590     boilerplate_object = Handle<JSObject>::cast(raw_boilerplate);
5591     AllocationSiteCreationContext creation_context(isolate());
5592     site = creation_context.EnterNewScope();
5593     if (JSObject::DeepWalk(boilerplate_object, &creation_context).is_null()) {
5594       return Bailout(kArrayBoilerplateCreationFailed);
5595     }
5596     creation_context.ExitScope(site, boilerplate_object);
5597     literals->set(expr->literal_index(), *site);
5598
5599     if (boilerplate_object->elements()->map() ==
5600         isolate()->heap()->fixed_cow_array_map()) {
5601       isolate()->counters()->cow_arrays_created_runtime()->Increment();
5602     }
5603   } else {
5604     ASSERT(literals_cell->IsAllocationSite());
5605     site = Handle<AllocationSite>::cast(literals_cell);
5606     boilerplate_object = Handle<JSObject>(
5607         JSObject::cast(site->transition_info()), isolate());
5608   }
5609
5610   ASSERT(!boilerplate_object.is_null());
5611   ASSERT(site->SitePointsToLiteral());
5612
5613   ElementsKind boilerplate_elements_kind =
5614       boilerplate_object->GetElementsKind();
5615
5616   // Check whether to use fast or slow deep-copying for boilerplate.
5617   int max_properties = kMaxFastLiteralProperties;
5618   if (IsFastLiteral(boilerplate_object,
5619                     kMaxFastLiteralDepth,
5620                     &max_properties)) {
5621     AllocationSiteUsageContext usage_context(isolate(), site, false);
5622     usage_context.EnterNewScope();
5623     literal = BuildFastLiteral(boilerplate_object, &usage_context);
5624     usage_context.ExitScope(site, boilerplate_object);
5625   } else {
5626     NoObservableSideEffectsScope no_effects(this);
5627     // The boilerplate already exists and constant elements are never accessed,
5628     // so pass an empty fixed array to the runtime function instead.
5629     Handle<FixedArray> constants = isolate()->factory()->empty_fixed_array();
5630     int literal_index = expr->literal_index();
5631     int flags = expr->depth() == 1
5632         ? ArrayLiteral::kShallowElements
5633         : ArrayLiteral::kNoFlags;
5634     flags |= ArrayLiteral::kDisableMementos;
5635
5636     Add<HPushArguments>(Add<HConstant>(literals),
5637                         Add<HConstant>(literal_index),
5638                         Add<HConstant>(constants),
5639                         Add<HConstant>(flags));
5640
5641     // TODO(mvstanton): Consider a flag to turn off creation of any
5642     // AllocationMementos for this call: we are in crankshaft and should have
5643     // learned enough about transition behavior to stop emitting mementos.
5644     Runtime::FunctionId function_id = Runtime::kHiddenCreateArrayLiteral;
5645     literal = Add<HCallRuntime>(isolate()->factory()->empty_string(),
5646                                 Runtime::FunctionForId(function_id),
5647                                 4);
5648
5649     // De-opt if elements kind changed from boilerplate_elements_kind.
5650     Handle<Map> map = Handle<Map>(boilerplate_object->map(), isolate());
5651     literal = Add<HCheckMaps>(literal, map);
5652   }
5653
5654   // The array is expected in the bailout environment during computation
5655   // of the property values and is the value of the entire expression.
5656   Push(literal);
5657   // The literal index is on the stack, too.
5658   Push(Add<HConstant>(expr->literal_index()));
5659
5660   HInstruction* elements = NULL;
5661
5662   for (int i = 0; i < length; i++) {
5663     Expression* subexpr = subexprs->at(i);
5664     // If the subexpression is a literal or a simple materialized literal, it
5665     // is already set in the cloned array.
5666     if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
5667
5668     CHECK_ALIVE(VisitForValue(subexpr));
5669     HValue* value = Pop();
5670     if (!Smi::IsValid(i)) return Bailout(kNonSmiKeyInArrayLiteral);
5671
5672     elements = AddLoadElements(literal);
5673
5674     HValue* key = Add<HConstant>(i);
5675
5676     switch (boilerplate_elements_kind) {
5677       case FAST_SMI_ELEMENTS:
5678       case FAST_HOLEY_SMI_ELEMENTS:
5679       case FAST_ELEMENTS:
5680       case FAST_HOLEY_ELEMENTS:
5681       case FAST_DOUBLE_ELEMENTS:
5682       case FAST_HOLEY_DOUBLE_ELEMENTS: {
5683         HStoreKeyed* instr = Add<HStoreKeyed>(elements, key, value,
5684                                               boilerplate_elements_kind);
5685         instr->SetUninitialized(uninitialized);
5686         break;
5687       }
5688       default:
5689         UNREACHABLE();
5690         break;
5691     }
5692
5693     Add<HSimulate>(expr->GetIdForElement(i));
5694   }
5695
5696   Drop(1);  // array literal index
5697   return ast_context()->ReturnValue(Pop());
5698 }
5699
5700
5701 HCheckMaps* HOptimizedGraphBuilder::AddCheckMap(HValue* object,
5702                                                 Handle<Map> map) {
5703   BuildCheckHeapObject(object);
5704   return Add<HCheckMaps>(object, map);
5705 }
5706
5707
5708 HInstruction* HOptimizedGraphBuilder::BuildLoadNamedField(
5709     PropertyAccessInfo* info,
5710     HValue* checked_object) {
5711   // See if this is a load of an immutable property.
5712   if (checked_object->ActualValue()->IsConstant() &&
5713       info->lookup()->IsCacheable() &&
5714       info->lookup()->IsReadOnly() && info->lookup()->IsDontDelete()) {
5715     Handle<Object> object(
5716         HConstant::cast(checked_object->ActualValue())->handle(isolate()));
5717
5718     if (object->IsJSObject()) {
5719       LookupResult lookup(isolate());
5720       Handle<JSObject>::cast(object)->Lookup(info->name(), &lookup);
5721       Handle<Object> value(lookup.GetLazyValue(), isolate());
5722
5723       if (!value->IsTheHole()) {
5724         return New<HConstant>(value);
5725       }
5726     }
5727   }
5728
5729   HObjectAccess access = info->access();
5730   if (access.representation().IsDouble()) {
5731     // Load the heap number.
5732     checked_object = Add<HLoadNamedField>(
5733         checked_object, static_cast<HValue*>(NULL),
5734         access.WithRepresentation(Representation::Tagged()));
5735     // Load the double value from it.
5736     access = HObjectAccess::ForHeapNumberValue();
5737   }
5738
5739   SmallMapList* map_list = info->field_maps();
5740   if (map_list->length() == 0) {
5741     return New<HLoadNamedField>(checked_object, checked_object, access);
5742   }
5743
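  // The field's type names one or more stable maps (collected by
  // LoadFieldMaps); attach them to the load so downstream code knows the
  // possible maps of the loaded value.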
5744   UniqueSet<Map>* maps = new(zone()) UniqueSet<Map>(map_list->length(), zone());
5745   for (int i = 0; i < map_list->length(); ++i) {
5746     maps->Add(Unique<Map>::CreateImmovable(map_list->at(i)), zone());
5747   }
5748   return New<HLoadNamedField>(
5749       checked_object, checked_object, access, maps, info->field_type());
5750 }
5751
5752
5753 HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField(
5754     PropertyAccessInfo* info,
5755     HValue* checked_object,
5756     HValue* value) {
5757   bool transition_to_field = info->lookup()->IsTransition();
5758   // TODO(verwaest): Move this logic into PropertyAccessInfo.
5759   HObjectAccess field_access = info->access();
5760
5761   HStoreNamedField* instr;
5762   if (field_access.representation().IsDouble()) {
5763     HObjectAccess heap_number_access =
5764         field_access.WithRepresentation(Representation::Tagged());
5765     if (transition_to_field) {
5766       // The store requires a mutable HeapNumber to be allocated.
5767       NoObservableSideEffectsScope no_side_effects(this);
5768       HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);
5769
5770       // TODO(hpayer): Allocation site pretenuring support.
5771       HInstruction* heap_number = Add<HAllocate>(heap_number_size,
5772           HType::HeapObject(),
5773           NOT_TENURED,
5774           HEAP_NUMBER_TYPE);
5775       AddStoreMapConstant(heap_number, isolate()->factory()->heap_number_map());
5776       Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
5777                             value);
5778       instr = New<HStoreNamedField>(checked_object->ActualValue(),
5779                                     heap_number_access,
5780                                     heap_number);
5781     } else {
5782       // Already holds a HeapNumber; load the box and write its value field.
5783       HInstruction* heap_number = Add<HLoadNamedField>(
5784           checked_object, static_cast<HValue*>(NULL), heap_number_access);
5785       instr = New<HStoreNamedField>(heap_number,
5786                                     HObjectAccess::ForHeapNumberValue(),
5787                                     value, STORE_TO_INITIALIZED_ENTRY);
5788     }
5789   } else {
5790     if (field_access.representation().IsHeapObject()) {
5791       BuildCheckHeapObject(value);
5792     }
5793
5794     if (!info->field_maps()->is_empty()) {
5795       ASSERT(field_access.representation().IsHeapObject());
5796       value = Add<HCheckMaps>(value, info->field_maps());
5797     }
5798
5799     // This is a normal store.
5800     instr = New<HStoreNamedField>(
5801         checked_object->ActualValue(), field_access, value,
5802         transition_to_field ? INITIALIZING_STORE : STORE_TO_INITIALIZED_ENTRY);
5803   }
5804
5805   if (transition_to_field) {
5806     Handle<Map> transition(info->transition());
5807     ASSERT(!transition->is_deprecated());
5808     instr->SetTransition(Add<HConstant>(transition));
5809   }
5810   return instr;
5811 }
5812
5813
5814 bool HOptimizedGraphBuilder::PropertyAccessInfo::IsCompatible(
5815     PropertyAccessInfo* info) {
5816   if (!CanInlinePropertyAccess(type_)) return false;
5817
5818   // Currently only handle Type::Number as a polymorphic case.
5819   // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
5820   // instruction.
5821   if (type_->Is(Type::Number())) return false;
5822
5823   // Values are only compatible for monomorphic load if they all behave the same
5824   // regarding value wrappers.
5825   if (type_->Is(Type::NumberOrString())) {
5826     if (!info->type_->Is(Type::NumberOrString())) return false;
5827   } else {
5828     if (info->type_->Is(Type::NumberOrString())) return false;
5829   }
5830
5831   if (!LookupDescriptor()) return false;
5832
5833   if (!lookup_.IsFound()) {
5834     return (!info->lookup_.IsFound() || info->has_holder()) &&
5835         map()->prototype() == info->map()->prototype();
5836   }
5837
5838   // Mismatch if the other access info found the property in the prototype
5839   // chain.
5840   if (info->has_holder()) return false;
5841
5842   if (lookup_.IsPropertyCallbacks()) {
5843     return accessor_.is_identical_to(info->accessor_) &&
5844         api_holder_.is_identical_to(info->api_holder_);
5845   }
5846
5847   if (lookup_.IsConstant()) {
5848     return constant_.is_identical_to(info->constant_);
5849   }
5850
5851   ASSERT(lookup_.IsField());
5852   if (!info->lookup_.IsField()) return false;
5853
5854   Representation r = access_.representation();
5855   if (IsLoad()) {
5856     if (!info->access_.representation().IsCompatibleForLoad(r)) return false;
5857   } else {
5858     if (!info->access_.representation().IsCompatibleForStore(r)) return false;
5859   }
5860   if (info->access_.offset() != access_.offset()) return false;
5861   if (info->access_.IsInobject() != access_.IsInobject()) return false;
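  // For loads, merge the field maps of the two accesses: an empty set means
  // the loaded value is unconstrained, otherwise take the union. Stores,
  // handled below, must agree on their field maps exactly.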
5862   if (IsLoad()) {
5863     if (field_maps_.is_empty()) {
5864       info->field_maps_.Clear();
5865     } else if (!info->field_maps_.is_empty()) {
5866       for (int i = 0; i < field_maps_.length(); ++i) {
5867         info->field_maps_.AddMapIfMissing(field_maps_.at(i), info->zone());
5868       }
5869       info->field_maps_.Sort();
5870     }
5871   } else {
5872     // We can only merge stores that agree on their field maps. The comparison
5873     // below is safe, since we keep the field maps sorted.
5874     if (field_maps_.length() != info->field_maps_.length()) return false;
5875     for (int i = 0; i < field_maps_.length(); ++i) {
5876       if (!field_maps_.at(i).is_identical_to(info->field_maps_.at(i))) {
5877         return false;
5878       }
5879     }
5880   }
5881   info->GeneralizeRepresentation(r);
5882   info->field_type_ = info->field_type_.Combine(field_type_);
5883   return true;
5884 }
5885
5886
5887 bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupDescriptor() {
5888   if (!type_->IsClass()) return true;
5889   map()->LookupDescriptor(NULL, *name_, &lookup_);
5890   return LoadResult(map());
5891 }
5892
5893
5894 bool HOptimizedGraphBuilder::PropertyAccessInfo::LoadResult(Handle<Map> map) {
5895   if (!IsLoad() && lookup_.IsProperty() &&
5896       (lookup_.IsReadOnly() || !lookup_.IsCacheable())) {
5897     return false;
5898   }
5899
5900   if (lookup_.IsField()) {
5901     // Construct the object field access.
5902     access_ = HObjectAccess::ForField(map, &lookup_, name_);
5903
5904     // Load field map for heap objects.
5905     LoadFieldMaps(map);
5906   } else if (lookup_.IsPropertyCallbacks()) {
5907     Handle<Object> callback(lookup_.GetValueFromMap(*map), isolate());
5908     if (!callback->IsAccessorPair()) return false;
5909     Object* raw_accessor = IsLoad()
5910         ? Handle<AccessorPair>::cast(callback)->getter()
5911         : Handle<AccessorPair>::cast(callback)->setter();
5912     if (!raw_accessor->IsJSFunction()) return false;
5913     Handle<JSFunction> accessor = handle(JSFunction::cast(raw_accessor));
5914     if (accessor->shared()->IsApiFunction()) {
5915       CallOptimization call_optimization(accessor);
5916       if (call_optimization.is_simple_api_call()) {
5917         CallOptimization::HolderLookup holder_lookup;
5918         Handle<Map> receiver_map = this->map();
5919         api_holder_ = call_optimization.LookupHolderOfExpectedType(
5920             receiver_map, &holder_lookup);
5921       }
5922     }
5923     accessor_ = accessor;
5924   } else if (lookup_.IsConstant()) {
5925     constant_ = handle(lookup_.GetConstantFromMap(*map), isolate());
5926   }
5927
5928   return true;
5929 }
5930
5931
5932 void HOptimizedGraphBuilder::PropertyAccessInfo::LoadFieldMaps(
5933     Handle<Map> map) {
5934   // Clear any previously collected field maps/type.
5935   field_maps_.Clear();
5936   field_type_ = HType::Tagged();
5937
5938   // Figure out the field type from the accessor map.
5939   Handle<HeapType> field_type(lookup_.GetFieldTypeFromMap(*map), isolate());
5940
5941   // Collect the (stable) maps from the field type.
5942   int num_field_maps = field_type->NumClasses();
5943   if (num_field_maps == 0) return;
5944   ASSERT(access_.representation().IsHeapObject());
5945   field_maps_.Reserve(num_field_maps, zone());
5946   HeapType::Iterator<Map> it = field_type->Classes();
5947   while (!it.Done()) {
5948     Handle<Map> field_map = it.Current();
5949     if (!field_map->is_stable()) {
5950       field_maps_.Clear();
5951       return;
5952     }
5953     field_maps_.Add(field_map, zone());
5954     it.Advance();
5955   }
5956   field_maps_.Sort();
5957   ASSERT_EQ(num_field_maps, field_maps_.length());
5958
5959   // Determine field HType from field HeapType.
5960   field_type_ = HType::FromType<HeapType>(field_type);
5961   ASSERT(field_type_.IsHeapObject());
5962
5963   // Add dependency on the map that introduced the field.
5964   Map::AddDependentCompilationInfo(
5965       handle(lookup_.GetFieldOwnerFromMap(*map), isolate()),
5966       DependentCode::kFieldTypeGroup, top_info());
5967 }
5968
5969
5970 bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupInPrototypes() {
5971   Handle<Map> map = this->map();
5972
5973   while (map->prototype()->IsJSObject()) {
5974     holder_ = handle(JSObject::cast(map->prototype()));
5975     if (holder_->map()->is_deprecated()) {
5976       JSObject::TryMigrateInstance(holder_);
5977     }
5978     map = Handle<Map>(holder_->map());
5979     if (!CanInlinePropertyAccess(ToType(map))) {
5980       lookup_.NotFound();
5981       return false;
5982     }
5983     map->LookupDescriptor(*holder_, *name_, &lookup_);
5984     if (lookup_.IsFound()) return LoadResult(map);
5985   }
5986   lookup_.NotFound();
5987   return true;
5988 }
5989
5990
5991 bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessMonomorphic() {
5992   if (IsSIMD128PropertyCallback() &&
5993       CpuFeatures::SupportsSIMD128InCrankshaft()) {
5994     return true;
5995   }
5996   if (!CanInlinePropertyAccess(type_)) return false;
5997   if (IsJSObjectFieldAccessor()) return IsLoad();
5998   if (!LookupDescriptor()) return false;
5999   if (lookup_.IsFound()) {
6000     if (IsLoad()) return true;
6001     return !lookup_.IsReadOnly() && lookup_.IsCacheable();
6002   }
6003   if (!LookupInPrototypes()) return false;
6004   if (IsLoad()) return true;
6005
6006   if (lookup_.IsPropertyCallbacks()) return true;
6007   Handle<Map> map = this->map();
6008   map->LookupTransition(NULL, *name_, &lookup_);
6009   if (lookup_.IsTransitionToField() && map->unused_property_fields() > 0) {
6010     // Construct the object field access.
6011     access_ = HObjectAccess::ForField(map, &lookup_, name_);
6012
6013     // Load field map for heap objects.
6014     LoadFieldMaps(transition());
6015     return true;
6016   }
6017   return false;
6018 }
6019
6020
6021 bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessAsMonomorphic(
6022     SmallMapList* types) {
6023   ASSERT(type_->Is(ToType(types->first())));
6024   if (!CanAccessMonomorphic()) return false;
6025   STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism);
6026   if (types->length() > kMaxLoadPolymorphism) return false;
6027
6028   if (IsSIMD128PropertyCallback() &&
6029       CpuFeatures::SupportsSIMD128InCrankshaft()) {
6030     for (int i = 1; i < types->length(); ++i) {
6031       if (types->at(i)->instance_type() == types->first()->instance_type()) {
6032         return false;
6033       }
6034     }
6035     return true;
6036   }
6037
6038   HObjectAccess access = HObjectAccess::ForMap();  // bogus default
6039   if (GetJSObjectFieldAccess(&access)) {
6040     for (int i = 1; i < types->length(); ++i) {
6041       PropertyAccessInfo test_info(
6042           builder_, access_type_, ToType(types->at(i)), name_,
6043           types->at(i)->instance_type());
6044       HObjectAccess test_access = HObjectAccess::ForMap();  // bogus default
6045       if (!test_info.GetJSObjectFieldAccess(&test_access)) return false;
6046       if (!access.Equals(test_access)) return false;
6047     }
6048     return true;
6049   }
6050
6051   // Currently only handle Type::Number as a polymorphic case.
6052   // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
6053   // instruction.
6054   if (type_->Is(Type::Number())) return false;
6055
6056   // Multiple maps cannot transition to the same target map.
6057   ASSERT(!IsLoad() || !lookup_.IsTransition());
6058   if (lookup_.IsTransition() && types->length() > 1) return false;
6059
6060   for (int i = 1; i < types->length(); ++i) {
6061     PropertyAccessInfo test_info(
6062         builder_, access_type_, ToType(types->at(i)), name_,
6063         types->at(i)->instance_type());
6064     if (!test_info.IsCompatible(this)) return false;
6065   }
6066
6067   return true;
6068 }
6069
6070
6071 static bool NeedsWrappingFor(Type* type, Handle<JSFunction> target) {
6072   return type->Is(Type::NumberOrString()) &&
6073       target->shared()->strict_mode() == SLOPPY &&
6074       !target->shared()->native();
6075 }
6076
6077
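// Recognizes SIMD shuffle property names built from the lane characters
// 'X', 'Y', 'Z' and 'W'. Each of the first four characters is encoded as a
// 2-bit lane index (X=0, Y=1, Z=2, W=3) at bit position 2 * i of *mask; any
// other character makes the lookup fail.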
6078 static bool IsSIMDProperty(Handle<String> name, uint8_t* mask) {
6079   SmartArrayPointer<char> cstring = name->ToCString();
6080   int i = 0;
6081   while (i <= 3) {
6082     int shift = 0;
6083     switch (cstring[i]) {
6084       case 'W':
6085         shift++;  // Fall through.
6086       case 'Z':
6087         shift++;  // Fall through.
6088       case 'Y':
6089         shift++;  // Fall through.
6090       case 'X':
6091         break;
6092       default:
6093         return false;
6094     }
6095     *mask |= (shift << 2 * i);
6096     i++;
6097   }
6098
6099   return true;
6100 }
6101
6102
6103 HInstruction* HOptimizedGraphBuilder::BuildMonomorphicAccess(
6104     PropertyAccessInfo* info,
6105     HValue* object,
6106     HValue* checked_object,
6107     HValue* value,
6108     BailoutId ast_id,
6109     BailoutId return_id,
6110     bool can_inline_accessor) {
6111
6112   HObjectAccess access = HObjectAccess::ForMap();  // bogus default
6113   if (info->GetJSObjectFieldAccess(&access)) {
6114     ASSERT(info->IsLoad());
6115     return New<HLoadNamedField>(object, checked_object, access);
6116   }
6117
6118   HValue* checked_holder = checked_object;
6119   if (info->has_holder()) {
6120     Handle<JSObject> prototype(JSObject::cast(info->map()->prototype()));
6121     checked_holder = BuildCheckPrototypeMaps(prototype, info->holder());
6122   }
6123
6124   if (!info->lookup()->IsFound()) {
6125     ASSERT(info->IsLoad());
6126     return graph()->GetConstantUndefined();
6127   }
6128
6129   if (info->lookup()->IsField()) {
6130     if (info->IsLoad()) {
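      // SIMD128 extension: a load of a shuffle property (e.g. "XYZW") on a
      // SIMD object is folded to the constant lane mask computed by
      // IsSIMDProperty instead of being emitted as a field load.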
6131       if (info->map()->constructor()->IsJSFunction()) {
6132         JSFunction* constructor = JSFunction::cast(info->map()->constructor());
6133         String* class_name =
6134           String::cast(constructor->shared()->instance_class_name());
6135         uint8_t mask = 0;
6136         if (class_name->Equals(isolate()->heap()->simd()) &&
6137             IsSIMDProperty(info->name(), &mask) &&
6138             CpuFeatures::SupportsSIMD128InCrankshaft()) {
6139           return New<HConstant>(mask);
6140         }
6141       }
6142       return BuildLoadNamedField(info, checked_holder);
6143     } else {
6144       return BuildStoreNamedField(info, checked_object, value);
6145     }
6146   }
6147
6148   if (info->lookup()->IsTransition()) {
6149     ASSERT(!info->IsLoad());
6150     return BuildStoreNamedField(info, checked_object, value);
6151   }
6152
6153   if (info->lookup()->IsPropertyCallbacks()) {
6154     Push(checked_object);
6155     int argument_count = 1;
6156     if (!info->IsLoad()) {
6157       argument_count = 2;
6158       Push(value);
6159     }
6160
6161     if (NeedsWrappingFor(info->type(), info->accessor())) {
6162       HValue* function = Add<HConstant>(info->accessor());
6163       PushArgumentsFromEnvironment(argument_count);
6164       return New<HCallFunction>(function, argument_count, WRAP_AND_CALL);
6165     } else if (FLAG_inline_accessors && can_inline_accessor) {
6166       bool success = info->IsLoad()
6167           ? TryInlineGetter(info->accessor(), info->map(), ast_id, return_id)
6168           : TryInlineSetter(
6169               info->accessor(), info->map(), ast_id, return_id, value);
6170       if (success || HasStackOverflow()) return NULL;
6171     }
6172
6173     PushArgumentsFromEnvironment(argument_count);
6174     return BuildCallConstantFunction(info->accessor(), argument_count);
6175   }
6176
6177   ASSERT(info->lookup()->IsConstant());
6178   if (info->IsLoad()) {
6179     return New<HConstant>(info->constant());
6180   } else {
6181     return New<HCheckValue>(value, Handle<JSFunction>::cast(info->constant()));
6182   }
6183 }
6184
6185
6186 void HOptimizedGraphBuilder::HandlePolymorphicNamedFieldAccess(
6187     PropertyAccessType access_type,
6188     BailoutId ast_id,
6189     BailoutId return_id,
6190     HValue* object,
6191     HValue* value,
6192     SmallMapList* types,
6193     Handle<String> name) {
6194   // Something did not match; must use a polymorphic access.
6195   int count = 0;
6196   HBasicBlock* join = NULL;
6197   HBasicBlock* number_block = NULL;
6198   bool handled_string = false;
6199
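  // First pass: find out whether any of the feedback types is Number; if so,
  // the dispatch below needs an explicit smi check and a shared number block.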
6200   bool handle_smi = false;
6201   STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism);
6202   for (int i = 0; i < types->length() && count < kMaxLoadPolymorphism; ++i) {
6203     PropertyAccessInfo info(
6204         this, access_type, ToType(types->at(i)), name,
6205         types->at(i)->instance_type());
6206     if (info.type()->Is(Type::String())) {
6207       if (handled_string) continue;
6208       handled_string = true;
6209     }
6210     if (info.CanAccessMonomorphic()) {
6211       count++;
6212       if (info.type()->Is(Type::Number())) {
6213         handle_smi = true;
6214         break;
6215       }
6216     }
6217   }
6218
6219   count = 0;
6220   HControlInstruction* smi_check = NULL;
6221   handled_string = false;
6222
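  // Second pass: emit a map (or string/number) dispatch for every type that
  // can be handled monomorphically, joining the handled cases in |join|.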
6223   for (int i = 0; i < types->length() && count < kMaxLoadPolymorphism; ++i) {
6224     PropertyAccessInfo info(
6225         this, access_type, ToType(types->at(i)), name,
6226         types->at(i)->instance_type());
6227     if (info.type()->Is(Type::String())) {
6228       if (handled_string) continue;
6229       handled_string = true;
6230     }
6231     if (!info.CanAccessMonomorphic()) continue;
6232
6233     if (count == 0) {
6234       join = graph()->CreateBasicBlock();
6235       if (handle_smi) {
6236         HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
6237         HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
6238         number_block = graph()->CreateBasicBlock();
6239         smi_check = New<HIsSmiAndBranch>(
6240             object, empty_smi_block, not_smi_block);
6241         FinishCurrentBlock(smi_check);
6242         GotoNoSimulate(empty_smi_block, number_block);
6243         set_current_block(not_smi_block);
6244       } else {
6245         BuildCheckHeapObject(object);
6246       }
6247     }
6248     ++count;
6249     HBasicBlock* if_true = graph()->CreateBasicBlock();
6250     HBasicBlock* if_false = graph()->CreateBasicBlock();
6251     HUnaryControlInstruction* compare;
6252
6253     HValue* dependency;
6254     if (info.type()->Is(Type::Number())) {
6255       Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
6256       compare = New<HCompareMap>(object, heap_number_map, if_true, if_false);
6257       dependency = smi_check;
6258     } else if (info.type()->Is(Type::String())) {
6259       compare = New<HIsStringAndBranch>(object, if_true, if_false);
6260       dependency = compare;
6261     } else {
6262       compare = New<HCompareMap>(object, info.map(), if_true, if_false);
6263       dependency = compare;
6264     }
6265     FinishCurrentBlock(compare);
6266
6267     if (info.type()->Is(Type::Number())) {
6268       GotoNoSimulate(if_true, number_block);
6269       if_true = number_block;
6270     }
6271
6272     set_current_block(if_true);
6273
6274     HInstruction* access = BuildMonomorphicAccess(
6275         &info, object, dependency, value, ast_id,
6276         return_id, FLAG_polymorphic_inlining);
6277
6278     HValue* result = NULL;
6279     switch (access_type) {
6280       case LOAD:
6281         result = access;
6282         break;
6283       case STORE:
6284         result = value;
6285         break;
6286     }
6287
6288     if (access == NULL) {
6289       if (HasStackOverflow()) return;
6290     } else {
6291       if (!access->IsLinked()) AddInstruction(access);
6292       if (!ast_context()->IsEffect()) Push(result);
6293     }
6294
6295     if (current_block() != NULL) Goto(join);
6296     set_current_block(if_false);
6297   }
6298
6299   // Finish up.  Unconditionally deoptimize if we've handled all the maps we
6300   // know about and do not want to handle ones we've never seen.  Otherwise
6301   // use a generic IC.
6302   if (count == types->length() && FLAG_deoptimize_uncommon_cases) {
6303     FinishExitWithHardDeoptimization("Unknown map in polymorphic access");
6304   } else {
6305     HInstruction* instr = BuildNamedGeneric(access_type, object, name, value);
6306     AddInstruction(instr);
6307     if (!ast_context()->IsEffect()) Push(access_type == LOAD ? instr : value);
6308
6309     if (join != NULL) {
6310       Goto(join);
6311     } else {
6312       Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
6313       if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
6314       return;
6315     }
6316   }
6317
6318   ASSERT(join != NULL);
6319   if (join->HasPredecessor()) {
6320     join->SetJoinId(ast_id);
6321     set_current_block(join);
6322     if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
6323   } else {
6324     set_current_block(NULL);
6325   }
6326 }
6327
6328
6329 static bool ComputeReceiverTypes(Expression* expr,
6330                                  HValue* receiver,
6331                                  SmallMapList** t,
6332                                  Zone* zone) {
6333   SmallMapList* types = expr->GetReceiverTypes();
6334   *t = types;
6335   bool monomorphic = expr->IsMonomorphic();
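  // If the receiver's actual map is known, discard feedback maps that do not
  // belong to its transition tree; the access is monomorphic when exactly one
  // map remains.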
6336   if (types != NULL && receiver->HasMonomorphicJSObjectType()) {
6337     Map* root_map = receiver->GetMonomorphicJSObjectMap()->FindRootMap();
6338     types->FilterForPossibleTransitions(root_map);
6339     monomorphic = types->length() == 1;
6340   }
6341   return monomorphic && CanInlinePropertyAccess(
6342       IC::MapToType<Type>(types->first(), zone));
6343 }
6344
6345
6346 static bool AreStringTypes(SmallMapList* types) {
6347   for (int i = 0; i < types->length(); i++) {
6348     if (types->at(i)->instance_type() >= FIRST_NONSTRING_TYPE) return false;
6349   }
6350   return true;
6351 }
6352
6353
6354 static bool AreInt32x4Types(SmallMapList* types) {
6355   if (types == NULL || types->length() == 0) return false;
6356   for (int i = 0; i < types->length(); i++) {
6357     if (types->at(i)->instance_type() != INT32x4_TYPE) return false;
6358   }
6359   return true;
6360 }
6361
6362
6363 static bool AreFloat32x4Types(SmallMapList* types) {
6364   if (types == NULL || types->length() == 0) return false;
6365   for (int i = 0; i < types->length(); i++) {
6366     if (types->at(i)->instance_type() != FLOAT32x4_TYPE) return false;
6367   }
6368   return true;
6369 }
6370
6371
6372 static bool AreFloat64x2Types(SmallMapList* types) {
6373   if (types == NULL || types->length() == 0) return false;
6374   for (int i = 0; i < types->length(); i++) {
6375     if (types->at(i)->instance_type() != FLOAT64x2_TYPE) return false;
6376   }
6377   return true;
6378 }
6379
6380
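// Maps a SIMD128 getter property name (signMask, x, y, z, w, flagX..flagW)
// to the BuiltinFunctionId for the given SIMD128 instance type.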
6381 static BuiltinFunctionId NameToId(Isolate* isolate, Handle<String> name,
6382                                   InstanceType type) {
6383   BuiltinFunctionId id;
6384   if (name->Equals(isolate->heap()->signMask())) {
6385     if (type == FLOAT32x4_TYPE) {
6386       id = kFloat32x4GetSignMask;
6387     } else if (type == FLOAT64x2_TYPE) {
6388       id = kFloat64x2GetSignMask;
6389     } else {
6390       ASSERT(type == INT32x4_TYPE);
6391       id = kInt32x4GetSignMask;
6392     }
6393   } else if (name->Equals(isolate->heap()->x())) {
6394     if (type == FLOAT32x4_TYPE) {
6395       id = kFloat32x4GetX;
6396     } else if (type == FLOAT64x2_TYPE) {
6397       id = kFloat64x2GetX;
6398     } else {
6399       ASSERT(type == INT32x4_TYPE);
6400       id = kInt32x4GetX;
6401     }
6402   } else if (name->Equals(isolate->heap()->y())) {
6403     if (type == FLOAT32x4_TYPE) {
6404       id = kFloat32x4GetY;
6405     } else if (type == FLOAT64x2_TYPE) {
6406       id = kFloat64x2GetY;
6407     } else {
6408       ASSERT(type == INT32x4_TYPE);
6409       id = kInt32x4GetY;
6410     }
6411   } else if (name->Equals(isolate->heap()->z())) {
6412     id = type == FLOAT32x4_TYPE ? kFloat32x4GetZ : kInt32x4GetZ;
6413   } else if (name->Equals(isolate->heap()->w())) {
6414     id = type == FLOAT32x4_TYPE ? kFloat32x4GetW : kInt32x4GetW;
6415   } else if (name->Equals(isolate->heap()->flagX())) {
6416     ASSERT(type == INT32x4_TYPE);
6417     id = kInt32x4GetFlagX;
6418   } else if (name->Equals(isolate->heap()->flagY())) {
6419     ASSERT(type == INT32x4_TYPE);
6420     id = kInt32x4GetFlagY;
6421   } else if (name->Equals(isolate->heap()->flagZ())) {
6422     ASSERT(type == INT32x4_TYPE);
6423     id = kInt32x4GetFlagZ;
6424   } else if (name->Equals(isolate->heap()->flagW())) {
6425     ASSERT(type == INT32x4_TYPE);
6426     id = kInt32x4GetFlagW;
6427   } else {
6428     UNREACHABLE();
6429     id = kSIMD128Unreachable;
6430   }
6431
6432   return id;
6433 }
6434
6435
6436 void HOptimizedGraphBuilder::BuildStore(Expression* expr,
6437                                         Property* prop,
6438                                         BailoutId ast_id,
6439                                         BailoutId return_id,
6440                                         bool is_uninitialized) {
6441   if (!prop->key()->IsPropertyName()) {
6442     // Keyed store.
6443     HValue* value = environment()->ExpressionStackAt(0);
6444     HValue* key = environment()->ExpressionStackAt(1);
6445     HValue* object = environment()->ExpressionStackAt(2);
6446     bool has_side_effects = false;
6447     HandleKeyedElementAccess(object, key, value, expr,
6448                              STORE, &has_side_effects);
6449     Drop(3);
6450     Push(value);
6451     Add<HSimulate>(return_id, REMOVABLE_SIMULATE);
6452     return ast_context()->ReturnValue(Pop());
6453   }
6454
6455   // Named store.
6456   HValue* value = Pop();
6457   HValue* object = Pop();
6458
6459   Literal* key = prop->key()->AsLiteral();
6460   Handle<String> name = Handle<String>::cast(key->value());
6461   ASSERT(!name.is_null());
6462
6463   HInstruction* instr = BuildNamedAccess(STORE, ast_id, return_id, expr,
6464                                          object, name, value, is_uninitialized);
6465   if (instr == NULL) return;
6466
6467   if (!ast_context()->IsEffect()) Push(value);
6468   AddInstruction(instr);
6469   if (instr->HasObservableSideEffects()) {
6470     Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
6471   }
6472   if (!ast_context()->IsEffect()) Drop(1);
6473   return ast_context()->ReturnValue(value);
6474 }
6475
6476
6477 void HOptimizedGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
6478   Property* prop = expr->target()->AsProperty();
6479   ASSERT(prop != NULL);
6480   CHECK_ALIVE(VisitForValue(prop->obj()));
6481   if (!prop->key()->IsPropertyName()) {
6482     CHECK_ALIVE(VisitForValue(prop->key()));
6483   }
6484   CHECK_ALIVE(VisitForValue(expr->value()));
6485   BuildStore(expr, prop, expr->id(),
6486              expr->AssignmentId(), expr->IsUninitialized());
6487 }
6488
6489
6490 // Because not every expression has a position and there is no common
6491 // superclass of Assignment and CountOperation, we cannot just pass the
6492 // owning expression instead of position and ast_id separately.
6493 void HOptimizedGraphBuilder::HandleGlobalVariableAssignment(
6494     Variable* var,
6495     HValue* value,
6496     BailoutId ast_id) {
6497   LookupResult lookup(isolate());
6498   GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, STORE);
6499   if (type == kUseCell) {
6500     Handle<GlobalObject> global(current_info()->global_object());
6501     Handle<PropertyCell> cell(global->GetPropertyCell(&lookup));
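    // If the cell is known to hold a specific constant, the optimized code
    // relies on that value, so deoptimize whenever the store would change it.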
6502     if (cell->type()->IsConstant()) {
6503       Handle<Object> constant = cell->type()->AsConstant()->Value();
6504       if (value->IsConstant()) {
6505         HConstant* c_value = HConstant::cast(value);
6506         if (!constant.is_identical_to(c_value->handle(isolate()))) {
6507           Add<HDeoptimize>("Constant global variable assignment",
6508                            Deoptimizer::EAGER);
6509         }
6510       } else {
6511         HValue* c_constant = Add<HConstant>(constant);
6512         IfBuilder builder(this);
6513         if (constant->IsNumber()) {
6514           builder.If<HCompareNumericAndBranch>(value, c_constant, Token::EQ);
6515         } else {
6516           builder.If<HCompareObjectEqAndBranch>(value, c_constant);
6517         }
6518         builder.Then();
6519         builder.Else();
6520         Add<HDeoptimize>("Constant global variable assignment",
6521                          Deoptimizer::EAGER);
6522         builder.End();
6523       }
6524     }
6525     HInstruction* instr =
6526         Add<HStoreGlobalCell>(value, cell, lookup.GetPropertyDetails());
6527     if (instr->HasObservableSideEffects()) {
6528       Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
6529     }
6530   } else {
6531     HValue* global_object = Add<HLoadNamedField>(
6532         context(), static_cast<HValue*>(NULL),
6533         HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
6534     HStoreNamedGeneric* instr =
6535         Add<HStoreNamedGeneric>(global_object, var->name(),
6536                                  value, function_strict_mode());
6537     USE(instr);
6538     ASSERT(instr->HasObservableSideEffects());
6539     Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
6540   }
6541 }
6542
6543
6544 void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
6545   Expression* target = expr->target();
6546   VariableProxy* proxy = target->AsVariableProxy();
6547   Property* prop = target->AsProperty();
6548   ASSERT(proxy == NULL || prop == NULL);
6549
6550   // We have a second position recorded in the FullCodeGenerator to have
6551   // type feedback for the binary operation.
6552   BinaryOperation* operation = expr->binary_operation();
6553
6554   if (proxy != NULL) {
6555     Variable* var = proxy->var();
6556     if (var->mode() == LET)  {
6557       return Bailout(kUnsupportedLetCompoundAssignment);
6558     }
6559
6560     CHECK_ALIVE(VisitForValue(operation));
6561
6562     switch (var->location()) {
6563       case Variable::UNALLOCATED:
6564         HandleGlobalVariableAssignment(var,
6565                                        Top(),
6566                                        expr->AssignmentId());
6567         break;
6568
6569       case Variable::PARAMETER:
6570       case Variable::LOCAL:
6571         if (var->mode() == CONST_LEGACY)  {
6572           return Bailout(kUnsupportedConstCompoundAssignment);
6573         }
6574         BindIfLive(var, Top());
6575         break;
6576
6577       case Variable::CONTEXT: {
6578         // Bail out if we try to mutate a parameter value in a function
6579         // using the arguments object.  We do not (yet) correctly handle the
6580         // arguments property of the function.
6581         if (current_info()->scope()->arguments() != NULL) {
6582           // Parameters will be allocated to context slots.  We have no
6583           // direct way to detect that the variable is a parameter so we do
6584           // a linear search of the parameter variables.
6585           int count = current_info()->scope()->num_parameters();
6586           for (int i = 0; i < count; ++i) {
6587             if (var == current_info()->scope()->parameter(i)) {
6588               Bailout(kAssignmentToParameterFunctionUsesArgumentsObject);
6589             }
6590           }
6591         }
6592
6593         HStoreContextSlot::Mode mode;
6594
6595         switch (var->mode()) {
6596           case LET:
6597             mode = HStoreContextSlot::kCheckDeoptimize;
6598             break;
6599           case CONST:
6600             // This case is checked statically so there is no need to
6601             // perform checks here.
6602             UNREACHABLE();
6603           case CONST_LEGACY:
6604             return ast_context()->ReturnValue(Pop());
6605           default:
6606             mode = HStoreContextSlot::kNoCheck;
6607         }
6608
6609         HValue* context = BuildContextChainWalk(var);
6610         HStoreContextSlot* instr = Add<HStoreContextSlot>(
6611             context, var->index(), mode, Top());
6612         if (instr->HasObservableSideEffects()) {
6613           Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
6614         }
6615         break;
6616       }
6617
6618       case Variable::LOOKUP:
6619         return Bailout(kCompoundAssignmentToLookupSlot);
6620     }
6621     return ast_context()->ReturnValue(Pop());
6622
6623   } else if (prop != NULL) {
6624     CHECK_ALIVE(VisitForValue(prop->obj()));
6625     HValue* object = Top();
6626     HValue* key = NULL;
6627     if ((!prop->IsFunctionPrototype() && !prop->key()->IsPropertyName()) ||
6628         prop->IsStringAccess()) {
6629       CHECK_ALIVE(VisitForValue(prop->key()));
6630       key = Top();
6631     }
6632
6633     CHECK_ALIVE(PushLoad(prop, object, key));
6634
6635     CHECK_ALIVE(VisitForValue(expr->value()));
6636     HValue* right = Pop();
6637     HValue* left = Pop();
6638
6639     Push(BuildBinaryOperation(operation, left, right, PUSH_BEFORE_SIMULATE));
6640
6641     BuildStore(expr, prop, expr->id(),
6642                expr->AssignmentId(), expr->IsUninitialized());
6643   } else {
6644     return Bailout(kInvalidLhsInCompoundAssignment);
6645   }
6646 }
6647
6648
6649 void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
6650   ASSERT(!HasStackOverflow());
6651   ASSERT(current_block() != NULL);
6652   ASSERT(current_block()->HasPredecessor());
6653   VariableProxy* proxy = expr->target()->AsVariableProxy();
6654   Property* prop = expr->target()->AsProperty();
6655   ASSERT(proxy == NULL || prop == NULL);
6656
6657   if (expr->is_compound()) {
6658     HandleCompoundAssignment(expr);
6659     return;
6660   }
6661
6662   if (prop != NULL) {
6663     HandlePropertyAssignment(expr);
6664   } else if (proxy != NULL) {
6665     Variable* var = proxy->var();
6666
6667     if (var->mode() == CONST) {
6668       if (expr->op() != Token::INIT_CONST) {
6669         return Bailout(kNonInitializerAssignmentToConst);
6670       }
6671     } else if (var->mode() == CONST_LEGACY) {
6672       if (expr->op() != Token::INIT_CONST_LEGACY) {
6673         CHECK_ALIVE(VisitForValue(expr->value()));
6674         return ast_context()->ReturnValue(Pop());
6675       }
6676
6677       if (var->IsStackAllocated()) {
6678         // We insert a use of the old value to detect unsupported uses of const
6679         // variables (e.g. initialization inside a loop).
6680         HValue* old_value = environment()->Lookup(var);
6681         Add<HUseConst>(old_value);
6682       }
6683     }
6684
6685     if (proxy->IsArguments()) return Bailout(kAssignmentToArguments);
6686
6687     // Handle the assignment.
6688     switch (var->location()) {
6689       case Variable::UNALLOCATED:
6690         CHECK_ALIVE(VisitForValue(expr->value()));
6691         HandleGlobalVariableAssignment(var,
6692                                        Top(),
6693                                        expr->AssignmentId());
6694         return ast_context()->ReturnValue(Pop());
6695
6696       case Variable::PARAMETER:
6697       case Variable::LOCAL: {
6698         // Perform an initialization check for let declared variables
6699         // or parameters.
6700         if (var->mode() == LET && expr->op() == Token::ASSIGN) {
6701           HValue* env_value = environment()->Lookup(var);
6702           if (env_value == graph()->GetConstantHole()) {
6703             return Bailout(kAssignmentToLetVariableBeforeInitialization);
6704           }
6705         }
6706         // We do not allow the arguments object to occur in a context where it
6707         // may escape, but assignments to stack-allocated locals are
6708         // permitted.
6709         CHECK_ALIVE(VisitForValue(expr->value(), ARGUMENTS_ALLOWED));
6710         HValue* value = Pop();
6711         BindIfLive(var, value);
6712         return ast_context()->ReturnValue(value);
6713       }
6714
6715       case Variable::CONTEXT: {
6716         // Bail out if we try to mutate a parameter value in a function using
6717         // the arguments object.  We do not (yet) correctly handle the
6718         // arguments property of the function.
6719         if (current_info()->scope()->arguments() != NULL) {
6720           // Parameters will rewrite to context slots.  We have no direct way
6721           // to detect that the variable is a parameter.
6722           int count = current_info()->scope()->num_parameters();
6723           for (int i = 0; i < count; ++i) {
6724             if (var == current_info()->scope()->parameter(i)) {
6725               return Bailout(kAssignmentToParameterInArgumentsObject);
6726             }
6727           }
6728         }
6729
6730         CHECK_ALIVE(VisitForValue(expr->value()));
6731         HStoreContextSlot::Mode mode;
6732         if (expr->op() == Token::ASSIGN) {
6733           switch (var->mode()) {
6734             case LET:
6735               mode = HStoreContextSlot::kCheckDeoptimize;
6736               break;
6737             case CONST:
6738               // This case is checked statically so there is no need to
6739               // perform checks here.
6740               UNREACHABLE();
6741             case CONST_LEGACY:
6742               return ast_context()->ReturnValue(Pop());
6743             default:
6744               mode = HStoreContextSlot::kNoCheck;
6745           }
6746         } else if (expr->op() == Token::INIT_VAR ||
6747                    expr->op() == Token::INIT_LET ||
6748                    expr->op() == Token::INIT_CONST) {
6749           mode = HStoreContextSlot::kNoCheck;
6750         } else {
6751           ASSERT(expr->op() == Token::INIT_CONST_LEGACY);
6752
6753           mode = HStoreContextSlot::kCheckIgnoreAssignment;
6754         }
6755
6756         HValue* context = BuildContextChainWalk(var);
6757         HStoreContextSlot* instr = Add<HStoreContextSlot>(
6758             context, var->index(), mode, Top());
6759         if (instr->HasObservableSideEffects()) {
6760           Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
6761         }
6762         return ast_context()->ReturnValue(Pop());
6763       }
6764
6765       case Variable::LOOKUP:
6766         return Bailout(kAssignmentToLOOKUPVariable);
6767     }
6768   } else {
6769     return Bailout(kInvalidLeftHandSideInAssignment);
6770   }
6771 }
6772
6773
6774 void HOptimizedGraphBuilder::VisitYield(Yield* expr) {
6775   // Generators are not optimized, so we should never get here.
6776   UNREACHABLE();
6777 }
6778
6779
6780 void HOptimizedGraphBuilder::VisitThrow(Throw* expr) {
6781   ASSERT(!HasStackOverflow());
6782   ASSERT(current_block() != NULL);
6783   ASSERT(current_block()->HasPredecessor());
6784   // We don't optimize functions with invalid left-hand sides in
6785   // assignments, count operations, or for-in.  Consequently throw can
6786   // currently only occur in an effect context.
6787   ASSERT(ast_context()->IsEffect());
6788   CHECK_ALIVE(VisitForValue(expr->exception()));
6789
6790   HValue* value = environment()->Pop();
6791   if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
6792   Add<HPushArguments>(value);
6793   Add<HCallRuntime>(isolate()->factory()->empty_string(),
6794                     Runtime::FunctionForId(Runtime::kHiddenThrow), 1);
6795   Add<HSimulate>(expr->id());
6796
6797   // If the throw definitely exits the function, we can finish with a dummy
6798   // control flow at this point.  This is not the case if the throw is inside
6799   // an inlined function which may be replaced.
6800   if (call_context() == NULL) {
6801     FinishExitCurrentBlock(New<HAbnormalExit>());
6802   }
6803 }
6804
6805
6806 HInstruction* HGraphBuilder::AddLoadStringInstanceType(HValue* string) {
6807   if (string->IsConstant()) {
6808     HConstant* c_string = HConstant::cast(string);
6809     if (c_string->HasStringValue()) {
6810       return Add<HConstant>(c_string->StringValue()->map()->instance_type());
6811     }
6812   }
6813   return Add<HLoadNamedField>(
6814       Add<HLoadNamedField>(string, static_cast<HValue*>(NULL),
6815                            HObjectAccess::ForMap()),
6816       static_cast<HValue*>(NULL), HObjectAccess::ForMapInstanceType());
6817 }
6818
6819
6820 HInstruction* HGraphBuilder::AddLoadStringLength(HValue* string) {
6821   if (string->IsConstant()) {
6822     HConstant* c_string = HConstant::cast(string);
6823     if (c_string->HasStringValue()) {
6824       return Add<HConstant>(c_string->StringValue()->length());
6825     }
6826   }
6827   return Add<HLoadNamedField>(string, static_cast<HValue*>(NULL),
6828                               HObjectAccess::ForStringLength());
6829 }
6830
6831
6832 HInstruction* HOptimizedGraphBuilder::BuildNamedGeneric(
6833     PropertyAccessType access_type,
6834     HValue* object,
6835     Handle<String> name,
6836     HValue* value,
6837     bool is_uninitialized) {
6838   if (is_uninitialized) {
6839     Add<HDeoptimize>("Insufficient type feedback for generic named access",
6840                      Deoptimizer::SOFT);
6841   }
6842   if (access_type == LOAD) {
6843     return New<HLoadNamedGeneric>(object, name);
6844   } else {
6845     return New<HStoreNamedGeneric>(object, name, value, function_strict_mode());
6846   }
6847 }
6849
6850
6851 HInstruction* HOptimizedGraphBuilder::BuildKeyedGeneric(
6852     PropertyAccessType access_type,
6853     HValue* object,
6854     HValue* key,
6855     HValue* value) {
6856   if (access_type == LOAD) {
6857     return New<HLoadKeyedGeneric>(object, key);
6858   } else {
6859     return New<HStoreKeyedGeneric>(object, key, value, function_strict_mode());
6860   }
6861 }
6862
6863
6864 LoadKeyedHoleMode HOptimizedGraphBuilder::BuildKeyedHoleMode(Handle<Map> map) {
6865   // Loads from "stock" fast holey double arrays can elide the hole check.
6866   LoadKeyedHoleMode load_mode = NEVER_RETURN_HOLE;
6867   if (*map == isolate()->get_initial_js_array_map(FAST_HOLEY_DOUBLE_ELEMENTS) &&
6868       isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
6869     Handle<JSObject> prototype(JSObject::cast(map->prototype()), isolate());
6870     Handle<JSObject> object_prototype = isolate()->initial_object_prototype();
6871     BuildCheckPrototypeMaps(prototype, object_prototype);
6872     load_mode = ALLOW_RETURN_HOLE;
6873     graph()->MarkDependsOnEmptyArrayProtoElements();
6874   }
6875
6876   return load_mode;
6877 }
6878
6879
6880 HInstruction* HOptimizedGraphBuilder::BuildMonomorphicElementAccess(
6881     HValue* object,
6882     HValue* key,
6883     HValue* val,
6884     HValue* dependency,
6885     Handle<Map> map,
6886     PropertyAccessType access_type,
6887     KeyedAccessStoreMode store_mode) {
6888   HCheckMaps* checked_object = Add<HCheckMaps>(object, map, dependency);
6889   if (dependency) {
6890     checked_object->ClearDependsOnFlag(kElementsKind);
6891   }
6892
6893   if (access_type == STORE && map->prototype()->IsJSObject()) {
6894     // Monomorphic stores need a prototype chain check because shape
6895     // changes could allow callbacks on elements in the chain that
6896     // aren't compatible with monomorphic keyed stores.
6897     Handle<JSObject> prototype(JSObject::cast(map->prototype()));
6898     JSObject* holder = JSObject::cast(map->prototype());
6899     while (!holder->GetPrototype()->IsNull()) {
6900       holder = JSObject::cast(holder->GetPrototype());
6901     }
6902
6903     BuildCheckPrototypeMaps(prototype,
6904                             Handle<JSObject>(holder));
6905   }
6906
6907   LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
6908   return BuildUncheckedMonomorphicElementAccess(
6909       checked_object, key, val,
6910       map->instance_type() == JS_ARRAY_TYPE,
6911       map->elements_kind(), access_type,
6912       load_mode, store_mode);
6913 }
6914
6915
6916 HInstruction* HOptimizedGraphBuilder::TryBuildConsolidatedElementLoad(
6917     HValue* object,
6918     HValue* key,
6919     HValue* val,
6920     SmallMapList* maps) {
6921   // For polymorphic loads of similar elements kinds (i.e. all tagged or all
6922   // double), always use the "worst case" code without a transition.  This is
6923   // much faster than transitioning the elements to the worst case, trading a
6924   // HTransitionElements for a HCheckMaps, and avoiding mutation of the array.
6925   bool has_double_maps = false;
6926   bool has_smi_or_object_maps = false;
6927   bool has_js_array_access = false;
6928   bool has_non_js_array_access = false;
6929   bool has_seen_holey_elements = false;
6930   Handle<Map> most_general_consolidated_map;
6931   for (int i = 0; i < maps->length(); ++i) {
6932     Handle<Map> map = maps->at(i);
6933     if (!map->IsJSObjectMap()) return NULL;
6934     // Don't allow mixing of JSArrays with JSObjects.
6935     if (map->instance_type() == JS_ARRAY_TYPE) {
6936       if (has_non_js_array_access) return NULL;
6937       has_js_array_access = true;
6938     } else if (has_js_array_access) {
6939       return NULL;
6940     } else {
6941       has_non_js_array_access = true;
6942     }
6943     // Don't allow mixed, incompatible elements kinds.
6944     if (map->has_fast_double_elements()) {
6945       if (has_smi_or_object_maps) return NULL;
6946       has_double_maps = true;
6947     } else if (map->has_fast_smi_or_object_elements()) {
6948       if (has_double_maps) return NULL;
6949       has_smi_or_object_maps = true;
6950     } else {
6951       return NULL;
6952     }
6953     // Remember if we've ever seen holey elements.
6954     if (IsHoleyElementsKind(map->elements_kind())) {
6955       has_seen_holey_elements = true;
6956     }
6957     // Remember the most general elements kind, the code for its load will
6958     // properly handle all of the more specific cases.
6959     if ((i == 0) || IsMoreGeneralElementsKindTransition(
6960             most_general_consolidated_map->elements_kind(),
6961             map->elements_kind())) {
6962       most_general_consolidated_map = map;
6963     }
6964   }
6965   if (!has_double_maps && !has_smi_or_object_maps) return NULL;
6966
6967   HCheckMaps* checked_object = Add<HCheckMaps>(object, maps);
6968   // FAST_ELEMENTS is considered more general than FAST_HOLEY_SMI_ELEMENTS.
6969   // If we've seen both, the consolidated load must use FAST_HOLEY_ELEMENTS.
6970   ElementsKind consolidated_elements_kind = has_seen_holey_elements
6971       ? GetHoleyElementsKind(most_general_consolidated_map->elements_kind())
6972       : most_general_consolidated_map->elements_kind();
6973   HInstruction* instr = BuildUncheckedMonomorphicElementAccess(
6974       checked_object, key, val,
6975       most_general_consolidated_map->instance_type() == JS_ARRAY_TYPE,
6976       consolidated_elements_kind,
6977       LOAD, NEVER_RETURN_HOLE, STANDARD_STORE);
6978   return instr;
6979 }
6980
6981
6982 HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess(
6983     HValue* object,
6984     HValue* key,
6985     HValue* val,
6986     SmallMapList* maps,
6987     PropertyAccessType access_type,
6988     KeyedAccessStoreMode store_mode,
6989     bool* has_side_effects) {
6990   *has_side_effects = false;
6991   BuildCheckHeapObject(object);
6992
6993   if (access_type == LOAD) {
6994     HInstruction* consolidated_load =
6995         TryBuildConsolidatedElementLoad(object, key, val, maps);
6996     if (consolidated_load != NULL) {
6997       *has_side_effects |= consolidated_load->HasObservableSideEffects();
6998       return consolidated_load;
6999     }
7000   }
7001
7002   // Elements kind transition support.
7003   MapHandleList transition_target(maps->length());
7004   // Collect possible transition targets.
7005   MapHandleList possible_transitioned_maps(maps->length());
7006   for (int i = 0; i < maps->length(); ++i) {
7007     Handle<Map> map = maps->at(i);
7008     ElementsKind elements_kind = map->elements_kind();
7009     if (IsFastElementsKind(elements_kind) &&
7010         elements_kind != GetInitialFastElementsKind()) {
7011       possible_transitioned_maps.Add(map);
7012     }
7013     if (elements_kind == SLOPPY_ARGUMENTS_ELEMENTS) {
7014       HInstruction* result = BuildKeyedGeneric(access_type, object, key, val);
7015       *has_side_effects = result->HasObservableSideEffects();
7016       return AddInstruction(result);
7017     }
7018   }
7019   // Get transition target for each map (NULL == no transition).
7020   for (int i = 0; i < maps->length(); ++i) {
7021     Handle<Map> map = maps->at(i);
7022     Handle<Map> transitioned_map =
7023         map->FindTransitionedMap(&possible_transitioned_maps);
7024     transition_target.Add(transitioned_map);
7025   }
7026
7027   MapHandleList untransitionable_maps(maps->length());
7028   HTransitionElementsKind* transition = NULL;
7029   for (int i = 0; i < maps->length(); ++i) {
7030     Handle<Map> map = maps->at(i);
7031     ASSERT(map->IsMap());
7032     if (!transition_target.at(i).is_null()) {
7033       ASSERT(Map::IsValidElementsTransition(
7034           map->elements_kind(),
7035           transition_target.at(i)->elements_kind()));
7036       transition = Add<HTransitionElementsKind>(object, map,
7037                                                 transition_target.at(i));
7038     } else {
7039       untransitionable_maps.Add(map);
7040     }
7041   }
7042
7043   // If only one map is left after transitioning, handle this case
7044   // monomorphically.
7045   ASSERT(untransitionable_maps.length() >= 1);
7046   if (untransitionable_maps.length() == 1) {
7047     Handle<Map> untransitionable_map = untransitionable_maps[0];
7048     HInstruction* instr = NULL;
7049     if (untransitionable_map->has_slow_elements_kind() ||
7050         !untransitionable_map->IsJSObjectMap()) {
7051       instr = AddInstruction(BuildKeyedGeneric(access_type, object, key, val));
7052     } else {
7053       instr = BuildMonomorphicElementAccess(
7054           object, key, val, transition, untransitionable_map, access_type,
7055           store_mode);
7056     }
7057     *has_side_effects |= instr->HasObservableSideEffects();
7058     return access_type == STORE ? NULL : instr;
7059   }
7060
7061   HBasicBlock* join = graph()->CreateBasicBlock();
7062
7063   for (int i = 0; i < untransitionable_maps.length(); ++i) {
7064     Handle<Map> map = untransitionable_maps[i];
7065     if (!map->IsJSObjectMap()) continue;
7066     ElementsKind elements_kind = map->elements_kind();
7067     HBasicBlock* this_map = graph()->CreateBasicBlock();
7068     HBasicBlock* other_map = graph()->CreateBasicBlock();
7069     HCompareMap* mapcompare =
7070         New<HCompareMap>(object, map, this_map, other_map);
7071     FinishCurrentBlock(mapcompare);
7072
7073     set_current_block(this_map);
7074     HInstruction* access = NULL;
7075     if (IsDictionaryElementsKind(elements_kind)) {
7076       access = AddInstruction(BuildKeyedGeneric(access_type, object, key, val));
7077     } else {
7078       ASSERT(IsFastElementsKind(elements_kind) ||
7079              IsExternalArrayElementsKind(elements_kind) ||
7080              IsFixedTypedArrayElementsKind(elements_kind));
7081       LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
7082       // Happily, mapcompare is a checked object.
7083       access = BuildUncheckedMonomorphicElementAccess(
7084           mapcompare, key, val,
7085           map->instance_type() == JS_ARRAY_TYPE,
7086           elements_kind, access_type,
7087           load_mode,
7088           store_mode);
7089     }
7090     *has_side_effects |= access->HasObservableSideEffects();
7091     // The caller will use has_side_effects and add a correct Simulate.
7092     access->SetFlag(HValue::kHasNoObservableSideEffects);
7093     if (access_type == LOAD) {
7094       Push(access);
7095     }
7096     NoObservableSideEffectsScope scope(this);
7097     GotoNoSimulate(join);
7098     set_current_block(other_map);
7099   }
7100
7101   // Ensure that we visited at least one map above that goes to join. This is
7102   // necessary because FinishExitWithHardDeoptimization does an AbnormalExit
7103   // rather than joining the join block. If this becomes an issue, insert a
7104   // generic access in the case length() == 0.
7105   ASSERT(join->predecessors()->length() > 0);
7106   // Deopt if none of the cases matched.
7107   NoObservableSideEffectsScope scope(this);
7108   FinishExitWithHardDeoptimization("Unknown map in polymorphic element access");
7109   set_current_block(join);
7110   return access_type == STORE ? NULL : Pop();
7111 }
7112
7113
7114 HValue* HOptimizedGraphBuilder::HandleKeyedElementAccess(
7115     HValue* obj,
7116     HValue* key,
7117     HValue* val,
7118     Expression* expr,
7119     PropertyAccessType access_type,
7120     bool* has_side_effects) {
7121   ASSERT(!expr->IsPropertyName());
7122   HInstruction* instr = NULL;
7123
7124   SmallMapList* types;
7125   bool monomorphic = ComputeReceiverTypes(expr, obj, &types, zone());
7126
7127   bool force_generic = false;
7128   if (access_type == STORE &&
7129       (monomorphic || (types != NULL && !types->is_empty()))) {
7130     // Stores can't be mono/polymorphic if their prototype chain has dictionary
7131     // elements. However, a receiver map that has dictionary elements itself
7132     // should be left to normal mono/poly behavior (the other maps may benefit
7133     // from highly optimized stores).
7134     for (int i = 0; i < types->length(); i++) {
7135       Handle<Map> current_map = types->at(i);
7136       if (current_map->DictionaryElementsInPrototypeChainOnly()) {
7137         force_generic = true;
7138         monomorphic = false;
7139         break;
7140       }
7141     }
7142   }
7143
7144   if (monomorphic) {
7145     Handle<Map> map = types->first();
7146     if (map->has_slow_elements_kind() || !map->IsJSObjectMap()) {
7147       instr = AddInstruction(BuildKeyedGeneric(access_type, obj, key, val));
7148     } else {
7149       BuildCheckHeapObject(obj);
7150       instr = BuildMonomorphicElementAccess(
7151           obj, key, val, NULL, map, access_type, expr->GetStoreMode());
7152     }
7153   } else if (!force_generic && (types != NULL && !types->is_empty())) {
7154     return HandlePolymorphicElementAccess(
7155         obj, key, val, types, access_type,
7156         expr->GetStoreMode(), has_side_effects);
7157   } else {
7158     if (access_type == STORE) {
7159       if (expr->IsAssignment() &&
7160           expr->AsAssignment()->HasNoTypeInformation()) {
7161         Add<HDeoptimize>("Insufficient type feedback for keyed store",
7162                          Deoptimizer::SOFT);
7163       }
7164     } else {
7165       if (expr->AsProperty()->HasNoTypeInformation()) {
7166         Add<HDeoptimize>("Insufficient type feedback for keyed load",
7167                          Deoptimizer::SOFT);
7168       }
7169     }
7170     instr = AddInstruction(BuildKeyedGeneric(access_type, obj, key, val));
7171   }
7172   *has_side_effects = instr->HasObservableSideEffects();
7173   return instr;
7174 }
7175
7176
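// Before an inlined function reads its arguments object, materialize the
// argument pushes and an HArgumentsElements right after the HEnterInlined
// entry so that later arguments accesses have elements to index into. This is
// done at most once per inlined function.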
7177 void HOptimizedGraphBuilder::EnsureArgumentsArePushedForAccess() {
7178   // Outermost function already has arguments on the stack.
7179   if (function_state()->outer() == NULL) return;
7180
7181   if (function_state()->arguments_pushed()) return;
7182
7183   // Push arguments when entering inlined function.
7184   HEnterInlined* entry = function_state()->entry();
7185   entry->set_arguments_pushed();
7186
7187   HArgumentsObject* arguments = entry->arguments_object();
7188   const ZoneList<HValue*>* arguments_values = arguments->arguments_values();
7189
7190   HInstruction* insert_after = entry;
7191   for (int i = 0; i < arguments_values->length(); i++) {
7192     HValue* argument = arguments_values->at(i);
7193     HInstruction* push_argument = New<HPushArguments>(argument);
7194     push_argument->InsertAfter(insert_after);
7195     insert_after = push_argument;
7196   }
7197
7198   HArgumentsElements* arguments_elements = New<HArgumentsElements>(true);
7199   arguments_elements->ClearFlag(HValue::kUseGVN);
7200   arguments_elements->InsertAfter(insert_after);
7201   function_state()->set_arguments_elements(arguments_elements);
7202 }
7203
7204
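// Recognizes accesses to a stack-allocated arguments object, e.g.
// `arguments.length` or `arguments[i]`, and emits them as direct length /
// bounds-checked element reads instead of generic property accesses. Returns
// false if the receiver is not the arguments object, leaving the property to
// the normal load path.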
7205 bool HOptimizedGraphBuilder::TryArgumentsAccess(Property* expr) {
7206   VariableProxy* proxy = expr->obj()->AsVariableProxy();
7207   if (proxy == NULL) return false;
7208   if (!proxy->var()->IsStackAllocated()) return false;
7209   if (!environment()->Lookup(proxy->var())->CheckFlag(HValue::kIsArguments)) {
7210     return false;
7211   }
7212
7213   HInstruction* result = NULL;
7214   if (expr->key()->IsPropertyName()) {
7215     Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
7216     if (!name->IsOneByteEqualTo(STATIC_ASCII_VECTOR("length"))) return false;
7217
7218     if (function_state()->outer() == NULL) {
7219       HInstruction* elements = Add<HArgumentsElements>(false);
7220       result = New<HArgumentsLength>(elements);
7221     } else {
7222       // Number of arguments without receiver.
7223       int argument_count = environment()->
7224           arguments_environment()->parameter_count() - 1;
7225       result = New<HConstant>(argument_count);
7226     }
7227   } else {
7228     Push(graph()->GetArgumentsObject());
7229     CHECK_ALIVE_OR_RETURN(VisitForValue(expr->key()), true);
7230     HValue* key = Pop();
7231     Drop(1);  // Arguments object.
7232     if (function_state()->outer() == NULL) {
7233       HInstruction* elements = Add<HArgumentsElements>(false);
7234       HInstruction* length = Add<HArgumentsLength>(elements);
7235       HInstruction* checked_key = Add<HBoundsCheck>(key, length);
7236       result = New<HAccessArgumentsAt>(elements, length, checked_key);
7237     } else {
7238       EnsureArgumentsArePushedForAccess();
7239
7240       // Number of arguments without receiver.
7241       HInstruction* elements = function_state()->arguments_elements();
7242       int argument_count = environment()->
7243           arguments_environment()->parameter_count() - 1;
7244       HInstruction* length = Add<HConstant>(argument_count);
7245       HInstruction* checked_key = Add<HBoundsCheck>(key, length);
7246       result = New<HAccessArgumentsAt>(elements, length, checked_key);
7247     }
7248   }
7249   ast_context()->ReturnInstruction(result, expr->id());
7250   return true;
7251 }
7252
7253
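// Builds a named property access from the receiver maps gathered by type
// feedback. A compatible map set gets a map (or instance type) check followed
// by a monomorphic access; an incompatible set goes to the polymorphic
// handler, which emits its own result and makes this function return NULL; an
// empty set falls back to a generic named access. The SIMD branches route
// recognized float32x4/float64x2/int32x4 property callbacks to
// HUnarySIMDOperation after checking the corresponding prototype map.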
7254 HInstruction* HOptimizedGraphBuilder::BuildNamedAccess(
7255     PropertyAccessType access,
7256     BailoutId ast_id,
7257     BailoutId return_id,
7258     Expression* expr,
7259     HValue* object,
7260     Handle<String> name,
7261     HValue* value,
7262     bool is_uninitialized) {
7263   SmallMapList* types;
7264   ComputeReceiverTypes(expr, object, &types, zone());
7265   ASSERT(types != NULL);
7266
7267   if (types->length() > 0) {
7268     PropertyAccessInfo info(
7269         this, access, ToType(types->first()), name,
7270         types->first()->instance_type());
7271     if (!info.CanAccessAsMonomorphic(types)) {
7272       HandlePolymorphicNamedFieldAccess(
7273           access, ast_id, return_id, object, value, types, name);
7274       return NULL;
7275     }
7276
7277     HValue* checked_object;
7278     // Type::Number() is only supported by polymorphic load/call handling.
7279     ASSERT(!info.type()->Is(Type::Number()));
7280     BuildCheckHeapObject(object);
7281
7282     if (AreStringTypes(types)) {
7283       checked_object =
7284           Add<HCheckInstanceType>(object, HCheckInstanceType::IS_STRING);
7285     } else if (info.IsSIMD128PropertyCallback() &&
7286                AreFloat32x4Types(types) &&
7287                CpuFeatures::SupportsSIMD128InCrankshaft()) {
7288       Handle<JSFunction> function(
7289           isolate()->native_context()->float32x4_function());
7290       HInstruction* constant_function = Add<HConstant>(function);
7291       HObjectAccess map_access = HObjectAccess::ForPrototypeOrInitialMap();
7292       HInstruction* map = Add<HLoadNamedField>(
7293           constant_function, static_cast<HValue*>(NULL), map_access);
7294       HObjectAccess prototype_access = HObjectAccess::ForMapPrototype();
7295       HInstruction* prototype = Add<HLoadNamedField>(
7296           map, static_cast<HValue*>(NULL), prototype_access);
7297       Handle<Map> initial_function_prototype_map(
7298           isolate()->native_context()->float32x4_function_prototype_map());
7299       Add<HCheckMaps>(prototype, initial_function_prototype_map);
7300       BuiltinFunctionId id = NameToId(isolate(), name, FLOAT32x4_TYPE);
7301       return NewUncasted<HUnarySIMDOperation>(object, id);
7302     } else if (info.IsSIMD128PropertyCallback() &&
7303                AreFloat64x2Types(types) &&
7304                CpuFeatures::SupportsSIMD128InCrankshaft()) {
7305       Handle<JSFunction> function(
7306           isolate()->native_context()->float64x2_function());
7307       HInstruction* constant_function = Add<HConstant>(function);
7308       HObjectAccess map_access = HObjectAccess::ForPrototypeOrInitialMap();
7309       HInstruction* map = Add<HLoadNamedField>(
7310           constant_function, static_cast<HValue*>(NULL), map_access);
7311       HObjectAccess prototype_access = HObjectAccess::ForMapPrototype();
7312       HInstruction* prototype = Add<HLoadNamedField>(
7313           map, static_cast<HValue*>(NULL), prototype_access);
7314       Handle<Map> initial_function_prototype_map(
7315           isolate()->native_context()->float64x2_function_prototype_map());
7316       Add<HCheckMaps>(prototype, initial_function_prototype_map);
7317       BuiltinFunctionId id = NameToId(isolate(), name, FLOAT64x2_TYPE);
7318       return NewUncasted<HUnarySIMDOperation>(object, id);
7319     } else if (info.IsSIMD128PropertyCallback() &&
7320                AreInt32x4Types(types) &&
7321                CpuFeatures::SupportsSIMD128InCrankshaft()) {
7322       Handle<JSFunction> function(
7323           isolate()->native_context()->int32x4_function());
7324       HInstruction* constant_function = Add<HConstant>(function);
7325       HObjectAccess map_access = HObjectAccess::ForPrototypeOrInitialMap();
7326       HInstruction* map = Add<HLoadNamedField>(
7327           constant_function, static_cast<HValue*>(NULL), map_access);
7328       HObjectAccess prototype_access = HObjectAccess::ForMapPrototype();
7329       HInstruction* prototype = Add<HLoadNamedField>(
7330           map, static_cast<HValue*>(NULL), prototype_access);
7331       Handle<Map> initial_function_prototype_map(
7332           isolate()->native_context()->int32x4_function_prototype_map());
7333       Add<HCheckMaps>(prototype, initial_function_prototype_map);
7334       BuiltinFunctionId id = NameToId(isolate(), name, INT32x4_TYPE);
7335       return NewUncasted<HUnarySIMDOperation>(object, id);
7336     } else {
7337       checked_object = Add<HCheckMaps>(object, types);
7338     }
7339     return BuildMonomorphicAccess(
7340         &info, object, checked_object, value, ast_id, return_id);
7341   }
7342
7343   return BuildNamedGeneric(access, object, name, value, is_uninitialized);
7344 }
7345
7346
7347 void HOptimizedGraphBuilder::PushLoad(Property* expr,
7348                                       HValue* object,
7349                                       HValue* key) {
7350   ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
7351   Push(object);
7352   if (key != NULL) Push(key);
7353   BuildLoad(expr, expr->LoadId());
7354 }
7355
7356
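// Emits the load for a Property expression whose operands are already on the
// expression stack: string character access, Function.prototype, a named
// property, or a keyed element access, returning the result through the
// current AST context.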
7357 void HOptimizedGraphBuilder::BuildLoad(Property* expr,
7358                                        BailoutId ast_id) {
7359   HInstruction* instr = NULL;
7360   if (expr->IsStringAccess()) {
7361     HValue* index = Pop();
7362     HValue* string = Pop();
7363     HInstruction* char_code = BuildStringCharCodeAt(string, index);
7364     AddInstruction(char_code);
7365     instr = NewUncasted<HStringCharFromCode>(char_code);
7366
7367   } else if (expr->IsFunctionPrototype()) {
7368     HValue* function = Pop();
7369     BuildCheckHeapObject(function);
7370     instr = New<HLoadFunctionPrototype>(function);
7371
7372   } else if (expr->key()->IsPropertyName()) {
7373     Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
7374     HValue* object = Pop();
7375
7376     instr = BuildNamedAccess(LOAD, ast_id, expr->LoadId(), expr,
7377                              object, name, NULL, expr->IsUninitialized());
7378     if (instr == NULL) return;
7379     if (instr->IsLinked()) return ast_context()->ReturnValue(instr);
7380
7381   } else {
7382     HValue* key = Pop();
7383     HValue* obj = Pop();
7384
7385     bool has_side_effects = false;
7386     HValue* load = HandleKeyedElementAccess(
7387         obj, key, NULL, expr, LOAD, &has_side_effects);
7388     if (has_side_effects) {
7389       if (ast_context()->IsEffect()) {
7390         Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
7391       } else {
7392         Push(load);
7393         Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
7394         Drop(1);
7395       }
7396     }
7397     return ast_context()->ReturnValue(load);
7398   }
7399   return ast_context()->ReturnInstruction(instr, ast_id);
7400 }
7401
7402
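// Visits a property load. Arguments-object accesses take the fast path in
// TryArgumentsAccess(); otherwise the receiver (and the key, when one is
// needed) is evaluated and BuildLoad() emits the access.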
7403 void HOptimizedGraphBuilder::VisitProperty(Property* expr) {
7404   ASSERT(!HasStackOverflow());
7405   ASSERT(current_block() != NULL);
7406   ASSERT(current_block()->HasPredecessor());
7407
7408   if (TryArgumentsAccess(expr)) return;
7409
7410   CHECK_ALIVE(VisitForValue(expr->obj()));
7411   if ((!expr->IsFunctionPrototype() && !expr->key()->IsPropertyName()) ||
7412       expr->IsStringAccess()) {
7413     CHECK_ALIVE(VisitForValue(expr->key()));
7414   }
7415
7416   BuildLoad(expr, expr->id());
7417 }
7418
7419
7420 HInstruction* HGraphBuilder::BuildConstantMapCheck(Handle<JSObject> constant) {
7421   HCheckMaps* check = Add<HCheckMaps>(
7422       Add<HConstant>(constant), handle(constant->map()));
7423   check->ClearDependsOnFlag(kElementsKind);
7424   return check;
7425 }
7426
7427
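// Adds map checks along the prototype chain, starting at |prototype| and
// stopping once |holder| itself has been checked. With a null holder the
// whole chain is checked down to the null prototype, in which case NULL is
// returned.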
7428 HInstruction* HGraphBuilder::BuildCheckPrototypeMaps(Handle<JSObject> prototype,
7429                                                      Handle<JSObject> holder) {
7430   while (holder.is_null() || !prototype.is_identical_to(holder)) {
7431     BuildConstantMapCheck(prototype);
7432     Object* next_prototype = prototype->GetPrototype();
7433     if (next_prototype->IsNull()) return NULL;
7434     CHECK(next_prototype->IsJSObject());
7435     prototype = handle(JSObject::cast(next_prototype));
7436   }
7437   return BuildConstantMapCheck(prototype);
7438 }
7439
7440
7441 void HOptimizedGraphBuilder::AddCheckPrototypeMaps(Handle<JSObject> holder,
7442                                                    Handle<Map> receiver_map) {
7443   if (!holder.is_null()) {
7444     Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
7445     BuildCheckPrototypeMaps(prototype, holder);
7446   }
7447 }
7448
7449
7450 HInstruction* HOptimizedGraphBuilder::NewPlainFunctionCall(
7451     HValue* fun, int argument_count, bool pass_argument_count) {
7452   return New<HCallJSFunction>(
7453       fun, argument_count, pass_argument_count);
7454 }
7455
7456
7457 HInstruction* HOptimizedGraphBuilder::NewArgumentAdaptorCall(
7458     HValue* fun, HValue* context,
7459     int argument_count, HValue* expected_param_count) {
7460   CallInterfaceDescriptor* descriptor =
7461       isolate()->call_descriptor(Isolate::ArgumentAdaptorCall);
7462
7463   HValue* arity = Add<HConstant>(argument_count - 1);
7464
7465   HValue* op_vals[] = { fun, context, arity, expected_param_count };
7466
7467   Handle<Code> adaptor =
7468       isolate()->builtins()->ArgumentsAdaptorTrampoline();
7469   HConstant* adaptor_value = Add<HConstant>(adaptor);
7470
7471   return New<HCallWithDescriptor>(
7472       adaptor_value, argument_count, descriptor,
7473       Vector<HValue*>(op_vals, descriptor->environment_length()));
7474 }
7475
7476
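// Builds a call to a known constant JSFunction. If the call site's arity
// matches the formal parameter count (or the function does not adapt its
// arguments) the function is invoked directly; otherwise the call goes
// through the arguments adaptor trampoline with the expected parameter count.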
7477 HInstruction* HOptimizedGraphBuilder::BuildCallConstantFunction(
7478     Handle<JSFunction> jsfun, int argument_count) {
7479   HValue* target = Add<HConstant>(jsfun);
7480   // For constant functions, we try to avoid calling the
7481   // argument adaptor and instead call the function directly.
7482   int formal_parameter_count = jsfun->shared()->formal_parameter_count();
7483   bool dont_adapt_arguments =
7484       (formal_parameter_count ==
7485        SharedFunctionInfo::kDontAdaptArgumentsSentinel);
7486   int arity = argument_count - 1;
7487   bool can_invoke_directly =
7488       dont_adapt_arguments || formal_parameter_count == arity;
7489   if (can_invoke_directly) {
7490     if (jsfun.is_identical_to(current_info()->closure())) {
7491       graph()->MarkRecursive();
7492     }
7493     return NewPlainFunctionCall(target, argument_count, dont_adapt_arguments);
7494   } else {
7495     HValue* param_count_value = Add<HConstant>(formal_parameter_count);
7496     HValue* context = Add<HLoadNamedField>(
7497         target, static_cast<HValue*>(NULL),
7498         HObjectAccess::ForFunctionContextPointer());
7499     return NewArgumentAdaptorCall(target, context,
7500         argument_count, param_count_value);
7501   }
7502   UNREACHABLE();
7503   return NULL;
7504 }
7505
7506
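// Sort key for polymorphic call targets: candidates with more profiler ticks
// come first, and ties are broken by smaller estimated inlining size (see
// operator< below).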
7507 class FunctionSorter {
7508  public:
7509   FunctionSorter(int index = 0, int ticks = 0, int size = 0)
7510       : index_(index), ticks_(ticks), size_(size) { }
7511
7512   int index() const { return index_; }
7513   int ticks() const { return ticks_; }
7514   int size() const { return size_; }
7515
7516  private:
7517   int index_;
7518   int ticks_;
7519   int size_;
7520 };
7521
7522
7523 inline bool operator<(const FunctionSorter& lhs, const FunctionSorter& rhs) {
7524   int diff = lhs.ticks() - rhs.ticks();
7525   if (diff != 0) return diff > 0;
7526   return lhs.size() < rhs.size();
7527 }
7528
7529
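// Builds a map-dispatch tree for a polymorphic named call. Each known
// receiver map (ordered by FunctionSorter) gets a branch that either inlines
// the constant target or calls it directly; number receivers share a
// Smi/heap-number branch and string receivers are handled once via an
// instance type check. If every seen map was handled (and
// FLAG_deoptimize_uncommon_cases is on) the fallthrough deoptimizes,
// otherwise it degrades to a generic named load plus a CallFunctionStub call.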
7530 void HOptimizedGraphBuilder::HandlePolymorphicCallNamed(
7531     Call* expr,
7532     HValue* receiver,
7533     SmallMapList* types,
7534     Handle<String> name) {
7535   int argument_count = expr->arguments()->length() + 1;  // Includes receiver.
7536   FunctionSorter order[kMaxCallPolymorphism];
7537
7538   bool handle_smi = false;
7539   bool handled_string = false;
7540   int ordered_functions = 0;
7541
7542   for (int i = 0;
7543        i < types->length() && ordered_functions < kMaxCallPolymorphism;
7544        ++i) {
7545     PropertyAccessInfo info(
7546         this, LOAD, ToType(types->at(i)), name,
7547         types->at(i)->instance_type());
7548     if (info.CanAccessMonomorphic() &&
7549         info.lookup()->IsConstant() &&
7550         info.constant()->IsJSFunction()) {
7551       if (info.type()->Is(Type::String())) {
7552         if (handled_string) continue;
7553         handled_string = true;
7554       }
7555       Handle<JSFunction> target = Handle<JSFunction>::cast(info.constant());
7556       if (info.type()->Is(Type::Number())) {
7557         handle_smi = true;
7558       }
7559       expr->set_target(target);
7560       order[ordered_functions++] = FunctionSorter(
7561           i, target->shared()->profiler_ticks(), InliningAstSize(target));
7562     }
7563   }
7564
7565   std::sort(order, order + ordered_functions);
7566
7567   HBasicBlock* number_block = NULL;
7568   HBasicBlock* join = NULL;
7569   handled_string = false;
7570   int count = 0;
7571
7572   for (int fn = 0; fn < ordered_functions; ++fn) {
7573     int i = order[fn].index();
7574     PropertyAccessInfo info(this, LOAD, ToType(types->at(i)), name,
7575                             types->at(i)->instance_type());
7576     if (info.type()->Is(Type::String())) {
7577       if (handled_string) continue;
7578       handled_string = true;
7579     }
7580     // Reloads the target.
7581     info.CanAccessMonomorphic();
7582     Handle<JSFunction> target = Handle<JSFunction>::cast(info.constant());
7583
7584     expr->set_target(target);
7585     if (count == 0) {
7586       // Only needed once.
7587       join = graph()->CreateBasicBlock();
7588       if (handle_smi) {
7589         HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
7590         HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
7591         number_block = graph()->CreateBasicBlock();
7592         FinishCurrentBlock(New<HIsSmiAndBranch>(
7593                 receiver, empty_smi_block, not_smi_block));
7594         GotoNoSimulate(empty_smi_block, number_block);
7595         set_current_block(not_smi_block);
7596       } else {
7597         BuildCheckHeapObject(receiver);
7598       }
7599     }
7600     ++count;
7601     HBasicBlock* if_true = graph()->CreateBasicBlock();
7602     HBasicBlock* if_false = graph()->CreateBasicBlock();
7603     HUnaryControlInstruction* compare;
7604
7605     Handle<Map> map = info.map();
7606     if (info.type()->Is(Type::Number())) {
7607       Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
7608       compare = New<HCompareMap>(receiver, heap_number_map, if_true, if_false);
7609     } else if (info.type()->Is(Type::String())) {
7610       compare = New<HIsStringAndBranch>(receiver, if_true, if_false);
7611     } else {
7612       compare = New<HCompareMap>(receiver, map, if_true, if_false);
7613     }
7614     FinishCurrentBlock(compare);
7615
7616     if (info.type()->Is(Type::Number())) {
7617       GotoNoSimulate(if_true, number_block);
7618       if_true = number_block;
7619     }
7620
7621     set_current_block(if_true);
7622
7623     AddCheckPrototypeMaps(info.holder(), map);
7624
7625     HValue* function = Add<HConstant>(expr->target());
7626     environment()->SetExpressionStackAt(0, function);
7627     Push(receiver);
7628     CHECK_ALIVE(VisitExpressions(expr->arguments()));
7629     bool needs_wrapping = NeedsWrappingFor(info.type(), target);
7630     bool try_inline = FLAG_polymorphic_inlining && !needs_wrapping;
7631     if (FLAG_trace_inlining && try_inline) {
7632       Handle<JSFunction> caller = current_info()->closure();
7633       SmartArrayPointer<char> caller_name =
7634           caller->shared()->DebugName()->ToCString();
7635       PrintF("Trying to inline the polymorphic call to %s from %s\n",
7636              name->ToCString().get(),
7637              caller_name.get());
7638     }
7639     if (try_inline && TryInlineCall(expr)) {
7640       // Trying to inline will signal that we should bail out from the
7641       // entire compilation by setting stack overflow on the visitor.
7642       if (HasStackOverflow()) return;
7643     } else {
7644       // Since HWrapReceiver currently cannot actually wrap numbers and strings,
7645       // use the regular CallFunctionStub for method calls to wrap the receiver.
7646       // TODO(verwaest): Support creation of value wrappers directly in
7647       // HWrapReceiver.
7648       HInstruction* call = needs_wrapping
7649           ? NewUncasted<HCallFunction>(
7650               function, argument_count, WRAP_AND_CALL)
7651           : BuildCallConstantFunction(target, argument_count);
7652       PushArgumentsFromEnvironment(argument_count);
7653       AddInstruction(call);
7654       Drop(1);  // Drop the function.
7655       if (!ast_context()->IsEffect()) Push(call);
7656     }
7657
7658     if (current_block() != NULL) Goto(join);
7659     set_current_block(if_false);
7660   }
7661
7662   // Finish up.  Unconditionally deoptimize if we've handled all the maps we
7663   // know about and do not want to handle ones we've never seen.  Otherwise
7664   // use a generic IC.
7665   if (ordered_functions == types->length() && FLAG_deoptimize_uncommon_cases) {
7666     FinishExitWithHardDeoptimization("Unknown map in polymorphic call");
7667   } else {
7668     Property* prop = expr->expression()->AsProperty();
7669     HInstruction* function = BuildNamedGeneric(
7670         LOAD, receiver, name, NULL, prop->IsUninitialized());
7671     AddInstruction(function);
7672     Push(function);
7673     AddSimulate(prop->LoadId(), REMOVABLE_SIMULATE);
7674
7675     environment()->SetExpressionStackAt(1, function);
7676     environment()->SetExpressionStackAt(0, receiver);
7677     CHECK_ALIVE(VisitExpressions(expr->arguments()));
7678
7679     CallFunctionFlags flags = receiver->type().IsJSObject()
7680         ? NO_CALL_FUNCTION_FLAGS : CALL_AS_METHOD;
7681     HInstruction* call = New<HCallFunction>(
7682         function, argument_count, flags);
7683
7684     PushArgumentsFromEnvironment(argument_count);
7685
7686     Drop(1);  // Function.
7687
7688     if (join != NULL) {
7689       AddInstruction(call);
7690       if (!ast_context()->IsEffect()) Push(call);
7691       Goto(join);
7692     } else {
7693       return ast_context()->ReturnInstruction(call, expr->id());
7694     }
7695   }
7696
7697   // We assume that control flow is always live after an expression.  So
7698   // even without predecessors to the join block, we set it as the exit
7699   // block and continue by adding instructions there.
7700   ASSERT(join != NULL);
7701   if (join->HasPredecessor()) {
7702     set_current_block(join);
7703     join->SetJoinId(expr->id());
7704     if (!ast_context()->IsEffect()) return ast_context()->ReturnValue(Pop());
7705   } else {
7706     set_current_block(NULL);
7707   }
7708 }
7709
7710
7711 void HOptimizedGraphBuilder::TraceInline(Handle<JSFunction> target,
7712                                          Handle<JSFunction> caller,
7713                                          const char* reason) {
7714   if (FLAG_trace_inlining) {
7715     SmartArrayPointer<char> target_name =
7716         target->shared()->DebugName()->ToCString();
7717     SmartArrayPointer<char> caller_name =
7718         caller->shared()->DebugName()->ToCString();
7719     if (reason == NULL) {
7720       PrintF("Inlined %s called from %s.\n", target_name.get(),
7721              caller_name.get());
7722     } else {
7723       PrintF("Did not inline %s called from %s (%s).\n",
7724              target_name.get(), caller_name.get(), reason);
7725     }
7726   }
7727 }
7728
7729
7730 static const int kNotInlinable = 1000000000;
7731
7732
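// Returns the AST node count of an inlining candidate, or kNotInlinable if
// the target cannot be considered at all (inlining disabled, API function,
// source too large, or syntax the inliner does not support). Builtins marked
// for inlining count as size 0.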
7733 int HOptimizedGraphBuilder::InliningAstSize(Handle<JSFunction> target) {
7734   if (!FLAG_use_inlining) return kNotInlinable;
7735
7736   // Precondition: call is monomorphic and we have found a target with the
7737   // appropriate arity.
7738   Handle<JSFunction> caller = current_info()->closure();
7739   Handle<SharedFunctionInfo> target_shared(target->shared());
7740
7741   // Always inline builtins marked for inlining.
7742   if (target->IsBuiltin()) {
7743     return target_shared->inline_builtin() ? 0 : kNotInlinable;
7744   }
7745
7746   if (target_shared->IsApiFunction()) {
7747     TraceInline(target, caller, "target is api function");
7748     return kNotInlinable;
7749   }
7750
7751   // Do a quick check on source code length to avoid parsing large
7752   // inlining candidates.
7753   if (target_shared->SourceSize() >
7754       Min(FLAG_max_inlined_source_size, kUnlimitedMaxInlinedSourceSize)) {
7755     TraceInline(target, caller, "target text too big");
7756     return kNotInlinable;
7757   }
7758
7759   // Target must be inlineable.
7760   if (!target_shared->IsInlineable()) {
7761     TraceInline(target, caller, "target not inlineable");
7762     return kNotInlinable;
7763   }
7764   if (target_shared->dont_inline() || target_shared->dont_optimize()) {
7765     TraceInline(target, caller, "target contains unsupported syntax [early]");
7766     return kNotInlinable;
7767   }
7768
7769   int nodes_added = target_shared->ast_node_count();
7770   return nodes_added;
7771 }
7772
7773
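// Attempts to inline |target| at the current call site. The early checks
// (AST size, inlining depth, recursion, cumulative node budget) reject a
// candidate cheaply; once the target has been re-parsed and analyzed and
// deoptimization support for its unoptimized code is ensured, the callee's
// body is grafted into the current graph under a fresh FunctionState and the
// different return kinds (normal, construct, getter, setter) are wired back
// to the caller. Returns false only when the call should be emitted normally;
// after the decision to inline it always returns true, even if graph
// construction for the callee bails out the whole compilation.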
7774 bool HOptimizedGraphBuilder::TryInline(Handle<JSFunction> target,
7775                                        int arguments_count,
7776                                        HValue* implicit_return_value,
7777                                        BailoutId ast_id,
7778                                        BailoutId return_id,
7779                                        InliningKind inlining_kind,
7780                                        HSourcePosition position) {
7781   int nodes_added = InliningAstSize(target);
7782   if (nodes_added == kNotInlinable) return false;
7783
7784   Handle<JSFunction> caller = current_info()->closure();
7785
7786   if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
7787     TraceInline(target, caller, "target AST is too large [early]");
7788     return false;
7789   }
7790
7791   // Don't inline deeper than the maximum number of inlining levels.
7792   HEnvironment* env = environment();
7793   int current_level = 1;
7794   while (env->outer() != NULL) {
7795     if (current_level == FLAG_max_inlining_levels) {
7796       TraceInline(target, caller, "inline depth limit reached");
7797       return false;
7798     }
7799     if (env->outer()->frame_type() == JS_FUNCTION) {
7800       current_level++;
7801     }
7802     env = env->outer();
7803   }
7804
7805   // Don't inline recursive functions.
7806   for (FunctionState* state = function_state();
7807        state != NULL;
7808        state = state->outer()) {
7809     if (*state->compilation_info()->closure() == *target) {
7810       TraceInline(target, caller, "target is recursive");
7811       return false;
7812     }
7813   }
7814
7815   // We don't want to add more than a certain number of nodes from inlining.
7816   if (inlined_count_ > Min(FLAG_max_inlined_nodes_cumulative,
7817                            kUnlimitedMaxInlinedNodesCumulative)) {
7818     TraceInline(target, caller, "cumulative AST node limit reached");
7819     return false;
7820   }
7821
7822   // Parse and allocate variables.
7823   CompilationInfo target_info(target, zone());
7824   Handle<SharedFunctionInfo> target_shared(target->shared());
7825   if (!Parser::Parse(&target_info) || !Scope::Analyze(&target_info)) {
7826     if (target_info.isolate()->has_pending_exception()) {
7827       // Parse or scope error, never optimize this function.
7828       SetStackOverflow();
7829       target_shared->DisableOptimization(kParseScopeError);
7830     }
7831     TraceInline(target, caller, "parse failure");
7832     return false;
7833   }
7834
7835   if (target_info.scope()->num_heap_slots() > 0) {
7836     TraceInline(target, caller, "target has context-allocated variables");
7837     return false;
7838   }
7839   FunctionLiteral* function = target_info.function();
7840
7841   // The following conditions must be checked again after re-parsing, because
7842   // earlier the information might not have been complete due to lazy parsing.
7843   nodes_added = function->ast_node_count();
7844   if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
7845     TraceInline(target, caller, "target AST is too large [late]");
7846     return false;
7847   }
7848   AstProperties::Flags* flags(function->flags());
7849   if (flags->Contains(kDontInline) || function->dont_optimize()) {
7850     TraceInline(target, caller, "target contains unsupported syntax [late]");
7851     return false;
7852   }
7853
7854   // If the function uses the arguments object, check that inlining of
7855   // functions with the arguments object is enabled and that the arguments
7856   // variable is stack allocated.
7857   if (function->scope()->arguments() != NULL) {
7858     if (!FLAG_inline_arguments) {
7859       TraceInline(target, caller, "target uses arguments object");
7860       return false;
7861     }
7862
7863     if (!function->scope()->arguments()->IsStackAllocated()) {
7864       TraceInline(target,
7865                   caller,
7866                   "target uses non-stackallocated arguments object");
7867       return false;
7868     }
7869   }
7870
7871   // All declarations must be inlineable.
7872   ZoneList<Declaration*>* decls = target_info.scope()->declarations();
7873   int decl_count = decls->length();
7874   for (int i = 0; i < decl_count; ++i) {
7875     if (!decls->at(i)->IsInlineable()) {
7876       TraceInline(target, caller, "target has non-trivial declaration");
7877       return false;
7878     }
7879   }
7880
7881   // Generate the deoptimization data for the unoptimized version of
7882   // the target function if we don't already have it.
7883   if (!target_shared->has_deoptimization_support()) {
7884     // Note that we compile here using the same AST that we will use for
7885     // generating the optimized inline code.
7886     target_info.EnableDeoptimizationSupport();
7887     if (!FullCodeGenerator::MakeCode(&target_info)) {
7888       TraceInline(target, caller, "could not generate deoptimization info");
7889       return false;
7890     }
7891     if (target_shared->scope_info() == ScopeInfo::Empty(isolate())) {
7892       // The scope info might not have been set if a lazily compiled
7893       // function is inlined before being called for the first time.
7894       Handle<ScopeInfo> target_scope_info =
7895           ScopeInfo::Create(target_info.scope(), zone());
7896       target_shared->set_scope_info(*target_scope_info);
7897     }
7898     target_shared->EnableDeoptimizationSupport(*target_info.code());
7899     target_shared->set_feedback_vector(*target_info.feedback_vector());
7900     Compiler::RecordFunctionCompilation(Logger::FUNCTION_TAG,
7901                                         &target_info,
7902                                         target_shared);
7903   }
7904
7905   // ----------------------------------------------------------------
7906   // After this point, we've made a decision to inline this function (so
7907   // TryInline should always return true).
7908
7909   // Type-check the inlined function.
7910   ASSERT(target_shared->has_deoptimization_support());
7911   AstTyper::Run(&target_info);
7912
7913   int function_id = graph()->TraceInlinedFunction(target_shared, position);
7914
7915   // Save the pending call context and set up a new one for the inlined
7916   // function. The function state is new-allocated because we need to delete
7917   // it in two different places.
7918   FunctionState* target_state = new FunctionState(
7919       this, &target_info, inlining_kind, function_id);
7920
7921   HConstant* undefined = graph()->GetConstantUndefined();
7922
7923   HEnvironment* inner_env =
7924       environment()->CopyForInlining(target,
7925                                      arguments_count,
7926                                      function,
7927                                      undefined,
7928                                      function_state()->inlining_kind());
7929
7930   HConstant* context = Add<HConstant>(Handle<Context>(target->context()));
7931   inner_env->BindContext(context);
7932
7933   HArgumentsObject* arguments_object = NULL;
7934
7935   // If the function uses the arguments object, create and bind one; also
7936   // copy the current argument values to use them for materialization.
7937   if (function->scope()->arguments() != NULL) {
7938     ASSERT(function->scope()->arguments()->IsStackAllocated());
7939     HEnvironment* arguments_env = inner_env->arguments_environment();
7940     int arguments_count = arguments_env->parameter_count();
7941     arguments_object = Add<HArgumentsObject>(arguments_count);
7942     inner_env->Bind(function->scope()->arguments(), arguments_object);
7943     for (int i = 0; i < arguments_count; i++) {
7944       arguments_object->AddArgument(arguments_env->Lookup(i), zone());
7945     }
7946   }
7947
7948   // Capture the state before invoking the inlined function for deopt in the
7949   // inlined function. This simulate has no bailout-id since it's not directly
7950   // reachable for deopt, and is only used to capture the state. If the simulate
7951   // becomes reachable by merging, the ast id of the simulate merged into it is
7952   // adopted.
7953   Add<HSimulate>(BailoutId::None());
7954
7955   current_block()->UpdateEnvironment(inner_env);
7956   Scope* saved_scope = scope();
7957   set_scope(target_info.scope());
7958   HEnterInlined* enter_inlined =
7959       Add<HEnterInlined>(return_id, target, arguments_count, function,
7960                          function_state()->inlining_kind(),
7961                          function->scope()->arguments(),
7962                          arguments_object);
7963   function_state()->set_entry(enter_inlined);
7964
7965   VisitDeclarations(target_info.scope()->declarations());
7966   VisitStatements(function->body());
7967   set_scope(saved_scope);
7968   if (HasStackOverflow()) {
7969     // Bail out if the inlined function did, as we cannot residualize a call
7970     // instead.
7971     TraceInline(target, caller, "inline graph construction failed");
7972     target_shared->DisableOptimization(kInliningBailedOut);
7973     inline_bailout_ = true;
7974     delete target_state;
7975     return true;
7976   }
7977
7978   // Update inlined nodes count.
7979   inlined_count_ += nodes_added;
7980
7981   Handle<Code> unoptimized_code(target_shared->code());
7982   ASSERT(unoptimized_code->kind() == Code::FUNCTION);
7983   Handle<TypeFeedbackInfo> type_info(
7984       TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
7985   graph()->update_type_change_checksum(type_info->own_type_change_checksum());
7986
7987   TraceInline(target, caller, NULL);
7988
7989   if (current_block() != NULL) {
7990     FunctionState* state = function_state();
7991     if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
7992       // Falling off the end of an inlined construct call. In a test context the
7993       // return value will always evaluate to true; in a value context the
7994       // return value is the newly allocated receiver.
7995       if (call_context()->IsTest()) {
7996         Goto(inlined_test_context()->if_true(), state);
7997       } else if (call_context()->IsEffect()) {
7998         Goto(function_return(), state);
7999       } else {
8000         ASSERT(call_context()->IsValue());
8001         AddLeaveInlined(implicit_return_value, state);
8002       }
8003     } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
8004       // Falling off the end of an inlined setter call. The returned value is
8005       // never used; the value of an assignment is always the value of the RHS
8006       // of the assignment.
8007       if (call_context()->IsTest()) {
8008         inlined_test_context()->ReturnValue(implicit_return_value);
8009       } else if (call_context()->IsEffect()) {
8010         Goto(function_return(), state);
8011       } else {
8012         ASSERT(call_context()->IsValue());
8013         AddLeaveInlined(implicit_return_value, state);
8014       }
8015     } else {
8016       // Falling off the end of a normal inlined function. This basically means
8017       // returning undefined.
8018       if (call_context()->IsTest()) {
8019         Goto(inlined_test_context()->if_false(), state);
8020       } else if (call_context()->IsEffect()) {
8021         Goto(function_return(), state);
8022       } else {
8023         ASSERT(call_context()->IsValue());
8024         AddLeaveInlined(undefined, state);
8025       }
8026     }
8027   }
8028
8029   // Fix up the function exits.
8030   if (inlined_test_context() != NULL) {
8031     HBasicBlock* if_true = inlined_test_context()->if_true();
8032     HBasicBlock* if_false = inlined_test_context()->if_false();
8033
8034     HEnterInlined* entry = function_state()->entry();
8035
8036     // Pop the return test context from the expression context stack.
8037     ASSERT(ast_context() == inlined_test_context());
8038     ClearInlinedTestContext();
8039     delete target_state;
8040
8041     // Forward to the real test context.
8042     if (if_true->HasPredecessor()) {
8043       entry->RegisterReturnTarget(if_true, zone());
8044       if_true->SetJoinId(ast_id);
8045       HBasicBlock* true_target = TestContext::cast(ast_context())->if_true();
8046       Goto(if_true, true_target, function_state());
8047     }
8048     if (if_false->HasPredecessor()) {
8049       entry->RegisterReturnTarget(if_false, zone());
8050       if_false->SetJoinId(ast_id);
8051       HBasicBlock* false_target = TestContext::cast(ast_context())->if_false();
8052       Goto(if_false, false_target, function_state());
8053     }
8054     set_current_block(NULL);
8055     return true;
8056
8057   } else if (function_return()->HasPredecessor()) {
8058     function_state()->entry()->RegisterReturnTarget(function_return(), zone());
8059     function_return()->SetJoinId(ast_id);
8060     set_current_block(function_return());
8061   } else {
8062     set_current_block(NULL);
8063   }
8064   delete target_state;
8065   return true;
8066 }
8067
8068
8069 bool HOptimizedGraphBuilder::TryInlineCall(Call* expr) {
8070   return TryInline(expr->target(),
8071                    expr->arguments()->length(),
8072                    NULL,
8073                    expr->id(),
8074                    expr->ReturnId(),
8075                    NORMAL_RETURN,
8076                    ScriptPositionToSourcePosition(expr->position()));
8077 }
8078
8079
8080 bool HOptimizedGraphBuilder::TryInlineConstruct(CallNew* expr,
8081                                                 HValue* implicit_return_value) {
8082   return TryInline(expr->target(),
8083                    expr->arguments()->length(),
8084                    implicit_return_value,
8085                    expr->id(),
8086                    expr->ReturnId(),
8087                    CONSTRUCT_CALL_RETURN,
8088                    ScriptPositionToSourcePosition(expr->position()));
8089 }
8090
8091
8092 bool HOptimizedGraphBuilder::TryInlineGetter(Handle<JSFunction> getter,
8093                                              Handle<Map> receiver_map,
8094                                              BailoutId ast_id,
8095                                              BailoutId return_id) {
8096   if (TryInlineApiGetter(getter, receiver_map, ast_id)) return true;
8097   return TryInline(getter,
8098                    0,
8099                    NULL,
8100                    ast_id,
8101                    return_id,
8102                    GETTER_CALL_RETURN,
8103                    source_position());
8104 }
8105
8106
8107 bool HOptimizedGraphBuilder::TryInlineSetter(Handle<JSFunction> setter,
8108                                              Handle<Map> receiver_map,
8109                                              BailoutId id,
8110                                              BailoutId assignment_id,
8111                                              HValue* implicit_return_value) {
8112   if (TryInlineApiSetter(setter, receiver_map, id)) return true;
8113   return TryInline(setter,
8114                    1,
8115                    implicit_return_value,
8116                    id, assignment_id,
8117                    SETTER_CALL_RETURN,
8118                    source_position());
8119 }
8120
8121
8122 bool HOptimizedGraphBuilder::TryInlineApply(Handle<JSFunction> function,
8123                                             Call* expr,
8124                                             int arguments_count) {
8125   return TryInline(function,
8126                    arguments_count,
8127                    NULL,
8128                    expr->id(),
8129                    expr->ReturnId(),
8130                    NORMAL_RETURN,
8131                    ScriptPositionToSourcePosition(expr->position()));
8132 }
8133
8134
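// Replaces a direct call to a recognized builtin (identified by its
// BuiltinFunctionId) with a dedicated instruction, e.g. Math.floor(x) becomes
// an HUnaryMathOperation and Math.imul(a, b) an Imul. The SIMD cases expand
// to the matching nullary/unary/binary/ternary/quarternary SIMD operations
// when Crankshaft SIMD support is available. Returns false when the call is
// not recognized, leaving it to the normal call path.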
8135 bool HOptimizedGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr) {
8136   if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
8137   BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
8138   switch (id) {
8139     case kMathExp:
8140       if (!FLAG_fast_math) break;
8141       // Fall through if FLAG_fast_math.
8142     case kMathRound:
8143     case kMathFloor:
8144     case kMathAbs:
8145     case kMathSqrt:
8146     case kMathLog:
8147     case kMathClz32:
8148       if (expr->arguments()->length() == 1) {
8149         HValue* argument = Pop();
8150         Drop(2);  // Receiver and function.
8151         HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
8152         ast_context()->ReturnInstruction(op, expr->id());
8153         return true;
8154       }
8155       break;
8156     case kMathImul:
8157       if (expr->arguments()->length() == 2) {
8158         HValue* right = Pop();
8159         HValue* left = Pop();
8160         Drop(2);  // Receiver and function.
8161         HInstruction* op = HMul::NewImul(zone(), context(), left, right);
8162         ast_context()->ReturnInstruction(op, expr->id());
8163         return true;
8164       }
8165       break;
8166 #define SIMD_NULLARY_OPERATION_CASE_ITEM(p1, p2, name, p4)                     \
8167     case k##name:
8168 SIMD_NULLARY_OPERATIONS(SIMD_NULLARY_OPERATION_CASE_ITEM)
8169 #undef SIMD_NULLARY_OPERATION_CASE_ITEM
8170       if (CpuFeatures::SupportsSIMD128InCrankshaft() &&
8171           expr->arguments()->length() == 0) {
8172         Drop(2);  // Receiver and function.
8173         HInstruction* op = NewUncasted<HNullarySIMDOperation>(id);
8174         ast_context()->ReturnInstruction(op, expr->id());
8175         return true;
8176       }
8177       break;
8178 #define SIMD_UNARY_OPERATION_CASE_ITEM(p1, p2, name, p4, p5)                   \
8179     case k##name:
8180 SIMD_UNARY_OPERATIONS(SIMD_UNARY_OPERATION_CASE_ITEM)
8181 #undef SIMD_UNARY_OPERATION_CASE_ITEM
8182       if (CpuFeatures::SupportsSIMD128InCrankshaft() &&
8183           expr->arguments()->length() == 1) {
8184         HValue* argument = Pop();
8185         Drop(2);  // Receiver and function.
8186         HInstruction* op = NewUncasted<HUnarySIMDOperation>(argument, id);
8187         ast_context()->ReturnInstruction(op, expr->id());
8188         return true;
8189       }
8190       break;
8191 #define SIMD_BINARY_OPERATION_CASE_ITEM(p1, p2, name, p4, p5, p6)              \
8192     case k##name:
8193 SIMD_BINARY_OPERATIONS(SIMD_BINARY_OPERATION_CASE_ITEM)
8194 #undef SIMD_BINARY_OPERATION_CASE_ITEM
8195       if (CpuFeatures::SupportsSIMD128InCrankshaft() &&
8196           expr->arguments()->length() == 2) {
8197         HValue* right = Pop();
8198         HValue* left = Pop();
8199         Drop(2);  // Receiver and function.
8200         HInstruction* op = NewUncasted<HBinarySIMDOperation>(left, right, id);
8201         ast_context()->ReturnInstruction(op, expr->id());
8202         return true;
8203       }
8204       break;
8205 #define SIMD_TERNARY_OPERATION_CASE_ITEM(p1, p2, name, p4, p5, p6, p7)         \
8206     case k##name:
8207 SIMD_TERNARY_OPERATIONS(SIMD_TERNARY_OPERATION_CASE_ITEM)
8208 #undef SIMD_TERNARY_OPERATION_CASE_ITEM
8209       if (CpuFeatures::SupportsSIMD128InCrankshaft() &&
8210           expr->arguments()->length() == 3) {
8211         HValue* right = Pop();
8212         HValue* left = Pop();
8213         HValue* value = Pop();
8214         Drop(2);  // Receiver and function.
8215         HInstruction* op =
8216             NewUncasted<HTernarySIMDOperation>(value, left, right, id);
8217         ast_context()->ReturnInstruction(op, expr->id());
8218         return true;
8219       }
8220       break;
8221 #define SIMD_QUARTERNARY_OPERATION_CASE_ITEM(p1, p2, name, p4, p5, p6, p7, p8) \
8222     case k##name:
8223 SIMD_QUARTERNARY_OPERATIONS(SIMD_QUARTERNARY_OPERATION_CASE_ITEM)
8224 #undef SIMD_QUARTERNARY_OPERATION_CASE_ITEM
8225       if (CpuFeatures::SupportsSIMD128InCrankshaft() &&
8226           expr->arguments()->length() == 4) {
8227         HValue* w = Pop();
8228         HValue* z = Pop();
8229         HValue* y = Pop();
8230         HValue* x = Pop();
8231         Drop(2);  // Receiver and function.
8232         HInstruction* op =
8233             NewUncasted<HQuarternarySIMDOperation>(x, y, z, w, id);
8234         ast_context()->ReturnInstruction(op, expr->id());
8235         return true;
8236       }
8237       break;
8238     default:
8239       // Not supported for inlining yet.
8240       break;
8241   }
8242   return false;
8243 }
8244
8245
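// Method-call counterpart of the above: with a known receiver map it inlines
// String.prototype.charCodeAt/charAt and String.fromCharCode, strength-reduces
// Math.pow (exponent 0.5 -> sqrt, -0.5 -> 1/sqrt, 2 -> x*x), maps Math.min/max
// and Math.imul to dedicated instructions, and emits fast inlined versions of
// Array.prototype.pop, push and shift for fast, non-observed element kinds.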
8246 bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
8247     Call* expr,
8248     HValue* receiver,
8249     Handle<Map> receiver_map) {
8250   // Try to inline calls like Math.* as operations in the calling function.
8251   if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
8252   BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
8253   int argument_count = expr->arguments()->length() + 1;  // Plus receiver.
8254   switch (id) {
8255     case kStringCharCodeAt:
8256     case kStringCharAt:
8257       if (argument_count == 2) {
8258         HValue* index = Pop();
8259         HValue* string = Pop();
8260         Drop(1);  // Function.
8261         HInstruction* char_code =
8262             BuildStringCharCodeAt(string, index);
8263         if (id == kStringCharCodeAt) {
8264           ast_context()->ReturnInstruction(char_code, expr->id());
8265           return true;
8266         }
8267         AddInstruction(char_code);
8268         HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
8269         ast_context()->ReturnInstruction(result, expr->id());
8270         return true;
8271       }
8272       break;
8273     case kStringFromCharCode:
8274       if (argument_count == 2) {
8275         HValue* argument = Pop();
8276         Drop(2);  // Receiver and function.
8277         HInstruction* result = NewUncasted<HStringCharFromCode>(argument);
8278         ast_context()->ReturnInstruction(result, expr->id());
8279         return true;
8280       }
8281       break;
8282     case kMathExp:
8283       if (!FLAG_fast_math) break;
8284       // Fall through if FLAG_fast_math.
8285     case kMathRound:
8286     case kMathFloor:
8287     case kMathAbs:
8288     case kMathSqrt:
8289     case kMathLog:
8290     case kMathClz32:
8291       if (argument_count == 2) {
8292         HValue* argument = Pop();
8293         Drop(2);  // Receiver and function.
8294         HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
8295         ast_context()->ReturnInstruction(op, expr->id());
8296         return true;
8297       }
8298       break;
8299     case kMathPow:
8300       if (argument_count == 3) {
8301         HValue* right = Pop();
8302         HValue* left = Pop();
8303         Drop(2);  // Receiver and function.
8304         HInstruction* result = NULL;
8305         // Use sqrt() if the exponent is 0.5 or -0.5; use x*x if it is 2.
8306         if (right->IsConstant() && HConstant::cast(right)->HasDoubleValue()) {
8307           double exponent = HConstant::cast(right)->DoubleValue();
8308           if (exponent == 0.5) {
8309             result = NewUncasted<HUnaryMathOperation>(left, kMathPowHalf);
8310           } else if (exponent == -0.5) {
8311             HValue* one = graph()->GetConstant1();
8312             HInstruction* sqrt = AddUncasted<HUnaryMathOperation>(
8313                 left, kMathPowHalf);
8314             // MathPowHalf doesn't have side effects so there's no need for
8315             // an environment simulation here.
8316             ASSERT(!sqrt->HasObservableSideEffects());
8317             result = NewUncasted<HDiv>(one, sqrt);
8318           } else if (exponent == 2.0) {
8319             result = NewUncasted<HMul>(left, left);
8320           }
8321         }
8322
8323         if (result == NULL) {
8324           result = NewUncasted<HPower>(left, right);
8325         }
8326         ast_context()->ReturnInstruction(result, expr->id());
8327         return true;
8328       }
8329       break;
8330     case kMathMax:
8331     case kMathMin:
8332       if (argument_count == 3) {
8333         HValue* right = Pop();
8334         HValue* left = Pop();
8335         Drop(2);  // Receiver and function.
8336         HMathMinMax::Operation op = (id == kMathMin) ? HMathMinMax::kMathMin
8337                                                      : HMathMinMax::kMathMax;
8338         HInstruction* result = NewUncasted<HMathMinMax>(left, right, op);
8339         ast_context()->ReturnInstruction(result, expr->id());
8340         return true;
8341       }
8342       break;
8343     case kMathImul:
8344       if (argument_count == 3) {
8345         HValue* right = Pop();
8346         HValue* left = Pop();
8347         Drop(2);  // Receiver and function.
8348         HInstruction* result = HMul::NewImul(zone(), context(), left, right);
8349         ast_context()->ReturnInstruction(result, expr->id());
8350         return true;
8351       }
8352       break;
8353     case kArrayPop: {
8354       if (receiver_map.is_null()) return false;
8355       if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
8356       ElementsKind elements_kind = receiver_map->elements_kind();
8357       if (!IsFastElementsKind(elements_kind)) return false;
8358       if (receiver_map->is_observed()) return false;
8359       ASSERT(receiver_map->is_extensible());
8360
8361       Drop(expr->arguments()->length());
8362       HValue* result;
8363       HValue* reduced_length;
8364       HValue* receiver = Pop();
8365
8366       HValue* checked_object = AddCheckMap(receiver, receiver_map);
8367       HValue* length = Add<HLoadNamedField>(
8368           checked_object, static_cast<HValue*>(NULL),
8369           HObjectAccess::ForArrayLength(elements_kind));
8370
8371       Drop(1);  // Function.
8372
8373       { NoObservableSideEffectsScope scope(this);
8374         IfBuilder length_checker(this);
8375
8376         HValue* bounds_check = length_checker.If<HCompareNumericAndBranch>(
8377             length, graph()->GetConstant0(), Token::EQ);
8378         length_checker.Then();
8379
8380         if (!ast_context()->IsEffect()) Push(graph()->GetConstantUndefined());
8381
8382         length_checker.Else();
8383         HValue* elements = AddLoadElements(checked_object);
8384         // Ensure that we aren't popping from a copy-on-write array.
8385         if (IsFastSmiOrObjectElementsKind(elements_kind)) {
8386           elements = BuildCopyElementsOnWrite(checked_object, elements,
8387                                               elements_kind, length);
8388         }
8389         reduced_length = AddUncasted<HSub>(length, graph()->GetConstant1());
8390         result = AddElementAccess(elements, reduced_length, NULL,
8391                                   bounds_check, elements_kind, LOAD);
8392         Factory* factory = isolate()->factory();
8393         double nan_double = FixedDoubleArray::hole_nan_as_double();
8394         HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
8395             ? Add<HConstant>(factory->the_hole_value())
8396             : Add<HConstant>(nan_double);
8397         if (IsFastSmiOrObjectElementsKind(elements_kind)) {
8398           elements_kind = FAST_HOLEY_ELEMENTS;
8399         }
8400         AddElementAccess(
8401             elements, reduced_length, hole, bounds_check, elements_kind, STORE);
8402         Add<HStoreNamedField>(
8403             checked_object, HObjectAccess::ForArrayLength(elements_kind),
8404             reduced_length, STORE_TO_INITIALIZED_ENTRY);
8405
8406         if (!ast_context()->IsEffect()) Push(result);
8407
8408         length_checker.End();
8409       }
8410       result = ast_context()->IsEffect() ? graph()->GetConstant0() : Top();
8411       Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
8412       if (!ast_context()->IsEffect()) Drop(1);
8413
8414       ast_context()->ReturnValue(result);
8415       return true;
8416     }
8417     case kArrayPush: {
8418       if (receiver_map.is_null()) return false;
8419       if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
8420       ElementsKind elements_kind = receiver_map->elements_kind();
8421       if (!IsFastElementsKind(elements_kind)) return false;
8422       if (receiver_map->is_observed()) return false;
8423       if (JSArray::IsReadOnlyLengthDescriptor(receiver_map)) return false;
8424       ASSERT(receiver_map->is_extensible());
8425
8426       // If there may be elements accessors in the prototype chain, the fast
8427       // inlined version can't be used.
8428       if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;
8429       // Even if there are currently no elements accessors on the prototype
8430       // chain, that doesn't mean there won't be any later. Install a full
8431       // prototype chain check to trap element accessors being installed on
8432       // the prototype chain, which would cause elements to go to dictionary
8433       // mode and result in a map change.
8434       Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
8435       BuildCheckPrototypeMaps(prototype, Handle<JSObject>());
8436
8437       const int argc = expr->arguments()->length();
8438       if (argc != 1) return false;
8439
8440       HValue* value_to_push = Pop();
8441       HValue* array = Pop();
8442       Drop(1);  // Drop function.
8443
8444       HInstruction* new_size = NULL;
8445       HValue* length = NULL;
8446
8447       {
8448         NoObservableSideEffectsScope scope(this);
8449
8450         length = Add<HLoadNamedField>(array, static_cast<HValue*>(NULL),
8451           HObjectAccess::ForArrayLength(elements_kind));
8452
8453         new_size = AddUncasted<HAdd>(length, graph()->GetConstant1());
8454
8455         bool is_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
8456         BuildUncheckedMonomorphicElementAccess(array, length,
8457                                                value_to_push, is_array,
8458                                                elements_kind, STORE,
8459                                                NEVER_RETURN_HOLE,
8460                                                STORE_AND_GROW_NO_TRANSITION);
8461
8462         if (!ast_context()->IsEffect()) Push(new_size);
8463         Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
8464         if (!ast_context()->IsEffect()) Drop(1);
8465       }
8466
8467       ast_context()->ReturnValue(new_size);
8468       return true;
8469     }
8470     case kArrayShift: {
8471       if (receiver_map.is_null()) return false;
8472       if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
8473       ElementsKind kind = receiver_map->elements_kind();
8474       if (!IsFastElementsKind(kind)) return false;
8475       if (receiver_map->is_observed()) return false;
8476       ASSERT(receiver_map->is_extensible());
8477
8478       // If there may be elements accessors in the prototype chain, the fast
8479       // inlined version can't be used.
8480       if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;
8481
8482       // Even if there are currently no elements accessors on the prototype
8483       // chain, that doesn't mean there won't be any later. Install a full
8484       // prototype chain check to trap element accessors being installed on
8485       // the prototype chain, which would cause elements to go to dictionary
8486       // mode and result in a map change.
8487       BuildCheckPrototypeMaps(
8488           handle(JSObject::cast(receiver_map->prototype()), isolate()),
8489           Handle<JSObject>::null());
8490
8491       // Threshold for fast inlined Array.shift().
8492       HConstant* inline_threshold = Add<HConstant>(static_cast<int32_t>(16));
8493
8494       Drop(expr->arguments()->length());
8495       HValue* receiver = Pop();
8496       HValue* function = Pop();
8497       HValue* result;
8498
8499       {
8500         NoObservableSideEffectsScope scope(this);
8501
8502         HValue* length = Add<HLoadNamedField>(
8503             receiver, static_cast<HValue*>(NULL),
8504             HObjectAccess::ForArrayLength(kind));
8505
8506         IfBuilder if_lengthiszero(this);
8507         HValue* lengthiszero = if_lengthiszero.If<HCompareNumericAndBranch>(
8508             length, graph()->GetConstant0(), Token::EQ);
8509         if_lengthiszero.Then();
8510         {
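               // Shifting an empty array yields undefined.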
8511           if (!ast_context()->IsEffect()) Push(graph()->GetConstantUndefined());
8512         }
8513         if_lengthiszero.Else();
8514         {
8515           HValue* elements = AddLoadElements(receiver);
8516
8517           // Check if we can use the fast inlined Array.shift().
8518           IfBuilder if_inline(this);
8519           if_inline.If<HCompareNumericAndBranch>(
8520               length, inline_threshold, Token::LTE);
8521           if (IsFastSmiOrObjectElementsKind(kind)) {
8522             // We cannot handle copy-on-write backing stores here.
8523             if_inline.AndIf<HCompareMap>(
8524                 elements, isolate()->factory()->fixed_array_map());
8525           }
8526           if_inline.Then();
8527           {
8528             // Remember the result.
8529             if (!ast_context()->IsEffect()) {
8530               Push(AddElementAccess(elements, graph()->GetConstant0(), NULL,
8531                                     lengthiszero, kind, LOAD));
8532             }
8533
8534             // Compute the new length.
8535             HValue* new_length = AddUncasted<HSub>(
8536                 length, graph()->GetConstant1());
8537             new_length->ClearFlag(HValue::kCanOverflow);
8538
8539             // Copy the remaining elements.
8540             LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
8541             {
8542               HValue* new_key = loop.BeginBody(
8543                   graph()->GetConstant0(), new_length, Token::LT);
8544               HValue* key = AddUncasted<HAdd>(new_key, graph()->GetConstant1());
8545               key->ClearFlag(HValue::kCanOverflow);
8546               HValue* element = AddUncasted<HLoadKeyed>(
8547                   elements, key, lengthiszero, kind, ALLOW_RETURN_HOLE);
8548               HStoreKeyed* store = Add<HStoreKeyed>(
8549                   elements, new_key, element, kind);
8550               store->SetFlag(HValue::kAllowUndefinedAsNaN);
8551             }
8552             loop.EndBody();
8553
8554             // Put a hole at the end.
8555             HValue* hole = IsFastSmiOrObjectElementsKind(kind)
8556                 ? Add<HConstant>(isolate()->factory()->the_hole_value())
8557                 : Add<HConstant>(FixedDoubleArray::hole_nan_as_double());
8558             if (IsFastSmiOrObjectElementsKind(kind)) kind = FAST_HOLEY_ELEMENTS;
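                 // Use a holey kind for this store so the hole value is accepted.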
8559             Add<HStoreKeyed>(
8560                 elements, new_length, hole, kind, INITIALIZING_STORE);
8561
8562             // Remember new length.
8563             Add<HStoreNamedField>(
8564                 receiver, HObjectAccess::ForArrayLength(kind),
8565                 new_length, STORE_TO_INITIALIZED_ENTRY);
8566           }
8567           if_inline.Else();
8568           {
8569             Add<HPushArguments>(receiver);
8570             result = Add<HCallJSFunction>(function, 1, true);
8571             if (!ast_context()->IsEffect()) Push(result);
8572           }
8573           if_inline.End();
8574         }
8575         if_lengthiszero.End();
8576       }
8577       result = ast_context()->IsEffect() ? graph()->GetConstant0() : Top();
8578       Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
8579       if (!ast_context()->IsEffect()) Drop(1);
8580       ast_context()->ReturnValue(result);
8581       return true;
8582     }
8583     case kArrayIndexOf:
8584     case kArrayLastIndexOf: {
8585       if (receiver_map.is_null()) return false;
8586       if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
8587       ElementsKind kind = receiver_map->elements_kind();
8588       if (!IsFastElementsKind(kind)) return false;
8589       if (receiver_map->is_observed()) return false;
8590       if (argument_count != 2) return false;
8591       ASSERT(receiver_map->is_extensible());
8592
8593       // If there may be elements accessors in the prototype chain, the fast
8594       // inlined version can't be used.
8595       if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;
8596
8597       // If there currently can be no elements accessors on the prototype chain,
8598       // it doesn't mean that there won't be any later. Install a full prototype
8599       // chain check to trap element accessors being installed on the prototype
8600       // chain, which would cause elements to go to dictionary mode and result
8601       // in a map change.
8602       BuildCheckPrototypeMaps(
8603           handle(JSObject::cast(receiver_map->prototype()), isolate()),
8604           Handle<JSObject>::null());
8605
8606       HValue* search_element = Pop();
8607       HValue* receiver = Pop();
8608       Drop(1);  // Drop function.
8609
8610       ArrayIndexOfMode mode = (id == kArrayIndexOf)
8611           ? kFirstIndexOf : kLastIndexOf;
8612       HValue* index = BuildArrayIndexOf(receiver, search_element, kind, mode);
8613
8614       if (!ast_context()->IsEffect()) Push(index);
8615       Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
8616       if (!ast_context()->IsEffect()) Drop(1);
8617       ast_context()->ReturnValue(index);
8618       return true;
8619     }
8620 #define SIMD_NULLARY_OPERATION_CASE_ITEM(p1, p2, name, p4)                     \
8621     case k##name:
8622 SIMD_NULLARY_OPERATIONS(SIMD_NULLARY_OPERATION_CASE_ITEM)
8623 #undef SIMD_NULLARY_OPERATION_CASE_ITEM
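           // Note: argument_count includes the receiver, so a nullary op sees 1.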
8624       if (CpuFeatures::SupportsSIMD128InCrankshaft() && argument_count == 1) {
8625         Drop(2);  // Receiver and function.
8626         HInstruction* op = NewUncasted<HNullarySIMDOperation>(id);
8627         ast_context()->ReturnInstruction(op, expr->id());
8628         return true;
8629       }
8630       break;
8631 #define SIMD_UNARY_OPERATION_CASE_ITEM(p1, p2, name, p4, p5)                   \
8632     case k##name:
8633 SIMD_UNARY_OPERATIONS(SIMD_UNARY_OPERATION_CASE_ITEM)
8634 #undef SIMD_UNARY_OPERATION_CASE_ITEM
8635       if (CpuFeatures::SupportsSIMD128InCrankshaft() && argument_count == 2) {
8636         HValue* argument = Pop();
8637         Drop(2);  // Receiver and function.
8638         HInstruction* op = NewUncasted<HUnarySIMDOperation>(argument, id);
8639         ast_context()->ReturnInstruction(op, expr->id());
8640         return true;
8641       }
8642       break;
8643 #define SIMD_BINARY_OPERATION_CASE_ITEM(p1, p2, name, p4, p5, p6)              \
8644     case k##name:
8645 SIMD_BINARY_OPERATIONS(SIMD_BINARY_OPERATION_CASE_ITEM)
8646 #undef SIMD_BINARY_OPERATION_CASE_ITEM
8647       if (CpuFeatures::SupportsSIMD128InCrankshaft() && argument_count == 3) {
8648         HValue* right = Pop();
8649         HValue* left = Pop();
8650         Drop(2);  // Receiver and function.
8651         HInstruction* op = NewUncasted<HBinarySIMDOperation>(left, right, id);
8652         ast_context()->ReturnInstruction(op, expr->id());
8653         return true;
8654       }
8655       break;
8656 #define SIMD_TERNARY_OPERATION_CASE_ITEM(p1, p2, name, p4, p5, p6, p7)         \
8657     case k##name:
8658 SIMD_TERNARY_OPERATIONS(SIMD_TERNARY_OPERATION_CASE_ITEM)
8659 #undef SIMD_TERNARY_OPERATION_CASE_ITEM
8660       if (CpuFeatures::SupportsSIMD128InCrankshaft() && argument_count == 4) {
8661         HValue* right = Pop();
8662         HValue* left = Pop();
8663         HValue* value = Pop();
8664         Drop(2);  // Receiver and function.
8665         HInstruction* op =
8666             NewUncasted<HTernarySIMDOperation>(value, left, right, id);
8667         ast_context()->ReturnInstruction(op, expr->id());
8668         return true;
8669       }
8670       break;
8671 #define SIMD_QUARTERNARY_OPERATION_CASE_ITEM(p1, p2, name, p4, p5, p6, p7, p8) \
8672     case k##name:
8673 SIMD_QUARTERNARY_OPERATIONS(SIMD_QUARTERNARY_OPERATION_CASE_ITEM)
8674 #undef SIMD_QUARTERNARY_OPERATION_CASE_ITEM
8675       if (CpuFeatures::SupportsSIMD128InCrankshaft() && argument_count == 5) {
8676         HValue* w = Pop();
8677         HValue* z = Pop();
8678         HValue* y = Pop();
8679         HValue* x = Pop();
8680         Drop(2);  // Receiver and function.
8681         HValue* context = environment()->context();
8682         HInstruction* op =
8683             HQuarternarySIMDOperation::New(zone(), context, x, y, z, w, id);
8684         ast_context()->ReturnInstruction(op, expr->id());
8685         return true;
8686       }
8687       break;
8688     case kFloat32x4ArrayGetAt:
8689     case kFloat64x2ArrayGetAt:
8690     case kInt32x4ArrayGetAt:
8691       if (CpuFeatures::SupportsSIMD128InCrankshaft() && argument_count == 2) {
8692         HValue* key = Pop();
8693         HValue* typed32x4_array = Pop();
8694         ASSERT(typed32x4_array == receiver);
8695         Drop(1);  // Drop function.
8696         HInstruction* instr = BuildUncheckedMonomorphicElementAccess(
8697             typed32x4_array, key, NULL,
8698             receiver_map->instance_type() == JS_ARRAY_TYPE,
8699             receiver_map->elements_kind(),
8700             LOAD,  // is_store.
8701             NEVER_RETURN_HOLE,  // load_mode.
8702             STANDARD_STORE);
8703         ast_context()->ReturnValue(instr);
8704         return true;
8705       }
8706       break;
8707     case kFloat32x4ArraySetAt:
8708     case kFloat64x2ArraySetAt:
8709     case kInt32x4ArraySetAt:
8710       if (CpuFeatures::SupportsSIMD128InCrankshaft() && argument_count == 3) {
8711         HValue* value = Pop();
8712         HValue* key = Pop();
8713         HValue* typed32x4_array = Pop();
8714         ASSERT(typed32x4_array == receiver);
8715         Drop(1);  // Drop function.
8716         // TODO(haitao): add STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS.
8717         KeyedAccessStoreMode store_mode = STANDARD_STORE;
8718         BuildUncheckedMonomorphicElementAccess(
8719             typed32x4_array, key, value,
8720             receiver_map->instance_type() == JS_ARRAY_TYPE,
8721             receiver_map->elements_kind(),
8722             STORE,  // is_store.
8723             NEVER_RETURN_HOLE,  // load_mode.
8724             store_mode);
8725         Push(value);
8726         Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
8727         ast_context()->ReturnValue(Pop());
8728         return true;
8729       }
8730       break;
8731     default:
8732       // Not yet supported for inlining.
8733       break;
8734   }
8735   return false;
8736 }
8737
8738
8739 bool HOptimizedGraphBuilder::TryInlineApiFunctionCall(Call* expr,
8740                                                       HValue* receiver) {
8741   Handle<JSFunction> function = expr->target();
8742   int argc = expr->arguments()->length();
8743   SmallMapList receiver_maps;
8744   return TryInlineApiCall(function,
8745                           receiver,
8746                           &receiver_maps,
8747                           argc,
8748                           expr->id(),
8749                           kCallApiFunction);
8750 }
8751
8752
8753 bool HOptimizedGraphBuilder::TryInlineApiMethodCall(
8754     Call* expr,
8755     HValue* receiver,
8756     SmallMapList* receiver_maps) {
8757   Handle<JSFunction> function = expr->target();
8758   int argc = expr->arguments()->length();
8759   return TryInlineApiCall(function,
8760                           receiver,
8761                           receiver_maps,
8762                           argc,
8763                           expr->id(),
8764                           kCallApiMethod);
8765 }
8766
8767
8768 bool HOptimizedGraphBuilder::TryInlineApiGetter(Handle<JSFunction> function,
8769                                                 Handle<Map> receiver_map,
8770                                                 BailoutId ast_id) {
8771   SmallMapList receiver_maps(1, zone());
8772   receiver_maps.Add(receiver_map, zone());
8773   return TryInlineApiCall(function,
8774                           NULL,  // Receiver is on expression stack.
8775                           &receiver_maps,
8776                           0,
8777                           ast_id,
8778                           kCallApiGetter);
8779 }
8780
8781
8782 bool HOptimizedGraphBuilder::TryInlineApiSetter(Handle<JSFunction> function,
8783                                                 Handle<Map> receiver_map,
8784                                                 BailoutId ast_id) {
8785   SmallMapList receiver_maps(1, zone());
8786   receiver_maps.Add(receiver_map, zone());
8787   return TryInlineApiCall(function,
8788                           NULL,  // Receiver is on expression stack.
8789                           &receiver_maps,
8790                           1,
8791                           ast_id,
8792                           kCallApiSetter);
8793 }
8794
8795
8796 bool HOptimizedGraphBuilder::TryInlineApiCall(Handle<JSFunction> function,
8797                                                HValue* receiver,
8798                                                SmallMapList* receiver_maps,
8799                                                int argc,
8800                                                BailoutId ast_id,
8801                                                ApiCallType call_type) {
8802   CallOptimization optimization(function);
8803   if (!optimization.is_simple_api_call()) return false;
8804   Handle<Map> holder_map;
8805   if (call_type == kCallApiFunction) {
8806     // Cannot embed a direct reference to the global proxy map
8807     // as it may be dropped on deserialization.
8808     CHECK(!isolate()->serializer_enabled());
8809     ASSERT_EQ(0, receiver_maps->length());
8810     receiver_maps->Add(handle(
8811         function->context()->global_object()->global_receiver()->map()),
8812         zone());
8813   }
8814   CallOptimization::HolderLookup holder_lookup =
8815       CallOptimization::kHolderNotFound;
8816   Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
8817       receiver_maps->first(), &holder_lookup);
8818   if (holder_lookup == CallOptimization::kHolderNotFound) return false;
8819
8820   if (FLAG_trace_inlining) {
8821     PrintF("Inlining api function ");
8822     function->ShortPrint();
8823     PrintF("\n");
8824   }
8825
8826   bool drop_extra = false;
8827   bool is_store = false;
8828   switch (call_type) {
8829     case kCallApiFunction:
8830     case kCallApiMethod:
8831       // Need to check that none of the receiver maps could have changed.
8832       Add<HCheckMaps>(receiver, receiver_maps);
8833       // Need to ensure the chain between receiver and api_holder is intact.
8834       if (holder_lookup == CallOptimization::kHolderFound) {
8835         AddCheckPrototypeMaps(api_holder, receiver_maps->first());
8836       } else {
8837         ASSERT_EQ(holder_lookup, CallOptimization::kHolderIsReceiver);
8838       }
8839       // Includes receiver.
8840       PushArgumentsFromEnvironment(argc + 1);
8841       // Drop function after call.
8842       drop_extra = true;
8843       break;
8844     case kCallApiGetter:
8845       // Receiver and prototype chain cannot have changed.
8846       ASSERT_EQ(0, argc);
8847       ASSERT_EQ(NULL, receiver);
8848       // Receiver is on expression stack.
8849       receiver = Pop();
8850       Add<HPushArguments>(receiver);
8851       break;
8852     case kCallApiSetter:
8853       {
8854         is_store = true;
8855         // Receiver and prototype chain cannot have changed.
8856         ASSERT_EQ(1, argc);
8857         ASSERT_EQ(NULL, receiver);
8858         // Receiver and value are on expression stack.
8859         HValue* value = Pop();
8860         receiver = Pop();
8861         Add<HPushArguments>(receiver, value);
8862         break;
8863       }
8864   }
8865
8866   HValue* holder = NULL;
8867   switch (holder_lookup) {
8868     case CallOptimization::kHolderFound:
8869       holder = Add<HConstant>(api_holder);
8870       break;
8871     case CallOptimization::kHolderIsReceiver:
8872       holder = receiver;
8873       break;
8874     case CallOptimization::kHolderNotFound:
8875       UNREACHABLE();
8876       break;
8877   }
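       // Materialize the callback's call data and the C++ entry point that the
       // CallApiFunctionStub invokes directly.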
8878   Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
8879   Handle<Object> call_data_obj(api_call_info->data(), isolate());
8880   bool call_data_is_undefined = call_data_obj->IsUndefined();
8881   HValue* call_data = Add<HConstant>(call_data_obj);
8882   ApiFunction fun(v8::ToCData<Address>(api_call_info->callback()));
8883   ExternalReference ref = ExternalReference(&fun,
8884                                             ExternalReference::DIRECT_API_CALL,
8885                                             isolate());
8886   HValue* api_function_address = Add<HConstant>(ExternalReference(ref));
8887
8888   HValue* op_vals[] = {
8889     Add<HConstant>(function),
8890     call_data,
8891     holder,
8892     api_function_address,
8893     context()
8894   };
8895
8896   CallInterfaceDescriptor* descriptor =
8897       isolate()->call_descriptor(Isolate::ApiFunctionCall);
8898
8899   CallApiFunctionStub stub(isolate(), is_store, call_data_is_undefined, argc);
8900   Handle<Code> code = stub.GetCode();
8901   HConstant* code_value = Add<HConstant>(code);
8902
8903   ASSERT((sizeof(op_vals) / kPointerSize) ==
8904          descriptor->environment_length());
8905
8906   HInstruction* call = New<HCallWithDescriptor>(
8907       code_value, argc + 1, descriptor,
8908       Vector<HValue*>(op_vals, descriptor->environment_length()));
8909
8910   if (drop_extra) Drop(1);  // Drop function.
8911   ast_context()->ReturnInstruction(call, ast_id);
8912   return true;
8913 }
8914
8915
8916 bool HOptimizedGraphBuilder::TryCallApply(Call* expr) {
8917   ASSERT(expr->expression()->IsProperty());
8918
8919   if (!expr->IsMonomorphic()) {
8920     return false;
8921   }
8922   Handle<Map> function_map = expr->GetReceiverTypes()->first();
8923   if (function_map->instance_type() != JS_FUNCTION_TYPE ||
8924       !expr->target()->shared()->HasBuiltinFunctionId() ||
8925       expr->target()->shared()->builtin_function_id() != kFunctionApply) {
8926     return false;
8927   }
8928
8929   if (current_info()->scope()->arguments() == NULL) return false;
8930
8931   ZoneList<Expression*>* args = expr->arguments();
8932   if (args->length() != 2) return false;
8933
8934   VariableProxy* arg_two = args->at(1)->AsVariableProxy();
8935   if (arg_two == NULL || !arg_two->var()->IsStackAllocated()) return false;
8936   HValue* arg_two_value = LookupAndMakeLive(arg_two->var());
8937   if (!arg_two_value->CheckFlag(HValue::kIsArguments)) return false;
8938
8939   // Found pattern f.apply(receiver, arguments).
8940   CHECK_ALIVE_OR_RETURN(VisitForValue(args->at(0)), true);
8941   HValue* receiver = Pop();  // receiver
8942   HValue* function = Pop();  // f
8943   Drop(1);  // apply
8944
8945   HValue* checked_function = AddCheckMap(function, function_map);
8946
8947   if (function_state()->outer() == NULL) {
8948     HInstruction* elements = Add<HArgumentsElements>(false);
8949     HInstruction* length = Add<HArgumentsLength>(elements);
8950     HValue* wrapped_receiver = BuildWrapReceiver(receiver, checked_function);
8951     HInstruction* result = New<HApplyArguments>(function,
8952                                                 wrapped_receiver,
8953                                                 length,
8954                                                 elements);
8955     ast_context()->ReturnInstruction(result, expr->id());
8956     return true;
8957   } else {
8958     // We are inside an inlined function and know exactly what the arguments
8959     // object contains, but we must still be able to materialize it at deopt.
8960     ASSERT_EQ(environment()->arguments_environment()->parameter_count(),
8961               function_state()->entry()->arguments_object()->arguments_count());
8962     HArgumentsObject* args = function_state()->entry()->arguments_object();
8963     const ZoneList<HValue*>* arguments_values = args->arguments_values();
8964     int arguments_count = arguments_values->length();
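         // The materialized arguments include the receiver at index 0; push the
         // wrapped receiver instead and copy the remaining values below.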
8965     Push(function);
8966     Push(BuildWrapReceiver(receiver, checked_function));
8967     for (int i = 1; i < arguments_count; i++) {
8968       Push(arguments_values->at(i));
8969     }
8970
8971     Handle<JSFunction> known_function;
8972     if (function->IsConstant() &&
8973         HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
8974       known_function = Handle<JSFunction>::cast(
8975           HConstant::cast(function)->handle(isolate()));
8976       int args_count = arguments_count - 1;  // Excluding receiver.
8977       if (TryInlineApply(known_function, expr, args_count)) return true;
8978     }
8979
8980     PushArgumentsFromEnvironment(arguments_count);
8981     HInvokeFunction* call = New<HInvokeFunction>(
8982         function, known_function, arguments_count);
8983     Drop(1);  // Function.
8984     ast_context()->ReturnInstruction(call, expr->id());
8985     return true;
8986   }
8987 }
8988
8989
8990 HValue* HOptimizedGraphBuilder::ImplicitReceiverFor(HValue* function,
8991                                                     Handle<JSFunction> target) {
8992   SharedFunctionInfo* shared = target->shared();
8993   if (shared->strict_mode() == SLOPPY && !shared->native()) {
8994     // Cannot embed a direct reference to the global proxy
8995     // as it is dropped on deserialization.
8996     CHECK(!isolate()->serializer_enabled());
8997     Handle<JSObject> global_receiver(
8998         target->context()->global_object()->global_receiver());
8999     return Add<HConstant>(global_receiver);
9000   }
9001   return graph()->GetConstantUndefined();
9002 }
9003
9004
9005 void HOptimizedGraphBuilder::BuildArrayCall(Expression* expression,
9006                                             int arguments_count,
9007                                             HValue* function,
9008                                             Handle<AllocationSite> site) {
9009   Add<HCheckValue>(function, array_function());
9010
9011   if (IsCallArrayInlineable(arguments_count, site)) {
9012     BuildInlinedCallArray(expression, arguments_count, site);
9013     return;
9014   }
9015
9016   HInstruction* call = PreProcessCall(New<HCallNewArray>(
9017       function, arguments_count + 1, site->GetElementsKind()));
9018   if (expression->IsCall()) {
9019     Drop(1);
9020   }
9021   ast_context()->ReturnInstruction(call, expression->id());
9022 }
9023
9024
9025 HValue* HOptimizedGraphBuilder::BuildArrayIndexOf(HValue* receiver,
9026                                                   HValue* search_element,
9027                                                   ElementsKind kind,
9028                                                   ArrayIndexOfMode mode) {
9029   ASSERT(IsFastElementsKind(kind));
9030
9031   NoObservableSideEffectsScope no_effects(this);
9032
9033   HValue* elements = AddLoadElements(receiver);
9034   HValue* length = AddLoadArrayLength(receiver, kind);
9035
9036   HValue* initial;
9037   HValue* terminating;
9038   Token::Value token;
9039   LoopBuilder::Direction direction;
9040   if (mode == kFirstIndexOf) {
9041     initial = graph()->GetConstant0();
9042     terminating = length;
9043     token = Token::LT;
9044     direction = LoopBuilder::kPostIncrement;
9045   } else {
9046     ASSERT_EQ(kLastIndexOf, mode);
9047     initial = length;
9048     terminating = graph()->GetConstant0();
9049     token = Token::GT;
9050     direction = LoopBuilder::kPreDecrement;
9051   }
9052
9053   Push(graph()->GetConstantMinus1());
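       // The -1 just pushed is the "not found" result; a matching index will
       // replace it below.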
9054   if (IsFastDoubleElementsKind(kind) || IsFastSmiElementsKind(kind)) {
9055     LoopBuilder loop(this, context(), direction);
9056     {
9057       HValue* index = loop.BeginBody(initial, terminating, token);
9058       HValue* element = AddUncasted<HLoadKeyed>(
9059           elements, index, static_cast<HValue*>(NULL),
9060           kind, ALLOW_RETURN_HOLE);
9061       IfBuilder if_issame(this);
9062       if (IsFastDoubleElementsKind(kind)) {
9063         if_issame.If<HCompareNumericAndBranch>(
9064             element, search_element, Token::EQ_STRICT);
9065       } else {
9066         if_issame.If<HCompareObjectEqAndBranch>(element, search_element);
9067       }
9068       if_issame.Then();
9069       {
9070         Drop(1);
9071         Push(index);
9072         loop.Break();
9073       }
9074       if_issame.End();
9075     }
9076     loop.EndBody();
9077   } else {
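         // For object elements the search element may be a string, a number, or
         // any other value, so dispatch on its type to pick the right comparison.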
9078     IfBuilder if_isstring(this);
9079     if_isstring.If<HIsStringAndBranch>(search_element);
9080     if_isstring.Then();
9081     {
9082       LoopBuilder loop(this, context(), direction);
9083       {
9084         HValue* index = loop.BeginBody(initial, terminating, token);
9085         HValue* element = AddUncasted<HLoadKeyed>(
9086             elements, index, static_cast<HValue*>(NULL),
9087             kind, ALLOW_RETURN_HOLE);
9088         IfBuilder if_issame(this);
9089         if_issame.If<HIsStringAndBranch>(element);
9090         if_issame.AndIf<HStringCompareAndBranch>(
9091             element, search_element, Token::EQ_STRICT);
9092         if_issame.Then();
9093         {
9094           Drop(1);
9095           Push(index);
9096           loop.Break();
9097         }
9098         if_issame.End();
9099       }
9100       loop.EndBody();
9101     }
9102     if_isstring.Else();
9103     {
9104       IfBuilder if_isnumber(this);
9105       if_isnumber.If<HIsSmiAndBranch>(search_element);
9106       if_isnumber.OrIf<HCompareMap>(
9107           search_element, isolate()->factory()->heap_number_map());
9108       if_isnumber.Then();
9109       {
9110         HValue* search_number =
9111             AddUncasted<HForceRepresentation>(search_element,
9112                                               Representation::Double());
9113         LoopBuilder loop(this, context(), direction);
9114         {
9115           HValue* index = loop.BeginBody(initial, terminating, token);
9116           HValue* element = AddUncasted<HLoadKeyed>(
9117               elements, index, static_cast<HValue*>(NULL),
9118               kind, ALLOW_RETURN_HOLE);
9119
9120           IfBuilder if_element_isnumber(this);
9121           if_element_isnumber.If<HIsSmiAndBranch>(element);
9122           if_element_isnumber.OrIf<HCompareMap>(
9123               element, isolate()->factory()->heap_number_map());
9124           if_element_isnumber.Then();
9125           {
9126             HValue* number =
9127                 AddUncasted<HForceRepresentation>(element,
9128                                                   Representation::Double());
9129             IfBuilder if_issame(this);
9130             if_issame.If<HCompareNumericAndBranch>(
9131                 number, search_number, Token::EQ_STRICT);
9132             if_issame.Then();
9133             {
9134               Drop(1);
9135               Push(index);
9136               loop.Break();
9137             }
9138             if_issame.End();
9139           }
9140           if_element_isnumber.End();
9141         }
9142         loop.EndBody();
9143       }
9144       if_isnumber.Else();
9145       {
9146         LoopBuilder loop(this, context(), direction);
9147         {
9148           HValue* index = loop.BeginBody(initial, terminating, token);
9149           HValue* element = AddUncasted<HLoadKeyed>(
9150               elements, index, static_cast<HValue*>(NULL),
9151               kind, ALLOW_RETURN_HOLE);
9152           IfBuilder if_issame(this);
9153           if_issame.If<HCompareObjectEqAndBranch>(
9154               element, search_element);
9155           if_issame.Then();
9156           {
9157             Drop(1);
9158             Push(index);
9159             loop.Break();
9160           }
9161           if_issame.End();
9162         }
9163         loop.EndBody();
9164       }
9165       if_isnumber.End();
9166     }
9167     if_isstring.End();
9168   }
9169
9170   return Pop();
9171 }
9172
9173
9174 bool HOptimizedGraphBuilder::TryHandleArrayCall(Call* expr, HValue* function) {
9175   if (!array_function().is_identical_to(expr->target())) {
9176     return false;
9177   }
9178
9179   Handle<AllocationSite> site = expr->allocation_site();
9180   if (site.is_null()) return false;
9181
9182   BuildArrayCall(expr,
9183                  expr->arguments()->length(),
9184                  function,
9185                  site);
9186   return true;
9187 }
9188
9189
9190 bool HOptimizedGraphBuilder::TryHandleArrayCallNew(CallNew* expr,
9191                                                    HValue* function) {
9192   if (!array_function().is_identical_to(expr->target())) {
9193     return false;
9194   }
9195
9196   BuildArrayCall(expr,
9197                  expr->arguments()->length(),
9198                  function,
9199                  expr->allocation_site());
9200   return true;
9201 }
9202
9203
9204 void HOptimizedGraphBuilder::VisitCall(Call* expr) {
9205   ASSERT(!HasStackOverflow());
9206   ASSERT(current_block() != NULL);
9207   ASSERT(current_block()->HasPredecessor());
9208   Expression* callee = expr->expression();
9209   int argument_count = expr->arguments()->length() + 1;  // Plus receiver.
9210   HInstruction* call = NULL;
9211
9212   Property* prop = callee->AsProperty();
9213   if (prop != NULL) {
9214     CHECK_ALIVE(VisitForValue(prop->obj()));
9215     HValue* receiver = Top();
9216
9217     SmallMapList* types;
9218     ComputeReceiverTypes(expr, receiver, &types, zone());
9219
9220     if (prop->key()->IsPropertyName() && types->length() > 0) {
9221       Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
9222       PropertyAccessInfo info(this, LOAD, ToType(types->first()), name,
9223                               types->first()->instance_type());
9224       if (!info.CanAccessAsMonomorphic(types)) {
9225         HandlePolymorphicCallNamed(expr, receiver, types, name);
9226         return;
9227       }
9228     }
9229
9230     HValue* key = NULL;
9231     if (!prop->key()->IsPropertyName()) {
9232       CHECK_ALIVE(VisitForValue(prop->key()));
9233       key = Pop();
9234     }
9235
9236     CHECK_ALIVE(PushLoad(prop, receiver, key));
9237     HValue* function = Pop();
9238
9239     if (FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
9240
9241     // Push the function under the receiver.
9242     environment()->SetExpressionStackAt(0, function);
9243
9244     Push(receiver);
9245
9246     if (function->IsConstant() &&
9247         HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
9248       Handle<JSFunction> known_function = Handle<JSFunction>::cast(
9249           HConstant::cast(function)->handle(isolate()));
9250       expr->set_target(known_function);
9251
9252       if (TryCallApply(expr)) return;
9253       CHECK_ALIVE(VisitExpressions(expr->arguments()));
9254
9255       Handle<Map> map = types->length() == 1 ? types->first() : Handle<Map>();
9256       if (TryInlineBuiltinMethodCall(expr, receiver, map)) {
9257         if (FLAG_trace_inlining) {
9258           PrintF("Inlining builtin ");
9259           known_function->ShortPrint();
9260           PrintF("\n");
9261         }
9262         return;
9263       }
9264       if (TryInlineApiMethodCall(expr, receiver, types)) return;
9265
9266       // Wrap the receiver if necessary.
9267       if (NeedsWrappingFor(ToType(types->first()), known_function)) {
9268         // Since HWrapReceiver currently cannot actually wrap numbers and
9269         // strings, use the regular CallFunctionStub for method calls to wrap
9270         // the receiver.
9271         // TODO(verwaest): Support creation of value wrappers directly in
9272         // HWrapReceiver.
9273         call = New<HCallFunction>(
9274             function, argument_count, WRAP_AND_CALL);
9275       } else if (TryInlineCall(expr)) {
9276         return;
9277       } else {
9278         call = BuildCallConstantFunction(known_function, argument_count);
9279       }
9280
9281     } else {
9282       CHECK_ALIVE(VisitExpressions(expr->arguments()));
9283       CallFunctionFlags flags = receiver->type().IsJSObject()
9284           ? NO_CALL_FUNCTION_FLAGS : CALL_AS_METHOD;
9285       call = New<HCallFunction>(function, argument_count, flags);
9286     }
9287     PushArgumentsFromEnvironment(argument_count);
9288
9289   } else {
9290     VariableProxy* proxy = expr->expression()->AsVariableProxy();
9291     if (proxy != NULL && proxy->var()->is_possibly_eval(isolate())) {
9292       return Bailout(kPossibleDirectCallToEval);
9293     }
9294
9295     // The function is on the stack in the unoptimized code during
9296     // evaluation of the arguments.
9297     CHECK_ALIVE(VisitForValue(expr->expression()));
9298     HValue* function = Top();
9299     if (expr->global_call()) {
9300       Variable* var = proxy->var();
9301       bool known_global_function = false;
9302       // If there is a global property cell for the name at compile time and
9303       // access checks are not enabled, we assume that the function will not
9304       // change and generate optimized code for calling the function.
9305       LookupResult lookup(isolate());
9306       GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, LOAD);
9307       if (type == kUseCell &&
9308           !current_info()->global_object()->IsAccessCheckNeeded()) {
9309         Handle<GlobalObject> global(current_info()->global_object());
9310         known_global_function = expr->ComputeGlobalTarget(global, &lookup);
9311       }
9312       if (known_global_function) {
9313         Add<HCheckValue>(function, expr->target());
9314
9315         // Placeholder for the receiver.
9316         Push(graph()->GetConstantUndefined());
9317         CHECK_ALIVE(VisitExpressions(expr->arguments()));
9318
9319         // Patch the global object on the stack by the expected receiver.
9320         // Patch the global object on the stack with the expected receiver.
9321         const int receiver_index = argument_count - 1;
9322         environment()->SetExpressionStackAt(receiver_index, receiver);
9323
9324         if (TryInlineBuiltinFunctionCall(expr)) {
9325           if (FLAG_trace_inlining) {
9326             PrintF("Inlining builtin ");
9327             expr->target()->ShortPrint();
9328             PrintF("\n");
9329           }
9330           return;
9331         }
9332         if (TryInlineApiFunctionCall(expr, receiver)) return;
9333         if (TryHandleArrayCall(expr, function)) return;
9334         if (TryInlineCall(expr)) return;
9335
9336         PushArgumentsFromEnvironment(argument_count);
9337         call = BuildCallConstantFunction(expr->target(), argument_count);
9338       } else {
9339         Push(graph()->GetConstantUndefined());
9340         CHECK_ALIVE(VisitExpressions(expr->arguments()));
9341         PushArgumentsFromEnvironment(argument_count);
9342         call = New<HCallFunction>(function, argument_count);
9343       }
9344
9345     } else if (expr->IsMonomorphic()) {
9346       Add<HCheckValue>(function, expr->target());
9347
9348       Push(graph()->GetConstantUndefined());
9349       CHECK_ALIVE(VisitExpressions(expr->arguments()));
9350
9351       HValue* receiver = ImplicitReceiverFor(function, expr->target());
9352       const int receiver_index = argument_count - 1;
9353       environment()->SetExpressionStackAt(receiver_index, receiver);
9354
9355       if (TryInlineBuiltinFunctionCall(expr)) {
9356         if (FLAG_trace_inlining) {
9357           PrintF("Inlining builtin ");
9358           expr->target()->ShortPrint();
9359           PrintF("\n");
9360         }
9361         return;
9362       }
9363       if (TryInlineApiFunctionCall(expr, receiver)) return;
9364
9365       if (TryInlineCall(expr)) return;
9366
9367       call = PreProcessCall(New<HInvokeFunction>(
9368           function, expr->target(), argument_count));
9369
9370     } else {
9371       Push(graph()->GetConstantUndefined());
9372       CHECK_ALIVE(VisitExpressions(expr->arguments()));
9373       PushArgumentsFromEnvironment(argument_count);
9374       call = New<HCallFunction>(function, argument_count);
9375     }
9376   }
9377
9378   Drop(1);  // Drop the function.
9379   return ast_context()->ReturnInstruction(call, expr->id());
9380 }
9381
9382
9383 void HOptimizedGraphBuilder::BuildInlinedCallArray(
9384     Expression* expression,
9385     int argument_count,
9386     Handle<AllocationSite> site) {
9387   ASSERT(!site.is_null());
9388   ASSERT(argument_count >= 0 && argument_count <= 1);
9389   NoObservableSideEffectsScope no_effects(this);
9390
9391   // We should at least have the constructor on the expression stack.
9392   HValue* constructor = environment()->ExpressionStackAt(argument_count);
9393
9394   // Register on the site for deoptimization if the transition feedback changes.
9395   AllocationSite::AddDependentCompilationInfo(
9396       site, AllocationSite::TRANSITIONS, top_info());
9397   ElementsKind kind = site->GetElementsKind();
9398   HInstruction* site_instruction = Add<HConstant>(site);
9399
9400   // In the single constant argument case, we may have to adjust elements kind
9401   // to avoid creating a packed non-empty array.
9402   if (argument_count == 1 && !IsHoleyElementsKind(kind)) {
9403     HValue* argument = environment()->Top();
9404     if (argument->IsConstant()) {
9405       HConstant* constant_argument = HConstant::cast(argument);
9406       ASSERT(constant_argument->HasSmiValue());
9407       int constant_array_size = constant_argument->Integer32Value();
9408       if (constant_array_size != 0) {
9409         kind = GetHoleyElementsKind(kind);
9410       }
9411     }
9412   }
9413
9414   // Build the array.
9415   JSArrayBuilder array_builder(this,
9416                                kind,
9417                                site_instruction,
9418                                constructor,
9419                                DISABLE_ALLOCATION_SITES);
9420   HValue* new_object = argument_count == 0
9421       ? array_builder.AllocateEmptyArray()
9422       : BuildAllocateArrayFromLength(&array_builder, Top());
9423
9424   int args_to_drop = argument_count + (expression->IsCall() ? 2 : 1);
9425   Drop(args_to_drop);
9426   ast_context()->ReturnValue(new_object);
9427 }
9428
9429
9430 // Checks whether allocation using the given constructor can be inlined.
9431 static bool IsAllocationInlineable(Handle<JSFunction> constructor) {
9432   return constructor->has_initial_map() &&
9433       constructor->initial_map()->instance_type() == JS_OBJECT_TYPE &&
9434       constructor->initial_map()->instance_size() < HAllocate::kMaxInlineSize &&
9435       constructor->initial_map()->InitialPropertiesLength() == 0;
9436 }
9437
9438
9439 bool HOptimizedGraphBuilder::IsCallArrayInlineable(
9440     int argument_count,
9441     Handle<AllocationSite> site) {
9442   Handle<JSFunction> caller = current_info()->closure();
9443   Handle<JSFunction> target = array_function();
9444   // We should have the function plus array arguments on the environment stack.
9445   ASSERT(environment()->length() >= (argument_count + 1));
9446   ASSERT(!site.is_null());
9447
9448   bool inline_ok = false;
9449   if (site->CanInlineCall()) {
9450     // We also want to avoid inlining in certain 1 argument scenarios.
9451     if (argument_count == 1) {
9452       HValue* argument = Top();
9453       if (argument->IsConstant()) {
9454         // Do not inline if the constant length argument is not a smi or
9455         // outside the valid range for unrolled loop initialization.
9456         HConstant* constant_argument = HConstant::cast(argument);
9457         if (constant_argument->HasSmiValue()) {
9458           int value = constant_argument->Integer32Value();
9459           inline_ok = value >= 0 && value <= kElementLoopUnrollThreshold;
9460           if (!inline_ok) {
9461             TraceInline(target, caller,
9462                         "Constant length outside of valid inlining range.");
9463           }
9464         }
9465       } else {
9466         TraceInline(target, caller,
9467                     "Don't inline [new] Array(n) where n isn't constant.");
9468       }
9469     } else if (argument_count == 0) {
9470       inline_ok = true;
9471     } else {
9472       TraceInline(target, caller, "Too many arguments to inline.");
9473     }
9474   } else {
9475     TraceInline(target, caller, "AllocationSite requested no inlining.");
9476   }
9477
9478   if (inline_ok) {
9479     TraceInline(target, caller, NULL);
9480   }
9481   return inline_ok;
9482 }
9483
9484
9485 void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
9486   ASSERT(!HasStackOverflow());
9487   ASSERT(current_block() != NULL);
9488   ASSERT(current_block()->HasPredecessor());
9489   if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
9490   int argument_count = expr->arguments()->length() + 1;  // Plus constructor.
9491   Factory* factory = isolate()->factory();
9492
9493   // The constructor function is on the stack in the unoptimized code
9494   // during evaluation of the arguments.
9495   CHECK_ALIVE(VisitForValue(expr->expression()));
9496   HValue* function = Top();
9497   CHECK_ALIVE(VisitExpressions(expr->arguments()));
9498
9499   if (FLAG_inline_construct &&
9500       expr->IsMonomorphic() &&
9501       IsAllocationInlineable(expr->target())) {
9502     Handle<JSFunction> constructor = expr->target();
9503     HValue* check = Add<HCheckValue>(function, constructor);
9504
9505     // Force completion of inobject slack tracking before generating
9506     // allocation code to finalize instance size.
9507     if (constructor->IsInobjectSlackTrackingInProgress()) {
9508       constructor->CompleteInobjectSlackTracking();
9509     }
9510
9511     // Calculate instance size from initial map of constructor.
9512     ASSERT(constructor->has_initial_map());
9513     Handle<Map> initial_map(constructor->initial_map());
9514     int instance_size = initial_map->instance_size();
9515     ASSERT(initial_map->InitialPropertiesLength() == 0);
9516
9517     // Allocate an instance of the implicit receiver object.
9518     HValue* size_in_bytes = Add<HConstant>(instance_size);
9519     HAllocationMode allocation_mode;
9520     if (FLAG_pretenuring_call_new) {
9521       if (FLAG_allocation_site_pretenuring) {
9522         // Try to use pretenuring feedback.
9523         Handle<AllocationSite> allocation_site = expr->allocation_site();
9524         allocation_mode = HAllocationMode(allocation_site);
9525         // Take a dependency on allocation site.
9526         AllocationSite::AddDependentCompilationInfo(allocation_site,
9527                                                     AllocationSite::TENURING,
9528                                                     top_info());
9529       }
9530     }
9531
9532     HAllocate* receiver = BuildAllocate(
9533         size_in_bytes, HType::JSObject(), JS_OBJECT_TYPE, allocation_mode);
9534     receiver->set_known_initial_map(initial_map);
9535
9536     // Initialize map and fields of the newly allocated object.
9537     { NoObservableSideEffectsScope no_effects(this);
9538       ASSERT(initial_map->instance_type() == JS_OBJECT_TYPE);
9539       Add<HStoreNamedField>(receiver,
9540           HObjectAccess::ForMapAndOffset(initial_map, JSObject::kMapOffset),
9541           Add<HConstant>(initial_map));
9542       HValue* empty_fixed_array = Add<HConstant>(factory->empty_fixed_array());
9543       Add<HStoreNamedField>(receiver,
9544           HObjectAccess::ForMapAndOffset(initial_map,
9545                                          JSObject::kPropertiesOffset),
9546           empty_fixed_array);
9547       Add<HStoreNamedField>(receiver,
9548           HObjectAccess::ForMapAndOffset(initial_map,
9549                                          JSObject::kElementsOffset),
9550           empty_fixed_array);
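           // Pre-fill the in-object properties with undefined.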
9551       if (initial_map->inobject_properties() != 0) {
9552         HConstant* undefined = graph()->GetConstantUndefined();
9553         for (int i = 0; i < initial_map->inobject_properties(); i++) {
9554           int property_offset = initial_map->GetInObjectPropertyOffset(i);
9555           Add<HStoreNamedField>(receiver,
9556               HObjectAccess::ForMapAndOffset(initial_map, property_offset),
9557               undefined);
9558         }
9559       }
9560     }
9561
9562     // Replace the constructor function with a newly allocated receiver using
9563     // the index of the receiver from the top of the expression stack.
9564     const int receiver_index = argument_count - 1;
9565     ASSERT(environment()->ExpressionStackAt(receiver_index) == function);
9566     environment()->SetExpressionStackAt(receiver_index, receiver);
9567
9568     if (TryInlineConstruct(expr, receiver)) {
9569       // Inlining worked, add a dependency on the initial map to make sure that
9570       // this code is deoptimized whenever the initial map of the constructor
9571       // changes.
9572       Map::AddDependentCompilationInfo(
9573           initial_map, DependentCode::kInitialMapChangedGroup, top_info());
9574       return;
9575     }
9576
9577     // TODO(mstarzinger): For now we remove the previous HAllocate and all
9578     // corresponding instructions and instead add HPushArguments for the
9579     // arguments in case inlining failed.  What we actually should do is for
9580     // inlining to try to build a subgraph without mutating the parent graph.
9581     HInstruction* instr = current_block()->last();
9582     do {
9583       HInstruction* prev_instr = instr->previous();
9584       instr->DeleteAndReplaceWith(NULL);
9585       instr = prev_instr;
9586     } while (instr != check);
9587     environment()->SetExpressionStackAt(receiver_index, function);
9588     HInstruction* call =
9589       PreProcessCall(New<HCallNew>(function, argument_count));
9590     return ast_context()->ReturnInstruction(call, expr->id());
9591   } else {
9592     // The constructor function is both an operand to the instruction and an
9593     // argument to the construct call.
9594     if (TryHandleArrayCallNew(expr, function)) return;
9595
9596     HInstruction* call =
9597         PreProcessCall(New<HCallNew>(function, argument_count));
9598     return ast_context()->ReturnInstruction(call, expr->id());
9599   }
9600 }
9601
9602
9603 // Support for generating inlined runtime functions.
9604
9605 // Lookup table for generators for runtime calls that are generated inline.
9606 // Elements of the table are member pointers to functions of
9607 // HOptimizedGraphBuilder.
9608 #define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize)  \
9609     &HOptimizedGraphBuilder::Generate##Name,
9610
9611 const HOptimizedGraphBuilder::InlineFunctionGenerator
9612     HOptimizedGraphBuilder::kInlineFunctionGenerators[] = {
9613         INLINE_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
9614         INLINE_OPTIMIZED_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
9615 };
9616 #undef INLINE_FUNCTION_GENERATOR_ADDRESS
9617
9618
9619 template <class ViewClass>
9620 void HGraphBuilder::BuildArrayBufferViewInitialization(
9621     HValue* obj,
9622     HValue* buffer,
9623     HValue* byte_offset,
9624     HValue* byte_length) {
9625
9626   for (int offset = ViewClass::kSize;
9627        offset < ViewClass::kSizeWithInternalFields;
9628        offset += kPointerSize) {
9629     Add<HStoreNamedField>(obj,
9630         HObjectAccess::ForObservableJSObjectOffset(offset),
9631         graph()->GetConstant0());
9632   }
9633
9634   Add<HStoreNamedField>(
9635       obj,
9636       HObjectAccess::ForJSArrayBufferViewByteOffset(),
9637       byte_offset);
9638   Add<HStoreNamedField>(
9639       obj,
9640       HObjectAccess::ForJSArrayBufferViewByteLength(),
9641       byte_length);
9642
9643   if (buffer != NULL) {
9644     Add<HStoreNamedField>(
9645         obj,
9646         HObjectAccess::ForJSArrayBufferViewBuffer(), buffer);
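         // Link the new view into the buffer's weak list of views: the view's
         // weak_next takes the old first view, and the view becomes the new head.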
9647     HObjectAccess weak_first_view_access =
9648         HObjectAccess::ForJSArrayBufferWeakFirstView();
9649     Add<HStoreNamedField>(obj,
9650         HObjectAccess::ForJSArrayBufferViewWeakNext(),
9651         Add<HLoadNamedField>(buffer,
9652                              static_cast<HValue*>(NULL),
9653                              weak_first_view_access));
9654     Add<HStoreNamedField>(buffer, weak_first_view_access, obj);
9655   } else {
9656     Add<HStoreNamedField>(
9657         obj,
9658         HObjectAccess::ForJSArrayBufferViewBuffer(),
9659         Add<HConstant>(static_cast<int32_t>(0)));
9660     Add<HStoreNamedField>(obj,
9661         HObjectAccess::ForJSArrayBufferViewWeakNext(),
9662         graph()->GetConstantUndefined());
9663   }
9664 }
9665
9666
9667 void HOptimizedGraphBuilder::GenerateDataViewInitialize(
9668     CallRuntime* expr) {
9669   ZoneList<Expression*>* arguments = expr->arguments();
9670
9671   ASSERT(arguments->length() == 4);
9672   CHECK_ALIVE(VisitForValue(arguments->at(0)));
9673   HValue* obj = Pop();
9674
9675   CHECK_ALIVE(VisitForValue(arguments->at(1)));
9676   HValue* buffer = Pop();
9677
9678   CHECK_ALIVE(VisitForValue(arguments->at(2)));
9679   HValue* byte_offset = Pop();
9680
9681   CHECK_ALIVE(VisitForValue(arguments->at(3)));
9682   HValue* byte_length = Pop();
9683
9684   {
9685     NoObservableSideEffectsScope scope(this);
9686     BuildArrayBufferViewInitialization<JSDataView>(
9687         obj, buffer, byte_offset, byte_length);
9688   }
9689 }
9690
9691
9692 static Handle<Map> TypedArrayMap(Isolate* isolate,
9693                                  ExternalArrayType array_type,
9694                                  ElementsKind target_kind) {
9695   Handle<Context> native_context = isolate->native_context();
9696   Handle<JSFunction> fun;
9697   switch (array_type) {
9698 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                       \
9699     case kExternal##Type##Array:                                              \
9700       fun = Handle<JSFunction>(native_context->type##_array_fun());           \
9701       break;
9702
9703     TYPED_ARRAYS(TYPED_ARRAY_CASE)
9704 #undef TYPED_ARRAY_CASE
9705   }
9706   Handle<Map> map(fun->initial_map());
9707   return Map::AsElementsKind(map, target_kind);
9708 }
9709
9710
9711 HValue* HOptimizedGraphBuilder::BuildAllocateExternalElements(
9712     ExternalArrayType array_type,
9713     bool is_zero_byte_offset,
9714     HValue* buffer, HValue* byte_offset, HValue* length) {
9715   Handle<Map> external_array_map(
9716       isolate()->heap()->MapForExternalArrayType(array_type));
9717
9718   // The HForceRepresentation is to prevent possible deopt on int-smi
9719   // conversion after allocation but before the new object fields are set.
9720   length = AddUncasted<HForceRepresentation>(length, Representation::Smi());
9721   HValue* elements =
9722       Add<HAllocate>(
9723           Add<HConstant>(ExternalArray::kAlignedSize),
9724           HType::HeapObject(),
9725           NOT_TENURED,
9726           external_array_map->instance_type());
9727
9728   AddStoreMapConstant(elements, external_array_map);
9729   Add<HStoreNamedField>(elements,
9730       HObjectAccess::ForFixedArrayLength(), length);
9731
9732   HValue* backing_store = Add<HLoadNamedField>(
9733       buffer, static_cast<HValue*>(NULL),
9734       HObjectAccess::ForJSArrayBufferBackingStore());
9735
9736   HValue* typed_array_start;
9737   if (is_zero_byte_offset) {
9738     typed_array_start = backing_store;
9739   } else {
9740     HInstruction* external_pointer =
9741         AddUncasted<HAdd>(backing_store, byte_offset);
9742     // Arguments are checked prior to call to TypedArrayInitialize,
9743     // including byte_offset.
9744     external_pointer->ClearFlag(HValue::kCanOverflow);
9745     typed_array_start = external_pointer;
9746   }
9747
9748   Add<HStoreNamedField>(elements,
9749       HObjectAccess::ForExternalArrayExternalPointer(),
9750       typed_array_start);
9751
9752   return elements;
9753 }
9754
9755
9756 HValue* HOptimizedGraphBuilder::BuildAllocateFixedTypedArray(
9757     ExternalArrayType array_type, size_t element_size,
9758     ElementsKind fixed_elements_kind,
9759     HValue* byte_length, HValue* length) {
9760   STATIC_ASSERT(
9761       (FixedTypedArrayBase::kHeaderSize & kObjectAlignmentMask) == 0);
9762   HValue* total_size;
9763
9764   // If the fixed array's elements are not aligned to the object alignment,
9765   // we need to align the whole array to the object alignment.
9766   if (element_size % kObjectAlignment != 0) {
9767     total_size = BuildObjectSizeAlignment(
9768         byte_length, FixedTypedArrayBase::kHeaderSize);
9769   } else {
9770     total_size = AddUncasted<HAdd>(byte_length,
9771         Add<HConstant>(FixedTypedArrayBase::kHeaderSize));
9772     total_size->ClearFlag(HValue::kCanOverflow);
9773   }
9774
9775   // The HForceRepresentation is to prevent possible deopt on int-smi
9776   // conversion after allocation but before the new object fields are set.
9777   length = AddUncasted<HForceRepresentation>(length, Representation::Smi());
9778   Handle<Map> fixed_typed_array_map(
9779       isolate()->heap()->MapForFixedTypedArray(array_type));
9780   HValue* elements =
9781       Add<HAllocate>(total_size, HType::HeapObject(),
9782                      NOT_TENURED, fixed_typed_array_map->instance_type());
9783   AddStoreMapConstant(elements, fixed_typed_array_map);
9784
9785   Add<HStoreNamedField>(elements,
9786       HObjectAccess::ForFixedArrayLength(),
9787       length);
9788
9789   HValue* filler = Add<HConstant>(static_cast<int32_t>(0));
9790   if (IsFixedFloat32x4ElementsKind(fixed_elements_kind)) {
9791     filler = AddUncasted<HNullarySIMDOperation>(kFloat32x4Zero);
9792   } else if (IsFixedFloat64x2ElementsKind(fixed_elements_kind)) {
9793     filler = AddUncasted<HNullarySIMDOperation>(kFloat64x2Zero);
9794   } else if (IsFixedInt32x4ElementsKind(fixed_elements_kind)) {
9795     filler = AddUncasted<HNullarySIMDOperation>(kInt32x4Zero);
9796   }
9797
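       // Initialize every element with the filler (zero, or the SIMD zero for
       // SIMD kinds) so the new backing store holds no stale data.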
9798   {
9799     LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
9800
9801     HValue* key = builder.BeginBody(
9802         Add<HConstant>(static_cast<int32_t>(0)),
9803         length, Token::LT);
9804     Add<HStoreKeyed>(elements, key, filler, fixed_elements_kind);
9805
9806     builder.EndBody();
9807   }
9808   return elements;
9809 }
9810
9811
9812 void HOptimizedGraphBuilder::GenerateTypedArrayInitialize(
9813     CallRuntime* expr) {
9814   ZoneList<Expression*>* arguments = expr->arguments();
9815
9816   static const int kObjectArg = 0;
9817   static const int kArrayIdArg = 1;
9818   static const int kBufferArg = 2;
9819   static const int kByteOffsetArg = 3;
9820   static const int kByteLengthArg = 4;
9821   static const int kArgsLength = 5;
9822   ASSERT(arguments->length() == kArgsLength);
9823
9824
9825   CHECK_ALIVE(VisitForValue(arguments->at(kObjectArg)));
9826   HValue* obj = Pop();
9827
9828   if (!arguments->at(kArrayIdArg)->IsLiteral()) {
9829     // This should never happen in real use, but can happen when fuzzing.
9830     // Just bail out.
9831     Bailout(kNeedSmiLiteral);
9832     return;
9833   }
9834   Handle<Object> value =
9835       static_cast<Literal*>(arguments->at(kArrayIdArg))->value();
9836   if (!value->IsSmi()) {
9837     // This should never happen in real use, but can happen when fuzzing.
9838     // Just bail out.
9839     Bailout(kNeedSmiLiteral);
9840     return;
9841   }
9842   int array_id = Smi::cast(*value)->value();
9843
9844   HValue* buffer;
9845   if (!arguments->at(kBufferArg)->IsNullLiteral()) {
9846     CHECK_ALIVE(VisitForValue(arguments->at(kBufferArg)));
9847     buffer = Pop();
9848   } else {
9849     buffer = NULL;
9850   }
9851
9852   HValue* byte_offset;
9853   bool is_zero_byte_offset;
9854
9855   if (arguments->at(kByteOffsetArg)->IsLiteral()
9856       && Smi::FromInt(0) ==
9857       *static_cast<Literal*>(arguments->at(kByteOffsetArg))->value()) {
9858     byte_offset = Add<HConstant>(static_cast<int32_t>(0));
9859     is_zero_byte_offset = true;
9860   } else {
9861     CHECK_ALIVE(VisitForValue(arguments->at(kByteOffsetArg)));
9862     byte_offset = Pop();
9863     is_zero_byte_offset = false;
9864     ASSERT(buffer != NULL);
9865   }
9866
9867   CHECK_ALIVE(VisitForValue(arguments->at(kByteLengthArg)));
9868   HValue* byte_length = Pop();
9869
9870   NoObservableSideEffectsScope scope(this);
9871   IfBuilder byte_offset_smi(this);
9872
9873   if (!is_zero_byte_offset) {
9874     byte_offset_smi.If<HIsSmiAndBranch>(byte_offset);
9875     byte_offset_smi.Then();
9876   }
9877
9878   ExternalArrayType array_type =
9879       kExternalInt8Array;  // Bogus initialization.
9880   size_t element_size = 1;  // Bogus initialization.
9881   ElementsKind external_elements_kind =  // Bogus initialization.
9882       EXTERNAL_INT8_ELEMENTS;
9883   ElementsKind fixed_elements_kind =  // Bogus initialization.
9884       INT8_ELEMENTS;
9885   Runtime::ArrayIdToTypeAndSize(array_id,
9886       &array_type,
9887       &external_elements_kind,
9888       &fixed_elements_kind,
9889       &element_size);
9890
9891
9892   { //  byte_offset is Smi.
9893     BuildArrayBufferViewInitialization<JSTypedArray>(
9894         obj, buffer, byte_offset, byte_length);
9895
9896
9897     HInstruction* length = AddUncasted<HDiv>(byte_length,
9898         Add<HConstant>(static_cast<int32_t>(element_size)));
9899
9900     Add<HStoreNamedField>(obj,
9901         HObjectAccess::ForJSTypedArrayLength(),
9902         length);
9903
9904     HValue* elements;
9905     if (buffer != NULL) {
9906       elements = BuildAllocateExternalElements(
9907           array_type, is_zero_byte_offset, buffer, byte_offset, length);
9908       Handle<Map> obj_map = TypedArrayMap(
9909           isolate(), array_type, external_elements_kind);
9910       AddStoreMapConstant(obj, obj_map);
9911     } else {
9912       ASSERT(is_zero_byte_offset);
9913       elements = BuildAllocateFixedTypedArray(
9914           array_type, element_size, fixed_elements_kind,
9915           byte_length, length);
9916     }
9917     Add<HStoreNamedField>(
9918         obj, HObjectAccess::ForElementsPointer(), elements);
9919   }
9920
9921   if (!is_zero_byte_offset) {
9922     byte_offset_smi.Else();
9923     { //  byte_offset is not Smi.
9924       Push(obj);
9925       CHECK_ALIVE(VisitForValue(arguments->at(kArrayIdArg)));
9926       Push(buffer);
9927       Push(byte_offset);
9928       Push(byte_length);
9929       PushArgumentsFromEnvironment(kArgsLength);
9930       Add<HCallRuntime>(expr->name(), expr->function(), kArgsLength);
9931     }
9932   }
9933   byte_offset_smi.End();
9934 }
9935
9936
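// Expands the MaxSmi intrinsic to a constant. The value is platform
// dependent: typically 2^30 - 1 on 32-bit targets and 2^31 - 1 on 64-bit
// targets.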
9937 void HOptimizedGraphBuilder::GenerateMaxSmi(CallRuntime* expr) {
9938   ASSERT(expr->arguments()->length() == 0);
9939   HConstant* max_smi = New<HConstant>(static_cast<int32_t>(Smi::kMaxValue));
9940   return ast_context()->ReturnInstruction(max_smi, expr->id());
9941 }
9942
9943
9944 void HOptimizedGraphBuilder::GenerateTypedArrayMaxSizeInHeap(
9945     CallRuntime* expr) {
9946   ASSERT(expr->arguments()->length() == 0);
9947   HConstant* result = New<HConstant>(static_cast<int32_t>(
9948         FLAG_typed_array_max_size_in_heap));
9949   return ast_context()->ReturnInstruction(result, expr->id());
9950 }
9951
9952
9953 void HOptimizedGraphBuilder::GenerateArrayBufferGetByteLength(
9954     CallRuntime* expr) {
9955   ASSERT(expr->arguments()->length() == 1);
9956   CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
9957   HValue* buffer = Pop();
9958   HInstruction* result = New<HLoadNamedField>(
9959     buffer,
9960     static_cast<HValue*>(NULL),
9961     HObjectAccess::ForJSArrayBufferByteLength());
9962   return ast_context()->ReturnInstruction(result, expr->id());
9963 }
9964
9965
9966 void HOptimizedGraphBuilder::GenerateArrayBufferViewGetByteLength(
9967     CallRuntime* expr) {
9968   ASSERT(expr->arguments()->length() == 1);
9969   CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
9970   HValue* buffer = Pop();
9971   HInstruction* result = New<HLoadNamedField>(
9972     buffer,
9973     static_cast<HValue*>(NULL),
9974     HObjectAccess::ForJSArrayBufferViewByteLength());
9975   return ast_context()->ReturnInstruction(result, expr->id());
9976 }
9977
9978
9979 void HOptimizedGraphBuilder::GenerateArrayBufferViewGetByteOffset(
9980     CallRuntime* expr) {
9981   ASSERT(expr->arguments()->length() == 1);
9982   CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
9983   HValue* buffer = Pop();
9984   HInstruction* result = New<HLoadNamedField>(
9985     buffer,
9986     static_cast<HValue*>(NULL),
9987     HObjectAccess::ForJSArrayBufferViewByteOffset());
9988   return ast_context()->ReturnInstruction(result, expr->id());
9989 }
9990
9991
9992 void HOptimizedGraphBuilder::GenerateTypedArrayGetLength(
9993     CallRuntime* expr) {
9994   ASSERT(expr->arguments()->length() == 1);
9995   CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
9996   HValue* buffer = Pop();
9997   HInstruction* result = New<HLoadNamedField>(
9998     buffer,
9999     static_cast<HValue*>(NULL),
10000     HObjectAccess::ForJSTypedArrayLength());
10001   return ast_context()->ReturnInstruction(result, expr->id());
10002 }
10003
10004
10005 void HOptimizedGraphBuilder::VisitCallRuntime(CallRuntime* expr) {
10006   ASSERT(!HasStackOverflow());
10007   ASSERT(current_block() != NULL);
10008   ASSERT(current_block()->HasPredecessor());
10009   if (expr->is_jsruntime()) {
10010     return Bailout(kCallToAJavaScriptRuntimeFunction);
10011   }
10012
10013   const Runtime::Function* function = expr->function();
10014   ASSERT(function != NULL);
10015
10016   if (function->intrinsic_type == Runtime::INLINE ||
10017       function->intrinsic_type == Runtime::INLINE_OPTIMIZED) {
10018     ASSERT(expr->name()->length() > 0);
10019     ASSERT(expr->name()->Get(0) == '_');
10020     // Call to an inline function.
10021     int lookup_index = static_cast<int>(function->function_id) -
10022         static_cast<int>(Runtime::kFirstInlineFunction);
10023     ASSERT(lookup_index >= 0);
10024     ASSERT(static_cast<size_t>(lookup_index) <
10025            ARRAY_SIZE(kInlineFunctionGenerators));
10026     InlineFunctionGenerator generator = kInlineFunctionGenerators[lookup_index];
10027
10028     // Call the inline code generator using the pointer-to-member.
10029     (this->*generator)(expr);
10030   } else {
10031     ASSERT(function->intrinsic_type == Runtime::RUNTIME);
10032     Handle<String> name = expr->name();
10033     int argument_count = expr->arguments()->length();
10034     CHECK_ALIVE(VisitExpressions(expr->arguments()));
10035     PushArgumentsFromEnvironment(argument_count);
10036     HCallRuntime* call = New<HCallRuntime>(name, function,
10037                                            argument_count);
10038     return ast_context()->ReturnInstruction(call, expr->id());
10039   }
10040 }
10041
10042
10043 void HOptimizedGraphBuilder::VisitUnaryOperation(UnaryOperation* expr) {
10044   ASSERT(!HasStackOverflow());
10045   ASSERT(current_block() != NULL);
10046   ASSERT(current_block()->HasPredecessor());
10047   switch (expr->op()) {
10048     case Token::DELETE: return VisitDelete(expr);
10049     case Token::VOID: return VisitVoid(expr);
10050     case Token::TYPEOF: return VisitTypeof(expr);
10051     case Token::NOT: return VisitNot(expr);
10052     default: UNREACHABLE();
10053   }
10054 }
10055
10056
10057 void HOptimizedGraphBuilder::VisitDelete(UnaryOperation* expr) {
10058   Property* prop = expr->expression()->AsProperty();
10059   VariableProxy* proxy = expr->expression()->AsVariableProxy();
10060   if (prop != NULL) {
10061     CHECK_ALIVE(VisitForValue(prop->obj()));
10062     CHECK_ALIVE(VisitForValue(prop->key()));
10063     HValue* key = Pop();
10064     HValue* obj = Pop();
10065     HValue* function = AddLoadJSBuiltin(Builtins::DELETE);
10066     Add<HPushArguments>(obj, key, Add<HConstant>(function_strict_mode()));
10067     // TODO(olivf) InvokeFunction produces a check for the parameter count,
10068     // even though we are certain to pass the correct number of arguments here.
10069     HInstruction* instr = New<HInvokeFunction>(function, 3);
10070     return ast_context()->ReturnInstruction(instr, expr->id());
10071   } else if (proxy != NULL) {
10072     Variable* var = proxy->var();
10073     if (var->IsUnallocated()) {
10074       Bailout(kDeleteWithGlobalVariable);
10075     } else if (var->IsStackAllocated() || var->IsContextSlot()) {
10076       // Result of deleting non-global variables is false.  'this' is not
10077       // really a variable, though we implement it as one.  The
10078       // subexpression does not have side effects.
10079       HValue* value = var->is_this()
10080           ? graph()->GetConstantTrue()
10081           : graph()->GetConstantFalse();
10082       return ast_context()->ReturnValue(value);
10083     } else {
10084       Bailout(kDeleteWithNonGlobalVariable);
10085     }
10086   } else {
10087     // Result of deleting non-property, non-variable reference is true.
10088     // Evaluate the subexpression for side effects.
10089     CHECK_ALIVE(VisitForEffect(expr->expression()));
10090     return ast_context()->ReturnValue(graph()->GetConstantTrue());
10091   }
10092 }
10093
10094
10095 void HOptimizedGraphBuilder::VisitVoid(UnaryOperation* expr) {
10096   CHECK_ALIVE(VisitForEffect(expr->expression()));
10097   return ast_context()->ReturnValue(graph()->GetConstantUndefined());
10098 }
10099
10100
10101 void HOptimizedGraphBuilder::VisitTypeof(UnaryOperation* expr) {
10102   CHECK_ALIVE(VisitForTypeOf(expr->expression()));
10103   HValue* value = Pop();
10104   HInstruction* instr = New<HTypeof>(value);
10105   return ast_context()->ReturnInstruction(instr, expr->id());
10106 }
10107
10108
10109 void HOptimizedGraphBuilder::VisitNot(UnaryOperation* expr) {
10110   if (ast_context()->IsTest()) {
10111     TestContext* context = TestContext::cast(ast_context());
10112     VisitForControl(expr->expression(),
10113                     context->if_false(),
10114                     context->if_true());
10115     return;
10116   }
10117
10118   if (ast_context()->IsEffect()) {
10119     VisitForEffect(expr->expression());
10120     return;
10121   }
10122
10123   ASSERT(ast_context()->IsValue());
10124   HBasicBlock* materialize_false = graph()->CreateBasicBlock();
10125   HBasicBlock* materialize_true = graph()->CreateBasicBlock();
10126   CHECK_BAILOUT(VisitForControl(expr->expression(),
10127                                 materialize_false,
10128                                 materialize_true));
10129
10130   if (materialize_false->HasPredecessor()) {
10131     materialize_false->SetJoinId(expr->MaterializeFalseId());
10132     set_current_block(materialize_false);
10133     Push(graph()->GetConstantFalse());
10134   } else {
10135     materialize_false = NULL;
10136   }
10137
10138   if (materialize_true->HasPredecessor()) {
10139     materialize_true->SetJoinId(expr->MaterializeTrueId());
10140     set_current_block(materialize_true);
10141     Push(graph()->GetConstantTrue());
10142   } else {
10143     materialize_true = NULL;
10144   }
10145
10146   HBasicBlock* join =
10147     CreateJoin(materialize_false, materialize_true, expr->id());
10148   set_current_block(join);
10149   if (join != NULL) return ast_context()->ReturnValue(Pop());
10150 }
10151
10152
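// Lowers ++/-- to an HAdd of +1 or -1 with Smi feedback for the constant
// operand. For example, for a postfix x++ in a value context the builder
// keeps an explicit ToNumber(x) on the stack so the original value can be
// returned while the incremented value is stored.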
10153 HInstruction* HOptimizedGraphBuilder::BuildIncrement(
10154     bool returns_original_input,
10155     CountOperation* expr) {
10156   // The input to the count operation is on top of the expression stack.
10157   Representation rep = Representation::FromType(expr->type());
10158   if (rep.IsNone() || rep.IsTagged()) {
10159     rep = Representation::Smi();
10160   }
10161
10162   if (returns_original_input) {
10163     // We need an explicit HValue representing ToNumber(input).  The
10164     // actual HChange instruction we need is (sometimes) added in a later
10165     // phase, so it is not available now to be used as an input to HAdd and
10166     // as the return value.
10167     HInstruction* number_input = AddUncasted<HForceRepresentation>(Pop(), rep);
10168     if (!rep.IsDouble()) {
10169       number_input->SetFlag(HInstruction::kFlexibleRepresentation);
10170       number_input->SetFlag(HInstruction::kCannotBeTagged);
10171     }
10172     Push(number_input);
10173   }
10174
10175   // The addition has no side effects, so we do not need
10176   // to simulate the expression stack after this instruction.
10177   // Any later failures deopt to the load of the input or earlier.
10178   HConstant* delta = (expr->op() == Token::INC)
10179       ? graph()->GetConstant1()
10180       : graph()->GetConstantMinus1();
10181   HInstruction* instr = AddUncasted<HAdd>(Top(), delta);
10182   if (instr->IsAdd()) {
10183     HAdd* add = HAdd::cast(instr);
10184     add->set_observed_input_representation(1, rep);
10185     add->set_observed_input_representation(2, Representation::Smi());
10186   }
10187   instr->SetFlag(HInstruction::kCannotBeTagged);
10188   instr->ClearAllSideEffects();
10189   return instr;
10190 }
10191
10192
10193 void HOptimizedGraphBuilder::BuildStoreForEffect(Expression* expr,
10194                                                  Property* prop,
10195                                                  BailoutId ast_id,
10196                                                  BailoutId return_id,
10197                                                  HValue* object,
10198                                                  HValue* key,
10199                                                  HValue* value) {
10200   EffectContext for_effect(this);
10201   Push(object);
10202   if (key != NULL) Push(key);
10203   Push(value);
10204   BuildStore(expr, prop, ast_id, return_id);
10205 }
10206
10207
10208 void HOptimizedGraphBuilder::VisitCountOperation(CountOperation* expr) {
10209   ASSERT(!HasStackOverflow());
10210   ASSERT(current_block() != NULL);
10211   ASSERT(current_block()->HasPredecessor());
10212   if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
10213   Expression* target = expr->expression();
10214   VariableProxy* proxy = target->AsVariableProxy();
10215   Property* prop = target->AsProperty();
10216   if (proxy == NULL && prop == NULL) {
10217     return Bailout(kInvalidLhsInCountOperation);
10218   }
10219
10220   // Match the full code generator stack by simulating an extra stack
10221   // element for postfix operations in a non-effect context.  The return
10222   // value is ToNumber(input).
10223   bool returns_original_input =
10224       expr->is_postfix() && !ast_context()->IsEffect();
10225   HValue* input = NULL;  // ToNumber(original_input).
10226   HValue* after = NULL;  // The result after incrementing or decrementing.
10227
10228   if (proxy != NULL) {
10229     Variable* var = proxy->var();
10230     if (var->mode() == CONST_LEGACY) {
10231       return Bailout(kUnsupportedCountOperationWithConst);
10232     }
10233     // Argument of the count operation is a variable, not a property.
10234     ASSERT(prop == NULL);
10235     CHECK_ALIVE(VisitForValue(target));
10236
10237     after = BuildIncrement(returns_original_input, expr);
10238     input = returns_original_input ? Top() : Pop();
10239     Push(after);
10240
10241     switch (var->location()) {
10242       case Variable::UNALLOCATED:
10243         HandleGlobalVariableAssignment(var,
10244                                        after,
10245                                        expr->AssignmentId());
10246         break;
10247
10248       case Variable::PARAMETER:
10249       case Variable::LOCAL:
10250         BindIfLive(var, after);
10251         break;
10252
10253       case Variable::CONTEXT: {
10254         // Bail out if we try to mutate a parameter value in a function
10255         // using the arguments object.  We do not (yet) correctly handle the
10256         // arguments property of the function.
10257         if (current_info()->scope()->arguments() != NULL) {
10258           // Parameters will rewrite to context slots.  We have no direct
10259           // way to detect that the variable is a parameter so we use a
10260           // linear search of the parameter list.
10261           int count = current_info()->scope()->num_parameters();
10262           for (int i = 0; i < count; ++i) {
10263             if (var == current_info()->scope()->parameter(i)) {
10264               return Bailout(kAssignmentToParameterInArgumentsObject);
10265             }
10266           }
10267         }
10268
10269         HValue* context = BuildContextChainWalk(var);
10270         HStoreContextSlot::Mode mode = IsLexicalVariableMode(var->mode())
10271             ? HStoreContextSlot::kCheckDeoptimize : HStoreContextSlot::kNoCheck;
10272         HStoreContextSlot* instr = Add<HStoreContextSlot>(context, var->index(),
10273                                                           mode, after);
10274         if (instr->HasObservableSideEffects()) {
10275           Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
10276         }
10277         break;
10278       }
10279
10280       case Variable::LOOKUP:
10281         return Bailout(kLookupVariableInCountOperation);
10282     }
10283
10284     Drop(returns_original_input ? 2 : 1);
10285     return ast_context()->ReturnValue(expr->is_postfix() ? input : after);
10286   }
10287
10288   // Argument of the count operation is a property.
10289   ASSERT(prop != NULL);
10290   if (returns_original_input) Push(graph()->GetConstantUndefined());
10291
10292   CHECK_ALIVE(VisitForValue(prop->obj()));
10293   HValue* object = Top();
10294
10295   HValue* key = NULL;
10296   if ((!prop->IsFunctionPrototype() && !prop->key()->IsPropertyName()) ||
10297       prop->IsStringAccess()) {
10298     CHECK_ALIVE(VisitForValue(prop->key()));
10299     key = Top();
10300   }
10301
10302   CHECK_ALIVE(PushLoad(prop, object, key));
10303
10304   after = BuildIncrement(returns_original_input, expr);
10305
10306   if (returns_original_input) {
10307     input = Pop();
10308     // Drop object and key to push it again in the effect context below.
10309     Drop(key == NULL ? 1 : 2);
10310     environment()->SetExpressionStackAt(0, input);
10311     CHECK_ALIVE(BuildStoreForEffect(
10312         expr, prop, expr->id(), expr->AssignmentId(), object, key, after));
10313     return ast_context()->ReturnValue(Pop());
10314   }
10315
10316   environment()->SetExpressionStackAt(0, after);
10317   return BuildStore(expr, prop, expr->id(), expr->AssignmentId());
10318 }
10319
10320
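// Builds a charCodeAt on |string| at |index|, constant-folding when both are
// constants: e.g. index 1 into the constant string "abc" folds to the
// constant 98, and an out-of-range index folds to NaN. Otherwise a bounds
// check and an HStringCharCodeAt are emitted.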
10321 HInstruction* HOptimizedGraphBuilder::BuildStringCharCodeAt(
10322     HValue* string,
10323     HValue* index) {
10324   if (string->IsConstant() && index->IsConstant()) {
10325     HConstant* c_string = HConstant::cast(string);
10326     HConstant* c_index = HConstant::cast(index);
10327     if (c_string->HasStringValue() && c_index->HasNumberValue()) {
10328       int32_t i = c_index->NumberValueAsInteger32();
10329       Handle<String> s = c_string->StringValue();
10330       if (i < 0 || i >= s->length()) {
10331         return New<HConstant>(OS::nan_value());
10332       }
10333       return New<HConstant>(s->Get(i));
10334     }
10335   }
10336   string = BuildCheckString(string);
10337   index = Add<HBoundsCheck>(index, AddLoadStringLength(string));
10338   return New<HStringCharCodeAt>(string, index);
10339 }
10340
10341
10342 // Checks if the given shift amounts have one of the following forms:
10343 // (N1) and (N2) with N1 + N2 = 32; (sa) and (32 - sa).
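// For example, the constant pair (5, 27) qualifies since 5 + 27 == 32, and so
// does the symbolic pair (sa, 32 - sa) where the subtraction is an explicit
// HSub in the graph.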
10344 static bool ShiftAmountsAllowReplaceByRotate(HValue* sa,
10345                                              HValue* const32_minus_sa) {
10346   if (sa->IsConstant() && const32_minus_sa->IsConstant()) {
10347     const HConstant* c1 = HConstant::cast(sa);
10348     const HConstant* c2 = HConstant::cast(const32_minus_sa);
10349     return c1->HasInteger32Value() && c2->HasInteger32Value() &&
10350         (c1->Integer32Value() + c2->Integer32Value() == 32);
10351   }
10352   if (!const32_minus_sa->IsSub()) return false;
10353   HSub* sub = HSub::cast(const32_minus_sa);
10354   return sub->left()->EqualsInteger32Constant(32) && sub->right() == sa;
10355 }
10356
10357
10358 // Checks whether the left and the right operands are shift instructions in
10359 // opposite directions that can be replaced by one rotate right instruction.
10360 // Returns the operand and the shift amount for the rotate instruction in the
10361 // former case.
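// For example, (x << 5) | (x >>> 27) matches and can be emitted as a single
// rotate right of x by 27 (equivalently a rotate left by 5); the same holds
// for a non-constant amount, as in (x << n) | (x >>> (32 - n)).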
10362 bool HGraphBuilder::MatchRotateRight(HValue* left,
10363                                      HValue* right,
10364                                      HValue** operand,
10365                                      HValue** shift_amount) {
10366   HShl* shl;
10367   HShr* shr;
10368   if (left->IsShl() && right->IsShr()) {
10369     shl = HShl::cast(left);
10370     shr = HShr::cast(right);
10371   } else if (left->IsShr() && right->IsShl()) {
10372     shl = HShl::cast(right);
10373     shr = HShr::cast(left);
10374   } else {
10375     return false;
10376   }
10377   if (shl->left() != shr->left()) return false;
10378
10379   if (!ShiftAmountsAllowReplaceByRotate(shl->right(), shr->right()) &&
10380       !ShiftAmountsAllowReplaceByRotate(shr->right(), shl->right())) {
10381     return false;
10382   }
10383   *operand = shr->left();
10384   *shift_amount = shr->right();
10385   return true;
10386 }
10387
10388
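// Conservatively answers whether the effective shift count can be zero. Only
// the low five bits of the count are used by the shift (JS semantics), so a
// constant such as 33 always shifts by 1 and can never act as a zero shift,
// whereas 32, 0 or a non-constant count might.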
10389 bool CanBeZero(HValue* right) {
10390   if (right->IsConstant()) {
10391     HConstant* right_const = HConstant::cast(right);
10392     if (right_const->HasInteger32Value() &&
10393        (right_const->Integer32Value() & 0x1f) != 0) {
10394       return false;
10395     }
10396   }
10397   return true;
10398 }
10399
10400
10401 HValue* HGraphBuilder::EnforceNumberType(HValue* number,
10402                                          Type* expected) {
10403   if (expected->Is(Type::SignedSmall())) {
10404     return AddUncasted<HForceRepresentation>(number, Representation::Smi());
10405   }
10406   if (expected->Is(Type::Signed32())) {
10407     return AddUncasted<HForceRepresentation>(number,
10408                                              Representation::Integer32());
10409   }
10410   return number;
10411 }
10412
10413
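// Tries to statically coerce |value| towards a number. For a constant this
// folds the truncation directly: e.g. undefined becomes NaN, true becomes 1
// and null becomes 0. For non-constant values only the expected type is
// refined; no conversion code is emitted here.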
10414 HValue* HGraphBuilder::TruncateToNumber(HValue* value, Type** expected) {
10415   if (value->IsConstant()) {
10416     HConstant* constant = HConstant::cast(value);
10417     Maybe<HConstant*> number = constant->CopyToTruncatedNumber(zone());
10418     if (number.has_value) {
10419       *expected = Type::Number(zone());
10420       return AddInstruction(number.value);
10421     }
10422   }
10423
10424   // We put temporary values on the stack, which don't correspond to anything
10425   // in baseline code. Since nothing is observable we avoid recording those
10426   // pushes with a NoObservableSideEffectsScope.
10427   NoObservableSideEffectsScope no_effects(this);
10428
10429   Type* expected_type = *expected;
10430
10431   // Separate the number type from the rest.
10432   Type* expected_obj =
10433       Type::Intersect(expected_type, Type::NonNumber(zone()), zone());
10434   Type* expected_number =
10435       Type::Intersect(expected_type, Type::Number(zone()), zone());
10436
10437   // We expect to get a number.
10438   // (We need to check first, since Type::None->Is(Type::Any()) == true.)
10439   if (expected_obj->Is(Type::None())) {
10440     ASSERT(!expected_number->Is(Type::None(zone())));
10441     return value;
10442   }
10443
10444   if (expected_obj->Is(Type::Undefined(zone()))) {
10445     // This is already done by HChange.
10446     *expected = Type::Union(expected_number, Type::Number(zone()), zone());
10447     return value;
10448   }
10449
10450   return value;
10451 }
10452
10453
10454 HValue* HOptimizedGraphBuilder::BuildBinaryOperation(
10455     BinaryOperation* expr,
10456     HValue* left,
10457     HValue* right,
10458     PushBeforeSimulateBehavior push_sim_result) {
10459   Type* left_type = expr->left()->bounds().lower;
10460   Type* right_type = expr->right()->bounds().lower;
10461   Type* result_type = expr->bounds().lower;
10462   Maybe<int> fixed_right_arg = expr->fixed_right_arg();
10463   Handle<AllocationSite> allocation_site = expr->allocation_site();
10464
10465   HAllocationMode allocation_mode;
10466   if (FLAG_allocation_site_pretenuring && !allocation_site.is_null()) {
10467     allocation_mode = HAllocationMode(allocation_site);
10468   }
10469
10470   HValue* result = HGraphBuilder::BuildBinaryOperation(
10471       expr->op(), left, right, left_type, right_type, result_type,
10472       fixed_right_arg, allocation_mode);
10473   // Add a simulate after instructions with observable side effects, and
10474   // after phis, which are the result of BuildBinaryOperation when we
10475   // inlined some complex subgraph.
10476   if (result->HasObservableSideEffects() || result->IsPhi()) {
10477     if (push_sim_result == PUSH_BEFORE_SIMULATE) {
10478       Push(result);
10479       Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
10480       Drop(1);
10481     } else {
10482       Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
10483     }
10484   }
10485   return result;
10486 }
10487
10488
10489 HValue* HGraphBuilder::BuildBinaryOperation(
10490     Token::Value op,
10491     HValue* left,
10492     HValue* right,
10493     Type* left_type,
10494     Type* right_type,
10495     Type* result_type,
10496     Maybe<int> fixed_right_arg,
10497     HAllocationMode allocation_mode) {
10498
10499   Representation left_rep = Representation::FromType(left_type);
10500   Representation right_rep = Representation::FromType(right_type);
10501
10502   bool maybe_string_add = op == Token::ADD &&
10503                           (left_type->Maybe(Type::String()) ||
10504                            right_type->Maybe(Type::String()));
10505
10506   if (left_type->Is(Type::None())) {
10507     Add<HDeoptimize>("Insufficient type feedback for LHS of binary operation",
10508                      Deoptimizer::SOFT);
10509     // TODO(rossberg): we should be able to get rid of non-continuous
10510     // defaults.
10511     left_type = Type::Any(zone());
10512   } else {
10513     if (!maybe_string_add) left = TruncateToNumber(left, &left_type);
10514     left_rep = Representation::FromType(left_type);
10515   }
10516
10517   if (right_type->Is(Type::None())) {
10518     Add<HDeoptimize>("Insufficient type feedback for RHS of binary operation",
10519                      Deoptimizer::SOFT);
10520     right_type = Type::Any(zone());
10521   } else {
10522     if (!maybe_string_add) right = TruncateToNumber(right, &right_type);
10523     right_rep = Representation::FromType(right_type);
10524   }
10525
10526   // Special case for string addition here.
10527   if (op == Token::ADD &&
10528       (left_type->Is(Type::String()) || right_type->Is(Type::String()))) {
10529     // Validate type feedback for left argument.
10530     if (left_type->Is(Type::String())) {
10531       left = BuildCheckString(left);
10532     }
10533
10534     // Validate type feedback for right argument.
10535     if (right_type->Is(Type::String())) {
10536       right = BuildCheckString(right);
10537     }
10538
10539     // Convert left argument as necessary.
10540     if (left_type->Is(Type::Number())) {
10541       ASSERT(right_type->Is(Type::String()));
10542       left = BuildNumberToString(left, left_type);
10543     } else if (!left_type->Is(Type::String())) {
10544       ASSERT(right_type->Is(Type::String()));
10545       HValue* function = AddLoadJSBuiltin(Builtins::STRING_ADD_RIGHT);
10546       Add<HPushArguments>(left, right);
10547       return AddUncasted<HInvokeFunction>(function, 2);
10548     }
10549
10550     // Convert right argument as necessary.
10551     if (right_type->Is(Type::Number())) {
10552       ASSERT(left_type->Is(Type::String()));
10553       right = BuildNumberToString(right, right_type);
10554     } else if (!right_type->Is(Type::String())) {
10555       ASSERT(left_type->Is(Type::String()));
10556       HValue* function = AddLoadJSBuiltin(Builtins::STRING_ADD_LEFT);
10557       Add<HPushArguments>(left, right);
10558       return AddUncasted<HInvokeFunction>(function, 2);
10559     }
10560
10561     // Fast path for empty constant strings.
10562     if (left->IsConstant() &&
10563         HConstant::cast(left)->HasStringValue() &&
10564         HConstant::cast(left)->StringValue()->length() == 0) {
10565       return right;
10566     }
10567     if (right->IsConstant() &&
10568         HConstant::cast(right)->HasStringValue() &&
10569         HConstant::cast(right)->StringValue()->length() == 0) {
10570       return left;
10571     }
10572
10573     // Register the dependent code with the allocation site.
10574     if (!allocation_mode.feedback_site().is_null()) {
10575       ASSERT(!graph()->info()->IsStub());
10576       Handle<AllocationSite> site(allocation_mode.feedback_site());
10577       AllocationSite::AddDependentCompilationInfo(
10578           site, AllocationSite::TENURING, top_info());
10579     }
10580
10581     // Inline the string addition into the stub when creating allocation
10582     // mementos to gather allocation site feedback, or if we can statically
10583     // infer that we're going to create a cons string.
10584     if ((graph()->info()->IsStub() &&
10585          allocation_mode.CreateAllocationMementos()) ||
10586         (left->IsConstant() &&
10587          HConstant::cast(left)->HasStringValue() &&
10588          HConstant::cast(left)->StringValue()->length() + 1 >=
10589            ConsString::kMinLength) ||
10590         (right->IsConstant() &&
10591          HConstant::cast(right)->HasStringValue() &&
10592          HConstant::cast(right)->StringValue()->length() + 1 >=
10593            ConsString::kMinLength)) {
10594       return BuildStringAdd(left, right, allocation_mode);
10595     }
10596
10597     // Fallback to using the string add stub.
10598     return AddUncasted<HStringAdd>(
10599         left, right, allocation_mode.GetPretenureMode(),
10600         STRING_ADD_CHECK_NONE, allocation_mode.feedback_site());
10601   }
10602
10603   if (graph()->info()->IsStub()) {
10604     left = EnforceNumberType(left, left_type);
10605     right = EnforceNumberType(right, right_type);
10606   }
10607
10608   Representation result_rep = Representation::FromType(result_type);
10609
10610   bool is_non_primitive = (left_rep.IsTagged() && !left_rep.IsSmi()) ||
10611                           (right_rep.IsTagged() && !right_rep.IsSmi());
10612
10613   HInstruction* instr = NULL;
10614   // Only the stub is allowed to call into the runtime, since otherwise we would
10615   // inline several instructions (including the two pushes) for every tagged
10616   // operation in optimized code, which is more expensive than a stub call.
10617   if (graph()->info()->IsStub() && is_non_primitive) {
10618     HValue* function = AddLoadJSBuiltin(BinaryOpIC::TokenToJSBuiltin(op));
10619     Add<HPushArguments>(left, right);
10620     instr = AddUncasted<HInvokeFunction>(function, 2);
10621   } else {
10622     switch (op) {
10623       case Token::ADD:
10624         instr = AddUncasted<HAdd>(left, right);
10625         break;
10626       case Token::SUB:
10627         instr = AddUncasted<HSub>(left, right);
10628         break;
10629       case Token::MUL:
10630         instr = AddUncasted<HMul>(left, right);
10631         break;
10632       case Token::MOD: {
10633         if (fixed_right_arg.has_value &&
10634             !right->EqualsInteger32Constant(fixed_right_arg.value)) {
10635           HConstant* fixed_right = Add<HConstant>(
10636               static_cast<int>(fixed_right_arg.value));
10637           IfBuilder if_same(this);
10638           if_same.If<HCompareNumericAndBranch>(right, fixed_right, Token::EQ);
10639           if_same.Then();
10640           if_same.ElseDeopt("Unexpected RHS of binary operation");
10641           right = fixed_right;
10642         }
10643         instr = AddUncasted<HMod>(left, right);
10644         break;
10645       }
10646       case Token::DIV:
10647         instr = AddUncasted<HDiv>(left, right);
10648         break;
10649       case Token::BIT_XOR:
10650       case Token::BIT_AND:
10651         instr = AddUncasted<HBitwise>(op, left, right);
10652         break;
10653       case Token::BIT_OR: {
10654         HValue* operand, *shift_amount;
10655         if (left_type->Is(Type::Signed32()) &&
10656             right_type->Is(Type::Signed32()) &&
10657             MatchRotateRight(left, right, &operand, &shift_amount)) {
10658           instr = AddUncasted<HRor>(operand, shift_amount);
10659         } else {
10660           instr = AddUncasted<HBitwise>(op, left, right);
10661         }
10662         break;
10663       }
10664       case Token::SAR:
10665         instr = AddUncasted<HSar>(left, right);
10666         break;
10667       case Token::SHR:
10668         instr = AddUncasted<HShr>(left, right);
10669         if (FLAG_opt_safe_uint32_operations && instr->IsShr() &&
10670             CanBeZero(right)) {
10671           graph()->RecordUint32Instruction(instr);
10672         }
10673         break;
10674       case Token::SHL:
10675         instr = AddUncasted<HShl>(left, right);
10676         break;
10677       default:
10678         UNREACHABLE();
10679     }
10680   }
10681
10682   if (instr->IsBinaryOperation()) {
10683     HBinaryOperation* binop = HBinaryOperation::cast(instr);
10684     binop->set_observed_input_representation(1, left_rep);
10685     binop->set_observed_input_representation(2, right_rep);
10686     binop->initialize_output_representation(result_rep);
10687     if (graph()->info()->IsStub()) {
10688       // Stub should not call into stub.
10689       instr->SetFlag(HValue::kCannotBeTagged);
10690       // And should truncate on HForceRepresentation already.
10691       if (left->IsForceRepresentation()) {
10692         left->CopyFlag(HValue::kTruncatingToSmi, instr);
10693         left->CopyFlag(HValue::kTruncatingToInt32, instr);
10694       }
10695       if (right->IsForceRepresentation()) {
10696         right->CopyFlag(HValue::kTruncatingToSmi, instr);
10697         right->CopyFlag(HValue::kTruncatingToInt32, instr);
10698       }
10699     }
10700   }
10701   return instr;
10702 }
10703
10704
10705 // Check for the form (%_ClassOf(foo) === 'BarClass').
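// Such comparisons typically come from macros in the self-hosted builtins,
// e.g. IS_DATE(arg) expands to (%_ClassOf(arg) === 'Date').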
10706 static bool IsClassOfTest(CompareOperation* expr) {
10707   if (expr->op() != Token::EQ_STRICT) return false;
10708   CallRuntime* call = expr->left()->AsCallRuntime();
10709   if (call == NULL) return false;
10710   Literal* literal = expr->right()->AsLiteral();
10711   if (literal == NULL) return false;
10712   if (!literal->value()->IsString()) return false;
10713   if (!call->name()->IsOneByteEqualTo(STATIC_ASCII_VECTOR("_ClassOf"))) {
10714     return false;
10715   }
10716   ASSERT(call->arguments()->length() == 1);
10717   return true;
10718 }
10719
10720
10721 void HOptimizedGraphBuilder::VisitBinaryOperation(BinaryOperation* expr) {
10722   ASSERT(!HasStackOverflow());
10723   ASSERT(current_block() != NULL);
10724   ASSERT(current_block()->HasPredecessor());
10725   switch (expr->op()) {
10726     case Token::COMMA:
10727       return VisitComma(expr);
10728     case Token::OR:
10729     case Token::AND:
10730       return VisitLogicalExpression(expr);
10731     default:
10732       return VisitArithmeticExpression(expr);
10733   }
10734 }
10735
10736
10737 void HOptimizedGraphBuilder::VisitComma(BinaryOperation* expr) {
10738   CHECK_ALIVE(VisitForEffect(expr->left()));
10739   // Visit the right subexpression in the same AST context as the entire
10740   // expression.
10741   Visit(expr->right());
10742 }
10743
10744
10745 void HOptimizedGraphBuilder::VisitLogicalExpression(BinaryOperation* expr) {
10746   bool is_logical_and = expr->op() == Token::AND;
10747   if (ast_context()->IsTest()) {
10748     TestContext* context = TestContext::cast(ast_context());
10749     // Translate left subexpression.
10750     HBasicBlock* eval_right = graph()->CreateBasicBlock();
10751     if (is_logical_and) {
10752       CHECK_BAILOUT(VisitForControl(expr->left(),
10753                                     eval_right,
10754                                     context->if_false()));
10755     } else {
10756       CHECK_BAILOUT(VisitForControl(expr->left(),
10757                                     context->if_true(),
10758                                     eval_right));
10759     }
10760
10761     // Translate right subexpression by visiting it in the same AST
10762     // context as the entire expression.
10763     if (eval_right->HasPredecessor()) {
10764       eval_right->SetJoinId(expr->RightId());
10765       set_current_block(eval_right);
10766       Visit(expr->right());
10767     }
10768
10769   } else if (ast_context()->IsValue()) {
10770     CHECK_ALIVE(VisitForValue(expr->left()));
10771     ASSERT(current_block() != NULL);
10772     HValue* left_value = Top();
10773
10774     // Short-circuit left values that always evaluate to the same boolean value.
10775     if (expr->left()->ToBooleanIsTrue() || expr->left()->ToBooleanIsFalse()) {
10776       // l (evals true)  && r -> r
10777       // l (evals true)  || r -> l
10778       // l (evals false) && r -> l
10779       // l (evals false) || r -> r
10780       if (is_logical_and == expr->left()->ToBooleanIsTrue()) {
10781         Drop(1);
10782         CHECK_ALIVE(VisitForValue(expr->right()));
10783       }
10784       return ast_context()->ReturnValue(Pop());
10785     }
10786
10787     // We need an extra block to maintain edge-split form.
10788     HBasicBlock* empty_block = graph()->CreateBasicBlock();
10789     HBasicBlock* eval_right = graph()->CreateBasicBlock();
10790     ToBooleanStub::Types expected(expr->left()->to_boolean_types());
10791     HBranch* test = is_logical_and
10792         ? New<HBranch>(left_value, expected, eval_right, empty_block)
10793         : New<HBranch>(left_value, expected, empty_block, eval_right);
10794     FinishCurrentBlock(test);
10795
10796     set_current_block(eval_right);
10797     Drop(1);  // Value of the left subexpression.
10798     CHECK_BAILOUT(VisitForValue(expr->right()));
10799
10800     HBasicBlock* join_block =
10801       CreateJoin(empty_block, current_block(), expr->id());
10802     set_current_block(join_block);
10803     return ast_context()->ReturnValue(Pop());
10804
10805   } else {
10806     ASSERT(ast_context()->IsEffect());
10807     // In an effect context, we don't need the value of the left subexpression,
10808     // only its control flow and side effects.  We need an extra block to
10809     // maintain edge-split form.
10810     HBasicBlock* empty_block = graph()->CreateBasicBlock();
10811     HBasicBlock* right_block = graph()->CreateBasicBlock();
10812     if (is_logical_and) {
10813       CHECK_BAILOUT(VisitForControl(expr->left(), right_block, empty_block));
10814     } else {
10815       CHECK_BAILOUT(VisitForControl(expr->left(), empty_block, right_block));
10816     }
10817
10818     // TODO(kmillikin): Find a way to fix this.  It's ugly that there are
10819     // actually two empty blocks (one here and one inserted by
10820     // TestContext::BuildBranch), and that they both have an HSimulate though the
10821     // second one is not a merge node, and that we really have no good AST ID to
10822     // put on that first HSimulate.
10823
10824     if (empty_block->HasPredecessor()) {
10825       empty_block->SetJoinId(expr->id());
10826     } else {
10827       empty_block = NULL;
10828     }
10829
10830     if (right_block->HasPredecessor()) {
10831       right_block->SetJoinId(expr->RightId());
10832       set_current_block(right_block);
10833       CHECK_BAILOUT(VisitForEffect(expr->right()));
10834       right_block = current_block();
10835     } else {
10836       right_block = NULL;
10837     }
10838
10839     HBasicBlock* join_block =
10840       CreateJoin(empty_block, right_block, expr->id());
10841     set_current_block(join_block);
10842     // We did not materialize any value in the predecessor environments,
10843     // so there is no need to handle it here.
10844   }
10845 }
10846
10847
10848 void HOptimizedGraphBuilder::VisitArithmeticExpression(BinaryOperation* expr) {
10849   CHECK_ALIVE(VisitForValue(expr->left()));
10850   CHECK_ALIVE(VisitForValue(expr->right()));
10851   SetSourcePosition(expr->position());
10852   HValue* right = Pop();
10853   HValue* left = Pop();
10854   HValue* result =
10855       BuildBinaryOperation(expr, left, right,
10856           ast_context()->IsEffect() ? NO_PUSH_BEFORE_SIMULATE
10857                                     : PUSH_BEFORE_SIMULATE);
10858   if (FLAG_hydrogen_track_positions && result->IsBinaryOperation()) {
10859     HBinaryOperation::cast(result)->SetOperandPositions(
10860         zone(),
10861         ScriptPositionToSourcePosition(expr->left()->position()),
10862         ScriptPositionToSourcePosition(expr->right()->position()));
10863   }
10864   return ast_context()->ReturnValue(result);
10865 }
10866
10867
10868 void HOptimizedGraphBuilder::HandleLiteralCompareTypeof(CompareOperation* expr,
10869                                                         Expression* sub_expr,
10870                                                         Handle<String> check) {
10871   CHECK_ALIVE(VisitForTypeOf(sub_expr));
10872   SetSourcePosition(expr->position());
10873   HValue* value = Pop();
10874   HTypeofIsAndBranch* instr = New<HTypeofIsAndBranch>(value, check);
10875   return ast_context()->ReturnControl(instr, expr->id());
10876 }
10877
10878
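// A strict equality against a boolean literal, e.g. x === true, can be
// lowered to a reference comparison, since true and false are singleton
// oddball objects.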
10879 static bool IsLiteralCompareBool(Isolate* isolate,
10880                                  HValue* left,
10881                                  Token::Value op,
10882                                  HValue* right) {
10883   return op == Token::EQ_STRICT &&
10884       ((left->IsConstant() &&
10885           HConstant::cast(left)->handle(isolate)->IsBoolean()) ||
10886        (right->IsConstant() &&
10887            HConstant::cast(right)->handle(isolate)->IsBoolean()));
10888 }
10889
10890
10891 void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
10892   ASSERT(!HasStackOverflow());
10893   ASSERT(current_block() != NULL);
10894   ASSERT(current_block()->HasPredecessor());
10895
10896   if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
10897
10898   // Check for a few fast cases. The AST visiting behavior must be in sync
10899   // with the full codegen: We don't push both left and right values onto
10900   // the expression stack when one side is a special-case literal.
10901   Expression* sub_expr = NULL;
10902   Handle<String> check;
10903   if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
10904     return HandleLiteralCompareTypeof(expr, sub_expr, check);
10905   }
10906   if (expr->IsLiteralCompareUndefined(&sub_expr, isolate())) {
10907     return HandleLiteralCompareNil(expr, sub_expr, kUndefinedValue);
10908   }
10909   if (expr->IsLiteralCompareNull(&sub_expr)) {
10910     return HandleLiteralCompareNil(expr, sub_expr, kNullValue);
10911   }
10912
10913   if (IsClassOfTest(expr)) {
10914     CallRuntime* call = expr->left()->AsCallRuntime();
10915     ASSERT(call->arguments()->length() == 1);
10916     CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10917     HValue* value = Pop();
10918     Literal* literal = expr->right()->AsLiteral();
10919     Handle<String> rhs = Handle<String>::cast(literal->value());
10920     HClassOfTestAndBranch* instr = New<HClassOfTestAndBranch>(value, rhs);
10921     return ast_context()->ReturnControl(instr, expr->id());
10922   }
10923
10924   Type* left_type = expr->left()->bounds().lower;
10925   Type* right_type = expr->right()->bounds().lower;
10926   Type* combined_type = expr->combined_type();
10927
10928   CHECK_ALIVE(VisitForValue(expr->left()));
10929   CHECK_ALIVE(VisitForValue(expr->right()));
10930
10931   if (FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
10932
10933   HValue* right = Pop();
10934   HValue* left = Pop();
10935   Token::Value op = expr->op();
10936
10937   if (IsLiteralCompareBool(isolate(), left, op, right)) {
10938     HCompareObjectEqAndBranch* result =
10939         New<HCompareObjectEqAndBranch>(left, right);
10940     return ast_context()->ReturnControl(result, expr->id());
10941   }
10942
10943   if (op == Token::INSTANCEOF) {
10944     // Check to see if the rhs of the instanceof is a global function not
10945     // residing in new space. If it is we assume that the function will stay the
10946     // same.
10947     Handle<JSFunction> target = Handle<JSFunction>::null();
10948     VariableProxy* proxy = expr->right()->AsVariableProxy();
10949     bool global_function = (proxy != NULL) && proxy->var()->IsUnallocated();
10950     if (global_function &&
10951         current_info()->has_global_object() &&
10952         !current_info()->global_object()->IsAccessCheckNeeded()) {
10953       Handle<String> name = proxy->name();
10954       Handle<GlobalObject> global(current_info()->global_object());
10955       LookupResult lookup(isolate());
10956       global->Lookup(name, &lookup);
10957       if (lookup.IsNormal() && lookup.GetValue()->IsJSFunction()) {
10958         Handle<JSFunction> candidate(JSFunction::cast(lookup.GetValue()));
10959         // If the function is in new space we assume it's more likely to
10960         // change and thus prefer the general IC code.
10961         if (!isolate()->heap()->InNewSpace(*candidate)) {
10962           target = candidate;
10963         }
10964       }
10965     }
10966
10967     // If the target is not null we have found a known global function that is
10968     // assumed to stay the same for this instanceof.
10969     if (target.is_null()) {
10970       HInstanceOf* result = New<HInstanceOf>(left, right);
10971       return ast_context()->ReturnInstruction(result, expr->id());
10972     } else {
10973       Add<HCheckValue>(right, target);
10974       HInstanceOfKnownGlobal* result =
10975         New<HInstanceOfKnownGlobal>(left, target);
10976       return ast_context()->ReturnInstruction(result, expr->id());
10977     }
10978
10979     // Code below assumes that we don't fall through.
10980     UNREACHABLE();
10981   } else if (op == Token::IN) {
10982     HValue* function = AddLoadJSBuiltin(Builtins::IN);
10983     Add<HPushArguments>(left, right);
10984     // TODO(olivf) InvokeFunction produces a check for the parameter count,
10985     // even though we are certain to pass the correct number of arguments here.
10986     HInstruction* result = New<HInvokeFunction>(function, 2);
10987     return ast_context()->ReturnInstruction(result, expr->id());
10988   }
10989
10990   PushBeforeSimulateBehavior push_behavior =
10991     ast_context()->IsEffect() ? NO_PUSH_BEFORE_SIMULATE
10992                               : PUSH_BEFORE_SIMULATE;
10993   HControlInstruction* compare = BuildCompareInstruction(
10994       op, left, right, left_type, right_type, combined_type,
10995       ScriptPositionToSourcePosition(expr->left()->position()),
10996       ScriptPositionToSourcePosition(expr->right()->position()),
10997       push_behavior, expr->id());
10998   if (compare == NULL) return;  // Bailed out.
10999   return ast_context()->ReturnControl(compare, expr->id());
11000 }
11001
11002
11003 HControlInstruction* HOptimizedGraphBuilder::BuildCompareInstruction(
11004     Token::Value op,
11005     HValue* left,
11006     HValue* right,
11007     Type* left_type,
11008     Type* right_type,
11009     Type* combined_type,
11010     HSourcePosition left_position,
11011     HSourcePosition right_position,
11012     PushBeforeSimulateBehavior push_sim_result,
11013     BailoutId bailout_id) {
11014   // Cases handled below depend on collected type feedback. They should
11015   // soft deoptimize when there is no type feedback.
11016   if (combined_type->Is(Type::None())) {
11017     Add<HDeoptimize>("Insufficient type feedback for combined type "
11018                      "of binary operation",
11019                      Deoptimizer::SOFT);
11020     combined_type = left_type = right_type = Type::Any(zone());
11021   }
11022
11023   Representation left_rep = Representation::FromType(left_type);
11024   Representation right_rep = Representation::FromType(right_type);
11025   Representation combined_rep = Representation::FromType(combined_type);
11026
11027   if (combined_type->Is(Type::Receiver())) {
11028     if (Token::IsEqualityOp(op)) {
11029       // HCompareObjectEqAndBranch can only deal with object, so
11030       // exclude numbers.
11031       if ((left->IsConstant() &&
11032            HConstant::cast(left)->HasNumberValue()) ||
11033           (right->IsConstant() &&
11034            HConstant::cast(right)->HasNumberValue())) {
11035         Add<HDeoptimize>("Type mismatch between feedback and constant",
11036                          Deoptimizer::SOFT);
11037         // The caller expects a branch instruction, so make it happy.
11038         return New<HBranch>(graph()->GetConstantTrue());
11039       }
11040       // Can we get away with map check and not instance type check?
11041       HValue* operand_to_check =
11042           left->block()->block_id() < right->block()->block_id() ? left : right;
11043       if (combined_type->IsClass()) {
11044         Handle<Map> map = combined_type->AsClass()->Map();
11045         AddCheckMap(operand_to_check, map);
11046         HCompareObjectEqAndBranch* result =
11047             New<HCompareObjectEqAndBranch>(left, right);
11048         if (FLAG_hydrogen_track_positions) {
11049           result->set_operand_position(zone(), 0, left_position);
11050           result->set_operand_position(zone(), 1, right_position);
11051         }
11052         return result;
11053       } else {
11054         BuildCheckHeapObject(operand_to_check);
11055         Add<HCheckInstanceType>(operand_to_check,
11056                                 HCheckInstanceType::IS_SPEC_OBJECT);
11057         HCompareObjectEqAndBranch* result =
11058             New<HCompareObjectEqAndBranch>(left, right);
11059         return result;
11060       }
11061     } else {
11062       Bailout(kUnsupportedNonPrimitiveCompare);
11063       return NULL;
11064     }
11065   } else if (combined_type->Is(Type::InternalizedString()) &&
11066              Token::IsEqualityOp(op)) {
11067     // If we have a constant argument, it should be consistent with the type
11068     // feedback (otherwise we fail assertions in HCompareObjectEqAndBranch).
11069     if ((left->IsConstant() &&
11070          !HConstant::cast(left)->HasInternalizedStringValue()) ||
11071         (right->IsConstant() &&
11072          !HConstant::cast(right)->HasInternalizedStringValue())) {
11073       Add<HDeoptimize>("Type mismatch between feedback and constant",
11074                        Deoptimizer::SOFT);
11075       // The caller expects a branch instruction, so make it happy.
11076       return New<HBranch>(graph()->GetConstantTrue());
11077     }
11078     BuildCheckHeapObject(left);
11079     Add<HCheckInstanceType>(left, HCheckInstanceType::IS_INTERNALIZED_STRING);
11080     BuildCheckHeapObject(right);
11081     Add<HCheckInstanceType>(right, HCheckInstanceType::IS_INTERNALIZED_STRING);
11082     HCompareObjectEqAndBranch* result =
11083         New<HCompareObjectEqAndBranch>(left, right);
11084     return result;
11085   } else if (combined_type->Is(Type::String())) {
11086     BuildCheckHeapObject(left);
11087     Add<HCheckInstanceType>(left, HCheckInstanceType::IS_STRING);
11088     BuildCheckHeapObject(right);
11089     Add<HCheckInstanceType>(right, HCheckInstanceType::IS_STRING);
11090     HStringCompareAndBranch* result =
11091         New<HStringCompareAndBranch>(left, right, op);
11092     return result;
11093   } else {
11094     if (combined_rep.IsTagged() || combined_rep.IsNone()) {
11095       HCompareGeneric* result = Add<HCompareGeneric>(left, right, op);
11096       result->set_observed_input_representation(1, left_rep);
11097       result->set_observed_input_representation(2, right_rep);
11098       if (result->HasObservableSideEffects()) {
11099         if (push_sim_result == PUSH_BEFORE_SIMULATE) {
11100           Push(result);
11101           AddSimulate(bailout_id, REMOVABLE_SIMULATE);
11102           Drop(1);
11103         } else {
11104           AddSimulate(bailout_id, REMOVABLE_SIMULATE);
11105         }
11106       }
11107       // TODO(jkummerow): Can we make this more efficient?
11108       HBranch* branch = New<HBranch>(result);
11109       return branch;
11110     } else {
11111       HCompareNumericAndBranch* result =
11112           New<HCompareNumericAndBranch>(left, right, op);
11113       result->set_observed_input_representation(left_rep, right_rep);
11114       if (FLAG_hydrogen_track_positions) {
11115         result->SetOperandPositions(zone(), left_position, right_position);
11116       }
11117       return result;
11118     }
11119   }
11120 }
11121
11122
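// Compares |sub_expr| against a null or undefined literal. For === this is a
// plain reference comparison with the corresponding oddball; for == the
// abstract equality must also accept the other nil value (and undetectable
// objects), which is what the BuildCompareNil continuation handles.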
11123 void HOptimizedGraphBuilder::HandleLiteralCompareNil(CompareOperation* expr,
11124                                                      Expression* sub_expr,
11125                                                      NilValue nil) {
11126   ASSERT(!HasStackOverflow());
11127   ASSERT(current_block() != NULL);
11128   ASSERT(current_block()->HasPredecessor());
11129   ASSERT(expr->op() == Token::EQ || expr->op() == Token::EQ_STRICT);
11130   if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
11131   CHECK_ALIVE(VisitForValue(sub_expr));
11132   HValue* value = Pop();
11133   if (expr->op() == Token::EQ_STRICT) {
11134     HConstant* nil_constant = nil == kNullValue
11135         ? graph()->GetConstantNull()
11136         : graph()->GetConstantUndefined();
11137     HCompareObjectEqAndBranch* instr =
11138         New<HCompareObjectEqAndBranch>(value, nil_constant);
11139     return ast_context()->ReturnControl(instr, expr->id());
11140   } else {
11141     ASSERT_EQ(Token::EQ, expr->op());
11142     Type* type = expr->combined_type()->Is(Type::None())
11143         ? Type::Any(zone()) : expr->combined_type();
11144     HIfContinuation continuation;
11145     BuildCompareNil(value, type, &continuation);
11146     return ast_context()->ReturnContinuation(&continuation, expr->id());
11147   }
11148 }
11149
11150
11151 HInstruction* HOptimizedGraphBuilder::BuildThisFunction() {
11152   // If we share optimized code between different closures, the
11153   // this-function is not a constant, except inside an inlined body.
11154   if (function_state()->outer() != NULL) {
11155       return New<HConstant>(
11156           function_state()->compilation_info()->closure());
11157   } else {
11158       return New<HThisFunction>();
11159   }
11160 }
11161
11162
11163 HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
11164     Handle<JSObject> boilerplate_object,
11165     AllocationSiteUsageContext* site_context) {
11166   NoObservableSideEffectsScope no_effects(this);
11167   InstanceType instance_type = boilerplate_object->map()->instance_type();
11168   ASSERT(instance_type == JS_ARRAY_TYPE || instance_type == JS_OBJECT_TYPE);
11169
11170   HType type = instance_type == JS_ARRAY_TYPE
11171       ? HType::JSArray() : HType::JSObject();
11172   HValue* object_size_constant = Add<HConstant>(
11173       boilerplate_object->map()->instance_size());
11174
11175   PretenureFlag pretenure_flag = NOT_TENURED;
11176   if (FLAG_allocation_site_pretenuring) {
11177     pretenure_flag = site_context->current()->GetPretenureMode();
11178     Handle<AllocationSite> site(site_context->current());
11179     AllocationSite::AddDependentCompilationInfo(
11180         site, AllocationSite::TENURING, top_info());
11181   }
11182
11183   HInstruction* object = Add<HAllocate>(object_size_constant, type,
11184       pretenure_flag, instance_type, site_context->current());
11185
11186   // If allocation folding reaches Page::kMaxRegularHeapObjectSize, the
11187   // elements array may not get folded into the object. Hence, we set the
11188   // elements pointer to the empty fixed array and let store elimination remove
11189   // this store in the folding case.
11190   HConstant* empty_fixed_array = Add<HConstant>(
11191       isolate()->factory()->empty_fixed_array());
11192   Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
11193       empty_fixed_array);
11194
11195   BuildEmitObjectHeader(boilerplate_object, object);
11196
11197   Handle<FixedArrayBase> elements(boilerplate_object->elements());
11198   int elements_size = (elements->length() > 0 &&
11199       elements->map() != isolate()->heap()->fixed_cow_array_map()) ?
11200           elements->Size() : 0;
11201
11202   if (pretenure_flag == TENURED &&
11203       elements->map() == isolate()->heap()->fixed_cow_array_map() &&
11204       isolate()->heap()->InNewSpace(*elements)) {
11205     // If we want to pretenure a fixed COW array, we must ensure that the
11206     // array is already in old space; otherwise we would create too many
11207     // old-to-new-space pointers (overflowing the store buffer).
11208     elements = Handle<FixedArrayBase>(
11209         isolate()->factory()->CopyAndTenureFixedCOWArray(
11210             Handle<FixedArray>::cast(elements)));
11211     boilerplate_object->set_elements(*elements);
11212   }
11213
11214   HInstruction* object_elements = NULL;
11215   if (elements_size > 0) {
11216     HValue* object_elements_size = Add<HConstant>(elements_size);
11217     InstanceType instance_type = boilerplate_object->HasFastDoubleElements()
11218         ? FIXED_DOUBLE_ARRAY_TYPE : FIXED_ARRAY_TYPE;
11219     object_elements = Add<HAllocate>(
11220         object_elements_size, HType::HeapObject(),
11221         pretenure_flag, instance_type, site_context->current());
11222   }
11223   BuildInitElementsInObjectHeader(boilerplate_object, object, object_elements);
11224
11225   // Copy object elements if non-COW.
11226   if (object_elements != NULL) {
11227     BuildEmitElements(boilerplate_object, elements, object_elements,
11228                       site_context);
11229   }
11230
11231   // Copy in-object properties.
11232   if (boilerplate_object->map()->NumberOfFields() != 0) {
11233     BuildEmitInObjectProperties(boilerplate_object, object, site_context,
11234                                 pretenure_flag);
11235   }
11236   return object;
11237 }
11238
11239
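// Initializes the header of the freshly allocated |object|: its map, the
// empty properties array and, for JSArray boilerplates, the (smi) length.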
11240 void HOptimizedGraphBuilder::BuildEmitObjectHeader(
11241     Handle<JSObject> boilerplate_object,
11242     HInstruction* object) {
11243   ASSERT(boilerplate_object->properties()->length() == 0);
11244
11245   Handle<Map> boilerplate_object_map(boilerplate_object->map());
11246   AddStoreMapConstant(object, boilerplate_object_map);
11247
11248   Handle<Object> properties_field =
11249       Handle<Object>(boilerplate_object->properties(), isolate());
11250   ASSERT(*properties_field == isolate()->heap()->empty_fixed_array());
11251   HInstruction* properties = Add<HConstant>(properties_field);
11252   HObjectAccess access = HObjectAccess::ForPropertiesPointer();
11253   Add<HStoreNamedField>(object, access, properties);
11254
11255   if (boilerplate_object->IsJSArray()) {
11256     Handle<JSArray> boilerplate_array =
11257         Handle<JSArray>::cast(boilerplate_object);
11258     Handle<Object> length_field =
11259         Handle<Object>(boilerplate_array->length(), isolate());
11260     HInstruction* length = Add<HConstant>(length_field);
11261
11262     ASSERT(boilerplate_array->length()->IsSmi());
11263     Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(
11264         boilerplate_array->GetElementsKind()), length);
11265   }
11266 }
11267
11268
11269 void HOptimizedGraphBuilder::BuildInitElementsInObjectHeader(
11270     Handle<JSObject> boilerplate_object,
11271     HInstruction* object,
11272     HInstruction* object_elements) {
11273   ASSERT(boilerplate_object->properties()->length() == 0);
11274   if (object_elements == NULL) {
11275     Handle<Object> elements_field =
11276         Handle<Object>(boilerplate_object->elements(), isolate());
11277     object_elements = Add<HConstant>(elements_field);
11278   }
11279   Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
11280       object_elements);
11281 }
11282
11283
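// Copies the boilerplate's in-object fields into |object|. Double fields are
// boxed in a newly allocated HeapNumber, smi fields keep their smi
// representation, and nested JSObjects are emitted recursively. Remaining
// in-object slots are filled with the one-pointer filler map.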
11284 void HOptimizedGraphBuilder::BuildEmitInObjectProperties(
11285     Handle<JSObject> boilerplate_object,
11286     HInstruction* object,
11287     AllocationSiteUsageContext* site_context,
11288     PretenureFlag pretenure_flag) {
11289   Handle<Map> boilerplate_map(boilerplate_object->map());
11290   Handle<DescriptorArray> descriptors(boilerplate_map->instance_descriptors());
11291   int limit = boilerplate_map->NumberOfOwnDescriptors();
11292
11293   int copied_fields = 0;
11294   for (int i = 0; i < limit; i++) {
11295     PropertyDetails details = descriptors->GetDetails(i);
11296     if (details.type() != FIELD) continue;
11297     copied_fields++;
11298     int index = descriptors->GetFieldIndex(i);
11299     int property_offset = boilerplate_object->GetInObjectPropertyOffset(index);
11300     Handle<Name> name(descriptors->GetKey(i));
11301     Handle<Object> value =
11302         Handle<Object>(boilerplate_object->InObjectPropertyAt(index),
11303         isolate());
11304
11305     // The access for the store depends on the type of the boilerplate.
11306     HObjectAccess access = boilerplate_object->IsJSArray() ?
11307         HObjectAccess::ForJSArrayOffset(property_offset) :
11308         HObjectAccess::ForMapAndOffset(boilerplate_map, property_offset);
11309
11310     if (value->IsJSObject()) {
11311       Handle<JSObject> value_object = Handle<JSObject>::cast(value);
11312       Handle<AllocationSite> current_site = site_context->EnterNewScope();
11313       HInstruction* result =
11314           BuildFastLiteral(value_object, site_context);
11315       site_context->ExitScope(current_site, value_object);
11316       Add<HStoreNamedField>(object, access, result);
11317     } else {
11318       Representation representation = details.representation();
11319       HInstruction* value_instruction;
11320
11321       if (representation.IsDouble()) {
11322         // Allocate a HeapNumber box and store the value into it.
11323         HValue* heap_number_constant = Add<HConstant>(HeapNumber::kSize);
11324         // This heap number allocation does not have a corresponding
11325         // AllocationSite. That is okay because
11326         // 1) it is a child of another object with a valid allocation site
11327         // 2) we can simply reuse the parent object's pretenuring mode.
11328         HInstruction* double_box =
11329             Add<HAllocate>(heap_number_constant, HType::HeapObject(),
11330                 pretenure_flag, HEAP_NUMBER_TYPE);
11331         AddStoreMapConstant(double_box,
11332             isolate()->factory()->heap_number_map());
11333         Add<HStoreNamedField>(double_box, HObjectAccess::ForHeapNumberValue(),
11334                               Add<HConstant>(value));
11335         value_instruction = double_box;
11336       } else if (representation.IsSmi()) {
11337         value_instruction = value->IsUninitialized()
11338             ? graph()->GetConstant0()
11339             : Add<HConstant>(value);
11340         // Ensure that the value is stored as a smi.
11341         access = access.WithRepresentation(representation);
11342       } else {
11343         value_instruction = Add<HConstant>(value);
11344       }
11345
11346       Add<HStoreNamedField>(object, access, value_instruction);
11347     }
11348   }
11349
11350   int inobject_properties = boilerplate_object->map()->inobject_properties();
11351   HInstruction* value_instruction =
11352       Add<HConstant>(isolate()->factory()->one_pointer_filler_map());
11353   for (int i = copied_fields; i < inobject_properties; i++) {
11354     ASSERT(boilerplate_object->IsJSObject());
11355     int property_offset = boilerplate_object->GetInObjectPropertyOffset(i);
11356     HObjectAccess access =
11357         HObjectAccess::ForMapAndOffset(boilerplate_map, property_offset);
11358     Add<HStoreNamedField>(object, access, value_instruction);
11359   }
11360 }
11361
11362
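// Initializes the elements header (map and length) of the new backing store
// and copies its contents from the boilerplate, dispatching on whether the
// boilerplate holds a FixedDoubleArray or a FixedArray.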
11363 void HOptimizedGraphBuilder::BuildEmitElements(
11364     Handle<JSObject> boilerplate_object,
11365     Handle<FixedArrayBase> elements,
11366     HValue* object_elements,
11367     AllocationSiteUsageContext* site_context) {
11368   ElementsKind kind = boilerplate_object->map()->elements_kind();
11369   int elements_length = elements->length();
11370   HValue* object_elements_length = Add<HConstant>(elements_length);
11371   BuildInitializeElementsHeader(object_elements, kind, object_elements_length);
11372
11373   // Copy elements backing store content.
11374   if (elements->IsFixedDoubleArray()) {
11375     BuildEmitFixedDoubleArray(elements, kind, object_elements);
11376   } else if (elements->IsFixedArray()) {
11377     BuildEmitFixedArray(elements, kind, object_elements,
11378                         site_context);
11379   } else {
11380     UNREACHABLE();
11381   }
11382 }
11383
11384
11385 void HOptimizedGraphBuilder::BuildEmitFixedDoubleArray(
11386     Handle<FixedArrayBase> elements,
11387     ElementsKind kind,
11388     HValue* object_elements) {
11389   HInstruction* boilerplate_elements = Add<HConstant>(elements);
11390   int elements_length = elements->length();
11391   for (int i = 0; i < elements_length; i++) {
11392     HValue* key_constant = Add<HConstant>(i);
11393     HInstruction* value_instruction =
11394         Add<HLoadKeyed>(boilerplate_elements, key_constant,
11395                         static_cast<HValue*>(NULL), kind,
11396                         ALLOW_RETURN_HOLE);
11397     HInstruction* store = Add<HStoreKeyed>(object_elements, key_constant,
11398                                            value_instruction, kind);
11399     store->SetFlag(HValue::kAllowUndefinedAsNaN);
11400   }
11401 }
11402
11403
11404 void HOptimizedGraphBuilder::BuildEmitFixedArray(
11405     Handle<FixedArrayBase> elements,
11406     ElementsKind kind,
11407     HValue* object_elements,
11408     AllocationSiteUsageContext* site_context) {
11409   HInstruction* boilerplate_elements = Add<HConstant>(elements);
11410   int elements_length = elements->length();
11411   Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
11412   for (int i = 0; i < elements_length; i++) {
11413     Handle<Object> value(fast_elements->get(i), isolate());
11414     HValue* key_constant = Add<HConstant>(i);
11415     if (value->IsJSObject()) {
11416       Handle<JSObject> value_object = Handle<JSObject>::cast(value);
11417       Handle<AllocationSite> current_site = site_context->EnterNewScope();
11418       HInstruction* result =
11419           BuildFastLiteral(value_object, site_context);
11420       site_context->ExitScope(current_site, value_object);
11421       Add<HStoreKeyed>(object_elements, key_constant, result, kind);
11422     } else {
11423       HInstruction* value_instruction =
11424           Add<HLoadKeyed>(boilerplate_elements, key_constant,
11425                           static_cast<HValue*>(NULL), kind,
11426                           ALLOW_RETURN_HOLE);
11427       Add<HStoreKeyed>(object_elements, key_constant, value_instruction, kind);
11428     }
11429   }
11430 }
11431
11432
11433 void HOptimizedGraphBuilder::VisitThisFunction(ThisFunction* expr) {
11434   ASSERT(!HasStackOverflow());
11435   ASSERT(current_block() != NULL);
11436   ASSERT(current_block()->HasPredecessor());
11437   HInstruction* instr = BuildThisFunction();
11438   return ast_context()->ReturnInstruction(instr, expr->id());
11439 }
11440
11441
11442 void HOptimizedGraphBuilder::VisitDeclarations(
11443     ZoneList<Declaration*>* declarations) {
11444   ASSERT(globals_.is_empty());
11445   AstVisitor::VisitDeclarations(declarations);
11446   if (!globals_.is_empty()) {
11447     Handle<FixedArray> array =
11448        isolate()->factory()->NewFixedArray(globals_.length(), TENURED);
11449     for (int i = 0; i < globals_.length(); ++i) array->set(i, *globals_.at(i));
11450     int flags = DeclareGlobalsEvalFlag::encode(current_info()->is_eval()) |
11451         DeclareGlobalsNativeFlag::encode(current_info()->is_native()) |
11452         DeclareGlobalsStrictMode::encode(current_info()->strict_mode());
11453     Add<HDeclareGlobals>(array, flags);
11454     globals_.Rewind(0);
11455   }
11456 }
11457
11458
11459 void HOptimizedGraphBuilder::VisitVariableDeclaration(
11460     VariableDeclaration* declaration) {
11461   VariableProxy* proxy = declaration->proxy();
11462   VariableMode mode = declaration->mode();
11463   Variable* variable = proxy->var();
11464   bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
11465   switch (variable->location()) {
11466     case Variable::UNALLOCATED:
11467       globals_.Add(variable->name(), zone());
11468       globals_.Add(variable->binding_needs_init()
11469                        ? isolate()->factory()->the_hole_value()
11470                        : isolate()->factory()->undefined_value(), zone());
11471       return;
11472     case Variable::PARAMETER:
11473     case Variable::LOCAL:
11474       if (hole_init) {
11475         HValue* value = graph()->GetConstantHole();
11476         environment()->Bind(variable, value);
11477       }
11478       break;
11479     case Variable::CONTEXT:
11480       if (hole_init) {
11481         HValue* value = graph()->GetConstantHole();
11482         HValue* context = environment()->context();
11483         HStoreContextSlot* store = Add<HStoreContextSlot>(
11484             context, variable->index(), HStoreContextSlot::kNoCheck, value);
11485         if (store->HasObservableSideEffects()) {
11486           Add<HSimulate>(proxy->id(), REMOVABLE_SIMULATE);
11487         }
11488       }
11489       break;
11490     case Variable::LOOKUP:
11491       return Bailout(kUnsupportedLookupSlotInDeclaration);
11492   }
11493 }
11494
11495
11496 void HOptimizedGraphBuilder::VisitFunctionDeclaration(
11497     FunctionDeclaration* declaration) {
11498   VariableProxy* proxy = declaration->proxy();
11499   Variable* variable = proxy->var();
11500   switch (variable->location()) {
11501     case Variable::UNALLOCATED: {
11502       globals_.Add(variable->name(), zone());
11503       Handle<SharedFunctionInfo> function = Compiler::BuildFunctionInfo(
11504           declaration->fun(), current_info()->script());
11505       // Check for stack-overflow exception.
11506       if (function.is_null()) return SetStackOverflow();
11507       globals_.Add(function, zone());
11508       return;
11509     }
11510     case Variable::PARAMETER:
11511     case Variable::LOCAL: {
11512       CHECK_ALIVE(VisitForValue(declaration->fun()));
11513       HValue* value = Pop();
11514       BindIfLive(variable, value);
11515       break;
11516     }
11517     case Variable::CONTEXT: {
11518       CHECK_ALIVE(VisitForValue(declaration->fun()));
11519       HValue* value = Pop();
11520       HValue* context = environment()->context();
11521       HStoreContextSlot* store = Add<HStoreContextSlot>(
11522           context, variable->index(), HStoreContextSlot::kNoCheck, value);
11523       if (store->HasObservableSideEffects()) {
11524         Add<HSimulate>(proxy->id(), REMOVABLE_SIMULATE);
11525       }
11526       break;
11527     }
11528     case Variable::LOOKUP:
11529       return Bailout(kUnsupportedLookupSlotInDeclaration);
11530   }
11531 }
11532
11533
11534 void HOptimizedGraphBuilder::VisitModuleDeclaration(
11535     ModuleDeclaration* declaration) {
11536   UNREACHABLE();
11537 }
11538
11539
11540 void HOptimizedGraphBuilder::VisitImportDeclaration(
11541     ImportDeclaration* declaration) {
11542   UNREACHABLE();
11543 }
11544
11545
11546 void HOptimizedGraphBuilder::VisitExportDeclaration(
11547     ExportDeclaration* declaration) {
11548   UNREACHABLE();
11549 }
11550
11551
11552 void HOptimizedGraphBuilder::VisitModuleLiteral(ModuleLiteral* module) {
11553   UNREACHABLE();
11554 }
11555
11556
11557 void HOptimizedGraphBuilder::VisitModuleVariable(ModuleVariable* module) {
11558   UNREACHABLE();
11559 }
11560
11561
11562 void HOptimizedGraphBuilder::VisitModulePath(ModulePath* module) {
11563   UNREACHABLE();
11564 }
11565
11566
11567 void HOptimizedGraphBuilder::VisitModuleUrl(ModuleUrl* module) {
11568   UNREACHABLE();
11569 }
11570
11571
11572 void HOptimizedGraphBuilder::VisitModuleStatement(ModuleStatement* stmt) {
11573   UNREACHABLE();
11574 }
11575
11576
11577 // Generators for inline runtime functions.
11578 // Support for types.
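// Each GenerateFoo below emits the Hydrogen equivalent of the corresponding
// %_Foo intrinsic as written in the JS natives; e.g. %_IsSmi(value) is
// handled by GenerateIsSmi and becomes an HIsSmiAndBranch.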
11579 void HOptimizedGraphBuilder::GenerateIsSmi(CallRuntime* call) {
11580   ASSERT(call->arguments()->length() == 1);
11581   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11582   HValue* value = Pop();
11583   HIsSmiAndBranch* result = New<HIsSmiAndBranch>(value);
11584   return ast_context()->ReturnControl(result, call->id());
11585 }
11586
11587
11588 void HOptimizedGraphBuilder::GenerateIsSpecObject(CallRuntime* call) {
11589   ASSERT(call->arguments()->length() == 1);
11590   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11591   HValue* value = Pop();
11592   HHasInstanceTypeAndBranch* result =
11593       New<HHasInstanceTypeAndBranch>(value,
11594                                      FIRST_SPEC_OBJECT_TYPE,
11595                                      LAST_SPEC_OBJECT_TYPE);
11596   return ast_context()->ReturnControl(result, call->id());
11597 }
11598
11599
11600 void HOptimizedGraphBuilder::GenerateIsFunction(CallRuntime* call) {
11601   ASSERT(call->arguments()->length() == 1);
11602   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11603   HValue* value = Pop();
11604   HHasInstanceTypeAndBranch* result =
11605       New<HHasInstanceTypeAndBranch>(value, JS_FUNCTION_TYPE);
11606   return ast_context()->ReturnControl(result, call->id());
11607 }
11608
11609
11610 void HOptimizedGraphBuilder::GenerateIsMinusZero(CallRuntime* call) {
11611   ASSERT(call->arguments()->length() == 1);
11612   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11613   HValue* value = Pop();
11614   HCompareMinusZeroAndBranch* result = New<HCompareMinusZeroAndBranch>(value);
11615   return ast_context()->ReturnControl(result, call->id());
11616 }
11617
11618
11619 void HOptimizedGraphBuilder::GenerateHasCachedArrayIndex(CallRuntime* call) {
11620   ASSERT(call->arguments()->length() == 1);
11621   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11622   HValue* value = Pop();
11623   HHasCachedArrayIndexAndBranch* result =
11624       New<HHasCachedArrayIndexAndBranch>(value);
11625   return ast_context()->ReturnControl(result, call->id());
11626 }
11627
11628
11629 void HOptimizedGraphBuilder::GenerateIsArray(CallRuntime* call) {
11630   ASSERT(call->arguments()->length() == 1);
11631   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11632   HValue* value = Pop();
11633   HHasInstanceTypeAndBranch* result =
11634       New<HHasInstanceTypeAndBranch>(value, JS_ARRAY_TYPE);
11635   return ast_context()->ReturnControl(result, call->id());
11636 }
11637
11638
11639 void HOptimizedGraphBuilder::GenerateIsRegExp(CallRuntime* call) {
11640   ASSERT(call->arguments()->length() == 1);
11641   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11642   HValue* value = Pop();
11643   HHasInstanceTypeAndBranch* result =
11644       New<HHasInstanceTypeAndBranch>(value, JS_REGEXP_TYPE);
11645   return ast_context()->ReturnControl(result, call->id());
11646 }
11647
11648
11649 void HOptimizedGraphBuilder::GenerateIsObject(CallRuntime* call) {
11650   ASSERT(call->arguments()->length() == 1);
11651   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11652   HValue* value = Pop();
11653   HIsObjectAndBranch* result = New<HIsObjectAndBranch>(value);
11654   return ast_context()->ReturnControl(result, call->id());
11655 }
11656
11657
11658 void HOptimizedGraphBuilder::GenerateIsNonNegativeSmi(CallRuntime* call) {
11659   return Bailout(kInlinedRuntimeFunctionIsNonNegativeSmi);
11660 }
11661
11662
11663 void HOptimizedGraphBuilder::GenerateIsUndetectableObject(CallRuntime* call) {
11664   ASSERT(call->arguments()->length() == 1);
11665   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11666   HValue* value = Pop();
11667   HIsUndetectableAndBranch* result = New<HIsUndetectableAndBranch>(value);
11668   return ast_context()->ReturnControl(result, call->id());
11669 }
11670
11671
11672 void HOptimizedGraphBuilder::GenerateIsStringWrapperSafeForDefaultValueOf(
11673     CallRuntime* call) {
11674   return Bailout(kInlinedRuntimeFunctionIsStringWrapperSafeForDefaultValueOf);
11675 }
11676
11677
11678 // Support for construct call checks.
11679 void HOptimizedGraphBuilder::GenerateIsConstructCall(CallRuntime* call) {
11680   ASSERT(call->arguments()->length() == 0);
11681   if (function_state()->outer() != NULL) {
11682     // We are generating the graph for an inlined function.
11683     HValue* value = function_state()->inlining_kind() == CONSTRUCT_CALL_RETURN
11684         ? graph()->GetConstantTrue()
11685         : graph()->GetConstantFalse();
11686     return ast_context()->ReturnValue(value);
11687   } else {
11688     return ast_context()->ReturnControl(New<HIsConstructCallAndBranch>(),
11689                                         call->id());
11690   }
11691 }
11692
11693
11694 // Support for arguments.length and arguments[?].
11695 void HOptimizedGraphBuilder::GenerateArgumentsLength(CallRuntime* call) {
11696   // Our implementation of arguments (based on this stack frame or an
11697   // adapter below it) does not work for inlined functions.  This runtime
11698   // function is blacklisted by AstNode::IsInlineable.
11699   ASSERT(function_state()->outer() == NULL);
11700   ASSERT(call->arguments()->length() == 0);
11701   HInstruction* elements = Add<HArgumentsElements>(false);
11702   HArgumentsLength* result = New<HArgumentsLength>(elements);
11703   return ast_context()->ReturnInstruction(result, call->id());
11704 }
11705
11706
11707 void HOptimizedGraphBuilder::GenerateArguments(CallRuntime* call) {
11708   // Our implementation of arguments (based on this stack frame or an
11709   // adapter below it) does not work for inlined functions.  This runtime
11710   // function is blacklisted by AstNode::IsInlineable.
11711   ASSERT(function_state()->outer() == NULL);
11712   ASSERT(call->arguments()->length() == 1);
11713   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11714   HValue* index = Pop();
11715   HInstruction* elements = Add<HArgumentsElements>(false);
11716   HInstruction* length = Add<HArgumentsLength>(elements);
11717   HInstruction* checked_index = Add<HBoundsCheck>(index, length);
11718   HAccessArgumentsAt* result = New<HAccessArgumentsAt>(
11719       elements, length, checked_index);
11720   return ast_context()->ReturnInstruction(result, call->id());
11721 }
11722
11723
11724 // Support for accessing the class and value fields of an object.
11725 void HOptimizedGraphBuilder::GenerateClassOf(CallRuntime* call) {
11726   // The special form detected by IsClassOfTest is handled before we get
11727   // here and does not cause a bailout.
11728   return Bailout(kInlinedRuntimeFunctionClassOf);
11729 }
11730
11731
11732 void HOptimizedGraphBuilder::GenerateValueOf(CallRuntime* call) {
11733   ASSERT(call->arguments()->length() == 1);
11734   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11735   HValue* object = Pop();
11736
11737   IfBuilder if_objectisvalue(this);
11738   HValue* objectisvalue = if_objectisvalue.If<HHasInstanceTypeAndBranch>(
11739       object, JS_VALUE_TYPE);
11740   if_objectisvalue.Then();
11741   {
11742     // Return the actual value.
11743     Push(Add<HLoadNamedField>(
11744             object, objectisvalue,
11745             HObjectAccess::ForObservableJSObjectOffset(
11746                 JSValue::kValueOffset)));
11747     Add<HSimulate>(call->id(), FIXED_SIMULATE);
11748   }
11749   if_objectisvalue.Else();
11750   {
11751     // If the object is not a JSValue, return the object itself.
11752     Push(object);
11753     Add<HSimulate>(call->id(), FIXED_SIMULATE);
11754   }
11755   if_objectisvalue.End();
11756   return ast_context()->ReturnValue(Pop());
11757 }
11758
11759
11760 void HOptimizedGraphBuilder::GenerateDateField(CallRuntime* call) {
11761   ASSERT(call->arguments()->length() == 2);
11762   ASSERT_NE(NULL, call->arguments()->at(1)->AsLiteral());
11763   Smi* index = Smi::cast(*(call->arguments()->at(1)->AsLiteral()->value()));
11764   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11765   HValue* date = Pop();
11766   HDateField* result = New<HDateField>(date, index);
11767   return ast_context()->ReturnInstruction(result, call->id());
11768 }
11769
11770
11771 void HOptimizedGraphBuilder::GenerateOneByteSeqStringSetChar(
11772     CallRuntime* call) {
11773   ASSERT(call->arguments()->length() == 3);
11774   // We need to follow the evaluation order of full codegen.
11775   CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11776   CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
11777   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11778   HValue* string = Pop();
11779   HValue* value = Pop();
11780   HValue* index = Pop();
11781   Add<HSeqStringSetChar>(String::ONE_BYTE_ENCODING, string,
11782                          index, value);
11783   Add<HSimulate>(call->id(), FIXED_SIMULATE);
11784   return ast_context()->ReturnValue(graph()->GetConstantUndefined());
11785 }
11786
11787
11788 void HOptimizedGraphBuilder::GenerateTwoByteSeqStringSetChar(
11789     CallRuntime* call) {
11790   ASSERT(call->arguments()->length() == 3);
11791   // We need to follow the evaluation order of full codegen.
11792   CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11793   CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
11794   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11795   HValue* string = Pop();
11796   HValue* value = Pop();
11797   HValue* index = Pop();
11798   Add<HSeqStringSetChar>(String::TWO_BYTE_ENCODING, string,
11799                          index, value);
11800   Add<HSimulate>(call->id(), FIXED_SIMULATE);
11801   return ast_context()->ReturnValue(graph()->GetConstantUndefined());
11802 }
11803
11804
11805 void HOptimizedGraphBuilder::GenerateSetValueOf(CallRuntime* call) {
11806   ASSERT(call->arguments()->length() == 2);
11807   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11808   CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11809   HValue* value = Pop();
11810   HValue* object = Pop();
11811
11812   // Check if object is a JSValue.
11813   IfBuilder if_objectisvalue(this);
11814   if_objectisvalue.If<HHasInstanceTypeAndBranch>(object, JS_VALUE_TYPE);
11815   if_objectisvalue.Then();
11816   {
11817     // Create in-object property store to kValueOffset.
11818     Add<HStoreNamedField>(object,
11819         HObjectAccess::ForObservableJSObjectOffset(JSValue::kValueOffset),
11820         value);
11821     if (!ast_context()->IsEffect()) {
11822       Push(value);
11823     }
11824     Add<HSimulate>(call->id(), FIXED_SIMULATE);
11825   }
11826   if_objectisvalue.Else();
11827   {
11828     // Nothing to do in this case.
11829     if (!ast_context()->IsEffect()) {
11830       Push(value);
11831     }
11832     Add<HSimulate>(call->id(), FIXED_SIMULATE);
11833   }
11834   if_objectisvalue.End();
11835   if (!ast_context()->IsEffect()) {
11836     Drop(1);
11837   }
11838   return ast_context()->ReturnValue(value);
11839 }
11840
11841
11842 // Fast support for charCodeAt(n).
11843 void HOptimizedGraphBuilder::GenerateStringCharCodeAt(CallRuntime* call) {
11844   ASSERT(call->arguments()->length() == 2);
11845   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11846   CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11847   HValue* index = Pop();
11848   HValue* string = Pop();
11849   HInstruction* result = BuildStringCharCodeAt(string, index);
11850   return ast_context()->ReturnInstruction(result, call->id());
11851 }
11852
11853
11854 // Fast support for creating a one-character string from a char code.
11855 void HOptimizedGraphBuilder::GenerateStringCharFromCode(CallRuntime* call) {
11856   ASSERT(call->arguments()->length() == 1);
11857   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11858   HValue* char_code = Pop();
11859   HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
11860   return ast_context()->ReturnInstruction(result, call->id());
11861 }
11862
11863
11864 // Fast support for string.charAt(n) and string[n].
11865 void HOptimizedGraphBuilder::GenerateStringCharAt(CallRuntime* call) {
11866   ASSERT(call->arguments()->length() == 2);
11867   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11868   CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11869   HValue* index = Pop();
11870   HValue* string = Pop();
11871   HInstruction* char_code = BuildStringCharCodeAt(string, index);
11872   AddInstruction(char_code);
11873   HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
11874   return ast_context()->ReturnInstruction(result, call->id());
11875 }
11876
11877
11878 // Fast support for object equality testing.
11879 void HOptimizedGraphBuilder::GenerateObjectEquals(CallRuntime* call) {
11880   ASSERT(call->arguments()->length() == 2);
11881   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11882   CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11883   HValue* right = Pop();
11884   HValue* left = Pop();
11885   HCompareObjectEqAndBranch* result =
11886       New<HCompareObjectEqAndBranch>(left, right);
11887   return ast_context()->ReturnControl(result, call->id());
11888 }
11889
11890
11891 // Fast support for StringAdd.
11892 void HOptimizedGraphBuilder::GenerateStringAdd(CallRuntime* call) {
11893   ASSERT_EQ(2, call->arguments()->length());
11894   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11895   CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11896   HValue* right = Pop();
11897   HValue* left = Pop();
11898   HInstruction* result = NewUncasted<HStringAdd>(left, right);
11899   return ast_context()->ReturnInstruction(result, call->id());
11900 }
11901
11902
11903 // Fast support for SubString.
11904 void HOptimizedGraphBuilder::GenerateSubString(CallRuntime* call) {
11905   ASSERT_EQ(3, call->arguments()->length());
11906   CHECK_ALIVE(VisitExpressions(call->arguments()));
11907   PushArgumentsFromEnvironment(call->arguments()->length());
11908   HCallStub* result = New<HCallStub>(CodeStub::SubString, 3);
11909   return ast_context()->ReturnInstruction(result, call->id());
11910 }
11911
11912
11913 // Fast support for StringCompare.
11914 void HOptimizedGraphBuilder::GenerateStringCompare(CallRuntime* call) {
11915   ASSERT_EQ(2, call->arguments()->length());
11916   CHECK_ALIVE(VisitExpressions(call->arguments()));
11917   PushArgumentsFromEnvironment(call->arguments()->length());
11918   HCallStub* result = New<HCallStub>(CodeStub::StringCompare, 2);
11919   return ast_context()->ReturnInstruction(result, call->id());
11920 }
11921
11922
11923 // Support for direct calls from JavaScript to native RegExp code.
11924 void HOptimizedGraphBuilder::GenerateRegExpExec(CallRuntime* call) {
11925   ASSERT_EQ(4, call->arguments()->length());
11926   CHECK_ALIVE(VisitExpressions(call->arguments()));
11927   PushArgumentsFromEnvironment(call->arguments()->length());
11928   HCallStub* result = New<HCallStub>(CodeStub::RegExpExec, 4);
11929   return ast_context()->ReturnInstruction(result, call->id());
11930 }
11931
11932
11933 void HOptimizedGraphBuilder::GenerateDoubleLo(CallRuntime* call) {
11934   ASSERT_EQ(1, call->arguments()->length());
11935   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11936   HValue* value = Pop();
11937   HInstruction* result = NewUncasted<HDoubleBits>(value, HDoubleBits::LOW);
11938   return ast_context()->ReturnInstruction(result, call->id());
11939 }
11940
11941
11942 void HOptimizedGraphBuilder::GenerateDoubleHi(CallRuntime* call) {
11943   ASSERT_EQ(1, call->arguments()->length());
11944   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11945   HValue* value = Pop();
11946   HInstruction* result = NewUncasted<HDoubleBits>(value, HDoubleBits::HIGH);
11947   return ast_context()->ReturnInstruction(result, call->id());
11948 }
11949
11950
11951 void HOptimizedGraphBuilder::GenerateConstructDouble(CallRuntime* call) {
11952   ASSERT_EQ(2, call->arguments()->length());
11953   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11954   CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11955   HValue* lo = Pop();
11956   HValue* hi = Pop();
11957   HInstruction* result = NewUncasted<HConstructDouble>(hi, lo);
11958   return ast_context()->ReturnInstruction(result, call->id());
11959 }
11960
11961
11962 // Construct a RegExp exec result with two in-object properties.
11963 void HOptimizedGraphBuilder::GenerateRegExpConstructResult(CallRuntime* call) {
11964   ASSERT_EQ(3, call->arguments()->length());
11965   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11966   CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11967   CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
11968   HValue* input = Pop();
11969   HValue* index = Pop();
11970   HValue* length = Pop();
11971   HValue* result = BuildRegExpConstructResult(length, index, input);
11972   return ast_context()->ReturnValue(result);
11973 }
11974
11975
11976 // Support for fast native caches.
11977 void HOptimizedGraphBuilder::GenerateGetFromCache(CallRuntime* call) {
11978   return Bailout(kInlinedRuntimeFunctionGetFromCache);
11979 }
11980
11981
11982 // Fast support for number to string.
11983 void HOptimizedGraphBuilder::GenerateNumberToString(CallRuntime* call) {
11984   ASSERT_EQ(1, call->arguments()->length());
11985   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11986   HValue* number = Pop();
11987   HValue* result = BuildNumberToString(number, Type::Any(zone()));
11988   return ast_context()->ReturnValue(result);
11989 }
11990
11991
11992 // Fast call for custom callbacks.
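// Handles %_CallFunction(receiver, ..., function): if the callee turns out to
// be a JSFunction it is invoked directly via HInvokeFunction, otherwise the
// call goes through the generic HCallFunction path.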
11993 void HOptimizedGraphBuilder::GenerateCallFunction(CallRuntime* call) {
11994   // -1 because the function to call is not itself an argument to the call.
11995   int arg_count = call->arguments()->length() - 1;
11996   ASSERT(arg_count >= 1);  // There's always at least a receiver.
11997
11998   CHECK_ALIVE(VisitExpressions(call->arguments()));
11999   // The function is the last argument.
12000   HValue* function = Pop();
12001   // Push the arguments onto the stack.
12002   PushArgumentsFromEnvironment(arg_count);
12003
12004   IfBuilder if_is_jsfunction(this);
12005   if_is_jsfunction.If<HHasInstanceTypeAndBranch>(function, JS_FUNCTION_TYPE);
12006
12007   if_is_jsfunction.Then();
12008   {
12009     HInstruction* invoke_result =
12010         Add<HInvokeFunction>(function, arg_count);
12011     if (!ast_context()->IsEffect()) {
12012       Push(invoke_result);
12013     }
12014     Add<HSimulate>(call->id(), FIXED_SIMULATE);
12015   }
12016
12017   if_is_jsfunction.Else();
12018   {
12019     HInstruction* call_result =
12020         Add<HCallFunction>(function, arg_count);
12021     if (!ast_context()->IsEffect()) {
12022       Push(call_result);
12023     }
12024     Add<HSimulate>(call->id(), FIXED_SIMULATE);
12025   }
12026   if_is_jsfunction.End();
12027
12028   if (ast_context()->IsEffect()) {
12029     // EffectContext::ReturnValue ignores the value, so we can just pass
12030     // 'undefined' (as we do not have the call result anymore).
12031     return ast_context()->ReturnValue(graph()->GetConstantUndefined());
12032   } else {
12033     return ast_context()->ReturnValue(Pop());
12034   }
12035 }
12036
12037
12038 // Fast call to math functions.
12039 void HOptimizedGraphBuilder::GenerateMathPow(CallRuntime* call) {
12040   ASSERT_EQ(2, call->arguments()->length());
12041   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12042   CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
12043   HValue* right = Pop();
12044   HValue* left = Pop();
12045   HInstruction* result = NewUncasted<HPower>(left, right);
12046   return ast_context()->ReturnInstruction(result, call->id());
12047 }
12048
12049
12050 void HOptimizedGraphBuilder::GenerateMathLogRT(CallRuntime* call) {
12051   ASSERT(call->arguments()->length() == 1);
12052   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12053   HValue* value = Pop();
12054   HInstruction* result = NewUncasted<HUnaryMathOperation>(value, kMathLog);
12055   return ast_context()->ReturnInstruction(result, call->id());
12056 }
12057
12058
12059 void HOptimizedGraphBuilder::GenerateMathSqrtRT(CallRuntime* call) {
12060   ASSERT(call->arguments()->length() == 1);
12061   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12062   HValue* value = Pop();
12063   HInstruction* result = NewUncasted<HUnaryMathOperation>(value, kMathSqrt);
12064   return ast_context()->ReturnInstruction(result, call->id());
12065 }
12066
12067
12068 void HOptimizedGraphBuilder::GenerateGetCachedArrayIndex(CallRuntime* call) {
12069   ASSERT(call->arguments()->length() == 1);
12070   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12071   HValue* value = Pop();
12072   HGetCachedArrayIndex* result = New<HGetCachedArrayIndex>(value);
12073   return ast_context()->ReturnInstruction(result, call->id());
12074 }
12075
12076
12077 void HOptimizedGraphBuilder::GenerateFastAsciiArrayJoin(CallRuntime* call) {
12078   return Bailout(kInlinedRuntimeFunctionFastAsciiArrayJoin);
12079 }
12080
12081
12082 // Support for generators.
12083 void HOptimizedGraphBuilder::GenerateGeneratorNext(CallRuntime* call) {
12084   return Bailout(kInlinedRuntimeFunctionGeneratorNext);
12085 }
12086
12087
12088 void HOptimizedGraphBuilder::GenerateGeneratorThrow(CallRuntime* call) {
12089   return Bailout(kInlinedRuntimeFunctionGeneratorThrow);
12090 }
12091
12092
12093 void HOptimizedGraphBuilder::GenerateDebugBreakInOptimizedCode(
12094     CallRuntime* call) {
12095   Add<HDebugBreak>();
12096   return ast_context()->ReturnValue(graph()->GetConstant0());
12097 }
12098
12099
12100 void HOptimizedGraphBuilder::GenerateDebugCallbackSupportsStepping(
12101     CallRuntime* call) {
12102   ASSERT(call->arguments()->length() == 1);
12103   // Debugging is not supported in optimized code.
12104   return ast_context()->ReturnValue(graph()->GetConstantFalse());
12105 }
12106
12107
12108 #undef CHECK_BAILOUT
12109 #undef CHECK_ALIVE
12110
12111
12112 HEnvironment::HEnvironment(HEnvironment* outer,
12113                            Scope* scope,
12114                            Handle<JSFunction> closure,
12115                            Zone* zone)
12116     : closure_(closure),
12117       values_(0, zone),
12118       frame_type_(JS_FUNCTION),
12119       parameter_count_(0),
12120       specials_count_(1),
12121       local_count_(0),
12122       outer_(outer),
12123       entry_(NULL),
12124       pop_count_(0),
12125       push_count_(0),
12126       ast_id_(BailoutId::None()),
12127       zone_(zone) {
12128   Scope* declaration_scope = scope->DeclarationScope();
12129   Initialize(declaration_scope->num_parameters() + 1,
12130              declaration_scope->num_stack_slots(), 0);
12131 }
12132
12133
12134 HEnvironment::HEnvironment(Zone* zone, int parameter_count)
12135     : values_(0, zone),
12136       frame_type_(STUB),
12137       parameter_count_(parameter_count),
12138       specials_count_(1),
12139       local_count_(0),
12140       outer_(NULL),
12141       entry_(NULL),
12142       pop_count_(0),
12143       push_count_(0),
12144       ast_id_(BailoutId::None()),
12145       zone_(zone) {
12146   Initialize(parameter_count, 0, 0);
12147 }
12148
12149
12150 HEnvironment::HEnvironment(const HEnvironment* other, Zone* zone)
12151     : values_(0, zone),
12152       frame_type_(JS_FUNCTION),
12153       parameter_count_(0),
12154       specials_count_(0),
12155       local_count_(0),
12156       outer_(NULL),
12157       entry_(NULL),
12158       pop_count_(0),
12159       push_count_(0),
12160       ast_id_(other->ast_id()),
12161       zone_(zone) {
12162   Initialize(other);
12163 }
12164
12165
12166 HEnvironment::HEnvironment(HEnvironment* outer,
12167                            Handle<JSFunction> closure,
12168                            FrameType frame_type,
12169                            int arguments,
12170                            Zone* zone)
12171     : closure_(closure),
12172       values_(arguments, zone),
12173       frame_type_(frame_type),
12174       parameter_count_(arguments),
12175       specials_count_(0),
12176       local_count_(0),
12177       outer_(outer),
12178       entry_(NULL),
12179       pop_count_(0),
12180       push_count_(0),
12181       ast_id_(BailoutId::None()),
12182       zone_(zone) {
12183 }
12184
12185
12186 void HEnvironment::Initialize(int parameter_count,
12187                               int local_count,
12188                               int stack_height) {
12189   parameter_count_ = parameter_count;
12190   local_count_ = local_count;
12191
12192   // Avoid reallocating the temporaries' backing store on the first Push.
12193   int total = parameter_count + specials_count_ + local_count + stack_height;
12194   values_.Initialize(total + 4, zone());
12195   for (int i = 0; i < total; ++i) values_.Add(NULL, zone());
12196 }
12197
12198
12199 void HEnvironment::Initialize(const HEnvironment* other) {
12200   closure_ = other->closure();
12201   values_.AddAll(other->values_, zone());
12202   assigned_variables_.Union(other->assigned_variables_, zone());
12203   frame_type_ = other->frame_type_;
12204   parameter_count_ = other->parameter_count_;
12205   local_count_ = other->local_count_;
12206   if (other->outer_ != NULL) outer_ = other->outer_->Copy();  // Deep copy.
12207   entry_ = other->entry_;
12208   pop_count_ = other->pop_count_;
12209   push_count_ = other->push_count_;
12210   specials_count_ = other->specials_count_;
12211   ast_id_ = other->ast_id_;
12212 }
12213
12214
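// Merges the values of |other| into this environment along the incoming edge
// from |block|: if a slot already has a phi owned by |block|, the incoming
// value is added as another operand; otherwise a new phi is created whenever
// the two environments disagree on the slot's value.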
12215 void HEnvironment::AddIncomingEdge(HBasicBlock* block, HEnvironment* other) {
12216   ASSERT(!block->IsLoopHeader());
12217   ASSERT(values_.length() == other->values_.length());
12218
12219   int length = values_.length();
12220   for (int i = 0; i < length; ++i) {
12221     HValue* value = values_[i];
12222     if (value != NULL && value->IsPhi() && value->block() == block) {
12223       // There is already a phi for the i'th value.
12224       HPhi* phi = HPhi::cast(value);
12225       // Assert the index is correct and that no incoming edge was missed.
12226       ASSERT(phi->merged_index() == i || !phi->HasMergedIndex());
12227       ASSERT(phi->OperandCount() == block->predecessors()->length());
12228       phi->AddInput(other->values_[i]);
12229     } else if (values_[i] != other->values_[i]) {
12230       // There is a fresh value on the incoming edge, so a phi is needed.
12231       ASSERT(values_[i] != NULL && other->values_[i] != NULL);
12232       HPhi* phi = block->AddNewPhi(i);
12233       HValue* old_value = values_[i];
12234       for (int j = 0; j < block->predecessors()->length(); j++) {
12235         phi->AddInput(old_value);
12236       }
12237       phi->AddInput(other->values_[i]);
12238       this->values_[i] = phi;
12239     }
12240   }
12241 }
12242
12243
12244 void HEnvironment::Bind(int index, HValue* value) {
12245   ASSERT(value != NULL);
12246   assigned_variables_.Add(index, zone());
12247   values_[index] = value;
12248 }
12249
12250
12251 bool HEnvironment::HasExpressionAt(int index) const {
12252   return index >= parameter_count_ + specials_count_ + local_count_;
12253 }
12254
12255
12256 bool HEnvironment::ExpressionStackIsEmpty() const {
12257   ASSERT(length() >= first_expression_index());
12258   return length() == first_expression_index();
12259 }
12260
12261
12262 void HEnvironment::SetExpressionStackAt(int index_from_top, HValue* value) {
12263   int count = index_from_top + 1;
12264   int index = values_.length() - count;
12265   ASSERT(HasExpressionAt(index));
12266   // The push count must include at least the element in question or else
12267   // the new value will not be included in this environment's history.
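  // For example, with push_count_ == 0 and count == 2 this records two pops
  // and two pushes, as if the top two values had been popped and re-pushed.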
12268   if (push_count_ < count) {
12269     // This has the same effect as popping and re-pushing 'count' elements.
12270     pop_count_ += (count - push_count_);
12271     push_count_ = count;
12272   }
12273   values_[index] = value;
12274 }
12275
12276
12277 void HEnvironment::Drop(int count) {
12278   for (int i = 0; i < count; ++i) {
12279     Pop();
12280   }
12281 }
12282
12283
12284 HEnvironment* HEnvironment::Copy() const {
12285   return new(zone()) HEnvironment(this, zone());
12286 }
12287
12288
12289 HEnvironment* HEnvironment::CopyWithoutHistory() const {
12290   HEnvironment* result = Copy();
12291   result->ClearHistory();
12292   return result;
12293 }
12294
12295
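// Returns a copy of this environment in which every slot holds a freshly
// created phi owned by |loop_header|, each seeded with the slot's current
// value as its first input.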
12296 HEnvironment* HEnvironment::CopyAsLoopHeader(HBasicBlock* loop_header) const {
12297   HEnvironment* new_env = Copy();
12298   for (int i = 0; i < values_.length(); ++i) {
12299     HPhi* phi = loop_header->AddNewPhi(i);
12300     phi->AddInput(values_[i]);
12301     new_env->values_[i] = phi;
12302   }
12303   new_env->ClearHistory();
12304   return new_env;
12305 }
12306
12307
12308 HEnvironment* HEnvironment::CreateStubEnvironment(HEnvironment* outer,
12309                                                   Handle<JSFunction> target,
12310                                                   FrameType frame_type,
12311                                                   int arguments) const {
12312   HEnvironment* new_env =
12313       new(zone()) HEnvironment(outer, target, frame_type,
12314                                arguments + 1, zone());
12315   for (int i = 0; i <= arguments; ++i) {  // Include receiver.
12316     new_env->Push(ExpressionStackAt(arguments - i));
12317   }
12318   new_env->ClearHistory();
12319   return new_env;
12320 }
12321
12322
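// Builds the environment seen on entry to an inlined function: a copy of this
// environment with the call's arguments dropped serves as the outer frame,
// optionally wrapped in constructor/getter/setter stub and arguments-adaptor
// environments, and an inner JS_FUNCTION environment is filled with the
// receiver, the (possibly undefined-padded) arguments, the context and
// undefined for all locals.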
12323 HEnvironment* HEnvironment::CopyForInlining(
12324     Handle<JSFunction> target,
12325     int arguments,
12326     FunctionLiteral* function,
12327     HConstant* undefined,
12328     InliningKind inlining_kind) const {
12329   ASSERT(frame_type() == JS_FUNCTION);
12330
12331   // The outer environment is a copy of this one without the arguments.
12332   int arity = function->scope()->num_parameters();
12333
12334   HEnvironment* outer = Copy();
12335   outer->Drop(arguments + 1);  // Including receiver.
12336   outer->ClearHistory();
12337
12338   if (inlining_kind == CONSTRUCT_CALL_RETURN) {
12339     // Create an artificial constructor stub environment. The receiver should
12340     // actually be the constructor function, but we pass the newly allocated
12341     // object instead; DoComputeConstructStubFrame() relies on that.
12342     outer = CreateStubEnvironment(outer, target, JS_CONSTRUCT, arguments);
12343   } else if (inlining_kind == GETTER_CALL_RETURN) {
12344     // We need an additional StackFrame::INTERNAL frame for restoring the
12345     // correct context.
12346     outer = CreateStubEnvironment(outer, target, JS_GETTER, arguments);
12347   } else if (inlining_kind == SETTER_CALL_RETURN) {
12348     // We need an additional StackFrame::INTERNAL frame for temporarily saving
12349     // the argument of the setter; see StoreStubCompiler::CompileStoreViaSetter.
12350     outer = CreateStubEnvironment(outer, target, JS_SETTER, arguments);
12351   }
12352
12353   if (arity != arguments) {
12354     // Create an artificial arguments adaptation environment.
12355     outer = CreateStubEnvironment(outer, target, ARGUMENTS_ADAPTOR, arguments);
12356   }
12357
12358   HEnvironment* inner =
12359       new(zone()) HEnvironment(outer, function->scope(), target, zone());
12360   // Get the argument values from the original environment.
12361   for (int i = 0; i <= arity; ++i) {  // Include receiver.
12362     HValue* push = (i <= arguments) ?
12363         ExpressionStackAt(arguments - i) : undefined;
12364     inner->SetValueAt(i, push);
12365   }
12366   inner->SetValueAt(arity + 1, context());
12367   for (int i = arity + 2; i < inner->length(); ++i) {
12368     inner->SetValueAt(i, undefined);
12369   }
12370
12371   inner->set_ast_id(BailoutId::FunctionEntry());
12372   return inner;
12373 }
12374
12375
12376 void HEnvironment::PrintTo(StringStream* stream) {
12377   for (int i = 0; i < length(); i++) {
12378     if (i == 0) stream->Add("parameters\n");
12379     if (i == parameter_count()) stream->Add("specials\n");
12380     if (i == parameter_count() + specials_count()) stream->Add("locals\n");
12381     if (i == parameter_count() + specials_count() + local_count()) {
12382       stream->Add("expressions\n");
12383     }
12384     HValue* val = values_.at(i);
12385     stream->Add("%d: ", i);
12386     if (val != NULL) {
12387       val->PrintNameTo(stream);
12388     } else {
12389       stream->Add("NULL");
12390     }
12391     stream->Add("\n");
12392   }
12393   PrintF("\n");
12394 }
12395
12396
12397 void HEnvironment::PrintToStd() {
12398   HeapStringAllocator string_allocator;
12399   StringStream trace(&string_allocator);
12400   PrintTo(&trace);
12401   PrintF("%s", trace.ToCString().get());
12402 }
12403
12404
12405 void HTracer::TraceCompilation(CompilationInfo* info) {
12406   Tag tag(this, "compilation");
12407   if (info->IsOptimizing()) {
12408     Handle<String> name = info->function()->debug_name();
12409     PrintStringProperty("name", name->ToCString().get());
12410     PrintIndent();
12411     trace_.Add("method \"%s:%d\"\n",
12412                name->ToCString().get(),
12413                info->optimization_id());
12414   } else {
12415     CodeStub::Major major_key = info->code_stub()->MajorKey();
12416     PrintStringProperty("name", CodeStub::MajorName(major_key, false));
12417     PrintStringProperty("method", "stub");
12418   }
12419   PrintLongProperty("date", static_cast<int64_t>(OS::TimeCurrentMillis()));
12420 }
12421
12422
12423 void HTracer::TraceLithium(const char* name, LChunk* chunk) {
12424   ASSERT(!chunk->isolate()->concurrent_recompilation_enabled());
12425   AllowHandleDereference allow_deref;
12426   AllowDeferredHandleDereference allow_deferred_deref;
12427   Trace(name, chunk->graph(), chunk);
12428 }
12429
12430
12431 void HTracer::TraceHydrogen(const char* name, HGraph* graph) {
12432   ASSERT(!graph->isolate()->concurrent_recompilation_enabled());
12433   AllowHandleDereference allow_deref;
12434   AllowDeferredHandleDereference allow_deferred_deref;
12435   Trace(name, graph, NULL);
12436 }
12437
12438
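// Writes one "cfg" record for |graph|: for every basic block it lists the
// predecessors, successors, flags, dominator, loop depth, the phis, the HIR
// instructions and, when |chunk| is non-NULL, the corresponding LIR
// instructions.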
12439 void HTracer::Trace(const char* name, HGraph* graph, LChunk* chunk) {
12440   Tag tag(this, "cfg");
12441   PrintStringProperty("name", name);
12442   const ZoneList<HBasicBlock*>* blocks = graph->blocks();
12443   for (int i = 0; i < blocks->length(); i++) {
12444     HBasicBlock* current = blocks->at(i);
12445     Tag block_tag(this, "block");
12446     PrintBlockProperty("name", current->block_id());
12447     PrintIntProperty("from_bci", -1);
12448     PrintIntProperty("to_bci", -1);
12449
12450     if (!current->predecessors()->is_empty()) {
12451       PrintIndent();
12452       trace_.Add("predecessors");
12453       for (int j = 0; j < current->predecessors()->length(); ++j) {
12454         trace_.Add(" \"B%d\"", current->predecessors()->at(j)->block_id());
12455       }
12456       trace_.Add("\n");
12457     } else {
12458       PrintEmptyProperty("predecessors");
12459     }
12460
12461     if (current->end()->SuccessorCount() == 0) {
12462       PrintEmptyProperty("successors");
12463     } else {
12464       PrintIndent();
12465       trace_.Add("successors");
12466       for (HSuccessorIterator it(current->end()); !it.Done(); it.Advance()) {
12467         trace_.Add(" \"B%d\"", it.Current()->block_id());
12468       }
12469       trace_.Add("\n");
12470     }
12471
12472     PrintEmptyProperty("xhandlers");
12473
12474     {
12475       PrintIndent();
12476       trace_.Add("flags");
12477       if (current->IsLoopSuccessorDominator()) {
12478         trace_.Add(" \"dom-loop-succ\"");
12479       }
12480       if (current->IsUnreachable()) {
12481         trace_.Add(" \"dead\"");
12482       }
12483       if (current->is_osr_entry()) {
12484         trace_.Add(" \"osr\"");
12485       }
12486       trace_.Add("\n");
12487     }
12488
12489     if (current->dominator() != NULL) {
12490       PrintBlockProperty("dominator", current->dominator()->block_id());
12491     }
12492
12493     PrintIntProperty("loop_depth", current->LoopNestingDepth());
12494
12495     if (chunk != NULL) {
12496       int first_index = current->first_instruction_index();
12497       int last_index = current->last_instruction_index();
12498       PrintIntProperty(
12499           "first_lir_id",
12500           LifetimePosition::FromInstructionIndex(first_index).Value());
12501       PrintIntProperty(
12502           "last_lir_id",
12503           LifetimePosition::FromInstructionIndex(last_index).Value());
12504     }
12505
12506     {
12507       Tag states_tag(this, "states");
12508       Tag locals_tag(this, "locals");
12509       int total = current->phis()->length();
12510       PrintIntProperty("size", current->phis()->length());
12511       PrintStringProperty("method", "None");
12512       for (int j = 0; j < total; ++j) {
12513         HPhi* phi = current->phis()->at(j);
12514         PrintIndent();
12515         trace_.Add("%d ", phi->merged_index());
12516         phi->PrintNameTo(&trace_);
12517         trace_.Add(" ");
12518         phi->PrintTo(&trace_);
12519         trace_.Add("\n");
12520       }
12521     }
12522
12523     {
12524       Tag HIR_tag(this, "HIR");
12525       for (HInstructionIterator it(current); !it.Done(); it.Advance()) {
12526         HInstruction* instruction = it.Current();
12527         int uses = instruction->UseCount();
12528         PrintIndent();
12529         trace_.Add("0 %d ", uses);
12530         instruction->PrintNameTo(&trace_);
12531         trace_.Add(" ");
12532         instruction->PrintTo(&trace_);
12533         if (FLAG_hydrogen_track_positions &&
12534             instruction->has_position() &&
12535             instruction->position().raw() != 0) {
12536           const HSourcePosition pos = instruction->position();
12537           trace_.Add(" pos:");
12538           if (pos.inlining_id() != 0) {
12539             trace_.Add("%d_", pos.inlining_id());
12540           }
12541           trace_.Add("%d", pos.position());
12542         }
12543         trace_.Add(" <|@\n");
12544       }
12545     }
12546
12547
12548     if (chunk != NULL) {
12549       Tag LIR_tag(this, "LIR");
12550       int first_index = current->first_instruction_index();
12551       int last_index = current->last_instruction_index();
12552       if (first_index != -1 && last_index != -1) {
12553         const ZoneList<LInstruction*>* instructions = chunk->instructions();
12554         for (int i = first_index; i <= last_index; ++i) {
12555           LInstruction* linstr = instructions->at(i);
12556           if (linstr != NULL) {
12557             PrintIndent();
12558             trace_.Add("%d ",
12559                        LifetimePosition::FromInstructionIndex(i).Value());
12560             linstr->PrintTo(&trace_);
12561             trace_.Add(" [hir:");
12562             linstr->hydrogen_value()->PrintNameTo(&trace_);
12563             trace_.Add("]");
12564             trace_.Add(" <|@\n");
12565           }
12566         }
12567       }
12568     }
12569   }
12570 }
12571
12572
12573 void HTracer::TraceLiveRanges(const char* name, LAllocator* allocator) {
12574   Tag tag(this, "intervals");
12575   PrintStringProperty("name", name);
12576
12577   const Vector<LiveRange*>* fixed_d = allocator->fixed_double_live_ranges();
12578   for (int i = 0; i < fixed_d->length(); ++i) {
12579     TraceLiveRange(fixed_d->at(i), "fixed", allocator->zone());
12580   }
12581
12582   const Vector<LiveRange*>* fixed = allocator->fixed_live_ranges();
12583   for (int i = 0; i < fixed->length(); ++i) {
12584     TraceLiveRange(fixed->at(i), "fixed", allocator->zone());
12585   }
12586
12587   const ZoneList<LiveRange*>* live_ranges = allocator->live_ranges();
12588   for (int i = 0; i < live_ranges->length(); ++i) {
12589     TraceLiveRange(live_ranges->at(i), "object", allocator->zone());
12590   }
12591 }
12592
12593
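// Emits a single line describing |range|: its id and type, the assigned
// register or spill slot (if any), the parent and hint indices, the use
// intervals and the use positions for which a register would be beneficial.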
12594 void HTracer::TraceLiveRange(LiveRange* range, const char* type,
12595                              Zone* zone) {
12596   if (range != NULL && !range->IsEmpty()) {
12597     PrintIndent();
12598     trace_.Add("%d %s", range->id(), type);
12599     if (range->HasRegisterAssigned()) {
12600       LOperand* op = range->CreateAssignedOperand(zone);
12601       int assigned_reg = op->index();
12602       if (op->IsDoubleRegister()) {
12603         trace_.Add(" \"%s\"",
12604                    DoubleRegister::AllocationIndexToString(assigned_reg));
12605       } else if (op->IsFloat32x4Register()) {
12606         trace_.Add(" \"%s\"",
12607                    SIMD128Register::AllocationIndexToString(assigned_reg));
12608       } else if (op->IsFloat64x2Register()) {
12609         trace_.Add(" \"%s\"",
12610                    SIMD128Register::AllocationIndexToString(assigned_reg));
12611       } else if (op->IsInt32x4Register()) {
12612         trace_.Add(" \"%s\"",
12613                    SIMD128Register::AllocationIndexToString(assigned_reg));
12614       } else {
12615         ASSERT(op->IsRegister());
12616         trace_.Add(" \"%s\"", Register::AllocationIndexToString(assigned_reg));
12617       }
12618     } else if (range->IsSpilled()) {
12619       LOperand* op = range->TopLevel()->GetSpillOperand();
12620       if (op->IsDoubleStackSlot()) {
12621         trace_.Add(" \"double_stack:%d\"", op->index());
12622       } else if (op->IsFloat32x4StackSlot()) {
12623         trace_.Add(" \"float32x4_stack:%d\"", op->index());
12624       } else if (op->IsFloat64x2StackSlot()) {
12625         trace_.Add(" \"float64x2_stack:%d\"", op->index());
12626       } else if (op->IsInt32x4StackSlot()) {
12627         trace_.Add(" \"int32x4_stack:%d\"", op->index());
12628       } else {
12629         ASSERT(op->IsStackSlot());
12630         trace_.Add(" \"stack:%d\"", op->index());
12631       }
12632     }
12633     int parent_index = -1;
12634     if (range->IsChild()) {
12635       parent_index = range->parent()->id();
12636     } else {
12637       parent_index = range->id();
12638     }
12639     LOperand* op = range->FirstHint();
12640     int hint_index = -1;
12641     if (op != NULL && op->IsUnallocated()) {
12642       hint_index = LUnallocated::cast(op)->virtual_register();
12643     }
12644     trace_.Add(" %d %d", parent_index, hint_index);
12645     UseInterval* cur_interval = range->first_interval();
12646     while (cur_interval != NULL && range->Covers(cur_interval->start())) {
12647       trace_.Add(" [%d, %d[",
12648                  cur_interval->start().Value(),
12649                  cur_interval->end().Value());
12650       cur_interval = cur_interval->next();
12651     }
12652
12653     UsePosition* current_pos = range->first_pos();
12654     while (current_pos != NULL) {
12655       if (current_pos->RegisterIsBeneficial() || FLAG_trace_all_uses) {
12656         trace_.Add(" %d M", current_pos->pos().Value());
12657       }
12658       current_pos = current_pos->next();
12659     }
12660
12661     trace_.Add(" \"\"\n");
12662   }
12663 }
12664
12665
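// Appends the buffered trace output to the trace file and resets the buffer.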
12666 void HTracer::FlushToFile() {
12667   AppendChars(filename_.start(), trace_.ToCString().get(), trace_.length(),
12668               false);
12669   trace_.Reset();
12670 }
12671
12672
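// Accumulates the size of the source being compiled; Print() uses it to
// normalize the totals per kB of source.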
12673 void HStatistics::Initialize(CompilationInfo* info) {
12674   if (info->shared_info().is_null()) return;
12675   source_size_ += info->shared_info()->SourceSize();
12676 }
12677
12678
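// Prints one row per recorded phase (time, share of the summed phase times,
// code size and its share of the total), followed by the create-graph,
// optimize-graph and code-generation stages and totals normalized per kB of
// source. Illustrative row layout (values made up):
//   <phase name>   12.345 ms / 42.0 %     123456 bytes / 37.5 %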
12679 void HStatistics::Print() {
12680   PrintF("Timing results:\n");
12681   TimeDelta sum;
12682   for (int i = 0; i < times_.length(); ++i) {
12683     sum += times_[i];
12684   }
12685
12686   for (int i = 0; i < names_.length(); ++i) {
12687     PrintF("%32s", names_[i]);
12688     double ms = times_[i].InMillisecondsF();
12689     double percent = times_[i].PercentOf(sum);
12690     PrintF(" %8.3f ms / %4.1f %% ", ms, percent);
12691
12692     unsigned size = sizes_[i];
12693     double size_percent = static_cast<double>(size) * 100 / total_size_;
12694     PrintF(" %9u bytes / %4.1f %%\n", size, size_percent);
12695   }
12696
12697   PrintF("----------------------------------------"
12698          "---------------------------------------\n");
12699   TimeDelta total = create_graph_ + optimize_graph_ + generate_code_;
12700   PrintF("%32s %8.3f ms / %4.1f %% \n",
12701          "Create graph",
12702          create_graph_.InMillisecondsF(),
12703          create_graph_.PercentOf(total));
12704   PrintF("%32s %8.3f ms / %4.1f %% \n",
12705          "Optimize graph",
12706          optimize_graph_.InMillisecondsF(),
12707          optimize_graph_.PercentOf(total));
12708   PrintF("%32s %8.3f ms / %4.1f %% \n",
12709          "Generate and install code",
12710          generate_code_.InMillisecondsF(),
12711          generate_code_.PercentOf(total));
12712   PrintF("----------------------------------------"
12713          "---------------------------------------\n");
12714   PrintF("%32s %8.3f ms (%.1f times slower than full code gen)\n",
12715          "Total",
12716          total.InMillisecondsF(),
12717          total.TimesOf(full_code_gen_));
12718
12719   double source_size_in_kb = static_cast<double>(source_size_) / 1024;
12720   double normalized_time = source_size_in_kb > 0
12721       ? total.InMillisecondsF() / source_size_in_kb
12722       : 0;
12723   double normalized_size_in_kb = source_size_in_kb > 0
12724       ? static_cast<double>(total_size_) / 1024 / source_size_in_kb
12725       : 0;
12726   PrintF("%32s %8.3f ms           %7.3f kB allocated\n",
12727          "Average per kB source",
12728          normalized_time, normalized_size_in_kb);
12729 }
12730
12731
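// Adds the given time and code size to the entry for |name|, creating a new
// entry the first time a phase name is seen.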
12732 void HStatistics::SaveTiming(const char* name, TimeDelta time, unsigned size) {
12733   total_size_ += size;
12734   for (int i = 0; i < names_.length(); ++i) {
12735     if (strcmp(names_[i], name) == 0) {
12736       times_[i] += time;
12737       sizes_[i] += size;
12738       return;
12739     }
12740   }
12741   names_.Add(name);
12742   times_.Add(time);
12743   sizes_.Add(size);
12744 }
12745
12746
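// On destruction the phase emits its hydrogen trace (when tracing is enabled
// for this phase) and, in debug builds, runs a lightweight graph
// verification.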
12747 HPhase::~HPhase() {
12748   if (ShouldProduceTraceOutput()) {
12749     isolate()->GetHTracer()->TraceHydrogen(name(), graph_);
12750   }
12751
12752 #ifdef DEBUG
12753   graph_->Verify(false);  // No full verify.
12754 #endif
12755 }
12756
12757 } }  // namespace v8::internal