1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "hydrogen.h"
6
7 #include <algorithm>
8
9 #include "v8.h"
10 #include "allocation-site-scopes.h"
11 #include "codegen.h"
12 #include "full-codegen.h"
13 #include "hashmap.h"
14 #include "hydrogen-bce.h"
15 #include "hydrogen-bch.h"
16 #include "hydrogen-canonicalize.h"
17 #include "hydrogen-check-elimination.h"
18 #include "hydrogen-dce.h"
19 #include "hydrogen-dehoist.h"
20 #include "hydrogen-environment-liveness.h"
21 #include "hydrogen-escape-analysis.h"
22 #include "hydrogen-infer-representation.h"
23 #include "hydrogen-infer-types.h"
24 #include "hydrogen-load-elimination.h"
25 #include "hydrogen-gvn.h"
26 #include "hydrogen-mark-deoptimize.h"
27 #include "hydrogen-mark-unreachable.h"
28 #include "hydrogen-osr.h"
29 #include "hydrogen-range-analysis.h"
30 #include "hydrogen-redundant-phi.h"
31 #include "hydrogen-removable-simulates.h"
32 #include "hydrogen-representation-changes.h"
33 #include "hydrogen-sce.h"
34 #include "hydrogen-store-elimination.h"
35 #include "hydrogen-uint32-analysis.h"
36 #include "lithium-allocator.h"
37 #include "parser.h"
38 #include "runtime.h"
39 #include "scopeinfo.h"
40 #include "scopes.h"
41 #include "stub-cache.h"
42 #include "typing.h"
43
44 #if V8_TARGET_ARCH_IA32
45 #include "ia32/lithium-codegen-ia32.h"
46 #elif V8_TARGET_ARCH_X64
47 #include "x64/lithium-codegen-x64.h"
48 #elif V8_TARGET_ARCH_ARM64
49 #include "arm64/lithium-codegen-arm64.h"
50 #elif V8_TARGET_ARCH_ARM
51 #include "arm/lithium-codegen-arm.h"
52 #elif V8_TARGET_ARCH_MIPS
53 #include "mips/lithium-codegen-mips.h"
54 #else
55 #error Unsupported target architecture.
56 #endif
57
58 namespace v8 {
59 namespace internal {
60
61 HBasicBlock::HBasicBlock(HGraph* graph)
62     : block_id_(graph->GetNextBlockID()),
63       graph_(graph),
64       phis_(4, graph->zone()),
65       first_(NULL),
66       last_(NULL),
67       end_(NULL),
68       loop_information_(NULL),
69       predecessors_(2, graph->zone()),
70       dominator_(NULL),
71       dominated_blocks_(4, graph->zone()),
72       last_environment_(NULL),
73       argument_count_(-1),
74       first_instruction_index_(-1),
75       last_instruction_index_(-1),
76       deleted_phis_(4, graph->zone()),
77       parent_loop_header_(NULL),
78       inlined_entry_block_(NULL),
79       is_inline_return_target_(false),
80       is_reachable_(true),
81       dominates_loop_successors_(false),
82       is_osr_entry_(false) { }
83
84
85 Isolate* HBasicBlock::isolate() const {
86   return graph_->isolate();
87 }
88
89
90 void HBasicBlock::MarkUnreachable() {
91   is_reachable_ = false;
92 }
93
94
95 void HBasicBlock::AttachLoopInformation() {
96   ASSERT(!IsLoopHeader());
97   loop_information_ = new(zone()) HLoopInformation(this, zone());
98 }
99
100
101 void HBasicBlock::DetachLoopInformation() {
102   ASSERT(IsLoopHeader());
103   loop_information_ = NULL;
104 }
105
106
107 void HBasicBlock::AddPhi(HPhi* phi) {
108   ASSERT(!IsStartBlock());
109   phis_.Add(phi, zone());
110   phi->SetBlock(this);
111 }
112
113
114 void HBasicBlock::RemovePhi(HPhi* phi) {
115   ASSERT(phi->block() == this);
116   ASSERT(phis_.Contains(phi));
117   phi->Kill();
118   phis_.RemoveElement(phi);
119   phi->SetBlock(NULL);
120 }
121
122
123 void HBasicBlock::AddInstruction(HInstruction* instr,
124                                  HSourcePosition position) {
125   ASSERT(!IsStartBlock() || !IsFinished());
126   ASSERT(!instr->IsLinked());
127   ASSERT(!IsFinished());
128
129   if (!position.IsUnknown()) {
130     instr->set_position(position);
131   }
132   if (first_ == NULL) {
133     ASSERT(last_environment() != NULL);
134     ASSERT(!last_environment()->ast_id().IsNone());
135     HBlockEntry* entry = new(zone()) HBlockEntry();
136     entry->InitializeAsFirst(this);
137     if (!position.IsUnknown()) {
138       entry->set_position(position);
139     } else {
140       ASSERT(!FLAG_hydrogen_track_positions ||
141              !graph()->info()->IsOptimizing());
142     }
143     first_ = last_ = entry;
144   }
145   instr->InsertAfter(last_);
146 }
147
148
149 HPhi* HBasicBlock::AddNewPhi(int merged_index) {
150   if (graph()->IsInsideNoSideEffectsScope()) {
151     merged_index = HPhi::kInvalidMergedIndex;
152   }
153   HPhi* phi = new(zone()) HPhi(merged_index, zone());
154   AddPhi(phi);
155   return phi;
156 }
157
158
159 HSimulate* HBasicBlock::CreateSimulate(BailoutId ast_id,
160                                        RemovableSimulate removable) {
161   ASSERT(HasEnvironment());
162   HEnvironment* environment = last_environment();
163   ASSERT(ast_id.IsNone() ||
164          ast_id == BailoutId::StubEntry() ||
165          environment->closure()->shared()->VerifyBailoutId(ast_id));
166
167   int push_count = environment->push_count();
168   int pop_count = environment->pop_count();
169
170   HSimulate* instr =
171       new(zone()) HSimulate(ast_id, pop_count, zone(), removable);
172 #ifdef DEBUG
173   instr->set_closure(environment->closure());
174 #endif
175   // Order of pushed values: newest (top of stack) first. This allows
176   // HSimulate::MergeWith() to easily append additional pushed values
177   // that are older (from further down the stack).
178   for (int i = 0; i < push_count; ++i) {
179     instr->AddPushedValue(environment->ExpressionStackAt(i));
180   }
181   for (GrowableBitVector::Iterator it(environment->assigned_variables(),
182                                       zone());
183        !it.Done();
184        it.Advance()) {
185     int index = it.Current();
186     instr->AddAssignedValue(index, environment->Lookup(index));
187   }
188   environment->ClearHistory();
189   return instr;
190 }
191
192
193 void HBasicBlock::Finish(HControlInstruction* end, HSourcePosition position) {
194   ASSERT(!IsFinished());
195   AddInstruction(end, position);
196   end_ = end;
197   for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
198     it.Current()->RegisterPredecessor(this);
199   }
200 }
201
202
203 void HBasicBlock::Goto(HBasicBlock* block,
204                        HSourcePosition position,
205                        FunctionState* state,
206                        bool add_simulate) {
207   bool drop_extra = state != NULL &&
208       state->inlining_kind() == NORMAL_RETURN;
209
210   if (block->IsInlineReturnTarget()) {
211     HEnvironment* env = last_environment();
212     int argument_count = env->arguments_environment()->parameter_count();
213     AddInstruction(new(zone())
214                    HLeaveInlined(state->entry(), argument_count),
215                    position);
216     UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
217   }
218
219   if (add_simulate) AddNewSimulate(BailoutId::None(), position);
220   HGoto* instr = new(zone()) HGoto(block);
221   Finish(instr, position);
222 }
223
224
225 void HBasicBlock::AddLeaveInlined(HValue* return_value,
226                                   FunctionState* state,
227                                   HSourcePosition position) {
228   HBasicBlock* target = state->function_return();
229   bool drop_extra = state->inlining_kind() == NORMAL_RETURN;
230
231   ASSERT(target->IsInlineReturnTarget());
232   ASSERT(return_value != NULL);
233   HEnvironment* env = last_environment();
234   int argument_count = env->arguments_environment()->parameter_count();
235   AddInstruction(new(zone()) HLeaveInlined(state->entry(), argument_count),
236                  position);
237   UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
238   last_environment()->Push(return_value);
239   AddNewSimulate(BailoutId::None(), position);
240   HGoto* instr = new(zone()) HGoto(target);
241   Finish(instr, position);
242 }
243
244
245 void HBasicBlock::SetInitialEnvironment(HEnvironment* env) {
246   ASSERT(!HasEnvironment());
247   ASSERT(first() == NULL);
248   UpdateEnvironment(env);
249 }
250
251
252 void HBasicBlock::UpdateEnvironment(HEnvironment* env) {
253   last_environment_ = env;
254   graph()->update_maximum_environment_size(env->first_expression_index());
255 }
256
257
258 void HBasicBlock::SetJoinId(BailoutId ast_id) {
259   int length = predecessors_.length();
260   ASSERT(length > 0);
261   for (int i = 0; i < length; i++) {
262     HBasicBlock* predecessor = predecessors_[i];
263     ASSERT(predecessor->end()->IsGoto());
264     HSimulate* simulate = HSimulate::cast(predecessor->end()->previous());
265     ASSERT(i != 0 ||
266            (predecessor->last_environment()->closure().is_null() ||
267             predecessor->last_environment()->closure()->shared()
268               ->VerifyBailoutId(ast_id)));
269     simulate->set_ast_id(ast_id);
270     predecessor->last_environment()->set_ast_id(ast_id);
271   }
272 }
273
274
275 bool HBasicBlock::Dominates(HBasicBlock* other) const {
276   HBasicBlock* current = other->dominator();
277   while (current != NULL) {
278     if (current == this) return true;
279     current = current->dominator();
280   }
281   return false;
282 }
283
284
285 bool HBasicBlock::EqualToOrDominates(HBasicBlock* other) const {
286   if (this == other) return true;
287   return Dominates(other);
288 }
289
290
291 int HBasicBlock::LoopNestingDepth() const {
292   const HBasicBlock* current = this;
293   int result  = (current->IsLoopHeader()) ? 1 : 0;
294   while (current->parent_loop_header() != NULL) {
295     current = current->parent_loop_header();
296     result++;
297   }
298   return result;
299 }
300
301
302 void HBasicBlock::PostProcessLoopHeader(IterationStatement* stmt) {
303   ASSERT(IsLoopHeader());
304
305   SetJoinId(stmt->EntryId());
306   if (predecessors()->length() == 1) {
307     // This is a degenerate loop.
308     DetachLoopInformation();
309     return;
310   }
311
312   // Only the first entry into the loop is from outside the loop. All other
313   // entries must be back edges.
314   for (int i = 1; i < predecessors()->length(); ++i) {
315     loop_information()->RegisterBackEdge(predecessors()->at(i));
316   }
317 }
318
319
320 void HBasicBlock::MarkSuccEdgeUnreachable(int succ) {
321   ASSERT(IsFinished());
322   HBasicBlock* succ_block = end()->SuccessorAt(succ);
323
324   ASSERT(succ_block->predecessors()->length() == 1);
325   succ_block->MarkUnreachable();
326 }
327
328
329 void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
330   if (HasPredecessor()) {
331     // Only loop header blocks can have a predecessor added after
332     // instructions have been added to the block (they have phis for all
333     // values in the environment; these phis may be eliminated later).
334     ASSERT(IsLoopHeader() || first_ == NULL);
335     HEnvironment* incoming_env = pred->last_environment();
336     if (IsLoopHeader()) {
337       ASSERT(phis()->length() == incoming_env->length());
338       for (int i = 0; i < phis_.length(); ++i) {
339         phis_[i]->AddInput(incoming_env->values()->at(i));
340       }
341     } else {
342       last_environment()->AddIncomingEdge(this, pred->last_environment());
343     }
344   } else if (!HasEnvironment() && !IsFinished()) {
345     ASSERT(!IsLoopHeader());
346     SetInitialEnvironment(pred->last_environment()->Copy());
347   }
348
349   predecessors_.Add(pred, zone());
350 }
351
352
353 void HBasicBlock::AddDominatedBlock(HBasicBlock* block) {
354   ASSERT(!dominated_blocks_.Contains(block));
355   // Keep the list of dominated blocks sorted such that if there are two
356   // successive blocks in this list, the predecessor comes before the successor.
357   int index = 0;
358   while (index < dominated_blocks_.length() &&
359          dominated_blocks_[index]->block_id() < block->block_id()) {
360     ++index;
361   }
362   dominated_blocks_.InsertAt(index, block, zone());
363 }
364
365
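// Classic dominator-intersection step: repeatedly walk the candidate with the
// larger block id up its dominator chain until the two chains meet; the
// meeting block becomes this block's (possibly updated) dominator.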
366 void HBasicBlock::AssignCommonDominator(HBasicBlock* other) {
367   if (dominator_ == NULL) {
368     dominator_ = other;
369     other->AddDominatedBlock(this);
370   } else if (other->dominator() != NULL) {
371     HBasicBlock* first = dominator_;
372     HBasicBlock* second = other;
373
374     while (first != second) {
375       if (first->block_id() > second->block_id()) {
376         first = first->dominator();
377       } else {
378         second = second->dominator();
379       }
380       ASSERT(first != NULL && second != NULL);
381     }
382
383     if (dominator_ != first) {
384       ASSERT(dominator_->dominated_blocks_.Contains(this));
385       dominator_->dominated_blocks_.RemoveElement(this);
386       dominator_ = first;
387       first->AddDominatedBlock(this);
388     }
389   }
390 }
391
392
393 void HBasicBlock::AssignLoopSuccessorDominators() {
394   // Mark blocks that dominate all subsequent reachable blocks inside their
395   // loop. Exploit the fact that blocks are sorted in reverse post order. When
396   // the loop is visited in increasing block id order, if the number of
397   // non-loop-exiting successor edges at the dominator_candidate block doesn't
398   // exceed the number of previously encountered predecessor edges, there is no
399   // path from the loop header to any block with higher id that doesn't go
400   // through the dominator_candidate block. In this case, the
401   // dominator_candidate block is guaranteed to dominate all blocks reachable
402   // from it with higher ids.
403   HBasicBlock* last = loop_information()->GetLastBackEdge();
404   int outstanding_successors = 1;  // one edge from the pre-header
405   // Header always dominates everything.
406   MarkAsLoopSuccessorDominator();
407   for (int j = block_id(); j <= last->block_id(); ++j) {
408     HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
409     for (HPredecessorIterator it(dominator_candidate); !it.Done();
410          it.Advance()) {
411       HBasicBlock* predecessor = it.Current();
412       // Don't count back edges.
413       if (predecessor->block_id() < dominator_candidate->block_id()) {
414         outstanding_successors--;
415       }
416     }
417
418     // If more successors than predecessors have been seen in the loop up to
419     // now, it's not possible to guarantee that the current block dominates
420     // all of the blocks with higher IDs. In this case, assume conservatively
421     // that those paths through the loop that don't go through the current block
422     // contain all of the loop's dependencies. Also be careful to record
423     // dominator information about the current loop that's being processed,
424     // and not nested loops, which will be processed when
425     // AssignLoopSuccessorDominators gets called on their header.
426     ASSERT(outstanding_successors >= 0);
427     HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header();
428     if (outstanding_successors == 0 &&
429         (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
430       dominator_candidate->MarkAsLoopSuccessorDominator();
431     }
432     HControlInstruction* end = dominator_candidate->end();
433     for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
434       HBasicBlock* successor = it.Current();
435       // Only count successors that remain inside the loop and don't loop back
436       // to a loop header.
437       if (successor->block_id() > dominator_candidate->block_id() &&
438           successor->block_id() <= last->block_id()) {
439         // Backwards edges must land on loop headers.
440         ASSERT(successor->block_id() > dominator_candidate->block_id() ||
441                successor->IsLoopHeader());
442         outstanding_successors++;
443       }
444     }
445   }
446 }
447
448
449 int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
450   for (int i = 0; i < predecessors_.length(); ++i) {
451     if (predecessors_[i] == predecessor) return i;
452   }
453   UNREACHABLE();
454   return -1;
455 }
456
457
458 #ifdef DEBUG
459 void HBasicBlock::Verify() {
460   // Check that every block is finished.
461   ASSERT(IsFinished());
462   ASSERT(block_id() >= 0);
463
464   // Check that the incoming edges are in edge split form.
465   if (predecessors_.length() > 1) {
466     for (int i = 0; i < predecessors_.length(); ++i) {
467       ASSERT(predecessors_[i]->end()->SecondSuccessor() == NULL);
468     }
469   }
470 }
471 #endif
472
473
474 void HLoopInformation::RegisterBackEdge(HBasicBlock* block) {
475   this->back_edges_.Add(block, block->zone());
476   AddBlock(block);
477 }
478
479
480 HBasicBlock* HLoopInformation::GetLastBackEdge() const {
481   int max_id = -1;
482   HBasicBlock* result = NULL;
483   for (int i = 0; i < back_edges_.length(); ++i) {
484     HBasicBlock* cur = back_edges_[i];
485     if (cur->block_id() > max_id) {
486       max_id = cur->block_id();
487       result = cur;
488     }
489   }
490   return result;
491 }
492
493
494 void HLoopInformation::AddBlock(HBasicBlock* block) {
495   if (block == loop_header()) return;
496   if (block->parent_loop_header() == loop_header()) return;
497   if (block->parent_loop_header() != NULL) {
498     AddBlock(block->parent_loop_header());
499   } else {
500     block->set_parent_loop_header(loop_header());
501     blocks_.Add(block, block->zone());
502     for (int i = 0; i < block->predecessors()->length(); ++i) {
503       AddBlock(block->predecessors()->at(i));
504     }
505   }
506 }
507
508
509 #ifdef DEBUG
510
511 // Checks reachability of the blocks in this graph and stores a bit in
512 // the BitVector "reachable()" for every block that can be reached
513 // from the start block of the graph. If "dont_visit" is non-null, the given
514 // block is treated as if it were not part of the graph. "visited_count()"
515 // returns the number of reachable blocks.
516 class ReachabilityAnalyzer BASE_EMBEDDED {
517  public:
518   ReachabilityAnalyzer(HBasicBlock* entry_block,
519                        int block_count,
520                        HBasicBlock* dont_visit)
521       : visited_count_(0),
522         stack_(16, entry_block->zone()),
523         reachable_(block_count, entry_block->zone()),
524         dont_visit_(dont_visit) {
525     PushBlock(entry_block);
526     Analyze();
527   }
528
529   int visited_count() const { return visited_count_; }
530   const BitVector* reachable() const { return &reachable_; }
531
532  private:
533   void PushBlock(HBasicBlock* block) {
534     if (block != NULL && block != dont_visit_ &&
535         !reachable_.Contains(block->block_id())) {
536       reachable_.Add(block->block_id());
537       stack_.Add(block, block->zone());
538       visited_count_++;
539     }
540   }
541
542   void Analyze() {
543     while (!stack_.is_empty()) {
544       HControlInstruction* end = stack_.RemoveLast()->end();
545       for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
546         PushBlock(it.Current());
547       }
548     }
549   }
550
551   int visited_count_;
552   ZoneList<HBasicBlock*> stack_;
553   BitVector reachable_;
554   HBasicBlock* dont_visit_;
555 };
556
557
558 void HGraph::Verify(bool do_full_verify) const {
559   Heap::RelocationLock relocation_lock(isolate()->heap());
560   AllowHandleDereference allow_deref;
561   AllowDeferredHandleDereference allow_deferred_deref;
562   for (int i = 0; i < blocks_.length(); i++) {
563     HBasicBlock* block = blocks_.at(i);
564
565     block->Verify();
566
567     // Check that every block contains at least one node and that only the last
568     // node is a control instruction.
569     HInstruction* current = block->first();
570     ASSERT(current != NULL && current->IsBlockEntry());
571     while (current != NULL) {
572       ASSERT((current->next() == NULL) == current->IsControlInstruction());
573       ASSERT(current->block() == block);
574       current->Verify();
575       current = current->next();
576     }
577
578     // Check that successors are correctly set.
579     HBasicBlock* first = block->end()->FirstSuccessor();
580     HBasicBlock* second = block->end()->SecondSuccessor();
581     ASSERT(second == NULL || first != NULL);
582
583     // Check that the predecessor array is correct.
584     if (first != NULL) {
585       ASSERT(first->predecessors()->Contains(block));
586       if (second != NULL) {
587         ASSERT(second->predecessors()->Contains(block));
588       }
589     }
590
591     // Check that phis have correct arguments.
592     for (int j = 0; j < block->phis()->length(); j++) {
593       HPhi* phi = block->phis()->at(j);
594       phi->Verify();
595     }
596
597     // Check that all join blocks have predecessors that end with an
598     // unconditional goto and agree on their environment node id.
599     if (block->predecessors()->length() >= 2) {
600       BailoutId id =
601           block->predecessors()->first()->last_environment()->ast_id();
602       for (int k = 0; k < block->predecessors()->length(); k++) {
603         HBasicBlock* predecessor = block->predecessors()->at(k);
604         ASSERT(predecessor->end()->IsGoto() ||
605                predecessor->end()->IsDeoptimize());
606         ASSERT(predecessor->last_environment()->ast_id() == id);
607       }
608     }
609   }
610
611   // Check special property of first block to have no predecessors.
612   ASSERT(blocks_.at(0)->predecessors()->is_empty());
613
614   if (do_full_verify) {
615     // Check that the graph is fully connected.
616     ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), NULL);
617     ASSERT(analyzer.visited_count() == blocks_.length());
618
619     // Check that entry block dominator is NULL.
620     ASSERT(entry_block_->dominator() == NULL);
621
622     // Check dominators.
623     for (int i = 0; i < blocks_.length(); ++i) {
624       HBasicBlock* block = blocks_.at(i);
625       if (block->dominator() == NULL) {
626         // Only the start block may have no dominator assigned.
627         ASSERT(i == 0);
628       } else {
629         // Check that the block is unreachable when its dominator is skipped.
630         ReachabilityAnalyzer dominator_analyzer(entry_block_,
631                                                 blocks_.length(),
632                                                 block->dominator());
633         ASSERT(!dominator_analyzer.reachable()->Contains(block->block_id()));
634       }
635     }
636   }
637 }
638
639 #endif
640
641
642 HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
643                                int32_t value) {
644   if (!pointer->is_set()) {
645     // Can't pass GetInvalidContext() to HConstant::New, because that will
646     // recursively call GetConstant
647     HConstant* constant = HConstant::New(zone(), NULL, value);
648     constant->InsertAfter(entry_block()->first());
649     pointer->set(constant);
650     return constant;
651   }
652   return ReinsertConstantIfNecessary(pointer->get());
653 }
654
655
656 HConstant* HGraph::ReinsertConstantIfNecessary(HConstant* constant) {
657   if (!constant->IsLinked()) {
658     // The constant was removed from the graph. Reinsert.
659     constant->ClearFlag(HValue::kIsDead);
660     constant->InsertAfter(entry_block()->first());
661   }
662   return constant;
663 }
664
665
666 HConstant* HGraph::GetConstant0() {
667   return GetConstant(&constant_0_, 0);
668 }
669
670
671 HConstant* HGraph::GetConstant1() {
672   return GetConstant(&constant_1_, 1);
673 }
674
675
676 HConstant* HGraph::GetConstantMinus1() {
677   return GetConstant(&constant_minus1_, -1);
678 }
679
680
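// The macro below defines lazy accessors for the singleton oddball constants
// (undefined, true, false, the hole, null). Each constant is created on first
// use, inserted right after the entry block's block-entry instruction, and
// re-inserted by ReinsertConstantIfNecessary() if dead code elimination has
// removed it.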
681 #define DEFINE_GET_CONSTANT(Name, name, htype, boolean_value)                  \
682 HConstant* HGraph::GetConstant##Name() {                                       \
683   if (!constant_##name##_.is_set()) {                                          \
684     HConstant* constant = new(zone()) HConstant(                               \
685         Unique<Object>::CreateImmovable(isolate()->factory()->name##_value()), \
686         Representation::Tagged(),                                              \
687         htype,                                                                 \
688         true,                                                                  \
689         boolean_value,                                                         \
690         false,                                                                 \
691         ODDBALL_TYPE);                                                         \
692     constant->InsertAfter(entry_block()->first());                             \
693     constant_##name##_.set(constant);                                          \
694   }                                                                            \
695   return ReinsertConstantIfNecessary(constant_##name##_.get());                \
696 }
697
698
699 DEFINE_GET_CONSTANT(Undefined, undefined, HType::Tagged(), false)
700 DEFINE_GET_CONSTANT(True, true, HType::Boolean(), true)
701 DEFINE_GET_CONSTANT(False, false, HType::Boolean(), false)
702 DEFINE_GET_CONSTANT(Hole, the_hole, HType::Tagged(), false)
703 DEFINE_GET_CONSTANT(Null, null, HType::Tagged(), false)
704
705
706 #undef DEFINE_GET_CONSTANT
707
708 #define DEFINE_IS_CONSTANT(Name, name)                                         \
709 bool HGraph::IsConstant##Name(HConstant* constant) {                           \
710   return constant_##name##_.is_set() && constant == constant_##name##_.get();  \
711 }
712 DEFINE_IS_CONSTANT(Undefined, undefined)
713 DEFINE_IS_CONSTANT(0, 0)
714 DEFINE_IS_CONSTANT(1, 1)
715 DEFINE_IS_CONSTANT(Minus1, minus1)
716 DEFINE_IS_CONSTANT(True, true)
717 DEFINE_IS_CONSTANT(False, false)
718 DEFINE_IS_CONSTANT(Hole, the_hole)
719 DEFINE_IS_CONSTANT(Null, null)
720
721 #undef DEFINE_IS_CONSTANT
722
723
724 HConstant* HGraph::GetInvalidContext() {
725   return GetConstant(&constant_invalid_context_, 0xFFFFC0C7);
726 }
727
728
729 bool HGraph::IsStandardConstant(HConstant* constant) {
730   if (IsConstantUndefined(constant)) return true;
731   if (IsConstant0(constant)) return true;
732   if (IsConstant1(constant)) return true;
733   if (IsConstantMinus1(constant)) return true;
734   if (IsConstantTrue(constant)) return true;
735   if (IsConstantFalse(constant)) return true;
736   if (IsConstantHole(constant)) return true;
737   if (IsConstantNull(constant)) return true;
738   return false;
739 }
740
741
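// A minimal IfBuilder usage sketch, mirroring how the builder is used later
// in this file (e.g. BuildCheckForCapacityGrow):
//
//   IfBuilder checker(this);
//   checker.If<HCompareNumericAndBranch>(key, length, Token::GTE);
//   checker.Then();
//   //   ... emit code for the true branch ...
//   checker.Else();
//   //   ... emit code for the false branch ...
//   checker.End();
//
// Values that must survive the join are pushed onto the environment in each
// branch and popped after End().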
742 HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder)
743     : builder_(builder),
744       finished_(false),
745       did_then_(false),
746       did_else_(false),
747       did_else_if_(false),
748       did_and_(false),
749       did_or_(false),
750       captured_(false),
751       needs_compare_(true),
752       pending_merge_block_(false),
753       split_edge_merge_block_(NULL),
754       merge_at_join_blocks_(NULL),
755       normal_merge_at_join_block_count_(0),
756       deopt_merge_at_join_block_count_(0) {
757   HEnvironment* env = builder->environment();
758   first_true_block_ = builder->CreateBasicBlock(env->Copy());
759   first_false_block_ = builder->CreateBasicBlock(env->Copy());
760 }
761
762
763 HGraphBuilder::IfBuilder::IfBuilder(
764     HGraphBuilder* builder,
765     HIfContinuation* continuation)
766     : builder_(builder),
767       finished_(false),
768       did_then_(false),
769       did_else_(false),
770       did_else_if_(false),
771       did_and_(false),
772       did_or_(false),
773       captured_(false),
774       needs_compare_(false),
775       pending_merge_block_(false),
776       first_true_block_(NULL),
777       first_false_block_(NULL),
778       split_edge_merge_block_(NULL),
779       merge_at_join_blocks_(NULL),
780       normal_merge_at_join_block_count_(0),
781       deopt_merge_at_join_block_count_(0) {
782   continuation->Continue(&first_true_block_,
783                          &first_false_block_);
784 }
785
786
787 HControlInstruction* HGraphBuilder::IfBuilder::AddCompare(
788     HControlInstruction* compare) {
789   ASSERT(did_then_ == did_else_);
790   if (did_else_) {
791     // Handle if-then-elseif
792     did_else_if_ = true;
793     did_else_ = false;
794     did_then_ = false;
795     did_and_ = false;
796     did_or_ = false;
797     pending_merge_block_ = false;
798     split_edge_merge_block_ = NULL;
799     HEnvironment* env = builder_->environment();
800     first_true_block_ = builder_->CreateBasicBlock(env->Copy());
801     first_false_block_ = builder_->CreateBasicBlock(env->Copy());
802   }
803   if (split_edge_merge_block_ != NULL) {
804     HEnvironment* env = first_false_block_->last_environment();
805     HBasicBlock* split_edge =
806         builder_->CreateBasicBlock(env->Copy());
807     if (did_or_) {
808       compare->SetSuccessorAt(0, split_edge);
809       compare->SetSuccessorAt(1, first_false_block_);
810     } else {
811       compare->SetSuccessorAt(0, first_true_block_);
812       compare->SetSuccessorAt(1, split_edge);
813     }
814     builder_->GotoNoSimulate(split_edge, split_edge_merge_block_);
815   } else {
816     compare->SetSuccessorAt(0, first_true_block_);
817     compare->SetSuccessorAt(1, first_false_block_);
818   }
819   builder_->FinishCurrentBlock(compare);
820   needs_compare_ = false;
821   return compare;
822 }
823
824
825 void HGraphBuilder::IfBuilder::Or() {
826   ASSERT(!needs_compare_);
827   ASSERT(!did_and_);
828   did_or_ = true;
829   HEnvironment* env = first_false_block_->last_environment();
830   if (split_edge_merge_block_ == NULL) {
831     split_edge_merge_block_ =
832         builder_->CreateBasicBlock(env->Copy());
833     builder_->GotoNoSimulate(first_true_block_, split_edge_merge_block_);
834     first_true_block_ = split_edge_merge_block_;
835   }
836   builder_->set_current_block(first_false_block_);
837   first_false_block_ = builder_->CreateBasicBlock(env->Copy());
838 }
839
840
841 void HGraphBuilder::IfBuilder::And() {
842   ASSERT(!needs_compare_);
843   ASSERT(!did_or_);
844   did_and_ = true;
845   HEnvironment* env = first_false_block_->last_environment();
846   if (split_edge_merge_block_ == NULL) {
847     split_edge_merge_block_ = builder_->CreateBasicBlock(env->Copy());
848     builder_->GotoNoSimulate(first_false_block_, split_edge_merge_block_);
849     first_false_block_ = split_edge_merge_block_;
850   }
851   builder_->set_current_block(first_true_block_);
852   first_true_block_ = builder_->CreateBasicBlock(env->Copy());
853 }
854
855
856 void HGraphBuilder::IfBuilder::CaptureContinuation(
857     HIfContinuation* continuation) {
858   ASSERT(!did_else_if_);
859   ASSERT(!finished_);
860   ASSERT(!captured_);
861
862   HBasicBlock* true_block = NULL;
863   HBasicBlock* false_block = NULL;
864   Finish(&true_block, &false_block);
865   ASSERT(true_block != NULL);
866   ASSERT(false_block != NULL);
867   continuation->Capture(true_block, false_block);
868   captured_ = true;
869   builder_->set_current_block(NULL);
870   End();
871 }
872
873
874 void HGraphBuilder::IfBuilder::JoinContinuation(HIfContinuation* continuation) {
875   ASSERT(!did_else_if_);
876   ASSERT(!finished_);
877   ASSERT(!captured_);
878   HBasicBlock* true_block = NULL;
879   HBasicBlock* false_block = NULL;
880   Finish(&true_block, &false_block);
881   merge_at_join_blocks_ = NULL;
882   if (true_block != NULL && !true_block->IsFinished()) {
883     ASSERT(continuation->IsTrueReachable());
884     builder_->GotoNoSimulate(true_block, continuation->true_branch());
885   }
886   if (false_block != NULL && !false_block->IsFinished()) {
887     ASSERT(continuation->IsFalseReachable());
888     builder_->GotoNoSimulate(false_block, continuation->false_branch());
889   }
890   captured_ = true;
891   End();
892 }
893
894
895 void HGraphBuilder::IfBuilder::Then() {
896   ASSERT(!captured_);
897   ASSERT(!finished_);
898   did_then_ = true;
899   if (needs_compare_) {
900     // Handle ifs without any expressions; they jump directly to the "else"
901     // branch. However, we must pretend that the "then" branch is reachable,
902     // so that the graph builder visits it and sees any live range extending
903     // constructs within it.
904     HConstant* constant_false = builder_->graph()->GetConstantFalse();
905     ToBooleanStub::Types boolean_type = ToBooleanStub::Types();
906     boolean_type.Add(ToBooleanStub::BOOLEAN);
907     HBranch* branch = builder()->New<HBranch>(
908         constant_false, boolean_type, first_true_block_, first_false_block_);
909     builder_->FinishCurrentBlock(branch);
910   }
911   builder_->set_current_block(first_true_block_);
912   pending_merge_block_ = true;
913 }
914
915
916 void HGraphBuilder::IfBuilder::Else() {
917   ASSERT(did_then_);
918   ASSERT(!captured_);
919   ASSERT(!finished_);
920   AddMergeAtJoinBlock(false);
921   builder_->set_current_block(first_false_block_);
922   pending_merge_block_ = true;
923   did_else_ = true;
924 }
925
926
927 void HGraphBuilder::IfBuilder::Deopt(const char* reason) {
928   ASSERT(did_then_);
929   builder_->Add<HDeoptimize>(reason, Deoptimizer::EAGER);
930   AddMergeAtJoinBlock(true);
931 }
932
933
934 void HGraphBuilder::IfBuilder::Return(HValue* value) {
935   HValue* parameter_count = builder_->graph()->GetConstantMinus1();
936   builder_->FinishExitCurrentBlock(
937       builder_->New<HReturn>(value, parameter_count));
938   AddMergeAtJoinBlock(false);
939 }
940
941
942 void HGraphBuilder::IfBuilder::AddMergeAtJoinBlock(bool deopt) {
943   if (!pending_merge_block_) return;
944   HBasicBlock* block = builder_->current_block();
945   ASSERT(block == NULL || !block->IsFinished());
946   MergeAtJoinBlock* record =
947       new(builder_->zone()) MergeAtJoinBlock(block, deopt,
948                                              merge_at_join_blocks_);
949   merge_at_join_blocks_ = record;
950   if (block != NULL) {
951     ASSERT(block->end() == NULL);
952     if (deopt) {
953       normal_merge_at_join_block_count_++;
954     } else {
955       deopt_merge_at_join_block_count_++;
956     }
957   }
958   builder_->set_current_block(NULL);
959   pending_merge_block_ = false;
960 }
961
962
963 void HGraphBuilder::IfBuilder::Finish() {
964   ASSERT(!finished_);
965   if (!did_then_) {
966     Then();
967   }
968   AddMergeAtJoinBlock(false);
969   if (!did_else_) {
970     Else();
971     AddMergeAtJoinBlock(false);
972   }
973   finished_ = true;
974 }
975
976
977 void HGraphBuilder::IfBuilder::Finish(HBasicBlock** then_continuation,
978                                       HBasicBlock** else_continuation) {
979   Finish();
980
981   MergeAtJoinBlock* else_record = merge_at_join_blocks_;
982   if (else_continuation != NULL) {
983     *else_continuation = else_record->block_;
984   }
985   MergeAtJoinBlock* then_record = else_record->next_;
986   if (then_continuation != NULL) {
987     *then_continuation = then_record->block_;
988   }
989   ASSERT(then_record->next_ == NULL);
990 }
991
992
993 void HGraphBuilder::IfBuilder::End() {
994   if (captured_) return;
995   Finish();
996
997   int total_merged_blocks = normal_merge_at_join_block_count_ +
998     deopt_merge_at_join_block_count_;
999   ASSERT(total_merged_blocks >= 1);
1000   HBasicBlock* merge_block = total_merged_blocks == 1
1001       ? NULL : builder_->graph()->CreateBasicBlock();
1002
1003   // Merge non-deopt blocks first to ensure the environment has the right size
1004   // for padding.
1005   MergeAtJoinBlock* current = merge_at_join_blocks_;
1006   while (current != NULL) {
1007     if (!current->deopt_ && current->block_ != NULL) {
1008       // If there is only one block that makes it through to the end of the
1009       // if, then just set it as the current block and continue rather than
1010       // creating an unnecessary merge block.
1011       if (total_merged_blocks == 1) {
1012         builder_->set_current_block(current->block_);
1013         return;
1014       }
1015       builder_->GotoNoSimulate(current->block_, merge_block);
1016     }
1017     current = current->next_;
1018   }
1019
1020   // Merge deopt blocks, padding when necessary.
1021   current = merge_at_join_blocks_;
1022   while (current != NULL) {
1023     if (current->deopt_ && current->block_ != NULL) {
1024       current->block_->FinishExit(
1025           HAbnormalExit::New(builder_->zone(), NULL),
1026           HSourcePosition::Unknown());
1027     }
1028     current = current->next_;
1029   }
1030   builder_->set_current_block(merge_block);
1031 }
1032
1033
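// A minimal LoopBuilder usage sketch, mirroring how the builder is used later
// in this file (e.g. BuildRegExpConstructResult):
//
//   LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
//   HValue* index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
//   {
//     // ... emit the loop body; Break() exits early ...
//   }
//   loop.EndBody();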
1034 HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder,
1035                                         HValue* context,
1036                                         LoopBuilder::Direction direction)
1037     : builder_(builder),
1038       context_(context),
1039       direction_(direction),
1040       finished_(false) {
1041   header_block_ = builder->CreateLoopHeaderBlock();
1042   body_block_ = NULL;
1043   exit_block_ = NULL;
1044   exit_trampoline_block_ = NULL;
1045   increment_amount_ = builder_->graph()->GetConstant1();
1046 }
1047
1048
1049 HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder,
1050                                         HValue* context,
1051                                         LoopBuilder::Direction direction,
1052                                         HValue* increment_amount)
1053     : builder_(builder),
1054       context_(context),
1055       direction_(direction),
1056       finished_(false) {
1057   header_block_ = builder->CreateLoopHeaderBlock();
1058   body_block_ = NULL;
1059   exit_block_ = NULL;
1060   exit_trampoline_block_ = NULL;
1061   increment_amount_ = increment_amount;
1062 }
1063
1064
1065 HValue* HGraphBuilder::LoopBuilder::BeginBody(
1066     HValue* initial,
1067     HValue* terminating,
1068     Token::Value token) {
1069   HEnvironment* env = builder_->environment();
1070   phi_ = header_block_->AddNewPhi(env->values()->length());
1071   phi_->AddInput(initial);
1072   env->Push(initial);
1073   builder_->GotoNoSimulate(header_block_);
1074
1075   HEnvironment* body_env = env->Copy();
1076   HEnvironment* exit_env = env->Copy();
1077   // Remove the phi from the expression stack
1078   body_env->Pop();
1079   exit_env->Pop();
1080   body_block_ = builder_->CreateBasicBlock(body_env);
1081   exit_block_ = builder_->CreateBasicBlock(exit_env);
1082
1083   builder_->set_current_block(header_block_);
1084   env->Pop();
1085   builder_->FinishCurrentBlock(builder_->New<HCompareNumericAndBranch>(
1086           phi_, terminating, token, body_block_, exit_block_));
1087
1088   builder_->set_current_block(body_block_);
1089   if (direction_ == kPreIncrement || direction_ == kPreDecrement) {
1090     HValue* one = builder_->graph()->GetConstant1();
1091     if (direction_ == kPreIncrement) {
1092       increment_ = HAdd::New(zone(), context_, phi_, one);
1093     } else {
1094       increment_ = HSub::New(zone(), context_, phi_, one);
1095     }
1096     increment_->ClearFlag(HValue::kCanOverflow);
1097     builder_->AddInstruction(increment_);
1098     return increment_;
1099   } else {
1100     return phi_;
1101   }
1102 }
1103
1104
1105 void HGraphBuilder::LoopBuilder::Break() {
1106   if (exit_trampoline_block_ == NULL) {
1107     // It's the first time we've seen a break.
1108     HEnvironment* env = exit_block_->last_environment()->Copy();
1109     exit_trampoline_block_ = builder_->CreateBasicBlock(env);
1110     builder_->GotoNoSimulate(exit_block_, exit_trampoline_block_);
1111   }
1112
1113   builder_->GotoNoSimulate(exit_trampoline_block_);
1114   builder_->set_current_block(NULL);
1115 }
1116
1117
1118 void HGraphBuilder::LoopBuilder::EndBody() {
1119   ASSERT(!finished_);
1120
1121   if (direction_ == kPostIncrement || direction_ == kPostDecrement) {
1122     if (direction_ == kPostIncrement) {
1123       increment_ = HAdd::New(zone(), context_, phi_, increment_amount_);
1124     } else {
1125       increment_ = HSub::New(zone(), context_, phi_, increment_amount_);
1126     }
1127     increment_->ClearFlag(HValue::kCanOverflow);
1128     builder_->AddInstruction(increment_);
1129   }
1130
1131   // Push the new increment value on the expression stack to merge into the phi.
1132   builder_->environment()->Push(increment_);
1133   HBasicBlock* last_block = builder_->current_block();
1134   builder_->GotoNoSimulate(last_block, header_block_);
1135   header_block_->loop_information()->RegisterBackEdge(last_block);
1136
1137   if (exit_trampoline_block_ != NULL) {
1138     builder_->set_current_block(exit_trampoline_block_);
1139   } else {
1140     builder_->set_current_block(exit_block_);
1141   }
1142   finished_ = true;
1143 }
1144
1145
1146 HGraph* HGraphBuilder::CreateGraph() {
1147   graph_ = new(zone()) HGraph(info_);
1148   if (FLAG_hydrogen_stats) isolate()->GetHStatistics()->Initialize(info_);
1149   CompilationPhase phase("H_Block building", info_);
1150   set_current_block(graph()->entry_block());
1151   if (!BuildGraph()) return NULL;
1152   graph()->FinalizeUniqueness();
1153   return graph_;
1154 }
1155
1156
1157 HInstruction* HGraphBuilder::AddInstruction(HInstruction* instr) {
1158   ASSERT(current_block() != NULL);
1159   ASSERT(!FLAG_hydrogen_track_positions ||
1160          !position_.IsUnknown() ||
1161          !info_->IsOptimizing());
1162   current_block()->AddInstruction(instr, source_position());
1163   if (graph()->IsInsideNoSideEffectsScope()) {
1164     instr->SetFlag(HValue::kHasNoObservableSideEffects);
1165   }
1166   return instr;
1167 }
1168
1169
1170 void HGraphBuilder::FinishCurrentBlock(HControlInstruction* last) {
1171   ASSERT(!FLAG_hydrogen_track_positions ||
1172          !info_->IsOptimizing() ||
1173          !position_.IsUnknown());
1174   current_block()->Finish(last, source_position());
1175   if (last->IsReturn() || last->IsAbnormalExit()) {
1176     set_current_block(NULL);
1177   }
1178 }
1179
1180
1181 void HGraphBuilder::FinishExitCurrentBlock(HControlInstruction* instruction) {
1182   ASSERT(!FLAG_hydrogen_track_positions || !info_->IsOptimizing() ||
1183          !position_.IsUnknown());
1184   current_block()->FinishExit(instruction, source_position());
1185   if (instruction->IsReturn() || instruction->IsAbnormalExit()) {
1186     set_current_block(NULL);
1187   }
1188 }
1189
1190
1191 void HGraphBuilder::AddIncrementCounter(StatsCounter* counter) {
1192   if (FLAG_native_code_counters && counter->Enabled()) {
1193     HValue* reference = Add<HConstant>(ExternalReference(counter));
1194     HValue* old_value = Add<HLoadNamedField>(
1195         reference, static_cast<HValue*>(NULL), HObjectAccess::ForCounter());
1196     HValue* new_value = AddUncasted<HAdd>(old_value, graph()->GetConstant1());
1197     new_value->ClearFlag(HValue::kCanOverflow);  // Ignore counter overflow
1198     Add<HStoreNamedField>(reference, HObjectAccess::ForCounter(),
1199                           new_value, STORE_TO_INITIALIZED_ENTRY);
1200   }
1201 }
1202
1203
1204 void HGraphBuilder::AddSimulate(BailoutId id,
1205                                 RemovableSimulate removable) {
1206   ASSERT(current_block() != NULL);
1207   ASSERT(!graph()->IsInsideNoSideEffectsScope());
1208   current_block()->AddNewSimulate(id, source_position(), removable);
1209 }
1210
1211
1212 HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
1213   HBasicBlock* b = graph()->CreateBasicBlock();
1214   b->SetInitialEnvironment(env);
1215   return b;
1216 }
1217
1218
1219 HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() {
1220   HBasicBlock* header = graph()->CreateBasicBlock();
1221   HEnvironment* entry_env = environment()->CopyAsLoopHeader(header);
1222   header->SetInitialEnvironment(entry_env);
1223   header->AttachLoopInformation();
1224   return header;
1225 }
1226
1227
1228 HValue* HGraphBuilder::BuildCheckHeapObject(HValue* obj) {
1229   if (obj->type().IsHeapObject()) return obj;
1230   return Add<HCheckHeapObject>(obj);
1231 }
1232
1233
1234 void HGraphBuilder::FinishExitWithHardDeoptimization(const char* reason) {
1235   Add<HDeoptimize>(reason, Deoptimizer::EAGER);
1236   FinishExitCurrentBlock(New<HAbnormalExit>());
1237 }
1238
1239
1240 HValue* HGraphBuilder::BuildCheckString(HValue* string) {
1241   if (!string->type().IsString()) {
1242     ASSERT(!string->IsConstant() ||
1243            !HConstant::cast(string)->HasStringValue());
1244     BuildCheckHeapObject(string);
1245     return Add<HCheckInstanceType>(string, HCheckInstanceType::IS_STRING);
1246   }
1247   return string;
1248 }
1249
1250
1251 HValue* HGraphBuilder::BuildWrapReceiver(HValue* object, HValue* function) {
1252   if (object->type().IsJSObject()) return object;
1253   if (function->IsConstant() &&
1254       HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
1255     Handle<JSFunction> f = Handle<JSFunction>::cast(
1256         HConstant::cast(function)->handle(isolate()));
1257     SharedFunctionInfo* shared = f->shared();
1258     if (shared->strict_mode() == STRICT || shared->native()) return object;
1259   }
1260   return Add<HWrapReceiver>(object, function);
1261 }
1262
1263
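// Emits the grow-on-out-of-bounds-store path: when the key falls outside the
// current length, the backing store is grown if the key also exceeds the
// current capacity (the allowed gap is bounded by JSObject::kMaxGap), the
// JSArray length is updated, and FAST_SMI_ELEMENTS stores pre-initialize the
// new slot with zero. Keys within the length only get a bounds check. The
// result is the (possibly reallocated) elements array.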
1264 HValue* HGraphBuilder::BuildCheckForCapacityGrow(
1265     HValue* object,
1266     HValue* elements,
1267     ElementsKind kind,
1268     HValue* length,
1269     HValue* key,
1270     bool is_js_array,
1271     PropertyAccessType access_type) {
1272   IfBuilder length_checker(this);
1273
1274   Token::Value token = IsHoleyElementsKind(kind) ? Token::GTE : Token::EQ;
1275   length_checker.If<HCompareNumericAndBranch>(key, length, token);
1276
1277   length_checker.Then();
1278
1279   HValue* current_capacity = AddLoadFixedArrayLength(elements);
1280
1281   IfBuilder capacity_checker(this);
1282
1283   capacity_checker.If<HCompareNumericAndBranch>(key, current_capacity,
1284                                                 Token::GTE);
1285   capacity_checker.Then();
1286
1287   HValue* max_gap = Add<HConstant>(static_cast<int32_t>(JSObject::kMaxGap));
1288   HValue* max_capacity = AddUncasted<HAdd>(current_capacity, max_gap);
1289
1290   Add<HBoundsCheck>(key, max_capacity);
1291
1292   HValue* new_capacity = BuildNewElementsCapacity(key);
1293   HValue* new_elements = BuildGrowElementsCapacity(object, elements,
1294                                                    kind, kind, length,
1295                                                    new_capacity);
1296
1297   environment()->Push(new_elements);
1298   capacity_checker.Else();
1299
1300   environment()->Push(elements);
1301   capacity_checker.End();
1302
1303   if (is_js_array) {
1304     HValue* new_length = AddUncasted<HAdd>(key, graph_->GetConstant1());
1305     new_length->ClearFlag(HValue::kCanOverflow);
1306
1307     Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(kind),
1308                           new_length);
1309   }
1310
1311   if (access_type == STORE && kind == FAST_SMI_ELEMENTS) {
1312     HValue* checked_elements = environment()->Top();
1313
1314     // Write zero to ensure that the new element is initialized with some smi.
1315     Add<HStoreKeyed>(checked_elements, key, graph()->GetConstant0(), kind);
1316   }
1317
1318   length_checker.Else();
1319   Add<HBoundsCheck>(key, length);
1320
1321   environment()->Push(elements);
1322   length_checker.End();
1323
1324   return environment()->Pop();
1325 }
1326
1327
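// If the elements array is copy-on-write (its map is the fixed COW array
// map), copy it before writing to it; otherwise the elements are returned
// unchanged.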
1328 HValue* HGraphBuilder::BuildCopyElementsOnWrite(HValue* object,
1329                                                 HValue* elements,
1330                                                 ElementsKind kind,
1331                                                 HValue* length) {
1332   Factory* factory = isolate()->factory();
1333
1334   IfBuilder cow_checker(this);
1335
1336   cow_checker.If<HCompareMap>(elements, factory->fixed_cow_array_map());
1337   cow_checker.Then();
1338
1339   HValue* capacity = AddLoadFixedArrayLength(elements);
1340
1341   HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind,
1342                                                    kind, length, capacity);
1343
1344   environment()->Push(new_elements);
1345
1346   cow_checker.Else();
1347
1348   environment()->Push(elements);
1349
1350   cow_checker.End();
1351
1352   return environment()->Pop();
1353 }
1354
1355
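// Changes an object's elements kind: traps allocation mementos when the
// transition is tracked, grows/copies the backing store for transitions that
// are not simple map changes (unless the elements are the empty fixed array),
// and finally installs the new map.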
1356 void HGraphBuilder::BuildTransitionElementsKind(HValue* object,
1357                                                 HValue* map,
1358                                                 ElementsKind from_kind,
1359                                                 ElementsKind to_kind,
1360                                                 bool is_jsarray) {
1361   ASSERT(!IsFastHoleyElementsKind(from_kind) ||
1362          IsFastHoleyElementsKind(to_kind));
1363
1364   if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
1365     Add<HTrapAllocationMemento>(object);
1366   }
1367
1368   if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
1369     HInstruction* elements = AddLoadElements(object);
1370
1371     HInstruction* empty_fixed_array = Add<HConstant>(
1372         isolate()->factory()->empty_fixed_array());
1373
1374     IfBuilder if_builder(this);
1375
1376     if_builder.IfNot<HCompareObjectEqAndBranch>(elements, empty_fixed_array);
1377
1378     if_builder.Then();
1379
1380     HInstruction* elements_length = AddLoadFixedArrayLength(elements);
1381
1382     HInstruction* array_length = is_jsarray
1383         ? Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
1384                                HObjectAccess::ForArrayLength(from_kind))
1385         : elements_length;
1386
1387     BuildGrowElementsCapacity(object, elements, from_kind, to_kind,
1388                               array_length, elements_length);
1389
1390     if_builder.End();
1391   }
1392
1393   Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map);
1394 }
1395
1396
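// Emits one probe of an open-addressed lookup in a SeededNumberDictionary:
// the entry index is ((hash + GetProbeOffset(current_probe)) & mask) scaled
// by kEntrySize and offset by kElementsStartIndex. On a key mismatch the next
// probe is emitted recursively, up to kNumberDictionaryProbes, after which
// the code deoptimizes.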
1397 HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoadHelper(
1398     HValue* elements,
1399     HValue* key,
1400     HValue* hash,
1401     HValue* mask,
1402     int current_probe) {
1403   if (current_probe == kNumberDictionaryProbes) {
1404     return NULL;
1405   }
1406
1407   int32_t offset = SeededNumberDictionary::GetProbeOffset(current_probe);
1408   HValue* raw_index = (current_probe == 0)
1409       ? hash
1410       : AddUncasted<HAdd>(hash, Add<HConstant>(offset));
1411   raw_index = AddUncasted<HBitwise>(Token::BIT_AND, raw_index, mask);
1412   int32_t entry_size = SeededNumberDictionary::kEntrySize;
1413   raw_index = AddUncasted<HMul>(raw_index, Add<HConstant>(entry_size));
1414   raw_index->ClearFlag(HValue::kCanOverflow);
1415
1416   int32_t base_offset = SeededNumberDictionary::kElementsStartIndex;
1417   HValue* key_index = AddUncasted<HAdd>(raw_index, Add<HConstant>(base_offset));
1418   key_index->ClearFlag(HValue::kCanOverflow);
1419
1420   HValue* candidate_key = Add<HLoadKeyed>(elements, key_index,
1421                                           static_cast<HValue*>(NULL),
1422                                           FAST_ELEMENTS);
1423
1424   IfBuilder key_compare(this);
1425   key_compare.IfNot<HCompareObjectEqAndBranch>(key, candidate_key);
1426   key_compare.Then();
1427   {
1428     // Key at the current probe doesn't match; try the next probe.
1429     HValue* result = BuildUncheckedDictionaryElementLoadHelper(
1430         elements, key, hash, mask, current_probe + 1);
1431     if (result == NULL) {
1432       key_compare.Deopt("probes exhausted in keyed load dictionary lookup");
1433       result = graph()->GetConstantUndefined();
1434     } else {
1435       Push(result);
1436     }
1437   }
1438   key_compare.Else();
1439   {
1440     // Key at the current probe matches. Details must be zero; otherwise the
1441     // dictionary element requires special handling.
1442     HValue* details_index = AddUncasted<HAdd>(
1443         raw_index, Add<HConstant>(base_offset + 2));
1444     details_index->ClearFlag(HValue::kCanOverflow);
1445
1446     HValue* details = Add<HLoadKeyed>(elements, details_index,
1447                                       static_cast<HValue*>(NULL),
1448                                       FAST_ELEMENTS);
1449     IfBuilder details_compare(this);
1450     details_compare.If<HCompareNumericAndBranch>(details,
1451                                                  graph()->GetConstant0(),
1452                                                  Token::NE);
1453     details_compare.ThenDeopt("keyed load dictionary element not fast case");
1454
1455     details_compare.Else();
1456     {
1457       // Key matches and details are zero --> fast case. Load and return the
1458       // value.
1459       HValue* result_index = AddUncasted<HAdd>(
1460           raw_index, Add<HConstant>(base_offset + 1));
1461       result_index->ClearFlag(HValue::kCanOverflow);
1462
1463       Push(Add<HLoadKeyed>(elements, result_index,
1464                            static_cast<HValue*>(NULL),
1465                            FAST_ELEMENTS));
1466     }
1467     details_compare.End();
1468   }
1469   key_compare.End();
1470
1471   return Pop();
1472 }
1473
1474
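// Computes the seeded integer hash used to index the number dictionary. This
// is presumably kept in sync with the runtime's ComputeIntegerHash so that
// lookups emitted here agree with entries written by the runtime.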
1475 HValue* HGraphBuilder::BuildElementIndexHash(HValue* index) {
1476   int32_t seed_value = static_cast<uint32_t>(isolate()->heap()->HashSeed());
1477   HValue* seed = Add<HConstant>(seed_value);
1478   HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, index, seed);
1479
1480   // hash = ~hash + (hash << 15);
1481   HValue* shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(15));
1482   HValue* not_hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash,
1483                                            graph()->GetConstantMinus1());
1484   hash = AddUncasted<HAdd>(shifted_hash, not_hash);
1485
1486   // hash = hash ^ (hash >> 12);
1487   shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(12));
1488   hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
1489
1490   // hash = hash + (hash << 2);
1491   shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(2));
1492   hash = AddUncasted<HAdd>(hash, shifted_hash);
1493
1494   // hash = hash ^ (hash >> 4);
1495   shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(4));
1496   hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
1497
1498   // hash = hash * 2057;
1499   hash = AddUncasted<HMul>(hash, Add<HConstant>(2057));
1500   hash->ClearFlag(HValue::kCanOverflow);
1501
1502   // hash = hash ^ (hash >> 16);
1503   shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(16));
1504   return AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
1505 }
1506
1507
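// Drives the probing helper above: hash the key, load the dictionary
// capacity and turn it into a mask (dictionary capacities are powers of two),
// then start the probe sequence at probe 0.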
1508 HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoad(HValue* receiver,
1509                                                            HValue* key) {
1510   HValue* elements = AddLoadElements(receiver);
1511
1512   HValue* hash = BuildElementIndexHash(key);
1513
1514   HValue* capacity = Add<HLoadKeyed>(
1515       elements,
1516       Add<HConstant>(NameDictionary::kCapacityIndex),
1517       static_cast<HValue*>(NULL),
1518       FAST_ELEMENTS);
1519
1520   HValue* mask = AddUncasted<HSub>(capacity, graph()->GetConstant1());
1521   mask->ChangeRepresentation(Representation::Integer32());
1522   mask->ClearFlag(HValue::kCanOverflow);
1523
1524   return BuildUncheckedDictionaryElementLoadHelper(elements, key,
1525                                                    hash, mask, 0);
1526 }
1527
1528
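// Allocates a JSRegExpResult and its elements FixedArray in a single
// allocation, initializes the result's map, properties, elements, length,
// index and input fields, and fills the elements with undefined.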
1529 HValue* HGraphBuilder::BuildRegExpConstructResult(HValue* length,
1530                                                   HValue* index,
1531                                                   HValue* input) {
1532   NoObservableSideEffectsScope scope(this);
1533
1534   // Compute the size of the JSRegExpResult plus a FixedArray of the given length.
1535   HValue* size = length;
1536   size = AddUncasted<HShl>(size, Add<HConstant>(kPointerSizeLog2));
1537   size = AddUncasted<HAdd>(size, Add<HConstant>(static_cast<int32_t>(
1538               JSRegExpResult::kSize + FixedArray::kHeaderSize)));
1539
1540   // Make sure the size does not exceed the max regular heap object size.
1541   Add<HBoundsCheck>(size, Add<HConstant>(Page::kMaxRegularHeapObjectSize));
1542
1543   // Allocate the JSRegExpResult and the FixedArray in one step.
1544   HValue* result = Add<HAllocate>(
1545       size, HType::JSArray(), NOT_TENURED, JS_ARRAY_TYPE);
1546
1547   // Determine the elements FixedArray.
1548   HValue* elements = Add<HInnerAllocatedObject>(
1549       result, Add<HConstant>(JSRegExpResult::kSize));
1550
1551   // Initialize the JSRegExpResult header.
1552   HValue* global_object = Add<HLoadNamedField>(
1553       context(), static_cast<HValue*>(NULL),
1554       HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
1555   HValue* native_context = Add<HLoadNamedField>(
1556       global_object, static_cast<HValue*>(NULL),
1557       HObjectAccess::ForGlobalObjectNativeContext());
1558   AddStoreMapNoWriteBarrier(result, Add<HLoadNamedField>(
1559           native_context, static_cast<HValue*>(NULL),
1560           HObjectAccess::ForContextSlot(Context::REGEXP_RESULT_MAP_INDEX)));
1561   Add<HStoreNamedField>(
1562       result, HObjectAccess::ForJSArrayOffset(JSArray::kPropertiesOffset),
1563       Add<HConstant>(isolate()->factory()->empty_fixed_array()));
1564   Add<HStoreNamedField>(
1565       result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
1566       elements);
1567   Add<HStoreNamedField>(
1568       result, HObjectAccess::ForJSArrayOffset(JSArray::kLengthOffset), length);
1569
1570   // Initialize the additional fields.
1571   Add<HStoreNamedField>(
1572       result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kIndexOffset),
1573       index);
1574   Add<HStoreNamedField>(
1575       result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kInputOffset),
1576       input);
1577
1578   // Initialize the elements header.
1579   AddStoreMapConstantNoWriteBarrier(elements,
1580                                     isolate()->factory()->fixed_array_map());
1581   Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(), length);
1582
1583   // Initialize the elements contents with undefined.
1584   LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
1585   index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
1586   {
1587     Add<HStoreKeyed>(elements, index, graph()->GetConstantUndefined(),
1588                      FAST_ELEMENTS);
1589   }
1590   loop.EndBody();
1591
1592   return result;
1593 }
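
// A rough sketch of the allocation done by BuildRegExpConstructResult above:
// the JSRegExpResult and its elements FixedArray come from a single
// allocation, so the requested size is
//
//   size = JSRegExpResult::kSize + FixedArray::kHeaderSize
//        + length * kPointerSize;        // length << kPointerSizeLog2
//
// and the FixedArray starts at offset JSRegExpResult::kSize inside |result|.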
1594
1595
1596 HValue* HGraphBuilder::BuildNumberToString(HValue* object, Type* type) {
1597   NoObservableSideEffectsScope scope(this);
1598
1599   // Convert constant numbers at compile time.
1600   if (object->IsConstant() && HConstant::cast(object)->HasNumberValue()) {
1601     Handle<Object> number = HConstant::cast(object)->handle(isolate());
1602     Handle<String> result = isolate()->factory()->NumberToString(number);
1603     return Add<HConstant>(result);
1604   }
1605
1606   // Create a joinable continuation.
1607   HIfContinuation found(graph()->CreateBasicBlock(),
1608                         graph()->CreateBasicBlock());
1609
1610   // Load the number string cache.
1611   HValue* number_string_cache =
1612       Add<HLoadRoot>(Heap::kNumberStringCacheRootIndex);
1613
1614   // Make the hash mask from the length of the number string cache. It
1615   // contains two elements (number and string) for each cache entry.
1616   HValue* mask = AddLoadFixedArrayLength(number_string_cache);
1617   mask->set_type(HType::Smi());
1618   mask = AddUncasted<HSar>(mask, graph()->GetConstant1());
1619   mask = AddUncasted<HSub>(mask, graph()->GetConstant1());
1620
1621   // Check whether object is a smi.
1622   IfBuilder if_objectissmi(this);
1623   if_objectissmi.If<HIsSmiAndBranch>(object);
1624   if_objectissmi.Then();
1625   {
1626     // Compute hash for smi similar to smi_get_hash().
1627     HValue* hash = AddUncasted<HBitwise>(Token::BIT_AND, object, mask);
1628
1629     // Load the key.
1630     HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
1631     HValue* key = Add<HLoadKeyed>(number_string_cache, key_index,
1632                                   static_cast<HValue*>(NULL),
1633                                   FAST_ELEMENTS, ALLOW_RETURN_HOLE);
1634
1635     // Check if object == key.
1636     IfBuilder if_objectiskey(this);
1637     if_objectiskey.If<HCompareObjectEqAndBranch>(object, key);
1638     if_objectiskey.Then();
1639     {
1640       // Make the key_index available.
1641       Push(key_index);
1642     }
1643     if_objectiskey.JoinContinuation(&found);
1644   }
1645   if_objectissmi.Else();
1646   {
1647     if (type->Is(Type::SignedSmall())) {
1648       if_objectissmi.Deopt("Expected smi");
1649     } else {
1650       // Check if the object is a heap number.
1651       IfBuilder if_objectisnumber(this);
1652       HValue* objectisnumber = if_objectisnumber.If<HCompareMap>(
1653           object, isolate()->factory()->heap_number_map());
1654       if_objectisnumber.Then();
1655       {
1656         // Compute hash for heap number similar to double_get_hash().
1657         HValue* low = Add<HLoadNamedField>(
1658             object, objectisnumber,
1659             HObjectAccess::ForHeapNumberValueLowestBits());
1660         HValue* high = Add<HLoadNamedField>(
1661             object, objectisnumber,
1662             HObjectAccess::ForHeapNumberValueHighestBits());
1663         HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, low, high);
1664         hash = AddUncasted<HBitwise>(Token::BIT_AND, hash, mask);
1665
1666         // Load the key.
1667         HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
1668         HValue* key = Add<HLoadKeyed>(number_string_cache, key_index,
1669                                       static_cast<HValue*>(NULL),
1670                                       FAST_ELEMENTS, ALLOW_RETURN_HOLE);
1671
1672         // Check if key is a heap number (the number string cache contains only
1673         // SMIs and heap numbers, so it is sufficient to do a SMI check here).
1674         IfBuilder if_keyisnotsmi(this);
1675         HValue* keyisnotsmi = if_keyisnotsmi.IfNot<HIsSmiAndBranch>(key);
1676         if_keyisnotsmi.Then();
1677         {
1678           // Check if values of key and object match.
1679           IfBuilder if_keyeqobject(this);
1680           if_keyeqobject.If<HCompareNumericAndBranch>(
1681               Add<HLoadNamedField>(key, keyisnotsmi,
1682                                    HObjectAccess::ForHeapNumberValue()),
1683               Add<HLoadNamedField>(object, objectisnumber,
1684                                    HObjectAccess::ForHeapNumberValue()),
1685               Token::EQ);
1686           if_keyeqobject.Then();
1687           {
1688             // Make the key_index available.
1689             Push(key_index);
1690           }
1691           if_keyeqobject.JoinContinuation(&found);
1692         }
1693         if_keyisnotsmi.JoinContinuation(&found);
1694       }
1695       if_objectisnumber.Else();
1696       {
1697         if (type->Is(Type::Number())) {
1698           if_objectisnumber.Deopt("Expected heap number");
1699         }
1700       }
1701       if_objectisnumber.JoinContinuation(&found);
1702     }
1703   }
1704   if_objectissmi.JoinContinuation(&found);
1705
1706   // Check for cache hit.
1707   IfBuilder if_found(this, &found);
1708   if_found.Then();
1709   {
1710     // Count number to string operation in native code.
1711     AddIncrementCounter(isolate()->counters()->number_to_string_native());
1712
1713     // Load the value in case of cache hit.
1714     HValue* key_index = Pop();
1715     HValue* value_index = AddUncasted<HAdd>(key_index, graph()->GetConstant1());
1716     Push(Add<HLoadKeyed>(number_string_cache, value_index,
1717                          static_cast<HValue*>(NULL),
1718                          FAST_ELEMENTS, ALLOW_RETURN_HOLE));
1719   }
1720   if_found.Else();
1721   {
1722     // Cache miss, fallback to runtime.
1723     Add<HPushArgument>(object);
1724     Push(Add<HCallRuntime>(
1725             isolate()->factory()->empty_string(),
1726             Runtime::FunctionForId(Runtime::kHiddenNumberToStringSkipCache),
1727             1));
1728   }
1729   if_found.End();
1730
1731   return Pop();
1732 }
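
// For reference, the number string cache probed above keeps each entry as a
// (number, string) pair in two consecutive FixedArray slots. With
// mask = cache_length / 2 - 1, the lookup sketched by the graph is roughly
//
//   key_index   = (hash & mask) << 1;    // the cached number
//   value_index = key_index + 1;         // the cached string
//
// where |hash| is the smi value itself, or the XOR of the upper and lower
// 32 bits of the heap number.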
1733
1734
1735 HAllocate* HGraphBuilder::BuildAllocate(
1736     HValue* object_size,
1737     HType type,
1738     InstanceType instance_type,
1739     HAllocationMode allocation_mode) {
1740   // Compute the effective allocation size.
1741   HValue* size = object_size;
1742   if (allocation_mode.CreateAllocationMementos()) {
1743     size = AddUncasted<HAdd>(size, Add<HConstant>(AllocationMemento::kSize));
1744     size->ClearFlag(HValue::kCanOverflow);
1745   }
1746
1747   // Perform the actual allocation.
1748   HAllocate* object = Add<HAllocate>(
1749       size, type, allocation_mode.GetPretenureMode(),
1750       instance_type, allocation_mode.feedback_site());
1751
1752   // Setup the allocation memento.
1753   if (allocation_mode.CreateAllocationMementos()) {
1754     BuildCreateAllocationMemento(
1755         object, object_size, allocation_mode.current_site());
1756   }
1757
1758   return object;
1759 }
1760
1761
1762 HValue* HGraphBuilder::BuildAddStringLengths(HValue* left_length,
1763                                              HValue* right_length) {
1764   // Compute the combined string length and check against max string length.
1765   HValue* length = AddUncasted<HAdd>(left_length, right_length);
1766   // Check that length <= kMaxLength <=> length < kMaxLength + 1.
1767   HValue* max_length = Add<HConstant>(String::kMaxLength + 1);
1768   Add<HBoundsCheck>(length, max_length);
1769   return length;
1770 }
1771
1772
1773 HValue* HGraphBuilder::BuildCreateConsString(
1774     HValue* length,
1775     HValue* left,
1776     HValue* right,
1777     HAllocationMode allocation_mode) {
1778   // Determine the string instance types.
1779   HInstruction* left_instance_type = AddLoadStringInstanceType(left);
1780   HInstruction* right_instance_type = AddLoadStringInstanceType(right);
1781
1782   // Allocate the cons string object. HAllocate does not care whether we
1783   // pass CONS_STRING_TYPE or CONS_ASCII_STRING_TYPE here, so we just use
1784   // CONS_STRING_TYPE here. Below we decide whether the cons string is
1785   // one-byte or two-byte and set the appropriate map.
1786   ASSERT(HAllocate::CompatibleInstanceTypes(CONS_STRING_TYPE,
1787                                             CONS_ASCII_STRING_TYPE));
1788   HAllocate* result = BuildAllocate(Add<HConstant>(ConsString::kSize),
1789                                     HType::String(), CONS_STRING_TYPE,
1790                                     allocation_mode);
1791
1792   // Compute intersection and difference of instance types.
1793   HValue* anded_instance_types = AddUncasted<HBitwise>(
1794       Token::BIT_AND, left_instance_type, right_instance_type);
1795   HValue* xored_instance_types = AddUncasted<HBitwise>(
1796       Token::BIT_XOR, left_instance_type, right_instance_type);
1797
1798   // We create a one-byte cons string if
1799   // 1. both strings are one-byte, or
1800   // 2. at least one of the strings is two-byte, but happens to contain only
1801   //    one-byte characters.
1802   // To do this, we check
1803   // 1. if both strings are one-byte, or if the one-byte data hint is set in
1804   //    both strings, or
1805   // 2. if one of the strings has the one-byte data hint set and the other
1806   //    string is one-byte.
1807   IfBuilder if_onebyte(this);
1808   STATIC_ASSERT(kOneByteStringTag != 0);
1809   STATIC_ASSERT(kOneByteDataHintMask != 0);
1810   if_onebyte.If<HCompareNumericAndBranch>(
1811       AddUncasted<HBitwise>(
1812           Token::BIT_AND, anded_instance_types,
1813           Add<HConstant>(static_cast<int32_t>(
1814                   kStringEncodingMask | kOneByteDataHintMask))),
1815       graph()->GetConstant0(), Token::NE);
1816   if_onebyte.Or();
1817   STATIC_ASSERT(kOneByteStringTag != 0 &&
1818                 kOneByteDataHintTag != 0 &&
1819                 kOneByteDataHintTag != kOneByteStringTag);
1820   if_onebyte.If<HCompareNumericAndBranch>(
1821       AddUncasted<HBitwise>(
1822           Token::BIT_AND, xored_instance_types,
1823           Add<HConstant>(static_cast<int32_t>(
1824                   kOneByteStringTag | kOneByteDataHintTag))),
1825       Add<HConstant>(static_cast<int32_t>(
1826               kOneByteStringTag | kOneByteDataHintTag)), Token::EQ);
1827   if_onebyte.Then();
1828   {
1829     // We can safely skip the write barrier for storing the map here.
1830     Handle<Map> map = isolate()->factory()->cons_ascii_string_map();
1831     AddStoreMapConstantNoWriteBarrier(result, map);
1832   }
1833   if_onebyte.Else();
1834   {
1835     // We can safely skip the write barrier for storing the map here.
1836     Handle<Map> map = isolate()->factory()->cons_string_map();
1837     AddStoreMapConstantNoWriteBarrier(result, map);
1838   }
1839   if_onebyte.End();
1840
1841   // Initialize the cons string fields.
1842   Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
1843                         Add<HConstant>(String::kEmptyHashField));
1844   Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
1845   Add<HStoreNamedField>(result, HObjectAccess::ForConsStringFirst(), left);
1846   Add<HStoreNamedField>(result, HObjectAccess::ForConsStringSecond(), right);
1847
1848   // Count the native string addition.
1849   AddIncrementCounter(isolate()->counters()->string_add_native());
1850
1851   return result;
1852 }
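
// The one-byte decision above can be read as the following predicate over
// the two instance type words (illustrative only):
//
//   bool one_byte =
//       ((left_type & right_type) &
//        (kStringEncodingMask | kOneByteDataHintMask)) != 0 ||
//       ((left_type ^ right_type) &
//        (kOneByteStringTag | kOneByteDataHintTag)) ==
//           (kOneByteStringTag | kOneByteDataHintTag);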
1853
1854
1855 void HGraphBuilder::BuildCopySeqStringChars(HValue* src,
1856                                             HValue* src_offset,
1857                                             String::Encoding src_encoding,
1858                                             HValue* dst,
1859                                             HValue* dst_offset,
1860                                             String::Encoding dst_encoding,
1861                                             HValue* length) {
1862   ASSERT(dst_encoding != String::ONE_BYTE_ENCODING ||
1863          src_encoding == String::ONE_BYTE_ENCODING);
1864   LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
1865   HValue* index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
1866   {
1867     HValue* src_index = AddUncasted<HAdd>(src_offset, index);
1868     HValue* value =
1869         AddUncasted<HSeqStringGetChar>(src_encoding, src, src_index);
1870     HValue* dst_index = AddUncasted<HAdd>(dst_offset, index);
1871     Add<HSeqStringSetChar>(dst_encoding, dst, dst_index, value);
1872   }
1873   loop.EndBody();
1874 }
1875
1876
1877 HValue* HGraphBuilder::BuildObjectSizeAlignment(
1878     HValue* unaligned_size, int header_size) {
1879   ASSERT((header_size & kObjectAlignmentMask) == 0);
1880   HValue* size = AddUncasted<HAdd>(
1881       unaligned_size, Add<HConstant>(static_cast<int32_t>(
1882           header_size + kObjectAlignmentMask)));
1883   size->ClearFlag(HValue::kCanOverflow);
1884   return AddUncasted<HBitwise>(
1885       Token::BIT_AND, size, Add<HConstant>(static_cast<int32_t>(
1886           ~kObjectAlignmentMask)));
1887 }
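
// BuildObjectSizeAlignment rounds header_size + unaligned_size up to the next
// multiple of the object alignment, i.e. (sketch, with |mask| standing for
// kObjectAlignmentMask):
//
//   aligned_size = (unaligned_size + header_size + mask) & ~mask;
//
// For example, with a hypothetical mask of 7 and header_size of 16, an
// unaligned_size of 13 yields (13 + 16 + 7) & ~7 == 32.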
1888
1889
1890 HValue* HGraphBuilder::BuildUncheckedStringAdd(
1891     HValue* left,
1892     HValue* right,
1893     HAllocationMode allocation_mode) {
1894   // Determine the string lengths.
1895   HValue* left_length = AddLoadStringLength(left);
1896   HValue* right_length = AddLoadStringLength(right);
1897
1898   // Compute the combined string length.
1899   HValue* length = BuildAddStringLengths(left_length, right_length);
1900
1901   // Do some manual constant folding here.
1902   if (left_length->IsConstant()) {
1903     HConstant* c_left_length = HConstant::cast(left_length);
1904     ASSERT_NE(0, c_left_length->Integer32Value());
1905     if (c_left_length->Integer32Value() + 1 >= ConsString::kMinLength) {
1906       // The right string contains at least one character.
1907       return BuildCreateConsString(length, left, right, allocation_mode);
1908     }
1909   } else if (right_length->IsConstant()) {
1910     HConstant* c_right_length = HConstant::cast(right_length);
1911     ASSERT_NE(0, c_right_length->Integer32Value());
1912     if (c_right_length->Integer32Value() + 1 >= ConsString::kMinLength) {
1913       // The left string contains at least one character.
1914       return BuildCreateConsString(length, left, right, allocation_mode);
1915     }
1916   }
1917
1918   // Check if we should create a cons string.
1919   IfBuilder if_createcons(this);
1920   if_createcons.If<HCompareNumericAndBranch>(
1921       length, Add<HConstant>(ConsString::kMinLength), Token::GTE);
1922   if_createcons.Then();
1923   {
1924     // Create a cons string.
1925     Push(BuildCreateConsString(length, left, right, allocation_mode));
1926   }
1927   if_createcons.Else();
1928   {
1929     // Determine the string instance types.
1930     HValue* left_instance_type = AddLoadStringInstanceType(left);
1931     HValue* right_instance_type = AddLoadStringInstanceType(right);
1932
1933     // Compute union and difference of instance types.
1934     HValue* ored_instance_types = AddUncasted<HBitwise>(
1935         Token::BIT_OR, left_instance_type, right_instance_type);
1936     HValue* xored_instance_types = AddUncasted<HBitwise>(
1937         Token::BIT_XOR, left_instance_type, right_instance_type);
1938
1939     // Check if both strings have the same encoding and both are
1940     // sequential.
1941     IfBuilder if_sameencodingandsequential(this);
1942     if_sameencodingandsequential.If<HCompareNumericAndBranch>(
1943         AddUncasted<HBitwise>(
1944             Token::BIT_AND, xored_instance_types,
1945             Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
1946         graph()->GetConstant0(), Token::EQ);
1947     if_sameencodingandsequential.And();
1948     STATIC_ASSERT(kSeqStringTag == 0);
1949     if_sameencodingandsequential.If<HCompareNumericAndBranch>(
1950         AddUncasted<HBitwise>(
1951             Token::BIT_AND, ored_instance_types,
1952             Add<HConstant>(static_cast<int32_t>(kStringRepresentationMask))),
1953         graph()->GetConstant0(), Token::EQ);
1954     if_sameencodingandsequential.Then();
1955     {
1956       HConstant* string_map =
1957           Add<HConstant>(isolate()->factory()->string_map());
1958       HConstant* ascii_string_map =
1959           Add<HConstant>(isolate()->factory()->ascii_string_map());
1960
1961       // Determine map and size depending on whether the result is one-byte.
1962       IfBuilder if_onebyte(this);
1963       STATIC_ASSERT(kOneByteStringTag != 0);
1964       if_onebyte.If<HCompareNumericAndBranch>(
1965           AddUncasted<HBitwise>(
1966               Token::BIT_AND, ored_instance_types,
1967               Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
1968           graph()->GetConstant0(), Token::NE);
1969       if_onebyte.Then();
1970       {
1971         // Allocate sequential one-byte string object.
1972         Push(length);
1973         Push(ascii_string_map);
1974       }
1975       if_onebyte.Else();
1976       {
1977         // Allocate sequential two-byte string object.
1978         HValue* size = AddUncasted<HShl>(length, graph()->GetConstant1());
1979         size->ClearFlag(HValue::kCanOverflow);
1980         size->SetFlag(HValue::kUint32);
1981         Push(size);
1982         Push(string_map);
1983       }
1984       if_onebyte.End();
1985       HValue* map = Pop();
1986
1987       // Calculate the number of bytes needed for the characters in the
1988       // string while observing object alignment.
1989       STATIC_ASSERT((SeqString::kHeaderSize & kObjectAlignmentMask) == 0);
1990       HValue* size = BuildObjectSizeAlignment(Pop(), SeqString::kHeaderSize);
1991
1992       // Allocate the string object. HAllocate does not care whether we pass
1993       // STRING_TYPE or ASCII_STRING_TYPE here, so we just use STRING_TYPE here.
1994       HAllocate* result = BuildAllocate(
1995           size, HType::String(), STRING_TYPE, allocation_mode);
1996
1997       // We can safely skip the write barrier for storing map here.
1998       AddStoreMapNoWriteBarrier(result, map);
1999
2000       // Initialize the string fields.
2001       Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
2002                             Add<HConstant>(String::kEmptyHashField));
2003       Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
2004
2005       // Copy characters to the result string.
2006       IfBuilder if_twobyte(this);
2007       if_twobyte.If<HCompareObjectEqAndBranch>(map, string_map);
2008       if_twobyte.Then();
2009       {
2010         // Copy characters from the left string.
2011         BuildCopySeqStringChars(
2012             left, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
2013             result, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
2014             left_length);
2015
2016         // Copy characters from the right string.
2017         BuildCopySeqStringChars(
2018             right, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
2019             result, left_length, String::TWO_BYTE_ENCODING,
2020             right_length);
2021       }
2022       if_twobyte.Else();
2023       {
2024         // Copy characters from the left string.
2025         BuildCopySeqStringChars(
2026             left, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
2027             result, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
2028             left_length);
2029
2030         // Copy characters from the right string.
2031         BuildCopySeqStringChars(
2032             right, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
2033             result, left_length, String::ONE_BYTE_ENCODING,
2034             right_length);
2035       }
2036       if_twobyte.End();
2037
2038       // Count the native string addition.
2039       AddIncrementCounter(isolate()->counters()->string_add_native());
2040
2041       // Return the sequential string.
2042       Push(result);
2043     }
2044     if_sameencodingandsequential.Else();
2045     {
2046       // Fallback to the runtime to add the two strings.
2047       Add<HPushArgument>(left);
2048       Add<HPushArgument>(right);
2049       Push(Add<HCallRuntime>(
2050             isolate()->factory()->empty_string(),
2051             Runtime::FunctionForId(Runtime::kHiddenStringAdd),
2052             2));
2053     }
2054     if_sameencodingandsequential.End();
2055   }
2056   if_createcons.End();
2057
2058   return Pop();
2059 }
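
// To summarize the paths above: the result is a cons string whenever the
// combined length is at least ConsString::kMinLength; otherwise, if both
// inputs are sequential strings of the same encoding, a flat sequential
// string is allocated and the characters are copied; in all remaining cases
// the addition falls back to the Runtime::kHiddenStringAdd runtime function.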
2060
2061
2062 HValue* HGraphBuilder::BuildStringAdd(
2063     HValue* left,
2064     HValue* right,
2065     HAllocationMode allocation_mode) {
2066   NoObservableSideEffectsScope no_effects(this);
2067
2068   // Determine string lengths.
2069   HValue* left_length = AddLoadStringLength(left);
2070   HValue* right_length = AddLoadStringLength(right);
2071
2072   // Check if left string is empty.
2073   IfBuilder if_leftempty(this);
2074   if_leftempty.If<HCompareNumericAndBranch>(
2075       left_length, graph()->GetConstant0(), Token::EQ);
2076   if_leftempty.Then();
2077   {
2078     // Count the native string addition.
2079     AddIncrementCounter(isolate()->counters()->string_add_native());
2080
2081     // Just return the right string.
2082     Push(right);
2083   }
2084   if_leftempty.Else();
2085   {
2086     // Check if right string is empty.
2087     IfBuilder if_rightempty(this);
2088     if_rightempty.If<HCompareNumericAndBranch>(
2089         right_length, graph()->GetConstant0(), Token::EQ);
2090     if_rightempty.Then();
2091     {
2092       // Count the native string addition.
2093       AddIncrementCounter(isolate()->counters()->string_add_native());
2094
2095       // Just return the left string.
2096       Push(left);
2097     }
2098     if_rightempty.Else();
2099     {
2100       // Add the two non-empty strings.
2101       Push(BuildUncheckedStringAdd(left, right, allocation_mode));
2102     }
2103     if_rightempty.End();
2104   }
2105   if_leftempty.End();
2106
2107   return Pop();
2108 }
2109
2110
2111 HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
2112     HValue* checked_object,
2113     HValue* key,
2114     HValue* val,
2115     bool is_js_array,
2116     ElementsKind elements_kind,
2117     PropertyAccessType access_type,
2118     LoadKeyedHoleMode load_mode,
2119     KeyedAccessStoreMode store_mode) {
2120   ASSERT((!IsExternalArrayElementsKind(elements_kind) &&
2121               !IsFixedTypedArrayElementsKind(elements_kind)) ||
2122          !is_js_array);
2123   // No GVNFlag is necessary for ElementsKind if there is an explicit dependency
2124   // on a HElementsTransition instruction. The flag can also be removed if the
2125   // map to check has FAST_HOLEY_ELEMENTS, since there can be no further
2126   // ElementsKind transitions. Finally, the dependency can be removed for
2127   // FAST_ELEMENTS stores, since a transition to HOLEY elements won't change
2128   // the generated store code.
2129   if ((elements_kind == FAST_HOLEY_ELEMENTS) ||
2130       (elements_kind == FAST_ELEMENTS && access_type == STORE)) {
2131     checked_object->ClearDependsOnFlag(kElementsKind);
2132   }
2133
2134   bool fast_smi_only_elements = IsFastSmiElementsKind(elements_kind);
2135   bool fast_elements = IsFastObjectElementsKind(elements_kind);
2136   HValue* elements = AddLoadElements(checked_object);
2137   if (access_type == STORE && (fast_elements || fast_smi_only_elements) &&
2138       store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
2139     HCheckMaps* check_cow_map = Add<HCheckMaps>(
2140         elements, isolate()->factory()->fixed_array_map());
2141     check_cow_map->ClearDependsOnFlag(kElementsKind);
2142   }
2143   HInstruction* length = NULL;
2144   if (is_js_array) {
2145     length = Add<HLoadNamedField>(
2146         checked_object, static_cast<HValue*>(NULL),
2147         HObjectAccess::ForArrayLength(elements_kind));
2148   } else {
2149     length = AddLoadFixedArrayLength(elements);
2150   }
2151   length->set_type(HType::Smi());
2152   HValue* checked_key = NULL;
2153   if (IsExternalArrayElementsKind(elements_kind) ||
2154       IsFixedTypedArrayElementsKind(elements_kind)) {
2155     HValue* backing_store;
2156     if (IsExternalArrayElementsKind(elements_kind)) {
2157       backing_store = Add<HLoadNamedField>(
2158           elements, static_cast<HValue*>(NULL),
2159           HObjectAccess::ForExternalArrayExternalPointer());
2160     } else {
2161       backing_store = elements;
2162     }
2163     if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
2164       NoObservableSideEffectsScope no_effects(this);
2165       IfBuilder length_checker(this);
2166       length_checker.If<HCompareNumericAndBranch>(key, length, Token::LT);
2167       length_checker.Then();
2168       IfBuilder negative_checker(this);
2169       HValue* bounds_check = negative_checker.If<HCompareNumericAndBranch>(
2170           key, graph()->GetConstant0(), Token::GTE);
2171       negative_checker.Then();
2172       HInstruction* result = AddElementAccess(
2173           backing_store, key, val, bounds_check, elements_kind, access_type);
2174       negative_checker.ElseDeopt("Negative key encountered");
2175       negative_checker.End();
2176       length_checker.End();
2177       return result;
2178     } else {
2179       ASSERT(store_mode == STANDARD_STORE);
2180       checked_key = Add<HBoundsCheck>(key, length);
2181       return AddElementAccess(
2182           backing_store, checked_key, val,
2183           checked_object, elements_kind, access_type);
2184     }
2185   }
2186   ASSERT(fast_smi_only_elements ||
2187          fast_elements ||
2188          IsFastDoubleElementsKind(elements_kind));
2189
2190   // In case val is stored into a fast smi array, ensure that the value is a smi
2191   // before manipulating the backing store. Otherwise the actual store may
2192   // deopt, leaving the backing store in an invalid state.
2193   if (access_type == STORE && IsFastSmiElementsKind(elements_kind) &&
2194       !val->type().IsSmi()) {
2195     val = AddUncasted<HForceRepresentation>(val, Representation::Smi());
2196   }
2197
2198   if (IsGrowStoreMode(store_mode)) {
2199     NoObservableSideEffectsScope no_effects(this);
2200     elements = BuildCheckForCapacityGrow(checked_object, elements,
2201                                          elements_kind, length, key,
2202                                          is_js_array, access_type);
2203     checked_key = key;
2204   } else {
2205     checked_key = Add<HBoundsCheck>(key, length);
2206
2207     if (access_type == STORE && (fast_elements || fast_smi_only_elements)) {
2208       if (store_mode == STORE_NO_TRANSITION_HANDLE_COW) {
2209         NoObservableSideEffectsScope no_effects(this);
2210         elements = BuildCopyElementsOnWrite(checked_object, elements,
2211                                             elements_kind, length);
2212       } else {
2213         HCheckMaps* check_cow_map = Add<HCheckMaps>(
2214             elements, isolate()->factory()->fixed_array_map());
2215         check_cow_map->ClearDependsOnFlag(kElementsKind);
2216       }
2217     }
2218   }
2219   return AddElementAccess(elements, checked_key, val, checked_object,
2220                           elements_kind, access_type, load_mode);
2221 }
2222
2223
2224
2225 HValue* HGraphBuilder::BuildAllocateArrayFromLength(
2226     JSArrayBuilder* array_builder,
2227     HValue* length_argument) {
2228   if (length_argument->IsConstant() &&
2229       HConstant::cast(length_argument)->HasSmiValue()) {
2230     int array_length = HConstant::cast(length_argument)->Integer32Value();
2231     HValue* new_object = array_length == 0
2232         ? array_builder->AllocateEmptyArray()
2233         : array_builder->AllocateArray(length_argument, length_argument);
2234     return new_object;
2235   }
2236
2237   HValue* constant_zero = graph()->GetConstant0();
2238   HConstant* max_alloc_length =
2239       Add<HConstant>(JSObject::kInitialMaxFastElementArray);
2240   HInstruction* checked_length = Add<HBoundsCheck>(length_argument,
2241                                                    max_alloc_length);
2242   IfBuilder if_builder(this);
2243   if_builder.If<HCompareNumericAndBranch>(checked_length, constant_zero,
2244                                           Token::EQ);
2245   if_builder.Then();
2246   const int initial_capacity = JSArray::kPreallocatedArrayElements;
2247   HConstant* initial_capacity_node = Add<HConstant>(initial_capacity);
2248   Push(initial_capacity_node);  // capacity
2249   Push(constant_zero);          // length
2250   if_builder.Else();
2251   if (!(top_info()->IsStub()) &&
2252       IsFastPackedElementsKind(array_builder->kind())) {
2253     // We'll come back later with better (holey) feedback.
2254     if_builder.Deopt("Holey array despite packed elements_kind feedback");
2255   } else {
2256     Push(checked_length);         // capacity
2257     Push(checked_length);         // length
2258   }
2259   if_builder.End();
2260
2261   // Figure out total size
2262   HValue* length = Pop();
2263   HValue* capacity = Pop();
2264   return array_builder->AllocateArray(capacity, length);
2265 }
2266
2267 HValue* HGraphBuilder::BuildAllocateElements(ElementsKind kind,
2268                                              HValue* capacity) {
2269   int elements_size;
2270   InstanceType instance_type;
2271
2272   if (IsFastDoubleElementsKind(kind)) {
2273     elements_size = kDoubleSize;
2274     instance_type = FIXED_DOUBLE_ARRAY_TYPE;
2275   } else {
2276     elements_size = kPointerSize;
2277     instance_type = FIXED_ARRAY_TYPE;
2278   }
2279
2280   HConstant* elements_size_value = Add<HConstant>(elements_size);
2281   HValue* mul = AddUncasted<HMul>(capacity, elements_size_value);
2282   mul->ClearFlag(HValue::kCanOverflow);
2283
2284   HConstant* header_size = Add<HConstant>(FixedArray::kHeaderSize);
2285   HValue* total_size = AddUncasted<HAdd>(mul, header_size);
2286   total_size->ClearFlag(HValue::kCanOverflow);
2287
2288   PretenureFlag pretenure_flag = !FLAG_allocation_site_pretenuring ?
2289       isolate()->heap()->GetPretenureMode() : NOT_TENURED;
2290
2291   return Add<HAllocate>(total_size, HType::Tagged(), pretenure_flag,
2292       instance_type);
2293 }
2294
2295
2296 void HGraphBuilder::BuildInitializeElementsHeader(HValue* elements,
2297                                                   ElementsKind kind,
2298                                                   HValue* capacity) {
2299   Factory* factory = isolate()->factory();
2300   Handle<Map> map = IsFastDoubleElementsKind(kind)
2301       ? factory->fixed_double_array_map()
2302       : factory->fixed_array_map();
2303
2304   AddStoreMapConstant(elements, map);
2305   Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(),
2306                         capacity);
2307 }
2308
2309
2310 HValue* HGraphBuilder::BuildAllocateElementsAndInitializeElementsHeader(
2311     ElementsKind kind,
2312     HValue* capacity) {
2313   // The HForceRepresentation is to prevent possible deopt on int-smi
2314   // conversion after allocation but before the new object fields are set.
2315   capacity = AddUncasted<HForceRepresentation>(capacity, Representation::Smi());
2316   HValue* new_elements = BuildAllocateElements(kind, capacity);
2317   BuildInitializeElementsHeader(new_elements, kind, capacity);
2318   return new_elements;
2319 }
2320
2321
2322 HInnerAllocatedObject* HGraphBuilder::BuildJSArrayHeader(HValue* array,
2323     HValue* array_map,
2324     AllocationSiteMode mode,
2325     ElementsKind elements_kind,
2326     HValue* allocation_site_payload,
2327     HValue* length_field) {
2328
2329   Add<HStoreNamedField>(array, HObjectAccess::ForMap(), array_map);
2330
2331   HConstant* empty_fixed_array =
2332     Add<HConstant>(isolate()->factory()->empty_fixed_array());
2333
2334   HObjectAccess access = HObjectAccess::ForPropertiesPointer();
2335   Add<HStoreNamedField>(array, access, empty_fixed_array);
2336   Add<HStoreNamedField>(array, HObjectAccess::ForArrayLength(elements_kind),
2337                         length_field);
2338
2339   if (mode == TRACK_ALLOCATION_SITE) {
2340     BuildCreateAllocationMemento(
2341         array, Add<HConstant>(JSArray::kSize), allocation_site_payload);
2342   }
2343
2344   int elements_location = JSArray::kSize;
2345   if (mode == TRACK_ALLOCATION_SITE) {
2346     elements_location += AllocationMemento::kSize;
2347   }
2348
2349   HInnerAllocatedObject* elements = Add<HInnerAllocatedObject>(
2350       array, Add<HConstant>(elements_location));
2351   Add<HStoreNamedField>(array, HObjectAccess::ForElementsPointer(), elements);
2352   return elements;
2353 }
2354
2355
2356 HInstruction* HGraphBuilder::AddElementAccess(
2357     HValue* elements,
2358     HValue* checked_key,
2359     HValue* val,
2360     HValue* dependency,
2361     ElementsKind elements_kind,
2362     PropertyAccessType access_type,
2363     LoadKeyedHoleMode load_mode) {
2364   if (access_type == STORE) {
2365     ASSERT(val != NULL);
2366     if (elements_kind == EXTERNAL_UINT8_CLAMPED_ELEMENTS ||
2367         elements_kind == UINT8_CLAMPED_ELEMENTS) {
2368       val = Add<HClampToUint8>(val);
2369     }
2370     return Add<HStoreKeyed>(elements, checked_key, val, elements_kind,
2371                             elements_kind == FAST_SMI_ELEMENTS
2372                               ? STORE_TO_INITIALIZED_ENTRY
2373                               : INITIALIZING_STORE);
2374   }
2375
2376   ASSERT(access_type == LOAD);
2377   ASSERT(val == NULL);
2378   HLoadKeyed* load = Add<HLoadKeyed>(
2379       elements, checked_key, dependency, elements_kind, load_mode);
2380   if (FLAG_opt_safe_uint32_operations &&
2381       (elements_kind == EXTERNAL_UINT32_ELEMENTS ||
2382        elements_kind == UINT32_ELEMENTS)) {
2383     graph()->RecordUint32Instruction(load);
2384   }
2385   return load;
2386 }
2387
2388
2389 HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object) {
2390   return Add<HLoadNamedField>(
2391       object, static_cast<HValue*>(NULL), HObjectAccess::ForElementsPointer());
2392 }
2393
2394
2395 HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(HValue* object) {
2396   return Add<HLoadNamedField>(
2397       object, static_cast<HValue*>(NULL), HObjectAccess::ForFixedArrayLength());
2398 }
2399
2400
2401 HValue* HGraphBuilder::BuildNewElementsCapacity(HValue* old_capacity) {
2402   HValue* half_old_capacity = AddUncasted<HShr>(old_capacity,
2403                                                 graph_->GetConstant1());
2404
2405   HValue* new_capacity = AddUncasted<HAdd>(half_old_capacity, old_capacity);
2406   new_capacity->ClearFlag(HValue::kCanOverflow);
2407
2408   HValue* min_growth = Add<HConstant>(16);
2409
2410   new_capacity = AddUncasted<HAdd>(new_capacity, min_growth);
2411   new_capacity->ClearFlag(HValue::kCanOverflow);
2412
2413   return new_capacity;
2414 }
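
// The growth policy above amounts to (illustrative arithmetic only):
//
//   new_capacity = old_capacity + old_capacity / 2 + 16;
//
// e.g. an old capacity of 8 grows to 8 + 4 + 16 = 28 elements.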
2415
2416
2417 void HGraphBuilder::BuildNewSpaceArrayCheck(HValue* length, ElementsKind kind) {
2418   int element_size = IsFastDoubleElementsKind(kind) ? kDoubleSize
2419                                                     : kPointerSize;
2420   int max_size = Page::kMaxRegularHeapObjectSize / element_size;
2421   max_size -= JSArray::kSize / element_size;
2422   HConstant* max_size_constant = Add<HConstant>(max_size);
2423   Add<HBoundsCheck>(length, max_size_constant);
2424 }
2425
2426
2427 HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object,
2428                                                  HValue* elements,
2429                                                  ElementsKind kind,
2430                                                  ElementsKind new_kind,
2431                                                  HValue* length,
2432                                                  HValue* new_capacity) {
2433   BuildNewSpaceArrayCheck(new_capacity, new_kind);
2434
2435   HValue* new_elements = BuildAllocateElementsAndInitializeElementsHeader(
2436       new_kind, new_capacity);
2437
2438   BuildCopyElements(elements, kind,
2439                     new_elements, new_kind,
2440                     length, new_capacity);
2441
2442   Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
2443                         new_elements);
2444
2445   return new_elements;
2446 }
2447
2448
2449 void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
2450                                               ElementsKind elements_kind,
2451                                               HValue* from,
2452                                               HValue* to) {
2453   // Fast elements kinds need to be initialized in case the statements below
2454   // cause a garbage collection.
2455   Factory* factory = isolate()->factory();
2456
2457   double nan_double = FixedDoubleArray::hole_nan_as_double();
2458   HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
2459       ? Add<HConstant>(factory->the_hole_value())
2460       : Add<HConstant>(nan_double);
2461
2462   // Special loop unfolding case
2463   static const int kLoopUnfoldLimit = 8;
2464   STATIC_ASSERT(JSArray::kPreallocatedArrayElements <= kLoopUnfoldLimit);
2465   int initial_capacity = -1;
2466   if (from->IsInteger32Constant() && to->IsInteger32Constant()) {
2467     int constant_from = from->GetInteger32Constant();
2468     int constant_to = to->GetInteger32Constant();
2469
2470     if (constant_from == 0 && constant_to <= kLoopUnfoldLimit) {
2471       initial_capacity = constant_to;
2472     }
2473   }
2474
2475   // Since we're about to store a hole value, the store instruction below must
2476   // assume an elements kind that supports heap object values.
2477   if (IsFastSmiOrObjectElementsKind(elements_kind)) {
2478     elements_kind = FAST_HOLEY_ELEMENTS;
2479   }
2480
2481   if (initial_capacity >= 0) {
2482     for (int i = 0; i < initial_capacity; i++) {
2483       HInstruction* key = Add<HConstant>(i);
2484       Add<HStoreKeyed>(elements, key, hole, elements_kind);
2485     }
2486   } else {
2487     LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
2488
2489     HValue* key = builder.BeginBody(from, to, Token::LT);
2490
2491     Add<HStoreKeyed>(elements, key, hole, elements_kind);
2492
2493     builder.EndBody();
2494   }
2495 }
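
// Note on the unfolding above: when both bounds are compile-time constants
// and the range starts at 0 with at most kLoopUnfoldLimit (8) elements, the
// hole stores are emitted straight-line; otherwise a post-increment loop
// from |from| to |to| is generated.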
2496
2497
2498 void HGraphBuilder::BuildCopyElements(HValue* from_elements,
2499                                       ElementsKind from_elements_kind,
2500                                       HValue* to_elements,
2501                                       ElementsKind to_elements_kind,
2502                                       HValue* length,
2503                                       HValue* capacity) {
2504   bool pre_fill_with_holes =
2505       IsFastDoubleElementsKind(from_elements_kind) &&
2506       IsFastObjectElementsKind(to_elements_kind);
2507
2508   if (pre_fill_with_holes) {
2509     // If the copy might trigger a GC, make sure that the FixedArray is
2510     // pre-initialized with holes so that it's always in a consistent
2511     // state.
2512     BuildFillElementsWithHole(to_elements, to_elements_kind,
2513                               graph()->GetConstant0(), capacity);
2514   }
2515
2516   LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
2517
2518   HValue* key = builder.BeginBody(graph()->GetConstant0(), length, Token::LT);
2519
2520   HValue* element = Add<HLoadKeyed>(from_elements, key,
2521                                     static_cast<HValue*>(NULL),
2522                                     from_elements_kind,
2523                                     ALLOW_RETURN_HOLE);
2524
2525   ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
2526                        IsFastSmiElementsKind(to_elements_kind))
2527       ? FAST_HOLEY_ELEMENTS : to_elements_kind;
2528
2529   if (IsHoleyElementsKind(from_elements_kind) &&
2530       from_elements_kind != to_elements_kind) {
2531     IfBuilder if_hole(this);
2532     if_hole.If<HCompareHoleAndBranch>(element);
2533     if_hole.Then();
2534     HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
2535         ? Add<HConstant>(FixedDoubleArray::hole_nan_as_double())
2536         : graph()->GetConstantHole();
2537     Add<HStoreKeyed>(to_elements, key, hole_constant, kind);
2538     if_hole.Else();
2539     HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
2540     store->SetFlag(HValue::kAllowUndefinedAsNaN);
2541     if_hole.End();
2542   } else {
2543     HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
2544     store->SetFlag(HValue::kAllowUndefinedAsNaN);
2545   }
2546
2547   builder.EndBody();
2548
2549   if (!pre_fill_with_holes && length != capacity) {
2550     // Fill unused capacity with the hole.
2551     BuildFillElementsWithHole(to_elements, to_elements_kind,
2552                               key, capacity);
2553   }
2554 }
2555
2556
2557 HValue* HGraphBuilder::BuildCloneShallowArray(HValue* boilerplate,
2558                                               HValue* allocation_site,
2559                                               AllocationSiteMode mode,
2560                                               ElementsKind kind,
2561                                               int length) {
2562   NoObservableSideEffectsScope no_effects(this);
2563
2564   // All sizes here are multiples of kPointerSize.
2565   int size = JSArray::kSize;
2566   if (mode == TRACK_ALLOCATION_SITE) {
2567     size += AllocationMemento::kSize;
2568   }
2569
2570   HValue* size_in_bytes = Add<HConstant>(size);
2571   HInstruction* object = Add<HAllocate>(size_in_bytes,
2572                                         HType::JSObject(),
2573                                         NOT_TENURED,
2574                                         JS_OBJECT_TYPE);
2575
2576   // Copy the JS array part.
2577   for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
2578     if ((i != JSArray::kElementsOffset) || (length == 0)) {
2579       HObjectAccess access = HObjectAccess::ForJSArrayOffset(i);
2580       Add<HStoreNamedField>(
2581           object, access, Add<HLoadNamedField>(
2582               boilerplate, static_cast<HValue*>(NULL), access));
2583     }
2584   }
2585
2586   // Create an allocation site info if requested.
2587   if (mode == TRACK_ALLOCATION_SITE) {
2588     BuildCreateAllocationMemento(
2589         object, Add<HConstant>(JSArray::kSize), allocation_site);
2590   }
2591
2592   if (length > 0) {
2593     // We have to initialize the elements pointer if allocation folding is
2594     // turned off.
2595     if (!FLAG_use_gvn || !FLAG_use_allocation_folding) {
2596       HConstant* empty_fixed_array = Add<HConstant>(
2597           isolate()->factory()->empty_fixed_array());
2598       Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
2599           empty_fixed_array, INITIALIZING_STORE);
2600     }
2601
2602     HValue* boilerplate_elements = AddLoadElements(boilerplate);
2603     HValue* object_elements;
2604     if (IsFastDoubleElementsKind(kind)) {
2605       HValue* elems_size = Add<HConstant>(FixedDoubleArray::SizeFor(length));
2606       object_elements = Add<HAllocate>(elems_size, HType::Tagged(),
2607           NOT_TENURED, FIXED_DOUBLE_ARRAY_TYPE);
2608     } else {
2609       HValue* elems_size = Add<HConstant>(FixedArray::SizeFor(length));
2610       object_elements = Add<HAllocate>(elems_size, HType::Tagged(),
2611           NOT_TENURED, FIXED_ARRAY_TYPE);
2612     }
2613     Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
2614                           object_elements);
2615
2616     // Copy the elements array header.
2617     for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
2618       HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
2619       Add<HStoreNamedField>(
2620           object_elements, access, Add<HLoadNamedField>(
2621               boilerplate_elements, static_cast<HValue*>(NULL), access));
2622     }
2623
2624     // Copy the elements array contents.
2625     // TODO(mstarzinger): Teach HGraphBuilder::BuildCopyElements to unfold
2626     // copying loops with constant length up to a given boundary and use this
2627     // helper here instead.
2628     for (int i = 0; i < length; i++) {
2629       HValue* key_constant = Add<HConstant>(i);
2630       HInstruction* value = Add<HLoadKeyed>(boilerplate_elements, key_constant,
2631                                             static_cast<HValue*>(NULL), kind);
2632       Add<HStoreKeyed>(object_elements, key_constant, value, kind);
2633     }
2634   }
2635
2636   return object;
2637 }
2638
2639
2640 void HGraphBuilder::BuildCompareNil(
2641     HValue* value,
2642     Type* type,
2643     HIfContinuation* continuation) {
2644   IfBuilder if_nil(this);
2645   bool some_case_handled = false;
2646   bool some_case_missing = false;
2647
2648   if (type->Maybe(Type::Null())) {
2649     if (some_case_handled) if_nil.Or();
2650     if_nil.If<HCompareObjectEqAndBranch>(value, graph()->GetConstantNull());
2651     some_case_handled = true;
2652   } else {
2653     some_case_missing = true;
2654   }
2655
2656   if (type->Maybe(Type::Undefined())) {
2657     if (some_case_handled) if_nil.Or();
2658     if_nil.If<HCompareObjectEqAndBranch>(value,
2659                                          graph()->GetConstantUndefined());
2660     some_case_handled = true;
2661   } else {
2662     some_case_missing = true;
2663   }
2664
2665   if (type->Maybe(Type::Undetectable())) {
2666     if (some_case_handled) if_nil.Or();
2667     if_nil.If<HIsUndetectableAndBranch>(value);
2668     some_case_handled = true;
2669   } else {
2670     some_case_missing = true;
2671   }
2672
2673   if (some_case_missing) {
2674     if_nil.Then();
2675     if_nil.Else();
2676     if (type->NumClasses() == 1) {
2677       BuildCheckHeapObject(value);
2678       // For ICs, the map checked below is a sentinel map that gets replaced by
2679       // the monomorphic map when the code is used as a template to generate a
2680       // new IC. For optimized functions, there is no sentinel map; the map
2681       // emitted below is the actual monomorphic map.
2682       Add<HCheckMaps>(value, type->Classes().Current());
2683     } else {
2684       if_nil.Deopt("Too many undetectable types");
2685     }
2686   }
2687
2688   if_nil.CaptureContinuation(continuation);
2689 }
2690
2691
2692 void HGraphBuilder::BuildCreateAllocationMemento(
2693     HValue* previous_object,
2694     HValue* previous_object_size,
2695     HValue* allocation_site) {
2696   ASSERT(allocation_site != NULL);
2697   HInnerAllocatedObject* allocation_memento = Add<HInnerAllocatedObject>(
2698       previous_object, previous_object_size);
2699   AddStoreMapConstant(
2700       allocation_memento, isolate()->factory()->allocation_memento_map());
2701   Add<HStoreNamedField>(
2702       allocation_memento,
2703       HObjectAccess::ForAllocationMementoSite(),
2704       allocation_site);
2705   if (FLAG_allocation_site_pretenuring) {
2706     HValue* memento_create_count = Add<HLoadNamedField>(
2707         allocation_site, static_cast<HValue*>(NULL),
2708         HObjectAccess::ForAllocationSiteOffset(
2709             AllocationSite::kPretenureCreateCountOffset));
2710     memento_create_count = AddUncasted<HAdd>(
2711         memento_create_count, graph()->GetConstant1());
2712     // This smi value is reset to zero after every GC, so overflow isn't a
2713     // problem since the counter is bounded by the new space size.
2714     memento_create_count->ClearFlag(HValue::kCanOverflow);
2715     HStoreNamedField* store = Add<HStoreNamedField>(
2716         allocation_site, HObjectAccess::ForAllocationSiteOffset(
2717             AllocationSite::kPretenureCreateCountOffset), memento_create_count);
2718     // No write barrier needed to store a smi.
2719     store->SkipWriteBarrier();
2720   }
2721 }
2722
2723
2724 HInstruction* HGraphBuilder::BuildGetNativeContext(HValue* closure) {
2725   // Get the global object, then the native context.
2726   HInstruction* context =
2727       Add<HLoadNamedField>(closure, static_cast<HValue*>(NULL),
2728                            HObjectAccess::ForFunctionContextPointer());
2729   HInstruction* global_object = Add<HLoadNamedField>(
2730       context, static_cast<HValue*>(NULL),
2731       HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
2732   HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(
2733       GlobalObject::kNativeContextOffset);
2734   return Add<HLoadNamedField>(
2735       global_object, static_cast<HValue*>(NULL), access);
2736 }
2737
2738
2739 HInstruction* HGraphBuilder::BuildGetNativeContext() {
2740   // Get the global object, then the native context.
2741   HValue* global_object = Add<HLoadNamedField>(
2742       context(), static_cast<HValue*>(NULL),
2743       HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
2744   return Add<HLoadNamedField>(
2745       global_object, static_cast<HValue*>(NULL),
2746       HObjectAccess::ForObservableJSObjectOffset(
2747           GlobalObject::kNativeContextOffset));
2748 }
2749
2750
2751 HInstruction* HGraphBuilder::BuildGetArrayFunction() {
2752   HInstruction* native_context = BuildGetNativeContext();
2753   HInstruction* index =
2754       Add<HConstant>(static_cast<int32_t>(Context::ARRAY_FUNCTION_INDEX));
2755   return Add<HLoadKeyed>(
2756       native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
2757 }
2758
2759
2760 HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
2761     ElementsKind kind,
2762     HValue* allocation_site_payload,
2763     HValue* constructor_function,
2764     AllocationSiteOverrideMode override_mode) :
2765         builder_(builder),
2766         kind_(kind),
2767         allocation_site_payload_(allocation_site_payload),
2768         constructor_function_(constructor_function) {
2769   ASSERT(!allocation_site_payload->IsConstant() ||
2770          HConstant::cast(allocation_site_payload)->handle(
2771              builder_->isolate())->IsAllocationSite());
2772   mode_ = override_mode == DISABLE_ALLOCATION_SITES
2773       ? DONT_TRACK_ALLOCATION_SITE
2774       : AllocationSite::GetMode(kind);
2775 }
2776
2777
2778 HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
2779                                               ElementsKind kind,
2780                                               HValue* constructor_function) :
2781     builder_(builder),
2782     kind_(kind),
2783     mode_(DONT_TRACK_ALLOCATION_SITE),
2784     allocation_site_payload_(NULL),
2785     constructor_function_(constructor_function) {
2786 }
2787
2788
2789 HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode() {
2790   if (!builder()->top_info()->IsStub()) {
2791     // A constant map is fine.
2792     Handle<Map> map(builder()->isolate()->get_initial_js_array_map(kind_),
2793                     builder()->isolate());
2794     return builder()->Add<HConstant>(map);
2795   }
2796
2797   if (constructor_function_ != NULL && kind_ == GetInitialFastElementsKind()) {
2798     // No need for a context lookup if the kind_ matches the initial
2799     // map, because we can just load the map in that case.
2800     HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
2801     return builder()->Add<HLoadNamedField>(
2802         constructor_function_, static_cast<HValue*>(NULL), access);
2803   }
2804
2805   // TODO(mvstanton): we should always have a constructor function if we
2806   // are creating a stub.
2807   HInstruction* native_context = constructor_function_ != NULL
2808       ? builder()->BuildGetNativeContext(constructor_function_)
2809       : builder()->BuildGetNativeContext();
2810
2811   HInstruction* index = builder()->Add<HConstant>(
2812       static_cast<int32_t>(Context::JS_ARRAY_MAPS_INDEX));
2813
2814   HInstruction* map_array = builder()->Add<HLoadKeyed>(
2815       native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
2816
2817   HInstruction* kind_index = builder()->Add<HConstant>(kind_);
2818
2819   return builder()->Add<HLoadKeyed>(
2820       map_array, kind_index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
2821 }
2822
2823
2824 HValue* HGraphBuilder::JSArrayBuilder::EmitInternalMapCode() {
2825   // Find the map near the constructor function
2826   HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
2827   return builder()->Add<HLoadNamedField>(
2828       constructor_function_, static_cast<HValue*>(NULL), access);
2829 }
2830
2831
2832 HValue* HGraphBuilder::JSArrayBuilder::EstablishAllocationSize(
2833     HValue* length_node) {
2834   ASSERT(length_node != NULL);
2835
2836   int base_size = JSArray::kSize;
2837   if (mode_ == TRACK_ALLOCATION_SITE) {
2838     base_size += AllocationMemento::kSize;
2839   }
2840
2841   STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize);
2842   base_size += FixedArray::kHeaderSize;
2843
2844   HInstruction* elements_size_value =
2845       builder()->Add<HConstant>(elements_size());
2846   HInstruction* mul = HMul::NewImul(builder()->zone(), builder()->context(),
2847                                     length_node, elements_size_value);
2848   builder()->AddInstruction(mul);
2849   HInstruction* base = builder()->Add<HConstant>(base_size);
2850   HInstruction* total_size = HAdd::New(builder()->zone(), builder()->context(),
2851                                        base, mul);
2852   total_size->ClearFlag(HValue::kCanOverflow);
2853   builder()->AddInstruction(total_size);
2854   return total_size;
2855 }
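
// In other words, the allocation size computed above is (sketch):
//
//   size = JSArray::kSize
//        + (mode_ == TRACK_ALLOCATION_SITE ? AllocationMemento::kSize : 0)
//        + FixedArray::kHeaderSize
//        + length * elements_size();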
2856
2857
2858 HValue* HGraphBuilder::JSArrayBuilder::EstablishEmptyArrayAllocationSize() {
2859   int base_size = JSArray::kSize;
2860   if (mode_ == TRACK_ALLOCATION_SITE) {
2861     base_size += AllocationMemento::kSize;
2862   }
2863
2864   base_size += IsFastDoubleElementsKind(kind_)
2865       ? FixedDoubleArray::SizeFor(initial_capacity())
2866       : FixedArray::SizeFor(initial_capacity());
2867
2868   return builder()->Add<HConstant>(base_size);
2869 }
2870
2871
2872 HValue* HGraphBuilder::JSArrayBuilder::AllocateEmptyArray() {
2873   HValue* size_in_bytes = EstablishEmptyArrayAllocationSize();
2874   HConstant* capacity = builder()->Add<HConstant>(initial_capacity());
2875   return AllocateArray(size_in_bytes,
2876                        capacity,
2877                        builder()->graph()->GetConstant0());
2878 }
2879
2880
2881 HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* capacity,
2882                                                      HValue* length_field,
2883                                                      FillMode fill_mode) {
2884   HValue* size_in_bytes = EstablishAllocationSize(capacity);
2885   return AllocateArray(size_in_bytes, capacity, length_field, fill_mode);
2886 }
2887
2888
2889 HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* size_in_bytes,
2890                                                      HValue* capacity,
2891                                                      HValue* length_field,
2892                                                      FillMode fill_mode) {
2893   // These HForceRepresentations are here because we store these values as
2894   // fields in the objects we construct, and an int32-to-smi HChange could
2895   // deopt. Accept the deopt possibility now, before allocation occurs.
2896   capacity =
2897       builder()->AddUncasted<HForceRepresentation>(capacity,
2898                                                    Representation::Smi());
2899   length_field =
2900       builder()->AddUncasted<HForceRepresentation>(length_field,
2901                                                    Representation::Smi());
2902   // Allocate (dealing with failure appropriately)
2903   HAllocate* new_object = builder()->Add<HAllocate>(size_in_bytes,
2904       HType::JSArray(), NOT_TENURED, JS_ARRAY_TYPE);
2905
2906   // Folded array allocation should be aligned if it has fast double elements.
2907   if (IsFastDoubleElementsKind(kind_)) {
2908      new_object->MakeDoubleAligned();
2909   }
2910
2911   // Fill in the fields: map, properties, length
2912   HValue* map;
2913   if (allocation_site_payload_ == NULL) {
2914     map = EmitInternalMapCode();
2915   } else {
2916     map = EmitMapCode();
2917   }
2918   elements_location_ = builder()->BuildJSArrayHeader(new_object,
2919                                                      map,
2920                                                      mode_,
2921                                                      kind_,
2922                                                      allocation_site_payload_,
2923                                                      length_field);
2924
2925   // Initialize the elements
2926   builder()->BuildInitializeElementsHeader(elements_location_, kind_, capacity);
2927
2928   if (fill_mode == FILL_WITH_HOLE) {
2929     builder()->BuildFillElementsWithHole(elements_location_, kind_,
2930                                          graph()->GetConstant0(), capacity);
2931   }
2932
2933   return new_object;
2934 }
2935
2936
2937 HStoreNamedField* HGraphBuilder::AddStoreMapConstant(HValue *object,
2938                                                      Handle<Map> map) {
2939   return Add<HStoreNamedField>(object, HObjectAccess::ForMap(),
2940                                Add<HConstant>(map));
2941 }
2942
2943
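// Loads the JSFunction for the given builtin by walking the chain
// context -> global object -> builtins object -> function slot for |builtin|.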
2944 HValue* HGraphBuilder::AddLoadJSBuiltin(Builtins::JavaScript builtin) {
2945   HValue* global_object = Add<HLoadNamedField>(
2946       context(), static_cast<HValue*>(NULL),
2947       HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
2948   HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(
2949       GlobalObject::kBuiltinsOffset);
2950   HValue* builtins = Add<HLoadNamedField>(
2951       global_object, static_cast<HValue*>(NULL), access);
2952   HObjectAccess function_access = HObjectAccess::ForObservableJSObjectOffset(
2953           JSBuiltinsObject::OffsetOfFunctionWithId(builtin));
2954   return Add<HLoadNamedField>(
2955       builtins, static_cast<HValue*>(NULL), function_access);
2956 }
2957
2958
2959 HOptimizedGraphBuilder::HOptimizedGraphBuilder(CompilationInfo* info)
2960     : HGraphBuilder(info),
2961       function_state_(NULL),
2962       initial_function_state_(this, info, NORMAL_RETURN, 0),
2963       ast_context_(NULL),
2964       break_scope_(NULL),
2965       inlined_count_(0),
2966       globals_(10, info->zone()),
2967       inline_bailout_(false),
2968       osr_(new(info->zone()) HOsrBuilder(this)) {
2969   // This is not initialized in the initializer list because the
2970   // constructor for the initial state relies on function_state_ == NULL
2971   // to know it's the initial state.
2972   function_state_= &initial_function_state_;
2973   InitializeAstVisitor(info->zone());
2974   if (FLAG_hydrogen_track_positions) {
2975     SetSourcePosition(info->shared_info()->start_position());
2976   }
2977 }
2978
2979
2980 HBasicBlock* HOptimizedGraphBuilder::CreateJoin(HBasicBlock* first,
2981                                                 HBasicBlock* second,
2982                                                 BailoutId join_id) {
2983   if (first == NULL) {
2984     return second;
2985   } else if (second == NULL) {
2986     return first;
2987   } else {
2988     HBasicBlock* join_block = graph()->CreateBasicBlock();
2989     Goto(first, join_block);
2990     Goto(second, join_block);
2991     join_block->SetJoinId(join_id);
2992     return join_block;
2993   }
2994 }
2995
2996
2997 HBasicBlock* HOptimizedGraphBuilder::JoinContinue(IterationStatement* statement,
2998                                                   HBasicBlock* exit_block,
2999                                                   HBasicBlock* continue_block) {
3000   if (continue_block != NULL) {
3001     if (exit_block != NULL) Goto(exit_block, continue_block);
3002     continue_block->SetJoinId(statement->ContinueId());
3003     return continue_block;
3004   }
3005   return exit_block;
3006 }
3007
3008
3009 HBasicBlock* HOptimizedGraphBuilder::CreateLoop(IterationStatement* statement,
3010                                                 HBasicBlock* loop_entry,
3011                                                 HBasicBlock* body_exit,
3012                                                 HBasicBlock* loop_successor,
3013                                                 HBasicBlock* break_block) {
3014   if (body_exit != NULL) Goto(body_exit, loop_entry);
3015   loop_entry->PostProcessLoopHeader(statement);
3016   if (break_block != NULL) {
3017     if (loop_successor != NULL) Goto(loop_successor, break_block);
3018     break_block->SetJoinId(statement->ExitId());
3019     return break_block;
3020   }
3021   return loop_successor;
3022 }
3023
3024
3025 // Build a new loop header block and set it as the current block.
3026 HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry() {
3027   HBasicBlock* loop_entry = CreateLoopHeaderBlock();
3028   Goto(loop_entry);
3029   set_current_block(loop_entry);
3030   return loop_entry;
3031 }
3032
3033
3034 HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry(
3035     IterationStatement* statement) {
3036   HBasicBlock* loop_entry = osr()->HasOsrEntryAt(statement)
3037       ? osr()->BuildOsrLoopEntry(statement)
3038       : BuildLoopEntry();
3039   return loop_entry;
3040 }
3041
3042
3043 void HBasicBlock::FinishExit(HControlInstruction* instruction,
3044                              HSourcePosition position) {
3045   Finish(instruction, position);
3046   ClearEnvironment();
3047 }
3048
3049
3050 HGraph::HGraph(CompilationInfo* info)
3051     : isolate_(info->isolate()),
3052       next_block_id_(0),
3053       entry_block_(NULL),
3054       blocks_(8, info->zone()),
3055       values_(16, info->zone()),
3056       phi_list_(NULL),
3057       uint32_instructions_(NULL),
3058       osr_(NULL),
3059       info_(info),
3060       zone_(info->zone()),
3061       is_recursive_(false),
3062       use_optimistic_licm_(false),
3063       depends_on_empty_array_proto_elements_(false),
3064       type_change_checksum_(0),
3065       maximum_environment_size_(0),
3066       no_side_effects_scope_count_(0),
3067       disallow_adding_new_values_(false),
3068       next_inline_id_(0),
3069       inlined_functions_(5, info->zone()) {
3070   if (info->IsStub()) {
3071     HydrogenCodeStub* stub = info->code_stub();
3072     CodeStubInterfaceDescriptor* descriptor = stub->GetInterfaceDescriptor();
3073     start_environment_ =
3074         new(zone_) HEnvironment(zone_, descriptor->environment_length());
3075   } else {
3076     TraceInlinedFunction(info->shared_info(), HSourcePosition::Unknown());
3077     start_environment_ =
3078         new(zone_) HEnvironment(NULL, info->scope(), info->closure(), zone_);
3079   }
3080   start_environment_->set_ast_id(BailoutId::FunctionEntry());
3081   entry_block_ = CreateBasicBlock();
3082   entry_block_->SetInitialEnvironment(start_environment_);
3083 }
3084
3085
3086 HBasicBlock* HGraph::CreateBasicBlock() {
3087   HBasicBlock* result = new(zone()) HBasicBlock(this);
3088   blocks_.Add(result, zone());
3089   return result;
3090 }
3091
3092
3093 void HGraph::FinalizeUniqueness() {
3094   DisallowHeapAllocation no_gc;
3095   ASSERT(!OptimizingCompilerThread::IsOptimizerThread(isolate()));
3096   for (int i = 0; i < blocks()->length(); ++i) {
3097     for (HInstructionIterator it(blocks()->at(i)); !it.Done(); it.Advance()) {
3098       it.Current()->FinalizeUniqueness();
3099     }
3100   }
3101 }
3102
3103
3104 int HGraph::TraceInlinedFunction(
3105     Handle<SharedFunctionInfo> shared,
3106     HSourcePosition position) {
3107   if (!FLAG_hydrogen_track_positions) {
3108     return 0;
3109   }
3110
3111   int id = 0;
3112   for (; id < inlined_functions_.length(); id++) {
3113     if (inlined_functions_[id].shared().is_identical_to(shared)) {
3114       break;
3115     }
3116   }
3117
3118   if (id == inlined_functions_.length()) {
3119     inlined_functions_.Add(InlinedFunctionInfo(shared), zone());
3120
3121     if (!shared->script()->IsUndefined()) {
3122       Handle<Script> script(Script::cast(shared->script()));
3123       if (!script->source()->IsUndefined()) {
3124         CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
3125         PrintF(tracing_scope.file(),
3126                "--- FUNCTION SOURCE (%s) id{%d,%d} ---\n",
3127                shared->DebugName()->ToCString().get(),
3128                info()->optimization_id(),
3129                id);
3130
3131         {
3132           ConsStringIteratorOp op;
3133           StringCharacterStream stream(String::cast(script->source()),
3134                                        &op,
3135                                        shared->start_position());
3136           // shared->end_position() points to the last character in the stream.
3137           // We need to compensate by adding one to calculate the length.
3138           int source_len =
3139               shared->end_position() - shared->start_position() + 1;
3140           for (int i = 0; i < source_len; i++) {
3141             if (stream.HasMore()) {
3142               PrintF(tracing_scope.file(), "%c", stream.GetNext());
3143             }
3144           }
3145         }
3146
3147         PrintF(tracing_scope.file(), "\n--- END ---\n");
3148       }
3149     }
3150   }
3151
3152   int inline_id = next_inline_id_++;
3153
3154   if (inline_id != 0) {
3155     CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
3156     PrintF(tracing_scope.file(), "INLINE (%s) id{%d,%d} AS %d AT ",
3157            shared->DebugName()->ToCString().get(),
3158            info()->optimization_id(),
3159            id,
3160            inline_id);
3161     position.PrintTo(tracing_scope.file());
3162     PrintF(tracing_scope.file(), "\n");
3163   }
3164
3165   return inline_id;
3166 }
3167
3168
3169 int HGraph::SourcePositionToScriptPosition(HSourcePosition pos) {
3170   if (!FLAG_hydrogen_track_positions || pos.IsUnknown()) {
3171     return pos.raw();
3172   }
3173
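  // The tracked position is an offset within the inlined function's source,
  // so translate it by adding that function's start position; e.g. (purely
  // illustrative) offset 7 in a function starting at script position 100 maps
  // to script position 107.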
3174   return inlined_functions_[pos.inlining_id()].start_position() +
3175       pos.position();
3176 }
3177
3178
3179 // Block ordering was implemented with two mutually recursive methods,
3180 // HGraph::Postorder and HGraph::PostorderLoopBlocks.
3181 // The recursion could lead to stack overflow so the algorithm has been
3182 // implemented iteratively.
3183 // At a high level the algorithm looks like this:
3184 //
3185 // Postorder(block, loop_header) : {
3186 //   if (block has already been visited or is of another loop) return;
3187 //   mark block as visited;
3188 //   if (block is a loop header) {
3189 //     VisitLoopMembers(block, loop_header);
3190 //     VisitSuccessorsOfLoopHeader(block);
3191 //   } else {
3192 //     VisitSuccessors(block)
3193 //   }
3194 //   put block in result list;
3195 // }
3196 //
3197 // VisitLoopMembers(block, outer_loop_header) {
3198 //   foreach (block b in block loop members) {
3199 //     VisitSuccessorsOfLoopMember(b, outer_loop_header);
3200 //     if (b is loop header) VisitLoopMembers(b);
3201 //   }
3202 // }
3203 //
3204 // VisitSuccessorsOfLoopMember(block, outer_loop_header) {
3205 //   foreach (block b in block successors) Postorder(b, outer_loop_header)
3206 // }
3207 //
3208 // VisitSuccessorsOfLoopHeader(block) {
3209 //   foreach (block b in block successors) Postorder(b, block)
3210 // }
3211 //
3212 // VisitSuccessors(block, loop_header) {
3213 //   foreach (block b in block successors) Postorder(b, loop_header)
3214 // }
3215 //
3216 // The ordering is started by calling Postorder(entry, NULL).
3217 //
3218 // Each instance of PostorderProcessor represents the "stack frame" of the
3219 // recursion; in particular it keeps the iteration state of the "Visit..."
3220 // loop it represents.
3221 // To recycle memory we keep all the frames in a doubly linked list, but
3222 // this means that we cannot use constructors to initialize the frames.
3223 //
3224 class PostorderProcessor : public ZoneObject {
3225  public:
3226   // Back link (towards the stack bottom).
3227   PostorderProcessor* parent() { return father_; }
3228   // Forward link (towards the stack top).
3229   PostorderProcessor* child() { return child_; }
3230   HBasicBlock* block() { return block_; }
3231   HLoopInformation* loop() { return loop_; }
3232   HBasicBlock* loop_header() { return loop_header_; }
3233
3234   static PostorderProcessor* CreateEntryProcessor(Zone* zone,
3235                                                   HBasicBlock* block,
3236                                                   BitVector* visited) {
3237     PostorderProcessor* result = new(zone) PostorderProcessor(NULL);
3238     return result->SetupSuccessors(zone, block, NULL, visited);
3239   }
3240
3241   PostorderProcessor* PerformStep(Zone* zone,
3242                                   BitVector* visited,
3243                                   ZoneList<HBasicBlock*>* order) {
3244     PostorderProcessor* next =
3245         PerformNonBacktrackingStep(zone, visited, order);
3246     if (next != NULL) {
3247       return next;
3248     } else {
3249       return Backtrack(zone, visited, order);
3250     }
3251   }
3252
3253  private:
3254   explicit PostorderProcessor(PostorderProcessor* father)
3255       : father_(father), child_(NULL), successor_iterator(NULL) { }
3256
3257   // Each enum value names the loop whose iteration state this instance keeps.
3258   enum LoopKind {
3259     NONE,
3260     SUCCESSORS,
3261     SUCCESSORS_OF_LOOP_HEADER,
3262     LOOP_MEMBERS,
3263     SUCCESSORS_OF_LOOP_MEMBER
3264   };
3265
3266   // Each "Setup..." method is like a constructor for a cycle state.
3267   PostorderProcessor* SetupSuccessors(Zone* zone,
3268                                       HBasicBlock* block,
3269                                       HBasicBlock* loop_header,
3270                                       BitVector* visited) {
3271     if (block == NULL || visited->Contains(block->block_id()) ||
3272         block->parent_loop_header() != loop_header) {
3273       kind_ = NONE;
3274       block_ = NULL;
3275       loop_ = NULL;
3276       loop_header_ = NULL;
3277       return this;
3278     } else {
3279       block_ = block;
3280       loop_ = NULL;
3281       visited->Add(block->block_id());
3282
3283       if (block->IsLoopHeader()) {
3284         kind_ = SUCCESSORS_OF_LOOP_HEADER;
3285         loop_header_ = block;
3286         InitializeSuccessors();
3287         PostorderProcessor* result = Push(zone);
3288         return result->SetupLoopMembers(zone, block, block->loop_information(),
3289                                         loop_header);
3290       } else {
3291         ASSERT(block->IsFinished());
3292         kind_ = SUCCESSORS;
3293         loop_header_ = loop_header;
3294         InitializeSuccessors();
3295         return this;
3296       }
3297     }
3298   }
3299
3300   PostorderProcessor* SetupLoopMembers(Zone* zone,
3301                                        HBasicBlock* block,
3302                                        HLoopInformation* loop,
3303                                        HBasicBlock* loop_header) {
3304     kind_ = LOOP_MEMBERS;
3305     block_ = block;
3306     loop_ = loop;
3307     loop_header_ = loop_header;
3308     InitializeLoopMembers();
3309     return this;
3310   }
3311
3312   PostorderProcessor* SetupSuccessorsOfLoopMember(
3313       HBasicBlock* block,
3314       HLoopInformation* loop,
3315       HBasicBlock* loop_header) {
3316     kind_ = SUCCESSORS_OF_LOOP_MEMBER;
3317     block_ = block;
3318     loop_ = loop;
3319     loop_header_ = loop_header;
3320     InitializeSuccessors();
3321     return this;
3322   }
3323
3324   // This method "allocates" a new stack frame.
3325   PostorderProcessor* Push(Zone* zone) {
3326     if (child_ == NULL) {
3327       child_ = new(zone) PostorderProcessor(this);
3328     }
3329     return child_;
3330   }
3331
3332   void ClosePostorder(ZoneList<HBasicBlock*>* order, Zone* zone) {
3333     ASSERT(block_->end()->FirstSuccessor() == NULL ||
3334            order->Contains(block_->end()->FirstSuccessor()) ||
3335            block_->end()->FirstSuccessor()->IsLoopHeader());
3336     ASSERT(block_->end()->SecondSuccessor() == NULL ||
3337            order->Contains(block_->end()->SecondSuccessor()) ||
3338            block_->end()->SecondSuccessor()->IsLoopHeader());
3339     order->Add(block_, zone);
3340   }
3341
3342   // This is the basic step for walking up the stack: it pops one frame.
3343   PostorderProcessor* Pop(Zone* zone,
3344                           BitVector* visited,
3345                           ZoneList<HBasicBlock*>* order) {
3346     switch (kind_) {
3347       case SUCCESSORS:
3348       case SUCCESSORS_OF_LOOP_HEADER:
3349         ClosePostorder(order, zone);
3350         return father_;
3351       case LOOP_MEMBERS:
3352         return father_;
3353       case SUCCESSORS_OF_LOOP_MEMBER:
3354         if (block()->IsLoopHeader() && block() != loop_->loop_header()) {
3355           // In this case we need to perform a LOOP_MEMBERS cycle so we
3356           // initialize it and return this instead of father.
3357           return SetupLoopMembers(zone, block(),
3358                                   block()->loop_information(), loop_header_);
3359         } else {
3360           return father_;
3361         }
3362       case NONE:
3363         return father_;
3364     }
3365     UNREACHABLE();
3366     return NULL;
3367   }
3368
3369   // Walks up the stack.
3370   PostorderProcessor* Backtrack(Zone* zone,
3371                                 BitVector* visited,
3372                                 ZoneList<HBasicBlock*>* order) {
3373     PostorderProcessor* parent = Pop(zone, visited, order);
3374     while (parent != NULL) {
3375       PostorderProcessor* next =
3376           parent->PerformNonBacktrackingStep(zone, visited, order);
3377       if (next != NULL) {
3378         return next;
3379       } else {
3380         parent = parent->Pop(zone, visited, order);
3381       }
3382     }
3383     return NULL;
3384   }
3385
3386   PostorderProcessor* PerformNonBacktrackingStep(
3387       Zone* zone,
3388       BitVector* visited,
3389       ZoneList<HBasicBlock*>* order) {
3390     HBasicBlock* next_block;
3391     switch (kind_) {
3392       case SUCCESSORS:
3393         next_block = AdvanceSuccessors();
3394         if (next_block != NULL) {
3395           PostorderProcessor* result = Push(zone);
3396           return result->SetupSuccessors(zone, next_block,
3397                                          loop_header_, visited);
3398         }
3399         break;
3400       case SUCCESSORS_OF_LOOP_HEADER:
3401         next_block = AdvanceSuccessors();
3402         if (next_block != NULL) {
3403           PostorderProcessor* result = Push(zone);
3404           return result->SetupSuccessors(zone, next_block,
3405                                          block(), visited);
3406         }
3407         break;
3408       case LOOP_MEMBERS:
3409         next_block = AdvanceLoopMembers();
3410         if (next_block != NULL) {
3411           PostorderProcessor* result = Push(zone);
3412           return result->SetupSuccessorsOfLoopMember(next_block,
3413                                                      loop_, loop_header_);
3414         }
3415         break;
3416       case SUCCESSORS_OF_LOOP_MEMBER:
3417         next_block = AdvanceSuccessors();
3418         if (next_block != NULL) {
3419           PostorderProcessor* result = Push(zone);
3420           return result->SetupSuccessors(zone, next_block,
3421                                          loop_header_, visited);
3422         }
3423         break;
3424       case NONE:
3425         return NULL;
3426     }
3427     return NULL;
3428   }
3429
3430   // The following two methods implement a "foreach b in successors" loop.
3431   void InitializeSuccessors() {
3432     loop_index = 0;
3433     loop_length = 0;
3434     successor_iterator = HSuccessorIterator(block_->end());
3435   }
3436
3437   HBasicBlock* AdvanceSuccessors() {
3438     if (!successor_iterator.Done()) {
3439       HBasicBlock* result = successor_iterator.Current();
3440       successor_iterator.Advance();
3441       return result;
3442     }
3443     return NULL;
3444   }
3445
3446   // The following two methods implement a "foreach b in loop members" loop.
3447   void InitializeLoopMembers() {
3448     loop_index = 0;
3449     loop_length = loop_->blocks()->length();
3450   }
3451
3452   HBasicBlock* AdvanceLoopMembers() {
3453     if (loop_index < loop_length) {
3454       HBasicBlock* result = loop_->blocks()->at(loop_index);
3455       loop_index++;
3456       return result;
3457     } else {
3458       return NULL;
3459     }
3460   }
3461
3462   LoopKind kind_;
3463   PostorderProcessor* father_;
3464   PostorderProcessor* child_;
3465   HLoopInformation* loop_;
3466   HBasicBlock* block_;
3467   HBasicBlock* loop_header_;
3468   int loop_index;
3469   int loop_length;
3470   HSuccessorIterator successor_iterator;
3471 };
3472
3473
3474 void HGraph::OrderBlocks() {
3475   CompilationPhase phase("H_Block ordering", info());
3476   BitVector visited(blocks_.length(), zone());
3477
3478   ZoneList<HBasicBlock*> reverse_result(8, zone());
3479   HBasicBlock* start = blocks_[0];
3480   PostorderProcessor* postorder =
3481       PostorderProcessor::CreateEntryProcessor(zone(), start, &visited);
3482   while (postorder != NULL) {
3483     postorder = postorder->PerformStep(zone(), &visited, &reverse_result);
3484   }
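  // The processor emitted blocks into reverse_result in postorder; copying
  // them back to front therefore produces a reverse postorder, and block ids
  // are reassigned to match the new order.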
3485   blocks_.Rewind(0);
3486   int index = 0;
3487   for (int i = reverse_result.length() - 1; i >= 0; --i) {
3488     HBasicBlock* b = reverse_result[i];
3489     blocks_.Add(b, zone());
3490     b->set_block_id(index++);
3491   }
3492 }
3493
3494
3495 void HGraph::AssignDominators() {
3496   HPhase phase("H_Assign dominators", this);
3497   for (int i = 0; i < blocks_.length(); ++i) {
3498     HBasicBlock* block = blocks_[i];
3499     if (block->IsLoopHeader()) {
3500       // Only the first predecessor of a loop header is from outside the loop.
3501       // All others are back edges, and thus cannot dominate the loop header.
3502       block->AssignCommonDominator(block->predecessors()->first());
3503       block->AssignLoopSuccessorDominators();
3504     } else {
3505       for (int j = blocks_[i]->predecessors()->length() - 1; j >= 0; --j) {
3506         blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j));
3507       }
3508     }
3509   }
3510 }
3511
3512
3513 bool HGraph::CheckArgumentsPhiUses() {
3514   int block_count = blocks_.length();
3515   for (int i = 0; i < block_count; ++i) {
3516     for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
3517       HPhi* phi = blocks_[i]->phis()->at(j);
3518       // We don't support phi uses of arguments for now.
3519       if (phi->CheckFlag(HValue::kIsArguments)) return false;
3520     }
3521   }
3522   return true;
3523 }
3524
3525
3526 bool HGraph::CheckConstPhiUses() {
3527   int block_count = blocks_.length();
3528   for (int i = 0; i < block_count; ++i) {
3529     for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
3530       HPhi* phi = blocks_[i]->phis()->at(j);
3531       // Check for the hole value (from an uninitialized const).
3532       for (int k = 0; k < phi->OperandCount(); k++) {
3533         if (phi->OperandAt(k) == GetConstantHole()) return false;
3534       }
3535     }
3536   }
3537   return true;
3538 }
3539
3540
3541 void HGraph::CollectPhis() {
3542   int block_count = blocks_.length();
3543   phi_list_ = new(zone()) ZoneList<HPhi*>(block_count, zone());
3544   for (int i = 0; i < block_count; ++i) {
3545     for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
3546       HPhi* phi = blocks_[i]->phis()->at(j);
3547       phi_list_->Add(phi, zone());
3548     }
3549   }
3550 }
3551
3552
3553 // Implementation of utility class to encapsulate the translation state for
3554 // a (possibly inlined) function.
3555 FunctionState::FunctionState(HOptimizedGraphBuilder* owner,
3556                              CompilationInfo* info,
3557                              InliningKind inlining_kind,
3558                              int inlining_id)
3559     : owner_(owner),
3560       compilation_info_(info),
3561       call_context_(NULL),
3562       inlining_kind_(inlining_kind),
3563       function_return_(NULL),
3564       test_context_(NULL),
3565       entry_(NULL),
3566       arguments_object_(NULL),
3567       arguments_elements_(NULL),
3568       inlining_id_(inlining_id),
3569       outer_source_position_(HSourcePosition::Unknown()),
3570       outer_(owner->function_state()) {
3571   if (outer_ != NULL) {
3572     // State for an inline function.
3573     if (owner->ast_context()->IsTest()) {
3574       HBasicBlock* if_true = owner->graph()->CreateBasicBlock();
3575       HBasicBlock* if_false = owner->graph()->CreateBasicBlock();
3576       if_true->MarkAsInlineReturnTarget(owner->current_block());
3577       if_false->MarkAsInlineReturnTarget(owner->current_block());
3578       TestContext* outer_test_context = TestContext::cast(owner->ast_context());
3579       Expression* cond = outer_test_context->condition();
3580       // The AstContext constructor pushes the new context on the context
3581       // stack.  This newed instance is why AstContext can't be BASE_EMBEDDED.
3582       test_context_ = new TestContext(owner, cond, if_true, if_false);
3583     } else {
3584       function_return_ = owner->graph()->CreateBasicBlock();
3585       function_return()->MarkAsInlineReturnTarget(owner->current_block());
3586     }
3587     // Set this after possibly allocating a new TestContext above.
3588     call_context_ = owner->ast_context();
3589   }
3590
3591   // Push on the state stack.
3592   owner->set_function_state(this);
3593
3594   if (FLAG_hydrogen_track_positions) {
3595     outer_source_position_ = owner->source_position();
3596     owner->EnterInlinedSource(
3597       info->shared_info()->start_position(),
3598       inlining_id);
3599     owner->SetSourcePosition(info->shared_info()->start_position());
3600   }
3601 }
3602
3603
3604 FunctionState::~FunctionState() {
3605   delete test_context_;
3606   owner_->set_function_state(outer_);
3607
3608   if (FLAG_hydrogen_track_positions) {
3609     owner_->set_source_position(outer_source_position_);
3610     owner_->EnterInlinedSource(
3611       outer_->compilation_info()->shared_info()->start_position(),
3612       outer_->inlining_id());
3613   }
3614 }
3615
3616
3617 // Implementation of utility classes to represent an expression's context in
3618 // the AST.
3619 AstContext::AstContext(HOptimizedGraphBuilder* owner, Expression::Context kind)
3620     : owner_(owner),
3621       kind_(kind),
3622       outer_(owner->ast_context()),
3623       for_typeof_(false) {
3624   owner->set_ast_context(this);  // Push.
3625 #ifdef DEBUG
3626   ASSERT(owner->environment()->frame_type() == JS_FUNCTION);
3627   original_length_ = owner->environment()->length();
3628 #endif
3629 }
3630
3631
3632 AstContext::~AstContext() {
3633   owner_->set_ast_context(outer_);  // Pop.
3634 }
3635
3636
3637 EffectContext::~EffectContext() {
3638   ASSERT(owner()->HasStackOverflow() ||
3639          owner()->current_block() == NULL ||
3640          (owner()->environment()->length() == original_length_ &&
3641           owner()->environment()->frame_type() == JS_FUNCTION));
3642 }
3643
3644
3645 ValueContext::~ValueContext() {
3646   ASSERT(owner()->HasStackOverflow() ||
3647          owner()->current_block() == NULL ||
3648          (owner()->environment()->length() == original_length_ + 1 &&
3649           owner()->environment()->frame_type() == JS_FUNCTION));
3650 }
3651
3652
3653 void EffectContext::ReturnValue(HValue* value) {
3654   // The value is simply ignored.
3655 }
3656
3657
3658 void ValueContext::ReturnValue(HValue* value) {
3659   // The value is tracked in the bailout environment, and communicated
3660   // through the environment as the result of the expression.
3661   if (!arguments_allowed() && value->CheckFlag(HValue::kIsArguments)) {
3662     owner()->Bailout(kBadValueContextForArgumentsValue);
3663   }
3664   owner()->Push(value);
3665 }
3666
3667
3668 void TestContext::ReturnValue(HValue* value) {
3669   BuildBranch(value);
3670 }
3671
3672
3673 void EffectContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
3674   ASSERT(!instr->IsControlInstruction());
3675   owner()->AddInstruction(instr);
3676   if (instr->HasObservableSideEffects()) {
3677     owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
3678   }
3679 }
3680
3681
3682 void EffectContext::ReturnControl(HControlInstruction* instr,
3683                                   BailoutId ast_id) {
3684   ASSERT(!instr->HasObservableSideEffects());
3685   HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
3686   HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
3687   instr->SetSuccessorAt(0, empty_true);
3688   instr->SetSuccessorAt(1, empty_false);
3689   owner()->FinishCurrentBlock(instr);
3690   HBasicBlock* join = owner()->CreateJoin(empty_true, empty_false, ast_id);
3691   owner()->set_current_block(join);
3692 }
3693
3694
3695 void EffectContext::ReturnContinuation(HIfContinuation* continuation,
3696                                        BailoutId ast_id) {
3697   HBasicBlock* true_branch = NULL;
3698   HBasicBlock* false_branch = NULL;
3699   continuation->Continue(&true_branch, &false_branch);
3700   if (!continuation->IsTrueReachable()) {
3701     owner()->set_current_block(false_branch);
3702   } else if (!continuation->IsFalseReachable()) {
3703     owner()->set_current_block(true_branch);
3704   } else {
3705     HBasicBlock* join = owner()->CreateJoin(true_branch, false_branch, ast_id);
3706     owner()->set_current_block(join);
3707   }
3708 }
3709
3710
3711 void ValueContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
3712   ASSERT(!instr->IsControlInstruction());
3713   if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
3714     return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
3715   }
3716   owner()->AddInstruction(instr);
3717   owner()->Push(instr);
3718   if (instr->HasObservableSideEffects()) {
3719     owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
3720   }
3721 }
3722
3723
3724 void ValueContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
3725   ASSERT(!instr->HasObservableSideEffects());
3726   if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
3727     return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
3728   }
3729   HBasicBlock* materialize_false = owner()->graph()->CreateBasicBlock();
3730   HBasicBlock* materialize_true = owner()->graph()->CreateBasicBlock();
3731   instr->SetSuccessorAt(0, materialize_true);
3732   instr->SetSuccessorAt(1, materialize_false);
3733   owner()->FinishCurrentBlock(instr);
3734   owner()->set_current_block(materialize_true);
3735   owner()->Push(owner()->graph()->GetConstantTrue());
3736   owner()->set_current_block(materialize_false);
3737   owner()->Push(owner()->graph()->GetConstantFalse());
3738   HBasicBlock* join =
3739     owner()->CreateJoin(materialize_true, materialize_false, ast_id);
3740   owner()->set_current_block(join);
3741 }
3742
3743
3744 void ValueContext::ReturnContinuation(HIfContinuation* continuation,
3745                                       BailoutId ast_id) {
3746   HBasicBlock* materialize_true = NULL;
3747   HBasicBlock* materialize_false = NULL;
3748   continuation->Continue(&materialize_true, &materialize_false);
3749   if (continuation->IsTrueReachable()) {
3750     owner()->set_current_block(materialize_true);
3751     owner()->Push(owner()->graph()->GetConstantTrue());
3752     owner()->set_current_block(materialize_true);
3753   }
3754   if (continuation->IsFalseReachable()) {
3755     owner()->set_current_block(materialize_false);
3756     owner()->Push(owner()->graph()->GetConstantFalse());
3757     owner()->set_current_block(materialize_false);
3758   }
3759   if (continuation->TrueAndFalseReachable()) {
3760     HBasicBlock* join =
3761         owner()->CreateJoin(materialize_true, materialize_false, ast_id);
3762     owner()->set_current_block(join);
3763   }
3764 }
3765
3766
3767 void TestContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
3768   ASSERT(!instr->IsControlInstruction());
3769   HOptimizedGraphBuilder* builder = owner();
3770   builder->AddInstruction(instr);
3771   // We expect a simulate after every expression with side effects, though
3772   // this one isn't actually needed (and wouldn't work if it were targeted).
3773   if (instr->HasObservableSideEffects()) {
3774     builder->Push(instr);
3775     builder->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
3776     builder->Pop();
3777   }
3778   BuildBranch(instr);
3779 }
3780
3781
3782 void TestContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
3783   ASSERT(!instr->HasObservableSideEffects());
3784   HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
3785   HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
3786   instr->SetSuccessorAt(0, empty_true);
3787   instr->SetSuccessorAt(1, empty_false);
3788   owner()->FinishCurrentBlock(instr);
3789   owner()->Goto(empty_true, if_true(), owner()->function_state());
3790   owner()->Goto(empty_false, if_false(), owner()->function_state());
3791   owner()->set_current_block(NULL);
3792 }
3793
3794
3795 void TestContext::ReturnContinuation(HIfContinuation* continuation,
3796                                      BailoutId ast_id) {
3797   HBasicBlock* true_branch = NULL;
3798   HBasicBlock* false_branch = NULL;
3799   continuation->Continue(&true_branch, &false_branch);
3800   if (continuation->IsTrueReachable()) {
3801     owner()->Goto(true_branch, if_true(), owner()->function_state());
3802   }
3803   if (continuation->IsFalseReachable()) {
3804     owner()->Goto(false_branch, if_false(), owner()->function_state());
3805   }
3806   owner()->set_current_block(NULL);
3807 }
3808
3809
3810 void TestContext::BuildBranch(HValue* value) {
3811   // We expect the graph to be in edge-split form: there is no edge that
3812   // connects a branch node to a join node.  We conservatively ensure that
3813   // property by always adding an empty block on the outgoing edges of this
3814   // branch.
3815   HOptimizedGraphBuilder* builder = owner();
3816   if (value != NULL && value->CheckFlag(HValue::kIsArguments)) {
3817     builder->Bailout(kArgumentsObjectValueInATestContext);
3818   }
3819   ToBooleanStub::Types expected(condition()->to_boolean_types());
3820   ReturnControl(owner()->New<HBranch>(value, expected), BailoutId::None());
3821 }
3822
3823
3824 // HOptimizedGraphBuilder infrastructure for bailing out and checking bailouts.
3825 #define CHECK_BAILOUT(call)                     \
3826   do {                                          \
3827     call;                                       \
3828     if (HasStackOverflow()) return;             \
3829   } while (false)
3830
3831
3832 #define CHECK_ALIVE(call)                                       \
3833   do {                                                          \
3834     call;                                                       \
3835     if (HasStackOverflow() || current_block() == NULL) return;  \
3836   } while (false)
3837
3838
3839 #define CHECK_ALIVE_OR_RETURN(call, value)                            \
3840   do {                                                                \
3841     call;                                                             \
3842     if (HasStackOverflow() || current_block() == NULL) return value;  \
3843   } while (false)
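// Note: CHECK_BAILOUT only tests for a pending bailout (stack overflow), while
// the CHECK_ALIVE variants additionally stop when the current block has been
// closed, e.g. after an unconditional control transfer.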
3844
3845
3846 void HOptimizedGraphBuilder::Bailout(BailoutReason reason) {
3847   current_info()->set_bailout_reason(reason);
3848   SetStackOverflow();
3849 }
3850
3851
3852 void HOptimizedGraphBuilder::VisitForEffect(Expression* expr) {
3853   EffectContext for_effect(this);
3854   Visit(expr);
3855 }
3856
3857
3858 void HOptimizedGraphBuilder::VisitForValue(Expression* expr,
3859                                            ArgumentsAllowedFlag flag) {
3860   ValueContext for_value(this, flag);
3861   Visit(expr);
3862 }
3863
3864
3865 void HOptimizedGraphBuilder::VisitForTypeOf(Expression* expr) {
3866   ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
3867   for_value.set_for_typeof(true);
3868   Visit(expr);
3869 }
3870
3871
3872 void HOptimizedGraphBuilder::VisitForControl(Expression* expr,
3873                                              HBasicBlock* true_block,
3874                                              HBasicBlock* false_block) {
3875   TestContext for_test(this, expr, true_block, false_block);
3876   Visit(expr);
3877 }
3878
3879
3880 void HOptimizedGraphBuilder::VisitExpressions(
3881     ZoneList<Expression*>* exprs) {
3882   for (int i = 0; i < exprs->length(); ++i) {
3883     CHECK_ALIVE(VisitForValue(exprs->at(i)));
3884   }
3885 }
3886
3887
3888 bool HOptimizedGraphBuilder::BuildGraph() {
3889   if (current_info()->function()->is_generator()) {
3890     Bailout(kFunctionIsAGenerator);
3891     return false;
3892   }
3893   Scope* scope = current_info()->scope();
3894   if (scope->HasIllegalRedeclaration()) {
3895     Bailout(kFunctionWithIllegalRedeclaration);
3896     return false;
3897   }
3898   if (scope->calls_eval()) {
3899     Bailout(kFunctionCallsEval);
3900     return false;
3901   }
3902   SetUpScope(scope);
3903
3904   // Add an edge to the body entry.  This is warty: the graph's start
3905   // environment will be used by the Lithium translation as the initial
3906   // environment on graph entry, but it has now been mutated by the
3907   // Hydrogen translation of the instructions in the start block.  This
3908   // environment uses values which have not been defined yet.  These
3909   // Hydrogen instructions will then be replayed by the Lithium
3910   // translation, so they cannot have an environment effect.  The edge to
3911   // the body's entry block (along with some special logic for the start
3912   // block in HInstruction::InsertAfter) seals the start block from
3913   // getting unwanted instructions inserted.
3914   //
3915   // TODO(kmillikin): Fix this.  Stop mutating the initial environment.
3916   // Make the Hydrogen instructions in the initial block into Hydrogen
3917   // values (but not instructions), present in the initial environment and
3918   // not replayed by the Lithium translation.
3919   HEnvironment* initial_env = environment()->CopyWithoutHistory();
3920   HBasicBlock* body_entry = CreateBasicBlock(initial_env);
3921   Goto(body_entry);
3922   body_entry->SetJoinId(BailoutId::FunctionEntry());
3923   set_current_block(body_entry);
3924
3925   // Handle implicit declaration of the function name in named function
3926   // expressions before other declarations.
3927   if (scope->is_function_scope() && scope->function() != NULL) {
3928     VisitVariableDeclaration(scope->function());
3929   }
3930   VisitDeclarations(scope->declarations());
3931   Add<HSimulate>(BailoutId::Declarations());
3932
3933   Add<HStackCheck>(HStackCheck::kFunctionEntry);
3934
3935   VisitStatements(current_info()->function()->body());
3936   if (HasStackOverflow()) return false;
3937
3938   if (current_block() != NULL) {
3939     Add<HReturn>(graph()->GetConstantUndefined());
3940     set_current_block(NULL);
3941   }
3942
3943   // If the checksum of the number of type info changes is the same as the
3944   // last time this function was compiled, then this recompile is likely not
3945   // due to missing/inadequate type feedback, but rather too aggressive
3946   // optimization. Disable optimistic LICM in that case.
3947   Handle<Code> unoptimized_code(current_info()->shared_info()->code());
3948   ASSERT(unoptimized_code->kind() == Code::FUNCTION);
3949   Handle<TypeFeedbackInfo> type_info(
3950       TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
3951   int checksum = type_info->own_type_change_checksum();
3952   int composite_checksum = graph()->update_type_change_checksum(checksum);
3953   graph()->set_use_optimistic_licm(
3954       !type_info->matches_inlined_type_change_checksum(composite_checksum));
3955   type_info->set_inlined_type_change_checksum(composite_checksum);
3956
3957   // Perform any necessary OSR-specific cleanups or changes to the graph.
3958   osr()->FinishGraph();
3959
3960   return true;
3961 }
3962
3963
3964 bool HGraph::Optimize(BailoutReason* bailout_reason) {
3965   OrderBlocks();
3966   AssignDominators();
3967
3968   // We need to create an HConstant "zero" now so that GVN will fold every
3969   // zero-valued constant in the graph together.
3970   // The constant is needed to make idef-based bounds checks work: that pass
3971   // evaluates relations with "zero", and the zero cannot be created after GVN.
3972   GetConstant0();
3973
3974 #ifdef DEBUG
3975   // Do a full verify after building the graph and computing dominators.
3976   Verify(true);
3977 #endif
3978
3979   if (FLAG_analyze_environment_liveness && maximum_environment_size() != 0) {
3980     Run<HEnvironmentLivenessAnalysisPhase>();
3981   }
3982
3983   if (!CheckConstPhiUses()) {
3984     *bailout_reason = kUnsupportedPhiUseOfConstVariable;
3985     return false;
3986   }
3987   Run<HRedundantPhiEliminationPhase>();
3988   if (!CheckArgumentsPhiUses()) {
3989     *bailout_reason = kUnsupportedPhiUseOfArguments;
3990     return false;
3991   }
3992
3993   // Find and mark unreachable code to simplify optimizations, especially GVN,
3994   // where unreachable code could unnecessarily defeat LICM.
3995   Run<HMarkUnreachableBlocksPhase>();
3996
3997   if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();
3998   if (FLAG_use_escape_analysis) Run<HEscapeAnalysisPhase>();
3999
4000   if (FLAG_load_elimination) Run<HLoadEliminationPhase>();
4001
4002   CollectPhis();
4003
4004   if (has_osr()) osr()->FinishOsrValues();
4005
4006   Run<HInferRepresentationPhase>();
4007
4008   // Remove HSimulate instructions that have turned out not to be needed
4009   // after all by folding them into the following HSimulate.
4010   // This must happen after inferring representations.
4011   Run<HMergeRemovableSimulatesPhase>();
4012
4013   Run<HMarkDeoptimizeOnUndefinedPhase>();
4014   Run<HRepresentationChangesPhase>();
4015
4016   Run<HInferTypesPhase>();
4017
4018   // Must be performed before canonicalization to ensure that Canonicalize
4019   // will not remove semantically meaningful ToInt32 operations e.g. BIT_OR with
4020   // zero.
4021   if (FLAG_opt_safe_uint32_operations) Run<HUint32AnalysisPhase>();
4022
4023   if (FLAG_use_canonicalizing) Run<HCanonicalizePhase>();
4024
4025   if (FLAG_use_gvn) Run<HGlobalValueNumberingPhase>();
4026
4027   if (FLAG_check_elimination) Run<HCheckEliminationPhase>();
4028
4029   if (FLAG_store_elimination) Run<HStoreEliminationPhase>();
4030
4031   Run<HRangeAnalysisPhase>();
4032
4033   Run<HComputeChangeUndefinedToNaN>();
4034
4035   // Eliminate redundant stack checks on backwards branches.
4036   Run<HStackCheckEliminationPhase>();
4037
4038   if (FLAG_array_bounds_checks_elimination) Run<HBoundsCheckEliminationPhase>();
4039   if (FLAG_array_bounds_checks_hoisting) Run<HBoundsCheckHoistingPhase>();
4040   if (FLAG_array_index_dehoisting) Run<HDehoistIndexComputationsPhase>();
4041   if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();
4042
4043   RestoreActualValues();
4044
4045   // Find unreachable code a second time; GVN and other optimizations may have
4046   // made previously reachable blocks unreachable.
4047   Run<HMarkUnreachableBlocksPhase>();
4048
4049   return true;
4050 }
4051
4052
4053 void HGraph::RestoreActualValues() {
4054   HPhase phase("H_Restore actual values", this);
4055
4056   for (int block_index = 0; block_index < blocks()->length(); block_index++) {
4057     HBasicBlock* block = blocks()->at(block_index);
4058
4059 #ifdef DEBUG
4060     for (int i = 0; i < block->phis()->length(); i++) {
4061       HPhi* phi = block->phis()->at(i);
4062       ASSERT(phi->ActualValue() == phi);
4063     }
4064 #endif
4065
4066     for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
4067       HInstruction* instruction = it.Current();
4068       if (instruction->ActualValue() == instruction) continue;
4069       if (instruction->CheckFlag(HValue::kIsDead)) {
4070         // The instruction was marked as deleted but left in the graph
4071         // as a control flow dependency point for subsequent
4072         // instructions.
4073         instruction->DeleteAndReplaceWith(instruction->ActualValue());
4074       } else {
4075         ASSERT(instruction->IsInformativeDefinition());
4076         if (instruction->IsPurelyInformativeDefinition()) {
4077           instruction->DeleteAndReplaceWith(instruction->RedefinedOperand());
4078         } else {
4079           instruction->ReplaceAllUsesWith(instruction->ActualValue());
4080         }
4081       }
4082     }
4083   }
4084 }
4085
4086
4087 void HOptimizedGraphBuilder::PushArgumentsFromEnvironment(int count) {
4088   ZoneList<HValue*> arguments(count, zone());
4089   for (int i = 0; i < count; ++i) {
4090     arguments.Add(Pop(), zone());
4091   }
4092
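  // Pop() above returned the arguments last-first, so taking them from the
  // back of the temporary list pushes them in their original (call) order.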
4093   while (!arguments.is_empty()) {
4094     Add<HPushArgument>(arguments.RemoveLast());
4095   }
4096 }
4097
4098
4099 template <class Instruction>
4100 HInstruction* HOptimizedGraphBuilder::PreProcessCall(Instruction* call) {
4101   PushArgumentsFromEnvironment(call->argument_count());
4102   return call;
4103 }
4104
4105
4106 void HOptimizedGraphBuilder::SetUpScope(Scope* scope) {
4107   // First special is HContext.
4108   HInstruction* context = Add<HContext>();
4109   environment()->BindContext(context);
4110
4111   // Create an arguments object containing the initial parameters.  Set the
4112   // initial values of the parameters, including "this", which has index 0.
4113   ASSERT_EQ(scope->num_parameters() + 1, environment()->parameter_count());
4114   HArgumentsObject* arguments_object =
4115       New<HArgumentsObject>(environment()->parameter_count());
4116   for (int i = 0; i < environment()->parameter_count(); ++i) {
4117     HInstruction* parameter = Add<HParameter>(i);
4118     arguments_object->AddArgument(parameter, zone());
4119     environment()->Bind(i, parameter);
4120   }
4121   AddInstruction(arguments_object);
4122   graph()->SetArgumentsObject(arguments_object);
4123
4124   HConstant* undefined_constant = graph()->GetConstantUndefined();
4125   // Initialize specials and locals to undefined.
4126   for (int i = environment()->parameter_count() + 1;
4127        i < environment()->length();
4128        ++i) {
4129     environment()->Bind(i, undefined_constant);
4130   }
4131
4132   // Handle the arguments and arguments shadow variables specially (they do
4133   // not have declarations).
4134   if (scope->arguments() != NULL) {
4135     if (!scope->arguments()->IsStackAllocated()) {
4136       return Bailout(kContextAllocatedArguments);
4137     }
4138
4139     environment()->Bind(scope->arguments(),
4140                         graph()->GetArgumentsObject());
4141   }
4142 }
4143
4144
4145 void HOptimizedGraphBuilder::VisitStatements(ZoneList<Statement*>* statements) {
4146   for (int i = 0; i < statements->length(); i++) {
4147     Statement* stmt = statements->at(i);
4148     CHECK_ALIVE(Visit(stmt));
4149     if (stmt->IsJump()) break;
4150   }
4151 }
4152
4153
4154 void HOptimizedGraphBuilder::VisitBlock(Block* stmt) {
4155   ASSERT(!HasStackOverflow());
4156   ASSERT(current_block() != NULL);
4157   ASSERT(current_block()->HasPredecessor());
4158   if (stmt->scope() != NULL) {
4159     return Bailout(kScopedBlock);
4160   }
4161   BreakAndContinueInfo break_info(stmt);
4162   { BreakAndContinueScope push(&break_info, this);
4163     CHECK_BAILOUT(VisitStatements(stmt->statements()));
4164   }
4165   HBasicBlock* break_block = break_info.break_block();
4166   if (break_block != NULL) {
4167     if (current_block() != NULL) Goto(break_block);
4168     break_block->SetJoinId(stmt->ExitId());
4169     set_current_block(break_block);
4170   }
4171 }
4172
4173
4174 void HOptimizedGraphBuilder::VisitExpressionStatement(
4175     ExpressionStatement* stmt) {
4176   ASSERT(!HasStackOverflow());
4177   ASSERT(current_block() != NULL);
4178   ASSERT(current_block()->HasPredecessor());
4179   VisitForEffect(stmt->expression());
4180 }
4181
4182
4183 void HOptimizedGraphBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
4184   ASSERT(!HasStackOverflow());
4185   ASSERT(current_block() != NULL);
4186   ASSERT(current_block()->HasPredecessor());
4187 }
4188
4189
4190 void HOptimizedGraphBuilder::VisitIfStatement(IfStatement* stmt) {
4191   ASSERT(!HasStackOverflow());
4192   ASSERT(current_block() != NULL);
4193   ASSERT(current_block()->HasPredecessor());
4194   if (stmt->condition()->ToBooleanIsTrue()) {
4195     Add<HSimulate>(stmt->ThenId());
4196     Visit(stmt->then_statement());
4197   } else if (stmt->condition()->ToBooleanIsFalse()) {
4198     Add<HSimulate>(stmt->ElseId());
4199     Visit(stmt->else_statement());
4200   } else {
4201     HBasicBlock* cond_true = graph()->CreateBasicBlock();
4202     HBasicBlock* cond_false = graph()->CreateBasicBlock();
4203     CHECK_BAILOUT(VisitForControl(stmt->condition(), cond_true, cond_false));
4204
4205     if (cond_true->HasPredecessor()) {
4206       cond_true->SetJoinId(stmt->ThenId());
4207       set_current_block(cond_true);
4208       CHECK_BAILOUT(Visit(stmt->then_statement()));
4209       cond_true = current_block();
4210     } else {
4211       cond_true = NULL;
4212     }
4213
4214     if (cond_false->HasPredecessor()) {
4215       cond_false->SetJoinId(stmt->ElseId());
4216       set_current_block(cond_false);
4217       CHECK_BAILOUT(Visit(stmt->else_statement()));
4218       cond_false = current_block();
4219     } else {
4220       cond_false = NULL;
4221     }
4222
4223     HBasicBlock* join = CreateJoin(cond_true, cond_false, stmt->IfId());
4224     set_current_block(join);
4225   }
4226 }
4227
4228
4229 HBasicBlock* HOptimizedGraphBuilder::BreakAndContinueScope::Get(
4230     BreakableStatement* stmt,
4231     BreakType type,
4232     int* drop_extra) {
4233   *drop_extra = 0;
4234   BreakAndContinueScope* current = this;
4235   while (current != NULL && current->info()->target() != stmt) {
4236     *drop_extra += current->info()->drop_extra();
4237     current = current->next();
4238   }
4239   ASSERT(current != NULL);  // Always found (unless stack is malformed).
4240
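  // A break leaves the target statement, so also drop whatever values the
  // target itself keeps on the operand stack; a continue stays within the
  // statement, so those values remain live.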
4241   if (type == BREAK) {
4242     *drop_extra += current->info()->drop_extra();
4243   }
4244
4245   HBasicBlock* block = NULL;
4246   switch (type) {
4247     case BREAK:
4248       block = current->info()->break_block();
4249       if (block == NULL) {
4250         block = current->owner()->graph()->CreateBasicBlock();
4251         current->info()->set_break_block(block);
4252       }
4253       break;
4254
4255     case CONTINUE:
4256       block = current->info()->continue_block();
4257       if (block == NULL) {
4258         block = current->owner()->graph()->CreateBasicBlock();
4259         current->info()->set_continue_block(block);
4260       }
4261       break;
4262   }
4263
4264   return block;
4265 }
4266
4267
4268 void HOptimizedGraphBuilder::VisitContinueStatement(
4269     ContinueStatement* stmt) {
4270   ASSERT(!HasStackOverflow());
4271   ASSERT(current_block() != NULL);
4272   ASSERT(current_block()->HasPredecessor());
4273   int drop_extra = 0;
4274   HBasicBlock* continue_block = break_scope()->Get(
4275       stmt->target(), BreakAndContinueScope::CONTINUE, &drop_extra);
4276   Drop(drop_extra);
4277   Goto(continue_block);
4278   set_current_block(NULL);
4279 }
4280
4281
4282 void HOptimizedGraphBuilder::VisitBreakStatement(BreakStatement* stmt) {
4283   ASSERT(!HasStackOverflow());
4284   ASSERT(current_block() != NULL);
4285   ASSERT(current_block()->HasPredecessor());
4286   int drop_extra = 0;
4287   HBasicBlock* break_block = break_scope()->Get(
4288       stmt->target(), BreakAndContinueScope::BREAK, &drop_extra);
4289   Drop(drop_extra);
4290   Goto(break_block);
4291   set_current_block(NULL);
4292 }
4293
4294
4295 void HOptimizedGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
4296   ASSERT(!HasStackOverflow());
4297   ASSERT(current_block() != NULL);
4298   ASSERT(current_block()->HasPredecessor());
4299   FunctionState* state = function_state();
4300   AstContext* context = call_context();
4301   if (context == NULL) {
4302     // Not an inlined return, so this is an actual return.
4303     CHECK_ALIVE(VisitForValue(stmt->expression()));
4304     HValue* result = environment()->Pop();
4305     Add<HReturn>(result);
4306   } else if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
4307     // Return from an inlined construct call. In a test context the return value
4308     // will always evaluate to true; in a value context the return value needs
4309     // to be a JSObject.
4310     if (context->IsTest()) {
4311       TestContext* test = TestContext::cast(context);
4312       CHECK_ALIVE(VisitForEffect(stmt->expression()));
4313       Goto(test->if_true(), state);
4314     } else if (context->IsEffect()) {
4315       CHECK_ALIVE(VisitForEffect(stmt->expression()));
4316       Goto(function_return(), state);
4317     } else {
4318       ASSERT(context->IsValue());
4319       CHECK_ALIVE(VisitForValue(stmt->expression()));
4320       HValue* return_value = Pop();
4321       HValue* receiver = environment()->arguments_environment()->Lookup(0);
4322       HHasInstanceTypeAndBranch* typecheck =
4323           New<HHasInstanceTypeAndBranch>(return_value,
4324                                          FIRST_SPEC_OBJECT_TYPE,
4325                                          LAST_SPEC_OBJECT_TYPE);
4326       HBasicBlock* if_spec_object = graph()->CreateBasicBlock();
4327       HBasicBlock* not_spec_object = graph()->CreateBasicBlock();
4328       typecheck->SetSuccessorAt(0, if_spec_object);
4329       typecheck->SetSuccessorAt(1, not_spec_object);
4330       FinishCurrentBlock(typecheck);
4331       AddLeaveInlined(if_spec_object, return_value, state);
4332       AddLeaveInlined(not_spec_object, receiver, state);
4333     }
4334   } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
4335     // Return from an inlined setter call. The returned value is never used; the
4336     // value of an assignment is always the value of its RHS.
4337     CHECK_ALIVE(VisitForEffect(stmt->expression()));
4338     if (context->IsTest()) {
4339       HValue* rhs = environment()->arguments_environment()->Lookup(1);
4340       context->ReturnValue(rhs);
4341     } else if (context->IsEffect()) {
4342       Goto(function_return(), state);
4343     } else {
4344       ASSERT(context->IsValue());
4345       HValue* rhs = environment()->arguments_environment()->Lookup(1);
4346       AddLeaveInlined(rhs, state);
4347     }
4348   } else {
4349     // Return from a normal inlined function. Visit the subexpression in the
4350     // expression context of the call.
4351     if (context->IsTest()) {
4352       TestContext* test = TestContext::cast(context);
4353       VisitForControl(stmt->expression(), test->if_true(), test->if_false());
4354     } else if (context->IsEffect()) {
4355       // Visit in value context and ignore the result. This is needed to keep
4356       // the environment in sync with full-codegen since some visitors (e.g.
4357       // VisitCountOperation) use the operand stack differently depending on
4358       // context.
4359       CHECK_ALIVE(VisitForValue(stmt->expression()));
4360       Pop();
4361       Goto(function_return(), state);
4362     } else {
4363       ASSERT(context->IsValue());
4364       CHECK_ALIVE(VisitForValue(stmt->expression()));
4365       AddLeaveInlined(Pop(), state);
4366     }
4367   }
4368   set_current_block(NULL);
4369 }
4370
4371
4372 void HOptimizedGraphBuilder::VisitWithStatement(WithStatement* stmt) {
4373   ASSERT(!HasStackOverflow());
4374   ASSERT(current_block() != NULL);
4375   ASSERT(current_block()->HasPredecessor());
4376   return Bailout(kWithStatement);
4377 }
4378
4379
4380 void HOptimizedGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
4381   ASSERT(!HasStackOverflow());
4382   ASSERT(current_block() != NULL);
4383   ASSERT(current_block()->HasPredecessor());
4384
4385   // We only optimize switch statements with a bounded number of clauses.
4386   const int kCaseClauseLimit = 128;
4387   ZoneList<CaseClause*>* clauses = stmt->cases();
4388   int clause_count = clauses->length();
4389   ZoneList<HBasicBlock*> body_blocks(clause_count, zone());
4390   if (clause_count > kCaseClauseLimit) {
4391     return Bailout(kSwitchStatementTooManyClauses);
4392   }
4393
4394   CHECK_ALIVE(VisitForValue(stmt->tag()));
4395   Add<HSimulate>(stmt->EntryId());
4396   HValue* tag_value = Top();
4397   Type* tag_type = stmt->tag()->bounds().lower;
4398
4399   // 1. Build all the tests, with dangling true branches
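  // As an illustrative sketch, a statement like
  //   switch (tag) { case a: A; case b: B; default: D; }
  // is lowered here into a chain of strict-equality compares against 'tag'
  // whose true branches are left dangling; step 2 below attaches the bodies
  // A, B and D to those branches and wires up fall-through between them.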
4400   BailoutId default_id = BailoutId::None();
4401   for (int i = 0; i < clause_count; ++i) {
4402     CaseClause* clause = clauses->at(i);
4403     if (clause->is_default()) {
4404       body_blocks.Add(NULL, zone());
4405       if (default_id.IsNone()) default_id = clause->EntryId();
4406       continue;
4407     }
4408
4409     // Generate a compare and branch.
4410     CHECK_ALIVE(VisitForValue(clause->label()));
4411     HValue* label_value = Pop();
4412
4413     Type* label_type = clause->label()->bounds().lower;
4414     Type* combined_type = clause->compare_type();
4415     HControlInstruction* compare = BuildCompareInstruction(
4416         Token::EQ_STRICT, tag_value, label_value, tag_type, label_type,
4417         combined_type,
4418         ScriptPositionToSourcePosition(stmt->tag()->position()),
4419         ScriptPositionToSourcePosition(clause->label()->position()),
4420         PUSH_BEFORE_SIMULATE, clause->id());
4421
4422     HBasicBlock* next_test_block = graph()->CreateBasicBlock();
4423     HBasicBlock* body_block = graph()->CreateBasicBlock();
4424     body_blocks.Add(body_block, zone());
4425     compare->SetSuccessorAt(0, body_block);
4426     compare->SetSuccessorAt(1, next_test_block);
4427     FinishCurrentBlock(compare);
4428
4429     set_current_block(body_block);
4430     Drop(1);  // tag_value
4431
4432     set_current_block(next_test_block);
4433   }
4434
4435   // Save the current block to use for the default or to join with the
4436   // exit.
4437   HBasicBlock* last_block = current_block();
4438   Drop(1);  // tag_value
4439
4440   // 2. Loop over the clauses and the linked list of tests in lockstep,
4441   // translating the clause bodies.
4442   HBasicBlock* fall_through_block = NULL;
4443
4444   BreakAndContinueInfo break_info(stmt);
4445   { BreakAndContinueScope push(&break_info, this);
4446     for (int i = 0; i < clause_count; ++i) {
4447       CaseClause* clause = clauses->at(i);
4448
4449       // Identify the block to which normal (non-fall-through) control flow
4450       // goes.
4451       HBasicBlock* normal_block = NULL;
4452       if (clause->is_default()) {
4453         if (last_block == NULL) continue;
4454         normal_block = last_block;
4455         last_block = NULL;  // Cleared to indicate we've handled it.
4456       } else {
4457         normal_block = body_blocks[i];
4458       }
4459
4460       if (fall_through_block == NULL) {
4461         set_current_block(normal_block);
4462       } else {
4463         HBasicBlock* join = CreateJoin(fall_through_block,
4464                                        normal_block,
4465                                        clause->EntryId());
4466         set_current_block(join);
4467       }
4468
4469       CHECK_BAILOUT(VisitStatements(clause->statements()));
4470       fall_through_block = current_block();
4471     }
4472   }
4473
4474   // Create an up-to-3-way join.  Use the break block if it exists since
4475   // it's already a join block.
4476   HBasicBlock* break_block = break_info.break_block();
4477   if (break_block == NULL) {
4478     set_current_block(CreateJoin(fall_through_block,
4479                                  last_block,
4480                                  stmt->ExitId()));
4481   } else {
4482     if (fall_through_block != NULL) Goto(fall_through_block, break_block);
4483     if (last_block != NULL) Goto(last_block, break_block);
4484     break_block->SetJoinId(stmt->ExitId());
4485     set_current_block(break_block);
4486   }
4487 }
4488
4489
4490 void HOptimizedGraphBuilder::VisitLoopBody(IterationStatement* stmt,
4491                                            HBasicBlock* loop_entry,
4492                                            BreakAndContinueInfo* break_info) {
4493   BreakAndContinueScope push(break_info, this);
4494   Add<HSimulate>(stmt->StackCheckId());
4495   HStackCheck* stack_check =
4496       HStackCheck::cast(Add<HStackCheck>(HStackCheck::kBackwardsBranch));
4497   ASSERT(loop_entry->IsLoopHeader());
4498   loop_entry->loop_information()->set_stack_check(stack_check);
4499   CHECK_BAILOUT(Visit(stmt->body()));
4500 }
4501
4502
4503 void HOptimizedGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
4504   ASSERT(!HasStackOverflow());
4505   ASSERT(current_block() != NULL);
4506   ASSERT(current_block()->HasPredecessor());
4508   HBasicBlock* loop_entry = BuildLoopEntry(stmt);
4509
4510   BreakAndContinueInfo break_info(stmt);
4511   CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
4512   HBasicBlock* body_exit =
4513       JoinContinue(stmt, current_block(), break_info.continue_block());
4514   HBasicBlock* loop_successor = NULL;
4515   if (body_exit != NULL && !stmt->cond()->ToBooleanIsTrue()) {
4516     set_current_block(body_exit);
4517     loop_successor = graph()->CreateBasicBlock();
4518     if (stmt->cond()->ToBooleanIsFalse()) {
4519       Goto(loop_successor);
4520       body_exit = NULL;
4521     } else {
4522       // The block for a true condition; it is the actual predecessor block of
4523       // the back edge.
4524       body_exit = graph()->CreateBasicBlock();
4525       CHECK_BAILOUT(VisitForControl(stmt->cond(), body_exit, loop_successor));
4526     }
4527     if (body_exit != NULL && body_exit->HasPredecessor()) {
4528       body_exit->SetJoinId(stmt->BackEdgeId());
4529     } else {
4530       body_exit = NULL;
4531     }
4532     if (loop_successor->HasPredecessor()) {
4533       loop_successor->SetJoinId(stmt->ExitId());
4534     } else {
4535       loop_successor = NULL;
4536     }
4537   }
4538   HBasicBlock* loop_exit = CreateLoop(stmt,
4539                                       loop_entry,
4540                                       body_exit,
4541                                       loop_successor,
4542                                       break_info.break_block());
4543   set_current_block(loop_exit);
4544 }
4545
4546
4547 void HOptimizedGraphBuilder::VisitWhileStatement(WhileStatement* stmt) {
4548   ASSERT(!HasStackOverflow());
4549   ASSERT(current_block() != NULL);
4550   ASSERT(current_block()->HasPredecessor());
4552   HBasicBlock* loop_entry = BuildLoopEntry(stmt);
4553
4554   // If the condition is constant true, do not generate a branch.
4555   HBasicBlock* loop_successor = NULL;
4556   if (!stmt->cond()->ToBooleanIsTrue()) {
4557     HBasicBlock* body_entry = graph()->CreateBasicBlock();
4558     loop_successor = graph()->CreateBasicBlock();
4559     CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
4560     if (body_entry->HasPredecessor()) {
4561       body_entry->SetJoinId(stmt->BodyId());
4562       set_current_block(body_entry);
4563     }
4564     if (loop_successor->HasPredecessor()) {
4565       loop_successor->SetJoinId(stmt->ExitId());
4566     } else {
4567       loop_successor = NULL;
4568     }
4569   }
4570
4571   BreakAndContinueInfo break_info(stmt);
4572   if (current_block() != NULL) {
4573     CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
4574   }
4575   HBasicBlock* body_exit =
4576       JoinContinue(stmt, current_block(), break_info.continue_block());
4577   HBasicBlock* loop_exit = CreateLoop(stmt,
4578                                       loop_entry,
4579                                       body_exit,
4580                                       loop_successor,
4581                                       break_info.break_block());
4582   set_current_block(loop_exit);
4583 }
4584
4585
4586 void HOptimizedGraphBuilder::VisitForStatement(ForStatement* stmt) {
4587   ASSERT(!HasStackOverflow());
4588   ASSERT(current_block() != NULL);
4589   ASSERT(current_block()->HasPredecessor());
4590   if (stmt->init() != NULL) {
4591     CHECK_ALIVE(Visit(stmt->init()));
4592   }
4593   ASSERT(current_block() != NULL);
4594   HBasicBlock* loop_entry = BuildLoopEntry(stmt);
4595
4596   HBasicBlock* loop_successor = NULL;
4597   if (stmt->cond() != NULL) {
4598     HBasicBlock* body_entry = graph()->CreateBasicBlock();
4599     loop_successor = graph()->CreateBasicBlock();
4600     CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
4601     if (body_entry->HasPredecessor()) {
4602       body_entry->SetJoinId(stmt->BodyId());
4603       set_current_block(body_entry);
4604     }
4605     if (loop_successor->HasPredecessor()) {
4606       loop_successor->SetJoinId(stmt->ExitId());
4607     } else {
4608       loop_successor = NULL;
4609     }
4610   }
4611
4612   BreakAndContinueInfo break_info(stmt);
4613   if (current_block() != NULL) {
4614     CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
4615   }
4616   HBasicBlock* body_exit =
4617       JoinContinue(stmt, current_block(), break_info.continue_block());
4618
4619   if (stmt->next() != NULL && body_exit != NULL) {
4620     set_current_block(body_exit);
4621     CHECK_BAILOUT(Visit(stmt->next()));
4622     body_exit = current_block();
4623   }
4624
4625   HBasicBlock* loop_exit = CreateLoop(stmt,
4626                                       loop_entry,
4627                                       body_exit,
4628                                       loop_successor,
4629                                       break_info.break_block());
4630   set_current_block(loop_exit);
4631 }
4632
4633
4634 void HOptimizedGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
4635   ASSERT(!HasStackOverflow());
4636   ASSERT(current_block() != NULL);
4637   ASSERT(current_block()->HasPredecessor());
4638
4639   if (!FLAG_optimize_for_in) {
4640     return Bailout(kForInStatementOptimizationIsDisabled);
4641   }
4642
4643   if (stmt->for_in_type() != ForInStatement::FAST_FOR_IN) {
4644     return Bailout(kForInStatementIsNotFastCase);
4645   }
4646
4647   if (!stmt->each()->IsVariableProxy() ||
4648       !stmt->each()->AsVariableProxy()->var()->IsStackLocal()) {
4649     return Bailout(kForInStatementWithNonLocalEachVariable);
4650   }
4651
4652   Variable* each_var = stmt->each()->AsVariableProxy()->var();
4653
4654   CHECK_ALIVE(VisitForValue(stmt->enumerable()));
4655   HValue* enumerable = Top();  // Leave enumerable at the top.
4656
4657   HInstruction* map = Add<HForInPrepareMap>(enumerable);
4658   Add<HSimulate>(stmt->PrepareId());
4659
4660   HInstruction* array = Add<HForInCacheArray>(
4661       enumerable, map, DescriptorArray::kEnumCacheBridgeCacheIndex);
4662
4663   HInstruction* enum_length = Add<HMapEnumLength>(map);
4664
4665   HInstruction* start_index = Add<HConstant>(0);
4666
4667   Push(map);
4668   Push(array);
4669   Push(enum_length);
4670   Push(start_index);
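  // From this point on the expression stack holds, from the top (slot 0) down:
  //   0: iteration index, 1: enum length (the loop limit), 2: enum cache array,
  //   3: map, 4: the enumerable object; the loads below index into these slots.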
4671
4672   HInstruction* index_cache = Add<HForInCacheArray>(
4673       enumerable, map, DescriptorArray::kEnumCacheBridgeIndicesCacheIndex);
4674   HForInCacheArray::cast(array)->set_index_cache(
4675       HForInCacheArray::cast(index_cache));
4676
4677   HBasicBlock* loop_entry = BuildLoopEntry(stmt);
4678
4679   HValue* index = environment()->ExpressionStackAt(0);
4680   HValue* limit = environment()->ExpressionStackAt(1);
4681
4682   // Check that we still have more keys.
4683   HCompareNumericAndBranch* compare_index =
4684       New<HCompareNumericAndBranch>(index, limit, Token::LT);
4685   compare_index->set_observed_input_representation(
4686       Representation::Smi(), Representation::Smi());
4687
4688   HBasicBlock* loop_body = graph()->CreateBasicBlock();
4689   HBasicBlock* loop_successor = graph()->CreateBasicBlock();
4690
4691   compare_index->SetSuccessorAt(0, loop_body);
4692   compare_index->SetSuccessorAt(1, loop_successor);
4693   FinishCurrentBlock(compare_index);
4694
4695   set_current_block(loop_successor);
4696   Drop(5);
4697
4698   set_current_block(loop_body);
4699
4700   HValue* key = Add<HLoadKeyed>(
4701       environment()->ExpressionStackAt(2),  // Enum cache.
4702       environment()->ExpressionStackAt(0),  // Iteration index.
4703       environment()->ExpressionStackAt(0),  // Dependency (the index).
4704       FAST_ELEMENTS);
4705
4706   // Check if the expected map still matches that of the enumerable.
4707   // If not, just deoptimize.
4708   Add<HCheckMapValue>(environment()->ExpressionStackAt(4),
4709                       environment()->ExpressionStackAt(3));
4710
4711   Bind(each_var, key);
4712
4713   BreakAndContinueInfo break_info(stmt, 5);
4714   CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
4715
4716   HBasicBlock* body_exit =
4717       JoinContinue(stmt, current_block(), break_info.continue_block());
4718
4719   if (body_exit != NULL) {
4720     set_current_block(body_exit);
4721
4722     HValue* current_index = Pop();
4723     Push(AddUncasted<HAdd>(current_index, graph()->GetConstant1()));
4724     body_exit = current_block();
4725   }
4726
4727   HBasicBlock* loop_exit = CreateLoop(stmt,
4728                                       loop_entry,
4729                                       body_exit,
4730                                       loop_successor,
4731                                       break_info.break_block());
4732
4733   set_current_block(loop_exit);
4734 }
4735
4736
4737 void HOptimizedGraphBuilder::VisitForOfStatement(ForOfStatement* stmt) {
4738   ASSERT(!HasStackOverflow());
4739   ASSERT(current_block() != NULL);
4740   ASSERT(current_block()->HasPredecessor());
4741   return Bailout(kForOfStatement);
4742 }
4743
4744
4745 void HOptimizedGraphBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
4746   ASSERT(!HasStackOverflow());
4747   ASSERT(current_block() != NULL);
4748   ASSERT(current_block()->HasPredecessor());
4749   return Bailout(kTryCatchStatement);
4750 }
4751
4752
4753 void HOptimizedGraphBuilder::VisitTryFinallyStatement(
4754     TryFinallyStatement* stmt) {
4755   ASSERT(!HasStackOverflow());
4756   ASSERT(current_block() != NULL);
4757   ASSERT(current_block()->HasPredecessor());
4758   return Bailout(kTryFinallyStatement);
4759 }
4760
4761
4762 void HOptimizedGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
4763   ASSERT(!HasStackOverflow());
4764   ASSERT(current_block() != NULL);
4765   ASSERT(current_block()->HasPredecessor());
4766   return Bailout(kDebuggerStatement);
4767 }
4768
4769
4770 void HOptimizedGraphBuilder::VisitCaseClause(CaseClause* clause) {
4771   UNREACHABLE();
4772 }
4773
4774
4775 void HOptimizedGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
4776   ASSERT(!HasStackOverflow());
4777   ASSERT(current_block() != NULL);
4778   ASSERT(current_block()->HasPredecessor());
4779   Handle<SharedFunctionInfo> shared_info = expr->shared_info();
4780   if (shared_info.is_null()) {
4781     shared_info = Compiler::BuildFunctionInfo(expr, current_info()->script());
4782   }
4783   // We also have a stack overflow if the recursive compilation did.
4784   if (HasStackOverflow()) return;
4785   HFunctionLiteral* instr =
4786       New<HFunctionLiteral>(shared_info, expr->pretenure());
4787   return ast_context()->ReturnInstruction(instr, expr->id());
4788 }
4789
4790
4791 void HOptimizedGraphBuilder::VisitNativeFunctionLiteral(
4792     NativeFunctionLiteral* expr) {
4793   ASSERT(!HasStackOverflow());
4794   ASSERT(current_block() != NULL);
4795   ASSERT(current_block()->HasPredecessor());
4796   return Bailout(kNativeFunctionLiteral);
4797 }
4798
4799
4800 void HOptimizedGraphBuilder::VisitConditional(Conditional* expr) {
4801   ASSERT(!HasStackOverflow());
4802   ASSERT(current_block() != NULL);
4803   ASSERT(current_block()->HasPredecessor());
4804   HBasicBlock* cond_true = graph()->CreateBasicBlock();
4805   HBasicBlock* cond_false = graph()->CreateBasicBlock();
4806   CHECK_BAILOUT(VisitForControl(expr->condition(), cond_true, cond_false));
4807
4808   // Visit the true and false subexpressions in the same AST context as the
4809   // whole expression.
4810   if (cond_true->HasPredecessor()) {
4811     cond_true->SetJoinId(expr->ThenId());
4812     set_current_block(cond_true);
4813     CHECK_BAILOUT(Visit(expr->then_expression()));
4814     cond_true = current_block();
4815   } else {
4816     cond_true = NULL;
4817   }
4818
4819   if (cond_false->HasPredecessor()) {
4820     cond_false->SetJoinId(expr->ElseId());
4821     set_current_block(cond_false);
4822     CHECK_BAILOUT(Visit(expr->else_expression()));
4823     cond_false = current_block();
4824   } else {
4825     cond_false = NULL;
4826   }
4827
4828   if (!ast_context()->IsTest()) {
4829     HBasicBlock* join = CreateJoin(cond_true, cond_false, expr->id());
4830     set_current_block(join);
4831     if (join != NULL && !ast_context()->IsEffect()) {
4832       return ast_context()->ReturnValue(Pop());
4833     }
4834   }
4835 }
4836
4837
4838 HOptimizedGraphBuilder::GlobalPropertyAccess
4839     HOptimizedGraphBuilder::LookupGlobalProperty(
4840         Variable* var, LookupResult* lookup, PropertyAccessType access_type) {
4841   if (var->is_this() || !current_info()->has_global_object()) {
4842     return kUseGeneric;
4843   }
4844   Handle<GlobalObject> global(current_info()->global_object());
4845   global->Lookup(var->name(), lookup);
4846   if (!lookup->IsNormal() ||
4847       (access_type == STORE && lookup->IsReadOnly()) ||
4848       lookup->holder() != *global) {
4849     return kUseGeneric;
4850   }
4851
4852   return kUseCell;
4853 }
4854
4855
4856 HValue* HOptimizedGraphBuilder::BuildContextChainWalk(Variable* var) {
4857   ASSERT(var->IsContextSlot());
4858   HValue* context = environment()->context();
4859   int length = current_info()->scope()->ContextChainLength(var->scope());
4860   while (length-- > 0) {
4861     context = Add<HLoadNamedField>(
4862         context, static_cast<HValue*>(NULL),
4863         HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
4864   }
4865   return context;
4866 }
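// Illustratively: a variable captured N context hops away from the current
// scope yields N successive loads of the PREVIOUS_INDEX slot here before the
// caller emits the final HLoadContextSlot.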
4867
4868
4869 void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
4870   if (expr->is_this()) {
4871     current_info()->set_this_has_uses(true);
4872   }
4873
4874   ASSERT(!HasStackOverflow());
4875   ASSERT(current_block() != NULL);
4876   ASSERT(current_block()->HasPredecessor());
4877   Variable* variable = expr->var();
4878   switch (variable->location()) {
4879     case Variable::UNALLOCATED: {
4880       if (IsLexicalVariableMode(variable->mode())) {
4881         // TODO(rossberg): should this be an ASSERT?
4882         return Bailout(kReferenceToGlobalLexicalVariable);
4883       }
4884       // Handle known global constants like 'undefined' specially to avoid a
4885       // load from a global cell for them.
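      // (These are names such as 'undefined', 'NaN' or 'Infinity'; exactly which
      // names qualify is assumed to be whatever GlobalConstantFor recognizes.)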
4886       Handle<Object> constant_value =
4887           isolate()->factory()->GlobalConstantFor(variable->name());
4888       if (!constant_value.is_null()) {
4889         HConstant* instr = New<HConstant>(constant_value);
4890         return ast_context()->ReturnInstruction(instr, expr->id());
4891       }
4892
4893       LookupResult lookup(isolate());
4894       GlobalPropertyAccess type = LookupGlobalProperty(variable, &lookup, LOAD);
4895
4896       if (type == kUseCell &&
4897           current_info()->global_object()->IsAccessCheckNeeded()) {
4898         type = kUseGeneric;
4899       }
4900
4901       if (type == kUseCell) {
4902         Handle<GlobalObject> global(current_info()->global_object());
4903         Handle<PropertyCell> cell(global->GetPropertyCell(&lookup));
4904         if (cell->type()->IsConstant()) {
4905           PropertyCell::AddDependentCompilationInfo(cell, top_info());
4906           Handle<Object> constant_object = cell->type()->AsConstant()->Value();
4907           if (constant_object->IsConsString()) {
4908             constant_object =
4909                 String::Flatten(Handle<String>::cast(constant_object));
4910           }
4911           HConstant* constant = New<HConstant>(constant_object);
4912           return ast_context()->ReturnInstruction(constant, expr->id());
4913         } else {
4914           HLoadGlobalCell* instr =
4915               New<HLoadGlobalCell>(cell, lookup.GetPropertyDetails());
4916           return ast_context()->ReturnInstruction(instr, expr->id());
4917         }
4918       } else {
4919         HValue* global_object = Add<HLoadNamedField>(
4920             context(), static_cast<HValue*>(NULL),
4921             HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
4922         HLoadGlobalGeneric* instr =
4923             New<HLoadGlobalGeneric>(global_object,
4924                                     variable->name(),
4925                                     ast_context()->is_for_typeof());
4926         return ast_context()->ReturnInstruction(instr, expr->id());
4927       }
4928     }
4929
4930     case Variable::PARAMETER:
4931     case Variable::LOCAL: {
4932       HValue* value = LookupAndMakeLive(variable);
4933       if (value == graph()->GetConstantHole()) {
4934         ASSERT(IsDeclaredVariableMode(variable->mode()) &&
4935                variable->mode() != VAR);
4936         return Bailout(kReferenceToUninitializedVariable);
4937       }
4938       return ast_context()->ReturnValue(value);
4939     }
4940
4941     case Variable::CONTEXT: {
4942       HValue* context = BuildContextChainWalk(variable);
4943       HLoadContextSlot* instr = new(zone()) HLoadContextSlot(context, variable);
4944       return ast_context()->ReturnInstruction(instr, expr->id());
4945     }
4946
4947     case Variable::LOOKUP:
4948       return Bailout(kReferenceToAVariableWhichRequiresDynamicLookup);
4949   }
4950 }
4951
4952
4953 void HOptimizedGraphBuilder::VisitLiteral(Literal* expr) {
4954   ASSERT(!HasStackOverflow());
4955   ASSERT(current_block() != NULL);
4956   ASSERT(current_block()->HasPredecessor());
4957   HConstant* instr = New<HConstant>(expr->value());
4958   return ast_context()->ReturnInstruction(instr, expr->id());
4959 }
4960
4961
4962 void HOptimizedGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
4963   ASSERT(!HasStackOverflow());
4964   ASSERT(current_block() != NULL);
4965   ASSERT(current_block()->HasPredecessor());
4966   Handle<JSFunction> closure = function_state()->compilation_info()->closure();
4967   Handle<FixedArray> literals(closure->literals());
4968   HRegExpLiteral* instr = New<HRegExpLiteral>(literals,
4969                                               expr->pattern(),
4970                                               expr->flags(),
4971                                               expr->literal_index());
4972   return ast_context()->ReturnInstruction(instr, expr->id());
4973 }
4974
4975
4976 static bool CanInlinePropertyAccess(Type* type) {
4977   if (type->Is(Type::NumberOrString())) return true;
4978   if (!type->IsClass()) return false;
4979   Handle<Map> map = type->AsClass()->Map();
4980   return map->IsJSObjectMap() &&
4981       !map->is_dictionary_map() &&
4982       !map->has_named_interceptor();
4983 }
4984
4985
4986 // Determines whether the given array or object literal boilerplate satisfies
4987 // all depth and property-count limits (elements included) required for it to
4988 // be considered for fast deep-copying.
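// As a rough illustration, a literal like {a: 1, b: {c: 2}} can qualify, while
// a literal that nests deeper than max_depth, exceeds *max_properties in total,
// or whose boilerplate has out-of-object properties or non-fast elements falls
// back to the slower runtime path.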
4989 static bool IsFastLiteral(Handle<JSObject> boilerplate,
4990                           int max_depth,
4991                           int* max_properties) {
4992   if (boilerplate->map()->is_deprecated() &&
4993       !JSObject::TryMigrateInstance(boilerplate)) {
4994     return false;
4995   }
4996
4997   ASSERT(max_depth >= 0 && *max_properties >= 0);
4998   if (max_depth == 0) return false;
4999
5000   Isolate* isolate = boilerplate->GetIsolate();
5001   Handle<FixedArrayBase> elements(boilerplate->elements());
5002   if (elements->length() > 0 &&
5003       elements->map() != isolate->heap()->fixed_cow_array_map()) {
5004     if (boilerplate->HasFastObjectElements()) {
5005       Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
5006       int length = elements->length();
5007       for (int i = 0; i < length; i++) {
5008         if ((*max_properties)-- == 0) return false;
5009         Handle<Object> value(fast_elements->get(i), isolate);
5010         if (value->IsJSObject()) {
5011           Handle<JSObject> value_object = Handle<JSObject>::cast(value);
5012           if (!IsFastLiteral(value_object,
5013                              max_depth - 1,
5014                              max_properties)) {
5015             return false;
5016           }
5017         }
5018       }
5019     } else if (!boilerplate->HasFastDoubleElements()) {
5020       return false;
5021     }
5022   }
5023
5024   Handle<FixedArray> properties(boilerplate->properties());
5025   if (properties->length() > 0) {
5026     return false;
5027   } else {
5028     Handle<DescriptorArray> descriptors(
5029         boilerplate->map()->instance_descriptors());
5030     int limit = boilerplate->map()->NumberOfOwnDescriptors();
5031     for (int i = 0; i < limit; i++) {
5032       PropertyDetails details = descriptors->GetDetails(i);
5033       if (details.type() != FIELD) continue;
5034       int index = descriptors->GetFieldIndex(i);
5035       if ((*max_properties)-- == 0) return false;
5036       Handle<Object> value(boilerplate->InObjectPropertyAt(index), isolate);
5037       if (value->IsJSObject()) {
5038         Handle<JSObject> value_object = Handle<JSObject>::cast(value);
5039         if (!IsFastLiteral(value_object,
5040                            max_depth - 1,
5041                            max_properties)) {
5042           return false;
5043         }
5044       }
5045     }
5046   }
5047   return true;
5048 }
5049
5050
5051 void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
5052   ASSERT(!HasStackOverflow());
5053   ASSERT(current_block() != NULL);
5054   ASSERT(current_block()->HasPredecessor());
5055   expr->BuildConstantProperties(isolate());
5056   Handle<JSFunction> closure = function_state()->compilation_info()->closure();
5057   HInstruction* literal;
5058
5059   // Check whether to use fast or slow deep-copying for boilerplate.
5060   int max_properties = kMaxFastLiteralProperties;
5061   Handle<Object> literals_cell(closure->literals()->get(expr->literal_index()),
5062                                isolate());
5063   Handle<AllocationSite> site;
5064   Handle<JSObject> boilerplate;
5065   if (!literals_cell->IsUndefined()) {
5066     // Retrieve the boilerplate
5067     site = Handle<AllocationSite>::cast(literals_cell);
5068     boilerplate = Handle<JSObject>(JSObject::cast(site->transition_info()),
5069                                    isolate());
5070   }
5071
5072   if (!boilerplate.is_null() &&
5073       IsFastLiteral(boilerplate, kMaxFastLiteralDepth, &max_properties)) {
5074     AllocationSiteUsageContext usage_context(isolate(), site, false);
5075     usage_context.EnterNewScope();
5076     literal = BuildFastLiteral(boilerplate, &usage_context);
5077     usage_context.ExitScope(site, boilerplate);
5078   } else {
5079     NoObservableSideEffectsScope no_effects(this);
5080     Handle<FixedArray> closure_literals(closure->literals(), isolate());
5081     Handle<FixedArray> constant_properties = expr->constant_properties();
5082     int literal_index = expr->literal_index();
5083     int flags = expr->fast_elements()
5084         ? ObjectLiteral::kFastElements : ObjectLiteral::kNoFlags;
5085     flags |= expr->has_function()
5086         ? ObjectLiteral::kHasFunction : ObjectLiteral::kNoFlags;
5087
5088     Add<HPushArgument>(Add<HConstant>(closure_literals));
5089     Add<HPushArgument>(Add<HConstant>(literal_index));
5090     Add<HPushArgument>(Add<HConstant>(constant_properties));
5091     Add<HPushArgument>(Add<HConstant>(flags));
5092
5093     // TODO(mvstanton): Add a flag to turn off creation of any
5094     // AllocationMementos for this call: we are in crankshaft and should have
5095     // learned enough about transition behavior to stop emitting mementos.
5096     Runtime::FunctionId function_id = Runtime::kHiddenCreateObjectLiteral;
5097     literal = Add<HCallRuntime>(isolate()->factory()->empty_string(),
5098                                 Runtime::FunctionForId(function_id),
5099                                 4);
5100   }
5101
5102   // The object is expected in the bailout environment during computation
5103   // of the property values and is the value of the entire expression.
5104   Push(literal);
5105
5106   expr->CalculateEmitStore(zone());
5107
5108   for (int i = 0; i < expr->properties()->length(); i++) {
5109     ObjectLiteral::Property* property = expr->properties()->at(i);
5110     if (property->IsCompileTimeValue()) continue;
5111
5112     Literal* key = property->key();
5113     Expression* value = property->value();
5114
5115     switch (property->kind()) {
5116       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
5117         ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
5118         // Fall through.
5119       case ObjectLiteral::Property::COMPUTED:
5120         if (key->value()->IsInternalizedString()) {
5121           if (property->emit_store()) {
5122             CHECK_ALIVE(VisitForValue(value));
5123             HValue* value = Pop();
5124             Handle<Map> map = property->GetReceiverType();
5125             Handle<String> name = property->key()->AsPropertyName();
5126             HInstruction* store;
5127             if (map.is_null()) {
5128               // If we don't know the monomorphic type, do a generic store.
5129               CHECK_ALIVE(store = BuildNamedGeneric(
5130                   STORE, literal, name, value));
5131             } else {
5132               PropertyAccessInfo info(this, STORE, ToType(map), name);
5133               if (info.CanAccessMonomorphic()) {
5134                 HValue* checked_literal = Add<HCheckMaps>(literal, map);
5135                 ASSERT(!info.lookup()->IsPropertyCallbacks());
5136                 store = BuildMonomorphicAccess(
5137                     &info, literal, checked_literal, value,
5138                     BailoutId::None(), BailoutId::None());
5139               } else {
5140                 CHECK_ALIVE(store = BuildNamedGeneric(
5141                     STORE, literal, name, value));
5142               }
5143             }
5144             AddInstruction(store);
5145             if (store->HasObservableSideEffects()) {
5146               Add<HSimulate>(key->id(), REMOVABLE_SIMULATE);
5147             }
5148           } else {
5149             CHECK_ALIVE(VisitForEffect(value));
5150           }
5151           break;
5152         }
5153         // Fall through.
5154       case ObjectLiteral::Property::PROTOTYPE:
5155       case ObjectLiteral::Property::SETTER:
5156       case ObjectLiteral::Property::GETTER:
5157         return Bailout(kObjectLiteralWithComplexProperty);
5158       default: UNREACHABLE();
5159     }
5160   }
5161
5162   if (expr->has_function()) {
5163     // Return the result of the transformation to fast properties
5164     // instead of the original since this operation changes the map
5165     // of the object. This makes sure that the original object won't
5166     // be used by other optimized code before it is transformed
5167     // (e.g. because of code motion).
5168     HToFastProperties* result = Add<HToFastProperties>(Pop());
5169     return ast_context()->ReturnValue(result);
5170   } else {
5171     return ast_context()->ReturnValue(Pop());
5172   }
5173 }
5174
5175
5176 void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
5177   ASSERT(!HasStackOverflow());
5178   ASSERT(current_block() != NULL);
5179   ASSERT(current_block()->HasPredecessor());
5180   expr->BuildConstantElements(isolate());
5181   ZoneList<Expression*>* subexprs = expr->values();
5182   int length = subexprs->length();
5183   HInstruction* literal;
5184
5185   Handle<AllocationSite> site;
5186   Handle<FixedArray> literals(environment()->closure()->literals(), isolate());
5187   bool uninitialized = false;
5188   Handle<Object> literals_cell(literals->get(expr->literal_index()),
5189                                isolate());
5190   Handle<JSObject> boilerplate_object;
5191   if (literals_cell->IsUndefined()) {
5192     uninitialized = true;
5193     Handle<Object> raw_boilerplate;
5194     ASSIGN_RETURN_ON_EXCEPTION_VALUE(
5195         isolate(), raw_boilerplate,
5196         Runtime::CreateArrayLiteralBoilerplate(
5197             isolate(), literals, expr->constant_elements()),
5198         Bailout(kArrayBoilerplateCreationFailed));
5199
5200     boilerplate_object = Handle<JSObject>::cast(raw_boilerplate);
5201     AllocationSiteCreationContext creation_context(isolate());
5202     site = creation_context.EnterNewScope();
5203     if (JSObject::DeepWalk(boilerplate_object, &creation_context).is_null()) {
5204       return Bailout(kArrayBoilerplateCreationFailed);
5205     }
5206     creation_context.ExitScope(site, boilerplate_object);
5207     literals->set(expr->literal_index(), *site);
5208
5209     if (boilerplate_object->elements()->map() ==
5210         isolate()->heap()->fixed_cow_array_map()) {
5211       isolate()->counters()->cow_arrays_created_runtime()->Increment();
5212     }
5213   } else {
5214     ASSERT(literals_cell->IsAllocationSite());
5215     site = Handle<AllocationSite>::cast(literals_cell);
5216     boilerplate_object = Handle<JSObject>(
5217         JSObject::cast(site->transition_info()), isolate());
5218   }
5219
5220   ASSERT(!boilerplate_object.is_null());
5221   ASSERT(site->SitePointsToLiteral());
5222
5223   ElementsKind boilerplate_elements_kind =
5224       boilerplate_object->GetElementsKind();
5225
5226   // Check whether to use fast or slow deep-copying for boilerplate.
5227   int max_properties = kMaxFastLiteralProperties;
5228   if (IsFastLiteral(boilerplate_object,
5229                     kMaxFastLiteralDepth,
5230                     &max_properties)) {
5231     AllocationSiteUsageContext usage_context(isolate(), site, false);
5232     usage_context.EnterNewScope();
5233     literal = BuildFastLiteral(boilerplate_object, &usage_context);
5234     usage_context.ExitScope(site, boilerplate_object);
5235   } else {
5236     NoObservableSideEffectsScope no_effects(this);
5237     // The boilerplate already exists and constant elements are never accessed,
5238     // so pass an empty fixed array to the runtime function instead.
5239     Handle<FixedArray> constants = isolate()->factory()->empty_fixed_array();
5240     int literal_index = expr->literal_index();
5241     int flags = expr->depth() == 1
5242         ? ArrayLiteral::kShallowElements
5243         : ArrayLiteral::kNoFlags;
5244     flags |= ArrayLiteral::kDisableMementos;
5245
5246     Add<HPushArgument>(Add<HConstant>(literals));
5247     Add<HPushArgument>(Add<HConstant>(literal_index));
5248     Add<HPushArgument>(Add<HConstant>(constants));
5249     Add<HPushArgument>(Add<HConstant>(flags));
5250
5251     // TODO(mvstanton): Consider a flag to turn off creation of any
5252     // AllocationMementos for this call: we are in crankshaft and should have
5253     // learned enough about transition behavior to stop emitting mementos.
5254     Runtime::FunctionId function_id = Runtime::kHiddenCreateArrayLiteral;
5255     literal = Add<HCallRuntime>(isolate()->factory()->empty_string(),
5256                                 Runtime::FunctionForId(function_id),
5257                                 4);
5258
5259     // De-opt if elements kind changed from boilerplate_elements_kind.
5260     Handle<Map> map = Handle<Map>(boilerplate_object->map(), isolate());
5261     literal = Add<HCheckMaps>(literal, map);
5262   }
5263
5264   // The array is expected in the bailout environment during computation
5265   // of the property values and is the value of the entire expression.
5266   Push(literal);
5267   // The literal index is on the stack, too.
5268   Push(Add<HConstant>(expr->literal_index()));
5269
5270   HInstruction* elements = NULL;
5271
5272   for (int i = 0; i < length; i++) {
5273     Expression* subexpr = subexprs->at(i);
5274     // If the subexpression is a literal or a simple materialized literal it
5275     // is already set in the cloned array.
5276     if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
5277
5278     CHECK_ALIVE(VisitForValue(subexpr));
5279     HValue* value = Pop();
5280     if (!Smi::IsValid(i)) return Bailout(kNonSmiKeyInArrayLiteral);
5281
5282     elements = AddLoadElements(literal);
5283
5284     HValue* key = Add<HConstant>(i);
5285
5286     switch (boilerplate_elements_kind) {
5287       case FAST_SMI_ELEMENTS:
5288       case FAST_HOLEY_SMI_ELEMENTS:
5289       case FAST_ELEMENTS:
5290       case FAST_HOLEY_ELEMENTS:
5291       case FAST_DOUBLE_ELEMENTS:
5292       case FAST_HOLEY_DOUBLE_ELEMENTS: {
5293         HStoreKeyed* instr = Add<HStoreKeyed>(elements, key, value,
5294                                               boilerplate_elements_kind);
5295         instr->SetUninitialized(uninitialized);
5296         break;
5297       }
5298       default:
5299         UNREACHABLE();
5300         break;
5301     }
5302
5303     Add<HSimulate>(expr->GetIdForElement(i));
5304   }
5305
5306   Drop(1);  // array literal index
5307   return ast_context()->ReturnValue(Pop());
5308 }
5309
5310
5311 HCheckMaps* HOptimizedGraphBuilder::AddCheckMap(HValue* object,
5312                                                 Handle<Map> map) {
5313   BuildCheckHeapObject(object);
5314   return Add<HCheckMaps>(object, map);
5315 }
5316
5317
5318 HInstruction* HOptimizedGraphBuilder::BuildLoadNamedField(
5319     PropertyAccessInfo* info,
5320     HValue* checked_object) {
5321   // See if this is a load for an immutable property
5322   if (checked_object->ActualValue()->IsConstant() &&
5323       info->lookup()->IsCacheable() &&
5324       info->lookup()->IsReadOnly() && info->lookup()->IsDontDelete()) {
5325     Handle<Object> object(
5326         HConstant::cast(checked_object->ActualValue())->handle(isolate()));
5327
5328     if (object->IsJSObject()) {
5329       LookupResult lookup(isolate());
5330       Handle<JSObject>::cast(object)->Lookup(info->name(), &lookup);
5331       Handle<Object> value(lookup.GetLazyValue(), isolate());
5332
5333       if (!value->IsTheHole()) {
5334         return New<HConstant>(value);
5335       }
5336     }
5337   }
5338
5339   HObjectAccess access = info->access();
5340   if (access.representation().IsDouble()) {
5341     // Load the heap number.
5342     checked_object = Add<HLoadNamedField>(
5343         checked_object, static_cast<HValue*>(NULL),
5344         access.WithRepresentation(Representation::Tagged()));
5345     checked_object->set_type(HType::HeapNumber());
5346     // Load the double value from it.
5347     access = HObjectAccess::ForHeapNumberValue();
5348   }
5349   return New<HLoadNamedField>(
5350       checked_object, checked_object, access, info->field_maps(), top_info());
5351 }
5352
5353
5354 HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField(
5355     PropertyAccessInfo* info,
5356     HValue* checked_object,
5357     HValue* value) {
5358   bool transition_to_field = info->lookup()->IsTransition();
5359   // TODO(verwaest): Move this logic into PropertyAccessInfo.
5360   HObjectAccess field_access = info->access();
5361
5362   HStoreNamedField* instr;
5363   if (field_access.representation().IsDouble()) {
5364     HObjectAccess heap_number_access =
5365         field_access.WithRepresentation(Representation::Tagged());
5366     if (transition_to_field) {
5367       // The store requires a mutable HeapNumber to be allocated.
5368       NoObservableSideEffectsScope no_side_effects(this);
5369       HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);
5370
5371       PretenureFlag pretenure_flag = !FLAG_allocation_site_pretenuring ?
5372           isolate()->heap()->GetPretenureMode() : NOT_TENURED;
5373
5374       HInstruction* heap_number = Add<HAllocate>(heap_number_size,
5375           HType::HeapNumber(),
5376           pretenure_flag,
5377           HEAP_NUMBER_TYPE);
5378       AddStoreMapConstant(heap_number, isolate()->factory()->heap_number_map());
5379       Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
5380                             value);
5381       instr = New<HStoreNamedField>(checked_object->ActualValue(),
5382                                     heap_number_access,
5383                                     heap_number);
5384     } else {
5385       // Already holds a HeapNumber; load the box and write its value field.
5386       HInstruction* heap_number = Add<HLoadNamedField>(
5387           checked_object, static_cast<HValue*>(NULL), heap_number_access);
5388       heap_number->set_type(HType::HeapNumber());
5389       instr = New<HStoreNamedField>(heap_number,
5390                                     HObjectAccess::ForHeapNumberValue(),
5391                                     value, STORE_TO_INITIALIZED_ENTRY);
5392     }
5393   } else {
5394     if (!info->field_maps()->is_empty()) {
5395       ASSERT(field_access.representation().IsHeapObject());
5396       BuildCheckHeapObject(value);
5397       value = Add<HCheckMaps>(value, info->field_maps());
5398
5399       // TODO(bmeurer): This is a dirty hack to avoid repeating the smi check
5400       // that was already performed by the HCheckHeapObject above in the
5401       // HStoreNamedField below. We should really do this right instead and
5402       // make Crankshaft aware of Representation::HeapObject().
5403       field_access = field_access.WithRepresentation(Representation::Tagged());
5404     }
5405
5406     // This is a normal store.
5407     instr = New<HStoreNamedField>(
5408         checked_object->ActualValue(), field_access, value,
5409         transition_to_field ? INITIALIZING_STORE : STORE_TO_INITIALIZED_ENTRY);
5410   }
5411
5412   if (transition_to_field) {
5413     Handle<Map> transition(info->transition());
5414     ASSERT(!transition->is_deprecated());
5415     instr->SetTransition(Add<HConstant>(transition));
5416   }
5417   return instr;
5418 }
5419
5420
5421 bool HOptimizedGraphBuilder::PropertyAccessInfo::IsCompatible(
5422     PropertyAccessInfo* info) {
5423   if (!CanInlinePropertyAccess(type_)) return false;
5424
5425   // Currently only handle Type::Number as a polymorphic case.
5426   // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
5427   // instruction.
5428   if (type_->Is(Type::Number())) return false;
5429
5430   // Values are only compatible for monomorphic load if they all behave the same
5431   // regarding value wrappers.
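  // (A primitive Number/String receiver may need to be boxed in a wrapper
  // before an accessor call -- compare NeedsWrappingFor() further down -- while
  // an ordinary object receiver never does, so the two kinds are not merged.)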
5432   if (type_->Is(Type::NumberOrString())) {
5433     if (!info->type_->Is(Type::NumberOrString())) return false;
5434   } else {
5435     if (info->type_->Is(Type::NumberOrString())) return false;
5436   }
5437
5438   if (!LookupDescriptor()) return false;
5439
5440   if (!lookup_.IsFound()) {
5441     return (!info->lookup_.IsFound() || info->has_holder()) &&
5442         map()->prototype() == info->map()->prototype();
5443   }
5444
5445   // Mismatch if the other access info found the property in the prototype
5446   // chain.
5447   if (info->has_holder()) return false;
5448
5449   if (lookup_.IsPropertyCallbacks()) {
5450     return accessor_.is_identical_to(info->accessor_) &&
5451         api_holder_.is_identical_to(info->api_holder_);
5452   }
5453
5454   if (lookup_.IsConstant()) {
5455     return constant_.is_identical_to(info->constant_);
5456   }
5457
5458   ASSERT(lookup_.IsField());
5459   if (!info->lookup_.IsField()) return false;
5460
5461   Representation r = access_.representation();
5462   if (IsLoad()) {
5463     if (!info->access_.representation().IsCompatibleForLoad(r)) return false;
5464   } else {
5465     if (!info->access_.representation().IsCompatibleForStore(r)) return false;
5466   }
5467   if (info->access_.offset() != access_.offset()) return false;
5468   if (info->access_.IsInobject() != access_.IsInobject()) return false;
5469   if (IsLoad()) {
5470     if (field_maps_.is_empty()) {
5471       info->field_maps_.Clear();
5472     } else if (!info->field_maps_.is_empty()) {
5473       for (int i = 0; i < field_maps_.length(); ++i) {
5474         info->field_maps_.AddMapIfMissing(field_maps_.at(i), info->zone());
5475       }
5476       info->field_maps_.Sort();
5477     }
5478   } else {
5479     // We can only merge stores that agree on their field maps. The comparison
5480     // below is safe, since we keep the field maps sorted.
5481     if (field_maps_.length() != info->field_maps_.length()) return false;
5482     for (int i = 0; i < field_maps_.length(); ++i) {
5483       if (!field_maps_.at(i).is_identical_to(info->field_maps_.at(i))) {
5484         return false;
5485       }
5486     }
5487   }
5488   info->GeneralizeRepresentation(r);
5489   return true;
5490 }
5491
5492
5493 bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupDescriptor() {
5494   if (!type_->IsClass()) return true;
5495   map()->LookupDescriptor(NULL, *name_, &lookup_);
5496   return LoadResult(map());
5497 }
5498
5499
5500 bool HOptimizedGraphBuilder::PropertyAccessInfo::LoadResult(Handle<Map> map) {
5501   if (!IsLoad() && lookup_.IsProperty() &&
5502       (lookup_.IsReadOnly() || !lookup_.IsCacheable())) {
5503     return false;
5504   }
5505
5506   if (lookup_.IsField()) {
5507     // Construct the object field access.
5508     access_ = HObjectAccess::ForField(map, &lookup_, name_);
5509
5510     // Load field map for heap objects.
5511     LoadFieldMaps(map);
5512   } else if (lookup_.IsPropertyCallbacks()) {
5513     Handle<Object> callback(lookup_.GetValueFromMap(*map), isolate());
5514     if (!callback->IsAccessorPair()) return false;
5515     Object* raw_accessor = IsLoad()
5516         ? Handle<AccessorPair>::cast(callback)->getter()
5517         : Handle<AccessorPair>::cast(callback)->setter();
5518     if (!raw_accessor->IsJSFunction()) return false;
5519     Handle<JSFunction> accessor = handle(JSFunction::cast(raw_accessor));
5520     if (accessor->shared()->IsApiFunction()) {
5521       CallOptimization call_optimization(accessor);
5522       if (call_optimization.is_simple_api_call()) {
5523         CallOptimization::HolderLookup holder_lookup;
5524         Handle<Map> receiver_map = this->map();
5525         api_holder_ = call_optimization.LookupHolderOfExpectedType(
5526             receiver_map, &holder_lookup);
5527       }
5528     }
5529     accessor_ = accessor;
5530   } else if (lookup_.IsConstant()) {
5531     constant_ = handle(lookup_.GetConstantFromMap(*map), isolate());
5532   }
5533
5534   return true;
5535 }
5536
5537
5538 void HOptimizedGraphBuilder::PropertyAccessInfo::LoadFieldMaps(
5539     Handle<Map> map) {
5540   // Clear any previously collected field maps.
5541   field_maps_.Clear();
5542
5543   // Figure out the field type from the accessor map.
5544   Handle<HeapType> field_type(lookup_.GetFieldTypeFromMap(*map), isolate());
5545
5546   // Collect the (stable) maps from the field type.
5547   int num_field_maps = field_type->NumClasses();
5548   if (num_field_maps == 0) return;
5549   ASSERT(access_.representation().IsHeapObject());
5550   field_maps_.Reserve(num_field_maps, zone());
5551   HeapType::Iterator<Map> it = field_type->Classes();
5552   while (!it.Done()) {
5553     Handle<Map> field_map = it.Current();
5554     if (!field_map->is_stable()) {
5555       field_maps_.Clear();
5556       return;
5557     }
5558     field_maps_.Add(field_map, zone());
5559     it.Advance();
5560   }
5561   field_maps_.Sort();
5562   ASSERT_EQ(num_field_maps, field_maps_.length());
5563
5564   // Add dependency on the map that introduced the field.
5565   Map::AddDependentCompilationInfo(
5566       handle(lookup_.GetFieldOwnerFromMap(*map), isolate()),
5567       DependentCode::kFieldTypeGroup, top_info());
5568 }
5569
5570
5571 bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupInPrototypes() {
5572   Handle<Map> map = this->map();
5573
5574   while (map->prototype()->IsJSObject()) {
5575     holder_ = handle(JSObject::cast(map->prototype()));
5576     if (holder_->map()->is_deprecated()) {
5577       JSObject::TryMigrateInstance(holder_);
5578     }
5579     map = Handle<Map>(holder_->map());
5580     if (!CanInlinePropertyAccess(ToType(map))) {
5581       lookup_.NotFound();
5582       return false;
5583     }
5584     map->LookupDescriptor(*holder_, *name_, &lookup_);
5585     if (lookup_.IsFound()) return LoadResult(map);
5586   }
5587   lookup_.NotFound();
5588   return true;
5589 }
5590
5591
5592 bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessMonomorphic() {
5593   if (IsSIMD128PropertyCallback() &&
5594       CpuFeatures::SupportsSIMD128InCrankshaft()) {
5595     return true;
5596   }
5597   if (!CanInlinePropertyAccess(type_)) return false;
5598   if (IsJSObjectFieldAccessor()) return IsLoad();
5599   if (!LookupDescriptor()) return false;
5600   if (lookup_.IsFound()) {
5601     if (IsLoad()) return true;
5602     return !lookup_.IsReadOnly() && lookup_.IsCacheable();
5603   }
5604   if (!LookupInPrototypes()) return false;
5605   if (IsLoad()) return true;
5606
5607   if (lookup_.IsPropertyCallbacks()) return true;
5608   Handle<Map> map = this->map();
5609   map->LookupTransition(NULL, *name_, &lookup_);
5610   if (lookup_.IsTransitionToField() && map->unused_property_fields() > 0) {
5611     // Construct the object field access.
5612     access_ = HObjectAccess::ForField(map, &lookup_, name_);
5613
5614     // Load field map for heap objects.
5615     LoadFieldMaps(transition());
5616     return true;
5617   }
5618   return false;
5619 }
5620
5621
5622 bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessAsMonomorphic(
5623     SmallMapList* types) {
5624   ASSERT(type_->Is(ToType(types->first())));
5625   if (!CanAccessMonomorphic()) return false;
5626   STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism);
5627   if (types->length() > kMaxLoadPolymorphism) return false;
5628
5629   if (IsSIMD128PropertyCallback() &&
5630       CpuFeatures::SupportsSIMD128InCrankshaft()) {
5631     for (int i = 1; i < types->length(); ++i) {
5632       if (types->at(i)->instance_type() == types->first()->instance_type()) {
5633         return false;
5634       }
5635     }
5636     return true;
5637   }
5638
5639   HObjectAccess access = HObjectAccess::ForMap();  // bogus default
5640   if (GetJSObjectFieldAccess(&access)) {
5641     for (int i = 1; i < types->length(); ++i) {
5642       PropertyAccessInfo test_info(
5643           builder_, access_type_, ToType(types->at(i)), name_);
5644       HObjectAccess test_access = HObjectAccess::ForMap();  // bogus default
5645       if (!test_info.GetJSObjectFieldAccess(&test_access)) return false;
5646       if (!access.Equals(test_access)) return false;
5647     }
5648     return true;
5649   }
5650
5651   // Currently only handle Type::Number as a polymorphic case.
5652   // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
5653   // instruction.
5654   if (type_->Is(Type::Number())) return false;
5655
5656   // Multiple maps cannot transition to the same target map.
5657   ASSERT(!IsLoad() || !lookup_.IsTransition());
5658   if (lookup_.IsTransition() && types->length() > 1) return false;
5659
5660   for (int i = 1; i < types->length(); ++i) {
5661     PropertyAccessInfo test_info(
5662         builder_, access_type_, ToType(types->at(i)), name_);
5663     if (!test_info.IsCompatible(this)) return false;
5664   }
5665
5666   return true;
5667 }
5668
5669
5670 static bool NeedsWrappingFor(Type* type, Handle<JSFunction> target) {
5671   return type->Is(Type::NumberOrString()) &&
5672       target->shared()->strict_mode() == SLOPPY &&
5673       !target->shared()->native();
5674 }
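// As an illustrative (hypothetical) example of why wrapping is needed:
//   Number.prototype.isFive = function() { return this == 5; };  // sloppy mode
// calling (5).isFive() must observe 'this' as a Number wrapper object rather
// than the primitive 5, so such calls go through WRAP_AND_CALL in
// BuildMonomorphicAccess() below instead of being inlined directly.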
5675
5676
5677 static bool IsSIMDProperty(Handle<String> name, uint8_t* mask) {
5678   SmartArrayPointer<char> cstring = name->ToCString();
5679   int i = 0;
5680   while (i <= 3) {
5681     int shift = 0;
5682     switch (cstring[i]) {
5683       case 'W':
5684         shift++;  // Fall through.
5685       case 'Z':
5686         shift++;  // Fall through.
5687       case 'Y':
5688         shift++;  // Fall through.
5689       case 'X':
5690         break;
5691       default:
5692         return false;
5693     }
5694     *mask |= (shift << 2*i);
5695     i++;
5696   }
5697
5698   return true;
5699 }
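// Worked example, following the code above: for the property name "XYZW" the
// per-character shifts are X=0, Y=1, Z=2 and W=3, so the resulting mask is
// 0 | (1 << 2) | (2 << 4) | (3 << 6) == 0xE4.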
5700
5701
5702 HInstruction* HOptimizedGraphBuilder::BuildMonomorphicAccess(
5703     PropertyAccessInfo* info,
5704     HValue* object,
5705     HValue* checked_object,
5706     HValue* value,
5707     BailoutId ast_id,
5708     BailoutId return_id,
5709     bool can_inline_accessor) {
5710
5711   HObjectAccess access = HObjectAccess::ForMap();  // bogus default
5712   if (info->GetJSObjectFieldAccess(&access)) {
5713     ASSERT(info->IsLoad());
5714     return New<HLoadNamedField>(object, checked_object, access);
5715   }
5716
5717   HValue* checked_holder = checked_object;
5718   if (info->has_holder()) {
5719     Handle<JSObject> prototype(JSObject::cast(info->map()->prototype()));
5720     checked_holder = BuildCheckPrototypeMaps(prototype, info->holder());
5721   }
5722
5723   if (!info->lookup()->IsFound()) {
5724     ASSERT(info->IsLoad());
5725     return graph()->GetConstantUndefined();
5726   }
5727
5728   if (info->lookup()->IsField()) {
5729     if (info->IsLoad()) {
5730       if (info->map()->constructor()->IsJSFunction()) {
5731         JSFunction* constructor = JSFunction::cast(info->map()->constructor());
5732         String* class_name =
5733           String::cast(constructor->shared()->instance_class_name());
5734         uint8_t mask = 0;
5735         if (class_name->Equals(isolate()->heap()->simd()) &&
5736             IsSIMDProperty(info->name(), &mask) &&
5737             CpuFeatures::SupportsSIMD128InCrankshaft()) {
5738           return New<HConstant>(mask);
5739         }
5740       }
5741       return BuildLoadNamedField(info, checked_holder);
5742     } else {
5743       return BuildStoreNamedField(info, checked_object, value);
5744     }
5745   }
5746
5747   if (info->lookup()->IsTransition()) {
5748     ASSERT(!info->IsLoad());
5749     return BuildStoreNamedField(info, checked_object, value);
5750   }
5751
5752   if (info->lookup()->IsPropertyCallbacks()) {
5753     Push(checked_object);
5754     int argument_count = 1;
5755     if (!info->IsLoad()) {
5756       argument_count = 2;
5757       Push(value);
5758     }
5759
5760     if (NeedsWrappingFor(info->type(), info->accessor())) {
5761       HValue* function = Add<HConstant>(info->accessor());
5762       PushArgumentsFromEnvironment(argument_count);
5763       return New<HCallFunction>(function, argument_count, WRAP_AND_CALL);
5764     } else if (FLAG_inline_accessors && can_inline_accessor) {
5765       bool success = info->IsLoad()
5766           ? TryInlineGetter(info->accessor(), info->map(), ast_id, return_id)
5767           : TryInlineSetter(
5768               info->accessor(), info->map(), ast_id, return_id, value);
5769       if (success || HasStackOverflow()) return NULL;
5770     }
5771
5772     PushArgumentsFromEnvironment(argument_count);
5773     return BuildCallConstantFunction(info->accessor(), argument_count);
5774   }
5775
5776   ASSERT(info->lookup()->IsConstant());
5777   if (info->IsLoad()) {
5778     return New<HConstant>(info->constant());
5779   } else {
5780     return New<HCheckValue>(value, Handle<JSFunction>::cast(info->constant()));
5781   }
5782 }
5783
5784
5785 void HOptimizedGraphBuilder::HandlePolymorphicNamedFieldAccess(
5786     PropertyAccessType access_type,
5787     BailoutId ast_id,
5788     BailoutId return_id,
5789     HValue* object,
5790     HValue* value,
5791     SmallMapList* types,
5792     Handle<String> name) {
5793   // Something did not match; must use a polymorphic access.
5794   int count = 0;
5795   HBasicBlock* join = NULL;
5796   HBasicBlock* number_block = NULL;
5797   bool handled_string = false;
5798
5799   bool handle_smi = false;
5800   STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism);
5801   for (int i = 0; i < types->length() && count < kMaxLoadPolymorphism; ++i) {
5802     PropertyAccessInfo info(this, access_type, ToType(types->at(i)), name);
5803     if (info.type()->Is(Type::String())) {
5804       if (handled_string) continue;
5805       handled_string = true;
5806     }
5807     if (info.CanAccessMonomorphic()) {
5808       count++;
5809       if (info.type()->Is(Type::Number())) {
5810         handle_smi = true;
5811         break;
5812       }
5813     }
5814   }
5815
5816   count = 0;
5817   HControlInstruction* smi_check = NULL;
5818   handled_string = false;
5819
5820   for (int i = 0; i < types->length() && count < kMaxLoadPolymorphism; ++i) {
5821     PropertyAccessInfo info(this, access_type, ToType(types->at(i)), name);
5822     if (info.type()->Is(Type::String())) {
5823       if (handled_string) continue;
5824       handled_string = true;
5825     }
5826     if (!info.CanAccessMonomorphic()) continue;
5827
5828     if (count == 0) {
5829       join = graph()->CreateBasicBlock();
5830       if (handle_smi) {
5831         HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
5832         HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
5833         number_block = graph()->CreateBasicBlock();
5834         smi_check = New<HIsSmiAndBranch>(
5835             object, empty_smi_block, not_smi_block);
5836         FinishCurrentBlock(smi_check);
5837         GotoNoSimulate(empty_smi_block, number_block);
5838         set_current_block(not_smi_block);
5839       } else {
5840         BuildCheckHeapObject(object);
5841       }
5842     }
5843     ++count;
5844     HBasicBlock* if_true = graph()->CreateBasicBlock();
5845     HBasicBlock* if_false = graph()->CreateBasicBlock();
5846     HUnaryControlInstruction* compare;
5847
5848     HValue* dependency;
5849     if (info.type()->Is(Type::Number())) {
5850       Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
5851       compare = New<HCompareMap>(object, heap_number_map, if_true, if_false);
5852       dependency = smi_check;
5853     } else if (info.type()->Is(Type::String())) {
5854       compare = New<HIsStringAndBranch>(object, if_true, if_false);
5855       dependency = compare;
5856     } else {
5857       compare = New<HCompareMap>(object, info.map(), if_true, if_false);
5858       dependency = compare;
5859     }
5860     FinishCurrentBlock(compare);
5861
5862     if (info.type()->Is(Type::Number())) {
5863       GotoNoSimulate(if_true, number_block);
5864       if_true = number_block;
5865     }
5866
5867     set_current_block(if_true);
5868
5869     HInstruction* access = BuildMonomorphicAccess(
5870         &info, object, dependency, value, ast_id,
5871         return_id, FLAG_polymorphic_inlining);
5872
5873     HValue* result = NULL;
5874     switch (access_type) {
5875       case LOAD:
5876         result = access;
5877         break;
5878       case STORE:
5879         result = value;
5880         break;
5881     }
5882
5883     if (access == NULL) {
5884       if (HasStackOverflow()) return;
5885     } else {
5886       if (!access->IsLinked()) AddInstruction(access);
5887       if (!ast_context()->IsEffect()) Push(result);
5888     }
5889
5890     if (current_block() != NULL) Goto(join);
5891     set_current_block(if_false);
5892   }
5893
5894   // Finish up.  Unconditionally deoptimize if we've handled all the maps we
5895   // know about and do not want to handle ones we've never seen.  Otherwise
5896   // use a generic IC.
5897   if (count == types->length() && FLAG_deoptimize_uncommon_cases) {
5898     FinishExitWithHardDeoptimization("Unknown map in polymorphic access");
5899   } else {
5900     HInstruction* instr = BuildNamedGeneric(access_type, object, name, value);
5901     AddInstruction(instr);
5902     if (!ast_context()->IsEffect()) Push(access_type == LOAD ? instr : value);
5903
5904     if (join != NULL) {
5905       Goto(join);
5906     } else {
5907       Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
5908       if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
5909       return;
5910     }
5911   }
5912
5913   ASSERT(join != NULL);
5914   if (join->HasPredecessor()) {
5915     join->SetJoinId(ast_id);
5916     set_current_block(join);
5917     if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
5918   } else {
5919     set_current_block(NULL);
5920   }
5921 }
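// Illustrative note: this covers sites such as a load of obj.x where type
// feedback recorded several receiver maps. Each map handled above gets its
// own compare-and-branch arm feeding the join block; receivers that match
// none of them either deoptimize (when every feedback map was covered) or
// fall back to the generic named IC built just above.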
5922
5923
5924 static bool ComputeReceiverTypes(Expression* expr,
5925                                  HValue* receiver,
5926                                  SmallMapList** t,
5927                                  Zone* zone) {
5928   SmallMapList* types = expr->GetReceiverTypes();
5929   *t = types;
5930   bool monomorphic = expr->IsMonomorphic();
5931   if (types != NULL && receiver->HasMonomorphicJSObjectType()) {
5932     Map* root_map = receiver->GetMonomorphicJSObjectMap()->FindRootMap();
5933     types->FilterForPossibleTransitions(root_map);
5934     monomorphic = types->length() == 1;
5935   }
5936   return monomorphic && CanInlinePropertyAccess(
5937       IC::MapToType<Type>(types->first(), zone));
5938 }
5939
5940
5941 static bool AreStringTypes(SmallMapList* types) {
5942   for (int i = 0; i < types->length(); i++) {
5943     if (types->at(i)->instance_type() >= FIRST_NONSTRING_TYPE) return false;
5944   }
5945   return true;
5946 }
5947
5948
5949 static bool AreInt32x4Types(SmallMapList* types) {
5950   if (types == NULL || types->length() == 0) return false;
5951   for (int i = 0; i < types->length(); i++) {
5952     if (types->at(i)->instance_type() != INT32x4_TYPE) return false;
5953   }
5954   return true;
5955 }
5956
5957
5958 static bool AreFloat32x4Types(SmallMapList* types) {
5959   if (types == NULL || types->length() == 0) return false;
5960   for (int i = 0; i < types->length(); i++) {
5961     if (types->at(i)->instance_type() != FLOAT32x4_TYPE) return false;
5962   }
5963   return true;
5964 }
5965
5966
5967 static bool AreFloat64x2Types(SmallMapList* types) {
5968   if (types == NULL || types->length() == 0) return false;
5969   for (int i = 0; i < types->length(); i++) {
5970     if (types->at(i)->instance_type() != FLOAT64x2_TYPE) return false;
5971   }
5972   return true;
5973 }
5974
5975
5976 static BuiltinFunctionId NameToId(Isolate* isolate, Handle<String> name,
5977                                   InstanceType type) {
5978   BuiltinFunctionId id;
5979   if (name->Equals(isolate->heap()->signMask())) {
5980     if (type == FLOAT32x4_TYPE) {
5981       id = kFloat32x4GetSignMask;
5982     } else if (type == FLOAT64x2_TYPE) {
5983       id = kFloat64x2GetSignMask;
5984     } else {
5985       ASSERT(type == INT32x4_TYPE);
5986       id = kInt32x4GetSignMask;
5987     }
5988   } else if (name->Equals(isolate->heap()->x())) {
5989     if (type == FLOAT32x4_TYPE) {
5990       id = kFloat32x4GetX;
5991     } else if (type == FLOAT64x2_TYPE) {
5992       id = kFloat64x2GetX;
5993     } else {
5994       ASSERT(type == INT32x4_TYPE);
5995       id = kInt32x4GetX;
5996     }
5997   } else if (name->Equals(isolate->heap()->y())) {
5998     if (type == FLOAT32x4_TYPE) {
5999       id = kFloat32x4GetY;
6000     } else if (type == FLOAT64x2_TYPE) {
6001       id = kFloat64x2GetY;
6002     } else {
6003       ASSERT(type == INT32x4_TYPE);
6004       id = kInt32x4GetY;
6005     }
6006   } else if (name->Equals(isolate->heap()->z())) {
6007     id = type == FLOAT32x4_TYPE ? kFloat32x4GetZ : kInt32x4GetZ;
6008   } else if (name->Equals(isolate->heap()->w())) {
6009     id = type == FLOAT32x4_TYPE ? kFloat32x4GetW : kInt32x4GetW;
6010   } else if (name->Equals(isolate->heap()->flagX())) {
6011     ASSERT(type == INT32x4_TYPE);
6012     id = kInt32x4GetFlagX;
6013   } else if (name->Equals(isolate->heap()->flagY())) {
6014     ASSERT(type == INT32x4_TYPE);
6015     id = kInt32x4GetFlagY;
6016   } else if (name->Equals(isolate->heap()->flagZ())) {
6017     ASSERT(type == INT32x4_TYPE);
6018     id = kInt32x4GetFlagZ;
6019   } else if (name->Equals(isolate->heap()->flagW())) {
6020     ASSERT(type == INT32x4_TYPE);
6021     id = kInt32x4GetFlagW;
6022   } else {
6023     UNREACHABLE();
6024     id = kSIMD128Unreachable;
6025   }
6026
6027   return id;
6028 }
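// Note: the z and w cases above only distinguish FLOAT32x4 from INT32x4,
// presumably because a float64x2 value has just two lanes (x and y).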
6029
6030
6031 void HOptimizedGraphBuilder::BuildStore(Expression* expr,
6032                                         Property* prop,
6033                                         BailoutId ast_id,
6034                                         BailoutId return_id,
6035                                         bool is_uninitialized) {
6036   if (!prop->key()->IsPropertyName()) {
6037     // Keyed store.
6038     HValue* value = environment()->ExpressionStackAt(0);
6039     HValue* key = environment()->ExpressionStackAt(1);
6040     HValue* object = environment()->ExpressionStackAt(2);
6041     bool has_side_effects = false;
6042     HandleKeyedElementAccess(object, key, value, expr,
6043                              STORE, &has_side_effects);
6044     Drop(3);
6045     Push(value);
6046     Add<HSimulate>(return_id, REMOVABLE_SIMULATE);
6047     return ast_context()->ReturnValue(Pop());
6048   }
6049
6050   // Named store.
6051   HValue* value = Pop();
6052   HValue* object = Pop();
6053
6054   Literal* key = prop->key()->AsLiteral();
6055   Handle<String> name = Handle<String>::cast(key->value());
6056   ASSERT(!name.is_null());
6057
6058   HInstruction* instr = BuildNamedAccess(STORE, ast_id, return_id, expr,
6059                                          object, name, value, is_uninitialized);
6060   if (instr == NULL) return;
6061
6062   if (!ast_context()->IsEffect()) Push(value);
6063   AddInstruction(instr);
6064   if (instr->HasObservableSideEffects()) {
6065     Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
6066   }
6067   if (!ast_context()->IsEffect()) Drop(1);
6068   return ast_context()->ReturnValue(value);
6069 }
6070
6071
6072 void HOptimizedGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
6073   Property* prop = expr->target()->AsProperty();
6074   ASSERT(prop != NULL);
6075   CHECK_ALIVE(VisitForValue(prop->obj()));
6076   if (!prop->key()->IsPropertyName()) {
6077     CHECK_ALIVE(VisitForValue(prop->key()));
6078   }
6079   CHECK_ALIVE(VisitForValue(expr->value()));
6080   BuildStore(expr, prop, expr->id(),
6081              expr->AssignmentId(), expr->IsUninitialized());
6082 }
6083
6084
6085 // Because not every expression has a position and there is no common
6086 // superclass of Assignment and CountOperation, we cannot just pass the
6087 // owning expression instead of position and ast_id separately.
6088 void HOptimizedGraphBuilder::HandleGlobalVariableAssignment(
6089     Variable* var,
6090     HValue* value,
6091     BailoutId ast_id) {
6092   LookupResult lookup(isolate());
6093   GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, STORE);
6094   if (type == kUseCell) {
6095     Handle<GlobalObject> global(current_info()->global_object());
6096     Handle<PropertyCell> cell(global->GetPropertyCell(&lookup));
6097     if (cell->type()->IsConstant()) {
6098       Handle<Object> constant = cell->type()->AsConstant()->Value();
6099       if (value->IsConstant()) {
6100         HConstant* c_value = HConstant::cast(value);
6101         if (!constant.is_identical_to(c_value->handle(isolate()))) {
6102           Add<HDeoptimize>("Constant global variable assignment",
6103                            Deoptimizer::EAGER);
6104         }
6105       } else {
6106         HValue* c_constant = Add<HConstant>(constant);
6107         IfBuilder builder(this);
6108         if (constant->IsNumber()) {
6109           builder.If<HCompareNumericAndBranch>(value, c_constant, Token::EQ);
6110         } else {
6111           builder.If<HCompareObjectEqAndBranch>(value, c_constant);
6112         }
6113         builder.Then();
6114         builder.Else();
6115         Add<HDeoptimize>("Constant global variable assignment",
6116                          Deoptimizer::EAGER);
6117         builder.End();
6118       }
6119     }
6120     HInstruction* instr =
6121         Add<HStoreGlobalCell>(value, cell, lookup.GetPropertyDetails());
6122     if (instr->HasObservableSideEffects()) {
6123       Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
6124     }
6125   } else {
6126     HValue* global_object = Add<HLoadNamedField>(
6127         context(), static_cast<HValue*>(NULL),
6128         HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
6129     HStoreNamedGeneric* instr =
6130         Add<HStoreNamedGeneric>(global_object, var->name(),
6131                                  value, function_strict_mode());
6132     USE(instr);
6133     ASSERT(instr->HasObservableSideEffects());
6134     Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
6135   }
6136 }
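// Illustrative example: if the global's property cell has a constant type
// and the stored value is a different constant, an eager deopt is emitted
// unconditionally; for a non-constant value the IfBuilder above inserts a
// runtime comparison that deoptimizes whenever the value differs from the
// cell's constant, keeping the cell's type assumption valid.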
6137
6138
6139 void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
6140   Expression* target = expr->target();
6141   VariableProxy* proxy = target->AsVariableProxy();
6142   Property* prop = target->AsProperty();
6143   ASSERT(proxy == NULL || prop == NULL);
6144
6145   // We have a second position recorded in the FullCodeGenerator to have
6146   // type feedback for the binary operation.
6147   BinaryOperation* operation = expr->binary_operation();
6148
6149   if (proxy != NULL) {
6150     Variable* var = proxy->var();
6151     if (var->mode() == LET)  {
6152       return Bailout(kUnsupportedLetCompoundAssignment);
6153     }
6154
6155     CHECK_ALIVE(VisitForValue(operation));
6156
6157     switch (var->location()) {
6158       case Variable::UNALLOCATED:
6159         HandleGlobalVariableAssignment(var,
6160                                        Top(),
6161                                        expr->AssignmentId());
6162         break;
6163
6164       case Variable::PARAMETER:
6165       case Variable::LOCAL:
6166         if (var->mode() == CONST_LEGACY)  {
6167           return Bailout(kUnsupportedConstCompoundAssignment);
6168         }
6169         BindIfLive(var, Top());
6170         break;
6171
6172       case Variable::CONTEXT: {
6173         // Bail out if we try to mutate a parameter value in a function
6174         // using the arguments object.  We do not (yet) correctly handle the
6175         // arguments property of the function.
6176         if (current_info()->scope()->arguments() != NULL) {
6177           // Parameters will be allocated to context slots.  We have no
6178           // direct way to detect that the variable is a parameter so we do
6179           // a linear search of the parameter variables.
6180           int count = current_info()->scope()->num_parameters();
6181           for (int i = 0; i < count; ++i) {
6182             if (var == current_info()->scope()->parameter(i)) {
6183               Bailout(kAssignmentToParameterFunctionUsesArgumentsObject);
6184             }
6185           }
6186         }
6187
6188         HStoreContextSlot::Mode mode;
6189
6190         switch (var->mode()) {
6191           case LET:
6192             mode = HStoreContextSlot::kCheckDeoptimize;
6193             break;
6194           case CONST:
6195             // This case is checked statically, so there is no need to
6196             // perform checks here.
6197             UNREACHABLE();
6198           case CONST_LEGACY:
6199             return ast_context()->ReturnValue(Pop());
6200           default:
6201             mode = HStoreContextSlot::kNoCheck;
6202         }
6203
6204         HValue* context = BuildContextChainWalk(var);
6205         HStoreContextSlot* instr = Add<HStoreContextSlot>(
6206             context, var->index(), mode, Top());
6207         if (instr->HasObservableSideEffects()) {
6208           Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
6209         }
6210         break;
6211       }
6212
6213       case Variable::LOOKUP:
6214         return Bailout(kCompoundAssignmentToLookupSlot);
6215     }
6216     return ast_context()->ReturnValue(Pop());
6217
6218   } else if (prop != NULL) {
6219     CHECK_ALIVE(VisitForValue(prop->obj()));
6220     HValue* object = Top();
6221     HValue* key = NULL;
6222     if ((!prop->IsFunctionPrototype() && !prop->key()->IsPropertyName()) ||
6223         prop->IsStringAccess()) {
6224       CHECK_ALIVE(VisitForValue(prop->key()));
6225       key = Top();
6226     }
6227
6228     CHECK_ALIVE(PushLoad(prop, object, key));
6229
6230     CHECK_ALIVE(VisitForValue(expr->value()));
6231     HValue* right = Pop();
6232     HValue* left = Pop();
6233
6234     Push(BuildBinaryOperation(operation, left, right, PUSH_BEFORE_SIMULATE));
6235
6236     BuildStore(expr, prop, expr->id(),
6237                expr->AssignmentId(), expr->IsUninitialized());
6238   } else {
6239     return Bailout(kInvalidLhsInCompoundAssignment);
6240   }
6241 }
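// Illustrative example: a compound property assignment such as o.p += 1
// follows the prop != NULL branch above: PushLoad builds the load of o.p,
// the right-hand side is evaluated, BuildBinaryOperation combines the two,
// and BuildStore writes the result back with the assignment's bailout ids.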
6242
6243
6244 void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
6245   ASSERT(!HasStackOverflow());
6246   ASSERT(current_block() != NULL);
6247   ASSERT(current_block()->HasPredecessor());
6248   VariableProxy* proxy = expr->target()->AsVariableProxy();
6249   Property* prop = expr->target()->AsProperty();
6250   ASSERT(proxy == NULL || prop == NULL);
6251
6252   if (expr->is_compound()) {
6253     HandleCompoundAssignment(expr);
6254     return;
6255   }
6256
6257   if (prop != NULL) {
6258     HandlePropertyAssignment(expr);
6259   } else if (proxy != NULL) {
6260     Variable* var = proxy->var();
6261
6262     if (var->mode() == CONST) {
6263       if (expr->op() != Token::INIT_CONST) {
6264         return Bailout(kNonInitializerAssignmentToConst);
6265       }
6266     } else if (var->mode() == CONST_LEGACY) {
6267       if (expr->op() != Token::INIT_CONST_LEGACY) {
6268         CHECK_ALIVE(VisitForValue(expr->value()));
6269         return ast_context()->ReturnValue(Pop());
6270       }
6271
6272       if (var->IsStackAllocated()) {
6273         // We insert a use of the old value to detect unsupported uses of const
6274         // variables (e.g. initialization inside a loop).
6275         HValue* old_value = environment()->Lookup(var);
6276         Add<HUseConst>(old_value);
6277       }
6278     }
6279
6280     if (proxy->IsArguments()) return Bailout(kAssignmentToArguments);
6281
6282     // Handle the assignment.
6283     switch (var->location()) {
6284       case Variable::UNALLOCATED:
6285         CHECK_ALIVE(VisitForValue(expr->value()));
6286         HandleGlobalVariableAssignment(var,
6287                                        Top(),
6288                                        expr->AssignmentId());
6289         return ast_context()->ReturnValue(Pop());
6290
6291       case Variable::PARAMETER:
6292       case Variable::LOCAL: {
6293         // Perform an initialization check for let declared variables
6294         // or parameters.
6295         if (var->mode() == LET && expr->op() == Token::ASSIGN) {
6296           HValue* env_value = environment()->Lookup(var);
6297           if (env_value == graph()->GetConstantHole()) {
6298             return Bailout(kAssignmentToLetVariableBeforeInitialization);
6299           }
6300         }
6301         // We do not allow the arguments object to occur in a context where it
6302         // may escape, but assignments to stack-allocated locals are
6303         // permitted.
6304         CHECK_ALIVE(VisitForValue(expr->value(), ARGUMENTS_ALLOWED));
6305         HValue* value = Pop();
6306         BindIfLive(var, value);
6307         return ast_context()->ReturnValue(value);
6308       }
6309
6310       case Variable::CONTEXT: {
6311         // Bail out if we try to mutate a parameter value in a function using
6312         // the arguments object.  We do not (yet) correctly handle the
6313         // arguments property of the function.
6314         if (current_info()->scope()->arguments() != NULL) {
6315           // Parameters will be rewritten to context slots.  We have no
6316           // direct way to detect that the variable is a parameter.
6317           int count = current_info()->scope()->num_parameters();
6318           for (int i = 0; i < count; ++i) {
6319             if (var == current_info()->scope()->parameter(i)) {
6320               return Bailout(kAssignmentToParameterInArgumentsObject);
6321             }
6322           }
6323         }
6324
6325         CHECK_ALIVE(VisitForValue(expr->value()));
6326         HStoreContextSlot::Mode mode;
6327         if (expr->op() == Token::ASSIGN) {
6328           switch (var->mode()) {
6329             case LET:
6330               mode = HStoreContextSlot::kCheckDeoptimize;
6331               break;
6332             case CONST:
6333               // This case is checked statically, so there is no need to
6334               // perform checks here.
6335               UNREACHABLE();
6336             case CONST_LEGACY:
6337               return ast_context()->ReturnValue(Pop());
6338             default:
6339               mode = HStoreContextSlot::kNoCheck;
6340           }
6341         } else if (expr->op() == Token::INIT_VAR ||
6342                    expr->op() == Token::INIT_LET ||
6343                    expr->op() == Token::INIT_CONST) {
6344           mode = HStoreContextSlot::kNoCheck;
6345         } else {
6346           ASSERT(expr->op() == Token::INIT_CONST_LEGACY);
6347
6348           mode = HStoreContextSlot::kCheckIgnoreAssignment;
6349         }
6350
6351         HValue* context = BuildContextChainWalk(var);
6352         HStoreContextSlot* instr = Add<HStoreContextSlot>(
6353             context, var->index(), mode, Top());
6354         if (instr->HasObservableSideEffects()) {
6355           Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
6356         }
6357         return ast_context()->ReturnValue(Pop());
6358       }
6359
6360       case Variable::LOOKUP:
6361         return Bailout(kAssignmentToLOOKUPVariable);
6362     }
6363   } else {
6364     return Bailout(kInvalidLeftHandSideInAssignment);
6365   }
6366 }
6367
6368
6369 void HOptimizedGraphBuilder::VisitYield(Yield* expr) {
6370   // Generators are not optimized, so we should never get here.
6371   UNREACHABLE();
6372 }
6373
6374
6375 void HOptimizedGraphBuilder::VisitThrow(Throw* expr) {
6376   ASSERT(!HasStackOverflow());
6377   ASSERT(current_block() != NULL);
6378   ASSERT(current_block()->HasPredecessor());
6379   // We don't optimize functions with invalid left-hand sides in
6380   // assignments, count operations, or for-in.  Consequently throw can
6381   // currently only occur in an effect context.
6382   ASSERT(ast_context()->IsEffect());
6383   CHECK_ALIVE(VisitForValue(expr->exception()));
6384
6385   HValue* value = environment()->Pop();
6386   if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
6387   Add<HPushArgument>(value);
6388   Add<HCallRuntime>(isolate()->factory()->empty_string(),
6389                     Runtime::FunctionForId(Runtime::kHiddenThrow), 1);
6390   Add<HSimulate>(expr->id());
6391
6392   // If the throw definitely exits the function, we can finish with a dummy
6393   // control flow at this point.  This is not the case if the throw is inside
6394   // an inlined function which may be replaced.
6395   if (call_context() == NULL) {
6396     FinishExitCurrentBlock(New<HAbnormalExit>());
6397   }
6398 }
6399
6400
6401 HInstruction* HGraphBuilder::AddLoadStringInstanceType(HValue* string) {
6402   if (string->IsConstant()) {
6403     HConstant* c_string = HConstant::cast(string);
6404     if (c_string->HasStringValue()) {
6405       return Add<HConstant>(c_string->StringValue()->map()->instance_type());
6406     }
6407   }
6408   return Add<HLoadNamedField>(
6409       Add<HLoadNamedField>(string, static_cast<HValue*>(NULL),
6410                            HObjectAccess::ForMap()),
6411       static_cast<HValue*>(NULL), HObjectAccess::ForMapInstanceType());
6412 }
6413
6414
6415 HInstruction* HGraphBuilder::AddLoadStringLength(HValue* string) {
6416   if (string->IsConstant()) {
6417     HConstant* c_string = HConstant::cast(string);
6418     if (c_string->HasStringValue()) {
6419       return Add<HConstant>(c_string->StringValue()->length());
6420     }
6421   }
6422   return Add<HLoadNamedField>(string, static_cast<HValue*>(NULL),
6423                               HObjectAccess::ForStringLength());
6424 }
6425
6426
6427 HInstruction* HOptimizedGraphBuilder::BuildNamedGeneric(
6428     PropertyAccessType access_type,
6429     HValue* object,
6430     Handle<String> name,
6431     HValue* value,
6432     bool is_uninitialized) {
6433   if (is_uninitialized) {
6434     Add<HDeoptimize>("Insufficient type feedback for generic named access",
6435                      Deoptimizer::SOFT);
6436   }
6437   if (access_type == LOAD) {
6438     return New<HLoadNamedGeneric>(object, name);
6439   } else {
6440     return New<HStoreNamedGeneric>(object, name, value, function_strict_mode());
6441   }
6442 }
6443
6444
6445
6446 HInstruction* HOptimizedGraphBuilder::BuildKeyedGeneric(
6447     PropertyAccessType access_type,
6448     HValue* object,
6449     HValue* key,
6450     HValue* value) {
6451   if (access_type == LOAD) {
6452     return New<HLoadKeyedGeneric>(object, key);
6453   } else {
6454     return New<HStoreKeyedGeneric>(object, key, value, function_strict_mode());
6455   }
6456 }
6457
6458
6459 LoadKeyedHoleMode HOptimizedGraphBuilder::BuildKeyedHoleMode(Handle<Map> map) {
6460   // Loads from "stock" fast holey double arrays can elide the hole check.
6461   LoadKeyedHoleMode load_mode = NEVER_RETURN_HOLE;
6462   if (*map == isolate()->get_initial_js_array_map(FAST_HOLEY_DOUBLE_ELEMENTS) &&
6463       isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
6464     Handle<JSObject> prototype(JSObject::cast(map->prototype()), isolate());
6465     Handle<JSObject> object_prototype = isolate()->initial_object_prototype();
6466     BuildCheckPrototypeMaps(prototype, object_prototype);
6467     load_mode = ALLOW_RETURN_HOLE;
6468     graph()->MarkDependsOnEmptyArrayProtoElements();
6469   }
6470
6471   return load_mode;
6472 }
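// Illustrative note: skipping the hole check is only sound while a hole
// cannot become observable via the prototype chain, hence the prototype
// map checks above and the graph-level dependency on the empty
// Array.prototype elements invariant.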
6473
6474
6475 HInstruction* HOptimizedGraphBuilder::BuildMonomorphicElementAccess(
6476     HValue* object,
6477     HValue* key,
6478     HValue* val,
6479     HValue* dependency,
6480     Handle<Map> map,
6481     PropertyAccessType access_type,
6482     KeyedAccessStoreMode store_mode) {
6483   HCheckMaps* checked_object = Add<HCheckMaps>(object, map, dependency);
6484   if (dependency) {
6485     checked_object->ClearDependsOnFlag(kElementsKind);
6486   }
6487
6488   if (access_type == STORE && map->prototype()->IsJSObject()) {
6489     // Monomorphic stores need a prototype chain check because shape
6490     // changes could allow callbacks on elements in the chain that
6491     // aren't compatible with monomorphic keyed stores.
6492     Handle<JSObject> prototype(JSObject::cast(map->prototype()));
6493     Object* holder = map->prototype();
6494     while (holder->GetPrototype(isolate())->IsJSObject()) {
6495       holder = holder->GetPrototype(isolate());
6496     }
6497     ASSERT(holder->GetPrototype(isolate())->IsNull());
6498
6499     BuildCheckPrototypeMaps(prototype,
6500                             Handle<JSObject>(JSObject::cast(holder)));
6501   }
6502
6503   LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
6504   return BuildUncheckedMonomorphicElementAccess(
6505       checked_object, key, val,
6506       map->instance_type() == JS_ARRAY_TYPE,
6507       map->elements_kind(), access_type,
6508       load_mode, store_mode);
6509 }
6510
6511
6512 HInstruction* HOptimizedGraphBuilder::TryBuildConsolidatedElementLoad(
6513     HValue* object,
6514     HValue* key,
6515     HValue* val,
6516     SmallMapList* maps) {
6517   // For polymorphic loads of similar elements kinds (i.e. all tagged or all
6518   // double), always use the "worst case" code without a transition.  This is
6519   // much faster than transitioning the elements to the worst case, trading a
6520   // HTransitionElements for a HCheckMaps, and avoiding mutation of the array.
6521   bool has_double_maps = false;
6522   bool has_smi_or_object_maps = false;
6523   bool has_js_array_access = false;
6524   bool has_non_js_array_access = false;
6525   bool has_seen_holey_elements = false;
6526   Handle<Map> most_general_consolidated_map;
6527   for (int i = 0; i < maps->length(); ++i) {
6528     Handle<Map> map = maps->at(i);
6529     if (!map->IsJSObjectMap()) return NULL;
6530     // Don't allow mixing of JSArrays with JSObjects.
6531     if (map->instance_type() == JS_ARRAY_TYPE) {
6532       if (has_non_js_array_access) return NULL;
6533       has_js_array_access = true;
6534     } else if (has_js_array_access) {
6535       return NULL;
6536     } else {
6537       has_non_js_array_access = true;
6538     }
6539     // Don't allow mixed, incompatible elements kinds.
6540     if (map->has_fast_double_elements()) {
6541       if (has_smi_or_object_maps) return NULL;
6542       has_double_maps = true;
6543     } else if (map->has_fast_smi_or_object_elements()) {
6544       if (has_double_maps) return NULL;
6545       has_smi_or_object_maps = true;
6546     } else {
6547       return NULL;
6548     }
6549     // Remember if we've ever seen holey elements.
6550     if (IsHoleyElementsKind(map->elements_kind())) {
6551       has_seen_holey_elements = true;
6552     }
6553     // Remember the most general elements kind, the code for its load will
6554     // properly handle all of the more specific cases.
6555     if ((i == 0) || IsMoreGeneralElementsKindTransition(
6556             most_general_consolidated_map->elements_kind(),
6557             map->elements_kind())) {
6558       most_general_consolidated_map = map;
6559     }
6560   }
6561   if (!has_double_maps && !has_smi_or_object_maps) return NULL;
6562
6563   HCheckMaps* checked_object = Add<HCheckMaps>(object, maps);
6564   // FAST_ELEMENTS is considered more general than FAST_HOLEY_SMI_ELEMENTS.
6565   // If we've seen both, the consolidated load must use FAST_HOLEY_ELEMENTS.
6566   ElementsKind consolidated_elements_kind = has_seen_holey_elements
6567       ? GetHoleyElementsKind(most_general_consolidated_map->elements_kind())
6568       : most_general_consolidated_map->elements_kind();
6569   HInstruction* instr = BuildUncheckedMonomorphicElementAccess(
6570       checked_object, key, val,
6571       most_general_consolidated_map->instance_type() == JS_ARRAY_TYPE,
6572       consolidated_elements_kind,
6573       LOAD, NEVER_RETURN_HOLE, STANDARD_STORE);
6574   return instr;
6575 }
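// Illustrative example: feedback maps with FAST_SMI_ELEMENTS and
// FAST_HOLEY_ELEMENTS consolidate into a single FAST_HOLEY_ELEMENTS load,
// since the holey tagged load also handles the smi-only arrays; adding a
// FAST_DOUBLE_ELEMENTS map to the mix would make this helper return NULL.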
6576
6577
6578 HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess(
6579     HValue* object,
6580     HValue* key,
6581     HValue* val,
6582     SmallMapList* maps,
6583     PropertyAccessType access_type,
6584     KeyedAccessStoreMode store_mode,
6585     bool* has_side_effects) {
6586   *has_side_effects = false;
6587   BuildCheckHeapObject(object);
6588
6589   if (access_type == LOAD) {
6590     HInstruction* consolidated_load =
6591         TryBuildConsolidatedElementLoad(object, key, val, maps);
6592     if (consolidated_load != NULL) {
6593       *has_side_effects |= consolidated_load->HasObservableSideEffects();
6594       return consolidated_load;
6595     }
6596   }
6597
6598   // Elements_kind transition support.
6599   MapHandleList transition_target(maps->length());
6600   // Collect possible transition targets.
6601   MapHandleList possible_transitioned_maps(maps->length());
6602   for (int i = 0; i < maps->length(); ++i) {
6603     Handle<Map> map = maps->at(i);
6604     ElementsKind elements_kind = map->elements_kind();
6605     if (IsFastElementsKind(elements_kind) &&
6606         elements_kind != GetInitialFastElementsKind()) {
6607       possible_transitioned_maps.Add(map);
6608     }
6609     if (elements_kind == SLOPPY_ARGUMENTS_ELEMENTS) {
6610       HInstruction* result = BuildKeyedGeneric(access_type, object, key, val);
6611       *has_side_effects = result->HasObservableSideEffects();
6612       return AddInstruction(result);
6613     }
6614   }
6615   // Get transition target for each map (NULL == no transition).
6616   for (int i = 0; i < maps->length(); ++i) {
6617     Handle<Map> map = maps->at(i);
6618     Handle<Map> transitioned_map =
6619         map->FindTransitionedMap(&possible_transitioned_maps);
6620     transition_target.Add(transitioned_map);
6621   }
6622
6623   MapHandleList untransitionable_maps(maps->length());
6624   HTransitionElementsKind* transition = NULL;
6625   for (int i = 0; i < maps->length(); ++i) {
6626     Handle<Map> map = maps->at(i);
6627     ASSERT(map->IsMap());
6628     if (!transition_target.at(i).is_null()) {
6629       ASSERT(Map::IsValidElementsTransition(
6630           map->elements_kind(),
6631           transition_target.at(i)->elements_kind()));
6632       transition = Add<HTransitionElementsKind>(object, map,
6633                                                 transition_target.at(i));
6634     } else {
6635       untransitionable_maps.Add(map);
6636     }
6637   }
6638
6639   // If only one map is left after transitioning, handle this case
6640   // monomorphically.
6641   ASSERT(untransitionable_maps.length() >= 1);
6642   if (untransitionable_maps.length() == 1) {
6643     Handle<Map> untransitionable_map = untransitionable_maps[0];
6644     HInstruction* instr = NULL;
6645     if (untransitionable_map->has_slow_elements_kind() ||
6646         !untransitionable_map->IsJSObjectMap()) {
6647       instr = AddInstruction(BuildKeyedGeneric(access_type, object, key, val));
6648     } else {
6649       instr = BuildMonomorphicElementAccess(
6650           object, key, val, transition, untransitionable_map, access_type,
6651           store_mode);
6652     }
6653     *has_side_effects |= instr->HasObservableSideEffects();
6654     return access_type == STORE ? NULL : instr;
6655   }
6656
6657   HBasicBlock* join = graph()->CreateBasicBlock();
6658
6659   for (int i = 0; i < untransitionable_maps.length(); ++i) {
6660     Handle<Map> map = untransitionable_maps[i];
6661     if (!map->IsJSObjectMap()) continue;
6662     ElementsKind elements_kind = map->elements_kind();
6663     HBasicBlock* this_map = graph()->CreateBasicBlock();
6664     HBasicBlock* other_map = graph()->CreateBasicBlock();
6665     HCompareMap* mapcompare =
6666         New<HCompareMap>(object, map, this_map, other_map);
6667     FinishCurrentBlock(mapcompare);
6668
6669     set_current_block(this_map);
6670     HInstruction* access = NULL;
6671     if (IsDictionaryElementsKind(elements_kind)) {
6672       access = AddInstruction(BuildKeyedGeneric(access_type, object, key, val));
6673     } else {
6674       ASSERT(IsFastElementsKind(elements_kind) ||
6675              IsExternalArrayElementsKind(elements_kind) ||
6676              IsFixedTypedArrayElementsKind(elements_kind));
6677       LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
6678       // Happily, mapcompare is a checked object.
6679       access = BuildUncheckedMonomorphicElementAccess(
6680           mapcompare, key, val,
6681           map->instance_type() == JS_ARRAY_TYPE,
6682           elements_kind, access_type,
6683           load_mode,
6684           store_mode);
6685     }
6686     *has_side_effects |= access->HasObservableSideEffects();
6687     // The caller will use has_side_effects and add a correct Simulate.
6688     access->SetFlag(HValue::kHasNoObservableSideEffects);
6689     if (access_type == LOAD) {
6690       Push(access);
6691     }
6692     NoObservableSideEffectsScope scope(this);
6693     GotoNoSimulate(join);
6694     set_current_block(other_map);
6695   }
6696
6697   // Ensure that we visited at least one map above that goes to join. This is
6698   // necessary because FinishExitWithHardDeoptimization does an AbnormalExit
6699   // rather than joining the join block. If this becomes an issue, insert a
6700   // generic access in the case length() == 0.
6701   ASSERT(join->predecessors()->length() > 0);
6702   // Deopt if none of the cases matched.
6703   NoObservableSideEffectsScope scope(this);
6704   FinishExitWithHardDeoptimization("Unknown map in polymorphic element access");
6705   set_current_block(join);
6706   return access_type == STORE ? NULL : Pop();
6707 }
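// Illustrative note: as with polymorphic named accesses, each remaining
// map gets its own HCompareMap arm; loads push their result for the join
// (hence the Pop() above) while stores return NULL and rely on the value
// the caller keeps on the expression stack.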
6708
6709
6710 HValue* HOptimizedGraphBuilder::HandleKeyedElementAccess(
6711     HValue* obj,
6712     HValue* key,
6713     HValue* val,
6714     Expression* expr,
6715     PropertyAccessType access_type,
6716     bool* has_side_effects) {
6717   ASSERT(!expr->IsPropertyName());
6718   HInstruction* instr = NULL;
6719
6720   SmallMapList* types;
6721   bool monomorphic = ComputeReceiverTypes(expr, obj, &types, zone());
6722
6723   bool force_generic = false;
6724   if (access_type == STORE &&
6725       (monomorphic || (types != NULL && !types->is_empty()))) {
6726     // Stores can't be mono/polymorphic if their prototype chain has dictionary
6727     // elements. However, a receiver map that has dictionary elements itself
6728     // should be left to normal mono/poly behavior (the other maps may benefit
6729     // from highly optimized stores).
6730     for (int i = 0; i < types->length(); i++) {
6731       Handle<Map> current_map = types->at(i);
6732       if (current_map->DictionaryElementsInPrototypeChainOnly()) {
6733         force_generic = true;
6734         monomorphic = false;
6735         break;
6736       }
6737     }
6738   }
6739
6740   if (monomorphic) {
6741     Handle<Map> map = types->first();
6742     if (map->has_slow_elements_kind() || !map->IsJSObjectMap()) {
6743       instr = AddInstruction(BuildKeyedGeneric(access_type, obj, key, val));
6744     } else {
6745       BuildCheckHeapObject(obj);
6746       instr = BuildMonomorphicElementAccess(
6747           obj, key, val, NULL, map, access_type, expr->GetStoreMode());
6748     }
6749   } else if (!force_generic && (types != NULL && !types->is_empty())) {
6750     return HandlePolymorphicElementAccess(
6751         obj, key, val, types, access_type,
6752         expr->GetStoreMode(), has_side_effects);
6753   } else {
6754     if (access_type == STORE) {
6755       if (expr->IsAssignment() &&
6756           expr->AsAssignment()->HasNoTypeInformation()) {
6757         Add<HDeoptimize>("Insufficient type feedback for keyed store",
6758                          Deoptimizer::SOFT);
6759       }
6760     } else {
6761       if (expr->AsProperty()->HasNoTypeInformation()) {
6762         Add<HDeoptimize>("Insufficient type feedback for keyed load",
6763                          Deoptimizer::SOFT);
6764       }
6765     }
6766     instr = AddInstruction(BuildKeyedGeneric(access_type, obj, key, val));
6767   }
6768   *has_side_effects = instr->HasObservableSideEffects();
6769   return instr;
6770 }
6771
6772
6773 void HOptimizedGraphBuilder::EnsureArgumentsArePushedForAccess() {
6774   // Outermost function already has arguments on the stack.
6775   if (function_state()->outer() == NULL) return;
6776
6777   if (function_state()->arguments_pushed()) return;
6778
6779   // Push arguments when entering inlined function.
6780   HEnterInlined* entry = function_state()->entry();
6781   entry->set_arguments_pushed();
6782
6783   HArgumentsObject* arguments = entry->arguments_object();
6784   const ZoneList<HValue*>* arguments_values = arguments->arguments_values();
6785
6786   HInstruction* insert_after = entry;
6787   for (int i = 0; i < arguments_values->length(); i++) {
6788     HValue* argument = arguments_values->at(i);
6789     HInstruction* push_argument = New<HPushArgument>(argument);
6790     push_argument->InsertAfter(insert_after);
6791     insert_after = push_argument;
6792   }
6793
6794   HArgumentsElements* arguments_elements = New<HArgumentsElements>(true);
6795   arguments_elements->ClearFlag(HValue::kUseGVN);
6796   arguments_elements->InsertAfter(insert_after);
6797   function_state()->set_arguments_elements(arguments_elements);
6798 }
6799
6800
6801 bool HOptimizedGraphBuilder::TryArgumentsAccess(Property* expr) {
6802   VariableProxy* proxy = expr->obj()->AsVariableProxy();
6803   if (proxy == NULL) return false;
6804   if (!proxy->var()->IsStackAllocated()) return false;
6805   if (!environment()->Lookup(proxy->var())->CheckFlag(HValue::kIsArguments)) {
6806     return false;
6807   }
6808
6809   HInstruction* result = NULL;
6810   if (expr->key()->IsPropertyName()) {
6811     Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
6812     if (!name->IsOneByteEqualTo(STATIC_ASCII_VECTOR("length"))) return false;
6813
6814     if (function_state()->outer() == NULL) {
6815       HInstruction* elements = Add<HArgumentsElements>(false);
6816       result = New<HArgumentsLength>(elements);
6817     } else {
6818       // Number of arguments without receiver.
6819       int argument_count = environment()->
6820           arguments_environment()->parameter_count() - 1;
6821       result = New<HConstant>(argument_count);
6822     }
6823   } else {
6824     Push(graph()->GetArgumentsObject());
6825     CHECK_ALIVE_OR_RETURN(VisitForValue(expr->key()), true);
6826     HValue* key = Pop();
6827     Drop(1);  // Arguments object.
6828     if (function_state()->outer() == NULL) {
6829       HInstruction* elements = Add<HArgumentsElements>(false);
6830       HInstruction* length = Add<HArgumentsLength>(elements);
6831       HInstruction* checked_key = Add<HBoundsCheck>(key, length);
6832       result = New<HAccessArgumentsAt>(elements, length, checked_key);
6833     } else {
6834       EnsureArgumentsArePushedForAccess();
6835
6836       // Number of arguments without receiver.
6837       HInstruction* elements = function_state()->arguments_elements();
6838       int argument_count = environment()->
6839           arguments_environment()->parameter_count() - 1;
6840       HInstruction* length = Add<HConstant>(argument_count);
6841       HInstruction* checked_key = Add<HBoundsCheck>(key, length);
6842       result = New<HAccessArgumentsAt>(elements, length, checked_key);
6843     }
6844   }
6845   ast_context()->ReturnInstruction(result, expr->id());
6846   return true;
6847 }
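// Illustrative examples: arguments.length becomes an HArgumentsLength (or
// a constant argument count inside an inlined frame), and arguments[i]
// becomes a bounds-checked HAccessArgumentsAt, so neither access forces
// materialization of the arguments object.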
6848
6849
6850 HInstruction* HOptimizedGraphBuilder::BuildNamedAccess(
6851     PropertyAccessType access,
6852     BailoutId ast_id,
6853     BailoutId return_id,
6854     Expression* expr,
6855     HValue* object,
6856     Handle<String> name,
6857     HValue* value,
6858     bool is_uninitialized) {
6859   SmallMapList* types;
6860   ComputeReceiverTypes(expr, object, &types, zone());
6861   ASSERT(types != NULL);
6862
6863   if (types->length() > 0) {
6864     PropertyAccessInfo info(this, access, ToType(types->first()), name);
6865     if (!info.CanAccessAsMonomorphic(types)) {
6866       HandlePolymorphicNamedFieldAccess(
6867           access, ast_id, return_id, object, value, types, name);
6868       return NULL;
6869     }
6870
6871     HValue* checked_object;
6872     // Type::Number() is only supported by polymorphic load/call handling.
6873     ASSERT(!info.type()->Is(Type::Number()));
6874     BuildCheckHeapObject(object);
6875
6876     if (AreStringTypes(types)) {
6877       checked_object =
6878           Add<HCheckInstanceType>(object, HCheckInstanceType::IS_STRING);
6879     } else if (AreFloat32x4Types(types) &&
6880                CpuFeatures::SupportsSIMD128InCrankshaft()) {
6881       Handle<JSFunction> function(
6882           isolate()->native_context()->float32x4_function());
6883       HInstruction* constant_function = Add<HConstant>(function);
6884       HObjectAccess map_access = HObjectAccess::ForPrototypeOrInitialMap();
6885       HInstruction* map = Add<HLoadNamedField>(
6886           constant_function, static_cast<HValue*>(NULL), map_access);
6887       HObjectAccess prototype_access = HObjectAccess::ForMapPrototype();
6888       HInstruction* prototype = Add<HLoadNamedField>(
6889           map, static_cast<HValue*>(NULL), prototype_access);
6890       Handle<Map> initial_function_prototype_map(
6891           isolate()->native_context()->float32x4_function_prototype_map());
6892       Add<HCheckMaps>(prototype, initial_function_prototype_map);
6893       BuiltinFunctionId id = NameToId(isolate(), name, FLOAT32x4_TYPE);
6894       return NewUncasted<HUnarySIMDOperation>(object, id);
6895     } else if (AreFloat64x2Types(types) &&
6896                CpuFeatures::SupportsSIMD128InCrankshaft()) {
6897       Handle<JSFunction> function(
6898           isolate()->native_context()->float64x2_function());
6899       HInstruction* constant_function = Add<HConstant>(function);
6900       HObjectAccess map_access = HObjectAccess::ForPrototypeOrInitialMap();
6901       HInstruction* map = Add<HLoadNamedField>(
6902           constant_function, static_cast<HValue*>(NULL), map_access);
6903       HObjectAccess prototype_access = HObjectAccess::ForMapPrototype();
6904       HInstruction* prototype = Add<HLoadNamedField>(
6905           map, static_cast<HValue*>(NULL), prototype_access);
6906       Handle<Map> initial_function_prototype_map(
6907           isolate()->native_context()->float64x2_function_prototype_map());
6908       Add<HCheckMaps>(prototype, initial_function_prototype_map);
6909       BuiltinFunctionId id = NameToId(isolate(), name, FLOAT64x2_TYPE);
6910       return NewUncasted<HUnarySIMDOperation>(object, id);
6911     } else if (AreInt32x4Types(types) &&
6912                CpuFeatures::SupportsSIMD128InCrankshaft()) {
6913       Handle<JSFunction> function(
6914           isolate()->native_context()->int32x4_function());
6915       HInstruction* constant_function = Add<HConstant>(function);
6916       HObjectAccess map_access = HObjectAccess::ForPrototypeOrInitialMap();
6917       HInstruction* map = Add<HLoadNamedField>(
6918           constant_function, static_cast<HValue*>(NULL), map_access);
6919       HObjectAccess prototype_access = HObjectAccess::ForMapPrototype();
6920       HInstruction* prototype = Add<HLoadNamedField>(
6921           map, static_cast<HValue*>(NULL), prototype_access);
6922       Handle<Map> initial_function_prototype_map(
6923           isolate()->native_context()->int32x4_function_prototype_map());
6924       Add<HCheckMaps>(prototype, initial_function_prototype_map);
6925       BuiltinFunctionId id = NameToId(isolate(), name, INT32x4_TYPE);
6926       return NewUncasted<HUnarySIMDOperation>(object, id);
6927     } else {
6928       checked_object = Add<HCheckMaps>(object, types);
6929     }
6930     return BuildMonomorphicAccess(
6931         &info, object, checked_object, value, ast_id, return_id);
6932   }
6933
6934   return BuildNamedGeneric(access, object, name, value, is_uninitialized);
6935 }
6936
6937
6938 void HOptimizedGraphBuilder::PushLoad(Property* expr,
6939                                       HValue* object,
6940                                       HValue* key) {
6941   ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
6942   Push(object);
6943   if (key != NULL) Push(key);
6944   BuildLoad(expr, expr->LoadId());
6945 }
6946
6947
6948 void HOptimizedGraphBuilder::BuildLoad(Property* expr,
6949                                        BailoutId ast_id) {
6950   HInstruction* instr = NULL;
6951   if (expr->IsStringAccess()) {
6952     HValue* index = Pop();
6953     HValue* string = Pop();
6954     HInstruction* char_code = BuildStringCharCodeAt(string, index);
6955     AddInstruction(char_code);
6956     instr = NewUncasted<HStringCharFromCode>(char_code);
6957
6958   } else if (expr->IsFunctionPrototype()) {
6959     HValue* function = Pop();
6960     BuildCheckHeapObject(function);
6961     instr = New<HLoadFunctionPrototype>(function);
6962
6963   } else if (expr->key()->IsPropertyName()) {
6964     Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
6965     HValue* object = Pop();
6966
6967     instr = BuildNamedAccess(LOAD, ast_id, expr->LoadId(), expr,
6968                              object, name, NULL, expr->IsUninitialized());
6969     if (instr == NULL) return;
6970     if (instr->IsLinked()) return ast_context()->ReturnValue(instr);
6971
6972   } else {
6973     HValue* key = Pop();
6974     HValue* obj = Pop();
6975
6976     bool has_side_effects = false;
6977     HValue* load = HandleKeyedElementAccess(
6978         obj, key, NULL, expr, LOAD, &has_side_effects);
6979     if (has_side_effects) {
6980       if (ast_context()->IsEffect()) {
6981         Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
6982       } else {
6983         Push(load);
6984         Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
6985         Drop(1);
6986       }
6987     }
6988     return ast_context()->ReturnValue(load);
6989   }
6990   return ast_context()->ReturnInstruction(instr, ast_id);
6991 }
6992
6993
6994 void HOptimizedGraphBuilder::VisitProperty(Property* expr) {
6995   ASSERT(!HasStackOverflow());
6996   ASSERT(current_block() != NULL);
6997   ASSERT(current_block()->HasPredecessor());
6998
6999   if (TryArgumentsAccess(expr)) return;
7000
7001   CHECK_ALIVE(VisitForValue(expr->obj()));
7002   if ((!expr->IsFunctionPrototype() && !expr->key()->IsPropertyName()) ||
7003       expr->IsStringAccess()) {
7004     CHECK_ALIVE(VisitForValue(expr->key()));
7005   }
7006
7007   BuildLoad(expr, expr->id());
7008 }
7009
7010
7011 HInstruction* HGraphBuilder::BuildConstantMapCheck(Handle<JSObject> constant) {
7012   HCheckMaps* check = Add<HCheckMaps>(
7013       Add<HConstant>(constant), handle(constant->map()));
7014   check->ClearDependsOnFlag(kElementsKind);
7015   return check;
7016 }
7017
7018
7019 HInstruction* HGraphBuilder::BuildCheckPrototypeMaps(Handle<JSObject> prototype,
7020                                                      Handle<JSObject> holder) {
7021   while (holder.is_null() || !prototype.is_identical_to(holder)) {
7022     BuildConstantMapCheck(prototype);
7023     Object* next_prototype = prototype->GetPrototype();
7024     if (next_prototype->IsNull()) return NULL;
7025     CHECK(next_prototype->IsJSObject());
7026     prototype = handle(JSObject::cast(next_prototype));
7027   }
7028   return BuildConstantMapCheck(prototype);
7029 }
7030
7031
7032 void HOptimizedGraphBuilder::AddCheckPrototypeMaps(Handle<JSObject> holder,
7033                                                    Handle<Map> receiver_map) {
7034   if (!holder.is_null()) {
7035     Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
7036     BuildCheckPrototypeMaps(prototype, holder);
7037   }
7038 }
7039
7040
7041 HInstruction* HOptimizedGraphBuilder::NewPlainFunctionCall(
7042     HValue* fun, int argument_count, bool pass_argument_count) {
7043   return New<HCallJSFunction>(
7044       fun, argument_count, pass_argument_count);
7045 }
7046
7047
7048 HInstruction* HOptimizedGraphBuilder::NewArgumentAdaptorCall(
7049     HValue* fun, HValue* context,
7050     int argument_count, HValue* expected_param_count) {
7051   CallInterfaceDescriptor* descriptor =
7052       isolate()->call_descriptor(Isolate::ArgumentAdaptorCall);
7053
7054   HValue* arity = Add<HConstant>(argument_count - 1);
7055
7056   HValue* op_vals[] = { fun, context, arity, expected_param_count };
7057
7058   Handle<Code> adaptor =
7059       isolate()->builtins()->ArgumentsAdaptorTrampoline();
7060   HConstant* adaptor_value = Add<HConstant>(adaptor);
7061
7062   return New<HCallWithDescriptor>(
7063       adaptor_value, argument_count, descriptor,
7064       Vector<HValue*>(op_vals, descriptor->environment_length()));
7065 }
7066
7067
7068 HInstruction* HOptimizedGraphBuilder::BuildCallConstantFunction(
7069     Handle<JSFunction> jsfun, int argument_count) {
7070   HValue* target = Add<HConstant>(jsfun);
7071   // For constant functions, we try to avoid calling the
7072   // argument adaptor and instead call the function directly.
7073   int formal_parameter_count = jsfun->shared()->formal_parameter_count();
7074   bool dont_adapt_arguments =
7075       (formal_parameter_count ==
7076        SharedFunctionInfo::kDontAdaptArgumentsSentinel);
7077   int arity = argument_count - 1;
7078   bool can_invoke_directly =
7079       dont_adapt_arguments || formal_parameter_count == arity;
7080   if (can_invoke_directly) {
7081     if (jsfun.is_identical_to(current_info()->closure())) {
7082       graph()->MarkRecursive();
7083     }
7084     return NewPlainFunctionCall(target, argument_count, dont_adapt_arguments);
7085   } else {
7086     HValue* param_count_value = Add<HConstant>(formal_parameter_count);
7087     HValue* context = Add<HLoadNamedField>(
7088         target, static_cast<HValue*>(NULL),
7089         HObjectAccess::ForFunctionContextPointer());
7090     return NewArgumentAdaptorCall(target, context,
7091         argument_count, param_count_value);
7092   }
7093   UNREACHABLE();
7094   return NULL;
7095 }
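// Illustrative example: calling a known function that declares two formal
// parameters with exactly two arguments (arity == formal count) goes
// through NewPlainFunctionCall directly; calling it with one argument
// instead goes through NewArgumentAdaptorCall, which dispatches to the
// ArgumentsAdaptorTrampoline to fix up the arity.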
7096
7097
7098 class FunctionSorter {
7099  public:
7100   FunctionSorter(int index = 0, int ticks = 0, int size = 0)
7101       : index_(index), ticks_(ticks), size_(size) { }
7102
7103   int index() const { return index_; }
7104   int ticks() const { return ticks_; }
7105   int size() const { return size_; }
7106
7107  private:
7108   int index_;
7109   int ticks_;
7110   int size_;
7111 };
7112
7113
7114 inline bool operator<(const FunctionSorter& lhs, const FunctionSorter& rhs) {
7115   int diff = lhs.ticks() - rhs.ticks();
7116   if (diff != 0) return diff > 0;
7117   return lhs.size() < rhs.size();
7118 }
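// Illustrative note: with this ordering, the std::sort call in
// HandlePolymorphicCallNamed visits the hottest targets first (more
// profiler ticks), breaking ties in favour of smaller functions, which
// are the cheapest candidates to inline.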
7119
7120
7121 void HOptimizedGraphBuilder::HandlePolymorphicCallNamed(
7122     Call* expr,
7123     HValue* receiver,
7124     SmallMapList* types,
7125     Handle<String> name) {
7126   int argument_count = expr->arguments()->length() + 1;  // Includes receiver.
7127   FunctionSorter order[kMaxCallPolymorphism];
7128
7129   bool handle_smi = false;
7130   bool handled_string = false;
7131   int ordered_functions = 0;
7132
7133   for (int i = 0;
7134        i < types->length() && ordered_functions < kMaxCallPolymorphism;
7135        ++i) {
7136     PropertyAccessInfo info(this, LOAD, ToType(types->at(i)), name);
7137     if (info.CanAccessMonomorphic() &&
7138         info.lookup()->IsConstant() &&
7139         info.constant()->IsJSFunction()) {
7140       if (info.type()->Is(Type::String())) {
7141         if (handled_string) continue;
7142         handled_string = true;
7143       }
7144       Handle<JSFunction> target = Handle<JSFunction>::cast(info.constant());
7145       if (info.type()->Is(Type::Number())) {
7146         handle_smi = true;
7147       }
7148       expr->set_target(target);
7149       order[ordered_functions++] = FunctionSorter(
7150           i, target->shared()->profiler_ticks(), InliningAstSize(target));
7151     }
7152   }
7153
7154   std::sort(order, order + ordered_functions);
7155
7156   HBasicBlock* number_block = NULL;
7157   HBasicBlock* join = NULL;
7158   handled_string = false;
7159   int count = 0;
7160
7161   for (int fn = 0; fn < ordered_functions; ++fn) {
7162     int i = order[fn].index();
7163     PropertyAccessInfo info(this, LOAD, ToType(types->at(i)), name);
7164     if (info.type()->Is(Type::String())) {
7165       if (handled_string) continue;
7166       handled_string = true;
7167     }
7168     // Reloads the target.
7169     info.CanAccessMonomorphic();
7170     Handle<JSFunction> target = Handle<JSFunction>::cast(info.constant());
7171
7172     expr->set_target(target);
7173     if (count == 0) {
7174       // Only needed once.
7175       join = graph()->CreateBasicBlock();
7176       if (handle_smi) {
7177         HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
7178         HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
7179         number_block = graph()->CreateBasicBlock();
7180         FinishCurrentBlock(New<HIsSmiAndBranch>(
7181                 receiver, empty_smi_block, not_smi_block));
7182         GotoNoSimulate(empty_smi_block, number_block);
7183         set_current_block(not_smi_block);
7184       } else {
7185         BuildCheckHeapObject(receiver);
7186       }
7187     }
7188     ++count;
7189     HBasicBlock* if_true = graph()->CreateBasicBlock();
7190     HBasicBlock* if_false = graph()->CreateBasicBlock();
7191     HUnaryControlInstruction* compare;
7192
7193     Handle<Map> map = info.map();
7194     if (info.type()->Is(Type::Number())) {
7195       Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
7196       compare = New<HCompareMap>(receiver, heap_number_map, if_true, if_false);
7197     } else if (info.type()->Is(Type::String())) {
7198       compare = New<HIsStringAndBranch>(receiver, if_true, if_false);
7199     } else {
7200       compare = New<HCompareMap>(receiver, map, if_true, if_false);
7201     }
7202     FinishCurrentBlock(compare);
7203
7204     if (info.type()->Is(Type::Number())) {
7205       GotoNoSimulate(if_true, number_block);
7206       if_true = number_block;
7207     }
7208
7209     set_current_block(if_true);
7210
7211     AddCheckPrototypeMaps(info.holder(), map);
7212
7213     HValue* function = Add<HConstant>(expr->target());
7214     environment()->SetExpressionStackAt(0, function);
7215     Push(receiver);
7216     CHECK_ALIVE(VisitExpressions(expr->arguments()));
7217     bool needs_wrapping = NeedsWrappingFor(info.type(), target);
7218     bool try_inline = FLAG_polymorphic_inlining && !needs_wrapping;
7219     if (FLAG_trace_inlining && try_inline) {
7220       Handle<JSFunction> caller = current_info()->closure();
7221       SmartArrayPointer<char> caller_name =
7222           caller->shared()->DebugName()->ToCString();
7223       PrintF("Trying to inline the polymorphic call to %s from %s\n",
7224              name->ToCString().get(),
7225              caller_name.get());
7226     }
7227     if (try_inline && TryInlineCall(expr)) {
7228       // Trying to inline will signal that we should bail out of the
7229       // entire compilation by setting stack overflow on the visitor.
7230       if (HasStackOverflow()) return;
7231     } else {
7232       // Since HWrapReceiver currently cannot actually wrap numbers and strings,
7233       // use the regular CallFunctionStub for method calls to wrap the receiver.
7234       // TODO(verwaest): Support creation of value wrappers directly in
7235       // HWrapReceiver.
7236       HInstruction* call = needs_wrapping
7237           ? NewUncasted<HCallFunction>(
7238               function, argument_count, WRAP_AND_CALL)
7239           : BuildCallConstantFunction(target, argument_count);
7240       PushArgumentsFromEnvironment(argument_count);
7241       AddInstruction(call);
7242       Drop(1);  // Drop the function.
7243       if (!ast_context()->IsEffect()) Push(call);
7244     }
7245
7246     if (current_block() != NULL) Goto(join);
7247     set_current_block(if_false);
7248   }
7249
7250   // Finish up.  Unconditionally deoptimize if we've handled all the maps we
7251   // know about and do not want to handle ones we've never seen.  Otherwise
7252   // use a generic IC.
7253   if (ordered_functions == types->length() && FLAG_deoptimize_uncommon_cases) {
7254     FinishExitWithHardDeoptimization("Unknown map in polymorphic call");
7255   } else {
7256     Property* prop = expr->expression()->AsProperty();
7257     HInstruction* function = BuildNamedGeneric(
7258         LOAD, receiver, name, NULL, prop->IsUninitialized());
7259     AddInstruction(function);
7260     Push(function);
7261     AddSimulate(prop->LoadId(), REMOVABLE_SIMULATE);
7262
7263     environment()->SetExpressionStackAt(1, function);
7264     environment()->SetExpressionStackAt(0, receiver);
7265     CHECK_ALIVE(VisitExpressions(expr->arguments()));
7266
7267     CallFunctionFlags flags = receiver->type().IsJSObject()
7268         ? NO_CALL_FUNCTION_FLAGS : CALL_AS_METHOD;
7269     HInstruction* call = New<HCallFunction>(
7270         function, argument_count, flags);
7271
7272     PushArgumentsFromEnvironment(argument_count);
7273
7274     Drop(1);  // Function.
7275
7276     if (join != NULL) {
7277       AddInstruction(call);
7278       if (!ast_context()->IsEffect()) Push(call);
7279       Goto(join);
7280     } else {
7281       return ast_context()->ReturnInstruction(call, expr->id());
7282     }
7283   }
7284
7285   // We assume that control flow is always live after an expression, so the
7286   // join block must exist.  If it has predecessors it becomes the exit block
7287   // and we continue by adding instructions there; otherwise control flow ends.
7288   ASSERT(join != NULL);
7289   if (join->HasPredecessor()) {
7290     set_current_block(join);
7291     join->SetJoinId(expr->id());
7292     if (!ast_context()->IsEffect()) return ast_context()->ReturnValue(Pop());
7293   } else {
7294     set_current_block(NULL);
7295   }
7296 }
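// Editor's note: a rough, non-V8 analogue of the dispatch built above.  The
// graph is a chain of receiver checks (map compares, plus the smi and string
// special cases) that each guard a direct, possibly inlined call, followed by
// a final arm that either deoptimizes or falls back to a generic call.  The
// Map/Handler typedefs and Dispatch() below are invented for illustration.
#if 0  // Illustrative sketch only; excluded from compilation.
#include <cstddef>

typedef int Map;                        // Stand-in for a receiver map.
typedef int (*Handler)(int receiver);   // Stand-in for a monomorphic call.

int GenericCall(int receiver) { return receiver; }  // Generic call path.

int Dispatch(int receiver, Map receiver_map,
             const Map* known_maps, const Handler* handlers, size_t count,
             bool all_known_maps_handled) {
  for (size_t i = 0; i < count; ++i) {
    if (receiver_map == known_maps[i]) {
      return handlers[i](receiver);     // Checked, direct call.
    }
  }
  if (all_known_maps_handled) {
    // Corresponds to FinishExitWithHardDeoptimization: every map ever seen
    // is handled above, so an unknown map bails out to unoptimized code
    // instead of being handled here.
    return -1;
  }
  return GenericCall(receiver);         // Corresponds to the HCallFunction path.
}
#endif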
7297
7298
7299 void HOptimizedGraphBuilder::TraceInline(Handle<JSFunction> target,
7300                                          Handle<JSFunction> caller,
7301                                          const char* reason) {
7302   if (FLAG_trace_inlining) {
7303     SmartArrayPointer<char> target_name =
7304         target->shared()->DebugName()->ToCString();
7305     SmartArrayPointer<char> caller_name =
7306         caller->shared()->DebugName()->ToCString();
7307     if (reason == NULL) {
7308       PrintF("Inlined %s called from %s.\n", target_name.get(),
7309              caller_name.get());
7310     } else {
7311       PrintF("Did not inline %s called from %s (%s).\n",
7312              target_name.get(), caller_name.get(), reason);
7313     }
7314   }
7315 }
7316
7317
7318 static const int kNotInlinable = 1000000000;
7319
7320
7321 int HOptimizedGraphBuilder::InliningAstSize(Handle<JSFunction> target) {
7322   if (!FLAG_use_inlining) return kNotInlinable;
7323
7324   // Precondition: call is monomorphic and we have found a target with the
7325   // appropriate arity.
7326   Handle<JSFunction> caller = current_info()->closure();
7327   Handle<SharedFunctionInfo> target_shared(target->shared());
7328
7329   // Always inline builtins marked for inlining.
7330   if (target->IsBuiltin()) {
7331     return target_shared->inline_builtin() ? 0 : kNotInlinable;
7332   }
7333
7334   if (target_shared->IsApiFunction()) {
7335     TraceInline(target, caller, "target is api function");
7336     return kNotInlinable;
7337   }
7338
7339   // Do a quick check on source code length to avoid parsing large
7340   // inlining candidates.
7341   if (target_shared->SourceSize() >
7342       Min(FLAG_max_inlined_source_size, kUnlimitedMaxInlinedSourceSize)) {
7343     TraceInline(target, caller, "target text too big");
7344     return kNotInlinable;
7345   }
7346
7347   // Target must be inlineable.
7348   if (!target_shared->IsInlineable()) {
7349     TraceInline(target, caller, "target not inlineable");
7350     return kNotInlinable;
7351   }
7352   if (target_shared->dont_inline() || target_shared->dont_optimize()) {
7353     TraceInline(target, caller, "target contains unsupported syntax [early]");
7354     return kNotInlinable;
7355   }
7356
7357   int nodes_added = target_shared->ast_node_count();
7358   return nodes_added;
7359 }
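// Editor's note: a small sketch (not V8 code) of how the result above is
// consumed.  InliningAstSize() either returns the kNotInlinable sentinel or
// the candidate's AST node count; TryInline() then checks that count against
// a per-function limit and the cumulative number of nodes already inlined.
// The helper and its parameters below are hypothetical.
#if 0  // Illustrative sketch only; excluded from compilation.
static const int kNotInlinableSketch = 1000000000;

bool FitsInliningBudget(int candidate_ast_nodes,
                        int nodes_already_inlined,
                        int max_nodes_per_function,   // cf. FLAG_max_inlined_nodes
                        int max_nodes_cumulative) {   // cf. FLAG_max_inlined_nodes_cumulative
  if (candidate_ast_nodes == kNotInlinableSketch) return false;
  if (candidate_ast_nodes > max_nodes_per_function) return false;
  if (nodes_already_inlined > max_nodes_cumulative) return false;
  return true;
}
#endif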
7360
7361
7362 bool HOptimizedGraphBuilder::TryInline(Handle<JSFunction> target,
7363                                        int arguments_count,
7364                                        HValue* implicit_return_value,
7365                                        BailoutId ast_id,
7366                                        BailoutId return_id,
7367                                        InliningKind inlining_kind,
7368                                        HSourcePosition position) {
7369   int nodes_added = InliningAstSize(target);
7370   if (nodes_added == kNotInlinable) return false;
7371
7372   Handle<JSFunction> caller = current_info()->closure();
7373
7374   if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
7375     TraceInline(target, caller, "target AST is too large [early]");
7376     return false;
7377   }
7378
7379   // Don't inline deeper than the maximum number of inlining levels.
7380   HEnvironment* env = environment();
7381   int current_level = 1;
7382   while (env->outer() != NULL) {
7383     if (current_level == FLAG_max_inlining_levels) {
7384       TraceInline(target, caller, "inline depth limit reached");
7385       return false;
7386     }
7387     if (env->outer()->frame_type() == JS_FUNCTION) {
7388       current_level++;
7389     }
7390     env = env->outer();
7391   }
7392
7393   // Don't inline recursive functions.
7394   for (FunctionState* state = function_state();
7395        state != NULL;
7396        state = state->outer()) {
7397     if (*state->compilation_info()->closure() == *target) {
7398       TraceInline(target, caller, "target is recursive");
7399       return false;
7400     }
7401   }
7402
7403   // We don't want to add more than a certain number of nodes from inlining.
7404   if (inlined_count_ > Min(FLAG_max_inlined_nodes_cumulative,
7405                            kUnlimitedMaxInlinedNodesCumulative)) {
7406     TraceInline(target, caller, "cumulative AST node limit reached");
7407     return false;
7408   }
7409
7410   // Parse and allocate variables.
7411   CompilationInfo target_info(target, zone());
7412   Handle<SharedFunctionInfo> target_shared(target->shared());
7413   if (!Parser::Parse(&target_info) || !Scope::Analyze(&target_info)) {
7414     if (target_info.isolate()->has_pending_exception()) {
7415       // Parse or scope error, never optimize this function.
7416       SetStackOverflow();
7417       target_shared->DisableOptimization(kParseScopeError);
7418     }
7419     TraceInline(target, caller, "parse failure");
7420     return false;
7421   }
7422
7423   if (target_info.scope()->num_heap_slots() > 0) {
7424     TraceInline(target, caller, "target has context-allocated variables");
7425     return false;
7426   }
7427   FunctionLiteral* function = target_info.function();
7428
7429   // The following conditions must be checked again after re-parsing, because
7430   // earlier the information might not have been complete due to lazy parsing.
7431   nodes_added = function->ast_node_count();
7432   if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
7433     TraceInline(target, caller, "target AST is too large [late]");
7434     return false;
7435   }
7436   AstProperties::Flags* flags(function->flags());
7437   if (flags->Contains(kDontInline) || function->dont_optimize()) {
7438     TraceInline(target, caller, "target contains unsupported syntax [late]");
7439     return false;
7440   }
7441
7442   // If the function uses the arguments object, check that inlining of
7443   // functions with an arguments object is enabled and that the arguments
7444   // variable is stack allocated.
7445   if (function->scope()->arguments() != NULL) {
7446     if (!FLAG_inline_arguments) {
7447       TraceInline(target, caller, "target uses arguments object");
7448       return false;
7449     }
7450
7451     if (!function->scope()->arguments()->IsStackAllocated()) {
7452       TraceInline(target,
7453                   caller,
7454                   "target uses non-stackallocated arguments object");
7455       return false;
7456     }
7457   }
7458
7459   // All declarations must be inlineable.
7460   ZoneList<Declaration*>* decls = target_info.scope()->declarations();
7461   int decl_count = decls->length();
7462   for (int i = 0; i < decl_count; ++i) {
7463     if (!decls->at(i)->IsInlineable()) {
7464       TraceInline(target, caller, "target has non-trivial declaration");
7465       return false;
7466     }
7467   }
7468
7469   // Generate the deoptimization data for the unoptimized version of
7470   // the target function if we don't already have it.
7471   if (!target_shared->has_deoptimization_support()) {
7472     // Note that we compile here using the same AST that we will use for
7473     // generating the optimized inline code.
7474     target_info.EnableDeoptimizationSupport();
7475     if (!FullCodeGenerator::MakeCode(&target_info)) {
7476       TraceInline(target, caller, "could not generate deoptimization info");
7477       return false;
7478     }
7479     if (target_shared->scope_info() == ScopeInfo::Empty(isolate())) {
7480       // The scope info might not have been set if a lazily compiled
7481       // function is inlined before being called for the first time.
7482       Handle<ScopeInfo> target_scope_info =
7483           ScopeInfo::Create(target_info.scope(), zone());
7484       target_shared->set_scope_info(*target_scope_info);
7485     }
7486     target_shared->EnableDeoptimizationSupport(*target_info.code());
7487     target_shared->set_feedback_vector(*target_info.feedback_vector());
7488     Compiler::RecordFunctionCompilation(Logger::FUNCTION_TAG,
7489                                         &target_info,
7490                                         target_shared);
7491   }
7492
7493   // ----------------------------------------------------------------
7494   // After this point, we've made a decision to inline this function (so
7495   // TryInline should always return true).
7496
7497   // Type-check the inlined function.
7498   ASSERT(target_shared->has_deoptimization_support());
7499   AstTyper::Run(&target_info);
7500
7501   int function_id = graph()->TraceInlinedFunction(target_shared, position);
7502
7503   // Save the pending call context. Set up new one for the inlined function.
7504   // The function state is new-allocated because we need to delete it
7505   // in two different places.
7506   FunctionState* target_state = new FunctionState(
7507       this, &target_info, inlining_kind, function_id);
7508
7509   HConstant* undefined = graph()->GetConstantUndefined();
7510
7511   HEnvironment* inner_env =
7512       environment()->CopyForInlining(target,
7513                                      arguments_count,
7514                                      function,
7515                                      undefined,
7516                                      function_state()->inlining_kind());
7517
7518   HConstant* context = Add<HConstant>(Handle<Context>(target->context()));
7519   inner_env->BindContext(context);
7520
7521   HArgumentsObject* arguments_object = NULL;
7522
7523   // If the function uses the arguments object, create and bind one; also copy
7524   // the current argument values to use them for materialization.
7525   if (function->scope()->arguments() != NULL) {
7526     ASSERT(function->scope()->arguments()->IsStackAllocated());
7527     HEnvironment* arguments_env = inner_env->arguments_environment();
7528     int arguments_count = arguments_env->parameter_count();
7529     arguments_object = Add<HArgumentsObject>(arguments_count);
7530     inner_env->Bind(function->scope()->arguments(), arguments_object);
7531     for (int i = 0; i < arguments_count; i++) {
7532       arguments_object->AddArgument(arguments_env->Lookup(i), zone());
7533     }
7534   }
7535
7536   // Capture the state before invoking the inlined function for deopt in the
7537   // inlined function. This simulate has no bailout-id since it's not directly
7538   // reachable for deopt, and is only used to capture the state. If the simulate
7539   // becomes reachable by merging, the ast id of the simulate merged into it is
7540   // adopted.
7541   Add<HSimulate>(BailoutId::None());
7542
7543   current_block()->UpdateEnvironment(inner_env);
7544
7545   HEnterInlined* enter_inlined =
7546       Add<HEnterInlined>(return_id, target, arguments_count, function,
7547                          function_state()->inlining_kind(),
7548                          function->scope()->arguments(),
7549                          arguments_object);
7550   function_state()->set_entry(enter_inlined);
7551
7552   VisitDeclarations(target_info.scope()->declarations());
7553   VisitStatements(function->body());
7554   if (HasStackOverflow()) {
7555     // Bail out if the inlined function did, as we cannot residualize a call
7556     // instead.
7557     TraceInline(target, caller, "inline graph construction failed");
7558     target_shared->DisableOptimization(kInliningBailedOut);
7559     inline_bailout_ = true;
7560     delete target_state;
7561     return true;
7562   }
7563
7564   // Update inlined nodes count.
7565   inlined_count_ += nodes_added;
7566
7567   Handle<Code> unoptimized_code(target_shared->code());
7568   ASSERT(unoptimized_code->kind() == Code::FUNCTION);
7569   Handle<TypeFeedbackInfo> type_info(
7570       TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
7571   graph()->update_type_change_checksum(type_info->own_type_change_checksum());
7572
7573   TraceInline(target, caller, NULL);
7574
7575   if (current_block() != NULL) {
7576     FunctionState* state = function_state();
7577     if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
7578       // Falling off the end of an inlined construct call. In a test context the
7579       // return value always evaluates to true; in a value context it is the
7580       // newly allocated receiver.
7581       if (call_context()->IsTest()) {
7582         Goto(inlined_test_context()->if_true(), state);
7583       } else if (call_context()->IsEffect()) {
7584         Goto(function_return(), state);
7585       } else {
7586         ASSERT(call_context()->IsValue());
7587         AddLeaveInlined(implicit_return_value, state);
7588       }
7589     } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
7590       // Falling off the end of an inlined setter call. The returned value is
7591       // never used; the value of an assignment is always the value of the RHS
7592       // of the assignment.
7593       if (call_context()->IsTest()) {
7594         inlined_test_context()->ReturnValue(implicit_return_value);
7595       } else if (call_context()->IsEffect()) {
7596         Goto(function_return(), state);
7597       } else {
7598         ASSERT(call_context()->IsValue());
7599         AddLeaveInlined(implicit_return_value, state);
7600       }
7601     } else {
7602       // Falling off the end of a normal inlined function. This basically means
7603       // returning undefined.
7604       if (call_context()->IsTest()) {
7605         Goto(inlined_test_context()->if_false(), state);
7606       } else if (call_context()->IsEffect()) {
7607         Goto(function_return(), state);
7608       } else {
7609         ASSERT(call_context()->IsValue());
7610         AddLeaveInlined(undefined, state);
7611       }
7612     }
7613   }
7614
7615   // Fix up the function exits.
7616   if (inlined_test_context() != NULL) {
7617     HBasicBlock* if_true = inlined_test_context()->if_true();
7618     HBasicBlock* if_false = inlined_test_context()->if_false();
7619
7620     HEnterInlined* entry = function_state()->entry();
7621
7622     // Pop the return test context from the expression context stack.
7623     ASSERT(ast_context() == inlined_test_context());
7624     ClearInlinedTestContext();
7625     delete target_state;
7626
7627     // Forward to the real test context.
7628     if (if_true->HasPredecessor()) {
7629       entry->RegisterReturnTarget(if_true, zone());
7630       if_true->SetJoinId(ast_id);
7631       HBasicBlock* true_target = TestContext::cast(ast_context())->if_true();
7632       Goto(if_true, true_target, function_state());
7633     }
7634     if (if_false->HasPredecessor()) {
7635       entry->RegisterReturnTarget(if_false, zone());
7636       if_false->SetJoinId(ast_id);
7637       HBasicBlock* false_target = TestContext::cast(ast_context())->if_false();
7638       Goto(if_false, false_target, function_state());
7639     }
7640     set_current_block(NULL);
7641     return true;
7642
7643   } else if (function_return()->HasPredecessor()) {
7644     function_state()->entry()->RegisterReturnTarget(function_return(), zone());
7645     function_return()->SetJoinId(ast_id);
7646     set_current_block(function_return());
7647   } else {
7648     set_current_block(NULL);
7649   }
7650   delete target_state;
7651   return true;
7652 }
7653
7654
7655 bool HOptimizedGraphBuilder::TryInlineCall(Call* expr) {
7656   return TryInline(expr->target(),
7657                    expr->arguments()->length(),
7658                    NULL,
7659                    expr->id(),
7660                    expr->ReturnId(),
7661                    NORMAL_RETURN,
7662                    ScriptPositionToSourcePosition(expr->position()));
7663 }
7664
7665
7666 bool HOptimizedGraphBuilder::TryInlineConstruct(CallNew* expr,
7667                                                 HValue* implicit_return_value) {
7668   return TryInline(expr->target(),
7669                    expr->arguments()->length(),
7670                    implicit_return_value,
7671                    expr->id(),
7672                    expr->ReturnId(),
7673                    CONSTRUCT_CALL_RETURN,
7674                    ScriptPositionToSourcePosition(expr->position()));
7675 }
7676
7677
7678 bool HOptimizedGraphBuilder::TryInlineGetter(Handle<JSFunction> getter,
7679                                              Handle<Map> receiver_map,
7680                                              BailoutId ast_id,
7681                                              BailoutId return_id) {
7682   if (TryInlineApiGetter(getter, receiver_map, ast_id)) return true;
7683   return TryInline(getter,
7684                    0,
7685                    NULL,
7686                    ast_id,
7687                    return_id,
7688                    GETTER_CALL_RETURN,
7689                    source_position());
7690 }
7691
7692
7693 bool HOptimizedGraphBuilder::TryInlineSetter(Handle<JSFunction> setter,
7694                                              Handle<Map> receiver_map,
7695                                              BailoutId id,
7696                                              BailoutId assignment_id,
7697                                              HValue* implicit_return_value) {
7698   if (TryInlineApiSetter(setter, receiver_map, id)) return true;
7699   return TryInline(setter,
7700                    1,
7701                    implicit_return_value,
7702                    id, assignment_id,
7703                    SETTER_CALL_RETURN,
7704                    source_position());
7705 }
7706
7707
7708 bool HOptimizedGraphBuilder::TryInlineApply(Handle<JSFunction> function,
7709                                             Call* expr,
7710                                             int arguments_count) {
7711   return TryInline(function,
7712                    arguments_count,
7713                    NULL,
7714                    expr->id(),
7715                    expr->ReturnId(),
7716                    NORMAL_RETURN,
7717                    ScriptPositionToSourcePosition(expr->position()));
7718 }
7719
7720
7721 bool HOptimizedGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr) {
7722   if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
7723   BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
7724   switch (id) {
7725     case kMathExp:
7726       if (!FLAG_fast_math) break;
7727       // Fall through if FLAG_fast_math.
7728     case kMathRound:
7729     case kMathFloor:
7730     case kMathAbs:
7731     case kMathSqrt:
7732     case kMathLog:
7733     case kMathClz32:
7734       if (expr->arguments()->length() == 1) {
7735         HValue* argument = Pop();
7736         Drop(2);  // Receiver and function.
7737         HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
7738         ast_context()->ReturnInstruction(op, expr->id());
7739         return true;
7740       }
7741       break;
7742     case kMathImul:
7743       if (expr->arguments()->length() == 2) {
7744         HValue* right = Pop();
7745         HValue* left = Pop();
7746         Drop(2);  // Receiver and function.
7747         HInstruction* op = HMul::NewImul(zone(), context(), left, right);
7748         ast_context()->ReturnInstruction(op, expr->id());
7749         return true;
7750       }
7751       break;
7752 #define SIMD_NULLARY_OPERATION_CASE_ITEM(p1, p2, name, p4)                     \
7753     case k##name:
7754 SIMD_NULLARY_OPERATIONS(SIMD_NULLARY_OPERATION_CASE_ITEM)
7755 #undef SIMD_NULLARY_OPERATION_CASE_ITEM
7756       if (CpuFeatures::SupportsSIMD128InCrankshaft() &&
7757           expr->arguments()->length() == 0) {
7758         Drop(2);  // Receiver and function.
7759         HInstruction* op = NewUncasted<HNullarySIMDOperation>(id);
7760         ast_context()->ReturnInstruction(op, expr->id());
7761         return true;
7762       }
7763       break;
7764 #define SIMD_UNARY_OPERATION_CASE_ITEM(p1, p2, name, p4, p5)                   \
7765     case k##name:
7766 SIMD_UNARY_OPERATIONS(SIMD_UNARY_OPERATION_CASE_ITEM)
7767 #undef SIMD_UNARY_OPERATION_CASE_ITEM
7768       if (CpuFeatures::SupportsSIMD128InCrankshaft() &&
7769           expr->arguments()->length() == 1) {
7770         HValue* argument = Pop();
7771         Drop(2);  // Receiver and function.
7772         HInstruction* op = NewUncasted<HUnarySIMDOperation>(argument, id);
7773         ast_context()->ReturnInstruction(op, expr->id());
7774         return true;
7775       }
7776       break;
7777 #define SIMD_BINARY_OPERATION_CASE_ITEM(p1, p2, name, p4, p5, p6)              \
7778     case k##name:
7779 SIMD_BINARY_OPERATIONS(SIMD_BINARY_OPERATION_CASE_ITEM)
7780 #undef SIMD_BINARY_OPERATION_CASE_ITEM
7781       if (CpuFeatures::SupportsSIMD128InCrankshaft() &&
7782           expr->arguments()->length() == 2) {
7783         HValue* right = Pop();
7784         HValue* left = Pop();
7785         Drop(2);  // Receiver and function.
7786         HInstruction* op = NewUncasted<HBinarySIMDOperation>(left, right, id);
7787         ast_context()->ReturnInstruction(op, expr->id());
7788         return true;
7789       }
7790       break;
7791 #define SIMD_TERNARY_OPERATION_CASE_ITEM(p1, p2, name, p4, p5, p6, p7)         \
7792     case k##name:
7793 SIMD_TERNARY_OPERATIONS(SIMD_TERNARY_OPERATION_CASE_ITEM)
7794 #undef SIMD_TERNARY_OPERATION_CASE_ITEM
7795       if (CpuFeatures::SupportsSIMD128InCrankshaft() &&
7796           expr->arguments()->length() == 3) {
7797         HValue* right = Pop();
7798         HValue* left = Pop();
7799         HValue* value = Pop();
7800         Drop(2);  // Receiver and function.
7801         HInstruction* op =
7802             NewUncasted<HTernarySIMDOperation>(value, left, right, id);
7803         ast_context()->ReturnInstruction(op, expr->id());
7804         return true;
7805       }
7806       break;
7807 #define SIMD_QUARTERNARY_OPERATION_CASE_ITEM(p1, p2, name, p4, p5, p6, p7, p8) \
7808     case k##name:
7809 SIMD_QUARTERNARY_OPERATIONS(SIMD_QUARTERNARY_OPERATION_CASE_ITEM)
7810 #undef SIMD_QUARTERNARY_OPERATION_CASE_ITEM
7811       if (CpuFeatures::SupportsSIMD128InCrankshaft() &&
7812           expr->arguments()->length() == 4) {
7813         HValue* w = Pop();
7814         HValue* z = Pop();
7815         HValue* y = Pop();
7816         HValue* x = Pop();
7817         Drop(2);  // Receiver and function.
7818         HInstruction* op =
7819             NewUncasted<HQuarternarySIMDOperation>(x, y, z, w, id);
7820         ast_context()->ReturnInstruction(op, expr->id());
7821         return true;
7822       }
7823       break;
7824     default:
7825       // Not supported for inlining yet.
7826       break;
7827   }
7828   return false;
7829 }
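// Editor's note: the SIMD_*_OPERATION_CASE_ITEM macros used in the switch
// above are X-macros.  Each SIMD_*_OPERATIONS list invokes a caller-supplied
// macro once per operation (the real lists pass several parameters, of which
// the case-item macros use only the name), and each case-item macro expands
// to a `case k<Name>:` label, so one switch arm covers every operation in the
// list.  The two-entry list below is a made-up example of the same pattern,
// not one of V8's lists.
#if 0  // Illustrative sketch only; excluded from compilation.
#define MY_OPS(V) \
  V(OpAdd)        \
  V(OpMul)

enum MyOpId {
#define DECLARE_ID(name) k##name,
  MY_OPS(DECLARE_ID)
#undef DECLARE_ID
  kLastMyOp
};

bool IsMyOp(MyOpId id) {
  switch (id) {
#define MY_OP_CASE_ITEM(name) case k##name:
    MY_OPS(MY_OP_CASE_ITEM)
#undef MY_OP_CASE_ITEM
      return true;   // One arm handles every operation in MY_OPS.
    default:
      return false;
  }
}
#endif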
7830
7831
7832 bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
7833     Call* expr,
7834     HValue* receiver,
7835     Handle<Map> receiver_map) {
7836   // Try to inline calls like Math.* as operations in the calling function.
7837   if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
7838   BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
7839   int argument_count = expr->arguments()->length() + 1;  // Plus receiver.
7840   switch (id) {
7841     case kStringCharCodeAt:
7842     case kStringCharAt:
7843       if (argument_count == 2) {
7844         HValue* index = Pop();
7845         HValue* string = Pop();
7846         Drop(1);  // Function.
7847         HInstruction* char_code =
7848             BuildStringCharCodeAt(string, index);
7849         if (id == kStringCharCodeAt) {
7850           ast_context()->ReturnInstruction(char_code, expr->id());
7851           return true;
7852         }
7853         AddInstruction(char_code);
7854         HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
7855         ast_context()->ReturnInstruction(result, expr->id());
7856         return true;
7857       }
7858       break;
7859     case kStringFromCharCode:
7860       if (argument_count == 2) {
7861         HValue* argument = Pop();
7862         Drop(2);  // Receiver and function.
7863         HInstruction* result = NewUncasted<HStringCharFromCode>(argument);
7864         ast_context()->ReturnInstruction(result, expr->id());
7865         return true;
7866       }
7867       break;
7868     case kMathExp:
7869       if (!FLAG_fast_math) break;
7870       // Fall through if FLAG_fast_math.
7871     case kMathRound:
7872     case kMathFloor:
7873     case kMathAbs:
7874     case kMathSqrt:
7875     case kMathLog:
7876     case kMathClz32:
7877       if (argument_count == 2) {
7878         HValue* argument = Pop();
7879         Drop(2);  // Receiver and function.
7880         HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
7881         ast_context()->ReturnInstruction(op, expr->id());
7882         return true;
7883       }
7884       break;
7885     case kMathPow:
7886       if (argument_count == 3) {
7887         HValue* right = Pop();
7888         HValue* left = Pop();
7889         Drop(2);  // Receiver and function.
7890         HInstruction* result = NULL;
7891         // Use sqrt() if exponent is 0.5 or -0.5.
7892         if (right->IsConstant() && HConstant::cast(right)->HasDoubleValue()) {
7893           double exponent = HConstant::cast(right)->DoubleValue();
7894           if (exponent == 0.5) {
7895             result = NewUncasted<HUnaryMathOperation>(left, kMathPowHalf);
7896           } else if (exponent == -0.5) {
7897             HValue* one = graph()->GetConstant1();
7898             HInstruction* sqrt = AddUncasted<HUnaryMathOperation>(
7899                 left, kMathPowHalf);
7900             // MathPowHalf doesn't have side effects so there's no need for
7901             // an environment simulation here.
7902             ASSERT(!sqrt->HasObservableSideEffects());
7903             result = NewUncasted<HDiv>(one, sqrt);
7904           } else if (exponent == 2.0) {
7905             result = NewUncasted<HMul>(left, left);
7906           }
7907         }
7908
7909         if (result == NULL) {
7910           result = NewUncasted<HPower>(left, right);
7911         }
7912         ast_context()->ReturnInstruction(result, expr->id());
7913         return true;
7914       }
7915       break;
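// Editor's note: a plain-double sketch (not V8 code) of the constant-exponent
// strength reduction above: 0.5 becomes sqrt, -0.5 becomes 1/sqrt, 2.0
// becomes x*x, and anything else falls back to a general power operation.
// This deliberately ignores the JavaScript edge cases (e.g. Math.pow(-0, 0.5)
// and infinities) that the real kMathPowHalf/HPower instructions respect.
#if 0  // Illustrative sketch only; excluded from compilation.
#include <cmath>

double PowWithConstantExponent(double base, double exponent) {
  if (exponent == 0.5) return std::sqrt(base);
  if (exponent == -0.5) return 1.0 / std::sqrt(base);
  if (exponent == 2.0) return base * base;
  return std::pow(base, exponent);
}
#endif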
7916     case kMathMax:
7917     case kMathMin:
7918       if (argument_count == 3) {
7919         HValue* right = Pop();
7920         HValue* left = Pop();
7921         Drop(2);  // Receiver and function.
7922         HMathMinMax::Operation op = (id == kMathMin) ? HMathMinMax::kMathMin
7923                                                      : HMathMinMax::kMathMax;
7924         HInstruction* result = NewUncasted<HMathMinMax>(left, right, op);
7925         ast_context()->ReturnInstruction(result, expr->id());
7926         return true;
7927       }
7928       break;
7929     case kMathImul:
7930       if (argument_count == 3) {
7931         HValue* right = Pop();
7932         HValue* left = Pop();
7933         Drop(2);  // Receiver and function.
7934         HInstruction* result = HMul::NewImul(zone(), context(), left, right);
7935         ast_context()->ReturnInstruction(result, expr->id());
7936         return true;
7937       }
7938       break;
7939     case kArrayPop: {
7940       if (receiver_map.is_null()) return false;
7941       if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
7942       ElementsKind elements_kind = receiver_map->elements_kind();
7943       if (!IsFastElementsKind(elements_kind)) return false;
7944       if (receiver_map->is_observed()) return false;
7945       ASSERT(receiver_map->is_extensible());
7946
7947       Drop(expr->arguments()->length());
7948       HValue* result;
7949       HValue* reduced_length;
7950       HValue* receiver = Pop();
7951
7952       HValue* checked_object = AddCheckMap(receiver, receiver_map);
7953       HValue* length = Add<HLoadNamedField>(
7954           checked_object, static_cast<HValue*>(NULL),
7955           HObjectAccess::ForArrayLength(elements_kind));
7956
7957       Drop(1);  // Function.
7958
7959       { NoObservableSideEffectsScope scope(this);
7960         IfBuilder length_checker(this);
7961
7962         HValue* bounds_check = length_checker.If<HCompareNumericAndBranch>(
7963             length, graph()->GetConstant0(), Token::EQ);
7964         length_checker.Then();
7965
7966         if (!ast_context()->IsEffect()) Push(graph()->GetConstantUndefined());
7967
7968         length_checker.Else();
7969         HValue* elements = AddLoadElements(checked_object);
7970         // Ensure that we aren't popping from a copy-on-write array.
7971         if (IsFastSmiOrObjectElementsKind(elements_kind)) {
7972           elements = BuildCopyElementsOnWrite(checked_object, elements,
7973                                               elements_kind, length);
7974         }
7975         reduced_length = AddUncasted<HSub>(length, graph()->GetConstant1());
7976         result = AddElementAccess(elements, reduced_length, NULL,
7977                                   bounds_check, elements_kind, LOAD);
7978         Factory* factory = isolate()->factory();
7979         double nan_double = FixedDoubleArray::hole_nan_as_double();
7980         HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
7981             ? Add<HConstant>(factory->the_hole_value())
7982             : Add<HConstant>(nan_double);
7983         if (IsFastSmiOrObjectElementsKind(elements_kind)) {
7984           elements_kind = FAST_HOLEY_ELEMENTS;
7985         }
7986         AddElementAccess(
7987             elements, reduced_length, hole, bounds_check, elements_kind, STORE);
7988         Add<HStoreNamedField>(
7989             checked_object, HObjectAccess::ForArrayLength(elements_kind),
7990             reduced_length, STORE_TO_INITIALIZED_ENTRY);
7991
7992         if (!ast_context()->IsEffect()) Push(result);
7993
7994         length_checker.End();
7995       }
7996       result = ast_context()->IsEffect() ? graph()->GetConstant0() : Top();
7997       Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
7998       if (!ast_context()->IsEffect()) Drop(1);
7999
8000       ast_context()->ReturnValue(result);
8001       return true;
8002     }
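// Editor's note: the observable behaviour of the inlined Array.prototype.pop
// above, sketched (not V8 code) with std::vector<double> standing in for a
// fast-elements backing store.  The real graph additionally writes a hole
// marker (the-hole or the hole NaN) into the vacated slot, may switch to a
// holey elements kind, and copies copy-on-write backing stores first.
#if 0  // Illustrative sketch only; excluded from compilation.
#include <vector>

bool VectorPop(std::vector<double>* elements, double* result) {
  if (elements->empty()) return false;  // JS pop() on length 0 yields undefined.
  *result = elements->back();           // Load element at length - 1.
  elements->pop_back();                 // Store hole and shrink the length in V8.
  return true;
}
#endif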
8003     case kArrayPush: {
8004       if (receiver_map.is_null()) return false;
8005       if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
8006       ElementsKind elements_kind = receiver_map->elements_kind();
8007       if (!IsFastElementsKind(elements_kind)) return false;
8008       if (receiver_map->is_observed()) return false;
8009       ASSERT(receiver_map->is_extensible());
8010
8011       // If there may be elements accessors in the prototype chain, the fast
8012       // inlined version can't be used.
8013       if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;
8014       // Even if there are currently no elements accessors on the prototype
8015       // chain, that does not mean there won't be any later. Install a full
8016       // prototype chain check to trap element accessors being installed on
8017       // the prototype chain, which would cause elements to go to dictionary
8018       // mode and result in a map change.
8019       Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
8020       BuildCheckPrototypeMaps(prototype, Handle<JSObject>());
8021
8022       const int argc = expr->arguments()->length();
8023       if (argc != 1) return false;
8024
8025       HValue* value_to_push = Pop();
8026       HValue* array = Pop();
8027       Drop(1);  // Drop function.
8028
8029       HInstruction* new_size = NULL;
8030       HValue* length = NULL;
8031
8032       {
8033         NoObservableSideEffectsScope scope(this);
8034
8035         length = Add<HLoadNamedField>(array, static_cast<HValue*>(NULL),
8036           HObjectAccess::ForArrayLength(elements_kind));
8037
8038         new_size = AddUncasted<HAdd>(length, graph()->GetConstant1());
8039
8040         bool is_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
8041         BuildUncheckedMonomorphicElementAccess(array, length,
8042                                                value_to_push, is_array,
8043                                                elements_kind, STORE,
8044                                                NEVER_RETURN_HOLE,
8045                                                STORE_AND_GROW_NO_TRANSITION);
8046
8047         if (!ast_context()->IsEffect()) Push(new_size);
8048         Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
8049         if (!ast_context()->IsEffect()) Drop(1);
8050       }
8051
8052       ast_context()->ReturnValue(new_size);
8053       return true;
8054     }
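// Editor's note: the observable behaviour of the single-argument
// Array.prototype.push inlined above, again sketched (not V8 code) with
// std::vector<double> as the backing store.  The STORE_AND_GROW_NO_TRANSITION
// access corresponds to the append, and push() returns the new length.
#if 0  // Illustrative sketch only; excluded from compilation.
#include <cstddef>
#include <vector>

std::size_t VectorPush(std::vector<double>* elements, double value) {
  elements->push_back(value);   // Grow-capable store at index == old length.
  return elements->size();      // The new length is the call's result.
}
#endif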
8055 #define SIMD_NULLARY_OPERATION_CASE_ITEM(p1, p2, name, p4)                     \
8056     case k##name:
8057 SIMD_NULLARY_OPERATIONS(SIMD_NULLARY_OPERATION_CASE_ITEM)
8058 #undef SIMD_NULLARY_OPERATION_CASE_ITEM
8059       if (CpuFeatures::SupportsSIMD128InCrankshaft() && argument_count == 1) {
8060         Drop(2);  // Receiver and function.
8061         HInstruction* op = NewUncasted<HNullarySIMDOperation>(id);
8062         ast_context()->ReturnInstruction(op, expr->id());
8063         return true;
8064       }
8065       break;
8066 #define SIMD_UNARY_OPERATION_CASE_ITEM(p1, p2, name, p4, p5)                   \
8067     case k##name:
8068 SIMD_UNARY_OPERATIONS(SIMD_UNARY_OPERATION_CASE_ITEM)
8069 #undef SIMD_UNARY_OPERATION_CASE_ITEM
8070       if (CpuFeatures::SupportsSIMD128InCrankshaft() && argument_count == 2) {
8071         HValue* argument = Pop();
8072         Drop(2);  // Receiver and function.
8073         HInstruction* op = NewUncasted<HUnarySIMDOperation>(argument, id);
8074         ast_context()->ReturnInstruction(op, expr->id());
8075         return true;
8076       }
8077       break;
8078 #define SIMD_BINARY_OPERATION_CASE_ITEM(p1, p2, name, p4, p5, p6)              \
8079     case k##name:
8080 SIMD_BINARY_OPERATIONS(SIMD_BINARY_OPERATION_CASE_ITEM)
8081 #undef SIMD_BINARY_OPERATION_CASE_ITEM
8082       if (CpuFeatures::SupportsSIMD128InCrankshaft() && argument_count == 3) {
8083         HValue* right = Pop();
8084         HValue* left = Pop();
8085         Drop(2);  // Receiver and function.
8086         HInstruction* op = NewUncasted<HBinarySIMDOperation>(left, right, id);
8087         ast_context()->ReturnInstruction(op, expr->id());
8088         return true;
8089       }
8090       break;
8091 #define SIMD_TERNARY_OPERATION_CASE_ITEM(p1, p2, name, p4, p5, p6, p7)         \
8092     case k##name:
8093 SIMD_TERNARY_OPERATIONS(SIMD_TERNARY_OPERATION_CASE_ITEM)
8094 #undef SIMD_TERNARY_OPERATION_CASE_ITEM
8095       if (CpuFeatures::SupportsSIMD128InCrankshaft() && argument_count == 4) {
8096         HValue* right = Pop();
8097         HValue* left = Pop();
8098         HValue* value = Pop();
8099         Drop(2);  // Receiver and function.
8100         HInstruction* op =
8101             NewUncasted<HTernarySIMDOperation>(value, left, right, id);
8102         ast_context()->ReturnInstruction(op, expr->id());
8103         return true;
8104       }
8105       break;
8106 #define SIMD_QUARTERNARY_OPERATION_CASE_ITEM(p1, p2, name, p4, p5, p6, p7, p8) \
8107     case k##name:
8108 SIMD_QUARTERNARY_OPERATIONS(SIMD_QUARTERNARY_OPERATION_CASE_ITEM)
8109 #undef SIMD_QUARTERNARY_OPERATION_CASE_ITEM
8110       if (CpuFeatures::SupportsSIMD128InCrankshaft() && argument_count == 5) {
8111         HValue* w = Pop();
8112         HValue* z = Pop();
8113         HValue* y = Pop();
8114         HValue* x = Pop();
8115         Drop(2);  // Receiver and function.
8116         HValue* context = environment()->context();
8117         HInstruction* op =
8118             HQuarternarySIMDOperation::New(zone(), context, x, y, z, w, id);
8119         ast_context()->ReturnInstruction(op, expr->id());
8120         return true;
8121       }
8122       break;
8123     case kFloat32x4ArrayGetAt:
8124     case kFloat64x2ArrayGetAt:
8125     case kInt32x4ArrayGetAt:
8126       if (CpuFeatures::SupportsSIMD128InCrankshaft() && argument_count == 2) {
8127         HValue* key = Pop();
8128         HValue* typed32x4_array = Pop();
8129         ASSERT(typed32x4_array == receiver);
8130         Drop(1);  // Drop function.
8131         HInstruction* instr = BuildUncheckedMonomorphicElementAccess(
8132             typed32x4_array, key, NULL,
8133             receiver_map->instance_type() == JS_ARRAY_TYPE,
8134             receiver_map->elements_kind(),
8135             LOAD,  // is_store.
8136             NEVER_RETURN_HOLE,  // load_mode.
8137             STANDARD_STORE);
8138         ast_context()->ReturnValue(instr);
8139         return true;
8140       }
8141       break;
8142     case kFloat32x4ArraySetAt:
8143     case kFloat64x2ArraySetAt:
8144     case kInt32x4ArraySetAt:
8145       if (CpuFeatures::SupportsSIMD128InCrankshaft() && argument_count == 3) {
8146         HValue* value = Pop();
8147         HValue* key = Pop();
8148         HValue* typed32x4_array = Pop();
8149         ASSERT(typed32x4_array == receiver);
8150         Drop(1);  // Drop function.
8151         // TODO(haitao): add STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS.
8152         KeyedAccessStoreMode store_mode = STANDARD_STORE;
8153         BuildUncheckedMonomorphicElementAccess(
8154             typed32x4_array, key, value,
8155             receiver_map->instance_type() == JS_ARRAY_TYPE,
8156             receiver_map->elements_kind(),
8157             STORE,  // is_store.
8158             NEVER_RETURN_HOLE,  // load_mode.
8159             store_mode);
8160         Push(value);
8161         Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
8162         ast_context()->ReturnValue(Pop());
8163         return true;
8164       }
8165       break;
8166     default:
8167       // Not yet supported for inlining.
8168       break;
8169   }
8170   return false;
8171 }
8172
8173
8174 bool HOptimizedGraphBuilder::TryInlineApiFunctionCall(Call* expr,
8175                                                       HValue* receiver) {
8176   Handle<JSFunction> function = expr->target();
8177   int argc = expr->arguments()->length();
8178   SmallMapList receiver_maps;
8179   return TryInlineApiCall(function,
8180                           receiver,
8181                           &receiver_maps,
8182                           argc,
8183                           expr->id(),
8184                           kCallApiFunction);
8185 }
8186
8187
8188 bool HOptimizedGraphBuilder::TryInlineApiMethodCall(
8189     Call* expr,
8190     HValue* receiver,
8191     SmallMapList* receiver_maps) {
8192   Handle<JSFunction> function = expr->target();
8193   int argc = expr->arguments()->length();
8194   return TryInlineApiCall(function,
8195                           receiver,
8196                           receiver_maps,
8197                           argc,
8198                           expr->id(),
8199                           kCallApiMethod);
8200 }
8201
8202
8203 bool HOptimizedGraphBuilder::TryInlineApiGetter(Handle<JSFunction> function,
8204                                                 Handle<Map> receiver_map,
8205                                                 BailoutId ast_id) {
8206   SmallMapList receiver_maps(1, zone());
8207   receiver_maps.Add(receiver_map, zone());
8208   return TryInlineApiCall(function,
8209                           NULL,  // Receiver is on expression stack.
8210                           &receiver_maps,
8211                           0,
8212                           ast_id,
8213                           kCallApiGetter);
8214 }
8215
8216
8217 bool HOptimizedGraphBuilder::TryInlineApiSetter(Handle<JSFunction> function,
8218                                                 Handle<Map> receiver_map,
8219                                                 BailoutId ast_id) {
8220   SmallMapList receiver_maps(1, zone());
8221   receiver_maps.Add(receiver_map, zone());
8222   return TryInlineApiCall(function,
8223                           NULL,  // Receiver is on expression stack.
8224                           &receiver_maps,
8225                           1,
8226                           ast_id,
8227                           kCallApiSetter);
8228 }
8229
8230
8231 bool HOptimizedGraphBuilder::TryInlineApiCall(Handle<JSFunction> function,
8232                                                HValue* receiver,
8233                                                SmallMapList* receiver_maps,
8234                                                int argc,
8235                                                BailoutId ast_id,
8236                                                ApiCallType call_type) {
8237   CallOptimization optimization(function);
8238   if (!optimization.is_simple_api_call()) return false;
8239   Handle<Map> holder_map;
8240   if (call_type == kCallApiFunction) {
8241     // Cannot embed a direct reference to the global proxy map
8242     // as it may be dropped during deserialization.
8243     CHECK(!Serializer::enabled(isolate()));
8244     ASSERT_EQ(0, receiver_maps->length());
8245     receiver_maps->Add(handle(
8246         function->context()->global_object()->global_receiver()->map()),
8247         zone());
8248   }
8249   CallOptimization::HolderLookup holder_lookup =
8250       CallOptimization::kHolderNotFound;
8251   Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
8252       receiver_maps->first(), &holder_lookup);
8253   if (holder_lookup == CallOptimization::kHolderNotFound) return false;
8254
8255   if (FLAG_trace_inlining) {
8256     PrintF("Inlining api function ");
8257     function->ShortPrint();
8258     PrintF("\n");
8259   }
8260
8261   bool drop_extra = false;
8262   bool is_store = false;
8263   switch (call_type) {
8264     case kCallApiFunction:
8265     case kCallApiMethod:
8266       // Need to check that none of the receiver maps could have changed.
8267       Add<HCheckMaps>(receiver, receiver_maps);
8268       // Need to ensure the chain between receiver and api_holder is intact.
8269       if (holder_lookup == CallOptimization::kHolderFound) {
8270         AddCheckPrototypeMaps(api_holder, receiver_maps->first());
8271       } else {
8272         ASSERT_EQ(holder_lookup, CallOptimization::kHolderIsReceiver);
8273       }
8274       // Includes receiver.
8275       PushArgumentsFromEnvironment(argc + 1);
8276       // Drop function after call.
8277       drop_extra = true;
8278       break;
8279     case kCallApiGetter:
8280       // Receiver and prototype chain cannot have changed.
8281       ASSERT_EQ(0, argc);
8282       ASSERT_EQ(NULL, receiver);
8283       // Receiver is on expression stack.
8284       receiver = Pop();
8285       Add<HPushArgument>(receiver);
8286       break;
8287     case kCallApiSetter:
8288       {
8289         is_store = true;
8290         // Receiver and prototype chain cannot have changed.
8291         ASSERT_EQ(1, argc);
8292         ASSERT_EQ(NULL, receiver);
8293         // Receiver and value are on expression stack.
8294         HValue* value = Pop();
8295         receiver = Pop();
8296         Add<HPushArgument>(receiver);
8297         Add<HPushArgument>(value);
8298         break;
8299      }
8300   }
8301
8302   HValue* holder = NULL;
8303   switch (holder_lookup) {
8304     case CallOptimization::kHolderFound:
8305       holder = Add<HConstant>(api_holder);
8306       break;
8307     case CallOptimization::kHolderIsReceiver:
8308       holder = receiver;
8309       break;
8310     case CallOptimization::kHolderNotFound:
8311       UNREACHABLE();
8312       break;
8313   }
8314   Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
8315   Handle<Object> call_data_obj(api_call_info->data(), isolate());
8316   bool call_data_is_undefined = call_data_obj->IsUndefined();
8317   HValue* call_data = Add<HConstant>(call_data_obj);
8318   ApiFunction fun(v8::ToCData<Address>(api_call_info->callback()));
8319   ExternalReference ref = ExternalReference(&fun,
8320                                             ExternalReference::DIRECT_API_CALL,
8321                                             isolate());
8322   HValue* api_function_address = Add<HConstant>(ExternalReference(ref));
8323
8324   HValue* op_vals[] = {
8325     Add<HConstant>(function),
8326     call_data,
8327     holder,
8328     api_function_address,
8329     context()
8330   };
8331
8332   CallInterfaceDescriptor* descriptor =
8333       isolate()->call_descriptor(Isolate::ApiFunctionCall);
8334
8335   CallApiFunctionStub stub(isolate(), is_store, call_data_is_undefined, argc);
8336   Handle<Code> code = stub.GetCode();
8337   HConstant* code_value = Add<HConstant>(code);
8338
8339   ASSERT((sizeof(op_vals) / kPointerSize) ==
8340          descriptor->environment_length());
8341
8342   HInstruction* call = New<HCallWithDescriptor>(
8343       code_value, argc + 1, descriptor,
8344       Vector<HValue*>(op_vals, descriptor->environment_length()));
8345
8346   if (drop_extra) Drop(1);  // Drop function.
8347   ast_context()->ReturnInstruction(call, ast_id);
8348   return true;
8349 }
8350
8351
8352 bool HOptimizedGraphBuilder::TryCallApply(Call* expr) {
8353   ASSERT(expr->expression()->IsProperty());
8354
8355   if (!expr->IsMonomorphic()) {
8356     return false;
8357   }
8358   Handle<Map> function_map = expr->GetReceiverTypes()->first();
8359   if (function_map->instance_type() != JS_FUNCTION_TYPE ||
8360       !expr->target()->shared()->HasBuiltinFunctionId() ||
8361       expr->target()->shared()->builtin_function_id() != kFunctionApply) {
8362     return false;
8363   }
8364
8365   if (current_info()->scope()->arguments() == NULL) return false;
8366
8367   ZoneList<Expression*>* args = expr->arguments();
8368   if (args->length() != 2) return false;
8369
8370   VariableProxy* arg_two = args->at(1)->AsVariableProxy();
8371   if (arg_two == NULL || !arg_two->var()->IsStackAllocated()) return false;
8372   HValue* arg_two_value = LookupAndMakeLive(arg_two->var());
8373   if (!arg_two_value->CheckFlag(HValue::kIsArguments)) return false;
8374
8375   // Found pattern f.apply(receiver, arguments).
8376   CHECK_ALIVE_OR_RETURN(VisitForValue(args->at(0)), true);
8377   HValue* receiver = Pop();  // receiver
8378   HValue* function = Pop();  // f
8379   Drop(1);  // apply
8380
8381   if (function_state()->outer() == NULL) {
8382     HInstruction* elements = Add<HArgumentsElements>(false);
8383     HInstruction* length = Add<HArgumentsLength>(elements);
8384     HValue* wrapped_receiver = BuildWrapReceiver(receiver, function);
8385     HInstruction* result = New<HApplyArguments>(function,
8386                                                 wrapped_receiver,
8387                                                 length,
8388                                                 elements);
8389     ast_context()->ReturnInstruction(result, expr->id());
8390     return true;
8391   } else {
8392     // We are inside an inlined function and we know exactly what is inside
8393     // the arguments object. But we need to be able to materialize at deopt.
8394     ASSERT_EQ(environment()->arguments_environment()->parameter_count(),
8395               function_state()->entry()->arguments_object()->arguments_count());
8396     HArgumentsObject* args = function_state()->entry()->arguments_object();
8397     const ZoneList<HValue*>* arguments_values = args->arguments_values();
8398     int arguments_count = arguments_values->length();
8399     Push(function);
8400     Push(BuildWrapReceiver(receiver, function));
8401     for (int i = 1; i < arguments_count; i++) {
8402       Push(arguments_values->at(i));
8403     }
8404
8405     Handle<JSFunction> known_function;
8406     if (function->IsConstant() &&
8407         HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
8408       known_function = Handle<JSFunction>::cast(
8409           HConstant::cast(function)->handle(isolate()));
8410       int args_count = arguments_count - 1;  // Excluding receiver.
8411       if (TryInlineApply(known_function, expr, args_count)) return true;
8412     }
8413
8414     PushArgumentsFromEnvironment(arguments_count);
8415     HInvokeFunction* call = New<HInvokeFunction>(
8416         function, known_function, arguments_count);
8417     Drop(1);  // Function.
8418     ast_context()->ReturnInstruction(call, expr->id());
8419     return true;
8420   }
8421 }
8422
8423
8424 HValue* HOptimizedGraphBuilder::ImplicitReceiverFor(HValue* function,
8425                                                     Handle<JSFunction> target) {
8426   SharedFunctionInfo* shared = target->shared();
8427   if (shared->strict_mode() == SLOPPY && !shared->native()) {
8428     // Cannot embed a direct reference to the global proxy
8429     // as it is dropped during deserialization.
8430     CHECK(!Serializer::enabled(isolate()));
8431     Handle<JSObject> global_receiver(
8432         target->context()->global_object()->global_receiver());
8433     return Add<HConstant>(global_receiver);
8434   }
8435   return graph()->GetConstantUndefined();
8436 }
8437
8438
8439 void HOptimizedGraphBuilder::VisitCall(Call* expr) {
8440   ASSERT(!HasStackOverflow());
8441   ASSERT(current_block() != NULL);
8442   ASSERT(current_block()->HasPredecessor());
8443   Expression* callee = expr->expression();
8444   int argument_count = expr->arguments()->length() + 1;  // Plus receiver.
8445   HInstruction* call = NULL;
8446
8447   Property* prop = callee->AsProperty();
8448   if (prop != NULL) {
8449     CHECK_ALIVE(VisitForValue(prop->obj()));
8450     HValue* receiver = Top();
8451
8452     SmallMapList* types;
8453     ComputeReceiverTypes(expr, receiver, &types, zone());
8454
8455     if (prop->key()->IsPropertyName() && types->length() > 0) {
8456       Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
8457       PropertyAccessInfo info(this, LOAD, ToType(types->first()), name);
8458       if (!info.CanAccessAsMonomorphic(types)) {
8459         HandlePolymorphicCallNamed(expr, receiver, types, name);
8460         return;
8461       }
8462     }
8463
8464     HValue* key = NULL;
8465     if (!prop->key()->IsPropertyName()) {
8466       CHECK_ALIVE(VisitForValue(prop->key()));
8467       key = Pop();
8468     }
8469
8470     CHECK_ALIVE(PushLoad(prop, receiver, key));
8471     HValue* function = Pop();
8472
8473     if (FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
8474
8475     // Push the function under the receiver.
8476     environment()->SetExpressionStackAt(0, function);
8477
8478     Push(receiver);
8479
8480     if (function->IsConstant() &&
8481         HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
8482       Handle<JSFunction> known_function = Handle<JSFunction>::cast(
8483           HConstant::cast(function)->handle(isolate()));
8484       expr->set_target(known_function);
8485
8486       if (TryCallApply(expr)) return;
8487       CHECK_ALIVE(VisitExpressions(expr->arguments()));
8488
8489       Handle<Map> map = types->length() == 1 ? types->first() : Handle<Map>();
8490       if (TryInlineBuiltinMethodCall(expr, receiver, map)) {
8491         if (FLAG_trace_inlining) {
8492           PrintF("Inlining builtin ");
8493           known_function->ShortPrint();
8494           PrintF("\n");
8495         }
8496         return;
8497       }
8498       if (TryInlineApiMethodCall(expr, receiver, types)) return;
8499
8500       // Wrap the receiver if necessary.
8501       if (NeedsWrappingFor(ToType(types->first()), known_function)) {
8502         // Since HWrapReceiver currently cannot actually wrap numbers and
8503         // strings, use the regular CallFunctionStub for method calls to wrap
8504         // the receiver.
8505         // TODO(verwaest): Support creation of value wrappers directly in
8506         // HWrapReceiver.
8507         call = New<HCallFunction>(
8508             function, argument_count, WRAP_AND_CALL);
8509       } else if (TryInlineCall(expr)) {
8510         return;
8511       } else {
8512         call = BuildCallConstantFunction(known_function, argument_count);
8513       }
8514
8515     } else {
8516       CHECK_ALIVE(VisitExpressions(expr->arguments()));
8517       CallFunctionFlags flags = receiver->type().IsJSObject()
8518           ? NO_CALL_FUNCTION_FLAGS : CALL_AS_METHOD;
8519       call = New<HCallFunction>(function, argument_count, flags);
8520     }
8521     PushArgumentsFromEnvironment(argument_count);
8522
8523   } else {
8524     VariableProxy* proxy = expr->expression()->AsVariableProxy();
8525     if (proxy != NULL && proxy->var()->is_possibly_eval(isolate())) {
8526       return Bailout(kPossibleDirectCallToEval);
8527     }
8528
8529     // The function is on the stack in the unoptimized code during
8530     // evaluation of the arguments.
8531     CHECK_ALIVE(VisitForValue(expr->expression()));
8532     HValue* function = Top();
8533     bool global_call = proxy != NULL && proxy->var()->IsUnallocated();
8534     if (global_call) {
8535       Variable* var = proxy->var();
8536       bool known_global_function = false;
8537       // If there is a global property cell for the name at compile time and
8538       // access check is not enabled, we assume that the function will not
8539       // change and generate optimized code for calling the function.
8540       LookupResult lookup(isolate());
8541       GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, LOAD);
8542       if (type == kUseCell &&
8543           !current_info()->global_object()->IsAccessCheckNeeded()) {
8544         Handle<GlobalObject> global(current_info()->global_object());
8545         known_global_function = expr->ComputeGlobalTarget(global, &lookup);
8546       }
8547       if (known_global_function) {
8548         Add<HCheckValue>(function, expr->target());
8549
8550         // Placeholder for the receiver.
8551         Push(graph()->GetConstantUndefined());
8552         CHECK_ALIVE(VisitExpressions(expr->arguments()));
8553
8554         // Patch the global object on the stack with the expected receiver.
8555         HValue* receiver = ImplicitReceiverFor(function, expr->target());
8556         const int receiver_index = argument_count - 1;
8557         environment()->SetExpressionStackAt(receiver_index, receiver);
8558
8559         if (TryInlineBuiltinFunctionCall(expr)) {
8560           if (FLAG_trace_inlining) {
8561             PrintF("Inlining builtin ");
8562             expr->target()->ShortPrint();
8563             PrintF("\n");
8564           }
8565           return;
8566         }
8567         if (TryInlineApiFunctionCall(expr, receiver)) return;
8568         if (TryInlineCall(expr)) return;
8569
8570         PushArgumentsFromEnvironment(argument_count);
8571         call = BuildCallConstantFunction(expr->target(), argument_count);
8572       } else {
8573         Push(graph()->GetConstantUndefined());
8574         CHECK_ALIVE(VisitExpressions(expr->arguments()));
8575         PushArgumentsFromEnvironment(argument_count);
8576         call = New<HCallFunction>(function, argument_count);
8577       }
8578
8579     } else if (expr->IsMonomorphic()) {
8580       Add<HCheckValue>(function, expr->target());
8581
8582       Push(graph()->GetConstantUndefined());
8583       CHECK_ALIVE(VisitExpressions(expr->arguments()));
8584
8585       HValue* receiver = ImplicitReceiverFor(function, expr->target());
8586       const int receiver_index = argument_count - 1;
8587       environment()->SetExpressionStackAt(receiver_index, receiver);
8588
8589       if (TryInlineBuiltinFunctionCall(expr)) {
8590         if (FLAG_trace_inlining) {
8591           PrintF("Inlining builtin ");
8592           expr->target()->ShortPrint();
8593           PrintF("\n");
8594         }
8595         return;
8596       }
8597       if (TryInlineApiFunctionCall(expr, receiver)) return;
8598
8599       if (TryInlineCall(expr)) return;
8600
8601       call = PreProcessCall(New<HInvokeFunction>(
8602           function, expr->target(), argument_count));
8603
8604     } else {
8605       Push(graph()->GetConstantUndefined());
8606       CHECK_ALIVE(VisitExpressions(expr->arguments()));
8607       PushArgumentsFromEnvironment(argument_count);
8608       call = New<HCallFunction>(function, argument_count);
8609     }
8610   }
8611
8612   Drop(1);  // Drop the function.
8613   return ast_context()->ReturnInstruction(call, expr->id());
8614 }
8615
8616
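// Builds an inlined allocation of a JSArray for a call to the Array
// constructor, using the elements kind and AllocationSite recorded on the
// CallNew expression. The constructor and its arguments are expected on the
// expression stack.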
8617 void HOptimizedGraphBuilder::BuildInlinedCallNewArray(CallNew* expr) {
8618   NoObservableSideEffectsScope no_effects(this);
8619
8620   int argument_count = expr->arguments()->length();
8621   // We should at least have the constructor on the expression stack.
8622   HValue* constructor = environment()->ExpressionStackAt(argument_count);
8623
8624   ElementsKind kind = expr->elements_kind();
8625   Handle<AllocationSite> site = expr->allocation_site();
8626   ASSERT(!site.is_null());
8627
8628   // Register on the site for deoptimization if the transition feedback changes.
8629   AllocationSite::AddDependentCompilationInfo(
8630       site, AllocationSite::TRANSITIONS, top_info());
8631   HInstruction* site_instruction = Add<HConstant>(site);
8632
8633   // In the single constant argument case, we may have to adjust elements kind
8634   // to avoid creating a packed non-empty array.
8635   if (argument_count == 1 && !IsHoleyElementsKind(kind)) {
8636     HValue* argument = environment()->Top();
8637     if (argument->IsConstant()) {
8638       HConstant* constant_argument = HConstant::cast(argument);
8639       ASSERT(constant_argument->HasSmiValue());
8640       int constant_array_size = constant_argument->Integer32Value();
8641       if (constant_array_size != 0) {
8642         kind = GetHoleyElementsKind(kind);
8643       }
8644     }
8645   }
8646
8647   // Build the array.
8648   JSArrayBuilder array_builder(this,
8649                                kind,
8650                                site_instruction,
8651                                constructor,
8652                                DISABLE_ALLOCATION_SITES);
8653   HValue* new_object;
8654   if (argument_count == 0) {
8655     new_object = array_builder.AllocateEmptyArray();
8656   } else if (argument_count == 1) {
8657     HValue* argument = environment()->Top();
8658     new_object = BuildAllocateArrayFromLength(&array_builder, argument);
8659   } else {
8660     HValue* length = Add<HConstant>(argument_count);
8661     // Smi arrays need to initialize array elements with the hole because
8662     // bailout could occur if the arguments don't fit in a smi.
8663     //
8664     // TODO(mvstanton): If all the arguments are constants in smi range, then
8665     // we could set fill_with_hole to false and save a few instructions.
8666     JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
8667         ? JSArrayBuilder::FILL_WITH_HOLE
8668         : JSArrayBuilder::DONT_FILL_WITH_HOLE;
8669     new_object = array_builder.AllocateArray(length, length, fill_mode);
8670     HValue* elements = array_builder.GetElementsLocation();
8671     for (int i = 0; i < argument_count; i++) {
8672       HValue* value = environment()->ExpressionStackAt(argument_count - i - 1);
8673       HValue* constant_i = Add<HConstant>(i);
8674       Add<HStoreKeyed>(elements, constant_i, value, kind);
8675     }
8676   }
8677
8678   Drop(argument_count + 1);  // Drop constructor and args.
8679   ast_context()->ReturnValue(new_object);
8680 }
8681
8682
8683 // Checks whether allocation using the given constructor can be inlined.
8684 static bool IsAllocationInlineable(Handle<JSFunction> constructor) {
8685   return constructor->has_initial_map() &&
8686       constructor->initial_map()->instance_type() == JS_OBJECT_TYPE &&
8687       constructor->initial_map()->instance_size() < HAllocate::kMaxInlineSize &&
8688       constructor->initial_map()->InitialPropertiesLength() == 0;
8689 }
8690
8691
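// Decides whether a CallNew to the Array function can be emitted as an
// inlined array allocation, based on the AllocationSite feedback and the
// (possibly constant) length argument.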
8692 bool HOptimizedGraphBuilder::IsCallNewArrayInlineable(CallNew* expr) {
8693   Handle<JSFunction> caller = current_info()->closure();
8694   Handle<JSFunction> target(isolate()->native_context()->array_function(),
8695                             isolate());
8696   int argument_count = expr->arguments()->length();
8697   // We should have the function plus array arguments on the environment stack.
8698   ASSERT(environment()->length() >= (argument_count + 1));
8699   Handle<AllocationSite> site = expr->allocation_site();
8700   ASSERT(!site.is_null());
8701
8702   bool inline_ok = false;
8703   if (site->CanInlineCall()) {
8704     // We also want to avoid inlining in certain one-argument scenarios.
8705     if (argument_count == 1) {
8706       HValue* argument = Top();
8707       if (argument->IsConstant()) {
8708         // Do not inline if the constant length argument is not a smi or
8709         // outside the valid range for a fast array.
8710         HConstant* constant_argument = HConstant::cast(argument);
8711         if (constant_argument->HasSmiValue()) {
8712           int value = constant_argument->Integer32Value();
8713           inline_ok = value >= 0 &&
8714               value < JSObject::kInitialMaxFastElementArray;
8715           if (!inline_ok) {
8716             TraceInline(target, caller,
8717                         "Length outside of valid array range");
8718           }
8719         }
8720       } else {
8721         inline_ok = true;
8722       }
8723     } else {
8724       inline_ok = true;
8725     }
8726   } else {
8727     TraceInline(target, caller, "AllocationSite requested no inlining.");
8728   }
8729
8730   if (inline_ok) {
8731     TraceInline(target, caller, NULL);
8732   }
8733   return inline_ok;
8734 }
8735
8736
8737 void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
8738   ASSERT(!HasStackOverflow());
8739   ASSERT(current_block() != NULL);
8740   ASSERT(current_block()->HasPredecessor());
8741   if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
8742   int argument_count = expr->arguments()->length() + 1;  // Plus constructor.
8743   Factory* factory = isolate()->factory();
8744
8745   // The constructor function is on the stack in the unoptimized code
8746   // during evaluation of the arguments.
8747   CHECK_ALIVE(VisitForValue(expr->expression()));
8748   HValue* function = Top();
8749   CHECK_ALIVE(VisitExpressions(expr->arguments()));
8750
8751   if (FLAG_inline_construct &&
8752       expr->IsMonomorphic() &&
8753       IsAllocationInlineable(expr->target())) {
8754     Handle<JSFunction> constructor = expr->target();
8755     HValue* check = Add<HCheckValue>(function, constructor);
8756
8757     // Force completion of inobject slack tracking before generating
8758     // allocation code to finalize instance size.
8759     if (constructor->shared()->IsInobjectSlackTrackingInProgress()) {
8760       constructor->shared()->CompleteInobjectSlackTracking();
8761     }
8762
8763     // Calculate instance size from initial map of constructor.
8764     ASSERT(constructor->has_initial_map());
8765     Handle<Map> initial_map(constructor->initial_map());
8766     int instance_size = initial_map->instance_size();
8767     ASSERT(initial_map->InitialPropertiesLength() == 0);
8768
8769     // Allocate an instance of the implicit receiver object.
8770     HValue* size_in_bytes = Add<HConstant>(instance_size);
8771     HAllocationMode allocation_mode;
8772     if (FLAG_pretenuring_call_new) {
8773       if (FLAG_allocation_site_pretenuring) {
8774         // Try to use pretenuring feedback.
8775         Handle<AllocationSite> allocation_site = expr->allocation_site();
8776         allocation_mode = HAllocationMode(allocation_site);
8777         // Take a dependency on allocation site.
8778         AllocationSite::AddDependentCompilationInfo(allocation_site,
8779                                                     AllocationSite::TENURING,
8780                                                     top_info());
8781       } else {
8782         allocation_mode = HAllocationMode(
8783             isolate()->heap()->GetPretenureMode());
8784       }
8785     }
8786
8787     HAllocate* receiver =
8788         BuildAllocate(size_in_bytes, HType::JSObject(), JS_OBJECT_TYPE,
8789                       allocation_mode);
8790     receiver->set_known_initial_map(initial_map);
8791
8792     // Load the initial map from the constructor.
8793     HValue* constructor_value = Add<HConstant>(constructor);
8794     HValue* initial_map_value =
8795       Add<HLoadNamedField>(constructor_value, static_cast<HValue*>(NULL),
8796                            HObjectAccess::ForMapAndOffset(
8797                                handle(constructor->map()),
8798                                JSFunction::kPrototypeOrInitialMapOffset));
8799
8800     // Initialize map and fields of the newly allocated object.
8801     { NoObservableSideEffectsScope no_effects(this);
8802       ASSERT(initial_map->instance_type() == JS_OBJECT_TYPE);
8803       Add<HStoreNamedField>(receiver,
8804           HObjectAccess::ForMapAndOffset(initial_map, JSObject::kMapOffset),
8805           initial_map_value);
8806       HValue* empty_fixed_array = Add<HConstant>(factory->empty_fixed_array());
8807       Add<HStoreNamedField>(receiver,
8808           HObjectAccess::ForMapAndOffset(initial_map,
8809                                          JSObject::kPropertiesOffset),
8810           empty_fixed_array);
8811       Add<HStoreNamedField>(receiver,
8812           HObjectAccess::ForMapAndOffset(initial_map,
8813                                          JSObject::kElementsOffset),
8814           empty_fixed_array);
8815       if (initial_map->inobject_properties() != 0) {
8816         HConstant* undefined = graph()->GetConstantUndefined();
8817         for (int i = 0; i < initial_map->inobject_properties(); i++) {
8818           int property_offset = initial_map->GetInObjectPropertyOffset(i);
8819           Add<HStoreNamedField>(receiver,
8820               HObjectAccess::ForMapAndOffset(initial_map, property_offset),
8821               undefined);
8822         }
8823       }
8824     }
8825
8826     // Replace the constructor function on the expression stack with the newly
8827     // allocated receiver, at the receiver's index from the top of the stack.
8828     const int receiver_index = argument_count - 1;
8829     ASSERT(environment()->ExpressionStackAt(receiver_index) == function);
8830     environment()->SetExpressionStackAt(receiver_index, receiver);
8831
8832     if (TryInlineConstruct(expr, receiver)) return;
8833
8834     // TODO(mstarzinger): For now we remove the previous HAllocate and all
8835     // corresponding instructions and instead add HPushArgument for the
8836     // arguments in case inlining failed.  What we actually should do is for
8837     // inlining to try to build a subgraph without mutating the parent graph.
8838     HInstruction* instr = current_block()->last();
8839     while (instr != initial_map_value) {
8840       HInstruction* prev_instr = instr->previous();
8841       instr->DeleteAndReplaceWith(NULL);
8842       instr = prev_instr;
8843     }
8844     initial_map_value->DeleteAndReplaceWith(NULL);
8845     receiver->DeleteAndReplaceWith(NULL);
8846     check->DeleteAndReplaceWith(NULL);
8847     environment()->SetExpressionStackAt(receiver_index, function);
8848     HInstruction* call =
8849       PreProcessCall(New<HCallNew>(function, argument_count));
8850     return ast_context()->ReturnInstruction(call, expr->id());
8851   } else {
8852     // The constructor function is both an operand to the instruction and an
8853     // argument to the construct call.
8854     Handle<JSFunction> array_function(
8855         isolate()->native_context()->array_function(), isolate());
8856     bool use_call_new_array = expr->target().is_identical_to(array_function);
8857     if (use_call_new_array && IsCallNewArrayInlineable(expr)) {
8858       // Verify we are still calling the array function for our native context.
8859       Add<HCheckValue>(function, array_function);
8860       BuildInlinedCallNewArray(expr);
8861       return;
8862     }
8863
8864     HBinaryCall* call;
8865     if (use_call_new_array) {
8866       Add<HCheckValue>(function, array_function);
8867       call = New<HCallNewArray>(function, argument_count,
8868                                 expr->elements_kind());
8869     } else {
8870       call = New<HCallNew>(function, argument_count);
8871     }
8872     PreProcessCall(call);
8873     return ast_context()->ReturnInstruction(call, expr->id());
8874   }
8875 }
8876
8877
8878 // Support for generating inlined runtime functions.
8879
8880 // Lookup table for generators for runtime calls that are generated inline.
8881 // Elements of the table are member pointers to functions of
8882 // HOptimizedGraphBuilder.
8883 #define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize)  \
8884     &HOptimizedGraphBuilder::Generate##Name,
8885
8886 const HOptimizedGraphBuilder::InlineFunctionGenerator
8887     HOptimizedGraphBuilder::kInlineFunctionGenerators[] = {
8888         INLINE_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
8889         INLINE_OPTIMIZED_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
8890 };
8891 #undef INLINE_FUNCTION_GENERATOR_ADDRESS
8892
8893
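// Initializes the fields common to all JSArrayBufferView subclasses
// (byte offset, byte length, buffer reference and weak-view list linkage)
// and clears any internal fields. A NULL |buffer| leaves the view unbuffered.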
8894 template <class ViewClass>
8895 void HGraphBuilder::BuildArrayBufferViewInitialization(
8896     HValue* obj,
8897     HValue* buffer,
8898     HValue* byte_offset,
8899     HValue* byte_length) {
8900
8901   for (int offset = ViewClass::kSize;
8902        offset < ViewClass::kSizeWithInternalFields;
8903        offset += kPointerSize) {
8904     Add<HStoreNamedField>(obj,
8905         HObjectAccess::ForObservableJSObjectOffset(offset),
8906         graph()->GetConstant0());
8907   }
8908
8909   Add<HStoreNamedField>(
8910       obj,
8911       HObjectAccess::ForJSArrayBufferViewByteOffset(),
8912       byte_offset);
8913   Add<HStoreNamedField>(
8914       obj,
8915       HObjectAccess::ForJSArrayBufferViewByteLength(),
8916       byte_length);
8917
8918   if (buffer != NULL) {
8919     Add<HStoreNamedField>(
8920         obj,
8921         HObjectAccess::ForJSArrayBufferViewBuffer(), buffer);
8922     HObjectAccess weak_first_view_access =
8923         HObjectAccess::ForJSArrayBufferWeakFirstView();
8924     Add<HStoreNamedField>(obj,
8925         HObjectAccess::ForJSArrayBufferViewWeakNext(),
8926         Add<HLoadNamedField>(buffer,
8927                              static_cast<HValue*>(NULL),
8928                              weak_first_view_access));
8929     Add<HStoreNamedField>(buffer, weak_first_view_access, obj);
8930   } else {
8931     Add<HStoreNamedField>(
8932         obj,
8933         HObjectAccess::ForJSArrayBufferViewBuffer(),
8934         Add<HConstant>(static_cast<int32_t>(0)));
8935     Add<HStoreNamedField>(obj,
8936         HObjectAccess::ForJSArrayBufferViewWeakNext(),
8937         graph()->GetConstantUndefined());
8938   }
8939 }
8940
8941
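// Inline implementation of the %DataViewInitialize runtime call; expects
// four arguments: the data view object, the buffer, the byte offset and the
// byte length.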
8942 void HOptimizedGraphBuilder::GenerateDataViewInitialize(
8943     CallRuntime* expr) {
8944   ZoneList<Expression*>* arguments = expr->arguments();
8945
8946   NoObservableSideEffectsScope scope(this);
8947   ASSERT(arguments->length() == 4);
8948   CHECK_ALIVE(VisitForValue(arguments->at(0)));
8949   HValue* obj = Pop();
8950
8951   CHECK_ALIVE(VisitForValue(arguments->at(1)));
8952   HValue* buffer = Pop();
8953
8954   CHECK_ALIVE(VisitForValue(arguments->at(2)));
8955   HValue* byte_offset = Pop();
8956
8957   CHECK_ALIVE(VisitForValue(arguments->at(3)));
8958   HValue* byte_length = Pop();
8959
8960   BuildArrayBufferViewInitialization<JSDataView>(
8961       obj, buffer, byte_offset, byte_length);
8962 }
8963
8964
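// Returns the initial map of the typed array constructor for |array_type| in
// the native context, transitioned to |target_kind| elements.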
8965 static Handle<Map> TypedArrayMap(Isolate* isolate,
8966                                  ExternalArrayType array_type,
8967                                  ElementsKind target_kind) {
8968   Handle<Context> native_context = isolate->native_context();
8969   Handle<JSFunction> fun;
8970   switch (array_type) {
8971 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                       \
8972     case kExternal##Type##Array:                                              \
8973       fun = Handle<JSFunction>(native_context->type##_array_fun());           \
8974       break;
8975
8976     TYPED_ARRAYS(TYPED_ARRAY_CASE)
8977 #undef TYPED_ARRAY_CASE
8978   }
8979   Handle<Map> map(fun->initial_map());
8980   return Map::AsElementsKind(map, target_kind);
8981 }
8982
8983
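// Allocates the ExternalArray elements object for a typed array that is
// backed by an ArrayBuffer, pointing its external pointer at the buffer's
// backing store plus |byte_offset|.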
8984 HValue* HOptimizedGraphBuilder::BuildAllocateExternalElements(
8985     ExternalArrayType array_type,
8986     bool is_zero_byte_offset,
8987     HValue* buffer, HValue* byte_offset, HValue* length) {
8988   Handle<Map> external_array_map(
8989       isolate()->heap()->MapForExternalArrayType(array_type));
8990
8991   // The HForceRepresentation is to prevent possible deopt on int-smi
8992   // conversion after allocation but before the new object fields are set.
8993   length = AddUncasted<HForceRepresentation>(length, Representation::Smi());
8994   HValue* elements =
8995       Add<HAllocate>(
8996           Add<HConstant>(ExternalArray::kAlignedSize),
8997           HType::Tagged(),
8998           NOT_TENURED,
8999           external_array_map->instance_type());
9000
9001   AddStoreMapConstant(elements, external_array_map);
9002   Add<HStoreNamedField>(elements,
9003       HObjectAccess::ForFixedArrayLength(), length);
9004
9005   HValue* backing_store = Add<HLoadNamedField>(
9006       buffer, static_cast<HValue*>(NULL),
9007       HObjectAccess::ForJSArrayBufferBackingStore());
9008
9009   HValue* typed_array_start;
9010   if (is_zero_byte_offset) {
9011     typed_array_start = backing_store;
9012   } else {
9013     HInstruction* external_pointer =
9014         AddUncasted<HAdd>(backing_store, byte_offset);
9015     // Arguments are checked prior to the call to TypedArrayInitialize,
9016     // including byte_offset.
9017     external_pointer->ClearFlag(HValue::kCanOverflow);
9018     typed_array_start = external_pointer;
9019   }
9020
9021   Add<HStoreNamedField>(elements,
9022       HObjectAccess::ForExternalArrayExternalPointer(),
9023       typed_array_start);
9024
9025   return elements;
9026 }
9027
9028
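// Allocates an on-heap FixedTypedArray backing store of |byte_length| bytes
// and initializes all |length| elements with zero (or with a zero SIMD value
// for SIMD element kinds).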
9029 HValue* HOptimizedGraphBuilder::BuildAllocateFixedTypedArray(
9030     ExternalArrayType array_type, size_t element_size,
9031     ElementsKind fixed_elements_kind,
9032     HValue* byte_length, HValue* length) {
9033   STATIC_ASSERT(
9034       (FixedTypedArrayBase::kHeaderSize & kObjectAlignmentMask) == 0);
9035   HValue* total_size;
9036
9037   // If the fixed array's elements are not aligned to the object alignment,
9038   // we need to align the total allocation size to the object alignment.
9039   if (element_size % kObjectAlignment != 0) {
9040     total_size = BuildObjectSizeAlignment(
9041         byte_length, FixedTypedArrayBase::kHeaderSize);
9042   } else {
9043     total_size = AddUncasted<HAdd>(byte_length,
9044         Add<HConstant>(FixedTypedArrayBase::kHeaderSize));
9045     total_size->ClearFlag(HValue::kCanOverflow);
9046   }
9047
9048   // The HForceRepresentation is to prevent possible deopt on int-smi
9049   // conversion after allocation but before the new object fields are set.
9050   length = AddUncasted<HForceRepresentation>(length, Representation::Smi());
9051   Handle<Map> fixed_typed_array_map(
9052       isolate()->heap()->MapForFixedTypedArray(array_type));
9053   HValue* elements =
9054       Add<HAllocate>(total_size, HType::Tagged(),
9055           NOT_TENURED,
9056           fixed_typed_array_map->instance_type());
9057   AddStoreMapConstant(elements, fixed_typed_array_map);
9058
9059   Add<HStoreNamedField>(elements,
9060       HObjectAccess::ForFixedArrayLength(),
9061       length);
9062
9063   HValue* filler = Add<HConstant>(static_cast<int32_t>(0));
9064   if (IsFixedFloat32x4ElementsKind(fixed_elements_kind)) {
9065     if (CpuFeatures::SupportsSIMD128InCrankshaft()) {
9066       filler = AddUncasted<HNullarySIMDOperation>(kFloat32x4Zero);
9067     } else {
9068       HValue* size = Add<HConstant>(Float32x4::kSize);
9069       filler = Add<HAllocate>(size, HType::Tagged(), NOT_TENURED,
9070           Float32x4::kInstanceType);
9071       AddStoreMapConstant(filler, isolate()->factory()->float32x4_map());
9072       HValue* zero = Add<HConstant>(static_cast<double>(0.0));
9073       Add<HStoreNamedField>(filler, HObjectAccess::ForSIMD128Double0(), zero);
9074       Add<HStoreNamedField>(filler, HObjectAccess::ForSIMD128Double1(), zero);
9075     }
9076   } else if (IsFixedFloat64x2ElementsKind(fixed_elements_kind)) {
9077     if (CpuFeatures::SupportsSIMD128InCrankshaft()) {
9078       filler = AddUncasted<HNullarySIMDOperation>(kFloat64x2Zero);
9079     } else {
9080       HValue* size = Add<HConstant>(Float64x2::kSize);
9081       filler = Add<HAllocate>(size, HType::Tagged(), NOT_TENURED,
9082           Float64x2::kInstanceType);
9083       AddStoreMapConstant(filler, isolate()->factory()->float64x2_map());
9084       HValue* zero = Add<HConstant>(static_cast<double>(0.0));
9085       Add<HStoreNamedField>(filler, HObjectAccess::ForSIMD128Double0(), zero);
9086       Add<HStoreNamedField>(filler, HObjectAccess::ForSIMD128Double1(), zero);
9087     }
9088   } else if (IsFixedInt32x4ElementsKind(fixed_elements_kind)) {
9089     if (CpuFeatures::SupportsSIMD128InCrankshaft()) {
9090       filler = AddUncasted<HNullarySIMDOperation>(kInt32x4Zero);
9091     } else {
9092       HValue* size = Add<HConstant>(Int32x4::kSize);
9093       filler = Add<HAllocate>(size, HType::Tagged(), NOT_TENURED,
9094           Int32x4::kInstanceType);
9095       AddStoreMapConstant(filler, isolate()->factory()->int32x4_map());
9096       HValue* zero = Add<HConstant>(static_cast<double>(0.0));
9097       Add<HStoreNamedField>(filler, HObjectAccess::ForSIMD128Double0(), zero);
9098       Add<HStoreNamedField>(filler, HObjectAccess::ForSIMD128Double1(), zero);
9099     }
9100   }
9101
9102   {
9103     LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
9104
9105     HValue* key = builder.BeginBody(
9106         Add<HConstant>(static_cast<int32_t>(0)),
9107         length, Token::LT);
9108     Add<HStoreKeyed>(elements, key, filler, fixed_elements_kind);
9109
9110     builder.EndBody();
9111   }
9112   return elements;
9113 }
9114
9115
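// Inline implementation of the %TypedArrayInitialize runtime call. When the
// byte offset is a Smi, the view fields and the elements are initialized
// inline; otherwise the runtime function is called.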
9116 void HOptimizedGraphBuilder::GenerateTypedArrayInitialize(
9117     CallRuntime* expr) {
9118   ZoneList<Expression*>* arguments = expr->arguments();
9119
9120   NoObservableSideEffectsScope scope(this);
9121   static const int kObjectArg = 0;
9122   static const int kArrayIdArg = 1;
9123   static const int kBufferArg = 2;
9124   static const int kByteOffsetArg = 3;
9125   static const int kByteLengthArg = 4;
9126   static const int kArgsLength = 5;
9127   ASSERT(arguments->length() == kArgsLength);
9128
9129
9130   CHECK_ALIVE(VisitForValue(arguments->at(kObjectArg)));
9131   HValue* obj = Pop();
9132
9133   ASSERT(arguments->at(kArrayIdArg)->node_type() == AstNode::kLiteral);
9134   Handle<Object> value =
9135       static_cast<Literal*>(arguments->at(kArrayIdArg))->value();
9136   ASSERT(value->IsSmi());
9137   int array_id = Smi::cast(*value)->value();
9138
9139   HValue* buffer;
9140   if (!arguments->at(kBufferArg)->IsNullLiteral()) {
9141     CHECK_ALIVE(VisitForValue(arguments->at(kBufferArg)));
9142     buffer = Pop();
9143   } else {
9144     buffer = NULL;
9145   }
9146
9147   HValue* byte_offset;
9148   bool is_zero_byte_offset;
9149
9150   if (arguments->at(kByteOffsetArg)->node_type() == AstNode::kLiteral
9151       && Smi::FromInt(0) ==
9152       *static_cast<Literal*>(arguments->at(kByteOffsetArg))->value()) {
9153     byte_offset = Add<HConstant>(static_cast<int32_t>(0));
9154     is_zero_byte_offset = true;
9155   } else {
9156     CHECK_ALIVE(VisitForValue(arguments->at(kByteOffsetArg)));
9157     byte_offset = Pop();
9158     is_zero_byte_offset = false;
9159     ASSERT(buffer != NULL);
9160   }
9161
9162   CHECK_ALIVE(VisitForValue(arguments->at(kByteLengthArg)));
9163   HValue* byte_length = Pop();
9164
9165   IfBuilder byte_offset_smi(this);
9166
9167   if (!is_zero_byte_offset) {
9168     byte_offset_smi.If<HIsSmiAndBranch>(byte_offset);
9169     byte_offset_smi.Then();
9170   }
9171
9172   ExternalArrayType array_type =
9173       kExternalInt8Array;  // Bogus initialization.
9174   size_t element_size = 1;  // Bogus initialization.
9175   ElementsKind external_elements_kind =  // Bogus initialization.
9176       EXTERNAL_INT8_ELEMENTS;
9177   ElementsKind fixed_elements_kind =  // Bogus initialization.
9178       INT8_ELEMENTS;
9179   Runtime::ArrayIdToTypeAndSize(array_id,
9180       &array_type,
9181       &external_elements_kind,
9182       &fixed_elements_kind,
9183       &element_size);
9184
9185
9186   { //  byte_offset is Smi.
9187     BuildArrayBufferViewInitialization<JSTypedArray>(
9188         obj, buffer, byte_offset, byte_length);
9189
9190
9191     HInstruction* length = AddUncasted<HDiv>(byte_length,
9192         Add<HConstant>(static_cast<int32_t>(element_size)));
9193
9194     Add<HStoreNamedField>(obj,
9195         HObjectAccess::ForJSTypedArrayLength(),
9196         length);
9197
9198     HValue* elements;
9199     if (buffer != NULL) {
9200       elements = BuildAllocateExternalElements(
9201           array_type, is_zero_byte_offset, buffer, byte_offset, length);
9202       Handle<Map> obj_map = TypedArrayMap(
9203           isolate(), array_type, external_elements_kind);
9204       AddStoreMapConstant(obj, obj_map);
9205     } else {
9206       ASSERT(is_zero_byte_offset);
9207       elements = BuildAllocateFixedTypedArray(
9208           array_type, element_size, fixed_elements_kind,
9209           byte_length, length);
9210     }
9211     Add<HStoreNamedField>(
9212         obj, HObjectAccess::ForElementsPointer(), elements);
9213   }
9214
9215   if (!is_zero_byte_offset) {
9216     byte_offset_smi.Else();
9217     { //  byte_offset is not Smi.
9218       Push(obj);
9219       CHECK_ALIVE(VisitForValue(arguments->at(kArrayIdArg)));
9220       Push(buffer);
9221       Push(byte_offset);
9222       Push(byte_length);
9223       PushArgumentsFromEnvironment(kArgsLength);
9224       Add<HCallRuntime>(expr->name(), expr->function(), kArgsLength);
9225     }
9226   }
9227   byte_offset_smi.End();
9228 }
9229
9230
9231 void HOptimizedGraphBuilder::GenerateMaxSmi(CallRuntime* expr) {
9232   ASSERT(expr->arguments()->length() == 0);
9233   HConstant* max_smi = New<HConstant>(static_cast<int32_t>(Smi::kMaxValue));
9234   return ast_context()->ReturnInstruction(max_smi, expr->id());
9235 }
9236
9237
9238 void HOptimizedGraphBuilder::GenerateTypedArrayMaxSizeInHeap(
9239     CallRuntime* expr) {
9240   ASSERT(expr->arguments()->length() == 0);
9241   HConstant* result = New<HConstant>(static_cast<int32_t>(
9242         FLAG_typed_array_max_size_in_heap));
9243   return ast_context()->ReturnInstruction(result, expr->id());
9244 }
9245
9246
9247 void HOptimizedGraphBuilder::GenerateArrayBufferGetByteLength(
9248     CallRuntime* expr) {
9249   ASSERT(expr->arguments()->length() == 1);
9250   CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
9251   HValue* buffer = Pop();
9252   HInstruction* result = New<HLoadNamedField>(
9253     buffer,
9254     static_cast<HValue*>(NULL),
9255     HObjectAccess::ForJSArrayBufferByteLength());
9256   return ast_context()->ReturnInstruction(result, expr->id());
9257 }
9258
9259
9260 void HOptimizedGraphBuilder::GenerateArrayBufferViewGetByteLength(
9261     CallRuntime* expr) {
9262   ASSERT(expr->arguments()->length() == 1);
9263   CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
9264   HValue* buffer = Pop();
9265   HInstruction* result = New<HLoadNamedField>(
9266     buffer,
9267     static_cast<HValue*>(NULL),
9268     HObjectAccess::ForJSArrayBufferViewByteLength());
9269   return ast_context()->ReturnInstruction(result, expr->id());
9270 }
9271
9272
9273 void HOptimizedGraphBuilder::GenerateArrayBufferViewGetByteOffset(
9274     CallRuntime* expr) {
9275   ASSERT(expr->arguments()->length() == 1);
9276   CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
9277   HValue* buffer = Pop();
9278   HInstruction* result = New<HLoadNamedField>(
9279     buffer,
9280     static_cast<HValue*>(NULL),
9281     HObjectAccess::ForJSArrayBufferViewByteOffset());
9282   return ast_context()->ReturnInstruction(result, expr->id());
9283 }
9284
9285
9286 void HOptimizedGraphBuilder::GenerateTypedArrayGetLength(
9287     CallRuntime* expr) {
9288   ASSERT(expr->arguments()->length() == 1);
9289   CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
9290   HValue* buffer = Pop();
9291   HInstruction* result = New<HLoadNamedField>(
9292     buffer,
9293     static_cast<HValue*>(NULL),
9294     HObjectAccess::ForJSTypedArrayLength());
9295   return ast_context()->ReturnInstruction(result, expr->id());
9296 }
9297
9298
9299 void HOptimizedGraphBuilder::VisitCallRuntime(CallRuntime* expr) {
9300   ASSERT(!HasStackOverflow());
9301   ASSERT(current_block() != NULL);
9302   ASSERT(current_block()->HasPredecessor());
9303   if (expr->is_jsruntime()) {
9304     return Bailout(kCallToAJavaScriptRuntimeFunction);
9305   }
9306
9307   const Runtime::Function* function = expr->function();
9308   ASSERT(function != NULL);
9309
9310   if (function->intrinsic_type == Runtime::INLINE ||
9311       function->intrinsic_type == Runtime::INLINE_OPTIMIZED) {
9312     ASSERT(expr->name()->length() > 0);
9313     ASSERT(expr->name()->Get(0) == '_');
9314     // Call to an inline function.
9315     int lookup_index = static_cast<int>(function->function_id) -
9316         static_cast<int>(Runtime::kFirstInlineFunction);
9317     ASSERT(lookup_index >= 0);
9318     ASSERT(static_cast<size_t>(lookup_index) <
9319            ARRAY_SIZE(kInlineFunctionGenerators));
9320     InlineFunctionGenerator generator = kInlineFunctionGenerators[lookup_index];
9321
9322     // Call the inline code generator using the pointer-to-member.
9323     (this->*generator)(expr);
9324   } else {
9325     ASSERT(function->intrinsic_type == Runtime::RUNTIME);
9326     Handle<String> name = expr->name();
9327     int argument_count = expr->arguments()->length();
9328     CHECK_ALIVE(VisitExpressions(expr->arguments()));
9329     PushArgumentsFromEnvironment(argument_count);
9330     HCallRuntime* call = New<HCallRuntime>(name, function,
9331                                            argument_count);
9332     return ast_context()->ReturnInstruction(call, expr->id());
9333   }
9334 }
9335
9336
9337 void HOptimizedGraphBuilder::VisitUnaryOperation(UnaryOperation* expr) {
9338   ASSERT(!HasStackOverflow());
9339   ASSERT(current_block() != NULL);
9340   ASSERT(current_block()->HasPredecessor());
9341   switch (expr->op()) {
9342     case Token::DELETE: return VisitDelete(expr);
9343     case Token::VOID: return VisitVoid(expr);
9344     case Token::TYPEOF: return VisitTypeof(expr);
9345     case Token::NOT: return VisitNot(expr);
9346     default: UNREACHABLE();
9347   }
9348 }
9349
9350
9351 void HOptimizedGraphBuilder::VisitDelete(UnaryOperation* expr) {
9352   Property* prop = expr->expression()->AsProperty();
9353   VariableProxy* proxy = expr->expression()->AsVariableProxy();
9354   if (prop != NULL) {
9355     CHECK_ALIVE(VisitForValue(prop->obj()));
9356     CHECK_ALIVE(VisitForValue(prop->key()));
9357     HValue* key = Pop();
9358     HValue* obj = Pop();
9359     HValue* function = AddLoadJSBuiltin(Builtins::DELETE);
9360     Add<HPushArgument>(obj);
9361     Add<HPushArgument>(key);
9362     Add<HPushArgument>(Add<HConstant>(function_strict_mode()));
9363     // TODO(olivf) InvokeFunction produces a check for the parameter count,
9364     // even though we are certain to pass the correct number of arguments here.
9365     HInstruction* instr = New<HInvokeFunction>(function, 3);
9366     return ast_context()->ReturnInstruction(instr, expr->id());
9367   } else if (proxy != NULL) {
9368     Variable* var = proxy->var();
9369     if (var->IsUnallocated()) {
9370       Bailout(kDeleteWithGlobalVariable);
9371     } else if (var->IsStackAllocated() || var->IsContextSlot()) {
9372       // Result of deleting non-global variables is false.  'this' is not
9373       // really a variable, though we implement it as one.  The
9374       // subexpression does not have side effects.
9375       HValue* value = var->is_this()
9376           ? graph()->GetConstantTrue()
9377           : graph()->GetConstantFalse();
9378       return ast_context()->ReturnValue(value);
9379     } else {
9380       Bailout(kDeleteWithNonGlobalVariable);
9381     }
9382   } else {
9383     // Result of deleting non-property, non-variable reference is true.
9384     // Evaluate the subexpression for side effects.
9385     CHECK_ALIVE(VisitForEffect(expr->expression()));
9386     return ast_context()->ReturnValue(graph()->GetConstantTrue());
9387   }
9388 }
9389
9390
9391 void HOptimizedGraphBuilder::VisitVoid(UnaryOperation* expr) {
9392   CHECK_ALIVE(VisitForEffect(expr->expression()));
9393   return ast_context()->ReturnValue(graph()->GetConstantUndefined());
9394 }
9395
9396
9397 void HOptimizedGraphBuilder::VisitTypeof(UnaryOperation* expr) {
9398   CHECK_ALIVE(VisitForTypeOf(expr->expression()));
9399   HValue* value = Pop();
9400   HInstruction* instr = New<HTypeof>(value);
9401   return ast_context()->ReturnInstruction(instr, expr->id());
9402 }
9403
9404
9405 void HOptimizedGraphBuilder::VisitNot(UnaryOperation* expr) {
9406   if (ast_context()->IsTest()) {
9407     TestContext* context = TestContext::cast(ast_context());
9408     VisitForControl(expr->expression(),
9409                     context->if_false(),
9410                     context->if_true());
9411     return;
9412   }
9413
9414   if (ast_context()->IsEffect()) {
9415     VisitForEffect(expr->expression());
9416     return;
9417   }
9418
9419   ASSERT(ast_context()->IsValue());
9420   HBasicBlock* materialize_false = graph()->CreateBasicBlock();
9421   HBasicBlock* materialize_true = graph()->CreateBasicBlock();
9422   CHECK_BAILOUT(VisitForControl(expr->expression(),
9423                                 materialize_false,
9424                                 materialize_true));
9425
9426   if (materialize_false->HasPredecessor()) {
9427     materialize_false->SetJoinId(expr->MaterializeFalseId());
9428     set_current_block(materialize_false);
9429     Push(graph()->GetConstantFalse());
9430   } else {
9431     materialize_false = NULL;
9432   }
9433
9434   if (materialize_true->HasPredecessor()) {
9435     materialize_true->SetJoinId(expr->MaterializeTrueId());
9436     set_current_block(materialize_true);
9437     Push(graph()->GetConstantTrue());
9438   } else {
9439     materialize_true = NULL;
9440   }
9441
9442   HBasicBlock* join =
9443     CreateJoin(materialize_false, materialize_true, expr->id());
9444   set_current_block(join);
9445   if (join != NULL) return ast_context()->ReturnValue(Pop());
9446 }
9447
9448
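// Builds the add/subtract-one operation for a count operation (++ or --).
// If |returns_original_input| is set, an explicit ToNumber of the input is
// also pushed so that postfix expressions can return it.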
9449 HInstruction* HOptimizedGraphBuilder::BuildIncrement(
9450     bool returns_original_input,
9451     CountOperation* expr) {
9452   // The input to the count operation is on top of the expression stack.
9453   Representation rep = Representation::FromType(expr->type());
9454   if (rep.IsNone() || rep.IsTagged()) {
9455     rep = Representation::Smi();
9456   }
9457
9458   if (returns_original_input) {
9459     // We need an explicit HValue representing ToNumber(input).  The
9460     // actual HChange instruction we need is (sometimes) added in a later
9461     // phase, so it is not available now to be used as an input to HAdd and
9462     // as the return value.
9463     HInstruction* number_input = AddUncasted<HForceRepresentation>(Pop(), rep);
9464     if (!rep.IsDouble()) {
9465       number_input->SetFlag(HInstruction::kFlexibleRepresentation);
9466       number_input->SetFlag(HInstruction::kCannotBeTagged);
9467     }
9468     Push(number_input);
9469   }
9470
9471   // The addition has no side effects, so we do not need
9472   // to simulate the expression stack after this instruction.
9473   // Any later failures deopt to the load of the input or earlier.
9474   HConstant* delta = (expr->op() == Token::INC)
9475       ? graph()->GetConstant1()
9476       : graph()->GetConstantMinus1();
9477   HInstruction* instr = AddUncasted<HAdd>(Top(), delta);
9478   if (instr->IsAdd()) {
9479     HAdd* add = HAdd::cast(instr);
9480     add->set_observed_input_representation(1, rep);
9481     add->set_observed_input_representation(2, Representation::Smi());
9482   }
9483   instr->SetFlag(HInstruction::kCannotBeTagged);
9484   instr->ClearAllSideEffects();
9485   return instr;
9486 }
9487
9488
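// Performs BuildStore inside an EffectContext so that the stored value is
// not left on the expression stack of the enclosing context.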
9489 void HOptimizedGraphBuilder::BuildStoreForEffect(Expression* expr,
9490                                                  Property* prop,
9491                                                  BailoutId ast_id,
9492                                                  BailoutId return_id,
9493                                                  HValue* object,
9494                                                  HValue* key,
9495                                                  HValue* value) {
9496   EffectContext for_effect(this);
9497   Push(object);
9498   if (key != NULL) Push(key);
9499   Push(value);
9500   BuildStore(expr, prop, ast_id, return_id);
9501 }
9502
9503
9504 void HOptimizedGraphBuilder::VisitCountOperation(CountOperation* expr) {
9505   ASSERT(!HasStackOverflow());
9506   ASSERT(current_block() != NULL);
9507   ASSERT(current_block()->HasPredecessor());
9508   if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
9509   Expression* target = expr->expression();
9510   VariableProxy* proxy = target->AsVariableProxy();
9511   Property* prop = target->AsProperty();
9512   if (proxy == NULL && prop == NULL) {
9513     return Bailout(kInvalidLhsInCountOperation);
9514   }
9515
9516   // Match the full code generator stack by simulating an extra stack
9517   // element for postfix operations in a non-effect context.  The return
9518   // value is ToNumber(input).
9519   bool returns_original_input =
9520       expr->is_postfix() && !ast_context()->IsEffect();
9521   HValue* input = NULL;  // ToNumber(original_input).
9522   HValue* after = NULL;  // The result after incrementing or decrementing.
9523
9524   if (proxy != NULL) {
9525     Variable* var = proxy->var();
9526     if (var->mode() == CONST_LEGACY)  {
9527       return Bailout(kUnsupportedCountOperationWithConst);
9528     }
9529     // Argument of the count operation is a variable, not a property.
9530     ASSERT(prop == NULL);
9531     CHECK_ALIVE(VisitForValue(target));
9532
9533     after = BuildIncrement(returns_original_input, expr);
9534     input = returns_original_input ? Top() : Pop();
9535     Push(after);
9536
9537     switch (var->location()) {
9538       case Variable::UNALLOCATED:
9539         HandleGlobalVariableAssignment(var,
9540                                        after,
9541                                        expr->AssignmentId());
9542         break;
9543
9544       case Variable::PARAMETER:
9545       case Variable::LOCAL:
9546         BindIfLive(var, after);
9547         break;
9548
9549       case Variable::CONTEXT: {
9550         // Bail out if we try to mutate a parameter value in a function
9551         // using the arguments object.  We do not (yet) correctly handle the
9552         // arguments property of the function.
9553         if (current_info()->scope()->arguments() != NULL) {
9554           // Parameters will rewrite to context slots.  We have no direct
9555           // way to detect that the variable is a parameter so we use a
9556           // linear search of the parameter list.
9557           int count = current_info()->scope()->num_parameters();
9558           for (int i = 0; i < count; ++i) {
9559             if (var == current_info()->scope()->parameter(i)) {
9560               return Bailout(kAssignmentToParameterInArgumentsObject);
9561             }
9562           }
9563         }
9564
9565         HValue* context = BuildContextChainWalk(var);
9566         HStoreContextSlot::Mode mode = IsLexicalVariableMode(var->mode())
9567             ? HStoreContextSlot::kCheckDeoptimize : HStoreContextSlot::kNoCheck;
9568         HStoreContextSlot* instr = Add<HStoreContextSlot>(context, var->index(),
9569                                                           mode, after);
9570         if (instr->HasObservableSideEffects()) {
9571           Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
9572         }
9573         break;
9574       }
9575
9576       case Variable::LOOKUP:
9577         return Bailout(kLookupVariableInCountOperation);
9578     }
9579
9580     Drop(returns_original_input ? 2 : 1);
9581     return ast_context()->ReturnValue(expr->is_postfix() ? input : after);
9582   }
9583
9584   // Argument of the count operation is a property.
9585   ASSERT(prop != NULL);
9586   if (returns_original_input) Push(graph()->GetConstantUndefined());
9587
9588   CHECK_ALIVE(VisitForValue(prop->obj()));
9589   HValue* object = Top();
9590
9591   HValue* key = NULL;
9592   if ((!prop->IsFunctionPrototype() && !prop->key()->IsPropertyName()) ||
9593       prop->IsStringAccess()) {
9594     CHECK_ALIVE(VisitForValue(prop->key()));
9595     key = Top();
9596   }
9597
9598   CHECK_ALIVE(PushLoad(prop, object, key));
9599
9600   after = BuildIncrement(returns_original_input, expr);
9601
9602   if (returns_original_input) {
9603     input = Pop();
9604     // Drop object and key to push them again in the effect context below.
9605     Drop(key == NULL ? 1 : 2);
9606     environment()->SetExpressionStackAt(0, input);
9607     CHECK_ALIVE(BuildStoreForEffect(
9608         expr, prop, expr->id(), expr->AssignmentId(), object, key, after));
9609     return ast_context()->ReturnValue(Pop());
9610   }
9611
9612   environment()->SetExpressionStackAt(0, after);
9613   return BuildStore(expr, prop, expr->id(), expr->AssignmentId());
9614 }
9615
9616
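// Builds the char-code-at primitive. For constant string and index operands
// the result is folded to a constant (NaN for an out-of-range index);
// otherwise a string check, a bounds check and HStringCharCodeAt are emitted.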
9617 HInstruction* HOptimizedGraphBuilder::BuildStringCharCodeAt(
9618     HValue* string,
9619     HValue* index) {
9620   if (string->IsConstant() && index->IsConstant()) {
9621     HConstant* c_string = HConstant::cast(string);
9622     HConstant* c_index = HConstant::cast(index);
9623     if (c_string->HasStringValue() && c_index->HasNumberValue()) {
9624       int32_t i = c_index->NumberValueAsInteger32();
9625       Handle<String> s = c_string->StringValue();
9626       if (i < 0 || i >= s->length()) {
9627         return New<HConstant>(OS::nan_value());
9628       }
9629       return New<HConstant>(s->Get(i));
9630     }
9631   }
9632   string = BuildCheckString(string);
9633   index = Add<HBoundsCheck>(index, AddLoadStringLength(string));
9634   return New<HStringCharCodeAt>(string, index);
9635 }
9636
9637
9638 // Checks if the given shift amounts have one of the following forms:
9639 // (N1) and (N2) with N1 + N2 == 32, or (sa) and (32 - sa).
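// For example, constant shift amounts 7 and 25 qualify, as does a
// non-constant sa paired with the expression (32 - sa).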
9640 static bool ShiftAmountsAllowReplaceByRotate(HValue* sa,
9641                                              HValue* const32_minus_sa) {
9642   if (sa->IsConstant() && const32_minus_sa->IsConstant()) {
9643     const HConstant* c1 = HConstant::cast(sa);
9644     const HConstant* c2 = HConstant::cast(const32_minus_sa);
9645     return c1->HasInteger32Value() && c2->HasInteger32Value() &&
9646         (c1->Integer32Value() + c2->Integer32Value() == 32);
9647   }
9648   if (!const32_minus_sa->IsSub()) return false;
9649   HSub* sub = HSub::cast(const32_minus_sa);
9650   return sub->left()->EqualsInteger32Constant(32) && sub->right() == sa;
9651 }
9652
9653
9654 // Checks whether the left and the right are shift instructions with opposite
9655 // directions that can be replaced by a single rotate-right instruction.
9656 // Returns the operand and the shift amount for the rotate instruction in the
9657 // former case.
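// For example (illustrative): with a 32-bit integer x, the pattern
// (x << 3) | (x >>> 29) matches and is rewritten as a rotate right of x by 29.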
9658 bool HGraphBuilder::MatchRotateRight(HValue* left,
9659                                      HValue* right,
9660                                      HValue** operand,
9661                                      HValue** shift_amount) {
9662   HShl* shl;
9663   HShr* shr;
9664   if (left->IsShl() && right->IsShr()) {
9665     shl = HShl::cast(left);
9666     shr = HShr::cast(right);
9667   } else if (left->IsShr() && right->IsShl()) {
9668     shl = HShl::cast(right);
9669     shr = HShr::cast(left);
9670   } else {
9671     return false;
9672   }
9673   if (shl->left() != shr->left()) return false;
9674
9675   if (!ShiftAmountsAllowReplaceByRotate(shl->right(), shr->right()) &&
9676       !ShiftAmountsAllowReplaceByRotate(shr->right(), shl->right())) {
9677     return false;
9678   }
9679   *operand = shr->left();
9680   *shift_amount = shr->right();
9681   return true;
9682 }
9683
9684
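// Conservatively determines whether |right|, used as a shift count, can be
// zero after masking with 0x1f. Only a constant whose low five bits are
// non-zero is known to be safe.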
9685 bool CanBeZero(HValue* right) {
9686   if (right->IsConstant()) {
9687     HConstant* right_const = HConstant::cast(right);
9688     if (right_const->HasInteger32Value() &&
9689        (right_const->Integer32Value() & 0x1f) != 0) {
9690       return false;
9691     }
9692   }
9693   return true;
9694 }
9695
9696
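// Forces |number| into the representation implied by the |expected| type
// (Smi for SignedSmall, Int32 for Signed32); other values are returned
// unchanged.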
9697 HValue* HGraphBuilder::EnforceNumberType(HValue* number,
9698                                          Type* expected) {
9699   if (expected->Is(Type::SignedSmall())) {
9700     return AddUncasted<HForceRepresentation>(number, Representation::Smi());
9701   }
9702   if (expected->Is(Type::Signed32())) {
9703     return AddUncasted<HForceRepresentation>(number,
9704                                              Representation::Integer32());
9705   }
9706   return number;
9707 }
9708
9709
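// If |value| is a constant that can be truncated to a number, the truncated
// constant is returned and |*expected| is narrowed to Number; otherwise
// |value| is returned unchanged, with |*expected| adjusted for conversions
// that HChange already performs.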
9710 HValue* HGraphBuilder::TruncateToNumber(HValue* value, Type** expected) {
9711   if (value->IsConstant()) {
9712     HConstant* constant = HConstant::cast(value);
9713     Maybe<HConstant*> number = constant->CopyToTruncatedNumber(zone());
9714     if (number.has_value) {
9715       *expected = Type::Number(zone());
9716       return AddInstruction(number.value);
9717     }
9718   }
9719
9720   // We put temporary values on the stack, which don't correspond to anything
9721   // in baseline code. Since nothing is observable, we avoid recording those
9722   // pushes with a NoObservableSideEffectsScope.
9723   NoObservableSideEffectsScope no_effects(this);
9724
9725   Type* expected_type = *expected;
9726
9727   // Separate the number type from the rest.
9728   Type* expected_obj =
9729       Type::Intersect(expected_type, Type::NonNumber(zone()), zone());
9730   Type* expected_number =
9731       Type::Intersect(expected_type, Type::Number(zone()), zone());
9732
9733   // We expect to get a number.
9734   // (We need to check first, since Type::None->Is(Type::Any()) == true.)
9735   if (expected_obj->Is(Type::None())) {
9736     ASSERT(!expected_number->Is(Type::None(zone())));
9737     return value;
9738   }
9739
9740   if (expected_obj->Is(Type::Undefined(zone()))) {
9741     // This is already done by HChange.
9742     *expected = Type::Union(expected_number, Type::Float(zone()), zone());
9743     return value;
9744   }
9745
9746   return value;
9747 }
9748
9749
9750 HValue* HOptimizedGraphBuilder::BuildBinaryOperation(
9751     BinaryOperation* expr,
9752     HValue* left,
9753     HValue* right,
9754     PushBeforeSimulateBehavior push_sim_result) {
9755   Type* left_type = expr->left()->bounds().lower;
9756   Type* right_type = expr->right()->bounds().lower;
9757   Type* result_type = expr->bounds().lower;
9758   Maybe<int> fixed_right_arg = expr->fixed_right_arg();
9759   Handle<AllocationSite> allocation_site = expr->allocation_site();
9760
9761   PretenureFlag pretenure_flag = !FLAG_allocation_site_pretenuring ?
9762       isolate()->heap()->GetPretenureMode() : NOT_TENURED;
9763
9764   HAllocationMode allocation_mode =
9765       FLAG_allocation_site_pretenuring
9766       ? (allocation_site.is_null()
9767          ? HAllocationMode(NOT_TENURED)
9768          : HAllocationMode(allocation_site))
9769       : HAllocationMode(pretenure_flag);
9770
9771   HValue* result = HGraphBuilder::BuildBinaryOperation(
9772       expr->op(), left, right, left_type, right_type, result_type,
9773       fixed_right_arg, allocation_mode);
9774   // Add a simulate after instructions with observable side effects, and
9775   // after phis, which are the result of BuildBinaryOperation when we
9776   // inlined some complex subgraph.
9777   if (result->HasObservableSideEffects() || result->IsPhi()) {
9778     if (push_sim_result == PUSH_BEFORE_SIMULATE) {
9779       Push(result);
9780       Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
9781       Drop(1);
9782     } else {
9783       Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
9784     }
9785   }
9786   return result;
9787 }
9788
9789
9790 HValue* HGraphBuilder::BuildBinaryOperation(
9791     Token::Value op,
9792     HValue* left,
9793     HValue* right,
9794     Type* left_type,
9795     Type* right_type,
9796     Type* result_type,
9797     Maybe<int> fixed_right_arg,
9798     HAllocationMode allocation_mode) {
9799
9800   Representation left_rep = Representation::FromType(left_type);
9801   Representation right_rep = Representation::FromType(right_type);
9802
9803   bool maybe_string_add = op == Token::ADD &&
9804                           (left_type->Maybe(Type::String()) ||
9805                            right_type->Maybe(Type::String()));
9806
9807   if (left_type->Is(Type::None())) {
9808     Add<HDeoptimize>("Insufficient type feedback for LHS of binary operation",
9809                      Deoptimizer::SOFT);
9810     // TODO(rossberg): we should be able to get rid of non-continuous
9811     // defaults.
9812     left_type = Type::Any(zone());
9813   } else {
9814     if (!maybe_string_add) left = TruncateToNumber(left, &left_type);
9815     left_rep = Representation::FromType(left_type);
9816   }
9817
9818   if (right_type->Is(Type::None())) {
9819     Add<HDeoptimize>("Insufficient type feedback for RHS of binary operation",
9820                      Deoptimizer::SOFT);
9821     right_type = Type::Any(zone());
9822   } else {
9823     if (!maybe_string_add) right = TruncateToNumber(right, &right_type);
9824     right_rep = Representation::FromType(right_type);
9825   }
9826
9827   // Special case for string addition here.
9828   if (op == Token::ADD &&
9829       (left_type->Is(Type::String()) || right_type->Is(Type::String()))) {
9830     // Validate type feedback for left argument.
9831     if (left_type->Is(Type::String())) {
9832       left = BuildCheckString(left);
9833     }
9834
9835     // Validate type feedback for right argument.
9836     if (right_type->Is(Type::String())) {
9837       right = BuildCheckString(right);
9838     }
9839
9840     // Convert left argument as necessary.
9841     if (left_type->Is(Type::Number())) {
9842       ASSERT(right_type->Is(Type::String()));
9843       left = BuildNumberToString(left, left_type);
9844     } else if (!left_type->Is(Type::String())) {
9845       ASSERT(right_type->Is(Type::String()));
9846       HValue* function = AddLoadJSBuiltin(Builtins::STRING_ADD_RIGHT);
9847       Add<HPushArgument>(left);
9848       Add<HPushArgument>(right);
9849       return AddUncasted<HInvokeFunction>(function, 2);
9850     }
9851
9852     // Convert right argument as necessary.
9853     if (right_type->Is(Type::Number())) {
9854       ASSERT(left_type->Is(Type::String()));
9855       right = BuildNumberToString(right, right_type);
9856     } else if (!right_type->Is(Type::String())) {
9857       ASSERT(left_type->Is(Type::String()));
9858       HValue* function = AddLoadJSBuiltin(Builtins::STRING_ADD_LEFT);
9859       Add<HPushArgument>(left);
9860       Add<HPushArgument>(right);
9861       return AddUncasted<HInvokeFunction>(function, 2);
9862     }
9863
9864     // Fast path for empty constant strings.
9865     if (left->IsConstant() &&
9866         HConstant::cast(left)->HasStringValue() &&
9867         HConstant::cast(left)->StringValue()->length() == 0) {
9868       return right;
9869     }
9870     if (right->IsConstant() &&
9871         HConstant::cast(right)->HasStringValue() &&
9872         HConstant::cast(right)->StringValue()->length() == 0) {
9873       return left;
9874     }
9875
9876     // Register the dependent code with the allocation site.
9877     if (!allocation_mode.feedback_site().is_null()) {
9878       ASSERT(!graph()->info()->IsStub());
9879       Handle<AllocationSite> site(allocation_mode.feedback_site());
9880       AllocationSite::AddDependentCompilationInfo(
9881           site, AllocationSite::TENURING, top_info());
9882     }
9883
9884     // Inline the string addition into the stub when creating allocation
9885     // mementos to gather allocation site feedback, or if we can statically
9886     // infer that we're going to create a cons string.
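         // (The + 1 assumes the other operand contributes at least one
         // character; in that case the result length reaches
         // ConsString::kMinLength and the addition produces a cons string,
         // so inlining it is worthwhile.)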
9887     if ((graph()->info()->IsStub() &&
9888          allocation_mode.CreateAllocationMementos()) ||
9889         (left->IsConstant() &&
9890          HConstant::cast(left)->HasStringValue() &&
9891          HConstant::cast(left)->StringValue()->length() + 1 >=
9892            ConsString::kMinLength) ||
9893         (right->IsConstant() &&
9894          HConstant::cast(right)->HasStringValue() &&
9895          HConstant::cast(right)->StringValue()->length() + 1 >=
9896            ConsString::kMinLength)) {
9897       return BuildStringAdd(left, right, allocation_mode);
9898     }
9899
9900     // Fallback to using the string add stub.
9901     return AddUncasted<HStringAdd>(
9902         left, right, allocation_mode.GetPretenureMode(),
9903         STRING_ADD_CHECK_NONE, allocation_mode.feedback_site());
9904   }
9905
9906   if (graph()->info()->IsStub()) {
9907     left = EnforceNumberType(left, left_type);
9908     right = EnforceNumberType(right, right_type);
9909   }
9910
9911   Representation result_rep = Representation::FromType(result_type);
9912
9913   bool is_non_primitive = (left_rep.IsTagged() && !left_rep.IsSmi()) ||
9914                           (right_rep.IsTagged() && !right_rep.IsSmi());
9915
9916   HInstruction* instr = NULL;
9917   // Only the stub is allowed to call into the runtime, since otherwise we would
9918   // inline several instructions (including the two pushes) for every tagged
9919   // operation in optimized code, which is more expensive than a stub call.
9920   if (graph()->info()->IsStub() && is_non_primitive) {
9921     HValue* function = AddLoadJSBuiltin(BinaryOpIC::TokenToJSBuiltin(op));
9922     Add<HPushArgument>(left);
9923     Add<HPushArgument>(right);
9924     instr = AddUncasted<HInvokeFunction>(function, 2);
9925   } else {
9926     switch (op) {
9927       case Token::ADD:
9928         instr = AddUncasted<HAdd>(left, right);
9929         break;
9930       case Token::SUB:
9931         instr = AddUncasted<HSub>(left, right);
9932         break;
9933       case Token::MUL:
9934         instr = AddUncasted<HMul>(left, right);
9935         break;
9936       case Token::MOD: {
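             // Type feedback may have recorded a single constant divisor
             // (fixed_right_arg); guard that the RHS still equals that constant
             // so the modulus can be specialized, and deopt otherwise.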
9937         if (fixed_right_arg.has_value &&
9938             !right->EqualsInteger32Constant(fixed_right_arg.value)) {
9939           HConstant* fixed_right = Add<HConstant>(
9940               static_cast<int>(fixed_right_arg.value));
9941           IfBuilder if_same(this);
9942           if_same.If<HCompareNumericAndBranch>(right, fixed_right, Token::EQ);
9943           if_same.Then();
9944           if_same.ElseDeopt("Unexpected RHS of binary operation");
9945           right = fixed_right;
9946         }
9947         instr = AddUncasted<HMod>(left, right);
9948         break;
9949       }
9950       case Token::DIV:
9951         instr = AddUncasted<HDiv>(left, right);
9952         break;
9953       case Token::BIT_XOR:
9954       case Token::BIT_AND:
9955         instr = AddUncasted<HBitwise>(op, left, right);
9956         break;
9957       case Token::BIT_OR: {
9958         HValue* operand, *shift_amount;
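             // A bitwise OR of two matching shifts, roughly of the form
             // (x << n) | (x >>> (32 - n)) with int32 inputs, can be emitted as
             // a single rotate-right (HRor) instruction.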
9959         if (left_type->Is(Type::Signed32()) &&
9960             right_type->Is(Type::Signed32()) &&
9961             MatchRotateRight(left, right, &operand, &shift_amount)) {
9962           instr = AddUncasted<HRor>(operand, shift_amount);
9963         } else {
9964           instr = AddUncasted<HBitwise>(op, left, right);
9965         }
9966         break;
9967       }
9968       case Token::SAR:
9969         instr = AddUncasted<HSar>(left, right);
9970         break;
9971       case Token::SHR:
9972         instr = AddUncasted<HShr>(left, right);
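             // An unsigned shift whose amount may be zero (e.g. x >>> 0) can
             // produce values above kMaxInt; record it so the uint32 analysis
             // phase can decide whether treating the result as uint32 is safe.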
9973         if (FLAG_opt_safe_uint32_operations && instr->IsShr() &&
9974             CanBeZero(right)) {
9975           graph()->RecordUint32Instruction(instr);
9976         }
9977         break;
9978       case Token::SHL:
9979         instr = AddUncasted<HShl>(left, right);
9980         break;
9981       default:
9982         UNREACHABLE();
9983     }
9984   }
9985
9986   if (instr->IsBinaryOperation()) {
9987     HBinaryOperation* binop = HBinaryOperation::cast(instr);
9988     binop->set_observed_input_representation(1, left_rep);
9989     binop->set_observed_input_representation(2, right_rep);
9990     binop->initialize_output_representation(result_rep);
9991     if (graph()->info()->IsStub()) {
9992       // Stub should not call into stub.
9993       instr->SetFlag(HValue::kCannotBeTagged);
9994       // And should truncate on HForceRepresentation already.
9995       if (left->IsForceRepresentation()) {
9996         left->CopyFlag(HValue::kTruncatingToSmi, instr);
9997         left->CopyFlag(HValue::kTruncatingToInt32, instr);
9998       }
9999       if (right->IsForceRepresentation()) {
10000         right->CopyFlag(HValue::kTruncatingToSmi, instr);
10001         right->CopyFlag(HValue::kTruncatingToInt32, instr);
10002       }
10003     }
10004   }
10005   return instr;
10006 }
10007
10008
10009 // Check for the form (%_ClassOf(foo) === 'BarClass').
10010 static bool IsClassOfTest(CompareOperation* expr) {
10011   if (expr->op() != Token::EQ_STRICT) return false;
10012   CallRuntime* call = expr->left()->AsCallRuntime();
10013   if (call == NULL) return false;
10014   Literal* literal = expr->right()->AsLiteral();
10015   if (literal == NULL) return false;
10016   if (!literal->value()->IsString()) return false;
10017   if (!call->name()->IsOneByteEqualTo(STATIC_ASCII_VECTOR("_ClassOf"))) {
10018     return false;
10019   }
10020   ASSERT(call->arguments()->length() == 1);
10021   return true;
10022 }
10023
10024
10025 void HOptimizedGraphBuilder::VisitBinaryOperation(BinaryOperation* expr) {
10026   ASSERT(!HasStackOverflow());
10027   ASSERT(current_block() != NULL);
10028   ASSERT(current_block()->HasPredecessor());
10029   switch (expr->op()) {
10030     case Token::COMMA:
10031       return VisitComma(expr);
10032     case Token::OR:
10033     case Token::AND:
10034       return VisitLogicalExpression(expr);
10035     default:
10036       return VisitArithmeticExpression(expr);
10037   }
10038 }
10039
10040
10041 void HOptimizedGraphBuilder::VisitComma(BinaryOperation* expr) {
10042   CHECK_ALIVE(VisitForEffect(expr->left()));
10043   // Visit the right subexpression in the same AST context as the entire
10044   // expression.
10045   Visit(expr->right());
10046 }
10047
10048
10049 void HOptimizedGraphBuilder::VisitLogicalExpression(BinaryOperation* expr) {
10050   bool is_logical_and = expr->op() == Token::AND;
10051   if (ast_context()->IsTest()) {
10052     TestContext* context = TestContext::cast(ast_context());
10053     // Translate left subexpression.
10054     HBasicBlock* eval_right = graph()->CreateBasicBlock();
10055     if (is_logical_and) {
10056       CHECK_BAILOUT(VisitForControl(expr->left(),
10057                                     eval_right,
10058                                     context->if_false()));
10059     } else {
10060       CHECK_BAILOUT(VisitForControl(expr->left(),
10061                                     context->if_true(),
10062                                     eval_right));
10063     }
10064
10065     // Translate right subexpression by visiting it in the same AST
10066     // context as the entire expression.
10067     if (eval_right->HasPredecessor()) {
10068       eval_right->SetJoinId(expr->RightId());
10069       set_current_block(eval_right);
10070       Visit(expr->right());
10071     }
10072
10073   } else if (ast_context()->IsValue()) {
10074     CHECK_ALIVE(VisitForValue(expr->left()));
10075     ASSERT(current_block() != NULL);
10076     HValue* left_value = Top();
10077
10078     // Short-circuit left values that always evaluate to the same boolean value.
10079     if (expr->left()->ToBooleanIsTrue() || expr->left()->ToBooleanIsFalse()) {
10080       // l (evals true)  && r -> r
10081       // l (evals true)  || r -> l
10082       // l (evals false) && r -> l
10083       // l (evals false) || r -> r
10084       if (is_logical_and == expr->left()->ToBooleanIsTrue()) {
10085         Drop(1);
10086         CHECK_ALIVE(VisitForValue(expr->right()));
10087       }
10088       return ast_context()->ReturnValue(Pop());
10089     }
10090
10091     // We need an extra block to maintain edge-split form.
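          // (Edge-split form: no block with multiple successors may branch
          // directly to a block with multiple predecessors; the empty block
          // splits that critical edge.)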
10092     HBasicBlock* empty_block = graph()->CreateBasicBlock();
10093     HBasicBlock* eval_right = graph()->CreateBasicBlock();
10094     ToBooleanStub::Types expected(expr->left()->to_boolean_types());
10095     HBranch* test = is_logical_and
10096         ? New<HBranch>(left_value, expected, eval_right, empty_block)
10097         : New<HBranch>(left_value, expected, empty_block, eval_right);
10098     FinishCurrentBlock(test);
10099
10100     set_current_block(eval_right);
10101     Drop(1);  // Value of the left subexpression.
10102     CHECK_BAILOUT(VisitForValue(expr->right()));
10103
10104     HBasicBlock* join_block =
10105       CreateJoin(empty_block, current_block(), expr->id());
10106     set_current_block(join_block);
10107     return ast_context()->ReturnValue(Pop());
10108
10109   } else {
10110     ASSERT(ast_context()->IsEffect());
10111     // In an effect context, we don't need the value of the left subexpression,
10112     // only its control flow and side effects.  We need an extra block to
10113     // maintain edge-split form.
10114     HBasicBlock* empty_block = graph()->CreateBasicBlock();
10115     HBasicBlock* right_block = graph()->CreateBasicBlock();
10116     if (is_logical_and) {
10117       CHECK_BAILOUT(VisitForControl(expr->left(), right_block, empty_block));
10118     } else {
10119       CHECK_BAILOUT(VisitForControl(expr->left(), empty_block, right_block));
10120     }
10121
10122     // TODO(kmillikin): Find a way to fix this.  It's ugly that there are
10123     // actually two empty blocks (one here and one inserted by
10124     // TestContext::BuildBranch), that they both have an HSimulate (though
10125     // the second one is not a merge node), and that we really have no good
10126     // AST ID to put on that first HSimulate.
10127
10128     if (empty_block->HasPredecessor()) {
10129       empty_block->SetJoinId(expr->id());
10130     } else {
10131       empty_block = NULL;
10132     }
10133
10134     if (right_block->HasPredecessor()) {
10135       right_block->SetJoinId(expr->RightId());
10136       set_current_block(right_block);
10137       CHECK_BAILOUT(VisitForEffect(expr->right()));
10138       right_block = current_block();
10139     } else {
10140       right_block = NULL;
10141     }
10142
10143     HBasicBlock* join_block =
10144       CreateJoin(empty_block, right_block, expr->id());
10145     set_current_block(join_block);
10146     // We did not materialize any value in the predecessor environments,
10147     // so there is no need to handle it here.
10148   }
10149 }
10150
10151
10152 void HOptimizedGraphBuilder::VisitArithmeticExpression(BinaryOperation* expr) {
10153   CHECK_ALIVE(VisitForValue(expr->left()));
10154   CHECK_ALIVE(VisitForValue(expr->right()));
10155   SetSourcePosition(expr->position());
10156   HValue* right = Pop();
10157   HValue* left = Pop();
10158   HValue* result =
10159       BuildBinaryOperation(expr, left, right,
10160           ast_context()->IsEffect() ? NO_PUSH_BEFORE_SIMULATE
10161                                     : PUSH_BEFORE_SIMULATE);
10162   if (FLAG_hydrogen_track_positions && result->IsBinaryOperation()) {
10163     HBinaryOperation::cast(result)->SetOperandPositions(
10164         zone(),
10165         ScriptPositionToSourcePosition(expr->left()->position()),
10166         ScriptPositionToSourcePosition(expr->right()->position()));
10167   }
10168   return ast_context()->ReturnValue(result);
10169 }
10170
10171
10172 void HOptimizedGraphBuilder::HandleLiteralCompareTypeof(CompareOperation* expr,
10173                                                         Expression* sub_expr,
10174                                                         Handle<String> check) {
10175   CHECK_ALIVE(VisitForTypeOf(sub_expr));
10176   SetSourcePosition(expr->position());
10177   HValue* value = Pop();
10178   HTypeofIsAndBranch* instr = New<HTypeofIsAndBranch>(value, check);
10179   return ast_context()->ReturnControl(instr, expr->id());
10180 }
10181
10182
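      // Returns true for a strict equality where one operand is a boolean
      // constant, e.g. (x === true). Booleans are singleton oddball values, so
      // such a comparison can be lowered to a plain reference comparison.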
10183 static bool IsLiteralCompareBool(Isolate* isolate,
10184                                  HValue* left,
10185                                  Token::Value op,
10186                                  HValue* right) {
10187   return op == Token::EQ_STRICT &&
10188       ((left->IsConstant() &&
10189           HConstant::cast(left)->handle(isolate)->IsBoolean()) ||
10190        (right->IsConstant() &&
10191            HConstant::cast(right)->handle(isolate)->IsBoolean()));
10192 }
10193
10194
10195 void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
10196   ASSERT(!HasStackOverflow());
10197   ASSERT(current_block() != NULL);
10198   ASSERT(current_block()->HasPredecessor());
10199
10200   if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
10201
10202   // Check for a few fast cases. The AST visiting behavior must be in sync
10203   // with the full codegen: We don't push both left and right values onto
10204   // the expression stack when one side is a special-case literal.
10205   Expression* sub_expr = NULL;
10206   Handle<String> check;
10207   if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
10208     return HandleLiteralCompareTypeof(expr, sub_expr, check);
10209   }
10210   if (expr->IsLiteralCompareUndefined(&sub_expr, isolate())) {
10211     return HandleLiteralCompareNil(expr, sub_expr, kUndefinedValue);
10212   }
10213   if (expr->IsLiteralCompareNull(&sub_expr)) {
10214     return HandleLiteralCompareNil(expr, sub_expr, kNullValue);
10215   }
10216
10217   if (IsClassOfTest(expr)) {
10218     CallRuntime* call = expr->left()->AsCallRuntime();
10219     ASSERT(call->arguments()->length() == 1);
10220     CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10221     HValue* value = Pop();
10222     Literal* literal = expr->right()->AsLiteral();
10223     Handle<String> rhs = Handle<String>::cast(literal->value());
10224     HClassOfTestAndBranch* instr = New<HClassOfTestAndBranch>(value, rhs);
10225     return ast_context()->ReturnControl(instr, expr->id());
10226   }
10227
10228   Type* left_type = expr->left()->bounds().lower;
10229   Type* right_type = expr->right()->bounds().lower;
10230   Type* combined_type = expr->combined_type();
10231
10232   CHECK_ALIVE(VisitForValue(expr->left()));
10233   CHECK_ALIVE(VisitForValue(expr->right()));
10234
10235   if (FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
10236
10237   HValue* right = Pop();
10238   HValue* left = Pop();
10239   Token::Value op = expr->op();
10240
10241   if (IsLiteralCompareBool(isolate(), left, op, right)) {
10242     HCompareObjectEqAndBranch* result =
10243         New<HCompareObjectEqAndBranch>(left, right);
10244     return ast_context()->ReturnControl(result, expr->id());
10245   }
10246
10247   if (op == Token::INSTANCEOF) {
10248     // Check to see if the rhs of the instanceof is a global function not
10249     // residing in new space. If it is, we assume that the function will stay
10250     // the same.
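          // (A typical example is (x instanceof Foo) where Foo is an unmodified
          // global function: we check the rhs against the known function once
          // and take the cheaper HInstanceOfKnownGlobal path below.)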
10251     Handle<JSFunction> target = Handle<JSFunction>::null();
10252     VariableProxy* proxy = expr->right()->AsVariableProxy();
10253     bool global_function = (proxy != NULL) && proxy->var()->IsUnallocated();
10254     if (global_function &&
10255         current_info()->has_global_object() &&
10256         !current_info()->global_object()->IsAccessCheckNeeded()) {
10257       Handle<String> name = proxy->name();
10258       Handle<GlobalObject> global(current_info()->global_object());
10259       LookupResult lookup(isolate());
10260       global->Lookup(name, &lookup);
10261       if (lookup.IsNormal() && lookup.GetValue()->IsJSFunction()) {
10262         Handle<JSFunction> candidate(JSFunction::cast(lookup.GetValue()));
10263         // If the function is in new space we assume it's more likely to
10264         // change and thus prefer the general IC code.
10265         if (!isolate()->heap()->InNewSpace(*candidate)) {
10266           target = candidate;
10267         }
10268       }
10269     }
10270
10271     // If the target is not null, we have found a known global function that
10272     // is assumed to stay the same for this instanceof.
10273     if (target.is_null()) {
10274       HInstanceOf* result = New<HInstanceOf>(left, right);
10275       return ast_context()->ReturnInstruction(result, expr->id());
10276     } else {
10277       Add<HCheckValue>(right, target);
10278       HInstanceOfKnownGlobal* result =
10279         New<HInstanceOfKnownGlobal>(left, target);
10280       return ast_context()->ReturnInstruction(result, expr->id());
10281     }
10282
10283     // Code below assumes that we don't fall through.
10284     UNREACHABLE();
10285   } else if (op == Token::IN) {
10286     HValue* function = AddLoadJSBuiltin(Builtins::IN);
10287     Add<HPushArgument>(left);
10288     Add<HPushArgument>(right);
10289     // TODO(olivf): InvokeFunction produces a check for the parameter count,
10290     // even though we are certain to pass the correct number of arguments here.
10291     HInstruction* result = New<HInvokeFunction>(function, 2);
10292     return ast_context()->ReturnInstruction(result, expr->id());
10293   }
10294
10295   PushBeforeSimulateBehavior push_behavior =
10296     ast_context()->IsEffect() ? NO_PUSH_BEFORE_SIMULATE
10297                               : PUSH_BEFORE_SIMULATE;
10298   HControlInstruction* compare = BuildCompareInstruction(
10299       op, left, right, left_type, right_type, combined_type,
10300       ScriptPositionToSourcePosition(expr->left()->position()),
10301       ScriptPositionToSourcePosition(expr->right()->position()),
10302       push_behavior, expr->id());
10303   if (compare == NULL) return;  // Bailed out.
10304   return ast_context()->ReturnControl(compare, expr->id());
10305 }
10306
10307
10308 HControlInstruction* HOptimizedGraphBuilder::BuildCompareInstruction(
10309     Token::Value op,
10310     HValue* left,
10311     HValue* right,
10312     Type* left_type,
10313     Type* right_type,
10314     Type* combined_type,
10315     HSourcePosition left_position,
10316     HSourcePosition right_position,
10317     PushBeforeSimulateBehavior push_sim_result,
10318     BailoutId bailout_id) {
10319   // Cases handled below depend on collected type feedback. They should
10320   // soft deoptimize when there is no type feedback.
10321   if (combined_type->Is(Type::None())) {
10322     Add<HDeoptimize>("Insufficient type feedback for combined type "
10323                      "of binary operation",
10324                      Deoptimizer::SOFT);
10325     combined_type = left_type = right_type = Type::Any(zone());
10326   }
10327
10328   Representation left_rep = Representation::FromType(left_type);
10329   Representation right_rep = Representation::FromType(right_type);
10330   Representation combined_rep = Representation::FromType(combined_type);
10331
10332   if (combined_type->Is(Type::Receiver())) {
10333     if (Token::IsEqualityOp(op)) {
10334       // HCompareObjectEqAndBranch can only deal with objects, so
10335       // exclude numbers.
10336       if ((left->IsConstant() &&
10337            HConstant::cast(left)->HasNumberValue()) ||
10338           (right->IsConstant() &&
10339            HConstant::cast(right)->HasNumberValue())) {
10340         Add<HDeoptimize>("Type mismatch between feedback and constant",
10341                          Deoptimizer::SOFT);
10342         // The caller expects a branch instruction, so make it happy.
10343         return New<HBranch>(graph()->GetConstantTrue());
10344       }
10345       // Can we get away with map check and not instance type check?
10346       HValue* operand_to_check =
10347           left->block()->block_id() < right->block()->block_id() ? left : right;
10348       if (combined_type->IsClass()) {
10349         Handle<Map> map = combined_type->AsClass()->Map();
10350         AddCheckMap(operand_to_check, map);
10351         HCompareObjectEqAndBranch* result =
10352             New<HCompareObjectEqAndBranch>(left, right);
10353         if (FLAG_hydrogen_track_positions) {
10354           result->set_operand_position(zone(), 0, left_position);
10355           result->set_operand_position(zone(), 1, right_position);
10356         }
10357         return result;
10358       } else {
10359         BuildCheckHeapObject(operand_to_check);
10360         Add<HCheckInstanceType>(operand_to_check,
10361                                 HCheckInstanceType::IS_SPEC_OBJECT);
10362         HCompareObjectEqAndBranch* result =
10363             New<HCompareObjectEqAndBranch>(left, right);
10364         return result;
10365       }
10366     } else {
10367       Bailout(kUnsupportedNonPrimitiveCompare);
10368       return NULL;
10369     }
10370   } else if (combined_type->Is(Type::InternalizedString()) &&
10371              Token::IsEqualityOp(op)) {
10372     // If we have a constant argument, it should be consistent with the type
10373     // feedback (otherwise we fail assertions in HCompareObjectEqAndBranch).
10374     if ((left->IsConstant() &&
10375          !HConstant::cast(left)->HasInternalizedStringValue()) ||
10376         (right->IsConstant() &&
10377          !HConstant::cast(right)->HasInternalizedStringValue())) {
10378       Add<HDeoptimize>("Type mismatch between feedback and constant",
10379                        Deoptimizer::SOFT);
10380       // The caller expects a branch instruction, so make it happy.
10381       return New<HBranch>(graph()->GetConstantTrue());
10382     }
10383     BuildCheckHeapObject(left);
10384     Add<HCheckInstanceType>(left, HCheckInstanceType::IS_INTERNALIZED_STRING);
10385     BuildCheckHeapObject(right);
10386     Add<HCheckInstanceType>(right, HCheckInstanceType::IS_INTERNALIZED_STRING);
10387     HCompareObjectEqAndBranch* result =
10388         New<HCompareObjectEqAndBranch>(left, right);
10389     return result;
10390   } else if (combined_type->Is(Type::String())) {
10391     BuildCheckHeapObject(left);
10392     Add<HCheckInstanceType>(left, HCheckInstanceType::IS_STRING);
10393     BuildCheckHeapObject(right);
10394     Add<HCheckInstanceType>(right, HCheckInstanceType::IS_STRING);
10395     HStringCompareAndBranch* result =
10396         New<HStringCompareAndBranch>(left, right, op);
10397     return result;
10398   } else {
10399     if (combined_rep.IsTagged() || combined_rep.IsNone()) {
10400       HCompareGeneric* result = Add<HCompareGeneric>(left, right, op);
10401       result->set_observed_input_representation(1, left_rep);
10402       result->set_observed_input_representation(2, right_rep);
10403       if (result->HasObservableSideEffects()) {
10404         if (push_sim_result == PUSH_BEFORE_SIMULATE) {
10405           Push(result);
10406           AddSimulate(bailout_id, REMOVABLE_SIMULATE);
10407           Drop(1);
10408         } else {
10409           AddSimulate(bailout_id, REMOVABLE_SIMULATE);
10410         }
10411       }
10412       // TODO(jkummerow): Can we make this more efficient?
10413       HBranch* branch = New<HBranch>(result);
10414       return branch;
10415     } else {
10416       HCompareNumericAndBranch* result =
10417           New<HCompareNumericAndBranch>(left, right, op);
10418       result->set_observed_input_representation(left_rep, right_rep);
10419       if (FLAG_hydrogen_track_positions) {
10420         result->SetOperandPositions(zone(), left_position, right_position);
10421       }
10422       return result;
10423     }
10424   }
10425 }
10426
10427
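      // Handles comparisons against a null/undefined literal. A strict
      // comparison (===) is a reference check against the corresponding
      // singleton; a sloppy comparison (==) also has to match the other nil
      // value and undetectable objects, which BuildCompareNil takes care of.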
10428 void HOptimizedGraphBuilder::HandleLiteralCompareNil(CompareOperation* expr,
10429                                                      Expression* sub_expr,
10430                                                      NilValue nil) {
10431   ASSERT(!HasStackOverflow());
10432   ASSERT(current_block() != NULL);
10433   ASSERT(current_block()->HasPredecessor());
10434   ASSERT(expr->op() == Token::EQ || expr->op() == Token::EQ_STRICT);
10435   if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
10436   CHECK_ALIVE(VisitForValue(sub_expr));
10437   HValue* value = Pop();
10438   if (expr->op() == Token::EQ_STRICT) {
10439     HConstant* nil_constant = nil == kNullValue
10440         ? graph()->GetConstantNull()
10441         : graph()->GetConstantUndefined();
10442     HCompareObjectEqAndBranch* instr =
10443         New<HCompareObjectEqAndBranch>(value, nil_constant);
10444     return ast_context()->ReturnControl(instr, expr->id());
10445   } else {
10446     ASSERT_EQ(Token::EQ, expr->op());
10447     Type* type = expr->combined_type()->Is(Type::None())
10448         ? Type::Any(zone()) : expr->combined_type();
10449     HIfContinuation continuation;
10450     BuildCompareNil(value, type, &continuation);
10451     return ast_context()->ReturnContinuation(&continuation, expr->id());
10452   }
10453 }
10454
10455
10456 HInstruction* HOptimizedGraphBuilder::BuildThisFunction() {
10457   // If we share optimized code between different closures, the
10458   // this-function is not a constant, except inside an inlined body.
10459   if (function_state()->outer() != NULL) {
10460       return New<HConstant>(
10461           function_state()->compilation_info()->closure());
10462   } else {
10463       return New<HThisFunction>();
10464   }
10465 }
10466
10467
10468 HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
10469     Handle<JSObject> boilerplate_object,
10470     AllocationSiteUsageContext* site_context) {
10471   NoObservableSideEffectsScope no_effects(this);
10472   InstanceType instance_type = boilerplate_object->map()->instance_type();
10473   ASSERT(instance_type == JS_ARRAY_TYPE || instance_type == JS_OBJECT_TYPE);
10474
10475   HType type = instance_type == JS_ARRAY_TYPE
10476       ? HType::JSArray() : HType::JSObject();
10477   HValue* object_size_constant = Add<HConstant>(
10478       boilerplate_object->map()->instance_size());
10479
10480   PretenureFlag pretenure_flag = isolate()->heap()->GetPretenureMode();
10481   if (FLAG_allocation_site_pretenuring) {
10482     pretenure_flag = site_context->current()->GetPretenureMode();
10483     Handle<AllocationSite> site(site_context->current());
10484     AllocationSite::AddDependentCompilationInfo(
10485         site, AllocationSite::TENURING, top_info());
10486   }
10487
10488   HInstruction* object = Add<HAllocate>(object_size_constant, type,
10489       pretenure_flag, instance_type, site_context->current());
10490
10491   // If allocation folding reaches Page::kMaxRegularHeapObjectSize, the
10492   // elements array may not get folded into the object. Hence, we set the
10493   // elements pointer to the empty fixed array and let store elimination
10494   // remove this store in the folding case.
10495   HConstant* empty_fixed_array = Add<HConstant>(
10496       isolate()->factory()->empty_fixed_array());
10497   Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
10498       empty_fixed_array, INITIALIZING_STORE);
10499
10500   BuildEmitObjectHeader(boilerplate_object, object);
10501
10502   Handle<FixedArrayBase> elements(boilerplate_object->elements());
10503   int elements_size = (elements->length() > 0 &&
10504       elements->map() != isolate()->heap()->fixed_cow_array_map()) ?
10505           elements->Size() : 0;
10506
10507   if (pretenure_flag == TENURED &&
10508       elements->map() == isolate()->heap()->fixed_cow_array_map() &&
10509       isolate()->heap()->InNewSpace(*elements)) {
10510     // If we would like to pretenure a fixed COW array, we must ensure that
10511     // the array is already in old space; otherwise we'll create too many
10512     // old-to-new-space pointers (overflowing the store buffer).
10513     elements = Handle<FixedArrayBase>(
10514         isolate()->factory()->CopyAndTenureFixedCOWArray(
10515             Handle<FixedArray>::cast(elements)));
10516     boilerplate_object->set_elements(*elements);
10517   }
10518
10519   HInstruction* object_elements = NULL;
10520   if (elements_size > 0) {
10521     HValue* object_elements_size = Add<HConstant>(elements_size);
10522     if (boilerplate_object->HasFastDoubleElements()) {
10523       object_elements = Add<HAllocate>(object_elements_size, HType::Tagged(),
10524           pretenure_flag, FIXED_DOUBLE_ARRAY_TYPE, site_context->current());
10525     } else {
10526       object_elements = Add<HAllocate>(object_elements_size, HType::Tagged(),
10527           pretenure_flag, FIXED_ARRAY_TYPE, site_context->current());
10528     }
10529   }
10530   BuildInitElementsInObjectHeader(boilerplate_object, object, object_elements);
10531
10532   // Copy object elements if non-COW.
10533   if (object_elements != NULL) {
10534     BuildEmitElements(boilerplate_object, elements, object_elements,
10535                       site_context);
10536   }
10537
10538   // Copy in-object properties.
10539   if (boilerplate_object->map()->NumberOfFields() != 0) {
10540     BuildEmitInObjectProperties(boilerplate_object, object, site_context,
10541                                 pretenure_flag);
10542   }
10543   return object;
10544 }
10545
10546
10547 void HOptimizedGraphBuilder::BuildEmitObjectHeader(
10548     Handle<JSObject> boilerplate_object,
10549     HInstruction* object) {
10550   ASSERT(boilerplate_object->properties()->length() == 0);
10551
10552   Handle<Map> boilerplate_object_map(boilerplate_object->map());
10553   AddStoreMapConstant(object, boilerplate_object_map);
10554
10555   Handle<Object> properties_field =
10556       Handle<Object>(boilerplate_object->properties(), isolate());
10557   ASSERT(*properties_field == isolate()->heap()->empty_fixed_array());
10558   HInstruction* properties = Add<HConstant>(properties_field);
10559   HObjectAccess access = HObjectAccess::ForPropertiesPointer();
10560   Add<HStoreNamedField>(object, access, properties);
10561
10562   if (boilerplate_object->IsJSArray()) {
10563     Handle<JSArray> boilerplate_array =
10564         Handle<JSArray>::cast(boilerplate_object);
10565     Handle<Object> length_field =
10566         Handle<Object>(boilerplate_array->length(), isolate());
10567     HInstruction* length = Add<HConstant>(length_field);
10568
10569     ASSERT(boilerplate_array->length()->IsSmi());
10570     Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(
10571         boilerplate_array->GetElementsKind()), length);
10572   }
10573 }
10574
10575
10576 void HOptimizedGraphBuilder::BuildInitElementsInObjectHeader(
10577     Handle<JSObject> boilerplate_object,
10578     HInstruction* object,
10579     HInstruction* object_elements) {
10580   ASSERT(boilerplate_object->properties()->length() == 0);
10581   if (object_elements == NULL) {
10582     Handle<Object> elements_field =
10583         Handle<Object>(boilerplate_object->elements(), isolate());
10584     object_elements = Add<HConstant>(elements_field);
10585   }
10586   Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
10587       object_elements);
10588 }
10589
10590
10591 void HOptimizedGraphBuilder::BuildEmitInObjectProperties(
10592     Handle<JSObject> boilerplate_object,
10593     HInstruction* object,
10594     AllocationSiteUsageContext* site_context,
10595     PretenureFlag pretenure_flag) {
10596   Handle<Map> boilerplate_map(boilerplate_object->map());
10597   Handle<DescriptorArray> descriptors(boilerplate_map->instance_descriptors());
10598   int limit = boilerplate_map->NumberOfOwnDescriptors();
10599
10600   int copied_fields = 0;
10601   for (int i = 0; i < limit; i++) {
10602     PropertyDetails details = descriptors->GetDetails(i);
10603     if (details.type() != FIELD) continue;
10604     copied_fields++;
10605     int index = descriptors->GetFieldIndex(i);
10606     int property_offset = boilerplate_object->GetInObjectPropertyOffset(index);
10607     Handle<Name> name(descriptors->GetKey(i));
10608     Handle<Object> value =
10609         Handle<Object>(boilerplate_object->InObjectPropertyAt(index),
10610         isolate());
10611
10612     // The access for the store depends on the type of the boilerplate.
10613     HObjectAccess access = boilerplate_object->IsJSArray() ?
10614         HObjectAccess::ForJSArrayOffset(property_offset) :
10615         HObjectAccess::ForMapAndOffset(boilerplate_map, property_offset);
10616
10617     if (value->IsJSObject()) {
10618       Handle<JSObject> value_object = Handle<JSObject>::cast(value);
10619       Handle<AllocationSite> current_site = site_context->EnterNewScope();
10620       HInstruction* result =
10621           BuildFastLiteral(value_object, site_context);
10622       site_context->ExitScope(current_site, value_object);
10623       Add<HStoreNamedField>(object, access, result);
10624     } else {
10625       Representation representation = details.representation();
10626       HInstruction* value_instruction;
10627
10628       if (representation.IsDouble()) {
10629         // Allocate a HeapNumber box and store the value into it.
10630         HValue* heap_number_constant = Add<HConstant>(HeapNumber::kSize);
10631         // This heap number allocation does not have a corresponding
10632         // AllocationSite. That is okay because
10633         // 1) it's a child object of another object with a valid allocation site
10634         // 2) we can just use the mode of the parent object for pretenuring
10635         HInstruction* double_box =
10636             Add<HAllocate>(heap_number_constant, HType::HeapNumber(),
10637                 pretenure_flag, HEAP_NUMBER_TYPE);
10638         AddStoreMapConstant(double_box,
10639             isolate()->factory()->heap_number_map());
10640         Add<HStoreNamedField>(double_box, HObjectAccess::ForHeapNumberValue(),
10641                               Add<HConstant>(value));
10642         value_instruction = double_box;
10643       } else if (representation.IsSmi()) {
10644         value_instruction = value->IsUninitialized()
10645             ? graph()->GetConstant0()
10646             : Add<HConstant>(value);
10647         // Ensure that value is stored as smi.
10648         access = access.WithRepresentation(representation);
10649       } else {
10650         value_instruction = Add<HConstant>(value);
10651       }
10652
10653       Add<HStoreNamedField>(object, access, value_instruction);
10654     }
10655   }
10656
10657   int inobject_properties = boilerplate_object->map()->inobject_properties();
10658   HInstruction* value_instruction =
10659       Add<HConstant>(isolate()->factory()->one_pointer_filler_map());
10660   for (int i = copied_fields; i < inobject_properties; i++) {
10661     ASSERT(boilerplate_object->IsJSObject());
10662     int property_offset = boilerplate_object->GetInObjectPropertyOffset(i);
10663     HObjectAccess access =
10664         HObjectAccess::ForMapAndOffset(boilerplate_map, property_offset);
10665     Add<HStoreNamedField>(object, access, value_instruction);
10666   }
10667 }
10668
10669
10670 void HOptimizedGraphBuilder::BuildEmitElements(
10671     Handle<JSObject> boilerplate_object,
10672     Handle<FixedArrayBase> elements,
10673     HValue* object_elements,
10674     AllocationSiteUsageContext* site_context) {
10675   ElementsKind kind = boilerplate_object->map()->elements_kind();
10676   int elements_length = elements->length();
10677   HValue* object_elements_length = Add<HConstant>(elements_length);
10678   BuildInitializeElementsHeader(object_elements, kind, object_elements_length);
10679
10680   // Copy elements backing store content.
10681   if (elements->IsFixedDoubleArray()) {
10682     BuildEmitFixedDoubleArray(elements, kind, object_elements);
10683   } else if (elements->IsFixedArray()) {
10684     BuildEmitFixedArray(elements, kind, object_elements,
10685                         site_context);
10686   } else {
10687     UNREACHABLE();
10688   }
10689 }
10690
10691
10692 void HOptimizedGraphBuilder::BuildEmitFixedDoubleArray(
10693     Handle<FixedArrayBase> elements,
10694     ElementsKind kind,
10695     HValue* object_elements) {
10696   HInstruction* boilerplate_elements = Add<HConstant>(elements);
10697   int elements_length = elements->length();
10698   for (int i = 0; i < elements_length; i++) {
10699     HValue* key_constant = Add<HConstant>(i);
10700     HInstruction* value_instruction =
10701         Add<HLoadKeyed>(boilerplate_elements, key_constant,
10702                         static_cast<HValue*>(NULL), kind,
10703                         ALLOW_RETURN_HOLE);
10704     HInstruction* store = Add<HStoreKeyed>(object_elements, key_constant,
10705                                            value_instruction, kind);
10706     store->SetFlag(HValue::kAllowUndefinedAsNaN);
10707   }
10708 }
10709
10710
10711 void HOptimizedGraphBuilder::BuildEmitFixedArray(
10712     Handle<FixedArrayBase> elements,
10713     ElementsKind kind,
10714     HValue* object_elements,
10715     AllocationSiteUsageContext* site_context) {
10716   HInstruction* boilerplate_elements = Add<HConstant>(elements);
10717   int elements_length = elements->length();
10718   Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
10719   for (int i = 0; i < elements_length; i++) {
10720     Handle<Object> value(fast_elements->get(i), isolate());
10721     HValue* key_constant = Add<HConstant>(i);
10722     if (value->IsJSObject()) {
10723       Handle<JSObject> value_object = Handle<JSObject>::cast(value);
10724       Handle<AllocationSite> current_site = site_context->EnterNewScope();
10725       HInstruction* result =
10726           BuildFastLiteral(value_object, site_context);
10727       site_context->ExitScope(current_site, value_object);
10728       Add<HStoreKeyed>(object_elements, key_constant, result, kind);
10729     } else {
10730       HInstruction* value_instruction =
10731           Add<HLoadKeyed>(boilerplate_elements, key_constant,
10732                           static_cast<HValue*>(NULL), kind,
10733                           ALLOW_RETURN_HOLE);
10734       Add<HStoreKeyed>(object_elements, key_constant, value_instruction, kind);
10735     }
10736   }
10737 }
10738
10739
10740 void HOptimizedGraphBuilder::VisitThisFunction(ThisFunction* expr) {
10741   ASSERT(!HasStackOverflow());
10742   ASSERT(current_block() != NULL);
10743   ASSERT(current_block()->HasPredecessor());
10744   HInstruction* instr = BuildThisFunction();
10745   return ast_context()->ReturnInstruction(instr, expr->id());
10746 }
10747
10748
10749 void HOptimizedGraphBuilder::VisitDeclarations(
10750     ZoneList<Declaration*>* declarations) {
10751   ASSERT(globals_.is_empty());
10752   AstVisitor::VisitDeclarations(declarations);
10753   if (!globals_.is_empty()) {
10754     Handle<FixedArray> array =
10755        isolate()->factory()->NewFixedArray(globals_.length(), TENURED);
10756     for (int i = 0; i < globals_.length(); ++i) array->set(i, *globals_.at(i));
10757     int flags = DeclareGlobalsEvalFlag::encode(current_info()->is_eval()) |
10758         DeclareGlobalsNativeFlag::encode(current_info()->is_native()) |
10759         DeclareGlobalsStrictMode::encode(current_info()->strict_mode());
10760     Add<HDeclareGlobals>(array, flags);
10761     globals_.Clear();
10762   }
10763 }
10764
10765
10766 void HOptimizedGraphBuilder::VisitVariableDeclaration(
10767     VariableDeclaration* declaration) {
10768   VariableProxy* proxy = declaration->proxy();
10769   VariableMode mode = declaration->mode();
10770   Variable* variable = proxy->var();
10771   bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
10772   switch (variable->location()) {
10773     case Variable::UNALLOCATED:
10774       globals_.Add(variable->name(), zone());
10775       globals_.Add(variable->binding_needs_init()
10776                        ? isolate()->factory()->the_hole_value()
10777                        : isolate()->factory()->undefined_value(), zone());
10778       return;
10779     case Variable::PARAMETER:
10780     case Variable::LOCAL:
10781       if (hole_init) {
10782         HValue* value = graph()->GetConstantHole();
10783         environment()->Bind(variable, value);
10784       }
10785       break;
10786     case Variable::CONTEXT:
10787       if (hole_init) {
10788         HValue* value = graph()->GetConstantHole();
10789         HValue* context = environment()->context();
10790         HStoreContextSlot* store = Add<HStoreContextSlot>(
10791             context, variable->index(), HStoreContextSlot::kNoCheck, value);
10792         if (store->HasObservableSideEffects()) {
10793           Add<HSimulate>(proxy->id(), REMOVABLE_SIMULATE);
10794         }
10795       }
10796       break;
10797     case Variable::LOOKUP:
10798       return Bailout(kUnsupportedLookupSlotInDeclaration);
10799   }
10800 }
10801
10802
10803 void HOptimizedGraphBuilder::VisitFunctionDeclaration(
10804     FunctionDeclaration* declaration) {
10805   VariableProxy* proxy = declaration->proxy();
10806   Variable* variable = proxy->var();
10807   switch (variable->location()) {
10808     case Variable::UNALLOCATED: {
10809       globals_.Add(variable->name(), zone());
10810       Handle<SharedFunctionInfo> function = Compiler::BuildFunctionInfo(
10811           declaration->fun(), current_info()->script());
10812       // Check for stack-overflow exception.
10813       if (function.is_null()) return SetStackOverflow();
10814       globals_.Add(function, zone());
10815       return;
10816     }
10817     case Variable::PARAMETER:
10818     case Variable::LOCAL: {
10819       CHECK_ALIVE(VisitForValue(declaration->fun()));
10820       HValue* value = Pop();
10821       BindIfLive(variable, value);
10822       break;
10823     }
10824     case Variable::CONTEXT: {
10825       CHECK_ALIVE(VisitForValue(declaration->fun()));
10826       HValue* value = Pop();
10827       HValue* context = environment()->context();
10828       HStoreContextSlot* store = Add<HStoreContextSlot>(
10829           context, variable->index(), HStoreContextSlot::kNoCheck, value);
10830       if (store->HasObservableSideEffects()) {
10831         Add<HSimulate>(proxy->id(), REMOVABLE_SIMULATE);
10832       }
10833       break;
10834     }
10835     case Variable::LOOKUP:
10836       return Bailout(kUnsupportedLookupSlotInDeclaration);
10837   }
10838 }
10839
10840
10841 void HOptimizedGraphBuilder::VisitModuleDeclaration(
10842     ModuleDeclaration* declaration) {
10843   UNREACHABLE();
10844 }
10845
10846
10847 void HOptimizedGraphBuilder::VisitImportDeclaration(
10848     ImportDeclaration* declaration) {
10849   UNREACHABLE();
10850 }
10851
10852
10853 void HOptimizedGraphBuilder::VisitExportDeclaration(
10854     ExportDeclaration* declaration) {
10855   UNREACHABLE();
10856 }
10857
10858
10859 void HOptimizedGraphBuilder::VisitModuleLiteral(ModuleLiteral* module) {
10860   UNREACHABLE();
10861 }
10862
10863
10864 void HOptimizedGraphBuilder::VisitModuleVariable(ModuleVariable* module) {
10865   UNREACHABLE();
10866 }
10867
10868
10869 void HOptimizedGraphBuilder::VisitModulePath(ModulePath* module) {
10870   UNREACHABLE();
10871 }
10872
10873
10874 void HOptimizedGraphBuilder::VisitModuleUrl(ModuleUrl* module) {
10875   UNREACHABLE();
10876 }
10877
10878
10879 void HOptimizedGraphBuilder::VisitModuleStatement(ModuleStatement* stmt) {
10880   UNREACHABLE();
10881 }
10882
10883
10884 // Generators for inline runtime functions.
10885 // Support for types.
10886 void HOptimizedGraphBuilder::GenerateIsSmi(CallRuntime* call) {
10887   ASSERT(call->arguments()->length() == 1);
10888   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10889   HValue* value = Pop();
10890   HIsSmiAndBranch* result = New<HIsSmiAndBranch>(value);
10891   return ast_context()->ReturnControl(result, call->id());
10892 }
10893
10894
10895 void HOptimizedGraphBuilder::GenerateIsSpecObject(CallRuntime* call) {
10896   ASSERT(call->arguments()->length() == 1);
10897   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10898   HValue* value = Pop();
10899   HHasInstanceTypeAndBranch* result =
10900       New<HHasInstanceTypeAndBranch>(value,
10901                                      FIRST_SPEC_OBJECT_TYPE,
10902                                      LAST_SPEC_OBJECT_TYPE);
10903   return ast_context()->ReturnControl(result, call->id());
10904 }
10905
10906
10907 void HOptimizedGraphBuilder::GenerateIsFunction(CallRuntime* call) {
10908   ASSERT(call->arguments()->length() == 1);
10909   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10910   HValue* value = Pop();
10911   HHasInstanceTypeAndBranch* result =
10912       New<HHasInstanceTypeAndBranch>(value, JS_FUNCTION_TYPE);
10913   return ast_context()->ReturnControl(result, call->id());
10914 }
10915
10916
10917 void HOptimizedGraphBuilder::GenerateIsMinusZero(CallRuntime* call) {
10918   ASSERT(call->arguments()->length() == 1);
10919   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10920   HValue* value = Pop();
10921   HCompareMinusZeroAndBranch* result = New<HCompareMinusZeroAndBranch>(value);
10922   return ast_context()->ReturnControl(result, call->id());
10923 }
10924
10925
10926 void HOptimizedGraphBuilder::GenerateHasCachedArrayIndex(CallRuntime* call) {
10927   ASSERT(call->arguments()->length() == 1);
10928   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10929   HValue* value = Pop();
10930   HHasCachedArrayIndexAndBranch* result =
10931       New<HHasCachedArrayIndexAndBranch>(value);
10932   return ast_context()->ReturnControl(result, call->id());
10933 }
10934
10935
10936 void HOptimizedGraphBuilder::GenerateIsArray(CallRuntime* call) {
10937   ASSERT(call->arguments()->length() == 1);
10938   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10939   HValue* value = Pop();
10940   HHasInstanceTypeAndBranch* result =
10941       New<HHasInstanceTypeAndBranch>(value, JS_ARRAY_TYPE);
10942   return ast_context()->ReturnControl(result, call->id());
10943 }
10944
10945
10946 void HOptimizedGraphBuilder::GenerateIsRegExp(CallRuntime* call) {
10947   ASSERT(call->arguments()->length() == 1);
10948   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10949   HValue* value = Pop();
10950   HHasInstanceTypeAndBranch* result =
10951       New<HHasInstanceTypeAndBranch>(value, JS_REGEXP_TYPE);
10952   return ast_context()->ReturnControl(result, call->id());
10953 }
10954
10955
10956 void HOptimizedGraphBuilder::GenerateIsObject(CallRuntime* call) {
10957   ASSERT(call->arguments()->length() == 1);
10958   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10959   HValue* value = Pop();
10960   HIsObjectAndBranch* result = New<HIsObjectAndBranch>(value);
10961   return ast_context()->ReturnControl(result, call->id());
10962 }
10963
10964
10965 void HOptimizedGraphBuilder::GenerateIsNonNegativeSmi(CallRuntime* call) {
10966   return Bailout(kInlinedRuntimeFunctionIsNonNegativeSmi);
10967 }
10968
10969
10970 void HOptimizedGraphBuilder::GenerateIsUndetectableObject(CallRuntime* call) {
10971   ASSERT(call->arguments()->length() == 1);
10972   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10973   HValue* value = Pop();
10974   HIsUndetectableAndBranch* result = New<HIsUndetectableAndBranch>(value);
10975   return ast_context()->ReturnControl(result, call->id());
10976 }
10977
10978
10979 void HOptimizedGraphBuilder::GenerateIsStringWrapperSafeForDefaultValueOf(
10980     CallRuntime* call) {
10981   return Bailout(kInlinedRuntimeFunctionIsStringWrapperSafeForDefaultValueOf);
10982 }
10983
10984
10985 // Support for construct call checks.
10986 void HOptimizedGraphBuilder::GenerateIsConstructCall(CallRuntime* call) {
10987   ASSERT(call->arguments()->length() == 0);
10988   if (function_state()->outer() != NULL) {
10989     // We are generating graph for inlined function.
10990     HValue* value = function_state()->inlining_kind() == CONSTRUCT_CALL_RETURN
10991         ? graph()->GetConstantTrue()
10992         : graph()->GetConstantFalse();
10993     return ast_context()->ReturnValue(value);
10994   } else {
10995     return ast_context()->ReturnControl(New<HIsConstructCallAndBranch>(),
10996                                         call->id());
10997   }
10998 }
10999
11000
11001 // Support for arguments.length and arguments[?].
11002 void HOptimizedGraphBuilder::GenerateArgumentsLength(CallRuntime* call) {
11003   // Our implementation of arguments (based on this stack frame or an
11004   // adapter below it) does not work for inlined functions.  This runtime
11005   // function is blacklisted by AstNode::IsInlineable.
11006   ASSERT(function_state()->outer() == NULL);
11007   ASSERT(call->arguments()->length() == 0);
11008   HInstruction* elements = Add<HArgumentsElements>(false);
11009   HArgumentsLength* result = New<HArgumentsLength>(elements);
11010   return ast_context()->ReturnInstruction(result, call->id());
11011 }
11012
11013
11014 void HOptimizedGraphBuilder::GenerateArguments(CallRuntime* call) {
11015   // Our implementation of arguments (based on this stack frame or an
11016   // adapter below it) does not work for inlined functions.  This runtime
11017   // function is blacklisted by AstNode::IsInlineable.
11018   ASSERT(function_state()->outer() == NULL);
11019   ASSERT(call->arguments()->length() == 1);
11020   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11021   HValue* index = Pop();
11022   HInstruction* elements = Add<HArgumentsElements>(false);
11023   HInstruction* length = Add<HArgumentsLength>(elements);
11024   HInstruction* checked_index = Add<HBoundsCheck>(index, length);
11025   HAccessArgumentsAt* result = New<HAccessArgumentsAt>(
11026       elements, length, checked_index);
11027   return ast_context()->ReturnInstruction(result, call->id());
11028 }
11029
11030
11031 // Support for accessing the class and value fields of an object.
11032 void HOptimizedGraphBuilder::GenerateClassOf(CallRuntime* call) {
11033   // The special form recognized by IsClassOfTest is handled before we get
11034   // here and does not cause a bailout.
11035   return Bailout(kInlinedRuntimeFunctionClassOf);
11036 }
11037
11038
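      // Inline support for %_ValueOf: if the argument is a JSValue wrapper
      // (e.g. the result of new String("x")), return the wrapped primitive
      // value, otherwise return the argument itself.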
11039 void HOptimizedGraphBuilder::GenerateValueOf(CallRuntime* call) {
11040   ASSERT(call->arguments()->length() == 1);
11041   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11042   HValue* object = Pop();
11043
11044   IfBuilder if_objectisvalue(this);
11045   HValue* objectisvalue = if_objectisvalue.If<HHasInstanceTypeAndBranch>(
11046       object, JS_VALUE_TYPE);
11047   if_objectisvalue.Then();
11048   {
11049     // Return the actual value.
11050     Push(Add<HLoadNamedField>(
11051             object, objectisvalue,
11052             HObjectAccess::ForObservableJSObjectOffset(
11053                 JSValue::kValueOffset)));
11054     Add<HSimulate>(call->id(), FIXED_SIMULATE);
11055   }
11056   if_objectisvalue.Else();
11057   {
11058     // If the object is not a JSValue, return the object itself.
11059     Push(object);
11060     Add<HSimulate>(call->id(), FIXED_SIMULATE);
11061   }
11062   if_objectisvalue.End();
11063   return ast_context()->ReturnValue(Pop());
11064 }
11065
11066
11067 void HOptimizedGraphBuilder::GenerateDateField(CallRuntime* call) {
11068   ASSERT(call->arguments()->length() == 2);
11069   ASSERT_NE(NULL, call->arguments()->at(1)->AsLiteral());
11070   Smi* index = Smi::cast(*(call->arguments()->at(1)->AsLiteral()->value()));
11071   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11072   HValue* date = Pop();
11073   HDateField* result = New<HDateField>(date, index);
11074   return ast_context()->ReturnInstruction(result, call->id());
11075 }
11076
11077
11078 void HOptimizedGraphBuilder::GenerateOneByteSeqStringSetChar(
11079     CallRuntime* call) {
11080   ASSERT(call->arguments()->length() == 3);
11081   // We need to follow the evaluation order of full codegen.
11082   CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11083   CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
11084   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11085   HValue* string = Pop();
11086   HValue* value = Pop();
11087   HValue* index = Pop();
11088   Add<HSeqStringSetChar>(String::ONE_BYTE_ENCODING, string,
11089                          index, value);
11090   Add<HSimulate>(call->id(), FIXED_SIMULATE);
11091   return ast_context()->ReturnValue(graph()->GetConstantUndefined());
11092 }
11093
11094
11095 void HOptimizedGraphBuilder::GenerateTwoByteSeqStringSetChar(
11096     CallRuntime* call) {
11097   ASSERT(call->arguments()->length() == 3);
11098   // We need to follow the evaluation order of full codegen.
11099   CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11100   CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
11101   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11102   HValue* string = Pop();
11103   HValue* value = Pop();
11104   HValue* index = Pop();
11105   Add<HSeqStringSetChar>(String::TWO_BYTE_ENCODING, string,
11106                          index, value);
11107   Add<HSimulate>(call->id(), FIXED_SIMULATE);
11108   return ast_context()->ReturnValue(graph()->GetConstantUndefined());
11109 }
11110
11111
11112 void HOptimizedGraphBuilder::GenerateSetValueOf(CallRuntime* call) {
11113   ASSERT(call->arguments()->length() == 2);
11114   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11115   CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11116   HValue* value = Pop();
11117   HValue* object = Pop();
11118
11119   // Check if object is a JSValue.
11120   IfBuilder if_objectisvalue(this);
11121   if_objectisvalue.If<HHasInstanceTypeAndBranch>(object, JS_VALUE_TYPE);
11122   if_objectisvalue.Then();
11123   {
11124     // Create in-object property store to kValueOffset.
11125     Add<HStoreNamedField>(object,
11126         HObjectAccess::ForObservableJSObjectOffset(JSValue::kValueOffset),
11127         value);
11128     if (!ast_context()->IsEffect()) {
11129       Push(value);
11130     }
11131     Add<HSimulate>(call->id(), FIXED_SIMULATE);
11132   }
11133   if_objectisvalue.Else();
11134   {
11135     // Nothing to do in this case.
11136     if (!ast_context()->IsEffect()) {
11137       Push(value);
11138     }
11139     Add<HSimulate>(call->id(), FIXED_SIMULATE);
11140   }
11141   if_objectisvalue.End();
11142   if (!ast_context()->IsEffect()) {
11143     Drop(1);
11144   }
11145   return ast_context()->ReturnValue(value);
11146 }
11147
11148
11149 // Fast support for charCodeAt(n).
11150 void HOptimizedGraphBuilder::GenerateStringCharCodeAt(CallRuntime* call) {
11151   ASSERT(call->arguments()->length() == 2);
11152   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11153   CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11154   HValue* index = Pop();
11155   HValue* string = Pop();
11156   HInstruction* result = BuildStringCharCodeAt(string, index);
11157   return ast_context()->ReturnInstruction(result, call->id());
11158 }
11159
11160
11161 // Fast support for string.charAt(n) and string[n].
11162 void HOptimizedGraphBuilder::GenerateStringCharFromCode(CallRuntime* call) {
11163   ASSERT(call->arguments()->length() == 1);
11164   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11165   HValue* char_code = Pop();
11166   HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
11167   return ast_context()->ReturnInstruction(result, call->id());
11168 }
11169
11170
11171 // Fast support for string.charAt(n) and string[n].
11172 void HOptimizedGraphBuilder::GenerateStringCharAt(CallRuntime* call) {
11173   ASSERT(call->arguments()->length() == 2);
11174   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11175   CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11176   HValue* index = Pop();
11177   HValue* string = Pop();
11178   HInstruction* char_code = BuildStringCharCodeAt(string, index);
11179   AddInstruction(char_code);
11180   HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
11181   return ast_context()->ReturnInstruction(result, call->id());
11182 }
11183
11184
11185 // Fast support for object equality testing.
11186 void HOptimizedGraphBuilder::GenerateObjectEquals(CallRuntime* call) {
11187   ASSERT(call->arguments()->length() == 2);
11188   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11189   CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11190   HValue* right = Pop();
11191   HValue* left = Pop();
11192   HCompareObjectEqAndBranch* result =
11193       New<HCompareObjectEqAndBranch>(left, right);
11194   return ast_context()->ReturnControl(result, call->id());
11195 }
11196
11197
11198 void HOptimizedGraphBuilder::GenerateLog(CallRuntime* call) {
11199   // %_Log is ignored in optimized code.
11200   return ast_context()->ReturnValue(graph()->GetConstantUndefined());
11201 }
11202
11203
11204 // Fast support for StringAdd.
11205 void HOptimizedGraphBuilder::GenerateStringAdd(CallRuntime* call) {
11206   ASSERT_EQ(2, call->arguments()->length());
11207   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11208   CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11209   HValue* right = Pop();
11210   HValue* left = Pop();
11211   HInstruction* result = NewUncasted<HStringAdd>(left, right);
11212   return ast_context()->ReturnInstruction(result, call->id());
11213 }
11214
11215
11216 // Fast support for SubString.
11217 void HOptimizedGraphBuilder::GenerateSubString(CallRuntime* call) {
11218   ASSERT_EQ(3, call->arguments()->length());
11219   CHECK_ALIVE(VisitExpressions(call->arguments()));
11220   PushArgumentsFromEnvironment(call->arguments()->length());
11221   HCallStub* result = New<HCallStub>(CodeStub::SubString, 3);
11222   return ast_context()->ReturnInstruction(result, call->id());
11223 }
11224
11225
11226 // Fast support for StringCompare.
11227 void HOptimizedGraphBuilder::GenerateStringCompare(CallRuntime* call) {
11228   ASSERT_EQ(2, call->arguments()->length());
11229   CHECK_ALIVE(VisitExpressions(call->arguments()));
11230   PushArgumentsFromEnvironment(call->arguments()->length());
11231   HCallStub* result = New<HCallStub>(CodeStub::StringCompare, 2);
11232   return ast_context()->ReturnInstruction(result, call->id());
11233 }
11234
11235
11236 // Support for direct calls from JavaScript to native RegExp code.
11237 void HOptimizedGraphBuilder::GenerateRegExpExec(CallRuntime* call) {
11238   ASSERT_EQ(4, call->arguments()->length());
11239   CHECK_ALIVE(VisitExpressions(call->arguments()));
11240   PushArgumentsFromEnvironment(call->arguments()->length());
11241   HCallStub* result = New<HCallStub>(CodeStub::RegExpExec, 4);
11242   return ast_context()->ReturnInstruction(result, call->id());
11243 }
11244
11245
11246 void HOptimizedGraphBuilder::GenerateDoubleLo(CallRuntime* call) {
11247   ASSERT_EQ(1, call->arguments()->length());
11248   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11249   HValue* value = Pop();
11250   HInstruction* result = NewUncasted<HDoubleBits>(value, HDoubleBits::LOW);
11251   return ast_context()->ReturnInstruction(result, call->id());
11252 }
11253
11254
11255 void HOptimizedGraphBuilder::GenerateDoubleHi(CallRuntime* call) {
11256   ASSERT_EQ(1, call->arguments()->length());
11257   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11258   HValue* value = Pop();
11259   HInstruction* result = NewUncasted<HDoubleBits>(value, HDoubleBits::HIGH);
11260   return ast_context()->ReturnInstruction(result, call->id());
11261 }
11262
11263
11264 void HOptimizedGraphBuilder::GenerateConstructDouble(CallRuntime* call) {
11265   ASSERT_EQ(2, call->arguments()->length());
11266   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11267   CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11268   HValue* lo = Pop();
11269   HValue* hi = Pop();
11270   HInstruction* result = NewUncasted<HConstructDouble>(hi, lo);
11271   return ast_context()->ReturnInstruction(result, call->id());
11272 }
11273
11274
11275 // Construct a RegExp exec result with two in-object properties.
11276 void HOptimizedGraphBuilder::GenerateRegExpConstructResult(CallRuntime* call) {
11277   ASSERT_EQ(3, call->arguments()->length());
11278   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11279   CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11280   CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
11281   HValue* input = Pop();
11282   HValue* index = Pop();
11283   HValue* length = Pop();
11284   HValue* result = BuildRegExpConstructResult(length, index, input);
11285   return ast_context()->ReturnValue(result);
11286 }
11287
11288
11289 // Support for fast native caches.
11290 void HOptimizedGraphBuilder::GenerateGetFromCache(CallRuntime* call) {
11291   return Bailout(kInlinedRuntimeFunctionGetFromCache);
11292 }
11293
11294
11295 // Fast support for number to string.
11296 void HOptimizedGraphBuilder::GenerateNumberToString(CallRuntime* call) {
11297   ASSERT_EQ(1, call->arguments()->length());
11298   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11299   HValue* number = Pop();
11300   HValue* result = BuildNumberToString(number, Type::Any(zone()));
11301   return ast_context()->ReturnValue(result);
11302 }
11303
11304
11305 // Fast call for custom callbacks.
11306 void HOptimizedGraphBuilder::GenerateCallFunction(CallRuntime* call) {
11307   // 1 ~ The function to call is not itself an argument to the call.
11308   int arg_count = call->arguments()->length() - 1;
11309   ASSERT(arg_count >= 1);  // There's always at least a receiver.
11310
11311   CHECK_ALIVE(VisitExpressions(call->arguments()));
11312   // The function is the last argument.
11313   HValue* function = Pop();
11314   // Push the arguments to the stack.
11315   PushArgumentsFromEnvironment(arg_count);
11316
11317   IfBuilder if_is_jsfunction(this);
11318   if_is_jsfunction.If<HHasInstanceTypeAndBranch>(function, JS_FUNCTION_TYPE);
11319
11320   if_is_jsfunction.Then();
11321   {
11322     HInstruction* invoke_result =
11323         Add<HInvokeFunction>(function, arg_count);
11324     if (!ast_context()->IsEffect()) {
11325       Push(invoke_result);
11326     }
11327     Add<HSimulate>(call->id(), FIXED_SIMULATE);
11328   }
11329
11330   if_is_jsfunction.Else();
11331   {
11332     HInstruction* call_result =
11333         Add<HCallFunction>(function, arg_count);
11334     if (!ast_context()->IsEffect()) {
11335       Push(call_result);
11336     }
11337     Add<HSimulate>(call->id(), FIXED_SIMULATE);
11338   }
11339   if_is_jsfunction.End();
11340
11341   if (ast_context()->IsEffect()) {
11342     // EffectContext::ReturnValue ignores the value, so we can just pass
11343     // 'undefined' (as we do not have the call result anymore).
11344     return ast_context()->ReturnValue(graph()->GetConstantUndefined());
11345   } else {
11346     return ast_context()->ReturnValue(Pop());
11347   }
11348 }
11349
11350
11351 // Fast call to math functions.
11352 void HOptimizedGraphBuilder::GenerateMathPow(CallRuntime* call) {
11353   ASSERT_EQ(2, call->arguments()->length());
11354   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11355   CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11356   HValue* right = Pop();
11357   HValue* left = Pop();
11358   HInstruction* result = NewUncasted<HPower>(left, right);
11359   return ast_context()->ReturnInstruction(result, call->id());
11360 }
11361
11362
11363 void HOptimizedGraphBuilder::GenerateMathLog(CallRuntime* call) {
11364   ASSERT(call->arguments()->length() == 1);
11365   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11366   HValue* value = Pop();
11367   HInstruction* result = NewUncasted<HUnaryMathOperation>(value, kMathLog);
11368   return ast_context()->ReturnInstruction(result, call->id());
11369 }
11370
11371
11372 void HOptimizedGraphBuilder::GenerateMathSqrt(CallRuntime* call) {
11373   ASSERT(call->arguments()->length() == 1);
11374   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11375   HValue* value = Pop();
11376   HInstruction* result = NewUncasted<HUnaryMathOperation>(value, kMathSqrt);
11377   return ast_context()->ReturnInstruction(result, call->id());
11378 }
11379
11380
11381 void HOptimizedGraphBuilder::GenerateGetCachedArrayIndex(CallRuntime* call) {
11382   ASSERT(call->arguments()->length() == 1);
11383   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11384   HValue* value = Pop();
11385   HGetCachedArrayIndex* result = New<HGetCachedArrayIndex>(value);
11386   return ast_context()->ReturnInstruction(result, call->id());
11387 }
11388
11389
11390 void HOptimizedGraphBuilder::GenerateFastAsciiArrayJoin(CallRuntime* call) {
11391   return Bailout(kInlinedRuntimeFunctionFastAsciiArrayJoin);
11392 }
11393
11394
11395 // Support for generators.
11396 void HOptimizedGraphBuilder::GenerateGeneratorNext(CallRuntime* call) {
11397   return Bailout(kInlinedRuntimeFunctionGeneratorNext);
11398 }
11399
11400
11401 void HOptimizedGraphBuilder::GenerateGeneratorThrow(CallRuntime* call) {
11402   return Bailout(kInlinedRuntimeFunctionGeneratorThrow);
11403 }
11404
11405
11406 void HOptimizedGraphBuilder::GenerateDebugBreakInOptimizedCode(
11407     CallRuntime* call) {
11408   Add<HDebugBreak>();
11409   return ast_context()->ReturnValue(graph()->GetConstant0());
11410 }
11411
11412
11413 void HOptimizedGraphBuilder::GenerateDebugCallbackSupportsStepping(
11414     CallRuntime* call) {
11415   ASSERT(call->arguments()->length() == 1);
11416   // Debugging is not supported in optimized code.
11417   return ast_context()->ReturnValue(graph()->GetConstantFalse());
11418 }
11419
11420
11421 #undef CHECK_BAILOUT
11422 #undef CHECK_ALIVE
11423
11424
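// Environment for an ordinary JS_FUNCTION frame: the function's parameters
// (plus the receiver), one special slot for the context, and the scope's
// stack-allocated locals.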
11425 HEnvironment::HEnvironment(HEnvironment* outer,
11426                            Scope* scope,
11427                            Handle<JSFunction> closure,
11428                            Zone* zone)
11429     : closure_(closure),
11430       values_(0, zone),
11431       frame_type_(JS_FUNCTION),
11432       parameter_count_(0),
11433       specials_count_(1),
11434       local_count_(0),
11435       outer_(outer),
11436       entry_(NULL),
11437       pop_count_(0),
11438       push_count_(0),
11439       ast_id_(BailoutId::None()),
11440       zone_(zone) {
11441   Initialize(scope->num_parameters() + 1, scope->num_stack_slots(), 0);
11442 }
11443
11444
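// Environment for a STUB frame: only parameters and the context special slot,
// no locals and no expression stack.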
11445 HEnvironment::HEnvironment(Zone* zone, int parameter_count)
11446     : values_(0, zone),
11447       frame_type_(STUB),
11448       parameter_count_(parameter_count),
11449       specials_count_(1),
11450       local_count_(0),
11451       outer_(NULL),
11452       entry_(NULL),
11453       pop_count_(0),
11454       push_count_(0),
11455       ast_id_(BailoutId::None()),
11456       zone_(zone) {
11457   Initialize(parameter_count, 0, 0);
11458 }
11459
11460
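// Copy constructor backing Copy(); the actual state is taken over from
// 'other' in Initialize(other).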
11461 HEnvironment::HEnvironment(const HEnvironment* other, Zone* zone)
11462     : values_(0, zone),
11463       frame_type_(JS_FUNCTION),
11464       parameter_count_(0),
11465       specials_count_(0),
11466       local_count_(0),
11467       outer_(NULL),
11468       entry_(NULL),
11469       pop_count_(0),
11470       push_count_(0),
11471       ast_id_(other->ast_id()),
11472       zone_(zone) {
11473   Initialize(other);
11474 }
11475
11476
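// Environment for an artificial frame (constructor stub, getter, setter or
// arguments adaptor) that holds only the pushed arguments.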
11477 HEnvironment::HEnvironment(HEnvironment* outer,
11478                            Handle<JSFunction> closure,
11479                            FrameType frame_type,
11480                            int arguments,
11481                            Zone* zone)
11482     : closure_(closure),
11483       values_(arguments, zone),
11484       frame_type_(frame_type),
11485       parameter_count_(arguments),
11486       specials_count_(0),
11487       local_count_(0),
11488       outer_(outer),
11489       entry_(NULL),
11490       pop_count_(0),
11491       push_count_(0),
11492       ast_id_(BailoutId::None()),
11493       zone_(zone) {
11494 }
11495
11496
11497 void HEnvironment::Initialize(int parameter_count,
11498                               int local_count,
11499                               int stack_height) {
11500   parameter_count_ = parameter_count;
11501   local_count_ = local_count;
11502
11503   // Avoid reallocating the temporaries' backing store on the first Push.
11504   int total = parameter_count + specials_count_ + local_count + stack_height;
11505   values_.Initialize(total + 4, zone());
11506   for (int i = 0; i < total; ++i) values_.Add(NULL, zone());
11507 }
11508
11509
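// Take over the complete state of 'other', deep-copying the chain of outer
// environments.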
11510 void HEnvironment::Initialize(const HEnvironment* other) {
11511   closure_ = other->closure();
11512   values_.AddAll(other->values_, zone());
11513   assigned_variables_.Union(other->assigned_variables_, zone());
11514   frame_type_ = other->frame_type_;
11515   parameter_count_ = other->parameter_count_;
11516   local_count_ = other->local_count_;
11517   if (other->outer_ != NULL) outer_ = other->outer_->Copy();  // Deep copy.
11518   entry_ = other->entry_;
11519   pop_count_ = other->pop_count_;
11520   push_count_ = other->push_count_;
11521   specials_count_ = other->specials_count_;
11522   ast_id_ = other->ast_id_;
11523 }
11524
11525
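// Merge the values flowing in over the edge from 'other' into this
// environment, extending existing phis or creating new ones where the values
// differ.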
11526 void HEnvironment::AddIncomingEdge(HBasicBlock* block, HEnvironment* other) {
11527   ASSERT(!block->IsLoopHeader());
11528   ASSERT(values_.length() == other->values_.length());
11529
11530   int length = values_.length();
11531   for (int i = 0; i < length; ++i) {
11532     HValue* value = values_[i];
11533     if (value != NULL && value->IsPhi() && value->block() == block) {
11534       // There is already a phi for the i'th value.
11535       HPhi* phi = HPhi::cast(value);
11536       // Assert index is correct and that we haven't missed an incoming edge.
11537       ASSERT(phi->merged_index() == i || !phi->HasMergedIndex());
11538       ASSERT(phi->OperandCount() == block->predecessors()->length());
11539       phi->AddInput(other->values_[i]);
11540     } else if (values_[i] != other->values_[i]) {
11541       // There is a fresh value on the incoming edge; a phi is needed.
11542       ASSERT(values_[i] != NULL && other->values_[i] != NULL);
11543       HPhi* phi = block->AddNewPhi(i);
11544       HValue* old_value = values_[i];
11545       for (int j = 0; j < block->predecessors()->length(); j++) {
11546         phi->AddInput(old_value);
11547       }
11548       phi->AddInput(other->values_[i]);
11549       this->values_[i] = phi;
11550     }
11551   }
11552 }
11553
11554
11555 void HEnvironment::Bind(int index, HValue* value) {
11556   ASSERT(value != NULL);
11557   assigned_variables_.Add(index, zone());
11558   values_[index] = value;
11559 }
11560
11561
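// Slots past the parameters, specials and locals belong to the expression
// stack.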
11562 bool HEnvironment::HasExpressionAt(int index) const {
11563   return index >= parameter_count_ + specials_count_ + local_count_;
11564 }
11565
11566
11567 bool HEnvironment::ExpressionStackIsEmpty() const {
11568   ASSERT(length() >= first_expression_index());
11569   return length() == first_expression_index();
11570 }
11571
11572
11573 void HEnvironment::SetExpressionStackAt(int index_from_top, HValue* value) {
11574   int count = index_from_top + 1;
11575   int index = values_.length() - count;
11576   ASSERT(HasExpressionAt(index));
11577   // The push count must include at least the element in question or else
11578   // the new value will not be included in this environment's history.
11579   if (push_count_ < count) {
11580     // This has the same effect as popping then re-pushing 'count' elements.
11581     pop_count_ += (count - push_count_);
11582     push_count_ = count;
11583   }
11584   values_[index] = value;
11585 }
11586
11587
11588 void HEnvironment::Drop(int count) {
11589   for (int i = 0; i < count; ++i) {
11590     Pop();
11591   }
11592 }
11593
11594
11595 HEnvironment* HEnvironment::Copy() const {
11596   return new(zone()) HEnvironment(this, zone());
11597 }
11598
11599
11600 HEnvironment* HEnvironment::CopyWithoutHistory() const {
11601   HEnvironment* result = Copy();
11602   result->ClearHistory();
11603   return result;
11604 }
11605
11606
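// Copy for a loop header: each value is replaced by a fresh phi in the header
// block, seeded with the current value as its first input.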
11607 HEnvironment* HEnvironment::CopyAsLoopHeader(HBasicBlock* loop_header) const {
11608   HEnvironment* new_env = Copy();
11609   for (int i = 0; i < values_.length(); ++i) {
11610     HPhi* phi = loop_header->AddNewPhi(i);
11611     phi->AddInput(values_[i]);
11612     new_env->values_[i] = phi;
11613   }
11614   new_env->ClearHistory();
11615   return new_env;
11616 }
11617
11618
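// Build an artificial environment of the given frame type on top of 'outer',
// pushing the receiver and 'arguments' values taken from this environment's
// expression stack.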
11619 HEnvironment* HEnvironment::CreateStubEnvironment(HEnvironment* outer,
11620                                                   Handle<JSFunction> target,
11621                                                   FrameType frame_type,
11622                                                   int arguments) const {
11623   HEnvironment* new_env =
11624       new(zone()) HEnvironment(outer, target, frame_type,
11625                                arguments + 1, zone());
11626   for (int i = 0; i <= arguments; ++i) {  // Include receiver.
11627     new_env->Push(ExpressionStackAt(arguments - i));
11628   }
11629   new_env->ClearHistory();
11630   return new_env;
11631 }
11632
11633
11634 HEnvironment* HEnvironment::CopyForInlining(
11635     Handle<JSFunction> target,
11636     int arguments,
11637     FunctionLiteral* function,
11638     HConstant* undefined,
11639     InliningKind inlining_kind) const {
11640   ASSERT(frame_type() == JS_FUNCTION);
11641
11642   // Outer environment is a copy of this one without the arguments.
11643   int arity = function->scope()->num_parameters();
11644
11645   HEnvironment* outer = Copy();
11646   outer->Drop(arguments + 1);  // Including receiver.
11647   outer->ClearHistory();
11648
11649   if (inlining_kind == CONSTRUCT_CALL_RETURN) {
11650     // Create artificial constructor stub environment.  The receiver should
11651     // actually be the constructor function, but we pass the newly allocated
11652     // object instead; DoComputeConstructStubFrame() relies on that.
11653     outer = CreateStubEnvironment(outer, target, JS_CONSTRUCT, arguments);
11654   } else if (inlining_kind == GETTER_CALL_RETURN) {
11655     // We need an additional StackFrame::INTERNAL frame for restoring the
11656     // correct context.
11657     outer = CreateStubEnvironment(outer, target, JS_GETTER, arguments);
11658   } else if (inlining_kind == SETTER_CALL_RETURN) {
11659     // We need an additional StackFrame::INTERNAL frame for temporarily saving
11660     // the argument of the setter; see StoreStubCompiler::CompileStoreViaSetter.
11661     outer = CreateStubEnvironment(outer, target, JS_SETTER, arguments);
11662   }
11663
11664   if (arity != arguments) {
11665     // Create artificial arguments adaptation environment.
11666     outer = CreateStubEnvironment(outer, target, ARGUMENTS_ADAPTOR, arguments);
11667   }
11668
11669   HEnvironment* inner =
11670       new(zone()) HEnvironment(outer, function->scope(), target, zone());
11671   // Get the argument values from the original environment.
11672   for (int i = 0; i <= arity; ++i) {  // Include receiver.
11673     HValue* push = (i <= arguments) ?
11674         ExpressionStackAt(arguments - i) : undefined;
11675     inner->SetValueAt(i, push);
11676   }
11677   inner->SetValueAt(arity + 1, context());
11678   for (int i = arity + 2; i < inner->length(); ++i) {
11679     inner->SetValueAt(i, undefined);
11680   }
11681
11682   inner->set_ast_id(BailoutId::FunctionEntry());
11683   return inner;
11684 }
11685
11686
11687 void HEnvironment::PrintTo(StringStream* stream) {
11688   for (int i = 0; i < length(); i++) {
11689     if (i == 0) stream->Add("parameters\n");
11690     if (i == parameter_count()) stream->Add("specials\n");
11691     if (i == parameter_count() + specials_count()) stream->Add("locals\n");
11692     if (i == parameter_count() + specials_count() + local_count()) {
11693       stream->Add("expressions\n");
11694     }
11695     HValue* val = values_.at(i);
11696     stream->Add("%d: ", i);
11697     if (val != NULL) {
11698       val->PrintNameTo(stream);
11699     } else {
11700       stream->Add("NULL");
11701     }
11702     stream->Add("\n");
11703   }
11704   PrintF("\n");
11705 }
11706
11707
11708 void HEnvironment::PrintToStd() {
11709   HeapStringAllocator string_allocator;
11710   StringStream trace(&string_allocator);
11711   PrintTo(&trace);
11712   PrintF("%s", trace.ToCString().get());
11713 }
11714
11715
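// Emit the "compilation" block: function name and optimization id for
// optimized code, or the stub's major key name when compiling a stub.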
11716 void HTracer::TraceCompilation(CompilationInfo* info) {
11717   Tag tag(this, "compilation");
11718   if (info->IsOptimizing()) {
11719     Handle<String> name = info->function()->debug_name();
11720     PrintStringProperty("name", name->ToCString().get());
11721     PrintIndent();
11722     trace_.Add("method \"%s:%d\"\n",
11723                name->ToCString().get(),
11724                info->optimization_id());
11725   } else {
11726     CodeStub::Major major_key = info->code_stub()->MajorKey();
11727     PrintStringProperty("name", CodeStub::MajorName(major_key, false));
11728     PrintStringProperty("method", "stub");
11729   }
11730   PrintLongProperty("date", static_cast<int64_t>(OS::TimeCurrentMillis()));
11731 }
11732
11733
11734 void HTracer::TraceLithium(const char* name, LChunk* chunk) {
11735   ASSERT(!chunk->isolate()->concurrent_recompilation_enabled());
11736   AllowHandleDereference allow_deref;
11737   AllowDeferredHandleDereference allow_deferred_deref;
11738   Trace(name, chunk->graph(), chunk);
11739 }
11740
11741
11742 void HTracer::TraceHydrogen(const char* name, HGraph* graph) {
11743   ASSERT(!graph->isolate()->concurrent_recompilation_enabled());
11744   AllowHandleDereference allow_deref;
11745   AllowDeferredHandleDereference allow_deferred_deref;
11746   Trace(name, graph, NULL);
11747 }
11748
11749
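// Emit one "cfg" block per basic block: predecessors, successors, dominator,
// loop depth, the phis and HIR instructions, and the corresponding LIR range
// when a chunk is available.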
11750 void HTracer::Trace(const char* name, HGraph* graph, LChunk* chunk) {
11751   Tag tag(this, "cfg");
11752   PrintStringProperty("name", name);
11753   const ZoneList<HBasicBlock*>* blocks = graph->blocks();
11754   for (int i = 0; i < blocks->length(); i++) {
11755     HBasicBlock* current = blocks->at(i);
11756     Tag block_tag(this, "block");
11757     PrintBlockProperty("name", current->block_id());
11758     PrintIntProperty("from_bci", -1);
11759     PrintIntProperty("to_bci", -1);
11760
11761     if (!current->predecessors()->is_empty()) {
11762       PrintIndent();
11763       trace_.Add("predecessors");
11764       for (int j = 0; j < current->predecessors()->length(); ++j) {
11765         trace_.Add(" \"B%d\"", current->predecessors()->at(j)->block_id());
11766       }
11767       trace_.Add("\n");
11768     } else {
11769       PrintEmptyProperty("predecessors");
11770     }
11771
11772     if (current->end()->SuccessorCount() == 0) {
11773       PrintEmptyProperty("successors");
11774     } else {
11775       PrintIndent();
11776       trace_.Add("successors");
11777       for (HSuccessorIterator it(current->end()); !it.Done(); it.Advance()) {
11778         trace_.Add(" \"B%d\"", it.Current()->block_id());
11779       }
11780       trace_.Add("\n");
11781     }
11782
11783     PrintEmptyProperty("xhandlers");
11784
11785     {
11786       PrintIndent();
11787       trace_.Add("flags");
11788       if (current->IsLoopSuccessorDominator()) {
11789         trace_.Add(" \"dom-loop-succ\"");
11790       }
11791       if (current->IsUnreachable()) {
11792         trace_.Add(" \"dead\"");
11793       }
11794       if (current->is_osr_entry()) {
11795         trace_.Add(" \"osr\"");
11796       }
11797       trace_.Add("\n");
11798     }
11799
11800     if (current->dominator() != NULL) {
11801       PrintBlockProperty("dominator", current->dominator()->block_id());
11802     }
11803
11804     PrintIntProperty("loop_depth", current->LoopNestingDepth());
11805
11806     if (chunk != NULL) {
11807       int first_index = current->first_instruction_index();
11808       int last_index = current->last_instruction_index();
11809       PrintIntProperty(
11810           "first_lir_id",
11811           LifetimePosition::FromInstructionIndex(first_index).Value());
11812       PrintIntProperty(
11813           "last_lir_id",
11814           LifetimePosition::FromInstructionIndex(last_index).Value());
11815     }
11816
11817     {
11818       Tag states_tag(this, "states");
11819       Tag locals_tag(this, "locals");
11820       int total = current->phis()->length();
11821       PrintIntProperty("size", current->phis()->length());
11822       PrintStringProperty("method", "None");
11823       for (int j = 0; j < total; ++j) {
11824         HPhi* phi = current->phis()->at(j);
11825         PrintIndent();
11826         trace_.Add("%d ", phi->merged_index());
11827         phi->PrintNameTo(&trace_);
11828         trace_.Add(" ");
11829         phi->PrintTo(&trace_);
11830         trace_.Add("\n");
11831       }
11832     }
11833
11834     {
11835       Tag HIR_tag(this, "HIR");
11836       for (HInstructionIterator it(current); !it.Done(); it.Advance()) {
11837         HInstruction* instruction = it.Current();
11838         int uses = instruction->UseCount();
11839         PrintIndent();
11840         trace_.Add("0 %d ", uses);
11841         instruction->PrintNameTo(&trace_);
11842         trace_.Add(" ");
11843         instruction->PrintTo(&trace_);
11844         if (FLAG_hydrogen_track_positions &&
11845             instruction->has_position() &&
11846             instruction->position().raw() != 0) {
11847           const HSourcePosition pos = instruction->position();
11848           trace_.Add(" pos:");
11849           if (pos.inlining_id() != 0) {
11850             trace_.Add("%d_", pos.inlining_id());
11851           }
11852           trace_.Add("%d", pos.position());
11853         }
11854         trace_.Add(" <|@\n");
11855       }
11856     }
11857
11858
11859     if (chunk != NULL) {
11860       Tag LIR_tag(this, "LIR");
11861       int first_index = current->first_instruction_index();
11862       int last_index = current->last_instruction_index();
11863       if (first_index != -1 && last_index != -1) {
11864         const ZoneList<LInstruction*>* instructions = chunk->instructions();
11865         for (int i = first_index; i <= last_index; ++i) {
11866           LInstruction* linstr = instructions->at(i);
11867           if (linstr != NULL) {
11868             PrintIndent();
11869             trace_.Add("%d ",
11870                        LifetimePosition::FromInstructionIndex(i).Value());
11871             linstr->PrintTo(&trace_);
11872             trace_.Add(" [hir:");
11873             linstr->hydrogen_value()->PrintNameTo(&trace_);
11874             trace_.Add("]");
11875             trace_.Add(" <|@\n");
11876           }
11877         }
11878       }
11879     }
11880   }
11881 }
11882
11883
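// Emit the "intervals" block: fixed double and fixed general registers first,
// followed by all virtual-register live ranges.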
11884 void HTracer::TraceLiveRanges(const char* name, LAllocator* allocator) {
11885   Tag tag(this, "intervals");
11886   PrintStringProperty("name", name);
11887
11888   const Vector<LiveRange*>* fixed_d = allocator->fixed_double_live_ranges();
11889   for (int i = 0; i < fixed_d->length(); ++i) {
11890     TraceLiveRange(fixed_d->at(i), "fixed", allocator->zone());
11891   }
11892
11893   const Vector<LiveRange*>* fixed = allocator->fixed_live_ranges();
11894   for (int i = 0; i < fixed->length(); ++i) {
11895     TraceLiveRange(fixed->at(i), "fixed", allocator->zone());
11896   }
11897
11898   const ZoneList<LiveRange*>* live_ranges = allocator->live_ranges();
11899   for (int i = 0; i < live_ranges->length(); ++i) {
11900     TraceLiveRange(live_ranges->at(i), "object", allocator->zone());
11901   }
11902 }
11903
11904
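// Emit a single live range: assigned register or spill slot, parent range and
// register hint, followed by its use intervals and use positions.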
11905 void HTracer::TraceLiveRange(LiveRange* range, const char* type,
11906                              Zone* zone) {
11907   if (range != NULL && !range->IsEmpty()) {
11908     PrintIndent();
11909     trace_.Add("%d %s", range->id(), type);
11910     if (range->HasRegisterAssigned()) {
11911       LOperand* op = range->CreateAssignedOperand(zone);
11912       int assigned_reg = op->index();
11913       if (op->IsDoubleRegister()) {
11914         trace_.Add(" \"%s\"",
11915                    DoubleRegister::AllocationIndexToString(assigned_reg));
11916       } else if (op->IsFloat32x4Register()) {
11917         trace_.Add(" \"%s\"",
11918                    SIMD128Register::AllocationIndexToString(assigned_reg));
11919       } else if (op->IsFloat64x2Register()) {
11920         trace_.Add(" \"%s\"",
11921                    SIMD128Register::AllocationIndexToString(assigned_reg));
11922       } else if (op->IsInt32x4Register()) {
11923         trace_.Add(" \"%s\"",
11924                    SIMD128Register::AllocationIndexToString(assigned_reg));
11925       } else {
11926         ASSERT(op->IsRegister());
11927         trace_.Add(" \"%s\"", Register::AllocationIndexToString(assigned_reg));
11928       }
11929     } else if (range->IsSpilled()) {
11930       LOperand* op = range->TopLevel()->GetSpillOperand();
11931       if (op->IsDoubleStackSlot()) {
11932         trace_.Add(" \"double_stack:%d\"", op->index());
11933       } else if (op->IsFloat32x4StackSlot()) {
11934         trace_.Add(" \"float32x4_stack:%d\"", op->index());
11935       } else if (op->IsFloat64x2StackSlot()) {
11936         trace_.Add(" \"float64x2_stack:%d\"", op->index());
11937       } else if (op->IsInt32x4StackSlot()) {
11938         trace_.Add(" \"int32x4_stack:%d\"", op->index());
11939       } else {
11940         ASSERT(op->IsStackSlot());
11941         trace_.Add(" \"stack:%d\"", op->index());
11942       }
11943     }
11944     int parent_index = -1;
11945     if (range->IsChild()) {
11946       parent_index = range->parent()->id();
11947     } else {
11948       parent_index = range->id();
11949     }
11950     LOperand* op = range->FirstHint();
11951     int hint_index = -1;
11952     if (op != NULL && op->IsUnallocated()) {
11953       hint_index = LUnallocated::cast(op)->virtual_register();
11954     }
11955     trace_.Add(" %d %d", parent_index, hint_index);
11956     UseInterval* cur_interval = range->first_interval();
11957     while (cur_interval != NULL && range->Covers(cur_interval->start())) {
11958       trace_.Add(" [%d, %d[",
11959                  cur_interval->start().Value(),
11960                  cur_interval->end().Value());
11961       cur_interval = cur_interval->next();
11962     }
11963
11964     UsePosition* current_pos = range->first_pos();
11965     while (current_pos != NULL) {
11966       if (current_pos->RegisterIsBeneficial() || FLAG_trace_all_uses) {
11967         trace_.Add(" %d M", current_pos->pos().Value());
11968       }
11969       current_pos = current_pos->next();
11970     }
11971
11972     trace_.Add(" \"\"\n");
11973   }
11974 }
11975
11976
11977 void HTracer::FlushToFile() {
11978   AppendChars(filename_.start(), trace_.ToCString().get(), trace_.length(),
11979               false);
11980   trace_.Reset();
11981 }
11982
11983
11984 void HStatistics::Initialize(CompilationInfo* info) {
11985   if (info->shared_info().is_null()) return;
11986   source_size_ += info->shared_info()->SourceSize();
11987 }
11988
11989
11990 void HStatistics::Print() {
11991   PrintF("Timing results:\n");
11992   TimeDelta sum;
11993   for (int i = 0; i < times_.length(); ++i) {
11994     sum += times_[i];
11995   }
11996
11997   for (int i = 0; i < names_.length(); ++i) {
11998     PrintF("%32s", names_[i]);
11999     double ms = times_[i].InMillisecondsF();
12000     double percent = times_[i].PercentOf(sum);
12001     PrintF(" %8.3f ms / %4.1f %% ", ms, percent);
12002
12003     unsigned size = sizes_[i];
12004     double size_percent = static_cast<double>(size) * 100 / total_size_;
12005     PrintF(" %9u bytes / %4.1f %%\n", size, size_percent);
12006   }
12007
12008   PrintF("----------------------------------------"
12009          "---------------------------------------\n");
12010   TimeDelta total = create_graph_ + optimize_graph_ + generate_code_;
12011   PrintF("%32s %8.3f ms / %4.1f %% \n",
12012          "Create graph",
12013          create_graph_.InMillisecondsF(),
12014          create_graph_.PercentOf(total));
12015   PrintF("%32s %8.3f ms / %4.1f %% \n",
12016          "Optimize graph",
12017          optimize_graph_.InMillisecondsF(),
12018          optimize_graph_.PercentOf(total));
12019   PrintF("%32s %8.3f ms / %4.1f %% \n",
12020          "Generate and install code",
12021          generate_code_.InMillisecondsF(),
12022          generate_code_.PercentOf(total));
12023   PrintF("----------------------------------------"
12024          "---------------------------------------\n");
12025   PrintF("%32s %8.3f ms (%.1f times slower than full code gen)\n",
12026          "Total",
12027          total.InMillisecondsF(),
12028          total.TimesOf(full_code_gen_));
12029
12030   double source_size_in_kb = static_cast<double>(source_size_) / 1024;
12031   double normalized_time = source_size_in_kb > 0
12032       ? total.InMillisecondsF() / source_size_in_kb
12033       : 0;
12034   double normalized_size_in_kb = source_size_in_kb > 0
12035       ? total_size_ / 1024 / source_size_in_kb
12036       : 0;
12037   PrintF("%32s %8.3f ms           %7.3f kB allocated\n",
12038          "Average per kB source",
12039          normalized_time, normalized_size_in_kb);
12040 }
12041
12042
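// Accumulate compilation time and code size under 'name', adding a new entry
// the first time a phase name is seen.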
12043 void HStatistics::SaveTiming(const char* name, TimeDelta time, unsigned size) {
12044   total_size_ += size;
12045   for (int i = 0; i < names_.length(); ++i) {
12046     if (strcmp(names_[i], name) == 0) {
12047       times_[i] += time;
12048       sizes_[i] += size;
12049       return;
12050     }
12051   }
12052   names_.Add(name);
12053   times_.Add(time);
12054   sizes_.Add(size);
12055 }
12056
12057
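// On phase exit, dump the graph to the hydrogen tracer if tracing is enabled
// and verify it in debug builds.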
12058 HPhase::~HPhase() {
12059   if (ShouldProduceTraceOutput()) {
12060     isolate()->GetHTracer()->TraceHydrogen(name(), graph_);
12061   }
12062
12063 #ifdef DEBUG
12064   graph_->Verify(false);  // No full verify.
12065 #endif
12066 }
12067
12068 } }  // namespace v8::internal