1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
10 #include "allocation-site-scopes.h"
12 #include "full-codegen.h"
14 #include "hydrogen-bce.h"
15 #include "hydrogen-bch.h"
16 #include "hydrogen-canonicalize.h"
17 #include "hydrogen-check-elimination.h"
18 #include "hydrogen-dce.h"
19 #include "hydrogen-dehoist.h"
20 #include "hydrogen-environment-liveness.h"
21 #include "hydrogen-escape-analysis.h"
22 #include "hydrogen-infer-representation.h"
23 #include "hydrogen-infer-types.h"
24 #include "hydrogen-load-elimination.h"
25 #include "hydrogen-gvn.h"
26 #include "hydrogen-mark-deoptimize.h"
27 #include "hydrogen-mark-unreachable.h"
28 #include "hydrogen-osr.h"
29 #include "hydrogen-range-analysis.h"
30 #include "hydrogen-redundant-phi.h"
31 #include "hydrogen-removable-simulates.h"
32 #include "hydrogen-representation-changes.h"
33 #include "hydrogen-sce.h"
34 #include "hydrogen-store-elimination.h"
35 #include "hydrogen-uint32-analysis.h"
36 #include "lithium-allocator.h"
39 #include "scopeinfo.h"
41 #include "stub-cache.h"
44 #if V8_TARGET_ARCH_IA32
45 #include "ia32/lithium-codegen-ia32.h"
46 #elif V8_TARGET_ARCH_X64
47 #include "x64/lithium-codegen-x64.h"
48 #elif V8_TARGET_ARCH_ARM64
49 #include "arm64/lithium-codegen-arm64.h"
50 #elif V8_TARGET_ARCH_ARM
51 #include "arm/lithium-codegen-arm.h"
52 #elif V8_TARGET_ARCH_MIPS
53 #include "mips/lithium-codegen-mips.h"
55 #error Unsupported target architecture.
// Constructor: assigns a fresh block id from the owning graph and
// zone-allocates the phi, predecessor, dominated-block and deleted-phi
// lists; link fields start out NULL/-1/false.
// NOTE(review): some members used elsewhere (graph_, first_, last_,
// dominator_, is_reachable_) are not visible in the initializer list
// shown here — confirm against the full source.
61 HBasicBlock::HBasicBlock(HGraph* graph)
62 : block_id_(graph->GetNextBlockID()),
64 phis_(4, graph->zone()),
68 loop_information_(NULL),
69 predecessors_(2, graph->zone()),
71 dominated_blocks_(4, graph->zone()),
72 last_environment_(NULL),
74 first_instruction_index_(-1),
75 last_instruction_index_(-1),
76 deleted_phis_(4, graph->zone()),
77 parent_loop_header_(NULL),
78 inlined_entry_block_(NULL),
79 is_inline_return_target_(false),
81 dominates_loop_successors_(false),
82 is_osr_entry_(false) { }
// The isolate is owned by the graph; blocks delegate to it.
85 Isolate* HBasicBlock::isolate() const {
86 return graph_->isolate();
// Flags this block as unreachable from the graph entry.
90 void HBasicBlock::MarkUnreachable() {
91 is_reachable_ = false;
// Turns this block into a loop header by attaching fresh loop info;
// must not already be a loop header.
95 void HBasicBlock::AttachLoopInformation() {
96 ASSERT(!IsLoopHeader());
97 loop_information_ = new(zone()) HLoopInformation(this, zone());
// Demotes a loop header back to a plain block (loop info is
// zone-allocated, so dropping the pointer is sufficient).
101 void HBasicBlock::DetachLoopInformation() {
102 ASSERT(IsLoopHeader());
103 loop_information_ = NULL;
// Appends a phi to this block; the start block never holds phis.
107 void HBasicBlock::AddPhi(HPhi* phi) {
108 ASSERT(!IsStartBlock());
109 phis_.Add(phi, zone());
// Removes a phi that belongs to this block from the phi list.
114 void HBasicBlock::RemovePhi(HPhi* phi) {
115 ASSERT(phi->block() == this);
116 ASSERT(phis_.Contains(phi));
118 phis_.RemoveElement(phi);
// Appends |instr| at the end of this (unfinished) block. On the first
// insertion an HBlockEntry sentinel is created lazily as first_/last_,
// so instructions are always linked after an entry node. A known
// source position is propagated to the instruction (and to the entry
// when it is created).
123 void HBasicBlock::AddInstruction(HInstruction* instr,
124 HSourcePosition position) {
125 ASSERT(!IsStartBlock() || !IsFinished());
126 ASSERT(!instr->IsLinked());
127 ASSERT(!IsFinished());
129 if (!position.IsUnknown()) {
130 instr->set_position(position);
132 if (first_ == NULL) {
133 ASSERT(last_environment() != NULL);
134 ASSERT(!last_environment()->ast_id().IsNone());
135 HBlockEntry* entry = new(zone()) HBlockEntry();
136 entry->InitializeAsFirst(this);
137 if (!position.IsUnknown()) {
138 entry->set_position(position);
140 ASSERT(!FLAG_hydrogen_track_positions ||
141 !graph()->info()->IsOptimizing());
143 first_ = last_ = entry;
145 instr->InsertAfter(last_);
// Creates a new phi for this block. Inside a no-side-effects scope the
// merged environment index is invalidated, since such phis do not
// correspond to a real environment slot.
149 HPhi* HBasicBlock::AddNewPhi(int merged_index) {
150 if (graph()->IsInsideNoSideEffectsScope()) {
151 merged_index = HPhi::kInvalidMergedIndex;
153 HPhi* phi = new(zone()) HPhi(merged_index, zone());
// Builds an HSimulate (deopt bookkeeping point) snapshotting the
// current environment: its pending pushes, pops and assigned-variable
// set. The environment's change history is cleared afterwards.
159 HSimulate* HBasicBlock::CreateSimulate(BailoutId ast_id,
160 RemovableSimulate removable) {
161 ASSERT(HasEnvironment());
162 HEnvironment* environment = last_environment();
163 ASSERT(ast_id.IsNone() ||
164 ast_id == BailoutId::StubEntry() ||
165 environment->closure()->shared()->VerifyBailoutId(ast_id));
167 int push_count = environment->push_count();
168 int pop_count = environment->pop_count();
171 new(zone()) HSimulate(ast_id, pop_count, zone(), removable);
173 instr->set_closure(environment->closure());
175 // Order of pushed values: newest (top of stack) first. This allows
176 // HSimulate::MergeWith() to easily append additional pushed values
177 // that are older (from further down the stack).
178 for (int i = 0; i < push_count; ++i) {
179 instr->AddPushedValue(environment->ExpressionStackAt(i));
181 for (GrowableBitVector::Iterator it(environment->assigned_variables(),
185 int index = it.Current();
186 instr->AddAssignedValue(index, environment->Lookup(index));
188 environment->ClearHistory();
// Terminates the block with control instruction |end| and registers
// this block as a predecessor of each of |end|'s successors.
193 void HBasicBlock::Finish(HControlInstruction* end, HSourcePosition position) {
194 ASSERT(!IsFinished());
195 AddInstruction(end, position);
197 for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
198 it.Current()->RegisterPredecessor(this);
// Finishes this block with an unconditional jump to |block|. When the
// target is an inline-return target, an HLeaveInlined is emitted first
// and the inlined environment frame is discarded (dropping the extra
// value for normal returns). A simulate is added unless suppressed.
203 void HBasicBlock::Goto(HBasicBlock* block,
204 HSourcePosition position,
205 FunctionState* state,
207 bool drop_extra = state != NULL &&
208 state->inlining_kind() == NORMAL_RETURN;
210 if (block->IsInlineReturnTarget()) {
211 HEnvironment* env = last_environment();
212 int argument_count = env->arguments_environment()->parameter_count();
213 AddInstruction(new(zone())
214 HLeaveInlined(state->entry(), argument_count),
216 UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
219 if (add_simulate) AddNewSimulate(BailoutId::None(), position);
220 HGoto* instr = new(zone()) HGoto(block);
221 Finish(instr, position);
// Emits the return path out of an inlined function: leaves the inlined
// frame, discards the inlined environment, pushes the return value,
// simulates, and jumps to the function-return target block.
225 void HBasicBlock::AddLeaveInlined(HValue* return_value,
226 FunctionState* state,
227 HSourcePosition position) {
228 HBasicBlock* target = state->function_return();
229 bool drop_extra = state->inlining_kind() == NORMAL_RETURN;
231 ASSERT(target->IsInlineReturnTarget());
232 ASSERT(return_value != NULL);
233 HEnvironment* env = last_environment();
234 int argument_count = env->arguments_environment()->parameter_count();
235 AddInstruction(new(zone()) HLeaveInlined(state->entry(), argument_count),
237 UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
238 last_environment()->Push(return_value);
239 AddNewSimulate(BailoutId::None(), position);
240 HGoto* instr = new(zone()) HGoto(target);
241 Finish(instr, position);
// Installs the first environment of a freshly created, empty block.
245 void HBasicBlock::SetInitialEnvironment(HEnvironment* env) {
246 ASSERT(!HasEnvironment());
247 ASSERT(first() == NULL);
248 UpdateEnvironment(env);
// Replaces the current environment and lets the graph track the
// largest environment size seen (used for frame sizing).
252 void HBasicBlock::UpdateEnvironment(HEnvironment* env) {
253 last_environment_ = env;
254 graph()->update_maximum_environment_size(env->first_expression_index());
// Stamps the join's AST id onto the simulate preceding each
// predecessor's Goto and onto each predecessor's environment, so all
// incoming edges agree on the bailout point.
258 void HBasicBlock::SetJoinId(BailoutId ast_id) {
259 int length = predecessors_.length();
261 for (int i = 0; i < length; i++) {
262 HBasicBlock* predecessor = predecessors_[i];
263 ASSERT(predecessor->end()->IsGoto());
264 HSimulate* simulate = HSimulate::cast(predecessor->end()->previous());
266 (predecessor->last_environment()->closure().is_null() ||
267 predecessor->last_environment()->closure()->shared()
268 ->VerifyBailoutId(ast_id)));
269 simulate->set_ast_id(ast_id);
270 predecessor->last_environment()->set_ast_id(ast_id);
// Strict dominance test: walks |other|'s dominator chain upward
// looking for |this|. O(depth of dominator tree).
275 bool HBasicBlock::Dominates(HBasicBlock* other) const {
276 HBasicBlock* current = other->dominator();
277 while (current != NULL) {
278 if (current == this) return true;
279 current = current->dominator();
// Reflexive dominance: true for the block itself or any dominatee.
285 bool HBasicBlock::EqualToOrDominates(HBasicBlock* other) const {
286 if (this == other) return true;
287 return Dominates(other);
// Counts enclosing loops by walking parent loop headers; a loop
// header counts its own loop as well.
291 int HBasicBlock::LoopNestingDepth() const {
292 const HBasicBlock* current = this;
293 int result = (current->IsLoopHeader()) ? 1 : 0;
294 while (current->parent_loop_header() != NULL) {
295 current = current->parent_loop_header();
// After graph construction, fixes up a loop header: sets the join id,
// degrades degenerate (single-predecessor) loops to plain blocks, and
// registers every non-first predecessor as a back edge.
302 void HBasicBlock::PostProcessLoopHeader(IterationStatement* stmt) {
303 ASSERT(IsLoopHeader());
305 SetJoinId(stmt->EntryId());
306 if (predecessors()->length() == 1) {
307 // This is a degenerated loop.
308 DetachLoopInformation();
312 // Only the first entry into the loop is from outside the loop. All other
313 // entries must be back edges.
314 for (int i = 1; i < predecessors()->length(); ++i) {
315 loop_information()->RegisterBackEdge(predecessors()->at(i));
// Marks successor #succ unreachable; valid only when that successor
// has this block as its sole predecessor.
320 void HBasicBlock::MarkSuccEdgeUnreachable(int succ) {
321 ASSERT(IsFinished());
322 HBasicBlock* succ_block = end()->SuccessorAt(succ);
324 ASSERT(succ_block->predecessors()->length() == 1);
325 succ_block->MarkUnreachable();
// Records |pred| as an incoming edge. For a repeat predecessor the
// environments are merged: loop headers feed each environment value
// into the corresponding phi; otherwise an incoming edge is recorded
// on the environment. A first predecessor of an environment-less,
// unfinished block donates a copy of its environment.
329 void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
330 if (HasPredecessor()) {
331 // Only loop header blocks can have a predecessor added after
332 // instructions have been added to the block (they have phis for all
333 // values in the environment, these phis may be eliminated later).
334 ASSERT(IsLoopHeader() || first_ == NULL);
335 HEnvironment* incoming_env = pred->last_environment();
336 if (IsLoopHeader()) {
337 ASSERT(phis()->length() == incoming_env->length());
338 for (int i = 0; i < phis_.length(); ++i) {
339 phis_[i]->AddInput(incoming_env->values()->at(i));
342 last_environment()->AddIncomingEdge(this, pred->last_environment());
344 } else if (!HasEnvironment() && !IsFinished()) {
345 ASSERT(!IsLoopHeader());
346 SetInitialEnvironment(pred->last_environment()->Copy());
349 predecessors_.Add(pred, zone());
// Inserts |block| into the dominated list at its block-id-sorted
// position, keeping predecessors before successors.
353 void HBasicBlock::AddDominatedBlock(HBasicBlock* block) {
354 ASSERT(!dominated_blocks_.Contains(block));
355 // Keep the list of dominated blocks sorted such that if there is two
356 // succeeding block in this list, the predecessor is before the successor.
358 while (index < dominated_blocks_.length() &&
359 dominated_blocks_[index]->block_id() < block->block_id()) {
362 dominated_blocks_.InsertAt(index, block, zone());
// Incremental dominator computation: intersects the current dominator
// with |other| by walking both chains until they meet (blocks are in
// reverse post order, so the higher id moves up first). If the
// dominator changes, this block is re-filed under the new one.
366 void HBasicBlock::AssignCommonDominator(HBasicBlock* other) {
367 if (dominator_ == NULL) {
369 other->AddDominatedBlock(this);
370 } else if (other->dominator() != NULL) {
371 HBasicBlock* first = dominator_;
372 HBasicBlock* second = other;
374 while (first != second) {
375 if (first->block_id() > second->block_id()) {
376 first = first->dominator();
378 second = second->dominator();
380 ASSERT(first != NULL && second != NULL);
383 if (dominator_ != first) {
384 ASSERT(dominator_->dominated_blocks_.Contains(this));
385 dominator_->dominated_blocks_.RemoveElement(this);
387 first->AddDominatedBlock(this);
// Called on a loop header: flags blocks inside the loop that dominate
// every subsequent reachable block of the loop, using an edge-counting
// sweep in block-id order (see the detailed comment below).
393 void HBasicBlock::AssignLoopSuccessorDominators() {
394 // Mark blocks that dominate all subsequent reachable blocks inside their
395 // loop. Exploit the fact that blocks are sorted in reverse post order. When
396 // the loop is visited in increasing block id order, if the number of
397 // non-loop-exiting successor edges at the dominator_candidate block doesn't
398 // exceed the number of previously encountered predecessor edges, there is no
399 // path from the loop header to any block with higher id that doesn't go
400 // through the dominator_candidate block. In this case, the
401 // dominator_candidate block is guaranteed to dominate all blocks reachable
402 // from it with higher ids.
403 HBasicBlock* last = loop_information()->GetLastBackEdge();
404 int outstanding_successors = 1; // one edge from the pre-header
405 // Header always dominates everything.
406 MarkAsLoopSuccessorDominator();
407 for (int j = block_id(); j <= last->block_id(); ++j) {
408 HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
409 for (HPredecessorIterator it(dominator_candidate); !it.Done();
411 HBasicBlock* predecessor = it.Current();
412 // Don't count back edges.
413 if (predecessor->block_id() < dominator_candidate->block_id()) {
414 outstanding_successors--;
418 // If more successors than predecessors have been seen in the loop up to
419 // now, it's not possible to guarantee that the current block dominates
420 // all of the blocks with higher IDs. In this case, assume conservatively
421 // that those paths through loop that don't go through the current block
422 // contain all of the loop's dependencies. Also be careful to record
423 // dominator information about the current loop that's being processed,
424 // and not nested loops, which will be processed when
425 // AssignLoopSuccessorDominators gets called on their header.
426 ASSERT(outstanding_successors >= 0);
427 HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header();
428 if (outstanding_successors == 0 &&
429 (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
430 dominator_candidate->MarkAsLoopSuccessorDominator();
432 HControlInstruction* end = dominator_candidate->end();
433 for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
434 HBasicBlock* successor = it.Current();
435 // Only count successors that remain inside the loop and don't loop back
437 if (successor->block_id() > dominator_candidate->block_id() &&
438 successor->block_id() <= last->block_id()) {
439 // Backwards edges must land on loop headers.
440 ASSERT(successor->block_id() > dominator_candidate->block_id() ||
441 successor->IsLoopHeader());
442 outstanding_successors++;
// Linear search for |predecessor| in the predecessor list; returns its
// index when found.
449 int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
450 for (int i = 0; i < predecessors_.length(); ++i) {
451 if (predecessors_[i] == predecessor) return i;
// Debug-mode invariants for a single block: it is finished, has a
// valid id, and multi-predecessor joins are in edge-split form (no
// predecessor ends in a branch).
459 void HBasicBlock::Verify() {
460 // Check that every block is finished.
461 ASSERT(IsFinished());
462 ASSERT(block_id() >= 0);
464 // Check that the incoming edges are in edge split form.
465 if (predecessors_.length() > 1) {
466 for (int i = 0; i < predecessors_.length(); ++i) {
467 ASSERT(predecessors_[i]->end()->SecondSuccessor() == NULL);
// Records a back edge of this loop.
474 void HLoopInformation::RegisterBackEdge(HBasicBlock* block) {
475 this->back_edges_.Add(block, block->zone());
// Returns the back edge with the highest block id, i.e. the last loop
// block in reverse post order.
480 HBasicBlock* HLoopInformation::GetLastBackEdge() const {
482 HBasicBlock* result = NULL;
483 for (int i = 0; i < back_edges_.length(); ++i) {
484 HBasicBlock* cur = back_edges_[i];
485 if (cur->block_id() > max_id) {
486 max_id = cur->block_id();
// Adds |block| (and, transitively, its predecessors) to this loop.
// Blocks already owned by this loop, or the header itself, are skipped;
// a block inside a nested loop contributes that inner loop's header
// instead, preserving the loop-nesting hierarchy.
494 void HLoopInformation::AddBlock(HBasicBlock* block) {
495 if (block == loop_header()) return;
496 if (block->parent_loop_header() == loop_header()) return;
497 if (block->parent_loop_header() != NULL) {
498 AddBlock(block->parent_loop_header());
500 block->set_parent_loop_header(loop_header());
501 blocks_.Add(block, block->zone());
502 for (int i = 0; i < block->predecessors()->length(); ++i) {
503 AddBlock(block->predecessors()->at(i));
511 // Checks reachability of the blocks in this graph and stores a bit in
512 // the BitVector "reachable()" for every block that can be reached
513 // from the start block of the graph. If "dont_visit" is non-null, the given
514 // block is treated as if it would not be part of the graph. "visited_count()"
515 // returns the number of reachable blocks.
516 class ReachabilityAnalyzer BASE_EMBEDDED {
518 ReachabilityAnalyzer(HBasicBlock* entry_block,
520 HBasicBlock* dont_visit)
522 stack_(16, entry_block->zone()),
523 reachable_(block_count, entry_block->zone()),
524 dont_visit_(dont_visit) {
525 PushBlock(entry_block);
529 int visited_count() const { return visited_count_; }
530 const BitVector* reachable() const { return &reachable_; }
// Pushes a block onto the worklist unless it is excluded or already
// marked reachable; marking happens at push time so each block is
// visited at most once.
533 void PushBlock(HBasicBlock* block) {
534 if (block != NULL && block != dont_visit_ &&
535 !reachable_.Contains(block->block_id())) {
536 reachable_.Add(block->block_id());
537 stack_.Add(block, block->zone());
// Iterative depth-first traversal over successor edges.
543 while (!stack_.is_empty()) {
544 HControlInstruction* end = stack_.RemoveLast()->end();
545 for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
546 PushBlock(it.Current());
552 ZoneList<HBasicBlock*> stack_;
553 BitVector reachable_;
554 HBasicBlock* dont_visit_;
// Debug-mode whole-graph sanity checks: per-block instruction-chain
// and successor/predecessor consistency, phi arity, join-block
// agreement on ast ids, and — when do_full_verify is set — full
// connectivity plus the property that a block is unreachable once its
// (missing) dominator is excluded from traversal.
558 void HGraph::Verify(bool do_full_verify) const {
559 Heap::RelocationLock relocation_lock(isolate()->heap());
560 AllowHandleDereference allow_deref;
561 AllowDeferredHandleDereference allow_deferred_deref;
562 for (int i = 0; i < blocks_.length(); i++) {
563 HBasicBlock* block = blocks_.at(i);
567 // Check that every block contains at least one node and that only the last
568 // node is a control instruction.
569 HInstruction* current = block->first();
570 ASSERT(current != NULL && current->IsBlockEntry());
571 while (current != NULL) {
572 ASSERT((current->next() == NULL) == current->IsControlInstruction());
573 ASSERT(current->block() == block);
575 current = current->next();
578 // Check that successors are correctly set.
579 HBasicBlock* first = block->end()->FirstSuccessor();
580 HBasicBlock* second = block->end()->SecondSuccessor();
581 ASSERT(second == NULL || first != NULL);
583 // Check that the predecessor array is correct.
585 ASSERT(first->predecessors()->Contains(block));
586 if (second != NULL) {
587 ASSERT(second->predecessors()->Contains(block));
591 // Check that phis have correct arguments.
592 for (int j = 0; j < block->phis()->length(); j++) {
593 HPhi* phi = block->phis()->at(j);
597 // Check that all join blocks have predecessors that end with an
598 // unconditional goto and agree on their environment node id.
599 if (block->predecessors()->length() >= 2) {
601 block->predecessors()->first()->last_environment()->ast_id();
602 for (int k = 0; k < block->predecessors()->length(); k++) {
603 HBasicBlock* predecessor = block->predecessors()->at(k);
604 ASSERT(predecessor->end()->IsGoto() ||
605 predecessor->end()->IsDeoptimize());
606 ASSERT(predecessor->last_environment()->ast_id() == id);
611 // Check special property of first block to have no predecessors.
612 ASSERT(blocks_.at(0)->predecessors()->is_empty());
614 if (do_full_verify) {
615 // Check that the graph is fully connected.
616 ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), NULL);
617 ASSERT(analyzer.visited_count() == blocks_.length());
619 // Check that entry block dominator is NULL.
620 ASSERT(entry_block_->dominator() == NULL);
623 for (int i = 0; i < blocks_.length(); ++i) {
624 HBasicBlock* block = blocks_.at(i);
625 if (block->dominator() == NULL) {
626 // Only start block may have no dominator assigned to.
629 // Assert that block is unreachable if dominator must not be visited.
630 ReachabilityAnalyzer dominator_analyzer(entry_block_,
633 ASSERT(!dominator_analyzer.reachable()->Contains(block->block_id()));
// Lazily materializes a cached numeric constant: on first use the
// HConstant is created with a NULL context and pinned right after the
// entry block's first instruction; subsequent calls reuse it,
// re-inserting if an optimization pass removed it.
642 HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
644 if (!pointer->is_set()) {
645 // Can't pass GetInvalidContext() to HConstant::New, because that will
646 // recursively call GetConstant
647 HConstant* constant = HConstant::New(zone(), NULL, value);
648 constant->InsertAfter(entry_block()->first());
649 pointer->set(constant);
652 return ReinsertConstantIfNecessary(pointer->get());
// If a cached constant was dead-code-eliminated, revive it and link it
// back in after the entry block's first instruction.
656 HConstant* HGraph::ReinsertConstantIfNecessary(HConstant* constant) {
657 if (!constant->IsLinked()) {
658 // The constant was removed from the graph. Reinsert.
659 constant->ClearFlag(HValue::kIsDead);
660 constant->InsertAfter(entry_block()->first());
// Cached accessors for the small-integer constants 0, 1 and -1.
666 HConstant* HGraph::GetConstant0() {
667 return GetConstant(&constant_0_, 0);
671 HConstant* HGraph::GetConstant1() {
672 return GetConstant(&constant_1_, 1);
676 HConstant* HGraph::GetConstantMinus1() {
677 return GetConstant(&constant_minus1_, -1);
// Generates HGraph::GetConstant<Name>() accessors for canonical heap
// values (undefined, true, false, the hole, null). Each is built once
// from an immovable Unique handle, pinned after the graph entry, and
// re-inserted on demand like the numeric constants above.
681 #define DEFINE_GET_CONSTANT(Name, name, htype, boolean_value) \
682 HConstant* HGraph::GetConstant##Name() { \
683 if (!constant_##name##_.is_set()) { \
684 HConstant* constant = new(zone()) HConstant( \
685 Unique<Object>::CreateImmovable(isolate()->factory()->name##_value()), \
686 Representation::Tagged(), \
692 constant->InsertAfter(entry_block()->first()); \
693 constant_##name##_.set(constant); \
695 return ReinsertConstantIfNecessary(constant_##name##_.get()); \
699 DEFINE_GET_CONSTANT(Undefined, undefined, HType::Tagged(), false)
700 DEFINE_GET_CONSTANT(True, true, HType::Boolean(), true)
701 DEFINE_GET_CONSTANT(False, false, HType::Boolean(), false)
702 DEFINE_GET_CONSTANT(Hole, the_hole, HType::Tagged(), false)
703 DEFINE_GET_CONSTANT(Null, null, HType::Tagged(), false)
706 #undef DEFINE_GET_CONSTANT
// Generates HGraph::IsConstant<Name>() predicates that test pointer
// identity against the cached canonical constant (false if the cache
// was never populated).
708 #define DEFINE_IS_CONSTANT(Name, name) \
709 bool HGraph::IsConstant##Name(HConstant* constant) { \
710 return constant_##name##_.is_set() && constant == constant_##name##_.get(); \
712 DEFINE_IS_CONSTANT(Undefined, undefined)
713 DEFINE_IS_CONSTANT(0, 0)
714 DEFINE_IS_CONSTANT(1, 1)
715 DEFINE_IS_CONSTANT(Minus1, minus1)
716 DEFINE_IS_CONSTANT(True, true)
717 DEFINE_IS_CONSTANT(False, false)
718 DEFINE_IS_CONSTANT(Hole, the_hole)
719 DEFINE_IS_CONSTANT(Null, null)
721 #undef DEFINE_IS_CONSTANT
// Sentinel "context" constant; the magic value 0xFFFFC0C7 marks an
// intentionally invalid context slot.
724 HConstant* HGraph::GetInvalidContext() {
725 return GetConstant(&constant_invalid_context_, 0xFFFFC0C7);
// True if |constant| is one of the graph's canonical cached constants.
729 bool HGraph::IsStandardConstant(HConstant* constant) {
730 if (IsConstantUndefined(constant)) return true;
731 if (IsConstant0(constant)) return true;
732 if (IsConstant1(constant)) return true;
733 if (IsConstantMinus1(constant)) return true;
734 if (IsConstantTrue(constant)) return true;
735 if (IsConstantFalse(constant)) return true;
736 if (IsConstantHole(constant)) return true;
737 if (IsConstantNull(constant)) return true;
// Starts a fresh if/else region: a compare is still expected
// (needs_compare_ = true) and the true/false entry blocks are created
// from copies of the current environment.
742 HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder)
751 needs_compare_(true),
752 pending_merge_block_(false),
753 split_edge_merge_block_(NULL),
754 merge_at_join_blocks_(NULL),
755 normal_merge_at_join_block_count_(0),
756 deopt_merge_at_join_block_count_(0) {
757 HEnvironment* env = builder->environment();
758 first_true_block_ = builder->CreateBasicBlock(env->Copy());
759 first_false_block_ = builder->CreateBasicBlock(env->Copy());
// Resumes an if/else region from a previously captured continuation:
// no compare is needed and the true/false entry blocks come from the
// continuation itself.
763 HGraphBuilder::IfBuilder::IfBuilder(
764 HGraphBuilder* builder,
765 HIfContinuation* continuation)
774 needs_compare_(false),
775 pending_merge_block_(false),
776 first_true_block_(NULL),
777 first_false_block_(NULL),
778 split_edge_merge_block_(NULL),
779 merge_at_join_blocks_(NULL),
780 normal_merge_at_join_block_count_(0),
781 deopt_merge_at_join_block_count_(0) {
782 continuation->Continue(&first_true_block_,
783 &first_false_block_);
// Wires |compare| as the branch of the current condition. For chained
// Or()/And() conditions a split-edge block routes one arm into the
// shared split-edge merge block; otherwise the branch targets the
// true/false entry blocks directly.
787 HControlInstruction* HGraphBuilder::IfBuilder::AddCompare(
788 HControlInstruction* compare) {
789 ASSERT(did_then_ == did_else_);
791 // Handle if-then-elseif
797 pending_merge_block_ = false;
798 split_edge_merge_block_ = NULL;
799 HEnvironment* env = builder_->environment();
800 first_true_block_ = builder_->CreateBasicBlock(env->Copy());
801 first_false_block_ = builder_->CreateBasicBlock(env->Copy());
803 if (split_edge_merge_block_ != NULL) {
804 HEnvironment* env = first_false_block_->last_environment();
805 HBasicBlock* split_edge =
806 builder_->CreateBasicBlock(env->Copy());
808 compare->SetSuccessorAt(0, split_edge);
809 compare->SetSuccessorAt(1, first_false_block_);
811 compare->SetSuccessorAt(0, first_true_block_);
812 compare->SetSuccessorAt(1, split_edge);
814 builder_->GotoNoSimulate(split_edge, split_edge_merge_block_);
816 compare->SetSuccessorAt(0, first_true_block_);
817 compare->SetSuccessorAt(1, first_false_block_);
819 builder_->FinishCurrentBlock(compare);
820 needs_compare_ = false;
// Short-circuit OR: the true arm jumps straight to the (lazily
// created) split-edge merge block; evaluation of the next condition
// continues in the old false block with a fresh false target.
825 void HGraphBuilder::IfBuilder::Or() {
826 ASSERT(!needs_compare_);
829 HEnvironment* env = first_false_block_->last_environment();
830 if (split_edge_merge_block_ == NULL) {
831 split_edge_merge_block_ =
832 builder_->CreateBasicBlock(env->Copy());
833 builder_->GotoNoSimulate(first_true_block_, split_edge_merge_block_);
834 first_true_block_ = split_edge_merge_block_;
836 builder_->set_current_block(first_false_block_);
837 first_false_block_ = builder_->CreateBasicBlock(env->Copy());
// Short-circuit AND: mirror of Or() — the false arm funnels into the
// split-edge merge block; the next condition is evaluated in the old
// true block with a fresh true target.
841 void HGraphBuilder::IfBuilder::And() {
842 ASSERT(!needs_compare_);
845 HEnvironment* env = first_false_block_->last_environment();
846 if (split_edge_merge_block_ == NULL) {
847 split_edge_merge_block_ = builder_->CreateBasicBlock(env->Copy());
848 builder_->GotoNoSimulate(first_false_block_, split_edge_merge_block_);
849 first_false_block_ = split_edge_merge_block_;
851 builder_->set_current_block(first_true_block_);
852 first_true_block_ = builder_->CreateBasicBlock(env->Copy());
// Ends the region by capturing the open true/false continuation blocks
// into |continuation| for later resumption; the builder is left with
// no current block.
856 void HGraphBuilder::IfBuilder::CaptureContinuation(
857 HIfContinuation* continuation) {
858 ASSERT(!did_else_if_);
862 HBasicBlock* true_block = NULL;
863 HBasicBlock* false_block = NULL;
864 Finish(&true_block, &false_block);
865 ASSERT(true_block != NULL);
866 ASSERT(false_block != NULL);
867 continuation->Capture(true_block, false_block);
869 builder_->set_current_block(NULL);
// Ends the region by routing any still-open true/false blocks into an
// existing continuation's branches (no new blocks are captured).
874 void HGraphBuilder::IfBuilder::JoinContinuation(HIfContinuation* continuation) {
875 ASSERT(!did_else_if_);
878 HBasicBlock* true_block = NULL;
879 HBasicBlock* false_block = NULL;
880 Finish(&true_block, &false_block);
881 merge_at_join_blocks_ = NULL;
882 if (true_block != NULL && !true_block->IsFinished()) {
883 ASSERT(continuation->IsTrueReachable());
884 builder_->GotoNoSimulate(true_block, continuation->true_branch());
886 if (false_block != NULL && !false_block->IsFinished()) {
887 ASSERT(continuation->IsFalseReachable());
888 builder_->GotoNoSimulate(false_block, continuation->false_branch());
// Begins the "then" arm. If no compare was ever added, a dummy branch
// on constant false is emitted so the (dead) then-arm is still visited
// by the graph builder (see comment below).
895 void HGraphBuilder::IfBuilder::Then() {
899 if (needs_compare_) {
900 // Handle if's without any expressions, they jump directly to the "else"
901 // branch. However, we must pretend that the "then" branch is reachable,
902 // so that the graph builder visits it and sees any live range extending
903 // constructs within it.
904 HConstant* constant_false = builder_->graph()->GetConstantFalse();
905 ToBooleanStub::Types boolean_type = ToBooleanStub::Types();
906 boolean_type.Add(ToBooleanStub::BOOLEAN);
907 HBranch* branch = builder()->New<HBranch>(
908 constant_false, boolean_type, first_true_block_, first_false_block_);
909 builder_->FinishCurrentBlock(branch);
911 builder_->set_current_block(first_true_block_);
912 pending_merge_block_ = true;
// Begins the "else" arm: records the then-arm's exit for the final
// merge and switches building to the false block.
916 void HGraphBuilder::IfBuilder::Else() {
920 AddMergeAtJoinBlock(false);
921 builder_->set_current_block(first_false_block_);
922 pending_merge_block_ = true;
// Ends the current arm with an eager deoptimization; the arm is
// recorded as a deopt merge so End() pads it with an abnormal exit.
927 void HGraphBuilder::IfBuilder::Deopt(const char* reason) {
929 builder_->Add<HDeoptimize>(reason, Deoptimizer::EAGER);
930 AddMergeAtJoinBlock(true);
// Ends the current arm with a return of |value| (parameter count -1
// is the cached constant used for stub-style returns).
934 void HGraphBuilder::IfBuilder::Return(HValue* value) {
935 HValue* parameter_count = builder_->graph()->GetConstantMinus1();
936 builder_->FinishExitCurrentBlock(
937 builder_->New<HReturn>(value, parameter_count));
938 AddMergeAtJoinBlock(false);
// Records the currently open arm (possibly NULL) in the linked list of
// blocks to merge at End(), tagged as normal or deopt, and clears the
// builder's current block.
942 void HGraphBuilder::IfBuilder::AddMergeAtJoinBlock(bool deopt) {
943 if (!pending_merge_block_) return;
944 HBasicBlock* block = builder_->current_block();
945 ASSERT(block == NULL || !block->IsFinished());
946 MergeAtJoinBlock* record =
947 new(builder_->zone()) MergeAtJoinBlock(block, deopt,
948 merge_at_join_blocks_);
949 merge_at_join_blocks_ = record;
951 ASSERT(block->end() == NULL);
953 normal_merge_at_join_block_count_++;
955 deopt_merge_at_join_block_count_++;
958 builder_->set_current_block(NULL);
959 pending_merge_block_ = false;
// Closes both arms, recording each still-pending arm as a normal
// merge-at-join block.
963 void HGraphBuilder::IfBuilder::Finish() {
968 AddMergeAtJoinBlock(false);
971 AddMergeAtJoinBlock(false);
// Closes the region and hands back the raw continuation blocks. The
// merge list is LIFO, so the head is the else arm and its successor
// the then arm (exactly two records are expected).
977 void HGraphBuilder::IfBuilder::Finish(HBasicBlock** then_continuation,
978 HBasicBlock** else_continuation) {
981 MergeAtJoinBlock* else_record = merge_at_join_blocks_;
982 if (else_continuation != NULL) {
983 *else_continuation = else_record->block_;
985 MergeAtJoinBlock* then_record = else_record->next_;
986 if (then_continuation != NULL) {
987 *then_continuation = then_record->block_;
989 ASSERT(then_record->next_ == NULL);
// Joins all recorded arms. With exactly one surviving block no merge
// block is created — it simply becomes the current block. Otherwise
// normal arms jump into a fresh merge block and deopt arms are closed
// with an abnormal exit.
993 void HGraphBuilder::IfBuilder::End() {
994 if (captured_) return;
997 int total_merged_blocks = normal_merge_at_join_block_count_ +
998 deopt_merge_at_join_block_count_;
999 ASSERT(total_merged_blocks >= 1);
1000 HBasicBlock* merge_block = total_merged_blocks == 1
1001 ? NULL : builder_->graph()->CreateBasicBlock();
1003 // Merge non-deopt blocks first to ensure environment has right size for
1005 MergeAtJoinBlock* current = merge_at_join_blocks_;
1006 while (current != NULL) {
1007 if (!current->deopt_ && current->block_ != NULL) {
1008 // If there is only one block that makes it through to the end of the
1009 // if, then just set it as the current block and continue rather then
1010 // creating an unnecessary merge block.
1011 if (total_merged_blocks == 1) {
1012 builder_->set_current_block(current->block_);
1015 builder_->GotoNoSimulate(current->block_, merge_block);
1017 current = current->next_;
1020 // Merge deopt blocks, padding when necessary.
1021 current = merge_at_join_blocks_;
1022 while (current != NULL) {
1023 if (current->deopt_ && current->block_ != NULL) {
1024 current->block_->FinishExit(
1025 HAbnormalExit::New(builder_->zone(), NULL),
1026 HSourcePosition::Unknown());
1028 current = current->next_;
1030 builder_->set_current_block(merge_block);
// Counting-loop builder with an implicit step of 1 (GetConstant1);
// allocates the loop header block up front.
1034 HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder,
1036 LoopBuilder::Direction direction)
1037 : builder_(builder),
1039 direction_(direction),
1041 header_block_ = builder->CreateLoopHeaderBlock();
1044 exit_trampoline_block_ = NULL;
1045 increment_amount_ = builder_->graph()->GetConstant1();
// Same as above but with a caller-supplied increment amount.
1049 HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder,
1051 LoopBuilder::Direction direction,
1052 HValue* increment_amount)
1053 : builder_(builder),
1055 direction_(direction),
1057 header_block_ = builder->CreateLoopHeaderBlock();
1060 exit_trampoline_block_ = NULL;
1061 increment_amount_ = increment_amount;
// Opens the loop: creates the induction phi in the header, branches on
// (phi <token> terminating) into body/exit blocks, and for pre-
// increment/decrement loops emits the step at the top of the body.
// NOTE(review): the pre-step uses constant 1, not increment_amount_ —
// presumably intentional for the pre-* directions; confirm.
1065 HValue* HGraphBuilder::LoopBuilder::BeginBody(
1067 HValue* terminating,
1068 Token::Value token) {
1069 HEnvironment* env = builder_->environment();
1070 phi_ = header_block_->AddNewPhi(env->values()->length());
1071 phi_->AddInput(initial);
1073 builder_->GotoNoSimulate(header_block_);
1075 HEnvironment* body_env = env->Copy();
1076 HEnvironment* exit_env = env->Copy();
1077 // Remove the phi from the expression stack
1080 body_block_ = builder_->CreateBasicBlock(body_env);
1081 exit_block_ = builder_->CreateBasicBlock(exit_env);
1083 builder_->set_current_block(header_block_);
1085 builder_->FinishCurrentBlock(builder_->New<HCompareNumericAndBranch>(
1086 phi_, terminating, token, body_block_, exit_block_));
1088 builder_->set_current_block(body_block_);
1089 if (direction_ == kPreIncrement || direction_ == kPreDecrement) {
1090 HValue* one = builder_->graph()->GetConstant1();
1091 if (direction_ == kPreIncrement) {
1092 increment_ = HAdd::New(zone(), context_, phi_, one);
1094 increment_ = HSub::New(zone(), context_, phi_, one);
1096 increment_->ClearFlag(HValue::kCanOverflow);
1097 builder_->AddInstruction(increment_);
// Early loop exit: lazily creates a shared exit trampoline (fed from
// the normal exit block), jumps the current block into it, and leaves
// the builder with no current block.
1105 void HGraphBuilder::LoopBuilder::Break() {
1106 if (exit_trampoline_block_ == NULL) {
1107 // Its the first time we saw a break.
1108 HEnvironment* env = exit_block_->last_environment()->Copy();
1109 exit_trampoline_block_ = builder_->CreateBasicBlock(env);
1110 builder_->GotoNoSimulate(exit_block_, exit_trampoline_block_);
1113 builder_->GotoNoSimulate(exit_trampoline_block_);
1114 builder_->set_current_block(NULL);
// Closes the loop body: for post-increment/decrement loops emits the
// step here, pushes the new induction value for the header phi, wires
// the back edge, and resumes building in the exit (or trampoline).
1118 void HGraphBuilder::LoopBuilder::EndBody() {
1121 if (direction_ == kPostIncrement || direction_ == kPostDecrement) {
1122 if (direction_ == kPostIncrement) {
1123 increment_ = HAdd::New(zone(), context_, phi_, increment_amount_);
1125 increment_ = HSub::New(zone(), context_, phi_, increment_amount_);
1127 increment_->ClearFlag(HValue::kCanOverflow);
1128 builder_->AddInstruction(increment_);
1131 // Push the new increment value on the expression stack to merge into the phi.
1132 builder_->environment()->Push(increment_);
1133 HBasicBlock* last_block = builder_->current_block();
1134 builder_->GotoNoSimulate(last_block, header_block_);
1135 header_block_->loop_information()->RegisterBackEdge(last_block);
1137 if (exit_trampoline_block_ != NULL) {
1138 builder_->set_current_block(exit_trampoline_block_);
1140 builder_->set_current_block(exit_block_);
// Entry point for graph construction: allocates the HGraph, runs
// BuildGraph() from the entry block (NULL on failure), and finalizes
// uniqueness of cached handles.
1146 HGraph* HGraphBuilder::CreateGraph() {
1147 graph_ = new(zone()) HGraph(info_);
1148 if (FLAG_hydrogen_stats) isolate()->GetHStatistics()->Initialize(info_);
1149 CompilationPhase phase("H_Block building", info_);
1150 set_current_block(graph()->entry_block());
1151 if (!BuildGraph()) return NULL;
1152 graph()->FinalizeUniqueness();
// Adds |instr| to the current block at the builder's source position;
// inside a no-side-effects scope the instruction is marked as having
// no observable side effects.
1157 HInstruction* HGraphBuilder::AddInstruction(HInstruction* instr) {
1158 ASSERT(current_block() != NULL);
1159 ASSERT(!FLAG_hydrogen_track_positions ||
1160 !position_.IsUnknown() ||
1161 !info_->IsOptimizing());
1162 current_block()->AddInstruction(instr, source_position());
1163 if (graph()->IsInsideNoSideEffectsScope()) {
1164 instr->SetFlag(HValue::kHasNoObservableSideEffects);
// Terminates the current block with the control instruction |last|. If the
// terminator leaves the graph (return or abnormal exit) the current block is
// cleared, since nothing may be appended after it.
1170 void HGraphBuilder::FinishCurrentBlock(HControlInstruction* last) {
1171 ASSERT(!FLAG_hydrogen_track_positions ||
1172 !info_->IsOptimizing() ||
1173 !position_.IsUnknown());
1174 current_block()->Finish(last, source_position());
1175 if (last->IsReturn() || last->IsAbnormalExit()) {
1176 set_current_block(NULL);
// Like FinishCurrentBlock, but uses HBasicBlock::FinishExit, i.e. ends the
// block as a graph exit with |instruction| as its terminator.
1181 void HGraphBuilder::FinishExitCurrentBlock(HControlInstruction* instruction) {
1182 ASSERT(!FLAG_hydrogen_track_positions || !info_->IsOptimizing() ||
1183 !position_.IsUnknown());
1184 current_block()->FinishExit(instruction, source_position());
1185 if (instruction->IsReturn() || instruction->IsAbnormalExit()) {
1186 set_current_block(NULL);
// Emits code that bumps a native StatsCounter: load the counter cell via its
// external reference, add one, and store it back. Only emitted when
// --native-code-counters is on and the counter is enabled.
1191 void HGraphBuilder::AddIncrementCounter(StatsCounter* counter) {
1192 if (FLAG_native_code_counters && counter->Enabled()) {
1193 HValue* reference = Add<HConstant>(ExternalReference(counter));
1194 HValue* old_value = Add<HLoadNamedField>(
1195 reference, static_cast<HValue*>(NULL), HObjectAccess::ForCounter());
1196 HValue* new_value = AddUncasted<HAdd>(old_value, graph()->GetConstant1());
1197 new_value->ClearFlag(HValue::kCanOverflow); // Ignore counter overflow
1198 Add<HStoreNamedField>(reference, HObjectAccess::ForCounter(),
1199 new_value, STORE_TO_INITIALIZED_ENTRY);
// Adds an HSimulate for bailout point |id| to the current block. Disallowed
// inside a NoObservableSideEffectsScope, where no deopt points may exist.
1204 void HGraphBuilder::AddSimulate(BailoutId id,
1205 RemovableSimulate removable) {
1206 ASSERT(current_block() != NULL);
1207 ASSERT(!graph()->IsInsideNoSideEffectsScope());
1208 current_block()->AddNewSimulate(id, source_position(), removable);
// Creates a fresh basic block in the graph and seeds it with |env| as its
// initial environment.
1212 HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
1213 HBasicBlock* b = graph()->CreateBasicBlock();
1214 b->SetInitialEnvironment(env);
// Creates a loop header block: its environment is a loop-header copy of the
// current one (values become phis), and loop information is attached so back
// edges can be registered later.
1219 HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() {
1220 HBasicBlock* header = graph()->CreateBasicBlock();
1221 HEnvironment* entry_env = environment()->CopyAsLoopHeader(header);
1222 header->SetInitialEnvironment(entry_env);
1223 header->AttachLoopInformation();
// Ensures |obj| is a heap object (i.e. not a smi), emitting a check only when
// the static type does not already prove it.
1228 HValue* HGraphBuilder::BuildCheckHeapObject(HValue* obj) {
1229 if (obj->type().IsHeapObject()) return obj;
1230 return Add<HCheckHeapObject>(obj);
// Ends the current block with an unconditional eager deoptimization followed
// by an abnormal graph exit.
1234 void HGraphBuilder::FinishExitWithHardDeoptimization(const char* reason) {
1235 Add<HDeoptimize>(reason, Deoptimizer::EAGER);
1236 FinishExitCurrentBlock(New<HAbnormalExit>());
// Ensures |string| is a string: if the static type doesn't prove it, emit a
// heap-object check followed by an instance-type check. A constant reaching
// here must not already be a string constant (it would have type String).
1240 HValue* HGraphBuilder::BuildCheckString(HValue* string) {
1241 if (!string->type().IsString()) {
1242 ASSERT(!string->IsConstant() ||
1243 !HConstant::cast(string)->HasStringValue());
1244 BuildCheckHeapObject(string);
1245 return Add<HCheckInstanceType>(string, HCheckInstanceType::IS_STRING);
// Implements receiver wrapping for calls: a JSObject receiver is used as-is;
// for a constant callee that is strict-mode or native the (possibly
// primitive) receiver is also passed through unwrapped, matching the calling
// convention. Otherwise an HWrapReceiver instruction performs the wrapping
// at run time.
1251 HValue* HGraphBuilder::BuildWrapReceiver(HValue* object, HValue* function) {
1252 if (object->type().IsJSObject()) return object;
1253 if (function->IsConstant() &&
1254 HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
1255 Handle<JSFunction> f = Handle<JSFunction>::cast(
1256 HConstant::cast(function)->handle(isolate()));
1257 SharedFunctionInfo* shared = f->shared();
1258 if (shared->strict_mode() == STRICT || shared->native()) return object;
1260 return Add<HWrapReceiver>(object, function);
// Handles out-of-bounds keyed stores that may grow the backing store:
// when |key| is past the array length (or equal to it for packed kinds),
// grow the elements if the key also exceeds the current capacity (bounded
// by JSObject::kMaxGap), bump the array length, and for FAST_SMI_ELEMENTS
// stores pre-initialize the slot with zero. In-bounds keys just get a
// bounds check. Returns the (possibly new) elements array.
1264 HValue* HGraphBuilder::BuildCheckForCapacityGrow(
1271 PropertyAccessType access_type) {
1272 IfBuilder length_checker(this);
// Holey kinds allow any key >= length to grow; packed kinds only allow
// appending exactly at the current length.
1274 Token::Value token = IsHoleyElementsKind(kind) ? Token::GTE : Token::EQ;
1275 length_checker.If<HCompareNumericAndBranch>(key, length, token);
1277 length_checker.Then();
1279 HValue* current_capacity = AddLoadFixedArrayLength(elements);
1281 IfBuilder capacity_checker(this);
1283 capacity_checker.If<HCompareNumericAndBranch>(key, current_capacity,
1285 capacity_checker.Then();
// Refuse to grow past capacity + kMaxGap — larger gaps deopt to dictionary
// mode instead.
1287 HValue* max_gap = Add<HConstant>(static_cast<int32_t>(JSObject::kMaxGap));
1288 HValue* max_capacity = AddUncasted<HAdd>(current_capacity, max_gap);
1290 Add<HBoundsCheck>(key, max_capacity);
1292 HValue* new_capacity = BuildNewElementsCapacity(key);
1293 HValue* new_elements = BuildGrowElementsCapacity(object, elements,
1297 environment()->Push(new_elements);
1298 capacity_checker.Else();
1300 environment()->Push(elements);
1301 capacity_checker.End();
// New length is key + 1; key is bounds-checked, so this cannot overflow.
1304 HValue* new_length = AddUncasted<HAdd>(key, graph_->GetConstant1());
1305 new_length->ClearFlag(HValue::kCanOverflow);
1307 Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(kind),
1311 if (access_type == STORE && kind == FAST_SMI_ELEMENTS) {
1312 HValue* checked_elements = environment()->Top();
1314 // Write zero to ensure that the new element is initialized with some smi.
1315 Add<HStoreKeyed>(checked_elements, key, graph()->GetConstant0(), kind);
1318 length_checker.Else();
1319 Add<HBoundsCheck>(key, length);
1321 environment()->Push(elements);
1322 length_checker.End();
1324 return environment()->Pop();
// Copy-on-write support for keyed stores: if |elements| has the COW fixed
// array map, make a writable copy (via BuildGrowElementsCapacity at the same
// capacity) before the store; otherwise keep the original elements. Returns
// the writable elements array.
1328 HValue* HGraphBuilder::BuildCopyElementsOnWrite(HValue* object,
1332 Factory* factory = isolate()->factory();
1334 IfBuilder cow_checker(this);
1336 cow_checker.If<HCompareMap>(elements, factory->fixed_cow_array_map());
1339 HValue* capacity = AddLoadFixedArrayLength(elements);
// "Growing" to the same capacity allocates a fresh, writable backing store
// and copies the contents over.
1341 HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind,
1342 kind, length, capacity);
1344 environment()->Push(new_elements);
1348 environment()->Push(elements);
1352 return environment()->Pop();
// Transitions |object|'s elements kind from |from_kind| to |to_kind|.
// Traps on allocation mementos when the transition is tracked. A simple map
// change just stores the new map; otherwise (e.g. smi -> double) the
// non-empty backing store must be re-allocated in the target representation
// before the map is updated.
1356 void HGraphBuilder::BuildTransitionElementsKind(HValue* object,
1358 ElementsKind from_kind,
1359 ElementsKind to_kind,
// Transitions must not lose holeyness.
1361 ASSERT(!IsFastHoleyElementsKind(from_kind) ||
1362 IsFastHoleyElementsKind(to_kind));
1364 if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
1365 Add<HTrapAllocationMemento>(object);
1368 if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
1369 HInstruction* elements = AddLoadElements(object);
1371 HInstruction* empty_fixed_array = Add<HConstant>(
1372 isolate()->factory()->empty_fixed_array());
1374 IfBuilder if_builder(this);
// The shared empty_fixed_array needs no conversion — skip re-allocation.
1376 if_builder.IfNot<HCompareObjectEqAndBranch>(elements, empty_fixed_array);
1380 HInstruction* elements_length = AddLoadFixedArrayLength(elements);
1382 HInstruction* array_length = is_jsarray
1383 ? Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
1384 HObjectAccess::ForArrayLength(from_kind))
1387 BuildGrowElementsCapacity(object, elements, from_kind, to_kind,
1388 array_length, elements_length);
1393 Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map);
// One probe step of an inlined SeededNumberDictionary lookup, recursing for
// up to kNumberDictionaryProbes probes (returns NULL when probes are
// exhausted, which the caller turns into a deopt). For a matching key the
// entry's details word must be zero (fast data property), otherwise we
// deopt; on success the value slot is pushed/returned.
1397 HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoadHelper(
1402 int current_probe) {
1403 if (current_probe == kNumberDictionaryProbes) {
// Compute the probe index: (hash + probe_offset) & mask, scaled by the
// dictionary entry size.
1407 int32_t offset = SeededNumberDictionary::GetProbeOffset(current_probe);
1408 HValue* raw_index = (current_probe == 0)
1410 : AddUncasted<HAdd>(hash, Add<HConstant>(offset));
1411 raw_index = AddUncasted<HBitwise>(Token::BIT_AND, raw_index, mask);
1412 int32_t entry_size = SeededNumberDictionary::kEntrySize;
1413 raw_index = AddUncasted<HMul>(raw_index, Add<HConstant>(entry_size));
1414 raw_index->ClearFlag(HValue::kCanOverflow);
1416 int32_t base_offset = SeededNumberDictionary::kElementsStartIndex;
1417 HValue* key_index = AddUncasted<HAdd>(raw_index, Add<HConstant>(base_offset));
1418 key_index->ClearFlag(HValue::kCanOverflow);
1420 HValue* candidate_key = Add<HLoadKeyed>(elements, key_index,
1421 static_cast<HValue*>(NULL),
1424 IfBuilder key_compare(this);
1425 key_compare.IfNot<HCompareObjectEqAndBranch>(key, candidate_key);
1428 // Key at the current probe doesn't match, try at the next probe.
1429 HValue* result = BuildUncheckedDictionaryElementLoadHelper(
1430 elements, key, hash, mask, current_probe + 1);
1431 if (result == NULL) {
1432 key_compare.Deopt("probes exhausted in keyed load dictionary lookup");
1433 result = graph()->GetConstantUndefined();
1440 // Key at current probe matches. Details must be zero, otherwise the
1441 // dictionary element requires special handling.
1442 HValue* details_index = AddUncasted<HAdd>(
1443 raw_index, Add<HConstant>(base_offset + 2));
1444 details_index->ClearFlag(HValue::kCanOverflow);
1446 HValue* details = Add<HLoadKeyed>(elements, details_index,
1447 static_cast<HValue*>(NULL),
1449 IfBuilder details_compare(this);
1450 details_compare.If<HCompareNumericAndBranch>(details,
1451 graph()->GetConstant0(),
1453 details_compare.ThenDeopt("keyed load dictionary element not fast case");
1455 details_compare.Else();
1457 // Key matches and details are zero --> fast case. Load and return the
// value (stored at entry offset +1, between key and details).
1459 HValue* result_index = AddUncasted<HAdd>(
1460 raw_index, Add<HConstant>(base_offset + 1));
1461 result_index->ClearFlag(HValue::kCanOverflow);
1463 Push(Add<HLoadKeyed>(elements, result_index,
1464 static_cast<HValue*>(NULL),
1467 details_compare.End();
// Emits the integer hash used for number-dictionary lookups: the element
// index is xor'ed with the heap's hash seed and then scrambled with the
// standard shift/add/xor integer hash steps (mirroring ComputeIntegerHash).
1475 HValue* HGraphBuilder::BuildElementIndexHash(HValue* index) {
1476 int32_t seed_value = static_cast<uint32_t>(isolate()->heap()->HashSeed());
1477 HValue* seed = Add<HConstant>(seed_value);
1478 HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, index, seed);
1480 // hash = ~hash + (hash << 15);
// ~hash is computed as hash ^ -1 since Hydrogen has no bitwise-not node.
1481 HValue* shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(15));
1482 HValue* not_hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash,
1483 graph()->GetConstantMinus1());
1484 hash = AddUncasted<HAdd>(shifted_hash, not_hash);
1486 // hash = hash ^ (hash >> 12);
1487 shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(12));
1488 hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
1490 // hash = hash + (hash << 2);
1491 shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(2));
1492 hash = AddUncasted<HAdd>(hash, shifted_hash);
1494 // hash = hash ^ (hash >> 4);
1495 shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(4));
1496 hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
1498 // hash = hash * 2057;
1499 hash = AddUncasted<HMul>(hash, Add<HConstant>(2057));
1500 hash->ClearFlag(HValue::kCanOverflow);
1502 // hash = hash ^ (hash >> 16);
1503 shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(16));
1504 return AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
// Inlined keyed load from a receiver with dictionary elements: hash the key,
// load the dictionary capacity, derive the probe mask (capacity - 1, valid
// because capacity is a power of two), and run the probe-sequence helper.
1508 HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoad(HValue* receiver,
1510 HValue* elements = AddLoadElements(receiver);
1512 HValue* hash = BuildElementIndexHash(key);
1514 HValue* capacity = Add<HLoadKeyed>(
1516 Add<HConstant>(NameDictionary::kCapacityIndex),
1517 static_cast<HValue*>(NULL),
1520 HValue* mask = AddUncasted<HSub>(capacity, graph()->GetConstant1());
1521 mask->ChangeRepresentation(Representation::Integer32());
1522 mask->ClearFlag(HValue::kCanOverflow);
1524 return BuildUncheckedDictionaryElementLoadHelper(elements, key,
// Builds an inlined JSRegExpResult: the result object and its FixedArray
// elements are allocated in a single folded allocation, headers of both are
// initialized (map taken from the native context's regexp-result map, stored
// without write barrier since it is immortal), and the elements are filled
// with undefined in a loop.
1529 HValue* HGraphBuilder::BuildRegExpConstructResult(HValue* length,
1532 NoObservableSideEffectsScope scope(this);
1534 // Compute the size of the RegExpResult followed by FixedArray with length.
1535 HValue* size = length;
1536 size = AddUncasted<HShl>(size, Add<HConstant>(kPointerSizeLog2));
1537 size = AddUncasted<HAdd>(size, Add<HConstant>(static_cast<int32_t>(
1538 JSRegExpResult::kSize + FixedArray::kHeaderSize)));
1540 // Make sure size does not exceed max regular heap object size.
1541 Add<HBoundsCheck>(size, Add<HConstant>(Page::kMaxRegularHeapObjectSize));
1543 // Allocate the JSRegExpResult and the FixedArray in one step.
1544 HValue* result = Add<HAllocate>(
1545 size, HType::JSArray(), NOT_TENURED, JS_ARRAY_TYPE);
1547 // Determine the elements FixedArray.
1548 HValue* elements = Add<HInnerAllocatedObject>(
1549 result, Add<HConstant>(JSRegExpResult::kSize));
1551 // Initialize the JSRegExpResult header.
1552 HValue* global_object = Add<HLoadNamedField>(
1553 context(), static_cast<HValue*>(NULL),
1554 HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
1555 HValue* native_context = Add<HLoadNamedField>(
1556 global_object, static_cast<HValue*>(NULL),
1557 HObjectAccess::ForGlobalObjectNativeContext());
1558 AddStoreMapNoWriteBarrier(result, Add<HLoadNamedField>(
1559 native_context, static_cast<HValue*>(NULL),
1560 HObjectAccess::ForContextSlot(Context::REGEXP_RESULT_MAP_INDEX)));
1561 Add<HStoreNamedField>(
1562 result, HObjectAccess::ForJSArrayOffset(JSArray::kPropertiesOffset),
1563 Add<HConstant>(isolate()->factory()->empty_fixed_array()));
1564 Add<HStoreNamedField>(
1565 result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
1567 Add<HStoreNamedField>(
1568 result, HObjectAccess::ForJSArrayOffset(JSArray::kLengthOffset), length);
1570 // Initialize the additional fields.
1571 Add<HStoreNamedField>(
1572 result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kIndexOffset),
1574 Add<HStoreNamedField>(
1575 result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kInputOffset),
1578 // Initialize the elements header.
1579 AddStoreMapConstantNoWriteBarrier(elements,
1580 isolate()->factory()->fixed_array_map());
1581 Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(), length);
1583 // Initialize the elements contents with undefined.
1584 LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
1585 index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
1587 Add<HStoreKeyed>(elements, index, graph()->GetConstantUndefined(),
// Converts a number to a string through the number-string cache.
// Constants are converted at compile time. Otherwise: hash the number (smi
// hash or xor of the heap-number halves), probe the cache, and join the
// smi/heap-number paths through the |found| continuation. A cache hit loads
// the cached string; a miss falls back to the NumberToStringSkipCache
// runtime function. |type| feedback lets impossible paths deopt instead of
// emitting code.
1596 HValue* HGraphBuilder::BuildNumberToString(HValue* object, Type* type) {
1597 NoObservableSideEffectsScope scope(this);
1599 // Convert constant numbers at compile time.
1600 if (object->IsConstant() && HConstant::cast(object)->HasNumberValue()) {
1601 Handle<Object> number = HConstant::cast(object)->handle(isolate());
1602 Handle<String> result = isolate()->factory()->NumberToString(number);
1603 return Add<HConstant>(result);
1606 // Create a joinable continuation.
1607 HIfContinuation found(graph()->CreateBasicBlock(),
1608 graph()->CreateBasicBlock());
1610 // Load the number string cache.
1611 HValue* number_string_cache =
1612 Add<HLoadRoot>(Heap::kNumberStringCacheRootIndex);
1614 // Make the hash mask from the length of the number string cache. It
1615 // contains two elements (number and string) for each cache entry.
1616 HValue* mask = AddLoadFixedArrayLength(number_string_cache);
1617 mask->set_type(HType::Smi());
1618 mask = AddUncasted<HSar>(mask, graph()->GetConstant1());
1619 mask = AddUncasted<HSub>(mask, graph()->GetConstant1());
1621 // Check whether object is a smi.
1622 IfBuilder if_objectissmi(this);
1623 if_objectissmi.If<HIsSmiAndBranch>(object);
1624 if_objectissmi.Then();
1626 // Compute hash for smi similar to smi_get_hash().
1627 HValue* hash = AddUncasted<HBitwise>(Token::BIT_AND, object, mask);
// Key index is hash * 2: each cache entry occupies two array slots.
1630 HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
1631 HValue* key = Add<HLoadKeyed>(number_string_cache, key_index,
1632 static_cast<HValue*>(NULL),
1633 FAST_ELEMENTS, ALLOW_RETURN_HOLE);
1635 // Check if object == key.
1636 IfBuilder if_objectiskey(this);
1637 if_objectiskey.If<HCompareObjectEqAndBranch>(object, key);
1638 if_objectiskey.Then();
1640 // Make the key_index available.
1643 if_objectiskey.JoinContinuation(&found);
1645 if_objectissmi.Else();
// Statically-known smis cannot reach the non-smi path.
1647 if (type->Is(Type::SignedSmall())) {
1648 if_objectissmi.Deopt("Expected smi");
1650 // Check if the object is a heap number.
1651 IfBuilder if_objectisnumber(this);
1652 HValue* objectisnumber = if_objectisnumber.If<HCompareMap>(
1653 object, isolate()->factory()->heap_number_map());
1654 if_objectisnumber.Then();
1656 // Compute hash for heap number similar to double_get_hash().
1657 HValue* low = Add<HLoadNamedField>(
1658 object, objectisnumber,
1659 HObjectAccess::ForHeapNumberValueLowestBits());
1660 HValue* high = Add<HLoadNamedField>(
1661 object, objectisnumber,
1662 HObjectAccess::ForHeapNumberValueHighestBits());
1663 HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, low, high);
1664 hash = AddUncasted<HBitwise>(Token::BIT_AND, hash, mask);
1667 HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
1668 HValue* key = Add<HLoadKeyed>(number_string_cache, key_index,
1669 static_cast<HValue*>(NULL),
1670 FAST_ELEMENTS, ALLOW_RETURN_HOLE);
1672 // Check if key is a heap number (the number string cache contains only
1673 // SMIs and heap number, so it is sufficient to do a SMI check here).
1674 IfBuilder if_keyisnotsmi(this);
1675 HValue* keyisnotsmi = if_keyisnotsmi.IfNot<HIsSmiAndBranch>(key);
1676 if_keyisnotsmi.Then();
1678 // Check if values of key and object match.
1679 IfBuilder if_keyeqobject(this);
1680 if_keyeqobject.If<HCompareNumericAndBranch>(
1681 Add<HLoadNamedField>(key, keyisnotsmi,
1682 HObjectAccess::ForHeapNumberValue()),
1683 Add<HLoadNamedField>(object, objectisnumber,
1684 HObjectAccess::ForHeapNumberValue()),
1686 if_keyeqobject.Then();
1688 // Make the key_index available.
1691 if_keyeqobject.JoinContinuation(&found);
1693 if_keyisnotsmi.JoinContinuation(&found);
1695 if_objectisnumber.Else();
// Statically-known numbers must be smi or heap number — anything else deopts.
1697 if (type->Is(Type::Number())) {
1698 if_objectisnumber.Deopt("Expected heap number");
1701 if_objectisnumber.JoinContinuation(&found);
1704 if_objectissmi.JoinContinuation(&found);
1706 // Check for cache hit.
1707 IfBuilder if_found(this, &found);
1710 // Count number to string operation in native code.
1711 AddIncrementCounter(isolate()->counters()->number_to_string_native());
1713 // Load the value in case of cache hit.
1714 HValue* key_index = Pop();
1715 HValue* value_index = AddUncasted<HAdd>(key_index, graph()->GetConstant1());
1716 Push(Add<HLoadKeyed>(number_string_cache, value_index,
1717 static_cast<HValue*>(NULL),
1718 FAST_ELEMENTS, ALLOW_RETURN_HOLE));
1722 // Cache miss, fallback to runtime.
1723 Add<HPushArgument>(object);
1724 Push(Add<HCallRuntime>(
1725 isolate()->factory()->empty_string(),
1726 Runtime::FunctionForId(Runtime::kHiddenNumberToStringSkipCache),
// Central allocation helper: when the allocation mode asks for allocation
// mementos, the requested size is extended by AllocationMemento::kSize and
// the memento is initialized right behind the object; pretenuring decision
// and feedback site also come from |allocation_mode|.
1735 HAllocate* HGraphBuilder::BuildAllocate(
1736 HValue* object_size,
1738 InstanceType instance_type,
1739 HAllocationMode allocation_mode) {
1740 // Compute the effective allocation size.
1741 HValue* size = object_size;
1742 if (allocation_mode.CreateAllocationMementos()) {
1743 size = AddUncasted<HAdd>(size, Add<HConstant>(AllocationMemento::kSize));
1744 size->ClearFlag(HValue::kCanOverflow);
1747 // Perform the actual allocation.
1748 HAllocate* object = Add<HAllocate>(
1749 size, type, allocation_mode.GetPretenureMode(),
1750 instance_type, allocation_mode.feedback_site());
1752 // Setup the allocation memento.
1753 if (allocation_mode.CreateAllocationMementos()) {
1754 BuildCreateAllocationMemento(
1755 object, object_size, allocation_mode.current_site());
// Adds two string lengths and bounds-checks the sum against
// String::kMaxLength (expressed as sum < kMaxLength + 1); too-long strings
// thus deopt instead of allocating.
1762 HValue* HGraphBuilder::BuildAddStringLengths(HValue* left_length,
1763 HValue* right_length) {
1764 // Compute the combined string length and check against max string length.
1765 HValue* length = AddUncasted<HAdd>(left_length, right_length);
1766 // Check that length <= kMaxLength <=> length < MaxLength + 1.
1767 HValue* max_length = Add<HConstant>(String::kMaxLength + 1);
1768 Add<HBoundsCheck>(length, max_length);
// Allocates and initializes a ConsString for |left| + |right|. The instance
// types of both operands are inspected at run time to decide between the
// one-byte and two-byte cons string map (one-byte data hints allow a
// one-byte cons even when an operand is nominally two-byte); then hash,
// length, first and second fields are initialized.
1773 HValue* HGraphBuilder::BuildCreateConsString(
1777 HAllocationMode allocation_mode) {
1778 // Determine the string instance types.
1779 HInstruction* left_instance_type = AddLoadStringInstanceType(left);
1780 HInstruction* right_instance_type = AddLoadStringInstanceType(right);
1782 // Allocate the cons string object. HAllocate does not care whether we
1783 // pass CONS_STRING_TYPE or CONS_ASCII_STRING_TYPE here, so we just use
1784 // CONS_STRING_TYPE here. Below we decide whether the cons string is
1785 // one-byte or two-byte and set the appropriate map.
1786 ASSERT(HAllocate::CompatibleInstanceTypes(CONS_STRING_TYPE,
1787 CONS_ASCII_STRING_TYPE));
1788 HAllocate* result = BuildAllocate(Add<HConstant>(ConsString::kSize),
1789 HType::String(), CONS_STRING_TYPE,
1792 // Compute intersection and difference of instance types.
1793 HValue* anded_instance_types = AddUncasted<HBitwise>(
1794 Token::BIT_AND, left_instance_type, right_instance_type);
1795 HValue* xored_instance_types = AddUncasted<HBitwise>(
1796 Token::BIT_XOR, left_instance_type, right_instance_type);
1798 // We create a one-byte cons string if
1799 // 1. both strings are one-byte, or
1800 // 2. at least one of the strings is two-byte, but happens to contain only
1801 // one-byte characters.
1802 // To do this, we check
1803 // 1. if both strings are one-byte, or if the one-byte data hint is set in
1805 // 2. if one of the strings has the one-byte data hint set and the other
1806 // string is one-byte.
1807 IfBuilder if_onebyte(this);
1808 STATIC_ASSERT(kOneByteStringTag != 0);
1809 STATIC_ASSERT(kOneByteDataHintMask != 0);
1810 if_onebyte.If<HCompareNumericAndBranch>(
1811 AddUncasted<HBitwise>(
1812 Token::BIT_AND, anded_instance_types,
1813 Add<HConstant>(static_cast<int32_t>(
1814 kStringEncodingMask | kOneByteDataHintMask))),
1815 graph()->GetConstant0(), Token::NE);
1817 STATIC_ASSERT(kOneByteStringTag != 0 &&
1818 kOneByteDataHintTag != 0 &&
1819 kOneByteDataHintTag != kOneByteStringTag);
1820 if_onebyte.If<HCompareNumericAndBranch>(
1821 AddUncasted<HBitwise>(
1822 Token::BIT_AND, xored_instance_types,
1823 Add<HConstant>(static_cast<int32_t>(
1824 kOneByteStringTag | kOneByteDataHintTag))),
1825 Add<HConstant>(static_cast<int32_t>(
1826 kOneByteStringTag | kOneByteDataHintTag)), Token::EQ);
1829 // We can safely skip the write barrier for storing the map here.
1830 Handle<Map> map = isolate()->factory()->cons_ascii_string_map();
1831 AddStoreMapConstantNoWriteBarrier(result, map);
1835 // We can safely skip the write barrier for storing the map here.
1836 Handle<Map> map = isolate()->factory()->cons_string_map();
1837 AddStoreMapConstantNoWriteBarrier(result, map);
1841 // Initialize the cons string fields.
1842 Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
1843 Add<HConstant>(String::kEmptyHashField));
1844 Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
1845 Add<HStoreNamedField>(result, HObjectAccess::ForConsStringFirst(), left);
1846 Add<HStoreNamedField>(result, HObjectAccess::ForConsStringSecond(), right);
1848 // Count the native string addition.
1849 AddIncrementCounter(isolate()->counters()->string_add_native());
// Emits a character-copy loop between two sequential strings: for each index
// in [0, length) the char at src_offset + i is read with the source encoding
// and written at dst_offset + i with the destination encoding. Copying into
// a one-byte destination is only legal from a one-byte source.
1855 void HGraphBuilder::BuildCopySeqStringChars(HValue* src,
1857 String::Encoding src_encoding,
1860 String::Encoding dst_encoding,
1862 ASSERT(dst_encoding != String::ONE_BYTE_ENCODING ||
1863 src_encoding == String::ONE_BYTE_ENCODING);
1864 LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
1865 HValue* index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
1867 HValue* src_index = AddUncasted<HAdd>(src_offset, index);
1869 AddUncasted<HSeqStringGetChar>(src_encoding, src, src_index);
1870 HValue* dst_index = AddUncasted<HAdd>(dst_offset, index);
1871 Add<HSeqStringSetChar>(dst_encoding, dst, dst_index, value);
// Computes the allocation size for an object with an already-aligned
// |header_size| plus |unaligned_size| payload bytes, rounded up to object
// alignment: (unaligned + header + mask) & ~mask.
1877 HValue* HGraphBuilder::BuildObjectSizeAlignment(
1878 HValue* unaligned_size, int header_size) {
1879 ASSERT((header_size & kObjectAlignmentMask) == 0);
1880 HValue* size = AddUncasted<HAdd>(
1881 unaligned_size, Add<HConstant>(static_cast<int32_t>(
1882 header_size + kObjectAlignmentMask)));
1883 size->ClearFlag(HValue::kCanOverflow);
1884 return AddUncasted<HBitwise>(
1885 Token::BIT_AND, size, Add<HConstant>(static_cast<int32_t>(
1886 ~kObjectAlignmentMask)));
// Core of inlined string concatenation for two non-empty strings. The
// combined length is computed (with max-length check); if it reaches
// ConsString::kMinLength a cons string is built (constant-folded at compile
// time when one operand's constant length already guarantees it). Shorter
// results are built as a flat sequential string when both operands are
// sequential with the same encoding: pick the one-/two-byte map and size,
// allocate, and copy both operands' characters. Anything else falls back to
// the StringAdd runtime function.
1890 HValue* HGraphBuilder::BuildUncheckedStringAdd(
1893 HAllocationMode allocation_mode) {
1894 // Determine the string lengths.
1895 HValue* left_length = AddLoadStringLength(left);
1896 HValue* right_length = AddLoadStringLength(right);
1898 // Compute the combined string length.
1899 HValue* length = BuildAddStringLengths(left_length, right_length);
1901 // Do some manual constant folding here.
1902 if (left_length->IsConstant()) {
1903 HConstant* c_left_length = HConstant::cast(left_length);
// Callers must have peeled off empty operands (see BuildStringAdd).
1904 ASSERT_NE(0, c_left_length->Integer32Value());
1905 if (c_left_length->Integer32Value() + 1 >= ConsString::kMinLength) {
1906 // The right string contains at least one character.
1907 return BuildCreateConsString(length, left, right, allocation_mode);
1909 } else if (right_length->IsConstant()) {
1910 HConstant* c_right_length = HConstant::cast(right_length);
1911 ASSERT_NE(0, c_right_length->Integer32Value());
1912 if (c_right_length->Integer32Value() + 1 >= ConsString::kMinLength) {
1913 // The left string contains at least one character.
1914 return BuildCreateConsString(length, left, right, allocation_mode);
1918 // Check if we should create a cons string.
1919 IfBuilder if_createcons(this);
1920 if_createcons.If<HCompareNumericAndBranch>(
1921 length, Add<HConstant>(ConsString::kMinLength), Token::GTE);
1922 if_createcons.Then();
1924 // Create a cons string.
1925 Push(BuildCreateConsString(length, left, right, allocation_mode));
1927 if_createcons.Else();
1929 // Determine the string instance types.
1930 HValue* left_instance_type = AddLoadStringInstanceType(left);
1931 HValue* right_instance_type = AddLoadStringInstanceType(right);
1933 // Compute union and difference of instance types.
1934 HValue* ored_instance_types = AddUncasted<HBitwise>(
1935 Token::BIT_OR, left_instance_type, right_instance_type);
1936 HValue* xored_instance_types = AddUncasted<HBitwise>(
1937 Token::BIT_XOR, left_instance_type, right_instance_type);
1939 // Check if both strings have the same encoding and both are
// sequential: encodings match when the xor of the instance types has no
// encoding bit set; both are sequential when the or has no representation
// bits set (kSeqStringTag == 0).
1941 IfBuilder if_sameencodingandsequential(this);
1942 if_sameencodingandsequential.If<HCompareNumericAndBranch>(
1943 AddUncasted<HBitwise>(
1944 Token::BIT_AND, xored_instance_types,
1945 Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
1946 graph()->GetConstant0(), Token::EQ);
1947 if_sameencodingandsequential.And();
1948 STATIC_ASSERT(kSeqStringTag == 0);
1949 if_sameencodingandsequential.If<HCompareNumericAndBranch>(
1950 AddUncasted<HBitwise>(
1951 Token::BIT_AND, ored_instance_types,
1952 Add<HConstant>(static_cast<int32_t>(kStringRepresentationMask))),
1953 graph()->GetConstant0(), Token::EQ);
1954 if_sameencodingandsequential.Then();
1956 HConstant* string_map =
1957 Add<HConstant>(isolate()->factory()->string_map());
1958 HConstant* ascii_string_map =
1959 Add<HConstant>(isolate()->factory()->ascii_string_map());
1961 // Determine map and size depending on whether result is one-byte string.
1962 IfBuilder if_onebyte(this);
1963 STATIC_ASSERT(kOneByteStringTag != 0);
1964 if_onebyte.If<HCompareNumericAndBranch>(
1965 AddUncasted<HBitwise>(
1966 Token::BIT_AND, ored_instance_types,
1967 Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
1968 graph()->GetConstant0(), Token::NE);
1971 // Allocate sequential one-byte string object.
1973 Push(ascii_string_map);
1977 // Allocate sequential two-byte string object.
// Two-byte payload is length * 2 bytes.
1978 HValue* size = AddUncasted<HShl>(length, graph()->GetConstant1());
1979 size->ClearFlag(HValue::kCanOverflow);
1980 size->SetFlag(HValue::kUint32);
1985 HValue* map = Pop();
1987 // Calculate the number of bytes needed for the characters in the
1988 // string while observing object alignment.
1989 STATIC_ASSERT((SeqString::kHeaderSize & kObjectAlignmentMask) == 0);
1990 HValue* size = BuildObjectSizeAlignment(Pop(), SeqString::kHeaderSize);
1992 // Allocate the string object. HAllocate does not care whether we pass
1993 // STRING_TYPE or ASCII_STRING_TYPE here, so we just use STRING_TYPE here.
1994 HAllocate* result = BuildAllocate(
1995 size, HType::String(), STRING_TYPE, allocation_mode);
1997 // We can safely skip the write barrier for storing map here.
1998 AddStoreMapNoWriteBarrier(result, map);
2000 // Initialize the string fields.
2001 Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
2002 Add<HConstant>(String::kEmptyHashField));
2003 Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
2005 // Copy characters to the result string.
2006 IfBuilder if_twobyte(this);
2007 if_twobyte.If<HCompareObjectEqAndBranch>(map, string_map);
2010 // Copy characters from the left string.
2011 BuildCopySeqStringChars(
2012 left, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
2013 result, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
2016 // Copy characters from the right string.
2017 BuildCopySeqStringChars(
2018 right, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
2019 result, left_length, String::TWO_BYTE_ENCODING,
2024 // Copy characters from the left string.
2025 BuildCopySeqStringChars(
2026 left, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
2027 result, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
2030 // Copy characters from the right string.
2031 BuildCopySeqStringChars(
2032 right, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
2033 result, left_length, String::ONE_BYTE_ENCODING,
2038 // Count the native string addition.
2039 AddIncrementCounter(isolate()->counters()->string_add_native());
2041 // Return the sequential string.
2044 if_sameencodingandsequential.Else();
2046 // Fallback to the runtime to add the two strings.
2047 Add<HPushArgument>(left);
2048 Add<HPushArgument>(right);
2049 Push(Add<HCallRuntime>(
2050 isolate()->factory()->empty_string(),
2051 Runtime::FunctionForId(Runtime::kHiddenStringAdd),
2054 if_sameencodingandsequential.End();
2056 if_createcons.End();
// Top-level inlined string addition: short-circuits an empty left or right
// operand (returning the other string directly, while still counting the
// native addition), and only delegates to BuildUncheckedStringAdd when both
// operands are non-empty. Runs inside a no-observable-side-effects scope.
2062 HValue* HGraphBuilder::BuildStringAdd(
2065 HAllocationMode allocation_mode) {
2066 NoObservableSideEffectsScope no_effects(this);
2068 // Determine string lengths.
2069 HValue* left_length = AddLoadStringLength(left);
2070 HValue* right_length = AddLoadStringLength(right);
2072 // Check if left string is empty.
2073 IfBuilder if_leftempty(this);
2074 if_leftempty.If<HCompareNumericAndBranch>(
2075 left_length, graph()->GetConstant0(), Token::EQ);
2076 if_leftempty.Then();
2078 // Count the native string addition.
2079 AddIncrementCounter(isolate()->counters()->string_add_native());
2081 // Just return the right string.
2084 if_leftempty.Else();
2086 // Check if right string is empty.
2087 IfBuilder if_rightempty(this);
2088 if_rightempty.If<HCompareNumericAndBranch>(
2089 right_length, graph()->GetConstant0(), Token::EQ);
2090 if_rightempty.Then();
2092 // Count the native string addition.
2093 AddIncrementCounter(isolate()->counters()->string_add_native());
2095 // Just return the left string.
2098 if_rightempty.Else();
2100 // Add the two non-empty strings.
2101 Push(BuildUncheckedStringAdd(left, right, allocation_mode));
2103 if_rightempty.End();
// Emits the element load or store for a monomorphic keyed access on an
// object whose map has already been checked. Handles external/fixed typed
// arrays, fast smi/object/double elements, copy-on-write backing stores and
// grow-mode stores. NOTE(review): `key`, `val` and `is_js_array` are
// parameters declared on lines elided from this listing — confirm against
// the full source.
2111 HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
2112 HValue* checked_object,
2116 ElementsKind elements_kind,
2117 PropertyAccessType access_type,
2118 LoadKeyedHoleMode load_mode,
2119 KeyedAccessStoreMode store_mode) {
2120 ASSERT((!IsExternalArrayElementsKind(elements_kind) &&
2121 !IsFixedTypedArrayElementsKind(elements_kind)) ||
2123 // No GVNFlag is necessary for ElementsKind if there is an explicit dependency
2124 // on a HElementsTransition instruction. The flag can also be removed if the
2125 // map to check has FAST_HOLEY_ELEMENTS, since there can be no further
2126 // ElementsKind transitions. Finally, the dependency can be removed for stores
2127 // for FAST_ELEMENTS, since a transition to HOLEY elements won't change the
2128 // generated store code.
2129 if ((elements_kind == FAST_HOLEY_ELEMENTS) ||
2130 (elements_kind == FAST_ELEMENTS && access_type == STORE)) {
2131 checked_object->ClearDependsOnFlag(kElementsKind);
2134 bool fast_smi_only_elements = IsFastSmiElementsKind(elements_kind);
2135 bool fast_elements = IsFastObjectElementsKind(elements_kind);
2136 HValue* elements = AddLoadElements(checked_object);
// Transitioning fast stores must not write into a COW array; check the map
// unless the COW case is handled explicitly below.
2137 if (access_type == STORE && (fast_elements || fast_smi_only_elements) &&
2138 store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
2139 HCheckMaps* check_cow_map = Add<HCheckMaps>(
2140 elements, isolate()->factory()->fixed_array_map());
2141 check_cow_map->ClearDependsOnFlag(kElementsKind);
2143 HInstruction* length = NULL;
2145 length = Add<HLoadNamedField>(
2146 checked_object, static_cast<HValue*>(NULL),
2147 HObjectAccess::ForArrayLength(elements_kind));
2149 length = AddLoadFixedArrayLength(elements);
2151 length->set_type(HType::Smi());
2152 HValue* checked_key = NULL;
// External and fixed typed arrays: access the raw backing store directly.
2153 if (IsExternalArrayElementsKind(elements_kind) ||
2154 IsFixedTypedArrayElementsKind(elements_kind)) {
2155 HValue* backing_store;
2156 if (IsExternalArrayElementsKind(elements_kind)) {
2157 backing_store = Add<HLoadNamedField>(
2158 elements, static_cast<HValue*>(NULL),
2159 HObjectAccess::ForExternalArrayExternalPointer());
2161 backing_store = elements;
// Out-of-bounds stores are silently dropped; negative keys deoptimize.
2163 if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
2164 NoObservableSideEffectsScope no_effects(this);
2165 IfBuilder length_checker(this);
2166 length_checker.If<HCompareNumericAndBranch>(key, length, Token::LT);
2167 length_checker.Then();
2168 IfBuilder negative_checker(this);
2169 HValue* bounds_check = negative_checker.If<HCompareNumericAndBranch>(
2170 key, graph()->GetConstant0(), Token::GTE);
2171 negative_checker.Then();
2172 HInstruction* result = AddElementAccess(
2173 backing_store, key, val, bounds_check, elements_kind, access_type);
2174 negative_checker.ElseDeopt("Negative key encountered");
2175 negative_checker.End();
2176 length_checker.End();
2179 ASSERT(store_mode == STANDARD_STORE);
2180 checked_key = Add<HBoundsCheck>(key, length);
2181 return AddElementAccess(
2182 backing_store, checked_key, val,
2183 checked_object, elements_kind, access_type);
2186 ASSERT(fast_smi_only_elements ||
2188 IsFastDoubleElementsKind(elements_kind));
2190 // In case val is stored into a fast smi array, assure that the value is a smi
2191 // before manipulating the backing store. Otherwise the actual store may
2192 // deopt, leaving the backing store in an invalid state.
2193 if (access_type == STORE && IsFastSmiElementsKind(elements_kind) &&
2194 !val->type().IsSmi()) {
2195 val = AddUncasted<HForceRepresentation>(val, Representation::Smi());
// Growing stores may enlarge the backing store before the bounds check.
2198 if (IsGrowStoreMode(store_mode)) {
2199 NoObservableSideEffectsScope no_effects(this);
2200 elements = BuildCheckForCapacityGrow(checked_object, elements,
2201 elements_kind, length, key,
2202 is_js_array, access_type);
2205 checked_key = Add<HBoundsCheck>(key, length);
2207 if (access_type == STORE && (fast_elements || fast_smi_only_elements)) {
2208 if (store_mode == STORE_NO_TRANSITION_HANDLE_COW) {
2209 NoObservableSideEffectsScope no_effects(this);
2210 elements = BuildCopyElementsOnWrite(checked_object, elements,
2211 elements_kind, length);
2213 HCheckMaps* check_cow_map = Add<HCheckMaps>(
2214 elements, isolate()->factory()->fixed_array_map())
2215 check_cow_map->ClearDependsOnFlag(kElementsKind);
2219 return AddElementAccess(elements, checked_key, val, checked_object,
2220 elements_kind, access_type, load_mode);
// Allocates a JSArray for a given length argument. For a constant smi length
// the array is allocated directly (empty array for length 0); otherwise the
// length is bounds-checked against the maximum fast-element array size and
// the capacity is chosen at runtime (preallocated capacity for length 0,
// length itself otherwise).
2225 HValue* HGraphBuilder::BuildAllocateArrayFromLength(
2226 JSArrayBuilder* array_builder,
2227 HValue* length_argument) {
2228 if (length_argument->IsConstant() &&
2229 HConstant::cast(length_argument)->HasSmiValue()) {
2230 int array_length = HConstant::cast(length_argument)->Integer32Value();
2231 HValue* new_object = array_length == 0
2232 ? array_builder->AllocateEmptyArray()
2233 : array_builder->AllocateArray(length_argument, length_argument);
2237 HValue* constant_zero = graph()->GetConstant0();
2238 HConstant* max_alloc_length =
2239 Add<HConstant>(JSObject::kInitialMaxFastElementArray);
// Deopt if the requested length exceeds the fast-element allocation limit.
2240 HInstruction* checked_length = Add<HBoundsCheck>(length_argument,
2242 IfBuilder if_builder(this);
2243 if_builder.If<HCompareNumericAndBranch>(checked_length, constant_zero,
2246 const int initial_capacity = JSArray::kPreallocatedArrayElements;
2247 HConstant* initial_capacity_node = Add<HConstant>(initial_capacity);
2248 Push(initial_capacity_node); // capacity
2249 Push(constant_zero); // length
2251 if (!(top_info()->IsStub()) &&
2252 IsFastPackedElementsKind(array_builder->kind())) {
2253 // We'll come back later with better (holey) feedback.
2254 if_builder.Deopt("Holey array despite packed elements_kind feedback");
2256 Push(checked_length); // capacity
2257 Push(checked_length); // length
2261 // Figure out total size
2262 HValue* length = Pop();
2263 HValue* capacity = Pop();
2264 return array_builder->AllocateArray(capacity, length);
// Allocates a raw FixedArray or FixedDoubleArray backing store of the given
// capacity (header not yet initialized). Size = header + capacity * element
// size; the multiply/add cannot overflow because capacity was bounds-checked
// by the caller.
2267 HValue* HGraphBuilder::BuildAllocateElements(ElementsKind kind,
2270 InstanceType instance_type;
2272 if (IsFastDoubleElementsKind(kind)) {
2273 elements_size = kDoubleSize;
2274 instance_type = FIXED_DOUBLE_ARRAY_TYPE;
2276 elements_size = kPointerSize;
2277 instance_type = FIXED_ARRAY_TYPE;
2280 HConstant* elements_size_value = Add<HConstant>(elements_size);
2281 HValue* mul = AddUncasted<HMul>(capacity, elements_size_value);
2282 mul->ClearFlag(HValue::kCanOverflow);
2284 HConstant* header_size = Add<HConstant>(FixedArray::kHeaderSize);
2285 HValue* total_size = AddUncasted<HAdd>(mul, header_size);
2286 total_size->ClearFlag(HValue::kCanOverflow);
// Without per-site pretenuring feedback, fall back to the heap's global
// pretenure decision.
2288 PretenureFlag pretenure_flag = !FLAG_allocation_site_pretenuring ?
2289 isolate()->heap()->GetPretenureMode() : NOT_TENURED;
2291 return Add<HAllocate>(total_size, HType::Tagged(), pretenure_flag,
// Initializes the header of a freshly allocated elements store: writes the
// appropriate (double or plain) fixed-array map and the length field.
2296 void HGraphBuilder::BuildInitializeElementsHeader(HValue* elements,
2299 Factory* factory = isolate()->factory();
2300 Handle<Map> map = IsFastDoubleElementsKind(kind)
2301 ? factory->fixed_double_array_map()
2302 : factory->fixed_array_map();
2304 AddStoreMapConstant(elements, map);
2305 Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(),
// Convenience wrapper: allocates a backing store and initializes its header
// (map + length) in one step. Returns the new elements store.
2310 HValue* HGraphBuilder::BuildAllocateElementsAndInitializeElementsHeader(
2313 // The HForceRepresentation is to prevent possible deopt on int-smi
2314 // conversion after allocation but before the new object fields are set.
2315 capacity = AddUncasted<HForceRepresentation>(capacity, Representation::Smi());
2316 HValue* new_elements = BuildAllocateElements(kind, capacity);
2317 BuildInitializeElementsHeader(new_elements, kind, capacity);
2318 return new_elements;
// Fills in the header of a freshly allocated JSArray: map, (empty)
// properties, length, optional allocation memento, and the elements pointer,
// which points just past the JSArray (and memento, if present) inside the
// same allocation. Returns the inner-allocated elements store.
2322 HInnerAllocatedObject* HGraphBuilder::BuildJSArrayHeader(HValue* array,
2324 AllocationSiteMode mode,
2325 ElementsKind elements_kind,
2326 HValue* allocation_site_payload,
2327 HValue* length_field) {
2329 Add<HStoreNamedField>(array, HObjectAccess::ForMap(), array_map);
2331 HConstant* empty_fixed_array =
2332 Add<HConstant>(isolate()->factory()->empty_fixed_array());
2334 HObjectAccess access = HObjectAccess::ForPropertiesPointer();
2335 Add<HStoreNamedField>(array, access, empty_fixed_array);
2336 Add<HStoreNamedField>(array, HObjectAccess::ForArrayLength(elements_kind),
2339 if (mode == TRACK_ALLOCATION_SITE) {
2340 BuildCreateAllocationMemento(
2341 array, Add<HConstant>(JSArray::kSize), allocation_site_payload);
// The elements store follows the JSArray header (and the memento when
// allocation-site tracking is on) within the folded allocation.
2344 int elements_location = JSArray::kSize;
2345 if (mode == TRACK_ALLOCATION_SITE) {
2346 elements_location += AllocationMemento::kSize;
2349 HInnerAllocatedObject* elements = Add<HInnerAllocatedObject>(
2350 array, Add<HConstant>(elements_location));
2351 Add<HStoreNamedField>(array, HObjectAccess::ForElementsPointer(), elements);
// Emits a single keyed load or store on an already-checked backing store.
// Stores clamp values for UINT8_CLAMPED kinds; loads of uint32 element kinds
// are registered for the safe-uint32 analysis when that flag is on.
// NOTE(review): the trailing return of `load` is on a line elided from this
// listing.
2356 HInstruction* HGraphBuilder::AddElementAccess(
2358 HValue* checked_key,
2361 ElementsKind elements_kind,
2362 PropertyAccessType access_type,
2363 LoadKeyedHoleMode load_mode) {
2364 if (access_type == STORE) {
2365 ASSERT(val != NULL);
2366 if (elements_kind == EXTERNAL_UINT8_CLAMPED_ELEMENTS ||
2367 elements_kind == UINT8_CLAMPED_ELEMENTS) {
2368 val = Add<HClampToUint8>(val);
// Stores into FAST_SMI_ELEMENTS hit initialized entries; others may be the
// first write to the slot.
2370 return Add<HStoreKeyed>(elements, checked_key, val, elements_kind,
2371 elements_kind == FAST_SMI_ELEMENTS
2372 ? STORE_TO_INITIALIZED_ENTRY
2373 : INITIALIZING_STORE);
2376 ASSERT(access_type == LOAD);
2377 ASSERT(val == NULL);
2378 HLoadKeyed* load = Add<HLoadKeyed>(
2379 elements, checked_key, dependency, elements_kind, load_mode);
2380 if (FLAG_opt_safe_uint32_operations &&
2381 (elements_kind == EXTERNAL_UINT32_ELEMENTS ||
2382 elements_kind == UINT32_ELEMENTS)) {
2383 graph()->RecordUint32Instruction(load);
// Loads the elements pointer of a JS object (no dependency value).
2389 HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object) {
2390 return Add<HLoadNamedField>(
2391 object, static_cast<HValue*>(NULL), HObjectAccess::ForElementsPointer());
// Loads the length field of a FixedArray(-like) backing store.
2395 HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(HValue* object) {
2396 return Add<HLoadNamedField>(
2397 object, static_cast<HValue*>(NULL), HObjectAccess::ForFixedArrayLength());
// Computes the grown capacity for a backing store:
// new_capacity = old + old/2 + 16, i.e. 1.5x growth plus a minimum slack.
// Overflow flags are cleared because the caller bounds-checks the result.
2401 HValue* HGraphBuilder::BuildNewElementsCapacity(HValue* old_capacity) {
2402 HValue* half_old_capacity = AddUncasted<HShr>(old_capacity,
2403 graph_->GetConstant1());
2405 HValue* new_capacity = AddUncasted<HAdd>(half_old_capacity, old_capacity);
2406 new_capacity->ClearFlag(HValue::kCanOverflow);
2408 HValue* min_growth = Add<HConstant>(16);
2410 new_capacity = AddUncasted<HAdd>(new_capacity, min_growth);
2411 new_capacity->ClearFlag(HValue::kCanOverflow);
2413 return new_capacity;
// Deopt-checks that an array of `length` elements of the given kind fits
// into a regular (new-space-allocatable) heap object, accounting for the
// JSArray header.
2417 void HGraphBuilder::BuildNewSpaceArrayCheck(HValue* length, ElementsKind kind) {
2418 int element_size = IsFastDoubleElementsKind(kind) ? kDoubleSize
2420 int max_size = Page::kMaxRegularHeapObjectSize / element_size;
2421 max_size -= JSArray::kSize / element_size;
2422 HConstant* max_size_constant = Add<HConstant>(max_size);
2423 Add<HBoundsCheck>(length, max_size_constant);
// Grows an object's backing store: checks the new capacity fits in new
// space, allocates and initializes a new store, copies the old elements
// over (possibly converting kinds) and rewires the object's elements
// pointer. Returns the new elements store.
2427 HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object,
2430 ElementsKind new_kind,
2432 HValue* new_capacity) {
2433 BuildNewSpaceArrayCheck(new_capacity, new_kind);
2435 HValue* new_elements = BuildAllocateElementsAndInitializeElementsHeader(
2436 new_kind, new_capacity);
2438 BuildCopyElements(elements, kind,
2439 new_elements, new_kind,
2440 length, new_capacity);
2442 Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
2445 return new_elements;
// Fills elements [from, to) of a backing store with the hole value (the
// hole NaN for double arrays). Small constant ranges starting at 0 are
// unrolled; otherwise a loop is emitted.
2449 void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
2450 ElementsKind elements_kind,
2453 // Fast elements kinds need to be initialized in case statements below cause
2454 // a garbage collection.
2455 Factory* factory = isolate()->factory();
2457 double nan_double = FixedDoubleArray::hole_nan_as_double();
2458 HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
2459 ? Add<HConstant>(factory->the_hole_value())
2460 : Add<HConstant>(nan_double);
2462 // Special loop unfolding case
2463 static const int kLoopUnfoldLimit = 8;
2464 STATIC_ASSERT(JSArray::kPreallocatedArrayElements <= kLoopUnfoldLimit);
// initial_capacity stays -1 unless the [from, to) range is a small constant
// range starting at zero, in which case the fill loop is unrolled.
2465 int initial_capacity = -1;
2466 if (from->IsInteger32Constant() && to->IsInteger32Constant()) {
2467 int constant_from = from->GetInteger32Constant();
2468 int constant_to = to->GetInteger32Constant();
2470 if (constant_from == 0 && constant_to <= kLoopUnfoldLimit) {
2471 initial_capacity = constant_to;
2475 // Since we're about to store a hole value, the store instruction below must
2476 // assume an elements kind that supports heap object values.
2477 if (IsFastSmiOrObjectElementsKind(elements_kind)) {
2478 elements_kind = FAST_HOLEY_ELEMENTS;
2481 if (initial_capacity >= 0) {
2482 for (int i = 0; i < initial_capacity; i++) {
2483 HInstruction* key = Add<HConstant>(i);
2484 Add<HStoreKeyed>(elements, key, hole, elements_kind);
2487 LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
2489 HValue* key = builder.BeginBody(from, to, Token::LT);
2491 Add<HStoreKeyed>(elements, key, hole, elements_kind);
// Copies `length` elements from one backing store to another, converting
// between element kinds where needed. Double-to-object copies pre-fill the
// destination with holes (the element-by-element copy can trigger GC);
// holey sources re-store the hole explicitly when kinds differ. Leftover
// capacity beyond `length` is hole-filled at the end.
2498 void HGraphBuilder::BuildCopyElements(HValue* from_elements,
2499 ElementsKind from_elements_kind,
2500 HValue* to_elements,
2501 ElementsKind to_elements_kind,
2504 bool pre_fill_with_holes =
2505 IsFastDoubleElementsKind(from_elements_kind) &&
2506 IsFastObjectElementsKind(to_elements_kind);
2508 if (pre_fill_with_holes) {
2509 // If the copy might trigger a GC, make sure that the FixedArray is
2510 // pre-initialized with holes to make sure that it's always in a consistent
2512 BuildFillElementsWithHole(to_elements, to_elements_kind,
2513 graph()->GetConstant0(), capacity);
2516 LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
2518 HValue* key = builder.BeginBody(graph()->GetConstant0(), length, Token::LT);
2520 HValue* element = Add<HLoadKeyed>(from_elements, key,
2521 static_cast<HValue*>(NULL),
// Writing a hole into a smi array is invalid; use FAST_HOLEY_ELEMENTS as
// the store kind in that case.
2525 ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
2526 IsFastSmiElementsKind(to_elements_kind))
2527 ? FAST_HOLEY_ELEMENTS : to_elements_kind;
2529 if (IsHoleyElementsKind(from_elements_kind) &&
2530 from_elements_kind != to_elements_kind) {
2531 IfBuilder if_hole(this);
2532 if_hole.If<HCompareHoleAndBranch>(element);
2534 HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
2535 ? Add<HConstant>(FixedDoubleArray::hole_nan_as_double())
2536 : graph()->GetConstantHole();
2537 Add<HStoreKeyed>(to_elements, key, hole_constant, kind);
2539 HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
2540 store->SetFlag(HValue::kAllowUndefinedAsNaN);
2543 HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
2544 store->SetFlag(HValue::kAllowUndefinedAsNaN);
2549 if (!pre_fill_with_holes && length != capacity) {
2550 // Fill unused capacity with the hole.
2551 BuildFillElementsWithHole(to_elements, to_elements_kind,
// Clones a boilerplate array with a constant (small) number of elements:
// allocates the JSArray (plus optional allocation memento), copies the
// JSArray header fields from the boilerplate, then allocates and copies the
// elements store header and contents. `length` here is a compile-time
// constant from the boilerplate.
2557 HValue* HGraphBuilder::BuildCloneShallowArray(HValue* boilerplate,
2558 HValue* allocation_site,
2559 AllocationSiteMode mode,
2562 NoObservableSideEffectsScope no_effects(this);
2564 // All sizes here are multiples of kPointerSize.
2565 int size = JSArray::kSize;
2566 if (mode == TRACK_ALLOCATION_SITE) {
2567 size += AllocationMemento::kSize;
2570 HValue* size_in_bytes = Add<HConstant>(size);
2571 HInstruction* object = Add<HAllocate>(size_in_bytes,
2576 // Copy the JS array part.
2577 for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
// Skip the elements pointer when there are elements to clone; it is set
// below to the freshly allocated copy.
2578 if ((i != JSArray::kElementsOffset) || (length == 0)) {
2579 HObjectAccess access = HObjectAccess::ForJSArrayOffset(i);
2580 Add<HStoreNamedField>(
2581 object, access, Add<HLoadNamedField>(
2582 boilerplate, static_cast<HValue*>(NULL), access));
2586 // Create an allocation site info if requested.
2587 if (mode == TRACK_ALLOCATION_SITE) {
2588 BuildCreateAllocationMemento(
2589 object, Add<HConstant>(JSArray::kSize), allocation_site);
2593 // We have to initialize the elements pointer if allocation folding is
2595 if (!FLAG_use_gvn || !FLAG_use_allocation_folding) {
2596 HConstant* empty_fixed_array = Add<HConstant>(
2597 isolate()->factory()->empty_fixed_array());
2598 Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
2599 empty_fixed_array, INITIALIZING_STORE);
2602 HValue* boilerplate_elements = AddLoadElements(boilerplate);
2603 HValue* object_elements;
2604 if (IsFastDoubleElementsKind(kind)) {
2605 HValue* elems_size = Add<HConstant>(FixedDoubleArray::SizeFor(length));
2606 object_elements = Add<HAllocate>(elems_size, HType::Tagged(),
2607 NOT_TENURED, FIXED_DOUBLE_ARRAY_TYPE);
2609 HValue* elems_size = Add<HConstant>(FixedArray::SizeFor(length));
2610 object_elements = Add<HAllocate>(elems_size, HType::Tagged(),
2611 NOT_TENURED, FIXED_ARRAY_TYPE);
2613 Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
2616 // Copy the elements array header.
2617 for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
2618 HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
2619 Add<HStoreNamedField>(
2620 object_elements, access, Add<HLoadNamedField>(
2621 boilerplate_elements, static_cast<HValue*>(NULL), access));
2624 // Copy the elements array contents.
2625 // TODO(mstarzinger): Teach HGraphBuilder::BuildCopyElements to unfold
2626 // copying loops with constant length up to a given boundary and use this
2627 // helper here instead.
2628 for (int i = 0; i < length; i++) {
2629 HValue* key_constant = Add<HConstant>(i);
2630 HInstruction* value = Add<HLoadKeyed>(boilerplate_elements, key_constant,
2631 static_cast<HValue*>(NULL), kind);
2632 Add<HStoreKeyed>(object_elements, key_constant, value, kind);
// Builds a nil comparison against `value` driven by type feedback: ORs
// together checks for null, undefined and undetectable objects as the
// feedback type allows. If every feedback case was covered, a map check (or
// deopt for too-polymorphic feedback) handles the remaining path. The
// result is captured into `continuation`.
2640 void HGraphBuilder::BuildCompareNil(
2643 HIfContinuation* continuation) {
2644 IfBuilder if_nil(this);
2645 bool some_case_handled = false;
2646 bool some_case_missing = false;
2648 if (type->Maybe(Type::Null())) {
2649 if (some_case_handled) if_nil.Or();
2650 if_nil.If<HCompareObjectEqAndBranch>(value, graph()->GetConstantNull());
2651 some_case_handled = true;
2653 some_case_missing = true;
2656 if (type->Maybe(Type::Undefined())) {
2657 if (some_case_handled) if_nil.Or();
2658 if_nil.If<HCompareObjectEqAndBranch>(value,
2659 graph()->GetConstantUndefined());
2660 some_case_handled = true;
2662 some_case_missing = true;
2665 if (type->Maybe(Type::Undetectable())) {
2666 if (some_case_handled) if_nil.Or();
2667 if_nil.If<HIsUndetectableAndBranch>(value);
2668 some_case_handled = true;
2670 some_case_missing = true;
2673 if (some_case_missing) {
2676 if (type->NumClasses() == 1) {
2677 BuildCheckHeapObject(value);
2678 // For ICs, the map checked below is a sentinel map that gets replaced by
2679 // the monomorphic map when the code is used as a template to generate a
2680 // new IC. For optimized functions, there is no sentinel map, the map
2681 // emitted below is the actual monomorphic map.
2682 Add<HCheckMaps>(value, type->Classes().Current());
2684 if_nil.Deopt("Too many undetectable types");
2688 if_nil.CaptureContinuation(continuation);
// Appends an AllocationMemento directly after `previous_object` inside the
// same (folded) allocation and points it at `allocation_site`. When
// allocation-site pretenuring is enabled, also bumps the site's memento
// creation counter.
2692 void HGraphBuilder::BuildCreateAllocationMemento(
2693 HValue* previous_object,
2694 HValue* previous_object_size,
2695 HValue* allocation_site) {
2696 ASSERT(allocation_site != NULL);
2697 HInnerAllocatedObject* allocation_memento = Add<HInnerAllocatedObject>(
2698 previous_object, previous_object_size);
2699 AddStoreMapConstant(
2700 allocation_memento, isolate()->factory()->allocation_memento_map());
2701 Add<HStoreNamedField>(
2703 HObjectAccess::ForAllocationMementoSite(),
2705 if (FLAG_allocation_site_pretenuring) {
2706 HValue* memento_create_count = Add<HLoadNamedField>(
2707 allocation_site, static_cast<HValue*>(NULL),
2708 HObjectAccess::ForAllocationSiteOffset(
2709 AllocationSite::kPretenureCreateCountOffset));
2710 memento_create_count = AddUncasted<HAdd>(
2711 memento_create_count, graph()->GetConstant1());
2712 // This smi value is reset to zero after every gc, overflow isn't a problem
2713 // since the counter is bounded by the new space size.
2714 memento_create_count->ClearFlag(HValue::kCanOverflow);
2715 HStoreNamedField* store = Add<HStoreNamedField>(
2716 allocation_site, HObjectAccess::ForAllocationSiteOffset(
2717 AllocationSite::kPretenureCreateCountOffset), memento_create_count);
2718 // No write barrier needed to store a smi.
2719 store->SkipWriteBarrier();
// Loads the native context reachable from a closure: closure -> function
// context -> global object -> native context.
2724 HInstruction* HGraphBuilder::BuildGetNativeContext(HValue* closure) {
2725 // Get the global context, then the native context
2726 HInstruction* context =
2727 Add<HLoadNamedField>(closure, static_cast<HValue*>(NULL),
2728 HObjectAccess::ForFunctionContextPointer());
2729 HInstruction* global_object = Add<HLoadNamedField>(
2730 context, static_cast<HValue*>(NULL),
2731 HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
2732 HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(
2733 GlobalObject::kNativeContextOffset);
2734 return Add<HLoadNamedField>(
2735 global_object, static_cast<HValue*>(NULL), access);
// Loads the native context from the current context: context -> global
// object -> native context.
2739 HInstruction* HGraphBuilder::BuildGetNativeContext() {
2740 // Get the global context, then the native context
2741 HValue* global_object = Add<HLoadNamedField>(
2742 context(), static_cast<HValue*>(NULL),
2743 HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
2744 return Add<HLoadNamedField>(
2745 global_object, static_cast<HValue*>(NULL),
2746 HObjectAccess::ForObservableJSObjectOffset(
2747 GlobalObject::kNativeContextOffset));
// Loads the Array constructor function from the native context.
2751 HInstruction* HGraphBuilder::BuildGetArrayFunction() {
2752 HInstruction* native_context = BuildGetNativeContext();
2753 HInstruction* index =
2754 Add<HConstant>(static_cast<int32_t>(Context::ARRAY_FUNCTION_INDEX));
2755 return Add<HLoadKeyed>(
2756 native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
// JSArrayBuilder constructor used with an allocation-site payload. The
// payload, if constant, must be an AllocationSite; tracking mode comes from
// the site's elements kind unless explicitly disabled by override_mode.
2760 HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
2762 HValue* allocation_site_payload,
2763 HValue* constructor_function,
2764 AllocationSiteOverrideMode override_mode) :
2767 allocation_site_payload_(allocation_site_payload),
2768 constructor_function_(constructor_function) {
2769 ASSERT(!allocation_site_payload->IsConstant() ||
2770 HConstant::cast(allocation_site_payload)->handle(
2771 builder_->isolate())->IsAllocationSite());
2772 mode_ = override_mode == DISABLE_ALLOCATION_SITES
2773 ? DONT_TRACK_ALLOCATION_SITE
2774 : AllocationSite::GetMode(kind);
// JSArrayBuilder constructor without a site payload: never tracks
// allocation sites; maps come from the constructor function instead.
2778 HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
2780 HValue* constructor_function) :
2783 mode_(DONT_TRACK_ALLOCATION_SITE),
2784 allocation_site_payload_(NULL),
2785 constructor_function_(constructor_function) {
// Produces the HValue for the array's map. Outside stubs a constant initial
// map is used; with a constructor function whose kind matches the initial
// fast kind the map is loaded off the function; otherwise the map is looked
// up in the native context's JS array maps by elements kind.
2789 HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode() {
2790 if (!builder()->top_info()->IsStub()) {
2791 // A constant map is fine.
2792 Handle<Map> map(builder()->isolate()->get_initial_js_array_map(kind_),
2793 builder()->isolate());
2794 return builder()->Add<HConstant>(map);
2797 if (constructor_function_ != NULL && kind_ == GetInitialFastElementsKind()) {
2798 // No need for a context lookup if the kind_ matches the initial
2799 // map, because we can just load the map in that case.
2800 HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
2801 return builder()->Add<HLoadNamedField>(
2802 constructor_function_, static_cast<HValue*>(NULL), access);
2805 // TODO(mvstanton): we should always have a constructor function if we
2806 // are creating a stub.
2807 HInstruction* native_context = constructor_function_ != NULL
2808 ? builder()->BuildGetNativeContext(constructor_function_)
2809 : builder()->BuildGetNativeContext();
2811 HInstruction* index = builder()->Add<HConstant>(
2812 static_cast<int32_t>(Context::JS_ARRAY_MAPS_INDEX));
2814 HInstruction* map_array = builder()->Add<HLoadKeyed>(
2815 native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
2817 HInstruction* kind_index = builder()->Add<HConstant>(kind_);
2819 return builder()->Add<HLoadKeyed>(
2820 map_array, kind_index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
// Loads the initial map stored on the constructor function.
2824 HValue* HGraphBuilder::JSArrayBuilder::EmitInternalMapCode() {
2825 // Find the map near the constructor function
2826 HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
2827 return builder()->Add<HLoadNamedField>(
2828 constructor_function_, static_cast<HValue*>(NULL), access);
// Computes the total allocation size in bytes for a JSArray with
// `length_node` elements: JSArray header (+ memento when tracking) +
// elements header + length * element size. Uses NewImul so the multiply is
// an integer multiply with no overflow flag.
2832 HValue* HGraphBuilder::JSArrayBuilder::EstablishAllocationSize(
2833 HValue* length_node) {
2834 ASSERT(length_node != NULL);
2836 int base_size = JSArray::kSize;
2837 if (mode_ == TRACK_ALLOCATION_SITE) {
2838 base_size += AllocationMemento::kSize;
2841 STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize);
2842 base_size += FixedArray::kHeaderSize;
2844 HInstruction* elements_size_value =
2845 builder()->Add<HConstant>(elements_size());
2846 HInstruction* mul = HMul::NewImul(builder()->zone(), builder()->context(),
2847 length_node, elements_size_value);
2848 builder()->AddInstruction(mul);
2849 HInstruction* base = builder()->Add<HConstant>(base_size);
2850 HInstruction* total_size = HAdd::New(builder()->zone(), builder()->context(),
2852 total_size->ClearFlag(HValue::kCanOverflow);
2853 builder()->AddInstruction(total_size);
// Computes the constant allocation size for an "empty" JSArray: header
// (+ memento when tracking) plus an elements store of the builder's
// initial capacity.
2858 HValue* HGraphBuilder::JSArrayBuilder::EstablishEmptyArrayAllocationSize() {
2859 int base_size = JSArray::kSize;
2860 if (mode_ == TRACK_ALLOCATION_SITE) {
2861 base_size += AllocationMemento::kSize;
2864 base_size += IsFastDoubleElementsKind(kind_)
2865 ? FixedDoubleArray::SizeFor(initial_capacity())
2866 : FixedArray::SizeFor(initial_capacity());
2868 return builder()->Add<HConstant>(base_size);
// Allocates a JSArray with length 0 but the initial preallocated capacity.
2872 HValue* HGraphBuilder::JSArrayBuilder::AllocateEmptyArray() {
2873 HValue* size_in_bytes = EstablishEmptyArrayAllocationSize();
2874 HConstant* capacity = builder()->Add<HConstant>(initial_capacity());
2875 return AllocateArray(size_in_bytes,
2877 builder()->graph()->GetConstant0());
// Convenience overload: derives the allocation size from `capacity` and
// delegates to the size-based AllocateArray.
2881 HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* capacity,
2882 HValue* length_field,
2883 FillMode fill_mode) {
2884 HValue* size_in_bytes = EstablishAllocationSize(capacity);
2885 return AllocateArray(size_in_bytes, capacity, length_field, fill_mode);
// Allocates and initializes a JSArray of the given byte size: forces smi
// representation of capacity/length up front (so any deopt happens before
// the allocation), allocates, writes the header via BuildJSArrayHeader,
// initializes the elements header, and optionally hole-fills the elements.
2889 HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* size_in_bytes,
2891 HValue* length_field,
2892 FillMode fill_mode) {
2893 // These HForceRepresentations are because we store these as fields in the
2894 // objects we construct, and an int32-to-smi HChange could deopt. Accept
2895 // the deopt possibility now, before allocation occurs.
2897 builder()->AddUncasted<HForceRepresentation>(capacity,
2898 Representation::Smi());
2900 builder()->AddUncasted<HForceRepresentation>(length_field,
2901 Representation::Smi());
2902 // Allocate (dealing with failure appropriately)
2903 HAllocate* new_object = builder()->Add<HAllocate>(size_in_bytes,
2904 HType::JSArray(), NOT_TENURED, JS_ARRAY_TYPE);
2906 // Folded array allocation should be aligned if it has fast double elements.
2907 if (IsFastDoubleElementsKind(kind_)) {
2908 new_object->MakeDoubleAligned();
2911 // Fill in the fields: map, properties, length
2913 if (allocation_site_payload_ == NULL) {
2914 map = EmitInternalMapCode();
2916 map = EmitMapCode();
2918 elements_location_ = builder()->BuildJSArrayHeader(new_object,
2922 allocation_site_payload_,
2925 // Initialize the elements
2926 builder()->BuildInitializeElementsHeader(elements_location_, kind_, capacity);
2928 if (fill_mode == FILL_WITH_HOLE) {
2929 builder()->BuildFillElementsWithHole(elements_location_, kind_,
2930 graph()->GetConstant0(), capacity);
// Stores a constant map into an object's map field.
2937 HStoreNamedField* HGraphBuilder::AddStoreMapConstant(HValue *object,
2939 return Add<HStoreNamedField>(object, HObjectAccess::ForMap(),
2940 Add<HConstant>(map));
// Loads a JS builtin function: context -> global object -> builtins object
// -> function slot for `builtin`.
2944 HValue* HGraphBuilder::AddLoadJSBuiltin(Builtins::JavaScript builtin) {
2945 HValue* global_object = Add<HLoadNamedField>(
2946 context(), static_cast<HValue*>(NULL),
2947 HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
2948 HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(
2949 GlobalObject::kBuiltinsOffset);
2950 HValue* builtins = Add<HLoadNamedField>(
2951 global_object, static_cast<HValue*>(NULL), access);
2952 HObjectAccess function_access = HObjectAccess::ForObservableJSObjectOffset(
2953 JSBuiltinsObject::OffsetOfFunctionWithId(builtin));
2954 return Add<HLoadNamedField>(
2955 builtins, static_cast<HValue*>(NULL), function_access);
// Constructor: sets up the initial (outermost) function state and the AST
// visitor; optionally seeds the source position when position tracking is
// enabled.
2959 HOptimizedGraphBuilder::HOptimizedGraphBuilder(CompilationInfo* info)
2960 : HGraphBuilder(info),
2961 function_state_(NULL),
2962 initial_function_state_(this, info, NORMAL_RETURN, 0),
2966 globals_(10, info->zone()),
2967 inline_bailout_(false),
2968 osr_(new(info->zone()) HOsrBuilder(this)) {
2969 // This is not initialized in the initializer list because the
2970 // constructor for the initial state relies on function_state_ == NULL
2971 // to know it's the initial state.
2972 function_state_= &initial_function_state_;
2973 InitializeAstVisitor(info->zone());
2974 if (FLAG_hydrogen_track_positions) {
2975 SetSourcePosition(info->shared_info()->start_position());
// Joins two control-flow arms into a new block. If either arm is NULL
// (dead), the other is returned (handled on elided lines); otherwise both
// goto a fresh join block tagged with `join_id`.
2980 HBasicBlock* HOptimizedGraphBuilder::CreateJoin(HBasicBlock* first,
2981 HBasicBlock* second,
2982 BailoutId join_id) {
2983 if (first == NULL) {
2985 } else if (second == NULL) {
2988 HBasicBlock* join_block = graph()->CreateBasicBlock();
2989 Goto(first, join_block);
2990 Goto(second, join_block);
2991 join_block->SetJoinId(join_id);
// Merges the loop-body exit into the continue block of an iteration
// statement. Returns the continue block when one exists (the no-continue
// fallback is on an elided line).
2997 HBasicBlock* HOptimizedGraphBuilder::JoinContinue(IterationStatement* statement,
2998 HBasicBlock* exit_block,
2999 HBasicBlock* continue_block) {
3000 if (continue_block != NULL) {
3001 if (exit_block != NULL) Goto(exit_block, continue_block);
3002 continue_block->SetJoinId(statement->ContinueId());
3003 return continue_block;
// Finishes a loop: wires the back edge from body_exit to the loop entry,
// post-processes the loop header, and merges the normal successor into the
// break block (tagged with the statement's exit id) when one exists.
3009 HBasicBlock* HOptimizedGraphBuilder::CreateLoop(IterationStatement* statement,
3010 HBasicBlock* loop_entry,
3011 HBasicBlock* body_exit,
3012 HBasicBlock* loop_successor,
3013 HBasicBlock* break_block) {
3014 if (body_exit != NULL) Goto(body_exit, loop_entry);
3015 loop_entry->PostProcessLoopHeader(statement);
3016 if (break_block != NULL) {
3017 if (loop_successor != NULL) Goto(loop_successor, break_block);
3018 break_block->SetJoinId(statement->ExitId());
3021 return loop_successor;
3025 // Build a new loop header block and set it as the current block.
3026 HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry() {
3027 HBasicBlock* loop_entry = CreateLoopHeaderBlock();
3029 set_current_block(loop_entry);
// Builds a loop entry for `statement`, using an OSR entry when on-stack
// replacement targets this loop.
3034 HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry(
3035 IterationStatement* statement) {
3036 HBasicBlock* loop_entry = osr()->HasOsrEntryAt(statement)
3037 ? osr()->BuildOsrLoopEntry(statement)
// Terminates the block with a control instruction at the given position.
3043 void HBasicBlock::FinishExit(HControlInstruction* instruction,
3044 HSourcePosition position) {
3045 Finish(instruction, position);
// HGraph constructor: for code stubs the start environment is sized from
// the stub's interface descriptor; for regular functions it is built from
// the function's scope and closure (and the function is traced for inlining
// when position tracking is on). Creates and initializes the entry block.
3050 HGraph::HGraph(CompilationInfo* info)
3051 : isolate_(info->isolate()),
3054 blocks_(8, info->zone()),
3055 values_(16, info->zone()),
3057 uint32_instructions_(NULL),
3060 zone_(info->zone()),
3061 is_recursive_(false),
3062 use_optimistic_licm_(false),
3063 depends_on_empty_array_proto_elements_(false),
3064 type_change_checksum_(0),
3065 maximum_environment_size_(0),
3066 no_side_effects_scope_count_(0),
3067 disallow_adding_new_values_(false),
3069 inlined_functions_(5, info->zone()) {
3070 if (info->IsStub()) {
3071 HydrogenCodeStub* stub = info->code_stub();
3072 CodeStubInterfaceDescriptor* descriptor = stub->GetInterfaceDescriptor();
3073 start_environment_ =
3074 new(zone_) HEnvironment(zone_, descriptor->environment_length());
3076 TraceInlinedFunction(info->shared_info(), HSourcePosition::Unknown());
3077 start_environment_ =
3078 new(zone_) HEnvironment(NULL, info->scope(), info->closure(), zone_);
3080 start_environment_->set_ast_id(BailoutId::FunctionEntry());
3081 entry_block_ = CreateBasicBlock();
3082 entry_block_->SetInitialEnvironment(start_environment_);
// Allocates a new basic block in the graph's zone and registers it.
3086 HBasicBlock* HGraph::CreateBasicBlock() {
3087 HBasicBlock* result = new(zone()) HBasicBlock(this);
3088 blocks_.Add(result, zone());
// Finalizes uniqueness information on every instruction. Must run on the
// main thread with heap allocation disallowed (asserted below).
3093 void HGraph::FinalizeUniqueness() {
3094 DisallowHeapAllocation no_gc;
3095 ASSERT(!OptimizingCompilerThread::IsOptimizerThread(isolate()));
3096 for (int i = 0; i < blocks()->length(); ++i) {
3097 for (HInstructionIterator it(blocks()->at(i)); !it.Done(); it.Advance()) {
3098 it.Current()->FinalizeUniqueness();
// Records an inlined function for position tracking and returns its id.
// Deduplicates by shared-function-info; when a function is first seen its
// source is dumped to the code tracer, and non-root inlines log an INLINE
// line with the inlining position. No-op unless
// FLAG_hydrogen_track_positions is set.
3104 int HGraph::TraceInlinedFunction(
3105 Handle<SharedFunctionInfo> shared,
3106 HSourcePosition position) {
3107 if (!FLAG_hydrogen_track_positions) {
// Reuse the id of an already-registered identical shared function info.
3112 for (; id < inlined_functions_.length(); id++) {
3113 if (inlined_functions_[id].shared().is_identical_to(shared)) {
3118 if (id == inlined_functions_.length()) {
3119 inlined_functions_.Add(InlinedFunctionInfo(shared), zone());
3121 if (!shared->script()->IsUndefined()) {
3122 Handle<Script> script(Script::cast(shared->script()));
3123 if (!script->source()->IsUndefined()) {
3124 CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
3125 PrintF(tracing_scope.file(),
3126 "--- FUNCTION SOURCE (%s) id{%d,%d} ---\n",
3127 shared->DebugName()->ToCString().get(),
3128 info()->optimization_id(),
3132 ConsStringIteratorOp op;
3133 StringCharacterStream stream(String::cast(script->source()),
3135 shared->start_position());
3136 // fun->end_position() points to the last character in the stream. We
3137 // need to compensate by adding one to calculate the length.
3139 shared->end_position() - shared->start_position() + 1;
3140 for (int i = 0; i < source_len; i++) {
3141 if (stream.HasMore()) {
3142 PrintF(tracing_scope.file(), "%c", stream.GetNext());
3147 PrintF(tracing_scope.file(), "\n--- END ---\n");
3152 int inline_id = next_inline_id_++;
// inline_id 0 is the outermost function; only real inlines are logged.
3154 if (inline_id != 0) {
3155 CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
3156 PrintF(tracing_scope.file(), "INLINE (%s) id{%d,%d} AS %d AT ",
3157 shared->DebugName()->ToCString().get(),
3158 info()->optimization_id(),
3161 position.PrintTo(tracing_scope.file());
3162 PrintF(tracing_scope.file(), "\n");
// Maps a tracked (inlining id, offset) position back to an absolute script
// position by adding the recorded start position of the inlined function.
// Falls through to a default (outside this view) when tracking is off or the
// position is unknown.
3169 int HGraph::SourcePositionToScriptPosition(HSourcePosition pos) {
3170 if (!FLAG_hydrogen_track_positions || pos.IsUnknown()) {
3174 return inlined_functions_[pos.inlining_id()].start_position() +
3179 // Block ordering was implemented with two mutually recursive methods,
3180 // HGraph::Postorder and HGraph::PostorderLoopBlocks.
3181 // The recursion could lead to stack overflow so the algorithm has been
3182 // implemented iteratively.
3183 // At a high level the algorithm looks like this:
3185 // Postorder(block, loop_header) : {
3186 // if (block has already been visited or is of another loop) return;
3187 // mark block as visited;
3188 // if (block is a loop header) {
3189 // VisitLoopMembers(block, loop_header);
3190 // VisitSuccessorsOfLoopHeader(block);
3192 // VisitSuccessors(block)
3194 // put block in result list;
3197 // VisitLoopMembers(block, outer_loop_header) {
3198 // foreach (block b in block loop members) {
3199 // VisitSuccessorsOfLoopMember(b, outer_loop_header);
3200 // if (b is loop header) VisitLoopMembers(b);
3204 // VisitSuccessorsOfLoopMember(block, outer_loop_header) {
3205 // foreach (block b in block successors) Postorder(b, outer_loop_header)
3208 // VisitSuccessorsOfLoopHeader(block) {
3209 // foreach (block b in block successors) Postorder(b, block)
3212 // VisitSuccessors(block, loop_header) {
3213 // foreach (block b in block successors) Postorder(b, loop_header)
3216 // The ordering is started by calling Postorder(entry, NULL).
3218 // Each instance of PostorderProcessor represents the "stack frame" of the
3219 // recursion, and particularly keeps the state of the loop (iteration) of the
3220 // "Visit..." function it represents.
3221 // To recycle memory we keep all the frames in a double linked list but
3222 // this means that we cannot use constructors to initialize the frames.
// One "stack frame" of the iterative postorder walk described in the comment
// above. Frames form a doubly linked list (father_/child_) so they can be
// recycled by Push() instead of reallocated; kind_ records which "Visit..."
// loop of the recursive formulation this frame is currently executing.
3224 class PostorderProcessor : public ZoneObject {
3226 // Back link (towards the stack bottom).
3227 PostorderProcessor* parent() {return father_; }
3228 // Forward link (towards the stack top).
3229 PostorderProcessor* child() {return child_; }
3230 HBasicBlock* block() { return block_; }
3231 HLoopInformation* loop() { return loop_; }
3232 HBasicBlock* loop_header() { return loop_header_; }
// Creates the bottom frame and starts the walk at |block| (no enclosing loop).
3234 static PostorderProcessor* CreateEntryProcessor(Zone* zone,
3236 BitVector* visited) {
3237 PostorderProcessor* result = new(zone) PostorderProcessor(NULL);
3238 return result->SetupSuccessors(zone, block, NULL, visited);
// Performs one step of the walk; returns the frame to continue from,
// backtracking (popping frames) when the forward step yields nothing.
3241 PostorderProcessor* PerformStep(Zone* zone,
3243 ZoneList<HBasicBlock*>* order) {
3244 PostorderProcessor* next =
3245 PerformNonBacktrackingStep(zone, visited, order);
3249 return Backtrack(zone, visited, order);
3254 explicit PostorderProcessor(PostorderProcessor* father)
3255 : father_(father), child_(NULL), successor_iterator(NULL) { }
3257 // Each enum value states the cycle whose state is kept by this instance.
3261 SUCCESSORS_OF_LOOP_HEADER,
3263 SUCCESSORS_OF_LOOP_MEMBER
3266 // Each "Setup..." method is like a constructor for a cycle state.
// Begins visiting |block|'s successors; skips blocks already visited or
// belonging to a different loop than |loop_header|.
3267 PostorderProcessor* SetupSuccessors(Zone* zone,
3269 HBasicBlock* loop_header,
3270 BitVector* visited) {
3271 if (block == NULL || visited->Contains(block->block_id()) ||
3272 block->parent_loop_header() != loop_header) {
3276 loop_header_ = NULL;
3281 visited->Add(block->block_id());
// A loop header gets a nested LOOP_MEMBERS frame pushed on top of it.
3283 if (block->IsLoopHeader()) {
3284 kind_ = SUCCESSORS_OF_LOOP_HEADER;
3285 loop_header_ = block;
3286 InitializeSuccessors();
3287 PostorderProcessor* result = Push(zone);
3288 return result->SetupLoopMembers(zone, block, block->loop_information(),
3291 ASSERT(block->IsFinished());
3293 loop_header_ = loop_header;
3294 InitializeSuccessors();
3300 PostorderProcessor* SetupLoopMembers(Zone* zone,
3302 HLoopInformation* loop,
3303 HBasicBlock* loop_header) {
3304 kind_ = LOOP_MEMBERS;
3307 loop_header_ = loop_header;
3308 InitializeLoopMembers();
3312 PostorderProcessor* SetupSuccessorsOfLoopMember(
3314 HLoopInformation* loop,
3315 HBasicBlock* loop_header) {
3316 kind_ = SUCCESSORS_OF_LOOP_MEMBER;
3319 loop_header_ = loop_header;
3320 InitializeSuccessors();
3324 // This method "allocates" a new stack frame.
// Reuses an existing child frame when one was allocated on a previous push.
3325 PostorderProcessor* Push(Zone* zone) {
3326 if (child_ == NULL) {
3327 child_ = new(zone) PostorderProcessor(this);
// Emits block_ into the postorder result list, asserting that all of its
// non-loop-header successors were already emitted.
3332 void ClosePostorder(ZoneList<HBasicBlock*>* order, Zone* zone) {
3333 ASSERT(block_->end()->FirstSuccessor() == NULL ||
3334 order->Contains(block_->end()->FirstSuccessor()) ||
3335 block_->end()->FirstSuccessor()->IsLoopHeader());
3336 ASSERT(block_->end()->SecondSuccessor() == NULL ||
3337 order->Contains(block_->end()->SecondSuccessor()) ||
3338 block_->end()->SecondSuccessor()->IsLoopHeader());
3339 order->Add(block_, zone);
3342 // This method is the basic block to walk up the stack.
3343 PostorderProcessor* Pop(Zone* zone,
3345 ZoneList<HBasicBlock*>* order) {
3348 case SUCCESSORS_OF_LOOP_HEADER:
3349 ClosePostorder(order, zone);
3353 case SUCCESSORS_OF_LOOP_MEMBER:
3354 if (block()->IsLoopHeader() && block() != loop_->loop_header()) {
3355 // In this case we need to perform a LOOP_MEMBERS cycle so we
3356 // initialize it and return this instead of father.
3357 return SetupLoopMembers(zone, block(),
3358 block()->loop_information(), loop_header_);
3369 // Walks up the stack.
3370 PostorderProcessor* Backtrack(Zone* zone,
3372 ZoneList<HBasicBlock*>* order) {
3373 PostorderProcessor* parent = Pop(zone, visited, order);
3374 while (parent != NULL) {
3375 PostorderProcessor* next =
3376 parent->PerformNonBacktrackingStep(zone, visited, order);
3380 parent = parent->Pop(zone, visited, order);
// Advances the current frame's iteration; returns the new top frame, or a
// value signalling "nothing left" so the caller backtracks (dispatch on
// kind_ — the switch header lies outside this view).
3386 PostorderProcessor* PerformNonBacktrackingStep(
3389 ZoneList<HBasicBlock*>* order) {
3390 HBasicBlock* next_block;
3393 next_block = AdvanceSuccessors();
3394 if (next_block != NULL) {
3395 PostorderProcessor* result = Push(zone);
3396 return result->SetupSuccessors(zone, next_block,
3397 loop_header_, visited);
3400 case SUCCESSORS_OF_LOOP_HEADER:
3401 next_block = AdvanceSuccessors();
3402 if (next_block != NULL) {
3403 PostorderProcessor* result = Push(zone);
3404 return result->SetupSuccessors(zone, next_block,
3409 next_block = AdvanceLoopMembers();
3410 if (next_block != NULL) {
3411 PostorderProcessor* result = Push(zone);
3412 return result->SetupSuccessorsOfLoopMember(next_block,
3413 loop_, loop_header_);
3416 case SUCCESSORS_OF_LOOP_MEMBER:
3417 next_block = AdvanceSuccessors();
3418 if (next_block != NULL) {
3419 PostorderProcessor* result = Push(zone);
3420 return result->SetupSuccessors(zone, next_block,
3421 loop_header_, visited);
3430 // The following two methods implement a "foreach b in successors" cycle.
3431 void InitializeSuccessors() {
3434 successor_iterator = HSuccessorIterator(block_->end());
3437 HBasicBlock* AdvanceSuccessors() {
3438 if (!successor_iterator.Done()) {
3439 HBasicBlock* result = successor_iterator.Current();
3440 successor_iterator.Advance();
3446 // The following two methods implement a "foreach b in loop members" cycle.
3447 void InitializeLoopMembers() {
3449 loop_length = loop_->blocks()->length();
3452 HBasicBlock* AdvanceLoopMembers() {
3453 if (loop_index < loop_length) {
3454 HBasicBlock* result = loop_->blocks()->at(loop_index);
3463 PostorderProcessor* father_;
3464 PostorderProcessor* child_;
3465 HLoopInformation* loop_;
3466 HBasicBlock* block_;
3467 HBasicBlock* loop_header_;
3470 HSuccessorIterator successor_iterator;
// Reorders blocks_ into reverse postorder using the iterative
// PostorderProcessor walk, then reassigns sequential block ids. The walk
// appends blocks in postorder, hence the reversed copy-back loop.
3474 void HGraph::OrderBlocks() {
3475 CompilationPhase phase("H_Block ordering", info());
3476 BitVector visited(blocks_.length(), zone());
3478 ZoneList<HBasicBlock*> reverse_result(8, zone());
3479 HBasicBlock* start = blocks_[0];
3480 PostorderProcessor* postorder =
3481 PostorderProcessor::CreateEntryProcessor(zone(), start, &visited);
3482 while (postorder != NULL) {
3483 postorder = postorder->PerformStep(zone(), &visited, &reverse_result);
3487 for (int i = reverse_result.length() - 1; i >= 0; --i) {
3488 HBasicBlock* b = reverse_result[i];
3489 blocks_.Add(b, zone());
3490 b->set_block_id(index++);
// Computes the dominator of every block by folding each predecessor into a
// common-dominator computation; relies on blocks_ being in reverse postorder
// (predecessors processed before, except for loop back edges).
3495 void HGraph::AssignDominators() {
3496 HPhase phase("H_Assign dominators", this);
3497 for (int i = 0; i < blocks_.length(); ++i) {
3498 HBasicBlock* block = blocks_[i];
3499 if (block->IsLoopHeader()) {
3500 // Only the first predecessor of a loop header is from outside the loop.
3501 // All others are back edges, and thus cannot dominate the loop header.
3502 block->AssignCommonDominator(block->predecessors()->first());
3503 block->AssignLoopSuccessorDominators();
3505 for (int j = blocks_[i]->predecessors()->length() - 1; j >= 0; --j) {
3506 blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j));
// Returns false if any phi in the graph carries the kIsArguments flag —
// phi uses of the arguments object are not supported by the optimizer.
3513 bool HGraph::CheckArgumentsPhiUses() {
3514 int block_count = blocks_.length();
3515 for (int i = 0; i < block_count; ++i) {
3516 for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
3517 HPhi* phi = blocks_[i]->phis()->at(j);
3518 // We don't support phi uses of arguments for now.
3519 if (phi->CheckFlag(HValue::kIsArguments)) return false;
// Returns false if any phi operand is the hole constant, i.e. a read of an
// uninitialized const variable flows into a phi.
3526 bool HGraph::CheckConstPhiUses() {
3527 int block_count = blocks_.length();
3528 for (int i = 0; i < block_count; ++i) {
3529 for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
3530 HPhi* phi = blocks_[i]->phis()->at(j);
3531 // Check for the hole value (from an uninitialized const).
3532 for (int k = 0; k < phi->OperandCount(); k++) {
3533 if (phi->OperandAt(k) == GetConstantHole()) return false;
// Gathers every phi of every block into the flat phi_list_ for phases that
// iterate over all phis at once.
3541 void HGraph::CollectPhis() {
3542 int block_count = blocks_.length();
3543 phi_list_ = new(zone()) ZoneList<HPhi*>(block_count, zone());
3544 for (int i = 0; i < block_count; ++i) {
3545 for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
3546 HPhi* phi = blocks_[i]->phis()->at(j);
3547 phi_list_->Add(phi, zone());
3553 // Implementation of utility class to encapsulate the translation state for
3554 // a (possibly inlined) function.
// Pushes a new translation state onto the builder's function-state stack.
// For inlined functions (outer_ != NULL) it prepares the return targets:
// either a TestContext with fresh if_true/if_false blocks (when inlined into
// a test context) or a single function_return_ block.
3555 FunctionState::FunctionState(HOptimizedGraphBuilder* owner,
3556 CompilationInfo* info,
3557 InliningKind inlining_kind,
3560 compilation_info_(info),
3561 call_context_(NULL),
3562 inlining_kind_(inlining_kind),
3563 function_return_(NULL),
3564 test_context_(NULL),
3566 arguments_object_(NULL),
3567 arguments_elements_(NULL),
3568 inlining_id_(inlining_id),
3569 outer_source_position_(HSourcePosition::Unknown()),
3570 outer_(owner->function_state()) {
3571 if (outer_ != NULL) {
3572 // State for an inline function.
3573 if (owner->ast_context()->IsTest()) {
3574 HBasicBlock* if_true = owner->graph()->CreateBasicBlock();
3575 HBasicBlock* if_false = owner->graph()->CreateBasicBlock();
3576 if_true->MarkAsInlineReturnTarget(owner->current_block());
3577 if_false->MarkAsInlineReturnTarget(owner->current_block());
3578 TestContext* outer_test_context = TestContext::cast(owner->ast_context());
3579 Expression* cond = outer_test_context->condition();
3580 // The AstContext constructor pushed on the context stack. This newed
3581 // instance is the reason that AstContext can't be BASE_EMBEDDED.
3582 test_context_ = new TestContext(owner, cond, if_true, if_false);
3584 function_return_ = owner->graph()->CreateBasicBlock();
3585 function_return()->MarkAsInlineReturnTarget(owner->current_block());
3587 // Set this after possibly allocating a new TestContext above.
3588 call_context_ = owner->ast_context();
3591 // Push on the state stack.
3592 owner->set_function_state(this);
// Remember the caller's source position so the destructor can restore it.
3594 if (FLAG_hydrogen_track_positions) {
3595 outer_source_position_ = owner->source_position();
3596 owner->EnterInlinedSource(
3597 info->shared_info()->start_position(),
3599 owner->SetSourcePosition(info->shared_info()->start_position());
// Pops this state off the builder's function-state stack, frees the owned
// TestContext (if any) and restores the caller's source position.
3604 FunctionState::~FunctionState() {
3605 delete test_context_;
3606 owner_->set_function_state(outer_);
3608 if (FLAG_hydrogen_track_positions) {
3609 owner_->set_source_position(outer_source_position_);
3610 owner_->EnterInlinedSource(
3611 outer_->compilation_info()->shared_info()->start_position(),
3612 outer_->inlining_id());
3617 // Implementation of utility classes to represent an expression's context in
// the AST.
// Pushes this context onto the builder's AST-context stack; records the
// environment length so the destructors of the subclasses can assert the
// expected stack effect (note: the recording lines are likely debug-only —
// the surrounding preprocessor lines are outside this view).
3619 AstContext::AstContext(HOptimizedGraphBuilder* owner, Expression::Context kind)
3622 outer_(owner->ast_context()),
3623 for_typeof_(false) {
3624 owner->set_ast_context(this); // Push.
3626 ASSERT(owner->environment()->frame_type() == JS_FUNCTION);
3627 original_length_ = owner->environment()->length();
// Pops this context off the builder's AST-context stack.
3632 AstContext::~AstContext() {
3633 owner_->set_ast_context(outer_); // Pop.
// An effect context must leave the environment length unchanged (no value
// pushed), unless we bailed out or the current block died.
3637 EffectContext::~EffectContext() {
3638 ASSERT(owner()->HasStackOverflow() ||
3639 owner()->current_block() == NULL ||
3640 (owner()->environment()->length() == original_length_ &&
3641 owner()->environment()->frame_type() == JS_FUNCTION));
// A value context must leave exactly one extra value on the environment,
// unless we bailed out or the current block died.
3645 ValueContext::~ValueContext() {
3646 ASSERT(owner()->HasStackOverflow() ||
3647 owner()->current_block() == NULL ||
3648 (owner()->environment()->length() == original_length_ + 1 &&
3649 owner()->environment()->frame_type() == JS_FUNCTION));
// In an effect context the expression's value is not needed.
3653 void EffectContext::ReturnValue(HValue* value) {
3654 // The value is simply ignored.
// Pushes the expression's value onto the environment, bailing out if an
// arguments-object value reaches a context that disallows it.
3658 void ValueContext::ReturnValue(HValue* value) {
3659 // The value is tracked in the bailout environment, and communicated
3660 // through the environment as the result of the expression.
3661 if (!arguments_allowed() && value->CheckFlag(HValue::kIsArguments)) {
3662 owner()->Bailout(kBadValueContextForArgumentsValue);
3664 owner()->Push(value);
3668 void TestContext::ReturnValue(HValue* value) {
// Adds the instruction for its side effects only, followed by a removable
// simulate when it has observable side effects.
3673 void EffectContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
3674 ASSERT(!instr->IsControlInstruction());
3675 owner()->AddInstruction(instr);
3676 if (instr->HasObservableSideEffects()) {
3677 owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
// A control instruction in effect context: both outcomes are irrelevant, so
// branch into two empty blocks and immediately rejoin.
3682 void EffectContext::ReturnControl(HControlInstruction* instr,
3684 ASSERT(!instr->HasObservableSideEffects());
3685 HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
3686 HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
3687 instr->SetSuccessorAt(0, empty_true);
3688 instr->SetSuccessorAt(1, empty_false);
3689 owner()->FinishCurrentBlock(instr);
3690 HBasicBlock* join = owner()->CreateJoin(empty_true, empty_false, ast_id);
3691 owner()->set_current_block(join);
// Resumes after an if-continuation: when only one side is reachable continue
// there directly, otherwise join both branches.
3695 void EffectContext::ReturnContinuation(HIfContinuation* continuation,
3697 HBasicBlock* true_branch = NULL;
3698 HBasicBlock* false_branch = NULL;
3699 continuation->Continue(&true_branch, &false_branch);
3700 if (!continuation->IsTrueReachable()) {
3701 owner()->set_current_block(false_branch);
3702 } else if (!continuation->IsFalseReachable()) {
3703 owner()->set_current_block(true_branch);
3705 HBasicBlock* join = owner()->CreateJoin(true_branch, false_branch, ast_id);
3706 owner()->set_current_block(join);
// Adds the instruction and pushes it as the expression's value; a removable
// simulate follows when it has observable side effects.
3711 void ValueContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
3712 ASSERT(!instr->IsControlInstruction());
3713 if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
3714 return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
3716 owner()->AddInstruction(instr);
3717 owner()->Push(instr);
3718 if (instr->HasObservableSideEffects()) {
3719 owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
// Materializes a control instruction's outcome as a value: branch into two
// blocks that push the true/false constants, then join and continue with the
// pushed boolean on the environment.
3724 void ValueContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
3725 ASSERT(!instr->HasObservableSideEffects());
3726 if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
3727 return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
3729 HBasicBlock* materialize_false = owner()->graph()->CreateBasicBlock();
3730 HBasicBlock* materialize_true = owner()->graph()->CreateBasicBlock();
3731 instr->SetSuccessorAt(0, materialize_true);
3732 instr->SetSuccessorAt(1, materialize_false);
3733 owner()->FinishCurrentBlock(instr);
3734 owner()->set_current_block(materialize_true);
3735 owner()->Push(owner()->graph()->GetConstantTrue());
3736 owner()->set_current_block(materialize_false);
3737 owner()->Push(owner()->graph()->GetConstantFalse());
3739 owner()->CreateJoin(materialize_true, materialize_false, ast_id);
3740 owner()->set_current_block(join);
// Materializes an if-continuation as a value: each reachable branch pushes
// the corresponding boolean constant; join only when both are reachable.
3744 void ValueContext::ReturnContinuation(HIfContinuation* continuation,
3746 HBasicBlock* materialize_true = NULL;
3747 HBasicBlock* materialize_false = NULL;
3748 continuation->Continue(&materialize_true, &materialize_false);
3749 if (continuation->IsTrueReachable()) {
3750 owner()->set_current_block(materialize_true);
3751 owner()->Push(owner()->graph()->GetConstantTrue());
// NOTE(review): re-setting the block that is already current looks
// suspicious; upstream V8 reads back the block here instead:
// materialize_true = owner()->current_block(); — confirm against history.
3752 owner()->set_current_block(materialize_true);
3754 if (continuation->IsFalseReachable()) {
3755 owner()->set_current_block(materialize_false);
3756 owner()->Push(owner()->graph()->GetConstantFalse());
// NOTE(review): same concern as above — upstream assigns
// materialize_false = owner()->current_block(); here — confirm.
3757 owner()->set_current_block(materialize_false);
3759 if (continuation->TrueAndFalseReachable()) {
3761 owner()->CreateJoin(materialize_true, materialize_false, ast_id);
3762 owner()->set_current_block(join);
// Adds the instruction and then branches on its value (branching logic
// continues outside this view).
3767 void TestContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
3768 ASSERT(!instr->IsControlInstruction());
3769 HOptimizedGraphBuilder* builder = owner();
3770 builder->AddInstruction(instr);
3771 // We expect a simulate after every expression with side effects, though
3772 // this one isn't actually needed (and wouldn't work if it were targeted).
3773 if (instr->HasObservableSideEffects()) {
3774 builder->Push(instr);
3775 builder->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
// Wires a control instruction's two successors to the context's if_true /
// if_false targets through empty blocks (keeping the graph edge-split).
3782 void TestContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
3783 ASSERT(!instr->HasObservableSideEffects());
3784 HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
3785 HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
3786 instr->SetSuccessorAt(0, empty_true);
3787 instr->SetSuccessorAt(1, empty_false);
3788 owner()->FinishCurrentBlock(instr);
3789 owner()->Goto(empty_true, if_true(), owner()->function_state());
3790 owner()->Goto(empty_false, if_false(), owner()->function_state());
3791 owner()->set_current_block(NULL);
// Routes each reachable branch of an if-continuation to the context's
// if_true / if_false targets; control does not fall through.
3795 void TestContext::ReturnContinuation(HIfContinuation* continuation,
3797 HBasicBlock* true_branch = NULL;
3798 HBasicBlock* false_branch = NULL;
3799 continuation->Continue(&true_branch, &false_branch);
3800 if (continuation->IsTrueReachable()) {
3801 owner()->Goto(true_branch, if_true(), owner()->function_state());
3803 if (continuation->IsFalseReachable()) {
3804 owner()->Goto(false_branch, if_false(), owner()->function_state());
3806 owner()->set_current_block(NULL);
// Converts |value| into an HBranch using the condition's recorded
// to-boolean type feedback; arguments-object values cannot be tested.
3810 void TestContext::BuildBranch(HValue* value) {
3811 // We expect the graph to be in edge-split form: there is no edge that
3812 // connects a branch node to a join node. We conservatively ensure that
3813 // property by always adding an empty block on the outgoing edges of this
3815 HOptimizedGraphBuilder* builder = owner();
3816 if (value != NULL && value->CheckFlag(HValue::kIsArguments)) {
3817 builder->Bailout(kArgumentsObjectValueInATestContext);
3819 ToBooleanStub::Types expected(condition()->to_boolean_types());
3820 ReturnControl(owner()->New<HBranch>(value, expected), BailoutId::None());
3824 // HOptimizedGraphBuilder infrastructure for bailing out and checking bailouts.
// Each macro evaluates |call| and returns early from the enclosing visitor
// when a bailout (stack overflow) occurred — and, for the ALIVE variants,
// also when the current block died (control flow ended).
3825 #define CHECK_BAILOUT(call) \
3828 if (HasStackOverflow()) return; \
3832 #define CHECK_ALIVE(call) \
3835 if (HasStackOverflow() || current_block() == NULL) return; \
3839 #define CHECK_ALIVE_OR_RETURN(call, value) \
3842 if (HasStackOverflow() || current_block() == NULL) return value; \
// Records the bailout reason on the compilation info; the CHECK_* macros
// above detect the bailout via the stack-overflow flag.
3846 void HOptimizedGraphBuilder::Bailout(BailoutReason reason) {
3847 current_info()->set_bailout_reason(reason);
// Visits |expr| for its side effects only (value discarded).
3852 void HOptimizedGraphBuilder::VisitForEffect(Expression* expr) {
3853 EffectContext for_effect(this);
// Visits |expr| and leaves its value on the environment; |flag| controls
// whether an arguments-object value is acceptable.
3858 void HOptimizedGraphBuilder::VisitForValue(Expression* expr,
3859 ArgumentsAllowedFlag flag) {
3860 ValueContext for_value(this, flag);
// Like VisitForValue, but marks the context as a typeof operand.
3865 void HOptimizedGraphBuilder::VisitForTypeOf(Expression* expr) {
3866 ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
3867 for_value.set_for_typeof(true);
// Visits |expr| as a branch condition targeting the given blocks.
3872 void HOptimizedGraphBuilder::VisitForControl(Expression* expr,
3873 HBasicBlock* true_block,
3874 HBasicBlock* false_block) {
3875 TestContext for_test(this, expr, true_block, false_block);
// Visits every expression for value, stopping early on bailout/dead block.
3880 void HOptimizedGraphBuilder::VisitExpressions(
3881 ZoneList<Expression*>* exprs) {
3882 for (int i = 0; i < exprs->length(); ++i) {
3883 CHECK_ALIVE(VisitForValue(exprs->at(i)));
// Translates the function's AST into the Hydrogen graph. Bails out on
// unsupported constructs (generators, illegal redeclarations, eval), builds
// the body entry block, visits declarations and statements, appends an
// implicit "return undefined" when control can fall off the end, and finally
// records the type-feedback checksum used to throttle optimistic LICM.
// Returns false when graph building failed (stack-overflow/bailout flag set).
3888 bool HOptimizedGraphBuilder::BuildGraph() {
3889 if (current_info()->function()->is_generator()) {
3890 Bailout(kFunctionIsAGenerator);
3893 Scope* scope = current_info()->scope();
3894 if (scope->HasIllegalRedeclaration()) {
3895 Bailout(kFunctionWithIllegalRedeclaration);
3898 if (scope->calls_eval()) {
3899 Bailout(kFunctionCallsEval);
3904 // Add an edge to the body entry. This is warty: the graph's start
3905 // environment will be used by the Lithium translation as the initial
3906 // environment on graph entry, but it has now been mutated by the
3907 // Hydrogen translation of the instructions in the start block. This
3908 // environment uses values which have not been defined yet. These
3909 // Hydrogen instructions will then be replayed by the Lithium
3910 // translation, so they cannot have an environment effect. The edge to
3911 // the body's entry block (along with some special logic for the start
3912 // block in HInstruction::InsertAfter) seals the start block from
3913 // getting unwanted instructions inserted.
3915 // TODO(kmillikin): Fix this. Stop mutating the initial environment.
3916 // Make the Hydrogen instructions in the initial block into Hydrogen
3917 // values (but not instructions), present in the initial environment and
3918 // not replayed by the Lithium translation.
3919 HEnvironment* initial_env = environment()->CopyWithoutHistory();
3920 HBasicBlock* body_entry = CreateBasicBlock(initial_env);
3922 body_entry->SetJoinId(BailoutId::FunctionEntry());
3923 set_current_block(body_entry);
3925 // Handle implicit declaration of the function name in named function
3926 // expressions before other declarations.
3927 if (scope->is_function_scope() && scope->function() != NULL) {
3928 VisitVariableDeclaration(scope->function());
3930 VisitDeclarations(scope->declarations());
3931 Add<HSimulate>(BailoutId::Declarations());
3933 Add<HStackCheck>(HStackCheck::kFunctionEntry);
3935 VisitStatements(current_info()->function()->body());
3936 if (HasStackOverflow()) return false;
3938 if (current_block() != NULL) {
3939 Add<HReturn>(graph()->GetConstantUndefined());
3940 set_current_block(NULL);
3943 // If the checksum of the number of type info changes is the same as the
3944 // last time this function was compiled, then this recompile is likely not
3945 // due to missing/inadequate type feedback, but rather too aggressive
3946 // optimization. Disable optimistic LICM in that case.
3947 Handle<Code> unoptimized_code(current_info()->shared_info()->code());
3948 ASSERT(unoptimized_code->kind() == Code::FUNCTION);
3949 Handle<TypeFeedbackInfo> type_info(
3950 TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
3951 int checksum = type_info->own_type_change_checksum();
3952 int composite_checksum = graph()->update_type_change_checksum(checksum);
3953 graph()->set_use_optimistic_licm(
3954 !type_info->matches_inlined_type_change_checksum(composite_checksum));
3955 type_info->set_inlined_type_change_checksum(composite_checksum);
3957 // Perform any necessary OSR-specific cleanups or changes to the graph.
3958 osr()->FinishGraph();
// Runs the full Hydrogen optimization pipeline in its required order
// (phi checks, representation/type inference, canonicalization, GVN, range
// analysis, bounds-check passes, ...). Returns false with *bailout_reason
// set when an unsupported phi use is found; individual passes are gated by
// their command-line flags.
3964 bool HGraph::Optimize(BailoutReason* bailout_reason) {
3968 // We need to create a HConstant "zero" now so that GVN will fold every
3969 // zero-valued constant in the graph together.
3970 // The constant is needed to make idef-based bounds check work: the pass
3971 // evaluates relations with "zero" and that zero cannot be created after GVN.
3975 // Do a full verify after building the graph and computing dominators.
3979 if (FLAG_analyze_environment_liveness && maximum_environment_size() != 0) {
3980 Run<HEnvironmentLivenessAnalysisPhase>();
3983 if (!CheckConstPhiUses()) {
3984 *bailout_reason = kUnsupportedPhiUseOfConstVariable;
3987 Run<HRedundantPhiEliminationPhase>();
3988 if (!CheckArgumentsPhiUses()) {
3989 *bailout_reason = kUnsupportedPhiUseOfArguments;
3993 // Find and mark unreachable code to simplify optimizations, especially gvn,
3994 // where unreachable code could unnecessarily defeat LICM.
3995 Run<HMarkUnreachableBlocksPhase>();
3997 if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();
3998 if (FLAG_use_escape_analysis) Run<HEscapeAnalysisPhase>();
4000 if (FLAG_load_elimination) Run<HLoadEliminationPhase>();
4004 if (has_osr()) osr()->FinishOsrValues();
4006 Run<HInferRepresentationPhase>();
4008 // Remove HSimulate instructions that have turned out not to be needed
4009 // after all by folding them into the following HSimulate.
4010 // This must happen after inferring representations.
4011 Run<HMergeRemovableSimulatesPhase>();
4013 Run<HMarkDeoptimizeOnUndefinedPhase>();
4014 Run<HRepresentationChangesPhase>();
4016 Run<HInferTypesPhase>();
4018 // Must be performed before canonicalization to ensure that Canonicalize
4019 // will not remove semantically meaningful ToInt32 operations e.g. BIT_OR with
4021 if (FLAG_opt_safe_uint32_operations) Run<HUint32AnalysisPhase>();
4023 if (FLAG_use_canonicalizing) Run<HCanonicalizePhase>();
4025 if (FLAG_use_gvn) Run<HGlobalValueNumberingPhase>();
4027 if (FLAG_check_elimination) Run<HCheckEliminationPhase>();
4029 if (FLAG_store_elimination) Run<HStoreEliminationPhase>();
4031 Run<HRangeAnalysisPhase>();
4033 Run<HComputeChangeUndefinedToNaN>();
4035 // Eliminate redundant stack checks on backwards branches.
4036 Run<HStackCheckEliminationPhase>();
4038 if (FLAG_array_bounds_checks_elimination) Run<HBoundsCheckEliminationPhase>();
4039 if (FLAG_array_bounds_checks_hoisting) Run<HBoundsCheckHoistingPhase>();
4040 if (FLAG_array_index_dehoisting) Run<HDehoistIndexComputationsPhase>();
4041 if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();
4043 RestoreActualValues();
4045 // Find unreachable code a second time, GVN and other optimizations may have
4046 // made blocks unreachable that were previously reachable.
4047 Run<HMarkUnreachableBlocksPhase>();
// Replaces every instruction that has been redefined (ActualValue() != self)
// with its actual value: dead placeholders and purely informative
// definitions are deleted, others just have their uses redirected.
4053 void HGraph::RestoreActualValues() {
4054 HPhase phase("H_Restore actual values", this);
4056 for (int block_index = 0; block_index < blocks()->length(); block_index++) {
4057 HBasicBlock* block = blocks()->at(block_index);
// Phis are never redefinitions; assert rather than process them.
4060 for (int i = 0; i < block->phis()->length(); i++) {
4061 HPhi* phi = block->phis()->at(i);
4062 ASSERT(phi->ActualValue() == phi);
4066 for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
4067 HInstruction* instruction = it.Current();
4068 if (instruction->ActualValue() == instruction) continue;
4069 if (instruction->CheckFlag(HValue::kIsDead)) {
4070 // The instruction was marked as deleted but left in the graph
4071 // as a control flow dependency point for subsequent
4073 instruction->DeleteAndReplaceWith(instruction->ActualValue());
4075 ASSERT(instruction->IsInformativeDefinition());
4076 if (instruction->IsPurelyInformativeDefinition()) {
4077 instruction->DeleteAndReplaceWith(instruction->RedefinedOperand());
4079 instruction->ReplaceAllUsesWith(instruction->ActualValue());
// Pops |count| values off the environment and re-emits them as
// HPushArgument instructions in original (call) order — the pops reverse
// them, and RemoveLast reverses them back.
4087 void HOptimizedGraphBuilder::PushArgumentsFromEnvironment(int count) {
4088 ZoneList<HValue*> arguments(count, zone());
4089 for (int i = 0; i < count; ++i) {
4090 arguments.Add(Pop(), zone());
4093 while (!arguments.is_empty()) {
4094 Add<HPushArgument>(arguments.RemoveLast());
// Moves the call's arguments from the environment onto the argument stack
// before the call instruction itself is emitted.
4099 template <class Instruction>
4100 HInstruction* HOptimizedGraphBuilder::PreProcessCall(Instruction* call) {
4101 PushArgumentsFromEnvironment(call->argument_count());
// Initializes the function's environment: binds the context, creates the
// arguments object from the incoming parameters (index 0 is "this"), sets
// all other specials/locals to undefined, and binds the arguments variable
// when it is stack allocated (bails out when it is context allocated).
4106 void HOptimizedGraphBuilder::SetUpScope(Scope* scope) {
4107 // First special is HContext.
4108 HInstruction* context = Add<HContext>();
4109 environment()->BindContext(context);
4111 // Create an arguments object containing the initial parameters. Set the
4112 // initial values of parameters including "this" having parameter index 0.
4113 ASSERT_EQ(scope->num_parameters() + 1, environment()->parameter_count());
4114 HArgumentsObject* arguments_object =
4115 New<HArgumentsObject>(environment()->parameter_count());
4116 for (int i = 0; i < environment()->parameter_count(); ++i) {
4117 HInstruction* parameter = Add<HParameter>(i);
4118 arguments_object->AddArgument(parameter, zone());
4119 environment()->Bind(i, parameter);
4121 AddInstruction(arguments_object);
4122 graph()->SetArgumentsObject(arguments_object);
4124 HConstant* undefined_constant = graph()->GetConstantUndefined();
4125 // Initialize specials and locals to undefined.
4126 for (int i = environment()->parameter_count() + 1;
4127 i < environment()->length();
4129 environment()->Bind(i, undefined_constant);
4132 // Handle the arguments and arguments shadow variables specially (they do
4133 // not have declarations).
4134 if (scope->arguments() != NULL) {
4135 if (!scope->arguments()->IsStackAllocated()) {
4136 return Bailout(kContextAllocatedArguments);
4139 environment()->Bind(scope->arguments(),
4140 graph()->GetArgumentsObject());
// Visits statements in order; stops after the first unconditional jump since
// anything following it is unreachable.
4145 void HOptimizedGraphBuilder::VisitStatements(ZoneList<Statement*>* statements) {
4146 for (int i = 0; i < statements->length(); i++) {
4147 Statement* stmt = statements->at(i);
4148 CHECK_ALIVE(Visit(stmt));
4149 if (stmt->IsJump()) break;
// Visits a statement block. Scoped (let-declaring) blocks are unsupported
// and bail out. A break target block is created lazily by nested break
// statements; when present, join into it and continue from there.
4154 void HOptimizedGraphBuilder::VisitBlock(Block* stmt) {
4155 ASSERT(!HasStackOverflow());
4156 ASSERT(current_block() != NULL);
4157 ASSERT(current_block()->HasPredecessor());
4158 if (stmt->scope() != NULL) {
4159 return Bailout(kScopedBlock);
4161 BreakAndContinueInfo break_info(stmt);
4162 { BreakAndContinueScope push(&break_info, this);
4163 CHECK_BAILOUT(VisitStatements(stmt->statements()));
4165 HBasicBlock* break_block = break_info.break_block();
4166 if (break_block != NULL) {
4167 if (current_block() != NULL) Goto(break_block);
4168 break_block->SetJoinId(stmt->ExitId());
4169 set_current_block(break_block);
// An expression statement is evaluated for effect only.
4174 void HOptimizedGraphBuilder::VisitExpressionStatement(
4175 ExpressionStatement* stmt) {
4176 ASSERT(!HasStackOverflow());
4177 ASSERT(current_block() != NULL);
4178 ASSERT(current_block()->HasPredecessor());
4179 VisitForEffect(stmt->expression());
// Nothing to emit for an empty statement; only the invariants are checked.
4183 void HOptimizedGraphBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
4184 ASSERT(!HasStackOverflow());
4185 ASSERT(current_block() != NULL);
4186 ASSERT(current_block()->HasPredecessor());
// Builds an if statement. Statically-known conditions visit only the taken
// arm (with a simulate at the arm's id); otherwise both arms are built and
// joined, skipping arms whose entry block ended up unreachable.
4190 void HOptimizedGraphBuilder::VisitIfStatement(IfStatement* stmt) {
4191 ASSERT(!HasStackOverflow());
4192 ASSERT(current_block() != NULL);
4193 ASSERT(current_block()->HasPredecessor());
4194 if (stmt->condition()->ToBooleanIsTrue()) {
4195 Add<HSimulate>(stmt->ThenId());
4196 Visit(stmt->then_statement());
4197 } else if (stmt->condition()->ToBooleanIsFalse()) {
4198 Add<HSimulate>(stmt->ElseId());
4199 Visit(stmt->else_statement());
4201 HBasicBlock* cond_true = graph()->CreateBasicBlock();
4202 HBasicBlock* cond_false = graph()->CreateBasicBlock();
4203 CHECK_BAILOUT(VisitForControl(stmt->condition(), cond_true, cond_false));
4205 if (cond_true->HasPredecessor()) {
4206 cond_true->SetJoinId(stmt->ThenId());
4207 set_current_block(cond_true);
4208 CHECK_BAILOUT(Visit(stmt->then_statement()));
4209 cond_true = current_block();
4214 if (cond_false->HasPredecessor()) {
4215 cond_false->SetJoinId(stmt->ElseId());
4216 set_current_block(cond_false);
4217 CHECK_BAILOUT(Visit(stmt->else_statement()));
4218 cond_false = current_block();
4223 HBasicBlock* join = CreateJoin(cond_true, cond_false, stmt->IfId());
4224 set_current_block(join);
// Finds the enclosing break/continue scope targeting |stmt|, accumulating
// the values to drop from the environment along the way, and returns the
// (lazily created) break or continue block of that scope.
4229 HBasicBlock* HOptimizedGraphBuilder::BreakAndContinueScope::Get(
4230 BreakableStatement* stmt,
4234 BreakAndContinueScope* current = this;
4235 while (current != NULL && current->info()->target() != stmt) {
4236 *drop_extra += current->info()->drop_extra();
4237 current = current->next();
4239 ASSERT(current != NULL); // Always found (unless stack is malformed).
// A break also drops the target scope's own extra values.
4241 if (type == BREAK) {
4242 *drop_extra += current->info()->drop_extra();
4245 HBasicBlock* block = NULL;
4248 block = current->info()->break_block();
4249 if (block == NULL) {
4250 block = current->owner()->graph()->CreateBasicBlock();
4251 current->info()->set_break_block(block);
4256 block = current->info()->continue_block();
4257 if (block == NULL) {
4258 block = current->owner()->graph()->CreateBasicBlock();
4259 current->info()->set_continue_block(block);
// Compiles 'continue': jump to the continue block of the target iteration
// statement; the fall-through path after the jump is dead.
4268 void HOptimizedGraphBuilder::VisitContinueStatement(
4269 ContinueStatement* stmt) {
4270 ASSERT(!HasStackOverflow());
4271 ASSERT(current_block() != NULL);
4272 ASSERT(current_block()->HasPredecessor());
4274 HBasicBlock* continue_block = break_scope()->Get(
4275 stmt->target(), BreakAndContinueScope::CONTINUE, &drop_extra);
4277 Goto(continue_block);
// Nothing after a continue is reachable in this block.
4278 set_current_block(NULL);
// Compiles 'break': jump to the break block of the target breakable
// statement; the fall-through path after the jump is dead.
4282 void HOptimizedGraphBuilder::VisitBreakStatement(BreakStatement* stmt) {
4283 ASSERT(!HasStackOverflow());
4284 ASSERT(current_block() != NULL);
4285 ASSERT(current_block()->HasPredecessor());
4287 HBasicBlock* break_block = break_scope()->Get(
4288 stmt->target(), BreakAndContinueScope::BREAK, &drop_extra);
// Nothing after a break is reachable in this block.
4291 set_current_block(NULL);
// Compiles 'return'. Four cases, distinguished by whether this function is
// being inlined (call_context() != NULL) and, if so, by the kind of inlined
// call (construct call, setter call, or normal call) and the AST context
// (test / effect / value) the call site expects.
4295 void HOptimizedGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
4296 ASSERT(!HasStackOverflow());
4297 ASSERT(current_block() != NULL);
4298 ASSERT(current_block()->HasPredecessor());
4299 FunctionState* state = function_state();
4300 AstContext* context = call_context();
4301 if (context == NULL) {
4302 // Not an inlined return, so an actual one.
4303 CHECK_ALIVE(VisitForValue(stmt->expression()));
4304 HValue* result = environment()->Pop();
4305 Add<HReturn>(result);
4306 } else if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
4307 // Return from an inlined construct call. In a test context the return value
4308 // will always evaluate to true, in a value context the return value needs
4309 // to be a JSObject.
4310 if (context->IsTest()) {
4311 TestContext* test = TestContext::cast(context);
4312 CHECK_ALIVE(VisitForEffect(stmt->expression()));
4313 Goto(test->if_true(), state);
4314 } else if (context->IsEffect()) {
4315 CHECK_ALIVE(VisitForEffect(stmt->expression()));
4316 Goto(function_return(), state);
4318 ASSERT(context->IsValue());
4319 CHECK_ALIVE(VisitForValue(stmt->expression()));
4320 HValue* return_value = Pop();
// The implicit receiver (argument slot 0) is the construct-call result when
// the explicitly returned value is not a spec object.
4321 HValue* receiver = environment()->arguments_environment()->Lookup(0);
4322 HHasInstanceTypeAndBranch* typecheck =
4323 New<HHasInstanceTypeAndBranch>(return_value,
4324 FIRST_SPEC_OBJECT_TYPE,
4325 LAST_SPEC_OBJECT_TYPE);
4326 HBasicBlock* if_spec_object = graph()->CreateBasicBlock();
4327 HBasicBlock* not_spec_object = graph()->CreateBasicBlock();
4328 typecheck->SetSuccessorAt(0, if_spec_object);
4329 typecheck->SetSuccessorAt(1, not_spec_object);
4330 FinishCurrentBlock(typecheck);
// Leave the inlined frame with the returned object or, failing the type
// check, with the receiver.
4331 AddLeaveInlined(if_spec_object, return_value, state);
4332 AddLeaveInlined(not_spec_object, receiver, state);
4334 } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
4335 // Return from an inlined setter call. The returned value is never used, the
4336 // value of an assignment is always the value of the RHS of the assignment.
4337 CHECK_ALIVE(VisitForEffect(stmt->expression()));
4338 if (context->IsTest()) {
// Argument slot 1 holds the assignment's RHS for an inlined setter.
4339 HValue* rhs = environment()->arguments_environment()->Lookup(1);
4340 context->ReturnValue(rhs);
4341 } else if (context->IsEffect()) {
4342 Goto(function_return(), state);
4344 ASSERT(context->IsValue());
4345 HValue* rhs = environment()->arguments_environment()->Lookup(1);
4346 AddLeaveInlined(rhs, state);
4349 // Return from a normal inlined function. Visit the subexpression in the
4350 // expression context of the call.
4351 if (context->IsTest()) {
4352 TestContext* test = TestContext::cast(context);
4353 VisitForControl(stmt->expression(), test->if_true(), test->if_false());
4354 } else if (context->IsEffect()) {
4355 // Visit in value context and ignore the result. This is needed to keep
4356 // environment in sync with full-codegen since some visitors (e.g.
4357 // VisitCountOperation) use the operand stack differently depending on
4359 CHECK_ALIVE(VisitForValue(stmt->expression()));
4361 Goto(function_return(), state);
4363 ASSERT(context->IsValue());
4364 CHECK_ALIVE(VisitForValue(stmt->expression()));
4365 AddLeaveInlined(Pop(), state);
// In every case control has left this block; kill the fall-through.
4368 set_current_block(NULL);
// 'with' statements are not supported by the optimizing compiler; always
// bail out to the non-optimized path.
4372 void HOptimizedGraphBuilder::VisitWithStatement(WithStatement* stmt) {
4373 ASSERT(!HasStackOverflow());
4374 ASSERT(current_block() != NULL);
4375 ASSERT(current_block()->HasPredecessor());
4376 return Bailout(kWithStatement);
// Compiles a switch statement as a chain of strict-equality compares (one
// per non-default clause) with dangling body blocks, then compiles the
// clause bodies in order, threading fall-through between them, and finally
// joins the loose ends (fall-through, untaken default path, break block).
4380 void HOptimizedGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
4381 ASSERT(!HasStackOverflow());
4382 ASSERT(current_block() != NULL);
4383 ASSERT(current_block()->HasPredecessor());
4385 // We only optimize switch statements with a bounded number of clauses.
4386 const int kCaseClauseLimit = 128;
4387 ZoneList<CaseClause*>* clauses = stmt->cases();
4388 int clause_count = clauses->length();
4389 ZoneList<HBasicBlock*> body_blocks(clause_count, zone());
4390 if (clause_count > kCaseClauseLimit) {
4391 return Bailout(kSwitchStatementTooManyClauses);
// The tag value stays on the expression stack while the tests run.
4394 CHECK_ALIVE(VisitForValue(stmt->tag()));
4395 Add<HSimulate>(stmt->EntryId());
4396 HValue* tag_value = Top();
4397 Type* tag_type = stmt->tag()->bounds().lower;
4399 // 1. Build all the tests, with dangling true branches
4400 BailoutId default_id = BailoutId::None();
4401 for (int i = 0; i < clause_count; ++i) {
4402 CaseClause* clause = clauses->at(i);
4403 if (clause->is_default()) {
// Default clause gets no compare; remember its entry id and keep the
// body_blocks list index-aligned with the clause list via a NULL entry.
4404 body_blocks.Add(NULL, zone());
4405 if (default_id.IsNone()) default_id = clause->EntryId();
4409 // Generate a compare and branch.
4410 CHECK_ALIVE(VisitForValue(clause->label()));
4411 HValue* label_value = Pop();
4413 Type* label_type = clause->label()->bounds().lower;
4414 Type* combined_type = clause->compare_type();
4415 HControlInstruction* compare = BuildCompareInstruction(
4416 Token::EQ_STRICT, tag_value, label_value, tag_type, label_type,
4418 ScriptPositionToSourcePosition(stmt->tag()->position()),
4419 ScriptPositionToSourcePosition(clause->label()->position()),
4420 PUSH_BEFORE_SIMULATE, clause->id());
4422 HBasicBlock* next_test_block = graph()->CreateBasicBlock();
4423 HBasicBlock* body_block = graph()->CreateBasicBlock();
4424 body_blocks.Add(body_block, zone());
4425 compare->SetSuccessorAt(0, body_block);
4426 compare->SetSuccessorAt(1, next_test_block);
4427 FinishCurrentBlock(compare);
4429 set_current_block(body_block);
4430 Drop(1); // tag_value
// Continue the test chain from the false successor.
4432 set_current_block(next_test_block);
4435 // Save the current block to use for the default or to join with the
4437 HBasicBlock* last_block = current_block();
4438 Drop(1); // tag_value
4440 // 2. Loop over the clauses and the linked list of tests in lockstep,
4441 // translating the clause bodies.
4442 HBasicBlock* fall_through_block = NULL;
4444 BreakAndContinueInfo break_info(stmt);
4445 { BreakAndContinueScope push(&break_info, this);
4446 for (int i = 0; i < clause_count; ++i) {
4447 CaseClause* clause = clauses->at(i);
4449 // Identify the block where normal (non-fall-through) control flow
4451 HBasicBlock* normal_block = NULL;
4452 if (clause->is_default()) {
// The default body is entered from the end of the test chain; if that
// block was already consumed there is nothing to compile here.
4453 if (last_block == NULL) continue;
4454 normal_block = last_block;
4455 last_block = NULL; // Cleared to indicate we've handled it.
4457 normal_block = body_blocks[i];
4460 if (fall_through_block == NULL) {
4461 set_current_block(normal_block);
// Merge fall-through from the previous body with this clause's entry.
4463 HBasicBlock* join = CreateJoin(fall_through_block,
4466 set_current_block(join);
4469 CHECK_BAILOUT(VisitStatements(clause->statements()));
4470 fall_through_block = current_block();
4474 // Create an up-to-3-way join. Use the break block if it exists since
4475 // it's already a join block.
4476 HBasicBlock* break_block = break_info.break_block();
4477 if (break_block == NULL) {
4478 set_current_block(CreateJoin(fall_through_block,
4482 if (fall_through_block != NULL) Goto(fall_through_block, break_block);
4483 if (last_block != NULL) Goto(last_block, break_block);
4484 break_block->SetJoinId(stmt->ExitId());
4485 set_current_block(break_block);
// Compiles a loop body: installs the break/continue scope, emits a
// back-edge stack check (recorded on the loop header's loop information so
// later phases can find it), then visits the body statement.
4490 void HOptimizedGraphBuilder::VisitLoopBody(IterationStatement* stmt,
4491 HBasicBlock* loop_entry,
4492 BreakAndContinueInfo* break_info) {
4493 BreakAndContinueScope push(break_info, this);
4494 Add<HSimulate>(stmt->StackCheckId());
4495 HStackCheck* stack_check =
4496 HStackCheck::cast(Add<HStackCheck>(HStackCheck::kBackwardsBranch));
4497 ASSERT(loop_entry->IsLoopHeader());
4498 loop_entry->loop_information()->set_stack_check(stack_check);
4499 CHECK_BAILOUT(Visit(stmt->body()));
// Compiles a do-while loop: body first, then the condition on the back
// edge. Statically-true conditions omit the branch; statically-false ones
// jump straight to the successor.
4503 void HOptimizedGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
4504 ASSERT(!HasStackOverflow());
4505 ASSERT(current_block() != NULL);
4506 ASSERT(current_block()->HasPredecessor());
4507 ASSERT(current_block() != NULL);
4508 HBasicBlock* loop_entry = BuildLoopEntry(stmt);
4510 BreakAndContinueInfo break_info(stmt);
4511 CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
// Merge normal body exit with any 'continue' edges.
4512 HBasicBlock* body_exit =
4513 JoinContinue(stmt, current_block(), break_info.continue_block());
4514 HBasicBlock* loop_successor = NULL;
4515 if (body_exit != NULL && !stmt->cond()->ToBooleanIsTrue()) {
4516 set_current_block(body_exit);
4517 loop_successor = graph()->CreateBasicBlock();
4518 if (stmt->cond()->ToBooleanIsFalse()) {
// Condition statically false: the loop never repeats.
4519 Goto(loop_successor);
4522 // The block for a true condition, the actual predecessor block of the
4524 body_exit = graph()->CreateBasicBlock();
4525 CHECK_BAILOUT(VisitForControl(stmt->cond(), body_exit, loop_successor));
4527 if (body_exit != NULL && body_exit->HasPredecessor()) {
4528 body_exit->SetJoinId(stmt->BackEdgeId());
4532 if (loop_successor->HasPredecessor()) {
4533 loop_successor->SetJoinId(stmt->ExitId());
// Unreachable successor: drop it so CreateLoop doesn't wire it in.
4535 loop_successor = NULL;
4538 HBasicBlock* loop_exit = CreateLoop(stmt,
4542 break_info.break_block());
4543 set_current_block(loop_exit);
// Compiles a while loop: condition at the loop header, then the body. A
// statically-true condition skips the branch entirely.
4547 void HOptimizedGraphBuilder::VisitWhileStatement(WhileStatement* stmt) {
4548 ASSERT(!HasStackOverflow());
4549 ASSERT(current_block() != NULL);
4550 ASSERT(current_block()->HasPredecessor());
4551 ASSERT(current_block() != NULL);
4552 HBasicBlock* loop_entry = BuildLoopEntry(stmt);
4554 // If the condition is constant true, do not generate a branch.
4555 HBasicBlock* loop_successor = NULL;
4556 if (!stmt->cond()->ToBooleanIsTrue()) {
4557 HBasicBlock* body_entry = graph()->CreateBasicBlock();
4558 loop_successor = graph()->CreateBasicBlock();
4559 CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
4560 if (body_entry->HasPredecessor()) {
4561 body_entry->SetJoinId(stmt->BodyId());
4562 set_current_block(body_entry);
4564 if (loop_successor->HasPredecessor()) {
4565 loop_successor->SetJoinId(stmt->ExitId());
// Successor unreachable (condition can never be false here).
4567 loop_successor = NULL;
4571 BreakAndContinueInfo break_info(stmt);
// Body is only compiled if the condition left a live block.
4572 if (current_block() != NULL) {
4573 CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
4575 HBasicBlock* body_exit =
4576 JoinContinue(stmt, current_block(), break_info.continue_block());
4577 HBasicBlock* loop_exit = CreateLoop(stmt,
4581 break_info.break_block());
4582 set_current_block(loop_exit);
// Compiles a C-style for loop: init statement (outside the loop), optional
// condition at the header, body, then the optional 'next' expression on the
// back edge.
4586 void HOptimizedGraphBuilder::VisitForStatement(ForStatement* stmt) {
4587 ASSERT(!HasStackOverflow());
4588 ASSERT(current_block() != NULL);
4589 ASSERT(current_block()->HasPredecessor());
4590 if (stmt->init() != NULL) {
4591 CHECK_ALIVE(Visit(stmt->init()));
4593 ASSERT(current_block() != NULL);
4594 HBasicBlock* loop_entry = BuildLoopEntry(stmt);
4596 HBasicBlock* loop_successor = NULL;
// A missing condition means an unconditional loop: no branch is emitted.
4597 if (stmt->cond() != NULL) {
4598 HBasicBlock* body_entry = graph()->CreateBasicBlock();
4599 loop_successor = graph()->CreateBasicBlock();
4600 CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
4601 if (body_entry->HasPredecessor()) {
4602 body_entry->SetJoinId(stmt->BodyId());
4603 set_current_block(body_entry);
4605 if (loop_successor->HasPredecessor()) {
4606 loop_successor->SetJoinId(stmt->ExitId());
4608 loop_successor = NULL;
4612 BreakAndContinueInfo break_info(stmt);
4613 if (current_block() != NULL) {
4614 CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
4616 HBasicBlock* body_exit =
4617 JoinContinue(stmt, current_block(), break_info.continue_block());
// The 'next' expression runs after the body (and after continues join in).
4619 if (stmt->next() != NULL && body_exit != NULL) {
4620 set_current_block(body_exit);
4621 CHECK_BAILOUT(Visit(stmt->next()));
4622 body_exit = current_block();
4625 HBasicBlock* loop_exit = CreateLoop(stmt,
4629 break_info.break_block());
4630 set_current_block(loop_exit);
// Compiles a for-in loop, but only for the fast case: the enumerable's enum
// cache is prepared up front and iterated by index; anything else (slow
// for-in, non-stack-local 'each' variable, flag disabled) bails out.
4634 void HOptimizedGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
4635 ASSERT(!HasStackOverflow());
4636 ASSERT(current_block() != NULL);
4637 ASSERT(current_block()->HasPredecessor());
4639 if (!FLAG_optimize_for_in) {
4640 return Bailout(kForInStatementOptimizationIsDisabled);
4643 if (stmt->for_in_type() != ForInStatement::FAST_FOR_IN) {
4644 return Bailout(kForInStatementIsNotFastCase);
// Only simple stack-local loop variables are supported.
4647 if (!stmt->each()->IsVariableProxy() ||
4648 !stmt->each()->AsVariableProxy()->var()->IsStackLocal()) {
4649 return Bailout(kForInStatementWithNonLocalEachVariable);
4652 Variable* each_var = stmt->each()->AsVariableProxy()->var();
4654 CHECK_ALIVE(VisitForValue(stmt->enumerable()));
4655 HValue* enumerable = Top(); // Leave enumerable at the top.
// Prepare the enum cache: map, key array, enum length, start index.
4657 HInstruction* map = Add<HForInPrepareMap>(enumerable);
4658 Add<HSimulate>(stmt->PrepareId());
4660 HInstruction* array = Add<HForInCacheArray>(
4661 enumerable, map, DescriptorArray::kEnumCacheBridgeCacheIndex);
4663 HInstruction* enum_length = Add<HMapEnumLength>(map);
4665 HInstruction* start_index = Add<HConstant>(0);
4672 HInstruction* index_cache = Add<HForInCacheArray>(
4673 enumerable, map, DescriptorArray::kEnumCacheBridgeIndicesCacheIndex);
4674 HForInCacheArray::cast(array)->set_index_cache(
4675 HForInCacheArray::cast(index_cache));
4677 HBasicBlock* loop_entry = BuildLoopEntry(stmt);
// Loop state lives on the expression stack: index at slot 0, limit at 1,
// enum cache at 2, map at 3, enumerable at 4 (see uses below).
4679 HValue* index = environment()->ExpressionStackAt(0);
4680 HValue* limit = environment()->ExpressionStackAt(1);
4682 // Check that we still have more keys.
4683 HCompareNumericAndBranch* compare_index =
4684 New<HCompareNumericAndBranch>(index, limit, Token::LT);
4685 compare_index->set_observed_input_representation(
4686 Representation::Smi(), Representation::Smi());
4688 HBasicBlock* loop_body = graph()->CreateBasicBlock();
4689 HBasicBlock* loop_successor = graph()->CreateBasicBlock();
4691 compare_index->SetSuccessorAt(0, loop_body);
4692 compare_index->SetSuccessorAt(1, loop_successor);
4693 FinishCurrentBlock(compare_index);
4695 set_current_block(loop_successor);
4698 set_current_block(loop_body);
4700 HValue* key = Add<HLoadKeyed>(
4701 environment()->ExpressionStackAt(2), // Enum cache.
4702 environment()->ExpressionStackAt(0), // Iteration index.
4703 environment()->ExpressionStackAt(0),
4706 // Check if the expected map still matches that of the enumerable.
4707 // If not just deoptimize.
4708 Add<HCheckMapValue>(environment()->ExpressionStackAt(4),
4709 environment()->ExpressionStackAt(3));
4711 Bind(each_var, key);
// Five loop-state values must be dropped when breaking out of this loop.
4713 BreakAndContinueInfo break_info(stmt, 5);
4714 CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
4716 HBasicBlock* body_exit =
4717 JoinContinue(stmt, current_block(), break_info.continue_block());
4719 if (body_exit != NULL) {
4720 set_current_block(body_exit);
// Increment the iteration index on the back edge.
4722 HValue* current_index = Pop();
4723 Push(AddUncasted<HAdd>(current_index, graph()->GetConstant1()));
4724 body_exit = current_block();
4727 HBasicBlock* loop_exit = CreateLoop(stmt,
4731 break_info.break_block());
4733 set_current_block(loop_exit);
// for-of is not supported by the optimizing compiler; always bail out.
4737 void HOptimizedGraphBuilder::VisitForOfStatement(ForOfStatement* stmt) {
4738 ASSERT(!HasStackOverflow());
4739 ASSERT(current_block() != NULL);
4740 ASSERT(current_block()->HasPredecessor());
4741 return Bailout(kForOfStatement);
// try/catch is not supported by the optimizing compiler; always bail out.
4745 void HOptimizedGraphBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
4746 ASSERT(!HasStackOverflow());
4747 ASSERT(current_block() != NULL);
4748 ASSERT(current_block()->HasPredecessor());
4749 return Bailout(kTryCatchStatement);
// try/finally is not supported by the optimizing compiler; always bail out.
4753 void HOptimizedGraphBuilder::VisitTryFinallyStatement(
4754 TryFinallyStatement* stmt) {
4755 ASSERT(!HasStackOverflow());
4756 ASSERT(current_block() != NULL);
4757 ASSERT(current_block()->HasPredecessor());
4758 return Bailout(kTryFinallyStatement);
// 'debugger' statements are not supported by the optimizing compiler;
// always bail out.
4762 void HOptimizedGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
4763 ASSERT(!HasStackOverflow());
4764 ASSERT(current_block() != NULL);
4765 ASSERT(current_block()->HasPredecessor());
4766 return Bailout(kDebuggerStatement);
// Case clauses are compiled inline by VisitSwitchStatement (which visits
// labels and statements directly), so presumably this visitor is never
// reached -- TODO(review): confirm against the elided body.
4770 void HOptimizedGraphBuilder::VisitCaseClause(CaseClause* clause) {
// Compiles a function literal: obtains (building on demand) the literal's
// SharedFunctionInfo and emits an HFunctionLiteral that materializes the
// closure at runtime.
4775 void HOptimizedGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
4776 ASSERT(!HasStackOverflow());
4777 ASSERT(current_block() != NULL);
4778 ASSERT(current_block()->HasPredecessor());
4779 Handle<SharedFunctionInfo> shared_info = expr->shared_info();
4780 if (shared_info.is_null()) {
// Compiling the literal lazily may itself overflow the stack.
4781 shared_info = Compiler::BuildFunctionInfo(expr, current_info()->script());
4783 // We also have a stack overflow if the recursive compilation did.
4784 if (HasStackOverflow()) return;
4785 HFunctionLiteral* instr =
4786 New<HFunctionLiteral>(shared_info, expr->pretenure());
4787 return ast_context()->ReturnInstruction(instr, expr->id());
// Native function literals are not supported by the optimizing compiler;
// always bail out.
4791 void HOptimizedGraphBuilder::VisitNativeFunctionLiteral(
4792 NativeFunctionLiteral* expr) {
4793 ASSERT(!HasStackOverflow());
4794 ASSERT(current_block() != NULL);
4795 ASSERT(current_block()->HasPredecessor());
4796 return Bailout(kNativeFunctionLiteral);
// Compiles a ternary (?:) expression: branch on the condition, evaluate
// each subexpression in the surrounding AST context, then (unless in a test
// context, where the branches flow to the test's targets) join and return
// the merged value.
4800 void HOptimizedGraphBuilder::VisitConditional(Conditional* expr) {
4801 ASSERT(!HasStackOverflow());
4802 ASSERT(current_block() != NULL);
4803 ASSERT(current_block()->HasPredecessor());
4804 HBasicBlock* cond_true = graph()->CreateBasicBlock();
4805 HBasicBlock* cond_false = graph()->CreateBasicBlock();
4806 CHECK_BAILOUT(VisitForControl(expr->condition(), cond_true, cond_false));
4808 // Visit the true and false subexpressions in the same AST context as the
4809 // whole expression.
4810 if (cond_true->HasPredecessor()) {
4811 cond_true->SetJoinId(expr->ThenId());
4812 set_current_block(cond_true);
4813 CHECK_BAILOUT(Visit(expr->then_expression()));
4814 cond_true = current_block();
4819 if (cond_false->HasPredecessor()) {
4820 cond_false->SetJoinId(expr->ElseId());
4821 set_current_block(cond_false);
4822 CHECK_BAILOUT(Visit(expr->else_expression()));
4823 cond_false = current_block();
4828 if (!ast_context()->IsTest()) {
4829 HBasicBlock* join = CreateJoin(cond_true, cond_false, expr->id());
4830 set_current_block(join);
// In a value context the joined result sits on the environment stack.
4831 if (join != NULL && !ast_context()->IsEffect()) {
4832 return ast_context()->ReturnValue(Pop());
// Determines how a global variable access can be compiled: looks the name
// up on the global object and classifies the access. 'this' references, a
// missing global object, non-normal properties, read-only stores, and
// properties not held directly by the global object all disqualify the
// fast (cell-based) path.
4838 HOptimizedGraphBuilder::GlobalPropertyAccess
4839 HOptimizedGraphBuilder::LookupGlobalProperty(
4840 Variable* var, LookupResult* lookup, PropertyAccessType access_type) {
4841 if (var->is_this() || !current_info()->has_global_object()) {
4844 Handle<GlobalObject> global(current_info()->global_object());
4845 global->Lookup(var->name(), lookup);
4846 if (!lookup->IsNormal() ||
4847 (access_type == STORE && lookup->IsReadOnly()) ||
4848 lookup->holder() != *global) {
// Emits loads that walk up the context chain from the current context to
// the context holding the given context-allocated variable, following the
// PREVIOUS slot once per scope level of distance.
4856 HValue* HOptimizedGraphBuilder::BuildContextChainWalk(Variable* var) {
4857 ASSERT(var->IsContextSlot());
4858 HValue* context = environment()->context();
4859 int length = current_info()->scope()->ContextChainLength(var->scope());
4860 while (length-- > 0) {
4861 context = Add<HLoadNamedField>(
4862 context, static_cast<HValue*>(NULL),
4863 HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
// Compiles a variable reference, dispatching on where the variable lives:
// unallocated (global), parameter/local, context slot, or dynamic lookup
// (which bails out).
4869 void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
4870 if (expr->is_this()) {
// Record that 'this' is actually used so later phases keep it alive.
4871 current_info()->set_this_has_uses(true);
4874 ASSERT(!HasStackOverflow());
4875 ASSERT(current_block() != NULL);
4876 ASSERT(current_block()->HasPredecessor());
4877 Variable* variable = expr->var();
4878 switch (variable->location()) {
4879 case Variable::UNALLOCATED: {
4880 if (IsLexicalVariableMode(variable->mode())) {
4881 // TODO(rossberg): should this be an ASSERT?
4882 return Bailout(kReferenceToGlobalLexicalVariable);
4884 // Handle known global constants like 'undefined' specially to avoid a
4885 // load from a global cell for them.
4886 Handle<Object> constant_value =
4887 isolate()->factory()->GlobalConstantFor(variable->name());
4888 if (!constant_value.is_null()) {
4889 HConstant* instr = New<HConstant>(constant_value);
4890 return ast_context()->ReturnInstruction(instr, expr->id());
4893 LookupResult lookup(isolate());
4894 GlobalPropertyAccess type = LookupGlobalProperty(variable, &lookup, LOAD);
// Globals needing an access check cannot use the fast cell path.
4896 if (type == kUseCell &&
4897 current_info()->global_object()->IsAccessCheckNeeded()) {
4901 if (type == kUseCell) {
4902 Handle<GlobalObject> global(current_info()->global_object());
4903 Handle<PropertyCell> cell(global->GetPropertyCell(&lookup));
4904 if (cell->type()->IsConstant()) {
// Embed the cell's constant directly and register a dependency so the
// code is deoptimized if the cell's value ever changes.
4905 PropertyCell::AddDependentCompilationInfo(cell, top_info());
4906 Handle<Object> constant_object = cell->type()->AsConstant()->Value();
4907 if (constant_object->IsConsString()) {
4909 String::Flatten(Handle<String>::cast(constant_object));
4911 HConstant* constant = New<HConstant>(constant_object);
4912 return ast_context()->ReturnInstruction(constant, expr->id());
4914 HLoadGlobalCell* instr =
4915 New<HLoadGlobalCell>(cell, lookup.GetPropertyDetails());
4916 return ast_context()->ReturnInstruction(instr, expr->id());
// Generic path: load the global object from the context, then do a
// generic (IC-backed) named load.
4919 HValue* global_object = Add<HLoadNamedField>(
4920 context(), static_cast<HValue*>(NULL),
4921 HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
4922 HLoadGlobalGeneric* instr =
4923 New<HLoadGlobalGeneric>(global_object,
4925 ast_context()->is_for_typeof());
4926 return ast_context()->ReturnInstruction(instr, expr->id());
4930 case Variable::PARAMETER:
4931 case Variable::LOCAL: {
4932 HValue* value = LookupAndMakeLive(variable);
// The hole marks an uninitialized let/const binding (TDZ); bail out.
4933 if (value == graph()->GetConstantHole()) {
4934 ASSERT(IsDeclaredVariableMode(variable->mode()) &&
4935 variable->mode() != VAR);
4936 return Bailout(kReferenceToUninitializedVariable);
4938 return ast_context()->ReturnValue(value);
4941 case Variable::CONTEXT: {
4942 HValue* context = BuildContextChainWalk(variable);
4943 HLoadContextSlot* instr = new(zone()) HLoadContextSlot(context, variable);
4944 return ast_context()->ReturnInstruction(instr, expr->id());
4947 case Variable::LOOKUP:
4948 return Bailout(kReferenceToAVariableWhichRequiresDynamicLookup);
// Compiles a literal expression as a constant in the graph.
4953 void HOptimizedGraphBuilder::VisitLiteral(Literal* expr) {
4954 ASSERT(!HasStackOverflow());
4955 ASSERT(current_block() != NULL);
4956 ASSERT(current_block()->HasPredecessor());
4957 HConstant* instr = New<HConstant>(expr->value());
4958 return ast_context()->ReturnInstruction(instr, expr->id());
// Compiles a regexp literal: emits an HRegExpLiteral that materializes the
// RegExp object from the closure's literals array at the literal's index.
4962 void HOptimizedGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
4963 ASSERT(!HasStackOverflow());
4964 ASSERT(current_block() != NULL);
4965 ASSERT(current_block()->HasPredecessor());
4966 Handle<JSFunction> closure = function_state()->compilation_info()->closure();
4967 Handle<FixedArray> literals(closure->literals());
4968 HRegExpLiteral* instr = New<HRegExpLiteral>(literals,
4971 expr->literal_index());
4972 return ast_context()->ReturnInstruction(instr, expr->id());
// Returns true if property accesses on a receiver of the given type can be
// compiled inline: numbers/strings always can; class types only for
// ordinary JSObject maps without dictionary properties or a named
// interceptor.
4976 static bool CanInlinePropertyAccess(Type* type) {
4977 if (type->Is(Type::NumberOrString())) return true;
4978 if (!type->IsClass()) return false;
4979 Handle<Map> map = type->AsClass()->Map();
4980 return map->IsJSObjectMap() &&
4981 !map->is_dictionary_map() &&
4982 !map->has_named_interceptor();
4986 // Determines whether the given array or object literal boilerplate satisfies
4987 // all limits to be considered for fast deep-copying and computes the total
4988 // size of all objects that are part of the graph.
// Recurses through nested JSObject values in both elements and in-object
// fields, decrementing *max_properties as a shared budget; depth is bounded
// by max_depth. Deprecated maps are migrated first (failure disqualifies).
4989 static bool IsFastLiteral(Handle<JSObject> boilerplate,
4991 int* max_properties) {
4992 if (boilerplate->map()->is_deprecated() &&
4993 !JSObject::TryMigrateInstance(boilerplate)) {
4997 ASSERT(max_depth >= 0 && *max_properties >= 0);
4998 if (max_depth == 0) return false;
5000 Isolate* isolate = boilerplate->GetIsolate();
5001 Handle<FixedArrayBase> elements(boilerplate->elements());
// COW element arrays are shared, so they do not count against the budget.
5002 if (elements->length() > 0 &&
5003 elements->map() != isolate->heap()->fixed_cow_array_map()) {
5004 if (boilerplate->HasFastObjectElements()) {
5005 Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
5006 int length = elements->length();
5007 for (int i = 0; i < length; i++) {
5008 if ((*max_properties)-- == 0) return false;
5009 Handle<Object> value(fast_elements->get(i), isolate);
5010 if (value->IsJSObject()) {
5011 Handle<JSObject> value_object = Handle<JSObject>::cast(value);
5012 if (!IsFastLiteral(value_object,
5019 } else if (!boilerplate->HasFastDoubleElements()) {
5024 Handle<FixedArray> properties(boilerplate->properties());
// Out-of-object properties present means the object is not "fast" here.
5025 if (properties->length() > 0) {
// Walk the in-object fields via the map's own descriptors.
5028 Handle<DescriptorArray> descriptors(
5029 boilerplate->map()->instance_descriptors());
5030 int limit = boilerplate->map()->NumberOfOwnDescriptors();
5031 for (int i = 0; i < limit; i++) {
5032 PropertyDetails details = descriptors->GetDetails(i);
5033 if (details.type() != FIELD) continue;
5034 int index = descriptors->GetFieldIndex(i);
5035 if ((*max_properties)-- == 0) return false;
5036 Handle<Object> value(boilerplate->InObjectPropertyAt(index), isolate);
5037 if (value->IsJSObject()) {
5038 Handle<JSObject> value_object = Handle<JSObject>::cast(value);
5039 if (!IsFastLiteral(value_object,
// Compiles an object literal. The boilerplate (if already created and
// small enough per IsFastLiteral) is deep-copied inline; otherwise the
// literal is built by a runtime call. Non-compile-time properties are then
// stored one by one, monomorphically when the receiver map is known.
5051 void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
5052 ASSERT(!HasStackOverflow());
5053 ASSERT(current_block() != NULL);
5054 ASSERT(current_block()->HasPredecessor());
5055 expr->BuildConstantProperties(isolate());
5056 Handle<JSFunction> closure = function_state()->compilation_info()->closure();
5057 HInstruction* literal;
5059 // Check whether to use fast or slow deep-copying for boilerplate.
5060 int max_properties = kMaxFastLiteralProperties;
5061 Handle<Object> literals_cell(closure->literals()->get(expr->literal_index()),
5063 Handle<AllocationSite> site;
5064 Handle<JSObject> boilerplate;
5065 if (!literals_cell->IsUndefined()) {
5066 // Retrieve the boilerplate
5067 site = Handle<AllocationSite>::cast(literals_cell);
5068 boilerplate = Handle<JSObject>(JSObject::cast(site->transition_info()),
5072 if (!boilerplate.is_null() &&
5073 IsFastLiteral(boilerplate, kMaxFastLiteralDepth, &max_properties)) {
// Fast path: inline deep copy of the boilerplate, tracked through the
// allocation-site usage context.
5074 AllocationSiteUsageContext usage_context(isolate(), site, false);
5075 usage_context.EnterNewScope();
5076 literal = BuildFastLiteral(boilerplate, &usage_context);
5077 usage_context.ExitScope(site, boilerplate);
// Slow path: call the runtime to create the literal.
5079 NoObservableSideEffectsScope no_effects(this);
5080 Handle<FixedArray> closure_literals(closure->literals(), isolate());
5081 Handle<FixedArray> constant_properties = expr->constant_properties();
5082 int literal_index = expr->literal_index();
5083 int flags = expr->fast_elements()
5084 ? ObjectLiteral::kFastElements : ObjectLiteral::kNoFlags;
5085 flags |= expr->has_function()
5086 ? ObjectLiteral::kHasFunction : ObjectLiteral::kNoFlags;
5088 Add<HPushArgument>(Add<HConstant>(closure_literals));
5089 Add<HPushArgument>(Add<HConstant>(literal_index));
5090 Add<HPushArgument>(Add<HConstant>(constant_properties));
5091 Add<HPushArgument>(Add<HConstant>(flags));
5093 // TODO(mvstanton): Add a flag to turn off creation of any
5094 // AllocationMementos for this call: we are in crankshaft and should have
5095 // learned enough about transition behavior to stop emitting mementos.
5096 Runtime::FunctionId function_id = Runtime::kHiddenCreateObjectLiteral;
5097 literal = Add<HCallRuntime>(isolate()->factory()->empty_string(),
5098 Runtime::FunctionForId(function_id),
5102 // The object is expected in the bailout environment during computation
5103 // of the property values and is the value of the entire expression.
5106 expr->CalculateEmitStore(zone());
5108 for (int i = 0; i < expr->properties()->length(); i++) {
5109 ObjectLiteral::Property* property = expr->properties()->at(i);
// Compile-time properties were already baked into the boilerplate.
5110 if (property->IsCompileTimeValue()) continue;
5112 Literal* key = property->key();
5113 Expression* value = property->value();
5115 switch (property->kind()) {
5116 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
5117 ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
5119 case ObjectLiteral::Property::COMPUTED:
5120 if (key->value()->IsInternalizedString()) {
5121 if (property->emit_store()) {
5122 CHECK_ALIVE(VisitForValue(value));
5123 HValue* value = Pop();
5124 Handle<Map> map = property->GetReceiverType();
5125 Handle<String> name = property->key()->AsPropertyName();
5126 HInstruction* store;
5127 if (map.is_null()) {
5128 // If we don't know the monomorphic type, do a generic store.
5129 CHECK_ALIVE(store = BuildNamedGeneric(
5130 STORE, literal, name, value));
5132 PropertyAccessInfo info(this, STORE, ToType(map), name);
5133 if (info.CanAccessMonomorphic()) {
// Guard the known map, then emit a direct monomorphic store.
5134 HValue* checked_literal = Add<HCheckMaps>(literal, map);
5135 ASSERT(!info.lookup()->IsPropertyCallbacks());
5136 store = BuildMonomorphicAccess(
5137 &info, literal, checked_literal, value,
5138 BailoutId::None(), BailoutId::None());
5140 CHECK_ALIVE(store = BuildNamedGeneric(
5141 STORE, literal, name, value));
5144 AddInstruction(store);
5145 if (store->HasObservableSideEffects()) {
5146 Add<HSimulate>(key->id(), REMOVABLE_SIMULATE);
// Non-stored property: evaluate the value for effect only.
5149 CHECK_ALIVE(VisitForEffect(value));
5154 case ObjectLiteral::Property::PROTOTYPE:
5155 case ObjectLiteral::Property::SETTER:
5156 case ObjectLiteral::Property::GETTER:
5157 return Bailout(kObjectLiteralWithComplexProperty);
5158 default: UNREACHABLE();
5162 if (expr->has_function()) {
5163 // Return the result of the transformation to fast properties
5164 // instead of the original since this operation changes the map
5165 // of the object. This makes sure that the original object won't
5166 // be used by other optimized code before it is transformed
5167 // (e.g. because of code motion).
5168 HToFastProperties* result = Add<HToFastProperties>(Pop());
5169 return ast_context()->ReturnValue(result);
5171 return ast_context()->ReturnValue(Pop());
// Builds optimized code for an array literal. Finds (or creates via the
// runtime) the boilerplate object and its AllocationSite in the closure's
// literals array, copies the boilerplate (fast deep copy when small enough,
// otherwise a runtime call), map-checks the result, and then stores each
// non-compile-time-constant subexpression into the copy's elements store.
5176 void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
5177 ASSERT(!HasStackOverflow());
5178 ASSERT(current_block() != NULL);
5179 ASSERT(current_block()->HasPredecessor());
5180 expr->BuildConstantElements(isolate());
5181 ZoneList<Expression*>* subexprs = expr->values();
5182 int length = subexprs->length();
5183 HInstruction* literal;
5185 Handle<AllocationSite> site;
5186 Handle<FixedArray> literals(environment()->closure()->literals(), isolate());
5187 bool uninitialized = false;
5188 Handle<Object> literals_cell(literals->get(expr->literal_index()),
5190 Handle<JSObject> boilerplate_object;
// An undefined literals slot means no boilerplate has been created for this
// literal yet: build it now and record an AllocationSite in the slot.
5191 if (literals_cell->IsUndefined()) {
5192 uninitialized = true;
5193 Handle<Object> raw_boilerplate;
5194 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
5195 isolate(), raw_boilerplate,
5196 Runtime::CreateArrayLiteralBoilerplate(
5197 isolate(), literals, expr->constant_elements()),
5198 Bailout(kArrayBoilerplateCreationFailed));
5200 boilerplate_object = Handle<JSObject>::cast(raw_boilerplate);
// DeepWalk records allocation sites for the boilerplate's object graph
// inside the creation-context scope.
5201 AllocationSiteCreationContext creation_context(isolate());
5202 site = creation_context.EnterNewScope();
5203 if (JSObject::DeepWalk(boilerplate_object, &creation_context).is_null()) {
5204 return Bailout(kArrayBoilerplateCreationFailed);
5206 creation_context.ExitScope(site, boilerplate_object);
5207 literals->set(expr->literal_index(), *site);
5209 if (boilerplate_object->elements()->map() ==
5210 isolate()->heap()->fixed_cow_array_map()) {
5211 isolate()->counters()->cow_arrays_created_runtime()->Increment();
// Cached case: the literals slot holds the AllocationSite, whose
// transition_info is the boilerplate object.
5214 ASSERT(literals_cell->IsAllocationSite());
5215 site = Handle<AllocationSite>::cast(literals_cell);
5216 boilerplate_object = Handle<JSObject>(
5217 JSObject::cast(site->transition_info()), isolate());
5220 ASSERT(!boilerplate_object.is_null());
5221 ASSERT(site->SitePointsToLiteral());
5223 ElementsKind boilerplate_elements_kind =
5224 boilerplate_object->GetElementsKind();
5226 // Check whether to use fast or slow deep-copying for boilerplate.
5227 int max_properties = kMaxFastLiteralProperties;
5228 if (IsFastLiteral(boilerplate_object,
5229 kMaxFastLiteralDepth,
5231 AllocationSiteUsageContext usage_context(isolate(), site, false);
5232 usage_context.EnterNewScope();
5233 literal = BuildFastLiteral(boilerplate_object, &usage_context);
5234 usage_context.ExitScope(site, boilerplate_object);
// Slow path: call the runtime to clone the boilerplate.
5236 NoObservableSideEffectsScope no_effects(this);
5237 // Boilerplate already exists and constant elements are never accessed,
5238 // pass an empty fixed array to the runtime function instead.
5239 Handle<FixedArray> constants = isolate()->factory()->empty_fixed_array();
5240 int literal_index = expr->literal_index();
5241 int flags = expr->depth() == 1
5242 ? ArrayLiteral::kShallowElements
5243 : ArrayLiteral::kNoFlags;
5244 flags |= ArrayLiteral::kDisableMementos;
5246 Add<HPushArgument>(Add<HConstant>(literals));
5247 Add<HPushArgument>(Add<HConstant>(literal_index));
5248 Add<HPushArgument>(Add<HConstant>(constants));
5249 Add<HPushArgument>(Add<HConstant>(flags));
5251 // TODO(mvstanton): Consider a flag to turn off creation of any
5252 // AllocationMementos for this call: we are in crankshaft and should have
5253 // learned enough about transition behavior to stop emitting mementos.
5254 Runtime::FunctionId function_id = Runtime::kHiddenCreateArrayLiteral;
5255 literal = Add<HCallRuntime>(isolate()->factory()->empty_string(),
5256 Runtime::FunctionForId(function_id),
5259 // De-opt if elements kind changed from boilerplate_elements_kind.
5260 Handle<Map> map = Handle<Map>(boilerplate_object->map(), isolate());
5261 literal = Add<HCheckMaps>(literal, map);
5264 // The array is expected in the bailout environment during computation
5265 // of the property values and is the value of the entire expression.
5267 // The literal index is on the stack, too.
5268 Push(Add<HConstant>(expr->literal_index()));
5270 HInstruction* elements = NULL;
5272 for (int i = 0; i < length; i++) {
5273 Expression* subexpr = subexprs->at(i);
5274 // If the subexpression is a literal or a simple materialized literal it
5275 // is already set in the cloned array.
5276 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
5278 CHECK_ALIVE(VisitForValue(subexpr));
5279 HValue* value = Pop();
5280 if (!Smi::IsValid(i)) return Bailout(kNonSmiKeyInArrayLiteral);
// Reload the elements backing store each iteration: the subexpression's
// evaluation may have had side effects.
5282 elements = AddLoadElements(literal);
5284 HValue* key = Add<HConstant>(i);
// Only fast elements kinds appear here; the store kind matches the
// boilerplate so no transition is required.
5286 switch (boilerplate_elements_kind) {
5287 case FAST_SMI_ELEMENTS:
5288 case FAST_HOLEY_SMI_ELEMENTS:
5290 case FAST_HOLEY_ELEMENTS:
5291 case FAST_DOUBLE_ELEMENTS:
5292 case FAST_HOLEY_DOUBLE_ELEMENTS: {
5293 HStoreKeyed* instr = Add<HStoreKeyed>(elements, key, value,
5294 boilerplate_elements_kind);
5295 instr->SetUninitialized(uninitialized);
5303 Add<HSimulate>(expr->GetIdForElement(i));
5306 Drop(1); // array literal index
5307 return ast_context()->ReturnValue(Pop());
// Emits a heap-object (non-smi) check on |object| followed by a map check,
// returning the HCheckMaps instruction so callers can use the checked value.
5311 HCheckMaps* HOptimizedGraphBuilder::AddCheckMap(HValue* object,
5313 BuildCheckHeapObject(object);
5314 return Add<HCheckMaps>(object, map);
// Emits the load of the named field described by |info| from
// |checked_object|. Constant-folds the load when the receiver is a constant
// JSObject and the property is read-only, non-deletable and cacheable;
// unwraps the HeapNumber box when the field has double representation.
5318 HInstruction* HOptimizedGraphBuilder::BuildLoadNamedField(
5319 PropertyAccessInfo* info,
5320 HValue* checked_object) {
5321 // See if this is a load for an immutable property
5322 if (checked_object->ActualValue()->IsConstant() &&
5323 info->lookup()->IsCacheable() &&
5324 info->lookup()->IsReadOnly() && info->lookup()->IsDontDelete()) {
5325 Handle<Object> object(
5326 HConstant::cast(checked_object->ActualValue())->handle(isolate()));
5328 if (object->IsJSObject()) {
5329 LookupResult lookup(isolate());
5330 Handle<JSObject>::cast(object)->Lookup(info->name(), &lookup);
5331 Handle<Object> value(lookup.GetLazyValue(), isolate());
// The hole means the value is not available for lazy lookup; fall through
// to an ordinary field load in that case.
5333 if (!value->IsTheHole()) {
5334 return New<HConstant>(value);
5339 HObjectAccess access = info->access();
5340 if (access.representation().IsDouble()) {
5341 // Load the heap number.
5342 checked_object = Add<HLoadNamedField>(
5343 checked_object, static_cast<HValue*>(NULL),
5344 access.WithRepresentation(Representation::Tagged()));
5345 checked_object->set_type(HType::HeapNumber());
5346 // Load the double value from it.
5347 access = HObjectAccess::ForHeapNumberValue();
5349 return New<HLoadNamedField>(
5350 checked_object, checked_object, access, info->field_maps(), top_info());
// Emits the store of the named field described by |info|. Double fields
// either allocate a fresh mutable HeapNumber box (on a transitioning store)
// or write into the existing box; other fields store directly, with
// field-map checks for heap-object representations and a map transition
// attached when the store adds the field.
5354 HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField(
5355 PropertyAccessInfo* info,
5356 HValue* checked_object,
5358 bool transition_to_field = info->lookup()->IsTransition();
5359 // TODO(verwaest): Move this logic into PropertyAccessInfo.
5360 HObjectAccess field_access = info->access();
5362 HStoreNamedField *instr;
5363 if (field_access.representation().IsDouble()) {
5364 HObjectAccess heap_number_access =
5365 field_access.WithRepresentation(Representation::Tagged());
5366 if (transition_to_field) {
5367 // The store requires a mutable HeapNumber to be allocated.
5368 NoObservableSideEffectsScope no_side_effects(this);
5369 HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);
// With allocation-site pretenuring active, always allocate the box in
// new space; otherwise follow the heap's current pretenure mode.
5371 PretenureFlag pretenure_flag = !FLAG_allocation_site_pretenuring ?
5372 isolate()->heap()->GetPretenureMode() : NOT_TENURED;
5374 HInstruction* heap_number = Add<HAllocate>(heap_number_size,
5375 HType::HeapNumber(),
5378 AddStoreMapConstant(heap_number, isolate()->factory()->heap_number_map());
5379 Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
5381 instr = New<HStoreNamedField>(checked_object->ActualValue(),
5385 // Already holds a HeapNumber; load the box and write its value field.
5386 HInstruction* heap_number = Add<HLoadNamedField>(
5387 checked_object, static_cast<HValue*>(NULL), heap_number_access);
5388 heap_number->set_type(HType::HeapNumber());
5389 instr = New<HStoreNamedField>(heap_number,
5390 HObjectAccess::ForHeapNumberValue(),
5391 value, STORE_TO_INITIALIZED_ENTRY);
// Non-double field: check the stored value against the collected field
// maps when the field's representation is a heap object.
5394 if (!info->field_maps()->is_empty()) {
5395 ASSERT(field_access.representation().IsHeapObject());
5396 BuildCheckHeapObject(value);
5397 value = Add<HCheckMaps>(value, info->field_maps());
5399 // TODO(bmeurer): This is a dirty hack to avoid repeating the smi check
5400 // that was already performed by the HCheckHeapObject above in the
5401 // HStoreNamedField below. We should really do this right instead and
5402 // make Crankshaft aware of Representation::HeapObject().
5403 field_access = field_access.WithRepresentation(Representation::Tagged());
5406 // This is a normal store.
5407 instr = New<HStoreNamedField>(
5408 checked_object->ActualValue(), field_access, value,
5409 transition_to_field ? INITIALIZING_STORE : STORE_TO_INITIALIZED_ENTRY);
5412 if (transition_to_field) {
5413 Handle<Map> transition(info->transition());
5414 ASSERT(!transition->is_deprecated());
5415 instr->SetTransition(Add<HConstant>(transition));
// Returns true if |info| describes an access that can share a polymorphic
// case with this one: same behavior w.r.t. primitive value wrappers, same
// kind of lookup result (callback / constant / field), and for fields a
// compatible representation, identical offset and in-objectness, and
// mergeable field maps. On success, |info|'s representation and field maps
// are generalized in place to cover both accesses.
5421 bool HOptimizedGraphBuilder::PropertyAccessInfo::IsCompatible(
5422 PropertyAccessInfo* info) {
5423 if (!CanInlinePropertyAccess(type_)) return false;
5425 // Currently only handle Type::Number as a polymorphic case.
5426 // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
5428 if (type_->Is(Type::Number())) return false;
5430 // Values are only compatible for monomorphic load if they all behave the same
5431 // regarding value wrappers.
5432 if (type_->Is(Type::NumberOrString())) {
5433 if (!info->type_->Is(Type::NumberOrString())) return false;
5435 if (info->type_->Is(Type::NumberOrString())) return false;
5438 if (!LookupDescriptor()) return false;
// Neither found the property: compatible only if the other also missed (or
// found it on a holder) and both prototypes agree.
5440 if (!lookup_.IsFound()) {
5441 return (!info->lookup_.IsFound() || info->has_holder()) &&
5442 map()->prototype() == info->map()->prototype();
5445 // Mismatch if the other access info found the property in the prototype
5447 if (info->has_holder()) return false;
5449 if (lookup_.IsPropertyCallbacks()) {
5450 return accessor_.is_identical_to(info->accessor_) &&
5451 api_holder_.is_identical_to(info->api_holder_);
5454 if (lookup_.IsConstant()) {
5455 return constant_.is_identical_to(info->constant_);
5458 ASSERT(lookup_.IsField());
5459 if (!info->lookup_.IsField()) return false;
5461 Representation r = access_.representation();
5463 if (!info->access_.representation().IsCompatibleForLoad(r)) return false;
5465 if (!info->access_.representation().IsCompatibleForStore(r)) return false;
5467 if (info->access_.offset() != access_.offset()) return false;
5468 if (info->access_.IsInobject() != access_.IsInobject()) return false;
// Merge field maps: an empty set on either side means "unchecked", which
// forces the merged result to be unchecked as well.
5470 if (field_maps_.is_empty()) {
5471 info->field_maps_.Clear();
5472 } else if (!info->field_maps_.is_empty()) {
5473 for (int i = 0; i < field_maps_.length(); ++i) {
5474 info->field_maps_.AddMapIfMissing(field_maps_.at(i), info->zone());
5476 info->field_maps_.Sort();
5479 // We can only merge stores that agree on their field maps. The comparison
5480 // below is safe, since we keep the field maps sorted.
5481 if (field_maps_.length() != info->field_maps_.length()) return false;
5482 for (int i = 0; i < field_maps_.length(); ++i) {
5483 if (!field_maps_.at(i).is_identical_to(info->field_maps_.at(i))) {
5488 info->GeneralizeRepresentation(r);
// Looks up |name_| in the receiver map's own descriptors (class types only;
// other types trivially succeed) and records the result via LoadResult.
5493 bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupDescriptor() {
5494 if (!type_->IsClass()) return true;
5495 map()->LookupDescriptor(NULL, *name_, &lookup_);
5496 return LoadResult(map());
// Records the outcome of a descriptor lookup on |map|: computes the field
// access (and field maps) for field properties, resolves the getter/setter
// JSFunction for accessor pairs (including simple-API-call holder lookup),
// or captures the constant for constant properties. Returns false when the
// property cannot be handled (e.g. a store to a read-only or uncacheable
// property, a non-AccessorPair callback, or a non-JSFunction accessor).
5500 bool HOptimizedGraphBuilder::PropertyAccessInfo::LoadResult(Handle<Map> map) {
5501 if (!IsLoad() && lookup_.IsProperty() &&
5502 (lookup_.IsReadOnly() || !lookup_.IsCacheable())) {
5506 if (lookup_.IsField()) {
5507 // Construct the object field access.
5508 access_ = HObjectAccess::ForField(map, &lookup_, name_);
5510 // Load field map for heap objects.
5512 } else if (lookup_.IsPropertyCallbacks()) {
5513 Handle<Object> callback(lookup_.GetValueFromMap(*map), isolate());
5514 if (!callback->IsAccessorPair()) return false;
// Pick the getter for loads, the setter for stores.
5515 Object* raw_accessor = IsLoad()
5516 ? Handle<AccessorPair>::cast(callback)->getter()
5517 : Handle<AccessorPair>::cast(callback)->setter();
5518 if (!raw_accessor->IsJSFunction()) return false;
5519 Handle<JSFunction> accessor = handle(JSFunction::cast(raw_accessor));
5520 if (accessor->shared()->IsApiFunction()) {
5521 CallOptimization call_optimization(accessor);
5522 if (call_optimization.is_simple_api_call()) {
5523 CallOptimization::HolderLookup holder_lookup;
5524 Handle<Map> receiver_map = this->map();
5525 api_holder_ = call_optimization.LookupHolderOfExpectedType(
5526 receiver_map, &holder_lookup);
5529 accessor_ = accessor;
5530 } else if (lookup_.IsConstant()) {
5531 constant_ = handle(lookup_.GetConstantFromMap(*map), isolate());
// Collects the stable class maps from the field type of the looked-up field
// into field_maps_. Gives up (clears the list) if any class map is not
// stable, since unstable maps cannot be relied upon. Registers a
// compilation dependency on the map that introduced the field so the code
// deoptimizes when the field type changes.
5538 void HOptimizedGraphBuilder::PropertyAccessInfo::LoadFieldMaps(
5540 // Clear any previously collected field maps.
5541 field_maps_.Clear();
5543 // Figure out the field type from the accessor map.
5544 Handle<HeapType> field_type(lookup_.GetFieldTypeFromMap(*map), isolate());
5546 // Collect the (stable) maps from the field type.
5547 int num_field_maps = field_type->NumClasses();
5548 if (num_field_maps == 0) return;
5549 ASSERT(access_.representation().IsHeapObject());
5550 field_maps_.Reserve(num_field_maps, zone());
5551 HeapType::Iterator<Map> it = field_type->Classes();
5552 while (!it.Done()) {
5553 Handle<Map> field_map = it.Current();
// A single unstable map invalidates the whole check; bail out entirely.
5554 if (!field_map->is_stable()) {
5555 field_maps_.Clear();
5558 field_maps_.Add(field_map, zone());
5562 ASSERT_EQ(num_field_maps, field_maps_.length());
5564 // Add dependency on the map that introduced the field.
5565 Map::AddDependentCompilationInfo(
5566 handle(lookup_.GetFieldOwnerFromMap(*map), isolate()),
5567 DependentCode::kFieldTypeGroup, top_info());
// Walks the JSObject prototype chain looking for |name_|. Attempts to
// migrate deprecated holder maps along the way and gives up if any
// prototype's map cannot be handled inline. On a hit, records the holder
// and delegates the rest to LoadResult(map).
5571 bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupInPrototypes() {
5572 Handle<Map> map = this->map();
5574 while (map->prototype()->IsJSObject()) {
5575 holder_ = handle(JSObject::cast(map->prototype()));
5576 if (holder_->map()->is_deprecated()) {
5577 JSObject::TryMigrateInstance(holder_);
// Re-read the map after the (possible) migration above.
5579 map = Handle<Map>(holder_->map());
5580 if (!CanInlinePropertyAccess(ToType(map))) {
5584 map->LookupDescriptor(*holder_, *name_, &lookup_);
5585 if (lookup_.IsFound()) return LoadResult(map);
// Returns whether this property access can be compiled monomorphically:
// the receiver type must be inlineable and the property reachable via an
// own or prototype-chain descriptor (loads), a writable cacheable property
// (stores), or a field-adding map transition with spare property slots.
5592 bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessMonomorphic() {
5593 if (IsSIMD128PropertyCallback() &&
5594 CpuFeatures::SupportsSIMD128InCrankshaft()) {
5597 if (!CanInlinePropertyAccess(type_)) return false;
// Internal JSObject fields (e.g. via GetJSObjectFieldAccess) are only
// supported for loads.
5598 if (IsJSObjectFieldAccessor()) return IsLoad();
5599 if (!LookupDescriptor()) return false;
5600 if (lookup_.IsFound()) {
5601 if (IsLoad()) return true;
5602 return !lookup_.IsReadOnly() && lookup_.IsCacheable();
5604 if (!LookupInPrototypes()) return false;
5605 if (IsLoad()) return true;
5607 if (lookup_.IsPropertyCallbacks()) return true;
// Store to a not-yet-present property: look for a map transition that adds
// the field and still has room for it.
5608 Handle<Map> map = this->map();
5609 map->LookupTransition(NULL, *name_, &lookup_);
5610 if (lookup_.IsTransitionToField() && map->unused_property_fields() > 0) {
5611 // Construct the object field access.
5612 access_ = HObjectAccess::ForField(map, &lookup_, name_);
5614 // Load field map for heap objects.
5615 LoadFieldMaps(transition());
// Returns whether every map in |types| behaves identically to this access,
// so a polymorphic site can be compiled as if it were monomorphic: all
// types must agree on JSObject internal-field accesses and be pairwise
// compatible (see IsCompatible). Number is currently always treated as a
// genuinely polymorphic case.
5622 bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessAsMonomorphic(
5623 SmallMapList* types) {
5624 ASSERT(type_->Is(ToType(types->first())));
5625 if (!CanAccessMonomorphic()) return false;
5626 STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism);
5627 if (types->length() > kMaxLoadPolymorphism) return false;
5629 if (IsSIMD128PropertyCallback() &&
5630 CpuFeatures::SupportsSIMD128InCrankshaft()) {
5631 for (int i = 1; i < types->length(); ++i) {
5632 if (types->at(i)->instance_type() == types->first()->instance_type()) {
// Internal JSObject field access: every type must resolve to exactly the
// same field access.
5639 HObjectAccess access = HObjectAccess::ForMap(); // bogus default
5640 if (GetJSObjectFieldAccess(&access)) {
5641 for (int i = 1; i < types->length(); ++i) {
5642 PropertyAccessInfo test_info(
5643 builder_, access_type_, ToType(types->at(i)), name_);
5644 HObjectAccess test_access = HObjectAccess::ForMap(); // bogus default
5645 if (!test_info.GetJSObjectFieldAccess(&test_access)) return false;
5646 if (!access.Equals(test_access)) return false;
5651 // Currently only handle Type::Number as a polymorphic case.
5652 // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
5654 if (type_->Is(Type::Number())) return false;
5656 // Multiple maps cannot transition to the same target map.
5657 ASSERT(!IsLoad() || !lookup_.IsTransition());
5658 if (lookup_.IsTransition() && types->length() > 1) return false;
5660 for (int i = 1; i < types->length(); ++i) {
5661 PropertyAccessInfo test_info(
5662 builder_, access_type_, ToType(types->at(i)), name_);
5663 if (!test_info.IsCompatible(this)) return false;
// A sloppy-mode, non-native accessor invoked on a primitive number/string
// receiver needs that receiver wrapped in its value object before the call.
5670 static bool NeedsWrappingFor(Type* type, Handle<JSFunction> target) {
5671 return type->Is(Type::NumberOrString()) &&
5672 target->shared()->strict_mode() == SLOPPY &&
5673 !target->shared()->native();
// Decodes a SIMD lane-shuffle property name into a lane mask packed two
// bits per character position (the |shift << 2*i| below), writing the
// result through |mask|.
// NOTE(review): presumably returns whether |name| is a valid shuffle
// property (e.g. composed of lane letters) -- confirm against full source.
5677 static bool IsSIMDProperty(Handle<String> name, uint8_t* mask) {
5678 SmartArrayPointer<char> cstring = name->ToCString();
5682 switch (cstring[i]) {
5694 *mask |= (shift << 2*i);
// Emits a monomorphic load or store described by |info|:
//  - internal JSObject field accesses become a direct HLoadNamedField,
//  - accesses through a holder first map-check the prototype chain,
//  - a property not found at all loads undefined,
//  - field accesses go through BuildLoad/StoreNamedField (with a constant
//    shuffle-mask result for SIMD class properties on SIMD-capable CPUs),
//  - accessor properties are wrapped-and-called for primitive receivers,
//    inlined when allowed, or called as constant functions,
//  - constant properties fold to the constant (loads) or a value check.
// Returns NULL when an accessor was fully inlined or on stack overflow.
5702 HInstruction* HOptimizedGraphBuilder::BuildMonomorphicAccess(
5703 PropertyAccessInfo* info,
5705 HValue* checked_object,
5708 BailoutId return_id,
5709 bool can_inline_accessor) {
5711 HObjectAccess access = HObjectAccess::ForMap(); // bogus default
5712 if (info->GetJSObjectFieldAccess(&access)) {
5713 ASSERT(info->IsLoad());
5714 return New<HLoadNamedField>(object, checked_object, access);
5717 HValue* checked_holder = checked_object;
5718 if (info->has_holder()) {
5719 Handle<JSObject> prototype(JSObject::cast(info->map()->prototype()));
5720 checked_holder = BuildCheckPrototypeMaps(prototype, info->holder());
5723 if (!info->lookup()->IsFound()) {
5724 ASSERT(info->IsLoad());
5725 return graph()->GetConstantUndefined();
5728 if (info->lookup()->IsField()) {
5729 if (info->IsLoad()) {
// Special-case loads of shuffle properties on the SIMD class: fold the
// lane mask to a constant when the CPU supports SIMD128 in Crankshaft.
5730 if (info->map()->constructor()->IsJSFunction()) {
5731 JSFunction* constructor = JSFunction::cast(info->map()->constructor());
5732 String* class_name =
5733 String::cast(constructor->shared()->instance_class_name());
5735 if (class_name->Equals(isolate()->heap()->simd()) &&
5736 IsSIMDProperty(info->name(), &mask) &&
5737 CpuFeatures::SupportsSIMD128InCrankshaft()) {
5738 return New<HConstant>(mask);
5741 return BuildLoadNamedField(info, checked_holder);
5743 return BuildStoreNamedField(info, checked_object, value);
5747 if (info->lookup()->IsTransition()) {
5748 ASSERT(!info->IsLoad());
5749 return BuildStoreNamedField(info, checked_object, value);
5752 if (info->lookup()->IsPropertyCallbacks()) {
// The receiver (and for stores, the value) are passed as call arguments.
5753 Push(checked_object);
5754 int argument_count = 1;
5755 if (!info->IsLoad()) {
5760 if (NeedsWrappingFor(info->type(), info->accessor())) {
5761 HValue* function = Add<HConstant>(info->accessor());
5762 PushArgumentsFromEnvironment(argument_count);
5763 return New<HCallFunction>(function, argument_count, WRAP_AND_CALL);
5764 } else if (FLAG_inline_accessors && can_inline_accessor) {
5765 bool success = info->IsLoad()
5766 ? TryInlineGetter(info->accessor(), info->map(), ast_id, return_id)
5768 info->accessor(), info->map(), ast_id, return_id, value);
// NULL signals to the caller that the access was inlined (or that we ran
// out of stack while trying).
5769 if (success || HasStackOverflow()) return NULL;
5772 PushArgumentsFromEnvironment(argument_count);
5773 return BuildCallConstantFunction(info->accessor(), argument_count);
5776 ASSERT(info->lookup()->IsConstant());
5777 if (info->IsLoad()) {
5778 return New<HConstant>(info->constant());
// Constant-property store: just verify the stored value is that function.
5780 return New<HCheckValue>(value, Handle<JSFunction>::cast(info->constant()));
// Compiles a polymorphic named load/store as a chain of type checks, one
// per handled receiver type, each branch performing the corresponding
// monomorphic access and then jumping to a common join block. Smis and
// heap numbers share a dedicated number block behind an up-front smi
// check; string maps are handled at most once via HIsStringAndBranch.
// If every recorded map was handled, the fall-through unconditionally
// deoptimizes; otherwise it falls back to a generic IC.
5785 void HOptimizedGraphBuilder::HandlePolymorphicNamedFieldAccess(
5786 PropertyAccessType access_type,
5788 BailoutId return_id,
5791 SmallMapList* types,
5792 Handle<String> name) {
5793 // Something did not match; must use a polymorphic load.
5795 HBasicBlock* join = NULL;
5796 HBasicBlock* number_block = NULL;
5797 bool handled_string = false;
5799 bool handle_smi = false;
5800 STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism);
// First pass: count the handleable cases and detect whether a Number type
// (and therefore a smi path) is among them.
5801 for (int i = 0; i < types->length() && count < kMaxLoadPolymorphism; ++i) {
5802 PropertyAccessInfo info(this, access_type, ToType(types->at(i)), name);
5803 if (info.type()->Is(Type::String())) {
5804 if (handled_string) continue;
5805 handled_string = true;
5807 if (info.CanAccessMonomorphic()) {
5809 if (info.type()->Is(Type::Number())) {
5817 HControlInstruction* smi_check = NULL;
5818 handled_string = false;
// Second pass: emit one compare-and-branch per handleable type.
5820 for (int i = 0; i < types->length() && count < kMaxLoadPolymorphism; ++i) {
5821 PropertyAccessInfo info(this, access_type, ToType(types->at(i)), name);
5822 if (info.type()->Is(Type::String())) {
5823 if (handled_string) continue;
5824 handled_string = true;
5826 if (!info.CanAccessMonomorphic()) continue;
5829 join = graph()->CreateBasicBlock();
// Route smis straight to the shared number block; everything else first
// proves it is a heap object.
5831 HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
5832 HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
5833 number_block = graph()->CreateBasicBlock();
5834 smi_check = New<HIsSmiAndBranch>(
5835 object, empty_smi_block, not_smi_block);
5836 FinishCurrentBlock(smi_check);
5837 GotoNoSimulate(empty_smi_block, number_block);
5838 set_current_block(not_smi_block);
5840 BuildCheckHeapObject(object);
5844 HBasicBlock* if_true = graph()->CreateBasicBlock();
5845 HBasicBlock* if_false = graph()->CreateBasicBlock();
5846 HUnaryControlInstruction* compare;
5849 if (info.type()->Is(Type::Number())) {
5850 Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
5851 compare = New<HCompareMap>(object, heap_number_map, if_true, if_false);
5852 dependency = smi_check;
5853 } else if (info.type()->Is(Type::String())) {
5854 compare = New<HIsStringAndBranch>(object, if_true, if_false);
5855 dependency = compare;
5857 compare = New<HCompareMap>(object, info.map(), if_true, if_false);
5858 dependency = compare;
5860 FinishCurrentBlock(compare);
5862 if (info.type()->Is(Type::Number())) {
5863 GotoNoSimulate(if_true, number_block);
5864 if_true = number_block;
5867 set_current_block(if_true);
5869 HInstruction* access = BuildMonomorphicAccess(
5870 &info, object, dependency, value, ast_id,
5871 return_id, FLAG_polymorphic_inlining);
5873 HValue* result = NULL;
5874 switch (access_type) {
// A NULL access means the accessor was inlined (or we overflowed the stack
// while inlining); the inlined code already produced the result.
5883 if (access == NULL) {
5884 if (HasStackOverflow()) return;
5886 if (!access->IsLinked()) AddInstruction(access);
5887 if (!ast_context()->IsEffect()) Push(result);
5890 if (current_block() != NULL) Goto(join);
5891 set_current_block(if_false);
5894 // Finish up. Unconditionally deoptimize if we've handled all the maps we
5895 // know about and do not want to handle ones we've never seen. Otherwise
5896 // use a generic IC.
5897 if (count == types->length() && FLAG_deoptimize_uncommon_cases) {
// NOTE(review): "Uknown" below is a typo for "Unknown"; the string is the
// emitted deopt reason, so it is left unchanged in this comment-only pass.
5898 FinishExitWithHardDeoptimization("Uknown map in polymorphic access");
5900 HInstruction* instr = BuildNamedGeneric(access_type, object, name, value);
5901 AddInstruction(instr);
5902 if (!ast_context()->IsEffect()) Push(access_type == LOAD ? instr : value);
5907 Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
5908 if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
5913 ASSERT(join != NULL);
5914 if (join->HasPredecessor()) {
5915 join->SetJoinId(ast_id);
5916 set_current_block(join);
5917 if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
// All branches deoptimized or returned: the join is dead.
5919 set_current_block(NULL);
// Determines whether |expr|'s recorded receiver types collapse to a single
// map that permits inlined property access. When the receiver has a known
// monomorphic JSObject map, the recorded types are filtered down to those
// reachable from that map's root map before re-checking monomorphism.
5924 static bool ComputeReceiverTypes(Expression* expr,
5928 SmallMapList* types = expr->GetReceiverTypes();
5930 bool monomorphic = expr->IsMonomorphic();
5931 if (types != NULL && receiver->HasMonomorphicJSObjectType()) {
5932 Map* root_map = receiver->GetMonomorphicJSObjectMap()->FindRootMap();
5933 types->FilterForPossibleTransitions(root_map);
5934 monomorphic = types->length() == 1;
5936 return monomorphic && CanInlinePropertyAccess(
5937 IC::MapToType<Type>(types->first(), zone));
// True if every map in |types| is a string map (instance type below
// FIRST_NONSTRING_TYPE).
5941 static bool AreStringTypes(SmallMapList* types) {
5942 for (int i = 0; i < types->length(); i++) {
5943 if (types->at(i)->instance_type() >= FIRST_NONSTRING_TYPE) return false;
// True if |types| is non-NULL, non-empty, and contains only Int32x4 maps.
5949 static bool AreInt32x4Types(SmallMapList* types) {
5950 if (types == NULL || types->length() == 0) return false;
5951 for (int i = 0; i < types->length(); i++) {
5952 if (types->at(i)->instance_type() != INT32x4_TYPE) return false;
// True if |types| is non-NULL, non-empty, and contains only Float32x4 maps.
5958 static bool AreFloat32x4Types(SmallMapList* types) {
5959 if (types == NULL || types->length() == 0) return false;
5960 for (int i = 0; i < types->length(); i++) {
5961 if (types->at(i)->instance_type() != FLOAT32x4_TYPE) return false;
// True if |types| is non-NULL, non-empty, and contains only Float64x2 maps.
5967 static bool AreFloat64x2Types(SmallMapList* types) {
5968 if (types == NULL || types->length() == 0) return false;
5969 for (int i = 0; i < types->length(); i++) {
5970 if (types->at(i)->instance_type() != FLOAT64x2_TYPE) return false;
// Maps a SIMD getter property name (signMask, x/y/z/w, flagX..flagW) plus
// the receiver's SIMD instance type (Float32x4/Float64x2/Int32x4) to the
// corresponding builtin function id. Unrecognized names fall through to
// kSIMD128Unreachable.
5976 static BuiltinFunctionId NameToId(Isolate* isolate, Handle<String> name,
5977 InstanceType type) {
5978 BuiltinFunctionId id;
5979 if (name->Equals(isolate->heap()->signMask())) {
5980 if (type == FLOAT32x4_TYPE) {
5981 id = kFloat32x4GetSignMask;
5982 } else if (type == FLOAT64x2_TYPE) {
5983 id = kFloat64x2GetSignMask;
5985 ASSERT(type == INT32x4_TYPE);
5986 id = kInt32x4GetSignMask;
5988 } else if (name->Equals(isolate->heap()->x())) {
5989 if (type == FLOAT32x4_TYPE) {
5990 id = kFloat32x4GetX;
5991 } else if (type == FLOAT64x2_TYPE) {
5992 id = kFloat64x2GetX;
5994 ASSERT(type == INT32x4_TYPE);
5997 } else if (name->Equals(isolate->heap()->y())) {
5998 if (type == FLOAT32x4_TYPE) {
5999 id = kFloat32x4GetY;
6000 } else if (type == FLOAT64x2_TYPE) {
6001 id = kFloat64x2GetY;
6003 ASSERT(type == INT32x4_TYPE);
// z/w lanes only exist on 4-lane types, hence the two-way ternaries below.
6006 } else if (name->Equals(isolate->heap()->z())) {
6007 id = type == FLOAT32x4_TYPE ? kFloat32x4GetZ : kInt32x4GetZ;
6008 } else if (name->Equals(isolate->heap()->w())) {
6009 id = type == FLOAT32x4_TYPE ? kFloat32x4GetW : kInt32x4GetW;
6010 } else if (name->Equals(isolate->heap()->flagX())) {
6011 ASSERT(type == INT32x4_TYPE);
6012 id = kInt32x4GetFlagX;
6013 } else if (name->Equals(isolate->heap()->flagY())) {
6014 ASSERT(type == INT32x4_TYPE);
6015 id = kInt32x4GetFlagY;
6016 } else if (name->Equals(isolate->heap()->flagZ())) {
6017 ASSERT(type == INT32x4_TYPE);
6018 id = kInt32x4GetFlagZ;
6019 } else if (name->Equals(isolate->heap()->flagW())) {
6020 ASSERT(type == INT32x4_TYPE);
6021 id = kInt32x4GetFlagW;
6024 id = kSIMD128Unreachable;
// Emits the store for a property assignment whose operands have already
// been pushed on the expression stack. Keyed stores (non-property-name
// keys) go through HandleKeyedElementAccess with value/key/object read off
// the environment stack; named stores go through BuildNamedAccess. Both
// paths simulate after observable side effects and return the stored value
// to the ast context.
6031 void HOptimizedGraphBuilder::BuildStore(Expression* expr,
6034 BailoutId return_id,
6035 bool is_uninitialized) {
6036 if (!prop->key()->IsPropertyName()) {
// Keyed store: operands stay on the stack so they are in the environment
// for the bailout that HandleKeyedElementAccess may need.
6038 HValue* value = environment()->ExpressionStackAt(0);
6039 HValue* key = environment()->ExpressionStackAt(1);
6040 HValue* object = environment()->ExpressionStackAt(2);
6041 bool has_side_effects = false;
6042 HandleKeyedElementAccess(object, key, value, expr,
6043 STORE, &has_side_effects);
6046 Add<HSimulate>(return_id, REMOVABLE_SIMULATE);
6047 return ast_context()->ReturnValue(Pop());
6051 HValue* value = Pop();
6052 HValue* object = Pop();
6054 Literal* key = prop->key()->AsLiteral();
6055 Handle<String> name = Handle<String>::cast(key->value());
6056 ASSERT(!name.is_null());
6058 HInstruction* instr = BuildNamedAccess(STORE, ast_id, return_id, expr,
6059 object, name, value, is_uninitialized);
// NULL means BuildNamedAccess already completed the store (e.g. inlined).
6060 if (instr == NULL) return;
// Keep the value live across the simulate when the context needs it.
6062 if (!ast_context()->IsEffect()) Push(value);
6063 AddInstruction(instr);
6064 if (instr->HasObservableSideEffects()) {
6065 Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
6067 if (!ast_context()->IsEffect()) Drop(1);
6068 return ast_context()->ReturnValue(value);
// Compiles a plain property assignment: evaluates the receiver, the key
// (for keyed stores only), and the RHS value onto the expression stack,
// then delegates the actual store to BuildStore.
6072 void HOptimizedGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
6073 Property* prop = expr->target()->AsProperty();
6074 ASSERT(prop != NULL);
6075 CHECK_ALIVE(VisitForValue(prop->obj()));
6076 if (!prop->key()->IsPropertyName()) {
6077 CHECK_ALIVE(VisitForValue(prop->key()));
6079 CHECK_ALIVE(VisitForValue(expr->value()));
6080 BuildStore(expr, prop, expr->id(),
6081 expr->AssignmentId(), expr->IsUninitialized());
6085 // Because not every expression has a position and there is no common
6086 // superclass of Assignment and CountOperation, we cannot just pass the
6087 // owning expression instead of position and ast_id separately.
// Stores |value| into the global variable |var|. When the global is backed
// by a property cell whose type is a known constant, assigning a different
// constant deoptimizes eagerly, and assigning a non-constant emits a
// runtime equality check that deoptimizes on mismatch; the store then goes
// through the cell. Without a usable cell, falls back to a generic named
// store on the global object loaded from the context.
6088 void HOptimizedGraphBuilder::HandleGlobalVariableAssignment(
6092 LookupResult lookup(isolate());
6093 GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, STORE);
6094 if (type == kUseCell) {
6095 Handle<GlobalObject> global(current_info()->global_object());
6096 Handle<PropertyCell> cell(global->GetPropertyCell(&lookup));
6097 if (cell->type()->IsConstant()) {
6098 Handle<Object> constant = cell->type()->AsConstant()->Value();
6099 if (value->IsConstant()) {
6100 HConstant* c_value = HConstant::cast(value);
6101 if (!constant.is_identical_to(c_value->handle(isolate()))) {
6102 Add<HDeoptimize>("Constant global variable assignment",
6103 Deoptimizer::EAGER);
// Non-constant value: guard the cell's constant type with a runtime
// compare (numeric equality for numbers, identity otherwise).
6106 HValue* c_constant = Add<HConstant>(constant);
6107 IfBuilder builder(this);
6108 if (constant->IsNumber()) {
6109 builder.If<HCompareNumericAndBranch>(value, c_constant, Token::EQ);
6111 builder.If<HCompareObjectEqAndBranch>(value, c_constant);
6115 Add<HDeoptimize>("Constant global variable assignment",
6116 Deoptimizer::EAGER);
6120 HInstruction* instr =
6121 Add<HStoreGlobalCell>(value, cell, lookup.GetPropertyDetails());
6122 if (instr->HasObservableSideEffects()) {
6123 Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
// No cell available: generic store through the context's global object.
6126 HValue* global_object = Add<HLoadNamedField>(
6127 context(), static_cast<HValue*>(NULL),
6128 HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
6129 HStoreNamedGeneric* instr =
6130 Add<HStoreNamedGeneric>(global_object, var->name(),
6131 value, function_strict_mode());
6133 ASSERT(instr->HasObservableSideEffects());
6134 Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
// Compiles a compound assignment (x op= y). For variable targets, the
// binary operation is evaluated and the result stored back according to
// the variable's location (global cell, parameter/local slot, context
// slot -- with a bailout for parameters captured by the arguments object);
// LET and legacy-const compound assignments bail out. For property
// targets, the old value is loaded, the binary operation applied, and the
// store delegated to BuildStore. Any other LHS is invalid.
6139 void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
6140 Expression* target = expr->target();
6141 VariableProxy* proxy = target->AsVariableProxy();
6142 Property* prop = target->AsProperty();
6143 ASSERT(proxy == NULL || prop == NULL);
6145 // We have a second position recorded in the FullCodeGenerator to have
6146 // type feedback for the binary operation.
6147 BinaryOperation* operation = expr->binary_operation();
6149 if (proxy != NULL) {
6150 Variable* var = proxy->var();
6151 if (var->mode() == LET) {
6152 return Bailout(kUnsupportedLetCompoundAssignment);
// Evaluating the BinaryOperation leaves the combined value on the stack.
6155 CHECK_ALIVE(VisitForValue(operation));
6157 switch (var->location()) {
6158 case Variable::UNALLOCATED:
6159 HandleGlobalVariableAssignment(var,
6161 expr->AssignmentId());
6164 case Variable::PARAMETER:
6165 case Variable::LOCAL:
6166 if (var->mode() == CONST_LEGACY) {
6167 return Bailout(kUnsupportedConstCompoundAssignment);
6169 BindIfLive(var, Top());
6172 case Variable::CONTEXT: {
6173 // Bail out if we try to mutate a parameter value in a function
6174 // using the arguments object. We do not (yet) correctly handle the
6175 // arguments property of the function.
6176 if (current_info()->scope()->arguments() != NULL) {
6177 // Parameters will be allocated to context slots. We have no
6178 // direct way to detect that the variable is a parameter so we do
6179 // a linear search of the parameter variables.
6180 int count = current_info()->scope()->num_parameters();
6181 for (int i = 0; i < count; ++i) {
6182 if (var == current_info()->scope()->parameter(i)) {
6183 Bailout(kAssignmentToParameterFunctionUsesArgumentsObject);
6188 HStoreContextSlot::Mode mode;
6190 switch (var->mode()) {
6192 mode = HStoreContextSlot::kCheckDeoptimize;
6195 // This case is checked statically so no need to
6196 // perform checks here
6199 return ast_context()->ReturnValue(Pop());
6201 mode = HStoreContextSlot::kNoCheck;
6204 HValue* context = BuildContextChainWalk(var);
6205 HStoreContextSlot* instr = Add<HStoreContextSlot>(
6206 context, var->index(), mode, Top());
6207 if (instr->HasObservableSideEffects()) {
6208 Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
6213 case Variable::LOOKUP:
6214 return Bailout(kCompoundAssignmentToLookupSlot);
6216 return ast_context()->ReturnValue(Pop());
6218 } else if (prop != NULL) {
6219 CHECK_ALIVE(VisitForValue(prop->obj()));
6220 HValue* object = Top();
6222 if ((!prop->IsFunctionPrototype() && !prop->key()->IsPropertyName()) ||
6223 prop->IsStringAccess()) {
6224 CHECK_ALIVE(VisitForValue(prop->key()));
// Load the current property value, combine it with the RHS, and push the
// result for BuildStore to consume.
6228 CHECK_ALIVE(PushLoad(prop, object, key));
6230 CHECK_ALIVE(VisitForValue(expr->value()));
6231 HValue* right = Pop();
6232 HValue* left = Pop();
6234 Push(BuildBinaryOperation(operation, left, right, PUSH_BEFORE_SIMULATE));
6236 BuildStore(expr, prop, expr->id(),
6237 expr->AssignmentId(), expr->IsUninitialized());
6239 return Bailout(kInvalidLhsInCompoundAssignment);
// Translates an AST Assignment into Hydrogen IR. Dispatches on the kind of
// left-hand side: compound assignments and property stores are delegated to
// helpers; plain variable stores are handled inline per variable mode
// (CONST/CONST_LEGACY/LET) and storage location (global, stack slot, context
// slot, dynamic lookup). Bails out of optimization for unsupported cases.
// NOTE(review): this excerpt is missing some lines (closing braces and switch
// case labels) relative to the upstream file — verify against the original.
void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  // The target is either a variable proxy or a property access, never both.
  VariableProxy* proxy = expr->target()->AsVariableProxy();
  Property* prop = expr->target()->AsProperty();
  ASSERT(proxy == NULL || prop == NULL);
  if (expr->is_compound()) {
    // Compound assignment (e.g. "x += y") has its own handler.
    HandleCompoundAssignment(expr);
    HandlePropertyAssignment(expr);
  } else if (proxy != NULL) {
    Variable* var = proxy->var();
    if (var->mode() == CONST) {
      // Only the initializing store to a (harmony) const is allowed.
      if (expr->op() != Token::INIT_CONST) {
        return Bailout(kNonInitializerAssignmentToConst);
    } else if (var->mode() == CONST_LEGACY) {
      // Legacy const: non-initializing assignments are silently ignored,
      // so just evaluate the RHS and return it as the expression value.
      if (expr->op() != Token::INIT_CONST_LEGACY) {
        CHECK_ALIVE(VisitForValue(expr->value()));
        return ast_context()->ReturnValue(Pop());
      if (var->IsStackAllocated()) {
        // We insert a use of the old value to detect unsupported uses of const
        // variables (e.g. initialization inside a loop).
        HValue* old_value = environment()->Lookup(var);
        Add<HUseConst>(old_value);
    if (proxy->IsArguments()) return Bailout(kAssignmentToArguments);
    // Handle the assignment.
    switch (var->location()) {
      case Variable::UNALLOCATED:
        // Global variable: store through the global-variable helper.
        CHECK_ALIVE(VisitForValue(expr->value()));
        HandleGlobalVariableAssignment(var,
                                       expr->AssignmentId());
        return ast_context()->ReturnValue(Pop());
      case Variable::PARAMETER:
      case Variable::LOCAL: {
        // Perform an initialization check for let declared variables
        // (the hole sentinel means the binding is not yet initialized).
        if (var->mode() == LET && expr->op() == Token::ASSIGN) {
          HValue* env_value = environment()->Lookup(var);
          if (env_value == graph()->GetConstantHole()) {
            return Bailout(kAssignmentToLetVariableBeforeInitialization);
        // We do not allow the arguments object to occur in a context where it
        // may escape, but assignments to stack-allocated locals are
        CHECK_ALIVE(VisitForValue(expr->value(), ARGUMENTS_ALLOWED));
        HValue* value = Pop();
        // Only rebind the variable if it is still live; keeps the
        // environment in sync for deoptimization.
        BindIfLive(var, value);
        return ast_context()->ReturnValue(value);
      case Variable::CONTEXT: {
        // Bail out if we try to mutate a parameter value in a function using
        // the arguments object. We do not (yet) correctly handle the
        // arguments property of the function.
        if (current_info()->scope()->arguments() != NULL) {
          // Parameters will rewrite to context slots. We have no direct way
          // to detect that the variable is a parameter.
          int count = current_info()->scope()->num_parameters();
          for (int i = 0; i < count; ++i) {
            if (var == current_info()->scope()->parameter(i)) {
              return Bailout(kAssignmentToParameterInArgumentsObject);
        CHECK_ALIVE(VisitForValue(expr->value()));
        // Select the hole-check mode for the context-slot store based on the
        // variable mode and the kind of assignment operator.
        HStoreContextSlot::Mode mode;
        if (expr->op() == Token::ASSIGN) {
          switch (var->mode()) {
              mode = HStoreContextSlot::kCheckDeoptimize;
              // This case is checked statically so no need to
              // perform checks here
              return ast_context()->ReturnValue(Pop());
              mode = HStoreContextSlot::kNoCheck;
        } else if (expr->op() == Token::INIT_VAR ||
                   expr->op() == Token::INIT_LET ||
                   expr->op() == Token::INIT_CONST) {
          mode = HStoreContextSlot::kNoCheck;
          ASSERT(expr->op() == Token::INIT_CONST_LEGACY);
          // Legacy const re-initialization must be silently ignored.
          mode = HStoreContextSlot::kCheckIgnoreAssignment;
        HValue* context = BuildContextChainWalk(var);
        HStoreContextSlot* instr = Add<HStoreContextSlot>(
            context, var->index(), mode, Top());
        if (instr->HasObservableSideEffects()) {
          // Record a lazy-deopt point after the observable store.
          Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
        return ast_context()->ReturnValue(Pop());
      case Variable::LOOKUP:
        // Dynamically-scoped lookups are not supported by Crankshaft.
        return Bailout(kAssignmentToLOOKUPVariable);
  return Bailout(kInvalidLeftHandSideInAssignment);
// Yield only occurs inside generator functions, which Crankshaft never
// optimizes, so this visitor should be unreachable.
void HOptimizedGraphBuilder::VisitYield(Yield* expr) {
  // Generators are not optimized, so we should never get here.
// Emits IR for a throw expression: evaluates the exception value, calls the
// hidden Throw runtime function, and (when not inlined) terminates the block
// with an abnormal exit since control never returns.
void HOptimizedGraphBuilder::VisitThrow(Throw* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  // We don't optimize functions with invalid left-hand sides in
  // assignments, count operations, or for-in. Consequently throw can
  // currently only occur in an effect context.
  ASSERT(ast_context()->IsEffect());
  CHECK_ALIVE(VisitForValue(expr->exception()));
  HValue* value = environment()->Pop();
  if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
  // Pass the exception as the single argument to the runtime call.
  Add<HPushArgument>(value);
  Add<HCallRuntime>(isolate()->factory()->empty_string(),
                    Runtime::FunctionForId(Runtime::kHiddenThrow), 1);
  Add<HSimulate>(expr->id());
  // If the throw definitely exits the function, we can finish with a dummy
  // control flow at this point. This is not the case if the throw is inside
  // an inlined function which may be replaced.
  if (call_context() == NULL) {
    FinishExitCurrentBlock(New<HAbnormalExit>());
// Emits a load of a string value's instance type (from its map). For a
// constant string the instance type is folded to a constant at compile time.
HInstruction* HGraphBuilder::AddLoadStringInstanceType(HValue* string) {
  if (string->IsConstant()) {
    HConstant* c_string = HConstant::cast(string);
    if (c_string->HasStringValue()) {
      // Known string: embed its map's instance type directly.
      return Add<HConstant>(c_string->StringValue()->map()->instance_type());
  // Generic path: load the map, then the instance-type field from the map.
  return Add<HLoadNamedField>(
      Add<HLoadNamedField>(string, static_cast<HValue*>(NULL),
                           HObjectAccess::ForMap()),
      static_cast<HValue*>(NULL), HObjectAccess::ForMapInstanceType());
// Emits a load of a string's length, constant-folding when the string value
// is known at compile time.
HInstruction* HGraphBuilder::AddLoadStringLength(HValue* string) {
  if (string->IsConstant()) {
    HConstant* c_string = HConstant::cast(string);
    if (c_string->HasStringValue()) {
      return Add<HConstant>(c_string->StringValue()->length());
  return Add<HLoadNamedField>(string, static_cast<HValue*>(NULL),
                              HObjectAccess::ForStringLength());
// Builds a generic (IC-based) named property access. If the access site has
// no type feedback yet, inserts a soft deopt so the function is re-optimized
// once feedback is available.
HInstruction* HOptimizedGraphBuilder::BuildNamedGeneric(
    PropertyAccessType access_type,
    Handle<String> name,
    bool is_uninitialized) {
  if (is_uninitialized) {
    Add<HDeoptimize>("Insufficient type feedback for generic named access",
  if (access_type == LOAD) {
    return New<HLoadNamedGeneric>(object, name);
  // STORE: strictness comes from the function being compiled.
  return New<HStoreNamedGeneric>(object, name, value, function_strict_mode());
// Builds a generic (IC-based) keyed load or store for element accesses that
// cannot be specialized.
HInstruction* HOptimizedGraphBuilder::BuildKeyedGeneric(
    PropertyAccessType access_type,
  if (access_type == LOAD) {
    return New<HLoadKeyedGeneric>(object, key);
  return New<HStoreKeyedGeneric>(object, key, value, function_strict_mode());
// Decides whether keyed loads from the given map may return the hole without
// a preceding hole check. This is allowed only for the initial
// FAST_HOLEY_DOUBLE_ELEMENTS JSArray map with an intact Array prototype
// chain, protected by prototype map checks and a code dependency.
LoadKeyedHoleMode HOptimizedGraphBuilder::BuildKeyedHoleMode(Handle<Map> map) {
  // Loads from a "stock" fast holey double arrays can elide the hole check.
  LoadKeyedHoleMode load_mode = NEVER_RETURN_HOLE;
  if (*map == isolate()->get_initial_js_array_map(FAST_HOLEY_DOUBLE_ELEMENTS) &&
      isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
    Handle<JSObject> prototype(JSObject::cast(map->prototype()), isolate());
    Handle<JSObject> object_prototype = isolate()->initial_object_prototype();
    // Guard against prototype-chain shape changes.
    BuildCheckPrototypeMaps(prototype, object_prototype);
    load_mode = ALLOW_RETURN_HOLE;
    // Deoptimize this code if elements are ever added to Array.prototype.
    graph()->MarkDependsOnEmptyArrayProtoElements();
// Builds a map-checked element access for a single receiver map, then
// delegates to the unchecked monomorphic access builder.
HInstruction* HOptimizedGraphBuilder::BuildMonomorphicElementAccess(
    PropertyAccessType access_type,
    KeyedAccessStoreMode store_mode) {
  HCheckMaps* checked_object = Add<HCheckMaps>(object, map, dependency);
  checked_object->ClearDependsOnFlag(kElementsKind);
  if (access_type == STORE && map->prototype()->IsJSObject()) {
    // monomorphic stores need a prototype chain check because shape
    // changes could allow callbacks on elements in the chain that
    // aren't compatible with monomorphic keyed stores.
    Handle<JSObject> prototype(JSObject::cast(map->prototype()));
    Object* holder = map->prototype();
    // Walk to the end of the prototype chain (the object before null).
    while (holder->GetPrototype(isolate())->IsJSObject()) {
      holder = holder->GetPrototype(isolate());
    ASSERT(holder->GetPrototype(isolate())->IsNull());
    BuildCheckPrototypeMaps(prototype,
                            Handle<JSObject>(JSObject::cast(holder)));
  LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
  return BuildUncheckedMonomorphicElementAccess(
      checked_object, key, val,
      map->instance_type() == JS_ARRAY_TYPE,
      map->elements_kind(), access_type,
      load_mode, store_mode);
// Attempts to consolidate a polymorphic element load into a single access
// using the most general of the receiver maps. Returns NULL when the maps
// are incompatible (mixed JSArray/JSObject, or mixed double vs. smi/object
// elements kinds).
HInstruction* HOptimizedGraphBuilder::TryBuildConsolidatedElementLoad(
    SmallMapList* maps) {
  // For polymorphic loads of similar elements kinds (i.e. all tagged or all
  // double), always use the "worst case" code without a transition. This is
  // much faster than transitioning the elements to the worst case, trading a
  // HTransitionElements for a HCheckMaps, and avoiding mutation of the array.
  bool has_double_maps = false;
  bool has_smi_or_object_maps = false;
  bool has_js_array_access = false;
  bool has_non_js_array_access = false;
  bool has_seen_holey_elements = false;
  Handle<Map> most_general_consolidated_map;
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    if (!map->IsJSObjectMap()) return NULL;
    // Don't allow mixing of JSArrays with JSObjects.
    if (map->instance_type() == JS_ARRAY_TYPE) {
      if (has_non_js_array_access) return NULL;
      has_js_array_access = true;
    } else if (has_js_array_access) {
      has_non_js_array_access = true;
    // Don't allow mixed, incompatible elements kinds.
    if (map->has_fast_double_elements()) {
      if (has_smi_or_object_maps) return NULL;
      has_double_maps = true;
    } else if (map->has_fast_smi_or_object_elements()) {
      if (has_double_maps) return NULL;
      has_smi_or_object_maps = true;
    // Remember if we've ever seen holey elements.
    if (IsHoleyElementsKind(map->elements_kind())) {
      has_seen_holey_elements = true;
    // Remember the most general elements kind, the code for its load will
    // properly handle all of the more specific cases.
    if ((i == 0) || IsMoreGeneralElementsKindTransition(
            most_general_consolidated_map->elements_kind(),
            map->elements_kind())) {
      most_general_consolidated_map = map;
  if (!has_double_maps && !has_smi_or_object_maps) return NULL;
  HCheckMaps* checked_object = Add<HCheckMaps>(object, maps);
  // FAST_ELEMENTS is considered more general than FAST_HOLEY_SMI_ELEMENTS.
  // If we've seen both, the consolidated load must use FAST_HOLEY_ELEMENTS.
  ElementsKind consolidated_elements_kind = has_seen_holey_elements
      ? GetHoleyElementsKind(most_general_consolidated_map->elements_kind())
      : most_general_consolidated_map->elements_kind();
  HInstruction* instr = BuildUncheckedMonomorphicElementAccess(
      checked_object, key, val,
      most_general_consolidated_map->instance_type() == JS_ARRAY_TYPE,
      consolidated_elements_kind,
      LOAD, NEVER_RETURN_HOLE, STANDARD_STORE);
// Builds a polymorphic keyed element access. Tries a consolidated load
// first; otherwise emits any required elements-kind transitions and then
// either a single monomorphic access (one remaining map) or a chain of
// HCompareMap-dispatched per-map accesses joined at a common block, with a
// hard deopt if no map matches. Sets *has_side_effects for the caller's
// simulate placement.
HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess(
    PropertyAccessType access_type,
    KeyedAccessStoreMode store_mode,
    bool* has_side_effects) {
  *has_side_effects = false;
  BuildCheckHeapObject(object);
  if (access_type == LOAD) {
    // Fast path: one access covering all compatible maps.
    HInstruction* consolidated_load =
        TryBuildConsolidatedElementLoad(object, key, val, maps);
    if (consolidated_load != NULL) {
      *has_side_effects |= consolidated_load->HasObservableSideEffects();
      return consolidated_load;
  // Elements_kind transition support.
  MapHandleList transition_target(maps->length());
  // Collect possible transition targets.
  MapHandleList possible_transitioned_maps(maps->length());
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    ElementsKind elements_kind = map->elements_kind();
    if (IsFastElementsKind(elements_kind) &&
        elements_kind != GetInitialFastElementsKind()) {
      possible_transitioned_maps.Add(map);
    if (elements_kind == SLOPPY_ARGUMENTS_ELEMENTS) {
      // Sloppy-arguments receivers always take the generic path.
      HInstruction* result = BuildKeyedGeneric(access_type, object, key, val);
      *has_side_effects = result->HasObservableSideEffects();
      return AddInstruction(result);
  // Get transition target for each map (NULL == no transition).
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    Handle<Map> transitioned_map =
        map->FindTransitionedMap(&possible_transitioned_maps);
    transition_target.Add(transitioned_map);
  MapHandleList untransitionable_maps(maps->length());
  HTransitionElementsKind* transition = NULL;
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    ASSERT(map->IsMap());
    if (!transition_target.at(i).is_null()) {
      ASSERT(Map::IsValidElementsTransition(
          map->elements_kind(),
          transition_target.at(i)->elements_kind()));
      // Transition the receiver up front; the map no longer needs a branch.
      transition = Add<HTransitionElementsKind>(object, map,
                                                transition_target.at(i));
      untransitionable_maps.Add(map);
  // If only one map is left after transitioning, handle this case
  ASSERT(untransitionable_maps.length() >= 1);
  if (untransitionable_maps.length() == 1) {
    Handle<Map> untransitionable_map = untransitionable_maps[0];
    HInstruction* instr = NULL;
    if (untransitionable_map->has_slow_elements_kind() ||
        !untransitionable_map->IsJSObjectMap()) {
      instr = AddInstruction(BuildKeyedGeneric(access_type, object, key, val));
      instr = BuildMonomorphicElementAccess(
          object, key, val, transition, untransitionable_map, access_type,
    *has_side_effects |= instr->HasObservableSideEffects();
    // Stores produce no usable value for the expression context.
    return access_type == STORE ? NULL : instr;
  HBasicBlock* join = graph()->CreateBasicBlock();
  for (int i = 0; i < untransitionable_maps.length(); ++i) {
    Handle<Map> map = untransitionable_maps[i];
    if (!map->IsJSObjectMap()) continue;
    ElementsKind elements_kind = map->elements_kind();
    HBasicBlock* this_map = graph()->CreateBasicBlock();
    HBasicBlock* other_map = graph()->CreateBasicBlock();
    HCompareMap* mapcompare =
        New<HCompareMap>(object, map, this_map, other_map);
    FinishCurrentBlock(mapcompare);
    set_current_block(this_map);
    HInstruction* access = NULL;
    if (IsDictionaryElementsKind(elements_kind)) {
      access = AddInstruction(BuildKeyedGeneric(access_type, object, key, val));
      ASSERT(IsFastElementsKind(elements_kind) ||
             IsExternalArrayElementsKind(elements_kind) ||
             IsFixedTypedArrayElementsKind(elements_kind));
      LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
      // Happily, mapcompare is a checked object.
      access = BuildUncheckedMonomorphicElementAccess(
          mapcompare, key, val,
          map->instance_type() == JS_ARRAY_TYPE,
          elements_kind, access_type,
    *has_side_effects |= access->HasObservableSideEffects();
    // The caller will use has_side_effects and add a correct Simulate.
    access->SetFlag(HValue::kHasNoObservableSideEffects);
    if (access_type == LOAD) {
    NoObservableSideEffectsScope scope(this);
    GotoNoSimulate(join);
    set_current_block(other_map);
  // Ensure that we visited at least one map above that goes to join. This is
  // necessary because FinishExitWithHardDeoptimization does an AbnormalExit
  // rather than joining the join block. If this becomes an issue, insert a
  // generic access in the case length() == 0.
  ASSERT(join->predecessors()->length() > 0);
  // Deopt if none of the cases matched.
  NoObservableSideEffectsScope scope(this);
  FinishExitWithHardDeoptimization("Unknown map in polymorphic element access");
  set_current_block(join);
  return access_type == STORE ? NULL : Pop();
// Top-level entry for keyed element accesses. Uses receiver-type feedback to
// pick a monomorphic, polymorphic, or generic access, forcing the generic
// path for stores whose prototype chains contain dictionary elements.
HValue* HOptimizedGraphBuilder::HandleKeyedElementAccess(
    PropertyAccessType access_type,
    bool* has_side_effects) {
  ASSERT(!expr->IsPropertyName());
  HInstruction* instr = NULL;
  SmallMapList* types;
  bool monomorphic = ComputeReceiverTypes(expr, obj, &types, zone());
  bool force_generic = false;
  if (access_type == STORE &&
      (monomorphic || (types != NULL && !types->is_empty()))) {
    // Stores can't be mono/polymorphic if their prototype chain has dictionary
    // elements. However a receiver map that has dictionary elements itself
    // should be left to normal mono/poly behavior (the other maps may benefit
    // from highly optimized stores).
    for (int i = 0; i < types->length(); i++) {
      Handle<Map> current_map = types->at(i);
      if (current_map->DictionaryElementsInPrototypeChainOnly()) {
        force_generic = true;
        monomorphic = false;
    Handle<Map> map = types->first();
    if (map->has_slow_elements_kind() || !map->IsJSObjectMap()) {
      instr = AddInstruction(BuildKeyedGeneric(access_type, obj, key, val));
      BuildCheckHeapObject(obj);
      instr = BuildMonomorphicElementAccess(
          obj, key, val, NULL, map, access_type, expr->GetStoreMode());
  } else if (!force_generic && (types != NULL && !types->is_empty())) {
    return HandlePolymorphicElementAccess(
        obj, key, val, types, access_type,
        expr->GetStoreMode(), has_side_effects);
  if (access_type == STORE) {
    // No usable feedback: soft-deopt so we can re-optimize with feedback.
    if (expr->IsAssignment() &&
        expr->AsAssignment()->HasNoTypeInformation()) {
      Add<HDeoptimize>("Insufficient type feedback for keyed store",
    if (expr->AsProperty()->HasNoTypeInformation()) {
      Add<HDeoptimize>("Insufficient type feedback for keyed load",
  instr = AddInstruction(BuildKeyedGeneric(access_type, obj, key, val));
  *has_side_effects = instr->HasObservableSideEffects();
// Makes sure the inlined function's actual arguments have been materialized
// as pushed values so that arguments accesses inside the inlinee can read
// them. Idempotent: does nothing if already pushed or in the outermost frame.
void HOptimizedGraphBuilder::EnsureArgumentsArePushedForAccess() {
  // Outermost function already has arguments on the stack.
  if (function_state()->outer() == NULL) return;
  if (function_state()->arguments_pushed()) return;
  // Push arguments when entering inlined function.
  HEnterInlined* entry = function_state()->entry();
  entry->set_arguments_pushed();
  HArgumentsObject* arguments = entry->arguments_object();
  const ZoneList<HValue*>* arguments_values = arguments->arguments_values();
  // Insert the pushes right after the HEnterInlined marker, in order.
  HInstruction* insert_after = entry;
  for (int i = 0; i < arguments_values->length(); i++) {
    HValue* argument = arguments_values->at(i);
    HInstruction* push_argument = New<HPushArgument>(argument);
    push_argument->InsertAfter(insert_after);
    insert_after = push_argument;
  HArgumentsElements* arguments_elements = New<HArgumentsElements>(true);
  // Not GVN-able: its value depends on the pushes just inserted.
  arguments_elements->ClearFlag(HValue::kUseGVN);
  arguments_elements->InsertAfter(insert_after);
  function_state()->set_arguments_elements(arguments_elements);
// Tries to compile a property access on the arguments object directly
// (arguments.length or arguments[i]) instead of bailing out. Returns false
// when the receiver is not a stack-allocated arguments binding.
bool HOptimizedGraphBuilder::TryArgumentsAccess(Property* expr) {
  VariableProxy* proxy = expr->obj()->AsVariableProxy();
  if (proxy == NULL) return false;
  if (!proxy->var()->IsStackAllocated()) return false;
  if (!environment()->Lookup(proxy->var())->CheckFlag(HValue::kIsArguments)) {
  HInstruction* result = NULL;
  if (expr->key()->IsPropertyName()) {
    // Only "arguments.length" is supported as a named access.
    Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
    if (!name->IsOneByteEqualTo(STATIC_ASCII_VECTOR("length"))) return false;
    if (function_state()->outer() == NULL) {
      // Outermost frame: read the length from the real arguments adaptor.
      HInstruction* elements = Add<HArgumentsElements>(false);
      result = New<HArgumentsLength>(elements);
      // Number of arguments without receiver.
      int argument_count = environment()->
          arguments_environment()->parameter_count() - 1;
      result = New<HConstant>(argument_count);
    // Indexed access: arguments[key] with a bounds check.
    Push(graph()->GetArgumentsObject());
    CHECK_ALIVE_OR_RETURN(VisitForValue(expr->key()), true);
    HValue* key = Pop();
    Drop(1);  // Arguments object.
    if (function_state()->outer() == NULL) {
      HInstruction* elements = Add<HArgumentsElements>(false);
      HInstruction* length = Add<HArgumentsLength>(elements);
      HInstruction* checked_key = Add<HBoundsCheck>(key, length);
      result = New<HAccessArgumentsAt>(elements, length, checked_key);
      // Inlined frame: arguments must be materialized first.
      EnsureArgumentsArePushedForAccess();
      // Number of arguments without receiver.
      HInstruction* elements = function_state()->arguments_elements();
      int argument_count = environment()->
          arguments_environment()->parameter_count() - 1;
      HInstruction* length = Add<HConstant>(argument_count);
      HInstruction* checked_key = Add<HBoundsCheck>(key, length);
      result = New<HAccessArgumentsAt>(elements, length, checked_key);
  ast_context()->ReturnInstruction(result, expr->id());
// Builds a named property load/store using receiver-type feedback:
// polymorphic dispatch when the maps disagree, a monomorphic checked access
// when they agree, special-cased string and SIMD (Float32x4/Float64x2/
// Int32x4) receivers, and a generic IC access as the fallback.
HInstruction* HOptimizedGraphBuilder::BuildNamedAccess(
    PropertyAccessType access,
    BailoutId return_id,
    Handle<String> name,
    bool is_uninitialized) {
  SmallMapList* types;
  ComputeReceiverTypes(expr, object, &types, zone());
  ASSERT(types != NULL);
  if (types->length() > 0) {
    PropertyAccessInfo info(this, access, ToType(types->first()), name);
    if (!info.CanAccessAsMonomorphic(types)) {
      // Maps disagree: emit a per-map dispatch.
      HandlePolymorphicNamedFieldAccess(
          access, ast_id, return_id, object, value, types, name);
    HValue* checked_object;
    // Type::Number() is only supported by polymorphic load/call handling.
    ASSERT(!info.type()->Is(Type::Number()));
    BuildCheckHeapObject(object);
    if (AreStringTypes(types)) {
      Add<HCheckInstanceType>(object, HCheckInstanceType::IS_STRING);
    } else if (AreFloat32x4Types(types) &&
               CpuFeatures::SupportsSIMD128InCrankshaft()) {
      // SIMD receiver: check the prototype of the Float32x4 constructor is
      // unchanged, then lower the named access to a SIMD operation.
      Handle<JSFunction> function(
          isolate()->native_context()->float32x4_function());
      HInstruction* constant_function = Add<HConstant>(function);
      HObjectAccess map_access = HObjectAccess::ForPrototypeOrInitialMap();
      HInstruction* map = Add<HLoadNamedField>(
          constant_function, static_cast<HValue*>(NULL), map_access);
      HObjectAccess prototype_access = HObjectAccess::ForMapPrototype();
      HInstruction* prototype = Add<HLoadNamedField>(
          map, static_cast<HValue*>(NULL), prototype_access);
      Handle<Map> initial_function_prototype_map(
          isolate()->native_context()->float32x4_function_prototype_map());
      Add<HCheckMaps>(prototype, initial_function_prototype_map);
      BuiltinFunctionId id = NameToId(isolate(), name, FLOAT32x4_TYPE);
      return NewUncasted<HUnarySIMDOperation>(object, id);
    } else if (AreFloat64x2Types(types) &&
               CpuFeatures::SupportsSIMD128InCrankshaft()) {
      // Same pattern as above for Float64x2 receivers.
      Handle<JSFunction> function(
          isolate()->native_context()->float64x2_function());
      HInstruction* constant_function = Add<HConstant>(function);
      HObjectAccess map_access = HObjectAccess::ForPrototypeOrInitialMap();
      HInstruction* map = Add<HLoadNamedField>(
          constant_function, static_cast<HValue*>(NULL), map_access);
      HObjectAccess prototype_access = HObjectAccess::ForMapPrototype();
      HInstruction* prototype = Add<HLoadNamedField>(
          map, static_cast<HValue*>(NULL), prototype_access);
      Handle<Map> initial_function_prototype_map(
          isolate()->native_context()->float64x2_function_prototype_map());
      Add<HCheckMaps>(prototype, initial_function_prototype_map);
      BuiltinFunctionId id = NameToId(isolate(), name, FLOAT64x2_TYPE);
      return NewUncasted<HUnarySIMDOperation>(object, id);
    } else if (AreInt32x4Types(types) &&
               CpuFeatures::SupportsSIMD128InCrankshaft()) {
      // Same pattern as above for Int32x4 receivers.
      Handle<JSFunction> function(
          isolate()->native_context()->int32x4_function());
      HInstruction* constant_function = Add<HConstant>(function);
      HObjectAccess map_access = HObjectAccess::ForPrototypeOrInitialMap();
      HInstruction* map = Add<HLoadNamedField>(
          constant_function, static_cast<HValue*>(NULL), map_access);
      HObjectAccess prototype_access = HObjectAccess::ForMapPrototype();
      HInstruction* prototype = Add<HLoadNamedField>(
          map, static_cast<HValue*>(NULL), prototype_access);
      Handle<Map> initial_function_prototype_map(
          isolate()->native_context()->int32x4_function_prototype_map());
      Add<HCheckMaps>(prototype, initial_function_prototype_map);
      BuiltinFunctionId id = NameToId(isolate(), name, INT32x4_TYPE);
      return NewUncasted<HUnarySIMDOperation>(object, id);
      checked_object = Add<HCheckMaps>(object, types);
    return BuildMonomorphicAccess(
        &info, object, checked_object, value, ast_id, return_id);
  // No usable feedback: fall back to the generic IC access.
  return BuildNamedGeneric(access, object, name, value, is_uninitialized);
// Emits a property load whose result is left on the environment stack
// (used by compound assignments and count operations).
void HOptimizedGraphBuilder::PushLoad(Property* expr,
  ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
  if (key != NULL) Push(key);
  BuildLoad(expr, expr->LoadId());
// Emits the load for a Property expression whose operands are already on the
// environment stack: string index access, Function.prototype, named access,
// or keyed element access, returning the value through the ast context.
void HOptimizedGraphBuilder::BuildLoad(Property* expr,
  HInstruction* instr = NULL;
  if (expr->IsStringAccess()) {
    // string[index] -> char code -> one-character string.
    HValue* index = Pop();
    HValue* string = Pop();
    HInstruction* char_code = BuildStringCharCodeAt(string, index);
    AddInstruction(char_code);
    instr = NewUncasted<HStringCharFromCode>(char_code);
  } else if (expr->IsFunctionPrototype()) {
    HValue* function = Pop();
    BuildCheckHeapObject(function);
    instr = New<HLoadFunctionPrototype>(function);
  } else if (expr->key()->IsPropertyName()) {
    Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
    HValue* object = Pop();
    instr = BuildNamedAccess(LOAD, ast_id, expr->LoadId(), expr,
                             object, name, NULL, expr->IsUninitialized());
    if (instr == NULL) return;
    // Already-emitted instructions are returned as plain values.
    if (instr->IsLinked()) return ast_context()->ReturnValue(instr);
    HValue* key = Pop();
    HValue* obj = Pop();
    bool has_side_effects = false;
    HValue* load = HandleKeyedElementAccess(
        obj, key, NULL, expr, LOAD, &has_side_effects);
    if (has_side_effects) {
      // Record a deopt point after the observable load.
      if (ast_context()->IsEffect()) {
        Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
        Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
    return ast_context()->ReturnValue(load);
  return ast_context()->ReturnInstruction(instr, ast_id);
// Visits a Property expression: tries the direct arguments-object access
// first, otherwise evaluates the receiver (and key when needed) and
// delegates to BuildLoad.
void HOptimizedGraphBuilder::VisitProperty(Property* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  if (TryArgumentsAccess(expr)) return;
  CHECK_ALIVE(VisitForValue(expr->obj()));
  // A key value is only needed for true keyed accesses and string indexing.
  if ((!expr->IsFunctionPrototype() && !expr->key()->IsPropertyName()) ||
      expr->IsStringAccess()) {
    CHECK_ALIVE(VisitForValue(expr->key()));
  BuildLoad(expr, expr->id());
// Adds a map check against a known constant object (used for prototype
// chain validation); elements-kind changes alone must not invalidate it.
HInstruction* HGraphBuilder::BuildConstantMapCheck(Handle<JSObject> constant) {
  HCheckMaps* check = Add<HCheckMaps>(
      Add<HConstant>(constant), handle(constant->map()));
  check->ClearDependsOnFlag(kElementsKind);
// Adds map checks for every object on the prototype chain from `prototype`
// up to and including `holder` (or the whole chain when holder is null).
// Returns the last check added, or NULL if the chain ends at null first.
HInstruction* HGraphBuilder::BuildCheckPrototypeMaps(Handle<JSObject> prototype,
                                                     Handle<JSObject> holder) {
  while (holder.is_null() || !prototype.is_identical_to(holder)) {
    BuildConstantMapCheck(prototype);
    Object* next_prototype = prototype->GetPrototype();
    if (next_prototype->IsNull()) return NULL;
    CHECK(next_prototype->IsJSObject());
    prototype = handle(JSObject::cast(next_prototype));
  // Also check the holder itself.
  return BuildConstantMapCheck(prototype);
// Convenience wrapper: checks the receiver map's prototype chain up to the
// given holder, doing nothing when there is no holder to protect.
void HOptimizedGraphBuilder::AddCheckPrototypeMaps(Handle<JSObject> holder,
                                                   Handle<Map> receiver_map) {
  if (!holder.is_null()) {
    Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
    BuildCheckPrototypeMaps(prototype, holder);
7041 HInstruction* HOptimizedGraphBuilder::NewPlainFunctionCall(
7042 HValue* fun, int argument_count, bool pass_argument_count) {
7043 return New<HCallJSFunction>(
7044 fun, argument_count, pass_argument_count);
// Creates a call through the ArgumentsAdaptorTrampoline builtin, used when
// the actual argument count may not match the callee's formal parameter
// count. `arity` excludes the receiver.
HInstruction* HOptimizedGraphBuilder::NewArgumentAdaptorCall(
    HValue* fun, HValue* context,
    int argument_count, HValue* expected_param_count) {
  CallInterfaceDescriptor* descriptor =
      isolate()->call_descriptor(Isolate::ArgumentAdaptorCall);
  HValue* arity = Add<HConstant>(argument_count - 1);
  // Operand order must match the ArgumentAdaptorCall descriptor.
  HValue* op_vals[] = { fun, context, arity, expected_param_count };
  Handle<Code> adaptor =
      isolate()->builtins()->ArgumentsAdaptorTrampoline();
  HConstant* adaptor_value = Add<HConstant>(adaptor);
  return New<HCallWithDescriptor>(
      adaptor_value, argument_count, descriptor,
      Vector<HValue*>(op_vals, descriptor->environment_length()));
// Builds a call to a known JSFunction constant. Calls it directly when the
// argument count matches the formal parameter count (or the callee does not
// adapt arguments); otherwise routes through the arguments adaptor.
HInstruction* HOptimizedGraphBuilder::BuildCallConstantFunction(
    Handle<JSFunction> jsfun, int argument_count) {
  HValue* target = Add<HConstant>(jsfun);
  // For constant functions, we try to avoid calling the
  // argument adaptor and instead call the function directly
  int formal_parameter_count = jsfun->shared()->formal_parameter_count();
  bool dont_adapt_arguments =
      (formal_parameter_count ==
       SharedFunctionInfo::kDontAdaptArgumentsSentinel);
  int arity = argument_count - 1;  // Excludes the receiver.
  bool can_invoke_directly =
      dont_adapt_arguments || formal_parameter_count == arity;
  if (can_invoke_directly) {
    if (jsfun.is_identical_to(current_info()->closure())) {
      // Self-call: mark the graph so OSR/inlining heuristics know.
      graph()->MarkRecursive();
    return NewPlainFunctionCall(target, argument_count, dont_adapt_arguments);
  HValue* param_count_value = Add<HConstant>(formal_parameter_count);
  // The adaptor needs the callee's own context, loaded from the function.
  HValue* context = Add<HLoadNamedField>(
      target, static_cast<HValue*>(NULL),
      HObjectAccess::ForFunctionContextPointer());
  return NewArgumentAdaptorCall(target, context,
                                argument_count, param_count_value);
// Helper record used to order polymorphic call targets: candidates are
// sorted by profiler ticks (hotness) and estimated inlining size via the
// operator< defined below.
class FunctionSorter {
  FunctionSorter(int index = 0, int ticks = 0, int size = 0)
      : index_(index), ticks_(ticks), size_(size) { }
  // Position of this candidate in the original type-feedback list.
  int index() const { return index_; }
  // Profiler tick count of the candidate function.
  int ticks() const { return ticks_; }
  // Estimated AST size (inlining cost) of the candidate.
  int size() const { return size_; }
7114 inline bool operator<(const FunctionSorter& lhs, const FunctionSorter& rhs) {
7115 int diff = lhs.ticks() - rhs.ticks();
7116 if (diff != 0) return diff > 0;
7117 return lhs.size() < rhs.size();
// Builds a polymorphic method call: collects constant-function targets from
// the receiver's type feedback, sorts them by hotness/size, emits a chain of
// map (or smi/string) checks dispatching to a direct call or inlined body
// per target, and falls back to either a hard deopt or a generic call.
void HOptimizedGraphBuilder::HandlePolymorphicCallNamed(
    SmallMapList* types,
    Handle<String> name) {
  int argument_count = expr->arguments()->length() + 1;  // Includes receiver.
  FunctionSorter order[kMaxCallPolymorphism];
  bool handle_smi = false;
  bool handled_string = false;
  int ordered_functions = 0;
  // First pass: collect candidates whose lookup resolves to a constant
  // JSFunction, at most kMaxCallPolymorphism of them.
      i < types->length() && ordered_functions < kMaxCallPolymorphism;
    PropertyAccessInfo info(this, LOAD, ToType(types->at(i)), name);
    if (info.CanAccessMonomorphic() &&
        info.lookup()->IsConstant() &&
        info.constant()->IsJSFunction()) {
      if (info.type()->Is(Type::String())) {
        // All string maps share one handler; only record the first.
        if (handled_string) continue;
        handled_string = true;
      Handle<JSFunction> target = Handle<JSFunction>::cast(info.constant());
      if (info.type()->Is(Type::Number())) {
      expr->set_target(target);
      order[ordered_functions++] = FunctionSorter(
          i, target->shared()->profiler_ticks(), InliningAstSize(target));
  // Hottest (and smallest) candidates are checked first.
  std::sort(order, order + ordered_functions);
  HBasicBlock* number_block = NULL;
  HBasicBlock* join = NULL;
  handled_string = false;
  for (int fn = 0; fn < ordered_functions; ++fn) {
    int i = order[fn].index();
    PropertyAccessInfo info(this, LOAD, ToType(types->at(i)), name);
    if (info.type()->Is(Type::String())) {
      if (handled_string) continue;
      handled_string = true;
    // Reloads the target.
    info.CanAccessMonomorphic();
    Handle<JSFunction> target = Handle<JSFunction>::cast(info.constant());
    expr->set_target(target);
    // Only needed once.
    join = graph()->CreateBasicBlock();
    // Split off smis up front so the map checks below see heap objects;
    // smis flow into the shared number block.
    HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
    HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
    number_block = graph()->CreateBasicBlock();
    FinishCurrentBlock(New<HIsSmiAndBranch>(
        receiver, empty_smi_block, not_smi_block));
    GotoNoSimulate(empty_smi_block, number_block);
    set_current_block(not_smi_block);
    BuildCheckHeapObject(receiver);
    HBasicBlock* if_true = graph()->CreateBasicBlock();
    HBasicBlock* if_false = graph()->CreateBasicBlock();
    HUnaryControlInstruction* compare;
    Handle<Map> map = info.map();
    // Choose the dispatch test per candidate type: heap-number map check,
    // string instance check, or plain receiver map check.
    if (info.type()->Is(Type::Number())) {
      Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
      compare = New<HCompareMap>(receiver, heap_number_map, if_true, if_false);
    } else if (info.type()->Is(Type::String())) {
      compare = New<HIsStringAndBranch>(receiver, if_true, if_false);
      compare = New<HCompareMap>(receiver, map, if_true, if_false);
    FinishCurrentBlock(compare);
    if (info.type()->Is(Type::Number())) {
      GotoNoSimulate(if_true, number_block);
      if_true = number_block;
    set_current_block(if_true);
    AddCheckPrototypeMaps(info.holder(), map);
    HValue* function = Add<HConstant>(expr->target());
    environment()->SetExpressionStackAt(0, function);
    CHECK_ALIVE(VisitExpressions(expr->arguments()));
    // Number/string receivers need wrapping, which precludes inlining.
    bool needs_wrapping = NeedsWrappingFor(info.type(), target);
    bool try_inline = FLAG_polymorphic_inlining && !needs_wrapping;
    if (FLAG_trace_inlining && try_inline) {
      Handle<JSFunction> caller = current_info()->closure();
      SmartArrayPointer<char> caller_name =
          caller->shared()->DebugName()->ToCString();
      PrintF("Trying to inline the polymorphic call to %s from %s\n",
             name->ToCString().get(),
    if (try_inline && TryInlineCall(expr)) {
      // Trying to inline will signal that we should bailout from the
      // entire compilation by setting stack overflow on the visitor.
      if (HasStackOverflow()) return;
      // Since HWrapReceiver currently cannot actually wrap numbers and strings,
      // use the regular CallFunctionStub for method calls to wrap the receiver.
      // TODO(verwaest): Support creation of value wrappers directly in
      HInstruction* call = needs_wrapping
          ? NewUncasted<HCallFunction>(
                function, argument_count, WRAP_AND_CALL)
          : BuildCallConstantFunction(target, argument_count);
      PushArgumentsFromEnvironment(argument_count);
      AddInstruction(call);
      Drop(1);  // Drop the function.
      if (!ast_context()->IsEffect()) Push(call);
    if (current_block() != NULL) Goto(join);
    set_current_block(if_false);
  // Finish up. Unconditionally deoptimize if we've handled all the maps we
  // know about and do not want to handle ones we've never seen. Otherwise
  // use a generic IC.
  if (ordered_functions == types->length() && FLAG_deoptimize_uncommon_cases) {
    FinishExitWithHardDeoptimization("Unknown map in polymorphic call");
    Property* prop = expr->expression()->AsProperty();
    HInstruction* function = BuildNamedGeneric(
        LOAD, receiver, name, NULL, prop->IsUninitialized());
    AddInstruction(function);
    AddSimulate(prop->LoadId(), REMOVABLE_SIMULATE);
    environment()->SetExpressionStackAt(1, function);
    environment()->SetExpressionStackAt(0, receiver);
    CHECK_ALIVE(VisitExpressions(expr->arguments()));
    // Method-style calls on plain JS objects need no receiver wrapping.
    CallFunctionFlags flags = receiver->type().IsJSObject()
        ? NO_CALL_FUNCTION_FLAGS : CALL_AS_METHOD;
    HInstruction* call = New<HCallFunction>(
        function, argument_count, flags);
    PushArgumentsFromEnvironment(argument_count);
    Drop(1);  // Function.
    AddInstruction(call);
    if (!ast_context()->IsEffect()) Push(call);
  return ast_context()->ReturnInstruction(call, expr->id());
7285 // We assume that control flow is always live after an expression. So
7286 // even without predecessors to the join block, we set it as the exit
7287 // block and continue by adding instructions there.
7288 ASSERT(join != NULL);
7289 if (join->HasPredecessor()) {
7290 set_current_block(join);
7291 join->SetJoinId(expr->id());
7292 if (!ast_context()->IsEffect()) return ast_context()->ReturnValue(Pop());
7294 set_current_block(NULL);
// Logs the outcome of an inlining decision when --trace_inlining is on.
// reason == NULL means the target was inlined; otherwise reason is a
// human-readable explanation of why inlining was skipped.
// NOTE(review): lines are elided in this view (numbering jumps 7308->7311);
// the closing braces and part of the success PrintF are not visible.
7299 void HOptimizedGraphBuilder::TraceInline(Handle<JSFunction> target,
7300 Handle<JSFunction> caller,
7301 const char* reason) {
7302 if (FLAG_trace_inlining) {
7303 SmartArrayPointer<char> target_name =
7304 target->shared()->DebugName()->ToCString();
7305 SmartArrayPointer<char> caller_name =
7306 caller->shared()->DebugName()->ToCString();
7307 if (reason == NULL) {
// Success path: target was inlined into caller.
7308 PrintF("Inlined %s called from %s.\n", target_name.get(),
// Failure path: include the reason string.
7311 PrintF("Did not inline %s called from %s (%s).\n",
7312 target_name.get(), caller_name.get(), reason);
// Sentinel AST-size value returned by InliningAstSize to mean
// "this target must not be inlined".
7318 static const int kNotInlinable = 1000000000;
// Returns the AST node count of |target| if it passes the cheap early
// inlining filters, or kNotInlinable if any filter rejects it. Each
// rejection (except the builtin case) is reported via TraceInline.
// NOTE(review): lines are elided in this view — the closing braces of
// each if-block and the final `return nodes_added;` are not visible.
7321 int HOptimizedGraphBuilder::InliningAstSize(Handle<JSFunction> target) {
7322 if (!FLAG_use_inlining) return kNotInlinable;
7324 // Precondition: call is monomorphic and we have found a target with the
7325 // appropriate arity.
7326 Handle<JSFunction> caller = current_info()->closure();
7327 Handle<SharedFunctionInfo> target_shared(target->shared());
7329 // Always inline builtins marked for inlining.
7330 if (target->IsBuiltin()) {
// Builtins are either always inlined (size 0) or never.
7331 return target_shared->inline_builtin() ? 0 : kNotInlinable;
7334 if (target_shared->IsApiFunction()) {
7335 TraceInline(target, caller, "target is api function");
7336 return kNotInlinable;
7339 // Do a quick check on source code length to avoid parsing large
7340 // inlining candidates.
7341 if (target_shared->SourceSize() >
7342 Min(FLAG_max_inlined_source_size, kUnlimitedMaxInlinedSourceSize)) {
7343 TraceInline(target, caller, "target text too big");
7344 return kNotInlinable;
7347 // Target must be inlineable.
7348 if (!target_shared->IsInlineable()) {
7349 TraceInline(target, caller, "target not inlineable");
7350 return kNotInlinable;
7352 if (target_shared->dont_inline() || target_shared->dont_optimize()) {
7353 TraceInline(target, caller, "target contains unsupported syntax [early]");
7354 return kNotInlinable;
7357 int nodes_added = target_shared->ast_node_count();
// Attempts to inline a call to |target| at the current graph position.
// Returns false if any pre-inlining check fails (size, depth, recursion,
// parse failure, unsupported syntax, ...); once the "decision to inline"
// point is passed it builds the inlined body into the graph and returns
// true (the final `return true;` is outside this view). |inlining_kind|
// distinguishes normal calls, construct calls, and getter/setter calls,
// which determines how falling off the end of the body is handled.
// NOTE(review): this chunk has many elided lines (original numbering
// jumps repeatedly) — `return false;` statements, closing braces, and
// some argument lists are missing from this view.
7362 bool HOptimizedGraphBuilder::TryInline(Handle<JSFunction> target,
7363 int arguments_count,
7364 HValue* implicit_return_value,
7366 BailoutId return_id,
7367 InliningKind inlining_kind,
7368 HSourcePosition position) {
// Reuse the early filters; kNotInlinable is the rejection sentinel.
7369 int nodes_added = InliningAstSize(target);
7370 if (nodes_added == kNotInlinable) return false;
7372 Handle<JSFunction> caller = current_info()->closure();
7374 if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
7375 TraceInline(target, caller, "target AST is too large [early]");
7379 // Don't inline deeper than the maximum number of inlining levels.
7380 HEnvironment* env = environment();
7381 int current_level = 1;
7382 while (env->outer() != NULL) {
7383 if (current_level == FLAG_max_inlining_levels) {
7384 TraceInline(target, caller, "inline depth limit reached");
// Only JS_FUNCTION frames count toward the depth limit.
7387 if (env->outer()->frame_type() == JS_FUNCTION) {
7393 // Don't inline recursive functions.
7394 for (FunctionState* state = function_state();
7396 state = state->outer()) {
7397 if (*state->compilation_info()->closure() == *target) {
7398 TraceInline(target, caller, "target is recursive");
7403 // We don't want to add more than a certain number of nodes from inlining.
7404 if (inlined_count_ > Min(FLAG_max_inlined_nodes_cumulative,
7405 kUnlimitedMaxInlinedNodesCumulative)) {
7406 TraceInline(target, caller, "cumulative AST node limit reached");
7410 // Parse and allocate variables.
7411 CompilationInfo target_info(target, zone());
7412 Handle<SharedFunctionInfo> target_shared(target->shared());
7413 if (!Parser::Parse(&target_info) || !Scope::Analyze(&target_info)) {
7414 if (target_info.isolate()->has_pending_exception()) {
7415 // Parse or scope error, never optimize this function.
7417 target_shared->DisableOptimization(kParseScopeError);
7419 TraceInline(target, caller, "parse failure");
// Context-allocated (heap) variables are not supported by inlining.
7423 if (target_info.scope()->num_heap_slots() > 0) {
7424 TraceInline(target, caller, "target has context-allocated variables");
7427 FunctionLiteral* function = target_info.function();
7429 // The following conditions must be checked again after re-parsing, because
7430 // earlier the information might not have been complete due to lazy parsing.
7431 nodes_added = function->ast_node_count();
7432 if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
7433 TraceInline(target, caller, "target AST is too large [late]");
7436 AstProperties::Flags* flags(function->flags());
7437 if (flags->Contains(kDontInline) || function->dont_optimize()) {
7438 TraceInline(target, caller, "target contains unsupported syntax [late]");
7442 // If the function uses the arguments object check that inlining of functions
7443 // with arguments object is enabled and the arguments-variable is
7445 if (function->scope()->arguments() != NULL) {
7446 if (!FLAG_inline_arguments) {
7447 TraceInline(target, caller, "target uses arguments object");
7451 if (!function->scope()->arguments()->IsStackAllocated()) {
7454 "target uses non-stackallocated arguments object");
7459 // All declarations must be inlineable.
7460 ZoneList<Declaration*>* decls = target_info.scope()->declarations();
7461 int decl_count = decls->length();
7462 for (int i = 0; i < decl_count; ++i) {
7463 if (!decls->at(i)->IsInlineable()) {
7464 TraceInline(target, caller, "target has non-trivial declaration");
7469 // Generate the deoptimization data for the unoptimized version of
7470 // the target function if we don't already have it.
7471 if (!target_shared->has_deoptimization_support()) {
7472 // Note that we compile here using the same AST that we will use for
7473 // generating the optimized inline code.
7474 target_info.EnableDeoptimizationSupport();
7475 if (!FullCodeGenerator::MakeCode(&target_info)) {
7476 TraceInline(target, caller, "could not generate deoptimization info");
7479 if (target_shared->scope_info() == ScopeInfo::Empty(isolate())) {
7480 // The scope info might not have been set if a lazily compiled
7481 // function is inlined before being called for the first time.
7482 Handle<ScopeInfo> target_scope_info =
7483 ScopeInfo::Create(target_info.scope(), zone());
7484 target_shared->set_scope_info(*target_scope_info);
7486 target_shared->EnableDeoptimizationSupport(*target_info.code());
7487 target_shared->set_feedback_vector(*target_info.feedback_vector());
7488 Compiler::RecordFunctionCompilation(Logger::FUNCTION_TAG,
7493 // ----------------------------------------------------------------
7494 // After this point, we've made a decision to inline this function (so
7495 // TryInline should always return true).
7497 // Type-check the inlined function.
7498 ASSERT(target_shared->has_deoptimization_support());
7499 AstTyper::Run(&target_info);
7501 int function_id = graph()->TraceInlinedFunction(target_shared, position);
7503 // Save the pending call context. Set up new one for the inlined function.
7504 // The function state is new-allocated because we need to delete it
7505 // in two different places.
7506 FunctionState* target_state = new FunctionState(
7507 this, &target_info, inlining_kind, function_id);
7509 HConstant* undefined = graph()->GetConstantUndefined();
7511 HEnvironment* inner_env =
7512 environment()->CopyForInlining(target,
7516 function_state()->inlining_kind());
// The inlined code runs in the callee's context, not the caller's.
7518 HConstant* context = Add<HConstant>(Handle<Context>(target->context()));
7519 inner_env->BindContext(context);
7521 HArgumentsObject* arguments_object = NULL;
7523 // If the function uses arguments object create and bind one, also copy
7524 // current arguments values to use them for materialization.
7525 if (function->scope()->arguments() != NULL) {
7526 ASSERT(function->scope()->arguments()->IsStackAllocated());
7527 HEnvironment* arguments_env = inner_env->arguments_environment();
7528 int arguments_count = arguments_env->parameter_count();
7529 arguments_object = Add<HArgumentsObject>(arguments_count);
7530 inner_env->Bind(function->scope()->arguments(), arguments_object);
7531 for (int i = 0; i < arguments_count; i++) {
7532 arguments_object->AddArgument(arguments_env->Lookup(i), zone());
7536 // Capture the state before invoking the inlined function for deopt in the
7537 // inlined function. This simulate has no bailout-id since it's not directly
7538 // reachable for deopt, and is only used to capture the state. If the simulate
7539 // becomes reachable by merging, the ast id of the simulate merged into it is
7541 Add<HSimulate>(BailoutId::None());
7543 current_block()->UpdateEnvironment(inner_env);
7545 HEnterInlined* enter_inlined =
7546 Add<HEnterInlined>(return_id, target, arguments_count, function,
7547 function_state()->inlining_kind(),
7548 function->scope()->arguments(),
7550 function_state()->set_entry(enter_inlined);
// Build the inlined body by visiting the callee's AST directly.
7552 VisitDeclarations(target_info.scope()->declarations());
7553 VisitStatements(function->body());
7554 if (HasStackOverflow()) {
7555 // Bail out if the inline function did, as we cannot residualize a call
7557 TraceInline(target, caller, "inline graph construction failed");
7558 target_shared->DisableOptimization(kInliningBailedOut);
7559 inline_bailout_ = true;
7560 delete target_state;
7564 // Update inlined nodes count.
7565 inlined_count_ += nodes_added;
7567 Handle<Code> unoptimized_code(target_shared->code());
7568 ASSERT(unoptimized_code->kind() == Code::FUNCTION);
7569 Handle<TypeFeedbackInfo> type_info(
7570 TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
7571 graph()->update_type_change_checksum(type_info->own_type_change_checksum());
// NULL reason == success in TraceInline.
7573 TraceInline(target, caller, NULL);
// Handle implicit fall-through off the end of the inlined body; the
// implicit return value depends on the kind of inlined call.
7575 if (current_block() != NULL) {
7576 FunctionState* state = function_state();
7577 if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
7578 // Falling off the end of an inlined construct call. In a test context the
7579 // return value will always evaluate to true, in a value context the
7580 // return value is the newly allocated receiver.
7581 if (call_context()->IsTest()) {
7582 Goto(inlined_test_context()->if_true(), state);
7583 } else if (call_context()->IsEffect()) {
7584 Goto(function_return(), state);
7586 ASSERT(call_context()->IsValue());
7587 AddLeaveInlined(implicit_return_value, state);
7589 } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
7590 // Falling off the end of an inlined setter call. The returned value is
7591 // never used, the value of an assignment is always the value of the RHS
7592 // of the assignment.
7593 if (call_context()->IsTest()) {
7594 inlined_test_context()->ReturnValue(implicit_return_value)
7595 } else if (call_context()->IsEffect()) {
7596 Goto(function_return(), state);
7598 ASSERT(call_context()->IsValue());
7599 AddLeaveInlined(implicit_return_value, state);
7602 // Falling off the end of a normal inlined function. This basically means
7603 // returning undefined.
7604 if (call_context()->IsTest()) {
7605 Goto(inlined_test_context()->if_false(), state);
7606 } else if (call_context()->IsEffect()) {
7607 Goto(function_return(), state);
7609 ASSERT(call_context()->IsValue());
7610 AddLeaveInlined(undefined, state);
7615 // Fix up the function exits.
7616 if (inlined_test_context() != NULL) {
7617 HBasicBlock* if_true = inlined_test_context()->if_true();
7618 HBasicBlock* if_false = inlined_test_context()->if_false();
7620 HEnterInlined* entry = function_state()->entry();
7622 // Pop the return test context from the expression context stack.
7623 ASSERT(ast_context() == inlined_test_context());
7624 ClearInlinedTestContext();
7625 delete target_state;
7627 // Forward to the real test context.
7628 if (if_true->HasPredecessor()) {
7629 entry->RegisterReturnTarget(if_true, zone());
7630 if_true->SetJoinId(ast_id);
7631 HBasicBlock* true_target = TestContext::cast(ast_context())->if_true();
7632 Goto(if_true, true_target, function_state());
7634 if (if_false->HasPredecessor()) {
7635 entry->RegisterReturnTarget(if_false, zone());
7636 if_false->SetJoinId(ast_id);
7637 HBasicBlock* false_target = TestContext::cast(ast_context())->if_false();
7638 Goto(if_false, false_target, function_state());
7640 set_current_block(NULL);
7643 } else if (function_return()->HasPredecessor()) {
7644 function_state()->entry()->RegisterReturnTarget(function_return(), zone());
7645 function_return()->SetJoinId(ast_id);
7646 set_current_block(function_return());
7648 set_current_block(NULL);
7650 delete target_state;
// Convenience wrapper: tries to inline a normal call expression by
// forwarding to TryInline with the call's target, arity, and position.
// NOTE(review): the middle TryInline arguments (lines 7658-7661) are
// elided from this view.
7655 bool HOptimizedGraphBuilder::TryInlineCall(Call* expr) {
7656 return TryInline(expr->target(),
7657 expr->arguments()->length(),
7662 ScriptPositionToSourcePosition(expr->position()));
// Tries to inline a `new` expression. |implicit_return_value| is the
// newly allocated receiver, used when the constructor body falls off
// the end (see CONSTRUCT_CALL_RETURN handling in TryInline).
// NOTE(review): some TryInline arguments (lines 7671-7672) are elided
// from this view.
7666 bool HOptimizedGraphBuilder::TryInlineConstruct(CallNew* expr,
7667 HValue* implicit_return_value) {
7668 return TryInline(expr->target(),
7669 expr->arguments()->length(),
7670 implicit_return_value,
7673 CONSTRUCT_CALL_RETURN,
7674 ScriptPositionToSourcePosition(expr->position()));
// Tries to inline a property getter: first as a fast API getter, then
// as a regular inlined function via TryInline.
// NOTE(review): the `ast_id` parameter line (7680) and the remaining
// TryInline arguments are elided from this view.
7678 bool HOptimizedGraphBuilder::TryInlineGetter(Handle<JSFunction> getter,
7679 Handle<Map> receiver_map,
7681 BailoutId return_id) {
// Prefer the direct API-call fast path when available.
7682 if (TryInlineApiGetter(getter, receiver_map, ast_id)) return true;
7683 return TryInline(getter,
// Tries to inline a property setter: first as a fast API setter, then
// as a regular inlined function. |implicit_return_value| is the RHS of
// the assignment (see SETTER_CALL_RETURN handling in TryInline).
// NOTE(review): the `id` parameter line (7695) and several TryInline
// arguments are elided from this view.
7693 bool HOptimizedGraphBuilder::TryInlineSetter(Handle<JSFunction> setter,
7694 Handle<Map> receiver_map,
7696 BailoutId assignment_id,
7697 HValue* implicit_return_value) {
// Prefer the direct API-call fast path when available.
7698 if (TryInlineApiSetter(setter, receiver_map, id)) return true;
7699 return TryInline(setter,
7701 implicit_return_value,
// Tries to inline the target of a Function.prototype.apply-style call
// with an explicit |arguments_count|, forwarding to TryInline.
// NOTE(review): the `expr` parameter line (7709) and middle TryInline
// arguments (7712-7716) are elided from this view.
7708 bool HOptimizedGraphBuilder::TryInlineApply(Handle<JSFunction> function,
7710 int arguments_count) {
7711 return TryInline(function,
7717 ScriptPositionToSourcePosition(expr->position()));
// Tries to replace a direct call to a builtin function (e.g. Math.*,
// SIMD operations) with a dedicated Hydrogen instruction instead of a
// real call. Returns false when the builtin is not supported here.
// Arguments are expected on the expression stack; handled cases pop
// them and drop the receiver and function before returning the result
// instruction to the ast context.
// NOTE(review): the switch statement itself and the case labels are
// largely elided from this view (only the macro-generated SIMD case
// lists and the case bodies are visible); `return true;` / `break;`
// lines are also missing.
7721 bool HOptimizedGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr) {
7722 if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
7723 BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
// Fast-math-only builtins fall through to the generic unary case below.
7726 if (!FLAG_fast_math) break;
7727 // Fall through if FLAG_fast_math.
// Unary math builtin: one argument -> HUnaryMathOperation.
7734 if (expr->arguments()->length() == 1) {
7735 HValue* argument = Pop();
7736 Drop(2); // Receiver and function.
7737 HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
7738 ast_context()->ReturnInstruction(op, expr->id());
// Math.imul: two arguments -> specialized integer multiply.
7743 if (expr->arguments()->length() == 2) {
7744 HValue* right = Pop();
7745 HValue* left = Pop();
7746 Drop(2); // Receiver and function.
7747 HInstruction* op = HMul::NewImul(zone(), context(), left, right);
7748 ast_context()->ReturnInstruction(op, expr->id());
// SIMD builtins: each macro expands to the case labels for one arity;
// the body emits the corresponding H*SIMDOperation when the CPU
// supports SIMD128 in Crankshaft and the arity matches.
7752 #define SIMD_NULLARY_OPERATION_CASE_ITEM(p1, p2, name, p4) \
7754 SIMD_NULLARY_OPERATIONS(SIMD_NULLARY_OPERATION_CASE_ITEM)
7755 #undef SIMD_NULLARY_OPERATION_CASE_ITEM
7756 if (CpuFeatures::SupportsSIMD128InCrankshaft() &&
7757 expr->arguments()->length() == 0) {
7758 Drop(2); // Receiver and function.
7759 HInstruction* op = NewUncasted<HNullarySIMDOperation>(id);
7760 ast_context()->ReturnInstruction(op, expr->id());
7764 #define SIMD_UNARY_OPERATION_CASE_ITEM(p1, p2, name, p4, p5) \
7766 SIMD_UNARY_OPERATIONS(SIMD_UNARY_OPERATION_CASE_ITEM)
7767 #undef SIMD_UNARY_OPERATION_CASE_ITEM
7768 if (CpuFeatures::SupportsSIMD128InCrankshaft() &&
7769 expr->arguments()->length() == 1) {
7770 HValue* argument = Pop();
7771 Drop(2); // Receiver and function.
7772 HInstruction* op = NewUncasted<HUnarySIMDOperation>(argument, id);
7773 ast_context()->ReturnInstruction(op, expr->id());
7777 #define SIMD_BINARY_OPERATION_CASE_ITEM(p1, p2, name, p4, p5, p6) \
7779 SIMD_BINARY_OPERATIONS(SIMD_BINARY_OPERATION_CASE_ITEM)
7780 #undef SIMD_BINARY_OPERATION_CASE_ITEM
7781 if (CpuFeatures::SupportsSIMD128InCrankshaft() &&
7782 expr->arguments()->length() == 2) {
7783 HValue* right = Pop();
7784 HValue* left = Pop();
7785 Drop(2); // Receiver and function.
7786 HInstruction* op = NewUncasted<HBinarySIMDOperation>(left, right, id);
7787 ast_context()->ReturnInstruction(op, expr->id());
7791 #define SIMD_TERNARY_OPERATION_CASE_ITEM(p1, p2, name, p4, p5, p6, p7) \
7793 SIMD_TERNARY_OPERATIONS(SIMD_TERNARY_OPERATION_CASE_ITEM)
7794 #undef SIMD_TERNARY_OPERATION_CASE_ITEM
7795 if (CpuFeatures::SupportsSIMD128InCrankshaft() &&
7796 expr->arguments()->length() == 3) {
7797 HValue* right = Pop();
7798 HValue* left = Pop();
7799 HValue* value = Pop();
7800 Drop(2); // Receiver and function.
7802 NewUncasted<HTernarySIMDOperation>(value, left, right, id);
7803 ast_context()->ReturnInstruction(op, expr->id());
7807 #define SIMD_QUARTERNARY_OPERATION_CASE_ITEM(p1, p2, name, p4, p5, p6, p7, p8) \
7809 SIMD_QUARTERNARY_OPERATIONS(SIMD_QUARTERNARY_OPERATION_CASE_ITEM)
7810 #undef SIMD_QUARTERNARY_OPERATION_CASE_ITEM
7811 if (CpuFeatures::SupportsSIMD128InCrankshaft() &&
7812 expr->arguments()->length() == 4) {
// NOTE(review): the four Pop()s into x/y/z/w (7813-7816) are elided.
7817 Drop(2); // Receiver and function.
7819 NewUncasted<HQuarternarySIMDOperation>(x, y, z, w, id);
7820 ast_context()->ReturnInstruction(op, expr->id());
7825 // Not supported for inlining yet.
// Tries to replace a builtin *method* call (receiver.method(...)) with
// specialized Hydrogen instructions: String.charCodeAt/fromCharCode,
// Math unary ops / pow / min / max / imul, Array.pop / push, and SIMD
// operations. Returns false when the builtin or receiver shape is not
// supported. argument_count includes the receiver.
// NOTE(review): the switch statement and most case labels are elided
// from this view; `return true;` / `break;` lines are also missing.
7832 bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
7835 Handle<Map> receiver_map) {
7836 // Try to inline calls like Math.* as operations in the calling function.
7837 if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
7838 BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
7839 int argument_count = expr->arguments()->length() + 1; // Plus receiver.
// String.prototype.charCodeAt / charAt share one code path:
// charAt additionally converts the char code back to a string.
7841 case kStringCharCodeAt:
7843 if (argument_count == 2) {
7844 HValue* index = Pop();
7845 HValue* string = Pop();
7846 Drop(1); // Function.
7847 HInstruction* char_code =
7848 BuildStringCharCodeAt(string, index);
7849 if (id == kStringCharCodeAt) {
7850 ast_context()->ReturnInstruction(char_code, expr->id());
7853 AddInstruction(char_code);
7854 HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
7855 ast_context()->ReturnInstruction(result, expr->id());
7859 case kStringFromCharCode:
7860 if (argument_count == 2) {
7861 HValue* argument = Pop();
7862 Drop(2); // Receiver and function.
7863 HInstruction* result = NewUncasted<HStringCharFromCode>(argument);
7864 ast_context()->ReturnInstruction(result, expr->id());
// Fast-math-only builtins fall through to the unary-math case.
7869 if (!FLAG_fast_math) break;
7870 // Fall through if FLAG_fast_math.
7877 if (argument_count == 2) {
7878 HValue* argument = Pop();
7879 Drop(2); // Receiver and function.
7880 HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
7881 ast_context()->ReturnInstruction(op, expr->id());
// Math.pow with constant exponents 0.5 / -0.5 / 2 is strength-reduced
// to sqrt, 1/sqrt, and x*x respectively; otherwise emit HPower.
7886 if (argument_count == 3) {
7887 HValue* right = Pop();
7888 HValue* left = Pop();
7889 Drop(2); // Receiver and function.
7890 HInstruction* result = NULL;
7891 // Use sqrt() if exponent is 0.5 or -0.5.
7892 if (right->IsConstant() && HConstant::cast(right)->HasDoubleValue()) {
7893 double exponent = HConstant::cast(right)->DoubleValue();
7894 if (exponent == 0.5) {
7895 result = NewUncasted<HUnaryMathOperation>(left, kMathPowHalf);
7896 } else if (exponent == -0.5) {
7897 HValue* one = graph()->GetConstant1();
7898 HInstruction* sqrt = AddUncasted<HUnaryMathOperation>(
7899 left, kMathPowHalf);
7900 // MathPowHalf doesn't have side effects so there's no need for
7901 // an environment simulation here.
7902 ASSERT(!sqrt->HasObservableSideEffects());
7903 result = NewUncasted<HDiv>(one, sqrt);
7904 } else if (exponent == 2.0) {
7905 result = NewUncasted<HMul>(left, left);
7909 if (result == NULL) {
7910 result = NewUncasted<HPower>(left, right);
7912 ast_context()->ReturnInstruction(result, expr->id());
// Math.min / Math.max map onto a single HMathMinMax instruction.
7918 if (argument_count == 3) {
7919 HValue* right = Pop();
7920 HValue* left = Pop();
7921 Drop(2); // Receiver and function.
7922 HMathMinMax::Operation op = (id == kMathMin) ? HMathMinMax::kMathMin
7923 : HMathMinMax::kMathMax;
7924 HInstruction* result = NewUncasted<HMathMinMax>(left, right, op);
7925 ast_context()->ReturnInstruction(result, expr->id());
7930 if (argument_count == 3) {
7931 HValue* right = Pop();
7932 HValue* left = Pop();
7933 Drop(2); // Receiver and function.
7934 HInstruction* result = HMul::NewImul(zone(), context(), left, right);
7935 ast_context()->ReturnInstruction(result, expr->id());
// Array.prototype.pop: only for fast-elements JSArrays that are not
// observed; built inline with an empty-array guard.
7940 if (receiver_map.is_null()) return false;
7941 if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
7942 ElementsKind elements_kind = receiver_map->elements_kind();
7943 if (!IsFastElementsKind(elements_kind)) return false;
7944 if (receiver_map->is_observed()) return false;
7945 ASSERT(receiver_map->is_extensible());
7947 Drop(expr->arguments()->length());
7949 HValue* reduced_length;
7950 HValue* receiver = Pop();
7952 HValue* checked_object = AddCheckMap(receiver, receiver_map);
7953 HValue* length = Add<HLoadNamedField>(
7954 checked_object, static_cast<HValue*>(NULL),
7955 HObjectAccess::ForArrayLength(elements_kind));
7957 Drop(1); // Function.
7959 { NoObservableSideEffectsScope scope(this);
7960 IfBuilder length_checker(this);
// Empty array: pop() yields undefined without touching elements.
7962 HValue* bounds_check = length_checker.If<HCompareNumericAndBranch>(
7963 length, graph()->GetConstant0(), Token::EQ);
7964 length_checker.Then();
7966 if (!ast_context()->IsEffect()) Push(graph()->GetConstantUndefined());
7968 length_checker.Else();
7969 HValue* elements = AddLoadElements(checked_object);
7970 // Ensure that we aren't popping from a copy-on-write array.
7971 if (IsFastSmiOrObjectElementsKind(elements_kind)) {
7972 elements = BuildCopyElementsOnWrite(checked_object, elements,
7973 elements_kind, length);
// Load the last element, then overwrite its slot with the hole (or
// the hole NaN for double arrays) and shrink the length by one.
7975 reduced_length = AddUncasted<HSub>(length, graph()->GetConstant1());
7976 result = AddElementAccess(elements, reduced_length, NULL,
7977 bounds_check, elements_kind, LOAD);
7978 Factory* factory = isolate()->factory();
7979 double nan_double = FixedDoubleArray::hole_nan_as_double();
7980 HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
7981 ? Add<HConstant>(factory->the_hole_value())
7982 : Add<HConstant>(nan_double);
// Storing a hole requires a holey elements kind for the store below.
7983 if (IsFastSmiOrObjectElementsKind(elements_kind)) {
7984 elements_kind = FAST_HOLEY_ELEMENTS;
7987 elements, reduced_length, hole, bounds_check, elements_kind, STORE);
7988 Add<HStoreNamedField>(
7989 checked_object, HObjectAccess::ForArrayLength(elements_kind),
7990 reduced_length, STORE_TO_INITIALIZED_ENTRY);
7992 if (!ast_context()->IsEffect()) Push(result);
7994 length_checker.End();
7996 result = ast_context()->IsEffect() ? graph()->GetConstant0() : Top();
7997 Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
7998 if (!ast_context()->IsEffect()) Drop(1);
8000 ast_context()->ReturnValue(result);
// Array.prototype.push (single argument): grow-store the new element
// and return the new length.
8004 if (receiver_map.is_null()) return false;
8005 if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
8006 ElementsKind elements_kind = receiver_map->elements_kind();
8007 if (!IsFastElementsKind(elements_kind)) return false;
8008 if (receiver_map->is_observed()) return false;
8009 ASSERT(receiver_map->is_extensible());
8011 // If there may be elements accessors in the prototype chain, the fast
8012 // inlined version can't be used.
8013 if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;
8014 // If there currently can be no elements accessors on the prototype chain,
8015 // it doesn't mean that there won't be any later. Install a full prototype
8016 // chain check to trap element accessors being installed on the prototype
8017 // chain, which would cause elements to go to dictionary mode and result
8019 Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
8020 BuildCheckPrototypeMaps(prototype, Handle<JSObject>());
8022 const int argc = expr->arguments()->length();
8023 if (argc != 1) return false;
8025 HValue* value_to_push = Pop();
8026 HValue* array = Pop();
8027 Drop(1); // Drop function.
8029 HInstruction* new_size = NULL;
8030 HValue* length = NULL;
8033 NoObservableSideEffectsScope scope(this);
8035 length = Add<HLoadNamedField>(array, static_cast<HValue*>(NULL),
8036 HObjectAccess::ForArrayLength(elements_kind));
8038 new_size = AddUncasted<HAdd>(length, graph()->GetConstant1());
8040 bool is_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
// STORE_AND_GROW handles both the element store and the length bump.
8041 BuildUncheckedMonomorphicElementAccess(array, length,
8042 value_to_push, is_array,
8043 elements_kind, STORE,
8045 STORE_AND_GROW_NO_TRANSITION);
8047 if (!ast_context()->IsEffect()) Push(new_size);
8048 Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
8049 if (!ast_context()->IsEffect()) Drop(1);
8052 ast_context()->ReturnValue(new_size);
// SIMD method builtins, mirroring the function-call variants above but
// keyed on argument_count (which includes the receiver).
8055 #define SIMD_NULLARY_OPERATION_CASE_ITEM(p1, p2, name, p4) \
8057 SIMD_NULLARY_OPERATIONS(SIMD_NULLARY_OPERATION_CASE_ITEM)
8058 #undef SIMD_NULLARY_OPERATION_CASE_ITEM
8059 if (CpuFeatures::SupportsSIMD128InCrankshaft() && argument_count == 1) {
8060 Drop(2); // Receiver and function.
8061 HInstruction* op = NewUncasted<HNullarySIMDOperation>(id);
8062 ast_context()->ReturnInstruction(op, expr->id());
8066 #define SIMD_UNARY_OPERATION_CASE_ITEM(p1, p2, name, p4, p5) \
8068 SIMD_UNARY_OPERATIONS(SIMD_UNARY_OPERATION_CASE_ITEM)
8069 #undef SIMD_UNARY_OPERATION_CASE_ITEM
8070 if (CpuFeatures::SupportsSIMD128InCrankshaft() && argument_count == 2) {
8071 HValue* argument = Pop();
8072 Drop(2); // Receiver and function.
8073 HInstruction* op = NewUncasted<HUnarySIMDOperation>(argument, id);
8074 ast_context()->ReturnInstruction(op, expr->id());
8078 #define SIMD_BINARY_OPERATION_CASE_ITEM(p1, p2, name, p4, p5, p6) \
8080 SIMD_BINARY_OPERATIONS(SIMD_BINARY_OPERATION_CASE_ITEM)
8081 #undef SIMD_BINARY_OPERATION_CASE_ITEM
8082 if (CpuFeatures::SupportsSIMD128InCrankshaft() && argument_count == 3) {
8083 HValue* right = Pop();
8084 HValue* left = Pop();
8085 Drop(2); // Receiver and function.
8086 HInstruction* op = NewUncasted<HBinarySIMDOperation>(left, right, id);
8087 ast_context()->ReturnInstruction(op, expr->id());
8091 #define SIMD_TERNARY_OPERATION_CASE_ITEM(p1, p2, name, p4, p5, p6, p7) \
8093 SIMD_TERNARY_OPERATIONS(SIMD_TERNARY_OPERATION_CASE_ITEM)
8094 #undef SIMD_TERNARY_OPERATION_CASE_ITEM
8095 if (CpuFeatures::SupportsSIMD128InCrankshaft() && argument_count == 4) {
8096 HValue* right = Pop();
8097 HValue* left = Pop();
8098 HValue* value = Pop();
8099 Drop(2); // Receiver and function.
8101 NewUncasted<HTernarySIMDOperation>(value, left, right, id);
8102 ast_context()->ReturnInstruction(op, expr->id());
8106 #define SIMD_QUARTERNARY_OPERATION_CASE_ITEM(p1, p2, name, p4, p5, p6, p7, p8) \
8108 SIMD_QUARTERNARY_OPERATIONS(SIMD_QUARTERNARY_OPERATION_CASE_ITEM)
8109 #undef SIMD_QUARTERNARY_OPERATION_CASE_ITEM
8110 if (CpuFeatures::SupportsSIMD128InCrankshaft() && argument_count == 5) {
// NOTE(review): the four Pop()s into x/y/z/w (8111-8114) are elided.
8115 Drop(2); // Receiver and function.
8116 HValue* context = environment()->context();
8118 HQuarternarySIMDOperation::New(zone(), context, x, y, z, w, id);
8119 ast_context()->ReturnInstruction(op, expr->id());
// SIMD typed-array element get: keyed load from the backing store.
8123 case kFloat32x4ArrayGetAt:
8124 case kFloat64x2ArrayGetAt:
8125 case kInt32x4ArrayGetAt:
8126 if (CpuFeatures::SupportsSIMD128InCrankshaft() && argument_count == 2) {
8127 HValue* key = Pop();
8128 HValue* typed32x4_array = Pop();
8129 ASSERT(typed32x4_array == receiver);
8130 Drop(1); // Drop function.
8131 HInstruction* instr = BuildUncheckedMonomorphicElementAccess(
8132 typed32x4_array, key, NULL,
8133 receiver_map->instance_type() == JS_ARRAY_TYPE,
8134 receiver_map->elements_kind(),
8136 NEVER_RETURN_HOLE, // load_mode.
8138 ast_context()->ReturnValue(instr);
// SIMD typed-array element set: keyed store into the backing store;
// the stored value is left on the stack as the expression result.
8142 case kFloat32x4ArraySetAt:
8143 case kFloat64x2ArraySetAt:
8144 case kInt32x4ArraySetAt:
8145 if (CpuFeatures::SupportsSIMD128InCrankshaft() && argument_count == 3) {
8146 HValue* value = Pop();
8147 HValue* key = Pop();
8148 HValue* typed32x4_array = Pop();
8149 ASSERT(typed32x4_array == receiver);
8150 Drop(1); // Drop function.
8151 // TODO(haitao): add STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS.
8152 KeyedAccessStoreMode store_mode = STANDARD_STORE;
8153 BuildUncheckedMonomorphicElementAccess(
8154 typed32x4_array, key, value,
8155 receiver_map->instance_type() == JS_ARRAY_TYPE,
8156 receiver_map->elements_kind(),
8158 NEVER_RETURN_HOLE, // load_mode.
8161 Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
8162 ast_context()->ReturnValue(Pop());
8167 // Not yet supported for inlining.
// Tries to inline a direct (function-style) API call. Builds an empty
// receiver-map list and delegates to TryInlineApiCall, which fills it
// with the global receiver's map for the kCallApiFunction case.
// NOTE(review): a parameter line (8175) and the remaining
// TryInlineApiCall arguments (8180-8184) are elided from this view.
8174 bool HOptimizedGraphBuilder::TryInlineApiFunctionCall(Call* expr,
8176 Handle<JSFunction> function = expr->target();
8177 int argc = expr->arguments()->length();
8178 SmallMapList receiver_maps;
8179 return TryInlineApiCall(function,
// Tries to inline a method-style API call, using the caller-provided
// |receiver_maps| for the receiver checks in TryInlineApiCall.
// NOTE(review): parameter lines (8189-8190) and the remaining
// TryInlineApiCall arguments (8195-8199) are elided from this view.
8188 bool HOptimizedGraphBuilder::TryInlineApiMethodCall(
8191 SmallMapList* receiver_maps) {
8192 Handle<JSFunction> function = expr->target();
8193 int argc = expr->arguments()->length();
8194 return TryInlineApiCall(function,
// Tries to inline an API getter callback. Wraps the single receiver
// map in a list and delegates to TryInlineApiCall; the receiver itself
// stays on the expression stack (NULL receiver argument).
// NOTE(review): a parameter line (8205) and the remaining
// TryInlineApiCall arguments (8210-8213) are elided from this view.
8203 bool HOptimizedGraphBuilder::TryInlineApiGetter(Handle<JSFunction> function,
8204 Handle<Map> receiver_map,
8206 SmallMapList receiver_maps(1, zone());
8207 receiver_maps.Add(receiver_map, zone());
8208 return TryInlineApiCall(function,
8209 NULL, // Receiver is on expression stack.
// Tries to inline an API setter callback. Mirrors TryInlineApiGetter:
// single receiver map wrapped in a list, receiver (and value) are left
// on the expression stack for TryInlineApiCall to consume.
// NOTE(review): a parameter line (8219) and the remaining
// TryInlineApiCall arguments (8224-8227) are elided from this view.
8217 bool HOptimizedGraphBuilder::TryInlineApiSetter(Handle<JSFunction> function,
8218 Handle<Map> receiver_map,
8220 SmallMapList receiver_maps(1, zone());
8221 receiver_maps.Add(receiver_map, zone());
8222 return TryInlineApiCall(function,
8223 NULL, // Receiver is on expression stack.
// Common worker for all TryInlineApi* entry points. If |function| is a
// "simple" API call (a callback with no interceptors/access checks, as
// determined by CallOptimization), emits map checks, pushes the arguments,
// and calls the API callback through CallApiFunctionStub instead of a
// generic JS call. Returns false if the call cannot be handled here, in
// which case the caller falls back to a regular call sequence.
bool HOptimizedGraphBuilder::TryInlineApiCall(Handle<JSFunction> function,
                                              SmallMapList* receiver_maps,
                                              ApiCallType call_type) {
  CallOptimization optimization(function);
  if (!optimization.is_simple_api_call()) return false;
  Handle<Map> holder_map;
  if (call_type == kCallApiFunction) {
    // Cannot embed a direct reference to the global proxy map
    // as it may be dropped on deserialization.
    CHECK(!Serializer::enabled(isolate()));
    ASSERT_EQ(0, receiver_maps->length());
    // Function-style call: the implicit receiver is the global receiver,
    // so its map becomes the sole expected receiver map.
    receiver_maps->Add(handle(
        function->context()->global_object()->global_receiver()->map()),
  // Find the object that actually holds the API callback; inlining is only
  // valid if the holder is reachable from the expected receiver type.
  CallOptimization::HolderLookup holder_lookup =
      CallOptimization::kHolderNotFound;
  Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
      receiver_maps->first(), &holder_lookup);
  if (holder_lookup == CallOptimization::kHolderNotFound) return false;
  if (FLAG_trace_inlining) {
    PrintF("Inlining api function ");
    function->ShortPrint();
  bool drop_extra = false;
  bool is_store = false;
  switch (call_type) {
    case kCallApiFunction:
    case kCallApiMethod:
      // Need to check that none of the receiver maps could have changed.
      Add<HCheckMaps>(receiver, receiver_maps);
      // Need to ensure the chain between receiver and api_holder is intact.
      if (holder_lookup == CallOptimization::kHolderFound) {
        AddCheckPrototypeMaps(api_holder, receiver_maps->first());
        ASSERT_EQ(holder_lookup, CallOptimization::kHolderIsReceiver);
      // Includes receiver.
      PushArgumentsFromEnvironment(argc + 1);
      // Drop function after call.
    case kCallApiGetter:
      // Receiver and prototype chain cannot have changed.
      ASSERT_EQ(NULL, receiver);
      // Receiver is on expression stack.
      Add<HPushArgument>(receiver);
    case kCallApiSetter:
      // Receiver and prototype chain cannot have changed.
      ASSERT_EQ(NULL, receiver);
      // Receiver and value are on expression stack.
      HValue* value = Pop();
      Add<HPushArgument>(receiver);
      Add<HPushArgument>(value);
  // Materialize the holder constant only when it is a distinct object;
  // when the holder is the receiver itself no extra constant is needed.
  HValue* holder = NULL;
  switch (holder_lookup) {
    case CallOptimization::kHolderFound:
      holder = Add<HConstant>(api_holder);
    case CallOptimization::kHolderIsReceiver:
    case CallOptimization::kHolderNotFound:
  // Assemble the stub inputs: the callback's data object and the raw
  // address of the C++ callback, wrapped as an external reference.
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data_obj(api_call_info->data(), isolate());
  bool call_data_is_undefined = call_data_obj->IsUndefined();
  HValue* call_data = Add<HConstant>(call_data_obj);
  ApiFunction fun(v8::ToCData<Address>(api_call_info->callback()));
  ExternalReference ref = ExternalReference(&fun,
                                            ExternalReference::DIRECT_API_CALL,
  HValue* api_function_address = Add<HConstant>(ExternalReference(ref));
  HValue* op_vals[] = {
    Add<HConstant>(function),
    api_function_address,
  CallInterfaceDescriptor* descriptor =
      isolate()->call_descriptor(Isolate::ApiFunctionCall);
  CallApiFunctionStub stub(isolate(), is_store, call_data_is_undefined, argc);
  Handle<Code> code = stub.GetCode();
  HConstant* code_value = Add<HConstant>(code);
  // The operand array must match the stub's calling convention exactly.
  ASSERT((sizeof(op_vals) / kPointerSize) ==
         descriptor->environment_length());
  HInstruction* call = New<HCallWithDescriptor>(
      code_value, argc + 1, descriptor,
      Vector<HValue*>(op_vals, descriptor->environment_length()));
  if (drop_extra) Drop(1);  // Drop function.
  ast_context()->ReturnInstruction(call, ast_id);
// Recognizes the pattern f.apply(receiver, arguments) where |arguments|
// is the current function's arguments object, and compiles it as a direct
// call (HApplyArguments at the top level, or an explicit push of the known
// argument values when inside an inlined function). Returns false when the
// pattern does not match, leaving the generic call path to handle it.
bool HOptimizedGraphBuilder::TryCallApply(Call* expr) {
  ASSERT(expr->expression()->IsProperty());
  if (!expr->IsMonomorphic()) {
  // Only Function.prototype.apply on an actual JSFunction qualifies.
  Handle<Map> function_map = expr->GetReceiverTypes()->first();
  if (function_map->instance_type() != JS_FUNCTION_TYPE ||
      !expr->target()->shared()->HasBuiltinFunctionId() ||
      expr->target()->shared()->builtin_function_id() != kFunctionApply) {
  if (current_info()->scope()->arguments() == NULL) return false;
  ZoneList<Expression*>* args = expr->arguments();
  if (args->length() != 2) return false;
  // The second argument must be the (stack-allocated) arguments object
  // of the current function, not an arbitrary array-like value.
  VariableProxy* arg_two = args->at(1)->AsVariableProxy();
  if (arg_two == NULL || !arg_two->var()->IsStackAllocated()) return false;
  HValue* arg_two_value = LookupAndMakeLive(arg_two->var());
  if (!arg_two_value->CheckFlag(HValue::kIsArguments)) return false;
  // Found pattern f.apply(receiver, arguments).
  CHECK_ALIVE_OR_RETURN(VisitForValue(args->at(0)), true);
  HValue* receiver = Pop();  // receiver
  HValue* function = Pop();  // f
  if (function_state()->outer() == NULL) {
    // Top-level (not inlined): read the actual arguments adaptor frame.
    HInstruction* elements = Add<HArgumentsElements>(false);
    HInstruction* length = Add<HArgumentsLength>(elements);
    HValue* wrapped_receiver = BuildWrapReceiver(receiver, function);
    HInstruction* result = New<HApplyArguments>(function,
    ast_context()->ReturnInstruction(result, expr->id());
  // We are inside inlined function and we know exactly what is inside
  // arguments object. But we need to be able to materialize at deopt.
  ASSERT_EQ(environment()->arguments_environment()->parameter_count(),
            function_state()->entry()->arguments_object()->arguments_count());
  HArgumentsObject* args = function_state()->entry()->arguments_object();
  const ZoneList<HValue*>* arguments_values = args->arguments_values();
  int arguments_count = arguments_values->length();
  // Push wrapped receiver followed by the known argument values
  // (index 0 of arguments_values is the original receiver, skipped here).
  Push(BuildWrapReceiver(receiver, function));
  for (int i = 1; i < arguments_count; i++) {
    Push(arguments_values->at(i));
  // If the callee is a constant JSFunction, attempt to inline the apply
  // target itself before falling back to HInvokeFunction.
  Handle<JSFunction> known_function;
  if (function->IsConstant() &&
      HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
    known_function = Handle<JSFunction>::cast(
        HConstant::cast(function)->handle(isolate()));
    int args_count = arguments_count - 1;  // Excluding receiver.
    if (TryInlineApply(known_function, expr, args_count)) return true;
  PushArgumentsFromEnvironment(arguments_count);
  HInvokeFunction* call = New<HInvokeFunction>(
      function, known_function, arguments_count);
  Drop(1);  // Function.
  ast_context()->ReturnInstruction(call, expr->id());
// Returns the value to use as the implicit receiver when calling |target|
// without an explicit one: the global receiver for sloppy-mode non-native
// functions, undefined otherwise (strict/native functions see undefined).
HValue* HOptimizedGraphBuilder::ImplicitReceiverFor(HValue* function,
                                                    Handle<JSFunction> target) {
  SharedFunctionInfo* shared = target->shared();
  if (shared->strict_mode() == SLOPPY && !shared->native()) {
    // Cannot embed a direct reference to the global proxy
    // as it is dropped on deserialization.
    CHECK(!Serializer::enabled(isolate()));
    Handle<JSObject> global_receiver(
        target->context()->global_object()->global_receiver());
    return Add<HConstant>(global_receiver);
  return graph()->GetConstantUndefined();
// AST visitor for a call expression. Dispatches on the callee's shape:
// property calls (obj.f() / obj[k]()) go through receiver-type feedback,
// possible polymorphic handling, apply-pattern detection, builtin/API/
// regular inlining; global and monomorphic variable calls get a checked
// direct-call fast path; everything else becomes a generic HCallFunction.
void HOptimizedGraphBuilder::VisitCall(Call* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  Expression* callee = expr->expression();
  int argument_count = expr->arguments()->length() + 1;  // Plus receiver.
  HInstruction* call = NULL;
  Property* prop = callee->AsProperty();
    CHECK_ALIVE(VisitForValue(prop->obj()));
    HValue* receiver = Top();
    // Use type feedback to find the receiver maps seen so far.
    SmallMapList* types;
    ComputeReceiverTypes(expr, receiver, &types, zone());
    if (prop->key()->IsPropertyName() && types->length() > 0) {
      Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
      PropertyAccessInfo info(this, LOAD, ToType(types->first()), name);
      // Multiple incompatible maps: emit a polymorphic call sequence.
      if (!info.CanAccessAsMonomorphic(types)) {
        HandlePolymorphicCallNamed(expr, receiver, types, name);
    if (!prop->key()->IsPropertyName()) {
      CHECK_ALIVE(VisitForValue(prop->key()));
    // Load the callee off the receiver (named or keyed).
    CHECK_ALIVE(PushLoad(prop, receiver, key));
    HValue* function = Pop();
    if (FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
    // Push the function under the receiver.
    environment()->SetExpressionStackAt(0, function);
    if (function->IsConstant() &&
        HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
      // Known constant callee: try progressively specialized strategies.
      Handle<JSFunction> known_function = Handle<JSFunction>::cast(
          HConstant::cast(function)->handle(isolate()));
      expr->set_target(known_function);
      if (TryCallApply(expr)) return;
      CHECK_ALIVE(VisitExpressions(expr->arguments()));
      Handle<Map> map = types->length() == 1 ? types->first() : Handle<Map>();
      if (TryInlineBuiltinMethodCall(expr, receiver, map)) {
        if (FLAG_trace_inlining) {
          PrintF("Inlining builtin ");
          known_function->ShortPrint();
      if (TryInlineApiMethodCall(expr, receiver, types)) return;
      // Wrap the receiver if necessary.
      if (NeedsWrappingFor(ToType(types->first()), known_function)) {
        // Since HWrapReceiver currently cannot actually wrap numbers and
        // strings, use the regular CallFunctionStub for method calls to wrap
        // TODO(verwaest): Support creation of value wrappers directly in
        call = New<HCallFunction>(
            function, argument_count, WRAP_AND_CALL);
      } else if (TryInlineCall(expr)) {
        call = BuildCallConstantFunction(known_function, argument_count);
      // Callee not a known constant: generic call, choosing flags based
      // on whether the receiver is already known to be a JSObject.
      CHECK_ALIVE(VisitExpressions(expr->arguments()));
      CallFunctionFlags flags = receiver->type().IsJSObject()
          ? NO_CALL_FUNCTION_FLAGS : CALL_AS_METHOD;
      call = New<HCallFunction>(function, argument_count, flags);
    PushArgumentsFromEnvironment(argument_count);
    // Non-property callee: a variable proxy or arbitrary expression.
    VariableProxy* proxy = expr->expression()->AsVariableProxy();
    if (proxy != NULL && proxy->var()->is_possibly_eval(isolate())) {
      // Direct eval would need the full unoptimized machinery; give up.
      return Bailout(kPossibleDirectCallToEval);
    // The function is on the stack in the unoptimized code during
    // evaluation of the arguments.
    CHECK_ALIVE(VisitForValue(expr->expression()));
    HValue* function = Top();
    bool global_call = proxy != NULL && proxy->var()->IsUnallocated();
      Variable* var = proxy->var();
      bool known_global_function = false;
      // If there is a global property cell for the name at compile time and
      // access check is not enabled we assume that the function will not change
      // and generate optimized code for calling the function.
      LookupResult lookup(isolate());
      GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, LOAD);
      if (type == kUseCell &&
          !current_info()->global_object()->IsAccessCheckNeeded()) {
        Handle<GlobalObject> global(current_info()->global_object());
        known_global_function = expr->ComputeGlobalTarget(global, &lookup);
      if (known_global_function) {
        // Guard the fast path: deopt if the global's value changed.
        Add<HCheckValue>(function, expr->target());
        // Placeholder for the receiver.
        Push(graph()->GetConstantUndefined());
        CHECK_ALIVE(VisitExpressions(expr->arguments()));
        // Patch the global object on the stack by the expected receiver.
        HValue* receiver = ImplicitReceiverFor(function, expr->target());
        const int receiver_index = argument_count - 1;
        environment()->SetExpressionStackAt(receiver_index, receiver);
        if (TryInlineBuiltinFunctionCall(expr)) {
          if (FLAG_trace_inlining) {
            PrintF("Inlining builtin ");
            expr->target()->ShortPrint();
        if (TryInlineApiFunctionCall(expr, receiver)) return;
        if (TryInlineCall(expr)) return;
        PushArgumentsFromEnvironment(argument_count);
        call = BuildCallConstantFunction(expr->target(), argument_count);
        // Unknown global function: generic call with undefined receiver.
        Push(graph()->GetConstantUndefined());
        CHECK_ALIVE(VisitExpressions(expr->arguments()));
        PushArgumentsFromEnvironment(argument_count);
        call = New<HCallFunction>(function, argument_count);
    } else if (expr->IsMonomorphic()) {
      // Non-global but monomorphic callee: check identity, then either
      // inline or emit a direct HInvokeFunction to the known target.
      Add<HCheckValue>(function, expr->target());
      Push(graph()->GetConstantUndefined());
      CHECK_ALIVE(VisitExpressions(expr->arguments()));
      HValue* receiver = ImplicitReceiverFor(function, expr->target());
      const int receiver_index = argument_count - 1;
      environment()->SetExpressionStackAt(receiver_index, receiver);
      if (TryInlineBuiltinFunctionCall(expr)) {
        if (FLAG_trace_inlining) {
          PrintF("Inlining builtin ");
          expr->target()->ShortPrint();
      if (TryInlineApiFunctionCall(expr, receiver)) return;
      if (TryInlineCall(expr)) return;
      call = PreProcessCall(New<HInvokeFunction>(
          function, expr->target(), argument_count));
      // Fully generic call.
      Push(graph()->GetConstantUndefined());
      CHECK_ALIVE(VisitExpressions(expr->arguments()));
      PushArgumentsFromEnvironment(argument_count);
      call = New<HCallFunction>(function, argument_count);
  Drop(1);  // Drop the function.
  return ast_context()->ReturnInstruction(call, expr->id());
// Emits an inlined allocation for `new Array(...)` using the elements-kind
// feedback recorded on the call's AllocationSite. Registers a dependency so
// the code deopts if the site's transition feedback later changes. Handles
// the zero-arg, single-length-arg, and explicit-elements cases separately.
void HOptimizedGraphBuilder::BuildInlinedCallNewArray(CallNew* expr) {
  NoObservableSideEffectsScope no_effects(this);
  int argument_count = expr->arguments()->length();
  // We should at least have the constructor on the expression stack.
  HValue* constructor = environment()->ExpressionStackAt(argument_count);
  ElementsKind kind = expr->elements_kind();
  Handle<AllocationSite> site = expr->allocation_site();
  ASSERT(!site.is_null());
  // Register on the site for deoptimization if the transition feedback changes.
  AllocationSite::AddDependentCompilationInfo(
      site, AllocationSite::TRANSITIONS, top_info());
  HInstruction* site_instruction = Add<HConstant>(site);
  // In the single constant argument case, we may have to adjust elements kind
  // to avoid creating a packed non-empty array.
  if (argument_count == 1 && !IsHoleyElementsKind(kind)) {
    HValue* argument = environment()->Top();
    if (argument->IsConstant()) {
      HConstant* constant_argument = HConstant::cast(argument);
      ASSERT(constant_argument->HasSmiValue());
      int constant_array_size = constant_argument->Integer32Value();
      if (constant_array_size != 0) {
        // new Array(n) with n > 0 produces n holes, so force a holey kind.
        kind = GetHoleyElementsKind(kind);
  JSArrayBuilder array_builder(this,
                               DISABLE_ALLOCATION_SITES);
  if (argument_count == 0) {
    new_object = array_builder.AllocateEmptyArray();
  } else if (argument_count == 1) {
    HValue* argument = environment()->Top();
    new_object = BuildAllocateArrayFromLength(&array_builder, argument);
    HValue* length = Add<HConstant>(argument_count);
    // Smi arrays need to initialize array elements with the hole because
    // bailout could occur if the arguments don't fit in a smi.
    // TODO(mvstanton): If all the arguments are constants in smi range, then
    // we could set fill_with_hole to false and save a few instructions.
    JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
        ? JSArrayBuilder::FILL_WITH_HOLE
        : JSArrayBuilder::DONT_FILL_WITH_HOLE;
    new_object = array_builder.AllocateArray(length, length, fill_mode);
    HValue* elements = array_builder.GetElementsLocation();
    // Store each explicit argument into the freshly allocated backing store.
    for (int i = 0; i < argument_count; i++) {
      HValue* value = environment()->ExpressionStackAt(argument_count - i - 1);
      HValue* constant_i = Add<HConstant>(i);
      Add<HStoreKeyed>(elements, constant_i, value, kind);
  Drop(argument_count + 1);  // drop constructor and args.
  ast_context()->ReturnValue(new_object);
// Checks whether allocation using the given constructor can be inlined:
// the constructor must already have an initial map describing a plain
// JSObject that is small enough for an inline HAllocate and has no
// pre-allocated property backing store to set up.
static bool IsAllocationInlineable(Handle<JSFunction> constructor) {
  return constructor->has_initial_map() &&
      constructor->initial_map()->instance_type() == JS_OBJECT_TYPE &&
      constructor->initial_map()->instance_size() < HAllocate::kMaxInlineSize &&
      constructor->initial_map()->InitialPropertiesLength() == 0;
// Decides whether `new Array(...)` can be compiled via
// BuildInlinedCallNewArray. Respects the AllocationSite's inlining veto,
// and rejects the single-argument case when a constant length is negative
// or too large for a fast-elements array. Traces the decision.
bool HOptimizedGraphBuilder::IsCallNewArrayInlineable(CallNew* expr) {
  Handle<JSFunction> caller = current_info()->closure();
  Handle<JSFunction> target(isolate()->native_context()->array_function(),
  int argument_count = expr->arguments()->length();
  // We should have the function plus array arguments on the environment stack.
  ASSERT(environment()->length() >= (argument_count + 1));
  Handle<AllocationSite> site = expr->allocation_site();
  ASSERT(!site.is_null());
  bool inline_ok = false;
  if (site->CanInlineCall()) {
    // We also want to avoid inlining in certain 1 argument scenarios.
    if (argument_count == 1) {
      HValue* argument = Top();
      if (argument->IsConstant()) {
        // Do not inline if the constant length argument is not a smi or
        // outside the valid range for a fast array.
        HConstant* constant_argument = HConstant::cast(argument);
        if (constant_argument->HasSmiValue()) {
          int value = constant_argument->Integer32Value();
          inline_ok = value >= 0 &&
              value < JSObject::kInitialMaxFastElementArray;
            TraceInline(target, caller,
                        "Length outside of valid array range");
    TraceInline(target, caller, "AllocationSite requested no inlining.");
  // A NULL reason means the inlining attempt is considered successful.
  TraceInline(target, caller, NULL);
// AST visitor for `new F(...)`. When the target is monomorphic and its
// allocation is inlineable, allocates and initializes the receiver object
// inline (honoring pretenuring feedback) and tries to inline the constructor
// body; on failure it unwinds the speculative allocation instructions and
// falls back to HCallNew. The Array function gets its own specialized path.
void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
  int argument_count = expr->arguments()->length() + 1;  // Plus constructor.
  Factory* factory = isolate()->factory();
  // The constructor function is on the stack in the unoptimized code
  // during evaluation of the arguments.
  CHECK_ALIVE(VisitForValue(expr->expression()));
  HValue* function = Top();
  CHECK_ALIVE(VisitExpressions(expr->arguments()));
  if (FLAG_inline_construct &&
      expr->IsMonomorphic() &&
      IsAllocationInlineable(expr->target())) {
    Handle<JSFunction> constructor = expr->target();
    // Deopt guard: the fast path is only valid for this exact constructor.
    HValue* check = Add<HCheckValue>(function, constructor);
    // Force completion of inobject slack tracking before generating
    // allocation code to finalize instance size.
    if (constructor->shared()->IsInobjectSlackTrackingInProgress()) {
      constructor->shared()->CompleteInobjectSlackTracking();
    // Calculate instance size from initial map of constructor.
    ASSERT(constructor->has_initial_map());
    Handle<Map> initial_map(constructor->initial_map());
    int instance_size = initial_map->instance_size();
    ASSERT(initial_map->InitialPropertiesLength() == 0);
    // Allocate an instance of the implicit receiver object.
    HValue* size_in_bytes = Add<HConstant>(instance_size);
    HAllocationMode allocation_mode;
    if (FLAG_pretenuring_call_new) {
      if (FLAG_allocation_site_pretenuring) {
        // Try to use pretenuring feedback.
        Handle<AllocationSite> allocation_site = expr->allocation_site();
        allocation_mode = HAllocationMode(allocation_site);
        // Take a dependency on allocation site.
        AllocationSite::AddDependentCompilationInfo(allocation_site,
                                                    AllocationSite::TENURING,
        allocation_mode = HAllocationMode(
            isolate()->heap()->GetPretenureMode());
    HAllocate* receiver =
        BuildAllocate(size_in_bytes, HType::JSObject(), JS_OBJECT_TYPE,
    receiver->set_known_initial_map(initial_map);
    // Load the initial map from the constructor.
    HValue* constructor_value = Add<HConstant>(constructor);
    HValue* initial_map_value =
        Add<HLoadNamedField>(constructor_value, static_cast<HValue*>(NULL),
                             HObjectAccess::ForMapAndOffset(
                                 handle(constructor->map()),
                                 JSFunction::kPrototypeOrInitialMapOffset));
    // Initialize map and fields of the newly allocated object.
    { NoObservableSideEffectsScope no_effects(this);
      ASSERT(initial_map->instance_type() == JS_OBJECT_TYPE);
      Add<HStoreNamedField>(receiver,
          HObjectAccess::ForMapAndOffset(initial_map, JSObject::kMapOffset),
      HValue* empty_fixed_array = Add<HConstant>(factory->empty_fixed_array());
      Add<HStoreNamedField>(receiver,
          HObjectAccess::ForMapAndOffset(initial_map,
                                         JSObject::kPropertiesOffset),
      Add<HStoreNamedField>(receiver,
          HObjectAccess::ForMapAndOffset(initial_map,
                                         JSObject::kElementsOffset),
      if (initial_map->inobject_properties() != 0) {
        // Pre-fill every in-object property slot with undefined so the
        // object is valid before the constructor body runs.
        HConstant* undefined = graph()->GetConstantUndefined();
        for (int i = 0; i < initial_map->inobject_properties(); i++) {
          int property_offset = initial_map->GetInObjectPropertyOffset(i);
          Add<HStoreNamedField>(receiver,
              HObjectAccess::ForMapAndOffset(initial_map, property_offset),
    // Replace the constructor function with a newly allocated receiver using
    // the index of the receiver from the top of the expression stack.
    const int receiver_index = argument_count - 1;
    ASSERT(environment()->ExpressionStackAt(receiver_index) == function);
    environment()->SetExpressionStackAt(receiver_index, receiver);
    if (TryInlineConstruct(expr, receiver)) return;
    // TODO(mstarzinger): For now we remove the previous HAllocate and all
    // corresponding instructions and instead add HPushArgument for the
    // arguments in case inlining failed. What we actually should do is for
    // inlining to try to build a subgraph without mutating the parent graph.
    HInstruction* instr = current_block()->last();
    while (instr != initial_map_value) {
      HInstruction* prev_instr = instr->previous();
      instr->DeleteAndReplaceWith(NULL);
    initial_map_value->DeleteAndReplaceWith(NULL);
    receiver->DeleteAndReplaceWith(NULL);
    check->DeleteAndReplaceWith(NULL);
    environment()->SetExpressionStackAt(receiver_index, function);
    HInstruction* call =
        PreProcessCall(New<HCallNew>(function, argument_count));
    return ast_context()->ReturnInstruction(call, expr->id());
    // The constructor function is both an operand to the instruction and an
    // argument to the construct call.
    Handle<JSFunction> array_function(
        isolate()->native_context()->array_function(), isolate());
    bool use_call_new_array = expr->target().is_identical_to(array_function);
    if (use_call_new_array && IsCallNewArrayInlineable(expr)) {
      // Verify we are still calling the array function for our native context.
      Add<HCheckValue>(function, array_function);
      BuildInlinedCallNewArray(expr);
    if (use_call_new_array) {
      Add<HCheckValue>(function, array_function);
      call = New<HCallNewArray>(function, argument_count,
                                expr->elements_kind());
      call = New<HCallNew>(function, argument_count);
    PreProcessCall(call);
    return ast_context()->ReturnInstruction(call, expr->id());
// Support for generating inlined runtime functions.

// Lookup table for generators for runtime calls that are generated inline.
// Elements of the table are member pointers to functions of
// HOptimizedGraphBuilder. The table order mirrors INLINE_FUNCTION_LIST
// followed by INLINE_OPTIMIZED_FUNCTION_LIST, so it can be indexed by the
// corresponding inline-function id.
#define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize) \
    &HOptimizedGraphBuilder::Generate##Name,

const HOptimizedGraphBuilder::InlineFunctionGenerator
    HOptimizedGraphBuilder::kInlineFunctionGenerators[] = {
        INLINE_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
        INLINE_OPTIMIZED_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
#undef INLINE_FUNCTION_GENERATOR_ADDRESS
// Initializes the common JSArrayBufferView fields of a freshly allocated
// DataView or TypedArray object: zeroes any embedder internal fields,
// stores byte offset/length, and either links the view into its buffer's
// weak view list (buffer != NULL) or marks it as buffer-less.
template <class ViewClass>
void HGraphBuilder::BuildArrayBufferViewInitialization(
    HValue* byte_offset,
    HValue* byte_length) {
  // Zero out embedder internal fields between kSize and
  // kSizeWithInternalFields.
  for (int offset = ViewClass::kSize;
       offset < ViewClass::kSizeWithInternalFields;
       offset += kPointerSize) {
    Add<HStoreNamedField>(obj,
        HObjectAccess::ForObservableJSObjectOffset(offset),
        graph()->GetConstant0());
  Add<HStoreNamedField>(
      HObjectAccess::ForJSArrayBufferViewByteOffset(),
  Add<HStoreNamedField>(
      HObjectAccess::ForJSArrayBufferViewByteLength(),
  if (buffer != NULL) {
    Add<HStoreNamedField>(
        HObjectAccess::ForJSArrayBufferViewBuffer(), buffer);
    // Prepend this view to the buffer's weak list of views: the view's
    // weak-next points at the buffer's old first view, and the buffer's
    // first-view slot is updated to this view.
    HObjectAccess weak_first_view_access =
        HObjectAccess::ForJSArrayBufferWeakFirstView();
    Add<HStoreNamedField>(obj,
        HObjectAccess::ForJSArrayBufferViewWeakNext(),
        Add<HLoadNamedField>(buffer,
                             static_cast<HValue*>(NULL),
                             weak_first_view_access));
    Add<HStoreNamedField>(buffer, weak_first_view_access, obj);
    // No backing buffer: store 0 for the buffer and undefined for weak-next.
    Add<HStoreNamedField>(
        HObjectAccess::ForJSArrayBufferViewBuffer(),
        Add<HConstant>(static_cast<int32_t>(0)));
    Add<HStoreNamedField>(obj,
        HObjectAccess::ForJSArrayBufferViewWeakNext(),
        graph()->GetConstantUndefined());
// Inline implementation of the %DataViewInitialize runtime call.
// Evaluates the four arguments (object, buffer, byteOffset, byteLength)
// and delegates to the shared JSArrayBufferView initialization.
void HOptimizedGraphBuilder::GenerateDataViewInitialize(
    CallRuntime* expr) {
  ZoneList<Expression*>* arguments = expr->arguments();
  NoObservableSideEffectsScope scope(this);
  ASSERT(arguments->length()== 4);
  CHECK_ALIVE(VisitForValue(arguments->at(0)));
  HValue* obj = Pop();
  CHECK_ALIVE(VisitForValue(arguments->at(1)));
  HValue* buffer = Pop();
  CHECK_ALIVE(VisitForValue(arguments->at(2)));
  HValue* byte_offset = Pop();
  CHECK_ALIVE(VisitForValue(arguments->at(3)));
  HValue* byte_length = Pop();
  BuildArrayBufferViewInitialization<JSDataView>(
      obj, buffer, byte_offset, byte_length);
// Returns the map for a typed array of the given external array type,
// transitioned to |target_kind|: looks up the matching typed-array
// constructor in the native context, takes its initial map, and converts
// that map to the requested elements kind.
static Handle<Map> TypedArrayMap(Isolate* isolate,
                                 ExternalArrayType array_type,
                                 ElementsKind target_kind) {
  Handle<Context> native_context = isolate->native_context();
  Handle<JSFunction> fun;
  switch (array_type) {
// Expands to one case per typed-array type, selecting its constructor.
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
    case kExternal##Type##Array: \
      fun = Handle<JSFunction>(native_context->type##_array_fun()); \
    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
  Handle<Map> map(fun->initial_map());
  return Map::AsElementsKind(map, target_kind);
// Allocates and initializes an ExternalArray elements object for a typed
// array whose data lives in an off-heap ArrayBuffer: stores the map and
// length, then computes the external data pointer as the buffer's backing
// store plus byte offset (skipping the add when the offset is known zero).
HValue* HOptimizedGraphBuilder::BuildAllocateExternalElements(
    ExternalArrayType array_type,
    bool is_zero_byte_offset,
    HValue* buffer, HValue* byte_offset, HValue* length) {
  Handle<Map> external_array_map(
      isolate()->heap()->MapForExternalArrayType(array_type));
  // The HForceRepresentation is to prevent possible deopt on int-smi
  // conversion after allocation but before the new object fields are set.
  length = AddUncasted<HForceRepresentation>(length, Representation::Smi());
      Add<HConstant>(ExternalArray::kAlignedSize),
      external_array_map->instance_type());
  AddStoreMapConstant(elements, external_array_map);
  Add<HStoreNamedField>(elements,
      HObjectAccess::ForFixedArrayLength(), length);
  HValue* backing_store = Add<HLoadNamedField>(
      buffer, static_cast<HValue*>(NULL),
      HObjectAccess::ForJSArrayBufferBackingStore());
  HValue* typed_array_start;
  if (is_zero_byte_offset) {
    typed_array_start = backing_store;
    HInstruction* external_pointer =
        AddUncasted<HAdd>(backing_store, byte_offset);
    // Arguments are checked prior to call to TypedArrayInitialize,
    // including byte_offset.
    external_pointer->ClearFlag(HValue::kCanOverflow);
    typed_array_start = external_pointer;
  Add<HStoreNamedField>(elements,
      HObjectAccess::ForExternalArrayExternalPointer(),
// Allocates and initializes an on-heap FixedTypedArray backing store:
// computes the total object size (header + byte length, aligned to object
// alignment when element size requires it), allocates, stores map and
// length, then fills all elements with an appropriate zero filler. SIMD
// element kinds (Float32x4/Float64x2/Int32x4 — this is a SIMD-extended V8
// fork) get either a hardware zero value or a boxed zero object as filler.
HValue* HOptimizedGraphBuilder::BuildAllocateFixedTypedArray(
    ExternalArrayType array_type, size_t element_size,
    ElementsKind fixed_elements_kind,
    HValue* byte_length, HValue* length) {
      (FixedTypedArrayBase::kHeaderSize & kObjectAlignmentMask) == 0);
  // if fixed array's elements are not aligned to object's alignment,
  // we need to align the whole array to object alignment.
  if (element_size % kObjectAlignment != 0) {
    total_size = BuildObjectSizeAlignment(
        byte_length, FixedTypedArrayBase::kHeaderSize);
    total_size = AddUncasted<HAdd>(byte_length,
        Add<HConstant>(FixedTypedArrayBase::kHeaderSize));
    total_size->ClearFlag(HValue::kCanOverflow);
  // The HForceRepresentation is to prevent possible deopt on int-smi
  // conversion after allocation but before the new object fields are set.
  length = AddUncasted<HForceRepresentation>(length, Representation::Smi());
  Handle<Map> fixed_typed_array_map(
      isolate()->heap()->MapForFixedTypedArray(array_type));
      Add<HAllocate>(total_size, HType::Tagged(),
          fixed_typed_array_map->instance_type());
  AddStoreMapConstant(elements, fixed_typed_array_map);
  Add<HStoreNamedField>(elements,
      HObjectAccess::ForFixedArrayLength(),
  HValue* filler = Add<HConstant>(static_cast<int32_t>(0));
  if (IsFixedFloat32x4ElementsKind(fixed_elements_kind)) {
    if (CpuFeatures::SupportsSIMD128InCrankshaft()) {
      filler = AddUncasted<HNullarySIMDOperation>(kFloat32x4Zero);
      // No SIMD support in Crankshaft: allocate a boxed zero Float32x4.
      HValue* size = Add<HConstant>(Float32x4::kSize);
      filler = Add<HAllocate>(size, HType::Tagged(), NOT_TENURED,
          Float32x4::kInstanceType);
      AddStoreMapConstant(filler, isolate()->factory()->float32x4_map());
      HValue* zero = Add<HConstant>(static_cast<double>(0.0));
      Add<HStoreNamedField>(filler, HObjectAccess::ForSIMD128Double0(), zero);
      Add<HStoreNamedField>(filler, HObjectAccess::ForSIMD128Double1(), zero);
  } else if (IsFixedFloat64x2ElementsKind(fixed_elements_kind)) {
    if (CpuFeatures::SupportsSIMD128InCrankshaft()) {
      filler = AddUncasted<HNullarySIMDOperation>(kFloat64x2Zero);
      // Boxed zero Float64x2 fallback.
      HValue* size = Add<HConstant>(Float64x2::kSize);
      filler = Add<HAllocate>(size, HType::Tagged(), NOT_TENURED,
          Float64x2::kInstanceType);
      AddStoreMapConstant(filler, isolate()->factory()->float64x2_map());
      HValue* zero = Add<HConstant>(static_cast<double>(0.0));
      Add<HStoreNamedField>(filler, HObjectAccess::ForSIMD128Double0(), zero);
      Add<HStoreNamedField>(filler, HObjectAccess::ForSIMD128Double1(), zero);
  } else if (IsFixedInt32x4ElementsKind(fixed_elements_kind)) {
    if (CpuFeatures::SupportsSIMD128InCrankshaft()) {
      filler = AddUncasted<HNullarySIMDOperation>(kInt32x4Zero);
      // Boxed zero Int32x4 fallback.
      HValue* size = Add<HConstant>(Int32x4::kSize);
      filler = Add<HAllocate>(size, HType::Tagged(), NOT_TENURED,
          Int32x4::kInstanceType);
      AddStoreMapConstant(filler, isolate()->factory()->int32x4_map());
      HValue* zero = Add<HConstant>(static_cast<double>(0.0));
      Add<HStoreNamedField>(filler, HObjectAccess::ForSIMD128Double0(), zero);
      Add<HStoreNamedField>(filler, HObjectAccess::ForSIMD128Double1(), zero);
  // Fill the elements array with the chosen filler value.
  LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
  HValue* key = builder.BeginBody(
      Add<HConstant>(static_cast<int32_t>(0)),
  Add<HStoreKeyed>(elements, key, filler, fixed_elements_kind);
// Inline implementation of the %TypedArrayInitialize runtime call.
// Decodes the compile-time-constant array id into array type / element
// size / elements kinds, initializes the view fields, computes the element
// count from byte length, and allocates either external elements (backed
// by an ArrayBuffer) or an on-heap fixed typed array. When the byte offset
// is not statically zero it guards on the offset being a Smi and falls
// back to the real runtime call otherwise.
void HOptimizedGraphBuilder::GenerateTypedArrayInitialize(
    CallRuntime* expr) {
  ZoneList<Expression*>* arguments = expr->arguments();
  NoObservableSideEffectsScope scope(this);
  static const int kObjectArg = 0;
  static const int kArrayIdArg = 1;
  static const int kBufferArg = 2;
  static const int kByteOffsetArg = 3;
  static const int kByteLengthArg = 4;
  static const int kArgsLength = 5;
  ASSERT(arguments->length() == kArgsLength);
  CHECK_ALIVE(VisitForValue(arguments->at(kObjectArg)));
  HValue* obj = Pop();
  // The array id must be a compile-time Smi literal.
  ASSERT(arguments->at(kArrayIdArg)->node_type() == AstNode::kLiteral);
  Handle<Object> value =
      static_cast<Literal*>(arguments->at(kArrayIdArg))->value();
  ASSERT(value->IsSmi());
  int array_id = Smi::cast(*value)->value();
  // A null buffer literal means an on-heap (buffer-less) typed array.
  if (!arguments->at(kBufferArg)->IsNullLiteral()) {
    CHECK_ALIVE(VisitForValue(arguments->at(kBufferArg)));
  HValue* byte_offset;
  bool is_zero_byte_offset;
  if (arguments->at(kByteOffsetArg)->node_type() == AstNode::kLiteral
      && Smi::FromInt(0) ==
      *static_cast<Literal*>(arguments->at(kByteOffsetArg))->value()) {
    byte_offset = Add<HConstant>(static_cast<int32_t>(0));
    is_zero_byte_offset = true;
    CHECK_ALIVE(VisitForValue(arguments->at(kByteOffsetArg)));
    byte_offset = Pop();
    is_zero_byte_offset = false;
    ASSERT(buffer != NULL);
  CHECK_ALIVE(VisitForValue(arguments->at(kByteLengthArg)));
  HValue* byte_length = Pop();
  // Dynamic byte offsets must be Smis for the fast path; otherwise we
  // branch to the runtime fallback below.
  IfBuilder byte_offset_smi(this);
  if (!is_zero_byte_offset) {
    byte_offset_smi.If<HIsSmiAndBranch>(byte_offset);
    byte_offset_smi.Then();
  ExternalArrayType array_type =
      kExternalInt8Array;  // Bogus initialization.
  size_t element_size = 1;  // Bogus initialization.
  ElementsKind external_elements_kind =  // Bogus initialization.
      EXTERNAL_INT8_ELEMENTS;
  ElementsKind fixed_elements_kind =  // Bogus initialization.
  Runtime::ArrayIdToTypeAndSize(array_id,
      &external_elements_kind,
      &fixed_elements_kind,
  { // byte_offset is Smi.
    BuildArrayBufferViewInitialization<JSTypedArray>(
        obj, buffer, byte_offset, byte_length);
    // Element count = byte length / element size.
    HInstruction* length = AddUncasted<HDiv>(byte_length,
        Add<HConstant>(static_cast<int32_t>(element_size)));
    Add<HStoreNamedField>(obj,
        HObjectAccess::ForJSTypedArrayLength(),
    if (buffer != NULL) {
      elements = BuildAllocateExternalElements(
          array_type, is_zero_byte_offset, buffer, byte_offset, length);
      Handle<Map> obj_map = TypedArrayMap(
          isolate(), array_type, external_elements_kind);
      AddStoreMapConstant(obj, obj_map);
      ASSERT(is_zero_byte_offset);
      elements = BuildAllocateFixedTypedArray(
          array_type, element_size, fixed_elements_kind,
          byte_length, length);
    Add<HStoreNamedField>(
        obj, HObjectAccess::ForElementsPointer(), elements);
  if (!is_zero_byte_offset) {
    byte_offset_smi.Else();
    { // byte_offset is not Smi.
      // Slow path: re-push the arguments and call the actual runtime
      // function %TypedArrayInitialize.
      CHECK_ALIVE(VisitForValue(arguments->at(kArrayIdArg)));
      PushArgumentsFromEnvironment(kArgsLength);
      Add<HCallRuntime>(expr->name(), expr->function(), kArgsLength);
  byte_offset_smi.End();
// Inline intrinsic %_MaxSmi: materializes Smi::kMaxValue as an int32
// constant and returns it through the current AST context.
// NOTE(review): this extraction is missing interleaved lines (e.g. the
// closing brace); code below is kept byte-identical to the visible source.
9231 void HOptimizedGraphBuilder::GenerateMaxSmi(CallRuntime* expr) {
9232 ASSERT(expr->arguments()->length() == 0);
9233 HConstant* max_smi = New<HConstant>(static_cast<int32_t>(Smi::kMaxValue));
9234 return ast_context()->ReturnInstruction(max_smi, expr->id());
// Inline intrinsic: returns the flag-controlled maximum size (in bytes)
// for typed arrays that are allocated in the heap rather than externally.
9238 void HOptimizedGraphBuilder::GenerateTypedArrayMaxSizeInHeap(
9239 CallRuntime* expr) {
9240 ASSERT(expr->arguments()->length() == 0);
9241 HConstant* result = New<HConstant>(static_cast<int32_t>(
9242 FLAG_typed_array_max_size_in_heap));
9243 return ast_context()->ReturnInstruction(result, expr->id());
// Inline intrinsic: loads the byteLength field of a JSArrayBuffer.
// The single argument is evaluated for its value and popped as the buffer.
// NOTE(review): one argument line of the HLoadNamedField call (the buffer
// operand) is elided in this extraction.
9247 void HOptimizedGraphBuilder::GenerateArrayBufferGetByteLength(
9248 CallRuntime* expr) {
9249 ASSERT(expr->arguments()->length() == 1);
9250 CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
9251 HValue* buffer = Pop();
9252 HInstruction* result = New<HLoadNamedField>(
9254 static_cast<HValue*>(NULL),
9255 HObjectAccess::ForJSArrayBufferByteLength());
9256 return ast_context()->ReturnInstruction(result, expr->id());
// Inline intrinsic: loads the byteLength field of a JSArrayBufferView
// (typed array or DataView). Mirrors GenerateArrayBufferGetByteLength.
9260 void HOptimizedGraphBuilder::GenerateArrayBufferViewGetByteLength(
9261 CallRuntime* expr) {
9262 ASSERT(expr->arguments()->length() == 1);
9263 CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
9264 HValue* buffer = Pop();
9265 HInstruction* result = New<HLoadNamedField>(
9267 static_cast<HValue*>(NULL),
9268 HObjectAccess::ForJSArrayBufferViewByteLength());
9269 return ast_context()->ReturnInstruction(result, expr->id());
// Inline intrinsic: loads the byteOffset field of a JSArrayBufferView.
9273 void HOptimizedGraphBuilder::GenerateArrayBufferViewGetByteOffset(
9274 CallRuntime* expr) {
9275 ASSERT(expr->arguments()->length() == 1);
9276 CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
9277 HValue* buffer = Pop();
9278 HInstruction* result = New<HLoadNamedField>(
9280 static_cast<HValue*>(NULL),
9281 HObjectAccess::ForJSArrayBufferViewByteOffset());
9282 return ast_context()->ReturnInstruction(result, expr->id());
// Inline intrinsic: loads the (element) length field of a JSTypedArray.
9286 void HOptimizedGraphBuilder::GenerateTypedArrayGetLength(
9287 CallRuntime* expr) {
9288 ASSERT(expr->arguments()->length() == 1);
9289 CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
9290 HValue* buffer = Pop();
9291 HInstruction* result = New<HLoadNamedField>(
9293 static_cast<HValue*>(NULL),
9294 HObjectAccess::ForJSTypedArrayLength());
9295 return ast_context()->ReturnInstruction(result, expr->id());
// Lowers a CallRuntime AST node. JS-runtime ("%") calls through the
// JavaScript builtins object bail out; INLINE/INLINE_OPTIMIZED intrinsics
// dispatch through the kInlineFunctionGenerators table; all remaining
// runtime functions become an HCallRuntime instruction.
// NOTE(review): several lines (closing braces, the `else` between the two
// paths, one HCallRuntime argument line) are elided in this extraction.
9299 void HOptimizedGraphBuilder::VisitCallRuntime(CallRuntime* expr) {
9300 ASSERT(!HasStackOverflow());
9301 ASSERT(current_block() != NULL);
9302 ASSERT(current_block()->HasPredecessor());
9303 if (expr->is_jsruntime()) {
9304 return Bailout(kCallToAJavaScriptRuntimeFunction);
9307 const Runtime::Function* function = expr->function();
9308 ASSERT(function != NULL);
9310 if (function->intrinsic_type == Runtime::INLINE ||
9311 function->intrinsic_type == Runtime::INLINE_OPTIMIZED) {
9312 ASSERT(expr->name()->length() > 0);
9313 ASSERT(expr->name()->Get(0) == '_');
9314 // Call to an inline function.
// Inline intrinsics are laid out contiguously after kFirstInlineFunction,
// so the table index is a simple offset of the function id.
9315 int lookup_index = static_cast<int>(function->function_id) -
9316 static_cast<int>(Runtime::kFirstInlineFunction);
9317 ASSERT(lookup_index >= 0);
9318 ASSERT(static_cast<size_t>(lookup_index) <
9319 ARRAY_SIZE(kInlineFunctionGenerators));
9320 InlineFunctionGenerator generator = kInlineFunctionGenerators[lookup_index];
9322 // Call the inline code generator using the pointer-to-member.
9323 (this->*generator)(expr);
9325 ASSERT(function->intrinsic_type == Runtime::RUNTIME);
9326 Handle<String> name = expr->name();
9327 int argument_count = expr->arguments()->length();
// Arguments are evaluated onto the environment, then pushed for the call.
9328 CHECK_ALIVE(VisitExpressions(expr->arguments()));
9329 PushArgumentsFromEnvironment(argument_count);
9330 HCallRuntime* call = New<HCallRuntime>(name, function,
9332 return ast_context()->ReturnInstruction(call, expr->id());
// Dispatches a unary operation to the dedicated visitor for its token.
// Only delete/void/typeof/! reach this point; everything else is
// unreachable here.
9337 void HOptimizedGraphBuilder::VisitUnaryOperation(UnaryOperation* expr) {
9338 ASSERT(!HasStackOverflow());
9339 ASSERT(current_block() != NULL);
9340 ASSERT(current_block()->HasPredecessor());
9341 switch (expr->op()) {
9342 case Token::DELETE: return VisitDelete(expr);
9343 case Token::VOID: return VisitVoid(expr);
9344 case Token::TYPEOF: return VisitTypeof(expr);
9345 case Token::NOT: return VisitNot(expr);
9346 default: UNREACHABLE();
// Lowers `delete expr`. Three cases, matching full codegen:
//  - property: invoke the DELETE builtin with (object, key, strict mode);
//  - variable: global vars bail out, stack/context vars evaluate to a
//    boolean constant (true only for 'this');
//  - anything else: evaluate for side effects, result is true.
// NOTE(review): guard lines (e.g. `if (prop != NULL) {`) and closing braces
// are elided in this extraction.
9351 void HOptimizedGraphBuilder::VisitDelete(UnaryOperation* expr) {
9352 Property* prop = expr->expression()->AsProperty();
9353 VariableProxy* proxy = expr->expression()->AsVariableProxy();
9355 CHECK_ALIVE(VisitForValue(prop->obj()));
9356 CHECK_ALIVE(VisitForValue(prop->key()));
9357 HValue* key = Pop();
9358 HValue* obj = Pop();
9359 HValue* function = AddLoadJSBuiltin(Builtins::DELETE);
9360 Add<HPushArgument>(obj);
9361 Add<HPushArgument>(key);
9362 Add<HPushArgument>(Add<HConstant>(function_strict_mode()));
9363 // TODO(olivf) InvokeFunction produces a check for the parameter count,
9364 // even though we are certain to pass the correct number of arguments here.
9365 HInstruction* instr = New<HInvokeFunction>(function, 3);
9366 return ast_context()->ReturnInstruction(instr, expr->id());
9367 } else if (proxy != NULL) {
9368 Variable* var = proxy->var();
9369 if (var->IsUnallocated()) {
9370 Bailout(kDeleteWithGlobalVariable);
9371 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
9372 // Result of deleting non-global variables is false. 'this' is not
9373 // really a variable, though we implement it as one. The
9374 // subexpression does not have side effects.
9375 HValue* value = var->is_this()
9376 ? graph()->GetConstantTrue()
9377 : graph()->GetConstantFalse();
9378 return ast_context()->ReturnValue(value);
9380 Bailout(kDeleteWithNonGlobalVariable);
9383 // Result of deleting non-property, non-variable reference is true.
9384 // Evaluate the subexpression for side effects.
9385 CHECK_ALIVE(VisitForEffect(expr->expression()));
9386 return ast_context()->ReturnValue(graph()->GetConstantTrue());
// `void expr`: evaluate the operand for side effects only, result is
// always undefined.
9391 void HOptimizedGraphBuilder::VisitVoid(UnaryOperation* expr) {
9392 CHECK_ALIVE(VisitForEffect(expr->expression()));
9393 return ast_context()->ReturnValue(graph()->GetConstantUndefined());
// `typeof expr`: visit the operand in typeof context (so unresolved
// globals don't throw), then emit an HTypeof over the popped value.
9397 void HOptimizedGraphBuilder::VisitTypeof(UnaryOperation* expr) {
9398 CHECK_ALIVE(VisitForTypeOf(expr->expression()));
9399 HValue* value = Pop();
9400 HInstruction* instr = New<HTypeof>(value);
9401 return ast_context()->ReturnInstruction(instr, expr->id());
// `!expr`. In a test context, just swap the true/false targets. In an
// effect context, only side effects matter. In a value context, branch on
// the operand and materialize the inverted boolean constants in two blocks
// joined at the end.
// NOTE(review): returns/closing braces between the three context cases are
// elided in this extraction.
9405 void HOptimizedGraphBuilder::VisitNot(UnaryOperation* expr) {
9406 if (ast_context()->IsTest()) {
9407 TestContext* context = TestContext::cast(ast_context());
// Negation in a test context is free: flip the branch targets.
9408 VisitForControl(expr->expression(),
9409 context->if_false(),
9410 context->if_true());
9414 if (ast_context()->IsEffect()) {
9415 VisitForEffect(expr->expression());
9419 ASSERT(ast_context()->IsValue());
9420 HBasicBlock* materialize_false = graph()->CreateBasicBlock();
9421 HBasicBlock* materialize_true = graph()->CreateBasicBlock();
9422 CHECK_BAILOUT(VisitForControl(expr->expression(),
// A block with no predecessor was never reached; drop it (NULL) so the
// join below only merges live paths.
9426 if (materialize_false->HasPredecessor()) {
9427 materialize_false->SetJoinId(expr->MaterializeFalseId());
9428 set_current_block(materialize_false);
9429 Push(graph()->GetConstantFalse());
9431 materialize_false = NULL;
9434 if (materialize_true->HasPredecessor()) {
9435 materialize_true->SetJoinId(expr->MaterializeTrueId());
9436 set_current_block(materialize_true);
9437 Push(graph()->GetConstantTrue());
9439 materialize_true = NULL;
9443 CreateJoin(materialize_false, materialize_true, expr->id());
9444 set_current_block(join);
9445 if (join != NULL) return ast_context()->ReturnValue(Pop());
// Builds the add/subtract for ++/--. The operand is on top of the
// expression stack; the result is the (uncast) HAdd of the operand and
// +1/-1. When the original input must be returned (postfix in a value
// context), the input is first forced to a numeric representation so
// ToNumber(input) exists as an explicit value.
9449 HInstruction* HOptimizedGraphBuilder::BuildIncrement(
9450 bool returns_original_input,
9451 CountOperation* expr) {
9452 // The input to the count operation is on top of the expression stack.
9453 Representation rep = Representation::FromType(expr->type());
// Without useful type feedback, optimistically assume Smi.
9454 if (rep.IsNone() || rep.IsTagged()) {
9455 rep = Representation::Smi();
9458 if (returns_original_input) {
9459 // We need an explicit HValue representing ToNumber(input). The
9460 // actual HChange instruction we need is (sometimes) added in a later
9461 // phase, so it is not available now to be used as an input to HAdd and
9462 // as the return value.
9463 HInstruction* number_input = AddUncasted<HForceRepresentation>(Pop(), rep);
9464 if (!rep.IsDouble()) {
9465 number_input->SetFlag(HInstruction::kFlexibleRepresentation);
9466 number_input->SetFlag(HInstruction::kCannotBeTagged);
9471 // The addition has no side effects, so we do not need
9472 // to simulate the expression stack after this instruction.
9473 // Any later failures deopt to the load of the input or earlier.
9474 HConstant* delta = (expr->op() == Token::INC)
9475 ? graph()->GetConstant1()
9476 : graph()->GetConstantMinus1();
9477 HInstruction* instr = AddUncasted<HAdd>(Top(), delta);
9478 if (instr->IsAdd()) {
9479 HAdd* add = HAdd::cast(instr);
// Record feedback: left operand in the inferred rep, delta is a Smi.
9480 add->set_observed_input_representation(1, rep);
9481 add->set_observed_input_representation(2, Representation::Smi());
9483 instr->SetFlag(HInstruction::kCannotBeTagged);
9484 instr->ClearAllSideEffects();
// Performs a property store purely for effect: installs an EffectContext,
// re-pushes the key (when present) and delegates to BuildStore. Used by
// postfix count operations, where the store's value is not the result.
// NOTE(review): some parameter lines (object/value and the object push)
// are elided in this extraction.
9489 void HOptimizedGraphBuilder::BuildStoreForEffect(Expression* expr,
9492 BailoutId return_id,
9496 EffectContext for_effect(this);
9498 if (key != NULL) Push(key);
9500 BuildStore(expr, prop, ast_id, return_id);
// Lowers ++/-- (prefix and postfix) on a variable or property target.
// For variables, the incremented value is bound/stored according to the
// variable's location; for properties, the object (and key) are pushed,
// the old value loaded, incremented, and stored back. Postfix in a
// non-effect context simulates an extra stack slot holding
// ToNumber(input) so the original value can be returned.
// NOTE(review): assorted break/closing-brace lines are elided in this
// extraction.
9504 void HOptimizedGraphBuilder::VisitCountOperation(CountOperation* expr) {
9505 ASSERT(!HasStackOverflow());
9506 ASSERT(current_block() != NULL);
9507 ASSERT(current_block()->HasPredecessor());
9508 if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
9509 Expression* target = expr->expression();
9510 VariableProxy* proxy = target->AsVariableProxy();
9511 Property* prop = target->AsProperty();
9512 if (proxy == NULL && prop == NULL) {
9513 return Bailout(kInvalidLhsInCountOperation);
9516 // Match the full code generator stack by simulating an extra stack
9517 // element for postfix operations in a non-effect context. The return
9518 // value is ToNumber(input).
9519 bool returns_original_input =
9520 expr->is_postfix() && !ast_context()->IsEffect();
9521 HValue* input = NULL; // ToNumber(original_input).
9522 HValue* after = NULL; // The result after incrementing or decrementing.
9524 if (proxy != NULL) {
9525 Variable* var = proxy->var();
// Legacy const cannot be mutated; defer to full codegen.
9526 if (var->mode() == CONST_LEGACY) {
9527 return Bailout(kUnsupportedCountOperationWithConst);
9529 // Argument of the count operation is a variable, not a property.
9530 ASSERT(prop == NULL);
9531 CHECK_ALIVE(VisitForValue(target));
9533 after = BuildIncrement(returns_original_input, expr);
9534 input = returns_original_input ? Top() : Pop();
9537 switch (var->location()) {
9538 case Variable::UNALLOCATED:
9539 HandleGlobalVariableAssignment(var,
9541 expr->AssignmentId());
9544 case Variable::PARAMETER:
9545 case Variable::LOCAL:
9546 BindIfLive(var, after);
9549 case Variable::CONTEXT: {
9550 // Bail out if we try to mutate a parameter value in a function
9551 // using the arguments object. We do not (yet) correctly handle the
9552 // arguments property of the function.
9553 if (current_info()->scope()->arguments() != NULL) {
9554 // Parameters will rewrite to context slots. We have no direct
9555 // way to detect that the variable is a parameter so we use a
9556 // linear search of the parameter list.
9557 int count = current_info()->scope()->num_parameters();
9558 for (int i = 0; i < count; ++i) {
9559 if (var == current_info()->scope()->parameter(i)) {
9560 return Bailout(kAssignmentToParameterInArgumentsObject);
9565 HValue* context = BuildContextChainWalk(var);
// Lexically-scoped slots may hold the_hole; force a deopt check then.
9566 HStoreContextSlot::Mode mode = IsLexicalVariableMode(var->mode())
9567 ? HStoreContextSlot::kCheckDeoptimize : HStoreContextSlot::kNoCheck;
9568 HStoreContextSlot* instr = Add<HStoreContextSlot>(context, var->index(),
9570 if (instr->HasObservableSideEffects()) {
9571 Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
9576 case Variable::LOOKUP:
9577 return Bailout(kLookupVariableInCountOperation);
9580 Drop(returns_original_input ? 2 : 1);
9581 return ast_context()->ReturnValue(expr->is_postfix() ? input : after);
9584 // Argument of the count operation is a property.
9585 ASSERT(prop != NULL);
9586 if (returns_original_input) Push(graph()->GetConstantUndefined());
9588 CHECK_ALIVE(VisitForValue(prop->obj()));
9589 HValue* object = Top();
// Keyed accesses and string accesses also need the key on the stack.
9592 if ((!prop->IsFunctionPrototype() && !prop->key()->IsPropertyName()) ||
9593 prop->IsStringAccess()) {
9594 CHECK_ALIVE(VisitForValue(prop->key()));
9598 CHECK_ALIVE(PushLoad(prop, object, key));
9600 after = BuildIncrement(returns_original_input, expr);
9602 if (returns_original_input) {
9604 // Drop object and key to push it again in the effect context below.
9605 Drop(key == NULL ? 1 : 2);
9606 environment()->SetExpressionStackAt(0, input);
9607 CHECK_ALIVE(BuildStoreForEffect(
9608 expr, prop, expr->id(), expr->AssignmentId(), object, key, after));
9609 return ast_context()->ReturnValue(Pop());
9612 environment()->SetExpressionStackAt(0, after);
9613 return BuildStore(expr, prop, expr->id(), expr->AssignmentId());
// Builds charCodeAt(string, index). Folds to a constant when both
// operands are constants (NaN for an out-of-range index); otherwise
// checks the receiver is a string, bounds-checks the index against the
// string length, and emits HStringCharCodeAt.
// NOTE(review): parameter lines and some closing braces are elided in
// this extraction.
9617 HInstruction* HOptimizedGraphBuilder::BuildStringCharCodeAt(
9620 if (string->IsConstant() && index->IsConstant()) {
9621 HConstant* c_string = HConstant::cast(string);
9622 HConstant* c_index = HConstant::cast(index);
9623 if (c_string->HasStringValue() && c_index->HasNumberValue()) {
9624 int32_t i = c_index->NumberValueAsInteger32();
9625 Handle<String> s = c_string->StringValue();
// Out-of-range constant index: charCodeAt yields NaN per spec.
9626 if (i < 0 || i >= s->length()) {
9627 return New<HConstant>(OS::nan_value());
9629 return New<HConstant>(s->Get(i));
9632 string = BuildCheckString(string);
9633 index = Add<HBoundsCheck>(index, AddLoadStringLength(string));
9634 return New<HStringCharCodeAt>(string, index);
9638 // Checks if the given shift amounts have following forms:
9639 // (N1) and (N2) with N1 + N2 = 32; (sa) and (32 - sa).
// Returns true when the two shift amounts are complementary mod 32,
// either as two constants summing to 32 or as (sa, 32 - sa).
9640 static bool ShiftAmountsAllowReplaceByRotate(HValue* sa,
9641 HValue* const32_minus_sa) {
9642 if (sa->IsConstant() && const32_minus_sa->IsConstant()) {
9643 const HConstant* c1 = HConstant::cast(sa);
9644 const HConstant* c2 = HConstant::cast(const32_minus_sa);
9645 return c1->HasInteger32Value() && c2->HasInteger32Value() &&
9646 (c1->Integer32Value() + c2->Integer32Value() == 32);
// Non-constant case: accept only the syntactic pattern (32 - sa).
9648 if (!const32_minus_sa->IsSub()) return false;
9649 HSub* sub = HSub::cast(const32_minus_sa);
9650 return sub->left()->EqualsInteger32Constant(32) && sub->right() == sa;
9654 // Checks if the left and the right are shift instructions with the opposite
9655 // directions that can be replaced by one rotate right instruction or not.
9656 // Returns the operand and the shift amount for the rotate instruction in the
// out-parameters when the pattern matches.
9658 bool HGraphBuilder::MatchRotateRight(HValue* left,
9661 HValue** shift_amount) {
// Accept (x << a) | (x >> b) in either operand order.
9664 if (left->IsShl() && right->IsShr()) {
9665 shl = HShl::cast(left);
9666 shr = HShr::cast(right);
9667 } else if (left->IsShr() && right->IsShl()) {
9668 shl = HShl::cast(right);
9669 shr = HShr::cast(left);
// Both shifts must act on the same value.
9673 if (shl->left() != shr->left()) return false;
// The two shift amounts must be complementary (sum to 32) in some order.
9675 if (!ShiftAmountsAllowReplaceByRotate(shl->right(), shr->right()) &&
9676 !ShiftAmountsAllowReplaceByRotate(shr->right(), shl->right())) {
9679 *operand= shr->left();
9680 *shift_amount = shr->right();
// Conservatively answers whether a shift amount can be zero mod 32.
// A constant whose low five bits are non-zero provably cannot.
// NOTE(review): the return statements of this function are elided in this
// extraction; behavior beyond the visible condition cannot be confirmed.
9685 bool CanBeZero(HValue* right) {
9686 if (right->IsConstant()) {
9687 HConstant* right_const = HConstant::cast(right);
9688 if (right_const->HasInteger32Value() &&
9689 (right_const->Integer32Value() & 0x1f) != 0) {
// Forces `number` into the representation implied by the expected type:
// Smi for SignedSmall, Integer32 for Signed32. Other expected types fall
// through (the remainder of the function is elided in this extraction).
9697 HValue* HGraphBuilder::EnforceNumberType(HValue* number,
9699 if (expected->Is(Type::SignedSmall())) {
9700 return AddUncasted<HForceRepresentation>(number, Representation::Smi());
9702 if (expected->Is(Type::Signed32())) {
9703 return AddUncasted<HForceRepresentation>(number,
9704 Representation::Integer32());
// Truncates `value` toward a number, updating *expected to the refined
// type. Constants are folded via CopyToTruncatedNumber; otherwise the
// expected type is split into its number and non-number parts to decide
// what conversion work remains.
// NOTE(review): the tail of this function is elided in this extraction.
9710 HValue* HGraphBuilder::TruncateToNumber(HValue* value, Type** expected) {
9711 if (value->IsConstant()) {
9712 HConstant* constant = HConstant::cast(value);
9713 Maybe<HConstant*> number = constant->CopyToTruncatedNumber(zone());
9714 if (number.has_value) {
9715 *expected = Type::Number(zone());
9716 return AddInstruction(number.value);
9720 // We put temporary values on the stack, which don't correspond to anything
9721 // in baseline code. Since nothing is observable we avoid recording those
9722 // pushes with a NoObservableSideEffectsScope.
9723 NoObservableSideEffectsScope no_effects(this);
9725 Type* expected_type = *expected;
9727 // Separate the number type from the rest.
9728 Type* expected_obj =
9729 Type::Intersect(expected_type, Type::NonNumber(zone()), zone());
9730 Type* expected_number =
9731 Type::Intersect(expected_type, Type::Number(zone()), zone());
9733 // We expect to get a number.
9734 // (We need to check first, since Type::None->Is(Type::Any()) == true.)
9735 if (expected_obj->Is(Type::None())) {
9736 ASSERT(!expected_number->Is(Type::None(zone())));
9740 if (expected_obj->Is(Type::Undefined(zone()))) {
9741 // This is already done by HChange.
// undefined converts to NaN, so widen the number expectation by Float.
9742 *expected = Type::Union(expected_number, Type::Float(zone()), zone());
// Optimized-graph wrapper around HGraphBuilder::BuildBinaryOperation.
// Pulls type feedback (operand/result bounds, fixed RHS, allocation site)
// off the AST node, picks the allocation mode for possible string adds,
// and adds an HSimulate after side-effecting or phi results, pushing the
// result first when the context requires it.
// NOTE(review): parameter lines and some Push/return lines are elided in
// this extraction.
9750 HValue* HOptimizedGraphBuilder::BuildBinaryOperation(
9751 BinaryOperation* expr,
9754 PushBeforeSimulateBehavior push_sim_result) {
9755 Type* left_type = expr->left()->bounds().lower;
9756 Type* right_type = expr->right()->bounds().lower;
9757 Type* result_type = expr->bounds().lower;
9758 Maybe<int> fixed_right_arg = expr->fixed_right_arg();
9759 Handle<AllocationSite> allocation_site = expr->allocation_site();
9761 PretenureFlag pretenure_flag = !FLAG_allocation_site_pretenuring ?
9762 isolate()->heap()->GetPretenureMode() : NOT_TENURED;
// With site pretenuring enabled, prefer the feedback site when we have one.
9764 HAllocationMode allocation_mode =
9765 FLAG_allocation_site_pretenuring
9766 ? (allocation_site.is_null()
9767 ? HAllocationMode(NOT_TENURED)
9768 : HAllocationMode(allocation_site))
9769 : HAllocationMode(pretenure_flag);
9771 HValue* result = HGraphBuilder::BuildBinaryOperation(
9772 expr->op(), left, right, left_type, right_type, result_type,
9773 fixed_right_arg, allocation_mode);
9774 // Add a simulate after instructions with observable side effects, and
9775 // after phis, which are the result of BuildBinaryOperation when we
9776 // inlined some complex subgraph.
9777 if (result->HasObservableSideEffects() || result->IsPhi()) {
9778 if (push_sim_result == PUSH_BEFORE_SIMULATE) {
9780 Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
9783 Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
// Core lowering of a binary operation given operand/result types from
// feedback. Handles: missing feedback (deopt + assume Any), truncation of
// non-string operands to numbers, the full string-addition special case
// (conversion via STRING_ADD_LEFT/RIGHT builtins, empty-string fast path,
// cons-string inlining, HStringAdd stub fallback), a generic builtin call
// for tagged operands inside stubs, and finally the arithmetic/bitwise/
// shift instruction selection with observed-representation bookkeeping.
// NOTE(review): many structural lines (switch header, case labels, breaks,
// closing braces, several parameter lines) are elided in this extraction.
9790 HValue* HGraphBuilder::BuildBinaryOperation(
9797 Maybe<int> fixed_right_arg,
9798 HAllocationMode allocation_mode) {
9800 Representation left_rep = Representation::FromType(left_type);
9801 Representation right_rep = Representation::FromType(right_type);
// ADD with a possibly-string operand must not truncate to number below.
9803 bool maybe_string_add = op == Token::ADD &&
9804 (left_type->Maybe(Type::String()) ||
9805 right_type->Maybe(Type::String()));
9807 if (left_type->Is(Type::None())) {
9808 Add<HDeoptimize>("Insufficient type feedback for LHS of binary operation",
9810 // TODO(rossberg): we should be able to get rid of non-continuous
9812 left_type = Type::Any(zone());
9814 if (!maybe_string_add) left = TruncateToNumber(left, &left_type);
9815 left_rep = Representation::FromType(left_type);
9818 if (right_type->Is(Type::None())) {
9819 Add<HDeoptimize>("Insufficient type feedback for RHS of binary operation",
9821 right_type = Type::Any(zone());
9823 if (!maybe_string_add) right = TruncateToNumber(right, &right_type);
9824 right_rep = Representation::FromType(right_type);
9827 // Special case for string addition here.
9828 if (op == Token::ADD &&
9829 (left_type->Is(Type::String()) || right_type->Is(Type::String()))) {
9830 // Validate type feedback for left argument.
9831 if (left_type->Is(Type::String())) {
9832 left = BuildCheckString(left);
9835 // Validate type feedback for right argument.
9836 if (right_type->Is(Type::String())) {
9837 right = BuildCheckString(right);
9840 // Convert left argument as necessary.
9841 if (left_type->Is(Type::Number())) {
9842 ASSERT(right_type->Is(Type::String()));
9843 left = BuildNumberToString(left, left_type);
9844 } else if (!left_type->Is(Type::String())) {
9845 ASSERT(right_type->Is(Type::String()));
// Non-number, non-string LHS: defer to the STRING_ADD_RIGHT builtin,
// which performs the full ToPrimitive/ToString conversion.
9846 HValue* function = AddLoadJSBuiltin(Builtins::STRING_ADD_RIGHT);
9847 Add<HPushArgument>(left);
9848 Add<HPushArgument>(right);
9849 return AddUncasted<HInvokeFunction>(function, 2);
9852 // Convert right argument as necessary.
9853 if (right_type->Is(Type::Number())) {
9854 ASSERT(left_type->Is(Type::String()));
9855 right = BuildNumberToString(right, right_type);
9856 } else if (!right_type->Is(Type::String())) {
9857 ASSERT(left_type->Is(Type::String()));
9858 HValue* function = AddLoadJSBuiltin(Builtins::STRING_ADD_LEFT);
9859 Add<HPushArgument>(left);
9860 Add<HPushArgument>(right);
9861 return AddUncasted<HInvokeFunction>(function, 2);
9864 // Fast path for empty constant strings.
9865 if (left->IsConstant() &&
9866 HConstant::cast(left)->HasStringValue() &&
9867 HConstant::cast(left)->StringValue()->length() == 0) {
9870 if (right->IsConstant() &&
9871 HConstant::cast(right)->HasStringValue() &&
9872 HConstant::cast(right)->StringValue()->length() == 0) {
9876 // Register the dependent code with the allocation site.
9877 if (!allocation_mode.feedback_site().is_null()) {
9878 ASSERT(!graph()->info()->IsStub());
9879 Handle<AllocationSite> site(allocation_mode.feedback_site());
9880 AllocationSite::AddDependentCompilationInfo(
9881 site, AllocationSite::TENURING, top_info());
9884 // Inline the string addition into the stub when creating allocation
9885 // mementos to gather allocation site feedback, or if we can statically
9886 // infer that we're going to create a cons string.
9887 if ((graph()->info()->IsStub() &&
9888 allocation_mode.CreateAllocationMementos()) ||
9889 (left->IsConstant() &&
9890 HConstant::cast(left)->HasStringValue() &&
9891 HConstant::cast(left)->StringValue()->length() + 1 >=
9892 ConsString::kMinLength) ||
9893 (right->IsConstant() &&
9894 HConstant::cast(right)->HasStringValue() &&
9895 HConstant::cast(right)->StringValue()->length() + 1 >=
9896 ConsString::kMinLength)) {
9897 return BuildStringAdd(left, right, allocation_mode);
9900 // Fallback to using the string add stub.
9901 return AddUncasted<HStringAdd>(
9902 left, right, allocation_mode.GetPretenureMode(),
9903 STRING_ADD_CHECK_NONE, allocation_mode.feedback_site());
9906 if (graph()->info()->IsStub()) {
9907 left = EnforceNumberType(left, left_type);
9908 right = EnforceNumberType(right, right_type);
9911 Representation result_rep = Representation::FromType(result_type);
9913 bool is_non_primitive = (left_rep.IsTagged() && !left_rep.IsSmi()) ||
9914 (right_rep.IsTagged() && !right_rep.IsSmi());
9916 HInstruction* instr = NULL;
9917 // Only the stub is allowed to call into the runtime, since otherwise we would
9918 // inline several instructions (including the two pushes) for every tagged
9919 // operation in optimized code, which is more expensive, than a stub call.
9920 if (graph()->info()->IsStub() && is_non_primitive) {
9921 HValue* function = AddLoadJSBuiltin(BinaryOpIC::TokenToJSBuiltin(op));
9922 Add<HPushArgument>(left);
9923 Add<HPushArgument>(right);
9924 instr = AddUncasted<HInvokeFunction>(function, 2);
9928 instr = AddUncasted<HAdd>(left, right);
9931 instr = AddUncasted<HSub>(left, right);
9934 instr = AddUncasted<HMul>(left, right);
// MOD with a known fixed RHS from feedback: guard that the RHS really is
// that constant (deopt otherwise), then use the constant for the mod.
9937 if (fixed_right_arg.has_value &&
9938 !right->EqualsInteger32Constant(fixed_right_arg.value)) {
9939 HConstant* fixed_right = Add<HConstant>(
9940 static_cast<int>(fixed_right_arg.value));
9941 IfBuilder if_same(this);
9942 if_same.If<HCompareNumericAndBranch>(right, fixed_right, Token::EQ);
9944 if_same.ElseDeopt("Unexpected RHS of binary operation");
9945 right = fixed_right;
9947 instr = AddUncasted<HMod>(left, right);
9951 instr = AddUncasted<HDiv>(left, right);
9953 case Token::BIT_XOR:
9954 case Token::BIT_AND:
9955 instr = AddUncasted<HBitwise>(op, left, right);
9957 case Token::BIT_OR: {
// (x << a) | (x >> (32 - a)) is strength-reduced to a rotate right.
9958 HValue* operand, *shift_amount;
9959 if (left_type->Is(Type::Signed32()) &&
9960 right_type->Is(Type::Signed32()) &&
9961 MatchRotateRight(left, right, &operand, &shift_amount)) {
9962 instr = AddUncasted<HRor>(operand, shift_amount);
9964 instr = AddUncasted<HBitwise>(op, left, right);
9969 instr = AddUncasted<HSar>(left, right);
9972 instr = AddUncasted<HShr>(left, right);
9973 if (FLAG_opt_safe_uint32_operations && instr->IsShr() &&
9975 graph()->RecordUint32Instruction(instr);
9979 instr = AddUncasted<HShl>(left, right);
9986 if (instr->IsBinaryOperation()) {
9987 HBinaryOperation* binop = HBinaryOperation::cast(instr);
9988 binop->set_observed_input_representation(1, left_rep);
9989 binop->set_observed_input_representation(2, right_rep);
9990 binop->initialize_output_representation(result_rep);
9991 if (graph()->info()->IsStub()) {
9992 // Stub should not call into stub.
9993 instr->SetFlag(HValue::kCannotBeTagged);
9994 // And should truncate on HForceRepresentation already.
9995 if (left->IsForceRepresentation()) {
9996 left->CopyFlag(HValue::kTruncatingToSmi, instr);
9997 left->CopyFlag(HValue::kTruncatingToInt32, instr);
9999 if (right->IsForceRepresentation()) {
10000 right->CopyFlag(HValue::kTruncatingToSmi, instr);
10001 right->CopyFlag(HValue::kTruncatingToInt32, instr);
10009 // Check for the form (%_ClassOf(foo) === 'BarClass').
// Recognizes a strict-equality compare whose LHS is the %_ClassOf
// intrinsic and whose RHS is a string literal, so it can be lowered to
// HClassOfTestAndBranch.
10010 static bool IsClassOfTest(CompareOperation* expr) {
10011 if (expr->op() != Token::EQ_STRICT) return false;
10012 CallRuntime* call = expr->left()->AsCallRuntime();
10013 if (call == NULL) return false;
10014 Literal* literal = expr->right()->AsLiteral();
10015 if (literal == NULL) return false;
10016 if (!literal->value()->IsString()) return false;
10017 if (!call->name()->IsOneByteEqualTo(STATIC_ASCII_VECTOR("_ClassOf"))) {
10020 ASSERT(call->arguments()->length() == 1);
// Dispatches a binary operation: comma, logical AND/OR, or arithmetic.
// NOTE(review): the switch's case labels are elided in this extraction.
10025 void HOptimizedGraphBuilder::VisitBinaryOperation(BinaryOperation* expr) {
10026 ASSERT(!HasStackOverflow());
10027 ASSERT(current_block() != NULL);
10028 ASSERT(current_block()->HasPredecessor());
10029 switch (expr->op()) {
10031 return VisitComma(expr);
10034 return VisitLogicalExpression(expr);
10036 return VisitArithmeticExpression(expr);
// Comma expression: evaluate the left operand for effect, then the right
// operand in the same AST context as the whole expression (its value/
// control flow becomes the result).
10041 void HOptimizedGraphBuilder::VisitComma(BinaryOperation* expr) {
10042 CHECK_ALIVE(VisitForEffect(expr->left()));
10043 // Visit the right subexpression in the same AST context as the entire
10045 Visit(expr->right());
// Lowers && / || with short-circuit semantics, specialized per AST
// context: test context threads branch targets directly; value context
// branches on the truthiness of the left value and joins; effect context
// evaluates for control/side effects only. Constant-truthiness left
// operands are folded without emitting a branch.
// NOTE(review): several argument lines, returns, and closing braces are
// elided in this extraction.
10049 void HOptimizedGraphBuilder::VisitLogicalExpression(BinaryOperation* expr) {
10050 bool is_logical_and = expr->op() == Token::AND;
10051 if (ast_context()->IsTest()) {
10052 TestContext* context = TestContext::cast(ast_context());
10053 // Translate left subexpression.
// For &&, a false left goes straight to if_false; for ||, a true left
// goes straight to if_true; the other edge falls into eval_right.
10054 HBasicBlock* eval_right = graph()->CreateBasicBlock();
10055 if (is_logical_and) {
10056 CHECK_BAILOUT(VisitForControl(expr->left(),
10058 context->if_false()));
10060 CHECK_BAILOUT(VisitForControl(expr->left(),
10061 context->if_true(),
10065 // Translate right subexpression by visiting it in the same AST
10066 // context as the entire expression.
10067 if (eval_right->HasPredecessor()) {
10068 eval_right->SetJoinId(expr->RightId());
10069 set_current_block(eval_right);
10070 Visit(expr->right());
10073 } else if (ast_context()->IsValue()) {
10074 CHECK_ALIVE(VisitForValue(expr->left()));
10075 ASSERT(current_block() != NULL);
10076 HValue* left_value = Top();
10078 // Short-circuit left values that always evaluate to the same boolean value.
10079 if (expr->left()->ToBooleanIsTrue() || expr->left()->ToBooleanIsFalse()) {
10080 // l (evals true) && r -> r
10081 // l (evals true) || r -> l
10082 // l (evals false) && r -> l
10083 // l (evals false) || r -> r
10084 if (is_logical_and == expr->left()->ToBooleanIsTrue()) {
10086 CHECK_ALIVE(VisitForValue(expr->right()));
10088 return ast_context()->ReturnValue(Pop());
10091 // We need an extra block to maintain edge-split form.
10092 HBasicBlock* empty_block = graph()->CreateBasicBlock();
10093 HBasicBlock* eval_right = graph()->CreateBasicBlock();
10094 ToBooleanStub::Types expected(expr->left()->to_boolean_types());
10095 HBranch* test = is_logical_and
10096 ? New<HBranch>(left_value, expected, eval_right, empty_block)
10097 : New<HBranch>(left_value, expected, empty_block, eval_right);
10098 FinishCurrentBlock(test);
10100 set_current_block(eval_right);
10101 Drop(1); // Value of the left subexpression.
10102 CHECK_BAILOUT(VisitForValue(expr->right()));
10104 HBasicBlock* join_block =
10105 CreateJoin(empty_block, current_block(), expr->id());
10106 set_current_block(join_block);
10107 return ast_context()->ReturnValue(Pop());
10110 ASSERT(ast_context()->IsEffect());
10111 // In an effect context, we don't need the value of the left subexpression,
10112 // only its control flow and side effects. We need an extra block to
10113 // maintain edge-split form.
10114 HBasicBlock* empty_block = graph()->CreateBasicBlock();
10115 HBasicBlock* right_block = graph()->CreateBasicBlock();
10116 if (is_logical_and) {
10117 CHECK_BAILOUT(VisitForControl(expr->left(), right_block, empty_block));
10119 CHECK_BAILOUT(VisitForControl(expr->left(), empty_block, right_block));
10122 // TODO(kmillikin): Find a way to fix this. It's ugly that there are
10123 // actually two empty blocks (one here and one inserted by
10124 // TestContext::BuildBranch, and that they both have an HSimulate though the
10125 // second one is not a merge node, and that we really have no good AST ID to
10126 // put on that first HSimulate.
// Unreached blocks are dropped (NULL) before the join, as in VisitNot.
10128 if (empty_block->HasPredecessor()) {
10129 empty_block->SetJoinId(expr->id());
10131 empty_block = NULL;
10134 if (right_block->HasPredecessor()) {
10135 right_block->SetJoinId(expr->RightId());
10136 set_current_block(right_block);
10137 CHECK_BAILOUT(VisitForEffect(expr->right()));
10138 right_block = current_block();
10140 right_block = NULL;
10143 HBasicBlock* join_block =
10144 CreateJoin(empty_block, right_block, expr->id());
10145 set_current_block(join_block);
10146 // We did not materialize any value in the predecessor environments,
10147 // so there is no need to handle it here.
// Arithmetic binary expression: evaluate both operands for value, build
// the operation (pushing before the simulate unless in an effect
// context), optionally record operand source positions, and return the
// result through the context.
// NOTE(review): the `HValue* result =` line appears elided in this
// extraction; `result` is consumed below.
10152 void HOptimizedGraphBuilder::VisitArithmeticExpression(BinaryOperation* expr) {
10153 CHECK_ALIVE(VisitForValue(expr->left()));
10154 CHECK_ALIVE(VisitForValue(expr->right()));
10155 SetSourcePosition(expr->position());
10156 HValue* right = Pop();
10157 HValue* left = Pop();
10159 BuildBinaryOperation(expr, left, right,
10160 ast_context()->IsEffect() ? NO_PUSH_BEFORE_SIMULATE
10161 : PUSH_BEFORE_SIMULATE);
10162 if (FLAG_hydrogen_track_positions && result->IsBinaryOperation()) {
10163 HBinaryOperation::cast(result)->SetOperandPositions(
10165 ScriptPositionToSourcePosition(expr->left()->position()),
10166 ScriptPositionToSourcePosition(expr->right()->position()));
10168 return ast_context()->ReturnValue(result);
// Fast path for (typeof x === 'literal'): evaluate the sub-expression in
// typeof context and emit a single HTypeofIsAndBranch against `check`.
10172 void HOptimizedGraphBuilder::HandleLiteralCompareTypeof(CompareOperation* expr,
10173 Expression* sub_expr,
10174 Handle<String> check) {
10175 CHECK_ALIVE(VisitForTypeOf(sub_expr));
10176 SetSourcePosition(expr->position());
10177 HValue* value = Pop();
10178 HTypeofIsAndBranch* instr = New<HTypeofIsAndBranch>(value, check);
10179 return ast_context()->ReturnControl(instr, expr->id());
// True when this is a strict-equality compare against a boolean constant
// on either side — such compares reduce to object identity.
// NOTE(review): the op/left/right parameter lines are elided in this
// extraction.
10183 static bool IsLiteralCompareBool(Isolate* isolate,
10187 return op == Token::EQ_STRICT &&
10188 ((left->IsConstant() &&
10189 HConstant::cast(left)->handle(isolate)->IsBoolean()) ||
10190 (right->IsConstant() &&
10191 HConstant::cast(right)->handle(isolate)->IsBoolean()));
// Lowers a comparison expression. Tries several special cases in priority
// order (typeof-literal, ==undefined, ==null, %_ClassOf test, boolean
// constant strict-equality, instanceof, 'in') before falling back to
// BuildCompareInstruction with the collected type feedback.
// NOTE(review): this listing elides lines (e.g. closing braces of the early
// returns); code is annotated as-is.
10195 void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
10196 ASSERT(!HasStackOverflow());
10197 ASSERT(current_block() != NULL);
10198 ASSERT(current_block()->HasPredecessor());
10200 if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
10202 // Check for a few fast cases. The AST visiting behavior must be in sync
10203 // with the full codegen: We don't push both left and right values onto
10204 // the expression stack when one side is a special-case literal.
10205 Expression* sub_expr = NULL;
10206 Handle<String> check;
10207 if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
10208 return HandleLiteralCompareTypeof(expr, sub_expr, check);
10210 if (expr->IsLiteralCompareUndefined(&sub_expr, isolate())) {
10211 return HandleLiteralCompareNil(expr, sub_expr, kUndefinedValue);
10213 if (expr->IsLiteralCompareNull(&sub_expr)) {
10214 return HandleLiteralCompareNil(expr, sub_expr, kNullValue);
// Special form %_ClassOf(x) === "Literal": emit a class-of test branch.
10217 if (IsClassOfTest(expr)) {
10218 CallRuntime* call = expr->left()->AsCallRuntime();
10219 ASSERT(call->arguments()->length() == 1);
10220 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10221 HValue* value = Pop();
10222 Literal* literal = expr->right()->AsLiteral();
10223 Handle<String> rhs = Handle<String>::cast(literal->value());
10224 HClassOfTestAndBranch* instr = New<HClassOfTestAndBranch>(value, rhs);
10225 return ast_context()->ReturnControl(instr, expr->id());
// Type feedback gathered by the AST typing pass; drives instruction choice.
10228 Type* left_type = expr->left()->bounds().lower;
10229 Type* right_type = expr->right()->bounds().lower;
10230 Type* combined_type = expr->combined_type();
10232 CHECK_ALIVE(VisitForValue(expr->left()));
10233 CHECK_ALIVE(VisitForValue(expr->right()));
10235 if (FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
10237 HValue* right = Pop();
10238 HValue* left = Pop();
10239 Token::Value op = expr->op();
// Strict equality against a boolean constant degrades to object identity.
10241 if (IsLiteralCompareBool(isolate(), left, op, right)) {
10242 HCompareObjectEqAndBranch* result =
10243 New<HCompareObjectEqAndBranch>(left, right);
10244 return ast_context()->ReturnControl(result, expr->id());
10247 if (op == Token::INSTANCEOF) {
10248 // Check to see if the rhs of the instanceof is a global function not
10249 // residing in new space. If it is we assume that the function will stay the
10251 Handle<JSFunction> target = Handle<JSFunction>::null();
10252 VariableProxy* proxy = expr->right()->AsVariableProxy();
10253 bool global_function = (proxy != NULL) && proxy->var()->IsUnallocated();
10254 if (global_function &&
10255 current_info()->has_global_object() &&
10256 !current_info()->global_object()->IsAccessCheckNeeded()) {
10257 Handle<String> name = proxy->name();
10258 Handle<GlobalObject> global(current_info()->global_object());
10259 LookupResult lookup(isolate());
10260 global->Lookup(name, &lookup);
10261 if (lookup.IsNormal() && lookup.GetValue()->IsJSFunction()) {
10262 Handle<JSFunction> candidate(JSFunction::cast(lookup.GetValue()));
10263 // If the function is in new space we assume it's more likely to
10264 // change and thus prefer the general IC code.
10265 if (!isolate()->heap()->InNewSpace(*candidate)) {
10266 target = candidate;
10271 // If the target is not null we have found a known global function that is
10272 // assumed to stay the same for this instanceof.
10273 if (target.is_null()) {
10274 HInstanceOf* result = New<HInstanceOf>(left, right);
10275 return ast_context()->ReturnInstruction(result, expr->id());
// Known-global fast path: guard on the exact function, then use the
// specialized instanceof instruction.
10277 Add<HCheckValue>(right, target);
10278 HInstanceOfKnownGlobal* result =
10279 New<HInstanceOfKnownGlobal>(left, target);
10280 return ast_context()->ReturnInstruction(result, expr->id());
10283 // Code below assumes that we don't fall through.
10285 } else if (op == Token::IN) {
// 'in' is lowered as a call to the IN builtin with both operands pushed.
10286 HValue* function = AddLoadJSBuiltin(Builtins::IN);
10287 Add<HPushArgument>(left);
10288 Add<HPushArgument>(right);
10289 // TODO(olivf) InvokeFunction produces a check for the parameter count,
10290 // even though we are certain to pass the correct number of arguments here.
10291 HInstruction* result = New<HInvokeFunction>(function, 2);
10292 return ast_context()->ReturnInstruction(result, expr->id());
// Generic path: feedback-driven comparison instruction selection.
10295 PushBeforeSimulateBehavior push_behavior =
10296 ast_context()->IsEffect() ? NO_PUSH_BEFORE_SIMULATE
10297 : PUSH_BEFORE_SIMULATE;
10298 HControlInstruction* compare = BuildCompareInstruction(
10299 op, left, right, left_type, right_type, combined_type,
10300 ScriptPositionToSourcePosition(expr->left()->position()),
10301 ScriptPositionToSourcePosition(expr->right()->position()),
10302 push_behavior, expr->id());
10303 if (compare == NULL) return; // Bailed out.
10304 return ast_context()->ReturnControl(compare, expr->id());
// Selects and emits the control instruction for a comparison based on the
// type feedback (left/right/combined types): object identity for receivers,
// internalized-string identity, string compare, generic compare (tagged), or
// numeric compare. Returns NULL after Bailout (callers check for this).
// NOTE(review): this listing elides lines, including the first parameter
// lines (presumably op / left / right / left_type / right_type) and several
// closing braces and else-arms; code is annotated as-is.
10308 HControlInstruction* HOptimizedGraphBuilder::BuildCompareInstruction(
10314 Type* combined_type,
10315 HSourcePosition left_position,
10316 HSourcePosition right_position,
10317 PushBeforeSimulateBehavior push_sim_result,
10318 BailoutId bailout_id) {
10319 // Cases handled below depend on collected type feedback. They should
10320 // soft deoptimize when there is no type feedback.
10321 if (combined_type->Is(Type::None())) {
10322 Add<HDeoptimize>("Insufficient type feedback for combined type "
10323 "of binary operation",
10324 Deoptimizer::SOFT);
10325 combined_type = left_type = right_type = Type::Any(zone());
10328 Representation left_rep = Representation::FromType(left_type);
10329 Representation right_rep = Representation::FromType(right_type);
10330 Representation combined_rep = Representation::FromType(combined_type);
// Receiver feedback: equality degrades to pointer identity after checks.
10332 if (combined_type->Is(Type::Receiver())) {
10333 if (Token::IsEqualityOp(op)) {
10334 // HCompareObjectEqAndBranch can only deal with object, so
10335 // exclude numbers.
10336 if ((left->IsConstant() &&
10337 HConstant::cast(left)->HasNumberValue()) ||
10338 (right->IsConstant() &&
10339 HConstant::cast(right)->HasNumberValue())) {
10340 Add<HDeoptimize>("Type mismatch between feedback and constant",
10341 Deoptimizer::SOFT);
10342 // The caller expects a branch instruction, so make it happy.
10343 return New<HBranch>(graph()->GetConstantTrue());
10345 // Can we get away with map check and not instance type check?
// Check the operand defined earlier in the graph (smaller block id).
10346 HValue* operand_to_check =
10347 left->block()->block_id() < right->block()->block_id() ? left : right;
10348 if (combined_type->IsClass()) {
10349 Handle<Map> map = combined_type->AsClass()->Map();
10350 AddCheckMap(operand_to_check, map);
10351 HCompareObjectEqAndBranch* result =
10352 New<HCompareObjectEqAndBranch>(left, right);
10353 if (FLAG_hydrogen_track_positions) {
10354 result->set_operand_position(zone(), 0, left_position);
10355 result->set_operand_position(zone(), 1, right_position);
// No precise class feedback: fall back to an instance-type check.
10359 BuildCheckHeapObject(operand_to_check);
10360 Add<HCheckInstanceType>(operand_to_check,
10361 HCheckInstanceType::IS_SPEC_OBJECT);
10362 HCompareObjectEqAndBranch* result =
10363 New<HCompareObjectEqAndBranch>(left, right);
// Ordering comparisons on receivers are not supported.
10367 Bailout(kUnsupportedNonPrimitiveCompare);
10370 } else if (combined_type->Is(Type::InternalizedString()) &&
10371 Token::IsEqualityOp(op)) {
10372 // If we have a constant argument, it should be consistent with the type
10373 // feedback (otherwise we fail assertions in HCompareObjectEqAndBranch).
10374 if ((left->IsConstant() &&
10375 !HConstant::cast(left)->HasInternalizedStringValue()) ||
10376 (right->IsConstant() &&
10377 !HConstant::cast(right)->HasInternalizedStringValue())) {
10378 Add<HDeoptimize>("Type mismatch between feedback and constant",
10379 Deoptimizer::SOFT);
10380 // The caller expects a branch instruction, so make it happy.
10381 return New<HBranch>(graph()->GetConstantTrue());
// Internalized strings are unique, so equality is pointer identity.
10383 BuildCheckHeapObject(left);
10384 Add<HCheckInstanceType>(left, HCheckInstanceType::IS_INTERNALIZED_STRING);
10385 BuildCheckHeapObject(right);
10386 Add<HCheckInstanceType>(right, HCheckInstanceType::IS_INTERNALIZED_STRING);
10387 HCompareObjectEqAndBranch* result =
10388 New<HCompareObjectEqAndBranch>(left, right);
10390 } else if (combined_type->Is(Type::String())) {
10391 BuildCheckHeapObject(left);
10392 Add<HCheckInstanceType>(left, HCheckInstanceType::IS_STRING);
10393 BuildCheckHeapObject(right);
10394 Add<HCheckInstanceType>(right, HCheckInstanceType::IS_STRING);
10395 HStringCompareAndBranch* result =
10396 New<HStringCompareAndBranch>(left, right, op);
// Tagged/unknown representation: fall back to the generic compare stub,
// which has observable side effects and therefore needs a simulate.
10399 if (combined_rep.IsTagged() || combined_rep.IsNone()) {
10400 HCompareGeneric* result = Add<HCompareGeneric>(left, right, op);
10401 result->set_observed_input_representation(1, left_rep);
10402 result->set_observed_input_representation(2, right_rep);
10403 if (result->HasObservableSideEffects()) {
10404 if (push_sim_result == PUSH_BEFORE_SIMULATE) {
10406 AddSimulate(bailout_id, REMOVABLE_SIMULATE);
10409 AddSimulate(bailout_id, REMOVABLE_SIMULATE);
10412 // TODO(jkummerow): Can we make this more efficient?
10413 HBranch* branch = New<HBranch>(result);
// Numeric representation known: emit a direct numeric compare-and-branch.
10416 HCompareNumericAndBranch* result =
10417 New<HCompareNumericAndBranch>(left, right, op);
10418 result->set_observed_input_representation(left_rep, right_rep);
10419 if (FLAG_hydrogen_track_positions) {
10420 result->SetOperandPositions(zone(), left_position, right_position);
// Fast path for comparisons against null/undefined literals. Strict equality
// becomes an identity check against the nil constant; loose equality uses
// BuildCompareNil, which also matches the other nil and undetectables.
// NOTE(review): the NilValue parameter line is elided in this listing; 'nil'
// below presumably comes from it.
10428 void HOptimizedGraphBuilder::HandleLiteralCompareNil(CompareOperation* expr,
10429 Expression* sub_expr,
10431 ASSERT(!HasStackOverflow());
10432 ASSERT(current_block() != NULL);
10433 ASSERT(current_block()->HasPredecessor());
10434 ASSERT(expr->op() == Token::EQ || expr->op() == Token::EQ_STRICT);
10435 if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
10436 CHECK_ALIVE(VisitForValue(sub_expr));
10437 HValue* value = Pop();
10438 if (expr->op() == Token::EQ_STRICT) {
10439 HConstant* nil_constant = nil == kNullValue
10440 ? graph()->GetConstantNull()
10441 : graph()->GetConstantUndefined();
10442 HCompareObjectEqAndBranch* instr =
10443 New<HCompareObjectEqAndBranch>(value, nil_constant);
10444 return ast_context()->ReturnControl(instr, expr->id());
10446 ASSERT_EQ(Token::EQ, expr->op());
// With no feedback, widen to Any so BuildCompareNil handles all inputs.
10447 Type* type = expr->combined_type()->Is(Type::None())
10448 ? Type::Any(zone()) : expr->combined_type();
10449 HIfContinuation continuation;
10450 BuildCompareNil(value, type, &continuation);
10451 return ast_context()->ReturnContinuation(&continuation, expr->id());
// Produces the value of the current function: a constant closure when we are
// inside an inlined body (the closure is known), otherwise the dynamic
// HThisFunction instruction, since optimized code may be shared by closures.
10456 HInstruction* HOptimizedGraphBuilder::BuildThisFunction() {
10457 // If we share optimized code between different closures, the
10458 // this-function is not a constant, except inside an inlined body.
10459 if (function_state()->outer() != NULL) {
10460 return New<HConstant>(
10461 function_state()->compilation_info()->closure());
10463 return New<HThisFunction>();
// Inlines the allocation and initialization of an object/array literal from
// its boilerplate: allocates the object (honoring allocation-site
// pretenuring), emits the header, optionally allocates and copies the
// elements backing store, and copies in-object properties. Recurses (via
// BuildEmit*) for nested object values.
// NOTE(review): this listing elides lines (e.g. closing braces, else arms);
// code is annotated as-is.
10468 HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
10469 Handle<JSObject> boilerplate_object,
10470 AllocationSiteUsageContext* site_context) {
10471 NoObservableSideEffectsScope no_effects(this);
10472 InstanceType instance_type = boilerplate_object->map()->instance_type();
10473 ASSERT(instance_type == JS_ARRAY_TYPE || instance_type == JS_OBJECT_TYPE);
10475 HType type = instance_type == JS_ARRAY_TYPE
10476 ? HType::JSArray() : HType::JSObject();
10477 HValue* object_size_constant = Add<HConstant>(
10478 boilerplate_object->map()->instance_size());
// Pretenuring decision: global heap heuristic, overridden per allocation
// site when allocation-site pretenuring is enabled (with a compilation
// dependency so the code deopts if the site's decision changes).
10480 PretenureFlag pretenure_flag = isolate()->heap()->GetPretenureMode();
10481 if (FLAG_allocation_site_pretenuring) {
10482 pretenure_flag = site_context->current()->GetPretenureMode();
10483 Handle<AllocationSite> site(site_context->current());
10484 AllocationSite::AddDependentCompilationInfo(
10485 site, AllocationSite::TENURING, top_info());
10488 HInstruction* object = Add<HAllocate>(object_size_constant, type,
10489 pretenure_flag, instance_type, site_context->current());
10491 // If allocation folding reaches Page::kMaxRegularHeapObjectSize the
10492 // elements array may not get folded into the object. Hence, we set the
10493 // elements pointer to empty fixed array and let store elimination remove
10494 // this store in the folding case.
10495 HConstant* empty_fixed_array = Add<HConstant>(
10496 isolate()->factory()->empty_fixed_array());
10497 Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
10498 empty_fixed_array, INITIALIZING_STORE);
10500 BuildEmitObjectHeader(boilerplate_object, object);
// COW element arrays are shared, not copied: size 0 means "don't allocate".
10502 Handle<FixedArrayBase> elements(boilerplate_object->elements());
10503 int elements_size = (elements->length() > 0 &&
10504 elements->map() != isolate()->heap()->fixed_cow_array_map()) ?
10505 elements->Size() : 0;
10507 if (pretenure_flag == TENURED &&
10508 elements->map() == isolate()->heap()->fixed_cow_array_map() &&
10509 isolate()->heap()->InNewSpace(*elements)) {
10510 // If we would like to pretenure a fixed cow array, we must ensure that the
10511 // array is already in old space, otherwise we'll create too many old-to-
10512 // new-space pointers (overflowing the store buffer).
10513 elements = Handle<FixedArrayBase>(
10514 isolate()->factory()->CopyAndTenureFixedCOWArray(
10515 Handle<FixedArray>::cast(elements)));
10516 boilerplate_object->set_elements(*elements);
10519 HInstruction* object_elements = NULL;
10520 if (elements_size > 0) {
10521 HValue* object_elements_size = Add<HConstant>(elements_size);
10522 if (boilerplate_object->HasFastDoubleElements()) {
10523 object_elements = Add<HAllocate>(object_elements_size, HType::Tagged(),
10524 pretenure_flag, FIXED_DOUBLE_ARRAY_TYPE, site_context->current());
10526 object_elements = Add<HAllocate>(object_elements_size, HType::Tagged(),
10527 pretenure_flag, FIXED_ARRAY_TYPE, site_context->current());
10530 BuildInitElementsInObjectHeader(boilerplate_object, object, object_elements);
10532 // Copy object elements if non-COW.
10533 if (object_elements != NULL) {
10534 BuildEmitElements(boilerplate_object, elements, object_elements,
10538 // Copy in-object properties.
10539 if (boilerplate_object->map()->NumberOfFields() != 0) {
10540 BuildEmitInObjectProperties(boilerplate_object, object, site_context,
// Initializes the freshly allocated literal object's header from the
// boilerplate: map, (empty) properties pointer, and -- for arrays -- the smi
// length field. Requires the boilerplate to have no out-of-object properties.
10547 void HOptimizedGraphBuilder::BuildEmitObjectHeader(
10548 Handle<JSObject> boilerplate_object,
10549 HInstruction* object) {
10550 ASSERT(boilerplate_object->properties()->length() == 0);
10552 Handle<Map> boilerplate_object_map(boilerplate_object->map());
10553 AddStoreMapConstant(object, boilerplate_object_map);
10555 Handle<Object> properties_field =
10556 Handle<Object>(boilerplate_object->properties(), isolate());
10557 ASSERT(*properties_field == isolate()->heap()->empty_fixed_array());
10558 HInstruction* properties = Add<HConstant>(properties_field);
10559 HObjectAccess access = HObjectAccess::ForPropertiesPointer();
10560 Add<HStoreNamedField>(object, access, properties);
10562 if (boilerplate_object->IsJSArray()) {
10563 Handle<JSArray> boilerplate_array =
10564 Handle<JSArray>::cast(boilerplate_object);
10565 Handle<Object> length_field =
10566 Handle<Object>(boilerplate_array->length(), isolate());
10567 HInstruction* length = Add<HConstant>(length_field);
10569 ASSERT(boilerplate_array->length()->IsSmi());
10570 Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(
10571 boilerplate_array->GetElementsKind()), length);
// Stores the elements pointer into the new object's header. If no backing
// store was allocated (COW/empty case), the boilerplate's own elements array
// is shared as a constant instead.
10576 void HOptimizedGraphBuilder::BuildInitElementsInObjectHeader(
10577 Handle<JSObject> boilerplate_object,
10578 HInstruction* object,
10579 HInstruction* object_elements) {
10580 ASSERT(boilerplate_object->properties()->length() == 0);
10581 if (object_elements == NULL) {
10582 Handle<Object> elements_field =
10583 Handle<Object>(boilerplate_object->elements(), isolate());
10584 object_elements = Add<HConstant>(elements_field);
10586 Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
// Copies the boilerplate's in-object FIELD properties into the new object.
// Nested JSObject values recurse through BuildFastLiteral; double fields get
// a freshly allocated HeapNumber box; smi fields store with smi
// representation. Remaining unused in-object slots are filled with the
// one-pointer filler map.
// NOTE(review): this listing elides lines (e.g. a 'copied_fields' increment
// is not visible although the variable is counted from below); code is
// annotated as-is.
10591 void HOptimizedGraphBuilder::BuildEmitInObjectProperties(
10592 Handle<JSObject> boilerplate_object,
10593 HInstruction* object,
10594 AllocationSiteUsageContext* site_context,
10595 PretenureFlag pretenure_flag) {
10596 Handle<Map> boilerplate_map(boilerplate_object->map());
10597 Handle<DescriptorArray> descriptors(boilerplate_map->instance_descriptors());
10598 int limit = boilerplate_map->NumberOfOwnDescriptors();
10600 int copied_fields = 0;
10601 for (int i = 0; i < limit; i++) {
10602 PropertyDetails details = descriptors->GetDetails(i);
// Only real FIELD properties are copied; others live in the map/descriptors.
10603 if (details.type() != FIELD) continue;
10605 int index = descriptors->GetFieldIndex(i);
10606 int property_offset = boilerplate_object->GetInObjectPropertyOffset(index);
10607 Handle<Name> name(descriptors->GetKey(i));
10608 Handle<Object> value =
10609 Handle<Object>(boilerplate_object->InObjectPropertyAt(index),
10612 // The access for the store depends on the type of the boilerplate.
10613 HObjectAccess access = boilerplate_object->IsJSArray() ?
10614 HObjectAccess::ForJSArrayOffset(property_offset) :
10615 HObjectAccess::ForMapAndOffset(boilerplate_map, property_offset);
10617 if (value->IsJSObject()) {
10618 Handle<JSObject> value_object = Handle<JSObject>::cast(value);
10619 Handle<AllocationSite> current_site = site_context->EnterNewScope();
10620 HInstruction* result =
10621 BuildFastLiteral(value_object, site_context);
10622 site_context->ExitScope(current_site, value_object);
10623 Add<HStoreNamedField>(object, access, result);
10625 Representation representation = details.representation();
10626 HInstruction* value_instruction;
10628 if (representation.IsDouble()) {
10629 // Allocate a HeapNumber box and store the value into it.
10630 HValue* heap_number_constant = Add<HConstant>(HeapNumber::kSize);
10631 // This heap number alloc does not have a corresponding
10632 // AllocationSite. That is okay because
10633 // 1) it's a child object of another object with a valid allocation site
10634 // 2) we can just use the mode of the parent object for pretenuring
10635 HInstruction* double_box =
10636 Add<HAllocate>(heap_number_constant, HType::HeapNumber(),
10637 pretenure_flag, HEAP_NUMBER_TYPE);
10638 AddStoreMapConstant(double_box,
10639 isolate()->factory()->heap_number_map());
10640 Add<HStoreNamedField>(double_box, HObjectAccess::ForHeapNumberValue(),
10641 Add<HConstant>(value));
10642 value_instruction = double_box;
10643 } else if (representation.IsSmi()) {
10644 value_instruction = value->IsUninitialized()
10645 ? graph()->GetConstant0()
10646 : Add<HConstant>(value);
10647 // Ensure that value is stored as smi.
10648 access = access.WithRepresentation(representation);
10650 value_instruction = Add<HConstant>(value);
10653 Add<HStoreNamedField>(object, access, value_instruction);
// Fill any remaining in-object slots so the object is GC-safe.
10657 int inobject_properties = boilerplate_object->map()->inobject_properties();
10658 HInstruction* value_instruction =
10659 Add<HConstant>(isolate()->factory()->one_pointer_filler_map());
10660 for (int i = copied_fields; i < inobject_properties; i++) {
10661 ASSERT(boilerplate_object->IsJSObject());
10662 int property_offset = boilerplate_object->GetInObjectPropertyOffset(i);
10663 HObjectAccess access =
10664 HObjectAccess::ForMapAndOffset(boilerplate_map, property_offset);
10665 Add<HStoreNamedField>(object, access, value_instruction);
// Initializes the new elements backing store: writes the elements header
// (map + length), then dispatches on the boilerplate's store kind to copy
// double elements or (possibly object-containing) fixed-array elements.
10670 void HOptimizedGraphBuilder::BuildEmitElements(
10671 Handle<JSObject> boilerplate_object,
10672 Handle<FixedArrayBase> elements,
10673 HValue* object_elements,
10674 AllocationSiteUsageContext* site_context) {
10675 ElementsKind kind = boilerplate_object->map()->elements_kind();
10676 int elements_length = elements->length();
10677 HValue* object_elements_length = Add<HConstant>(elements_length);
10678 BuildInitializeElementsHeader(object_elements, kind, object_elements_length);
10680 // Copy elements backing store content.
10681 if (elements->IsFixedDoubleArray()) {
10682 BuildEmitFixedDoubleArray(elements, kind, object_elements);
10683 } else if (elements->IsFixedArray()) {
10684 BuildEmitFixedArray(elements, kind, object_elements,
// Copies a double-elements backing store element by element using keyed
// load/store pairs against the boilerplate array constant. Loads allow the
// hole and stores tolerate undefined-as-NaN, so holey arrays copy correctly.
10692 void HOptimizedGraphBuilder::BuildEmitFixedDoubleArray(
10693 Handle<FixedArrayBase> elements,
10695 HValue* object_elements) {
10696 HInstruction* boilerplate_elements = Add<HConstant>(elements);
10697 int elements_length = elements->length();
10698 for (int i = 0; i < elements_length; i++) {
10699 HValue* key_constant = Add<HConstant>(i);
10700 HInstruction* value_instruction =
10701 Add<HLoadKeyed>(boilerplate_elements, key_constant,
10702 static_cast<HValue*>(NULL), kind,
10703 ALLOW_RETURN_HOLE);
10704 HInstruction* store = Add<HStoreKeyed>(object_elements, key_constant,
10705 value_instruction, kind);
10706 store->SetFlag(HValue::kAllowUndefinedAsNaN);
// Copies a fixed-array backing store element by element. Nested JSObject
// elements are deep-copied via BuildFastLiteral inside their own
// allocation-site scope; all other elements are copied with a keyed
// load/store pair from the boilerplate constant.
10711 void HOptimizedGraphBuilder::BuildEmitFixedArray(
10712 Handle<FixedArrayBase> elements,
10714 HValue* object_elements,
10715 AllocationSiteUsageContext* site_context) {
10716 HInstruction* boilerplate_elements = Add<HConstant>(elements);
10717 int elements_length = elements->length();
10718 Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
10719 for (int i = 0; i < elements_length; i++) {
10720 Handle<Object> value(fast_elements->get(i), isolate());
10721 HValue* key_constant = Add<HConstant>(i);
10722 if (value->IsJSObject()) {
10723 Handle<JSObject> value_object = Handle<JSObject>::cast(value);
10724 Handle<AllocationSite> current_site = site_context->EnterNewScope();
10725 HInstruction* result =
10726 BuildFastLiteral(value_object, site_context);
10727 site_context->ExitScope(current_site, value_object);
10728 Add<HStoreKeyed>(object_elements, key_constant, result, kind);
10730 HInstruction* value_instruction =
10731 Add<HLoadKeyed>(boilerplate_elements, key_constant,
10732 static_cast<HValue*>(NULL), kind,
10733 ALLOW_RETURN_HOLE);
10734 Add<HStoreKeyed>(object_elements, key_constant, value_instruction, kind);
// AST visitor for the this-function expression; delegates to
// BuildThisFunction and returns the instruction to the current context.
10740 void HOptimizedGraphBuilder::VisitThisFunction(ThisFunction* expr) {
10741 ASSERT(!HasStackOverflow());
10742 ASSERT(current_block() != NULL);
10743 ASSERT(current_block()->HasPredecessor());
10744 HInstruction* instr = BuildThisFunction();
10745 return ast_context()->ReturnInstruction(instr, expr->id());
// Visits all declarations; the per-declaration visitors accumulate
// global-scope names/values into globals_ (name/value pairs), which are then
// flushed here into a single HDeclareGlobals instruction.
10749 void HOptimizedGraphBuilder::VisitDeclarations(
10750 ZoneList<Declaration*>* declarations) {
10751 ASSERT(globals_.is_empty());
10752 AstVisitor::VisitDeclarations(declarations);
10753 if (!globals_.is_empty()) {
10754 Handle<FixedArray> array =
10755 isolate()->factory()->NewFixedArray(globals_.length(), TENURED);
10756 for (int i = 0; i < globals_.length(); ++i) array->set(i, *globals_.at(i));
10757 int flags = DeclareGlobalsEvalFlag::encode(current_info()->is_eval()) |
10758 DeclareGlobalsNativeFlag::encode(current_info()->is_native()) |
10759 DeclareGlobalsStrictMode::encode(current_info()->strict_mode());
10760 Add<HDeclareGlobals>(array, flags);
// Lowers a variable declaration according to where the variable lives:
// unallocated (global) variables are queued for HDeclareGlobals; stack and
// context slots that need hole-initialization are bound/stored with the hole
// constant; lookup slots bail out.
// NOTE(review): this listing elides lines (e.g. break statements and the
// guards that presumably use 'hole_init'); code is annotated as-is.
10766 void HOptimizedGraphBuilder::VisitVariableDeclaration(
10767 VariableDeclaration* declaration) {
10768 VariableProxy* proxy = declaration->proxy();
10769 VariableMode mode = declaration->mode();
10770 Variable* variable = proxy->var();
10771 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
10772 switch (variable->location()) {
10773 case Variable::UNALLOCATED:
// Globals are declared later in one batch: push name, then initial value.
10774 globals_.Add(variable->name(), zone());
10775 globals_.Add(variable->binding_needs_init()
10776 ? isolate()->factory()->the_hole_value()
10777 : isolate()->factory()->undefined_value(), zone());
10779 case Variable::PARAMETER:
10780 case Variable::LOCAL:
10782 HValue* value = graph()->GetConstantHole();
10783 environment()->Bind(variable, value);
10786 case Variable::CONTEXT:
10788 HValue* value = graph()->GetConstantHole();
10789 HValue* context = environment()->context();
10790 HStoreContextSlot* store = Add<HStoreContextSlot>(
10791 context, variable->index(), HStoreContextSlot::kNoCheck, value);
10792 if (store->HasObservableSideEffects()) {
10793 Add<HSimulate>(proxy->id(), REMOVABLE_SIMULATE);
10797 case Variable::LOOKUP:
10798 return Bailout(kUnsupportedLookupSlotInDeclaration);
// Lowers a function declaration: globals get their compiled
// SharedFunctionInfo queued for HDeclareGlobals; stack/context slots
// evaluate the function expression and bind/store the closure value;
// lookup slots bail out.
10803 void HOptimizedGraphBuilder::VisitFunctionDeclaration(
10804 FunctionDeclaration* declaration) {
10805 VariableProxy* proxy = declaration->proxy();
10806 Variable* variable = proxy->var();
10807 switch (variable->location()) {
10808 case Variable::UNALLOCATED: {
10809 globals_.Add(variable->name(), zone());
10810 Handle<SharedFunctionInfo> function = Compiler::BuildFunctionInfo(
10811 declaration->fun(), current_info()->script());
10812 // Check for stack-overflow exception.
10813 if (function.is_null()) return SetStackOverflow();
10814 globals_.Add(function, zone());
10817 case Variable::PARAMETER:
10818 case Variable::LOCAL: {
10819 CHECK_ALIVE(VisitForValue(declaration->fun()));
10820 HValue* value = Pop();
10821 BindIfLive(variable, value);
10824 case Variable::CONTEXT: {
10825 CHECK_ALIVE(VisitForValue(declaration->fun()));
10826 HValue* value = Pop();
10827 HValue* context = environment()->context();
10828 HStoreContextSlot* store = Add<HStoreContextSlot>(
10829 context, variable->index(), HStoreContextSlot::kNoCheck, value);
10830 if (store->HasObservableSideEffects()) {
10831 Add<HSimulate>(proxy->id(), REMOVABLE_SIMULATE);
10835 case Variable::LOOKUP:
10836 return Bailout(kUnsupportedLookupSlotInDeclaration);
// Module-related AST visitors. Their bodies are elided in this listing;
// given the gaps in the embedded line numbers they are presumably trivial
// stubs (e.g. UNREACHABLE()) -- confirm against the full file.
10841 void HOptimizedGraphBuilder::VisitModuleDeclaration(
10842 ModuleDeclaration* declaration) {
10847 void HOptimizedGraphBuilder::VisitImportDeclaration(
10848 ImportDeclaration* declaration) {
10853 void HOptimizedGraphBuilder::VisitExportDeclaration(
10854 ExportDeclaration* declaration) {
10859 void HOptimizedGraphBuilder::VisitModuleLiteral(ModuleLiteral* module) {
10864 void HOptimizedGraphBuilder::VisitModuleVariable(ModuleVariable* module) {
10869 void HOptimizedGraphBuilder::VisitModulePath(ModulePath* module) {
10874 void HOptimizedGraphBuilder::VisitModuleUrl(ModuleUrl* module) {
10879 void HOptimizedGraphBuilder::VisitModuleStatement(ModuleStatement* stmt) {
10884 // Generators for inline runtime functions.
10885 // Support for types.
// Each generator below lowers a %_XXX intrinsic call: it visits the single
// argument for value, pops it, and returns a type-test control instruction
// (branch) to the AST context. Unsupported intrinsics bail out instead.
// %_IsSmi(x): branch on whether x is a small integer.
10886 void HOptimizedGraphBuilder::GenerateIsSmi(CallRuntime* call) {
10887 ASSERT(call->arguments()->length() == 1);
10888 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10889 HValue* value = Pop();
10890 HIsSmiAndBranch* result = New<HIsSmiAndBranch>(value);
10891 return ast_context()->ReturnControl(result, call->id());
// %_IsSpecObject(x): instance type in [FIRST_SPEC_OBJECT, LAST_SPEC_OBJECT].
10895 void HOptimizedGraphBuilder::GenerateIsSpecObject(CallRuntime* call) {
10896 ASSERT(call->arguments()->length() == 1);
10897 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10898 HValue* value = Pop();
10899 HHasInstanceTypeAndBranch* result =
10900 New<HHasInstanceTypeAndBranch>(value,
10901 FIRST_SPEC_OBJECT_TYPE,
10902 LAST_SPEC_OBJECT_TYPE);
10903 return ast_context()->ReturnControl(result, call->id());
// %_IsFunction(x): instance type check against JS_FUNCTION_TYPE.
10907 void HOptimizedGraphBuilder::GenerateIsFunction(CallRuntime* call) {
10908 ASSERT(call->arguments()->length() == 1);
10909 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10910 HValue* value = Pop();
10911 HHasInstanceTypeAndBranch* result =
10912 New<HHasInstanceTypeAndBranch>(value, JS_FUNCTION_TYPE);
10913 return ast_context()->ReturnControl(result, call->id());
// %_IsMinusZero(x): branch on whether x is the double -0.
10917 void HOptimizedGraphBuilder::GenerateIsMinusZero(CallRuntime* call) {
10918 ASSERT(call->arguments()->length() == 1);
10919 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10920 HValue* value = Pop();
10921 HCompareMinusZeroAndBranch* result = New<HCompareMinusZeroAndBranch>(value);
10922 return ast_context()->ReturnControl(result, call->id());
// %_HasCachedArrayIndex(x): branch on the string's cached array-index bit.
10926 void HOptimizedGraphBuilder::GenerateHasCachedArrayIndex(CallRuntime* call) {
10927 ASSERT(call->arguments()->length() == 1);
10928 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10929 HValue* value = Pop();
10930 HHasCachedArrayIndexAndBranch* result =
10931 New<HHasCachedArrayIndexAndBranch>(value);
10932 return ast_context()->ReturnControl(result, call->id());
// %_IsArray(x): instance type check against JS_ARRAY_TYPE.
10936 void HOptimizedGraphBuilder::GenerateIsArray(CallRuntime* call) {
10937 ASSERT(call->arguments()->length() == 1);
10938 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10939 HValue* value = Pop();
10940 HHasInstanceTypeAndBranch* result =
10941 New<HHasInstanceTypeAndBranch>(value, JS_ARRAY_TYPE);
10942 return ast_context()->ReturnControl(result, call->id());
// %_IsRegExp(x): instance type check against JS_REGEXP_TYPE.
10946 void HOptimizedGraphBuilder::GenerateIsRegExp(CallRuntime* call) {
10947 ASSERT(call->arguments()->length() == 1);
10948 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10949 HValue* value = Pop();
10950 HHasInstanceTypeAndBranch* result =
10951 New<HHasInstanceTypeAndBranch>(value, JS_REGEXP_TYPE);
10952 return ast_context()->ReturnControl(result, call->id());
// %_IsObject(x): branch via the dedicated is-object test instruction.
10956 void HOptimizedGraphBuilder::GenerateIsObject(CallRuntime* call) {
10957 ASSERT(call->arguments()->length() == 1);
10958 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10959 HValue* value = Pop();
10960 HIsObjectAndBranch* result = New<HIsObjectAndBranch>(value);
10961 return ast_context()->ReturnControl(result, call->id());
// %_IsNonNegativeSmi is not supported in optimized code: bail out.
10965 void HOptimizedGraphBuilder::GenerateIsNonNegativeSmi(CallRuntime* call) {
10966 return Bailout(kInlinedRuntimeFunctionIsNonNegativeSmi);
// %_IsUndetectableObject(x): branch on the undetectable map bit.
10970 void HOptimizedGraphBuilder::GenerateIsUndetectableObject(CallRuntime* call) {
10971 ASSERT(call->arguments()->length() == 1);
10972 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10973 HValue* value = Pop();
10974 HIsUndetectableAndBranch* result = New<HIsUndetectableAndBranch>(value);
10975 return ast_context()->ReturnControl(result, call->id());
// Not supported in optimized code: bail out.
10979 void HOptimizedGraphBuilder::GenerateIsStringWrapperSafeForDefaultValueOf(
10980 CallRuntime* call) {
10981 return Bailout(kInlinedRuntimeFunctionIsStringWrapperSafeForDefaultValueOf);
10985 // Support for construct call checks.
// %_IsConstructCall(): inside an inlined function the answer is known at
// compile time from the inlining kind; otherwise a runtime branch is emitted.
10986 void HOptimizedGraphBuilder::GenerateIsConstructCall(CallRuntime* call) {
10987 ASSERT(call->arguments()->length() == 0);
10988 if (function_state()->outer() != NULL) {
10989 // We are generating graph for inlined function.
10990 HValue* value = function_state()->inlining_kind() == CONSTRUCT_CALL_RETURN
10991 ? graph()->GetConstantTrue()
10992 : graph()->GetConstantFalse();
10993 return ast_context()->ReturnValue(value);
// NOTE(review): the continuation of this call (presumably call->id());) is
// elided in this listing.
10995 return ast_context()->ReturnControl(New<HIsConstructCallAndBranch>(),
11001 // Support for arguments.length and arguments[?].
// %_ArgumentsLength(): materializes the arguments elements and its length.
11002 void HOptimizedGraphBuilder::GenerateArgumentsLength(CallRuntime* call) {
11003 // Our implementation of arguments (based on this stack frame or an
11004 // adapter below it) does not work for inlined functions. This runtime
11005 // function is blacklisted by AstNode::IsInlineable.
11006 ASSERT(function_state()->outer() == NULL);
11007 ASSERT(call->arguments()->length() == 0);
11008 HInstruction* elements = Add<HArgumentsElements>(false);
11009 HArgumentsLength* result = New<HArgumentsLength>(elements);
11010 return ast_context()->ReturnInstruction(result, call->id());
// %_Arguments(index): bounds-checks the index against the arguments length
// and loads the argument from the (non-inlined) frame's arguments elements.
11014 void HOptimizedGraphBuilder::GenerateArguments(CallRuntime* call) {
11015 // Our implementation of arguments (based on this stack frame or an
11016 // adapter below it) does not work for inlined functions. This runtime
11017 // function is blacklisted by AstNode::IsInlineable.
11018 ASSERT(function_state()->outer() == NULL);
11019 ASSERT(call->arguments()->length() == 1);
11020 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11021 HValue* index = Pop();
11022 HInstruction* elements = Add<HArgumentsElements>(false);
11023 HInstruction* length = Add<HArgumentsLength>(elements);
11024 HInstruction* checked_index = Add<HBoundsCheck>(index, length);
11025 HAccessArgumentsAt* result = New<HAccessArgumentsAt>(
11026 elements, length, checked_index);
11027 return ast_context()->ReturnInstruction(result, call->id());
11031 // Support for accessing the class and value fields of an object.
// %_ClassOf outside the special compare form is not inlined: bail out.
11032 void HOptimizedGraphBuilder::GenerateClassOf(CallRuntime* call) {
11033 // The special form detected by IsClassOfTest is detected before we get here
11034 // and does not cause a bailout.
11035 return Bailout(kInlinedRuntimeFunctionClassOf);
// %_ValueOf(x): if x is a JSValue wrapper, load its boxed value; otherwise
// return x itself. Built as a diamond with an IfBuilder; each arm pushes its
// result and simulates, and the merged value is popped at the end.
11039 void HOptimizedGraphBuilder::GenerateValueOf(CallRuntime* call) {
11040 ASSERT(call->arguments()->length() == 1);
11041 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11042 HValue* object = Pop();
11044 IfBuilder if_objectisvalue(this);
11045 HValue* objectisvalue = if_objectisvalue.If<HHasInstanceTypeAndBranch>(
11046 object, JS_VALUE_TYPE);
11047 if_objectisvalue.Then();
11049 // Return the actual value.
11050 Push(Add<HLoadNamedField>(
11051 object, objectisvalue,
11052 HObjectAccess::ForObservableJSObjectOffset(
11053 JSValue::kValueOffset)));
11054 Add<HSimulate>(call->id(), FIXED_SIMULATE);
11056 if_objectisvalue.Else();
11058 // If the object is not a value return the object.
// NOTE(review): the Push of 'object' for the else-arm (between these lines)
// is elided in this listing.
11060 Add<HSimulate>(call->id(), FIXED_SIMULATE);
11062 if_objectisvalue.End();
11063 return ast_context()->ReturnValue(Pop());
// %_DateField(date, index): load a cached field of a JSDate. The field
// index must be a compile-time Smi literal (asserted below).
11067 void HOptimizedGraphBuilder::GenerateDateField(CallRuntime* call) {
11068 ASSERT(call->arguments()->length() == 2);
11069 ASSERT_NE(NULL, call->arguments()->at(1)->AsLiteral());
11070 Smi* index = Smi::cast(*(call->arguments()->at(1)->AsLiteral()->value()));
11071 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11072 HValue* date = Pop();
11073 HDateField* result = New<HDateField>(date, index);
11074 return ast_context()->ReturnInstruction(result, call->id());
// %_OneByteSeqStringSetChar(index, value, string): write one character into
// a sequential one-byte string in place; evaluates to undefined.
11078 void HOptimizedGraphBuilder::GenerateOneByteSeqStringSetChar(
11079 CallRuntime* call) {
11080 ASSERT(call->arguments()->length() == 3);
11081 // We need to follow the evaluation order of full codegen.
11082 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11083 CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
11084 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11085 HValue* string = Pop();
11086 HValue* value = Pop();
11087 HValue* index = Pop();
// NOTE(review): the continuation line carrying the index/value operands of
// this call appears to be elided from this excerpt.
11088 Add<HSeqStringSetChar>(String::ONE_BYTE_ENCODING, string,
11090 Add<HSimulate>(call->id(), FIXED_SIMULATE);
11091 return ast_context()->ReturnValue(graph()->GetConstantUndefined());
// %_TwoByteSeqStringSetChar(index, value, string): same as the one-byte
// variant above, for sequential two-byte strings.
11095 void HOptimizedGraphBuilder::GenerateTwoByteSeqStringSetChar(
11096 CallRuntime* call) {
11097 ASSERT(call->arguments()->length() == 3);
11098 // We need to follow the evaluation order of full codegen.
11099 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11100 CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
11101 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11102 HValue* string = Pop();
11103 HValue* value = Pop();
11104 HValue* index = Pop();
// NOTE(review): continuation line with the index/value operands elided here.
11105 Add<HSeqStringSetChar>(String::TWO_BYTE_ENCODING, string,
11107 Add<HSimulate>(call->id(), FIXED_SIMULATE);
11108 return ast_context()->ReturnValue(graph()->GetConstantUndefined());
// %_SetValueOf(object, value): if object is a JSValue wrapper, store value
// into its kValueOffset slot; otherwise do nothing. Evaluates to value.
11112 void HOptimizedGraphBuilder::GenerateSetValueOf(CallRuntime* call) {
11113 ASSERT(call->arguments()->length() == 2);
11114 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11115 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11116 HValue* value = Pop();
11117 HValue* object = Pop();
11119 // Check if object is a JSValue.
11120 IfBuilder if_objectisvalue(this);
11121 if_objectisvalue.If<HHasInstanceTypeAndBranch>(object, JS_VALUE_TYPE);
11122 if_objectisvalue.Then();
11124 // Create in-object property store to kValueOffset.
// NOTE(review): the stored value operand's continuation line and the
// Push(value) statements inside the non-effect branches appear to be
// elided from this excerpt — confirm against the full source.
11125 Add<HStoreNamedField>(object,
11126 HObjectAccess::ForObservableJSObjectOffset(JSValue::kValueOffset),
11128 if (!ast_context()->IsEffect()) {
11131 Add<HSimulate>(call->id(), FIXED_SIMULATE);
11133 if_objectisvalue.Else();
11135 // Nothing to do in this case.
11136 if (!ast_context()->IsEffect()) {
11139 Add<HSimulate>(call->id(), FIXED_SIMULATE);
11141 if_objectisvalue.End();
11142 if (!ast_context()->IsEffect()) {
11145 return ast_context()->ReturnValue(value);
11149 // Fast support for charCodeAt(n).
// Delegates to BuildStringCharCodeAt, which handles bounds checking.
11150 void HOptimizedGraphBuilder::GenerateStringCharCodeAt(CallRuntime* call) {
11151 ASSERT(call->arguments()->length() == 2);
11152 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11153 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11154 HValue* index = Pop();
11155 HValue* string = Pop();
11156 HInstruction* result = BuildStringCharCodeAt(string, index);
11157 return ast_context()->ReturnInstruction(result, call->id());
11161 // Fast support for string.charAt(n) and string[n].
// Converts a char code to a one-character string.
11162 void HOptimizedGraphBuilder::GenerateStringCharFromCode(CallRuntime* call) {
11163 ASSERT(call->arguments()->length() == 1);
11164 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11165 HValue* char_code = Pop();
11166 HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
11167 return ast_context()->ReturnInstruction(result, call->id());
11171 // Fast support for string.charAt(n) and string[n].
// Composition of charCodeAt followed by fromCharCode.
11172 void HOptimizedGraphBuilder::GenerateStringCharAt(CallRuntime* call) {
11173 ASSERT(call->arguments()->length() == 2);
11174 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11175 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11176 HValue* index = Pop();
11177 HValue* string = Pop();
11178 HInstruction* char_code = BuildStringCharCodeAt(string, index);
11179 AddInstruction(char_code);
11180 HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
11181 return ast_context()->ReturnInstruction(result, call->id());
11185 // Fast support for object equality testing.
// Emits a reference-equality branch; returned as a control instruction so
// test contexts can consume it directly.
11186 void HOptimizedGraphBuilder::GenerateObjectEquals(CallRuntime* call) {
11187 ASSERT(call->arguments()->length() == 2);
11188 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11189 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11190 HValue* right = Pop();
11191 HValue* left = Pop();
11192 HCompareObjectEqAndBranch* result =
11193 New<HCompareObjectEqAndBranch>(left, right);
11194 return ast_context()->ReturnControl(result, call->id());
// %_Log is a no-op when optimizing; it simply yields undefined.
11198 void HOptimizedGraphBuilder::GenerateLog(CallRuntime* call) {
11199 // %_Log is ignored in optimized code.
11200 return ast_context()->ReturnValue(graph()->GetConstantUndefined());
11204 // Fast support for StringAdd.
// Concatenates two strings with a dedicated HStringAdd instruction.
11205 void HOptimizedGraphBuilder::GenerateStringAdd(CallRuntime* call) {
11206 ASSERT_EQ(2, call->arguments()->length());
11207 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11208 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11209 HValue* right = Pop();
11210 HValue* left = Pop();
11211 HInstruction* result = NewUncasted<HStringAdd>(left, right);
11212 return ast_context()->ReturnInstruction(result, call->id());
11216 // Fast support for SubString.
// Calls through to the SubString code stub with 3 stack arguments.
11217 void HOptimizedGraphBuilder::GenerateSubString(CallRuntime* call) {
11218 ASSERT_EQ(3, call->arguments()->length());
11219 CHECK_ALIVE(VisitExpressions(call->arguments()));
11220 PushArgumentsFromEnvironment(call->arguments()->length());
11221 HCallStub* result = New<HCallStub>(CodeStub::SubString, 3);
11222 return ast_context()->ReturnInstruction(result, call->id());
11226 // Fast support for StringCompare.
// Calls through to the StringCompare code stub with 2 stack arguments.
11227 void HOptimizedGraphBuilder::GenerateStringCompare(CallRuntime* call) {
11228 ASSERT_EQ(2, call->arguments()->length());
11229 CHECK_ALIVE(VisitExpressions(call->arguments()));
11230 PushArgumentsFromEnvironment(call->arguments()->length());
11231 HCallStub* result = New<HCallStub>(CodeStub::StringCompare, 2);
11232 return ast_context()->ReturnInstruction(result, call->id());
11236 // Support for direct calls from JavaScript to native RegExp code.
// Calls through to the RegExpExec code stub with 4 stack arguments.
11237 void HOptimizedGraphBuilder::GenerateRegExpExec(CallRuntime* call) {
11238 ASSERT_EQ(4, call->arguments()->length());
11239 CHECK_ALIVE(VisitExpressions(call->arguments()));
11240 PushArgumentsFromEnvironment(call->arguments()->length());
11241 HCallStub* result = New<HCallStub>(CodeStub::RegExpExec, 4);
11242 return ast_context()->ReturnInstruction(result, call->id());
// %_DoubleLo(x): extract the low 32 bits of a double's bit pattern.
11246 void HOptimizedGraphBuilder::GenerateDoubleLo(CallRuntime* call) {
11247 ASSERT_EQ(1, call->arguments()->length());
11248 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11249 HValue* value = Pop();
11250 HInstruction* result = NewUncasted<HDoubleBits>(value, HDoubleBits::LOW);
11251 return ast_context()->ReturnInstruction(result, call->id());
// %_DoubleHi(x): extract the high 32 bits of a double's bit pattern.
11255 void HOptimizedGraphBuilder::GenerateDoubleHi(CallRuntime* call) {
11256 ASSERT_EQ(1, call->arguments()->length());
11257 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11258 HValue* value = Pop();
11259 HInstruction* result = NewUncasted<HDoubleBits>(value, HDoubleBits::HIGH);
11260 return ast_context()->ReturnInstruction(result, call->id());
// %_ConstructDouble(hi, lo): build a double from two 32-bit halves.
// Note the pop order: the second argument (lo) is on top of the stack.
11264 void HOptimizedGraphBuilder::GenerateConstructDouble(CallRuntime* call) {
11265 ASSERT_EQ(2, call->arguments()->length());
11266 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11267 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11268 HValue* lo = Pop();
11269 HValue* hi = Pop();
11270 HInstruction* result = NewUncasted<HConstructDouble>(hi, lo);
11271 return ast_context()->ReturnInstruction(result, call->id());
11275 // Construct a RegExp exec result with two in-object properties.
// Arguments are (length, index, input); popped in reverse order below.
11276 void HOptimizedGraphBuilder::GenerateRegExpConstructResult(CallRuntime* call) {
11277 ASSERT_EQ(3, call->arguments()->length());
11278 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11279 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11280 CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
11281 HValue* input = Pop();
11282 HValue* index = Pop();
11283 HValue* length = Pop();
11284 HValue* result = BuildRegExpConstructResult(length, index, input);
11285 return ast_context()->ReturnValue(result);
11289 // Support for fast native caches.
// Not supported in optimized code; always bails out to full codegen.
11290 void HOptimizedGraphBuilder::GenerateGetFromCache(CallRuntime* call) {
11291 return Bailout(kInlinedRuntimeFunctionGetFromCache);
11295 // Fast support for number to string.
// Uses the generic Any type since nothing narrower is known here.
11296 void HOptimizedGraphBuilder::GenerateNumberToString(CallRuntime* call) {
11297 ASSERT_EQ(1, call->arguments()->length());
11298 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11299 HValue* number = Pop();
11300 HValue* result = BuildNumberToString(number, Type::Any(zone()));
11301 return ast_context()->ReturnValue(result);
11305 // Fast call for custom callbacks.
// %_CallFunction(receiver, ..., fn): the callee is the LAST argument.
// Dispatches on whether the callee is a JSFunction: direct invoke if so,
// generic HCallFunction otherwise, merging the two arms afterwards.
11306 void HOptimizedGraphBuilder::GenerateCallFunction(CallRuntime* call) {
11307 // 1 ~ The function to call is not itself an argument to the call.
11308 int arg_count = call->arguments()->length() - 1;
11309 ASSERT(arg_count >= 1); // There's always at least a receiver.
11311 CHECK_ALIVE(VisitExpressions(call->arguments()));
11312 // The function is the last argument
11313 HValue* function = Pop();
11314 // Push the arguments to the stack
11315 PushArgumentsFromEnvironment(arg_count);
11317 IfBuilder if_is_jsfunction(this);
11318 if_is_jsfunction.If<HHasInstanceTypeAndBranch>(function, JS_FUNCTION_TYPE);
11320 if_is_jsfunction.Then();
11322 HInstruction* invoke_result =
11323 Add<HInvokeFunction>(function, arg_count);
11324 if (!ast_context()->IsEffect()) {
11325 Push(invoke_result);
// Simulates keep the deopt environment in sync after each call arm.
11327 Add<HSimulate>(call->id(), FIXED_SIMULATE);
11330 if_is_jsfunction.Else();
11332 HInstruction* call_result =
11333 Add<HCallFunction>(function, arg_count);
// NOTE(review): the Push(call_result) for the non-effect case appears to
// be elided from this excerpt — confirm against the full source.
11334 if (!ast_context()->IsEffect()) {
11337 Add<HSimulate>(call->id(), FIXED_SIMULATE);
11339 if_is_jsfunction.End();
11341 if (ast_context()->IsEffect()) {
11342 // EffectContext::ReturnValue ignores the value, so we can just pass
11343 // 'undefined' (as we do not have the call result anymore).
11344 return ast_context()->ReturnValue(graph()->GetConstantUndefined());
11346 return ast_context()->ReturnValue(Pop());
11351 // Fast call to math functions.
// %_MathPow(x, y): emitted as a dedicated HPower instruction.
11352 void HOptimizedGraphBuilder::GenerateMathPow(CallRuntime* call) {
11353 ASSERT_EQ(2, call->arguments()->length());
11354 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11355 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11356 HValue* right = Pop();
11357 HValue* left = Pop();
11358 HInstruction* result = NewUncasted<HPower>(left, right);
11359 return ast_context()->ReturnInstruction(result, call->id());
// %_MathLog(x): unary math operation with the kMathLog opcode.
11363 void HOptimizedGraphBuilder::GenerateMathLog(CallRuntime* call) {
11364 ASSERT(call->arguments()->length() == 1);
11365 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11366 HValue* value = Pop();
11367 HInstruction* result = NewUncasted<HUnaryMathOperation>(value, kMathLog);
11368 return ast_context()->ReturnInstruction(result, call->id());
// %_MathSqrt(x): unary math operation with the kMathSqrt opcode.
11372 void HOptimizedGraphBuilder::GenerateMathSqrt(CallRuntime* call) {
11373 ASSERT(call->arguments()->length() == 1);
11374 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11375 HValue* value = Pop();
11376 HInstruction* result = NewUncasted<HUnaryMathOperation>(value, kMathSqrt);
11377 return ast_context()->ReturnInstruction(result, call->id());
// %_GetCachedArrayIndex(string): read the array index cached in a string's
// hash field.
11381 void HOptimizedGraphBuilder::GenerateGetCachedArrayIndex(CallRuntime* call) {
11382 ASSERT(call->arguments()->length() == 1);
11383 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11384 HValue* value = Pop();
11385 HGetCachedArrayIndex* result = New<HGetCachedArrayIndex>(value);
11386 return ast_context()->ReturnInstruction(result, call->id());
// %_FastAsciiArrayJoin is not supported in optimized code; bail out.
11390 void HOptimizedGraphBuilder::GenerateFastAsciiArrayJoin(CallRuntime* call) {
11391 return Bailout(kInlinedRuntimeFunctionFastAsciiArrayJoin);
11395 // Support for generators.
// Generator resumption is never inlined in optimized code; bail out.
11396 void HOptimizedGraphBuilder::GenerateGeneratorNext(CallRuntime* call) {
11397 return Bailout(kInlinedRuntimeFunctionGeneratorNext);
// Generator throw is never inlined in optimized code; bail out.
11401 void HOptimizedGraphBuilder::GenerateGeneratorThrow(CallRuntime* call) {
11402 return Bailout(kInlinedRuntimeFunctionGeneratorThrow);
// Emits an explicit debug-break instruction and yields the constant 0.
11406 void HOptimizedGraphBuilder::GenerateDebugBreakInOptimizedCode(
11407 CallRuntime* call) {
11408 Add<HDebugBreak>();
11409 return ast_context()->ReturnValue(graph()->GetConstant0());
// Stepping support is a debugger feature; optimized code answers false.
11413 void HOptimizedGraphBuilder::GenerateDebugCallbackSupportsStepping(
11414 CallRuntime* call) {
11415 ASSERT(call->arguments()->length() == 1);
11416 // Debugging is not supported in optimized code.
11417 return ast_context()->ReturnValue(graph()->GetConstantFalse());
11421 #undef CHECK_BAILOUT
// Constructs a JS_FUNCTION frame environment for the given closure.
// Size is parameters + 1 (receiver) plus the scope's stack slots.
// NOTE(review): several parameters and member initializers are elided from
// this excerpt (e.g. the scope parameter used below) — confirm against the
// full source.
11425 HEnvironment::HEnvironment(HEnvironment* outer,
11427 Handle<JSFunction> closure,
11429 : closure_(closure),
11431 frame_type_(JS_FUNCTION),
11432 parameter_count_(0),
// specials_count_ of 1 reserves a slot for the context.
11433 specials_count_(1),
11439 ast_id_(BailoutId::None()),
11441 Initialize(scope->num_parameters() + 1, scope->num_stack_slots(), 0);
// Constructs a bare environment with only parameters (no locals, no
// expression stack), e.g. for stub compilation.
// NOTE(review): some member initializers are elided from this excerpt.
11445 HEnvironment::HEnvironment(Zone* zone, int parameter_count)
11446 : values_(0, zone),
11448 parameter_count_(parameter_count),
11449 specials_count_(1),
11455 ast_id_(BailoutId::None()),
11457 Initialize(parameter_count, 0, 0);
// Copy constructor used by Copy(); the heavy lifting (copying values,
// counts, outer chain) happens in Initialize(other), whose call is elided
// from this excerpt along with several initializers.
11461 HEnvironment::HEnvironment(const HEnvironment* other, Zone* zone)
11462 : values_(0, zone),
11463 frame_type_(JS_FUNCTION),
11464 parameter_count_(0),
11465 specials_count_(0),
11471 ast_id_(other->ast_id()),
// Constructs an artificial (stub/adaptor) frame environment of the given
// frame_type holding 'arguments' values; used by CreateStubEnvironment.
// NOTE(review): trailing parameters and some initializers are elided from
// this excerpt.
11477 HEnvironment::HEnvironment(HEnvironment* outer,
11478 Handle<JSFunction> closure,
11479 FrameType frame_type,
11482 : closure_(closure),
11483 values_(arguments, zone),
11484 frame_type_(frame_type),
11485 parameter_count_(arguments),
11486 specials_count_(0),
11492 ast_id_(BailoutId::None()),
// Sizes the environment and fills every slot with NULL placeholders.
11497 void HEnvironment::Initialize(int parameter_count,
11499 int stack_height) {
11500 parameter_count_ = parameter_count;
11501 local_count_ = local_count;
11503 // Avoid reallocating the temporaries' backing store on the first Push.
11504 int total = parameter_count + specials_count_ + local_count + stack_height;
// +4 slack so the first few pushes never trigger a realloc.
11505 values_.Initialize(total + 4, zone());
11506 for (int i = 0; i < total; ++i) values_.Add(NULL, zone());
// Copies all state from 'other', deep-copying the outer environment chain
// so the copies can diverge independently.
11510 void HEnvironment::Initialize(const HEnvironment* other) {
11511 closure_ = other->closure();
11512 values_.AddAll(other->values_, zone());
11513 assigned_variables_.Union(other->assigned_variables_, zone());
11514 frame_type_ = other->frame_type_;
11515 parameter_count_ = other->parameter_count_;
11516 local_count_ = other->local_count_;
11517 if (other->outer_ != NULL) outer_ = other->outer_->Copy(); // Deep copy.
11518 entry_ = other->entry_;
11519 pop_count_ = other->pop_count_;
11520 push_count_ = other->push_count_;
11521 specials_count_ = other->specials_count_;
11522 ast_id_ = other->ast_id_;
// Merges 'other' (the environment flowing in along a new predecessor edge)
// into this environment at 'block'. Slots that already hold a phi of this
// block get a new input; slots that differ get a fresh phi fed by the old
// value for every existing predecessor plus the incoming value.
11526 void HEnvironment::AddIncomingEdge(HBasicBlock* block, HEnvironment* other) {
11527 ASSERT(!block->IsLoopHeader());
11528 ASSERT(values_.length() == other->values_.length());
11530 int length = values_.length();
11531 for (int i = 0; i < length; ++i) {
11532 HValue* value = values_[i];
11533 if (value != NULL && value->IsPhi() && value->block() == block) {
11534 // There is already a phi for the i'th value.
11535 HPhi* phi = HPhi::cast(value);
11536 // Assert index is correct and that we haven't missed an incoming edge.
11537 ASSERT(phi->merged_index() == i || !phi->HasMergedIndex());
11538 ASSERT(phi->OperandCount() == block->predecessors()->length());
11539 phi->AddInput(other->values_[i]);
11540 } else if (values_[i] != other->values_[i]) {
11541 // There is a fresh value on the incoming edge, a phi is needed.
11542 ASSERT(values_[i] != NULL && other->values_[i] != NULL);
11543 HPhi* phi = block->AddNewPhi(i);
11544 HValue* old_value = values_[i];
// One input of the old value per already-registered predecessor.
11545 for (int j = 0; j < block->predecessors()->length(); j++) {
11546 phi->AddInput(old_value);
11548 phi->AddInput(other->values_[i]);
11549 this->values_[i] = phi;
// Binds slot 'index' to 'value' and records the slot as assigned (used for
// OSR/loop-merge bookkeeping).
11555 void HEnvironment::Bind(int index, HValue* value) {
11556 ASSERT(value != NULL);
11557 assigned_variables_.Add(index, zone());
11558 values_[index] = value;
// True when 'index' falls in the expression-stack region, which starts
// after parameters, specials, and locals.
11562 bool HEnvironment::HasExpressionAt(int index) const {
11563 return index >= parameter_count_ + specials_count_ + local_count_;
// The expression stack is empty when total length equals the index where
// expressions would begin.
11567 bool HEnvironment::ExpressionStackIsEmpty() const {
11568 ASSERT(length() >= first_expression_index());
11569 return length() == first_expression_index();
// Overwrites the expression-stack slot 'index_from_top' positions below the
// top, adjusting pop/push counts so the change shows up in the simulate
// history.
11573 void HEnvironment::SetExpressionStackAt(int index_from_top, HValue* value) {
11574 int count = index_from_top + 1;
11575 int index = values_.length() - count;
11576 ASSERT(HasExpressionAt(index));
11577 // The push count must include at least the element in question or else
11578 // the new value will not be included in this environment's history.
11579 if (push_count_ < count) {
11580 // This is the same effect as popping then re-pushing 'count' elements.
11581 pop_count_ += (count - push_count_);
11582 push_count_ = count;
11584 values_[index] = value;
// Pops 'count' values off the expression stack (loop body elided in this
// excerpt; presumably calls Pop() per iteration — confirm in full source).
11588 void HEnvironment::Drop(int count) {
11589 for (int i = 0; i < count; ++i) {
// Zone-allocates a full copy of this environment (deep-copies outer chain
// via the copy constructor).
11595 HEnvironment* HEnvironment::Copy() const {
11596 return new(zone()) HEnvironment(this, zone());
// Copy with the push/pop simulate history cleared; used when the copy
// starts a fresh deopt segment.
11600 HEnvironment* HEnvironment::CopyWithoutHistory() const {
11601 HEnvironment* result = Copy();
11602 result->ClearHistory();
// Copy in which every slot is replaced by a fresh loop-header phi seeded
// with the current value; back edges add their inputs later.
11607 HEnvironment* HEnvironment::CopyAsLoopHeader(HBasicBlock* loop_header) const {
11608 HEnvironment* new_env = Copy();
11609 for (int i = 0; i < values_.length(); ++i) {
11610 HPhi* phi = loop_header->AddNewPhi(i);
11611 phi->AddInput(values_[i]);
11612 new_env->values_[i] = phi;
11614 new_env->ClearHistory();
// Builds an artificial frame environment (constructor stub, getter/setter
// stub, or arguments adaptor) holding the receiver plus 'arguments' values
// copied off this environment's expression stack, receiver first.
11619 HEnvironment* HEnvironment::CreateStubEnvironment(HEnvironment* outer,
11620 Handle<JSFunction> target,
11621 FrameType frame_type,
11622 int arguments) const {
11623 HEnvironment* new_env =
11624 new(zone()) HEnvironment(outer, target, frame_type,
11625 arguments + 1, zone());
11626 for (int i = 0; i <= arguments; ++i) { // Include receiver.
11627 new_env->Push(ExpressionStackAt(arguments - i));
11629 new_env->ClearHistory();
// Builds the environment for an inlined call to 'function':
//  - the outer environment is a copy of this one minus the call's
//    receiver/arguments,
//  - artificial stub frames are interposed for construct/getter/setter
//    returns, and an arguments adaptor frame when arity != arguments,
//  - the inner environment holds receiver+args (padded with undefined when
//    the call site passed fewer than the callee's arity), the context, and
//    undefined for all locals.
// NOTE(review): some parameters (e.g. the 'arguments' count) are elided
// from the signature in this excerpt — confirm against the full source.
11634 HEnvironment* HEnvironment::CopyForInlining(
11635 Handle<JSFunction> target,
11637 FunctionLiteral* function,
11638 HConstant* undefined,
11639 InliningKind inlining_kind) const {
11640 ASSERT(frame_type() == JS_FUNCTION);
11642 // Outer environment is a copy of this one without the arguments.
11643 int arity = function->scope()->num_parameters();
11645 HEnvironment* outer = Copy();
11646 outer->Drop(arguments + 1); // Including receiver.
11647 outer->ClearHistory();
11649 if (inlining_kind == CONSTRUCT_CALL_RETURN) {
11650 // Create artificial constructor stub environment. The receiver should
11651 // actually be the constructor function, but we pass the newly allocated
11652 // object instead, DoComputeConstructStubFrame() relies on that.
11653 outer = CreateStubEnvironment(outer, target, JS_CONSTRUCT, arguments);
11654 } else if (inlining_kind == GETTER_CALL_RETURN) {
11655 // We need an additional StackFrame::INTERNAL frame for restoring the
11656 // correct context.
11657 outer = CreateStubEnvironment(outer, target, JS_GETTER, arguments);
11658 } else if (inlining_kind == SETTER_CALL_RETURN) {
11659 // We need an additional StackFrame::INTERNAL frame for temporarily saving
11660 // the argument of the setter, see StoreStubCompiler::CompileStoreViaSetter.
11661 outer = CreateStubEnvironment(outer, target, JS_SETTER, arguments);
11664 if (arity != arguments) {
11665 // Create artificial arguments adaptation environment.
11666 outer = CreateStubEnvironment(outer, target, ARGUMENTS_ADAPTOR, arguments);
11669 HEnvironment* inner =
11670 new(zone()) HEnvironment(outer, function->scope(), target, zone());
11671 // Get the argument values from the original environment.
11672 for (int i = 0; i <= arity; ++i) { // Include receiver.
// Missing actual arguments are padded with the undefined constant.
11673 HValue* push = (i <= arguments) ?
11674 ExpressionStackAt(arguments - i) : undefined;
11675 inner->SetValueAt(i, push);
// Slot arity+1 is the context; all remaining slots start as undefined.
11677 inner->SetValueAt(arity + 1, context());
11678 for (int i = arity + 2; i < inner->length(); ++i) {
11679 inner->SetValueAt(i, undefined);
11682 inner->set_ast_id(BailoutId::FunctionEntry());
// Debug printer: dumps every slot, labelled by the region (parameters,
// specials, locals, expressions) it falls into; NULL slots print "NULL".
11687 void HEnvironment::PrintTo(StringStream* stream) {
11688 for (int i = 0; i < length(); i++) {
11689 if (i == 0) stream->Add("parameters\n");
11690 if (i == parameter_count()) stream->Add("specials\n");
11691 if (i == parameter_count() + specials_count()) stream->Add("locals\n");
11692 if (i == parameter_count() + specials_count() + local_count()) {
11693 stream->Add("expressions\n");
11695 HValue* val = values_.at(i);
11696 stream->Add("%d: ", i);
11698 val->PrintNameTo(stream);
11700 stream->Add("NULL");
// Convenience wrapper: renders PrintTo output into a heap-backed string
// stream and writes it to stdout (the PrintTo call is elided here).
11708 void HEnvironment::PrintToStd() {
11709 HeapStringAllocator string_allocator;
11710 StringStream trace(&string_allocator);
11712 PrintF("%s", trace.ToCString().get());
// Emits the "compilation" header of the c1visualizer-style trace: function
// name and optimization id for optimized JS code, stub name otherwise.
11716 void HTracer::TraceCompilation(CompilationInfo* info) {
11717 Tag tag(this, "compilation");
11718 if (info->IsOptimizing()) {
11719 Handle<String> name = info->function()->debug_name();
11720 PrintStringProperty("name", name->ToCString().get());
11722 trace_.Add("method \"%s:%d\"\n",
11723 name->ToCString().get(),
11724 info->optimization_id());
11726 CodeStub::Major major_key = info->code_stub()->MajorKey();
11727 PrintStringProperty("name", CodeStub::MajorName(major_key, false));
11728 PrintStringProperty("method", "stub");
11730 PrintLongProperty("date", static_cast<int64_t>(OS::TimeCurrentMillis()));
// Traces a Lithium chunk (plus its Hydrogen graph). Handle dereference is
// explicitly allowed because concurrent recompilation is asserted off.
11734 void HTracer::TraceLithium(const char* name, LChunk* chunk) {
11735 ASSERT(!chunk->isolate()->concurrent_recompilation_enabled());
11736 AllowHandleDereference allow_deref;
11737 AllowDeferredHandleDereference allow_deferred_deref;
11738 Trace(name, chunk->graph(), chunk);
// Traces a Hydrogen graph without Lithium (chunk == NULL).
11742 void HTracer::TraceHydrogen(const char* name, HGraph* graph) {
11743 ASSERT(!graph->isolate()->concurrent_recompilation_enabled());
11744 AllowHandleDereference allow_deref;
11745 AllowDeferredHandleDereference allow_deferred_deref;
11746 Trace(name, graph, NULL);
// Core trace emitter: writes one "cfg" section in the c1visualizer format.
// For each basic block it emits name/bci properties, predecessor and
// successor lists, flags, dominator, loop depth, LIR id range (when a
// chunk is present), the phi list, all HIR instructions (with use counts
// and optional source positions), and finally the LIR instructions.
11750 void HTracer::Trace(const char* name, HGraph* graph, LChunk* chunk) {
11751 Tag tag(this, "cfg");
11752 PrintStringProperty("name", name);
11753 const ZoneList<HBasicBlock*>* blocks = graph->blocks();
11754 for (int i = 0; i < blocks->length(); i++) {
11755 HBasicBlock* current = blocks->at(i);
11756 Tag block_tag(this, "block");
11757 PrintBlockProperty("name", current->block_id());
// from_bci/to_bci are bytecode offsets; -1 since Hydrogen has none.
11758 PrintIntProperty("from_bci", -1);
11759 PrintIntProperty("to_bci", -1);
11761 if (!current->predecessors()->is_empty()) {
11763 trace_.Add("predecessors");
11764 for (int j = 0; j < current->predecessors()->length(); ++j) {
11765 trace_.Add(" \"B%d\"", current->predecessors()->at(j)->block_id());
11769 PrintEmptyProperty("predecessors");
11772 if (current->end()->SuccessorCount() == 0) {
11773 PrintEmptyProperty("successors");
11776 trace_.Add("successors");
11777 for (HSuccessorIterator it(current->end()); !it.Done(); it.Advance()) {
11778 trace_.Add(" \"B%d\"", it.Current()->block_id());
// No exception handlers in Hydrogen; always empty.
11783 PrintEmptyProperty("xhandlers");
11787 trace_.Add("flags");
11788 if (current->IsLoopSuccessorDominator()) {
11789 trace_.Add(" \"dom-loop-succ\"");
11791 if (current->IsUnreachable()) {
11792 trace_.Add(" \"dead\"");
11794 if (current->is_osr_entry()) {
11795 trace_.Add(" \"osr\"");
11800 if (current->dominator() != NULL) {
11801 PrintBlockProperty("dominator", current->dominator()->block_id());
11804 PrintIntProperty("loop_depth", current->LoopNestingDepth());
11806 if (chunk != NULL) {
11807 int first_index = current->first_instruction_index();
11808 int last_index = current->last_instruction_index();
11811 LifetimePosition::FromInstructionIndex(first_index).Value());
11814 LifetimePosition::FromInstructionIndex(last_index).Value());
11818 Tag states_tag(this, "states");
11819 Tag locals_tag(this, "locals");
11820 int total = current->phis()->length();
11821 PrintIntProperty("size", current->phis()->length());
11822 PrintStringProperty("method", "None");
11823 for (int j = 0; j < total; ++j) {
11824 HPhi* phi = current->phis()->at(j);
11826 trace_.Add("%d ", phi->merged_index());
11827 phi->PrintNameTo(&trace_);
11829 phi->PrintTo(&trace_);
11835 Tag HIR_tag(this, "HIR");
11836 for (HInstructionIterator it(current); !it.Done(); it.Advance()) {
11837 HInstruction* instruction = it.Current();
11838 int uses = instruction->UseCount();
11840 trace_.Add("0 %d ", uses);
11841 instruction->PrintNameTo(&trace_);
11843 instruction->PrintTo(&trace_);
11844 if (FLAG_hydrogen_track_positions &&
11845 instruction->has_position() &&
11846 instruction->position().raw() != 0) {
11847 const HSourcePosition pos = instruction->position();
11848 trace_.Add(" pos:");
// Positions inside inlined functions are prefixed with the inlining id.
11849 if (pos.inlining_id() != 0) {
11850 trace_.Add("%d_", pos.inlining_id());
11852 trace_.Add("%d", pos.position());
// " <|@" is the c1visualizer end-of-record marker.
11854 trace_.Add(" <|@\n");
11859 if (chunk != NULL) {
11860 Tag LIR_tag(this, "LIR");
11861 int first_index = current->first_instruction_index();
11862 int last_index = current->last_instruction_index();
11863 if (first_index != -1 && last_index != -1) {
11864 const ZoneList<LInstruction*>* instructions = chunk->instructions();
11865 for (int i = first_index; i <= last_index; ++i) {
11866 LInstruction* linstr = instructions->at(i);
11867 if (linstr != NULL) {
11870 LifetimePosition::FromInstructionIndex(i).Value());
11871 linstr->PrintTo(&trace_);
11872 trace_.Add(" [hir:");
11873 linstr->hydrogen_value()->PrintNameTo(&trace_);
11875 trace_.Add(" <|@\n");
// Emits the "intervals" section: fixed double ranges, fixed general
// ranges, then all virtual-register live ranges.
11884 void HTracer::TraceLiveRanges(const char* name, LAllocator* allocator) {
11885 Tag tag(this, "intervals");
11886 PrintStringProperty("name", name);
11888 const Vector<LiveRange*>* fixed_d = allocator->fixed_double_live_ranges();
11889 for (int i = 0; i < fixed_d->length(); ++i) {
11890 TraceLiveRange(fixed_d->at(i), "fixed", allocator->zone());
11893 const Vector<LiveRange*>* fixed = allocator->fixed_live_ranges();
11894 for (int i = 0; i < fixed->length(); ++i) {
11895 TraceLiveRange(fixed->at(i), "fixed", allocator->zone());
11898 const ZoneList<LiveRange*>* live_ranges = allocator->live_ranges();
11899 for (int i = 0; i < live_ranges->length(); ++i) {
11900 TraceLiveRange(live_ranges->at(i), "object", allocator->zone());
// Emits one live range record: id, type, assigned register or spill slot
// (with SIMD128 variants sharing the same allocation-index namespace),
// parent/hint ids, the covered use intervals, and use positions.
11905 void HTracer::TraceLiveRange(LiveRange* range, const char* type,
11907 if (range != NULL && !range->IsEmpty()) {
11909 trace_.Add("%d %s", range->id(), type);
11910 if (range->HasRegisterAssigned()) {
11911 LOperand* op = range->CreateAssignedOperand(zone);
11912 int assigned_reg = op->index();
11913 if (op->IsDoubleRegister()) {
11914 trace_.Add(" \"%s\"",
11915 DoubleRegister::AllocationIndexToString(assigned_reg));
// All three SIMD128 operand kinds map to the same register file.
11916 } else if (op->IsFloat32x4Register()) {
11917 trace_.Add(" \"%s\"",
11918 SIMD128Register::AllocationIndexToString(assigned_reg));
11919 } else if (op->IsFloat64x2Register()) {
11920 trace_.Add(" \"%s\"",
11921 SIMD128Register::AllocationIndexToString(assigned_reg));
11922 } else if (op->IsInt32x4Register()) {
11923 trace_.Add(" \"%s\"",
11924 SIMD128Register::AllocationIndexToString(assigned_reg));
11926 ASSERT(op->IsRegister());
11927 trace_.Add(" \"%s\"", Register::AllocationIndexToString(assigned_reg));
11929 } else if (range->IsSpilled()) {
11930 LOperand* op = range->TopLevel()->GetSpillOperand();
11931 if (op->IsDoubleStackSlot()) {
11932 trace_.Add(" \"double_stack:%d\"", op->index());
11933 } else if (op->IsFloat32x4StackSlot()) {
11934 trace_.Add(" \"float32x4_stack:%d\"", op->index());
11935 } else if (op->IsFloat64x2StackSlot()) {
11936 trace_.Add(" \"float64x2_stack:%d\"", op->index());
11937 } else if (op->IsInt32x4StackSlot()) {
11938 trace_.Add(" \"int32x4_stack:%d\"", op->index());
11940 ASSERT(op->IsStackSlot());
11941 trace_.Add(" \"stack:%d\"", op->index());
// Child ranges point at their top-level parent; top levels use own id.
11944 int parent_index = -1;
11945 if (range->IsChild()) {
11946 parent_index = range->parent()->id();
11948 parent_index = range->id();
11950 LOperand* op = range->FirstHint();
11951 int hint_index = -1;
11952 if (op != NULL && op->IsUnallocated()) {
11953 hint_index = LUnallocated::cast(op)->virtual_register();
11955 trace_.Add(" %d %d", parent_index, hint_index);
11956 UseInterval* cur_interval = range->first_interval();
11957 while (cur_interval != NULL && range->Covers(cur_interval->start())) {
11958 trace_.Add(" [%d, %d[",
11959 cur_interval->start().Value(),
11960 cur_interval->end().Value());
11961 cur_interval = cur_interval->next();
11964 UsePosition* current_pos = range->first_pos();
11965 while (current_pos != NULL) {
11966 if (current_pos->RegisterIsBeneficial() || FLAG_trace_all_uses) {
11967 trace_.Add(" %d M", current_pos->pos().Value());
11969 current_pos = current_pos->next();
11972 trace_.Add(" \"\"\n");
// Appends the accumulated trace buffer to the trace file (buffer reset and
// trailing arguments elided in this excerpt).
11977 void HTracer::FlushToFile() {
11978 AppendChars(filename_.start(), trace_.ToCString().get(), trace_.length(),
// Accumulates the source size of each compiled function; skipped when
// there is no shared function info (e.g. stubs).
11984 void HStatistics::Initialize(CompilationInfo* info) {
11985 if (info->shared_info().is_null()) return;
11986 source_size_ += info->shared_info()->SourceSize();
// Prints the per-phase timing table (time and code-size percentages),
// phase-group totals, the slowdown factor relative to full codegen, and
// per-kB-of-source averages. The accumulation of 'sum' over times_ is
// partially elided in this excerpt.
11990 void HStatistics::Print() {
11991 PrintF("Timing results:\n");
11993 for (int i = 0; i < times_.length(); ++i) {
11997 for (int i = 0; i < names_.length(); ++i) {
11998 PrintF("%32s", names_[i]);
11999 double ms = times_[i].InMillisecondsF();
12000 double percent = times_[i].PercentOf(sum);
12001 PrintF(" %8.3f ms / %4.1f %% ", ms, percent);
12003 unsigned size = sizes_[i];
12004 double size_percent = static_cast<double>(size) * 100 / total_size_;
12005 PrintF(" %9u bytes / %4.1f %%\n", size, size_percent);
12008 PrintF("----------------------------------------"
12009 "---------------------------------------\n");
12010 TimeDelta total = create_graph_ + optimize_graph_ + generate_code_;
12011 PrintF("%32s %8.3f ms / %4.1f %% \n",
12013 create_graph_.InMillisecondsF(),
12014 create_graph_.PercentOf(total));
12015 PrintF("%32s %8.3f ms / %4.1f %% \n",
12017 optimize_graph_.InMillisecondsF(),
12018 optimize_graph_.PercentOf(total));
12019 PrintF("%32s %8.3f ms / %4.1f %% \n",
12020 "Generate and install code",
12021 generate_code_.InMillisecondsF(),
12022 generate_code_.PercentOf(total));
12023 PrintF("----------------------------------------"
12024 "---------------------------------------\n");
12025 PrintF("%32s %8.3f ms (%.1f times slower than full code gen)\n",
12027 total.InMillisecondsF(),
12028 total.TimesOf(full_code_gen_));
12030 double source_size_in_kb = static_cast<double>(source_size_) / 1024;
// Guard against division by zero when no source was recorded.
12031 double normalized_time = source_size_in_kb > 0
12032 ? total.InMillisecondsF() / source_size_in_kb
12034 double normalized_size_in_kb = source_size_in_kb > 0
12035 ? total_size_ / 1024 / source_size_in_kb
12037 PrintF("%32s %8.3f ms %7.3f kB allocated\n",
12038 "Average per kB source",
12039 normalized_time, normalized_size_in_kb);
// Records the time and generated-code size for the phase 'name',
// aggregating into an existing entry when the name matches one already in
// names_ (the body of the match case and the append path for new names are
// elided from this excerpt).
12043 void HStatistics::SaveTiming(const char* name, TimeDelta time, unsigned size) {
12044 total_size_ += size;
12045 for (int i = 0; i < names_.length(); ++i) {
12046 if (strcmp(names_[i], name) == 0) {
// On phase exit: dump the graph to the Hydrogen tracer when tracing is
// enabled, then run a lightweight graph verification in debug builds.
12058 HPhase::~HPhase() {
12059 if (ShouldProduceTraceOutput()) {
12060 isolate()->GetHTracer()->TraceHydrogen(name(), graph_);
12064 graph_->Verify(false); // No full verify.
12068 } } // namespace v8::internal