1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "src/hydrogen.h"
11 #include "src/allocation-site-scopes.h"
12 #include "src/ast-numbering.h"
13 #include "src/full-codegen.h"
14 #include "src/hydrogen-bce.h"
15 #include "src/hydrogen-bch.h"
16 #include "src/hydrogen-canonicalize.h"
17 #include "src/hydrogen-check-elimination.h"
18 #include "src/hydrogen-dce.h"
19 #include "src/hydrogen-dehoist.h"
20 #include "src/hydrogen-environment-liveness.h"
21 #include "src/hydrogen-escape-analysis.h"
22 #include "src/hydrogen-gvn.h"
23 #include "src/hydrogen-infer-representation.h"
24 #include "src/hydrogen-infer-types.h"
25 #include "src/hydrogen-load-elimination.h"
26 #include "src/hydrogen-mark-deoptimize.h"
27 #include "src/hydrogen-mark-unreachable.h"
28 #include "src/hydrogen-osr.h"
29 #include "src/hydrogen-range-analysis.h"
30 #include "src/hydrogen-redundant-phi.h"
31 #include "src/hydrogen-removable-simulates.h"
32 #include "src/hydrogen-representation-changes.h"
33 #include "src/hydrogen-sce.h"
34 #include "src/hydrogen-store-elimination.h"
35 #include "src/hydrogen-uint32-analysis.h"
36 #include "src/ic/call-optimization.h"
37 #include "src/ic/ic.h"
39 #include "src/ic/ic-inl.h"
40 #include "src/lithium-allocator.h"
41 #include "src/parser.h"
42 #include "src/runtime/runtime.h"
43 #include "src/scopeinfo.h"
44 #include "src/typing.h"
46 #if V8_TARGET_ARCH_IA32
47 #include "src/ia32/lithium-codegen-ia32.h" // NOLINT
48 #elif V8_TARGET_ARCH_X64
49 #include "src/x64/lithium-codegen-x64.h" // NOLINT
50 #elif V8_TARGET_ARCH_ARM64
51 #include "src/arm64/lithium-codegen-arm64.h" // NOLINT
52 #elif V8_TARGET_ARCH_ARM
53 #include "src/arm/lithium-codegen-arm.h" // NOLINT
54 #elif V8_TARGET_ARCH_MIPS
55 #include "src/mips/lithium-codegen-mips.h" // NOLINT
56 #elif V8_TARGET_ARCH_MIPS64
57 #include "src/mips64/lithium-codegen-mips64.h" // NOLINT
58 #elif V8_TARGET_ARCH_X87
59 #include "src/x87/lithium-codegen-x87.h" // NOLINT
61 #error Unsupported target architecture.
67 HBasicBlock::HBasicBlock(HGraph* graph)
68 : block_id_(graph->GetNextBlockID()),
70 phis_(4, graph->zone()),
74 loop_information_(NULL),
75 predecessors_(2, graph->zone()),
77 dominated_blocks_(4, graph->zone()),
78 last_environment_(NULL),
80 first_instruction_index_(-1),
81 last_instruction_index_(-1),
82 deleted_phis_(4, graph->zone()),
83 parent_loop_header_(NULL),
84 inlined_entry_block_(NULL),
85 is_inline_return_target_(false),
87 dominates_loop_successors_(false),
89 is_ordered_(false) { }
92 Isolate* HBasicBlock::isolate() const {
93 return graph_->isolate();
97 void HBasicBlock::MarkUnreachable() {
98 is_reachable_ = false;
102 void HBasicBlock::AttachLoopInformation() {
103 DCHECK(!IsLoopHeader());
104 loop_information_ = new(zone()) HLoopInformation(this, zone());
108 void HBasicBlock::DetachLoopInformation() {
109 DCHECK(IsLoopHeader());
110 loop_information_ = NULL;
114 void HBasicBlock::AddPhi(HPhi* phi) {
115 DCHECK(!IsStartBlock());
116 phis_.Add(phi, zone());
121 void HBasicBlock::RemovePhi(HPhi* phi) {
122 DCHECK(phi->block() == this);
123 DCHECK(phis_.Contains(phi));
125 phis_.RemoveElement(phi);
130 void HBasicBlock::AddInstruction(HInstruction* instr,
131 HSourcePosition position) {
132 DCHECK(!IsStartBlock() || !IsFinished());
133 DCHECK(!instr->IsLinked());
134 DCHECK(!IsFinished());
136 if (!position.IsUnknown()) {
137 instr->set_position(position);
139 if (first_ == NULL) {
140 DCHECK(last_environment() != NULL);
141 DCHECK(!last_environment()->ast_id().IsNone());
142 HBlockEntry* entry = new(zone()) HBlockEntry();
143 entry->InitializeAsFirst(this);
144 if (!position.IsUnknown()) {
145 entry->set_position(position);
147 DCHECK(!FLAG_hydrogen_track_positions ||
148 !graph()->info()->IsOptimizing() || instr->IsAbnormalExit());
150 first_ = last_ = entry;
152 instr->InsertAfter(last_);
156 HPhi* HBasicBlock::AddNewPhi(int merged_index) {
157 if (graph()->IsInsideNoSideEffectsScope()) {
158 merged_index = HPhi::kInvalidMergedIndex;
160 HPhi* phi = new(zone()) HPhi(merged_index, zone());
166 HSimulate* HBasicBlock::CreateSimulate(BailoutId ast_id,
167 RemovableSimulate removable) {
168 DCHECK(HasEnvironment());
169 HEnvironment* environment = last_environment();
170 DCHECK(ast_id.IsNone() ||
171 ast_id == BailoutId::StubEntry() ||
172 environment->closure()->shared()->VerifyBailoutId(ast_id));
174 int push_count = environment->push_count();
175 int pop_count = environment->pop_count();
178 new(zone()) HSimulate(ast_id, pop_count, zone(), removable);
180 instr->set_closure(environment->closure());
182 // Order of pushed values: newest (top of stack) first. This allows
183 // HSimulate::MergeWith() to easily append additional pushed values
184 // that are older (from further down the stack).
185 for (int i = 0; i < push_count; ++i) {
186 instr->AddPushedValue(environment->ExpressionStackAt(i));
188 for (GrowableBitVector::Iterator it(environment->assigned_variables(),
192 int index = it.Current();
193 instr->AddAssignedValue(index, environment->Lookup(index));
195 environment->ClearHistory();
200 void HBasicBlock::Finish(HControlInstruction* end, HSourcePosition position) {
201 DCHECK(!IsFinished());
202 AddInstruction(end, position);
204 for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
205 it.Current()->RegisterPredecessor(this);
210 void HBasicBlock::Goto(HBasicBlock* block,
211 HSourcePosition position,
212 FunctionState* state,
214 bool drop_extra = state != NULL &&
215 state->inlining_kind() == NORMAL_RETURN;
217 if (block->IsInlineReturnTarget()) {
218 HEnvironment* env = last_environment();
219 int argument_count = env->arguments_environment()->parameter_count();
220 AddInstruction(new(zone())
221 HLeaveInlined(state->entry(), argument_count),
223 UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
226 if (add_simulate) AddNewSimulate(BailoutId::None(), position);
227 HGoto* instr = new(zone()) HGoto(block);
228 Finish(instr, position);
232 void HBasicBlock::AddLeaveInlined(HValue* return_value,
233 FunctionState* state,
234 HSourcePosition position) {
235 HBasicBlock* target = state->function_return();
236 bool drop_extra = state->inlining_kind() == NORMAL_RETURN;
238 DCHECK(target->IsInlineReturnTarget());
239 DCHECK(return_value != NULL);
240 HEnvironment* env = last_environment();
241 int argument_count = env->arguments_environment()->parameter_count();
242 AddInstruction(new(zone()) HLeaveInlined(state->entry(), argument_count),
244 UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
245 last_environment()->Push(return_value);
246 AddNewSimulate(BailoutId::None(), position);
247 HGoto* instr = new(zone()) HGoto(target);
248 Finish(instr, position);
252 void HBasicBlock::SetInitialEnvironment(HEnvironment* env) {
253 DCHECK(!HasEnvironment());
254 DCHECK(first() == NULL);
255 UpdateEnvironment(env);
259 void HBasicBlock::UpdateEnvironment(HEnvironment* env) {
260 last_environment_ = env;
261 graph()->update_maximum_environment_size(env->first_expression_index());
265 void HBasicBlock::SetJoinId(BailoutId ast_id) {
266 int length = predecessors_.length();
268 for (int i = 0; i < length; i++) {
269 HBasicBlock* predecessor = predecessors_[i];
270 DCHECK(predecessor->end()->IsGoto());
271 HSimulate* simulate = HSimulate::cast(predecessor->end()->previous());
273 (predecessor->last_environment()->closure().is_null() ||
274 predecessor->last_environment()->closure()->shared()
275 ->VerifyBailoutId(ast_id)));
276 simulate->set_ast_id(ast_id);
277 predecessor->last_environment()->set_ast_id(ast_id);
282 bool HBasicBlock::Dominates(HBasicBlock* other) const {
283 HBasicBlock* current = other->dominator();
284 while (current != NULL) {
285 if (current == this) return true;
286 current = current->dominator();
292 bool HBasicBlock::EqualToOrDominates(HBasicBlock* other) const {
293 if (this == other) return true;
294 return Dominates(other);
298 int HBasicBlock::LoopNestingDepth() const {
299 const HBasicBlock* current = this;
300 int result = (current->IsLoopHeader()) ? 1 : 0;
301 while (current->parent_loop_header() != NULL) {
302 current = current->parent_loop_header();
309 void HBasicBlock::PostProcessLoopHeader(IterationStatement* stmt) {
310 DCHECK(IsLoopHeader());
312 SetJoinId(stmt->EntryId());
313 if (predecessors()->length() == 1) {
314 // This is a degenerated loop.
315 DetachLoopInformation();
319 // Only the first entry into the loop is from outside the loop. All other
320 // entries must be back edges.
321 for (int i = 1; i < predecessors()->length(); ++i) {
322 loop_information()->RegisterBackEdge(predecessors()->at(i));
327 void HBasicBlock::MarkSuccEdgeUnreachable(int succ) {
328 DCHECK(IsFinished());
329 HBasicBlock* succ_block = end()->SuccessorAt(succ);
331 DCHECK(succ_block->predecessors()->length() == 1);
332 succ_block->MarkUnreachable();
336 void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
337 if (HasPredecessor()) {
338 // Only loop header blocks can have a predecessor added after
339 // instructions have been added to the block (they have phis for all
340 // values in the environment, these phis may be eliminated later).
341 DCHECK(IsLoopHeader() || first_ == NULL);
342 HEnvironment* incoming_env = pred->last_environment();
343 if (IsLoopHeader()) {
344 DCHECK(phis()->length() == incoming_env->length());
345 for (int i = 0; i < phis_.length(); ++i) {
346 phis_[i]->AddInput(incoming_env->values()->at(i));
349 last_environment()->AddIncomingEdge(this, pred->last_environment());
351 } else if (!HasEnvironment() && !IsFinished()) {
352 DCHECK(!IsLoopHeader());
353 SetInitialEnvironment(pred->last_environment()->Copy());
356 predecessors_.Add(pred, zone());
360 void HBasicBlock::AddDominatedBlock(HBasicBlock* block) {
361 DCHECK(!dominated_blocks_.Contains(block));
362 // Keep the list of dominated blocks sorted such that if there is two
363 // succeeding block in this list, the predecessor is before the successor.
365 while (index < dominated_blocks_.length() &&
366 dominated_blocks_[index]->block_id() < block->block_id()) {
369 dominated_blocks_.InsertAt(index, block, zone());
373 void HBasicBlock::AssignCommonDominator(HBasicBlock* other) {
374 if (dominator_ == NULL) {
376 other->AddDominatedBlock(this);
377 } else if (other->dominator() != NULL) {
378 HBasicBlock* first = dominator_;
379 HBasicBlock* second = other;
381 while (first != second) {
382 if (first->block_id() > second->block_id()) {
383 first = first->dominator();
385 second = second->dominator();
387 DCHECK(first != NULL && second != NULL);
390 if (dominator_ != first) {
391 DCHECK(dominator_->dominated_blocks_.Contains(this));
392 dominator_->dominated_blocks_.RemoveElement(this);
394 first->AddDominatedBlock(this);
400 void HBasicBlock::AssignLoopSuccessorDominators() {
401 // Mark blocks that dominate all subsequent reachable blocks inside their
402 // loop. Exploit the fact that blocks are sorted in reverse post order. When
403 // the loop is visited in increasing block id order, if the number of
404 // non-loop-exiting successor edges at the dominator_candidate block doesn't
405 // exceed the number of previously encountered predecessor edges, there is no
406 // path from the loop header to any block with higher id that doesn't go
407 // through the dominator_candidate block. In this case, the
408 // dominator_candidate block is guaranteed to dominate all blocks reachable
409 // from it with higher ids.
410 HBasicBlock* last = loop_information()->GetLastBackEdge();
411 int outstanding_successors = 1; // one edge from the pre-header
412 // Header always dominates everything.
413 MarkAsLoopSuccessorDominator();
414 for (int j = block_id(); j <= last->block_id(); ++j) {
415 HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
416 for (HPredecessorIterator it(dominator_candidate); !it.Done();
418 HBasicBlock* predecessor = it.Current();
419 // Don't count back edges.
420 if (predecessor->block_id() < dominator_candidate->block_id()) {
421 outstanding_successors--;
425 // If more successors than predecessors have been seen in the loop up to
426 // now, it's not possible to guarantee that the current block dominates
427 // all of the blocks with higher IDs. In this case, assume conservatively
428 // that those paths through loop that don't go through the current block
429 // contain all of the loop's dependencies. Also be careful to record
430 // dominator information about the current loop that's being processed,
431 // and not nested loops, which will be processed when
432 // AssignLoopSuccessorDominators gets called on their header.
433 DCHECK(outstanding_successors >= 0);
434 HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header();
435 if (outstanding_successors == 0 &&
436 (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
437 dominator_candidate->MarkAsLoopSuccessorDominator();
439 HControlInstruction* end = dominator_candidate->end();
440 for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
441 HBasicBlock* successor = it.Current();
442 // Only count successors that remain inside the loop and don't loop back
444 if (successor->block_id() > dominator_candidate->block_id() &&
445 successor->block_id() <= last->block_id()) {
446 // Backwards edges must land on loop headers.
447 DCHECK(successor->block_id() > dominator_candidate->block_id() ||
448 successor->IsLoopHeader());
449 outstanding_successors++;
456 int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
457 for (int i = 0; i < predecessors_.length(); ++i) {
458 if (predecessors_[i] == predecessor) return i;
466 void HBasicBlock::Verify() {
467 // Check that every block is finished.
468 DCHECK(IsFinished());
469 DCHECK(block_id() >= 0);
471 // Check that the incoming edges are in edge split form.
472 if (predecessors_.length() > 1) {
473 for (int i = 0; i < predecessors_.length(); ++i) {
474 DCHECK(predecessors_[i]->end()->SecondSuccessor() == NULL);
481 void HLoopInformation::RegisterBackEdge(HBasicBlock* block) {
482 this->back_edges_.Add(block, block->zone());
487 HBasicBlock* HLoopInformation::GetLastBackEdge() const {
489 HBasicBlock* result = NULL;
490 for (int i = 0; i < back_edges_.length(); ++i) {
491 HBasicBlock* cur = back_edges_[i];
492 if (cur->block_id() > max_id) {
493 max_id = cur->block_id();
501 void HLoopInformation::AddBlock(HBasicBlock* block) {
502 if (block == loop_header()) return;
503 if (block->parent_loop_header() == loop_header()) return;
504 if (block->parent_loop_header() != NULL) {
505 AddBlock(block->parent_loop_header());
507 block->set_parent_loop_header(loop_header());
508 blocks_.Add(block, block->zone());
509 for (int i = 0; i < block->predecessors()->length(); ++i) {
510 AddBlock(block->predecessors()->at(i));
518 // Checks reachability of the blocks in this graph and stores a bit in
519 // the BitVector "reachable()" for every block that can be reached
520 // from the start block of the graph. If "dont_visit" is non-null, the given
521 // block is treated as if it would not be part of the graph. "visited_count()"
522 // returns the number of reachable blocks.
523 class ReachabilityAnalyzer BASE_EMBEDDED {
525 ReachabilityAnalyzer(HBasicBlock* entry_block,
527 HBasicBlock* dont_visit)
529 stack_(16, entry_block->zone()),
530 reachable_(block_count, entry_block->zone()),
531 dont_visit_(dont_visit) {
532 PushBlock(entry_block);
536 int visited_count() const { return visited_count_; }
537 const BitVector* reachable() const { return &reachable_; }
540 void PushBlock(HBasicBlock* block) {
541 if (block != NULL && block != dont_visit_ &&
542 !reachable_.Contains(block->block_id())) {
543 reachable_.Add(block->block_id());
544 stack_.Add(block, block->zone());
550 while (!stack_.is_empty()) {
551 HControlInstruction* end = stack_.RemoveLast()->end();
552 for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
553 PushBlock(it.Current());
559 ZoneList<HBasicBlock*> stack_;
560 BitVector reachable_;
561 HBasicBlock* dont_visit_;
565 void HGraph::Verify(bool do_full_verify) const {
566 Heap::RelocationLock relocation_lock(isolate()->heap());
567 AllowHandleDereference allow_deref;
568 AllowDeferredHandleDereference allow_deferred_deref;
569 for (int i = 0; i < blocks_.length(); i++) {
570 HBasicBlock* block = blocks_.at(i);
574 // Check that every block contains at least one node and that only the last
575 // node is a control instruction.
576 HInstruction* current = block->first();
577 DCHECK(current != NULL && current->IsBlockEntry());
578 while (current != NULL) {
579 DCHECK((current->next() == NULL) == current->IsControlInstruction());
580 DCHECK(current->block() == block);
582 current = current->next();
585 // Check that successors are correctly set.
586 HBasicBlock* first = block->end()->FirstSuccessor();
587 HBasicBlock* second = block->end()->SecondSuccessor();
588 DCHECK(second == NULL || first != NULL);
590 // Check that the predecessor array is correct.
592 DCHECK(first->predecessors()->Contains(block));
593 if (second != NULL) {
594 DCHECK(second->predecessors()->Contains(block));
598 // Check that phis have correct arguments.
599 for (int j = 0; j < block->phis()->length(); j++) {
600 HPhi* phi = block->phis()->at(j);
604 // Check that all join blocks have predecessors that end with an
605 // unconditional goto and agree on their environment node id.
606 if (block->predecessors()->length() >= 2) {
608 block->predecessors()->first()->last_environment()->ast_id();
609 for (int k = 0; k < block->predecessors()->length(); k++) {
610 HBasicBlock* predecessor = block->predecessors()->at(k);
611 DCHECK(predecessor->end()->IsGoto() ||
612 predecessor->end()->IsDeoptimize());
613 DCHECK(predecessor->last_environment()->ast_id() == id);
618 // Check special property of first block to have no predecessors.
619 DCHECK(blocks_.at(0)->predecessors()->is_empty());
621 if (do_full_verify) {
622 // Check that the graph is fully connected.
623 ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), NULL);
624 DCHECK(analyzer.visited_count() == blocks_.length());
626 // Check that entry block dominator is NULL.
627 DCHECK(entry_block_->dominator() == NULL);
630 for (int i = 0; i < blocks_.length(); ++i) {
631 HBasicBlock* block = blocks_.at(i);
632 if (block->dominator() == NULL) {
633 // Only start block may have no dominator assigned to.
636 // Assert that block is unreachable if dominator must not be visited.
637 ReachabilityAnalyzer dominator_analyzer(entry_block_,
640 DCHECK(!dominator_analyzer.reachable()->Contains(block->block_id()));
649 HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
651 if (!pointer->is_set()) {
652 // Can't pass GetInvalidContext() to HConstant::New, because that will
653 // recursively call GetConstant
654 HConstant* constant = HConstant::New(zone(), NULL, value);
655 constant->InsertAfter(entry_block()->first());
656 pointer->set(constant);
659 return ReinsertConstantIfNecessary(pointer->get());
663 HConstant* HGraph::ReinsertConstantIfNecessary(HConstant* constant) {
664 if (!constant->IsLinked()) {
665 // The constant was removed from the graph. Reinsert.
666 constant->ClearFlag(HValue::kIsDead);
667 constant->InsertAfter(entry_block()->first());
673 HConstant* HGraph::GetConstant0() {
674 return GetConstant(&constant_0_, 0);
678 HConstant* HGraph::GetConstant1() {
679 return GetConstant(&constant_1_, 1);
683 HConstant* HGraph::GetConstantMinus1() {
684 return GetConstant(&constant_minus1_, -1);
688 #define DEFINE_GET_CONSTANT(Name, name, type, htype, boolean_value) \
689 HConstant* HGraph::GetConstant##Name() { \
690 if (!constant_##name##_.is_set()) { \
691 HConstant* constant = new(zone()) HConstant( \
692 Unique<Object>::CreateImmovable(isolate()->factory()->name##_value()), \
693 Unique<Map>::CreateImmovable(isolate()->factory()->type##_map()), \
695 Representation::Tagged(), \
701 constant->InsertAfter(entry_block()->first()); \
702 constant_##name##_.set(constant); \
704 return ReinsertConstantIfNecessary(constant_##name##_.get()); \
708 DEFINE_GET_CONSTANT(Undefined, undefined, undefined, HType::Undefined(), false)
709 DEFINE_GET_CONSTANT(True, true, boolean, HType::Boolean(), true)
710 DEFINE_GET_CONSTANT(False, false, boolean, HType::Boolean(), false)
711 DEFINE_GET_CONSTANT(Hole, the_hole, the_hole, HType::None(), false)
712 DEFINE_GET_CONSTANT(Null, null, null, HType::Null(), false)
715 #undef DEFINE_GET_CONSTANT
717 #define DEFINE_IS_CONSTANT(Name, name) \
718 bool HGraph::IsConstant##Name(HConstant* constant) { \
719 return constant_##name##_.is_set() && constant == constant_##name##_.get(); \
721 DEFINE_IS_CONSTANT(Undefined, undefined)
722 DEFINE_IS_CONSTANT(0, 0)
723 DEFINE_IS_CONSTANT(1, 1)
724 DEFINE_IS_CONSTANT(Minus1, minus1)
725 DEFINE_IS_CONSTANT(True, true)
726 DEFINE_IS_CONSTANT(False, false)
727 DEFINE_IS_CONSTANT(Hole, the_hole)
728 DEFINE_IS_CONSTANT(Null, null)
730 #undef DEFINE_IS_CONSTANT
733 HConstant* HGraph::GetInvalidContext() {
734 return GetConstant(&constant_invalid_context_, 0xFFFFC0C7);
738 bool HGraph::IsStandardConstant(HConstant* constant) {
739 if (IsConstantUndefined(constant)) return true;
740 if (IsConstant0(constant)) return true;
741 if (IsConstant1(constant)) return true;
742 if (IsConstantMinus1(constant)) return true;
743 if (IsConstantTrue(constant)) return true;
744 if (IsConstantFalse(constant)) return true;
745 if (IsConstantHole(constant)) return true;
746 if (IsConstantNull(constant)) return true;
751 HGraphBuilder::IfBuilder::IfBuilder() : builder_(NULL), needs_compare_(true) {}
754 HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder)
755 : needs_compare_(true) {
760 HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder,
761 HIfContinuation* continuation)
762 : needs_compare_(false), first_true_block_(NULL), first_false_block_(NULL) {
763 InitializeDontCreateBlocks(builder);
764 continuation->Continue(&first_true_block_, &first_false_block_);
768 void HGraphBuilder::IfBuilder::InitializeDontCreateBlocks(
769 HGraphBuilder* builder) {
774 did_else_if_ = false;
778 pending_merge_block_ = false;
779 split_edge_merge_block_ = NULL;
780 merge_at_join_blocks_ = NULL;
781 normal_merge_at_join_block_count_ = 0;
782 deopt_merge_at_join_block_count_ = 0;
786 void HGraphBuilder::IfBuilder::Initialize(HGraphBuilder* builder) {
787 InitializeDontCreateBlocks(builder);
788 HEnvironment* env = builder->environment();
789 first_true_block_ = builder->CreateBasicBlock(env->Copy());
790 first_false_block_ = builder->CreateBasicBlock(env->Copy());
794 HControlInstruction* HGraphBuilder::IfBuilder::AddCompare(
795 HControlInstruction* compare) {
796 DCHECK(did_then_ == did_else_);
798 // Handle if-then-elseif
804 pending_merge_block_ = false;
805 split_edge_merge_block_ = NULL;
806 HEnvironment* env = builder()->environment();
807 first_true_block_ = builder()->CreateBasicBlock(env->Copy());
808 first_false_block_ = builder()->CreateBasicBlock(env->Copy());
810 if (split_edge_merge_block_ != NULL) {
811 HEnvironment* env = first_false_block_->last_environment();
812 HBasicBlock* split_edge = builder()->CreateBasicBlock(env->Copy());
814 compare->SetSuccessorAt(0, split_edge);
815 compare->SetSuccessorAt(1, first_false_block_);
817 compare->SetSuccessorAt(0, first_true_block_);
818 compare->SetSuccessorAt(1, split_edge);
820 builder()->GotoNoSimulate(split_edge, split_edge_merge_block_);
822 compare->SetSuccessorAt(0, first_true_block_);
823 compare->SetSuccessorAt(1, first_false_block_);
825 builder()->FinishCurrentBlock(compare);
826 needs_compare_ = false;
831 void HGraphBuilder::IfBuilder::Or() {
832 DCHECK(!needs_compare_);
835 HEnvironment* env = first_false_block_->last_environment();
836 if (split_edge_merge_block_ == NULL) {
837 split_edge_merge_block_ = builder()->CreateBasicBlock(env->Copy());
838 builder()->GotoNoSimulate(first_true_block_, split_edge_merge_block_);
839 first_true_block_ = split_edge_merge_block_;
841 builder()->set_current_block(first_false_block_);
842 first_false_block_ = builder()->CreateBasicBlock(env->Copy());
846 void HGraphBuilder::IfBuilder::And() {
847 DCHECK(!needs_compare_);
850 HEnvironment* env = first_false_block_->last_environment();
851 if (split_edge_merge_block_ == NULL) {
852 split_edge_merge_block_ = builder()->CreateBasicBlock(env->Copy());
853 builder()->GotoNoSimulate(first_false_block_, split_edge_merge_block_);
854 first_false_block_ = split_edge_merge_block_;
856 builder()->set_current_block(first_true_block_);
857 first_true_block_ = builder()->CreateBasicBlock(env->Copy());
861 void HGraphBuilder::IfBuilder::CaptureContinuation(
862 HIfContinuation* continuation) {
863 DCHECK(!did_else_if_);
867 HBasicBlock* true_block = NULL;
868 HBasicBlock* false_block = NULL;
869 Finish(&true_block, &false_block);
870 DCHECK(true_block != NULL);
871 DCHECK(false_block != NULL);
872 continuation->Capture(true_block, false_block);
874 builder()->set_current_block(NULL);
879 void HGraphBuilder::IfBuilder::JoinContinuation(HIfContinuation* continuation) {
880 DCHECK(!did_else_if_);
883 HBasicBlock* true_block = NULL;
884 HBasicBlock* false_block = NULL;
885 Finish(&true_block, &false_block);
886 merge_at_join_blocks_ = NULL;
887 if (true_block != NULL && !true_block->IsFinished()) {
888 DCHECK(continuation->IsTrueReachable());
889 builder()->GotoNoSimulate(true_block, continuation->true_branch());
891 if (false_block != NULL && !false_block->IsFinished()) {
892 DCHECK(continuation->IsFalseReachable());
893 builder()->GotoNoSimulate(false_block, continuation->false_branch());
900 void HGraphBuilder::IfBuilder::Then() {
904 if (needs_compare_) {
905 // Handle if's without any expressions, they jump directly to the "else"
906 // branch. However, we must pretend that the "then" branch is reachable,
907 // so that the graph builder visits it and sees any live range extending
908 // constructs within it.
909 HConstant* constant_false = builder()->graph()->GetConstantFalse();
910 ToBooleanStub::Types boolean_type = ToBooleanStub::Types();
911 boolean_type.Add(ToBooleanStub::BOOLEAN);
912 HBranch* branch = builder()->New<HBranch>(
913 constant_false, boolean_type, first_true_block_, first_false_block_);
914 builder()->FinishCurrentBlock(branch);
916 builder()->set_current_block(first_true_block_);
917 pending_merge_block_ = true;
921 void HGraphBuilder::IfBuilder::Else() {
925 AddMergeAtJoinBlock(false);
926 builder()->set_current_block(first_false_block_);
927 pending_merge_block_ = true;
932 void HGraphBuilder::IfBuilder::Deopt(const char* reason) {
934 builder()->Add<HDeoptimize>(reason, Deoptimizer::EAGER);
935 AddMergeAtJoinBlock(true);
939 void HGraphBuilder::IfBuilder::Return(HValue* value) {
940 HValue* parameter_count = builder()->graph()->GetConstantMinus1();
941 builder()->FinishExitCurrentBlock(
942 builder()->New<HReturn>(value, parameter_count));
943 AddMergeAtJoinBlock(false);
947 void HGraphBuilder::IfBuilder::AddMergeAtJoinBlock(bool deopt) {
948 if (!pending_merge_block_) return;
949 HBasicBlock* block = builder()->current_block();
950 DCHECK(block == NULL || !block->IsFinished());
951 MergeAtJoinBlock* record = new (builder()->zone())
952 MergeAtJoinBlock(block, deopt, merge_at_join_blocks_);
953 merge_at_join_blocks_ = record;
955 DCHECK(block->end() == NULL);
957 normal_merge_at_join_block_count_++;
959 deopt_merge_at_join_block_count_++;
962 builder()->set_current_block(NULL);
963 pending_merge_block_ = false;
967 void HGraphBuilder::IfBuilder::Finish() {
972 AddMergeAtJoinBlock(false);
975 AddMergeAtJoinBlock(false);
981 void HGraphBuilder::IfBuilder::Finish(HBasicBlock** then_continuation,
982 HBasicBlock** else_continuation) {
985 MergeAtJoinBlock* else_record = merge_at_join_blocks_;
986 if (else_continuation != NULL) {
987 *else_continuation = else_record->block_;
989 MergeAtJoinBlock* then_record = else_record->next_;
990 if (then_continuation != NULL) {
991 *then_continuation = then_record->block_;
993 DCHECK(then_record->next_ == NULL);
997 void HGraphBuilder::IfBuilder::End() {
998 if (captured_) return;
1001 int total_merged_blocks = normal_merge_at_join_block_count_ +
1002 deopt_merge_at_join_block_count_;
1003 DCHECK(total_merged_blocks >= 1);
1004 HBasicBlock* merge_block =
1005 total_merged_blocks == 1 ? NULL : builder()->graph()->CreateBasicBlock();
1007 // Merge non-deopt blocks first to ensure environment has right size for
1009 MergeAtJoinBlock* current = merge_at_join_blocks_;
1010 while (current != NULL) {
1011 if (!current->deopt_ && current->block_ != NULL) {
1012 // If there is only one block that makes it through to the end of the
1013 // if, then just set it as the current block and continue rather then
1014 // creating an unnecessary merge block.
1015 if (total_merged_blocks == 1) {
1016 builder()->set_current_block(current->block_);
1019 builder()->GotoNoSimulate(current->block_, merge_block);
1021 current = current->next_;
1024 // Merge deopt blocks, padding when necessary.
1025 current = merge_at_join_blocks_;
1026 while (current != NULL) {
1027 if (current->deopt_ && current->block_ != NULL) {
1028 current->block_->FinishExit(HAbnormalExit::New(builder()->zone(), NULL),
1029 HSourcePosition::Unknown());
1031 current = current->next_;
1033 builder()->set_current_block(merge_block);
1037 HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder) {
1038 Initialize(builder, NULL, kWhileTrue, NULL);
1042 HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder, HValue* context,
1043 LoopBuilder::Direction direction) {
1044 Initialize(builder, context, direction, builder->graph()->GetConstant1());
1048 HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder, HValue* context,
1049 LoopBuilder::Direction direction,
1050 HValue* increment_amount) {
1051 Initialize(builder, context, direction, increment_amount);
1052 increment_amount_ = increment_amount;
1056 void HGraphBuilder::LoopBuilder::Initialize(HGraphBuilder* builder,
1058 Direction direction,
1059 HValue* increment_amount) {
1062 direction_ = direction;
1063 increment_amount_ = increment_amount;
1066 header_block_ = builder->CreateLoopHeaderBlock();
1069 exit_trampoline_block_ = NULL;
1073 HValue* HGraphBuilder::LoopBuilder::BeginBody(
1075 HValue* terminating,
1076 Token::Value token) {
1077 DCHECK(direction_ != kWhileTrue);
1078 HEnvironment* env = builder_->environment();
1079 phi_ = header_block_->AddNewPhi(env->values()->length());
1080 phi_->AddInput(initial);
1082 builder_->GotoNoSimulate(header_block_);
1084 HEnvironment* body_env = env->Copy();
1085 HEnvironment* exit_env = env->Copy();
1086 // Remove the phi from the expression stack
1089 body_block_ = builder_->CreateBasicBlock(body_env);
1090 exit_block_ = builder_->CreateBasicBlock(exit_env);
1092 builder_->set_current_block(header_block_);
1094 builder_->FinishCurrentBlock(builder_->New<HCompareNumericAndBranch>(
1095 phi_, terminating, token, body_block_, exit_block_));
1097 builder_->set_current_block(body_block_);
1098 if (direction_ == kPreIncrement || direction_ == kPreDecrement) {
1099 HValue* one = builder_->graph()->GetConstant1();
1100 if (direction_ == kPreIncrement) {
1101 increment_ = HAdd::New(zone(), context_, phi_, one);
1103 increment_ = HSub::New(zone(), context_, phi_, one);
1105 increment_->ClearFlag(HValue::kCanOverflow);
1106 builder_->AddInstruction(increment_);
1114 void HGraphBuilder::LoopBuilder::BeginBody(int drop_count) {
1115 DCHECK(direction_ == kWhileTrue);
1116 HEnvironment* env = builder_->environment();
1117 builder_->GotoNoSimulate(header_block_);
1118 builder_->set_current_block(header_block_);
1119 env->Drop(drop_count);
// Jumps out of the loop. Lazily creates the exit trampoline block on the
// first break; for counted loops the normal exit block is also wired into
// the trampoline so both exit paths merge. Leaves current_block() NULL.
1123 void HGraphBuilder::LoopBuilder::Break() {
1124 if (exit_trampoline_block_ == NULL) {
1125 // Its the first time we saw a break.
1126 if (direction_ == kWhileTrue) {
// While-true loops have no exit_block_, so copy the current environment.
1127 HEnvironment* env = builder_->environment()->Copy();
1128 exit_trampoline_block_ = builder_->CreateBasicBlock(env);
1130 HEnvironment* env = exit_block_->last_environment()->Copy();
1131 exit_trampoline_block_ = builder_->CreateBasicBlock(env);
// Merge the condition-exit path into the trampoline as well.
1132 builder_->GotoNoSimulate(exit_block_, exit_trampoline_block_);
1136 builder_->GotoNoSimulate(exit_trampoline_block_);
1137 builder_->set_current_block(NULL);
// Closes the loop body: emits the post-increment/decrement of the counter if
// requested, wires the back edge to the header, and positions the builder at
// the exit (trampoline if any Break() occurred, otherwise the plain exit).
1141 void HGraphBuilder::LoopBuilder::EndBody() {
1144 if (direction_ == kPostIncrement || direction_ == kPostDecrement) {
1145 if (direction_ == kPostIncrement) {
1146 increment_ = HAdd::New(zone(), context_, phi_, increment_amount_);
1148 increment_ = HSub::New(zone(), context_, phi_, increment_amount_);
// Counter stays within the loop bounds, so overflow checking is unnecessary.
1150 increment_->ClearFlag(HValue::kCanOverflow);
1151 builder_->AddInstruction(increment_);
1154 if (direction_ != kWhileTrue) {
1155 // Push the new increment value on the expression stack to merge into
1157 builder_->environment()->Push(increment_);
// Close the loop: branch back to the header and register the back edge.
1159 HBasicBlock* last_block = builder_->current_block();
1160 builder_->GotoNoSimulate(last_block, header_block_);
1161 header_block_->loop_information()->RegisterBackEdge(last_block);
1163 if (exit_trampoline_block_ != NULL) {
1164 builder_->set_current_block(exit_trampoline_block_);
1166 builder_->set_current_block(exit_block_);
// Allocates the HGraph, runs BuildGraph() starting at the entry block, and
// finalizes uniqueness information. Returns NULL when graph building bails
// out so the caller can fall back to non-optimized code.
1172 HGraph* HGraphBuilder::CreateGraph() {
1173 graph_ = new(zone()) HGraph(info_);
1174 if (FLAG_hydrogen_stats) isolate()->GetHStatistics()->Initialize(info_);
1175 CompilationPhase phase("H_Block building", info_);
1176 set_current_block(graph()->entry_block());
1177 if (!BuildGraph()) return NULL;
1178 graph()->FinalizeUniqueness();
// Appends `instr` to the current block at the current source position.
// Inside a NoObservableSideEffectsScope the instruction is flagged so later
// passes know it cannot be observed by user code.
1183 HInstruction* HGraphBuilder::AddInstruction(HInstruction* instr) {
1184 DCHECK(current_block() != NULL);
// With position tracking on, optimized code must have a known position.
1185 DCHECK(!FLAG_hydrogen_track_positions ||
1186 !position_.IsUnknown() ||
1187 !info_->IsOptimizing());
1188 current_block()->AddInstruction(instr, source_position());
1189 if (graph()->IsInsideNoSideEffectsScope()) {
1190 instr->SetFlag(HValue::kHasNoObservableSideEffects);
// Terminates the current block with the control instruction `last`. After a
// Return or AbnormalExit there is no fall-through, so current_block is
// cleared to catch accidental further emission.
1196 void HGraphBuilder::FinishCurrentBlock(HControlInstruction* last) {
1197 DCHECK(!FLAG_hydrogen_track_positions ||
1198 !info_->IsOptimizing() ||
1199 !position_.IsUnknown());
1200 current_block()->Finish(last, source_position());
1201 if (last->IsReturn() || last->IsAbnormalExit()) {
1202 set_current_block(NULL);
// Like FinishCurrentBlock(), but ends the block via FinishExit (used for
// blocks that leave the graph). Clears current_block after Return/abnormal
// exit, mirroring FinishCurrentBlock().
1207 void HGraphBuilder::FinishExitCurrentBlock(HControlInstruction* instruction) {
1208 DCHECK(!FLAG_hydrogen_track_positions || !info_->IsOptimizing() ||
1209 !position_.IsUnknown());
1210 current_block()->FinishExit(instruction, source_position());
1211 if (instruction->IsReturn() || instruction->IsAbnormalExit()) {
1212 set_current_block(NULL);
// Emits a load-add-store sequence that bumps a native StatsCounter, guarded
// by the --native-code-counters flag and the counter being enabled.
1217 void HGraphBuilder::AddIncrementCounter(StatsCounter* counter) {
1218 if (FLAG_native_code_counters && counter->Enabled()) {
1219 HValue* reference = Add<HConstant>(ExternalReference(counter));
1220 HValue* old_value = Add<HLoadNamedField>(
1221 reference, static_cast<HValue*>(NULL), HObjectAccess::ForCounter());
1222 HValue* new_value = AddUncasted<HAdd>(old_value, graph()->GetConstant1());
1223 new_value->ClearFlag(HValue::kCanOverflow); // Ignore counter overflow
1224 Add<HStoreNamedField>(reference, HObjectAccess::ForCounter(),
1225 new_value, STORE_TO_INITIALIZED_ENTRY);
// Emits an HSimulate for bailout id `id`. Forbidden inside a
// NoObservableSideEffectsScope, where no deopt points may be introduced.
1230 void HGraphBuilder::AddSimulate(BailoutId id,
1231 RemovableSimulate removable) {
1232 DCHECK(current_block() != NULL);
1233 DCHECK(!graph()->IsInsideNoSideEffectsScope());
1234 current_block()->AddNewSimulate(id, source_position(), removable);
// Creates a fresh basic block seeded with the given initial environment.
1238 HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
1239 HBasicBlock* b = graph()->CreateBasicBlock();
1240 b->SetInitialEnvironment(env);
// Creates a loop-header block: its environment is a loop-header copy of the
// current one (values become phis) and loop information is attached.
1245 HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() {
1246 HBasicBlock* header = graph()->CreateBasicBlock();
1247 HEnvironment* entry_env = environment()->CopyAsLoopHeader(header);
1248 header->SetInitialEnvironment(entry_env);
1249 header->AttachLoopInformation();
// Loads object->map()->bit_field2 and decodes the ElementsKind bits from it.
1254 HValue* HGraphBuilder::BuildGetElementsKind(HValue* object) {
1255 HValue* map = Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
1256 HObjectAccess::ForMap());
1258 HValue* bit_field2 = Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
1259 HObjectAccess::ForMapBitField2());
1260 return BuildDecodeField<Map::ElementsKindBits>(bit_field2);
// Emits a heap-object (non-smi) check unless the type already guarantees it.
1264 HValue* HGraphBuilder::BuildCheckHeapObject(HValue* obj) {
1265 if (obj->type().IsHeapObject()) return obj;
1266 return Add<HCheckHeapObject>(obj);
// Ends the current block with an unconditional eager deopt followed by an
// abnormal exit, i.e. this path never returns to optimized code.
1270 void HGraphBuilder::FinishExitWithHardDeoptimization(const char* reason) {
1271 Add<HDeoptimize>(reason, Deoptimizer::EAGER);
1272 FinishExitCurrentBlock(New<HAbnormalExit>());
// Ensures `string` is a String: if its HType does not already prove it,
// emits a heap-object check followed by an IS_STRING instance-type check.
1276 HValue* HGraphBuilder::BuildCheckString(HValue* string) {
1277 if (!string->type().IsString()) {
// A constant that reached this path must not already hold a string value.
1278 DCHECK(!string->IsConstant() ||
1279 !HConstant::cast(string)->HasStringValue());
1280 BuildCheckHeapObject(string);
1281 return Add<HCheckInstanceType>(string, HCheckInstanceType::IS_STRING);
// Implements receiver wrapping for sloppy-mode calls: JSObject receivers
// pass through, and receivers of strict or native constant functions are
// left untouched; otherwise an HWrapReceiver is emitted.
1287 HValue* HGraphBuilder::BuildWrapReceiver(HValue* object, HValue* function) {
1288 if (object->type().IsJSObject()) return object;
1289 if (function->IsConstant() &&
1290 HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
1291 Handle<JSFunction> f = Handle<JSFunction>::cast(
1292 HConstant::cast(function)->handle(isolate()));
1293 SharedFunctionInfo* shared = f->shared();
// Strict and native functions take the receiver as-is (no wrapping).
1294 if (shared->strict_mode() == STRICT || shared->native()) return object;
1296 return Add<HWrapReceiver>(object, function);
// Handles a keyed store/load at `key` that may lie past the array length:
// grows the backing store (bounded by JSObject::kMaxGap) and bumps the
// array length when needed, otherwise just bounds-checks. Returns the
// (possibly new) elements array via the environment stack.
1300 HValue* HGraphBuilder::BuildCheckForCapacityGrow(
1307 PropertyAccessType access_type) {
1308 IfBuilder length_checker(this);
// Holey kinds allow key >= length (gap writes); packed kinds only key == length.
1310 Token::Value token = IsHoleyElementsKind(kind) ? Token::GTE : Token::EQ;
1311 length_checker.If<HCompareNumericAndBranch>(key, length, token);
1313 length_checker.Then();
1315 HValue* current_capacity = AddLoadFixedArrayLength(elements);
1317 IfBuilder capacity_checker(this);
1319 capacity_checker.If<HCompareNumericAndBranch>(key, current_capacity,
1321 capacity_checker.Then();
// Refuse to grow past current_capacity + kMaxGap (deopt via bounds check).
1323 HValue* max_gap = Add<HConstant>(static_cast<int32_t>(JSObject::kMaxGap));
1324 HValue* max_capacity = AddUncasted<HAdd>(current_capacity, max_gap);
1326 Add<HBoundsCheck>(key, max_capacity);
1328 HValue* new_capacity = BuildNewElementsCapacity(key);
1329 HValue* new_elements = BuildGrowElementsCapacity(object, elements,
1333 environment()->Push(new_elements);
1334 capacity_checker.Else();
1336 environment()->Push(elements);
1337 capacity_checker.End();
// Stored past the old length: new length is key + 1.
1340 HValue* new_length = AddUncasted<HAdd>(key, graph_->GetConstant1());
1341 new_length->ClearFlag(HValue::kCanOverflow);
1343 Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(kind),
1347 if (access_type == STORE && kind == FAST_SMI_ELEMENTS) {
1348 HValue* checked_elements = environment()->Top();
1350 // Write zero to ensure that the new element is initialized with some smi.
1351 Add<HStoreKeyed>(checked_elements, key, graph()->GetConstant0(), kind);
1354 length_checker.Else();
// In-bounds path: only a bounds check is required.
1355 Add<HBoundsCheck>(key, length);
1357 environment()->Push(elements);
1358 length_checker.End();
1360 return environment()->Pop();
// Copy-on-write support for element stores: if the backing store has the
// fixed-COW-array map, copy it into a fresh writable array of the same
// kind/length; otherwise keep the existing elements. Result via env stack.
1364 HValue* HGraphBuilder::BuildCopyElementsOnWrite(HValue* object,
1368 Factory* factory = isolate()->factory();
1370 IfBuilder cow_checker(this);
1372 cow_checker.If<HCompareMap>(elements, factory->fixed_cow_array_map());
1375 HValue* capacity = AddLoadFixedArrayLength(elements);
// Growing to the same capacity effectively performs the COW copy.
1377 HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind,
1378 kind, length, capacity);
1380 environment()->Push(new_elements);
1384 environment()->Push(elements);
1388 return environment()->Pop();
// Transitions `object` from `from_kind` to `to_kind` elements. Traps any
// allocation memento when the transition must be tracked; for non-trivial
// transitions (e.g. smi -> double) the backing store is re-allocated and
// copied unless it is the empty fixed array. Finally installs the new map.
1392 void HGraphBuilder::BuildTransitionElementsKind(HValue* object,
1394 ElementsKind from_kind,
1395 ElementsKind to_kind,
// Holeyness must be preserved: holey may not transition to packed here.
1397 DCHECK(!IsFastHoleyElementsKind(from_kind) ||
1398 IsFastHoleyElementsKind(to_kind));
1400 if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
1401 Add<HTrapAllocationMemento>(object);
// A simple map change can reuse the elements; otherwise copy them over.
1404 if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
1405 HInstruction* elements = AddLoadElements(object);
1407 HInstruction* empty_fixed_array = Add<HConstant>(
1408 isolate()->factory()->empty_fixed_array());
1410 IfBuilder if_builder(this);
// Nothing to copy when the object still has the empty fixed array.
1412 if_builder.IfNot<HCompareObjectEqAndBranch>(elements, empty_fixed_array);
1416 HInstruction* elements_length = AddLoadFixedArrayLength(elements);
1418 HInstruction* array_length = is_jsarray
1419 ? Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
1420 HObjectAccess::ForArrayLength(from_kind))
1423 BuildGrowElementsCapacity(object, elements, from_kind, to_kind,
1424 array_length, elements_length);
1429 Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map);
// Deopt-checks that `receiver` is a plain JSObject (instance type within
// [JS_OBJECT_TYPE, LAST_JS_OBJECT_TYPE]) and that none of the bits in
// `bit_field_mask` are set — instance type and bit field are read with a
// single 16-bit load since they are adjacent in the map.
1433 void HGraphBuilder::BuildJSObjectCheck(HValue* receiver,
1434 int bit_field_mask) {
1435 // Check that the object isn't a smi.
1436 Add<HCheckHeapObject>(receiver);
1438 // Get the map of the receiver.
1439 HValue* map = Add<HLoadNamedField>(receiver, static_cast<HValue*>(NULL),
1440 HObjectAccess::ForMap());
1442 // Check the instance type and if an access check is needed, this can be
1443 // done with a single load, since both bytes are adjacent in the map.
1444 HObjectAccess access(HObjectAccess::ForMapInstanceTypeAndBitField());
1445 HValue* instance_type_and_bit_field =
1446 Add<HLoadNamedField>(map, static_cast<HValue*>(NULL), access);
// Low byte: instance type; high byte: bit field bits selected by the mask.
1448 HValue* mask = Add<HConstant>(0x00FF | (bit_field_mask << 8));
1449 HValue* and_result = AddUncasted<HBitwise>(Token::BIT_AND,
1450 instance_type_and_bit_field,
// The range check doubles as the "no masked bit-field bits set" check:
// any set high bit pushes sub_result outside the accepted bounds.
1452 HValue* sub_result = AddUncasted<HSub>(and_result,
1453 Add<HConstant>(JS_OBJECT_TYPE));
1454 Add<HBoundsCheck>(sub_result,
1455 Add<HConstant>(LAST_JS_OBJECT_TYPE + 1 - JS_OBJECT_TYPE));
// Normalizes a keyed-access key for dictionary probing. On the continuation,
// if-true carries a smi element index and if-false carries a unique name
// (symbol or internalized string); anything else deopts or goes to runtime.
1459 void HGraphBuilder::BuildKeyedIndexCheck(HValue* key,
1460 HIfContinuation* join_continuation) {
1461 // The sometimes unintuitively backward ordering of the ifs below is
1462 // convoluted, but necessary. All of the paths must guarantee that the
1463 // if-true of the continuation returns a smi element index and the if-false of
1464 // the continuation returns either a symbol or a unique string key. All other
1465 // object types cause a deopt to fall back to the runtime.
1467 IfBuilder key_smi_if(this);
1468 key_smi_if.If<HIsSmiAndBranch>(key);
1471 Push(key); // Nothing to do, just continue to true of continuation.
// Non-smi: inspect the map's instance type to classify the key.
1475 HValue* map = Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
1476 HObjectAccess::ForMap());
1477 HValue* instance_type =
1478 Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
1479 HObjectAccess::ForMapInstanceType());
1481 // Non-unique string, check for a string with a hash code that is actually
1483 STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
1484 IfBuilder not_string_or_name_if(this);
1485 not_string_or_name_if.If<HCompareNumericAndBranch>(
1487 Add<HConstant>(LAST_UNIQUE_NAME_TYPE),
1490 not_string_or_name_if.Then();
1492 // Non-smi, non-Name, non-String: Try to convert to smi in case of
1494 // TODO(danno): This could call some variant of ToString
1495 Push(AddUncasted<HForceRepresentation>(key, Representation::Smi()));
1497 not_string_or_name_if.Else();
1499 // String or Name: check explicitly for Name, they can short-circuit
1500 // directly to unique non-index key path.
1501 IfBuilder not_symbol_if(this);
1502 not_symbol_if.If<HCompareNumericAndBranch>(
1504 Add<HConstant>(SYMBOL_TYPE),
1507 not_symbol_if.Then();
1509 // String: check whether the String is a String of an index. If it is,
1510 // extract the index value from the hash.
1512 Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
1513 HObjectAccess::ForNameHashField());
1514 HValue* not_index_mask = Add<HConstant>(static_cast<int>(
1515 String::kContainsCachedArrayIndexMask));
// Zero test result means the hash field caches an array index.
1517 HValue* not_index_test = AddUncasted<HBitwise>(
1518 Token::BIT_AND, hash, not_index_mask);
1520 IfBuilder string_index_if(this);
1521 string_index_if.If<HCompareNumericAndBranch>(not_index_test,
1522 graph()->GetConstant0(),
1524 string_index_if.Then();
1526 // String with index in hash: extract string and merge to index path.
1527 Push(BuildDecodeField<String::ArrayIndexValueBits>(hash));
1529 string_index_if.Else();
1531 // Key is a non-index String, check for uniqueness/internalization.
1532 // If it's not internalized yet, internalize it now.
1533 HValue* not_internalized_bit = AddUncasted<HBitwise>(
1536 Add<HConstant>(static_cast<int>(kIsNotInternalizedMask)));
1538 IfBuilder internalized(this);
1539 internalized.If<HCompareNumericAndBranch>(not_internalized_bit,
1540 graph()->GetConstant0(),
1542 internalized.Then();
1545 internalized.Else();
// Not internalized yet: call the runtime to internalize it now.
1546 Add<HPushArguments>(key);
1547 HValue* intern_key = Add<HCallRuntime>(
1548 isolate()->factory()->empty_string(),
1549 Runtime::FunctionForId(Runtime::kInternalizeString), 1);
1553 // Key guaranteed to be a unique string
1555 string_index_if.JoinContinuation(join_continuation);
1557 not_symbol_if.Else();
1559 Push(key); // Key is symbol
1561 not_symbol_if.JoinContinuation(join_continuation);
1563 not_string_or_name_if.JoinContinuation(join_continuation);
1565 key_smi_if.JoinContinuation(join_continuation);
// Deopts when the receiver's instance type falls into the (contiguous)
// global-object range [JS_GLOBAL_OBJECT_TYPE, JS_BUILTINS_OBJECT_TYPE].
1569 void HGraphBuilder::BuildNonGlobalObjectCheck(HValue* receiver) {
1570 // Get the the instance type of the receiver, and make sure that it is
1571 // not one of the global object types.
1572 HValue* map = Add<HLoadNamedField>(receiver, static_cast<HValue*>(NULL),
1573 HObjectAccess::ForMap());
1574 HValue* instance_type =
1575 Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
1576 HObjectAccess::ForMapInstanceType());
// The two global types must be adjacent for the range comparison below.
1577 STATIC_ASSERT(JS_BUILTINS_OBJECT_TYPE == JS_GLOBAL_OBJECT_TYPE + 1);
1578 HValue* min_global_type = Add<HConstant>(JS_GLOBAL_OBJECT_TYPE);
1579 HValue* max_global_type = Add<HConstant>(JS_BUILTINS_OBJECT_TYPE);
1581 IfBuilder if_global_object(this);
1582 if_global_object.If<HCompareNumericAndBranch>(instance_type,
1585 if_global_object.And();
1586 if_global_object.If<HCompareNumericAndBranch>(instance_type,
1589 if_global_object.ThenDeopt("receiver was a global object");
1590 if_global_object.End();
// Captures a continuation whose true-branch means `object` has dictionary
// (slow-mode) properties, detected by the properties backing store having
// the hash-table map.
1594 void HGraphBuilder::BuildTestForDictionaryProperties(
1596 HIfContinuation* continuation) {
1597 HValue* properties = Add<HLoadNamedField>(
1598 object, static_cast<HValue*>(NULL),
1599 HObjectAccess::ForPropertiesPointer());
1600 HValue* properties_map =
1601 Add<HLoadNamedField>(properties, static_cast<HValue*>(NULL),
1602 HObjectAccess::ForMap());
1603 HValue* hash_map = Add<HLoadRoot>(Heap::kHashTableMapRootIndex);
1604 IfBuilder builder(this);
1605 builder.If<HCompareObjectEqAndBranch>(properties_map, hash_map);
1606 builder.CaptureContinuation(continuation);
// Computes the KeyedLookupCache bucket index: shifted map word XOR shifted
// string hash, masked to the cache capacity.
1610 HValue* HGraphBuilder::BuildKeyedLookupCacheHash(HValue* object,
1612 // Load the map of the receiver, compute the keyed lookup cache hash
1613 // based on 32 bits of the map pointer and the string hash.
1614 HValue* object_map =
1615 Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
1616 HObjectAccess::ForMapAsInteger32());
1617 HValue* shifted_map = AddUncasted<HShr>(
1618 object_map, Add<HConstant>(KeyedLookupCache::kMapHashShift));
1619 HValue* string_hash =
1620 Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
1621 HObjectAccess::ForStringHashField());
1622 HValue* shifted_hash = AddUncasted<HShr>(
1623 string_hash, Add<HConstant>(String::kHashShift));
1624 HValue* xor_result = AddUncasted<HBitwise>(Token::BIT_XOR, shifted_map,
// Combine capacity and hash masks so the result indexes within the cache.
1626 int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
1627 return AddUncasted<HBitwise>(Token::BIT_AND, xor_result,
1628 Add<HConstant>(mask));
// Hydrogen version of the seeded integer hash used for number-dictionary
// probing (mirrors ComputeIntegerHash): seed XOR, then a fixed sequence of
// shift/add/xor mixing steps, as annotated below.
1632 HValue* HGraphBuilder::BuildElementIndexHash(HValue* index) {
1633 int32_t seed_value = static_cast<uint32_t>(isolate()->heap()->HashSeed());
1634 HValue* seed = Add<HConstant>(seed_value);
1635 HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, index, seed);
1637 // hash = ~hash + (hash << 15);
1638 HValue* shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(15));
// ~hash is computed as hash XOR -1.
1639 HValue* not_hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash,
1640 graph()->GetConstantMinus1());
1641 hash = AddUncasted<HAdd>(shifted_hash, not_hash);
1643 // hash = hash ^ (hash >> 12);
1644 shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(12));
1645 hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
1647 // hash = hash + (hash << 2);
1648 shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(2));
1649 hash = AddUncasted<HAdd>(hash, shifted_hash);
1651 // hash = hash ^ (hash >> 4);
1652 shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(4));
1653 hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
1655 // hash = hash * 2057;
1656 hash = AddUncasted<HMul>(hash, Add<HConstant>(2057));
// Wrap-around multiply is intended; hashing ignores overflow.
1657 hash->ClearFlag(HValue::kCanOverflow);
1659 // hash = hash ^ (hash >> 16);
1660 shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(16));
1661 return AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
// Inline dictionary-element lookup: open-addressed probe loop over the
// SeededNumberDictionary backing store. Finds the entry for `key` (updating
// non-internalized string keys in place), returns its value when the details
// bits are trivial, and otherwise falls back to Runtime::kKeyedGetProperty.
1665 HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoad(HValue* receiver,
// Capacity is stored in-band at kCapacityIndex of the dictionary array.
1669 HValue* capacity = Add<HLoadKeyed>(
1671 Add<HConstant>(NameDictionary::kCapacityIndex),
1672 static_cast<HValue*>(NULL),
// Capacity is a power of two, so (capacity - 1) masks probes into range.
1675 HValue* mask = AddUncasted<HSub>(capacity, graph()->GetConstant1());
1676 mask->ChangeRepresentation(Representation::Integer32());
1677 mask->ClearFlag(HValue::kCanOverflow);
// Quadratic probing state: entry is the probe position, count the step.
1679 HValue* entry = hash;
1680 HValue* count = graph()->GetConstant1();
1684 HIfContinuation return_or_loop_continuation(graph()->CreateBasicBlock(),
1685 graph()->CreateBasicBlock());
1686 HIfContinuation found_key_match_continuation(graph()->CreateBasicBlock(),
1687 graph()->CreateBasicBlock());
1688 LoopBuilder probe_loop(this);
1689 probe_loop.BeginBody(2); // Drop entry, count from last environment to
1690 // appease live range building without simulates.
1694 entry = AddUncasted<HBitwise>(Token::BIT_AND, entry, mask);
1695 int entry_size = SeededNumberDictionary::kEntrySize;
1696 HValue* base_index = AddUncasted<HMul>(entry, Add<HConstant>(entry_size));
1697 base_index->ClearFlag(HValue::kCanOverflow);
1698 int start_offset = SeededNumberDictionary::kElementsStartIndex;
1700 AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset));
1701 key_index->ClearFlag(HValue::kCanOverflow);
1703 HValue* candidate_key = Add<HLoadKeyed>(
1704 elements, key_index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
1705 IfBuilder if_undefined(this);
1706 if_undefined.If<HCompareObjectEqAndBranch>(candidate_key,
1707 graph()->GetConstantUndefined());
1708 if_undefined.Then();
1710 // element == undefined means "not found". Call the runtime.
1711 // TODO(jkummerow): walk the prototype chain instead.
1712 Add<HPushArguments>(receiver, key);
1713 Push(Add<HCallRuntime>(isolate()->factory()->empty_string(),
1714 Runtime::FunctionForId(Runtime::kKeyedGetProperty),
1717 if_undefined.Else();
1719 IfBuilder if_match(this);
1720 if_match.If<HCompareObjectEqAndBranch>(candidate_key, key);
1724 // Update non-internalized string in the dictionary with internalized key?
1725 IfBuilder if_update_with_internalized(this);
1727 if_update_with_internalized.IfNot<HIsSmiAndBranch>(candidate_key);
1728 if_update_with_internalized.And();
1729 HValue* map = AddLoadMap(candidate_key, smi_check);
1730 HValue* instance_type = Add<HLoadNamedField>(
1731 map, static_cast<HValue*>(NULL), HObjectAccess::ForMapInstanceType());
1732 HValue* not_internalized_bit = AddUncasted<HBitwise>(
1733 Token::BIT_AND, instance_type,
1734 Add<HConstant>(static_cast<int>(kIsNotInternalizedMask)));
1735 if_update_with_internalized.If<HCompareNumericAndBranch>(
1736 not_internalized_bit, graph()->GetConstant0(), Token::NE);
1737 if_update_with_internalized.And();
// The hole marks deleted entries; those must not be treated as matches.
1738 if_update_with_internalized.IfNot<HCompareObjectEqAndBranch>(
1739 candidate_key, graph()->GetConstantHole());
1740 if_update_with_internalized.AndIf<HStringCompareAndBranch>(candidate_key,
1742 if_update_with_internalized.Then();
1743 // Replace a key that is a non-internalized string by the equivalent
1744 // internalized string for faster further lookups.
1745 Add<HStoreKeyed>(elements, key_index, key, FAST_ELEMENTS);
1746 if_update_with_internalized.Else();
1748 if_update_with_internalized.JoinContinuation(&found_key_match_continuation);
1749 if_match.JoinContinuation(&found_key_match_continuation);
1751 IfBuilder found_key_match(this, &found_key_match_continuation);
1752 found_key_match.Then();
1753 // Key at current probe matches. Relevant bits in the |details| field must
1754 // be zero, otherwise the dictionary element requires special handling.
1755 HValue* details_index =
1756 AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset + 2));
1757 details_index->ClearFlag(HValue::kCanOverflow);
1758 HValue* details = Add<HLoadKeyed>(
1759 elements, details_index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
1760 int details_mask = PropertyDetails::TypeField::kMask |
1761 PropertyDetails::DeletedField::kMask;
1762 details = AddUncasted<HBitwise>(Token::BIT_AND, details,
1763 Add<HConstant>(details_mask));
1764 IfBuilder details_compare(this);
1765 details_compare.If<HCompareNumericAndBranch>(
1766 details, graph()->GetConstant0(), Token::EQ);
1767 details_compare.Then();
// Plain data entry: the value sits one slot after the key.
1768 HValue* result_index =
1769 AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset + 1));
1770 result_index->ClearFlag(HValue::kCanOverflow);
1771 Push(Add<HLoadKeyed>(elements, result_index, static_cast<HValue*>(NULL),
1773 details_compare.Else();
// Accessor/deleted entry: defer to the runtime.
1774 Add<HPushArguments>(receiver, key);
1775 Push(Add<HCallRuntime>(isolate()->factory()->empty_string(),
1776 Runtime::FunctionForId(Runtime::kKeyedGetProperty),
1778 details_compare.End();
1780 found_key_match.Else();
1781 found_key_match.JoinContinuation(&return_or_loop_continuation);
1783 if_undefined.JoinContinuation(&return_or_loop_continuation);
1785 IfBuilder return_or_loop(this, &return_or_loop_continuation);
1786 return_or_loop.Then();
1789 return_or_loop.Else();
// No match at this probe: advance by `count` and grow the step (quadratic).
1790 entry = AddUncasted<HAdd>(entry, count);
1791 entry->ClearFlag(HValue::kCanOverflow);
1792 count = AddUncasted<HAdd>(count, graph()->GetConstant1());
1793 count->ClearFlag(HValue::kCanOverflow);
1797 probe_loop.EndBody();
1799 return_or_loop.End();
// Allocates and initializes a JSRegExpResult of `length` elements: bounds-
// checks the length, allocates the result object and its FAST_ELEMENTS
// backing store, wires map/properties/elements/length plus the regexp-
// specific index/input fields, and fills the elements with undefined.
1805 HValue* HGraphBuilder::BuildRegExpConstructResult(HValue* length,
// Whole construction is side-effect-free from JS's point of view.
1808 NoObservableSideEffectsScope scope(this);
1809 HConstant* max_length = Add<HConstant>(JSObject::kInitialMaxFastElementArray);
1810 Add<HBoundsCheck>(length, max_length);
1812 // Generate size calculation code here in order to make it dominate
1813 // the JSRegExpResult allocation.
1814 ElementsKind elements_kind = FAST_ELEMENTS;
1815 HValue* size = BuildCalculateElementsSize(elements_kind, length);
1817 // Allocate the JSRegExpResult and the FixedArray in one step.
1818 HValue* result = Add<HAllocate>(
1819 Add<HConstant>(JSRegExpResult::kSize), HType::JSArray(),
1820 NOT_TENURED, JS_ARRAY_TYPE);
1822 // Initialize the JSRegExpResult header.
// The result map lives in the native context's REGEXP_RESULT_MAP slot.
1823 HValue* global_object = Add<HLoadNamedField>(
1824 context(), static_cast<HValue*>(NULL),
1825 HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
1826 HValue* native_context = Add<HLoadNamedField>(
1827 global_object, static_cast<HValue*>(NULL),
1828 HObjectAccess::ForGlobalObjectNativeContext());
1829 Add<HStoreNamedField>(
1830 result, HObjectAccess::ForMap(),
1831 Add<HLoadNamedField>(
1832 native_context, static_cast<HValue*>(NULL),
1833 HObjectAccess::ForContextSlot(Context::REGEXP_RESULT_MAP_INDEX)));
1834 HConstant* empty_fixed_array =
1835 Add<HConstant>(isolate()->factory()->empty_fixed_array());
1836 Add<HStoreNamedField>(
1837 result, HObjectAccess::ForJSArrayOffset(JSArray::kPropertiesOffset),
1839 Add<HStoreNamedField>(
1840 result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
1842 Add<HStoreNamedField>(
1843 result, HObjectAccess::ForJSArrayOffset(JSArray::kLengthOffset), length);
1845 // Initialize the additional fields.
1846 Add<HStoreNamedField>(
1847 result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kIndexOffset),
1849 Add<HStoreNamedField>(
1850 result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kInputOffset),
1853 // Allocate and initialize the elements header.
1854 HAllocate* elements = BuildAllocateElements(elements_kind, size);
1855 BuildInitializeElementsHeader(elements, elements_kind, length);
// Give the allocation folding a static upper bound if it has none yet.
1857 if (!elements->has_size_upper_bound()) {
1858 HConstant* size_in_bytes_upper_bound = EstablishElementsAllocationSize(
1859 elements_kind, max_length->Integer32Value());
1860 elements->set_size_upper_bound(size_in_bytes_upper_bound);
1863 Add<HStoreNamedField>(
1864 result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
1867 // Initialize the elements contents with undefined.
1868 BuildFillElementsWithValue(
1869 elements, elements_kind, graph()->GetConstant0(), length,
1870 graph()->GetConstantUndefined());
// Converts a number to a string via the number-string cache. Constants fold
// at compile time; smis and heap numbers are hashed into the cache, and a
// hit loads the cached string. Misses (or unexpected types when `type` is
// narrow enough to deopt) fall back to kNumberToStringSkipCache.
1876 HValue* HGraphBuilder::BuildNumberToString(HValue* object, Type* type) {
1877 NoObservableSideEffectsScope scope(this);
1879 // Convert constant numbers at compile time.
1880 if (object->IsConstant() && HConstant::cast(object)->HasNumberValue()) {
1881 Handle<Object> number = HConstant::cast(object)->handle(isolate());
1882 Handle<String> result = isolate()->factory()->NumberToString(number);
1883 return Add<HConstant>(result);
1886 // Create a joinable continuation.
1887 HIfContinuation found(graph()->CreateBasicBlock(),
1888 graph()->CreateBasicBlock());
1890 // Load the number string cache.
1891 HValue* number_string_cache =
1892 Add<HLoadRoot>(Heap::kNumberStringCacheRootIndex);
1894 // Make the hash mask from the length of the number string cache. It
1895 // contains two elements (number and string) for each cache entry.
1896 HValue* mask = AddLoadFixedArrayLength(number_string_cache);
1897 mask->set_type(HType::Smi());
// mask = (length >> 1) - 1: entries are pairs, capacity a power of two.
1898 mask = AddUncasted<HSar>(mask, graph()->GetConstant1());
1899 mask = AddUncasted<HSub>(mask, graph()->GetConstant1());
1901 // Check whether object is a smi.
1902 IfBuilder if_objectissmi(this);
1903 if_objectissmi.If<HIsSmiAndBranch>(object);
1904 if_objectissmi.Then();
1906 // Compute hash for smi similar to smi_get_hash().
1907 HValue* hash = AddUncasted<HBitwise>(Token::BIT_AND, object, mask);
// key_index = hash * 2: keys sit at even indices, values at odd.
1910 HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
1911 HValue* key = Add<HLoadKeyed>(number_string_cache, key_index,
1912 static_cast<HValue*>(NULL),
1913 FAST_ELEMENTS, ALLOW_RETURN_HOLE);
1915 // Check if object == key.
1916 IfBuilder if_objectiskey(this);
1917 if_objectiskey.If<HCompareObjectEqAndBranch>(object, key);
1918 if_objectiskey.Then();
1920 // Make the key_index available.
1923 if_objectiskey.JoinContinuation(&found);
1925 if_objectissmi.Else();
// `type` claimed signed-small; reaching here with a non-smi must deopt.
1927 if (type->Is(Type::SignedSmall())) {
1928 if_objectissmi.Deopt("Expected smi");
1930 // Check if the object is a heap number.
1931 IfBuilder if_objectisnumber(this);
1932 HValue* objectisnumber = if_objectisnumber.If<HCompareMap>(
1933 object, isolate()->factory()->heap_number_map());
1934 if_objectisnumber.Then();
1936 // Compute hash for heap number similar to double_get_hash().
1937 HValue* low = Add<HLoadNamedField>(
1938 object, objectisnumber,
1939 HObjectAccess::ForHeapNumberValueLowestBits());
1940 HValue* high = Add<HLoadNamedField>(
1941 object, objectisnumber,
1942 HObjectAccess::ForHeapNumberValueHighestBits());
1943 HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, low, high);
1944 hash = AddUncasted<HBitwise>(Token::BIT_AND, hash, mask);
1947 HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
1948 HValue* key = Add<HLoadKeyed>(number_string_cache, key_index,
1949 static_cast<HValue*>(NULL),
1950 FAST_ELEMENTS, ALLOW_RETURN_HOLE);
1952 // Check if the key is a heap number and compare it with the object.
1953 IfBuilder if_keyisnotsmi(this);
1954 HValue* keyisnotsmi = if_keyisnotsmi.IfNot<HIsSmiAndBranch>(key);
1955 if_keyisnotsmi.Then();
1957 IfBuilder if_keyisheapnumber(this);
1958 if_keyisheapnumber.If<HCompareMap>(
1959 key, isolate()->factory()->heap_number_map());
1960 if_keyisheapnumber.Then();
1962 // Check if values of key and object match.
1963 IfBuilder if_keyeqobject(this);
1964 if_keyeqobject.If<HCompareNumericAndBranch>(
1965 Add<HLoadNamedField>(key, keyisnotsmi,
1966 HObjectAccess::ForHeapNumberValue()),
1967 Add<HLoadNamedField>(object, objectisnumber,
1968 HObjectAccess::ForHeapNumberValue()),
1970 if_keyeqobject.Then();
1972 // Make the key_index available.
1975 if_keyeqobject.JoinContinuation(&found);
1977 if_keyisheapnumber.JoinContinuation(&found);
1979 if_keyisnotsmi.JoinContinuation(&found);
1981 if_objectisnumber.Else();
1983 if (type->Is(Type::Number())) {
1984 if_objectisnumber.Deopt("Expected heap number");
1987 if_objectisnumber.JoinContinuation(&found);
1990 if_objectissmi.JoinContinuation(&found);
1992 // Check for cache hit.
1993 IfBuilder if_found(this, &found);
1996 // Count number to string operation in native code.
1997 AddIncrementCounter(isolate()->counters()->number_to_string_native());
1999 // Load the value in case of cache hit.
2000 HValue* key_index = Pop();
// The cached string is the entry's second slot (key_index + 1).
2001 HValue* value_index = AddUncasted<HAdd>(key_index, graph()->GetConstant1());
2002 Push(Add<HLoadKeyed>(number_string_cache, value_index,
2003 static_cast<HValue*>(NULL),
2004 FAST_ELEMENTS, ALLOW_RETURN_HOLE));
2008 // Cache miss, fallback to runtime.
2009 Add<HPushArguments>(object);
2010 Push(Add<HCallRuntime>(
2011 isolate()->factory()->empty_string(),
2012 Runtime::FunctionForId(Runtime::kNumberToStringSkipCache),
// Allocation helper that honors the HAllocationMode: optionally enlarges the
// allocation by AllocationMemento::kSize and appends/initializes the memento
// after the object proper.
2021 HAllocate* HGraphBuilder::BuildAllocate(
2022 HValue* object_size,
2024 InstanceType instance_type,
2025 HAllocationMode allocation_mode) {
2026 // Compute the effective allocation size.
2027 HValue* size = object_size;
2028 if (allocation_mode.CreateAllocationMementos()) {
2029 size = AddUncasted<HAdd>(size, Add<HConstant>(AllocationMemento::kSize));
2030 size->ClearFlag(HValue::kCanOverflow);
2033 // Perform the actual allocation.
2034 HAllocate* object = Add<HAllocate>(
2035 size, type, allocation_mode.GetPretenureMode(),
2036 instance_type, allocation_mode.feedback_site());
2038 // Setup the allocation memento.
// The memento lives at offset object_size, right behind the object.
2039 if (allocation_mode.CreateAllocationMementos()) {
2040 BuildCreateAllocationMemento(
2041 object, object_size, allocation_mode.current_site());
// Adds two string lengths and deopt-checks the sum against
// String::kMaxLength (expressed as `length < kMaxLength + 1`).
2048 HValue* HGraphBuilder::BuildAddStringLengths(HValue* left_length,
2049 HValue* right_length) {
2050 // Compute the combined string length and check against max string length.
2051 HValue* length = AddUncasted<HAdd>(left_length, right_length);
2052 // Check that length <= kMaxLength <=> length < MaxLength + 1.
2053 HValue* max_length = Add<HConstant>(String::kMaxLength + 1);
2054 Add<HBoundsCheck>(length, max_length);
// Builds a ConsString from `left` and `right`: allocates the cons cell,
// chooses the one-byte vs two-byte cons map from the operands' instance
// types (including the one-byte data hint), and fills in hash, length and
// the two halves. Also bumps the string_add_native counter.
2059 HValue* HGraphBuilder::BuildCreateConsString(
2063 HAllocationMode allocation_mode) {
2064 // Determine the string instance types.
2065 HInstruction* left_instance_type = AddLoadStringInstanceType(left);
2066 HInstruction* right_instance_type = AddLoadStringInstanceType(right);
2068 // Allocate the cons string object. HAllocate does not care whether we
2069 // pass CONS_STRING_TYPE or CONS_ONE_BYTE_STRING_TYPE here, so we just use
2070 // CONS_STRING_TYPE here. Below we decide whether the cons string is
2071 // one-byte or two-byte and set the appropriate map.
2072 DCHECK(HAllocate::CompatibleInstanceTypes(CONS_STRING_TYPE,
2073 CONS_ONE_BYTE_STRING_TYPE));
2074 HAllocate* result = BuildAllocate(Add<HConstant>(ConsString::kSize),
2075 HType::String(), CONS_STRING_TYPE,
2078 // Compute intersection and difference of instance types.
// AND: bits set in both operands; XOR: bits differing between them.
2079 HValue* anded_instance_types = AddUncasted<HBitwise>(
2080 Token::BIT_AND, left_instance_type, right_instance_type);
2081 HValue* xored_instance_types = AddUncasted<HBitwise>(
2082 Token::BIT_XOR, left_instance_type, right_instance_type);
2084 // We create a one-byte cons string if
2085 // 1. both strings are one-byte, or
2086 // 2. at least one of the strings is two-byte, but happens to contain only
2087 // one-byte characters.
2088 // To do this, we check
2089 // 1. if both strings are one-byte, or if the one-byte data hint is set in
2091 // 2. if one of the strings has the one-byte data hint set and the other
2092 // string is one-byte.
2093 IfBuilder if_onebyte(this);
2094 STATIC_ASSERT(kOneByteStringTag != 0);
2095 STATIC_ASSERT(kOneByteDataHintMask != 0);
2096 if_onebyte.If<HCompareNumericAndBranch>(
2097 AddUncasted<HBitwise>(
2098 Token::BIT_AND, anded_instance_types,
2099 Add<HConstant>(static_cast<int32_t>(
2100 kStringEncodingMask | kOneByteDataHintMask))),
2101 graph()->GetConstant0(), Token::NE);
2103 STATIC_ASSERT(kOneByteStringTag != 0 &&
2104 kOneByteDataHintTag != 0 &&
2105 kOneByteDataHintTag != kOneByteStringTag);
// Mixed case: one operand one-byte, the other carrying the one-byte hint.
2106 if_onebyte.If<HCompareNumericAndBranch>(
2107 AddUncasted<HBitwise>(
2108 Token::BIT_AND, xored_instance_types,
2109 Add<HConstant>(static_cast<int32_t>(
2110 kOneByteStringTag | kOneByteDataHintTag))),
2111 Add<HConstant>(static_cast<int32_t>(
2112 kOneByteStringTag | kOneByteDataHintTag)), Token::EQ);
2115 // We can safely skip the write barrier for storing the map here.
2116 Add<HStoreNamedField>(
2117 result, HObjectAccess::ForMap(),
2118 Add<HConstant>(isolate()->factory()->cons_one_byte_string_map()));
2122 // We can safely skip the write barrier for storing the map here.
2123 Add<HStoreNamedField>(
2124 result, HObjectAccess::ForMap(),
2125 Add<HConstant>(isolate()->factory()->cons_string_map()));
2129 // Initialize the cons string fields.
2130 Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
2131 Add<HConstant>(String::kEmptyHashField));
2132 Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
2133 Add<HStoreNamedField>(result, HObjectAccess::ForConsStringFirst(), left);
2134 Add<HStoreNamedField>(result, HObjectAccess::ForConsStringSecond(), right);
2136 // Count the native string addition.
2137 AddIncrementCounter(isolate()->counters()->string_add_native());
// Copies characters from sequential string |src| (starting at src_offset)
// into sequential string |dst| (starting at dst_offset) one character at a
// time using a post-increment loop.  The DCHECK forbids copying two-byte
// source data into a one-byte destination.
// NOTE(review): several parameter lines (dst, offsets, length) and the loop
// EndBody/closing brace are elided in this excerpt.
2143 void HGraphBuilder::BuildCopySeqStringChars(HValue* src,
2145 String::Encoding src_encoding,
2148 String::Encoding dst_encoding,
2150 DCHECK(dst_encoding != String::ONE_BYTE_ENCODING ||
2151 src_encoding == String::ONE_BYTE_ENCODING);
2152 LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
2153 HValue* index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
2155 HValue* src_index = AddUncasted<HAdd>(src_offset, index);
2157 AddUncasted<HSeqStringGetChar>(src_encoding, src, src_index);
2158 HValue* dst_index = AddUncasted<HAdd>(dst_offset, index);
2159 Add<HSeqStringSetChar>(dst_encoding, dst, dst_index, value);
// Rounds (unaligned_size + header_size) up to the next object-alignment
// boundary with the classic add-mask idiom:
//   (size + kObjectAlignmentMask) & ~kObjectAlignmentMask.
// The DCHECK requires header_size itself to already be aligned.
2165 HValue* HGraphBuilder::BuildObjectSizeAlignment(
2166 HValue* unaligned_size, int header_size) {
2167 DCHECK((header_size & kObjectAlignmentMask) == 0);
2168 HValue* size = AddUncasted<HAdd>(
2169 unaligned_size, Add<HConstant>(static_cast<int32_t>(
2170 header_size + kObjectAlignmentMask)));
// Overflow is impossible here by construction, so drop the deopt check.
2171 size->ClearFlag(HValue::kCanOverflow);
2172 return AddUncasted<HBitwise>(
2173 Token::BIT_AND, size, Add<HConstant>(static_cast<int32_t>(
2174 ~kObjectAlignmentMask)));
// Adds two non-empty strings.  Strategy:
//   1. If the combined length reaches ConsString::kMinLength, build a cons
//      string (with constant-folded shortcuts when either input length is a
//      known constant).
//   2. Otherwise, if both inputs are sequential with the same encoding,
//      allocate a fresh sequential string and copy the characters inline.
//   3. Otherwise fall back to the Runtime::kStringAdd runtime call.
// NOTE(review): the full parameter list, several closing braces, IfBuilder
// Then/Else/End lines and the final return are elided in this excerpt.
2178 HValue* HGraphBuilder::BuildUncheckedStringAdd(
2181 HAllocationMode allocation_mode) {
2182 // Determine the string lengths.
2183 HValue* left_length = AddLoadStringLength(left);
2184 HValue* right_length = AddLoadStringLength(right);
2186 // Compute the combined string length.
2187 HValue* length = BuildAddStringLengths(left_length, right_length);
2189 // Do some manual constant folding here.
2190 if (left_length->IsConstant()) {
2191 HConstant* c_left_length = HConstant::cast(left_length);
2192 DCHECK_NE(0, c_left_length->Integer32Value());
2193 if (c_left_length->Integer32Value() + 1 >= ConsString::kMinLength) {
2194 // The right string contains at least one character.
2195 return BuildCreateConsString(length, left, right, allocation_mode);
2197 } else if (right_length->IsConstant()) {
2198 HConstant* c_right_length = HConstant::cast(right_length);
2199 DCHECK_NE(0, c_right_length->Integer32Value());
2200 if (c_right_length->Integer32Value() + 1 >= ConsString::kMinLength) {
2201 // The left string contains at least one character.
2202 return BuildCreateConsString(length, left, right, allocation_mode);
2206 // Check if we should create a cons string.
2207 IfBuilder if_createcons(this);
2208 if_createcons.If<HCompareNumericAndBranch>(
2209 length, Add<HConstant>(ConsString::kMinLength), Token::GTE);
2210 if_createcons.Then();
2212 // Create a cons string.
2213 Push(BuildCreateConsString(length, left, right, allocation_mode));
2215 if_createcons.Else();
2217 // Determine the string instance types.
2218 HValue* left_instance_type = AddLoadStringInstanceType(left);
2219 HValue* right_instance_type = AddLoadStringInstanceType(right);
2221 // Compute union and difference of instance types.
2222 HValue* ored_instance_types = AddUncasted<HBitwise>(
2223 Token::BIT_OR, left_instance_type, right_instance_type);
2224 HValue* xored_instance_types = AddUncasted<HBitwise>(
2225 Token::BIT_XOR, left_instance_type, right_instance_type);
2227 // Check if both strings have the same encoding and both are
2229 IfBuilder if_sameencodingandsequential(this);
// Same encoding <=> encoding bits do not differ (XOR & mask == 0).
2230 if_sameencodingandsequential.If<HCompareNumericAndBranch>(
2231 AddUncasted<HBitwise>(
2232 Token::BIT_AND, xored_instance_types,
2233 Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
2234 graph()->GetConstant0(), Token::EQ);
2235 if_sameencodingandsequential.And();
2236 STATIC_ASSERT(kSeqStringTag == 0);
// Both sequential <=> no representation bits set in either operand (OR).
2237 if_sameencodingandsequential.If<HCompareNumericAndBranch>(
2238 AddUncasted<HBitwise>(
2239 Token::BIT_AND, ored_instance_types,
2240 Add<HConstant>(static_cast<int32_t>(kStringRepresentationMask))),
2241 graph()->GetConstant0(), Token::EQ);
2242 if_sameencodingandsequential.Then();
2244 HConstant* string_map =
2245 Add<HConstant>(isolate()->factory()->string_map());
2246 HConstant* one_byte_string_map =
2247 Add<HConstant>(isolate()->factory()->one_byte_string_map());
2249 // Determine map and size depending on whether result is one-byte string.
2250 IfBuilder if_onebyte(this);
2251 STATIC_ASSERT(kOneByteStringTag != 0);
2252 if_onebyte.If<HCompareNumericAndBranch>(
2253 AddUncasted<HBitwise>(
2254 Token::BIT_AND, ored_instance_types,
2255 Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
2256 graph()->GetConstant0(), Token::NE);
2259 // Allocate sequential one-byte string object.
2261 Push(one_byte_string_map);
2265 // Allocate sequential two-byte string object.
// Two-byte payload is length * 2 bytes, computed as a shift.
2266 HValue* size = AddUncasted<HShl>(length, graph()->GetConstant1());
2267 size->ClearFlag(HValue::kCanOverflow);
2268 size->SetFlag(HValue::kUint32);
2273 HValue* map = Pop();
2275 // Calculate the number of bytes needed for the characters in the
2276 // string while observing object alignment.
2277 STATIC_ASSERT((SeqString::kHeaderSize & kObjectAlignmentMask) == 0);
2278 HValue* size = BuildObjectSizeAlignment(Pop(), SeqString::kHeaderSize);
2280 // Allocate the string object. HAllocate does not care whether we pass
2281 // STRING_TYPE or ONE_BYTE_STRING_TYPE here, so we just use STRING_TYPE.
2282 HAllocate* result = BuildAllocate(
2283 size, HType::String(), STRING_TYPE, allocation_mode);
2284 Add<HStoreNamedField>(result, HObjectAccess::ForMap(), map);
2286 // Initialize the string fields.
2287 Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
2288 Add<HConstant>(String::kEmptyHashField));
2289 Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
2291 // Copy characters to the result string.
2292 IfBuilder if_twobyte(this);
2293 if_twobyte.If<HCompareObjectEqAndBranch>(map, string_map);
2296 // Copy characters from the left string.
2297 BuildCopySeqStringChars(
2298 left, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
2299 result, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
2302 // Copy characters from the right string.
2303 BuildCopySeqStringChars(
2304 right, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
2305 result, left_length, String::TWO_BYTE_ENCODING,
2310 // Copy characters from the left string.
2311 BuildCopySeqStringChars(
2312 left, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
2313 result, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
2316 // Copy characters from the right string.
2317 BuildCopySeqStringChars(
2318 right, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
2319 result, left_length, String::ONE_BYTE_ENCODING,
2324 // Count the native string addition.
2325 AddIncrementCounter(isolate()->counters()->string_add_native());
2327 // Return the sequential string.
2330 if_sameencodingandsequential.Else();
2332 // Fallback to the runtime to add the two strings.
2333 Add<HPushArguments>(left, right);
2334 Push(Add<HCallRuntime>(
2335 isolate()->factory()->empty_string(),
2336 Runtime::FunctionForId(Runtime::kStringAdd),
2339 if_sameencodingandsequential.End();
2341 if_createcons.End();
// Public string-add entry: short-circuits when either operand is empty
// (returning the other operand directly) and otherwise delegates to
// BuildUncheckedStringAdd for the two-non-empty-strings case.
// NOTE(review): the parameter list, the Push of the short-circuit results,
// if_leftempty.End() and the final Pop/return are elided in this excerpt.
2347 HValue* HGraphBuilder::BuildStringAdd(
2350 HAllocationMode allocation_mode) {
// Suppress observable side effects so the branches below can merge cleanly.
2351 NoObservableSideEffectsScope no_effects(this);
2353 // Determine string lengths.
2354 HValue* left_length = AddLoadStringLength(left);
2355 HValue* right_length = AddLoadStringLength(right);
2357 // Check if left string is empty.
2358 IfBuilder if_leftempty(this);
2359 if_leftempty.If<HCompareNumericAndBranch>(
2360 left_length, graph()->GetConstant0(), Token::EQ);
2361 if_leftempty.Then();
2363 // Count the native string addition.
2364 AddIncrementCounter(isolate()->counters()->string_add_native());
2366 // Just return the right string.
2369 if_leftempty.Else();
2371 // Check if right string is empty.
2372 IfBuilder if_rightempty(this);
2373 if_rightempty.If<HCompareNumericAndBranch>(
2374 right_length, graph()->GetConstant0(), Token::EQ);
2375 if_rightempty.Then();
2377 // Count the native string addition.
2378 AddIncrementCounter(isolate()->counters()->string_add_native());
2380 // Just return the left string.
2383 if_rightempty.Else();
2385 // Add the two non-empty strings.
2386 Push(BuildUncheckedStringAdd(left, right, allocation_mode));
2388 if_rightempty.End();
// Emits a monomorphic keyed element load or store on |checked_object|.
// Handles external/typed arrays (including the out-of-bounds-ignoring store
// mode), COW fixed arrays, smi-only stores, and growing stores, bounds-
// checking the key before the actual HLoadKeyed/HStoreKeyed.
// NOTE(review): several parameter lines (key, val, is_js_array), closing
// braces and some else-branch lines are elided in this excerpt.
2396 HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
2397 HValue* checked_object,
2401 ElementsKind elements_kind,
2402 PropertyAccessType access_type,
2403 LoadKeyedHoleMode load_mode,
2404 KeyedAccessStoreMode store_mode) {
2405 DCHECK((!IsExternalArrayElementsKind(elements_kind) &&
2406 !IsFixedTypedArrayElementsKind(elements_kind)) ||
2408 // No GVNFlag is necessary for ElementsKind if there is an explicit dependency
2409 // on a HElementsTransition instruction. The flag can also be removed if the
2410 // map to check has FAST_HOLEY_ELEMENTS, since there can be no further
2411 // ElementsKind transitions. Finally, the dependency can be removed for stores
2412 // for FAST_ELEMENTS, since a transition to HOLEY elements won't change the
2413 // generated store code.
2414 if ((elements_kind == FAST_HOLEY_ELEMENTS) ||
2415 (elements_kind == FAST_ELEMENTS && access_type == STORE)) {
2416 checked_object->ClearDependsOnFlag(kElementsKind);
2419 bool fast_smi_only_elements = IsFastSmiElementsKind(elements_kind);
2420 bool fast_elements = IsFastObjectElementsKind(elements_kind);
2421 HValue* elements = AddLoadElements(checked_object);
// Guard against copy-on-write backing stores before a mutating store.
2422 if (access_type == STORE && (fast_elements || fast_smi_only_elements) &&
2423 store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
2424 HCheckMaps* check_cow_map = Add<HCheckMaps>(
2425 elements, isolate()->factory()->fixed_array_map());
2426 check_cow_map->ClearDependsOnFlag(kElementsKind);
2428 HInstruction* length = NULL;
2430 length = Add<HLoadNamedField>(
2431 checked_object->ActualValue(), checked_object,
2432 HObjectAccess::ForArrayLength(elements_kind));
2434 length = AddLoadFixedArrayLength(elements);
2436 length->set_type(HType::Smi());
2437 HValue* checked_key = NULL;
2438 if (IsExternalArrayElementsKind(elements_kind) ||
2439 IsFixedTypedArrayElementsKind(elements_kind)) {
2440 HValue* backing_store;
2441 if (IsExternalArrayElementsKind(elements_kind)) {
2442 backing_store = Add<HLoadNamedField>(
2443 elements, static_cast<HValue*>(NULL),
2444 HObjectAccess::ForExternalArrayExternalPointer());
2446 backing_store = elements;
// Typed-array stores past the length are silently dropped in this mode,
// so access only happens under the in-bounds-and-non-negative checks.
2448 if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
2449 NoObservableSideEffectsScope no_effects(this);
2450 IfBuilder length_checker(this);
2451 length_checker.If<HCompareNumericAndBranch>(key, length, Token::LT);
2452 length_checker.Then();
2453 IfBuilder negative_checker(this);
2454 HValue* bounds_check = negative_checker.If<HCompareNumericAndBranch>(
2455 key, graph()->GetConstant0(), Token::GTE);
2456 negative_checker.Then();
2457 HInstruction* result = AddElementAccess(
2458 backing_store, key, val, bounds_check, elements_kind, access_type);
2459 negative_checker.ElseDeopt("Negative key encountered");
2460 negative_checker.End();
2461 length_checker.End();
2464 DCHECK(store_mode == STANDARD_STORE);
2465 checked_key = Add<HBoundsCheck>(key, length);
2466 return AddElementAccess(
2467 backing_store, checked_key, val,
2468 checked_object, elements_kind, access_type);
2471 DCHECK(fast_smi_only_elements ||
2473 IsFastDoubleElementsKind(elements_kind));
2475 // In case val is stored into a fast smi array, assure that the value is a smi
2476 // before manipulating the backing store. Otherwise the actual store may
2477 // deopt, leaving the backing store in an invalid state.
2478 if (access_type == STORE && IsFastSmiElementsKind(elements_kind) &&
2479 !val->type().IsSmi()) {
2480 val = AddUncasted<HForceRepresentation>(val, Representation::Smi());
2483 if (IsGrowStoreMode(store_mode)) {
2484 NoObservableSideEffectsScope no_effects(this);
2485 Representation representation = HStoreKeyed::RequiredValueRepresentation(
2486 elements_kind, STORE_TO_INITIALIZED_ENTRY);
2487 val = AddUncasted<HForceRepresentation>(val, representation);
2488 elements = BuildCheckForCapacityGrow(checked_object, elements,
2489 elements_kind, length, key,
2490 is_js_array, access_type);
2493 checked_key = Add<HBoundsCheck>(key, length);
2495 if (access_type == STORE && (fast_elements || fast_smi_only_elements)) {
2496 if (store_mode == STORE_NO_TRANSITION_HANDLE_COW) {
2497 NoObservableSideEffectsScope no_effects(this);
2498 elements = BuildCopyElementsOnWrite(checked_object, elements,
2499 elements_kind, length);
2501 HCheckMaps* check_cow_map = Add<HCheckMaps>(
2502 elements, isolate()->factory()->fixed_array_map());
2503 check_cow_map->ClearDependsOnFlag(kElementsKind);
2507 return AddElementAccess(elements, checked_key, val, checked_object,
2508 elements_kind, access_type, load_mode);
// Allocates a JSArray for a given length argument.  Constant smi lengths are
// handled directly (empty array for 0); otherwise the length is bounds-
// checked against the initial max fast element array size and a runtime
// branch picks between an empty preallocated array (length 0) and an array
// of exactly the checked length.
// NOTE(review): several closing braces, the AllocateArray capacity argument
// line, the bounds-check argument and the if_builder Then/Else/End lines are
// elided in this excerpt.
2512 HValue* HGraphBuilder::BuildAllocateArrayFromLength(
2513 JSArrayBuilder* array_builder,
2514 HValue* length_argument) {
2515 if (length_argument->IsConstant() &&
2516 HConstant::cast(length_argument)->HasSmiValue()) {
2517 int array_length = HConstant::cast(length_argument)->Integer32Value();
2518 if (array_length == 0) {
2519 return array_builder->AllocateEmptyArray();
2521 return array_builder->AllocateArray(length_argument,
2527 HValue* constant_zero = graph()->GetConstant0();
2528 HConstant* max_alloc_length =
2529 Add<HConstant>(JSObject::kInitialMaxFastElementArray);
2530 HInstruction* checked_length = Add<HBoundsCheck>(length_argument,
2532 IfBuilder if_builder(this);
2533 if_builder.If<HCompareNumericAndBranch>(checked_length, constant_zero,
2536 const int initial_capacity = JSArray::kPreallocatedArrayElements;
2537 HConstant* initial_capacity_node = Add<HConstant>(initial_capacity);
2538 Push(initial_capacity_node); // capacity
2539 Push(constant_zero); // length
2541 if (!(top_info()->IsStub()) &&
2542 IsFastPackedElementsKind(array_builder->kind())) {
2543 // We'll come back later with better (holey) feedback.
2544 if_builder.Deopt("Holey array despite packed elements_kind feedback");
2546 Push(checked_length); // capacity
2547 Push(checked_length); // length
2551 // Figure out total size
2552 HValue* length = Pop();
2553 HValue* capacity = Pop();
2554 return array_builder->AllocateArray(capacity, max_alloc_length, length);
// Computes the byte size of an elements backing store for |capacity|
// elements of |kind|: capacity * element_size + FixedArray::kHeaderSize,
// using an overflow-free Imul and marking the adds as non-overflowing.
// NOTE(review): the capacity parameter line, the element-size ternary arms
// and the final return of total_size are elided in this excerpt.
2558 HValue* HGraphBuilder::BuildCalculateElementsSize(ElementsKind kind,
2560 int elements_size = IsFastDoubleElementsKind(kind)
2564 HConstant* elements_size_value = Add<HConstant>(elements_size);
2565 HInstruction* mul = HMul::NewImul(zone(), context(),
2566 capacity->ActualValue(),
2567 elements_size_value);
2568 AddInstruction(mul);
2569 mul->ClearFlag(HValue::kCanOverflow);
// Both array flavors share the same header layout, so one constant suffices.
2571 STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize);
2573 HConstant* header_size = Add<HConstant>(FixedArray::kHeaderSize);
2574 HValue* total_size = AddUncasted<HAdd>(mul, header_size);
2575 total_size->ClearFlag(HValue::kCanOverflow);
// Allocates an (uninitialized) JSArray object in new space, reserving extra
// room for an AllocationMemento when allocation-site tracking is on.
2580 HAllocate* HGraphBuilder::AllocateJSArrayObject(AllocationSiteMode mode) {
2581 int base_size = JSArray::kSize;
2582 if (mode == TRACK_ALLOCATION_SITE) {
2583 base_size += AllocationMemento::kSize;
2585 HConstant* size_in_bytes = Add<HConstant>(base_size);
2586 return Add<HAllocate>(
2587 size_in_bytes, HType::JSArray(), NOT_TENURED, JS_OBJECT_TYPE);
// Returns an HConstant for the byte size of an elements store holding
// |capacity| elements of |kind| (FixedDoubleArray vs FixedArray layout).
// NOTE(review): the kind/capacity parameter lines are elided in this excerpt.
2591 HConstant* HGraphBuilder::EstablishElementsAllocationSize(
2594 int base_size = IsFastDoubleElementsKind(kind)
2595 ? FixedDoubleArray::SizeFor(capacity)
2596 : FixedArray::SizeFor(capacity);
2598 return Add<HConstant>(base_size);
// Allocates a raw elements backing store of |size_in_bytes|, choosing the
// FIXED_DOUBLE_ARRAY_TYPE instance type for double kinds.
// NOTE(review): the non-double ternary arm and the instance-type argument to
// HAllocate are elided in this excerpt.
2602 HAllocate* HGraphBuilder::BuildAllocateElements(ElementsKind kind,
2603 HValue* size_in_bytes) {
2604 InstanceType instance_type = IsFastDoubleElementsKind(kind)
2605 ? FIXED_DOUBLE_ARRAY_TYPE
2608 return Add<HAllocate>(size_in_bytes, HType::HeapObject(), NOT_TENURED,
// Writes the map and length fields of a freshly allocated elements store,
// selecting the fixed-double-array map for double kinds.
// NOTE(review): the kind/capacity parameter lines and the capacity argument
// to the length store are elided in this excerpt.
2613 void HGraphBuilder::BuildInitializeElementsHeader(HValue* elements,
2616 Factory* factory = isolate()->factory();
2617 Handle<Map> map = IsFastDoubleElementsKind(kind)
2618 ? factory->fixed_double_array_map()
2619 : factory->fixed_array_map();
2621 Add<HStoreNamedField>(elements, HObjectAccess::ForMap(), Add<HConstant>(map));
2622 Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(),
// Allocates an elements store for |capacity| elements of |kind| and
// initializes its header (map + length).
// NOTE(review): the capacity parameter line and the return of |new_array|
// are elided in this excerpt.
2627 HValue* HGraphBuilder::BuildAllocateAndInitializeArray(ElementsKind kind,
2629 // The HForceRepresentation is to prevent possible deopt on int-smi
2630 // conversion after allocation but before the new object fields are set.
2631 capacity = AddUncasted<HForceRepresentation>(capacity, Representation::Smi());
2632 HValue* size_in_bytes = BuildCalculateElementsSize(kind, capacity);
2633 HValue* new_array = BuildAllocateElements(kind, size_in_bytes);
2634 BuildInitializeElementsHeader(new_array, kind, capacity);
// Fills in a freshly allocated JSArray header: map, empty properties,
// elements pointer (defaulting to the empty fixed array), length, and — when
// tracking allocation sites — an AllocationMemento right after the array.
// NOTE(review): the array_map/elements parameter lines are elided in this
// excerpt.
2639 void HGraphBuilder::BuildJSArrayHeader(HValue* array,
2642 AllocationSiteMode mode,
2643 ElementsKind elements_kind,
2644 HValue* allocation_site_payload,
2645 HValue* length_field) {
2646 Add<HStoreNamedField>(array, HObjectAccess::ForMap(), array_map);
2648 HConstant* empty_fixed_array =
2649 Add<HConstant>(isolate()->factory()->empty_fixed_array());
2651 Add<HStoreNamedField>(
2652 array, HObjectAccess::ForPropertiesPointer(), empty_fixed_array);
2654 Add<HStoreNamedField>(
2655 array, HObjectAccess::ForElementsPointer(),
2656 elements != NULL ? elements : empty_fixed_array);
2658 Add<HStoreNamedField>(
2659 array, HObjectAccess::ForArrayLength(elements_kind), length_field);
2661 if (mode == TRACK_ALLOCATION_SITE) {
2662 BuildCreateAllocationMemento(
2663 array, Add<HConstant>(JSArray::kSize), allocation_site_payload);
// Emits the actual keyed element access: an HStoreKeyed for stores (clamping
// the value for uint8-clamped kinds) or an HLoadKeyed for loads (recording
// uint32 loads for later uint32 analysis).
// NOTE(review): the elements/val/dependency parameter lines and the final
// return of |load| are elided in this excerpt.
2668 HInstruction* HGraphBuilder::AddElementAccess(
2670 HValue* checked_key,
2673 ElementsKind elements_kind,
2674 PropertyAccessType access_type,
2675 LoadKeyedHoleMode load_mode) {
2676 if (access_type == STORE) {
2677 DCHECK(val != NULL);
2678 if (elements_kind == EXTERNAL_UINT8_CLAMPED_ELEMENTS ||
2679 elements_kind == UINT8_CLAMPED_ELEMENTS) {
2680 val = Add<HClampToUint8>(val);
2682 return Add<HStoreKeyed>(elements, checked_key, val, elements_kind,
2683 STORE_TO_INITIALIZED_ENTRY);
2686 DCHECK(access_type == LOAD);
2687 DCHECK(val == NULL);
2688 HLoadKeyed* load = Add<HLoadKeyed>(
2689 elements, checked_key, dependency, elements_kind, load_mode);
2690 if (elements_kind == EXTERNAL_UINT32_ELEMENTS ||
2691 elements_kind == UINT32_ELEMENTS) {
2692 graph()->RecordUint32Instruction(load);
// Loads the map of |object|; |dependency| orders this load after a prior
// check without a data dependency.
2698 HLoadNamedField* HGraphBuilder::AddLoadMap(HValue* object,
2699 HValue* dependency) {
2700 return Add<HLoadNamedField>(object, dependency, HObjectAccess::ForMap());
// Loads the elements pointer of |object| (optionally ordered after
// |dependency|).
2704 HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object,
2705 HValue* dependency) {
2706 return Add<HLoadNamedField>(
2707 object, dependency, HObjectAccess::ForElementsPointer());
// Loads the length field of a FixedArray(-like) backing store.
2711 HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(
2713 HValue* dependency) {
2714 return Add<HLoadNamedField>(
2715 array, dependency, HObjectAccess::ForFixedArrayLength())
// Loads the JSArray length field, with the access representation chosen by
// the array's elements |kind|.
// NOTE(review): the kind parameter line is elided in this excerpt.
2719 HLoadNamedField* HGraphBuilder::AddLoadArrayLength(HValue* array,
2721 HValue* dependency) {
2722 return Add<HLoadNamedField>(
2723 array, dependency, HObjectAccess::ForArrayLength(kind));
// Computes the grown capacity for a backing store:
//   new_capacity = old_capacity + old_capacity / 2 + 16
// i.e. 1.5x growth plus a minimum increment of 16 slots.
2727 HValue* HGraphBuilder::BuildNewElementsCapacity(HValue* old_capacity) {
2728 HValue* half_old_capacity = AddUncasted<HShr>(old_capacity,
2729 graph_->GetConstant1());
2731 HValue* new_capacity = AddUncasted<HAdd>(half_old_capacity, old_capacity);
2732 new_capacity->ClearFlag(HValue::kCanOverflow);
2734 HValue* min_growth = Add<HConstant>(16);
2736 new_capacity = AddUncasted<HAdd>(new_capacity, min_growth);
2737 new_capacity->ClearFlag(HValue::kCanOverflow);
2739 return new_capacity;
// Grows |object|'s backing store to |new_capacity| slots of |new_kind|:
// bounds-checks the capacity against the maximum regular heap object size,
// allocates + initializes the new store, copies the old elements over, and
// installs the new store on the object.
// NOTE(review): the elements/kind/length parameter lines, the second store
// argument and the closing brace are elided in this excerpt.
2743 HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object,
2746 ElementsKind new_kind,
2748 HValue* new_capacity) {
2749 Add<HBoundsCheck>(new_capacity, Add<HConstant>(
2750 (Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >>
2751 ElementsKindToShiftSize(new_kind)));
2753 HValue* new_elements =
2754 BuildAllocateAndInitializeArray(new_kind, new_capacity);
2756 BuildCopyElements(elements, kind, new_elements,
2757 new_kind, length, new_capacity);
2759 Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
2762 return new_elements;
// Fills elements[from..to) with |value|.  Small constant ranges starting at
// 0 are fully unrolled; otherwise a backwards post-decrement loop is emitted
// (keeping |from| live rather than |to| for better register allocation).
// NOTE(review): the from/to/value parameter lines, the early-return after
// unrolling and the loop EndBody/closing brace are elided in this excerpt.
2766 void HGraphBuilder::BuildFillElementsWithValue(HValue* elements,
2767 ElementsKind elements_kind,
2772 to = AddLoadFixedArrayLength(elements);
2775 // Special loop unfolding case
2776 STATIC_ASSERT(JSArray::kPreallocatedArrayElements <=
2777 kElementLoopUnrollThreshold);
2778 int initial_capacity = -1;
2779 if (from->IsInteger32Constant() && to->IsInteger32Constant()) {
2780 int constant_from = from->GetInteger32Constant();
2781 int constant_to = to->GetInteger32Constant();
2783 if (constant_from == 0 && constant_to <= kElementLoopUnrollThreshold) {
2784 initial_capacity = constant_to;
2788 if (initial_capacity >= 0) {
2789 for (int i = 0; i < initial_capacity; i++) {
2790 HInstruction* key = Add<HConstant>(i);
2791 Add<HStoreKeyed>(elements, key, value, elements_kind);
2794 // Carefully loop backwards so that the "from" remains live through the loop
2795 // rather than the to. This often corresponds to keeping length live rather
2796 // then capacity, which helps register allocation, since length is used more
2797 // other than capacity after filling with holes.
2798 LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);
2800 HValue* key = builder.BeginBody(to, from, Token::GT);
2802 HValue* adjusted_key = AddUncasted<HSub>(key, graph()->GetConstant1());
2803 adjusted_key->ClearFlag(HValue::kCanOverflow);
2805 Add<HStoreKeyed>(elements, adjusted_key, value, elements_kind);
// Fills elements[from..to) with the appropriate hole value: the_hole for
// smi/object kinds, the hole NaN for double kinds.  Smi/object kinds are
// widened to FAST_HOLEY_ELEMENTS so the store instruction accepts a heap
// object value.
// NOTE(review): the from/to parameter lines and the closing brace are elided
// in this excerpt.
2812 void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
2813 ElementsKind elements_kind,
2816 // Fast elements kinds need to be initialized in case statements below cause a
2817 // garbage collection.
2818 Factory* factory = isolate()->factory();
2820 double nan_double = FixedDoubleArray::hole_nan_as_double();
2821 HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
2822 ? Add<HConstant>(factory->the_hole_value())
2823 : Add<HConstant>(nan_double);
2825 // Since we're about to store a hole value, the store instruction below must
2826 // assume an elements kind that supports heap object values.
2827 if (IsFastSmiOrObjectElementsKind(elements_kind)) {
2828 elements_kind = FAST_HOLEY_ELEMENTS;
2831 BuildFillElementsWithValue(elements, elements_kind, from, to, hole);
// Copies |length| property slots from one properties backing store to
// another, padding slots [length..capacity) with undefined first, then
// copying backwards in a post-decrement loop.
// NOTE(review): the capacity parameter line, the element load's assignment
// target and the loop EndBody/closing brace are elided in this excerpt.
2835 void HGraphBuilder::BuildCopyProperties(HValue* from_properties,
2836 HValue* to_properties, HValue* length,
2838 ElementsKind kind = FAST_ELEMENTS;
2840 BuildFillElementsWithValue(to_properties, kind, length, capacity,
2841 graph()->GetConstantUndefined());
2843 LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);
2845 HValue* key = builder.BeginBody(length, graph()->GetConstant0(), Token::GT);
2847 key = AddUncasted<HSub>(key, graph()->GetConstant1());
2848 key->ClearFlag(HValue::kCanOverflow);
2851 Add<HLoadKeyed>(from_properties, key, static_cast<HValue*>(NULL), kind);
2853 Add<HStoreKeyed>(to_properties, key, element, kind);
// Copies elements between backing stores, possibly converting between
// element kinds.  Small constant capacities are unrolled; double->object
// copies pre-fill the destination with holes so a GC during the copy never
// sees an uninitialized FixedArray; holey sources get explicit hole checks
// so holes are translated to the destination kind's hole representation.
// NOTE(review): the length/capacity parameter lines, several closing braces,
// IfBuilder Then/Else/End lines and the hole-fill arguments are elided in
// this excerpt.
2859 void HGraphBuilder::BuildCopyElements(HValue* from_elements,
2860 ElementsKind from_elements_kind,
2861 HValue* to_elements,
2862 ElementsKind to_elements_kind,
2865 int constant_capacity = -1;
2866 if (capacity != NULL &&
2867 capacity->IsConstant() &&
2868 HConstant::cast(capacity)->HasInteger32Value()) {
2869 int constant_candidate = HConstant::cast(capacity)->Integer32Value();
2870 if (constant_candidate <= kElementLoopUnrollThreshold) {
2871 constant_capacity = constant_candidate;
2875 bool pre_fill_with_holes =
2876 IsFastDoubleElementsKind(from_elements_kind) &&
2877 IsFastObjectElementsKind(to_elements_kind);
2878 if (pre_fill_with_holes) {
2879 // If the copy might trigger a GC, make sure that the FixedArray is
2880 // pre-initialized with holes to make sure that it's always in a
2881 // consistent state.
2882 BuildFillElementsWithHole(to_elements, to_elements_kind,
2883 graph()->GetConstant0(), NULL);
2886 if (constant_capacity != -1) {
2887 // Unroll the loop for small elements kinds.
2888 for (int i = 0; i < constant_capacity; i++) {
2889 HValue* key_constant = Add<HConstant>(i);
2890 HInstruction* value = Add<HLoadKeyed>(from_elements, key_constant,
2891 static_cast<HValue*>(NULL),
2892 from_elements_kind);
2893 Add<HStoreKeyed>(to_elements, key_constant, value, to_elements_kind);
2896 if (!pre_fill_with_holes &&
2897 (capacity == NULL || !length->Equals(capacity))) {
2898 BuildFillElementsWithHole(to_elements, to_elements_kind,
2902 LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);
2904 HValue* key = builder.BeginBody(length, graph()->GetConstant0(),
2907 key = AddUncasted<HSub>(key, graph()->GetConstant1());
2908 key->ClearFlag(HValue::kCanOverflow);
2910 HValue* element = Add<HLoadKeyed>(from_elements, key,
2911 static_cast<HValue*>(NULL),
// Smi destinations must be widened to holey-object so holes can be written.
2915 ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
2916 IsFastSmiElementsKind(to_elements_kind))
2917 ? FAST_HOLEY_ELEMENTS : to_elements_kind;
2919 if (IsHoleyElementsKind(from_elements_kind) &&
2920 from_elements_kind != to_elements_kind) {
2921 IfBuilder if_hole(this);
2922 if_hole.If<HCompareHoleAndBranch>(element);
2924 HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
2925 ? Add<HConstant>(FixedDoubleArray::hole_nan_as_double())
2926 : graph()->GetConstantHole();
2927 Add<HStoreKeyed>(to_elements, key, hole_constant, kind);
2929 HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
2930 store->SetFlag(HValue::kAllowUndefinedAsNaN);
2933 HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
2934 store->SetFlag(HValue::kAllowUndefinedAsNaN);
2940 Counters* counters = isolate()->counters();
2941 AddIncrementCounter(counters->inlined_copied_elements());
// Clones a boilerplate JSArray whose elements are copy-on-write: the new
// array shares the boilerplate's COW elements pointer rather than copying
// element data.
// NOTE(review): the remaining BuildJSArrayHeader arguments and the final
// return are elided in this excerpt.
2945 HValue* HGraphBuilder::BuildCloneShallowArrayCow(HValue* boilerplate,
2946 HValue* allocation_site,
2947 AllocationSiteMode mode,
2948 ElementsKind kind) {
2949 HAllocate* array = AllocateJSArrayObject(mode);
2951 HValue* map = AddLoadMap(boilerplate);
2952 HValue* elements = AddLoadElements(boilerplate);
2953 HValue* length = AddLoadArrayLength(boilerplate, kind);
2955 BuildJSArrayHeader(array,
// Clones a boilerplate JSArray as an empty array: elements set to the empty
// fixed array and length 0, reusing the boilerplate's map.
// NOTE(review): intermediate BuildJSArrayHeader arguments and the final
// return are elided in this excerpt.
2966 HValue* HGraphBuilder::BuildCloneShallowArrayEmpty(HValue* boilerplate,
2967 HValue* allocation_site,
2968 AllocationSiteMode mode) {
2969 HAllocate* array = AllocateJSArrayObject(mode);
2971 HValue* map = AddLoadMap(boilerplate);
2973 BuildJSArrayHeader(array,
2975 NULL, // set elements to empty fixed array
2979 graph()->GetConstant0());
// Clones a boilerplate JSArray together with its elements: first builds an
// empty clone (so the object is GC-safe), then allocates a new backing
// store, copies the elements header and contents from the boilerplate, and
// patches the clone's elements pointer and length.
// NOTE(review): the final return of |result| is elided in this excerpt.
2984 HValue* HGraphBuilder::BuildCloneShallowArrayNonEmpty(HValue* boilerplate,
2985 HValue* allocation_site,
2986 AllocationSiteMode mode,
2987 ElementsKind kind) {
2988 HValue* boilerplate_elements = AddLoadElements(boilerplate);
2989 HValue* capacity = AddLoadFixedArrayLength(boilerplate_elements);
2991 // Generate size calculation code here in order to make it dominate
2992 // the JSArray allocation.
2993 HValue* elements_size = BuildCalculateElementsSize(kind, capacity);
2995 // Create empty JSArray object for now, store elimination should remove
2996 // redundant initialization of elements and length fields and at the same
2997 // time the object will be fully prepared for GC if it happens during
2998 // elements allocation.
2999 HValue* result = BuildCloneShallowArrayEmpty(
3000 boilerplate, allocation_site, mode);
3002 HAllocate* elements = BuildAllocateElements(kind, elements_size);
3004 // This function implicitly relies on the fact that the
3005 // FastCloneShallowArrayStub is called only for literals shorter than
3006 // JSObject::kInitialMaxFastElementArray.
3007 // Can't add HBoundsCheck here because otherwise the stub will eager a frame.
3008 HConstant* size_upper_bound = EstablishElementsAllocationSize(
3009 kind, JSObject::kInitialMaxFastElementArray);
3010 elements->set_size_upper_bound(size_upper_bound);
3012 Add<HStoreNamedField>(result, HObjectAccess::ForElementsPointer(), elements);
3014 // The allocation for the cloned array above causes register pressure on
3015 // machines with low register counts. Force a reload of the boilerplate
3016 // elements here to free up a register for the allocation to avoid unnecessary
3018 boilerplate_elements = AddLoadElements(boilerplate);
3019 boilerplate_elements->SetFlag(HValue::kCantBeReplaced);
3021 // Copy the elements array header.
3022 for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
3023 HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
3024 Add<HStoreNamedField>(elements, access,
3025 Add<HLoadNamedField>(boilerplate_elements,
3026 static_cast<HValue*>(NULL), access));
3029 // And the result of the length
3030 HValue* length = AddLoadArrayLength(boilerplate, kind);
3031 Add<HStoreNamedField>(result, HObjectAccess::ForArrayLength(kind), length);
3033 BuildCopyElements(boilerplate_elements, kind, elements,
3034 kind, length, NULL);
// Builds the comparison-against-nil check used by CompareNil: ORs together
// branches for null, undefined and undetectable objects according to what
// |type| may contain.  If some nil case is not covered by |type|, a map
// check (single class) or deopt (too many classes) guards the value, and
// the result is captured into |continuation|.
// NOTE(review): the value/type parameter lines and some Then/Else lines are
// elided in this excerpt.
3039 void HGraphBuilder::BuildCompareNil(
3042 HIfContinuation* continuation) {
3043 IfBuilder if_nil(this);
3044 bool some_case_handled = false;
3045 bool some_case_missing = false;
3047 if (type->Maybe(Type::Null())) {
3048 if (some_case_handled) if_nil.Or();
3049 if_nil.If<HCompareObjectEqAndBranch>(value, graph()->GetConstantNull());
3050 some_case_handled = true;
3052 some_case_missing = true;
3055 if (type->Maybe(Type::Undefined())) {
3056 if (some_case_handled) if_nil.Or();
3057 if_nil.If<HCompareObjectEqAndBranch>(value,
3058 graph()->GetConstantUndefined());
3059 some_case_handled = true;
3061 some_case_missing = true;
3064 if (type->Maybe(Type::Undetectable())) {
3065 if (some_case_handled) if_nil.Or();
3066 if_nil.If<HIsUndetectableAndBranch>(value);
3067 some_case_handled = true;
3069 some_case_missing = true;
3072 if (some_case_missing) {
3075 if (type->NumClasses() == 1) {
3076 BuildCheckHeapObject(value);
3077 // For ICs, the map checked below is a sentinel map that gets replaced by
3078 // the monomorphic map when the code is used as a template to generate a
3079 // new IC. For optimized functions, there is no sentinel map, the map
3080 // emitted below is the actual monomorphic map.
3081 Add<HCheckMaps>(value, type->Classes().Current());
3083 if_nil.Deopt("Too many undetectable types");
3087 if_nil.CaptureContinuation(continuation);
// Appends an AllocationMemento immediately after |previous_object| (at
// offset |previous_object_size|), pointing it at |allocation_site|, and —
// when allocation-site pretenuring is enabled — increments the site's
// memento-create counter.
// NOTE(review): the memento target of the site store and the closing braces
// are elided in this excerpt.
3091 void HGraphBuilder::BuildCreateAllocationMemento(
3092 HValue* previous_object,
3093 HValue* previous_object_size,
3094 HValue* allocation_site) {
3095 DCHECK(allocation_site != NULL);
3096 HInnerAllocatedObject* allocation_memento = Add<HInnerAllocatedObject>(
3097 previous_object, previous_object_size, HType::HeapObject());
3098 AddStoreMapConstant(
3099 allocation_memento, isolate()->factory()->allocation_memento_map());
3100 Add<HStoreNamedField>(
3102 HObjectAccess::ForAllocationMementoSite(),
3104 if (FLAG_allocation_site_pretenuring) {
3105 HValue* memento_create_count = Add<HLoadNamedField>(
3106 allocation_site, static_cast<HValue*>(NULL),
3107 HObjectAccess::ForAllocationSiteOffset(
3108 AllocationSite::kPretenureCreateCountOffset));
3109 memento_create_count = AddUncasted<HAdd>(
3110 memento_create_count, graph()->GetConstant1());
3111 // This smi value is reset to zero after every gc, overflow isn't a problem
3112 // since the counter is bounded by the new space size.
3113 memento_create_count->ClearFlag(HValue::kCanOverflow);
3114 Add<HStoreNamedField>(
3115 allocation_site, HObjectAccess::ForAllocationSiteOffset(
3116 AllocationSite::kPretenureCreateCountOffset), memento_create_count);
3121 HInstruction* HGraphBuilder::BuildGetNativeContext(HValue* closure) {
3122 // Get the global context, then the native context
3123 HInstruction* context =
3124 Add<HLoadNamedField>(closure, static_cast<HValue*>(NULL),
3125 HObjectAccess::ForFunctionContextPointer());
3126 HInstruction* global_object = Add<HLoadNamedField>(
3127 context, static_cast<HValue*>(NULL),
3128 HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
3129 HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(
3130 GlobalObject::kNativeContextOffset);
3131 return Add<HLoadNamedField>(
3132 global_object, static_cast<HValue*>(NULL), access);
3136 HInstruction* HGraphBuilder::BuildGetNativeContext() {
3137 // Get the global context, then the native context
3138 HValue* global_object = Add<HLoadNamedField>(
3139 context(), static_cast<HValue*>(NULL),
3140 HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
3141 return Add<HLoadNamedField>(
3142 global_object, static_cast<HValue*>(NULL),
3143 HObjectAccess::ForObservableJSObjectOffset(
3144 GlobalObject::kNativeContextOffset));
3148 HInstruction* HGraphBuilder::BuildGetArrayFunction() {
3149 HInstruction* native_context = BuildGetNativeContext();
3150 HInstruction* index =
3151 Add<HConstant>(static_cast<int32_t>(Context::ARRAY_FUNCTION_INDEX));
3152 return Add<HLoadKeyed>(
3153 native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
// JSArrayBuilder for allocation guided by an allocation site.  The override
// mode can force untracked allocation regardless of the elements kind.
// (Leading initializer-list entries are missing from this chunk.)
HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
                                    HValue* allocation_site_payload,
                                    HValue* constructor_function,
                                    AllocationSiteOverrideMode override_mode) :
        allocation_site_payload_(allocation_site_payload),
        constructor_function_(constructor_function) {
  // A constant payload must actually be an AllocationSite.
  DCHECK(!allocation_site_payload->IsConstant() ||
         HConstant::cast(allocation_site_payload)->handle(
             builder_->isolate())->IsAllocationSite());
  // DISABLE_ALLOCATION_SITES wins over whatever the kind would suggest.
  mode_ = override_mode == DISABLE_ALLOCATION_SITES
      ? DONT_TRACK_ALLOCATION_SITE
      : AllocationSite::GetMode(kind);
// JSArrayBuilder without an allocation site: allocation is never tracked.
// (Leading initializer-list entries are missing from this chunk.)
HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
                                    HValue* constructor_function) :
        mode_(DONT_TRACK_ALLOCATION_SITE),
        allocation_site_payload_(NULL),
        constructor_function_(constructor_function) {
// Produces the map for the array being built: a constant when compiling an
// optimized function, otherwise a load from the constructor function or a
// lookup in the native context's JSArray map table.
// (Several closing/else lines are missing from this chunk.)
HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode() {
  if (!builder()->top_info()->IsStub()) {
    // A constant map is fine.
    Handle<Map> map(builder()->isolate()->get_initial_js_array_map(kind_),
                    builder()->isolate());
    return builder()->Add<HConstant>(map);
  if (constructor_function_ != NULL && kind_ == GetInitialFastElementsKind()) {
    // No need for a context lookup if the kind_ matches the initial
    // map, because we can just load the map in that case.
    HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
    return builder()->Add<HLoadNamedField>(
        constructor_function_, static_cast<HValue*>(NULL), access);
  // TODO(mvstanton): we should always have a constructor function if we
  // are creating a stub.
  HInstruction* native_context = constructor_function_ != NULL
      ? builder()->BuildGetNativeContext(constructor_function_)
      : builder()->BuildGetNativeContext();
  HInstruction* index = builder()->Add<HConstant>(
      static_cast<int32_t>(Context::JS_ARRAY_MAPS_INDEX));
  // The native context keeps one JSArray map per elements kind.
  HInstruction* map_array = builder()->Add<HLoadKeyed>(
      native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
  HInstruction* kind_index = builder()->Add<HConstant>(kind_);
  return builder()->Add<HLoadKeyed>(
      map_array, kind_index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
3221 HValue* HGraphBuilder::JSArrayBuilder::EmitInternalMapCode() {
3222 // Find the map near the constructor function
3223 HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
3224 return builder()->Add<HLoadNamedField>(
3225 constructor_function_, static_cast<HValue*>(NULL), access);
// Allocates a JSArray with the default initial capacity and length zero.
// (A middle argument line of the call below is missing from this chunk.)
HAllocate* HGraphBuilder::JSArrayBuilder::AllocateEmptyArray() {
  HConstant* capacity = builder()->Add<HConstant>(initial_capacity());
  return AllocateArray(capacity,
                       builder()->graph()->GetConstant0());
// Overload taking the capacity upper bound as an HConstant; delegates to the
// int-upper-bound overload.  (The first parameter line and the call's tail
// are missing from this chunk.)
HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray(
    HConstant* capacity_upper_bound,
    HValue* length_field,
    FillMode fill_mode) {
  return AllocateArray(capacity,
                       capacity_upper_bound->GetInteger32Constant(),
// Overload taking a plain int capacity upper bound; allocates the array and
// records an upper bound on the elements size for allocation folding.
// (The first parameter line and the function tail are missing.)
HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray(
    int capacity_upper_bound,
    HValue* length_field,
    FillMode fill_mode) {
  // Use the exact size when the capacity is a constant, otherwise derive a
  // bound from capacity_upper_bound.
  HConstant* elememts_size_upper_bound = capacity->IsInteger32Constant()
      ? HConstant::cast(capacity)
      : builder()->EstablishElementsAllocationSize(kind_, capacity_upper_bound);
  HAllocate* array = AllocateArray(capacity, length_field, fill_mode);
  if (!elements_location_->has_size_upper_bound()) {
    elements_location_->set_size_upper_bound(elememts_size_upper_bound);
// Core allocation path: forces smi representation on capacity and length,
// allocates the JSArray object and its elements store, fills in the header,
// and optionally fills the elements with the hole.
// (The first parameter line and several structural lines are missing.)
HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray(
    HValue* length_field,
    FillMode fill_mode) {
  // These HForceRepresentations are because we store these as fields in the
  // objects we construct, and an int32-to-smi HChange could deopt. Accept
  // the deopt possibility now, before allocation occurs.
  builder()->AddUncasted<HForceRepresentation>(capacity,
                                               Representation::Smi());
  builder()->AddUncasted<HForceRepresentation>(length_field,
                                               Representation::Smi());
  // Generate size calculation code here in order to make it dominate
  // the JSArray allocation.
  HValue* elements_size =
      builder()->BuildCalculateElementsSize(kind_, capacity);
  // Allocate (dealing with failure appropriately)
  HAllocate* array_object = builder()->AllocateJSArrayObject(mode_);
  // Fill in the fields: map, properties, length
  if (allocation_site_payload_ == NULL) {
    map = EmitInternalMapCode();
    map = EmitMapCode();
  builder()->BuildJSArrayHeader(array_object,
                                NULL, // set elements to empty fixed array
                                allocation_site_payload_,
  // Allocate and initialize the elements
  elements_location_ = builder()->BuildAllocateElements(kind_, elements_size);
  builder()->BuildInitializeElementsHeader(elements_location_, kind_, capacity);
  // Wire the freshly allocated elements store into the array.
  builder()->Add<HStoreNamedField>(
      array_object, HObjectAccess::ForElementsPointer(), elements_location_);
  if (fill_mode == FILL_WITH_HOLE) {
    builder()->BuildFillElementsWithHole(elements_location_, kind_,
                                         graph()->GetConstant0(), capacity);
  return array_object;
3322 HValue* HGraphBuilder::AddLoadJSBuiltin(Builtins::JavaScript builtin) {
3323 HValue* global_object = Add<HLoadNamedField>(
3324 context(), static_cast<HValue*>(NULL),
3325 HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
3326 HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(
3327 GlobalObject::kBuiltinsOffset);
3328 HValue* builtins = Add<HLoadNamedField>(
3329 global_object, static_cast<HValue*>(NULL), access);
3330 HObjectAccess function_access = HObjectAccess::ForObservableJSObjectOffset(
3331 JSBuiltinsObject::OffsetOfFunctionWithId(builtin));
3332 return Add<HLoadNamedField>(
3333 builtins, static_cast<HValue*>(NULL), function_access);
// Sets up the optimizing graph builder for |info|.  (Several initializer
// list entries are missing from this chunk.)
HOptimizedGraphBuilder::HOptimizedGraphBuilder(CompilationInfo* info)
    : HGraphBuilder(info),
      function_state_(NULL),
      initial_function_state_(this, info, NORMAL_RETURN, 0),
      globals_(10, info->zone()),
      osr_(new(info->zone()) HOsrBuilder(this)) {
  // This is not initialized in the initializer list because the
  // constructor for the initial state relies on function_state_ == NULL
  // to know it's the initial state.
  function_state_= &initial_function_state_;
  InitializeAstVisitor(info->zone());
  if (FLAG_hydrogen_track_positions) {
    SetSourcePosition(info->shared_info()->start_position());
3357 HBasicBlock* HOptimizedGraphBuilder::CreateJoin(HBasicBlock* first,
3358 HBasicBlock* second,
3359 BailoutId join_id) {
3360 if (first == NULL) {
3362 } else if (second == NULL) {
3365 HBasicBlock* join_block = graph()->CreateBasicBlock();
3366 Goto(first, join_block);
3367 Goto(second, join_block);
3368 join_block->SetJoinId(join_id);
3374 HBasicBlock* HOptimizedGraphBuilder::JoinContinue(IterationStatement* statement,
3375 HBasicBlock* exit_block,
3376 HBasicBlock* continue_block) {
3377 if (continue_block != NULL) {
3378 if (exit_block != NULL) Goto(exit_block, continue_block);
3379 continue_block->SetJoinId(statement->ContinueId());
3380 return continue_block;
3386 HBasicBlock* HOptimizedGraphBuilder::CreateLoop(IterationStatement* statement,
3387 HBasicBlock* loop_entry,
3388 HBasicBlock* body_exit,
3389 HBasicBlock* loop_successor,
3390 HBasicBlock* break_block) {
3391 if (body_exit != NULL) Goto(body_exit, loop_entry);
3392 loop_entry->PostProcessLoopHeader(statement);
3393 if (break_block != NULL) {
3394 if (loop_successor != NULL) Goto(loop_successor, break_block);
3395 break_block->SetJoinId(statement->ExitId());
3398 return loop_successor;
3402 // Build a new loop header block and set it as the current block.
3403 HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry() {
3404 HBasicBlock* loop_entry = CreateLoopHeaderBlock();
3406 set_current_block(loop_entry);
3411 HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry(
3412 IterationStatement* statement) {
3413 HBasicBlock* loop_entry = osr()->HasOsrEntryAt(statement)
3414 ? osr()->BuildOsrLoopEntry(statement)
// Terminates this block with a graph-exiting control instruction.
// (The tail of this function is missing from this chunk.)
void HBasicBlock::FinishExit(HControlInstruction* instruction,
                             HSourcePosition position) {
  Finish(instruction, position);
3427 std::ostream& operator<<(std::ostream& os, const HBasicBlock& b) {
3428 return os << "B" << b.block_id();
// Constructs the graph for a compilation.  Stubs size the start environment
// from the call interface descriptor; functions build it from their scope
// and closure.  (Several initializer-list entries are missing.)
HGraph::HGraph(CompilationInfo* info)
    : isolate_(info->isolate()),
      blocks_(8, info->zone()),
      values_(16, info->zone()),
      uint32_instructions_(NULL),
      zone_(info->zone()),
      is_recursive_(false),
      use_optimistic_licm_(false),
      depends_on_empty_array_proto_elements_(false),
      type_change_checksum_(0),
      maximum_environment_size_(0),
      no_side_effects_scope_count_(0),
      disallow_adding_new_values_(false),
      inlined_functions_(FLAG_hydrogen_track_positions ? 5 : 0, info->zone()),
      inlining_id_to_function_id_(FLAG_hydrogen_track_positions ? 5 : 0,
  if (info->IsStub()) {
    // Stub: parameter count comes from the call interface descriptor.
    CallInterfaceDescriptor descriptor =
        info->code_stub()->GetCallInterfaceDescriptor();
    start_environment_ = new (zone_)
        HEnvironment(zone_, descriptor.GetEnvironmentParameterCount());
    TraceInlinedFunction(info->shared_info(), HSourcePosition::Unknown());
    start_environment_ =
        new(zone_) HEnvironment(NULL, info->scope(), info->closure(), zone_);
  start_environment_->set_ast_id(BailoutId::FunctionEntry());
  entry_block_ = CreateBasicBlock();
  entry_block_->SetInitialEnvironment(start_environment_);
3469 HBasicBlock* HGraph::CreateBasicBlock() {
3470 HBasicBlock* result = new(zone()) HBasicBlock(this);
3471 blocks_.Add(result, zone());
3476 void HGraph::FinalizeUniqueness() {
3477 DisallowHeapAllocation no_gc;
3478 DCHECK(!OptimizingCompilerThread::IsOptimizerThread(isolate()));
3479 for (int i = 0; i < blocks()->length(); ++i) {
3480 for (HInstructionIterator it(blocks()->at(i)); !it.Done(); it.Advance()) {
3481 it.Current()->FinalizeUniqueness();
// Registers |shared| as an inlined function (deduplicated by identity),
// optionally dumping its source to the code tracer, and returns a fresh
// inlining id.  (Several structural lines are missing from this chunk.)
int HGraph::TraceInlinedFunction(
    Handle<SharedFunctionInfo> shared,
    HSourcePosition position) {
  if (!FLAG_hydrogen_track_positions) {
  // Reuse the existing entry when the same SharedFunctionInfo was already
  // recorded.
  for (; id < inlined_functions_.length(); id++) {
    if (inlined_functions_[id].shared().is_identical_to(shared)) {
  if (id == inlined_functions_.length()) {
    inlined_functions_.Add(InlinedFunctionInfo(shared), zone());
    // Dump the function's source for a first-time entry, if available.
    if (!shared->script()->IsUndefined()) {
      Handle<Script> script(Script::cast(shared->script()));
      if (!script->source()->IsUndefined()) {
        CodeTracer::Scope tracing_scopex(isolate()->GetCodeTracer());
        OFStream os(tracing_scopex.file());
        os << "--- FUNCTION SOURCE (" << shared->DebugName()->ToCString().get()
           << ") id{" << info()->optimization_id() << "," << id << "} ---\n";
        StringCharacterStream stream(String::cast(script->source()),
                                     shared->start_position());
        // fun->end_position() points to the last character in the stream. We
        // need to compensate by adding one to calculate the length.
            shared->end_position() - shared->start_position() + 1;
        for (int i = 0; i < source_len; i++) {
          if (stream.HasMore()) {
            os << AsReversiblyEscapedUC16(stream.GetNext());
        os << "\n--- END ---\n";
  // Every call gets a new inlining id, even for a repeated function.
  int inline_id = inlining_id_to_function_id_.length();
  inlining_id_to_function_id_.Add(id, zone());
  if (inline_id != 0) {
    CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "INLINE (" << shared->DebugName()->ToCString().get() << ") id{"
       << info()->optimization_id() << "," << id << "} AS " << inline_id
       << " AT " << position << std::endl;
// Converts a hydrogen source position to an absolute script position by
// offsetting with the inlined function's start position.
// (The early-return body and closing braces are missing from this chunk.)
int HGraph::SourcePositionToScriptPosition(HSourcePosition pos) {
  if (!FLAG_hydrogen_track_positions || pos.IsUnknown()) {
  const int id = inlining_id_to_function_id_[pos.inlining_id()];
  return inlined_functions_[id].start_position() + pos.position();
3555 // Block ordering was implemented with two mutually recursive methods,
3556 // HGraph::Postorder and HGraph::PostorderLoopBlocks.
3557 // The recursion could lead to stack overflow so the algorithm has been
3558 // implemented iteratively.
3559 // At a high level the algorithm looks like this:
3561 // Postorder(block, loop_header) : {
3562 // if (block has already been visited or is of another loop) return;
3563 // mark block as visited;
3564 // if (block is a loop header) {
3565 // VisitLoopMembers(block, loop_header);
3566 // VisitSuccessorsOfLoopHeader(block);
3568 // VisitSuccessors(block)
3570 // put block in result list;
3573 // VisitLoopMembers(block, outer_loop_header) {
3574 // foreach (block b in block loop members) {
3575 // VisitSuccessorsOfLoopMember(b, outer_loop_header);
3576 // if (b is loop header) VisitLoopMembers(b);
3580 // VisitSuccessorsOfLoopMember(block, outer_loop_header) {
3581 // foreach (block b in block successors) Postorder(b, outer_loop_header)
3584 // VisitSuccessorsOfLoopHeader(block) {
3585 // foreach (block b in block successors) Postorder(b, block)
3588 // VisitSuccessors(block, loop_header) {
3589 // foreach (block b in block successors) Postorder(b, loop_header)
// The ordering is started by calling Postorder(entry, NULL).
3594 // Each instance of PostorderProcessor represents the "stack frame" of the
3595 // recursion, and particularly keeps the state of the loop (iteration) of the
3596 // "Visit..." function it represents.
3597 // To recycle memory we keep all the frames in a double linked list but
3598 // this means that we cannot use constructors to initialize the frames.
// Iterative implementation of the recursive postorder walk described in the
// comment block above.  Each instance is one recycled "stack frame" of the
// conceptual recursion, linked into a doubly-linked list via father_/child_.
// (Several structural lines of this class are missing from this chunk;
// comments annotate the visible code only.)
class PostorderProcessor : public ZoneObject {
  // Back link (towards the stack bottom).
  PostorderProcessor* parent() {return father_; }
  // Forward link (towards the stack top).
  PostorderProcessor* child() {return child_; }
  HBasicBlock* block() { return block_; }
  HLoopInformation* loop() { return loop_; }
  HBasicBlock* loop_header() { return loop_header_; }
  // Creates the bottom-most frame, set up to visit |block| (the graph entry).
  static PostorderProcessor* CreateEntryProcessor(Zone* zone,
                                                  HBasicBlock* block) {
    PostorderProcessor* result = new(zone) PostorderProcessor(NULL);
    return result->SetupSuccessors(zone, block, NULL);
  }
  // Performs one unit of work and returns the frame to continue from,
  // backtracking when the forward step made no progress.
  PostorderProcessor* PerformStep(Zone* zone,
                                  ZoneList<HBasicBlock*>* order) {
    PostorderProcessor* next =
        PerformNonBacktrackingStep(zone, order);
      return Backtrack(zone, order);
  // Frames are created lazily by Push(); the constructor only links them.
  explicit PostorderProcessor(PostorderProcessor* father)
      : father_(father), child_(NULL), successor_iterator(NULL) { }
  // Each enum value states the cycle whose state is kept by this instance.
    SUCCESSORS_OF_LOOP_HEADER,
    SUCCESSORS_OF_LOOP_MEMBER
  // Each "Setup..." method is like a constructor for a cycle state.
  PostorderProcessor* SetupSuccessors(Zone* zone,
                                      HBasicBlock* loop_header) {
    // Blocks already ordered or belonging to another loop are skipped.
    if (block == NULL || block->IsOrdered() ||
        block->parent_loop_header() != loop_header) {
      loop_header_ = NULL;
      block->MarkAsOrdered();
      if (block->IsLoopHeader()) {
        kind_ = SUCCESSORS_OF_LOOP_HEADER;
        loop_header_ = block;
        InitializeSuccessors();
        PostorderProcessor* result = Push(zone);
        // A loop header also starts a LOOP_MEMBERS cycle in a new frame.
        return result->SetupLoopMembers(zone, block, block->loop_information(),
        DCHECK(block->IsFinished());
        loop_header_ = loop_header;
        InitializeSuccessors();
  PostorderProcessor* SetupLoopMembers(Zone* zone,
                                       HLoopInformation* loop,
                                       HBasicBlock* loop_header) {
    kind_ = LOOP_MEMBERS;
    loop_header_ = loop_header;
    InitializeLoopMembers();
  PostorderProcessor* SetupSuccessorsOfLoopMember(
      HLoopInformation* loop,
      HBasicBlock* loop_header) {
    kind_ = SUCCESSORS_OF_LOOP_MEMBER;
    loop_header_ = loop_header;
    InitializeSuccessors();
  // This method "allocates" a new stack frame.
  PostorderProcessor* Push(Zone* zone) {
    if (child_ == NULL) {
      child_ = new(zone) PostorderProcessor(this);
  // Appends block_ to the final order; its non-loop-header successors must
  // already have been emitted.
  void ClosePostorder(ZoneList<HBasicBlock*>* order, Zone* zone) {
    DCHECK(block_->end()->FirstSuccessor() == NULL ||
           order->Contains(block_->end()->FirstSuccessor()) ||
           block_->end()->FirstSuccessor()->IsLoopHeader());
    DCHECK(block_->end()->SecondSuccessor() == NULL ||
           order->Contains(block_->end()->SecondSuccessor()) ||
           block_->end()->SecondSuccessor()->IsLoopHeader());
    order->Add(block_, zone);
  // This method is the basic block to walk up the stack.
  PostorderProcessor* Pop(Zone* zone,
                          ZoneList<HBasicBlock*>* order) {
      case SUCCESSORS_OF_LOOP_HEADER:
        ClosePostorder(order, zone);
      case SUCCESSORS_OF_LOOP_MEMBER:
        if (block()->IsLoopHeader() && block() != loop_->loop_header()) {
          // In this case we need to perform a LOOP_MEMBERS cycle so we
          // initialize it and return this instead of father.
          return SetupLoopMembers(zone, block(),
                                  block()->loop_information(), loop_header_);
  // Walks up the stack.
  PostorderProcessor* Backtrack(Zone* zone,
                                ZoneList<HBasicBlock*>* order) {
    PostorderProcessor* parent = Pop(zone, order);
    while (parent != NULL) {
      PostorderProcessor* next =
          parent->PerformNonBacktrackingStep(zone, order);
      parent = parent->Pop(zone, order);
  // Advances whatever cycle this frame represents; returns the pushed child
  // frame to descend into, or NULL when the cycle is exhausted.
  PostorderProcessor* PerformNonBacktrackingStep(
      ZoneList<HBasicBlock*>* order) {
    HBasicBlock* next_block;
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessors(zone, next_block, loop_header_);
      case SUCCESSORS_OF_LOOP_HEADER:
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessors(zone, next_block, block());
        next_block = AdvanceLoopMembers();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessorsOfLoopMember(next_block,
                                                     loop_, loop_header_);
      case SUCCESSORS_OF_LOOP_MEMBER:
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessors(zone, next_block, loop_header_);
  // The following two methods implement a "foreach b in successors" cycle.
  void InitializeSuccessors() {
    successor_iterator = HSuccessorIterator(block_->end());
  HBasicBlock* AdvanceSuccessors() {
    if (!successor_iterator.Done()) {
      HBasicBlock* result = successor_iterator.Current();
      successor_iterator.Advance();
  // The following two methods implement a "foreach b in loop members" cycle.
  void InitializeLoopMembers() {
    loop_length = loop_->blocks()->length();
  HBasicBlock* AdvanceLoopMembers() {
    if (loop_index < loop_length) {
      HBasicBlock* result = loop_->blocks()->at(loop_index);
  PostorderProcessor* father_;
  PostorderProcessor* child_;
  HLoopInformation* loop_;
  HBasicBlock* block_;
  HBasicBlock* loop_header_;
  HSuccessorIterator successor_iterator;
// Orders the graph's blocks via the iterative PostorderProcessor, then
// reverses the resulting list in place and assigns block ids.
// (Several structural lines are missing from this chunk.)
void HGraph::OrderBlocks() {
  CompilationPhase phase("H_Block ordering", info());
  // Initially the blocks must not be ordered.
  for (int i = 0; i < blocks_.length(); ++i) {
    DCHECK(!blocks_[i]->IsOrdered());
  // Drive the iterative postorder walk to completion, starting from the
  // entry block.
  PostorderProcessor* postorder =
      PostorderProcessor::CreateEntryProcessor(zone(), blocks_[0]);
    postorder = postorder->PerformStep(zone(), &blocks_);
  // Now all blocks must be marked as ordered.
  for (int i = 0; i < blocks_.length(); ++i) {
    DCHECK(blocks_[i]->IsOrdered());
  // Reverse block list and assign block IDs.
  for (int i = 0, j = blocks_.length(); --j >= i; ++i) {
    HBasicBlock* bi = blocks_[i];
    HBasicBlock* bj = blocks_[j];
    bi->set_block_id(j);
    bj->set_block_id(i);
3877 void HGraph::AssignDominators() {
3878 HPhase phase("H_Assign dominators", this);
3879 for (int i = 0; i < blocks_.length(); ++i) {
3880 HBasicBlock* block = blocks_[i];
3881 if (block->IsLoopHeader()) {
3882 // Only the first predecessor of a loop header is from outside the loop.
3883 // All others are back edges, and thus cannot dominate the loop header.
3884 block->AssignCommonDominator(block->predecessors()->first());
3885 block->AssignLoopSuccessorDominators();
3887 for (int j = blocks_[i]->predecessors()->length() - 1; j >= 0; --j) {
3888 blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j));
3895 bool HGraph::CheckArgumentsPhiUses() {
3896 int block_count = blocks_.length();
3897 for (int i = 0; i < block_count; ++i) {
3898 for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
3899 HPhi* phi = blocks_[i]->phis()->at(j);
3900 // We don't support phi uses of arguments for now.
3901 if (phi->CheckFlag(HValue::kIsArguments)) return false;
3908 bool HGraph::CheckConstPhiUses() {
3909 int block_count = blocks_.length();
3910 for (int i = 0; i < block_count; ++i) {
3911 for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
3912 HPhi* phi = blocks_[i]->phis()->at(j);
3913 // Check for the hole value (from an uninitialized const).
3914 for (int k = 0; k < phi->OperandCount(); k++) {
3915 if (phi->OperandAt(k) == GetConstantHole()) return false;
3923 void HGraph::CollectPhis() {
3924 int block_count = blocks_.length();
3925 phi_list_ = new(zone()) ZoneList<HPhi*>(block_count, zone());
3926 for (int i = 0; i < block_count; ++i) {
3927 for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
3928 HPhi* phi = blocks_[i]->phis()->at(j);
3929 phi_list_->Add(phi, zone());
// Implementation of utility class to encapsulate the translation state for
// a (possibly inlined) function.
// (The last parameter line and leading initializer entries are missing from
// this chunk.)
FunctionState::FunctionState(HOptimizedGraphBuilder* owner,
                             CompilationInfo* info,
                             InliningKind inlining_kind,
      compilation_info_(info),
      call_context_(NULL),
      inlining_kind_(inlining_kind),
      function_return_(NULL),
      test_context_(NULL),
      arguments_object_(NULL),
      arguments_elements_(NULL),
      inlining_id_(inlining_id),
      outer_source_position_(HSourcePosition::Unknown()),
      outer_(owner->function_state()) {
  if (outer_ != NULL) {
    // State for an inline function.
    if (owner->ast_context()->IsTest()) {
      // Inlining into a test context: returns are routed into fresh
      // true/false target blocks instead of a single return block.
      HBasicBlock* if_true = owner->graph()->CreateBasicBlock();
      HBasicBlock* if_false = owner->graph()->CreateBasicBlock();
      if_true->MarkAsInlineReturnTarget(owner->current_block());
      if_false->MarkAsInlineReturnTarget(owner->current_block());
      TestContext* outer_test_context = TestContext::cast(owner->ast_context());
      Expression* cond = outer_test_context->condition();
      // The AstContext constructor pushed on the context stack. This newed
      // instance is the reason that AstContext can't be BASE_EMBEDDED.
      test_context_ = new TestContext(owner, cond, if_true, if_false);
      function_return_ = owner->graph()->CreateBasicBlock();
      function_return()->MarkAsInlineReturnTarget(owner->current_block());
    // Set this after possibly allocating a new TestContext above.
    call_context_ = owner->ast_context();
  // Push on the state stack.
  owner->set_function_state(this);
  if (FLAG_hydrogen_track_positions) {
    outer_source_position_ = owner->source_position();
    owner->EnterInlinedSource(
        info->shared_info()->start_position(),
    owner->SetSourcePosition(info->shared_info()->start_position());
// Pops this state off the owner's function-state stack and restores the
// outer function's source position tracking.
// (Some lines, including the guard around the tracking block, are missing
// from this chunk.)
FunctionState::~FunctionState() {
  delete test_context_;
  owner_->set_function_state(outer_);
  if (FLAG_hydrogen_track_positions) {
    owner_->set_source_position(outer_source_position_);
    owner_->EnterInlinedSource(
        outer_->compilation_info()->shared_info()->start_position(),
        outer_->inlining_id());
// Implementation of utility classes to represent an expression's context in
// the AST.
// (Leading initializer-list entries are missing from this chunk.)
AstContext::AstContext(HOptimizedGraphBuilder* owner, Expression::Context kind)
      outer_(owner->ast_context()),
      for_typeof_(false) {
  owner->set_ast_context(this); // Push.
  // Remember the environment length so destructors can verify balance.
  DCHECK(owner->environment()->frame_type() == JS_FUNCTION);
  original_length_ = owner->environment()->length();
4014 AstContext::~AstContext() {
4015 owner_->set_ast_context(outer_); // Pop.
4019 EffectContext::~EffectContext() {
4020 DCHECK(owner()->HasStackOverflow() ||
4021 owner()->current_block() == NULL ||
4022 (owner()->environment()->length() == original_length_ &&
4023 owner()->environment()->frame_type() == JS_FUNCTION));
4027 ValueContext::~ValueContext() {
4028 DCHECK(owner()->HasStackOverflow() ||
4029 owner()->current_block() == NULL ||
4030 (owner()->environment()->length() == original_length_ + 1 &&
4031 owner()->environment()->frame_type() == JS_FUNCTION));
4035 void EffectContext::ReturnValue(HValue* value) {
4036 // The value is simply ignored.
4040 void ValueContext::ReturnValue(HValue* value) {
4041 // The value is tracked in the bailout environment, and communicated
4042 // through the environment as the result of the expression.
4043 if (!arguments_allowed() && value->CheckFlag(HValue::kIsArguments)) {
4044 owner()->Bailout(kBadValueContextForArgumentsValue);
4046 owner()->Push(value);
4050 void TestContext::ReturnValue(HValue* value) {
4055 void EffectContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
4056 DCHECK(!instr->IsControlInstruction());
4057 owner()->AddInstruction(instr);
4058 if (instr->HasObservableSideEffects()) {
4059 owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
4064 void EffectContext::ReturnControl(HControlInstruction* instr,
4066 DCHECK(!instr->HasObservableSideEffects());
4067 HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
4068 HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
4069 instr->SetSuccessorAt(0, empty_true);
4070 instr->SetSuccessorAt(1, empty_false);
4071 owner()->FinishCurrentBlock(instr);
4072 HBasicBlock* join = owner()->CreateJoin(empty_true, empty_false, ast_id);
4073 owner()->set_current_block(join);
4077 void EffectContext::ReturnContinuation(HIfContinuation* continuation,
4079 HBasicBlock* true_branch = NULL;
4080 HBasicBlock* false_branch = NULL;
4081 continuation->Continue(&true_branch, &false_branch);
4082 if (!continuation->IsTrueReachable()) {
4083 owner()->set_current_block(false_branch);
4084 } else if (!continuation->IsFalseReachable()) {
4085 owner()->set_current_block(true_branch);
4087 HBasicBlock* join = owner()->CreateJoin(true_branch, false_branch, ast_id);
4088 owner()->set_current_block(join);
4093 void ValueContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
4094 DCHECK(!instr->IsControlInstruction());
4095 if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
4096 return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
4098 owner()->AddInstruction(instr);
4099 owner()->Push(instr);
4100 if (instr->HasObservableSideEffects()) {
4101 owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
4106 void ValueContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
4107 DCHECK(!instr->HasObservableSideEffects());
4108 if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
4109 return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
4111 HBasicBlock* materialize_false = owner()->graph()->CreateBasicBlock();
4112 HBasicBlock* materialize_true = owner()->graph()->CreateBasicBlock();
4113 instr->SetSuccessorAt(0, materialize_true);
4114 instr->SetSuccessorAt(1, materialize_false);
4115 owner()->FinishCurrentBlock(instr);
4116 owner()->set_current_block(materialize_true);
4117 owner()->Push(owner()->graph()->GetConstantTrue());
4118 owner()->set_current_block(materialize_false);
4119 owner()->Push(owner()->graph()->GetConstantFalse());
4121 owner()->CreateJoin(materialize_true, materialize_false, ast_id);
4122 owner()->set_current_block(join);
// NOTE(review): this chunk is a line-numbered paste; several original lines
// (closing braces, a "HBasicBlock* join = ..." declaration before the
// CreateJoin call, the second parameter line of the signatures) are elided.
// Code is left byte-identical; only comments are added.
//
// ValueContext::ReturnContinuation: materialize the boolean result of an
// if-continuation as a value — push true/false constants in the reachable
// arms and join them (second parameter is BailoutId ast_id; line elided).
4126 void ValueContext::ReturnContinuation(HIfContinuation* continuation,
4128 HBasicBlock* materialize_true = NULL;
4129 HBasicBlock* materialize_false = NULL;
4130 continuation->Continue(&materialize_true, &materialize_false);
4131 if (continuation->IsTrueReachable()) {
4132 owner()->set_current_block(materialize_true);
4133 owner()->Push(owner()->graph()->GetConstantTrue());
4134 owner()->set_current_block(materialize_true);
4136 if (continuation->IsFalseReachable()) {
4137 owner()->set_current_block(materialize_false);
4138 owner()->Push(owner()->graph()->GetConstantFalse());
4139 owner()->set_current_block(materialize_false);
4141 if (continuation->TrueAndFalseReachable()) {
// "join" is declared on an elided line (presumably the CreateJoin result).
4143 owner()->CreateJoin(materialize_true, materialize_false, ast_id);
4144 owner()->set_current_block(join);
// TestContext::ReturnInstruction: add the instruction, then branch on it.
// The temporary push/simulate keeps the environment consistent for deopt.
4149 void TestContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
4150 DCHECK(!instr->IsControlInstruction());
4151 HOptimizedGraphBuilder* builder = owner();
4152 builder->AddInstruction(instr);
4153 // We expect a simulate after every expression with side effects, though
4154 // this one isn't actually needed (and wouldn't work if it were targeted).
4155 if (instr->HasObservableSideEffects()) {
4156 builder->Push(instr);
4157 builder->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
// TestContext::ReturnControl: wire the control instruction's two successors
// through empty blocks (edge-split form) into the test's target blocks.
4164 void TestContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
4165 DCHECK(!instr->HasObservableSideEffects());
4166 HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
4167 HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
4168 instr->SetSuccessorAt(0, empty_true);
4169 instr->SetSuccessorAt(1, empty_false);
4170 owner()->FinishCurrentBlock(instr);
4171 owner()->Goto(empty_true, if_true(), owner()->function_state());
4172 owner()->Goto(empty_false, if_false(), owner()->function_state());
// Control flow continues in the test's targets; no current block remains.
4173 owner()->set_current_block(NULL);
// TestContext::ReturnContinuation: forward each reachable continuation arm
// to the corresponding test target (second signature line elided).
4177 void TestContext::ReturnContinuation(HIfContinuation* continuation,
4179 HBasicBlock* true_branch = NULL;
4180 HBasicBlock* false_branch = NULL;
4181 continuation->Continue(&true_branch, &false_branch);
4182 if (continuation->IsTrueReachable()) {
4183 owner()->Goto(true_branch, if_true(), owner()->function_state());
4185 if (continuation->IsFalseReachable()) {
4186 owner()->Goto(false_branch, if_false(), owner()->function_state());
4188 owner()->set_current_block(NULL);
// TestContext::BuildBranch: branch on a materialized value. Bails out for
// the arguments object, which has no boolean representation here.
4192 void TestContext::BuildBranch(HValue* value) {
4193 // We expect the graph to be in edge-split form: there is no edge that
4194 // connects a branch node to a join node. We conservatively ensure that
4195 // property by always adding an empty block on the outgoing edges of this
4197 HOptimizedGraphBuilder* builder = owner();
4198 if (value != NULL && value->CheckFlag(HValue::kIsArguments)) {
4199 builder->Bailout(kArgumentsObjectValueInATestContext);
4201 ToBooleanStub::Types expected(condition()->to_boolean_types());
4202 ReturnControl(owner()->New<HBranch>(value, expected), BailoutId::None());
// NOTE(review): numbered paste; macro bodies and several closing lines are
// elided (the macros presumably expand to do { call; ... } while (false)
// — confirm against the full file). Code kept byte-identical.
4206 // HOptimizedGraphBuilder infrastructure for bailing out and checking bailouts.
// CHECK_BAILOUT: evaluate `call`, then return early if it overflowed the
// (compiler) stack — used where a dead current block is acceptable.
4207 #define CHECK_BAILOUT(call) \
4210 if (HasStackOverflow()) return; \
// CHECK_ALIVE: like CHECK_BAILOUT, but also stop when control flow has
// ended (current_block() == NULL).
4214 #define CHECK_ALIVE(call) \
4217 if (HasStackOverflow() || current_block() == NULL) return; \
// CHECK_ALIVE_OR_RETURN: CHECK_ALIVE for functions returning a value.
4221 #define CHECK_ALIVE_OR_RETURN(call, value) \
4224 if (HasStackOverflow() || current_block() == NULL) return value; \
// Abort optimization with the given reason (SetStackOverflow call elided).
4228 void HOptimizedGraphBuilder::Bailout(BailoutReason reason) {
4229 current_info()->AbortOptimization(reason);
// Visit an expression for its side effects only; result is discarded.
4234 void HOptimizedGraphBuilder::VisitForEffect(Expression* expr) {
4235 EffectContext for_effect(this);
// Visit an expression for its value, pushed on the environment stack.
4240 void HOptimizedGraphBuilder::VisitForValue(Expression* expr,
4241 ArgumentsAllowedFlag flag) {
4242 ValueContext for_value(this, flag);
// Visit for value in typeof context (undeclared globals must not throw).
4247 void HOptimizedGraphBuilder::VisitForTypeOf(Expression* expr) {
4248 ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
4249 for_value.set_for_typeof(true);
// Visit an expression as a branch condition targeting the given blocks.
4254 void HOptimizedGraphBuilder::VisitForControl(Expression* expr,
4255 HBasicBlock* true_block,
4256 HBasicBlock* false_block) {
4257 TestContext for_test(this, expr, true_block, false_block);
// Visit a list of expressions for value, stopping on bailout/dead code.
4262 void HOptimizedGraphBuilder::VisitExpressions(
4263 ZoneList<Expression*>* exprs) {
4264 for (int i = 0; i < exprs->length(); ++i) {
4265 CHECK_ALIVE(VisitForValue(exprs->at(i)));
// Build the Hydrogen graph for the function being compiled. Returns false
// on (compiler) stack overflow. Bails out early for constructs Crankshaft
// does not handle (generators, illegal redeclarations, direct eval).
// NOTE(review): numbered paste; the `return false;` lines after each
// Bailout and several closing braces are elided — confirm in the full file.
4270 bool HOptimizedGraphBuilder::BuildGraph() {
4271 if (current_info()->function()->is_generator()) {
4272 Bailout(kFunctionIsAGenerator);
4275 Scope* scope = current_info()->scope();
4276 if (scope->HasIllegalRedeclaration()) {
4277 Bailout(kFunctionWithIllegalRedeclaration);
4280 if (scope->calls_eval()) {
4281 Bailout(kFunctionCallsEval);
4286 // Add an edge to the body entry. This is warty: the graph's start
4287 // environment will be used by the Lithium translation as the initial
4288 // environment on graph entry, but it has now been mutated by the
4289 // Hydrogen translation of the instructions in the start block. This
4290 // environment uses values which have not been defined yet. These
4291 // Hydrogen instructions will then be replayed by the Lithium
4292 // translation, so they cannot have an environment effect. The edge to
4293 // the body's entry block (along with some special logic for the start
4294 // block in HInstruction::InsertAfter) seals the start block from
4295 // getting unwanted instructions inserted.
4297 // TODO(kmillikin): Fix this. Stop mutating the initial environment.
4298 // Make the Hydrogen instructions in the initial block into Hydrogen
4299 // values (but not instructions), present in the initial environment and
4300 // not replayed by the Lithium translation.
4301 HEnvironment* initial_env = environment()->CopyWithoutHistory();
4302 HBasicBlock* body_entry = CreateBasicBlock(initial_env);
4304 body_entry->SetJoinId(BailoutId::FunctionEntry());
4305 set_current_block(body_entry);
4307 // Handle implicit declaration of the function name in named function
4308 // expressions before other declarations.
4309 if (scope->is_function_scope() && scope->function() != NULL) {
4310 VisitVariableDeclaration(scope->function());
4312 VisitDeclarations(scope->declarations());
4313 Add<HSimulate>(BailoutId::Declarations());
4315 Add<HStackCheck>(HStackCheck::kFunctionEntry);
4317 VisitStatements(current_info()->function()->body());
4318 if (HasStackOverflow()) return false;
// Fell off the end of the body: implicit `return undefined`.
4320 if (current_block() != NULL) {
4321 Add<HReturn>(graph()->GetConstantUndefined());
4322 set_current_block(NULL);
4325 // If the checksum of the number of type info changes is the same as the
4326 // last time this function was compiled, then this recompile is likely not
4327 // due to missing/inadequate type feedback, but rather too aggressive
4328 // optimization. Disable optimistic LICM in that case.
4329 Handle<Code> unoptimized_code(current_info()->shared_info()->code());
4330 DCHECK(unoptimized_code->kind() == Code::FUNCTION);
4331 Handle<TypeFeedbackInfo> type_info(
4332 TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
4333 int checksum = type_info->own_type_change_checksum();
4334 int composite_checksum = graph()->update_type_change_checksum(checksum);
4335 graph()->set_use_optimistic_licm(
4336 !type_info->matches_inlined_type_change_checksum(composite_checksum));
4337 type_info->set_inlined_type_change_checksum(composite_checksum);
4339 // Perform any necessary OSR-specific cleanups or changes to the graph.
4340 osr()->FinishGraph();
// Run the Hydrogen optimization pipeline over the graph. Sets
// *bailout_reason and (presumably) returns false on unsupported phi uses;
// the `return false;`/`return true;` lines are elided in this paste.
// Pass ordering matters: representation inference before simulate merging,
// uint32 analysis before canonicalization, GVN before the second
// unreachable-block pass.
4346 bool HGraph::Optimize(BailoutReason* bailout_reason) {
4350 // We need to create a HConstant "zero" now so that GVN will fold every
4351 // zero-valued constant in the graph together.
4352 // The constant is needed to make idef-based bounds check work: the pass
4353 // evaluates relations with "zero" and that zero cannot be created after GVN.
4357 // Do a full verify after building the graph and computing dominators.
4361 if (FLAG_analyze_environment_liveness && maximum_environment_size() != 0) {
4362 Run<HEnvironmentLivenessAnalysisPhase>();
4365 if (!CheckConstPhiUses()) {
4366 *bailout_reason = kUnsupportedPhiUseOfConstVariable;
4369 Run<HRedundantPhiEliminationPhase>();
4370 if (!CheckArgumentsPhiUses()) {
4371 *bailout_reason = kUnsupportedPhiUseOfArguments;
4375 // Find and mark unreachable code to simplify optimizations, especially gvn,
4376 // where unreachable code could unnecessarily defeat LICM.
4377 Run<HMarkUnreachableBlocksPhase>();
4379 if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();
4380 if (FLAG_use_escape_analysis) Run<HEscapeAnalysisPhase>();
4382 if (FLAG_load_elimination) Run<HLoadEliminationPhase>();
4386 if (has_osr()) osr()->FinishOsrValues();
4388 Run<HInferRepresentationPhase>();
4390 // Remove HSimulate instructions that have turned out not to be needed
4391 // after all by folding them into the following HSimulate.
4392 // This must happen after inferring representations.
4393 Run<HMergeRemovableSimulatesPhase>();
4395 Run<HMarkDeoptimizeOnUndefinedPhase>();
4396 Run<HRepresentationChangesPhase>();
4398 Run<HInferTypesPhase>();
4400 // Must be performed before canonicalization to ensure that Canonicalize
4401 // will not remove semantically meaningful ToInt32 operations e.g. BIT_OR with
4403 Run<HUint32AnalysisPhase>();
4405 if (FLAG_use_canonicalizing) Run<HCanonicalizePhase>();
4407 if (FLAG_use_gvn) Run<HGlobalValueNumberingPhase>();
4409 if (FLAG_check_elimination) Run<HCheckEliminationPhase>();
4411 if (FLAG_store_elimination) Run<HStoreEliminationPhase>();
4413 Run<HRangeAnalysisPhase>();
4415 Run<HComputeChangeUndefinedToNaN>();
4417 // Eliminate redundant stack checks on backwards branches.
4418 Run<HStackCheckEliminationPhase>();
4420 if (FLAG_array_bounds_checks_elimination) Run<HBoundsCheckEliminationPhase>();
4421 if (FLAG_array_bounds_checks_hoisting) Run<HBoundsCheckHoistingPhase>();
4422 if (FLAG_array_index_dehoisting) Run<HDehoistIndexComputationsPhase>();
4423 if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();
4425 RestoreActualValues();
4427 // Find unreachable code a second time, GVN and other optimizations may have
4428 // made blocks unreachable that were previously reachable.
4429 Run<HMarkUnreachableBlocksPhase>();
// Replace informative-definition instructions (and dead placeholders) with
// the actual values they redefine, so later phases see real operands.
// NOTE(review): numbered paste; closing braces and a few comment lines
// are elided. Code kept byte-identical.
4435 void HGraph::RestoreActualValues() {
4436 HPhase phase("H_Restore actual values", this);
4438 for (int block_index = 0; block_index < blocks()->length(); block_index++) {
4439 HBasicBlock* block = blocks()->at(block_index);
// Phis are never informative definitions — they must already be actual.
4442 for (int i = 0; i < block->phis()->length(); i++) {
4443 HPhi* phi = block->phis()->at(i);
4444 DCHECK(phi->ActualValue() == phi);
4448 for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
4449 HInstruction* instruction = it.Current();
4450 if (instruction->ActualValue() == instruction) continue;
4451 if (instruction->CheckFlag(HValue::kIsDead)) {
4452 // The instruction was marked as deleted but left in the graph
4453 // as a control flow dependency point for subsequent
4455 instruction->DeleteAndReplaceWith(instruction->ActualValue());
4457 DCHECK(instruction->IsInformativeDefinition());
4458 if (instruction->IsPurelyInformativeDefinition()) {
4459 instruction->DeleteAndReplaceWith(instruction->RedefinedOperand());
4461 instruction->ReplaceAllUsesWith(instruction->ActualValue());
// Pop `count` values from the environment and emit a single HPushArguments
// carrying them in original (call) order — hence the pop-then-RemoveLast
// double reversal.
4469 void HOptimizedGraphBuilder::PushArgumentsFromEnvironment(int count) {
4470 ZoneList<HValue*> arguments(count, zone());
4471 for (int i = 0; i < count; ++i) {
4472 arguments.Add(Pop(), zone());
4475 HPushArguments* push_args = New<HPushArguments>();
4476 while (!arguments.is_empty()) {
4477 push_args->AddInput(arguments.RemoveLast());
4479 AddInstruction(push_args);
// Materialize a call's arguments, then hand the call back to the caller
// (the `return call;` line is elided in this paste).
4483 template <class Instruction>
4484 HInstruction* HOptimizedGraphBuilder::PreProcessCall(Instruction* call) {
4485 PushArgumentsFromEnvironment(call->argument_count());
// Initialize the function's environment: bind the context, create the
// HArgumentsObject from the incoming parameters (index 0 is the receiver),
// and initialize remaining specials/locals to undefined. Bails out when
// the arguments object would need to be context-allocated.
// NOTE(review): numbered paste; closing braces and the `i++` line of the
// specials loop are elided. Code kept byte-identical.
4490 void HOptimizedGraphBuilder::SetUpScope(Scope* scope) {
4491 // First special is HContext.
4492 HInstruction* context = Add<HContext>();
4493 environment()->BindContext(context);
4495 // Create an arguments object containing the initial parameters. Set the
4496 // initial values of parameters including "this" having parameter index 0.
4497 DCHECK_EQ(scope->num_parameters() + 1, environment()->parameter_count());
4498 HArgumentsObject* arguments_object =
4499 New<HArgumentsObject>(environment()->parameter_count());
4500 for (int i = 0; i < environment()->parameter_count(); ++i) {
4501 HInstruction* parameter = Add<HParameter>(i);
4502 arguments_object->AddArgument(parameter, zone());
4503 environment()->Bind(i, parameter);
4505 AddInstruction(arguments_object);
4506 graph()->SetArgumentsObject(arguments_object);
4508 HConstant* undefined_constant = graph()->GetConstantUndefined();
4509 // Initialize specials and locals to undefined.
4510 for (int i = environment()->parameter_count() + 1;
4511 i < environment()->length();
4513 environment()->Bind(i, undefined_constant);
4516 // Handle the arguments and arguments shadow variables specially (they do
4517 // not have declarations).
4518 if (scope->arguments() != NULL) {
4519 if (!scope->arguments()->IsStackAllocated()) {
4520 return Bailout(kContextAllocatedArguments);
4523 environment()->Bind(scope->arguments(),
4524 graph()->GetArgumentsObject());
// Map a heap Map to the typing lattice used by the builder.
4529 Type* HOptimizedGraphBuilder::ToType(Handle<Map> map) {
4530 return IC::MapToType<Type>(map, zone());
// Visit a statement list, stopping after any unconditional jump (break,
// continue, return, throw) since subsequent statements are unreachable.
4534 void HOptimizedGraphBuilder::VisitStatements(ZoneList<Statement*>* statements) {
4535 for (int i = 0; i < statements->length(); i++) {
4536 Statement* stmt = statements->at(i);
4537 CHECK_ALIVE(Visit(stmt));
4538 if (stmt->IsJump()) break;
// Translate a Block statement. If the block has its own scope (block-scoped
// declarations), allocate a block context on entry, bind it, and restore
// the previous context on exit. A break out of the block joins at
// break_info's break block.
// NOTE(review): numbered paste; several closing braces and `else` lines
// are elided (e.g. between 4561 and 4563). Code kept byte-identical.
4543 void HOptimizedGraphBuilder::VisitBlock(Block* stmt) {
4544 DCHECK(!HasStackOverflow());
4545 DCHECK(current_block() != NULL);
4546 DCHECK(current_block()->HasPredecessor());
4548 Scope* outer_scope = scope();
4549 Scope* scope = stmt->scope();
4550 BreakAndContinueInfo break_info(stmt, outer_scope);
4552 { BreakAndContinueScope push(&break_info, this);
4553 if (scope != NULL) {
4554 // Load the function object.
4555 Scope* declaration_scope = scope->DeclarationScope();
4556 HInstruction* function;
4557 HValue* outer_context = environment()->context();
4558 if (declaration_scope->is_global_scope() ||
4559 declaration_scope->is_eval_scope()) {
4560 function = new(zone()) HLoadContextSlot(
4561 outer_context, Context::CLOSURE_INDEX, HLoadContextSlot::kNoCheck);
// else branch (elided `} else {`): closure is the current function.
4563 function = New<HThisFunction>();
4565 AddInstruction(function);
4566 // Allocate a block context and store it to the stack frame.
4567 HInstruction* inner_context = Add<HAllocateBlockContext>(
4568 outer_context, function, scope->GetScopeInfo());
4569 HInstruction* instr = Add<HStoreFrameContext>(inner_context);
4570 if (instr->HasObservableSideEffects()) {
4571 AddSimulate(stmt->EntryId(), REMOVABLE_SIMULATE);
4574 environment()->BindContext(inner_context);
4575 VisitDeclarations(scope->declarations());
4576 AddSimulate(stmt->DeclsId(), REMOVABLE_SIMULATE);
4578 CHECK_BAILOUT(VisitStatements(stmt->statements()));
4580 set_scope(outer_scope);
// On normal exit from a scoped block, pop back to the previous context.
4581 if (scope != NULL && current_block() != NULL) {
4582 HValue* inner_context = environment()->context();
4583 HValue* outer_context = Add<HLoadNamedField>(
4584 inner_context, static_cast<HValue*>(NULL),
4585 HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
4587 HInstruction* instr = Add<HStoreFrameContext>(outer_context);
4588 if (instr->HasObservableSideEffects()) {
4589 AddSimulate(stmt->ExitId(), REMOVABLE_SIMULATE);
4591 environment()->BindContext(outer_context);
4593 HBasicBlock* break_block = break_info.break_block();
4594 if (break_block != NULL) {
4595 if (current_block() != NULL) Goto(break_block);
4596 break_block->SetJoinId(stmt->ExitId());
4597 set_current_block(break_block);
// Expression statement: evaluate for side effects only.
4602 void HOptimizedGraphBuilder::VisitExpressionStatement(
4603 ExpressionStatement* stmt) {
4604 DCHECK(!HasStackOverflow());
4605 DCHECK(current_block() != NULL);
4606 DCHECK(current_block()->HasPredecessor());
4607 VisitForEffect(stmt->expression());
// Empty statement: nothing to emit (body elided in this paste).
4611 void HOptimizedGraphBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
4612 DCHECK(!HasStackOverflow());
4613 DCHECK(current_block() != NULL);
4614 DCHECK(current_block()->HasPredecessor());
// If statement: fold statically-known conditions, otherwise build a
// two-way branch and join the arms at the if's id.
// NOTE(review): `else` lines and closing braces are elided in this paste.
4618 void HOptimizedGraphBuilder::VisitIfStatement(IfStatement* stmt) {
4619 DCHECK(!HasStackOverflow());
4620 DCHECK(current_block() != NULL);
4621 DCHECK(current_block()->HasPredecessor());
4622 if (stmt->condition()->ToBooleanIsTrue()) {
4623 Add<HSimulate>(stmt->ThenId());
4624 Visit(stmt->then_statement());
4625 } else if (stmt->condition()->ToBooleanIsFalse()) {
4626 Add<HSimulate>(stmt->ElseId());
4627 Visit(stmt->else_statement());
// General case (elided `} else {`): branch on the condition.
4629 HBasicBlock* cond_true = graph()->CreateBasicBlock();
4630 HBasicBlock* cond_false = graph()->CreateBasicBlock();
4631 CHECK_BAILOUT(VisitForControl(stmt->condition(), cond_true, cond_false));
// Each arm is visited only if its entry is reachable; the arm variable is
// reused to hold the arm's exit block for the final join.
4633 if (cond_true->HasPredecessor()) {
4634 cond_true->SetJoinId(stmt->ThenId());
4635 set_current_block(cond_true);
4636 CHECK_BAILOUT(Visit(stmt->then_statement()));
4637 cond_true = current_block();
4642 if (cond_false->HasPredecessor()) {
4643 cond_false->SetJoinId(stmt->ElseId());
4644 set_current_block(cond_false);
4645 CHECK_BAILOUT(Visit(stmt->else_statement()));
4646 cond_false = current_block();
4651 HBasicBlock* join = CreateJoin(cond_true, cond_false, stmt->IfId());
4652 set_current_block(join);
// Find the break/continue target block for `stmt`, walking the enclosing
// BreakAndContinueScope chain and accumulating the number of stack values
// to drop (*drop_extra). Lazily creates the target block the first time it
// is requested. The missing signature lines presumably declare the
// BreakType, Scope** and int* out-parameters — confirm in the full file.
4657 HBasicBlock* HOptimizedGraphBuilder::BreakAndContinueScope::Get(
4658 BreakableStatement* stmt,
4663 BreakAndContinueScope* current = this;
4664 while (current != NULL && current->info()->target() != stmt) {
4665 *drop_extra += current->info()->drop_extra();
4666 current = current->next();
4668 DCHECK(current != NULL); // Always found (unless stack is malformed).
4669 *scope = current->info()->scope();
// A break also drops the matched statement's own extra values.
4671 if (type == BREAK) {
4672 *drop_extra += current->info()->drop_extra();
4675 HBasicBlock* block = NULL;
// switch on `type` (case labels elided in this paste): BREAK case.
4678 block = current->info()->break_block();
4679 if (block == NULL) {
4680 block = current->owner()->graph()->CreateBasicBlock();
4681 current->info()->set_break_block(block);
// CONTINUE case.
4686 block = current->info()->continue_block();
4687 if (block == NULL) {
4688 block = current->owner()->graph()->CreateBasicBlock();
4689 current->info()->set_continue_block(block);
// Continue: find the continue target, pop any block contexts between the
// current scope and the target's scope (restoring the frame context), drop
// extra stack values (the Drop call line is elided), and jump.
4698 void HOptimizedGraphBuilder::VisitContinueStatement(
4699 ContinueStatement* stmt) {
4700 DCHECK(!HasStackOverflow());
4701 DCHECK(current_block() != NULL);
4702 DCHECK(current_block()->HasPredecessor());
4703 Scope* outer_scope = NULL;
4704 Scope* inner_scope = scope();
// `int drop_extra = 0;` is elided in this paste.
4706 HBasicBlock* continue_block = break_scope()->Get(
4707 stmt->target(), BreakAndContinueScope::CONTINUE,
4708 &outer_scope, &drop_extra);
4709 HValue* context = environment()->context();
4711 int context_pop_count = inner_scope->ContextChainLength(outer_scope);
4712 if (context_pop_count > 0) {
4713 while (context_pop_count-- > 0) {
4714 HInstruction* context_instruction = Add<HLoadNamedField>(
4715 context, static_cast<HValue*>(NULL),
4716 HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
4717 context = context_instruction;
4719 HInstruction* instr = Add<HStoreFrameContext>(context);
4720 if (instr->HasObservableSideEffects()) {
4721 AddSimulate(stmt->target()->EntryId(), REMOVABLE_SIMULATE);
4723 environment()->BindContext(context);
4726 Goto(continue_block);
4727 set_current_block(NULL);
// Break: same context-unwinding logic as continue, but targets the break
// block and simulates at the target's ExitId. The Goto(break_block) line
// is elided in this paste.
4731 void HOptimizedGraphBuilder::VisitBreakStatement(BreakStatement* stmt) {
4732 DCHECK(!HasStackOverflow());
4733 DCHECK(current_block() != NULL);
4734 DCHECK(current_block()->HasPredecessor());
4735 Scope* outer_scope = NULL;
4736 Scope* inner_scope = scope();
4738 HBasicBlock* break_block = break_scope()->Get(
4739 stmt->target(), BreakAndContinueScope::BREAK,
4740 &outer_scope, &drop_extra);
4741 HValue* context = environment()->context();
4743 int context_pop_count = inner_scope->ContextChainLength(outer_scope);
4744 if (context_pop_count > 0) {
4745 while (context_pop_count-- > 0) {
4746 HInstruction* context_instruction = Add<HLoadNamedField>(
4747 context, static_cast<HValue*>(NULL),
4748 HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
4749 context = context_instruction;
4751 HInstruction* instr = Add<HStoreFrameContext>(context);
4752 if (instr->HasObservableSideEffects()) {
4753 AddSimulate(stmt->target()->ExitId(), REMOVABLE_SIMULATE);
4755 environment()->BindContext(context);
4758 set_current_block(NULL);
// Return statement. Four cases: (1) not inlined — emit HReturn; (2) return
// from an inlined constructor — must yield a spec object, else the
// receiver; (3) return from an inlined setter — result is the assignment's
// RHS; (4) normal inlined return — deliver the value to the call's context.
// NOTE(review): numbered paste; closing braces are elided throughout.
4762 void HOptimizedGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
4763 DCHECK(!HasStackOverflow());
4764 DCHECK(current_block() != NULL);
4765 DCHECK(current_block()->HasPredecessor());
4766 FunctionState* state = function_state();
4767 AstContext* context = call_context();
4768 if (context == NULL) {
4769 // Not an inlined return, so an actual one.
4770 CHECK_ALIVE(VisitForValue(stmt->expression()));
4771 HValue* result = environment()->Pop();
4772 Add<HReturn>(result);
4773 } else if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
4774 // Return from an inlined construct call. In a test context the return value
4775 // will always evaluate to true, in a value context the return value needs
4776 // to be a JSObject.
4777 if (context->IsTest()) {
4778 TestContext* test = TestContext::cast(context);
4779 CHECK_ALIVE(VisitForEffect(stmt->expression()));
4780 Goto(test->if_true(), state);
4781 } else if (context->IsEffect()) {
4782 CHECK_ALIVE(VisitForEffect(stmt->expression()));
4783 Goto(function_return(), state);
// Value context (elided `} else {`): check the returned value's type.
4785 DCHECK(context->IsValue());
4786 CHECK_ALIVE(VisitForValue(stmt->expression()));
4787 HValue* return_value = Pop();
4788 HValue* receiver = environment()->arguments_environment()->Lookup(0);
4789 HHasInstanceTypeAndBranch* typecheck =
4790 New<HHasInstanceTypeAndBranch>(return_value,
4791 FIRST_SPEC_OBJECT_TYPE,
4792 LAST_SPEC_OBJECT_TYPE);
4793 HBasicBlock* if_spec_object = graph()->CreateBasicBlock();
4794 HBasicBlock* not_spec_object = graph()->CreateBasicBlock();
4795 typecheck->SetSuccessorAt(0, if_spec_object);
4796 typecheck->SetSuccessorAt(1, not_spec_object);
4797 FinishCurrentBlock(typecheck);
// Spec object: keep it; otherwise the construct call yields the receiver.
4798 AddLeaveInlined(if_spec_object, return_value, state);
4799 AddLeaveInlined(not_spec_object, receiver, state);
4801 } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
4802 // Return from an inlined setter call. The returned value is never used, the
4803 // value of an assignment is always the value of the RHS of the assignment.
4804 CHECK_ALIVE(VisitForEffect(stmt->expression()));
4805 if (context->IsTest()) {
4806 HValue* rhs = environment()->arguments_environment()->Lookup(1);
4807 context->ReturnValue(rhs);
4808 } else if (context->IsEffect()) {
4809 Goto(function_return(), state);
4811 DCHECK(context->IsValue());
4812 HValue* rhs = environment()->arguments_environment()->Lookup(1);
4813 AddLeaveInlined(rhs, state);
// Normal inlined return (elided `} else {`).
4816 // Return from a normal inlined function. Visit the subexpression in the
4817 // expression context of the call.
4818 if (context->IsTest()) {
4819 TestContext* test = TestContext::cast(context);
4820 VisitForControl(stmt->expression(), test->if_true(), test->if_false());
4821 } else if (context->IsEffect()) {
4822 // Visit in value context and ignore the result. This is needed to keep
4823 // environment in sync with full-codegen since some visitors (e.g.
4824 // VisitCountOperation) use the operand stack differently depending on
4826 CHECK_ALIVE(VisitForValue(stmt->expression()));
4828 Goto(function_return(), state);
4830 DCHECK(context->IsValue());
4831 CHECK_ALIVE(VisitForValue(stmt->expression()));
4832 AddLeaveInlined(Pop(), state);
4835 set_current_block(NULL);
// `with` is not supported by this compiler — bail out.
4839 void HOptimizedGraphBuilder::VisitWithStatement(WithStatement* stmt) {
4840 DCHECK(!HasStackOverflow());
4841 DCHECK(current_block() != NULL);
4842 DCHECK(current_block()->HasPredecessor());
4843 return Bailout(kWithStatement);
// Switch statement, compiled in two passes: (1) a chain of strict-equality
// compares, each with a dangling body block; (2) the clause bodies, linked
// by fall-through joins, with the leftover test chain feeding the default
// clause (if any) or the exit.
// NOTE(review): numbered paste; closing braces, a clause-count bailout and
// some CreateJoin argument lines are elided. Code kept byte-identical.
4847 void HOptimizedGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
4848 DCHECK(!HasStackOverflow());
4849 DCHECK(current_block() != NULL);
4850 DCHECK(current_block()->HasPredecessor());
4852 ZoneList<CaseClause*>* clauses = stmt->cases();
4853 int clause_count = clauses->length();
4854 ZoneList<HBasicBlock*> body_blocks(clause_count, zone());
4856 CHECK_ALIVE(VisitForValue(stmt->tag()));
4857 Add<HSimulate>(stmt->EntryId());
4858 HValue* tag_value = Top();
4859 Type* tag_type = stmt->tag()->bounds().lower;
4861 // 1. Build all the tests, with dangling true branches
4862 BailoutId default_id = BailoutId::None();
4863 for (int i = 0; i < clause_count; ++i) {
4864 CaseClause* clause = clauses->at(i);
4865 if (clause->is_default()) {
4866 body_blocks.Add(NULL, zone());
4867 if (default_id.IsNone()) default_id = clause->EntryId();
4871 // Generate a compare and branch.
4872 CHECK_ALIVE(VisitForValue(clause->label()));
4873 HValue* label_value = Pop();
4875 Type* label_type = clause->label()->bounds().lower;
4876 Type* combined_type = clause->compare_type();
4877 HControlInstruction* compare = BuildCompareInstruction(
4878 Token::EQ_STRICT, tag_value, label_value, tag_type, label_type,
4880 ScriptPositionToSourcePosition(stmt->tag()->position()),
4881 ScriptPositionToSourcePosition(clause->label()->position()),
4882 PUSH_BEFORE_SIMULATE, clause->id());
4884 HBasicBlock* next_test_block = graph()->CreateBasicBlock();
4885 HBasicBlock* body_block = graph()->CreateBasicBlock();
4886 body_blocks.Add(body_block, zone());
4887 compare->SetSuccessorAt(0, body_block);
4888 compare->SetSuccessorAt(1, next_test_block);
4889 FinishCurrentBlock(compare);
// The body does not keep the tag on its stack.
4891 set_current_block(body_block);
4892 Drop(1); // tag_value
4894 set_current_block(next_test_block);
4897 // Save the current block to use for the default or to join with the
4899 HBasicBlock* last_block = current_block();
4900 Drop(1); // tag_value
4902 // 2. Loop over the clauses and the linked list of tests in lockstep,
4903 // translating the clause bodies.
4904 HBasicBlock* fall_through_block = NULL;
4906 BreakAndContinueInfo break_info(stmt, scope());
4907 { BreakAndContinueScope push(&break_info, this);
4908 for (int i = 0; i < clause_count; ++i) {
4909 CaseClause* clause = clauses->at(i);
4911 // Identify the block where normal (non-fall-through) control flow
4913 HBasicBlock* normal_block = NULL;
4914 if (clause->is_default()) {
4915 if (last_block == NULL) continue;
4916 normal_block = last_block;
4917 last_block = NULL; // Cleared to indicate we've handled it.
4919 normal_block = body_blocks[i];
4922 if (fall_through_block == NULL) {
4923 set_current_block(normal_block);
// Join fall-through with the normal entry (elided `} else {` and the
// remaining CreateJoin argument lines).
4925 HBasicBlock* join = CreateJoin(fall_through_block,
4928 set_current_block(join);
4931 CHECK_BAILOUT(VisitStatements(clause->statements()));
4932 fall_through_block = current_block();
4936 // Create an up-to-3-way join. Use the break block if it exists since
4937 // it's already a join block.
4938 HBasicBlock* break_block = break_info.break_block();
4939 if (break_block == NULL) {
4940 set_current_block(CreateJoin(fall_through_block,
// With a break block (elided `} else {`), route remaining exits into it.
4944 if (fall_through_block != NULL) Goto(fall_through_block, break_block);
4945 if (last_block != NULL) Goto(last_block, break_block);
4946 break_block->SetJoinId(stmt->ExitId());
4947 set_current_block(break_block);
// Emit the per-iteration stack check and then the loop body. The stack
// check is registered on the loop header so it can later be eliminated.
4952 void HOptimizedGraphBuilder::VisitLoopBody(IterationStatement* stmt,
4953 HBasicBlock* loop_entry) {
4954 Add<HSimulate>(stmt->StackCheckId());
4955 HStackCheck* stack_check =
4956 HStackCheck::cast(Add<HStackCheck>(HStackCheck::kBackwardsBranch));
4957 DCHECK(loop_entry->IsLoopHeader());
4958 loop_entry->loop_information()->set_stack_check(stack_check);
4959 CHECK_BAILOUT(Visit(stmt->body()));
// do-while: body first, then the condition controls the back edge. A
// constant-true condition skips the branch; a constant-false one drops the
// back edge and eliminates the loop's stack check.
// NOTE(review): numbered paste; closing braces and some `else` lines are
// elided. Code kept byte-identical.
4963 void HOptimizedGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
4964 DCHECK(!HasStackOverflow());
4965 DCHECK(current_block() != NULL);
4966 DCHECK(current_block()->HasPredecessor());
4967 DCHECK(current_block() != NULL);
4968 HBasicBlock* loop_entry = BuildLoopEntry(stmt);
4970 BreakAndContinueInfo break_info(stmt, scope());
4972 BreakAndContinueScope push(&break_info, this);
4973 CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
4975 HBasicBlock* body_exit =
4976 JoinContinue(stmt, current_block(), break_info.continue_block());
4977 HBasicBlock* loop_successor = NULL;
4978 if (body_exit != NULL && !stmt->cond()->ToBooleanIsTrue()) {
4979 set_current_block(body_exit);
4980 loop_successor = graph()->CreateBasicBlock();
4981 if (stmt->cond()->ToBooleanIsFalse()) {
4982 loop_entry->loop_information()->stack_check()->Eliminate();
4983 Goto(loop_successor);
// General condition (elided `} else {`).
4986 // The block for a true condition, the actual predecessor block of the
4988 body_exit = graph()->CreateBasicBlock();
4989 CHECK_BAILOUT(VisitForControl(stmt->cond(), body_exit, loop_successor));
4991 if (body_exit != NULL && body_exit->HasPredecessor()) {
4992 body_exit->SetJoinId(stmt->BackEdgeId());
4996 if (loop_successor->HasPredecessor()) {
4997 loop_successor->SetJoinId(stmt->ExitId());
4999 loop_successor = NULL;
// CreateLoop's middle argument lines (loop_entry, body_exit,
// loop_successor) are elided in this paste.
5002 HBasicBlock* loop_exit = CreateLoop(stmt,
5006 break_info.break_block());
5007 set_current_block(loop_exit);
// while: condition first; a constant-true condition omits the branch
// entirely so the loop header flows straight into the body.
// NOTE(review): numbered paste; closing braces, `else` lines and some
// CreateLoop argument lines are elided. Code kept byte-identical.
5011 void HOptimizedGraphBuilder::VisitWhileStatement(WhileStatement* stmt) {
5012 DCHECK(!HasStackOverflow());
5013 DCHECK(current_block() != NULL);
5014 DCHECK(current_block()->HasPredecessor());
5015 DCHECK(current_block() != NULL);
5016 HBasicBlock* loop_entry = BuildLoopEntry(stmt);
5018 // If the condition is constant true, do not generate a branch.
5019 HBasicBlock* loop_successor = NULL;
5020 if (!stmt->cond()->ToBooleanIsTrue()) {
5021 HBasicBlock* body_entry = graph()->CreateBasicBlock();
5022 loop_successor = graph()->CreateBasicBlock();
5023 CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
5024 if (body_entry->HasPredecessor()) {
5025 body_entry->SetJoinId(stmt->BodyId());
5026 set_current_block(body_entry);
5028 if (loop_successor->HasPredecessor()) {
5029 loop_successor->SetJoinId(stmt->ExitId());
5031 loop_successor = NULL;
5035 BreakAndContinueInfo break_info(stmt, scope());
5036 if (current_block() != NULL) {
5037 BreakAndContinueScope push(&break_info, this);
5038 CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
5040 HBasicBlock* body_exit =
5041 JoinContinue(stmt, current_block(), break_info.continue_block());
5042 HBasicBlock* loop_exit = CreateLoop(stmt,
5046 break_info.break_block());
5047 set_current_block(loop_exit);
// for: init runs before the loop entry; cond (optional) guards the body;
// next (optional) runs at the body exit before the back edge.
5051 void HOptimizedGraphBuilder::VisitForStatement(ForStatement* stmt) {
5052 DCHECK(!HasStackOverflow());
5053 DCHECK(current_block() != NULL);
5054 DCHECK(current_block()->HasPredecessor());
5055 if (stmt->init() != NULL) {
5056 CHECK_ALIVE(Visit(stmt->init()));
5058 DCHECK(current_block() != NULL);
5059 HBasicBlock* loop_entry = BuildLoopEntry(stmt);
5061 HBasicBlock* loop_successor = NULL;
5062 if (stmt->cond() != NULL) {
5063 HBasicBlock* body_entry = graph()->CreateBasicBlock();
5064 loop_successor = graph()->CreateBasicBlock();
5065 CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
5066 if (body_entry->HasPredecessor()) {
5067 body_entry->SetJoinId(stmt->BodyId());
5068 set_current_block(body_entry);
5070 if (loop_successor->HasPredecessor()) {
5071 loop_successor->SetJoinId(stmt->ExitId());
5073 loop_successor = NULL;
5077 BreakAndContinueInfo break_info(stmt, scope());
5078 if (current_block() != NULL) {
5079 BreakAndContinueScope push(&break_info, this);
5080 CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
5082 HBasicBlock* body_exit =
5083 JoinContinue(stmt, current_block(), break_info.continue_block());
5085 if (stmt->next() != NULL && body_exit != NULL) {
5086 set_current_block(body_exit);
5087 CHECK_BAILOUT(Visit(stmt->next()));
5088 body_exit = current_block();
5091 HBasicBlock* loop_exit = CreateLoop(stmt,
5095 break_info.break_block());
5096 set_current_block(loop_exit);
5100 void HOptimizedGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
5101 DCHECK(!HasStackOverflow());
5102 DCHECK(current_block() != NULL);
5103 DCHECK(current_block()->HasPredecessor());
5105 if (!FLAG_optimize_for_in) {
// NOTE(review): tail of HOptimizedGraphBuilder::VisitForInStatement; the
// function header and several interior lines are outside this chunk (gaps in
// the embedded line numbering).  Builds an optimized for-in loop over the
// enum cache of a FAST_FOR_IN enumerable, or bails out to full codegen.
5106 return Bailout(kForInStatementOptimizationIsDisabled);
// Only the fast case (enum-cache based enumeration) is handled here.
5109 if (stmt->for_in_type() != ForInStatement::FAST_FOR_IN) {
5110 return Bailout(kForInStatementIsNotFastCase);
// The loop variable must be a stack-allocated local so it can be Bind()-ed
// directly; anything else (context slot, property) bails out.
5113 if (!stmt->each()->IsVariableProxy() ||
5114 !stmt->each()->AsVariableProxy()->var()->IsStackLocal()) {
5115 return Bailout(kForInStatementWithNonLocalEachVariable);
5118 Variable* each_var = stmt->each()->AsVariableProxy()->var();
5120 CHECK_ALIVE(VisitForValue(stmt->enumerable()));
5121 HValue* enumerable = Top(); // Leave enumerable at the top.
// Prepare the map and enum-cache array used to iterate the keys; a simulate
// follows so deopts land after the prepare step.
5123 HInstruction* map = Add<HForInPrepareMap>(enumerable);
5124 Add<HSimulate>(stmt->PrepareId());
5126 HInstruction* array = Add<HForInCacheArray>(
5127 enumerable, map, DescriptorArray::kEnumCacheBridgeCacheIndex);
5129 HInstruction* enum_length = Add<HMapEnumLength>(map);
5131 HInstruction* start_index = Add<HConstant>(0);
// NOTE(review): the Push()es of map/array/enum_length/start_index fall in the
// missing lines 5132-5137 — the expression-stack layout consumed below
// (slots 0..4) is set up there.
5138 HInstruction* index_cache = Add<HForInCacheArray>(
5139 enumerable, map, DescriptorArray::kEnumCacheBridgeIndicesCacheIndex);
5140 HForInCacheArray::cast(array)->set_index_cache(
5141 HForInCacheArray::cast(index_cache));
5143 HBasicBlock* loop_entry = BuildLoopEntry(stmt);
// Stack slot 0 is the running index, slot 1 the enum length (limit).
5145 HValue* index = environment()->ExpressionStackAt(0);
5146 HValue* limit = environment()->ExpressionStackAt(1);
5148 // Check that we still have more keys.
5149 HCompareNumericAndBranch* compare_index =
5150 New<HCompareNumericAndBranch>(index, limit, Token::LT);
5151 compare_index->set_observed_input_representation(
5152 Representation::Smi(), Representation::Smi());
5154 HBasicBlock* loop_body = graph()->CreateBasicBlock();
5155 HBasicBlock* loop_successor = graph()->CreateBasicBlock();
5157 compare_index->SetSuccessorAt(0, loop_body);
5158 compare_index->SetSuccessorAt(1, loop_successor);
5159 FinishCurrentBlock(compare_index);
5161 set_current_block(loop_successor);
5164 set_current_block(loop_body);
// Load the current key from the enum cache at the iteration index.
5166 HValue* key = Add<HLoadKeyed>(
5167 environment()->ExpressionStackAt(2), // Enum cache.
5168 environment()->ExpressionStackAt(0), // Iteration index.
5169 environment()->ExpressionStackAt(0),
5172 // Check if the expected map still matches that of the enumerable.
5173 // If not just deoptimize.
5174 Add<HCheckMapValue>(environment()->ExpressionStackAt(4),
5175 environment()->ExpressionStackAt(3));
5177 Bind(each_var, key);
// Drop count 5 matches the five values pushed for the loop state.
5179 BreakAndContinueInfo break_info(stmt, scope(), 5);
5181 BreakAndContinueScope push(&break_info, this);
5182 CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
5185 HBasicBlock* body_exit =
5186 JoinContinue(stmt, current_block(), break_info.continue_block());
5188 if (body_exit != NULL) {
5189 set_current_block(body_exit);
// Increment the iteration index for the back edge.
5191 HValue* current_index = Pop();
5192 Push(AddUncasted<HAdd>(current_index, graph()->GetConstant1()));
5193 body_exit = current_block();
// NOTE(review): CreateLoop's middle arguments (loop_entry, body_exit,
// loop_successor — lines 5197-5199) are in the gap; only first and last
// arguments are visible here.
5196 HBasicBlock* loop_exit = CreateLoop(stmt,
5200 break_info.break_block());
5202 set_current_block(loop_exit);
// for-of is not supported by the optimizing compiler; always bail out to
// full codegen.
5206 void HOptimizedGraphBuilder::VisitForOfStatement(ForOfStatement* stmt) {
5207 DCHECK(!HasStackOverflow());
5208 DCHECK(current_block() != NULL);
5209 DCHECK(current_block()->HasPredecessor());
5210 return Bailout(kForOfStatement);
// try/catch is not supported by the optimizing compiler; always bail out.
5214 void HOptimizedGraphBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
5215 DCHECK(!HasStackOverflow());
5216 DCHECK(current_block() != NULL);
5217 DCHECK(current_block()->HasPredecessor());
5218 return Bailout(kTryCatchStatement);
// try/finally is not supported by the optimizing compiler; always bail out.
5222 void HOptimizedGraphBuilder::VisitTryFinallyStatement(
5223 TryFinallyStatement* stmt) {
5224 DCHECK(!HasStackOverflow());
5225 DCHECK(current_block() != NULL);
5226 DCHECK(current_block()->HasPredecessor());
5227 return Bailout(kTryFinallyStatement);
// The debugger statement is never compiled by Crankshaft; always bail out.
5231 void HOptimizedGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
5232 DCHECK(!HasStackOverflow());
5233 DCHECK(current_block() != NULL);
5234 DCHECK(current_block()->HasPredecessor());
5235 return Bailout(kDebuggerStatement);
// Case clauses are handled as part of VisitSwitchStatement, so this visitor
// body is empty/unreachable — presumably UNREACHABLE(); the body (lines
// 5240+) is outside this chunk. TODO(review): confirm against full source.
5239 void HOptimizedGraphBuilder::VisitCaseClause(CaseClause* clause) {
// Emits an HFunctionLiteral for a function expression, lazily building the
// SharedFunctionInfo (which may recursively compile) when it is not yet
// available.
5244 void HOptimizedGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
5245 DCHECK(!HasStackOverflow());
5246 DCHECK(current_block() != NULL);
5247 DCHECK(current_block()->HasPredecessor());
5248 Handle<SharedFunctionInfo> shared_info = expr->shared_info();
5249 if (shared_info.is_null()) {
// NOTE(review): the assignment of BuildFunctionInfo's result back into
// shared_info falls in the missing line 5250.
5251 Compiler::BuildFunctionInfo(expr, current_info()->script(), top_info());
5253 // We also have a stack overflow if the recursive compilation did.
5254 if (HasStackOverflow()) return;
5255 HFunctionLiteral* instr =
5256 New<HFunctionLiteral>(shared_info, expr->pretenure());
5257 return ast_context()->ReturnInstruction(instr, expr->id());
// Class literals are not supported by the optimizing compiler; bail out.
5261 void HOptimizedGraphBuilder::VisitClassLiteral(ClassLiteral* lit) {
5262 DCHECK(!HasStackOverflow());
5263 DCHECK(current_block() != NULL);
5264 DCHECK(current_block()->HasPredecessor());
5265 return Bailout(kClassLiteral);
// Native function literals are not supported; bail out to full codegen.
5269 void HOptimizedGraphBuilder::VisitNativeFunctionLiteral(
5270 NativeFunctionLiteral* expr) {
5271 DCHECK(!HasStackOverflow());
5272 DCHECK(current_block() != NULL);
5273 DCHECK(current_block()->HasPredecessor());
5274 return Bailout(kNativeFunctionLiteral);
// Builds control flow for the ternary operator (cond ? then : else): the
// condition is compiled for control into two fresh blocks, each arm is
// visited in the surrounding AST context, and (outside of test contexts) the
// arms are joined and the resulting value returned.
5278 void HOptimizedGraphBuilder::VisitConditional(Conditional* expr) {
5279 DCHECK(!HasStackOverflow());
5280 DCHECK(current_block() != NULL);
5281 DCHECK(current_block()->HasPredecessor());
5282 HBasicBlock* cond_true = graph()->CreateBasicBlock();
5283 HBasicBlock* cond_false = graph()->CreateBasicBlock();
5284 CHECK_BAILOUT(VisitForControl(expr->condition(), cond_true, cond_false));
5286 // Visit the true and false subexpressions in the same AST context as the
5287 // whole expression.
// A branch with no predecessor is statically unreachable (constant
// condition); it is skipped. The else-arm of each if (setting the block to
// NULL) is in the missing lines.
5288 if (cond_true->HasPredecessor()) {
5289 cond_true->SetJoinId(expr->ThenId());
5290 set_current_block(cond_true);
5291 CHECK_BAILOUT(Visit(expr->then_expression()));
5292 cond_true = current_block();
5297 if (cond_false->HasPredecessor()) {
5298 cond_false->SetJoinId(expr->ElseId());
5299 set_current_block(cond_false);
5300 CHECK_BAILOUT(Visit(expr->else_expression()));
5301 cond_false = current_block();
// In a test context the arms already branched to the target blocks; only
// value/effect contexts need an explicit join.
5306 if (!ast_context()->IsTest()) {
5307 HBasicBlock* join = CreateJoin(cond_true, cond_false, expr->id());
5308 set_current_block(join);
5309 if (join != NULL && !ast_context()->IsEffect()) {
5310 return ast_context()->ReturnValue(Pop());
// Decides how a global variable access should be compiled: via a property
// cell (kUseCell) or via a generic IC (kUseGeneric). The return statements
// inside the switch fall in the missing lines of this chunk; only the case
// labels and the read-only store check are visible.
5316 HOptimizedGraphBuilder::GlobalPropertyAccess
5317 HOptimizedGraphBuilder::LookupGlobalProperty(Variable* var, LookupIterator* it,
5318 PropertyAccessType access_type) {
// 'this' is never a global property; without a global object there is no
// cell to use either.
5319 if (var->is_this() || !current_info()->has_global_object()) {
5323 switch (it->state()) {
5324 case LookupIterator::ACCESSOR:
5325 case LookupIterator::ACCESS_CHECK:
5326 case LookupIterator::INTERCEPTOR:
5327 case LookupIterator::NOT_FOUND:
5329 case LookupIterator::DATA:
// Stores to read-only global properties must go through the generic path
// (they may throw in strict mode).
5330 if (access_type == STORE && it->IsReadOnly()) return kUseGeneric;
5332 case LookupIterator::JSPROXY:
5333 case LookupIterator::TRANSITION:
// Walks from the current function context up the context chain to the
// context that holds |var|, emitting one PREVIOUS-slot load per hop.
// The final 'return context;' falls in the missing lines after 5348.
5341 HValue* HOptimizedGraphBuilder::BuildContextChainWalk(Variable* var) {
5342 DCHECK(var->IsContextSlot());
5343 HValue* context = environment()->context();
// Number of hops between the current scope and the variable's scope.
5344 int length = scope()->ContextChainLength(var->scope());
5345 while (length-- > 0) {
5346 context = Add<HLoadNamedField>(
5347 context, static_cast<HValue*>(NULL),
5348 HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
// Compiles a variable reference, dispatching on where the variable lives:
// unallocated (global — via constant folding, a property cell, or a generic
// load), parameter/local (environment lookup), context slot (context chain
// walk + HLoadContextSlot), or dynamic lookup (bailout).
5354 void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
5355 if (expr->is_this()) {
5356 current_info()->set_this_has_uses(true);
5359 DCHECK(!HasStackOverflow());
5360 DCHECK(current_block() != NULL);
5361 DCHECK(current_block()->HasPredecessor());
5362 Variable* variable = expr->var();
5363 switch (variable->location()) {
5364 case Variable::UNALLOCATED: {
5365 if (IsLexicalVariableMode(variable->mode())) {
5366 // TODO(rossberg): should this be an DCHECK?
5367 return Bailout(kReferenceToGlobalLexicalVariable);
5369 // Handle known global constants like 'undefined' specially to avoid a
5370 // load from a global cell for them.
5371 Handle<Object> constant_value =
5372 isolate()->factory()->GlobalConstantFor(variable->name());
5373 if (!constant_value.is_null()) {
5374 HConstant* instr = New<HConstant>(constant_value);
5375 return ast_context()->ReturnInstruction(instr, expr->id());
5378 Handle<GlobalObject> global(current_info()->global_object());
5379 LookupIterator it(global, variable->name(),
5380 LookupIterator::OWN_SKIP_INTERCEPTOR);
5381 GlobalPropertyAccess type = LookupGlobalProperty(variable, &it, LOAD);
5383 if (type == kUseCell) {
5384 Handle<PropertyCell> cell = it.GetPropertyCell();
// If the cell's type is a known constant, embed the value directly and
// register a dependency so the code deopts if the cell changes.
5385 if (cell->type()->IsConstant()) {
5386 PropertyCell::AddDependentCompilationInfo(cell, top_info());
5387 Handle<Object> constant_object = cell->type()->AsConstant()->Value();
5388 if (constant_object->IsConsString()) {
// Flatten cons strings before embedding; the flattened handle
// reassignment is in the missing line 5391.
5390 String::Flatten(Handle<String>::cast(constant_object));
5392 HConstant* constant = New<HConstant>(constant_object);
5393 return ast_context()->ReturnInstruction(constant, expr->id());
5395 HLoadGlobalCell* instr =
5396 New<HLoadGlobalCell>(cell, it.property_details());
5397 return ast_context()->ReturnInstruction(instr, expr->id());
// Generic path: load the global object from the context, then do a
// generic named load (with IC feedback vector when vector ICs are on).
5400 HValue* global_object = Add<HLoadNamedField>(
5401 context(), static_cast<HValue*>(NULL),
5402 HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
5403 HLoadGlobalGeneric* instr =
5404 New<HLoadGlobalGeneric>(global_object,
5406 ast_context()->is_for_typeof());
5407 if (FLAG_vector_ics) {
5408 Handle<SharedFunctionInfo> current_shared =
5409 function_state()->compilation_info()->shared_info();
5410 instr->SetVectorAndSlot(
5411 handle(current_shared->feedback_vector(), isolate()),
5412 expr->VariableFeedbackSlot());
5414 return ast_context()->ReturnInstruction(instr, expr->id());
5418 case Variable::PARAMETER:
5419 case Variable::LOCAL: {
5420 HValue* value = LookupAndMakeLive(variable);
// The hole marks an uninitialized let/const binding (TDZ); bail out.
5421 if (value == graph()->GetConstantHole()) {
5422 DCHECK(IsDeclaredVariableMode(variable->mode()) &&
5423 variable->mode() != VAR);
5424 return Bailout(kReferenceToUninitializedVariable);
5426 return ast_context()->ReturnValue(value);
5429 case Variable::CONTEXT: {
5430 HValue* context = BuildContextChainWalk(variable);
// Pick the hole-check mode by variable mode; the case labels for this
// inner switch are in the missing lines.
5431 HLoadContextSlot::Mode mode;
5432 switch (variable->mode()) {
5435 mode = HLoadContextSlot::kCheckDeoptimize;
5438 mode = HLoadContextSlot::kCheckReturnUndefined;
5441 mode = HLoadContextSlot::kNoCheck;
5444 HLoadContextSlot* instr =
5445 new(zone()) HLoadContextSlot(context, variable->index(), mode);
5446 return ast_context()->ReturnInstruction(instr, expr->id());
5449 case Variable::LOOKUP:
5450 return Bailout(kReferenceToAVariableWhichRequiresDynamicLookup);
// A literal expression compiles to a single HConstant.
5455 void HOptimizedGraphBuilder::VisitLiteral(Literal* expr) {
5456 DCHECK(!HasStackOverflow());
5457 DCHECK(current_block() != NULL);
5458 DCHECK(current_block()->HasPredecessor());
5459 HConstant* instr = New<HConstant>(expr->value());
5460 return ast_context()->ReturnInstruction(instr, expr->id());
// Emits an HRegExpLiteral backed by the closure's literals array.
// NOTE(review): the pattern/flags arguments of the HRegExpLiteral
// constructor (lines 5471-5472) are in the gap of this chunk.
5464 void HOptimizedGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
5465 DCHECK(!HasStackOverflow());
5466 DCHECK(current_block() != NULL);
5467 DCHECK(current_block()->HasPredecessor());
5468 Handle<JSFunction> closure = function_state()->compilation_info()->closure();
5469 Handle<FixedArray> literals(closure->literals());
5470 HRegExpLiteral* instr = New<HRegExpLiteral>(literals,
5473 expr->literal_index());
5474 return ast_context()->ReturnInstruction(instr, expr->id());
// A property access can be inlined for numbers/strings (value wrappers) and
// for ordinary JSObject maps that are neither dictionary-mode nor guarded by
// a named interceptor.
5478 static bool CanInlinePropertyAccess(Type* type) {
5479 if (type->Is(Type::NumberOrString())) return true;
5480 if (!type->IsClass()) return false;
5481 Handle<Map> map = type->AsClass()->Map();
5482 return map->IsJSObjectMap() &&
5483 !map->is_dictionary_map() &&
5484 !map->has_named_interceptor();
5488 // Determines whether the given array or object literal boilerplate satisfies
5489 // all limits to be considered for fast deep-copying and computes the total
5490 // size of all objects that are part of the graph.
// Recursive over nested JSObject values in both elements and in-object
// fields; *max_properties is a shared budget decremented across the whole
// graph. NOTE(review): the recursive-call argument lists and several early
// 'return false/true' lines fall in this chunk's gaps.
5491 static bool IsFastLiteral(Handle<JSObject> boilerplate,
5493 int* max_properties) {
// A deprecated map must be migrated before the boilerplate can be inspected.
5494 if (boilerplate->map()->is_deprecated() &&
5495 !JSObject::TryMigrateInstance(boilerplate)) {
5499 DCHECK(max_depth >= 0 && *max_properties >= 0);
5500 if (max_depth == 0) return false;
5502 Isolate* isolate = boilerplate->GetIsolate();
5503 Handle<FixedArrayBase> elements(boilerplate->elements());
// COW element arrays are shared, not copied, so they cost nothing here.
5504 if (elements->length() > 0 &&
5505 elements->map() != isolate->heap()->fixed_cow_array_map()) {
5506 if (boilerplate->HasFastObjectElements()) {
5507 Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
5508 int length = elements->length();
5509 for (int i = 0; i < length; i++) {
5510 if ((*max_properties)-- == 0) return false;
5511 Handle<Object> value(fast_elements->get(i), isolate);
5512 if (value->IsJSObject()) {
5513 Handle<JSObject> value_object = Handle<JSObject>::cast(value);
// Recurse with reduced depth budget (arguments in missing lines).
5514 if (!IsFastLiteral(value_object,
5521 } else if (!boilerplate->HasFastDoubleElements()) {
// Walk only own FIELD descriptors; other property kinds are skipped.
5526 Handle<FixedArray> properties(boilerplate->properties());
5527 if (properties->length() > 0) {
5530 Handle<DescriptorArray> descriptors(
5531 boilerplate->map()->instance_descriptors());
5532 int limit = boilerplate->map()->NumberOfOwnDescriptors();
5533 for (int i = 0; i < limit; i++) {
5534 PropertyDetails details = descriptors->GetDetails(i);
5535 if (details.type() != FIELD) continue;
5536 int index = descriptors->GetFieldIndex(i);
5537 if ((*max_properties)-- == 0) return false;
5538 Handle<Object> value(boilerplate->InObjectPropertyAt(index), isolate);
5539 if (value->IsJSObject()) {
5540 Handle<JSObject> value_object = Handle<JSObject>::cast(value);
5541 if (!IsFastLiteral(value_object,
// Compiles an object literal: either deep-copies a fast boilerplate inline
// (BuildFastLiteral) or calls Runtime::kCreateObjectLiteral, then emits
// stores for the non-compile-time properties.
5553 void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
5554 DCHECK(!HasStackOverflow());
5555 DCHECK(current_block() != NULL);
5556 DCHECK(current_block()->HasPredecessor());
5557 expr->BuildConstantProperties(isolate());
5558 Handle<JSFunction> closure = function_state()->compilation_info()->closure();
5559 HInstruction* literal;
5561 // Check whether to use fast or slow deep-copying for boilerplate.
5562 int max_properties = kMaxFastLiteralProperties;
5563 Handle<Object> literals_cell(closure->literals()->get(expr->literal_index()),
5565 Handle<AllocationSite> site;
5566 Handle<JSObject> boilerplate;
5567 if (!literals_cell->IsUndefined()) {
5568 // Retrieve the boilerplate
5569 site = Handle<AllocationSite>::cast(literals_cell);
5570 boilerplate = Handle<JSObject>(JSObject::cast(site->transition_info()),
// Fast path: inline deep copy guided by the allocation site.
5574 if (!boilerplate.is_null() &&
5575 IsFastLiteral(boilerplate, kMaxFastLiteralDepth, &max_properties)) {
5576 AllocationSiteUsageContext usage_context(isolate(), site, false);
5577 usage_context.EnterNewScope();
5578 literal = BuildFastLiteral(boilerplate, &usage_context);
5579 usage_context.ExitScope(site, boilerplate);
// Slow path: call the runtime to create the literal.
5581 NoObservableSideEffectsScope no_effects(this);
5582 Handle<FixedArray> closure_literals(closure->literals(), isolate());
5583 Handle<FixedArray> constant_properties = expr->constant_properties();
5584 int literal_index = expr->literal_index();
5585 int flags = expr->fast_elements()
5586 ? ObjectLiteral::kFastElements : ObjectLiteral::kNoFlags;
5587 flags |= expr->has_function()
5588 ? ObjectLiteral::kHasFunction : ObjectLiteral::kNoFlags;
5590 Add<HPushArguments>(Add<HConstant>(closure_literals),
5591 Add<HConstant>(literal_index),
5592 Add<HConstant>(constant_properties),
5593 Add<HConstant>(flags));
5595 // TODO(mvstanton): Add a flag to turn off creation of any
5596 // AllocationMementos for this call: we are in crankshaft and should have
5597 // learned enough about transition behavior to stop emitting mementos.
5598 Runtime::FunctionId function_id = Runtime::kCreateObjectLiteral;
5599 literal = Add<HCallRuntime>(isolate()->factory()->empty_string(),
5600 Runtime::FunctionForId(function_id),
5604 // The object is expected in the bailout environment during computation
5605 // of the property values and is the value of the entire expression.
5608 expr->CalculateEmitStore(zone());
5610 for (int i = 0; i < expr->properties()->length(); i++) {
5611 ObjectLiteral::Property* property = expr->properties()->at(i);
// Compile-time values were already materialized in the boilerplate.
5612 if (property->IsCompileTimeValue()) continue;
5614 Literal* key = property->key();
5615 Expression* value = property->value();
5617 switch (property->kind()) {
5618 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
5619 DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
5621 case ObjectLiteral::Property::COMPUTED:
5622 // It is safe to use [[Put]] here because the boilerplate already
5623 // contains computed properties with an uninitialized value.
5624 if (key->value()->IsInternalizedString()) {
5625 if (property->emit_store()) {
5626 CHECK_ALIVE(VisitForValue(value));
5627 HValue* value = Pop();
5628 Handle<Map> map = property->GetReceiverType();
5629 Handle<String> name = property->key()->AsPropertyName();
5630 HInstruction* store;
5631 if (map.is_null()) {
5632 // If we don't know the monomorphic type, do a generic store.
5633 CHECK_ALIVE(store = BuildNamedGeneric(
5634 STORE, NULL, literal, name, value));
5636 PropertyAccessInfo info(this, STORE, ToType(map), name);
5637 if (info.CanAccessMonomorphic()) {
// Monomorphic store: guard the literal's map, then store directly.
5638 HValue* checked_literal = Add<HCheckMaps>(literal, map);
5639 DCHECK(!info.IsAccessor());
5640 store = BuildMonomorphicAccess(
5641 &info, literal, checked_literal, value,
5642 BailoutId::None(), BailoutId::None());
5644 CHECK_ALIVE(store = BuildNamedGeneric(
5645 STORE, NULL, literal, name, value));
5648 AddInstruction(store);
5649 if (store->HasObservableSideEffects()) {
5650 Add<HSimulate>(key->id(), REMOVABLE_SIMULATE);
// Non-internalized key with no store to emit: evaluate for effect only.
5653 CHECK_ALIVE(VisitForEffect(value));
// Getters/setters and __proto__ are not supported; bail out.
5658 case ObjectLiteral::Property::PROTOTYPE:
5659 case ObjectLiteral::Property::SETTER:
5660 case ObjectLiteral::Property::GETTER:
5661 return Bailout(kObjectLiteralWithComplexProperty);
5662 default: UNREACHABLE();
5666 if (expr->has_function()) {
5667 // Return the result of the transformation to fast properties
5668 // instead of the original since this operation changes the map
5669 // of the object. This makes sure that the original object won't
5670 // be used by other optimized code before it is transformed
5671 // (e.g. because of code motion).
5672 HToFastProperties* result = Add<HToFastProperties>(Pop());
5673 return ast_context()->ReturnValue(result);
5675 return ast_context()->ReturnValue(Pop());
// Compiles an array literal: creates/looks up the boilerplate and its
// AllocationSite, deep-copies it inline when fast (otherwise calls
// Runtime::kCreateArrayLiteral + a map guard), then stores the
// non-compile-time subexpressions element by element.
5680 void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
5681 DCHECK(!HasStackOverflow());
5682 DCHECK(current_block() != NULL);
5683 DCHECK(current_block()->HasPredecessor());
5684 expr->BuildConstantElements(isolate());
5685 ZoneList<Expression*>* subexprs = expr->values();
5686 int length = subexprs->length();
5687 HInstruction* literal;
5689 Handle<AllocationSite> site;
5690 Handle<FixedArray> literals(environment()->closure()->literals(), isolate());
5691 bool uninitialized = false;
5692 Handle<Object> literals_cell(literals->get(expr->literal_index()),
5694 Handle<JSObject> boilerplate_object;
// First execution: build the boilerplate (and its AllocationSite) at
// compile time and cache it in the literals array.
5695 if (literals_cell->IsUndefined()) {
5696 uninitialized = true;
5697 Handle<Object> raw_boilerplate;
5698 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
5699 isolate(), raw_boilerplate,
5700 Runtime::CreateArrayLiteralBoilerplate(
5701 isolate(), literals, expr->constant_elements()),
5702 Bailout(kArrayBoilerplateCreationFailed));
5704 boilerplate_object = Handle<JSObject>::cast(raw_boilerplate);
5705 AllocationSiteCreationContext creation_context(isolate());
5706 site = creation_context.EnterNewScope();
5707 if (JSObject::DeepWalk(boilerplate_object, &creation_context).is_null()) {
5708 return Bailout(kArrayBoilerplateCreationFailed);
5710 creation_context.ExitScope(site, boilerplate_object);
5711 literals->set(expr->literal_index(), *site);
5713 if (boilerplate_object->elements()->map() ==
5714 isolate()->heap()->fixed_cow_array_map()) {
5715 isolate()->counters()->cow_arrays_created_runtime()->Increment();
// Cached case: the cell holds the AllocationSite from a prior execution.
5718 DCHECK(literals_cell->IsAllocationSite());
5719 site = Handle<AllocationSite>::cast(literals_cell);
5720 boilerplate_object = Handle<JSObject>(
5721 JSObject::cast(site->transition_info()), isolate());
5724 DCHECK(!boilerplate_object.is_null());
5725 DCHECK(site->SitePointsToLiteral());
5727 ElementsKind boilerplate_elements_kind =
5728 boilerplate_object->GetElementsKind();
5730 // Check whether to use fast or slow deep-copying for boilerplate.
5731 int max_properties = kMaxFastLiteralProperties;
5732 if (IsFastLiteral(boilerplate_object,
5733 kMaxFastLiteralDepth,
5735 AllocationSiteUsageContext usage_context(isolate(), site, false);
5736 usage_context.EnterNewScope();
5737 literal = BuildFastLiteral(boilerplate_object, &usage_context);
5738 usage_context.ExitScope(site, boilerplate_object);
5740 NoObservableSideEffectsScope no_effects(this);
5741 // Boilerplate already exists and constant elements are never accessed,
5742 // pass an empty fixed array to the runtime function instead.
5743 Handle<FixedArray> constants = isolate()->factory()->empty_fixed_array();
5744 int literal_index = expr->literal_index();
5745 int flags = expr->depth() == 1
5746 ? ArrayLiteral::kShallowElements
5747 : ArrayLiteral::kNoFlags;
5748 flags |= ArrayLiteral::kDisableMementos;
5750 Add<HPushArguments>(Add<HConstant>(literals),
5751 Add<HConstant>(literal_index),
5752 Add<HConstant>(constants),
5753 Add<HConstant>(flags));
5755 // TODO(mvstanton): Consider a flag to turn off creation of any
5756 // AllocationMementos for this call: we are in crankshaft and should have
5757 // learned enough about transition behavior to stop emitting mementos.
5758 Runtime::FunctionId function_id = Runtime::kCreateArrayLiteral;
5759 literal = Add<HCallRuntime>(isolate()->factory()->empty_string(),
5760 Runtime::FunctionForId(function_id),
5763 // De-opt if elements kind changed from boilerplate_elements_kind.
5764 Handle<Map> map = Handle<Map>(boilerplate_object->map(), isolate());
5765 literal = Add<HCheckMaps>(literal, map);
5768 // The array is expected in the bailout environment during computation
5769 // of the property values and is the value of the entire expression.
5771 // The literal index is on the stack, too.
5772 Push(Add<HConstant>(expr->literal_index()));
5774 HInstruction* elements = NULL;
5776 for (int i = 0; i < length; i++) {
5777 Expression* subexpr = subexprs->at(i);
5778 // If the subexpression is a literal or a simple materialized literal it
5779 // is already set in the cloned array.
5780 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
5782 CHECK_ALIVE(VisitForValue(subexpr));
5783 HValue* value = Pop();
5784 if (!Smi::IsValid(i)) return Bailout(kNonSmiKeyInArrayLiteral);
5786 elements = AddLoadElements(literal);
5788 HValue* key = Add<HConstant>(i);
// Keyed store specialized on the boilerplate's elements kind; the default
// (UNREACHABLE) arm of this switch falls in the missing lines.
5790 switch (boilerplate_elements_kind) {
5791 case FAST_SMI_ELEMENTS:
5792 case FAST_HOLEY_SMI_ELEMENTS:
5794 case FAST_HOLEY_ELEMENTS:
5795 case FAST_DOUBLE_ELEMENTS:
5796 case FAST_HOLEY_DOUBLE_ELEMENTS: {
5797 HStoreKeyed* instr = Add<HStoreKeyed>(elements, key, value,
5798 boilerplate_elements_kind);
5799 instr->SetUninitialized(uninitialized);
5807 Add<HSimulate>(expr->GetIdForElement(i));
5810 Drop(1); // array literal index
5811 return ast_context()->ReturnValue(Pop());
// Adds a heap-object check followed by a map check on |object|.
// NOTE(review): the Handle<Map> map parameter (line 5816) is in the gap.
5815 HCheckMaps* HOptimizedGraphBuilder::AddCheckMap(HValue* object,
5817 BuildCheckHeapObject(object);
5818 return Add<HCheckMaps>(object, map);
// Builds the load of a named in-object/backing-store field described by
// |info| from an already map-checked receiver. Constant-folds loads of
// read-only, non-configurable data properties on constant receivers, and
// unwraps boxed double fields.
5822 HInstruction* HOptimizedGraphBuilder::BuildLoadNamedField(
5823 PropertyAccessInfo* info,
5824 HValue* checked_object) {
5825 // See if this is a load for an immutable property
5826 if (checked_object->ActualValue()->IsConstant()) {
5827 Handle<Object> object(
5828 HConstant::cast(checked_object->ActualValue())->handle(isolate()));
5830 if (object->IsJSObject()) {
5831 LookupIterator it(object, info->name(),
5832 LookupIterator::OWN_SKIP_INTERCEPTOR);
5833 Handle<Object> value = JSObject::GetDataProperty(&it);
// Read-only + non-configurable means the value can never change, so it
// is safe to embed as a constant.
5834 if (it.IsFound() && it.IsReadOnly() && !it.IsConfigurable()) {
5835 return New<HConstant>(value);
5840 HObjectAccess access = info->access();
5841 if (access.representation().IsDouble()) {
5842 // Load the heap number.
5843 checked_object = Add<HLoadNamedField>(
5844 checked_object, static_cast<HValue*>(NULL),
5845 access.WithRepresentation(Representation::Tagged()));
5846 // Load the double value from it.
5847 access = HObjectAccess::ForHeapNumberValue();
// With known stable field maps, emit a load annotated with the map set so
// later passes can use the type information.
5850 SmallMapList* map_list = info->field_maps();
5851 if (map_list->length() == 0) {
5852 return New<HLoadNamedField>(checked_object, checked_object, access);
5855 UniqueSet<Map>* maps = new(zone()) UniqueSet<Map>(map_list->length(), zone());
5856 for (int i = 0; i < map_list->length(); ++i) {
5857 maps->Add(Unique<Map>::CreateImmovable(map_list->at(i)), zone());
5859 return New<HLoadNamedField>(
5860 checked_object, checked_object, access, maps, info->field_type());
// Builds the store of |value| into the named field described by |info|.
// Double fields are stored into a (mutable) HeapNumber box — freshly
// allocated on a transitioning store, loaded and updated otherwise. A
// transitioning store also installs the transition map on the instruction.
// NOTE(review): the HValue* value parameter (line 5867) and parts of the
// New<HStoreNamedField> argument lists fall in this chunk's gaps.
5864 HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField(
5865 PropertyAccessInfo* info,
5866 HValue* checked_object,
5868 bool transition_to_field = info->IsTransition();
5869 // TODO(verwaest): Move this logic into PropertyAccessInfo.
5870 HObjectAccess field_access = info->access();
5872 HStoreNamedField *instr;
5873 if (field_access.representation().IsDouble()) {
5874 HObjectAccess heap_number_access =
5875 field_access.WithRepresentation(Representation::Tagged());
5876 if (transition_to_field) {
5877 // The store requires a mutable HeapNumber to be allocated.
5878 NoObservableSideEffectsScope no_side_effects(this);
5879 HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);
5881 // TODO(hpayer): Allocation site pretenuring support.
5882 HInstruction* heap_number = Add<HAllocate>(heap_number_size,
5883 HType::HeapObject(),
5885 MUTABLE_HEAP_NUMBER_TYPE);
5886 AddStoreMapConstant(
5887 heap_number, isolate()->factory()->mutable_heap_number_map());
5888 Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
5890 instr = New<HStoreNamedField>(checked_object->ActualValue(),
5894 // Already holds a HeapNumber; load the box and write its value field.
5895 HInstruction* heap_number = Add<HLoadNamedField>(
5896 checked_object, static_cast<HValue*>(NULL), heap_number_access);
5897 instr = New<HStoreNamedField>(heap_number,
5898 HObjectAccess::ForHeapNumberValue(),
5899 value, STORE_TO_INITIALIZED_ENTRY);
// Non-double field: heap-object representations get a heap-object check
// and, when field maps are known, a map check on the stored value.
5902 if (field_access.representation().IsHeapObject()) {
5903 BuildCheckHeapObject(value);
5906 if (!info->field_maps()->is_empty()) {
5907 DCHECK(field_access.representation().IsHeapObject());
5908 value = Add<HCheckMaps>(value, info->field_maps());
5911 // This is a normal store.
5912 instr = New<HStoreNamedField>(
5913 checked_object->ActualValue(), field_access, value,
5914 transition_to_field ? INITIALIZING_STORE : STORE_TO_INITIALIZED_ENTRY);
5917 if (transition_to_field) {
5918 Handle<Map> transition(info->transition());
5919 DCHECK(!transition->is_deprecated());
5920 instr->SetTransition(Add<HConstant>(transition));
// Checks whether |info| (the accumulated access for other maps of a
// polymorphic site) is compatible with this access so that both can be
// handled by one monomorphic-style access; on success, merges this access's
// field maps/representation/type into |info|. Several return statements and
// the IsAccessor/IsConstant/IsField dispatch lines fall in this chunk's
// gaps.
5926 bool HOptimizedGraphBuilder::PropertyAccessInfo::IsCompatible(
5927 PropertyAccessInfo* info) {
5928 if (!CanInlinePropertyAccess(type_)) return false;
5930 // Currently only handle Type::Number as a polymorphic case.
5931 // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
5933 if (type_->Is(Type::Number())) return false;
5935 // Values are only compatible for monomorphic load if they all behave the same
5936 // regarding value wrappers.
5937 if (type_->Is(Type::NumberOrString())) {
5938 if (!info->type_->Is(Type::NumberOrString())) return false;
5940 if (info->type_->Is(Type::NumberOrString())) return false;
5943 if (!LookupDescriptor()) return false;
// Both not found: compatible only if prototypes agree (lookup would
// continue identically on the prototype chain).
5946 return (!info->IsFound() || info->has_holder()) &&
5947 map()->prototype() == info->map()->prototype();
5950 // Mismatch if the other access info found the property in the prototype
5952 if (info->has_holder()) return false;
// Accessor case: must be the exact same accessor (and API holder).
5955 return accessor_.is_identical_to(info->accessor_) &&
5956 api_holder_.is_identical_to(info->api_holder_);
5960 return constant_.is_identical_to(info->constant_);
5964 if (!info->IsField()) return false;
// Field case: representations must be mutually load/store compatible and
// the fields must live at the same offset/location.
5966 Representation r = access_.representation();
5968 if (!info->access_.representation().IsCompatibleForLoad(r)) return false;
5970 if (!info->access_.representation().IsCompatibleForStore(r)) return false;
5972 if (info->access_.offset() != access_.offset()) return false;
5973 if (info->access_.IsInobject() != access_.IsInobject()) return false;
// Merge field maps: an empty set on either side generalizes to "no maps".
5975 if (field_maps_.is_empty()) {
5976 info->field_maps_.Clear();
5977 } else if (!info->field_maps_.is_empty()) {
5978 for (int i = 0; i < field_maps_.length(); ++i) {
5979 info->field_maps_.AddMapIfMissing(field_maps_.at(i), info->zone());
5981 info->field_maps_.Sort();
5984 // We can only merge stores that agree on their field maps. The comparison
5985 // below is safe, since we keep the field maps sorted.
5986 if (field_maps_.length() != info->field_maps_.length()) return false;
5987 for (int i = 0; i < field_maps_.length(); ++i) {
5988 if (!field_maps_.at(i).is_identical_to(info->field_maps_.at(i))) {
5993 info->GeneralizeRepresentation(r);
5994 info->field_type_ = info->field_type_.Combine(field_type_);
// Looks up the property in the receiver map's own descriptors (class types
// only; value-wrapper types trivially succeed) and interprets the result.
5999 bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupDescriptor() {
6000 if (!type_->IsClass()) return true;
6001 map()->LookupDescriptor(NULL, *name_, &lookup_);
6002 return LoadResult(map());
// Interprets the current lookup_ result against |map|: computes the field
// access (and field maps) for data fields, resolves the getter/setter for
// accessor pairs (including simple API calls), or records the constant.
// Returns false when the access cannot be handled inline. Several return
// statements and the IsField() dispatch line fall in this chunk's gaps.
6006 bool HOptimizedGraphBuilder::PropertyAccessInfo::LoadResult(Handle<Map> map) {
// Stores to read-only properties are rejected here.
6007 if (!IsLoad() && IsProperty() && IsReadOnly()) {
6012 // Construct the object field access.
6013 int index = GetLocalFieldIndexFromMap(map);
6014 access_ = HObjectAccess::ForField(map, index, representation(), name_);
6016 // Load field map for heap objects.
6018 } else if (IsAccessor()) {
6019 Handle<Object> accessors = GetAccessorsFromMap(map);
6020 if (!accessors->IsAccessorPair()) return false;
// Pick getter for loads, setter for stores; only JSFunction accessors can
// be inlined.
6021 Object* raw_accessor =
6022 IsLoad() ? Handle<AccessorPair>::cast(accessors)->getter()
6023 : Handle<AccessorPair>::cast(accessors)->setter();
6024 if (!raw_accessor->IsJSFunction()) return false;
6025 Handle<JSFunction> accessor = handle(JSFunction::cast(raw_accessor));
6026 if (accessor->shared()->IsApiFunction()) {
6027 CallOptimization call_optimization(accessor);
6028 if (call_optimization.is_simple_api_call()) {
6029 CallOptimization::HolderLookup holder_lookup;
6030 Handle<Map> receiver_map = this->map();
6031 api_holder_ = call_optimization.LookupHolderOfExpectedType(
6032 receiver_map, &holder_lookup);
6035 accessor_ = accessor;
6036 } else if (IsConstant()) {
6037 constant_ = GetConstantFromMap(map);
// Collects the stable maps from the field's HeapType into field_maps_ and
// derives the field HType; registers a field-type dependency so the code is
// deoptimized if the field type is later generalized. If any map in the
// type is not stable, no maps are recorded. NOTE(review): the
// Handle<Map> map parameter (line 6045) is in the gap.
6044 void HOptimizedGraphBuilder::PropertyAccessInfo::LoadFieldMaps(
6046 // Clear any previously collected field maps/type.
6047 field_maps_.Clear();
6048 field_type_ = HType::Tagged();
6050 // Figure out the field type from the accessor map.
6051 Handle<HeapType> field_type = GetFieldTypeFromMap(map);
6053 // Collect the (stable) maps from the field type.
6054 int num_field_maps = field_type->NumClasses();
6055 if (num_field_maps == 0) return;
6056 DCHECK(access_.representation().IsHeapObject());
6057 field_maps_.Reserve(num_field_maps, zone());
6058 HeapType::Iterator<Map> it = field_type->Classes();
6059 while (!it.Done()) {
// An unstable map can change shape at any time, so the whole map set is
// abandoned (the early return after Clear() is in the gap).
6060 Handle<Map> field_map = it.Current();
6061 if (!field_map->is_stable()) {
6062 field_maps_.Clear();
6065 field_maps_.Add(field_map, zone());
6069 DCHECK_EQ(num_field_maps, field_maps_.length());
6071 // Determine field HType from field HeapType.
6072 field_type_ = HType::FromType<HeapType>(field_type);
6073 DCHECK(field_type_.IsHeapObject());
6075 // Add dependency on the map that introduced the field.
6076 Map::AddDependentCompilationInfo(GetFieldOwnerFromMap(map),
6077 DependentCode::kFieldTypeGroup, top_info());
// Walks the prototype chain looking for the property, recording the holder;
// migrates deprecated holder maps along the way. Gives up (returns false —
// in this chunk's gaps) when a prototype cannot be inlined; the terminal
// not-found handling after the loop is also outside the visible lines.
6081 bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupInPrototypes() {
6082 Handle<Map> map = this->map();
6084 while (map->prototype()->IsJSObject()) {
6085 holder_ = handle(JSObject::cast(map->prototype()));
6086 if (holder_->map()->is_deprecated()) {
6087 JSObject::TryMigrateInstance(holder_);
6089 map = Handle<Map>(holder_->map());
6090 if (!CanInlinePropertyAccess(ToType(map))) {
6094 map->LookupDescriptor(*holder_, *name_, &lookup_);
6095 if (IsFound()) return LoadResult(map);
// Determines whether this (single-map) property access can be compiled
// inline: own descriptor, prototype-chain hit, accessor, or — for stores —
// a map transition to a new field. Computes access_/field maps as a side
// effect. Several return statements fall in this chunk's gaps.
6102 bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessMonomorphic() {
6103 if (!CanInlinePropertyAccess(type_)) return false;
// Magic JSObject fields (e.g. array length) are load-only.
6104 if (IsJSObjectFieldAccessor()) return IsLoad();
// 'prototype' on a function with a prototype is handled specially
// (HLoadFunctionPrototype in BuildMonomorphicAccess).
6105 if (this->map()->function_with_prototype() &&
6106 !this->map()->has_non_instance_prototype() &&
6107 name_.is_identical_to(isolate()->factory()->prototype_string())) {
6110 if (!LookupDescriptor()) return false;
6111 if (IsFound()) return IsLoad() || !IsReadOnly();
6112 if (!LookupInPrototypes()) return false;
6113 if (IsLoad()) return true;
6115 if (IsAccessor()) return true;
// Store to a not-yet-present property: try the map's transition tree.
6116 Handle<Map> map = this->map();
6117 map->LookupTransition(NULL, *name_, &lookup_);
6118 if (lookup_.IsTransitionToField() && map->unused_property_fields() > 0) {
6119 // Construct the object field access.
6120 int descriptor = transition()->LastAdded();
// NOTE(review): 'int index =' on line 6121 is in the gap; the expression
// below computes the field index relative to the in-object properties.
6122 transition()->instance_descriptors()->GetFieldIndex(descriptor) -
6123 map->inobject_properties();
6124 PropertyDetails details =
6125 transition()->instance_descriptors()->GetDetails(descriptor);
6126 Representation representation = details.representation();
6127 access_ = HObjectAccess::ForField(map, index, representation, name_);
6129 // Load field map for heap objects.
6130 LoadFieldMaps(transition());
// Determines whether a polymorphic site (|types|, with this info built for
// the first type) can still be handled as one monomorphic-style access:
// every type must yield a compatible access. Bounded by the polymorphism
// limit; the trailing 'return true' lines are in this chunk's gaps.
6137 bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessAsMonomorphic(
6138 SmallMapList* types) {
6139 DCHECK(type_->Is(ToType(types->first())));
6140 if (!CanAccessMonomorphic()) return false;
6141 STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism);
6142 if (types->length() > kMaxLoadPolymorphism) return false;
// Magic JSObject field (e.g. array length): every type must expose the
// exact same field access.
6144 HObjectAccess access = HObjectAccess::ForMap(); // bogus default
6145 if (GetJSObjectFieldAccess(&access)) {
6146 for (int i = 1; i < types->length(); ++i) {
6147 PropertyAccessInfo test_info(
6148 builder_, access_type_, ToType(types->at(i)), name_);
6149 HObjectAccess test_access = HObjectAccess::ForMap(); // bogus default
6150 if (!test_info.GetJSObjectFieldAccess(&test_access)) return false;
6151 if (!access.Equals(test_access)) return false;
6156 // Currently only handle Type::Number as a polymorphic case.
6157 // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
6159 if (type_->Is(Type::Number())) return false;
6161 // Multiple maps cannot transition to the same target map.
6162 DCHECK(!IsLoad() || !IsTransition());
6163 if (IsTransition() && types->length() > 1) return false;
// General case: each remaining type's access must merge cleanly into this
// one via IsCompatible.
6165 for (int i = 1; i < types->length(); ++i) {
6166 PropertyAccessInfo test_info(
6167 builder_, access_type_, ToType(types->at(i)), name_);
6168 if (!test_info.IsCompatible(this)) return false;
// Returns the receiver map for this access: for primitive receivers with a
// root constructor (number/string wrapper case) use that constructor's
// initial map, otherwise the class type's own map.
6175 Handle<Map> HOptimizedGraphBuilder::PropertyAccessInfo::map() {
6176 JSFunction* ctor = IC::GetRootConstructor(
6177 type_, current_info()->closure()->context()->native_context());
6178 if (ctor != NULL) return handle(ctor->initial_map());
6179 return type_->AsClass()->Map();
// True when calling |target| on a primitive number/string receiver requires
// wrapping the receiver first: sloppy-mode, non-native functions expect a
// wrapper object as the receiver.
6183 static bool NeedsWrappingFor(Type* type, Handle<JSFunction> target) {
6184 return type->Is(Type::NumberOrString()) &&
6185 target->shared()->strict_mode() == SLOPPY &&
6186 !target->shared()->native();
// Emits the Hydrogen instruction(s) for a single-map named property access,
// dispatching on what |info| resolved the property to: JSObject field,
// function "prototype", plain field, map transition, accessor, or constant.
// Returns NULL when an accessor was inlined (or inlining hit a stack
// overflow); callers must check for that. NOTE(review): some parameter lines
// and brace lines are elided from this excerpt.
6190 HInstruction* HOptimizedGraphBuilder::BuildMonomorphicAccess(
6191 PropertyAccessInfo* info,
6193 HValue* checked_object,
6196 BailoutId return_id,
6197 bool can_inline_accessor) {
6199 HObjectAccess access = HObjectAccess::ForMap(); // bogus default
6200 if (info->GetJSObjectFieldAccess(&access)) {
6201 DCHECK(info->IsLoad());
6202 return New<HLoadNamedField>(object, checked_object, access);
// Loading "prototype" from a function with a prototype slot.
6205 if (info->name().is_identical_to(isolate()->factory()->prototype_string()) &&
6206 info->map()->function_with_prototype()) {
6207 DCHECK(!info->map()->has_non_instance_prototype());
6208 return New<HLoadFunctionPrototype>(checked_object);
// If the property lives on a prototype, guard the whole prototype chain.
6211 HValue* checked_holder = checked_object;
6212 if (info->has_holder()) {
6213 Handle<JSObject> prototype(JSObject::cast(info->map()->prototype()));
6214 checked_holder = BuildCheckPrototypeMaps(prototype, info->holder());
// Property not found anywhere: a load yields undefined.
6217 if (!info->IsFound()) {
6218 DCHECK(info->IsLoad());
6219 return graph()->GetConstantUndefined();
6222 if (info->IsField()) {
6223 if (info->IsLoad()) {
6224 return BuildLoadNamedField(info, checked_holder);
6226 return BuildStoreNamedField(info, checked_object, value);
6230 if (info->IsTransition()) {
6231 DCHECK(!info->IsLoad());
6232 return BuildStoreNamedField(info, checked_object, value);
6235 if (info->IsAccessor()) {
// Receiver (and, for setters, the value) are passed via the environment.
6236 Push(checked_object);
6237 int argument_count = 1;
6238 if (!info->IsLoad()) {
// Primitive receiver + sloppy accessor: call through the wrap-and-call path.
6243 if (NeedsWrappingFor(info->type(), info->accessor())) {
6244 HValue* function = Add<HConstant>(info->accessor());
6245 PushArgumentsFromEnvironment(argument_count);
6246 return New<HCallFunction>(function, argument_count, WRAP_AND_CALL);
6247 } else if (FLAG_inline_accessors && can_inline_accessor) {
6248 bool success = info->IsLoad()
6249 ? TryInlineGetter(info->accessor(), info->map(), ast_id, return_id)
6251 info->accessor(), info->map(), ast_id, return_id, value);
// NULL signals "inlined (or bailed out)"; no call instruction is emitted.
6252 if (success || HasStackOverflow()) return NULL;
6255 PushArgumentsFromEnvironment(argument_count);
6256 return BuildCallConstantFunction(info->accessor(), argument_count);
// Remaining case: the property resolved to a constant.
6259 DCHECK(info->IsConstant());
6260 if (info->IsLoad()) {
6261 return New<HConstant>(info->constant());
// Constant store: just verify the stored value matches the constant.
6263 return New<HCheckValue>(value, Handle<JSFunction>::cast(info->constant()));
// Builds a polymorphic named property access: one map-compare branch per
// receiver map, each leading to a monomorphic access, joined at the end.
// Number receivers get a dedicated smi/heap-number block; string receivers
// share one HIsStringAndBranch. If not all maps were handled, either deopts
// (when feedback covered every map) or falls back to a generic IC.
// NOTE(review): many original lines are elided here (declarations of count/i,
// several loop bodies and closing braces), so the visible flow is partial.
6268 void HOptimizedGraphBuilder::HandlePolymorphicNamedFieldAccess(
6269 PropertyAccessType access_type,
6272 BailoutId return_id,
6275 SmallMapList* types,
6276 Handle<String> name) {
6277 // Something did not match; must use a polymorphic load.
6279 HBasicBlock* join = NULL;
6280 HBasicBlock* number_block = NULL;
6281 bool handled_string = false;
6283 bool handle_smi = false;
6284 STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism);
// First pass: scan feedback maps to see whether a smi/number case exists.
6286 for (i = 0; i < types->length() && count < kMaxLoadPolymorphism; ++i) {
6287 PropertyAccessInfo info(this, access_type, ToType(types->at(i)), name);
6288 if (info.type()->Is(Type::String())) {
// All string maps are folded into a single handled case.
6289 if (handled_string) continue;
6290 handled_string = true;
6292 if (info.CanAccessMonomorphic()) {
6294 if (info.type()->Is(Type::Number())) {
6301 if (i < types->length()) {
6307 HControlInstruction* smi_check = NULL;
6308 handled_string = false;
// Second pass: emit the actual per-map compare-and-access chain.
6310 for (i = 0; i < types->length() && count < kMaxLoadPolymorphism; ++i) {
6311 PropertyAccessInfo info(this, access_type, ToType(types->at(i)), name);
6312 if (info.type()->Is(Type::String())) {
6313 if (handled_string) continue;
6314 handled_string = true;
6316 if (!info.CanAccessMonomorphic()) continue;
6319 join = graph()->CreateBasicBlock();
// Route smis straight to the shared number block before any map checks.
6321 HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
6322 HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
6323 number_block = graph()->CreateBasicBlock();
6324 smi_check = New<HIsSmiAndBranch>(
6325 object, empty_smi_block, not_smi_block);
6326 FinishCurrentBlock(smi_check);
6327 GotoNoSimulate(empty_smi_block, number_block);
6328 set_current_block(not_smi_block);
6330 BuildCheckHeapObject(object);
6334 HBasicBlock* if_true = graph()->CreateBasicBlock();
6335 HBasicBlock* if_false = graph()->CreateBasicBlock();
6336 HUnaryControlInstruction* compare;
// Pick the type test for this case: heap-number map, string check, or an
// exact map compare. |dependency| is what the access may depend on.
6339 if (info.type()->Is(Type::Number())) {
6340 Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
6341 compare = New<HCompareMap>(object, heap_number_map, if_true, if_false);
6342 dependency = smi_check;
6343 } else if (info.type()->Is(Type::String())) {
6344 compare = New<HIsStringAndBranch>(object, if_true, if_false);
6345 dependency = compare;
6347 compare = New<HCompareMap>(object, info.map(), if_true, if_false);
6348 dependency = compare;
6350 FinishCurrentBlock(compare);
// Heap numbers merge into the same block smis were routed to.
6352 if (info.type()->Is(Type::Number())) {
6353 GotoNoSimulate(if_true, number_block);
6354 if_true = number_block;
6357 set_current_block(if_true);
6359 HInstruction* access = BuildMonomorphicAccess(
6360 &info, object, dependency, value, ast_id,
6361 return_id, FLAG_polymorphic_inlining);
6363 HValue* result = NULL;
6364 switch (access_type) {
// access == NULL means the accessor was inlined (or we overflowed the stack).
6373 if (access == NULL) {
6374 if (HasStackOverflow()) return;
6376 if (!access->IsLinked()) AddInstruction(access);
6377 if (!ast_context()->IsEffect()) Push(result);
6380 if (current_block() != NULL) Goto(join);
6381 set_current_block(if_false);
6384 // Finish up. Unconditionally deoptimize if we've handled all the maps we
6385 // know about and do not want to handle ones we've never seen. Otherwise
6386 // use a generic IC.
6387 if (count == types->length() && FLAG_deoptimize_uncommon_cases) {
6388 FinishExitWithHardDeoptimization("Unknown map in polymorphic access");
6390 HInstruction* instr = BuildNamedGeneric(access_type, expr, object, name,
6392 AddInstruction(instr);
6393 if (!ast_context()->IsEffect()) Push(access_type == LOAD ? instr : value);
6398 Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
6399 if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
6404 DCHECK(join != NULL);
// Only wire up the join block if some case actually reached it.
6405 if (join->HasPredecessor()) {
6406 join->SetJoinId(ast_id);
6407 set_current_block(join);
6408 if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
6410 set_current_block(NULL);
// Collects the receiver-type feedback for |expr| and reports whether the
// access is effectively monomorphic and inlinable. When the receiver value
// itself has a known JSObject map, the feedback list is filtered down to
// maps reachable from that map's root (possible transitions).
// NOTE(review): some parameter lines are elided from this excerpt.
6415 static bool ComputeReceiverTypes(Expression* expr,
6419 SmallMapList* types = expr->GetReceiverTypes();
6421 bool monomorphic = expr->IsMonomorphic();
6422 if (types != NULL && receiver->HasMonomorphicJSObjectType()) {
6423 Map* root_map = receiver->GetMonomorphicJSObjectMap()->FindRootMap();
6424 types->FilterForPossibleTransitions(root_map);
// Filtering may have reduced the feedback to a single map.
6425 monomorphic = types->length() == 1;
6427 return monomorphic &&
6428 CanInlinePropertyAccess(IC::MapToType<Type>(types->first(), zone));
// True when every map in |types| is a string map (instance type below
// FIRST_NONSTRING_TYPE). NOTE(review): the trailing return/brace lines are
// elided from this excerpt.
6432 static bool AreStringTypes(SmallMapList* types) {
6433 for (int i = 0; i < types->length(); i++) {
6434 if (types->at(i)->instance_type() >= FIRST_NONSTRING_TYPE) return false;
// Emits the store for a property assignment whose operands are already on
// the environment stack: keyed stores go through HandleKeyedElementAccess,
// named stores through BuildNamedAccess. In both paths the stored value is
// the expression's result value.
6440 void HOptimizedGraphBuilder::BuildStore(Expression* expr,
6443 BailoutId return_id,
6444 bool is_uninitialized) {
// Keyed (computed-name) store: value, key, object were pushed in that order.
6445 if (!prop->key()->IsPropertyName()) {
6447 HValue* value = Pop();
6448 HValue* key = Pop();
6449 HValue* object = Pop();
6450 bool has_side_effects = false;
6451 HValue* result = HandleKeyedElementAccess(
6452 object, key, value, expr, ast_id, return_id, STORE, &has_side_effects);
6453 if (has_side_effects) {
// Keep |value| live across the simulate so the deopt state is correct.
6454 if (!ast_context()->IsEffect()) Push(value);
6455 Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
6456 if (!ast_context()->IsEffect()) Drop(1);
// result == NULL means the access already completed (e.g. inlined setter).
6458 if (result == NULL) return;
6459 return ast_context()->ReturnValue(value);
// Named store path.
6463 HValue* value = Pop();
6464 HValue* object = Pop();
6466 Literal* key = prop->key()->AsLiteral();
6467 Handle<String> name = Handle<String>::cast(key->value());
6468 DCHECK(!name.is_null());
6470 HInstruction* instr = BuildNamedAccess(STORE, ast_id, return_id, expr,
6471 object, name, value, is_uninitialized);
6472 if (instr == NULL) return;
6474 if (!ast_context()->IsEffect()) Push(value);
6475 AddInstruction(instr);
6476 if (instr->HasObservableSideEffects()) {
6477 Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
6479 if (!ast_context()->IsEffect()) Drop(1);
6480 return ast_context()->ReturnValue(value);
// Handles a plain (non-compound) assignment to a property: evaluates the
// receiver, the key (for computed names), and the value, then delegates the
// actual store to BuildStore.
6484 void HOptimizedGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
6485 Property* prop = expr->target()->AsProperty();
6486 DCHECK(prop != NULL);
6487 CHECK_ALIVE(VisitForValue(prop->obj()));
6488 if (!prop->key()->IsPropertyName()) {
6489 CHECK_ALIVE(VisitForValue(prop->key()));
6491 CHECK_ALIVE(VisitForValue(expr->value()));
6492 BuildStore(expr, prop, expr->id(),
6493 expr->AssignmentId(), expr->IsUninitialized());
6497 // Because not every expression has a position and there is not common
6498 // superclass of Assignment and CountOperation, we cannot just pass the
6499 // owning expression instead of position and ast_id separately.
// Stores |value| into the global variable |var|. If the global resolves to a
// property cell, store into the cell directly (guarding any constant-type
// cell with a deopt or an equality check); otherwise fall back to a generic
// named store on the global object. NOTE(review): some parameter and brace
// lines are elided from this excerpt.
6500 void HOptimizedGraphBuilder::HandleGlobalVariableAssignment(
6504 Handle<GlobalObject> global(current_info()->global_object());
6505 LookupIterator it(global, var->name(), LookupIterator::OWN_SKIP_INTERCEPTOR);
6506 GlobalPropertyAccess type = LookupGlobalProperty(var, &it, STORE);
6507 if (type == kUseCell) {
6508 Handle<PropertyCell> cell = it.GetPropertyCell();
// Constant cells: storing a different value must deoptimize, since code may
// have been specialized on the constant.
6509 if (cell->type()->IsConstant()) {
6510 Handle<Object> constant = cell->type()->AsConstant()->Value();
6511 if (value->IsConstant()) {
6512 HConstant* c_value = HConstant::cast(value);
6513 if (!constant.is_identical_to(c_value->handle(isolate()))) {
6514 Add<HDeoptimize>("Constant global variable assignment",
6515 Deoptimizer::EAGER);
// Non-constant value: emit a runtime equality check against the constant.
6518 HValue* c_constant = Add<HConstant>(constant);
6519 IfBuilder builder(this);
6520 if (constant->IsNumber()) {
6521 builder.If<HCompareNumericAndBranch>(value, c_constant, Token::EQ);
6523 builder.If<HCompareObjectEqAndBranch>(value, c_constant);
6527 Add<HDeoptimize>("Constant global variable assignment",
6528 Deoptimizer::EAGER);
6532 HInstruction* instr =
6533 Add<HStoreGlobalCell>(value, cell, it.property_details());
6534 if (instr->HasObservableSideEffects()) {
6535 Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
// Generic path: load the global object from the context, then store by name.
6538 HValue* global_object = Add<HLoadNamedField>(
6539 context(), static_cast<HValue*>(NULL),
6540 HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
6541 HStoreNamedGeneric* instr =
6542 Add<HStoreNamedGeneric>(global_object, var->name(),
6543 value, function_strict_mode());
6545 DCHECK(instr->HasObservableSideEffects());
6546 Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
// Handles compound assignments (x += y, o.p *= y, ...): evaluate the binary
// operation, then store the result back to the variable or property target.
// Bails out for cases the optimizer does not support (let/const compound
// assignment, lookup slots, invalid LHS).
// NOTE(review): several case labels and brace lines are elided here.
6551 void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
6552 Expression* target = expr->target();
6553 VariableProxy* proxy = target->AsVariableProxy();
6554 Property* prop = target->AsProperty();
6555 DCHECK(proxy == NULL || prop == NULL);
6557 // We have a second position recorded in the FullCodeGenerator to have
6558 // type feedback for the binary operation.
6559 BinaryOperation* operation = expr->binary_operation();
// Variable target (x op= y).
6561 if (proxy != NULL) {
6562 Variable* var = proxy->var();
6563 if (var->mode() == LET) {
6564 return Bailout(kUnsupportedLetCompoundAssignment);
6567 CHECK_ALIVE(VisitForValue(operation));
6569 switch (var->location()) {
6570 case Variable::UNALLOCATED:
6571 HandleGlobalVariableAssignment(var,
6573 expr->AssignmentId());
6576 case Variable::PARAMETER:
6577 case Variable::LOCAL:
6578 if (var->mode() == CONST_LEGACY) {
6579 return Bailout(kUnsupportedConstCompoundAssignment);
6581 BindIfLive(var, Top());
6584 case Variable::CONTEXT: {
6585 // Bail out if we try to mutate a parameter value in a function
6586 // using the arguments object. We do not (yet) correctly handle the
6587 // arguments property of the function.
6588 if (current_info()->scope()->arguments() != NULL) {
6589 // Parameters will be allocated to context slots. We have no
6590 // direct way to detect that the variable is a parameter so we do
6591 // a linear search of the parameter variables.
6592 int count = current_info()->scope()->num_parameters();
6593 for (int i = 0; i < count; ++i) {
6594 if (var == current_info()->scope()->parameter(i)) {
6595 Bailout(kAssignmentToParameterFunctionUsesArgumentsObject);
6600 HStoreContextSlot::Mode mode;
6602 switch (var->mode()) {
6604 mode = HStoreContextSlot::kCheckDeoptimize;
6607 // This case is checked statically so no need to
6608 // perform checks here
6611 return ast_context()->ReturnValue(Pop());
6613 mode = HStoreContextSlot::kNoCheck;
6616 HValue* context = BuildContextChainWalk(var);
6617 HStoreContextSlot* instr = Add<HStoreContextSlot>(
6618 context, var->index(), mode, Top());
6619 if (instr->HasObservableSideEffects()) {
6620 Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
6625 case Variable::LOOKUP:
6626 return Bailout(kCompoundAssignmentToLookupSlot);
6628 return ast_context()->ReturnValue(Pop());
// Property target (o.p op= y / o[k] op= y): load, combine, store back.
6630 } else if (prop != NULL) {
6631 CHECK_ALIVE(VisitForValue(prop->obj()));
6632 HValue* object = Top();
6634 if (!prop->key()->IsPropertyName() || prop->IsStringAccess()) {
6635 CHECK_ALIVE(VisitForValue(prop->key()));
6639 CHECK_ALIVE(PushLoad(prop, object, key));
6641 CHECK_ALIVE(VisitForValue(expr->value()));
6642 HValue* right = Pop();
6643 HValue* left = Pop();
6645 Push(BuildBinaryOperation(operation, left, right, PUSH_BEFORE_SIMULATE));
6647 BuildStore(expr, prop, expr->id(),
6648 expr->AssignmentId(), expr->IsUninitialized());
6650 return Bailout(kInvalidLhsInCompoundAssignment);
// AST visitor for assignments. Dispatches to the compound-assignment and
// property-assignment helpers, and handles variable assignments inline per
// variable location (global, stack local/parameter, context slot, lookup).
// NOTE(review): several lines are elided from this excerpt (e.g. between
// 6663/6669 and some switch case labels), so the visible flow is partial.
6655 void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
6656 DCHECK(!HasStackOverflow());
6657 DCHECK(current_block() != NULL);
6658 DCHECK(current_block()->HasPredecessor());
6659 VariableProxy* proxy = expr->target()->AsVariableProxy();
6660 Property* prop = expr->target()->AsProperty();
6661 DCHECK(proxy == NULL || prop == NULL);
6663 if (expr->is_compound()) {
6664 HandleCompoundAssignment(expr);
6669 HandlePropertyAssignment(expr);
6670 } else if (proxy != NULL) {
6671 Variable* var = proxy->var();
// const / legacy-const rules: non-initializer writes bail out or are no-ops.
6673 if (var->mode() == CONST) {
6674 if (expr->op() != Token::INIT_CONST) {
6675 return Bailout(kNonInitializerAssignmentToConst);
6677 } else if (var->mode() == CONST_LEGACY) {
6678 if (expr->op() != Token::INIT_CONST_LEGACY) {
// Legacy const: the assignment is silently ignored; value is the RHS.
6679 CHECK_ALIVE(VisitForValue(expr->value()));
6680 return ast_context()->ReturnValue(Pop());
6683 if (var->IsStackAllocated()) {
6684 // We insert a use of the old value to detect unsupported uses of const
6685 // variables (e.g. initialization inside a loop).
6686 HValue* old_value = environment()->Lookup(var);
6687 Add<HUseConst>(old_value);
6691 if (proxy->IsArguments()) return Bailout(kAssignmentToArguments);
6693 // Handle the assignment.
6694 switch (var->location()) {
6695 case Variable::UNALLOCATED:
6696 CHECK_ALIVE(VisitForValue(expr->value()));
6697 HandleGlobalVariableAssignment(var,
6699 expr->AssignmentId());
6700 return ast_context()->ReturnValue(Pop());
6702 case Variable::PARAMETER:
6703 case Variable::LOCAL: {
6704 // Perform an initialization check for let declared variables
// A hole value in the environment means the let binding is still in its
// temporal dead zone.
6706 if (var->mode() == LET && expr->op() == Token::ASSIGN) {
6707 HValue* env_value = environment()->Lookup(var);
6708 if (env_value == graph()->GetConstantHole()) {
6709 return Bailout(kAssignmentToLetVariableBeforeInitialization);
6712 // We do not allow the arguments object to occur in a context where it
6713 // may escape, but assignments to stack-allocated locals are
6715 CHECK_ALIVE(VisitForValue(expr->value(), ARGUMENTS_ALLOWED));
6716 HValue* value = Pop();
6717 BindIfLive(var, value);
6718 return ast_context()->ReturnValue(value);
6721 case Variable::CONTEXT: {
6722 // Bail out if we try to mutate a parameter value in a function using
6723 // the arguments object. We do not (yet) correctly handle the
6724 // arguments property of the function.
6725 if (current_info()->scope()->arguments() != NULL) {
6726 // Parameters will rewrite to context slots. We have no direct way
6727 // to detect that the variable is a parameter.
6728 int count = current_info()->scope()->num_parameters();
6729 for (int i = 0; i < count; ++i) {
6730 if (var == current_info()->scope()->parameter(i)) {
6731 return Bailout(kAssignmentToParameterInArgumentsObject);
6736 CHECK_ALIVE(VisitForValue(expr->value()));
// Pick the store-check mode from the variable mode and assignment kind.
6737 HStoreContextSlot::Mode mode;
6738 if (expr->op() == Token::ASSIGN) {
6739 switch (var->mode()) {
6741 mode = HStoreContextSlot::kCheckDeoptimize;
6744 // This case is checked statically so no need to
6745 // perform checks here
6748 return ast_context()->ReturnValue(Pop());
6750 mode = HStoreContextSlot::kNoCheck;
6752 } else if (expr->op() == Token::INIT_VAR ||
6753 expr->op() == Token::INIT_LET ||
6754 expr->op() == Token::INIT_CONST) {
6755 mode = HStoreContextSlot::kNoCheck;
6757 DCHECK(expr->op() == Token::INIT_CONST_LEGACY);
6759 mode = HStoreContextSlot::kCheckIgnoreAssignment;
6762 HValue* context = BuildContextChainWalk(var);
6763 HStoreContextSlot* instr = Add<HStoreContextSlot>(
6764 context, var->index(), mode, Top());
6765 if (instr->HasObservableSideEffects()) {
6766 Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
6768 return ast_context()->ReturnValue(Pop());
6771 case Variable::LOOKUP:
// Dynamic-lookup slots are not supported by the optimizing compiler.
6772 return Bailout(kAssignmentToLOOKUPVariable);
6775 return Bailout(kInvalidLeftHandSideInAssignment);
// Yield expressions never reach the optimizing compiler (generator functions
// are excluded from optimization); the elided body presumably asserts
// unreachability — TODO confirm against the full source.
6780 void HOptimizedGraphBuilder::VisitYield(Yield* expr) {
6781 // Generators are not optimized, so we should never get here.
// AST visitor for throw expressions: evaluates the exception value and emits
// a call to Runtime::kThrow. If the throw is in the outermost (non-inlined)
// function it terminates the current block with an abnormal exit.
6786 void HOptimizedGraphBuilder::VisitThrow(Throw* expr) {
6787 DCHECK(!HasStackOverflow());
6788 DCHECK(current_block() != NULL);
6789 DCHECK(current_block()->HasPredecessor());
6790 if (!ast_context()->IsEffect()) {
6791 // The parser turns invalid left-hand sides in assignments into throw
6792 // statements, which may not be in effect contexts. We might still try
6793 // to optimize such functions; bail out now if we do.
6794 return Bailout(kInvalidLeftHandSideInAssignment);
6796 CHECK_ALIVE(VisitForValue(expr->exception()));
6798 HValue* value = environment()->Pop();
6799 if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
6800 Add<HPushArguments>(value);
6801 Add<HCallRuntime>(isolate()->factory()->empty_string(),
6802 Runtime::FunctionForId(Runtime::kThrow), 1);
6803 Add<HSimulate>(expr->id());
6805 // If the throw definitely exits the function, we can finish with a dummy
6806 // control flow at this point. This is not the case if the throw is inside
6807 // an inlined function which may be replaced.
6808 if (call_context() == NULL) {
6809 FinishExitCurrentBlock(New<HAbnormalExit>());
// Emits a load of |string|'s instance type. Constant strings are folded to a
// constant instance type; otherwise load the map, then its instance type.
6814 HInstruction* HGraphBuilder::AddLoadStringInstanceType(HValue* string) {
6815 if (string->IsConstant()) {
6816 HConstant* c_string = HConstant::cast(string);
6817 if (c_string->HasStringValue()) {
6818 return Add<HConstant>(c_string->StringValue()->map()->instance_type());
6821 return Add<HLoadNamedField>(
6822 Add<HLoadNamedField>(string, static_cast<HValue*>(NULL),
6823 HObjectAccess::ForMap()),
6824 static_cast<HValue*>(NULL), HObjectAccess::ForMapInstanceType());
// Emits a load of |string|'s length, folding constant strings to a constant.
6828 HInstruction* HGraphBuilder::AddLoadStringLength(HValue* string) {
6829 if (string->IsConstant()) {
6830 HConstant* c_string = HConstant::cast(string);
6831 if (c_string->HasStringValue()) {
6832 return Add<HConstant>(c_string->StringValue()->length());
6835 return Add<HLoadNamedField>(string, static_cast<HValue*>(NULL),
6836 HObjectAccess::ForStringLength());
// Emits a generic (IC-based) named load or store. Deoptimizes first when the
// access site had no type feedback, so feedback can be collected. With
// --vector-ics, loads are tagged with the feedback vector and slot.
// NOTE(review): some parameter and brace lines are elided from this excerpt.
6840 HInstruction* HOptimizedGraphBuilder::BuildNamedGeneric(
6841 PropertyAccessType access_type,
6844 Handle<String> name,
6846 bool is_uninitialized) {
6847 if (is_uninitialized) {
6848 Add<HDeoptimize>("Insufficient type feedback for generic named access",
6851 if (access_type == LOAD) {
6852 HLoadNamedGeneric* result = New<HLoadNamedGeneric>(object, name);
6853 if (FLAG_vector_ics) {
6854 Handle<SharedFunctionInfo> current_shared =
6855 function_state()->compilation_info()->shared_info();
6856 result->SetVectorAndSlot(
6857 handle(current_shared->feedback_vector(), isolate()),
6858 expr->AsProperty()->PropertyFeedbackSlot());
6862 return New<HStoreNamedGeneric>(object, name, value, function_strict_mode());
// Emits a generic (IC-based) keyed load or store, mirroring BuildNamedGeneric
// for computed keys. With --vector-ics, loads carry the feedback vector/slot.
// NOTE(review): some parameter lines are elided from this excerpt.
6868 HInstruction* HOptimizedGraphBuilder::BuildKeyedGeneric(
6869 PropertyAccessType access_type,
6874 if (access_type == LOAD) {
6875 HLoadKeyedGeneric* result = New<HLoadKeyedGeneric>(object, key);
6876 if (FLAG_vector_ics) {
6877 Handle<SharedFunctionInfo> current_shared =
6878 function_state()->compilation_info()->shared_info();
6879 result->SetVectorAndSlot(
6880 handle(current_shared->feedback_vector(), isolate()),
6881 expr->AsProperty()->PropertyFeedbackSlot());
6885 return New<HStoreKeyedGeneric>(object, key, value, function_strict_mode());
// Decides whether a keyed load from |map| may return the hole without a
// check: only for the initial fast-holey-double JSArray map with an intact
// Array.prototype chain, and then only after guarding that chain and marking
// the graph as depending on empty array-prototype elements.
6890 LoadKeyedHoleMode HOptimizedGraphBuilder::BuildKeyedHoleMode(Handle<Map> map) {
6891 // Loads from a "stock" fast holey double arrays can elide the hole check.
6892 LoadKeyedHoleMode load_mode = NEVER_RETURN_HOLE;
6893 if (*map == isolate()->get_initial_js_array_map(FAST_HOLEY_DOUBLE_ELEMENTS) &&
6894 isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
6895 Handle<JSObject> prototype(JSObject::cast(map->prototype()), isolate());
6896 Handle<JSObject> object_prototype = isolate()->initial_object_prototype();
6897 BuildCheckPrototypeMaps(prototype, object_prototype);
6898 load_mode = ALLOW_RETURN_HOLE;
6899 graph()->MarkDependsOnEmptyArrayProtoElements();
// Emits a single-map element load/store: checks the receiver map, guards the
// prototype chain for stores (prototype elements could intercept the store),
// then delegates to the unchecked monomorphic element access builder.
// NOTE(review): some parameter lines and an iter.Advance() line appear to be
// elided from this excerpt.
6906 HInstruction* HOptimizedGraphBuilder::BuildMonomorphicElementAccess(
6912 PropertyAccessType access_type,
6913 KeyedAccessStoreMode store_mode) {
6914 HCheckMaps* checked_object = Add<HCheckMaps>(object, map, dependency);
6916 checked_object->ClearDependsOnFlag(kElementsKind);
6919 if (access_type == STORE && map->prototype()->IsJSObject()) {
6920 // monomorphic stores need a prototype chain check because shape
6921 // changes could allow callbacks on elements in the chain that
6922 // aren't compatible with monomorphic keyed stores.
6923 PrototypeIterator iter(map);
6924 JSObject* holder = NULL;
// Walk to the end of the prototype chain to find the last holder.
6925 while (!iter.IsAtEnd()) {
6926 holder = JSObject::cast(*PrototypeIterator::GetCurrent(iter));
6929 DCHECK(holder && holder->IsJSObject());
6931 BuildCheckPrototypeMaps(handle(JSObject::cast(map->prototype())),
6932 Handle<JSObject>(holder));
6935 LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
6936 return BuildUncheckedMonomorphicElementAccess(
6937 checked_object, key, val,
6938 map->instance_type() == JS_ARRAY_TYPE,
6939 map->elements_kind(), access_type,
6940 load_mode, store_mode);
// Element accesses can be inlined only for JSObject maps with fast elements
// and no indexed interceptor.
6944 static bool CanInlineElementAccess(Handle<Map> map) {
6945 return map->IsJSObjectMap() && !map->has_slow_elements_kind() &&
6946 !map->has_indexed_interceptor();
// Attempts to collapse a polymorphic element load into a single load using
// the most general of the feedback maps' elements kinds. Returns NULL when
// the maps mix incompatible shapes (JSArray vs. plain JSObject, double vs.
// smi/object elements) or any map cannot be inlined.
// NOTE(review): some parameter lines and brace lines are elided here.
6950 HInstruction* HOptimizedGraphBuilder::TryBuildConsolidatedElementLoad(
6954 SmallMapList* maps) {
6955 // For polymorphic loads of similar elements kinds (i.e. all tagged or all
6956 // double), always use the "worst case" code without a transition. This is
6957 // much faster than transitioning the elements to the worst case, trading a
6958 // HTransitionElements for a HCheckMaps, and avoiding mutation of the array.
6959 bool has_double_maps = false;
6960 bool has_smi_or_object_maps = false;
6961 bool has_js_array_access = false;
6962 bool has_non_js_array_access = false;
6963 bool has_seen_holey_elements = false;
6964 Handle<Map> most_general_consolidated_map;
6965 for (int i = 0; i < maps->length(); ++i) {
6966 Handle<Map> map = maps->at(i);
6967 if (!CanInlineElementAccess(map)) return NULL;
6968 // Don't allow mixing of JSArrays with JSObjects.
6969 if (map->instance_type() == JS_ARRAY_TYPE) {
6970 if (has_non_js_array_access) return NULL;
6971 has_js_array_access = true;
6972 } else if (has_js_array_access) {
6975 has_non_js_array_access = true;
6977 // Don't allow mixed, incompatible elements kinds.
6978 if (map->has_fast_double_elements()) {
6979 if (has_smi_or_object_maps) return NULL;
6980 has_double_maps = true;
6981 } else if (map->has_fast_smi_or_object_elements()) {
6982 if (has_double_maps) return NULL;
6983 has_smi_or_object_maps = true;
6987 // Remember if we've ever seen holey elements.
6988 if (IsHoleyElementsKind(map->elements_kind())) {
6989 has_seen_holey_elements = true;
6991 // Remember the most general elements kind, the code for its load will
6992 // properly handle all of the more specific cases.
6993 if ((i == 0) || IsMoreGeneralElementsKindTransition(
6994 most_general_consolidated_map->elements_kind(),
6995 map->elements_kind())) {
6996 most_general_consolidated_map = map;
6999 if (!has_double_maps && !has_smi_or_object_maps) return NULL;
// One map check covering all feedback maps, then a single worst-case load.
7001 HCheckMaps* checked_object = Add<HCheckMaps>(object, maps);
7002 // FAST_ELEMENTS is considered more general than FAST_HOLEY_SMI_ELEMENTS.
7003 // If we've seen both, the consolidated load must use FAST_HOLEY_ELEMENTS.
7004 ElementsKind consolidated_elements_kind = has_seen_holey_elements
7005 ? GetHoleyElementsKind(most_general_consolidated_map->elements_kind())
7006 : most_general_consolidated_map->elements_kind();
7007 HInstruction* instr = BuildUncheckedMonomorphicElementAccess(
7008 checked_object, key, val,
7009 most_general_consolidated_map->instance_type() == JS_ARRAY_TYPE,
7010 consolidated_elements_kind,
7011 LOAD, NEVER_RETURN_HOLE, STANDARD_STORE);
// Builds a polymorphic keyed element access. Strategy, in order: try a
// consolidated load covering all maps; apply elements-kind transitions where
// feedback allows; if one map remains, emit a monomorphic access; otherwise
// emit a per-map compare chain joined at the end, deopting when no map
// matches. Falls back to a generic keyed IC for sloppy-arguments elements.
// NOTE(review): several parameter lines and brace lines are elided here.
7016 HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess(
7022 PropertyAccessType access_type,
7023 KeyedAccessStoreMode store_mode,
7024 bool* has_side_effects) {
7025 *has_side_effects = false;
7026 BuildCheckHeapObject(object);
7028 if (access_type == LOAD) {
7029 HInstruction* consolidated_load =
7030 TryBuildConsolidatedElementLoad(object, key, val, maps);
7031 if (consolidated_load != NULL) {
7032 *has_side_effects |= consolidated_load->HasObservableSideEffects();
7033 return consolidated_load;
7037 // Elements_kind transition support.
7038 MapHandleList transition_target(maps->length());
7039 // Collect possible transition targets.
7040 MapHandleList possible_transitioned_maps(maps->length());
7041 for (int i = 0; i < maps->length(); ++i) {
7042 Handle<Map> map = maps->at(i);
7043 DCHECK(!map->IsStringMap());
7044 ElementsKind elements_kind = map->elements_kind();
7045 if (CanInlineElementAccess(map) && IsFastElementsKind(elements_kind) &&
7046 elements_kind != GetInitialFastElementsKind()) {
7047 possible_transitioned_maps.Add(map);
// Sloppy-arguments elements cannot be handled inline at all.
7049 if (elements_kind == SLOPPY_ARGUMENTS_ELEMENTS) {
7050 HInstruction* result = BuildKeyedGeneric(access_type, expr, object, key,
7052 *has_side_effects = result->HasObservableSideEffects();
7053 return AddInstruction(result);
7056 // Get transition target for each map (NULL == no transition).
7057 for (int i = 0; i < maps->length(); ++i) {
7058 Handle<Map> map = maps->at(i);
7059 Handle<Map> transitioned_map =
7060 map->FindTransitionedMap(&possible_transitioned_maps);
7061 transition_target.Add(transitioned_map);
// Emit transitions; maps without a target are handled directly below.
7064 MapHandleList untransitionable_maps(maps->length());
7065 HTransitionElementsKind* transition = NULL;
7066 for (int i = 0; i < maps->length(); ++i) {
7067 Handle<Map> map = maps->at(i);
7068 DCHECK(map->IsMap());
7069 if (!transition_target.at(i).is_null()) {
7070 DCHECK(Map::IsValidElementsTransition(
7071 map->elements_kind(),
7072 transition_target.at(i)->elements_kind()));
7073 transition = Add<HTransitionElementsKind>(object, map,
7074 transition_target.at(i));
7076 untransitionable_maps.Add(map);
7080 // If only one map is left after transitioning, handle this case
7082 DCHECK(untransitionable_maps.length() >= 1);
7083 if (untransitionable_maps.length() == 1) {
7084 Handle<Map> untransitionable_map = untransitionable_maps[0];
7085 HInstruction* instr = NULL;
7086 if (!CanInlineElementAccess(untransitionable_map)) {
7087 instr = AddInstruction(BuildKeyedGeneric(access_type, expr, object, key,
7090 instr = BuildMonomorphicElementAccess(
7091 object, key, val, transition, untransitionable_map, access_type,
7094 *has_side_effects |= instr->HasObservableSideEffects();
// Stores produce the stored value; loads produce the loaded value.
7095 return access_type == STORE ? val : instr;
// True polymorphic case: one compare-map branch per remaining map.
7098 HBasicBlock* join = graph()->CreateBasicBlock();
7100 for (int i = 0; i < untransitionable_maps.length(); ++i) {
7101 Handle<Map> map = untransitionable_maps[i];
7102 ElementsKind elements_kind = map->elements_kind();
7103 HBasicBlock* this_map = graph()->CreateBasicBlock();
7104 HBasicBlock* other_map = graph()->CreateBasicBlock();
7105 HCompareMap* mapcompare =
7106 New<HCompareMap>(object, map, this_map, other_map);
7107 FinishCurrentBlock(mapcompare);
7109 set_current_block(this_map);
7110 HInstruction* access = NULL;
7111 if (!CanInlineElementAccess(map)) {
7112 access = AddInstruction(BuildKeyedGeneric(access_type, expr, object, key,
7115 DCHECK(IsFastElementsKind(elements_kind) ||
7116 IsExternalArrayElementsKind(elements_kind) ||
7117 IsFixedTypedArrayElementsKind(elements_kind));
7118 LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
7119 // Happily, mapcompare is a checked object.
7120 access = BuildUncheckedMonomorphicElementAccess(
7121 mapcompare, key, val,
7122 map->instance_type() == JS_ARRAY_TYPE,
7123 elements_kind, access_type,
7127 *has_side_effects |= access->HasObservableSideEffects();
7128 // The caller will use has_side_effects and add a correct Simulate.
7129 access->SetFlag(HValue::kHasNoObservableSideEffects);
7130 if (access_type == LOAD) {
7133 NoObservableSideEffectsScope scope(this);
7134 GotoNoSimulate(join);
7135 set_current_block(other_map);
7138 // Ensure that we visited at least one map above that goes to join. This is
7139 // necessary because FinishExitWithHardDeoptimization does an AbnormalExit
7140 // rather than joining the join block. If this becomes an issue, insert a
7141 // generic access in the case length() == 0.
7142 DCHECK(join->predecessors()->length() > 0);
7143 // Deopt if none of the cases matched.
7144 NoObservableSideEffectsScope scope(this);
7145 FinishExitWithHardDeoptimization("Unknown map in polymorphic element access");
7146 set_current_block(join);
7147 return access_type == STORE ? val : Pop();
// Emits a keyed property access (obj[key]) for a load or store.
// A constant string key that is not an array index is rerouted through
// BuildNamedAccess as a named property access; otherwise type feedback
// (ComputeReceiverTypes) selects monomorphic, polymorphic, or generic keyed
// element access.  On exit, *has_side_effects reports whether the emitted
// instruction has observable side effects.
// NOTE(review): some original lines are elided in this excerpt (gaps in the
// embedded numbering), including closing braces and a few statements.
7151 HValue* HOptimizedGraphBuilder::HandleKeyedElementAccess(
7152     HValue* obj, HValue* key, HValue* val, Expression* expr, BailoutId ast_id,
7153     BailoutId return_id, PropertyAccessType access_type,
7154     bool* has_side_effects) {
// Fast path: the key is a compile-time constant string that is not a valid
// array index, so this is really a named property access in disguise.
7155 if (key->ActualValue()->IsConstant()) {
7156 Handle<Object> constant =
7157 HConstant::cast(key->ActualValue())->handle(isolate());
7158 uint32_t array_index;
7159 if (constant->IsString() &&
7160 !Handle<String>::cast(constant)->AsArrayIndex(&array_index)) {
// Internalize the string so it can serve as a unique property name.
7161 if (!constant->IsUniqueName()) {
7162 constant = isolate()->factory()->InternalizeString(
7163 Handle<String>::cast(constant));
7165 HInstruction* instr =
7166 BuildNamedAccess(access_type, ast_id, return_id, expr, obj,
7167 Handle<String>::cast(constant), val, false);
// A NULL or already-linked instruction needs no further wiring here.
7168 if (instr == NULL || instr->IsLinked()) {
7169 *has_side_effects = false;
7171 AddInstruction(instr);
7172 *has_side_effects = instr->HasObservableSideEffects();
7178 DCHECK(!expr->IsPropertyName());
7179 HInstruction* instr = NULL;
7181 SmallMapList* types;
7182 bool monomorphic = ComputeReceiverTypes(expr, obj, &types, zone());
// Decide whether to force the generic keyed IC instead of specialized access.
7184 bool force_generic = false;
7185 if (access_type == STORE && expr->GetKeyType() == PROPERTY) {
7186 // Non-Generic accesses assume that elements are being accessed, and will
7187 // deopt for non-index keys, which the IC knows will occur.
7188 // TODO(jkummerow): Consider adding proper support for property accesses.
7189 force_generic = true;
7190 monomorphic = false;
7191 } else if (access_type == STORE &&
7192 (monomorphic || (types != NULL && !types->is_empty()))) {
7193 // Stores can't be mono/polymorphic if their prototype chain has dictionary
7194 // elements. However a receiver map that has dictionary elements itself
7195 // should be left to normal mono/poly behavior (the other maps may benefit
7196 // from highly optimized stores).
7197 for (int i = 0; i < types->length(); i++) {
7198 Handle<Map> current_map = types->at(i);
7199 if (current_map->DictionaryElementsInPrototypeChainOnly()) {
7200 force_generic = true;
7201 monomorphic = false;
7205 } else if (access_type == LOAD && !monomorphic &&
7206 (types != NULL && !types->is_empty())) {
7207 // Polymorphic loads have to go generic if any of the maps are strings.
7208 // If some, but not all of the maps are strings, we should go generic
7209 // because polymorphic access wants to key on ElementsKind and isn't
7210 // compatible with strings.
7211 for (int i = 0; i < types->length(); i++) {
7212 Handle<Map> current_map = types->at(i);
7213 if (current_map->IsStringMap()) {
7214 force_generic = true;
// Monomorphic receiver: inline the element access when the single map allows
// it, otherwise fall back to the generic keyed IC.
7221 Handle<Map> map = types->first();
7222 if (!CanInlineElementAccess(map)) {
7223 instr = AddInstruction(BuildKeyedGeneric(access_type, expr, obj, key,
7226 BuildCheckHeapObject(obj);
7227 instr = BuildMonomorphicElementAccess(
7228 obj, key, val, NULL, map, access_type, expr->GetStoreMode());
// Polymorphic receiver: delegate entirely (note the early return — the
// polymorphic helper also fills in *has_side_effects).
7230 } else if (!force_generic && (types != NULL && !types->is_empty())) {
7231 return HandlePolymorphicElementAccess(
7232 expr, obj, key, val, types, access_type,
7233 expr->GetStoreMode(), has_side_effects);
// Generic path: if there is no type feedback at all, insert a soft deopt so
// the next optimization attempt can use fresher feedback.
7235 if (access_type == STORE) {
7236 if (expr->IsAssignment() &&
7237 expr->AsAssignment()->HasNoTypeInformation()) {
7238 Add<HDeoptimize>("Insufficient type feedback for keyed store",
7242 if (expr->AsProperty()->HasNoTypeInformation()) {
7243 Add<HDeoptimize>("Insufficient type feedback for keyed load",
7247 instr = AddInstruction(BuildKeyedGeneric(access_type, expr, obj, key, val));
7249 *has_side_effects = instr->HasObservableSideEffects();
// Materializes the arguments of the current inlined function by inserting one
// HPushArguments per argument value directly after the HEnterInlined entry,
// followed by an HArgumentsElements.  Runs at most once per inlined function
// (guarded by arguments_pushed()) and is a no-op in the outermost function.
7254 void HOptimizedGraphBuilder::EnsureArgumentsArePushedForAccess() {
7255 // Outermost function already has arguments on the stack.
7256 if (function_state()->outer() == NULL) return;
7258 if (function_state()->arguments_pushed()) return;
7260 // Push arguments when entering inlined function.
7261 HEnterInlined* entry = function_state()->entry();
7262 entry->set_arguments_pushed();
7264 HArgumentsObject* arguments = entry->arguments_object();
7265 const ZoneList<HValue*>* arguments_values = arguments->arguments_values();
// Chain the pushes so they stay in argument order right after the entry.
7267 HInstruction* insert_after = entry;
7268 for (int i = 0; i < arguments_values->length(); i++) {
7269 HValue* argument = arguments_values->at(i);
7270 HInstruction* push_argument = New<HPushArguments>(argument);
7271 push_argument->InsertAfter(insert_after);
7272 insert_after = push_argument;
// Clearing kUseGVN keeps this instruction from being value-numbered away;
// presumably it must stay anchored after the pushes above — TODO confirm.
7275 HArgumentsElements* arguments_elements = New<HArgumentsElements>(true);
7276 arguments_elements->ClearFlag(HValue::kUseGVN);
7277 arguments_elements->InsertAfter(insert_after);
7278 function_state()->set_arguments_elements(arguments_elements);
// Tries to compile a property access on the arguments object directly:
// either `arguments.length` or `arguments[i]` with a bounds check.  Returns
// true when the access was handled (result delivered via the ast context);
// returns false so the caller falls back to the normal path otherwise.
// NOTE(review): some original lines are elided here (numbering gaps), mostly
// early-return bodies and closing braces.
7282 bool HOptimizedGraphBuilder::TryArgumentsAccess(Property* expr) {
7283 VariableProxy* proxy = expr->obj()->AsVariableProxy();
7284 if (proxy == NULL) return false;
7285 if (!proxy->var()->IsStackAllocated()) return false;
// Only applies when the variable really holds the arguments object.
7286 if (!environment()->Lookup(proxy->var())->CheckFlag(HValue::kIsArguments)) {
7290 HInstruction* result = NULL;
7291 if (expr->key()->IsPropertyName()) {
// Named access: only `arguments.length` is supported.
7292 Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
7293 if (!String::Equals(name, isolate()->factory()->length_string())) {
7297 if (function_state()->outer() == NULL) {
// Outermost function: read the length from the actual arguments elements.
7298 HInstruction* elements = Add<HArgumentsElements>(false);
7299 result = New<HArgumentsLength>(elements);
7301 // Number of arguments without receiver.
// Inlined function: the argument count is a compile-time constant.
7302 int argument_count = environment()->
7303 arguments_environment()->parameter_count() - 1;
7304 result = New<HConstant>(argument_count);
// Keyed access: arguments[key] with an explicit bounds check.
7307 Push(graph()->GetArgumentsObject());
7308 CHECK_ALIVE_OR_RETURN(VisitForValue(expr->key()), true);
7309 HValue* key = Pop();
7310 Drop(1); // Arguments object.
7311 if (function_state()->outer() == NULL) {
7312 HInstruction* elements = Add<HArgumentsElements>(false);
7313 HInstruction* length = Add<HArgumentsLength>(elements);
7314 HInstruction* checked_key = Add<HBoundsCheck>(key, length);
7315 result = New<HAccessArgumentsAt>(elements, length, checked_key);
// Inlined function: materialize arguments first, then use the constant count.
7317 EnsureArgumentsArePushedForAccess();
7319 // Number of arguments without receiver.
7320 HInstruction* elements = function_state()->arguments_elements();
7321 int argument_count = environment()->
7322 arguments_environment()->parameter_count() - 1;
7323 HInstruction* length = Add<HConstant>(argument_count);
7324 HInstruction* checked_key = Add<HBoundsCheck>(key, length);
7325 result = New<HAccessArgumentsAt>(elements, length, checked_key);
7328 ast_context()->ReturnInstruction(result, expr->id());
// Builds a named property load or store on |object| using type feedback.
// Chooses polymorphic handling, a checked monomorphic access, or the generic
// named IC.  NOTE(review): some parameter lines and closing braces are elided
// in this excerpt (numbering gaps).
7333 HInstruction* HOptimizedGraphBuilder::BuildNamedAccess(
7334     PropertyAccessType access,
7336     BailoutId return_id,
7339     Handle<String> name,
7341     bool is_uninitialized) {
7342 SmallMapList* types;
7343 ComputeReceiverTypes(expr, object, &types, zone());
7344 DCHECK(types != NULL);
7346 if (types->length() > 0) {
7347 PropertyAccessInfo info(this, access, ToType(types->first()), name);
// Not monomorphic across all feedback maps: go polymorphic.
7348 if (!info.CanAccessAsMonomorphic(types)) {
7349 HandlePolymorphicNamedFieldAccess(
7350 access, expr, ast_id, return_id, object, value, types, name);
// Monomorphic: guard the receiver (string instance type or map set), then
// build the specialized access.
7354 HValue* checked_object;
7355 // Type::Number() is only supported by polymorphic load/call handling.
7356 DCHECK(!info.type()->Is(Type::Number()));
7357 BuildCheckHeapObject(object);
7358 if (AreStringTypes(types)) {
7360 Add<HCheckInstanceType>(object, HCheckInstanceType::IS_STRING);
7362 checked_object = Add<HCheckMaps>(object, types);
7364 return BuildMonomorphicAccess(
7365 &info, object, checked_object, value, ast_id, return_id);
// No usable feedback: fall back to the generic named IC.
7368 return BuildNamedGeneric(access, expr, object, name, value, is_uninitialized);
// Performs a property load in a fresh value context (arguments object use not
// allowed) and leaves the result on the environment's expression stack.
// NOTE(review): parameter lines and some pushes are elided in this excerpt.
7372 void HOptimizedGraphBuilder::PushLoad(Property* expr,
7375 ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
7377 if (key != NULL) Push(key);
7378 BuildLoad(expr, expr->LoadId());
// Emits the load for a Property expression whose operands are already on the
// expression stack: string character access, named property load, or keyed
// element access.  Delivers the result through the current ast context.
// NOTE(review): some lines are elided in this excerpt (numbering gaps).
7382 void HOptimizedGraphBuilder::BuildLoad(Property* expr,
7384 HInstruction* instr = NULL;
7385 if (expr->IsStringAccess()) {
// s[i] on a string: charCodeAt followed by fromCharCode.
7386 HValue* index = Pop();
7387 HValue* string = Pop();
7388 HInstruction* char_code = BuildStringCharCodeAt(string, index);
7389 AddInstruction(char_code);
7390 instr = NewUncasted<HStringCharFromCode>(char_code);
7392 } else if (expr->key()->IsPropertyName()) {
// Named load, e.g. obj.name.
7393 Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
7394 HValue* object = Pop();
7396 instr = BuildNamedAccess(LOAD, ast_id, expr->LoadId(), expr,
7397 object, name, NULL, expr->IsUninitialized());
// NULL means the helper already bailed out; a linked instruction was already
// added to the graph and only needs to be returned as a value.
7398 if (instr == NULL) return;
7399 if (instr->IsLinked()) return ast_context()->ReturnValue(instr);
// Keyed load, e.g. obj[key].
7402 HValue* key = Pop();
7403 HValue* obj = Pop();
7405 bool has_side_effects = false;
7406 HValue* load = HandleKeyedElementAccess(
7407 obj, key, NULL, expr, ast_id, expr->LoadId(), LOAD, &has_side_effects);
// Side-effecting loads need a simulate so deopt can resume after them.
7408 if (has_side_effects) {
7409 if (ast_context()->IsEffect()) {
7410 Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
7413 Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
7417 if (load == NULL) return;
7418 return ast_context()->ReturnValue(load);
7420 return ast_context()->ReturnInstruction(instr, ast_id);
// AST visitor for a property access expression.  First tries the specialized
// arguments-object path; otherwise evaluates the receiver (and the key, when
// one is needed) and delegates to BuildLoad.
7424 void HOptimizedGraphBuilder::VisitProperty(Property* expr) {
7425 DCHECK(!HasStackOverflow());
7426 DCHECK(current_block() != NULL);
7427 DCHECK(current_block()->HasPredecessor());
7429 if (TryArgumentsAccess(expr)) return;
7431 CHECK_ALIVE(VisitForValue(expr->obj()));
// Keyed accesses and string accesses need the key value on the stack too.
7432 if (!expr->key()->IsPropertyName() || expr->IsStringAccess()) {
7433 CHECK_ALIVE(VisitForValue(expr->key()));
7436 BuildLoad(expr, expr->id());
// Adds a map check on the given constant JSObject.  The kElementsKind
// dependency is cleared because the map identity, not the elements kind, is
// what this check guards.
7440 HInstruction* HGraphBuilder::BuildConstantMapCheck(Handle<JSObject> constant) {
7441 HCheckMaps* check = Add<HCheckMaps>(
7442 Add<HConstant>(constant), handle(constant->map()));
7443 check->ClearDependsOnFlag(kElementsKind);
// Walks the prototype chain from |prototype| and emits a constant map check
// for each object up to (and including) |holder|.  A null |holder| means the
// walk continues to the end of the chain.
// NOTE(review): the loop-advance and end-of-chain lines are elided in this
// excerpt (numbering gaps).
7448 HInstruction* HGraphBuilder::BuildCheckPrototypeMaps(Handle<JSObject> prototype,
7449 Handle<JSObject> holder) {
7450 PrototypeIterator iter(isolate(), prototype,
7451 PrototypeIterator::START_AT_RECEIVER);
7452 while (holder.is_null() ||
7453 !PrototypeIterator::GetCurrent(iter).is_identical_to(holder)) {
7454 BuildConstantMapCheck(
7455 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)));
7457 if (iter.IsAtEnd()) {
// Final check on the holder itself; its HCheckMaps is the returned value.
7461 return BuildConstantMapCheck(
7462 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)));
// Convenience wrapper: when a holder is known, check the maps of the whole
// prototype chain from the receiver map's prototype down to the holder.
7466 void HOptimizedGraphBuilder::AddCheckPrototypeMaps(Handle<JSObject> holder,
7467 Handle<Map> receiver_map) {
7468 if (!holder.is_null()) {
7469 Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
7470 BuildCheckPrototypeMaps(prototype, holder);
// Creates a direct JS function call instruction (no argument adaptation).
7475 HInstruction* HOptimizedGraphBuilder::NewPlainFunctionCall(
7476     HValue* fun, int argument_count, bool pass_argument_count) {
7477 return New<HCallJSFunction>(
7478 fun, argument_count, pass_argument_count);
// Creates a call through the ArgumentsAdaptorTrampoline builtin, used when
// the actual argument count may differ from the callee's formal parameter
// count.  |argument_count| includes the receiver, hence the "- 1" for arity.
7482 HInstruction* HOptimizedGraphBuilder::NewArgumentAdaptorCall(
7483     HValue* fun, HValue* context,
7484     int argument_count, HValue* expected_param_count) {
7485 ArgumentAdaptorDescriptor descriptor(isolate());
7486 HValue* arity = Add<HConstant>(argument_count - 1);
// Operand order must match the ArgumentAdaptorDescriptor's environment.
7488 HValue* op_vals[] = { context, fun, arity, expected_param_count };
7490 Handle<Code> adaptor =
7491 isolate()->builtins()->ArgumentsAdaptorTrampoline();
7492 HConstant* adaptor_value = Add<HConstant>(adaptor);
7494 return New<HCallWithDescriptor>(
7495 adaptor_value, argument_count, descriptor,
7496 Vector<HValue*>(op_vals, descriptor.GetEnvironmentLength()));
// Builds a call to a known (constant) JSFunction.  Calls directly when the
// arity matches the formal parameter count (or the function doesn't adapt
// arguments); otherwise routes through the arguments adaptor.
7500 HInstruction* HOptimizedGraphBuilder::BuildCallConstantFunction(
7501     Handle<JSFunction> jsfun, int argument_count) {
7502 HValue* target = Add<HConstant>(jsfun);
7503 // For constant functions, we try to avoid calling the
7504 // argument adaptor and instead call the function directly
7505 int formal_parameter_count = jsfun->shared()->formal_parameter_count();
7506 bool dont_adapt_arguments =
7507 (formal_parameter_count ==
7508 SharedFunctionInfo::kDontAdaptArgumentsSentinel);
// argument_count includes the receiver; arity does not.
7509 int arity = argument_count - 1;
7510 bool can_invoke_directly =
7511 dont_adapt_arguments || formal_parameter_count == arity;
7512 if (can_invoke_directly) {
// Self-calls are flagged so later passes know the graph is recursive.
7513 if (jsfun.is_identical_to(current_info()->closure())) {
7514 graph()->MarkRecursive();
7516 return NewPlainFunctionCall(target, argument_count, dont_adapt_arguments);
// Mismatched arity: go through the arguments adaptor with the callee's own
// context and expected parameter count.
7518 HValue* param_count_value = Add<HConstant>(formal_parameter_count);
7519 HValue* context = Add<HLoadNamedField>(
7520 target, static_cast<HValue*>(NULL),
7521 HObjectAccess::ForFunctionContextPointer());
7522 return NewArgumentAdaptorCall(target, context,
7523 argument_count, param_count_value);
// Sort key for ordering polymorphic call targets: index into the feedback
// map list, profiler tick count, and inlining AST size.  Used with the
// operator< below to try hot (and small) targets first.
// NOTE(review): the private field declarations are elided in this excerpt.
7530 class FunctionSorter {
7532 explicit FunctionSorter(int index = 0, int ticks = 0, int size = 0)
7533 : index_(index), ticks_(ticks), size_(size) {}
7535 int index() const { return index_; }
7536 int ticks() const { return ticks_; }
7537 int size() const { return size_; }
// Orders FunctionSorter entries: more profiler ticks first; ties broken by
// smaller AST size (cheaper to inline) first.
7546 inline bool operator<(const FunctionSorter& lhs, const FunctionSorter& rhs) {
7547 int diff = lhs.ticks() - rhs.ticks();
7548 if (diff != 0) return diff > 0;
7549 return lhs.size() < rhs.size();
// Emits a polymorphic named method call: for each receiver map with a known
// constant JSFunction target, builds a map-dispatched branch that either
// inlines the call or calls the constant function directly; falls back to a
// hard deopt or a generic call for unknown maps.
// NOTE(review): a number of original lines are elided in this excerpt
// (numbering gaps), including loop headers' increments and closing braces.
7553 void HOptimizedGraphBuilder::HandlePolymorphicCallNamed(
7556     SmallMapList* types,
7557     Handle<String> name) {
7558 int argument_count = expr->arguments()->length() + 1;  // Includes receiver.
7559 FunctionSorter order[kMaxCallPolymorphism];
7561 bool handle_smi = false;
7562 bool handled_string = false;
7563 int ordered_functions = 0;
// Pass 1: collect the candidate targets (constant JSFunctions reachable
// monomorphically from each feedback map), at most kMaxCallPolymorphism.
7566 for (i = 0; i < types->length() && ordered_functions < kMaxCallPolymorphism;
7568 PropertyAccessInfo info(this, LOAD, ToType(types->at(i)), name);
7569 if (info.CanAccessMonomorphic() && info.IsConstant() &&
7570 info.constant()->IsJSFunction()) {
// All string maps share one handler; only record the first.
7571 if (info.type()->Is(Type::String())) {
7572 if (handled_string) continue;
7573 handled_string = true;
7575 Handle<JSFunction> target = Handle<JSFunction>::cast(info.constant());
7576 if (info.type()->Is(Type::Number())) {
7579 expr->set_target(target);
7580 order[ordered_functions++] = FunctionSorter(
7581 i, target->shared()->profiler_ticks(), InliningAstSize(target));
// Try the hottest / smallest targets first (see operator< above).
7585 std::sort(order, order + ordered_functions);
// If not every feedback map produced a candidate, mark the dispatch as
// incomplete; presumably this forces the generic fallback — TODO confirm
// against the elided lines.
7587 if (i < types->length()) {
7589 ordered_functions = -1;
7592 HBasicBlock* number_block = NULL;
7593 HBasicBlock* join = NULL;
7594 handled_string = false;
// Pass 2: emit one map-compare branch per ordered target.
7597 for (int fn = 0; fn < ordered_functions; ++fn) {
7598 int i = order[fn].index();
7599 PropertyAccessInfo info(this, LOAD, ToType(types->at(i)), name);
7600 if (info.type()->Is(Type::String())) {
7601 if (handled_string) continue;
7602 handled_string = true;
7604 // Reloads the target.
7605 info.CanAccessMonomorphic();
7606 Handle<JSFunction> target = Handle<JSFunction>::cast(info.constant());
7608 expr->set_target(target);
7610 // Only needed once.
7611 join = graph()->CreateBasicBlock();
// First iteration also splits off the smi path feeding the number block.
7613 HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
7614 HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
7615 number_block = graph()->CreateBasicBlock();
7616 FinishCurrentBlock(New<HIsSmiAndBranch>(
7617 receiver, empty_smi_block, not_smi_block));
7618 GotoNoSimulate(empty_smi_block, number_block);
7619 set_current_block(not_smi_block);
7621 BuildCheckHeapObject(receiver);
7625 HBasicBlock* if_true = graph()->CreateBasicBlock();
7626 HBasicBlock* if_false = graph()->CreateBasicBlock();
7627 HUnaryControlInstruction* compare;
// Dispatch on heap-number map, string instance type, or the exact map.
7629 Handle<Map> map = info.map();
7630 if (info.type()->Is(Type::Number())) {
7631 Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
7632 compare = New<HCompareMap>(receiver, heap_number_map, if_true, if_false);
7633 } else if (info.type()->Is(Type::String())) {
7634 compare = New<HIsStringAndBranch>(receiver, if_true, if_false);
7636 compare = New<HCompareMap>(receiver, map, if_true, if_false);
7638 FinishCurrentBlock(compare);
// Numbers merge the smi path and the heap-number path into number_block.
7640 if (info.type()->Is(Type::Number())) {
7641 GotoNoSimulate(if_true, number_block);
7642 if_true = number_block;
7645 set_current_block(if_true);
7647 AddCheckPrototypeMaps(info.holder(), map);
7649 HValue* function = Add<HConstant>(expr->target());
7650 environment()->SetExpressionStackAt(0, function);
7652 CHECK_ALIVE(VisitExpressions(expr->arguments()));
// Number/string receivers need wrapping, which rules out inlining.
7653 bool needs_wrapping = NeedsWrappingFor(info.type(), target);
7654 bool try_inline = FLAG_polymorphic_inlining && !needs_wrapping;
7655 if (FLAG_trace_inlining && try_inline) {
7656 Handle<JSFunction> caller = current_info()->closure();
7657 SmartArrayPointer<char> caller_name =
7658 caller->shared()->DebugName()->ToCString();
7659 PrintF("Trying to inline the polymorphic call to %s from %s\n",
7660 name->ToCString().get(),
7663 if (try_inline && TryInlineCall(expr)) {
7664 // Trying to inline will signal that we should bailout from the
7665 // entire compilation by setting stack overflow on the visitor.
7666 if (HasStackOverflow()) return;
7668 // Since HWrapReceiver currently cannot actually wrap numbers and strings,
7669 // use the regular CallFunctionStub for method calls to wrap the receiver.
7670 // TODO(verwaest): Support creation of value wrappers directly in
7672 HInstruction* call = needs_wrapping
7673 ? NewUncasted<HCallFunction>(
7674 function, argument_count, WRAP_AND_CALL)
7675 : BuildCallConstantFunction(target, argument_count);
7676 PushArgumentsFromEnvironment(argument_count);
7677 AddInstruction(call);
7678 Drop(1);  // Drop the function.
7679 if (!ast_context()->IsEffect()) Push(call);
7682 if (current_block() != NULL) Goto(join);
7683 set_current_block(if_false);
7686 // Finish up. Unconditionally deoptimize if we've handled all the maps we
7687 // know about and do not want to handle ones we've never seen. Otherwise
7688 // use a generic IC.
7689 if (ordered_functions == types->length() && FLAG_deoptimize_uncommon_cases) {
7690 FinishExitWithHardDeoptimization("Unknown map in polymorphic call");
// Generic fallback: load the method generically and call it.
7692 Property* prop = expr->expression()->AsProperty();
7693 HInstruction* function = BuildNamedGeneric(
7694 LOAD, prop, receiver, name, NULL, prop->IsUninitialized());
7695 AddInstruction(function);
7697 AddSimulate(prop->LoadId(), REMOVABLE_SIMULATE);
7699 environment()->SetExpressionStackAt(1, function);
7700 environment()->SetExpressionStackAt(0, receiver);
7701 CHECK_ALIVE(VisitExpressions(expr->arguments()));
7703 CallFunctionFlags flags = receiver->type().IsJSObject()
7704 ? NO_CALL_FUNCTION_FLAGS : CALL_AS_METHOD;
7705 HInstruction* call = New<HCallFunction>(
7706 function, argument_count, flags);
7708 PushArgumentsFromEnvironment(argument_count);
7710 Drop(1);  // Function.
7713 AddInstruction(call);
7714 if (!ast_context()->IsEffect()) Push(call);
7717 return ast_context()->ReturnInstruction(call, expr->id());
7721 // We assume that control flow is always live after an expression. So
7722 // even without predecessors to the join block, we set it as the exit
7723 // block and continue by adding instructions there.
7724 DCHECK(join != NULL);
7725 if (join->HasPredecessor()) {
7726 set_current_block(join);
7727 join->SetJoinId(expr->id());
7728 if (!ast_context()->IsEffect()) return ast_context()->ReturnValue(Pop());
7730 set_current_block(NULL);
// Prints an inlining trace line when --trace-inlining is on.  A NULL |reason|
// reports a successful inline; otherwise |reason| explains the refusal.
7735 void HOptimizedGraphBuilder::TraceInline(Handle<JSFunction> target,
7736                                          Handle<JSFunction> caller,
7737                                          const char* reason) {
7738 if (FLAG_trace_inlining) {
7739 SmartArrayPointer<char> target_name =
7740 target->shared()->DebugName()->ToCString();
7741 SmartArrayPointer<char> caller_name =
7742 caller->shared()->DebugName()->ToCString();
7743 if (reason == NULL) {
7744 PrintF("Inlined %s called from %s.\n", target_name.get(),
7747 PrintF("Did not inline %s called from %s (%s).\n",
7748 target_name.get(), caller_name.get(), reason);
// Sentinel AST-size value meaning "this target must not be inlined".
static const int kNotInlinable = 1000000000;
// Returns the AST node count of |target| to use as its inlining cost, or
// kNotInlinable when inlining is ruled out (api functions, oversized source,
// non-inlineable or optimization-disabled targets, non-inline builtins).
// NOTE(review): a few lines are elided in this excerpt (numbering gaps).
7757 int HOptimizedGraphBuilder::InliningAstSize(Handle<JSFunction> target) {
7758 if (!FLAG_use_inlining) return kNotInlinable;
7760 // Precondition: call is monomorphic and we have found a target with the
7761 // appropriate arity.
7762 Handle<JSFunction> caller = current_info()->closure();
7763 Handle<SharedFunctionInfo> target_shared(target->shared());
7765 // Always inline builtins marked for inlining.
7766 if (target->IsBuiltin()) {
7767 return target_shared->inline_builtin() ? 0 : kNotInlinable;
7770 if (target_shared->IsApiFunction()) {
7771 TraceInline(target, caller, "target is api function");
7772 return kNotInlinable;
7775 // Do a quick check on source code length to avoid parsing large
7776 // inlining candidates.
7777 if (target_shared->SourceSize() >
7778 Min(FLAG_max_inlined_source_size, kUnlimitedMaxInlinedSourceSize)) {
7779 TraceInline(target, caller, "target text too big");
7780 return kNotInlinable;
7783 // Target must be inlineable.
7784 if (!target_shared->IsInlineable()) {
7785 TraceInline(target, caller, "target not inlineable");
7786 return kNotInlinable;
7788 if (target_shared->DisableOptimizationReason() != kNoReason) {
7789 TraceInline(target, caller, "target contains unsupported syntax [early]");
7790 return kNotInlinable;
7793 int nodes_added = target_shared->ast_node_count();
// Attempts to inline a call to |target| into the current graph.  Runs a long
// series of eligibility checks (size, depth, recursion, cumulative budget,
// parseability, declarations, deopt support); once all pass, parses and
// type-checks the target, sets up an inlined environment and FunctionState,
// visits the target's body, and wires up the function's exits according to
// the inlining kind and call context.  Returns false only from the early
// eligibility checks; after the "decision" point it always inlines.
// NOTE(review): many original lines are elided in this excerpt (numbering
// gaps), including several `return false;` / closing-brace lines.
7798 bool HOptimizedGraphBuilder::TryInline(Handle<JSFunction> target,
7799                                        int arguments_count,
7800                                        HValue* implicit_return_value,
7802                                        BailoutId return_id,
7803                                        InliningKind inlining_kind,
7804                                        HSourcePosition position) {
7805 int nodes_added = InliningAstSize(target);
7806 if (nodes_added == kNotInlinable) return false;
7808 Handle<JSFunction> caller = current_info()->closure();
7810 if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
7811 TraceInline(target, caller, "target AST is too large [early]");
7815 // Don't inline deeper than the maximum number of inlining levels.
7816 HEnvironment* env = environment();
7817 int current_level = 1;
7818 while (env->outer() != NULL) {
7819 if (current_level == FLAG_max_inlining_levels) {
7820 TraceInline(target, caller, "inline depth limit reached");
7823 if (env->outer()->frame_type() == JS_FUNCTION) {
7829 // Don't inline recursive functions.
7830 for (FunctionState* state = function_state();
7832 state = state->outer()) {
7833 if (*state->compilation_info()->closure() == *target) {
7834 TraceInline(target, caller, "target is recursive");
7839 // We don't want to add more than a certain number of nodes from inlining.
7840 if (inlined_count_ > Min(FLAG_max_inlined_nodes_cumulative,
7841 kUnlimitedMaxInlinedNodesCumulative)) {
7842 TraceInline(target, caller, "cumulative AST node limit reached");
7846 // Parse and allocate variables.
7847 CompilationInfo target_info(target, zone());
7848 // Use the same AstValueFactory for creating strings in the sub-compilation
7849 // step, but don't transfer ownership to target_info.
7850 target_info.SetAstValueFactory(top_info()->ast_value_factory(), false);
7851 Handle<SharedFunctionInfo> target_shared(target->shared());
7852 if (!Compiler::ParseAndAnalyze(&target_info)) {
7853 if (target_info.isolate()->has_pending_exception()) {
7854 // Parse or scope error, never optimize this function.
7856 target_shared->DisableOptimization(kParseScopeError);
7858 TraceInline(target, caller, "parse failure");
7862 if (target_info.scope()->num_heap_slots() > 0) {
7863 TraceInline(target, caller, "target has context-allocated variables");
7866 FunctionLiteral* function = target_info.function();
7868 // The following conditions must be checked again after re-parsing, because
7869 // earlier the information might not have been complete due to lazy parsing.
7870 nodes_added = function->ast_node_count();
7871 if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
7872 TraceInline(target, caller, "target AST is too large [late]");
7875 if (function->dont_optimize()) {
7876 TraceInline(target, caller, "target contains unsupported syntax [late]");
7880 // If the function uses the arguments object check that inlining of functions
7881 // with arguments object is enabled and the arguments-variable is
7883 if (function->scope()->arguments() != NULL) {
7884 if (!FLAG_inline_arguments) {
7885 TraceInline(target, caller, "target uses arguments object");
7889 if (!function->scope()->arguments()->IsStackAllocated()) {
7892 "target uses non-stackallocated arguments object");
7897 // All declarations must be inlineable.
7898 ZoneList<Declaration*>* decls = target_info.scope()->declarations();
7899 int decl_count = decls->length();
7900 for (int i = 0; i < decl_count; ++i) {
7901 if (!decls->at(i)->IsInlineable()) {
7902 TraceInline(target, caller, "target has non-trivial declaration");
7907 // Generate the deoptimization data for the unoptimized version of
7908 // the target function if we don't already have it.
7909 if (!Compiler::EnsureDeoptimizationSupport(&target_info)) {
7910 TraceInline(target, caller, "could not generate deoptimization info");
7914 // ----------------------------------------------------------------
7915 // After this point, we've made a decision to inline this function (so
7916 // TryInline should always return true).
7918 // Type-check the inlined function.
7919 DCHECK(target_shared->has_deoptimization_support());
7920 AstTyper::Run(&target_info);
7922 int function_id = graph()->TraceInlinedFunction(target_shared, position);
7924 // Save the pending call context. Set up new one for the inlined function.
7925 // The function state is new-allocated because we need to delete it
7926 // in two different places.
7927 FunctionState* target_state = new FunctionState(
7928 this, &target_info, inlining_kind, function_id);
7930 HConstant* undefined = graph()->GetConstantUndefined();
// Build the callee's environment: copy-for-inlining, bound to the callee's
// own context constant.
7932 HEnvironment* inner_env =
7933 environment()->CopyForInlining(target,
7937 function_state()->inlining_kind());
7939 HConstant* context = Add<HConstant>(Handle<Context>(target->context()));
7940 inner_env->BindContext(context);
7942 // Create a dematerialized arguments object for the function, also copy the
7943 // current arguments values to use them for materialization.
7944 HEnvironment* arguments_env = inner_env->arguments_environment();
7945 int parameter_count = arguments_env->parameter_count();
7946 HArgumentsObject* arguments_object = Add<HArgumentsObject>(parameter_count);
7947 for (int i = 0; i < parameter_count; i++) {
7948 arguments_object->AddArgument(arguments_env->Lookup(i), zone());
7951 // If the function uses arguments object then bind bind one.
7952 if (function->scope()->arguments() != NULL) {
7953 DCHECK(function->scope()->arguments()->IsStackAllocated());
7954 inner_env->Bind(function->scope()->arguments(), arguments_object);
7957 // Capture the state before invoking the inlined function for deopt in the
7958 // inlined function. This simulate has no bailout-id since it's not directly
7959 // reachable for deopt, and is only used to capture the state. If the simulate
7960 // becomes reachable by merging, the ast id of the simulate merged into it is
7962 Add<HSimulate>(BailoutId::None());
7964 current_block()->UpdateEnvironment(inner_env);
7965 Scope* saved_scope = scope();
7966 set_scope(target_info.scope());
7967 HEnterInlined* enter_inlined =
7968 Add<HEnterInlined>(return_id, target, context, arguments_count, function,
7969 function_state()->inlining_kind(),
7970 function->scope()->arguments(), arguments_object);
7971 function_state()->set_entry(enter_inlined);
// Graph-build the callee's body in the inlined environment.
7973 VisitDeclarations(target_info.scope()->declarations());
7974 VisitStatements(function->body());
7975 set_scope(saved_scope);
7976 if (HasStackOverflow()) {
7977 // Bail out if the inline function did, as we cannot residualize a call
7978 // instead, but do not disable optimization for the outer function.
7979 TraceInline(target, caller, "inline graph construction failed");
7980 target_shared->DisableOptimization(kInliningBailedOut);
7981 current_info()->RetryOptimization(kInliningBailedOut);
7982 delete target_state;
7986 // Update inlined nodes count.
7987 inlined_count_ += nodes_added;
7989 Handle<Code> unoptimized_code(target_shared->code());
7990 DCHECK(unoptimized_code->kind() == Code::FUNCTION);
7991 Handle<TypeFeedbackInfo> type_info(
7992 TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
7993 graph()->update_type_change_checksum(type_info->own_type_change_checksum());
7995 TraceInline(target, caller, NULL);
// If control fell off the end of the callee's body, synthesize its implicit
// return according to the inlining kind and the surrounding call context.
7997 if (current_block() != NULL) {
7998 FunctionState* state = function_state();
7999 if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
8000 // Falling off the end of an inlined construct call. In a test context the
8001 // return value will always evaluate to true, in a value context the
8002 // return value is the newly allocated receiver.
8003 if (call_context()->IsTest()) {
8004 Goto(inlined_test_context()->if_true(), state);
8005 } else if (call_context()->IsEffect()) {
8006 Goto(function_return(), state);
8008 DCHECK(call_context()->IsValue());
8009 AddLeaveInlined(implicit_return_value, state);
8011 } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
8012 // Falling off the end of an inlined setter call. The returned value is
8013 // never used, the value of an assignment is always the value of the RHS
8014 // of the assignment.
8015 if (call_context()->IsTest()) {
8016 inlined_test_context()->ReturnValue(implicit_return_value);
8017 } else if (call_context()->IsEffect()) {
8018 Goto(function_return(), state);
8020 DCHECK(call_context()->IsValue());
8021 AddLeaveInlined(implicit_return_value, state);
8024 // Falling off the end of a normal inlined function. This basically means
8025 // returning undefined.
8026 if (call_context()->IsTest()) {
8027 Goto(inlined_test_context()->if_false(), state);
8028 } else if (call_context()->IsEffect()) {
8029 Goto(function_return(), state);
8031 DCHECK(call_context()->IsValue());
8032 AddLeaveInlined(undefined, state);
8037 // Fix up the function exits.
8038 if (inlined_test_context() != NULL) {
8039 HBasicBlock* if_true = inlined_test_context()->if_true();
8040 HBasicBlock* if_false = inlined_test_context()->if_false();
8042 HEnterInlined* entry = function_state()->entry();
8044 // Pop the return test context from the expression context stack.
8045 DCHECK(ast_context() == inlined_test_context());
8046 ClearInlinedTestContext();
8047 delete target_state;
8049 // Forward to the real test context.
8050 if (if_true->HasPredecessor()) {
8051 entry->RegisterReturnTarget(if_true, zone());
8052 if_true->SetJoinId(ast_id);
8053 HBasicBlock* true_target = TestContext::cast(ast_context())->if_true();
8054 Goto(if_true, true_target, function_state());
8056 if (if_false->HasPredecessor()) {
8057 entry->RegisterReturnTarget(if_false, zone());
8058 if_false->SetJoinId(ast_id);
8059 HBasicBlock* false_target = TestContext::cast(ast_context())->if_false();
8060 Goto(if_false, false_target, function_state());
8062 set_current_block(NULL);
8065 } else if (function_return()->HasPredecessor()) {
8066 function_state()->entry()->RegisterReturnTarget(function_return(), zone());
8067 function_return()->SetJoinId(ast_id);
8068 set_current_block(function_return());
8070 set_current_block(NULL);
8072 delete target_state;
// Convenience wrapper: inline a plain call expression.
// NOTE(review): some argument lines are elided in this excerpt.
8077 bool HOptimizedGraphBuilder::TryInlineCall(Call* expr) {
8078 return TryInline(expr->target(),
8079 expr->arguments()->length(),
8084 ScriptPositionToSourcePosition(expr->position()));
// Convenience wrapper: inline a constructor call; |implicit_return_value| is
// the value produced when the constructor falls off its end.
8088 bool HOptimizedGraphBuilder::TryInlineConstruct(CallNew* expr,
8089                                                 HValue* implicit_return_value) {
8090 return TryInline(expr->target(),
8091 expr->arguments()->length(),
8092 implicit_return_value,
8095 CONSTRUCT_CALL_RETURN,
8096 ScriptPositionToSourcePosition(expr->position()));
// Inlines a property getter, preferring the API-getter fast path when it
// applies.  NOTE(review): the TryInline argument lines are elided here.
8100 bool HOptimizedGraphBuilder::TryInlineGetter(Handle<JSFunction> getter,
8101                                              Handle<Map> receiver_map,
8103                                              BailoutId return_id) {
8104 if (TryInlineApiGetter(getter, receiver_map, ast_id)) return true;
8105 return TryInline(getter,
// Inlines a property setter, preferring the API-setter fast path when it
// applies; the assignment's RHS is the setter's implicit return value.
// NOTE(review): several TryInline argument lines are elided here.
8115 bool HOptimizedGraphBuilder::TryInlineSetter(Handle<JSFunction> setter,
8116                                              Handle<Map> receiver_map,
8118                                              BailoutId assignment_id,
8119                                              HValue* implicit_return_value) {
8120 if (TryInlineApiSetter(setter, receiver_map, id)) return true;
8121 return TryInline(setter,
8123 implicit_return_value,
// Inlines a function invoked indirectly (e.g. via call/apply), with an
// explicitly supplied argument count.
// NOTE(review): most TryInline argument lines are elided here.
8130 bool HOptimizedGraphBuilder::TryInlineIndirectCall(Handle<JSFunction> function,
8132                                                    int arguments_count) {
8133 return TryInline(function,
8139 ScriptPositionToSourcePosition(expr->position()));
// Tries to replace a call to a builtin function (e.g. Math.*) with a direct
// Hydrogen instruction.  Returns false when the builtin is not recognized so
// the caller emits a normal call.
// NOTE(review): the switch statement and several case labels are elided in
// this excerpt (numbering gaps) — the visible bodies are individual cases.
8143 bool HOptimizedGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr) {
8144 if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
8145 BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
8148 if (!FLAG_fast_math) break;
8149 // Fall through if FLAG_fast_math.
// Unary math builtin: replace the call with HUnaryMathOperation.
8157 if (expr->arguments()->length() == 1) {
8158 HValue* argument = Pop();
8159 Drop(2);  // Receiver and function.
8160 HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
8161 ast_context()->ReturnInstruction(op, expr->id());
// Math.imul: replace the call with an integer multiply.
8166 if (expr->arguments()->length() == 2) {
8167 HValue* right = Pop();
8168 HValue* left = Pop();
8169 Drop(2);  // Receiver and function.
8170 HInstruction* op = HMul::NewImul(zone(), context(), left, right);
8171 ast_context()->ReturnInstruction(op, expr->id());
8176 // Not supported for inlining yet.
8183 bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
8184 Call* expr, Handle<JSFunction> function, Handle<Map> receiver_map,
8185 int args_count_no_receiver) {
8186 if (!function->shared()->HasBuiltinFunctionId()) return false;
8187 BuiltinFunctionId id = function->shared()->builtin_function_id();
8188 int argument_count = args_count_no_receiver + 1; // Plus receiver.
8190 if (receiver_map.is_null()) {
8191 HValue* receiver = environment()->ExpressionStackAt(args_count_no_receiver);
8192 if (receiver->IsConstant() &&
8193 HConstant::cast(receiver)->handle(isolate())->IsHeapObject()) {
8195 handle(Handle<HeapObject>::cast(
8196 HConstant::cast(receiver)->handle(isolate()))->map());
8199 // Try to inline calls like Math.* as operations in the calling function.
8201 case kStringCharCodeAt:
8203 if (argument_count == 2) {
8204 HValue* index = Pop();
8205 HValue* string = Pop();
8206 Drop(1); // Function.
8207 HInstruction* char_code =
8208 BuildStringCharCodeAt(string, index);
8209 if (id == kStringCharCodeAt) {
8210 ast_context()->ReturnInstruction(char_code, expr->id());
8213 AddInstruction(char_code);
8214 HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
8215 ast_context()->ReturnInstruction(result, expr->id());
8219 case kStringFromCharCode:
8220 if (argument_count == 2) {
8221 HValue* argument = Pop();
8222 Drop(2); // Receiver and function.
8223 HInstruction* result = NewUncasted<HStringCharFromCode>(argument);
8224 ast_context()->ReturnInstruction(result, expr->id());
8229 if (!FLAG_fast_math) break;
8230 // Fall through if FLAG_fast_math.
8238 if (argument_count == 2) {
8239 HValue* argument = Pop();
8240 Drop(2); // Receiver and function.
8241 HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
8242 ast_context()->ReturnInstruction(op, expr->id());
8247 if (argument_count == 3) {
8248 HValue* right = Pop();
8249 HValue* left = Pop();
8250 Drop(2); // Receiver and function.
8251 HInstruction* result = NULL;
8252 // Use sqrt() if exponent is 0.5 or -0.5.
8253 if (right->IsConstant() && HConstant::cast(right)->HasDoubleValue()) {
8254 double exponent = HConstant::cast(right)->DoubleValue();
8255 if (exponent == 0.5) {
8256 result = NewUncasted<HUnaryMathOperation>(left, kMathPowHalf);
8257 } else if (exponent == -0.5) {
8258 HValue* one = graph()->GetConstant1();
8259 HInstruction* sqrt = AddUncasted<HUnaryMathOperation>(
8260 left, kMathPowHalf);
8261 // MathPowHalf doesn't have side effects so there's no need for
8262 // an environment simulation here.
8263 DCHECK(!sqrt->HasObservableSideEffects());
8264 result = NewUncasted<HDiv>(one, sqrt);
8265 } else if (exponent == 2.0) {
8266 result = NewUncasted<HMul>(left, left);
8270 if (result == NULL) {
8271 result = NewUncasted<HPower>(left, right);
8273 ast_context()->ReturnInstruction(result, expr->id());
8279 if (argument_count == 3) {
8280 HValue* right = Pop();
8281 HValue* left = Pop();
8282 Drop(2); // Receiver and function.
8283 HMathMinMax::Operation op = (id == kMathMin) ? HMathMinMax::kMathMin
8284 : HMathMinMax::kMathMax;
8285 HInstruction* result = NewUncasted<HMathMinMax>(left, right, op);
8286 ast_context()->ReturnInstruction(result, expr->id());
8291 if (argument_count == 3) {
8292 HValue* right = Pop();
8293 HValue* left = Pop();
8294 Drop(2); // Receiver and function.
8295 HInstruction* result = HMul::NewImul(zone(), context(), left, right);
8296 ast_context()->ReturnInstruction(result, expr->id());
8301 if (receiver_map.is_null()) return false;
8302 if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
8303 ElementsKind elements_kind = receiver_map->elements_kind();
8304 if (!IsFastElementsKind(elements_kind)) return false;
8305 if (receiver_map->is_observed()) return false;
8306 if (!receiver_map->is_extensible()) return false;
8308 Drop(args_count_no_receiver);
8310 HValue* reduced_length;
8311 HValue* receiver = Pop();
8313 HValue* checked_object = AddCheckMap(receiver, receiver_map);
8314 HValue* length = Add<HLoadNamedField>(
8315 checked_object, static_cast<HValue*>(NULL),
8316 HObjectAccess::ForArrayLength(elements_kind));
8318 Drop(1); // Function.
8320 { NoObservableSideEffectsScope scope(this);
8321 IfBuilder length_checker(this);
8323 HValue* bounds_check = length_checker.If<HCompareNumericAndBranch>(
8324 length, graph()->GetConstant0(), Token::EQ);
8325 length_checker.Then();
8327 if (!ast_context()->IsEffect()) Push(graph()->GetConstantUndefined());
8329 length_checker.Else();
8330 HValue* elements = AddLoadElements(checked_object);
8331 // Ensure that we aren't popping from a copy-on-write array.
8332 if (IsFastSmiOrObjectElementsKind(elements_kind)) {
8333 elements = BuildCopyElementsOnWrite(checked_object, elements,
8334 elements_kind, length);
8336 reduced_length = AddUncasted<HSub>(length, graph()->GetConstant1());
8337 result = AddElementAccess(elements, reduced_length, NULL,
8338 bounds_check, elements_kind, LOAD);
8339 Factory* factory = isolate()->factory();
8340 double nan_double = FixedDoubleArray::hole_nan_as_double();
8341 HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
8342 ? Add<HConstant>(factory->the_hole_value())
8343 : Add<HConstant>(nan_double);
8344 if (IsFastSmiOrObjectElementsKind(elements_kind)) {
8345 elements_kind = FAST_HOLEY_ELEMENTS;
8348 elements, reduced_length, hole, bounds_check, elements_kind, STORE);
8349 Add<HStoreNamedField>(
8350 checked_object, HObjectAccess::ForArrayLength(elements_kind),
8351 reduced_length, STORE_TO_INITIALIZED_ENTRY);
8353 if (!ast_context()->IsEffect()) Push(result);
8355 length_checker.End();
8357 result = ast_context()->IsEffect() ? graph()->GetConstant0() : Top();
8358 Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
8359 if (!ast_context()->IsEffect()) Drop(1);
8361 ast_context()->ReturnValue(result);
8365 if (receiver_map.is_null()) return false;
8366 if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
8367 ElementsKind elements_kind = receiver_map->elements_kind();
8368 if (!IsFastElementsKind(elements_kind)) return false;
8369 if (receiver_map->is_observed()) return false;
8370 if (JSArray::IsReadOnlyLengthDescriptor(receiver_map)) return false;
8371 if (!receiver_map->is_extensible()) return false;
8373 // If there may be elements accessors in the prototype chain, the fast
8374 // inlined version can't be used.
8375 if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;
8376 // If there currently can be no elements accessors on the prototype chain,
8377 // it doesn't mean that there won't be any later. Install a full prototype
8378 // chain check to trap element accessors being installed on the prototype
8379 // chain, which would cause elements to go to dictionary mode and result
8381 Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
8382 BuildCheckPrototypeMaps(prototype, Handle<JSObject>());
8384 const int argc = args_count_no_receiver;
8385 if (argc != 1) return false;
8387 HValue* value_to_push = Pop();
8388 HValue* array = Pop();
8389 Drop(1); // Drop function.
8391 HInstruction* new_size = NULL;
8392 HValue* length = NULL;
8395 NoObservableSideEffectsScope scope(this);
8397 length = Add<HLoadNamedField>(array, static_cast<HValue*>(NULL),
8398 HObjectAccess::ForArrayLength(elements_kind));
8400 new_size = AddUncasted<HAdd>(length, graph()->GetConstant1());
8402 bool is_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
8403 BuildUncheckedMonomorphicElementAccess(array, length,
8404 value_to_push, is_array,
8405 elements_kind, STORE,
8407 STORE_AND_GROW_NO_TRANSITION);
8409 if (!ast_context()->IsEffect()) Push(new_size);
8410 Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
8411 if (!ast_context()->IsEffect()) Drop(1);
8414 ast_context()->ReturnValue(new_size);
8418 if (receiver_map.is_null()) return false;
8419 if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
8420 ElementsKind kind = receiver_map->elements_kind();
8421 if (!IsFastElementsKind(kind)) return false;
8422 if (receiver_map->is_observed()) return false;
8423 if (!receiver_map->is_extensible()) return false;
8425 // If there may be elements accessors in the prototype chain, the fast
8426 // inlined version can't be used.
8427 if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;
8429 // If there currently can be no elements accessors on the prototype chain,
8430 // it doesn't mean that there won't be any later. Install a full prototype
8431 // chain check to trap element accessors being installed on the prototype
8432 // chain, which would cause elements to go to dictionary mode and result
8434 BuildCheckPrototypeMaps(
8435 handle(JSObject::cast(receiver_map->prototype()), isolate()),
8436 Handle<JSObject>::null());
8438 // Threshold for fast inlined Array.shift().
8439 HConstant* inline_threshold = Add<HConstant>(static_cast<int32_t>(16));
8441 Drop(args_count_no_receiver);
8442 HValue* receiver = Pop();
8443 HValue* function = Pop();
8447 NoObservableSideEffectsScope scope(this);
8449 HValue* length = Add<HLoadNamedField>(
8450 receiver, static_cast<HValue*>(NULL),
8451 HObjectAccess::ForArrayLength(kind));
8453 IfBuilder if_lengthiszero(this);
8454 HValue* lengthiszero = if_lengthiszero.If<HCompareNumericAndBranch>(
8455 length, graph()->GetConstant0(), Token::EQ);
8456 if_lengthiszero.Then();
8458 if (!ast_context()->IsEffect()) Push(graph()->GetConstantUndefined());
8460 if_lengthiszero.Else();
8462 HValue* elements = AddLoadElements(receiver);
8464 // Check if we can use the fast inlined Array.shift().
8465 IfBuilder if_inline(this);
8466 if_inline.If<HCompareNumericAndBranch>(
8467 length, inline_threshold, Token::LTE);
8468 if (IsFastSmiOrObjectElementsKind(kind)) {
8469 // We cannot handle copy-on-write backing stores here.
8470 if_inline.AndIf<HCompareMap>(
8471 elements, isolate()->factory()->fixed_array_map());
8475 // Remember the result.
8476 if (!ast_context()->IsEffect()) {
8477 Push(AddElementAccess(elements, graph()->GetConstant0(), NULL,
8478 lengthiszero, kind, LOAD));
8481 // Compute the new length.
8482 HValue* new_length = AddUncasted<HSub>(
8483 length, graph()->GetConstant1());
8484 new_length->ClearFlag(HValue::kCanOverflow);
8486 // Copy the remaining elements.
8487 LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
8489 HValue* new_key = loop.BeginBody(
8490 graph()->GetConstant0(), new_length, Token::LT);
8491 HValue* key = AddUncasted<HAdd>(new_key, graph()->GetConstant1());
8492 key->ClearFlag(HValue::kCanOverflow);
8493 HValue* element = AddUncasted<HLoadKeyed>(
8494 elements, key, lengthiszero, kind, ALLOW_RETURN_HOLE);
8495 HStoreKeyed* store = Add<HStoreKeyed>(
8496 elements, new_key, element, kind);
8497 store->SetFlag(HValue::kAllowUndefinedAsNaN);
8501 // Put a hole at the end.
8502 HValue* hole = IsFastSmiOrObjectElementsKind(kind)
8503 ? Add<HConstant>(isolate()->factory()->the_hole_value())
8504 : Add<HConstant>(FixedDoubleArray::hole_nan_as_double());
8505 if (IsFastSmiOrObjectElementsKind(kind)) kind = FAST_HOLEY_ELEMENTS;
8507 elements, new_length, hole, kind, INITIALIZING_STORE);
8509 // Remember new length.
8510 Add<HStoreNamedField>(
8511 receiver, HObjectAccess::ForArrayLength(kind),
8512 new_length, STORE_TO_INITIALIZED_ENTRY);
8516 Add<HPushArguments>(receiver);
8517 result = Add<HCallJSFunction>(function, 1, true);
8518 if (!ast_context()->IsEffect()) Push(result);
8522 if_lengthiszero.End();
8524 result = ast_context()->IsEffect() ? graph()->GetConstant0() : Top();
8525 Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
8526 if (!ast_context()->IsEffect()) Drop(1);
8527 ast_context()->ReturnValue(result);
8531 case kArrayLastIndexOf: {
8532 if (receiver_map.is_null()) return false;
8533 if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
8534 ElementsKind kind = receiver_map->elements_kind();
8535 if (!IsFastElementsKind(kind)) return false;
8536 if (receiver_map->is_observed()) return false;
8537 if (argument_count != 2) return false;
8538 if (!receiver_map->is_extensible()) return false;
8540 // If there may be elements accessors in the prototype chain, the fast
8541 // inlined version can't be used.
8542 if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;
8544 // If there currently can be no elements accessors on the prototype chain,
8545 // it doesn't mean that there won't be any later. Install a full prototype
8546 // chain check to trap element accessors being installed on the prototype
8547 // chain, which would cause elements to go to dictionary mode and result
8549 BuildCheckPrototypeMaps(
8550 handle(JSObject::cast(receiver_map->prototype()), isolate()),
8551 Handle<JSObject>::null());
8553 HValue* search_element = Pop();
8554 HValue* receiver = Pop();
8555 Drop(1); // Drop function.
8557 ArrayIndexOfMode mode = (id == kArrayIndexOf)
8558 ? kFirstIndexOf : kLastIndexOf;
8559 HValue* index = BuildArrayIndexOf(receiver, search_element, kind, mode);
8561 if (!ast_context()->IsEffect()) Push(index);
8562 Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
8563 if (!ast_context()->IsEffect()) Drop(1);
8564 ast_context()->ReturnValue(index);
8568 // Not yet supported for inlining.
8575 bool HOptimizedGraphBuilder::TryInlineApiFunctionCall(Call* expr,
8577 Handle<JSFunction> function = expr->target();
8578 int argc = expr->arguments()->length();
8579 SmallMapList receiver_maps;
8580 return TryInlineApiCall(function,
8589 bool HOptimizedGraphBuilder::TryInlineApiMethodCall(
8592 SmallMapList* receiver_maps) {
8593 Handle<JSFunction> function = expr->target();
8594 int argc = expr->arguments()->length();
8595 return TryInlineApiCall(function,
8604 bool HOptimizedGraphBuilder::TryInlineApiGetter(Handle<JSFunction> function,
8605 Handle<Map> receiver_map,
8607 SmallMapList receiver_maps(1, zone());
8608 receiver_maps.Add(receiver_map, zone());
8609 return TryInlineApiCall(function,
8610 NULL, // Receiver is on expression stack.
8618 bool HOptimizedGraphBuilder::TryInlineApiSetter(Handle<JSFunction> function,
8619 Handle<Map> receiver_map,
8621 SmallMapList receiver_maps(1, zone());
8622 receiver_maps.Add(receiver_map, zone());
8623 return TryInlineApiCall(function,
8624 NULL, // Receiver is on expression stack.
8632 bool HOptimizedGraphBuilder::TryInlineApiCall(Handle<JSFunction> function,
8634 SmallMapList* receiver_maps,
8637 ApiCallType call_type) {
8638 CallOptimization optimization(function);
8639 if (!optimization.is_simple_api_call()) return false;
8640 Handle<Map> holder_map;
8641 if (call_type == kCallApiFunction) {
8642 // Cannot embed a direct reference to the global proxy map
8643 // as it maybe dropped on deserialization.
8644 CHECK(!isolate()->serializer_enabled());
8645 DCHECK_EQ(0, receiver_maps->length());
8646 receiver_maps->Add(handle(function->global_proxy()->map()), zone());
8648 CallOptimization::HolderLookup holder_lookup =
8649 CallOptimization::kHolderNotFound;
8650 Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
8651 receiver_maps->first(), &holder_lookup);
8652 if (holder_lookup == CallOptimization::kHolderNotFound) return false;
8654 if (FLAG_trace_inlining) {
8655 PrintF("Inlining api function ");
8656 function->ShortPrint();
8660 bool drop_extra = false;
8661 bool is_store = false;
8662 switch (call_type) {
8663 case kCallApiFunction:
8664 case kCallApiMethod:
8665 // Need to check that none of the receiver maps could have changed.
8666 Add<HCheckMaps>(receiver, receiver_maps);
8667 // Need to ensure the chain between receiver and api_holder is intact.
8668 if (holder_lookup == CallOptimization::kHolderFound) {
8669 AddCheckPrototypeMaps(api_holder, receiver_maps->first());
8671 DCHECK_EQ(holder_lookup, CallOptimization::kHolderIsReceiver);
8673 // Includes receiver.
8674 PushArgumentsFromEnvironment(argc + 1);
8675 // Drop function after call.
8678 case kCallApiGetter:
8679 // Receiver and prototype chain cannot have changed.
8681 DCHECK_EQ(NULL, receiver);
8682 // Receiver is on expression stack.
8684 Add<HPushArguments>(receiver);
8686 case kCallApiSetter:
8689 // Receiver and prototype chain cannot have changed.
8691 DCHECK_EQ(NULL, receiver);
8692 // Receiver and value are on expression stack.
8693 HValue* value = Pop();
8695 Add<HPushArguments>(receiver, value);
8700 HValue* holder = NULL;
8701 switch (holder_lookup) {
8702 case CallOptimization::kHolderFound:
8703 holder = Add<HConstant>(api_holder);
8705 case CallOptimization::kHolderIsReceiver:
8708 case CallOptimization::kHolderNotFound:
8712 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
8713 Handle<Object> call_data_obj(api_call_info->data(), isolate());
8714 bool call_data_is_undefined = call_data_obj->IsUndefined();
8715 HValue* call_data = Add<HConstant>(call_data_obj);
8716 ApiFunction fun(v8::ToCData<Address>(api_call_info->callback()));
8717 ExternalReference ref = ExternalReference(&fun,
8718 ExternalReference::DIRECT_API_CALL,
8720 HValue* api_function_address = Add<HConstant>(ExternalReference(ref));
8722 HValue* op_vals[] = {
8724 Add<HConstant>(function),
8727 api_function_address
8730 ApiFunctionDescriptor descriptor(isolate());
8731 CallApiFunctionStub stub(isolate(), is_store, call_data_is_undefined, argc);
8732 Handle<Code> code = stub.GetCode();
8733 HConstant* code_value = Add<HConstant>(code);
8735 DCHECK((sizeof(op_vals) / kPointerSize) == descriptor.GetEnvironmentLength());
8737 HInstruction* call = New<HCallWithDescriptor>(
8738 code_value, argc + 1, descriptor,
8739 Vector<HValue*>(op_vals, descriptor.GetEnvironmentLength()));
8741 if (drop_extra) Drop(1); // Drop function.
8742 ast_context()->ReturnInstruction(call, ast_id);
8747 void HOptimizedGraphBuilder::HandleIndirectCall(Call* expr, HValue* function,
8748 int arguments_count) {
8749 Handle<JSFunction> known_function;
8750 int args_count_no_receiver = arguments_count - 1;
8751 if (function->IsConstant() &&
8752 HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
8753 HValue* receiver = environment()->ExpressionStackAt(args_count_no_receiver);
8754 Handle<Map> receiver_map;
8755 if (receiver->IsConstant() &&
8756 HConstant::cast(receiver)->handle(isolate())->IsHeapObject()) {
8758 handle(Handle<HeapObject>::cast(
8759 HConstant::cast(receiver)->handle(isolate()))->map());
8763 Handle<JSFunction>::cast(HConstant::cast(function)->handle(isolate()));
8764 if (TryInlineBuiltinMethodCall(expr, known_function, receiver_map,
8765 args_count_no_receiver)) {
8766 if (FLAG_trace_inlining) {
8767 PrintF("Inlining builtin ");
8768 known_function->ShortPrint();
8774 if (TryInlineIndirectCall(known_function, expr, args_count_no_receiver)) {
8779 PushArgumentsFromEnvironment(arguments_count);
8780 HInvokeFunction* call =
8781 New<HInvokeFunction>(function, known_function, arguments_count);
8782 Drop(1); // Function
8783 ast_context()->ReturnInstruction(call, expr->id());
8787 bool HOptimizedGraphBuilder::TryIndirectCall(Call* expr) {
8788 DCHECK(expr->expression()->IsProperty());
8790 if (!expr->IsMonomorphic()) {
8793 Handle<Map> function_map = expr->GetReceiverTypes()->first();
8794 if (function_map->instance_type() != JS_FUNCTION_TYPE ||
8795 !expr->target()->shared()->HasBuiltinFunctionId()) {
8799 switch (expr->target()->shared()->builtin_function_id()) {
8800 case kFunctionCall: {
8801 if (expr->arguments()->length() == 0) return false;
8802 BuildFunctionCall(expr);
8805 case kFunctionApply: {
8806 // For .apply, only the pattern f.apply(receiver, arguments)
8808 if (current_info()->scope()->arguments() == NULL) return false;
8810 ZoneList<Expression*>* args = expr->arguments();
8811 if (args->length() != 2) return false;
8813 VariableProxy* arg_two = args->at(1)->AsVariableProxy();
8814 if (arg_two == NULL || !arg_two->var()->IsStackAllocated()) return false;
8815 HValue* arg_two_value = LookupAndMakeLive(arg_two->var());
8816 if (!arg_two_value->CheckFlag(HValue::kIsArguments)) return false;
8817 BuildFunctionApply(expr);
8820 default: { return false; }
8826 void HOptimizedGraphBuilder::BuildFunctionApply(Call* expr) {
8827 ZoneList<Expression*>* args = expr->arguments();
8828 CHECK_ALIVE(VisitForValue(args->at(0)));
8829 HValue* receiver = Pop(); // receiver
8830 HValue* function = Pop(); // f
8833 Handle<Map> function_map = expr->GetReceiverTypes()->first();
8834 HValue* checked_function = AddCheckMap(function, function_map);
8836 if (function_state()->outer() == NULL) {
8837 HInstruction* elements = Add<HArgumentsElements>(false);
8838 HInstruction* length = Add<HArgumentsLength>(elements);
8839 HValue* wrapped_receiver = BuildWrapReceiver(receiver, checked_function);
8840 HInstruction* result = New<HApplyArguments>(function,
8844 ast_context()->ReturnInstruction(result, expr->id());
8846 // We are inside inlined function and we know exactly what is inside
8847 // arguments object. But we need to be able to materialize at deopt.
8848 DCHECK_EQ(environment()->arguments_environment()->parameter_count(),
8849 function_state()->entry()->arguments_object()->arguments_count());
8850 HArgumentsObject* args = function_state()->entry()->arguments_object();
8851 const ZoneList<HValue*>* arguments_values = args->arguments_values();
8852 int arguments_count = arguments_values->length();
8854 Push(BuildWrapReceiver(receiver, checked_function));
8855 for (int i = 1; i < arguments_count; i++) {
8856 Push(arguments_values->at(i));
8858 HandleIndirectCall(expr, function, arguments_count);
8864 void HOptimizedGraphBuilder::BuildFunctionCall(Call* expr) {
8865 HValue* function = Top(); // f
8866 Handle<Map> function_map = expr->GetReceiverTypes()->first();
8867 HValue* checked_function = AddCheckMap(function, function_map);
8869 // f and call are on the stack in the unoptimized code
8870 // during evaluation of the arguments.
8871 CHECK_ALIVE(VisitExpressions(expr->arguments()));
8873 int args_length = expr->arguments()->length();
8874 int receiver_index = args_length - 1;
8875 // Patch the receiver.
8876 HValue* receiver = BuildWrapReceiver(
8877 environment()->ExpressionStackAt(receiver_index), checked_function);
8878 environment()->SetExpressionStackAt(receiver_index, receiver);
8880 // Call must not be on the stack from now on.
8881 int call_index = args_length + 1;
8882 environment()->RemoveExpressionStackAt(call_index);
8884 HandleIndirectCall(expr, function, args_length);
8888 HValue* HOptimizedGraphBuilder::ImplicitReceiverFor(HValue* function,
8889 Handle<JSFunction> target) {
8890 SharedFunctionInfo* shared = target->shared();
8891 if (shared->strict_mode() == SLOPPY && !shared->native()) {
8892 // Cannot embed a direct reference to the global proxy
8893 // as is it dropped on deserialization.
8894 CHECK(!isolate()->serializer_enabled());
8895 Handle<JSObject> global_proxy(target->context()->global_proxy());
8896 return Add<HConstant>(global_proxy);
8898 return graph()->GetConstantUndefined();
8902 void HOptimizedGraphBuilder::BuildArrayCall(Expression* expression,
8903 int arguments_count,
8905 Handle<AllocationSite> site) {
8906 Add<HCheckValue>(function, array_function());
8908 if (IsCallArrayInlineable(arguments_count, site)) {
8909 BuildInlinedCallArray(expression, arguments_count, site);
8913 HInstruction* call = PreProcessCall(New<HCallNewArray>(
8914 function, arguments_count + 1, site->GetElementsKind()));
8915 if (expression->IsCall()) {
8918 ast_context()->ReturnInstruction(call, expression->id());
8922 HValue* HOptimizedGraphBuilder::BuildArrayIndexOf(HValue* receiver,
8923 HValue* search_element,
8925 ArrayIndexOfMode mode) {
8926 DCHECK(IsFastElementsKind(kind));
8928 NoObservableSideEffectsScope no_effects(this);
8930 HValue* elements = AddLoadElements(receiver);
8931 HValue* length = AddLoadArrayLength(receiver, kind);
8934 HValue* terminating;
8936 LoopBuilder::Direction direction;
8937 if (mode == kFirstIndexOf) {
8938 initial = graph()->GetConstant0();
8939 terminating = length;
8941 direction = LoopBuilder::kPostIncrement;
8943 DCHECK_EQ(kLastIndexOf, mode);
8945 terminating = graph()->GetConstant0();
8947 direction = LoopBuilder::kPreDecrement;
8950 Push(graph()->GetConstantMinus1());
8951 if (IsFastDoubleElementsKind(kind) || IsFastSmiElementsKind(kind)) {
8952 // Make sure that we can actually compare numbers correctly below, see
8953 // https://code.google.com/p/chromium/issues/detail?id=407946 for details.
8954 search_element = AddUncasted<HForceRepresentation>(
8955 search_element, IsFastSmiElementsKind(kind) ? Representation::Smi()
8956 : Representation::Double());
8958 LoopBuilder loop(this, context(), direction);
8960 HValue* index = loop.BeginBody(initial, terminating, token);
8961 HValue* element = AddUncasted<HLoadKeyed>(
8962 elements, index, static_cast<HValue*>(NULL),
8963 kind, ALLOW_RETURN_HOLE);
8964 IfBuilder if_issame(this);
8965 if_issame.If<HCompareNumericAndBranch>(element, search_element,
8977 IfBuilder if_isstring(this);
8978 if_isstring.If<HIsStringAndBranch>(search_element);
8981 LoopBuilder loop(this, context(), direction);
8983 HValue* index = loop.BeginBody(initial, terminating, token);
8984 HValue* element = AddUncasted<HLoadKeyed>(
8985 elements, index, static_cast<HValue*>(NULL),
8986 kind, ALLOW_RETURN_HOLE);
8987 IfBuilder if_issame(this);
8988 if_issame.If<HIsStringAndBranch>(element);
8989 if_issame.AndIf<HStringCompareAndBranch>(
8990 element, search_element, Token::EQ_STRICT);
9003 IfBuilder if_isnumber(this);
9004 if_isnumber.If<HIsSmiAndBranch>(search_element);
9005 if_isnumber.OrIf<HCompareMap>(
9006 search_element, isolate()->factory()->heap_number_map());
9009 HValue* search_number =
9010 AddUncasted<HForceRepresentation>(search_element,
9011 Representation::Double());
9012 LoopBuilder loop(this, context(), direction);
9014 HValue* index = loop.BeginBody(initial, terminating, token);
9015 HValue* element = AddUncasted<HLoadKeyed>(
9016 elements, index, static_cast<HValue*>(NULL),
9017 kind, ALLOW_RETURN_HOLE);
9019 IfBuilder if_element_isnumber(this);
9020 if_element_isnumber.If<HIsSmiAndBranch>(element);
9021 if_element_isnumber.OrIf<HCompareMap>(
9022 element, isolate()->factory()->heap_number_map());
9023 if_element_isnumber.Then();
9026 AddUncasted<HForceRepresentation>(element,
9027 Representation::Double());
9028 IfBuilder if_issame(this);
9029 if_issame.If<HCompareNumericAndBranch>(
9030 number, search_number, Token::EQ_STRICT);
9039 if_element_isnumber.End();
9045 LoopBuilder loop(this, context(), direction);
9047 HValue* index = loop.BeginBody(initial, terminating, token);
9048 HValue* element = AddUncasted<HLoadKeyed>(
9049 elements, index, static_cast<HValue*>(NULL),
9050 kind, ALLOW_RETURN_HOLE);
9051 IfBuilder if_issame(this);
9052 if_issame.If<HCompareObjectEqAndBranch>(
9053 element, search_element);
9073 bool HOptimizedGraphBuilder::TryHandleArrayCall(Call* expr, HValue* function) {
9074 if (!array_function().is_identical_to(expr->target())) {
9078 Handle<AllocationSite> site = expr->allocation_site();
9079 if (site.is_null()) return false;
9081 BuildArrayCall(expr,
9082 expr->arguments()->length(),
9089 bool HOptimizedGraphBuilder::TryHandleArrayCallNew(CallNew* expr,
9091 if (!array_function().is_identical_to(expr->target())) {
9095 BuildArrayCall(expr,
9096 expr->arguments()->length(),
9098 expr->allocation_site());
9103 void HOptimizedGraphBuilder::VisitCall(Call* expr) {
9104 DCHECK(!HasStackOverflow());
9105 DCHECK(current_block() != NULL);
9106 DCHECK(current_block()->HasPredecessor());
9107 Expression* callee = expr->expression();
9108 int argument_count = expr->arguments()->length() + 1; // Plus receiver.
9109 HInstruction* call = NULL;
9111 Property* prop = callee->AsProperty();
9113 CHECK_ALIVE(VisitForValue(prop->obj()));
9114 HValue* receiver = Top();
9116 SmallMapList* types;
9117 ComputeReceiverTypes(expr, receiver, &types, zone());
9119 if (prop->key()->IsPropertyName() && types->length() > 0) {
9120 Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
9121 PropertyAccessInfo info(this, LOAD, ToType(types->first()), name);
9122 if (!info.CanAccessAsMonomorphic(types)) {
9123 HandlePolymorphicCallNamed(expr, receiver, types, name);
9129 if (!prop->key()->IsPropertyName()) {
9130 CHECK_ALIVE(VisitForValue(prop->key()));
9134 CHECK_ALIVE(PushLoad(prop, receiver, key));
9135 HValue* function = Pop();
9137 if (FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
9139 // Push the function under the receiver.
9140 environment()->SetExpressionStackAt(0, function);
9144 if (function->IsConstant() &&
9145 HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
9146 Handle<JSFunction> known_function = Handle<JSFunction>::cast(
9147 HConstant::cast(function)->handle(isolate()));
9148 expr->set_target(known_function);
9150 if (TryIndirectCall(expr)) return;
9151 CHECK_ALIVE(VisitExpressions(expr->arguments()));
9153 Handle<Map> map = types->length() == 1 ? types->first() : Handle<Map>();
9154 if (TryInlineBuiltinMethodCall(expr, known_function, map,
9155 expr->arguments()->length())) {
9156 if (FLAG_trace_inlining) {
9157 PrintF("Inlining builtin ");
9158 known_function->ShortPrint();
9163 if (TryInlineApiMethodCall(expr, receiver, types)) return;
9165 // Wrap the receiver if necessary.
9166 if (NeedsWrappingFor(ToType(types->first()), known_function)) {
9167 // Since HWrapReceiver currently cannot actually wrap numbers and
9168 // strings, use the regular CallFunctionStub for method calls to wrap
9170 // TODO(verwaest): Support creation of value wrappers directly in
9172 call = New<HCallFunction>(
9173 function, argument_count, WRAP_AND_CALL);
9174 } else if (TryInlineCall(expr)) {
9177 call = BuildCallConstantFunction(known_function, argument_count);
9181 CHECK_ALIVE(VisitExpressions(expr->arguments()));
9182 CallFunctionFlags flags = receiver->type().IsJSObject()
9183 ? NO_CALL_FUNCTION_FLAGS : CALL_AS_METHOD;
9184 call = New<HCallFunction>(function, argument_count, flags);
9186 PushArgumentsFromEnvironment(argument_count);
9189 VariableProxy* proxy = expr->expression()->AsVariableProxy();
9190 if (proxy != NULL && proxy->var()->is_possibly_eval(isolate())) {
9191 return Bailout(kPossibleDirectCallToEval);
9194 // The function is on the stack in the unoptimized code during
9195 // evaluation of the arguments.
9196 CHECK_ALIVE(VisitForValue(expr->expression()));
9197 HValue* function = Top();
9198 if (expr->global_call()) {
9199 Variable* var = proxy->var();
9200 bool known_global_function = false;
9201 // If there is a global property cell for the name at compile time and
9202 // access check is not enabled we assume that the function will not change
9203 // and generate optimized code for calling the function.
9204 Handle<GlobalObject> global(current_info()->global_object());
9205 LookupIterator it(global, var->name(),
9206 LookupIterator::OWN_SKIP_INTERCEPTOR);
9207 GlobalPropertyAccess type = LookupGlobalProperty(var, &it, LOAD);
9208 if (type == kUseCell) {
9209 known_global_function = expr->ComputeGlobalTarget(global, &it);
9211 if (known_global_function) {
9212 Add<HCheckValue>(function, expr->target());
9214 // Placeholder for the receiver.
9215 Push(graph()->GetConstantUndefined());
9216 CHECK_ALIVE(VisitExpressions(expr->arguments()));
9218 // Patch the global object on the stack by the expected receiver.
9219 HValue* receiver = ImplicitReceiverFor(function, expr->target());
9220 const int receiver_index = argument_count - 1;
9221 environment()->SetExpressionStackAt(receiver_index, receiver);
9223 if (TryInlineBuiltinFunctionCall(expr)) {
9224 if (FLAG_trace_inlining) {
9225 PrintF("Inlining builtin ");
9226 expr->target()->ShortPrint();
9231 if (TryInlineApiFunctionCall(expr, receiver)) return;
9232 if (TryHandleArrayCall(expr, function)) return;
9233 if (TryInlineCall(expr)) return;
9235 PushArgumentsFromEnvironment(argument_count);
9236 call = BuildCallConstantFunction(expr->target(), argument_count);
9238 Push(graph()->GetConstantUndefined());
9239 CHECK_ALIVE(VisitExpressions(expr->arguments()));
9240 PushArgumentsFromEnvironment(argument_count);
9241 call = New<HCallFunction>(function, argument_count);
9244 } else if (expr->IsMonomorphic()) {
9245 Add<HCheckValue>(function, expr->target());
9247 Push(graph()->GetConstantUndefined());
9248 CHECK_ALIVE(VisitExpressions(expr->arguments()));
9250 HValue* receiver = ImplicitReceiverFor(function, expr->target());
9251 const int receiver_index = argument_count - 1;
9252 environment()->SetExpressionStackAt(receiver_index, receiver);
9254 if (TryInlineBuiltinFunctionCall(expr)) {
9255 if (FLAG_trace_inlining) {
9256 PrintF("Inlining builtin ");
9257 expr->target()->ShortPrint();
9262 if (TryInlineApiFunctionCall(expr, receiver)) return;
9264 if (TryInlineCall(expr)) return;
9266 call = PreProcessCall(New<HInvokeFunction>(
9267 function, expr->target(), argument_count));
9270 Push(graph()->GetConstantUndefined());
9271 CHECK_ALIVE(VisitExpressions(expr->arguments()));
9272 PushArgumentsFromEnvironment(argument_count);
9273 call = New<HCallFunction>(function, argument_count);
9277 Drop(1); // Drop the function.
9278 return ast_context()->ReturnInstruction(call, expr->id());
9282 void HOptimizedGraphBuilder::BuildInlinedCallArray(
9283 Expression* expression,
9285 Handle<AllocationSite> site) {
9286 DCHECK(!site.is_null());
9287 DCHECK(argument_count >= 0 && argument_count <= 1);
9288 NoObservableSideEffectsScope no_effects(this);
9290 // We should at least have the constructor on the expression stack.
9291 HValue* constructor = environment()->ExpressionStackAt(argument_count);
9293 // Register on the site for deoptimization if the transition feedback changes.
9294 AllocationSite::AddDependentCompilationInfo(
9295 site, AllocationSite::TRANSITIONS, top_info());
9296 ElementsKind kind = site->GetElementsKind();
9297 HInstruction* site_instruction = Add<HConstant>(site);
9299 // In the single constant argument case, we may have to adjust elements kind
9300 // to avoid creating a packed non-empty array.
9301 if (argument_count == 1 && !IsHoleyElementsKind(kind)) {
9302 HValue* argument = environment()->Top();
9303 if (argument->IsConstant()) {
9304 HConstant* constant_argument = HConstant::cast(argument);
9305 DCHECK(constant_argument->HasSmiValue());
9306 int constant_array_size = constant_argument->Integer32Value();
9307 if (constant_array_size != 0) {
9308 kind = GetHoleyElementsKind(kind);
9314 JSArrayBuilder array_builder(this,
9318 DISABLE_ALLOCATION_SITES);
9319 HValue* new_object = argument_count == 0
9320 ? array_builder.AllocateEmptyArray()
9321 : BuildAllocateArrayFromLength(&array_builder, Top());
9323 int args_to_drop = argument_count + (expression->IsCall() ? 2 : 1);
9325 ast_context()->ReturnValue(new_object);
9329 // Checks whether allocation using the given constructor can be inlined.
9330 static bool IsAllocationInlineable(Handle<JSFunction> constructor) {
9331 return constructor->has_initial_map() &&
9332 constructor->initial_map()->instance_type() == JS_OBJECT_TYPE &&
9333 constructor->initial_map()->instance_size() < HAllocate::kMaxInlineSize &&
9334 constructor->initial_map()->InitialPropertiesLength() == 0;
9338 bool HOptimizedGraphBuilder::IsCallArrayInlineable(
9340 Handle<AllocationSite> site) {
9341 Handle<JSFunction> caller = current_info()->closure();
9342 Handle<JSFunction> target = array_function();
9343 // We should have the function plus array arguments on the environment stack.
9344 DCHECK(environment()->length() >= (argument_count + 1));
9345 DCHECK(!site.is_null());
9347 bool inline_ok = false;
9348 if (site->CanInlineCall()) {
9349 // We also want to avoid inlining in certain 1 argument scenarios.
9350 if (argument_count == 1) {
9351 HValue* argument = Top();
9352 if (argument->IsConstant()) {
9353 // Do not inline if the constant length argument is not a smi or
9354 // outside the valid range for unrolled loop initialization.
9355 HConstant* constant_argument = HConstant::cast(argument);
9356 if (constant_argument->HasSmiValue()) {
9357 int value = constant_argument->Integer32Value();
9358 inline_ok = value >= 0 && value <= kElementLoopUnrollThreshold;
9360 TraceInline(target, caller,
9361 "Constant length outside of valid inlining range.");
9365 TraceInline(target, caller,
9366 "Dont inline [new] Array(n) where n isn't constant.");
9368 } else if (argument_count == 0) {
9371 TraceInline(target, caller, "Too many arguments to inline.");
9374 TraceInline(target, caller, "AllocationSite requested no inlining.");
9378 TraceInline(target, caller, NULL);
9384 void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
9385 DCHECK(!HasStackOverflow());
9386 DCHECK(current_block() != NULL);
9387 DCHECK(current_block()->HasPredecessor());
9388 if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
9389 int argument_count = expr->arguments()->length() + 1; // Plus constructor.
9390 Factory* factory = isolate()->factory();
9392 // The constructor function is on the stack in the unoptimized code
9393 // during evaluation of the arguments.
9394 CHECK_ALIVE(VisitForValue(expr->expression()));
9395 HValue* function = Top();
9396 CHECK_ALIVE(VisitExpressions(expr->arguments()));
9398 if (FLAG_inline_construct &&
9399 expr->IsMonomorphic() &&
9400 IsAllocationInlineable(expr->target())) {
9401 Handle<JSFunction> constructor = expr->target();
9402 HValue* check = Add<HCheckValue>(function, constructor);
9404 // Force completion of inobject slack tracking before generating
9405 // allocation code to finalize instance size.
9406 if (constructor->IsInobjectSlackTrackingInProgress()) {
9407 constructor->CompleteInobjectSlackTracking();
9410 // Calculate instance size from initial map of constructor.
9411 DCHECK(constructor->has_initial_map());
9412 Handle<Map> initial_map(constructor->initial_map());
9413 int instance_size = initial_map->instance_size();
9414 DCHECK(initial_map->InitialPropertiesLength() == 0);
9416 // Allocate an instance of the implicit receiver object.
9417 HValue* size_in_bytes = Add<HConstant>(instance_size);
9418 HAllocationMode allocation_mode;
9419 if (FLAG_pretenuring_call_new) {
9420 if (FLAG_allocation_site_pretenuring) {
9421 // Try to use pretenuring feedback.
9422 Handle<AllocationSite> allocation_site = expr->allocation_site();
9423 allocation_mode = HAllocationMode(allocation_site);
9424 // Take a dependency on allocation site.
9425 AllocationSite::AddDependentCompilationInfo(allocation_site,
9426 AllocationSite::TENURING,
9431 HAllocate* receiver = BuildAllocate(
9432 size_in_bytes, HType::JSObject(), JS_OBJECT_TYPE, allocation_mode);
9433 receiver->set_known_initial_map(initial_map);
9435 // Initialize map and fields of the newly allocated object.
9436 { NoObservableSideEffectsScope no_effects(this);
9437 DCHECK(initial_map->instance_type() == JS_OBJECT_TYPE);
9438 Add<HStoreNamedField>(receiver,
9439 HObjectAccess::ForMapAndOffset(initial_map, JSObject::kMapOffset),
9440 Add<HConstant>(initial_map));
9441 HValue* empty_fixed_array = Add<HConstant>(factory->empty_fixed_array());
9442 Add<HStoreNamedField>(receiver,
9443 HObjectAccess::ForMapAndOffset(initial_map,
9444 JSObject::kPropertiesOffset),
9446 Add<HStoreNamedField>(receiver,
9447 HObjectAccess::ForMapAndOffset(initial_map,
9448 JSObject::kElementsOffset),
9450 if (initial_map->inobject_properties() != 0) {
9451 HConstant* undefined = graph()->GetConstantUndefined();
9452 for (int i = 0; i < initial_map->inobject_properties(); i++) {
9453 int property_offset = initial_map->GetInObjectPropertyOffset(i);
9454 Add<HStoreNamedField>(receiver,
9455 HObjectAccess::ForMapAndOffset(initial_map, property_offset),
9461 // Replace the constructor function with a newly allocated receiver using
9462 // the index of the receiver from the top of the expression stack.
9463 const int receiver_index = argument_count - 1;
9464 DCHECK(environment()->ExpressionStackAt(receiver_index) == function);
9465 environment()->SetExpressionStackAt(receiver_index, receiver);
9467 if (TryInlineConstruct(expr, receiver)) {
9468 // Inlining worked, add a dependency on the initial map to make sure that
9469 // this code is deoptimized whenever the initial map of the constructor
9471 Map::AddDependentCompilationInfo(
9472 initial_map, DependentCode::kInitialMapChangedGroup, top_info());
9476 // TODO(mstarzinger): For now we remove the previous HAllocate and all
9477 // corresponding instructions and instead add HPushArguments for the
9478 // arguments in case inlining failed. What we actually should do is for
9479 // inlining to try to build a subgraph without mutating the parent graph.
9480 HInstruction* instr = current_block()->last();
9482 HInstruction* prev_instr = instr->previous();
9483 instr->DeleteAndReplaceWith(NULL);
9485 } while (instr != check);
9486 environment()->SetExpressionStackAt(receiver_index, function);
9487 HInstruction* call =
9488 PreProcessCall(New<HCallNew>(function, argument_count));
9489 return ast_context()->ReturnInstruction(call, expr->id());
9491 // The constructor function is both an operand to the instruction and an
9492 // argument to the construct call.
9493 if (TryHandleArrayCallNew(expr, function)) return;
9495 HInstruction* call =
9496 PreProcessCall(New<HCallNew>(function, argument_count));
9497 return ast_context()->ReturnInstruction(call, expr->id());
9502 // Support for generating inlined runtime functions.
9504 // Lookup table for generators for runtime calls that are generated inline.
9505 // Elements of the table are member pointers to functions of
9506 // HOptimizedGraphBuilder.
9507 #define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize) \
9508 &HOptimizedGraphBuilder::Generate##Name,
9510 const HOptimizedGraphBuilder::InlineFunctionGenerator
9511 HOptimizedGraphBuilder::kInlineFunctionGenerators[] = {
9512 INLINE_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
9513 INLINE_OPTIMIZED_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
9515 #undef INLINE_FUNCTION_GENERATOR_ADDRESS
9518 template <class ViewClass>
9519 void HGraphBuilder::BuildArrayBufferViewInitialization(
9522 HValue* byte_offset,
9523 HValue* byte_length) {
9525 for (int offset = ViewClass::kSize;
9526 offset < ViewClass::kSizeWithInternalFields;
9527 offset += kPointerSize) {
9528 Add<HStoreNamedField>(obj,
9529 HObjectAccess::ForObservableJSObjectOffset(offset),
9530 graph()->GetConstant0());
9533 Add<HStoreNamedField>(
9535 HObjectAccess::ForJSArrayBufferViewByteOffset(),
9537 Add<HStoreNamedField>(
9539 HObjectAccess::ForJSArrayBufferViewByteLength(),
9542 if (buffer != NULL) {
9543 Add<HStoreNamedField>(
9545 HObjectAccess::ForJSArrayBufferViewBuffer(), buffer);
9546 HObjectAccess weak_first_view_access =
9547 HObjectAccess::ForJSArrayBufferWeakFirstView();
9548 Add<HStoreNamedField>(obj,
9549 HObjectAccess::ForJSArrayBufferViewWeakNext(),
9550 Add<HLoadNamedField>(buffer,
9551 static_cast<HValue*>(NULL),
9552 weak_first_view_access));
9553 Add<HStoreNamedField>(buffer, weak_first_view_access, obj);
9555 Add<HStoreNamedField>(
9557 HObjectAccess::ForJSArrayBufferViewBuffer(),
9558 Add<HConstant>(static_cast<int32_t>(0)));
9559 Add<HStoreNamedField>(obj,
9560 HObjectAccess::ForJSArrayBufferViewWeakNext(),
9561 graph()->GetConstantUndefined());
9566 void HOptimizedGraphBuilder::GenerateDataViewInitialize(
9567 CallRuntime* expr) {
9568 ZoneList<Expression*>* arguments = expr->arguments();
9570 DCHECK(arguments->length()== 4);
9571 CHECK_ALIVE(VisitForValue(arguments->at(0)));
9572 HValue* obj = Pop();
9574 CHECK_ALIVE(VisitForValue(arguments->at(1)));
9575 HValue* buffer = Pop();
9577 CHECK_ALIVE(VisitForValue(arguments->at(2)));
9578 HValue* byte_offset = Pop();
9580 CHECK_ALIVE(VisitForValue(arguments->at(3)));
9581 HValue* byte_length = Pop();
9584 NoObservableSideEffectsScope scope(this);
9585 BuildArrayBufferViewInitialization<JSDataView>(
9586 obj, buffer, byte_offset, byte_length);
9591 static Handle<Map> TypedArrayMap(Isolate* isolate,
9592 ExternalArrayType array_type,
9593 ElementsKind target_kind) {
9594 Handle<Context> native_context = isolate->native_context();
9595 Handle<JSFunction> fun;
9596 switch (array_type) {
9597 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
9598 case kExternal##Type##Array: \
9599 fun = Handle<JSFunction>(native_context->type##_array_fun()); \
9602 TYPED_ARRAYS(TYPED_ARRAY_CASE)
9603 #undef TYPED_ARRAY_CASE
9605 Handle<Map> map(fun->initial_map());
9606 return Map::AsElementsKind(map, target_kind);
9610 HValue* HOptimizedGraphBuilder::BuildAllocateExternalElements(
9611 ExternalArrayType array_type,
9612 bool is_zero_byte_offset,
9613 HValue* buffer, HValue* byte_offset, HValue* length) {
9614 Handle<Map> external_array_map(
9615 isolate()->heap()->MapForExternalArrayType(array_type));
9617 // The HForceRepresentation is to prevent possible deopt on int-smi
9618 // conversion after allocation but before the new object fields are set.
9619 length = AddUncasted<HForceRepresentation>(length, Representation::Smi());
9622 Add<HConstant>(ExternalArray::kAlignedSize),
9623 HType::HeapObject(),
9625 external_array_map->instance_type());
9627 AddStoreMapConstant(elements, external_array_map);
9628 Add<HStoreNamedField>(elements,
9629 HObjectAccess::ForFixedArrayLength(), length);
9631 HValue* backing_store = Add<HLoadNamedField>(
9632 buffer, static_cast<HValue*>(NULL),
9633 HObjectAccess::ForJSArrayBufferBackingStore());
9635 HValue* typed_array_start;
9636 if (is_zero_byte_offset) {
9637 typed_array_start = backing_store;
9639 HInstruction* external_pointer =
9640 AddUncasted<HAdd>(backing_store, byte_offset);
9641 // Arguments are checked prior to call to TypedArrayInitialize,
9642 // including byte_offset.
9643 external_pointer->ClearFlag(HValue::kCanOverflow);
9644 typed_array_start = external_pointer;
9647 Add<HStoreNamedField>(elements,
9648 HObjectAccess::ForExternalArrayExternalPointer(),
9655 HValue* HOptimizedGraphBuilder::BuildAllocateFixedTypedArray(
9656 ExternalArrayType array_type, size_t element_size,
9657 ElementsKind fixed_elements_kind,
9658 HValue* byte_length, HValue* length) {
9660 (FixedTypedArrayBase::kHeaderSize & kObjectAlignmentMask) == 0);
9663 // if fixed array's elements are not aligned to object's alignment,
9664 // we need to align the whole array to object alignment.
9665 if (element_size % kObjectAlignment != 0) {
9666 total_size = BuildObjectSizeAlignment(
9667 byte_length, FixedTypedArrayBase::kHeaderSize);
9669 total_size = AddUncasted<HAdd>(byte_length,
9670 Add<HConstant>(FixedTypedArrayBase::kHeaderSize));
9671 total_size->ClearFlag(HValue::kCanOverflow);
9674 // The HForceRepresentation is to prevent possible deopt on int-smi
9675 // conversion after allocation but before the new object fields are set.
9676 length = AddUncasted<HForceRepresentation>(length, Representation::Smi());
9677 Handle<Map> fixed_typed_array_map(
9678 isolate()->heap()->MapForFixedTypedArray(array_type));
9680 Add<HAllocate>(total_size, HType::HeapObject(),
9681 NOT_TENURED, fixed_typed_array_map->instance_type());
9682 AddStoreMapConstant(elements, fixed_typed_array_map);
9684 Add<HStoreNamedField>(elements,
9685 HObjectAccess::ForFixedArrayLength(),
9688 HValue* filler = Add<HConstant>(static_cast<int32_t>(0));
9691 LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
9693 HValue* key = builder.BeginBody(
9694 Add<HConstant>(static_cast<int32_t>(0)),
9696 Add<HStoreKeyed>(elements, key, filler, fixed_elements_kind);
9704 void HOptimizedGraphBuilder::GenerateTypedArrayInitialize(
9705 CallRuntime* expr) {
9706 ZoneList<Expression*>* arguments = expr->arguments();
9708 static const int kObjectArg = 0;
9709 static const int kArrayIdArg = 1;
9710 static const int kBufferArg = 2;
9711 static const int kByteOffsetArg = 3;
9712 static const int kByteLengthArg = 4;
9713 static const int kArgsLength = 5;
9714 DCHECK(arguments->length() == kArgsLength);
9717 CHECK_ALIVE(VisitForValue(arguments->at(kObjectArg)));
9718 HValue* obj = Pop();
9720 if (arguments->at(kArrayIdArg)->IsLiteral()) {
9721 // This should never happen in real use, but can happen when fuzzing.
9723 Bailout(kNeedSmiLiteral);
9726 Handle<Object> value =
9727 static_cast<Literal*>(arguments->at(kArrayIdArg))->value();
9728 if (!value->IsSmi()) {
9729 // This should never happen in real use, but can happen when fuzzing.
9731 Bailout(kNeedSmiLiteral);
9734 int array_id = Smi::cast(*value)->value();
9737 if (!arguments->at(kBufferArg)->IsNullLiteral()) {
9738 CHECK_ALIVE(VisitForValue(arguments->at(kBufferArg)));
9744 HValue* byte_offset;
9745 bool is_zero_byte_offset;
9747 if (arguments->at(kByteOffsetArg)->IsLiteral()
9748 && Smi::FromInt(0) ==
9749 *static_cast<Literal*>(arguments->at(kByteOffsetArg))->value()) {
9750 byte_offset = Add<HConstant>(static_cast<int32_t>(0));
9751 is_zero_byte_offset = true;
9753 CHECK_ALIVE(VisitForValue(arguments->at(kByteOffsetArg)));
9754 byte_offset = Pop();
9755 is_zero_byte_offset = false;
9756 DCHECK(buffer != NULL);
9759 CHECK_ALIVE(VisitForValue(arguments->at(kByteLengthArg)));
9760 HValue* byte_length = Pop();
9762 NoObservableSideEffectsScope scope(this);
9763 IfBuilder byte_offset_smi(this);
9765 if (!is_zero_byte_offset) {
9766 byte_offset_smi.If<HIsSmiAndBranch>(byte_offset);
9767 byte_offset_smi.Then();
9770 ExternalArrayType array_type =
9771 kExternalInt8Array; // Bogus initialization.
9772 size_t element_size = 1; // Bogus initialization.
9773 ElementsKind external_elements_kind = // Bogus initialization.
9774 EXTERNAL_INT8_ELEMENTS;
9775 ElementsKind fixed_elements_kind = // Bogus initialization.
9777 Runtime::ArrayIdToTypeAndSize(array_id,
9779 &external_elements_kind,
9780 &fixed_elements_kind,
9784 { // byte_offset is Smi.
9785 BuildArrayBufferViewInitialization<JSTypedArray>(
9786 obj, buffer, byte_offset, byte_length);
9789 HInstruction* length = AddUncasted<HDiv>(byte_length,
9790 Add<HConstant>(static_cast<int32_t>(element_size)));
9792 Add<HStoreNamedField>(obj,
9793 HObjectAccess::ForJSTypedArrayLength(),
9797 if (buffer != NULL) {
9798 elements = BuildAllocateExternalElements(
9799 array_type, is_zero_byte_offset, buffer, byte_offset, length);
9800 Handle<Map> obj_map = TypedArrayMap(
9801 isolate(), array_type, external_elements_kind);
9802 AddStoreMapConstant(obj, obj_map);
9804 DCHECK(is_zero_byte_offset);
9805 elements = BuildAllocateFixedTypedArray(
9806 array_type, element_size, fixed_elements_kind,
9807 byte_length, length);
9809 Add<HStoreNamedField>(
9810 obj, HObjectAccess::ForElementsPointer(), elements);
9813 if (!is_zero_byte_offset) {
9814 byte_offset_smi.Else();
9815 { // byte_offset is not Smi.
9817 CHECK_ALIVE(VisitForValue(arguments->at(kArrayIdArg)));
9821 PushArgumentsFromEnvironment(kArgsLength);
9822 Add<HCallRuntime>(expr->name(), expr->function(), kArgsLength);
9825 byte_offset_smi.End();
9829 void HOptimizedGraphBuilder::GenerateMaxSmi(CallRuntime* expr) {
9830 DCHECK(expr->arguments()->length() == 0);
9831 HConstant* max_smi = New<HConstant>(static_cast<int32_t>(Smi::kMaxValue));
9832 return ast_context()->ReturnInstruction(max_smi, expr->id());
9836 void HOptimizedGraphBuilder::GenerateTypedArrayMaxSizeInHeap(
9837 CallRuntime* expr) {
9838 DCHECK(expr->arguments()->length() == 0);
9839 HConstant* result = New<HConstant>(static_cast<int32_t>(
9840 FLAG_typed_array_max_size_in_heap));
9841 return ast_context()->ReturnInstruction(result, expr->id());
9845 void HOptimizedGraphBuilder::GenerateArrayBufferGetByteLength(
9846 CallRuntime* expr) {
9847 DCHECK(expr->arguments()->length() == 1);
9848 CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
9849 HValue* buffer = Pop();
9850 HInstruction* result = New<HLoadNamedField>(
9852 static_cast<HValue*>(NULL),
9853 HObjectAccess::ForJSArrayBufferByteLength());
9854 return ast_context()->ReturnInstruction(result, expr->id());
9858 void HOptimizedGraphBuilder::GenerateArrayBufferViewGetByteLength(
9859 CallRuntime* expr) {
9860 DCHECK(expr->arguments()->length() == 1);
9861 CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
9862 HValue* buffer = Pop();
9863 HInstruction* result = New<HLoadNamedField>(
9865 static_cast<HValue*>(NULL),
9866 HObjectAccess::ForJSArrayBufferViewByteLength());
9867 return ast_context()->ReturnInstruction(result, expr->id());
9871 void HOptimizedGraphBuilder::GenerateArrayBufferViewGetByteOffset(
9872 CallRuntime* expr) {
9873 DCHECK(expr->arguments()->length() == 1);
9874 CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
9875 HValue* buffer = Pop();
9876 HInstruction* result = New<HLoadNamedField>(
9878 static_cast<HValue*>(NULL),
9879 HObjectAccess::ForJSArrayBufferViewByteOffset());
9880 return ast_context()->ReturnInstruction(result, expr->id());
9884 void HOptimizedGraphBuilder::GenerateTypedArrayGetLength(
9885 CallRuntime* expr) {
9886 DCHECK(expr->arguments()->length() == 1);
9887 CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
9888 HValue* buffer = Pop();
9889 HInstruction* result = New<HLoadNamedField>(
9891 static_cast<HValue*>(NULL),
9892 HObjectAccess::ForJSTypedArrayLength());
9893 return ast_context()->ReturnInstruction(result, expr->id());
9897 void HOptimizedGraphBuilder::VisitCallRuntime(CallRuntime* expr) {
9898 DCHECK(!HasStackOverflow());
9899 DCHECK(current_block() != NULL);
9900 DCHECK(current_block()->HasPredecessor());
9901 if (expr->is_jsruntime()) {
9902 return Bailout(kCallToAJavaScriptRuntimeFunction);
9905 const Runtime::Function* function = expr->function();
9906 DCHECK(function != NULL);
9908 if (function->intrinsic_type == Runtime::INLINE ||
9909 function->intrinsic_type == Runtime::INLINE_OPTIMIZED) {
9910 DCHECK(expr->name()->length() > 0);
9911 DCHECK(expr->name()->Get(0) == '_');
9912 // Call to an inline function.
9913 int lookup_index = static_cast<int>(function->function_id) -
9914 static_cast<int>(Runtime::kFirstInlineFunction);
9915 DCHECK(lookup_index >= 0);
9916 DCHECK(static_cast<size_t>(lookup_index) <
9917 arraysize(kInlineFunctionGenerators));
9918 InlineFunctionGenerator generator = kInlineFunctionGenerators[lookup_index];
9920 // Call the inline code generator using the pointer-to-member.
9921 (this->*generator)(expr);
9923 DCHECK(function->intrinsic_type == Runtime::RUNTIME);
9924 Handle<String> name = expr->name();
9925 int argument_count = expr->arguments()->length();
9926 CHECK_ALIVE(VisitExpressions(expr->arguments()));
9927 PushArgumentsFromEnvironment(argument_count);
9928 HCallRuntime* call = New<HCallRuntime>(name, function,
9930 return ast_context()->ReturnInstruction(call, expr->id());
9935 void HOptimizedGraphBuilder::VisitUnaryOperation(UnaryOperation* expr) {
9936 DCHECK(!HasStackOverflow());
9937 DCHECK(current_block() != NULL);
9938 DCHECK(current_block()->HasPredecessor());
9939 switch (expr->op()) {
9940 case Token::DELETE: return VisitDelete(expr);
9941 case Token::VOID: return VisitVoid(expr);
9942 case Token::TYPEOF: return VisitTypeof(expr);
9943 case Token::NOT: return VisitNot(expr);
9944 default: UNREACHABLE();
9949 void HOptimizedGraphBuilder::VisitDelete(UnaryOperation* expr) {
9950 Property* prop = expr->expression()->AsProperty();
9951 VariableProxy* proxy = expr->expression()->AsVariableProxy();
9953 CHECK_ALIVE(VisitForValue(prop->obj()));
9954 CHECK_ALIVE(VisitForValue(prop->key()));
9955 HValue* key = Pop();
9956 HValue* obj = Pop();
9957 HValue* function = AddLoadJSBuiltin(Builtins::DELETE);
9958 Add<HPushArguments>(obj, key, Add<HConstant>(function_strict_mode()));
9959 // TODO(olivf) InvokeFunction produces a check for the parameter count,
9960 // even though we are certain to pass the correct number of arguments here.
9961 HInstruction* instr = New<HInvokeFunction>(function, 3);
9962 return ast_context()->ReturnInstruction(instr, expr->id());
9963 } else if (proxy != NULL) {
9964 Variable* var = proxy->var();
9965 if (var->IsUnallocated()) {
9966 Bailout(kDeleteWithGlobalVariable);
9967 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
9968 // Result of deleting non-global variables is false. 'this' is not
9969 // really a variable, though we implement it as one. The
9970 // subexpression does not have side effects.
9971 HValue* value = var->is_this()
9972 ? graph()->GetConstantTrue()
9973 : graph()->GetConstantFalse();
9974 return ast_context()->ReturnValue(value);
9976 Bailout(kDeleteWithNonGlobalVariable);
9979 // Result of deleting non-property, non-variable reference is true.
9980 // Evaluate the subexpression for side effects.
9981 CHECK_ALIVE(VisitForEffect(expr->expression()));
9982 return ast_context()->ReturnValue(graph()->GetConstantTrue());
9987 void HOptimizedGraphBuilder::VisitVoid(UnaryOperation* expr) {
9988 CHECK_ALIVE(VisitForEffect(expr->expression()));
9989 return ast_context()->ReturnValue(graph()->GetConstantUndefined());
9993 void HOptimizedGraphBuilder::VisitTypeof(UnaryOperation* expr) {
9994 CHECK_ALIVE(VisitForTypeOf(expr->expression()));
9995 HValue* value = Pop();
9996 HInstruction* instr = New<HTypeof>(value);
9997 return ast_context()->ReturnInstruction(instr, expr->id());
10001 void HOptimizedGraphBuilder::VisitNot(UnaryOperation* expr) {
10002 if (ast_context()->IsTest()) {
10003 TestContext* context = TestContext::cast(ast_context());
10004 VisitForControl(expr->expression(),
10005 context->if_false(),
10006 context->if_true());
10010 if (ast_context()->IsEffect()) {
10011 VisitForEffect(expr->expression());
10015 DCHECK(ast_context()->IsValue());
10016 HBasicBlock* materialize_false = graph()->CreateBasicBlock();
10017 HBasicBlock* materialize_true = graph()->CreateBasicBlock();
10018 CHECK_BAILOUT(VisitForControl(expr->expression(),
10020 materialize_true));
10022 if (materialize_false->HasPredecessor()) {
10023 materialize_false->SetJoinId(expr->MaterializeFalseId());
10024 set_current_block(materialize_false);
10025 Push(graph()->GetConstantFalse());
10027 materialize_false = NULL;
10030 if (materialize_true->HasPredecessor()) {
10031 materialize_true->SetJoinId(expr->MaterializeTrueId());
10032 set_current_block(materialize_true);
10033 Push(graph()->GetConstantTrue());
10035 materialize_true = NULL;
10038 HBasicBlock* join =
10039 CreateJoin(materialize_false, materialize_true, expr->id());
10040 set_current_block(join);
10041 if (join != NULL) return ast_context()->ReturnValue(Pop());
10045 HInstruction* HOptimizedGraphBuilder::BuildIncrement(
10046 bool returns_original_input,
10047 CountOperation* expr) {
10048 // The input to the count operation is on top of the expression stack.
10049 Representation rep = Representation::FromType(expr->type());
10050 if (rep.IsNone() || rep.IsTagged()) {
10051 rep = Representation::Smi();
10054 if (returns_original_input) {
10055 // We need an explicit HValue representing ToNumber(input). The
10056 // actual HChange instruction we need is (sometimes) added in a later
10057 // phase, so it is not available now to be used as an input to HAdd and
10058 // as the return value.
10059 HInstruction* number_input = AddUncasted<HForceRepresentation>(Pop(), rep);
10060 if (!rep.IsDouble()) {
10061 number_input->SetFlag(HInstruction::kFlexibleRepresentation);
10062 number_input->SetFlag(HInstruction::kCannotBeTagged);
10064 Push(number_input);
10067 // The addition has no side effects, so we do not need
10068 // to simulate the expression stack after this instruction.
10069 // Any later failures deopt to the load of the input or earlier.
10070 HConstant* delta = (expr->op() == Token::INC)
10071 ? graph()->GetConstant1()
10072 : graph()->GetConstantMinus1();
10073 HInstruction* instr = AddUncasted<HAdd>(Top(), delta);
10074 if (instr->IsAdd()) {
10075 HAdd* add = HAdd::cast(instr);
10076 add->set_observed_input_representation(1, rep);
10077 add->set_observed_input_representation(2, Representation::Smi());
10079 instr->SetFlag(HInstruction::kCannotBeTagged);
10080 instr->ClearAllSideEffects();
10085 void HOptimizedGraphBuilder::BuildStoreForEffect(Expression* expr,
10088 BailoutId return_id,
10092 EffectContext for_effect(this);
10094 if (key != NULL) Push(key);
10096 BuildStore(expr, prop, ast_id, return_id);
10100 void HOptimizedGraphBuilder::VisitCountOperation(CountOperation* expr) {
10101 DCHECK(!HasStackOverflow());
10102 DCHECK(current_block() != NULL);
10103 DCHECK(current_block()->HasPredecessor());
10104 if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
10105 Expression* target = expr->expression();
10106 VariableProxy* proxy = target->AsVariableProxy();
10107 Property* prop = target->AsProperty();
10108 if (proxy == NULL && prop == NULL) {
10109 return Bailout(kInvalidLhsInCountOperation);
10112 // Match the full code generator stack by simulating an extra stack
10113 // element for postfix operations in a non-effect context. The return
10114 // value is ToNumber(input).
10115 bool returns_original_input =
10116 expr->is_postfix() && !ast_context()->IsEffect();
10117 HValue* input = NULL; // ToNumber(original_input).
10118 HValue* after = NULL; // The result after incrementing or decrementing.
10120 if (proxy != NULL) {
10121 Variable* var = proxy->var();
10122 if (var->mode() == CONST_LEGACY) {
10123 return Bailout(kUnsupportedCountOperationWithConst);
10125 // Argument of the count operation is a variable, not a property.
10126 DCHECK(prop == NULL);
10127 CHECK_ALIVE(VisitForValue(target));
10129 after = BuildIncrement(returns_original_input, expr);
10130 input = returns_original_input ? Top() : Pop();
10133 switch (var->location()) {
10134 case Variable::UNALLOCATED:
10135 HandleGlobalVariableAssignment(var,
10137 expr->AssignmentId());
10140 case Variable::PARAMETER:
10141 case Variable::LOCAL:
10142 BindIfLive(var, after);
10145 case Variable::CONTEXT: {
10146 // Bail out if we try to mutate a parameter value in a function
10147 // using the arguments object. We do not (yet) correctly handle the
10148 // arguments property of the function.
10149 if (current_info()->scope()->arguments() != NULL) {
10150 // Parameters will rewrite to context slots. We have no direct
10151 // way to detect that the variable is a parameter so we use a
10152 // linear search of the parameter list.
10153 int count = current_info()->scope()->num_parameters();
10154 for (int i = 0; i < count; ++i) {
10155 if (var == current_info()->scope()->parameter(i)) {
10156 return Bailout(kAssignmentToParameterInArgumentsObject);
10161 HValue* context = BuildContextChainWalk(var);
10162 HStoreContextSlot::Mode mode = IsLexicalVariableMode(var->mode())
10163 ? HStoreContextSlot::kCheckDeoptimize : HStoreContextSlot::kNoCheck;
10164 HStoreContextSlot* instr = Add<HStoreContextSlot>(context, var->index(),
10166 if (instr->HasObservableSideEffects()) {
10167 Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
10172 case Variable::LOOKUP:
10173 return Bailout(kLookupVariableInCountOperation);
10176 Drop(returns_original_input ? 2 : 1);
10177 return ast_context()->ReturnValue(expr->is_postfix() ? input : after);
10180 // Argument of the count operation is a property.
10181 DCHECK(prop != NULL);
10182 if (returns_original_input) Push(graph()->GetConstantUndefined());
10184 CHECK_ALIVE(VisitForValue(prop->obj()));
10185 HValue* object = Top();
10187 HValue* key = NULL;
10188 if (!prop->key()->IsPropertyName() || prop->IsStringAccess()) {
10189 CHECK_ALIVE(VisitForValue(prop->key()));
10193 CHECK_ALIVE(PushLoad(prop, object, key));
10195 after = BuildIncrement(returns_original_input, expr);
10197 if (returns_original_input) {
10199 // Drop object and key to push it again in the effect context below.
10200 Drop(key == NULL ? 1 : 2);
10201 environment()->SetExpressionStackAt(0, input);
10202 CHECK_ALIVE(BuildStoreForEffect(
10203 expr, prop, expr->id(), expr->AssignmentId(), object, key, after));
10204 return ast_context()->ReturnValue(Pop());
10207 environment()->SetExpressionStackAt(0, after);
10208 return BuildStore(expr, prop, expr->id(), expr->AssignmentId());
// Emits the Hydrogen IR for String.prototype.charCodeAt(string, index).
// If both operands are constants the result is folded at graph-build time:
// an out-of-range index yields NaN, an in-range index yields the char code.
// Otherwise a checked HStringCharCodeAt instruction is emitted.
// NOTE(review): listing numbers jump (10212->10215, 10222->10224,
// 10224->10227) — the parameter list and several closing braces are missing
// from this excerpt; confirm against the full file.
10212 HInstruction* HOptimizedGraphBuilder::BuildStringCharCodeAt(
// Constant-folding fast path: both operands known at compile time.
10215 if (string->IsConstant() && index->IsConstant()) {
10216 HConstant* c_string = HConstant::cast(string);
10217 HConstant* c_index = HConstant::cast(index);
10218 if (c_string->HasStringValue() && c_index->HasNumberValue()) {
10219 int32_t i = c_index->NumberValueAsInteger32();
10220 Handle<String> s = c_string->StringValue();
// Out-of-bounds access folds to NaN, matching charCodeAt semantics.
10221 if (i < 0 || i >= s->length()) {
10222 return New<HConstant>(base::OS::nan_value());
10224 return New<HConstant>(s->Get(i));
// Generic path: check the receiver is a string, bounds-check the index
// against its length, then emit the char-code load.
10227 string = BuildCheckString(string);
10228 index = Add<HBoundsCheck>(index, AddLoadStringLength(string));
10229 return New<HStringCharCodeAt>(string, index);
10233 // Checks if the given shift amounts have following forms:
10234 // (N1) and (N2) with N1 + N2 = 32; (sa) and (32 - sa).
// Used by MatchRotateRight below: a left-shift by sa combined with a
// right-shift by (32 - sa) of the same value is a 32-bit rotate.
// NOTE(review): listing skips lines 10242 and 10246+ (closing braces) —
// extraction artifact.
10235 static bool ShiftAmountsAllowReplaceByRotate(HValue* sa,
10236 HValue* const32_minus_sa) {
// Case 1: both amounts are integer constants summing to exactly 32.
10237 if (sa->IsConstant() && const32_minus_sa->IsConstant()) {
10238 const HConstant* c1 = HConstant::cast(sa);
10239 const HConstant* c2 = HConstant::cast(const32_minus_sa);
10240 return c1->HasInteger32Value() && c2->HasInteger32Value() &&
10241 (c1->Integer32Value() + c2->Integer32Value() == 32);
// Case 2: the second amount is the expression (32 - sa) for the same sa.
10243 if (!const32_minus_sa->IsSub()) return false;
10244 HSub* sub = HSub::cast(const32_minus_sa);
10245 return sub->left()->EqualsInteger32Constant(32) && sub->right() == sa;
10249 // Checks if the left and the right are shift instructions with the opposite
10250 // directions that can be replaced by one rotate right instruction or not.
10251 // Returns the operand and the shift amount for the rotate instruction in the
// (comment continues on a line missing from this excerpt — presumably
// "*operand and *shift_amount out parameters"; confirm against full file.)
10253 bool HGraphBuilder::MatchRotateRight(HValue* left,
10256 HValue** shift_amount) {
// Accept the shl/shr pair in either operand order.
10259 if (left->IsShl() && right->IsShr()) {
10260 shl = HShl::cast(left);
10261 shr = HShr::cast(right);
10262 } else if (left->IsShr() && right->IsShl()) {
10263 shl = HShl::cast(right);
10264 shr = HShr::cast(left);
// Both shifts must operate on the same value.
10268 if (shl->left() != shr->left()) return false;
// The two shift amounts must be complementary (sum to 32 / 32 - sa form);
// check both argument orders.
10270 if (!ShiftAmountsAllowReplaceByRotate(shl->right(), shr->right()) &&
10271 !ShiftAmountsAllowReplaceByRotate(shr->right(), shl->right())) {
// Report the rotate-right operand and amount through the out parameters.
10274 *operand= shr->left();
10275 *shift_amount = shr->right();
// Conservatively answers whether a shift amount might be zero (mod 32).
// The visible logic: a constant whose low 5 bits are non-zero provably
// cannot act as a zero shift. The non-constant/fall-through result is on
// lines missing from this excerpt (10285+) — presumably "return true".
10280 bool CanBeZero(HValue* right) {
10281 if (right->IsConstant()) {
10282 HConstant* right_const = HConstant::cast(right);
// Only the low 5 bits of the shift amount matter for 32-bit shifts.
10283 if (right_const->HasInteger32Value() &&
10284 (right_const->Integer32Value() & 0x1f) != 0) {
// Forces |number| into the representation implied by the inferred type:
// Smi for SignedSmall, Integer32 for Signed32. The remaining cases
// (e.g. Number -> Double) are on lines missing from this excerpt.
10292 HValue* HGraphBuilder::EnforceNumberType(HValue* number,
10294 if (expected->Is(Type::SignedSmall())) {
10295 return AddUncasted<HForceRepresentation>(number, Representation::Smi());
10297 if (expected->Is(Type::Signed32())) {
10298 return AddUncasted<HForceRepresentation>(number,
10299 Representation::Integer32());
// Converts |value| to a number, refining |*expected| (an in/out type) to
// reflect what the conversion guarantees. Constants are folded directly
// via CopyToTruncatedNumber; otherwise the expected type is split into its
// number and non-number parts to decide what conversion code is needed.
// NOTE(review): several lines (closing braces, later branches) are absent
// from this excerpt per the embedded numbering gaps.
10305 HValue* HGraphBuilder::TruncateToNumber(HValue* value, Type** expected) {
// Fast path: fold constant inputs to a truncated-number constant.
10306 if (value->IsConstant()) {
10307 HConstant* constant = HConstant::cast(value);
10308 Maybe<HConstant*> number = constant->CopyToTruncatedNumber(zone());
10309 if (number.has_value) {
10310 *expected = Type::Number(zone());
10311 return AddInstruction(number.value);
10315 // We put temporary values on the stack, which don't correspond to anything
10316 // in baseline code. Since nothing is observable we avoid recording those
10317 // pushes with a NoObservableSideEffectsScope.
10318 NoObservableSideEffectsScope no_effects(this);
10320 Type* expected_type = *expected;
10322 // Separate the number type from the rest.
10323 Type* expected_obj =
10324 Type::Intersect(expected_type, Type::NonNumber(zone()), zone());
10325 Type* expected_number =
10326 Type::Intersect(expected_type, Type::Number(zone()), zone());
10328 // We expect to get a number.
10329 // (We need to check first, since Type::None->Is(Type::Any()) == true.
10330 if (expected_obj->Is(Type::None())) {
10331 DCHECK(!expected_number->Is(Type::None(zone())));
10335 if (expected_obj->Is(Type::Undefined(zone()))) {
10336 // This is already done by HChange.
10337 *expected = Type::Union(expected_number, Type::Number(zone()), zone());
// AST-level wrapper around HGraphBuilder::BuildBinaryOperation: pulls the
// operand/result types and allocation-site feedback off the BinaryOperation
// AST node, delegates to the type-directed builder, and adds the HSimulate
// bookkeeping required after observable side effects.
10345 HValue* HOptimizedGraphBuilder::BuildBinaryOperation(
10346 BinaryOperation* expr,
10349 PushBeforeSimulateBehavior push_sim_result) {
// Lower type bounds from the type-feedback oracle drive specialization.
10350 Type* left_type = expr->left()->bounds().lower;
10351 Type* right_type = expr->right()->bounds().lower;
10352 Type* result_type = expr->bounds().lower;
10353 Maybe<int> fixed_right_arg = expr->fixed_right_arg();
10354 Handle<AllocationSite> allocation_site = expr->allocation_site();
10356 HAllocationMode allocation_mode;
// Carry allocation-site feedback through only when pretenuring is on.
10357 if (FLAG_allocation_site_pretenuring && !allocation_site.is_null()) {
10358 allocation_mode = HAllocationMode(allocation_site);
10361 HValue* result = HGraphBuilder::BuildBinaryOperation(
10362 expr->op(), left, right, left_type, right_type, result_type,
10363 fixed_right_arg, allocation_mode);
10364 // Add a simulate after instructions with observable side effects, and
10365 // after phis, which are the result of BuildBinaryOperation when we
10366 // inlined some complex subgraph.
10367 if (result->HasObservableSideEffects() || result->IsPhi()) {
// Push-then-simulate vs. simulate-only, per the caller's context; the
// Push/Drop lines between these simulates are missing from this excerpt.
10368 if (push_sim_result == PUSH_BEFORE_SIMULATE) {
10370 Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
10373 Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
// Type-feedback-directed lowering of a JS binary operator to Hydrogen IR.
// Handles: soft-deopt on missing feedback, string concatenation (with
// builtin fallbacks and cons-string inlining), and the arithmetic/bitwise
// operator switch, finishing with representation bookkeeping on the result.
// NOTE(review): the embedded numbering shows many lines missing from this
// excerpt (case labels, break statements, closing braces) — extraction
// artifact; the visible code order matches the upstream file.
10380 HValue* HGraphBuilder::BuildBinaryOperation(
10387 Maybe<int> fixed_right_arg,
10388 HAllocationMode allocation_mode) {
10390 Representation left_rep = Representation::FromType(left_type);
10391 Representation right_rep = Representation::FromType(right_type);
// ADD may be string concatenation if either side could be a string or a
// receiver (which may convert to a string via valueOf/toString).
10393 bool maybe_string_add = op == Token::ADD &&
10394 (left_type->Maybe(Type::String()) ||
10395 left_type->Maybe(Type::Receiver()) ||
10396 right_type->Maybe(Type::String()) ||
10397 right_type->Maybe(Type::Receiver()));
// No feedback for the LHS: soft-deopt to collect it, then treat as Any.
10399 if (!left_type->IsInhabited()) {
10400 Add<HDeoptimize>("Insufficient type feedback for LHS of binary operation",
10401 Deoptimizer::SOFT);
10402 // TODO(rossberg): we should be able to get rid of non-continuous
10404 left_type = Type::Any(zone());
10406 if (!maybe_string_add) left = TruncateToNumber(left, &left_type);
10407 left_rep = Representation::FromType(left_type);
// Same handling for the RHS.
10410 if (!right_type->IsInhabited()) {
10411 Add<HDeoptimize>("Insufficient type feedback for RHS of binary operation",
10412 Deoptimizer::SOFT);
10413 right_type = Type::Any(zone());
10415 if (!maybe_string_add) right = TruncateToNumber(right, &right_type);
10416 right_rep = Representation::FromType(right_type);
10419 // Special case for string addition here.
10420 if (op == Token::ADD &&
10421 (left_type->Is(Type::String()) || right_type->Is(Type::String()))) {
10422 // Validate type feedback for left argument.
10423 if (left_type->Is(Type::String())) {
10424 left = BuildCheckString(left);
10427 // Validate type feedback for right argument.
10428 if (right_type->Is(Type::String())) {
10429 right = BuildCheckString(right);
10432 // Convert left argument as necessary.
10433 if (left_type->Is(Type::Number())) {
10434 DCHECK(right_type->Is(Type::String()));
10435 left = BuildNumberToString(left, left_type);
10436 } else if (!left_type->Is(Type::String())) {
10437 DCHECK(right_type->Is(Type::String()));
// Non-number, non-string LHS: defer to the STRING_ADD_RIGHT builtin,
// which performs the full ToPrimitive/ToString conversion.
10438 HValue* function = AddLoadJSBuiltin(Builtins::STRING_ADD_RIGHT);
10439 Add<HPushArguments>(left, right);
10440 return AddUncasted<HInvokeFunction>(function, 2);
10443 // Convert right argument as necessary.
10444 if (right_type->Is(Type::Number())) {
10445 DCHECK(left_type->Is(Type::String()));
10446 right = BuildNumberToString(right, right_type);
10447 } else if (!right_type->Is(Type::String())) {
10448 DCHECK(left_type->Is(Type::String()));
10449 HValue* function = AddLoadJSBuiltin(Builtins::STRING_ADD_LEFT);
10450 Add<HPushArguments>(left, right);
10451 return AddUncasted<HInvokeFunction>(function, 2);
10454 // Fast path for empty constant strings.
// ("" + x == x and x + "" == x; the return statements for these two
// checks fall on lines missing from this excerpt.)
10455 if (left->IsConstant() &&
10456 HConstant::cast(left)->HasStringValue() &&
10457 HConstant::cast(left)->StringValue()->length() == 0) {
10460 if (right->IsConstant() &&
10461 HConstant::cast(right)->HasStringValue() &&
10462 HConstant::cast(right)->StringValue()->length() == 0) {
10466 // Register the dependent code with the allocation site.
10467 if (!allocation_mode.feedback_site().is_null()) {
10468 DCHECK(!graph()->info()->IsStub());
10469 Handle<AllocationSite> site(allocation_mode.feedback_site());
10470 AllocationSite::AddDependentCompilationInfo(
10471 site, AllocationSite::TENURING, top_info());
10474 // Inline the string addition into the stub when creating allocation
10475 // mementos to gather allocation site feedback, or if we can statically
10476 // infer that we're going to create a cons string.
10477 if ((graph()->info()->IsStub() &&
10478 allocation_mode.CreateAllocationMementos()) ||
10479 (left->IsConstant() &&
10480 HConstant::cast(left)->HasStringValue() &&
10481 HConstant::cast(left)->StringValue()->length() + 1 >=
10482 ConsString::kMinLength) ||
10483 (right->IsConstant() &&
10484 HConstant::cast(right)->HasStringValue() &&
10485 HConstant::cast(right)->StringValue()->length() + 1 >=
10486 ConsString::kMinLength)) {
10487 return BuildStringAdd(left, right, allocation_mode);
10490 // Fallback to using the string add stub.
10491 return AddUncasted<HStringAdd>(
10492 left, right, allocation_mode.GetPretenureMode(),
10493 STRING_ADD_CHECK_NONE, allocation_mode.feedback_site());
// Numeric path. In stub code, pin operands to their feedback-implied
// representations so the stub stays monomorphic.
10496 if (graph()->info()->IsStub()) {
10497 left = EnforceNumberType(left, left_type);
10498 right = EnforceNumberType(right, right_type);
10501 Representation result_rep = Representation::FromType(result_type);
// Tagged-but-not-Smi operands may be arbitrary heap objects needing the
// full runtime conversion.
10503 bool is_non_primitive = (left_rep.IsTagged() && !left_rep.IsSmi()) ||
10504 (right_rep.IsTagged() && !right_rep.IsSmi());
10506 HInstruction* instr = NULL;
10507 // Only the stub is allowed to call into the runtime, since otherwise we would
10508 // inline several instructions (including the two pushes) for every tagged
10509 // operation in optimized code, which is more expensive, than a stub call.
10510 if (graph()->info()->IsStub() && is_non_primitive) {
10511 HValue* function = AddLoadJSBuiltin(BinaryOpIC::TokenToJSBuiltin(op));
10512 Add<HPushArguments>(left, right);
10513 instr = AddUncasted<HInvokeFunction>(function, 2);
// Operator dispatch. The switch header, case labels and break statements
// are on lines missing from this excerpt; the arms below are (in upstream
// order): ADD, SUB, MUL, MOD, DIV, BIT_XOR/BIT_AND, BIT_OR, SAR, SHR, SHL.
10517 instr = AddUncasted<HAdd>(left, right);
10520 instr = AddUncasted<HSub>(left, right);
10523 instr = AddUncasted<HMul>(left, right);
// MOD with a known constant RHS from feedback: guard that the runtime RHS
// equals it, deopt otherwise, then use the constant for strength reduction.
10526 if (fixed_right_arg.has_value &&
10527 !right->EqualsInteger32Constant(fixed_right_arg.value)) {
10528 HConstant* fixed_right = Add<HConstant>(
10529 static_cast<int>(fixed_right_arg.value));
10530 IfBuilder if_same(this);
10531 if_same.If<HCompareNumericAndBranch>(right, fixed_right, Token::EQ);
10533 if_same.ElseDeopt("Unexpected RHS of binary operation");
10534 right = fixed_right;
10536 instr = AddUncasted<HMod>(left, right);
10540 instr = AddUncasted<HDiv>(left, right);
10542 case Token::BIT_XOR:
10543 case Token::BIT_AND:
10544 instr = AddUncasted<HBitwise>(op, left, right);
10546 case Token::BIT_OR: {
// (a << n) | (a >> (32 - n)) on int32 operands becomes a rotate right.
10547 HValue* operand, *shift_amount;
10548 if (left_type->Is(Type::Signed32()) &&
10549 right_type->Is(Type::Signed32()) &&
10550 MatchRotateRight(left, right, &operand, &shift_amount)) {
10551 instr = AddUncasted<HRor>(operand, shift_amount);
10553 instr = AddUncasted<HBitwise>(op, left, right);
10558 instr = AddUncasted<HSar>(left, right);
10561 instr = AddUncasted<HShr>(left, right);
// x >>> 0 can produce values above kMaxInt; track as a uint32 candidate.
10562 if (instr->IsShr() && CanBeZero(right)) {
10563 graph()->RecordUint32Instruction(instr);
10567 instr = AddUncasted<HShl>(left, right);
// Attach observed/required representations so the representation-inference
// pass can pick concrete machine representations later.
10574 if (instr->IsBinaryOperation()) {
10575 HBinaryOperation* binop = HBinaryOperation::cast(instr);
10576 binop->set_observed_input_representation(1, left_rep);
10577 binop->set_observed_input_representation(2, right_rep);
10578 binop->initialize_output_representation(result_rep);
10579 if (graph()->info()->IsStub()) {
10580 // Stub should not call into stub.
10581 instr->SetFlag(HValue::kCannotBeTagged);
10582 // And should truncate on HForceRepresentation already.
10583 if (left->IsForceRepresentation()) {
10584 left->CopyFlag(HValue::kTruncatingToSmi, instr);
10585 left->CopyFlag(HValue::kTruncatingToInt32, instr);
10587 if (right->IsForceRepresentation()) {
10588 right->CopyFlag(HValue::kTruncatingToSmi, instr);
10589 right->CopyFlag(HValue::kTruncatingToInt32, instr);
10597 // Check for the form (%_ClassOf(foo) === 'BarClass').
// Recognizes the special AST shape emitted for class-of checks so that
// VisitCompareOperation can lower it to HClassOfTestAndBranch instead of a
// generic strict-equality compare. Returns false for any other shape.
10598 static bool IsClassOfTest(CompareOperation* expr) {
10599 if (expr->op() != Token::EQ_STRICT) return false;
// LHS must be the %_ClassOf intrinsic call, RHS a string literal.
10600 CallRuntime* call = expr->left()->AsCallRuntime();
10601 if (call == NULL) return false;
10602 Literal* literal = expr->right()->AsLiteral();
10603 if (literal == NULL) return false;
10604 if (!literal->value()->IsString()) return false;
10605 if (!call->name()->IsOneByteEqualTo(STATIC_CHAR_VECTOR("_ClassOf"))) {
// (The "return false; }" and final "return true;" lines are missing from
// this excerpt per the numbering gap 10605 -> 10608.)
10608 DCHECK(call->arguments()->length() == 1);
// AST visitor entry point for binary operations: dispatches on the operator
// to the comma, logical (&&/||), or arithmetic specializations.
// (The case labels between the returns are on lines missing from this
// excerpt: COMMA, OR/AND, and the default arithmetic case.)
10613 void HOptimizedGraphBuilder::VisitBinaryOperation(BinaryOperation* expr) {
10614 DCHECK(!HasStackOverflow());
10615 DCHECK(current_block() != NULL);
10616 DCHECK(current_block()->HasPredecessor());
10617 switch (expr->op()) {
10619 return VisitComma(expr);
10622 return VisitLogicalExpression(expr);
10624 return VisitArithmeticExpression(expr);
// Comma operator: evaluate the left side purely for effect, then visit the
// right side in the caller's AST context so its value/control result flows
// through unchanged.
10629 void HOptimizedGraphBuilder::VisitComma(BinaryOperation* expr) {
10630 CHECK_ALIVE(VisitForEffect(expr->left()));
10631 // Visit the right subexpression in the same AST context as the entire
10633 Visit(expr->right());
// Short-circuiting && / || lowering, specialized per AST context:
// - Test context: thread control flow straight to the context's targets.
// - Value context: produce the left or right value with an edge-split join.
// - Effect context: evaluate for side effects/control only, no value.
// NOTE(review): numbering gaps show else-branches, closing braces and a few
// argument lines are missing from this excerpt.
10637 void HOptimizedGraphBuilder::VisitLogicalExpression(BinaryOperation* expr) {
10638 bool is_logical_and = expr->op() == Token::AND;
10639 if (ast_context()->IsTest()) {
10640 TestContext* context = TestContext::cast(ast_context());
10641 // Translate left subexpression.
10642 HBasicBlock* eval_right = graph()->CreateBasicBlock();
// For &&, a false left goes straight to if_false; for ||, a true left
// goes straight to if_true. Either way the other edge evaluates the RHS.
10643 if (is_logical_and) {
10644 CHECK_BAILOUT(VisitForControl(expr->left(),
10646 context->if_false()));
10648 CHECK_BAILOUT(VisitForControl(expr->left(),
10649 context->if_true(),
10653 // Translate right subexpression by visiting it in the same AST
10654 // context as the entire expression.
10655 if (eval_right->HasPredecessor()) {
10656 eval_right->SetJoinId(expr->RightId());
10657 set_current_block(eval_right);
10658 Visit(expr->right());
10661 } else if (ast_context()->IsValue()) {
10662 CHECK_ALIVE(VisitForValue(expr->left()));
10663 DCHECK(current_block() != NULL);
10664 HValue* left_value = Top();
10666 // Short-circuit left values that always evaluate to the same boolean value.
10667 if (expr->left()->ToBooleanIsTrue() || expr->left()->ToBooleanIsFalse()) {
10668 // l (evals true) && r -> r
10669 // l (evals true) || r -> l
10670 // l (evals false) && r -> l
10671 // l (evals false) || r -> r
10672 if (is_logical_and == expr->left()->ToBooleanIsTrue()) {
10674 CHECK_ALIVE(VisitForValue(expr->right()));
10676 return ast_context()->ReturnValue(Pop());
10679 // We need an extra block to maintain edge-split form.
10680 HBasicBlock* empty_block = graph()->CreateBasicBlock();
10681 HBasicBlock* eval_right = graph()->CreateBasicBlock();
10682 ToBooleanStub::Types expected(expr->left()->to_boolean_types());
10683 HBranch* test = is_logical_and
10684 ? New<HBranch>(left_value, expected, eval_right, empty_block)
10685 : New<HBranch>(left_value, expected, empty_block, eval_right);
10686 FinishCurrentBlock(test);
10688 set_current_block(eval_right);
10689 Drop(1); // Value of the left subexpression.
10690 CHECK_BAILOUT(VisitForValue(expr->right()));
10692 HBasicBlock* join_block =
10693 CreateJoin(empty_block, current_block(), expr->id());
10694 set_current_block(join_block);
10695 return ast_context()->ReturnValue(Pop());
// Effect context (the else of the IsValue branch above).
10698 DCHECK(ast_context()->IsEffect());
10699 // In an effect context, we don't need the value of the left subexpression,
10700 // only its control flow and side effects. We need an extra block to
10701 // maintain edge-split form.
10702 HBasicBlock* empty_block = graph()->CreateBasicBlock();
10703 HBasicBlock* right_block = graph()->CreateBasicBlock();
10704 if (is_logical_and) {
10705 CHECK_BAILOUT(VisitForControl(expr->left(), right_block, empty_block));
10707 CHECK_BAILOUT(VisitForControl(expr->left(), empty_block, right_block));
10710 // TODO(kmillikin): Find a way to fix this. It's ugly that there are
10711 // actually two empty blocks (one here and one inserted by
10712 // TestContext::BuildBranch, and that they both have an HSimulate though the
10713 // second one is not a merge node, and that we really have no good AST ID to
10714 // put on that first HSimulate.
// Unreachable blocks are dropped (NULL) so CreateJoin can handle them.
10716 if (empty_block->HasPredecessor()) {
10717 empty_block->SetJoinId(expr->id());
10719 empty_block = NULL;
10722 if (right_block->HasPredecessor()) {
10723 right_block->SetJoinId(expr->RightId());
10724 set_current_block(right_block);
10725 CHECK_BAILOUT(VisitForEffect(expr->right()));
10726 right_block = current_block();
10728 right_block = NULL;
10731 HBasicBlock* join_block =
10732 CreateJoin(empty_block, right_block, expr->id());
10733 set_current_block(join_block);
10734 // We did not materialize any value in the predecessor environments,
10735 // so there is no need to handle it here.
// Arithmetic/bitwise binary operator: evaluate both operands onto the
// environment stack, build the operation, and return its value.
// NOTE(review): the "HValue* result =" assignment line (10746) and the
// "zone()," argument line (10752) are missing from this excerpt, which is
// why `result` appears unbound below — extraction artifact.
10740 void HOptimizedGraphBuilder::VisitArithmeticExpression(BinaryOperation* expr) {
10741 CHECK_ALIVE(VisitForValue(expr->left()));
10742 CHECK_ALIVE(VisitForValue(expr->right()));
10743 SetSourcePosition(expr->position());
// Operands were pushed left-then-right, so pop in reverse order.
10744 HValue* right = Pop();
10745 HValue* left = Pop();
10747 BuildBinaryOperation(expr, left, right,
10748 ast_context()->IsEffect() ? NO_PUSH_BEFORE_SIMULATE
10749 : PUSH_BEFORE_SIMULATE);
// Record precise operand positions only when position tracking is on.
10750 if (FLAG_hydrogen_track_positions && result->IsBinaryOperation()) {
10751 HBinaryOperation::cast(result)->SetOperandPositions(
10753 ScriptPositionToSourcePosition(expr->left()->position()),
10754 ScriptPositionToSourcePosition(expr->right()->position()));
10756 return ast_context()->ReturnValue(result);
// Lowers the pattern (typeof x === "literal") to a single
// HTypeofIsAndBranch control instruction instead of materializing the
// typeof string and doing a generic compare.
10760 void HOptimizedGraphBuilder::HandleLiteralCompareTypeof(CompareOperation* expr,
10761 Expression* sub_expr,
10762 Handle<String> check) {
10763 CHECK_ALIVE(VisitForTypeOf(sub_expr));
10764 SetSourcePosition(expr->position());
10765 HValue* value = Pop();
10766 HTypeofIsAndBranch* instr = New<HTypeofIsAndBranch>(value, check);
10767 return ast_context()->ReturnControl(instr, expr->id());
// True when a strict-equality compare has a boolean constant on either
// side — such compares can use a raw object-identity branch.
// (Parameter lines 10772-10774 are missing from this excerpt; the visible
// body uses left, op and right.)
10771 static bool IsLiteralCompareBool(Isolate* isolate,
10775 return op == Token::EQ_STRICT &&
10776 ((left->IsConstant() &&
10777 HConstant::cast(left)->handle(isolate)->IsBoolean()) ||
10778 (right->IsConstant() &&
10779 HConstant::cast(right)->handle(isolate)->IsBoolean()));
// AST visitor for comparison operators. Peels off the special cases first
// (typeof-compare, ==/=== undefined/null, %_ClassOf test, boolean-literal
// strict equality, instanceof, `in`), then falls back to the generic
// type-feedback-driven BuildCompareInstruction.
// NOTE(review): a few closing-brace/else lines are missing from this
// excerpt per the embedded numbering gaps.
10783 void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
10784 DCHECK(!HasStackOverflow());
10785 DCHECK(current_block() != NULL);
10786 DCHECK(current_block()->HasPredecessor());
10788 if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
10790 // Check for a few fast cases. The AST visiting behavior must be in sync
10791 // with the full codegen: We don't push both left and right values onto
10792 // the expression stack when one side is a special-case literal.
10793 Expression* sub_expr = NULL;
10794 Handle<String> check;
10795 if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
10796 return HandleLiteralCompareTypeof(expr, sub_expr, check);
10798 if (expr->IsLiteralCompareUndefined(&sub_expr, isolate())) {
10799 return HandleLiteralCompareNil(expr, sub_expr, kUndefinedValue);
10801 if (expr->IsLiteralCompareNull(&sub_expr)) {
10802 return HandleLiteralCompareNil(expr, sub_expr, kNullValue);
// %_ClassOf(x) === 'Name' becomes a dedicated class-of branch.
10805 if (IsClassOfTest(expr)) {
10806 CallRuntime* call = expr->left()->AsCallRuntime();
10807 DCHECK(call->arguments()->length() == 1);
10808 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10809 HValue* value = Pop();
10810 Literal* literal = expr->right()->AsLiteral();
10811 Handle<String> rhs = Handle<String>::cast(literal->value());
10812 HClassOfTestAndBranch* instr = New<HClassOfTestAndBranch>(value, rhs);
10813 return ast_context()->ReturnControl(instr, expr->id());
10816 Type* left_type = expr->left()->bounds().lower;
10817 Type* right_type = expr->right()->bounds().lower;
10818 Type* combined_type = expr->combined_type();
10820 CHECK_ALIVE(VisitForValue(expr->left()));
10821 CHECK_ALIVE(VisitForValue(expr->right()));
10823 if (FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
10825 HValue* right = Pop();
10826 HValue* left = Pop();
10827 Token::Value op = expr->op();
// Strict equality against a boolean constant is plain object identity.
10829 if (IsLiteralCompareBool(isolate(), left, op, right)) {
10830 HCompareObjectEqAndBranch* result =
10831 New<HCompareObjectEqAndBranch>(left, right);
10832 return ast_context()->ReturnControl(result, expr->id());
10835 if (op == Token::INSTANCEOF) {
10836 // Check to see if the rhs of the instanceof is a global function not
10837 // residing in new space. If it is we assume that the function will stay the
10839 Handle<JSFunction> target = Handle<JSFunction>::null();
10840 VariableProxy* proxy = expr->right()->AsVariableProxy();
10841 bool global_function = (proxy != NULL) && proxy->var()->IsUnallocated();
10842 if (global_function && current_info()->has_global_object()) {
10843 Handle<String> name = proxy->name();
10844 Handle<GlobalObject> global(current_info()->global_object());
10845 LookupIterator it(global, name, LookupIterator::OWN_SKIP_INTERCEPTOR);
10846 Handle<Object> value = JSObject::GetDataProperty(&it);
10847 if (it.IsFound() && value->IsJSFunction()) {
10848 Handle<JSFunction> candidate = Handle<JSFunction>::cast(value);
10849 // If the function is in new space we assume it's more likely to
10850 // change and thus prefer the general IC code.
10851 if (!isolate()->heap()->InNewSpace(*candidate)) {
10852 target = candidate;
10857 // If the target is not null we have found a known global function that is
10858 // assumed to stay the same for this instanceof.
10859 if (target.is_null()) {
10860 HInstanceOf* result = New<HInstanceOf>(left, right);
10861 return ast_context()->ReturnInstruction(result, expr->id());
// Known global constructor: guard its identity, then use the fast
// known-global instanceof instruction.
10863 Add<HCheckValue>(right, target);
10864 HInstanceOfKnownGlobal* result =
10865 New<HInstanceOfKnownGlobal>(left, target);
10866 return ast_context()->ReturnInstruction(result, expr->id());
10869 // Code below assumes that we don't fall through.
10871 } else if (op == Token::IN) {
10872 HValue* function = AddLoadJSBuiltin(Builtins::IN);
10873 Add<HPushArguments>(left, right);
10874 // TODO(olivf) InvokeFunction produces a check for the parameter count,
10875 // even though we are certain to pass the correct number of arguments here.
10876 HInstruction* result = New<HInvokeFunction>(function, 2);
10877 return ast_context()->ReturnInstruction(result, expr->id());
// Generic compare path driven by collected type feedback.
10880 PushBeforeSimulateBehavior push_behavior =
10881 ast_context()->IsEffect() ? NO_PUSH_BEFORE_SIMULATE
10882 : PUSH_BEFORE_SIMULATE;
10883 HControlInstruction* compare = BuildCompareInstruction(
10884 op, left, right, left_type, right_type, combined_type,
10885 ScriptPositionToSourcePosition(expr->left()->position()),
10886 ScriptPositionToSourcePosition(expr->right()->position()),
10887 push_behavior, expr->id());
10888 if (compare == NULL) return; // Bailed out.
10889 return ast_context()->ReturnControl(compare, expr->id());
// Builds the control instruction for a generic comparison, specialized by
// the combined type feedback: receiver identity, internalized-string
// identity, string relational compare, generic tagged compare (with
// simulate bookkeeping), or numeric compare. Returns NULL after a bailout.
// NOTE(review): numbering gaps show several return/brace lines are missing
// from this excerpt.
10893 HControlInstruction* HOptimizedGraphBuilder::BuildCompareInstruction(
10899 Type* combined_type,
10900 HSourcePosition left_position,
10901 HSourcePosition right_position,
10902 PushBeforeSimulateBehavior push_sim_result,
10903 BailoutId bailout_id) {
10904 // Cases handled below depend on collected type feedback. They should
10905 // soft deoptimize when there is no type feedback.
10906 if (!combined_type->IsInhabited()) {
10907 Add<HDeoptimize>("Insufficient type feedback for combined type "
10908 "of binary operation",
10909 Deoptimizer::SOFT);
10910 combined_type = left_type = right_type = Type::Any(zone());
10913 Representation left_rep = Representation::FromType(left_type);
10914 Representation right_rep = Representation::FromType(right_type);
10915 Representation combined_rep = Representation::FromType(combined_type);
10917 if (combined_type->Is(Type::Receiver())) {
10918 if (Token::IsEqualityOp(op)) {
10919 // HCompareObjectEqAndBranch can only deal with object, so
10920 // exclude numbers.
10921 if ((left->IsConstant() &&
10922 HConstant::cast(left)->HasNumberValue()) ||
10923 (right->IsConstant() &&
10924 HConstant::cast(right)->HasNumberValue())) {
10925 Add<HDeoptimize>("Type mismatch between feedback and constant",
10926 Deoptimizer::SOFT);
10927 // The caller expects a branch instruction, so make it happy.
10928 return New<HBranch>(graph()->GetConstantTrue());
10930 // Can we get away with map check and not instance type check?
// Check the operand defined earlier in the graph so the check dominates
// both uses.
10931 HValue* operand_to_check =
10932 left->block()->block_id() < right->block()->block_id() ? left : right;
10933 if (combined_type->IsClass()) {
10934 Handle<Map> map = combined_type->AsClass()->Map();
10935 AddCheckMap(operand_to_check, map);
10936 HCompareObjectEqAndBranch* result =
10937 New<HCompareObjectEqAndBranch>(left, right);
10938 if (FLAG_hydrogen_track_positions) {
10939 result->set_operand_position(zone(), 0, left_position);
10940 result->set_operand_position(zone(), 1, right_position);
// No precise class: fall back to a spec-object instance-type check.
10944 BuildCheckHeapObject(operand_to_check);
10945 Add<HCheckInstanceType>(operand_to_check,
10946 HCheckInstanceType::IS_SPEC_OBJECT);
10947 HCompareObjectEqAndBranch* result =
10948 New<HCompareObjectEqAndBranch>(left, right);
// Relational compares on receivers are not supported.
10952 Bailout(kUnsupportedNonPrimitiveCompare);
10955 } else if (combined_type->Is(Type::InternalizedString()) &&
10956 Token::IsEqualityOp(op)) {
10957 // If we have a constant argument, it should be consistent with the type
10958 // feedback (otherwise we fail assertions in HCompareObjectEqAndBranch).
10959 if ((left->IsConstant() &&
10960 !HConstant::cast(left)->HasInternalizedStringValue()) ||
10961 (right->IsConstant() &&
10962 !HConstant::cast(right)->HasInternalizedStringValue())) {
10963 Add<HDeoptimize>("Type mismatch between feedback and constant",
10964 Deoptimizer::SOFT);
10965 // The caller expects a branch instruction, so make it happy.
10966 return New<HBranch>(graph()->GetConstantTrue());
// Internalized strings are unique, so equality is pointer identity.
10968 BuildCheckHeapObject(left);
10969 Add<HCheckInstanceType>(left, HCheckInstanceType::IS_INTERNALIZED_STRING);
10970 BuildCheckHeapObject(right);
10971 Add<HCheckInstanceType>(right, HCheckInstanceType::IS_INTERNALIZED_STRING);
10972 HCompareObjectEqAndBranch* result =
10973 New<HCompareObjectEqAndBranch>(left, right);
10975 } else if (combined_type->Is(Type::String())) {
10976 BuildCheckHeapObject(left);
10977 Add<HCheckInstanceType>(left, HCheckInstanceType::IS_STRING);
10978 BuildCheckHeapObject(right);
10979 Add<HCheckInstanceType>(right, HCheckInstanceType::IS_STRING);
10980 HStringCompareAndBranch* result =
10981 New<HStringCompareAndBranch>(left, right, op);
10984 if (combined_rep.IsTagged() || combined_rep.IsNone()) {
// Generic tagged compare has observable side effects, so it needs a
// simulate; honor the caller's push-before-simulate preference.
10985 HCompareGeneric* result = Add<HCompareGeneric>(left, right, op);
10986 result->set_observed_input_representation(1, left_rep);
10987 result->set_observed_input_representation(2, right_rep);
10988 if (result->HasObservableSideEffects()) {
10989 if (push_sim_result == PUSH_BEFORE_SIMULATE) {
10991 AddSimulate(bailout_id, REMOVABLE_SIMULATE);
10994 AddSimulate(bailout_id, REMOVABLE_SIMULATE);
10997 // TODO(jkummerow): Can we make this more efficient?
10998 HBranch* branch = New<HBranch>(result);
// Untagged (numeric) representations use the direct numeric compare.
11001 HCompareNumericAndBranch* result =
11002 New<HCompareNumericAndBranch>(left, right, op);
11003 result->set_observed_input_representation(left_rep, right_rep);
11004 if (FLAG_hydrogen_track_positions) {
11005 result->SetOperandPositions(zone(), left_position, right_position);
// Lowers (x === null), (x === undefined) to a direct identity branch
// against the corresponding constant; non-strict == null/undefined uses
// the combined-type-driven BuildCompareNil continuation instead (which
// also matches the other nil value per JS == semantics).
11013 void HOptimizedGraphBuilder::HandleLiteralCompareNil(CompareOperation* expr,
11014 Expression* sub_expr,
11016 DCHECK(!HasStackOverflow());
11017 DCHECK(current_block() != NULL);
11018 DCHECK(current_block()->HasPredecessor());
11019 DCHECK(expr->op() == Token::EQ || expr->op() == Token::EQ_STRICT);
11020 if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
11021 CHECK_ALIVE(VisitForValue(sub_expr));
11022 HValue* value = Pop();
11023 if (expr->op() == Token::EQ_STRICT) {
11024 HConstant* nil_constant = nil == kNullValue
11025 ? graph()->GetConstantNull()
11026 : graph()->GetConstantUndefined();
11027 HCompareObjectEqAndBranch* instr =
11028 New<HCompareObjectEqAndBranch>(value, nil_constant);
11029 return ast_context()->ReturnControl(instr, expr->id());
11031 DCHECK_EQ(Token::EQ, expr->op());
// Without feedback (None) assume Any so the nil check covers all cases.
11032 Type* type = expr->combined_type()->Is(Type::None())
11033 ? Type::Any(zone()) : expr->combined_type();
11034 HIfContinuation continuation;
11035 BuildCompareNil(value, type, &continuation);
11036 return ast_context()->ReturnContinuation(&continuation, expr->id());
// Materializes the current function value: a constant closure when inside
// an inlined body (the closure is known), otherwise HThisFunction since
// optimized code may be shared between closures.
11041 HInstruction* HOptimizedGraphBuilder::BuildThisFunction() {
11042 // If we share optimized code between different closures, the
11043 // this-function is not a constant, except inside an inlined body.
11044 if (function_state()->outer() != NULL) {
11045 return New<HConstant>(
11046 function_state()->compilation_info()->closure());
11048 return New<HThisFunction>();
// Emits inline allocation + initialization code that deep-copies a literal
// boilerplate object (JSArray or JSObject): allocates the object, writes
// its header, optionally allocates/copies a non-COW elements store, and
// copies in-object properties. Pretenuring decisions come from the
// allocation-site feedback in |site_context|.
11053 HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
11054 Handle<JSObject> boilerplate_object,
11055 AllocationSiteUsageContext* site_context) {
// Literal setup stores are invisible to baseline code; suppress simulates.
11056 NoObservableSideEffectsScope no_effects(this);
11057 InstanceType instance_type = boilerplate_object->map()->instance_type();
11058 DCHECK(instance_type == JS_ARRAY_TYPE || instance_type == JS_OBJECT_TYPE);
11060 HType type = instance_type == JS_ARRAY_TYPE
11061 ? HType::JSArray() : HType::JSObject();
11062 HValue* object_size_constant = Add<HConstant>(
11063 boilerplate_object->map()->instance_size());
11065 PretenureFlag pretenure_flag = NOT_TENURED;
11066 if (FLAG_allocation_site_pretenuring) {
11067 pretenure_flag = site_context->current()->GetPretenureMode();
11068 Handle<AllocationSite> site(site_context->current());
// Register so this code deopts if the site's tenuring decision changes.
11069 AllocationSite::AddDependentCompilationInfo(
11070 site, AllocationSite::TENURING, top_info());
11073 HInstruction* object = Add<HAllocate>(object_size_constant, type,
11074 pretenure_flag, instance_type, site_context->current());
11076 // If allocation folding reaches Page::kMaxRegularHeapObjectSize the
11077 // elements array may not get folded into the object. Hence, we set the
11078 // elements pointer to empty fixed array and let store elimination remove
11079 // this store in the folding case.
11080 HConstant* empty_fixed_array = Add<HConstant>(
11081 isolate()->factory()->empty_fixed_array());
11082 Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
11083 empty_fixed_array);
11085 BuildEmitObjectHeader(boilerplate_object, object);
// COW elements are shared, not copied, so they contribute no size here.
11087 Handle<FixedArrayBase> elements(boilerplate_object->elements());
11088 int elements_size = (elements->length() > 0 &&
11089 elements->map() != isolate()->heap()->fixed_cow_array_map()) ?
11090 elements->Size() : 0;
11092 if (pretenure_flag == TENURED &&
11093 elements->map() == isolate()->heap()->fixed_cow_array_map() &&
11094 isolate()->heap()->InNewSpace(*elements)) {
11095 // If we would like to pretenure a fixed cow array, we must ensure that the
11096 // array is already in old space, otherwise we'll create too many old-to-
11097 // new-space pointers (overflowing the store buffer).
11098 elements = Handle<FixedArrayBase>(
11099 isolate()->factory()->CopyAndTenureFixedCOWArray(
11100 Handle<FixedArray>::cast(elements)));
11101 boilerplate_object->set_elements(*elements);
11104 HInstruction* object_elements = NULL;
11105 if (elements_size > 0) {
11106 HValue* object_elements_size = Add<HConstant>(elements_size);
11107 InstanceType instance_type = boilerplate_object->HasFastDoubleElements()
11108 ? FIXED_DOUBLE_ARRAY_TYPE : FIXED_ARRAY_TYPE;
11109 object_elements = Add<HAllocate>(
11110 object_elements_size, HType::HeapObject(),
11111 pretenure_flag, instance_type, site_context->current());
11113 BuildInitElementsInObjectHeader(boilerplate_object, object, object_elements);
11115 // Copy object elements if non-COW.
11116 if (object_elements != NULL) {
11117 BuildEmitElements(boilerplate_object, elements, object_elements,
11121 // Copy in-object properties.
11122 if (boilerplate_object->map()->NumberOfFields() != 0 ||
11123 boilerplate_object->map()->unused_property_fields() > 0) {
11124 BuildEmitInObjectProperties(boilerplate_object, object, site_context,
// Writes the freshly allocated literal object's header fields: map,
// (empty) properties backing store, and — for arrays — the length.
// Precondition: the boilerplate has no out-of-object properties.
11131 void HOptimizedGraphBuilder::BuildEmitObjectHeader(
11132 Handle<JSObject> boilerplate_object,
11133 HInstruction* object) {
11134 DCHECK(boilerplate_object->properties()->length() == 0);
11136 Handle<Map> boilerplate_object_map(boilerplate_object->map());
11137 AddStoreMapConstant(object, boilerplate_object_map);
11139 Handle<Object> properties_field =
11140 Handle<Object>(boilerplate_object->properties(), isolate());
11141 DCHECK(*properties_field == isolate()->heap()->empty_fixed_array());
11142 HInstruction* properties = Add<HConstant>(properties_field);
11143 HObjectAccess access = HObjectAccess::ForPropertiesPointer();
11144 Add<HStoreNamedField>(object, access, properties);
11146 if (boilerplate_object->IsJSArray()) {
11147 Handle<JSArray> boilerplate_array =
11148 Handle<JSArray>::cast(boilerplate_object);
11149 Handle<Object> length_field =
11150 Handle<Object>(boilerplate_array->length(), isolate());
11151 HInstruction* length = Add<HConstant>(length_field);
11153 DCHECK(boilerplate_array->length()->IsSmi());
// The array-length access depends on the elements kind.
11154 Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(
11155 boilerplate_array->GetElementsKind()), length);
11160 void HOptimizedGraphBuilder::BuildInitElementsInObjectHeader(
11161 Handle<JSObject> boilerplate_object,
11162 HInstruction* object,
11163 HInstruction* object_elements) {
11164 DCHECK(boilerplate_object->properties()->length() == 0);
11165 if (object_elements == NULL) {
11166 Handle<Object> elements_field =
11167 Handle<Object>(boilerplate_object->elements(), isolate());
11168 object_elements = Add<HConstant>(elements_field);
11170 Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
11175 void HOptimizedGraphBuilder::BuildEmitInObjectProperties(
11176 Handle<JSObject> boilerplate_object,
11177 HInstruction* object,
11178 AllocationSiteUsageContext* site_context,
11179 PretenureFlag pretenure_flag) {
11180 Handle<Map> boilerplate_map(boilerplate_object->map());
11181 Handle<DescriptorArray> descriptors(boilerplate_map->instance_descriptors());
11182 int limit = boilerplate_map->NumberOfOwnDescriptors();
11184 int copied_fields = 0;
11185 for (int i = 0; i < limit; i++) {
11186 PropertyDetails details = descriptors->GetDetails(i);
11187 if (details.type() != FIELD) continue;
11189 int index = descriptors->GetFieldIndex(i);
11190 int property_offset = boilerplate_object->GetInObjectPropertyOffset(index);
11191 Handle<Name> name(descriptors->GetKey(i));
11192 Handle<Object> value =
11193 Handle<Object>(boilerplate_object->InObjectPropertyAt(index),
11196 // The access for the store depends on the type of the boilerplate.
11197 HObjectAccess access = boilerplate_object->IsJSArray() ?
11198 HObjectAccess::ForJSArrayOffset(property_offset) :
11199 HObjectAccess::ForMapAndOffset(boilerplate_map, property_offset);
11201 if (value->IsJSObject()) {
11202 Handle<JSObject> value_object = Handle<JSObject>::cast(value);
11203 Handle<AllocationSite> current_site = site_context->EnterNewScope();
11204 HInstruction* result =
11205 BuildFastLiteral(value_object, site_context);
11206 site_context->ExitScope(current_site, value_object);
11207 Add<HStoreNamedField>(object, access, result);
11209 Representation representation = details.representation();
11210 HInstruction* value_instruction;
11212 if (representation.IsDouble()) {
11213 // Allocate a HeapNumber box and store the value into it.
11214 HValue* heap_number_constant = Add<HConstant>(HeapNumber::kSize);
11215 // This heap number alloc does not have a corresponding
11216 // AllocationSite. That is okay because
11217 // 1) it's a child object of another object with a valid allocation site
11218 // 2) we can just use the mode of the parent object for pretenuring
11219 HInstruction* double_box =
11220 Add<HAllocate>(heap_number_constant, HType::HeapObject(),
11221 pretenure_flag, MUTABLE_HEAP_NUMBER_TYPE);
11222 AddStoreMapConstant(double_box,
11223 isolate()->factory()->mutable_heap_number_map());
11224 // Unwrap the mutable heap number from the boilerplate.
11225 HValue* double_value =
11226 Add<HConstant>(Handle<HeapNumber>::cast(value)->value());
11227 Add<HStoreNamedField>(
11228 double_box, HObjectAccess::ForHeapNumberValue(), double_value);
11229 value_instruction = double_box;
11230 } else if (representation.IsSmi()) {
11231 value_instruction = value->IsUninitialized()
11232 ? graph()->GetConstant0()
11233 : Add<HConstant>(value);
11234 // Ensure that value is stored as smi.
11235 access = access.WithRepresentation(representation);
11237 value_instruction = Add<HConstant>(value);
11240 Add<HStoreNamedField>(object, access, value_instruction);
11244 int inobject_properties = boilerplate_object->map()->inobject_properties();
11245 HInstruction* value_instruction =
11246 Add<HConstant>(isolate()->factory()->one_pointer_filler_map());
11247 for (int i = copied_fields; i < inobject_properties; i++) {
11248 DCHECK(boilerplate_object->IsJSObject());
11249 int property_offset = boilerplate_object->GetInObjectPropertyOffset(i);
11250 HObjectAccess access =
11251 HObjectAccess::ForMapAndOffset(boilerplate_map, property_offset);
11252 Add<HStoreNamedField>(object, access, value_instruction);
11257 void HOptimizedGraphBuilder::BuildEmitElements(
11258 Handle<JSObject> boilerplate_object,
11259 Handle<FixedArrayBase> elements,
11260 HValue* object_elements,
11261 AllocationSiteUsageContext* site_context) {
11262 ElementsKind kind = boilerplate_object->map()->elements_kind();
11263 int elements_length = elements->length();
11264 HValue* object_elements_length = Add<HConstant>(elements_length);
11265 BuildInitializeElementsHeader(object_elements, kind, object_elements_length);
11267 // Copy elements backing store content.
11268 if (elements->IsFixedDoubleArray()) {
11269 BuildEmitFixedDoubleArray(elements, kind, object_elements);
11270 } else if (elements->IsFixedArray()) {
11271 BuildEmitFixedArray(elements, kind, object_elements,
11279 void HOptimizedGraphBuilder::BuildEmitFixedDoubleArray(
11280 Handle<FixedArrayBase> elements,
11282 HValue* object_elements) {
11283 HInstruction* boilerplate_elements = Add<HConstant>(elements);
11284 int elements_length = elements->length();
11285 for (int i = 0; i < elements_length; i++) {
11286 HValue* key_constant = Add<HConstant>(i);
11287 HInstruction* value_instruction =
11288 Add<HLoadKeyed>(boilerplate_elements, key_constant,
11289 static_cast<HValue*>(NULL), kind,
11290 ALLOW_RETURN_HOLE);
11291 HInstruction* store = Add<HStoreKeyed>(object_elements, key_constant,
11292 value_instruction, kind);
11293 store->SetFlag(HValue::kAllowUndefinedAsNaN);
11298 void HOptimizedGraphBuilder::BuildEmitFixedArray(
11299 Handle<FixedArrayBase> elements,
11301 HValue* object_elements,
11302 AllocationSiteUsageContext* site_context) {
11303 HInstruction* boilerplate_elements = Add<HConstant>(elements);
11304 int elements_length = elements->length();
11305 Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
11306 for (int i = 0; i < elements_length; i++) {
11307 Handle<Object> value(fast_elements->get(i), isolate());
11308 HValue* key_constant = Add<HConstant>(i);
11309 if (value->IsJSObject()) {
11310 Handle<JSObject> value_object = Handle<JSObject>::cast(value);
11311 Handle<AllocationSite> current_site = site_context->EnterNewScope();
11312 HInstruction* result =
11313 BuildFastLiteral(value_object, site_context);
11314 site_context->ExitScope(current_site, value_object);
11315 Add<HStoreKeyed>(object_elements, key_constant, result, kind);
11317 HInstruction* value_instruction =
11318 Add<HLoadKeyed>(boilerplate_elements, key_constant,
11319 static_cast<HValue*>(NULL), kind,
11320 ALLOW_RETURN_HOLE);
11321 Add<HStoreKeyed>(object_elements, key_constant, value_instruction, kind);
11327 void HOptimizedGraphBuilder::VisitThisFunction(ThisFunction* expr) {
11328 DCHECK(!HasStackOverflow());
11329 DCHECK(current_block() != NULL);
11330 DCHECK(current_block()->HasPredecessor());
11331 HInstruction* instr = BuildThisFunction();
11332 return ast_context()->ReturnInstruction(instr, expr->id());
11336 void HOptimizedGraphBuilder::VisitSuperReference(SuperReference* expr) {
11337 DCHECK(!HasStackOverflow());
11338 DCHECK(current_block() != NULL);
11339 DCHECK(current_block()->HasPredecessor());
11340 return Bailout(kSuperReference);
11344 void HOptimizedGraphBuilder::VisitDeclarations(
11345 ZoneList<Declaration*>* declarations) {
11346 DCHECK(globals_.is_empty());
11347 AstVisitor::VisitDeclarations(declarations);
11348 if (!globals_.is_empty()) {
11349 Handle<FixedArray> array =
11350 isolate()->factory()->NewFixedArray(globals_.length(), TENURED);
11351 for (int i = 0; i < globals_.length(); ++i) array->set(i, *globals_.at(i));
11352 int flags = DeclareGlobalsEvalFlag::encode(current_info()->is_eval()) |
11353 DeclareGlobalsNativeFlag::encode(current_info()->is_native()) |
11354 DeclareGlobalsStrictMode::encode(current_info()->strict_mode());
11355 Add<HDeclareGlobals>(array, flags);
11356 globals_.Rewind(0);
11361 void HOptimizedGraphBuilder::VisitVariableDeclaration(
11362 VariableDeclaration* declaration) {
11363 VariableProxy* proxy = declaration->proxy();
11364 VariableMode mode = declaration->mode();
11365 Variable* variable = proxy->var();
11366 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
11367 switch (variable->location()) {
11368 case Variable::UNALLOCATED:
11369 globals_.Add(variable->name(), zone());
11370 globals_.Add(variable->binding_needs_init()
11371 ? isolate()->factory()->the_hole_value()
11372 : isolate()->factory()->undefined_value(), zone());
11374 case Variable::PARAMETER:
11375 case Variable::LOCAL:
11377 HValue* value = graph()->GetConstantHole();
11378 environment()->Bind(variable, value);
11381 case Variable::CONTEXT:
11383 HValue* value = graph()->GetConstantHole();
11384 HValue* context = environment()->context();
11385 HStoreContextSlot* store = Add<HStoreContextSlot>(
11386 context, variable->index(), HStoreContextSlot::kNoCheck, value);
11387 if (store->HasObservableSideEffects()) {
11388 Add<HSimulate>(proxy->id(), REMOVABLE_SIMULATE);
11392 case Variable::LOOKUP:
11393 return Bailout(kUnsupportedLookupSlotInDeclaration);
11398 void HOptimizedGraphBuilder::VisitFunctionDeclaration(
11399 FunctionDeclaration* declaration) {
11400 VariableProxy* proxy = declaration->proxy();
11401 Variable* variable = proxy->var();
11402 switch (variable->location()) {
11403 case Variable::UNALLOCATED: {
11404 globals_.Add(variable->name(), zone());
11405 Handle<SharedFunctionInfo> function = Compiler::BuildFunctionInfo(
11406 declaration->fun(), current_info()->script(), top_info());
11407 // Check for stack-overflow exception.
11408 if (function.is_null()) return SetStackOverflow();
11409 globals_.Add(function, zone());
11412 case Variable::PARAMETER:
11413 case Variable::LOCAL: {
11414 CHECK_ALIVE(VisitForValue(declaration->fun()));
11415 HValue* value = Pop();
11416 BindIfLive(variable, value);
11419 case Variable::CONTEXT: {
11420 CHECK_ALIVE(VisitForValue(declaration->fun()));
11421 HValue* value = Pop();
11422 HValue* context = environment()->context();
11423 HStoreContextSlot* store = Add<HStoreContextSlot>(
11424 context, variable->index(), HStoreContextSlot::kNoCheck, value);
11425 if (store->HasObservableSideEffects()) {
11426 Add<HSimulate>(proxy->id(), REMOVABLE_SIMULATE);
11430 case Variable::LOOKUP:
11431 return Bailout(kUnsupportedLookupSlotInDeclaration);
11436 void HOptimizedGraphBuilder::VisitModuleDeclaration(
11437 ModuleDeclaration* declaration) {
11442 void HOptimizedGraphBuilder::VisitImportDeclaration(
11443 ImportDeclaration* declaration) {
11448 void HOptimizedGraphBuilder::VisitExportDeclaration(
11449 ExportDeclaration* declaration) {
11454 void HOptimizedGraphBuilder::VisitModuleLiteral(ModuleLiteral* module) {
11459 void HOptimizedGraphBuilder::VisitModuleVariable(ModuleVariable* module) {
11464 void HOptimizedGraphBuilder::VisitModulePath(ModulePath* module) {
11469 void HOptimizedGraphBuilder::VisitModuleUrl(ModuleUrl* module) {
11474 void HOptimizedGraphBuilder::VisitModuleStatement(ModuleStatement* stmt) {
11479 // Generators for inline runtime functions.
11480 // Support for types.
11481 void HOptimizedGraphBuilder::GenerateIsSmi(CallRuntime* call) {
11482 DCHECK(call->arguments()->length() == 1);
11483 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11484 HValue* value = Pop();
11485 HIsSmiAndBranch* result = New<HIsSmiAndBranch>(value);
11486 return ast_context()->ReturnControl(result, call->id());
11490 void HOptimizedGraphBuilder::GenerateIsSpecObject(CallRuntime* call) {
11491 DCHECK(call->arguments()->length() == 1);
11492 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11493 HValue* value = Pop();
11494 HHasInstanceTypeAndBranch* result =
11495 New<HHasInstanceTypeAndBranch>(value,
11496 FIRST_SPEC_OBJECT_TYPE,
11497 LAST_SPEC_OBJECT_TYPE);
11498 return ast_context()->ReturnControl(result, call->id());
11502 void HOptimizedGraphBuilder::GenerateIsFunction(CallRuntime* call) {
11503 DCHECK(call->arguments()->length() == 1);
11504 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11505 HValue* value = Pop();
11506 HHasInstanceTypeAndBranch* result =
11507 New<HHasInstanceTypeAndBranch>(value, JS_FUNCTION_TYPE);
11508 return ast_context()->ReturnControl(result, call->id());
11512 void HOptimizedGraphBuilder::GenerateIsMinusZero(CallRuntime* call) {
11513 DCHECK(call->arguments()->length() == 1);
11514 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11515 HValue* value = Pop();
11516 HCompareMinusZeroAndBranch* result = New<HCompareMinusZeroAndBranch>(value);
11517 return ast_context()->ReturnControl(result, call->id());
11521 void HOptimizedGraphBuilder::GenerateHasCachedArrayIndex(CallRuntime* call) {
11522 DCHECK(call->arguments()->length() == 1);
11523 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11524 HValue* value = Pop();
11525 HHasCachedArrayIndexAndBranch* result =
11526 New<HHasCachedArrayIndexAndBranch>(value);
11527 return ast_context()->ReturnControl(result, call->id());
11531 void HOptimizedGraphBuilder::GenerateIsArray(CallRuntime* call) {
11532 DCHECK(call->arguments()->length() == 1);
11533 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11534 HValue* value = Pop();
11535 HHasInstanceTypeAndBranch* result =
11536 New<HHasInstanceTypeAndBranch>(value, JS_ARRAY_TYPE);
11537 return ast_context()->ReturnControl(result, call->id());
11541 void HOptimizedGraphBuilder::GenerateIsRegExp(CallRuntime* call) {
11542 DCHECK(call->arguments()->length() == 1);
11543 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11544 HValue* value = Pop();
11545 HHasInstanceTypeAndBranch* result =
11546 New<HHasInstanceTypeAndBranch>(value, JS_REGEXP_TYPE);
11547 return ast_context()->ReturnControl(result, call->id());
11551 void HOptimizedGraphBuilder::GenerateIsObject(CallRuntime* call) {
11552 DCHECK(call->arguments()->length() == 1);
11553 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11554 HValue* value = Pop();
11555 HIsObjectAndBranch* result = New<HIsObjectAndBranch>(value);
11556 return ast_context()->ReturnControl(result, call->id());
11560 void HOptimizedGraphBuilder::GenerateIsJSProxy(CallRuntime* call) {
11561 DCHECK(call->arguments()->length() == 1);
11562 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11563 HValue* value = Pop();
11564 HIfContinuation continuation;
11565 IfBuilder if_proxy(this);
11567 HValue* smicheck = if_proxy.IfNot<HIsSmiAndBranch>(value);
11569 HValue* map = Add<HLoadNamedField>(value, smicheck, HObjectAccess::ForMap());
11570 HValue* instance_type = Add<HLoadNamedField>(
11571 map, static_cast<HValue*>(NULL), HObjectAccess::ForMapInstanceType());
11572 if_proxy.If<HCompareNumericAndBranch>(
11573 instance_type, Add<HConstant>(FIRST_JS_PROXY_TYPE), Token::GTE);
11575 if_proxy.If<HCompareNumericAndBranch>(
11576 instance_type, Add<HConstant>(LAST_JS_PROXY_TYPE), Token::LTE);
11578 if_proxy.CaptureContinuation(&continuation);
11579 return ast_context()->ReturnContinuation(&continuation, call->id());
11583 void HOptimizedGraphBuilder::GenerateIsNonNegativeSmi(CallRuntime* call) {
11584 return Bailout(kInlinedRuntimeFunctionIsNonNegativeSmi);
11588 void HOptimizedGraphBuilder::GenerateIsUndetectableObject(CallRuntime* call) {
11589 DCHECK(call->arguments()->length() == 1);
11590 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11591 HValue* value = Pop();
11592 HIsUndetectableAndBranch* result = New<HIsUndetectableAndBranch>(value);
11593 return ast_context()->ReturnControl(result, call->id());
11597 void HOptimizedGraphBuilder::GenerateIsStringWrapperSafeForDefaultValueOf(
11598 CallRuntime* call) {
11599 return Bailout(kInlinedRuntimeFunctionIsStringWrapperSafeForDefaultValueOf);
11603 // Support for construct call checks.
11604 void HOptimizedGraphBuilder::GenerateIsConstructCall(CallRuntime* call) {
11605 DCHECK(call->arguments()->length() == 0);
11606 if (function_state()->outer() != NULL) {
11607 // We are generating graph for inlined function.
11608 HValue* value = function_state()->inlining_kind() == CONSTRUCT_CALL_RETURN
11609 ? graph()->GetConstantTrue()
11610 : graph()->GetConstantFalse();
11611 return ast_context()->ReturnValue(value);
11613 return ast_context()->ReturnControl(New<HIsConstructCallAndBranch>(),
11619 // Support for arguments.length and arguments[?].
11620 void HOptimizedGraphBuilder::GenerateArgumentsLength(CallRuntime* call) {
11621 DCHECK(call->arguments()->length() == 0);
11622 HInstruction* result = NULL;
11623 if (function_state()->outer() == NULL) {
11624 HInstruction* elements = Add<HArgumentsElements>(false);
11625 result = New<HArgumentsLength>(elements);
11627 // Number of arguments without receiver.
11628 int argument_count = environment()->
11629 arguments_environment()->parameter_count() - 1;
11630 result = New<HConstant>(argument_count);
11632 return ast_context()->ReturnInstruction(result, call->id());
11636 void HOptimizedGraphBuilder::GenerateArguments(CallRuntime* call) {
11637 DCHECK(call->arguments()->length() == 1);
11638 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11639 HValue* index = Pop();
11640 HInstruction* result = NULL;
11641 if (function_state()->outer() == NULL) {
11642 HInstruction* elements = Add<HArgumentsElements>(false);
11643 HInstruction* length = Add<HArgumentsLength>(elements);
11644 HInstruction* checked_index = Add<HBoundsCheck>(index, length);
11645 result = New<HAccessArgumentsAt>(elements, length, checked_index);
11647 EnsureArgumentsArePushedForAccess();
11649 // Number of arguments without receiver.
11650 HInstruction* elements = function_state()->arguments_elements();
11651 int argument_count = environment()->
11652 arguments_environment()->parameter_count() - 1;
11653 HInstruction* length = Add<HConstant>(argument_count);
11654 HInstruction* checked_key = Add<HBoundsCheck>(index, length);
11655 result = New<HAccessArgumentsAt>(elements, length, checked_key);
11657 return ast_context()->ReturnInstruction(result, call->id());
11661 // Support for accessing the class and value fields of an object.
11662 void HOptimizedGraphBuilder::GenerateClassOf(CallRuntime* call) {
11663 // The special form detected by IsClassOfTest is detected before we get here
11664 // and does not cause a bailout.
11665 return Bailout(kInlinedRuntimeFunctionClassOf);
11669 void HOptimizedGraphBuilder::GenerateValueOf(CallRuntime* call) {
11670 DCHECK(call->arguments()->length() == 1);
11671 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11672 HValue* object = Pop();
11674 IfBuilder if_objectisvalue(this);
11675 HValue* objectisvalue = if_objectisvalue.If<HHasInstanceTypeAndBranch>(
11676 object, JS_VALUE_TYPE);
11677 if_objectisvalue.Then();
11679 // Return the actual value.
11680 Push(Add<HLoadNamedField>(
11681 object, objectisvalue,
11682 HObjectAccess::ForObservableJSObjectOffset(
11683 JSValue::kValueOffset)));
11684 Add<HSimulate>(call->id(), FIXED_SIMULATE);
11686 if_objectisvalue.Else();
11688 // If the object is not a value return the object.
11690 Add<HSimulate>(call->id(), FIXED_SIMULATE);
11692 if_objectisvalue.End();
11693 return ast_context()->ReturnValue(Pop());
11697 void HOptimizedGraphBuilder::GenerateDateField(CallRuntime* call) {
11698 DCHECK(call->arguments()->length() == 2);
11699 DCHECK_NE(NULL, call->arguments()->at(1)->AsLiteral());
11700 Smi* index = Smi::cast(*(call->arguments()->at(1)->AsLiteral()->value()));
11701 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11702 HValue* date = Pop();
11703 HDateField* result = New<HDateField>(date, index);
11704 return ast_context()->ReturnInstruction(result, call->id());
11708 void HOptimizedGraphBuilder::GenerateOneByteSeqStringSetChar(
11709 CallRuntime* call) {
11710 DCHECK(call->arguments()->length() == 3);
11711 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11712 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11713 CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
11714 HValue* string = Pop();
11715 HValue* value = Pop();
11716 HValue* index = Pop();
11717 Add<HSeqStringSetChar>(String::ONE_BYTE_ENCODING, string,
11719 Add<HSimulate>(call->id(), FIXED_SIMULATE);
11720 return ast_context()->ReturnValue(graph()->GetConstantUndefined());
11724 void HOptimizedGraphBuilder::GenerateTwoByteSeqStringSetChar(
11725 CallRuntime* call) {
11726 DCHECK(call->arguments()->length() == 3);
11727 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11728 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11729 CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
11730 HValue* string = Pop();
11731 HValue* value = Pop();
11732 HValue* index = Pop();
11733 Add<HSeqStringSetChar>(String::TWO_BYTE_ENCODING, string,
11735 Add<HSimulate>(call->id(), FIXED_SIMULATE);
11736 return ast_context()->ReturnValue(graph()->GetConstantUndefined());
11740 void HOptimizedGraphBuilder::GenerateSetValueOf(CallRuntime* call) {
11741 DCHECK(call->arguments()->length() == 2);
11742 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11743 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11744 HValue* value = Pop();
11745 HValue* object = Pop();
11747 // Check if object is a JSValue.
11748 IfBuilder if_objectisvalue(this);
11749 if_objectisvalue.If<HHasInstanceTypeAndBranch>(object, JS_VALUE_TYPE);
11750 if_objectisvalue.Then();
11752 // Create in-object property store to kValueOffset.
11753 Add<HStoreNamedField>(object,
11754 HObjectAccess::ForObservableJSObjectOffset(JSValue::kValueOffset),
11756 if (!ast_context()->IsEffect()) {
11759 Add<HSimulate>(call->id(), FIXED_SIMULATE);
11761 if_objectisvalue.Else();
11763 // Nothing to do in this case.
11764 if (!ast_context()->IsEffect()) {
11767 Add<HSimulate>(call->id(), FIXED_SIMULATE);
11769 if_objectisvalue.End();
11770 if (!ast_context()->IsEffect()) {
11773 return ast_context()->ReturnValue(value);
11777 // Fast support for charCodeAt(n).
11778 void HOptimizedGraphBuilder::GenerateStringCharCodeAt(CallRuntime* call) {
11779 DCHECK(call->arguments()->length() == 2);
11780 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11781 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11782 HValue* index = Pop();
11783 HValue* string = Pop();
11784 HInstruction* result = BuildStringCharCodeAt(string, index);
11785 return ast_context()->ReturnInstruction(result, call->id());
11789 // Fast support for string.charAt(n) and string[n].
11790 void HOptimizedGraphBuilder::GenerateStringCharFromCode(CallRuntime* call) {
11791 DCHECK(call->arguments()->length() == 1);
11792 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11793 HValue* char_code = Pop();
11794 HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
11795 return ast_context()->ReturnInstruction(result, call->id());
11799 // Fast support for string.charAt(n) and string[n].
11800 void HOptimizedGraphBuilder::GenerateStringCharAt(CallRuntime* call) {
11801 DCHECK(call->arguments()->length() == 2);
11802 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11803 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11804 HValue* index = Pop();
11805 HValue* string = Pop();
11806 HInstruction* char_code = BuildStringCharCodeAt(string, index);
11807 AddInstruction(char_code);
11808 HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
11809 return ast_context()->ReturnInstruction(result, call->id());
11813 // Fast support for object equality testing.
11814 void HOptimizedGraphBuilder::GenerateObjectEquals(CallRuntime* call) {
11815 DCHECK(call->arguments()->length() == 2);
11816 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11817 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11818 HValue* right = Pop();
11819 HValue* left = Pop();
11820 HCompareObjectEqAndBranch* result =
11821 New<HCompareObjectEqAndBranch>(left, right);
11822 return ast_context()->ReturnControl(result, call->id());
11826 // Fast support for StringAdd.
11827 void HOptimizedGraphBuilder::GenerateStringAdd(CallRuntime* call) {
11828 DCHECK_EQ(2, call->arguments()->length());
11829 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11830 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11831 HValue* right = Pop();
11832 HValue* left = Pop();
11833 HInstruction* result = NewUncasted<HStringAdd>(left, right);
11834 return ast_context()->ReturnInstruction(result, call->id());
11838 // Fast support for SubString.
11839 void HOptimizedGraphBuilder::GenerateSubString(CallRuntime* call) {
11840 DCHECK_EQ(3, call->arguments()->length());
11841 CHECK_ALIVE(VisitExpressions(call->arguments()));
11842 PushArgumentsFromEnvironment(call->arguments()->length());
11843 HCallStub* result = New<HCallStub>(CodeStub::SubString, 3);
11844 return ast_context()->ReturnInstruction(result, call->id());
11848 // Fast support for StringCompare.
11849 void HOptimizedGraphBuilder::GenerateStringCompare(CallRuntime* call) {
11850 DCHECK_EQ(2, call->arguments()->length());
11851 CHECK_ALIVE(VisitExpressions(call->arguments()));
11852 PushArgumentsFromEnvironment(call->arguments()->length());
11853 HCallStub* result = New<HCallStub>(CodeStub::StringCompare, 2);
11854 return ast_context()->ReturnInstruction(result, call->id());
11858 // Support for direct calls from JavaScript to native RegExp code.
11859 void HOptimizedGraphBuilder::GenerateRegExpExec(CallRuntime* call) {
11860 DCHECK_EQ(4, call->arguments()->length());
11861 CHECK_ALIVE(VisitExpressions(call->arguments()));
11862 PushArgumentsFromEnvironment(call->arguments()->length());
11863 HCallStub* result = New<HCallStub>(CodeStub::RegExpExec, 4);
11864 return ast_context()->ReturnInstruction(result, call->id());
11868 void HOptimizedGraphBuilder::GenerateDoubleLo(CallRuntime* call) {
11869 DCHECK_EQ(1, call->arguments()->length());
11870 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11871 HValue* value = Pop();
11872 HInstruction* result = NewUncasted<HDoubleBits>(value, HDoubleBits::LOW);
11873 return ast_context()->ReturnInstruction(result, call->id());
11877 void HOptimizedGraphBuilder::GenerateDoubleHi(CallRuntime* call) {
11878 DCHECK_EQ(1, call->arguments()->length());
11879 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11880 HValue* value = Pop();
11881 HInstruction* result = NewUncasted<HDoubleBits>(value, HDoubleBits::HIGH);
11882 return ast_context()->ReturnInstruction(result, call->id());
11886 void HOptimizedGraphBuilder::GenerateConstructDouble(CallRuntime* call) {
11887 DCHECK_EQ(2, call->arguments()->length());
11888 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11889 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11890 HValue* lo = Pop();
11891 HValue* hi = Pop();
11892 HInstruction* result = NewUncasted<HConstructDouble>(hi, lo);
11893 return ast_context()->ReturnInstruction(result, call->id());
11897 // Construct a RegExp exec result with two in-object properties.
11898 void HOptimizedGraphBuilder::GenerateRegExpConstructResult(CallRuntime* call) {
11899 DCHECK_EQ(3, call->arguments()->length());
11900 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11901 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11902 CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
11903 HValue* input = Pop();
11904 HValue* index = Pop();
11905 HValue* length = Pop();
11906 HValue* result = BuildRegExpConstructResult(length, index, input);
11907 return ast_context()->ReturnValue(result);
11911 // Support for fast native caches.
11912 void HOptimizedGraphBuilder::GenerateGetFromCache(CallRuntime* call) {
11913 return Bailout(kInlinedRuntimeFunctionGetFromCache);
11917 // Fast support for number to string.
11918 void HOptimizedGraphBuilder::GenerateNumberToString(CallRuntime* call) {
11919 DCHECK_EQ(1, call->arguments()->length());
11920 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11921 HValue* number = Pop();
11922 HValue* result = BuildNumberToString(number, Type::Any(zone()));
11923 return ast_context()->ReturnValue(result);
11927 // Fast call for custom callbacks.
11928 void HOptimizedGraphBuilder::GenerateCallFunction(CallRuntime* call) {
11929 // 1 ~ The function to call is not itself an argument to the call.
11930 int arg_count = call->arguments()->length() - 1;
11931 DCHECK(arg_count >= 1); // There's always at least a receiver.
11933 CHECK_ALIVE(VisitExpressions(call->arguments()));
11934 // The function is the last argument
11935 HValue* function = Pop();
11936 // Push the arguments to the stack
11937 PushArgumentsFromEnvironment(arg_count);
11939 IfBuilder if_is_jsfunction(this);
11940 if_is_jsfunction.If<HHasInstanceTypeAndBranch>(function, JS_FUNCTION_TYPE);
11942 if_is_jsfunction.Then();
11944 HInstruction* invoke_result =
11945 Add<HInvokeFunction>(function, arg_count);
11946 if (!ast_context()->IsEffect()) {
11947 Push(invoke_result);
11949 Add<HSimulate>(call->id(), FIXED_SIMULATE);
11952 if_is_jsfunction.Else();
11954 HInstruction* call_result =
11955 Add<HCallFunction>(function, arg_count);
11956 if (!ast_context()->IsEffect()) {
11959 Add<HSimulate>(call->id(), FIXED_SIMULATE);
11961 if_is_jsfunction.End();
11963 if (ast_context()->IsEffect()) {
11964 // EffectContext::ReturnValue ignores the value, so we can just pass
11965 // 'undefined' (as we do not have the call result anymore).
11966 return ast_context()->ReturnValue(graph()->GetConstantUndefined());
11968 return ast_context()->ReturnValue(Pop());
11973 // Fast call to math functions.
11974 void HOptimizedGraphBuilder::GenerateMathPow(CallRuntime* call) {
11975 DCHECK_EQ(2, call->arguments()->length());
11976 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11977 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11978 HValue* right = Pop();
11979 HValue* left = Pop();
11980 HInstruction* result = NewUncasted<HPower>(left, right);
11981 return ast_context()->ReturnInstruction(result, call->id());
11985 void HOptimizedGraphBuilder::GenerateMathLogRT(CallRuntime* call) {
11986 DCHECK(call->arguments()->length() == 1);
11987 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11988 HValue* value = Pop();
11989 HInstruction* result = NewUncasted<HUnaryMathOperation>(value, kMathLog);
11990 return ast_context()->ReturnInstruction(result, call->id());
11994 void HOptimizedGraphBuilder::GenerateMathSqrtRT(CallRuntime* call) {
11995 DCHECK(call->arguments()->length() == 1);
11996 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11997 HValue* value = Pop();
11998 HInstruction* result = NewUncasted<HUnaryMathOperation>(value, kMathSqrt);
11999 return ast_context()->ReturnInstruction(result, call->id());
12003 void HOptimizedGraphBuilder::GenerateGetCachedArrayIndex(CallRuntime* call) {
12004 DCHECK(call->arguments()->length() == 1);
12005 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12006 HValue* value = Pop();
12007 HGetCachedArrayIndex* result = New<HGetCachedArrayIndex>(value);
12008 return ast_context()->ReturnInstruction(result, call->id());
12012 void HOptimizedGraphBuilder::GenerateFastOneByteArrayJoin(CallRuntime* call) {
12013 return Bailout(kInlinedRuntimeFunctionFastOneByteArrayJoin);
12017 // Support for generators.
12018 void HOptimizedGraphBuilder::GenerateGeneratorNext(CallRuntime* call) {
12019 return Bailout(kInlinedRuntimeFunctionGeneratorNext);
// Throwing into a generator is not supported in optimized code: bail out.
12023 void HOptimizedGraphBuilder::GenerateGeneratorThrow(CallRuntime* call) {
12024 return Bailout(kInlinedRuntimeFunctionGeneratorThrow);
// Emits an explicit HDebugBreak instruction and returns the Smi 0 constant
// as the intrinsic's value.
12028 void HOptimizedGraphBuilder::GenerateDebugBreakInOptimizedCode(
12029 CallRuntime* call) {
12030 Add<HDebugBreak>();
12031 return ast_context()->ReturnValue(graph()->GetConstant0());
// Loads the isolate's debug_is_active flag through an external reference
// and returns it as the intrinsic's value.
12035 void HOptimizedGraphBuilder::GenerateDebugIsActive(CallRuntime* call) {
12036 DCHECK(call->arguments()->length() == 0);
// NOTE(review): the listing is missing the original line that declares
// `ref` (presumably `HValue* ref =`) before this Add<HConstant> — confirm
// against the full file.
12038 Add<HConstant>(ExternalReference::debug_is_active_address(isolate()));
12039 HValue* value = Add<HLoadNamedField>(
12040 ref, static_cast<HValue*>(NULL), HObjectAccess::ForExternalUInteger8());
12041 return ast_context()->ReturnValue(value);
12045 #undef CHECK_BAILOUT
// Constructs a JS_FUNCTION-frame environment for a closure; sizes the
// value list from the declaration scope (parameters incl. receiver plus
// stack slots).
// NOTE(review): the listing omits several original lines of the parameter
// list and member-initializer list (e.g. the scope/zone parameters and
// other members initialized between these lines) — confirm against the
// full file before editing.
12049 HEnvironment::HEnvironment(HEnvironment* outer,
12051 Handle<JSFunction> closure,
12053 : closure_(closure),
12055 frame_type_(JS_FUNCTION),
12056 parameter_count_(0),
12057 specials_count_(1),
12063 ast_id_(BailoutId::None()),
12065 Scope* declaration_scope = scope->DeclarationScope();
// +1 accounts for the receiver in addition to declared parameters.
12066 Initialize(declaration_scope->num_parameters() + 1,
12067 declaration_scope->num_stack_slots(), 0);
// Constructs a minimal environment with only parameters (no locals, no
// expression stack). Used where no closure/scope is available.
// NOTE(review): several member-initializer lines are missing from this
// listing (gaps in the original line numbering) — confirm against the
// full file.
12071 HEnvironment::HEnvironment(Zone* zone, int parameter_count)
12072 : values_(0, zone),
12074 parameter_count_(parameter_count),
12075 specials_count_(1),
12081 ast_id_(BailoutId::None()),
12083 Initialize(parameter_count, 0, 0);
// Copy constructor; counts are zeroed here and then filled in by
// Initialize(other) (the call is in an original line missing from this
// listing — confirm against the full file).
12087 HEnvironment::HEnvironment(const HEnvironment* other, Zone* zone)
12088 : values_(0, zone),
12089 frame_type_(JS_FUNCTION),
12090 parameter_count_(0),
12091 specials_count_(0),
12097 ast_id_(other->ast_id()),
// Constructs an artificial (stub) frame environment of the given
// frame_type holding `arguments` values; no specials slot.
// NOTE(review): some member-initializer lines and the constructor body
// are missing from this listing — confirm against the full file.
12103 HEnvironment::HEnvironment(HEnvironment* outer,
12104 Handle<JSFunction> closure,
12105 FrameType frame_type,
12108 : closure_(closure),
12109 values_(arguments, zone),
12110 frame_type_(frame_type),
12111 parameter_count_(arguments),
12112 specials_count_(0),
12118 ast_id_(BailoutId::None()),
// Sizes the environment: records the counts and pre-fills the value list
// with NULLs for parameters, specials, locals and the initial stack
// height.
12123 void HEnvironment::Initialize(int parameter_count,
12125 int stack_height) {
12126 parameter_count_ = parameter_count;
12127 local_count_ = local_count;
12129 // Avoid reallocating the temporaries' backing store on the first Push.
12130 int total = parameter_count + specials_count_ + local_count + stack_height;
12131 values_.Initialize(total + 4, zone());
12132 for (int i = 0; i < total; ++i) values_.Add(NULL, zone());
// Copies all state from `other`, including a deep copy of the outer
// environment chain and the full push/pop history.
12136 void HEnvironment::Initialize(const HEnvironment* other) {
12137 closure_ = other->closure();
12138 values_.AddAll(other->values_, zone());
12139 assigned_variables_.Union(other->assigned_variables_, zone());
12140 frame_type_ = other->frame_type_;
12141 parameter_count_ = other->parameter_count_;
12142 local_count_ = other->local_count_;
12143 if (other->outer_ != NULL) outer_ = other->outer_->Copy(); // Deep copy.
12144 entry_ = other->entry_;
12145 pop_count_ = other->pop_count_;
12146 push_count_ = other->push_count_;
12147 specials_count_ = other->specials_count_;
12148 ast_id_ = other->ast_id_;
// Merges `other`'s values into this environment at a (non-loop-header)
// join block: extends existing phis owned by `block`, and creates a new
// phi wherever the two environments disagree on a value.
12152 void HEnvironment::AddIncomingEdge(HBasicBlock* block, HEnvironment* other) {
12153 DCHECK(!block->IsLoopHeader());
12154 DCHECK(values_.length() == other->values_.length());
12156 int length = values_.length();
12157 for (int i = 0; i < length; ++i) {
12158 HValue* value = values_[i];
12159 if (value != NULL && value->IsPhi() && value->block() == block) {
12160 // There is already a phi for the i'th value.
12161 HPhi* phi = HPhi::cast(value);
12162 // Assert index is correct and that we haven't missed an incoming edge.
12163 DCHECK(phi->merged_index() == i || !phi->HasMergedIndex());
12164 DCHECK(phi->OperandCount() == block->predecessors()->length());
12165 phi->AddInput(other->values_[i]);
12166 } else if (values_[i] != other->values_[i]) {
12167 // There is a fresh value on the incoming edge, a phi is needed.
12168 DCHECK(values_[i] != NULL && other->values_[i] != NULL);
12169 HPhi* phi = block->AddNewPhi(i);
12170 HValue* old_value = values_[i];
// Seed the phi with the old value for every already-seen predecessor
// edge, then add the new edge's value last.
12171 for (int j = 0; j < block->predecessors()->length(); j++) {
12172 phi->AddInput(old_value);
12174 phi->AddInput(other->values_[i]);
12175 this->values_[i] = phi;
// Binds a value to slot `index` and records the slot as assigned (used
// later for phi insertion at merges).
12181 void HEnvironment::Bind(int index, HValue* value) {
12182 DCHECK(value != NULL);
12183 assigned_variables_.Add(index, zone());
12184 values_[index] = value;
// True if slot `index` lies in the expression-stack region, i.e. past the
// parameters, specials and locals.
12188 bool HEnvironment::HasExpressionAt(int index) const {
12189 return index >= parameter_count_ + specials_count_ + local_count_;
// True when no expression-stack slots are in use (environment length is
// exactly the first expression index).
12193 bool HEnvironment::ExpressionStackIsEmpty() const {
12194 DCHECK(length() >= first_expression_index());
12195 return length() == first_expression_index();
// Overwrites the expression-stack slot `index_from_top` entries below the
// top, adjusting pop/push history so simulates see the new value.
12199 void HEnvironment::SetExpressionStackAt(int index_from_top, HValue* value) {
12200 int count = index_from_top + 1;
12201 int index = values_.length() - count;
12202 DCHECK(HasExpressionAt(index));
12203 // The push count must include at least the element in question or else
12204 // the new value will not be included in this environment's history.
12205 if (push_count_ < count) {
12206 // This is the same effect as popping then re-pushing 'count' elements.
12207 pop_count_ += (count - push_count_);
12208 push_count_ = count;
12210 values_[index] = value;
// Removes and returns the expression-stack value `index_from_top` entries
// below the top, keeping the pop/push history consistent.
12214 HValue* HEnvironment::RemoveExpressionStackAt(int index_from_top) {
12215 int count = index_from_top + 1;
12216 int index = values_.length() - count;
12217 DCHECK(HasExpressionAt(index));
12218 // Simulate popping 'count' elements and then
12219 // pushing 'count - 1' elements back.
12220 pop_count_ += Max(count - push_count_, 0);
12221 push_count_ = Max(push_count_ - count, 0) + (count - 1);
12222 return values_.Remove(index);
// Drops `count` values from the expression stack.
// NOTE(review): the loop body (presumably a Pop()-style call) is missing
// from this listing — confirm against the full file.
12226 void HEnvironment::Drop(int count) {
12227 for (int i = 0; i < count; ++i) {
// Zone-allocates a copy of this environment via the copy constructor.
12233 HEnvironment* HEnvironment::Copy() const {
12234 return new(zone()) HEnvironment(this, zone());
// Copies the environment and clears the push/pop history on the copy.
// NOTE(review): the `return result;` line is missing from this listing —
// confirm against the full file.
12238 HEnvironment* HEnvironment::CopyWithoutHistory() const {
12239 HEnvironment* result = Copy();
12240 result->ClearHistory();
// Copies the environment for a loop header: every value is replaced by a
// fresh phi in `loop_header` seeded with the incoming value, and history
// is cleared on the copy.
12245 HEnvironment* HEnvironment::CopyAsLoopHeader(HBasicBlock* loop_header) const {
12246 HEnvironment* new_env = Copy();
12247 for (int i = 0; i < values_.length(); ++i) {
12248 HPhi* phi = loop_header->AddNewPhi(i);
12249 phi->AddInput(values_[i]);
12250 new_env->values_[i] = phi;
12252 new_env->ClearHistory();
// Builds an artificial frame environment of the given type on top of
// `outer`, pushing the receiver and `arguments` values copied from this
// environment's expression stack (in bottom-up order).
12257 HEnvironment* HEnvironment::CreateStubEnvironment(HEnvironment* outer,
12258 Handle<JSFunction> target,
12259 FrameType frame_type,
12260 int arguments) const {
12261 HEnvironment* new_env =
12262 new(zone()) HEnvironment(outer, target, frame_type,
12263 arguments + 1, zone());
12264 for (int i = 0; i <= arguments; ++i) { // Include receiver.
12265 new_env->Push(ExpressionStackAt(arguments - i));
12267 new_env->ClearHistory();
// Builds the environment chain for inlining `target` at the current call
// site: an outer copy without the call's arguments, optional artificial
// stub frames (construct/getter/setter, arguments adaptor on arity
// mismatch), and an inner JS_FUNCTION environment populated with the
// receiver, arguments (padded with undefined), and context.
12272 HEnvironment* HEnvironment::CopyForInlining(
12273 Handle<JSFunction> target,
12275 FunctionLiteral* function,
12276 HConstant* undefined,
12277 InliningKind inlining_kind) const {
12278 DCHECK(frame_type() == JS_FUNCTION);
12280 // Outer environment is a copy of this one without the arguments.
12281 int arity = function->scope()->num_parameters();
12283 HEnvironment* outer = Copy();
12284 outer->Drop(arguments + 1); // Including receiver.
12285 outer->ClearHistory();
12287 if (inlining_kind == CONSTRUCT_CALL_RETURN) {
12288 // Create artificial constructor stub environment. The receiver should
12289 // actually be the constructor function, but we pass the newly allocated
12290 // object instead, DoComputeConstructStubFrame() relies on that.
12291 outer = CreateStubEnvironment(outer, target, JS_CONSTRUCT, arguments);
12292 } else if (inlining_kind == GETTER_CALL_RETURN) {
12293 // We need an additional StackFrame::INTERNAL frame for restoring the
12294 // correct context.
12295 outer = CreateStubEnvironment(outer, target, JS_GETTER, arguments);
12296 } else if (inlining_kind == SETTER_CALL_RETURN) {
12297 // We need an additional StackFrame::INTERNAL frame for temporarily saving
12298 // the argument of the setter, see StoreStubCompiler::CompileStoreViaSetter.
12299 outer = CreateStubEnvironment(outer, target, JS_SETTER, arguments);
12302 if (arity != arguments) {
12303 // Create artificial arguments adaptation environment.
12304 outer = CreateStubEnvironment(outer, target, ARGUMENTS_ADAPTOR, arguments);
12307 HEnvironment* inner =
12308 new(zone()) HEnvironment(outer, function->scope(), target, zone());
12309 // Get the argument values from the original environment.
12310 for (int i = 0; i <= arity; ++i) { // Include receiver.
// Missing actual arguments are filled in with undefined.
12311 HValue* push = (i <= arguments) ?
12312 ExpressionStackAt(arguments - i) : undefined;
12313 inner->SetValueAt(i, push);
12315 inner->SetValueAt(arity + 1, context());
12316 for (int i = arity + 2; i < inner->length(); ++i) {
12317 inner->SetValueAt(i, undefined);
12320 inner->set_ast_id(BailoutId::FunctionEntry());
// Pretty-prints an environment, emitting section headers as the index
// crosses into the specials/locals/expressions regions.
12325 std::ostream& operator<<(std::ostream& os, const HEnvironment& env) {
12326 for (int i = 0; i < env.length(); i++) {
12327 if (i == 0) os << "parameters\n";
12328 if (i == env.parameter_count()) os << "specials\n";
12329 if (i == env.parameter_count() + env.specials_count()) os << "locals\n";
12330 if (i == env.parameter_count() + env.specials_count() + env.local_count()) {
12331 os << "expressions\n";
12333 HValue* val = env.values()->at(i);
// Emits the "compilation" tag of the hydrogen trace: function name and
// optimization id for optimized compiles, stub name otherwise, plus a
// timestamp.
// NOTE(review): the `} else {` lines separating the optimizing and stub
// branches are missing from this listing — confirm against the full file.
12346 void HTracer::TraceCompilation(CompilationInfo* info) {
12347 Tag tag(this, "compilation");
12348 if (info->IsOptimizing()) {
12349 Handle<String> name = info->function()->debug_name();
12350 PrintStringProperty("name", name->ToCString().get());
12352 trace_.Add("method \"%s:%d\"\n",
12353 name->ToCString().get(),
12354 info->optimization_id());
12356 CodeStub::Major major_key = info->code_stub()->MajorKey();
12357 PrintStringProperty("name", CodeStub::MajorName(major_key, false));
12358 PrintStringProperty("method", "stub");
12360 PrintLongProperty("date",
12361 static_cast<int64_t>(base::OS::TimeCurrentMillis()));
// Traces a lithium chunk (with its hydrogen graph). Handle dereferences
// are explicitly allowed because tracing runs on the main thread only.
12365 void HTracer::TraceLithium(const char* name, LChunk* chunk) {
12366 DCHECK(!chunk->isolate()->concurrent_recompilation_enabled());
12367 AllowHandleDereference allow_deref;
12368 AllowDeferredHandleDereference allow_deferred_deref;
12369 Trace(name, chunk->graph(), chunk);
// Traces a hydrogen graph without lithium information (chunk == NULL).
12373 void HTracer::TraceHydrogen(const char* name, HGraph* graph) {
12374 DCHECK(!graph->isolate()->concurrent_recompilation_enabled());
12375 AllowHandleDereference allow_deref;
12376 AllowDeferredHandleDereference allow_deferred_deref;
12377 Trace(name, graph, NULL);
// Writes one "cfg" section of the trace file: for every basic block its
// predecessors/successors, flags, dominator, loop depth, optional lithium
// instruction-index range, phis, HIR instructions and (when a chunk is
// given) LIR instructions.
12381 void HTracer::Trace(const char* name, HGraph* graph, LChunk* chunk) {
12382 Tag tag(this, "cfg");
12383 PrintStringProperty("name", name);
12384 const ZoneList<HBasicBlock*>* blocks = graph->blocks();
12385 for (int i = 0; i < blocks->length(); i++) {
12386 HBasicBlock* current = blocks->at(i);
12387 Tag block_tag(this, "block");
12388 PrintBlockProperty("name", current->block_id());
12389 PrintIntProperty("from_bci", -1);
12390 PrintIntProperty("to_bci", -1);
// Predecessor list (or an empty property when there are none).
12392 if (!current->predecessors()->is_empty()) {
12394 trace_.Add("predecessors");
12395 for (int j = 0; j < current->predecessors()->length(); ++j) {
12396 trace_.Add(" \"B%d\"", current->predecessors()->at(j)->block_id());
12400 PrintEmptyProperty("predecessors");
// Successor list via the block's end instruction.
12403 if (current->end()->SuccessorCount() == 0) {
12404 PrintEmptyProperty("successors");
12407 trace_.Add("successors");
12408 for (HSuccessorIterator it(current->end()); !it.Done(); it.Advance()) {
12409 trace_.Add(" \"B%d\"", it.Current()->block_id());
12414 PrintEmptyProperty("xhandlers");
// Block flags understood by the c1visualizer format.
12418 trace_.Add("flags");
12419 if (current->IsLoopSuccessorDominator()) {
12420 trace_.Add(" \"dom-loop-succ\"");
12422 if (current->IsUnreachable()) {
12423 trace_.Add(" \"dead\"");
12425 if (current->is_osr_entry()) {
12426 trace_.Add(" \"osr\"");
12431 if (current->dominator() != NULL) {
12432 PrintBlockProperty("dominator", current->dominator()->block_id());
12435 PrintIntProperty("loop_depth", current->LoopNestingDepth());
// With a lithium chunk, also emit the LIR instruction index range.
12437 if (chunk != NULL) {
12438 int first_index = current->first_instruction_index();
12439 int last_index = current->last_instruction_index();
12442 LifetimePosition::FromInstructionIndex(first_index).Value());
12445 LifetimePosition::FromInstructionIndex(last_index).Value());
// Phis are listed under the "states"/"locals" tags.
12449 Tag states_tag(this, "states");
12450 Tag locals_tag(this, "locals");
12451 int total = current->phis()->length();
12452 PrintIntProperty("size", current->phis()->length());
12453 PrintStringProperty("method", "None");
12454 for (int j = 0; j < total; ++j) {
12455 HPhi* phi = current->phis()->at(j);
12457 std::ostringstream os;
12458 os << phi->merged_index() << " " << NameOf(phi) << " " << *phi << "\n";
12459 trace_.Add(os.str().c_str());
// One line per HIR instruction: use count, name, body, and (optionally)
// the source position when position tracking is enabled.
12464 Tag HIR_tag(this, "HIR");
12465 for (HInstructionIterator it(current); !it.Done(); it.Advance()) {
12466 HInstruction* instruction = it.Current();
12467 int uses = instruction->UseCount();
12469 std::ostringstream os;
12470 os << "0 " << uses << " " << NameOf(instruction) << " " << *instruction;
12471 if (FLAG_hydrogen_track_positions &&
12472 instruction->has_position() &&
12473 instruction->position().raw() != 0) {
12474 const HSourcePosition pos = instruction->position();
12476 if (pos.inlining_id() != 0) os << pos.inlining_id() << "_";
12477 os << pos.position();
12480 trace_.Add(os.str().c_str());
// LIR instructions for the block, each annotated with the hydrogen value
// it was generated from.
12485 if (chunk != NULL) {
12486 Tag LIR_tag(this, "LIR");
12487 int first_index = current->first_instruction_index();
12488 int last_index = current->last_instruction_index();
12489 if (first_index != -1 && last_index != -1) {
12490 const ZoneList<LInstruction*>* instructions = chunk->instructions();
12491 for (int i = first_index; i <= last_index; ++i) {
12492 LInstruction* linstr = instructions->at(i);
12493 if (linstr != NULL) {
12496 LifetimePosition::FromInstructionIndex(i).Value());
12497 linstr->PrintTo(&trace_);
12498 std::ostringstream os;
12499 os << " [hir:" << NameOf(linstr->hydrogen_value()) << "] <|@\n";
12500 trace_.Add(os.str().c_str());
// Emits the "intervals" trace section: fixed double ranges, fixed general
// ranges, then the allocator's virtual-register live ranges.
12509 void HTracer::TraceLiveRanges(const char* name, LAllocator* allocator) {
12510 Tag tag(this, "intervals");
12511 PrintStringProperty("name", name);
12513 const Vector<LiveRange*>* fixed_d = allocator->fixed_double_live_ranges();
12514 for (int i = 0; i < fixed_d->length(); ++i) {
12515 TraceLiveRange(fixed_d->at(i), "fixed", allocator->zone());
12518 const Vector<LiveRange*>* fixed = allocator->fixed_live_ranges();
12519 for (int i = 0; i < fixed->length(); ++i) {
12520 TraceLiveRange(fixed->at(i), "fixed", allocator->zone());
12523 const ZoneList<LiveRange*>* live_ranges = allocator->live_ranges();
12524 for (int i = 0; i < live_ranges->length(); ++i) {
12525 TraceLiveRange(live_ranges->at(i), "object", allocator->zone());
// Emits one live range line: id, type, assigned register or spill slot,
// parent/hint indices, the covered use intervals, and (beneficial) use
// positions.
12530 void HTracer::TraceLiveRange(LiveRange* range, const char* type,
12532 if (range != NULL && !range->IsEmpty()) {
12534 trace_.Add("%d %s", range->id(), type);
12535 if (range->HasRegisterAssigned()) {
12536 LOperand* op = range->CreateAssignedOperand(zone);
12537 int assigned_reg = op->index();
12538 if (op->IsDoubleRegister()) {
12539 trace_.Add(" \"%s\"",
12540 DoubleRegister::AllocationIndexToString(assigned_reg));
12542 DCHECK(op->IsRegister());
12543 trace_.Add(" \"%s\"", Register::AllocationIndexToString(assigned_reg));
12545 } else if (range->IsSpilled()) {
12546 LOperand* op = range->TopLevel()->GetSpillOperand();
12547 if (op->IsDoubleStackSlot()) {
12548 trace_.Add(" \"double_stack:%d\"", op->index());
12550 DCHECK(op->IsStackSlot());
12551 trace_.Add(" \"stack:%d\"", op->index());
// A child range reports its parent's id; a top-level range its own.
12554 int parent_index = -1;
12555 if (range->IsChild()) {
12556 parent_index = range->parent()->id();
12558 parent_index = range->id();
12560 LOperand* op = range->FirstHint();
12561 int hint_index = -1;
12562 if (op != NULL && op->IsUnallocated()) {
12563 hint_index = LUnallocated::cast(op)->virtual_register();
12565 trace_.Add(" %d %d", parent_index, hint_index);
// Half-open [start, end[ use intervals covered by this range.
12566 UseInterval* cur_interval = range->first_interval();
12567 while (cur_interval != NULL && range->Covers(cur_interval->start())) {
12568 trace_.Add(" [%d, %d[",
12569 cur_interval->start().Value(),
12570 cur_interval->end().Value());
12571 cur_interval = cur_interval->next();
// Use positions: only register-beneficial ones unless tracing all uses.
12574 UsePosition* current_pos = range->first_pos();
12575 while (current_pos != NULL) {
12576 if (current_pos->RegisterIsBeneficial() || FLAG_trace_all_uses) {
12577 trace_.Add(" %d M", current_pos->pos().Value());
12579 current_pos = current_pos->next();
12582 trace_.Add(" \"\"\n");
// Appends the accumulated trace buffer to the trace file.
// NOTE(review): the trailing argument(s) of AppendChars and the buffer
// reset are missing from this listing — confirm against the full file.
12587 void HTracer::FlushToFile() {
12588 AppendChars(filename_.start(), trace_.ToCString().get(), trace_.length(),
// Accumulates the source size of the compiled function; no-op when the
// shared function info is unavailable.
12594 void HStatistics::Initialize(CompilationInfo* info) {
12595 if (info->shared_info().is_null()) return;
12596 source_size_ += info->shared_info()->SourceSize();
// Prints the hydrogen timing report: per-phase time/size with percentage
// breakdowns, the three top-level phase totals, the grand total, the
// slowdown factor versus full codegen, and per-kB-of-source averages.
12600 void HStatistics::Print() {
12603 "----------------------------------------"
12604 "----------------------------------------\n"
12605 "--- Hydrogen timing results:\n"
12606 "----------------------------------------"
12607 "----------------------------------------\n");
// Sum all phase times first so each row can show a percentage of total.
12608 base::TimeDelta sum;
12609 for (int i = 0; i < times_.length(); ++i) {
12613 for (int i = 0; i < names_.length(); ++i) {
12614 PrintF("%33s", names_[i]);
12615 double ms = times_[i].InMillisecondsF();
12616 double percent = times_[i].PercentOf(sum);
12617 PrintF(" %8.3f ms / %4.1f %% ", ms, percent);
12619 unsigned size = sizes_[i];
12620 double size_percent = static_cast<double>(size) * 100 / total_size_;
12621 PrintF(" %9u bytes / %4.1f %%\n", size, size_percent);
12625 "----------------------------------------"
12626 "----------------------------------------\n");
12627 base::TimeDelta total = create_graph_ + optimize_graph_ + generate_code_;
12628 PrintF("%33s %8.3f ms / %4.1f %% \n", "Create graph",
12629 create_graph_.InMillisecondsF(), create_graph_.PercentOf(total));
12630 PrintF("%33s %8.3f ms / %4.1f %% \n", "Optimize graph",
12631 optimize_graph_.InMillisecondsF(), optimize_graph_.PercentOf(total));
12632 PrintF("%33s %8.3f ms / %4.1f %% \n", "Generate and install code",
12633 generate_code_.InMillisecondsF(), generate_code_.PercentOf(total));
12635 "----------------------------------------"
12636 "----------------------------------------\n");
12637 PrintF("%33s %8.3f ms %9u bytes\n", "Total",
12638 total.InMillisecondsF(), total_size_);
12639 PrintF("%33s (%.1f times slower than full code gen)\n", "",
12640 total.TimesOf(full_code_gen_));
// Normalize by source size (in kB) to make runs comparable.
12642 double source_size_in_kb = static_cast<double>(source_size_) / 1024;
12643 double normalized_time = source_size_in_kb > 0
12644 ? total.InMillisecondsF() / source_size_in_kb
12646 double normalized_size_in_kb =
12647 source_size_in_kb > 0
12648 ? static_cast<double>(total_size_) / 1024 / source_size_in_kb
12650 PrintF("%33s %8.3f ms %7.3f kB allocated\n",
12651 "Average per kB source", normalized_time, normalized_size_in_kb);
// Records a timed phase: accumulates total code size and merges the time
// into the entry matching `name`.
// NOTE(review): the body that updates the matched entry (and appends a
// new one on miss) is missing from this listing — confirm against the
// full file.
12655 void HStatistics::SaveTiming(const char* name, base::TimeDelta time,
12657 total_size_ += size;
12658 for (int i = 0; i < names_.length(); ++i) {
12659 if (strcmp(names_[i], name) == 0) {
// On phase exit: emit a hydrogen trace if tracing is enabled for this
// phase, and run a (non-full) graph verification.
12671 HPhase::~HPhase() {
12672 if (ShouldProduceTraceOutput()) {
12673 isolate()->GetHTracer()->TraceHydrogen(name(), graph_);
12677 graph_->Verify(false); // No full verify.
12681 } } // namespace v8::internal