// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/hydrogen-instructions.h"

#include "src/base/bits.h"
#include "src/double.h"
#include "src/factory.h"
#include "src/hydrogen-infer-representation.h"
#include "src/property-details-inl.h"

#if V8_TARGET_ARCH_IA32
#include "src/ia32/lithium-ia32.h"  // NOLINT
#elif V8_TARGET_ARCH_X64
#include "src/x64/lithium-x64.h"  // NOLINT
#elif V8_TARGET_ARCH_ARM64
#include "src/arm64/lithium-arm64.h"  // NOLINT
#elif V8_TARGET_ARCH_ARM
#include "src/arm/lithium-arm.h"  // NOLINT
#elif V8_TARGET_ARCH_PPC
#include "src/ppc/lithium-ppc.h"  // NOLINT
#elif V8_TARGET_ARCH_MIPS
#include "src/mips/lithium-mips.h"  // NOLINT
#elif V8_TARGET_ARCH_MIPS64
#include "src/mips64/lithium-mips64.h"  // NOLINT
#elif V8_TARGET_ARCH_X87
#include "src/x87/lithium-x87.h"  // NOLINT
#else
#error Unsupported target architecture.
#endif

#include "src/base/safe_math.h"

namespace v8 {
namespace internal {

#define DEFINE_COMPILE(type)                                         \
  LInstruction* H##type::CompileToLithium(LChunkBuilder* builder) {  \
    return builder->Do##type(this);                                  \
  }
HYDROGEN_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
#undef DEFINE_COMPILE

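// Illustrative expansion (added note, not part of the original source):
// for HAdd the macro above generates
//
//   LInstruction* HAdd::CompileToLithium(LChunkBuilder* builder) {
//     return builder->DoAdd(this);
//   }
//
// i.e. one trampoline per concrete hydrogen instruction into the
// architecture-specific lithium builder included above.
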
Isolate* HValue::isolate() const {
  DCHECK(block() != NULL);
  return block()->isolate();
}


void HValue::AssumeRepresentation(Representation r) {
  if (CheckFlag(kFlexibleRepresentation)) {
    ChangeRepresentation(r);
    // The representation of the value is dictated by type feedback and
    // will not be changed later.
    ClearFlag(kFlexibleRepresentation);
  }
}


void HValue::InferRepresentation(HInferRepresentationPhase* h_infer) {
  DCHECK(CheckFlag(kFlexibleRepresentation));
  Representation new_rep = RepresentationFromInputs();
  UpdateRepresentation(new_rep, h_infer, "inputs");
  new_rep = RepresentationFromUses();
  UpdateRepresentation(new_rep, h_infer, "uses");
  if (representation().IsSmi() && HasNonSmiUse()) {
    UpdateRepresentation(
        Representation::Integer32(), h_infer, "use requirements");
  }
}


Representation HValue::RepresentationFromUses() {
  if (HasNoUses()) return Representation::None();

  // Array of use counts for each representation.
  int use_count[Representation::kNumRepresentations] = { 0 };

  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    Representation rep = use->observed_input_representation(it.index());
    if (rep.IsNone()) continue;
    if (FLAG_trace_representation) {
      PrintF("#%d %s is used by #%d %s as %s%s\n",
             id(), Mnemonic(), use->id(), use->Mnemonic(), rep.Mnemonic(),
             (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
    }
    use_count[rep.kind()] += 1;
  }
  if (IsPhi()) HPhi::cast(this)->AddIndirectUsesTo(&use_count[0]);
  int tagged_count = use_count[Representation::kTagged];
  int double_count = use_count[Representation::kDouble];
  int int32_count = use_count[Representation::kInteger32];
  int smi_count = use_count[Representation::kSmi];

  if (tagged_count > 0) return Representation::Tagged();
  if (double_count > 0) return Representation::Double();
  if (int32_count > 0) return Representation::Integer32();
  if (smi_count > 0) return Representation::Smi();

  return Representation::None();
}


void HValue::UpdateRepresentation(Representation new_rep,
                                  HInferRepresentationPhase* h_infer,
                                  const char* reason) {
  Representation r = representation();
  if (new_rep.is_more_general_than(r)) {
    if (CheckFlag(kCannotBeTagged) && new_rep.IsTagged()) return;
    if (FLAG_trace_representation) {
      PrintF("Changing #%d %s representation %s -> %s based on %s\n",
             id(), Mnemonic(), r.Mnemonic(), new_rep.Mnemonic(), reason);
    }
    ChangeRepresentation(new_rep);
    AddDependantsToWorklist(h_infer);
  }
}


void HValue::AddDependantsToWorklist(HInferRepresentationPhase* h_infer) {
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    h_infer->AddToWorklist(it.value());
  }
  for (int i = 0; i < OperandCount(); ++i) {
    h_infer->AddToWorklist(OperandAt(i));
  }
}

static int32_t ConvertAndSetOverflow(Representation r,
                                     int64_t result,
                                     bool* overflow) {
  if (r.IsSmi()) {
    if (result > Smi::kMaxValue) {
      *overflow = true;
      return Smi::kMaxValue;
    }
    if (result < Smi::kMinValue) {
      *overflow = true;
      return Smi::kMinValue;
    }
  } else {
    if (result > kMaxInt) {
      *overflow = true;
      return kMaxInt;
    }
    if (result < kMinInt) {
      *overflow = true;
      return kMinInt;
    }
  }
  return static_cast<int32_t>(result);
}

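// Worked example (illustrative, added note): for r = Integer32, a 64-bit
// sum such as static_cast<int64_t>(kMaxInt) + 1 exceeds kMaxInt, so the
// helper sets *overflow and returns the saturated value kMaxInt; for Smi
// representation the clamp points are Smi::kMinValue and Smi::kMaxValue.
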
static int32_t AddWithoutOverflow(Representation r,
                                  int32_t a,
                                  int32_t b,
                                  bool* overflow) {
  int64_t result = static_cast<int64_t>(a) + static_cast<int64_t>(b);
  return ConvertAndSetOverflow(r, result, overflow);
}


static int32_t SubWithoutOverflow(Representation r,
                                  int32_t a,
                                  int32_t b,
                                  bool* overflow) {
  int64_t result = static_cast<int64_t>(a) - static_cast<int64_t>(b);
  return ConvertAndSetOverflow(r, result, overflow);
}


static int32_t MulWithoutOverflow(const Representation& r,
                                  int32_t a,
                                  int32_t b,
                                  bool* overflow) {
  int64_t result = static_cast<int64_t>(a) * static_cast<int64_t>(b);
  return ConvertAndSetOverflow(r, result, overflow);
}

int32_t Range::Mask() const {
  if (lower_ == upper_) return lower_;
  if (lower_ >= 0) {
    int32_t res = 1;
    while (res < upper_) {
      res = (res << 1) | 1;
    }
    return res;
  }
  return 0xffffffff;
}

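// Example (illustrative, added note): a range [0, 5] yields the mask 0x7,
// the smallest all-ones value covering the upper bound; a negative lower
// bound falls through to the conservative 0xffffffff.
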
void Range::AddConstant(int32_t value) {
  if (value == 0) return;
  bool may_overflow = false;  // Overflow is ignored here.
  Representation r = Representation::Integer32();
  lower_ = AddWithoutOverflow(r, lower_, value, &may_overflow);
  upper_ = AddWithoutOverflow(r, upper_, value, &may_overflow);
#ifdef DEBUG
  Verify();
#endif
}


void Range::Intersect(Range* other) {
  upper_ = Min(upper_, other->upper_);
  lower_ = Max(lower_, other->lower_);
  bool b = CanBeMinusZero() && other->CanBeMinusZero();
  set_can_be_minus_zero(b);
}


void Range::Union(Range* other) {
  upper_ = Max(upper_, other->upper_);
  lower_ = Min(lower_, other->lower_);
  bool b = CanBeMinusZero() || other->CanBeMinusZero();
  set_can_be_minus_zero(b);
}


void Range::CombinedMax(Range* other) {
  upper_ = Max(upper_, other->upper_);
  lower_ = Max(lower_, other->lower_);
  set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
}


void Range::CombinedMin(Range* other) {
  upper_ = Min(upper_, other->upper_);
  lower_ = Min(lower_, other->lower_);
  set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
}


void Range::Sar(int32_t value) {
  int32_t bits = value & 0x1F;
  lower_ = lower_ >> bits;
  upper_ = upper_ >> bits;
  set_can_be_minus_zero(false);
}

void Range::Shl(int32_t value) {
  int32_t bits = value & 0x1F;
  int old_lower = lower_;
  int old_upper = upper_;
  lower_ = lower_ << bits;
  upper_ = upper_ << bits;
  if (old_lower != lower_ >> bits || old_upper != upper_ >> bits) {
    upper_ = kMaxInt;
    lower_ = kMinInt;
  }
  set_can_be_minus_zero(false);
}

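// Example (illustrative, added note): shifting [1, 0x40000000] left by 2
// wraps the upper bound, the shift-back comparison above then fails, and
// the range is conservatively widened to [kMinInt, kMaxInt].
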
bool Range::AddAndCheckOverflow(const Representation& r, Range* other) {
  bool may_overflow = false;
  lower_ = AddWithoutOverflow(r, lower_, other->lower(), &may_overflow);
  upper_ = AddWithoutOverflow(r, upper_, other->upper(), &may_overflow);
  KeepOrder();
#ifdef DEBUG
  Verify();
#endif
  return may_overflow;
}


bool Range::SubAndCheckOverflow(const Representation& r, Range* other) {
  bool may_overflow = false;
  lower_ = SubWithoutOverflow(r, lower_, other->upper(), &may_overflow);
  upper_ = SubWithoutOverflow(r, upper_, other->lower(), &may_overflow);
  KeepOrder();
#ifdef DEBUG
  Verify();
#endif
  return may_overflow;
}


void Range::KeepOrder() {
  if (lower_ > upper_) {
    int32_t tmp = lower_;
    lower_ = upper_;
    upper_ = tmp;
  }
}


#ifdef DEBUG
void Range::Verify() const {
  DCHECK(lower_ <= upper_);
}
#endif

bool Range::MulAndCheckOverflow(const Representation& r, Range* other) {
  bool may_overflow = false;
  int v1 = MulWithoutOverflow(r, lower_, other->lower(), &may_overflow);
  int v2 = MulWithoutOverflow(r, lower_, other->upper(), &may_overflow);
  int v3 = MulWithoutOverflow(r, upper_, other->lower(), &may_overflow);
  int v4 = MulWithoutOverflow(r, upper_, other->upper(), &may_overflow);
  lower_ = Min(Min(v1, v2), Min(v3, v4));
  upper_ = Max(Max(v1, v2), Max(v3, v4));
#ifdef DEBUG
  Verify();
#endif
  return may_overflow;
}

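// All four corner products are needed because signs can flip the ordering.
// Illustrative case (added note): for [-3, 2] * [-5, 4] the maximum is
// lower*lower ((-3)*(-5) = 15) and the minimum is lower*upper ((-3)*4 = -12).
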
bool HValue::IsDefinedAfter(HBasicBlock* other) const {
  return block()->block_id() > other->block_id();
}


HUseListNode* HUseListNode::tail() {
  // Skip and remove dead items in the use list.
  while (tail_ != NULL && tail_->value()->CheckFlag(HValue::kIsDead)) {
    tail_ = tail_->tail_;
  }
  return tail_;
}


bool HValue::CheckUsesForFlag(Flag f) const {
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    if (it.value()->IsSimulate()) continue;
    if (!it.value()->CheckFlag(f)) return false;
  }
  return true;
}


bool HValue::CheckUsesForFlag(Flag f, HValue** value) const {
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    if (it.value()->IsSimulate()) continue;
    if (!it.value()->CheckFlag(f)) {
      *value = it.value();
      return false;
    }
  }
  return true;
}


bool HValue::HasAtLeastOneUseWithFlagAndNoneWithout(Flag f) const {
  bool return_value = false;
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    if (it.value()->IsSimulate()) continue;
    if (!it.value()->CheckFlag(f)) return false;
    return_value = true;
  }
  return return_value;
}


HUseIterator::HUseIterator(HUseListNode* head) : next_(head) {
  Advance();
}


void HUseIterator::Advance() {
  current_ = next_;
  if (current_ != NULL) {
    next_ = current_->tail();
    value_ = current_->value();
    index_ = current_->index();
  }
}


int HValue::UseCount() const {
  int count = 0;
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) ++count;
  return count;
}

HUseListNode* HValue::RemoveUse(HValue* value, int index) {
  HUseListNode* previous = NULL;
  HUseListNode* current = use_list_;
  while (current != NULL) {
    if (current->value() == value && current->index() == index) {
      if (previous == NULL) {
        use_list_ = current->tail();
      } else {
        previous->set_tail(current->tail());
      }
      break;
    }

    previous = current;
    current = current->tail();
  }

#ifdef DEBUG
  // Do not reuse use list nodes in debug mode, zap them.
  if (current != NULL) {
    HUseListNode* temp =
        new(block()->zone())
        HUseListNode(current->value(), current->index(), NULL);
    current->Zap();
    current = temp;
  }
#endif
  return current;
}

bool HValue::Equals(HValue* other) {
  if (other->opcode() != opcode()) return false;
  if (!other->representation().Equals(representation())) return false;
  if (!other->type_.Equals(type_)) return false;
  if (other->flags() != flags()) return false;
  if (OperandCount() != other->OperandCount()) return false;
  for (int i = 0; i < OperandCount(); ++i) {
    if (OperandAt(i)->id() != other->OperandAt(i)->id()) return false;
  }
  bool result = DataEquals(other);
  DCHECK(!result || Hashcode() == other->Hashcode());
  return result;
}


intptr_t HValue::Hashcode() {
  intptr_t result = opcode();
  int count = OperandCount();
  for (int i = 0; i < count; ++i) {
    result = result * 19 + OperandAt(i)->id() + (result >> 7);
  }
  return result;
}


const char* HValue::Mnemonic() const {
  switch (opcode()) {
#define MAKE_CASE(type) case k##type: return #type;
    HYDROGEN_CONCRETE_INSTRUCTION_LIST(MAKE_CASE)
#undef MAKE_CASE
    case kPhi: return "Phi";
  }
  UNREACHABLE();
  return "";
}

bool HValue::CanReplaceWithDummyUses() {
  return FLAG_unreachable_code_elimination &&
      !(block()->IsReachable() ||
        IsBlockEntry() ||
        IsControlInstruction() ||
        IsArgumentsObject() ||
        IsCapturedObject() ||
        IsSimulate() ||
        IsEnterInlined() ||
        IsLeaveInlined());
}

bool HValue::IsInteger32Constant() {
  return IsConstant() && HConstant::cast(this)->HasInteger32Value();
}


int32_t HValue::GetInteger32Constant() {
  return HConstant::cast(this)->Integer32Value();
}


bool HValue::EqualsInteger32Constant(int32_t value) {
  return IsInteger32Constant() && GetInteger32Constant() == value;
}

void HValue::SetOperandAt(int index, HValue* value) {
  RegisterUse(index, value);
  InternalSetOperandAt(index, value);
}


void HValue::DeleteAndReplaceWith(HValue* other) {
  // We replace all uses first, so Delete can assert that there are none.
  if (other != NULL) ReplaceAllUsesWith(other);
  Kill();
  DeleteFromGraph();
}


void HValue::ReplaceAllUsesWith(HValue* other) {
  while (use_list_ != NULL) {
    HUseListNode* list_node = use_list_;
    HValue* value = list_node->value();
    DCHECK(!value->block()->IsStartBlock());
    value->InternalSetOperandAt(list_node->index(), other);
    use_list_ = list_node->tail();
    list_node->set_tail(other->use_list_);
    other->use_list_ = list_node;
  }
}


void HValue::Kill() {
  // Instead of going through the entire use list of each operand, we only
  // check the first item in each use list and rely on the tail() method to
  // skip dead items, removing them lazily next time we traverse the list.
  SetFlag(kIsDead);
  for (int i = 0; i < OperandCount(); ++i) {
    HValue* operand = OperandAt(i);
    if (operand == NULL) continue;
    HUseListNode* first = operand->use_list_;
    if (first != NULL && first->value()->CheckFlag(kIsDead)) {
      operand->use_list_ = first->tail();
    }
  }
}


void HValue::SetBlock(HBasicBlock* block) {
  DCHECK(block_ == NULL || block == NULL);
  block_ = block;
  if (id_ == kNoNumber && block != NULL) {
    id_ = block->graph()->GetNextValueID(this);
  }
}


std::ostream& operator<<(std::ostream& os, const HValue& v) {
  return v.PrintTo(os);
}

std::ostream& operator<<(std::ostream& os, const TypeOf& t) {
  if (t.value->representation().IsTagged() &&
      !t.value->type().Equals(HType::Tagged()))
    return os;
  return os << " type:" << t.value->type();
}


std::ostream& operator<<(std::ostream& os, const ChangesOf& c) {
  GVNFlagSet changes_flags = c.value->ChangesFlags();
  if (changes_flags.IsEmpty()) return os;
  os << " changes[";
  if (changes_flags == c.value->AllSideEffectsFlagSet()) {
    os << "*";
  } else {
    bool add_comma = false;
#define PRINT_DO(Type)                   \
  if (changes_flags.Contains(k##Type)) { \
    if (add_comma) os << ",";            \
    add_comma = true;                    \
    os << #Type;                         \
  }
    GVN_TRACKED_FLAG_LIST(PRINT_DO);
    GVN_UNTRACKED_FLAG_LIST(PRINT_DO);
#undef PRINT_DO
  }
  return os << "]";
}

bool HValue::HasMonomorphicJSObjectType() {
  return !GetMonomorphicJSObjectMap().is_null();
}


bool HValue::UpdateInferredType() {
  HType type = CalculateInferredType();
  bool result = (!type.Equals(type_));
  type_ = type;
  return result;
}


void HValue::RegisterUse(int index, HValue* new_value) {
  HValue* old_value = OperandAt(index);
  if (old_value == new_value) return;

  HUseListNode* removed = NULL;
  if (old_value != NULL) {
    removed = old_value->RemoveUse(this, index);
  }

  if (new_value != NULL) {
    if (removed == NULL) {
      new_value->use_list_ = new(new_value->block()->zone()) HUseListNode(
          this, index, new_value->use_list_);
    } else {
      removed->set_tail(new_value->use_list_);
      new_value->use_list_ = removed;
    }
  }
}


void HValue::AddNewRange(Range* r, Zone* zone) {
  if (!HasRange()) ComputeInitialRange(zone);
  if (!HasRange()) range_ = new(zone) Range();
  DCHECK(HasRange());
  r->StackUpon(range_);
  range_ = r;
}


void HValue::RemoveLastAddedRange() {
  DCHECK(HasRange());
  DCHECK(range_->next() != NULL);
  range_ = range_->next();
}


void HValue::ComputeInitialRange(Zone* zone) {
  DCHECK(!HasRange());
  range_ = InferRange(zone);
  DCHECK(HasRange());
}

std::ostream& HInstruction::PrintTo(std::ostream& os) const {  // NOLINT
  os << Mnemonic() << " ";
  PrintDataTo(os) << ChangesOf(this) << TypeOf(this);
  if (CheckFlag(HValue::kHasNoObservableSideEffects)) os << " [noOSE]";
  if (CheckFlag(HValue::kIsDead)) os << " [dead]";
  return os;
}


std::ostream& HInstruction::PrintDataTo(std::ostream& os) const {  // NOLINT
  for (int i = 0; i < OperandCount(); ++i) {
    if (i > 0) os << " ";
    os << NameOf(OperandAt(i));
  }
  return os;
}


void HInstruction::Unlink() {
  DCHECK(IsLinked());
  DCHECK(!IsControlInstruction());  // Must never move control instructions.
  DCHECK(!IsBlockEntry());  // Doesn't make sense to delete these.
  DCHECK(previous_ != NULL);
  previous_->next_ = next_;
  if (next_ == NULL) {
    DCHECK(block()->last() == this);
    block()->set_last(previous_);
  } else {
    next_->previous_ = previous_;
  }
  clear_block();
}


void HInstruction::InsertBefore(HInstruction* next) {
  DCHECK(!IsLinked());
  DCHECK(!next->IsBlockEntry());
  DCHECK(!IsControlInstruction());
  DCHECK(!next->block()->IsStartBlock());
  DCHECK(next->previous_ != NULL);
  HInstruction* prev = next->previous();
  prev->next_ = this;
  next->previous_ = this;
  next_ = next;
  previous_ = prev;
  SetBlock(next->block());
  if (!has_position() && next->has_position()) {
    set_position(next->position());
  }
}


void HInstruction::InsertAfter(HInstruction* previous) {
  DCHECK(!IsLinked());
  DCHECK(!previous->IsControlInstruction());
  DCHECK(!IsControlInstruction() || previous->next_ == NULL);
  HBasicBlock* block = previous->block();
  // Never insert anything except constants into the start block after
  // finishing it.
  if (block->IsStartBlock() && block->IsFinished() && !IsConstant()) {
    DCHECK(block->end()->SecondSuccessor() == NULL);
    InsertAfter(block->end()->FirstSuccessor()->first());
    return;
  }

  // If we're inserting after an instruction with side-effects that is
  // followed by a simulate instruction, we need to insert after the
  // simulate instruction instead.
  HInstruction* next = previous->next_;
  if (previous->HasObservableSideEffects() && next != NULL) {
    DCHECK(next->IsSimulate());
    previous = next;
    next = previous->next_;
  }

  previous_ = previous;
  next_ = next;
  SetBlock(block);
  previous->next_ = this;
  if (next != NULL) next->previous_ = this;
  if (block->last() == previous) {
    block->set_last(this);
  }
  if (!has_position() && previous->has_position()) {
    set_position(previous->position());
  }
}


bool HInstruction::Dominates(HInstruction* other) {
  if (block() != other->block()) {
    return block()->Dominates(other->block());
  }
  // Both instructions are in the same basic block. This instruction
  // should precede the other one in order to dominate it.
  for (HInstruction* instr = next(); instr != NULL; instr = instr->next()) {
    if (instr == other) {
      return true;
    }
  }
  return false;
}

#ifdef DEBUG
void HInstruction::Verify() {
  // Verify that input operands are defined before use.
  HBasicBlock* cur_block = block();
  for (int i = 0; i < OperandCount(); ++i) {
    HValue* other_operand = OperandAt(i);
    if (other_operand == NULL) continue;
    HBasicBlock* other_block = other_operand->block();
    if (cur_block == other_block) {
      if (!other_operand->IsPhi()) {
        HInstruction* cur = this->previous();
        while (cur != NULL) {
          if (cur == other_operand) break;
          cur = cur->previous();
        }
        // Must reach other operand in the same block!
        DCHECK(cur == other_operand);
      }
    } else {
      // If the following assert fires, you may have forgotten an
      // AddInstruction.
      DCHECK(other_block->Dominates(cur_block));
    }
  }

  // Verify that instructions that may have side-effects are followed
  // by a simulate instruction.
  if (HasObservableSideEffects() && !IsOsrEntry()) {
    DCHECK(next()->IsSimulate());
  }

  // Verify that instructions that can be eliminated by GVN have overridden
  // HValue::DataEquals. The default implementation is UNREACHABLE. We
  // don't actually care whether DataEquals returns true or false here.
  if (CheckFlag(kUseGVN)) DataEquals(this);

  // Verify that all uses are in the graph.
  for (HUseIterator use = uses(); !use.Done(); use.Advance()) {
    if (use.value()->IsInstruction()) {
      DCHECK(HInstruction::cast(use.value())->IsLinked());
    }
  }
}
#endif

bool HInstruction::CanDeoptimize() {
  // TODO(titzer): make this a virtual method?
  switch (opcode()) {
    case HValue::kAbnormalExit:
    case HValue::kAccessArgumentsAt:
    case HValue::kAllocate:
    case HValue::kArgumentsElements:
    case HValue::kArgumentsLength:
    case HValue::kArgumentsObject:
    case HValue::kBlockEntry:
    case HValue::kBoundsCheckBaseIndexInformation:
    case HValue::kCallFunction:
    case HValue::kCallNew:
    case HValue::kCallNewArray:
    case HValue::kCallStub:
    case HValue::kCapturedObject:
    case HValue::kClassOfTestAndBranch:
    case HValue::kCompareGeneric:
    case HValue::kCompareHoleAndBranch:
    case HValue::kCompareMap:
    case HValue::kCompareMinusZeroAndBranch:
    case HValue::kCompareNumericAndBranch:
    case HValue::kCompareObjectEqAndBranch:
    case HValue::kConstant:
    case HValue::kConstructDouble:
    case HValue::kContext:
    case HValue::kDebugBreak:
    case HValue::kDeclareGlobals:
    case HValue::kDoubleBits:
    case HValue::kDummyUse:
    case HValue::kEnterInlined:
    case HValue::kEnvironmentMarker:
    case HValue::kForceRepresentation:
    case HValue::kGetCachedArrayIndex:
    case HValue::kGoto:
    case HValue::kHasCachedArrayIndexAndBranch:
    case HValue::kHasInstanceTypeAndBranch:
    case HValue::kInnerAllocatedObject:
    case HValue::kInstanceOf:
    case HValue::kInstanceOfKnownGlobal:
    case HValue::kIsConstructCallAndBranch:
    case HValue::kIsObjectAndBranch:
    case HValue::kIsSmiAndBranch:
    case HValue::kIsStringAndBranch:
    case HValue::kIsUndetectableAndBranch:
    case HValue::kLeaveInlined:
    case HValue::kLoadFieldByIndex:
    case HValue::kLoadGlobalGeneric:
    case HValue::kLoadNamedField:
    case HValue::kLoadNamedGeneric:
    case HValue::kLoadRoot:
    case HValue::kMapEnumLength:
    case HValue::kMathMinMax:
    case HValue::kParameter:
    case HValue::kPhi:
    case HValue::kPushArguments:
    case HValue::kRegExpLiteral:
    case HValue::kReturn:
    case HValue::kSeqStringGetChar:
    case HValue::kStoreCodeEntry:
    case HValue::kStoreFrameContext:
    case HValue::kStoreKeyed:
    case HValue::kStoreNamedField:
    case HValue::kStoreNamedGeneric:
    case HValue::kStringCharCodeAt:
    case HValue::kStringCharFromCode:
    case HValue::kTailCallThroughMegamorphicCache:
    case HValue::kThisFunction:
    case HValue::kTypeofIsAndBranch:
    case HValue::kUnknownOSRValue:
    case HValue::kUseConst:
      return false;

    case HValue::kAdd:
    case HValue::kAllocateBlockContext:
    case HValue::kApplyArguments:
    case HValue::kBitwise:
    case HValue::kBoundsCheck:
    case HValue::kBranch:
    case HValue::kCallJSFunction:
    case HValue::kCallRuntime:
    case HValue::kCallWithDescriptor:
    case HValue::kChange:
    case HValue::kCheckHeapObject:
    case HValue::kCheckInstanceType:
    case HValue::kCheckMapValue:
    case HValue::kCheckMaps:
    case HValue::kCheckSmi:
    case HValue::kCheckValue:
    case HValue::kClampToUint8:
    case HValue::kDateField:
    case HValue::kDeoptimize:
    case HValue::kDiv:
    case HValue::kForInCacheArray:
    case HValue::kForInPrepareMap:
    case HValue::kFunctionLiteral:
    case HValue::kInvokeFunction:
    case HValue::kLoadContextSlot:
    case HValue::kLoadFunctionPrototype:
    case HValue::kLoadKeyed:
    case HValue::kLoadKeyedGeneric:
    case HValue::kMathFloorOfDiv:
    case HValue::kMod:
    case HValue::kMul:
    case HValue::kOsrEntry:
    case HValue::kPower:
    case HValue::kRor:
    case HValue::kSar:
    case HValue::kSeqStringSetChar:
    case HValue::kShl:
    case HValue::kShr:
    case HValue::kSimulate:
    case HValue::kStackCheck:
    case HValue::kStoreContextSlot:
    case HValue::kStoreKeyedGeneric:
    case HValue::kStringAdd:
    case HValue::kStringCompareAndBranch:
    case HValue::kSub:
    case HValue::kToFastProperties:
    case HValue::kTransitionElementsKind:
    case HValue::kTrapAllocationMemento:
    case HValue::kTypeof:
    case HValue::kUnaryMathOperation:
    case HValue::kWrapReceiver:
      return true;
  }
  UNREACHABLE();
  return true;
}

std::ostream& operator<<(std::ostream& os, const NameOf& v) {
  return os << v.value->representation().Mnemonic() << v.value->id();
}


std::ostream& HDummyUse::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(value());
}


std::ostream& HEnvironmentMarker::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  return os << (kind() == BIND ? "bind" : "lookup") << " var[" << index()
            << "]";
}


std::ostream& HUnaryCall::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(value()) << " #" << argument_count();
}


std::ostream& HCallJSFunction::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(function()) << " #" << argument_count();
}


HCallJSFunction* HCallJSFunction::New(Isolate* isolate, Zone* zone,
                                      HValue* context, HValue* function,
                                      int argument_count,
                                      bool pass_argument_count) {
  bool has_stack_check = false;
  if (function->IsConstant()) {
    HConstant* fun_const = HConstant::cast(function);
    Handle<JSFunction> jsfun =
        Handle<JSFunction>::cast(fun_const->handle(isolate));
    has_stack_check = !jsfun.is_null() &&
        (jsfun->code()->kind() == Code::FUNCTION ||
         jsfun->code()->kind() == Code::OPTIMIZED_FUNCTION);
  }

  return new(zone) HCallJSFunction(
      function, argument_count, pass_argument_count,
      has_stack_check);
}


std::ostream& HBinaryCall::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(first()) << " " << NameOf(second()) << " #"
            << argument_count();
}


std::ostream& HCallFunction::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << NameOf(context()) << " " << NameOf(function());
  if (HasVectorAndSlot()) {
    os << " (type-feedback-vector icslot " << slot().ToInt() << ")";
  }
  return os;
}

void HBoundsCheck::ApplyIndexChange() {
  if (skip_check()) return;

  DecompositionResult decomposition;
  bool index_is_decomposable = index()->TryDecompose(&decomposition);
  if (index_is_decomposable) {
    DCHECK(decomposition.base() == base());
    if (decomposition.offset() == offset() &&
        decomposition.scale() == scale()) return;
  } else {
    return;
  }

  ReplaceAllUsesWith(index());

  HValue* current_index = decomposition.base();
  int actual_offset = decomposition.offset() + offset();
  int actual_scale = decomposition.scale() + scale();

  HGraph* graph = block()->graph();
  Isolate* isolate = graph->isolate();
  Zone* zone = graph->zone();
  HValue* context = graph->GetInvalidContext();
  if (actual_offset != 0) {
    HConstant* add_offset =
        HConstant::New(isolate, zone, context, actual_offset);
    add_offset->InsertBefore(this);
    HInstruction* add =
        HAdd::New(isolate, zone, context, current_index, add_offset);
    add->InsertBefore(this);
    add->AssumeRepresentation(index()->representation());
    add->ClearFlag(kCanOverflow);
    current_index = add;
  }

  if (actual_scale != 0) {
    HConstant* sar_scale = HConstant::New(isolate, zone, context, actual_scale);
    sar_scale->InsertBefore(this);
    HInstruction* sar =
        HSar::New(isolate, zone, context, current_index, sar_scale);
    sar->InsertBefore(this);
    sar->AssumeRepresentation(index()->representation());
    current_index = sar;
  }

  SetOperandAt(0, current_index);

  base_ = NULL;
  offset_ = 0;
  scale_ = 0;
}

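// Illustrative effect (added note, restating the code above): an index
// decomposed as ((base + offset) >> scale) is rewritten so the check
// applies to base, by materializing an HAdd for the combined offset and an
// HSar for the combined scale immediately before this bounds check.
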
std::ostream& HBoundsCheck::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << NameOf(index()) << " " << NameOf(length());
  if (base() != NULL && (offset() != 0 || scale() != 0)) {
    os << " base: ((";
    if (base() != index()) {
      os << NameOf(index());
    } else {
      os << "index";
    }
    os << " + " << offset() << ") >> " << scale() << ")";
  }
  if (skip_check()) os << " [DISABLED]";
  return os;
}

void HBoundsCheck::InferRepresentation(HInferRepresentationPhase* h_infer) {
  DCHECK(CheckFlag(kFlexibleRepresentation));
  HValue* actual_index = index()->ActualValue();
  HValue* actual_length = length()->ActualValue();
  Representation index_rep = actual_index->representation();
  Representation length_rep = actual_length->representation();
  if (index_rep.IsTagged() && actual_index->type().IsSmi()) {
    index_rep = Representation::Smi();
  }
  if (length_rep.IsTagged() && actual_length->type().IsSmi()) {
    length_rep = Representation::Smi();
  }
  Representation r = index_rep.generalize(length_rep);
  if (r.is_more_general_than(Representation::Integer32())) {
    r = Representation::Integer32();
  }
  UpdateRepresentation(r, h_infer, "boundscheck");
}

Range* HBoundsCheck::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32() && length()->HasRange()) {
    int upper = length()->range()->upper() - (allow_equality() ? 0 : 1);
    int lower = 0;

    Range* result = new(zone) Range(lower, upper);
    if (index()->HasRange()) {
      result->Intersect(index()->range());
    }

    // In case of Smi representation, clamp result to Smi::kMaxValue.
    if (r.IsSmi()) result->ClampToSmi();
    return result;
  }
  return HValue::InferRange(zone);
}

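// Example (illustrative, added note): with a length range of [0, 10] and
// allow_equality false, the check's result range starts as [0, 9] and is
// then intersected with the index's own range when one is known.
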
std::ostream& HBoundsCheckBaseIndexInformation::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  // TODO(svenpanne) This 2nd base_index() looks wrong...
  return os << "base: " << NameOf(base_index())
            << ", check: " << NameOf(base_index());
}


std::ostream& HCallWithDescriptor::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  for (int i = 0; i < OperandCount(); i++) {
    os << NameOf(OperandAt(i)) << " ";
  }
  return os << "#" << argument_count();
}


std::ostream& HCallNewArray::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << ElementsKindToString(elements_kind()) << " ";
  return HBinaryCall::PrintDataTo(os);
}


std::ostream& HCallRuntime::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << name()->ToCString().get() << " ";
  if (save_doubles() == kSaveFPRegs) os << "[save doubles] ";
  return os << "#" << argument_count();
}


std::ostream& HClassOfTestAndBranch::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  return os << "class_of_test(" << NameOf(value()) << ", \""
            << class_name()->ToCString().get() << "\")";
}


std::ostream& HWrapReceiver::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(receiver()) << " " << NameOf(function());
}


std::ostream& HAccessArgumentsAt::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  return os << NameOf(arguments()) << "[" << NameOf(index()) << "], length "
            << NameOf(length());
}


std::ostream& HAllocateBlockContext::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  return os << NameOf(context()) << " " << NameOf(function());
}

std::ostream& HControlInstruction::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  os << " goto (";
  bool first_block = true;
  for (HSuccessorIterator it(this); !it.Done(); it.Advance()) {
    if (!first_block) os << ", ";
    os << *it.Current();
    first_block = false;
  }
  return os << " )";
}

std::ostream& HUnaryControlInstruction::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  os << NameOf(value());
  return HControlInstruction::PrintDataTo(os);
}


std::ostream& HReturn::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(value()) << " (pop " << NameOf(parameter_count())
            << " values)";
}

Representation HBranch::observed_input_representation(int index) {
  if (expected_input_types_.Contains(ToBooleanStub::NULL_TYPE) ||
      expected_input_types_.Contains(ToBooleanStub::SPEC_OBJECT) ||
      expected_input_types_.Contains(ToBooleanStub::STRING) ||
      expected_input_types_.Contains(ToBooleanStub::SYMBOL)) {
    return Representation::Tagged();
  }
  if (expected_input_types_.Contains(ToBooleanStub::UNDEFINED)) {
    if (expected_input_types_.Contains(ToBooleanStub::HEAP_NUMBER)) {
      return Representation::Double();
    }
    return Representation::Tagged();
  }
  if (expected_input_types_.Contains(ToBooleanStub::HEAP_NUMBER)) {
    return Representation::Double();
  }
  if (expected_input_types_.Contains(ToBooleanStub::SMI)) {
    return Representation::Smi();
  }
  return Representation::None();
}


bool HBranch::KnownSuccessorBlock(HBasicBlock** block) {
  HValue* value = this->value();
  if (value->EmitAtUses()) {
    DCHECK(value->IsConstant());
    DCHECK(!value->representation().IsDouble());
    *block = HConstant::cast(value)->BooleanValue()
        ? FirstSuccessor()
        : SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}


std::ostream& HBranch::PrintDataTo(std::ostream& os) const {  // NOLINT
  return HUnaryControlInstruction::PrintDataTo(os) << " "
         << expected_input_types();
}

std::ostream& HCompareMap::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << NameOf(value()) << " (" << *map().handle() << ")";
  HControlInstruction::PrintDataTo(os);
  if (known_successor_index() == 0) {
    os << " [true]";
  } else if (known_successor_index() == 1) {
    os << " [false]";
  }
  return os;
}

const char* HUnaryMathOperation::OpName() const {
  switch (op()) {
    case kMathFloor:
      return "floor";
    case kMathFround:
      return "fround";
    case kMathRound:
      return "round";
    case kMathAbs:
      return "abs";
    case kMathLog:
      return "log";
    case kMathExp:
      return "exp";
    case kMathSqrt:
      return "sqrt";
    case kMathPowHalf:
      return "pow-half";
    case kMathClz32:
      return "clz32";
    default:
      UNREACHABLE();
      return NULL;
  }
}

Range* HUnaryMathOperation::InferRange(Zone* zone) {
  Representation r = representation();
  if (op() == kMathClz32) return new(zone) Range(0, 32);
  if (r.IsSmiOrInteger32() && value()->HasRange()) {
    if (op() == kMathAbs) {
      int upper = value()->range()->upper();
      int lower = value()->range()->lower();
      bool spans_zero = value()->range()->CanBeZero();
      // Math.abs(kMinInt) overflows its representation, on which the
      // instruction deopts. Hence clamp it to kMaxInt.
      int abs_upper = upper == kMinInt ? kMaxInt : abs(upper);
      int abs_lower = lower == kMinInt ? kMaxInt : abs(lower);
      Range* result =
          new(zone) Range(spans_zero ? 0 : Min(abs_lower, abs_upper),
                          Max(abs_lower, abs_upper));
      // In case of Smi representation, clamp Math.abs(Smi::kMinValue) to
      // Smi::kMaxValue.
      if (r.IsSmi()) result->ClampToSmi();
      return result;
    }
  }
  return HValue::InferRange(zone);
}

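// Example (illustrative, added note): for kMathAbs on an input range
// [-7, 3] that spans zero, the result range is [0, 7]; an input touching
// kMinInt is clamped to kMaxInt because abs(kMinInt) would overflow and the
// instruction deopts instead.
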
std::ostream& HUnaryMathOperation::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  return os << OpName() << " " << NameOf(value());
}


std::ostream& HUnaryOperation::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(value());
}


std::ostream& HHasInstanceTypeAndBranch::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  os << NameOf(value());
  switch (from_) {
    case FIRST_JS_RECEIVER_TYPE:
      if (to_ == LAST_TYPE) os << " spec_object";
      break;
    case JS_REGEXP_TYPE:
      if (to_ == JS_REGEXP_TYPE) os << " reg_exp";
      break;
    case JS_ARRAY_TYPE:
      if (to_ == JS_ARRAY_TYPE) os << " array";
      break;
    case JS_FUNCTION_TYPE:
      if (to_ == JS_FUNCTION_TYPE) os << " function";
      break;
    default:
      break;
  }
  return os;
}


std::ostream& HTypeofIsAndBranch::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  os << NameOf(value()) << " == " << type_literal()->ToCString().get();
  return HControlInstruction::PrintDataTo(os);
}


static String* TypeOfString(HConstant* constant, Isolate* isolate) {
  Heap* heap = isolate->heap();
  if (constant->HasNumberValue()) return heap->number_string();
  if (constant->IsUndetectable()) return heap->undefined_string();
  if (constant->HasStringValue()) return heap->string_string();
  switch (constant->GetInstanceType()) {
    case ODDBALL_TYPE: {
      Unique<Object> unique = constant->GetUnique();
      if (unique.IsKnownGlobal(heap->true_value()) ||
          unique.IsKnownGlobal(heap->false_value())) {
        return heap->boolean_string();
      }
      if (unique.IsKnownGlobal(heap->null_value())) {
        return heap->object_string();
      }
      DCHECK(unique.IsKnownGlobal(heap->undefined_value()));
      return heap->undefined_string();
    }
    case SYMBOL_TYPE:
      return heap->symbol_string();
    case JS_FUNCTION_TYPE:
    case JS_FUNCTION_PROXY_TYPE:
      return heap->function_string();
    default:
      return heap->object_string();
  }
}

bool HTypeofIsAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (FLAG_fold_constants && value()->IsConstant()) {
    HConstant* constant = HConstant::cast(value());
    String* type_string = TypeOfString(constant, isolate());
    bool same_type = type_literal_.IsKnownGlobal(type_string);
    *block = same_type ? FirstSuccessor() : SecondSuccessor();
    return true;
  } else if (value()->representation().IsSpecialization()) {
    bool number_type =
        type_literal_.IsKnownGlobal(isolate()->heap()->number_string());
    *block = number_type ? FirstSuccessor() : SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}


std::ostream& HCheckMapValue::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(value()) << " " << NameOf(map());
}


HValue* HCheckMapValue::Canonicalize() {
  if (map()->IsConstant()) {
    HConstant* c_map = HConstant::cast(map());
    return HCheckMaps::CreateAndInsertAfter(
        block()->graph()->zone(), value(), c_map->MapValue(),
        c_map->HasStableMapValue(), this);
  }
  return this;
}


std::ostream& HForInPrepareMap::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(enumerable());
}


std::ostream& HForInCacheArray::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(enumerable()) << " " << NameOf(map()) << "[" << idx_
            << "]";
}


std::ostream& HLoadFieldByIndex::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  return os << NameOf(object()) << " " << NameOf(index());
}


static bool MatchLeftIsOnes(HValue* l, HValue* r, HValue** negated) {
  if (!l->EqualsInteger32Constant(~0)) return false;
  *negated = r;
  return true;
}


static bool MatchNegationViaXor(HValue* instr, HValue** negated) {
  if (!instr->IsBitwise()) return false;
  HBitwise* b = HBitwise::cast(instr);
  return (b->op() == Token::BIT_XOR) &&
         (MatchLeftIsOnes(b->left(), b->right(), negated) ||
          MatchLeftIsOnes(b->right(), b->left(), negated));
}


static bool MatchDoubleNegation(HValue* instr, HValue** arg) {
  HValue* negated;
  return MatchNegationViaXor(instr, &negated) &&
         MatchNegationViaXor(negated, arg);
}

HValue* HBitwise::Canonicalize() {
  if (!representation().IsSmiOrInteger32()) return this;
  // If x is an int32, then x & -1 == x, x | 0 == x and x ^ 0 == x.
  int32_t nop_constant = (op() == Token::BIT_AND) ? -1 : 0;
  if (left()->EqualsInteger32Constant(nop_constant) &&
      !right()->CheckFlag(kUint32)) {
    return right();
  }
  if (right()->EqualsInteger32Constant(nop_constant) &&
      !left()->CheckFlag(kUint32)) {
    return left();
  }
  // Optimize double negation, a common pattern used for ToInt32(x).
  HValue* arg;
  if (MatchDoubleNegation(this, &arg) && !arg->CheckFlag(kUint32)) {
    return arg;
  }
  return this;
}

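// Example (illustrative, added note): the double-negation pattern ~~x,
// i.e. (x ^ -1) ^ -1, produced by common ToInt32(x) idioms, canonicalizes
// back to x unless x carries the kUint32 flag.
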
Representation HAdd::RepresentationFromInputs() {
  Representation left_rep = left()->representation();
  if (left_rep.IsExternal()) {
    return Representation::External();
  }
  return HArithmeticBinaryOperation::RepresentationFromInputs();
}


Representation HAdd::RequiredInputRepresentation(int index) {
  if (index == 2) {
    Representation left_rep = left()->representation();
    if (left_rep.IsExternal()) {
      return Representation::Integer32();
    }
  }
  return HArithmeticBinaryOperation::RequiredInputRepresentation(index);
}


static bool IsIdentityOperation(HValue* arg1, HValue* arg2, int32_t identity) {
  return arg1->representation().IsSpecialization() &&
         arg2->EqualsInteger32Constant(identity);
}

HValue* HAdd::Canonicalize() {
  // Adding 0 is an identity operation except in case of -0: -0 + 0 = +0
  if (IsIdentityOperation(left(), right(), 0) &&
      !left()->representation().IsDouble()) {  // Left could be -0.
    return left();
  }
  if (IsIdentityOperation(right(), left(), 0) &&
      !left()->representation().IsDouble()) {  // Right could be -0.
    return right();
  }
  return this;
}

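// Note (illustrative, added): in double representation -0 + 0 == +0, so
// rewriting x + 0 to x would be observably wrong when x can be -0; hence
// the IsDouble() guards above.
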
HValue* HSub::Canonicalize() {
  if (IsIdentityOperation(left(), right(), 0)) return left();
  return this;
}


HValue* HMul::Canonicalize() {
  if (IsIdentityOperation(left(), right(), 1)) return left();
  if (IsIdentityOperation(right(), left(), 1)) return right();
  return this;
}


bool HMul::MulMinusOne() {
  if (left()->EqualsInteger32Constant(-1) ||
      right()->EqualsInteger32Constant(-1)) {
    return true;
  }
  return false;
}


HValue* HMod::Canonicalize() {
  return this;
}


HValue* HDiv::Canonicalize() {
  if (IsIdentityOperation(left(), right(), 1)) return left();
  return this;
}


HValue* HChange::Canonicalize() {
  return (from().Equals(to())) ? value() : this;
}


HValue* HWrapReceiver::Canonicalize() {
  if (HasNoUses()) return NULL;
  if (receiver()->type().IsJSObject()) {
    return receiver();
  }
  return this;
}


std::ostream& HTypeof::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(value());
}


HInstruction* HForceRepresentation::New(Isolate* isolate, Zone* zone,
                                        HValue* context, HValue* value,
                                        Representation representation) {
  if (FLAG_fold_constants && value->IsConstant()) {
    HConstant* c = HConstant::cast(value);
    c = c->CopyToRepresentation(representation, zone);
    if (c != NULL) return c;
  }
  return new(zone) HForceRepresentation(value, representation);
}


std::ostream& HForceRepresentation::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  return os << representation().Mnemonic() << " " << NameOf(value());
}


std::ostream& HChange::PrintDataTo(std::ostream& os) const {  // NOLINT
  HUnaryOperation::PrintDataTo(os);
  os << " " << from().Mnemonic() << " to " << to().Mnemonic();

  if (CanTruncateToSmi()) os << " truncating-smi";
  if (CanTruncateToInt32()) os << " truncating-int32";
  if (CheckFlag(kBailoutOnMinusZero)) os << " -0?";
  if (CheckFlag(kAllowUndefinedAsNaN)) os << " allow-undefined-as-nan";
  return os;
}

HValue* HUnaryMathOperation::Canonicalize() {
  if (op() == kMathRound || op() == kMathFloor) {
    HValue* val = value();
    if (val->IsChange()) val = HChange::cast(val)->value();
    if (val->representation().IsSmiOrInteger32()) {
      if (val->representation().Equals(representation())) return val;
      return Prepend(new(block()->zone()) HChange(
          val, representation(), false, false));
    }
  }
  if (op() == kMathFloor && value()->IsDiv() && value()->HasOneUse()) {
    HDiv* hdiv = HDiv::cast(value());

    HValue* left = hdiv->left();
    if (left->representation().IsInteger32()) {
      // A value with an integer representation does not need to be transformed.
    } else if (left->IsChange() && HChange::cast(left)->from().IsInteger32()) {
      // A change from an integer32 can be replaced by the integer32 value.
      left = HChange::cast(left)->value();
    } else if (hdiv->observed_input_representation(1).IsSmiOrInteger32()) {
      left = Prepend(new(block()->zone()) HChange(
          left, Representation::Integer32(), false, false));
    } else {
      return this;
    }

    HValue* right = hdiv->right();
    if (right->IsInteger32Constant()) {
      right = Prepend(HConstant::cast(right)->CopyToRepresentation(
          Representation::Integer32(), right->block()->zone()));
    } else if (right->representation().IsInteger32()) {
      // A value with an integer representation does not need to be transformed.
    } else if (right->IsChange() &&
               HChange::cast(right)->from().IsInteger32()) {
      // A change from an integer32 can be replaced by the integer32 value.
      right = HChange::cast(right)->value();
    } else if (hdiv->observed_input_representation(2).IsSmiOrInteger32()) {
      right = Prepend(new(block()->zone()) HChange(
          right, Representation::Integer32(), false, false));
    } else {
      return this;
    }

    return Prepend(HMathFloorOfDiv::New(
        block()->graph()->isolate(), block()->zone(), context(), left, right));
  }
  return this;
}

HValue* HCheckInstanceType::Canonicalize() {
  if ((check_ == IS_SPEC_OBJECT && value()->type().IsJSObject()) ||
      (check_ == IS_JS_ARRAY && value()->type().IsJSArray()) ||
      (check_ == IS_STRING && value()->type().IsString())) {
    return value();
  }

  if (check_ == IS_INTERNALIZED_STRING && value()->IsConstant()) {
    if (HConstant::cast(value())->HasInternalizedStringValue()) {
      return value();
    }
  }
  return this;
}


void HCheckInstanceType::GetCheckInterval(InstanceType* first,
                                          InstanceType* last) {
  DCHECK(is_interval_check());
  switch (check_) {
    case IS_SPEC_OBJECT:
      *first = FIRST_SPEC_OBJECT_TYPE;
      *last = LAST_SPEC_OBJECT_TYPE;
      return;
    case IS_JS_ARRAY:
      *first = *last = JS_ARRAY_TYPE;
      return;
    default:
      UNREACHABLE();
  }
}


void HCheckInstanceType::GetCheckMaskAndTag(uint8_t* mask, uint8_t* tag) {
  DCHECK(!is_interval_check());
  switch (check_) {
    case IS_STRING:
      *mask = kIsNotStringMask;
      *tag = kStringTag;
      return;
    case IS_INTERNALIZED_STRING:
      *mask = kIsNotStringMask | kIsNotInternalizedMask;
      *tag = kInternalizedTag;
      return;
    default:
      UNREACHABLE();
  }
}


std::ostream& HCheckMaps::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << NameOf(value()) << " [" << *maps()->at(0).handle();
  for (int i = 1; i < maps()->size(); ++i) {
    os << "," << *maps()->at(i).handle();
  }
  os << "]";
  if (IsStabilityCheck()) os << "(stability-check)";
  return os;
}


HValue* HCheckMaps::Canonicalize() {
  if (!IsStabilityCheck() && maps_are_stable() && value()->IsConstant()) {
    HConstant* c_value = HConstant::cast(value());
    if (c_value->HasObjectMap()) {
      for (int i = 0; i < maps()->size(); ++i) {
        if (c_value->ObjectMap() == maps()->at(i)) {
          if (maps()->size() > 1) {
            set_maps(new(block()->graph()->zone()) UniqueSet<Map>(
                maps()->at(i), block()->graph()->zone()));
          }
          MarkAsStabilityCheck();
          break;
        }
      }
    }
  }
  return this;
}


std::ostream& HCheckValue::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(value()) << " " << Brief(*object().handle());
}


HValue* HCheckValue::Canonicalize() {
  return (value()->IsConstant() &&
          HConstant::cast(value())->EqualsUnique(object_)) ? NULL : this;
}


const char* HCheckInstanceType::GetCheckName() const {
  switch (check_) {
    case IS_SPEC_OBJECT: return "object";
    case IS_JS_ARRAY: return "array";
    case IS_STRING: return "string";
    case IS_INTERNALIZED_STRING: return "internalized_string";
  }
  UNREACHABLE();
  return "";
}


std::ostream& HCheckInstanceType::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  os << GetCheckName() << " ";
  return HUnaryOperation::PrintDataTo(os);
}


std::ostream& HCallStub::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << CodeStub::MajorName(major_key_, false) << " ";
  return HUnaryCall::PrintDataTo(os);
}


Code::Flags HTailCallThroughMegamorphicCache::flags() const {
  Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags(
      Code::ComputeHandlerFlags(Code::LOAD_IC));
  return code_flags;
}


std::ostream& HTailCallThroughMegamorphicCache::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  for (int i = 0; i < OperandCount(); i++) {
    os << NameOf(OperandAt(i)) << " ";
  }
  return os << "flags: " << flags();
}


std::ostream& HUnknownOSRValue::PrintDataTo(std::ostream& os) const {  // NOLINT
  const char* type = "expression";
  if (environment_->is_local_index(index_)) type = "local";
  if (environment_->is_special_index(index_)) type = "special";
  if (environment_->is_parameter_index(index_)) type = "parameter";
  return os << type << " @ " << index_;
}


std::ostream& HInstanceOf::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(left()) << " " << NameOf(right()) << " "
            << NameOf(context());
}


Range* HValue::InferRange(Zone* zone) {
  Range* result;
  if (representation().IsSmi() || type().IsSmi()) {
    result = new(zone) Range(Smi::kMinValue, Smi::kMaxValue);
    result->set_can_be_minus_zero(false);
  } else {
    result = new(zone) Range();
    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32));
    // TODO(jkummerow): The range cannot be minus zero when the upper type
    // bound is Integer32.
  }
  return result;
}


Range* HChange::InferRange(Zone* zone) {
  Range* input_range = value()->range();
  if (from().IsInteger32() && !value()->CheckFlag(HInstruction::kUint32) &&
      (to().IsSmi() ||
       (to().IsTagged() &&
        input_range != NULL &&
        input_range->IsInSmiRange()))) {
    set_type(HType::Smi());
    ClearChangesFlag(kNewSpacePromotion);
  }
  if (to().IsSmiOrTagged() &&
      input_range != NULL &&
      input_range->IsInSmiRange() &&
      (!SmiValuesAre32Bits() ||
       !value()->CheckFlag(HValue::kUint32) ||
       input_range->upper() != kMaxInt)) {
    // The Range class can't express upper bounds in the (kMaxInt, kMaxUint32]
    // interval, so we treat kMaxInt as a sentinel for this entire interval.
    ClearFlag(kCanOverflow);
  }
  Range* result = (input_range != NULL)
      ? input_range->Copy(zone)
      : HValue::InferRange(zone);
  result->set_can_be_minus_zero(!to().IsSmiOrInteger32() ||
                                !(CheckFlag(kAllUsesTruncatingToInt32) ||
                                  CheckFlag(kAllUsesTruncatingToSmi)));
  if (to().IsSmi()) result->ClampToSmi();
  return result;
}


Range* HConstant::InferRange(Zone* zone) {
  if (HasInteger32Value()) {
    Range* result = new(zone) Range(int32_value_, int32_value_);
    result->set_can_be_minus_zero(false);
    return result;
  }
  return HValue::InferRange(zone);
}


SourcePosition HPhi::position() const { return block()->first()->position(); }

Range* HPhi::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    if (block()->IsLoopHeader()) {
      Range* range = r.IsSmi()
          ? new(zone) Range(Smi::kMinValue, Smi::kMaxValue)
          : new(zone) Range(kMinInt, kMaxInt);
      return range;
    } else {
      Range* range = OperandAt(0)->range()->Copy(zone);
      for (int i = 1; i < OperandCount(); ++i) {
        range->Union(OperandAt(i)->range());
      }
      return range;
    }
  } else {
    return HValue::InferRange(zone);
  }
}


Range* HAdd::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* res = a->Copy(zone);
    if (!res->AddAndCheckOverflow(r, b) ||
        (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
        (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
      ClearFlag(kCanOverflow);
    }
    res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
                               !CheckFlag(kAllUsesTruncatingToInt32) &&
                               a->CanBeMinusZero() && b->CanBeMinusZero());
    return res;
  } else {
    return HValue::InferRange(zone);
  }
}


Range* HSub::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* res = a->Copy(zone);
    if (!res->SubAndCheckOverflow(r, b) ||
        (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
        (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
      ClearFlag(kCanOverflow);
    }
    res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
                               !CheckFlag(kAllUsesTruncatingToInt32) &&
                               a->CanBeMinusZero() && b->CanBeZero());
    return res;
  } else {
    return HValue::InferRange(zone);
  }
}


Range* HMul::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* res = a->Copy(zone);
    if (!res->MulAndCheckOverflow(r, b) ||
        (((r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
         (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) &&
         MulMinusOne())) {
      // Truncated int multiplication is too precise and therefore not the
      // same as converting to Double and back.
      // Handle truncated integer multiplication by -1 special.
      ClearFlag(kCanOverflow);
    }
    res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
                               !CheckFlag(kAllUsesTruncatingToInt32) &&
                               ((a->CanBeZero() && b->CanBeNegative()) ||
                                (a->CanBeNegative() && b->CanBeZero())));
    return res;
  } else {
    return HValue::InferRange(zone);
  }
}


Range* HDiv::InferRange(Zone* zone) {
  if (representation().IsInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* result = new(zone) Range();
    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
                                  (a->CanBeMinusZero() ||
                                   (a->CanBeZero() && b->CanBeNegative())));
    if (!a->Includes(kMinInt) || !b->Includes(-1)) {
      ClearFlag(kCanOverflow);
    }

    if (!b->CanBeZero()) {
      ClearFlag(kCanBeDivByZero);
    }
    return result;
  } else {
    return HValue::InferRange(zone);
  }
}


Range* HMathFloorOfDiv::InferRange(Zone* zone) {
  if (representation().IsInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* result = new(zone) Range();
    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
                                  (a->CanBeMinusZero() ||
                                   (a->CanBeZero() && b->CanBeNegative())));
    if (!a->Includes(kMinInt)) {
      ClearFlag(kLeftCanBeMinInt);
    }

    if (!a->CanBeNegative()) {
      ClearFlag(HValue::kLeftCanBeNegative);
    }

    if (!a->CanBePositive()) {
      ClearFlag(HValue::kLeftCanBePositive);
    }

    if (!a->Includes(kMinInt) || !b->Includes(-1)) {
      ClearFlag(kCanOverflow);
    }

    if (!b->CanBeZero()) {
      ClearFlag(kCanBeDivByZero);
    }
    return result;
  } else {
    return HValue::InferRange(zone);
  }
}

// Returns the absolute value of its argument minus one, avoiding undefined
// behavior at kMinInt.
static int32_t AbsMinus1(int32_t a) { return a < 0 ? -(a + 1) : (a - 1); }

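// Illustrative values (added note): AbsMinus1(5) == 4, AbsMinus1(-5) == 4,
// and AbsMinus1(kMinInt) == kMaxInt without overflowing, since -(kMinInt + 1)
// is representable in int32_t.
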
Range* HMod::InferRange(Zone* zone) {
  if (representation().IsInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();

    // The magnitude of the modulus is bounded by the right operand.
    int32_t positive_bound = Max(AbsMinus1(b->lower()), AbsMinus1(b->upper()));

    // The result of the modulo operation has the sign of its left operand.
    bool left_can_be_negative = a->CanBeMinusZero() || a->CanBeNegative();
    Range* result = new(zone) Range(left_can_be_negative ? -positive_bound : 0,
                                    a->CanBePositive() ? positive_bound : 0);

    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
                                  left_can_be_negative);

    if (!a->CanBeNegative()) {
      ClearFlag(HValue::kLeftCanBeNegative);
    }

    if (!a->Includes(kMinInt) || !b->Includes(-1)) {
      ClearFlag(HValue::kCanOverflow);
    }

    if (!b->CanBeZero()) {
      ClearFlag(HValue::kCanBeDivByZero);
    }
    return result;
  } else {
    return HValue::InferRange(zone);
  }
}

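// Example (illustrative, added note): for a % b with b in [-4, 7] the
// magnitude bound is Max(AbsMinus1(-4), AbsMinus1(7)) == 6, so the result
// lies in [-6, 0], [0, 6] or [-6, 6] depending on the possible signs of a.
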
InductionVariableData* InductionVariableData::ExaminePhi(HPhi* phi) {
  if (phi->block()->loop_information() == NULL) return NULL;
  if (phi->OperandCount() != 2) return NULL;
  int32_t candidate_increment;

  candidate_increment = ComputeIncrement(phi, phi->OperandAt(0));
  if (candidate_increment != 0) {
    return new(phi->block()->graph()->zone())
        InductionVariableData(phi, phi->OperandAt(1), candidate_increment);
  }

  candidate_increment = ComputeIncrement(phi, phi->OperandAt(1));
  if (candidate_increment != 0) {
    return new(phi->block()->graph()->zone())
        InductionVariableData(phi, phi->OperandAt(0), candidate_increment);
  }

  return NULL;
}


/*
 * This function tries to match the following patterns (and all the relevant
 * variants related to |, & and + being commutative):
 * base | constant_or_mask
 * base & constant_and_mask
 * (base + constant_offset) & constant_and_mask
 * (base - constant_offset) & constant_and_mask
 */
void InductionVariableData::DecomposeBitwise(
    HValue* value,
    BitwiseDecompositionResult* result) {
  HValue* base = IgnoreOsrValue(value);
  result->base = value;

  if (!base->representation().IsInteger32()) return;

  if (base->IsBitwise()) {
    bool allow_offset = false;
    int32_t mask = 0;

    HBitwise* bitwise = HBitwise::cast(base);
    if (bitwise->right()->IsInteger32Constant()) {
      mask = bitwise->right()->GetInteger32Constant();
      base = bitwise->left();
    } else if (bitwise->left()->IsInteger32Constant()) {
      mask = bitwise->left()->GetInteger32Constant();
      base = bitwise->right();
    } else {
      return;
    }
    if (bitwise->op() == Token::BIT_AND) {
      result->and_mask = mask;
      allow_offset = true;
    } else if (bitwise->op() == Token::BIT_OR) {
      result->or_mask = mask;
    } else {
      return;
    }

    result->context = bitwise->context();

    if (allow_offset) {
      if (base->IsAdd()) {
        HAdd* add = HAdd::cast(base);
        if (add->right()->IsInteger32Constant()) {
          base = add->left();
        } else if (add->left()->IsInteger32Constant()) {
          base = add->right();
        }
      } else if (base->IsSub()) {
        HSub* sub = HSub::cast(base);
        if (sub->right()->IsInteger32Constant()) {
          base = sub->left();
        }
      }
    }

    result->base = base;
  }
}

void InductionVariableData::AddCheck(HBoundsCheck* check,
                                     int32_t upper_limit) {
  DCHECK(limit_validity() != NULL);
  if (limit_validity() != check->block() &&
      !limit_validity()->Dominates(check->block())) return;
  if (!phi()->block()->current_loop()->IsNestedInThisLoop(
      check->block()->current_loop())) return;

  ChecksRelatedToLength* length_checks = checks();
  while (length_checks != NULL) {
    if (length_checks->length() == check->length()) break;
    length_checks = length_checks->next();
  }
  if (length_checks == NULL) {
    length_checks = new(check->block()->zone())
        ChecksRelatedToLength(check->length(), checks());
    checks_ = length_checks;
  }

  length_checks->AddCheck(check, upper_limit);
}


void InductionVariableData::ChecksRelatedToLength::CloseCurrentBlock() {
  if (checks() != NULL) {
    InductionVariableCheck* c = checks();
    HBasicBlock* current_block = c->check()->block();
    while (c != NULL && c->check()->block() == current_block) {
      c->set_upper_limit(current_upper_limit_);
      c = c->next();
    }
  }
}


void InductionVariableData::ChecksRelatedToLength::UseNewIndexInCurrentBlock(
    Token::Value token,
    int32_t mask,
    HValue* index_base,
    HValue* context) {
  DCHECK(first_check_in_block() != NULL);
  HValue* previous_index = first_check_in_block()->index();
  DCHECK(context != NULL);

  Zone* zone = index_base->block()->graph()->zone();
  Isolate* isolate = index_base->block()->graph()->isolate();
  set_added_constant(HConstant::New(isolate, zone, context, mask));
  if (added_index() != NULL) {
    added_constant()->InsertBefore(added_index());
  } else {
    added_constant()->InsertBefore(first_check_in_block());
  }

  if (added_index() == NULL) {
    first_check_in_block()->ReplaceAllUsesWith(first_check_in_block()->index());
    HInstruction* new_index = HBitwise::New(isolate, zone, context, token,
                                            index_base, added_constant());
    DCHECK(new_index->IsBitwise());
    new_index->ClearAllSideEffects();
    new_index->AssumeRepresentation(Representation::Integer32());
    set_added_index(HBitwise::cast(new_index));
    added_index()->InsertBefore(first_check_in_block());
  }
  DCHECK(added_index()->op() == token);

  added_index()->SetOperandAt(1, index_base);
  added_index()->SetOperandAt(2, added_constant());
  first_check_in_block()->SetOperandAt(0, added_index());
  if (previous_index->HasNoUses()) {
    previous_index->DeleteAndReplaceWith(NULL);
  }
}

2149 void InductionVariableData::ChecksRelatedToLength::AddCheck(
2150 HBoundsCheck* check,
2151 int32_t upper_limit) {
2152 BitwiseDecompositionResult decomposition;
2153 InductionVariableData::DecomposeBitwise(check->index(), &decomposition);
2155 if (first_check_in_block() == NULL ||
2156 first_check_in_block()->block() != check->block()) {
2157 CloseCurrentBlock();
2159 first_check_in_block_ = check;
2160 set_added_index(NULL);
2161 set_added_constant(NULL);
2162 current_and_mask_in_block_ = decomposition.and_mask;
2163 current_or_mask_in_block_ = decomposition.or_mask;
2164 current_upper_limit_ = upper_limit;
2166 InductionVariableCheck* new_check = new(check->block()->graph()->zone())
2167 InductionVariableCheck(check, checks_, upper_limit);
2168 checks_ = new_check;
2172 if (upper_limit > current_upper_limit()) {
2173 current_upper_limit_ = upper_limit;
2176 if (decomposition.and_mask != 0 &&
2177 current_or_mask_in_block() == 0) {
2178 if (current_and_mask_in_block() == 0 ||
2179 decomposition.and_mask > current_and_mask_in_block()) {
2180 UseNewIndexInCurrentBlock(Token::BIT_AND,
2181 decomposition.and_mask,
2183 decomposition.context);
2184 current_and_mask_in_block_ = decomposition.and_mask;
2186 check->set_skip_check();
2188 if (current_and_mask_in_block() == 0) {
2189 if (decomposition.or_mask > current_or_mask_in_block()) {
2190 UseNewIndexInCurrentBlock(Token::BIT_OR,
2191 decomposition.or_mask,
2192 decomposition.base,
2193 decomposition.context);
2194 current_or_mask_in_block_ = decomposition.or_mask;
2196 check->set_skip_check();
2199 if (!check->skip_check()) {
2200 InductionVariableCheck* new_check = new(check->block()->graph()->zone())
2201 InductionVariableCheck(check, checks_, upper_limit);
2202 checks_ = new_check;
2208 * This method detects if phi is an induction variable, with phi_operand as
2209 * its "incremented" value (the other operand would be the "base" value).
2211 * It checks if phi_operand has the form "phi + constant".
2212 * If yes, the constant is the increment that the induction variable gets at
2213 * every loop iteration.
2214 * Otherwise it returns 0.
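 * Example (illustrative, not from the original source): in
 * "for (i = 0; i < n; i++)" the loop phi for "i" has "i + 1" (an HAdd of the
 * phi and the constant 1) as its incremented operand, so the computed
 * increment is 1; an "i -= 2" update would yield -2.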
2216 int32_t InductionVariableData::ComputeIncrement(HPhi* phi,
2217 HValue* phi_operand) {
2218 if (!phi_operand->representation().IsSmiOrInteger32()) return 0;
2220 if (phi_operand->IsAdd()) {
2221 HAdd* operation = HAdd::cast(phi_operand);
2222 if (operation->left() == phi &&
2223 operation->right()->IsInteger32Constant()) {
2224 return operation->right()->GetInteger32Constant();
2225 } else if (operation->right() == phi &&
2226 operation->left()->IsInteger32Constant()) {
2227 return operation->left()->GetInteger32Constant();
2229 } else if (phi_operand->IsSub()) {
2230 HSub* operation = HSub::cast(phi_operand);
2231 if (operation->left() == phi &&
2232 operation->right()->IsInteger32Constant()) {
2233 return -operation->right()->GetInteger32Constant();
2242 * Swaps the information in "update" with the one contained in "this".
2243 * The swapping is important because this method is used while doing a
2244 * dominator tree traversal, and "update" will retain the old data that
2245 * will be restored while backtracking.
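 * For example (illustrative): descending into a dominated region may install
 * a tighter additional limit; the displaced limit is parked in "update" and
 * swapped back into "this" when the traversal backtracks out of that region.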
2247 void InductionVariableData::UpdateAdditionalLimit(
2248 InductionVariableLimitUpdate* update) {
2249 DCHECK(update->updated_variable == this);
2250 if (update->limit_is_upper) {
2251 swap(&additional_upper_limit_, &update->limit);
2252 swap(&additional_upper_limit_is_included_, &update->limit_is_included);
2254 swap(&additional_lower_limit_, &update->limit);
2255 swap(&additional_lower_limit_is_included_, &update->limit_is_included);
2260 int32_t InductionVariableData::ComputeUpperLimit(int32_t and_mask,
2261 int32_t or_mask) {
2262 // Should be Smi::kMaxValue, but it must fit in 32 bits; a lower value is safe anyway.
2263 const int32_t MAX_LIMIT = 1 << 30;
2265 int32_t result = MAX_LIMIT;
2267 if (limit() != NULL &&
2268 limit()->IsInteger32Constant()) {
2269 int32_t limit_value = limit()->GetInteger32Constant();
2270 if (!limit_included()) {
2271 --limit_value;
2272 }
2273 if (limit_value < result) result = limit_value;
2276 if (additional_upper_limit() != NULL &&
2277 additional_upper_limit()->IsInteger32Constant()) {
2278 int32_t limit_value = additional_upper_limit()->GetInteger32Constant();
2279 if (!additional_upper_limit_is_included()) {
2280 --limit_value;
2281 }
2282 if (limit_value < result) result = limit_value;
2285 if (and_mask > 0 && and_mask < MAX_LIMIT) {
2286 if (and_mask < result) result = and_mask;
2287 return result;
2288 }
2290 // Add the effect of the or_mask.
2291 result |= or_mask;
2293 return result >= MAX_LIMIT ? kNoLimit : result;
2297 HValue* InductionVariableData::IgnoreOsrValue(HValue* v) {
2298 if (!v->IsPhi()) return v;
2299 HPhi* phi = HPhi::cast(v);
2300 if (phi->OperandCount() != 2) return v;
2301 if (phi->OperandAt(0)->block()->is_osr_entry()) {
2302 return phi->OperandAt(1);
2303 } else if (phi->OperandAt(1)->block()->is_osr_entry()) {
2304 return phi->OperandAt(0);
2311 InductionVariableData* InductionVariableData::GetInductionVariableData(
2312 HValue* v) {
2313 v = IgnoreOsrValue(v);
2314 if (v->IsPhi()) {
2315 return HPhi::cast(v)->induction_variable_data();
2316 }
2317 return NULL;
2322 * Check if a conditional branch to "current_branch" with token "token" is
2323 * the branch that keeps the induction loop running (and, conversely, will
2324 * terminate it if the "other_branch" is taken).
2326 * Three conditions must be met:
2327 * - "current_branch" must be in the induction loop.
2328 * - "other_branch" must be out of the induction loop.
2329 * - "token" and the induction increment must be "compatible": the token should
2330 * be a condition that keeps the execution inside the loop until the limit is
2331 * reached.
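 * For example (illustrative), in "for (i = 0; i < n; i++)" the back branch
 * guarded by "i < n" (Token::LT with increment +1) keeps the loop running,
 * while the other successor leaves the loop. An inequality guard ("i != n")
 * only qualifies when the step is exactly 1 or -1, since a larger step could
 * jump over the limit entirely.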
2333 bool InductionVariableData::CheckIfBranchIsLoopGuard(
2335 HBasicBlock* current_branch,
2336 HBasicBlock* other_branch) {
2337 if (!phi()->block()->current_loop()->IsNestedInThisLoop(
2338 current_branch->current_loop())) {
2342 if (phi()->block()->current_loop()->IsNestedInThisLoop(
2343 other_branch->current_loop())) {
2347 if (increment() > 0 && (token == Token::LT || token == Token::LTE)) {
2350 if (increment() < 0 && (token == Token::GT || token == Token::GTE)) {
2353 if (Token::IsInequalityOp(token) && (increment() == 1 || increment() == -1)) {
2361 void InductionVariableData::ComputeLimitFromPredecessorBlock(
2363 LimitFromPredecessorBlock* result) {
2364 if (block->predecessors()->length() != 1) return;
2365 HBasicBlock* predecessor = block->predecessors()->at(0);
2366 HInstruction* end = predecessor->last();
2368 if (!end->IsCompareNumericAndBranch()) return;
2369 HCompareNumericAndBranch* branch = HCompareNumericAndBranch::cast(end);
2371 Token::Value token = branch->token();
2372 if (!Token::IsArithmeticCompareOp(token)) return;
2374 HBasicBlock* other_target;
2375 if (block == branch->SuccessorAt(0)) {
2376 other_target = branch->SuccessorAt(1);
2378 other_target = branch->SuccessorAt(0);
2379 token = Token::NegateCompareOp(token);
2380 DCHECK(block == branch->SuccessorAt(1));
2383 InductionVariableData* data;
2385 data = GetInductionVariableData(branch->left());
2386 HValue* limit = branch->right();
2388 data = GetInductionVariableData(branch->right());
2389 token = Token::ReverseCompareOp(token);
2390 limit = branch->left();
2394 result->variable = data;
2395 result->token = token;
2396 result->limit = limit;
2397 result->other_target = other_target;
2403 * Compute the limit that is imposed on an induction variable when entering
2404 * the loop through "block".
2405 * If the limit is the "proper" induction limit (the one that makes the loop
2406 * terminate when the induction variable reaches it) it is stored directly in
2407 * the induction variable data.
2408 * Otherwise the limit is written in "additional_limit" and the method
2409 * returns true.
2411 bool InductionVariableData::ComputeInductionVariableLimit(
2413 InductionVariableLimitUpdate* additional_limit) {
2414 LimitFromPredecessorBlock limit;
2415 ComputeLimitFromPredecessorBlock(block, &limit);
2416 if (!limit.LimitIsValid()) return false;
2418 if (limit.variable->CheckIfBranchIsLoopGuard(limit.token,
2420 limit.other_target)) {
2421 limit.variable->limit_ = limit.limit;
2422 limit.variable->limit_included_ = limit.LimitIsIncluded();
2423 limit.variable->limit_validity_ = block;
2424 limit.variable->induction_exit_block_ = block->predecessors()->at(0);
2425 limit.variable->induction_exit_target_ = limit.other_target;
2428 additional_limit->updated_variable = limit.variable;
2429 additional_limit->limit = limit.limit;
2430 additional_limit->limit_is_upper = limit.LimitIsUpper();
2431 additional_limit->limit_is_included = limit.LimitIsIncluded();
2437 Range* HMathMinMax::InferRange(Zone* zone) {
2438 if (representation().IsSmiOrInteger32()) {
2439 Range* a = left()->range();
2440 Range* b = right()->range();
2441 Range* res = a->Copy(zone);
2442 if (operation_ == kMathMax) {
2443 res->CombinedMax(b);
2444 } else {
2445 DCHECK(operation_ == kMathMin);
2446 res->CombinedMin(b);
2447 }
2448 return res;
2449 }
2450 return HValue::InferRange(zone);
2455 void HPushArguments::AddInput(HValue* value) {
2456 inputs_.Add(NULL, value->block()->zone());
2457 SetOperandAt(OperandCount() - 1, value);
2461 std::ostream& HPhi::PrintTo(std::ostream& os) const { // NOLINT
2463 for (int i = 0; i < OperandCount(); ++i) {
2464 os << " " << NameOf(OperandAt(i)) << " ";
2466 return os << " uses:" << UseCount() << "_"
2467 << smi_non_phi_uses() + smi_indirect_uses() << "s_"
2468 << int32_non_phi_uses() + int32_indirect_uses() << "i_"
2469 << double_non_phi_uses() + double_indirect_uses() << "d_"
2470 << tagged_non_phi_uses() + tagged_indirect_uses() << "t"
2471 << TypeOf(this) << "]";
2475 void HPhi::AddInput(HValue* value) {
2476 inputs_.Add(NULL, value->block()->zone());
2477 SetOperandAt(OperandCount() - 1, value);
2478 // Mark phis that may have 'arguments' directly or indirectly as an operand.
2479 if (!CheckFlag(kIsArguments) && value->CheckFlag(kIsArguments)) {
2480 SetFlag(kIsArguments);
2485 bool HPhi::HasRealUses() {
2486 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
2487 if (!it.value()->IsPhi()) return true;
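// Illustrative note (not from the original source): a phi is redundant when
// every operand other than the phi itself is one and the same value, e.g.
// p = Phi(x, x, p) can be replaced by x. The loops below find the first
// non-phi candidate and then verify all remaining operands agree with it.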
2493 HValue* HPhi::GetRedundantReplacement() {
2494 HValue* candidate = NULL;
2495 int count = OperandCount();
2497 while (position < count && candidate == NULL) {
2498 HValue* current = OperandAt(position++);
2499 if (current != this) candidate = current;
2501 while (position < count) {
2502 HValue* current = OperandAt(position++);
2503 if (current != this && current != candidate) return NULL;
2505 DCHECK(candidate != this);
2510 void HPhi::DeleteFromGraph() {
2511 DCHECK(block() != NULL);
2512 block()->RemovePhi(this);
2513 DCHECK(block() == NULL);
2517 void HPhi::InitRealUses(int phi_id) {
2518 // Initialize real uses.
2520 // Compute a conservative approximation of truncating uses before inferring
2521 // representations. The proper, exact computation will be done later, when
2522 // inserting representation changes.
2523 SetFlag(kTruncatingToSmi);
2524 SetFlag(kTruncatingToInt32);
2525 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
2526 HValue* value = it.value();
2527 if (!value->IsPhi()) {
2528 Representation rep = value->observed_input_representation(it.index());
2529 non_phi_uses_[rep.kind()] += 1;
2530 if (FLAG_trace_representation) {
2531 PrintF("#%d Phi is used by real #%d %s as %s\n",
2532 id(), value->id(), value->Mnemonic(), rep.Mnemonic());
2534 if (!value->IsSimulate()) {
2535 if (!value->CheckFlag(kTruncatingToSmi)) {
2536 ClearFlag(kTruncatingToSmi);
2538 if (!value->CheckFlag(kTruncatingToInt32)) {
2539 ClearFlag(kTruncatingToInt32);
2547 void HPhi::AddNonPhiUsesFrom(HPhi* other) {
2548 if (FLAG_trace_representation) {
2549 PrintF("adding to #%d Phi uses of #%d Phi: s%d i%d d%d t%d\n",
2551 other->non_phi_uses_[Representation::kSmi],
2552 other->non_phi_uses_[Representation::kInteger32],
2553 other->non_phi_uses_[Representation::kDouble],
2554 other->non_phi_uses_[Representation::kTagged]);
2557 for (int i = 0; i < Representation::kNumRepresentations; i++) {
2558 indirect_uses_[i] += other->non_phi_uses_[i];
2563 void HPhi::AddIndirectUsesTo(int* dest) {
2564 for (int i = 0; i < Representation::kNumRepresentations; i++) {
2565 dest[i] += indirect_uses_[i];
2570 void HSimulate::MergeWith(ZoneList<HSimulate*>* list) {
2571 while (!list->is_empty()) {
2572 HSimulate* from = list->RemoveLast();
2573 ZoneList<HValue*>* from_values = &from->values_;
2574 for (int i = 0; i < from_values->length(); ++i) {
2575 if (from->HasAssignedIndexAt(i)) {
2576 int index = from->GetAssignedIndexAt(i);
2577 if (HasValueForIndex(index)) continue;
2578 AddAssignedValue(index, from_values->at(i));
2579 } else {
2580 if (pop_count_ > 0) {
2581 pop_count_--;
2582 } else {
2583 AddPushedValue(from_values->at(i));
2587 pop_count_ += from->pop_count_;
2588 from->DeleteAndReplaceWith(NULL);
2593 std::ostream& HSimulate::PrintDataTo(std::ostream& os) const { // NOLINT
2594 os << "id=" << ast_id().ToInt();
2595 if (pop_count_ > 0) os << " pop " << pop_count_;
2596 if (values_.length() > 0) {
2597 if (pop_count_ > 0) os << " /";
2598 for (int i = values_.length() - 1; i >= 0; --i) {
2599 if (HasAssignedIndexAt(i)) {
2600 os << " var[" << GetAssignedIndexAt(i) << "] = ";
2604 os << NameOf(values_[i]);
2605 if (i > 0) os << ",";
2612 void HSimulate::ReplayEnvironment(HEnvironment* env) {
2613 if (is_done_with_replay()) return;
2614 DCHECK(env != NULL);
2615 env->set_ast_id(ast_id());
2616 env->Drop(pop_count());
2617 for (int i = values()->length() - 1; i >= 0; --i) {
2618 HValue* value = values()->at(i);
2619 if (HasAssignedIndexAt(i)) {
2620 env->Bind(GetAssignedIndexAt(i), value);
2621 } else {
2622 env->Push(value);
2623 }
2624 }
2625 set_done_with_replay();
2629 static void ReplayEnvironmentNested(const ZoneList<HValue*>* values,
2630 HCapturedObject* other) {
2631 for (int i = 0; i < values->length(); ++i) {
2632 HValue* value = values->at(i);
2633 if (value->IsCapturedObject()) {
2634 if (HCapturedObject::cast(value)->capture_id() == other->capture_id()) {
2635 values->at(i) = other;
2636 } else {
2637 ReplayEnvironmentNested(HCapturedObject::cast(value)->values(), other);
2644 // Replay captured objects by replacing all captured objects with the
2645 // same capture id in the current and all outer environments.
2646 void HCapturedObject::ReplayEnvironment(HEnvironment* env) {
2647 DCHECK(env != NULL);
2648 while (env != NULL) {
2649 ReplayEnvironmentNested(env->values(), this);
2650 env = env->outer();
2651 }
2655 std::ostream& HCapturedObject::PrintDataTo(std::ostream& os) const { // NOLINT
2656 os << "#" << capture_id() << " ";
2657 return HDematerializedObject::PrintDataTo(os);
2661 void HEnterInlined::RegisterReturnTarget(HBasicBlock* return_target,
2663 DCHECK(return_target->IsInlineReturnTarget());
2664 return_targets_.Add(return_target, zone);
2668 std::ostream& HEnterInlined::PrintDataTo(std::ostream& os) const { // NOLINT
2669 return os << function()->debug_name()->ToCString().get();
2673 static bool IsInteger32(double value) {
2674 if (value >= std::numeric_limits<int32_t>::min() &&
2675 value <= std::numeric_limits<int32_t>::max()) {
2676 double roundtrip_value = static_cast<double>(static_cast<int32_t>(value));
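  // Note (illustrative): comparing bit patterns also rejects -0.0, because
  // static_cast<int32_t>(-0.0) is 0 and the round-tripped +0.0 differs from
  // -0.0 in its sign bit.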
2677 return bit_cast<int64_t>(roundtrip_value) == bit_cast<int64_t>(value);
2683 HConstant::HConstant(Special special)
2684 : HTemplateInstruction<0>(HType::TaggedNumber()),
2685 object_(Handle<Object>::null()),
2686 object_map_(Handle<Map>::null()),
2687 bit_field_(HasDoubleValueField::encode(true) |
2688 InstanceTypeField::encode(kUnknownInstanceType)),
2690 DCHECK_EQ(kHoleNaN, special);
2691 std::memcpy(&double_value_, &kHoleNanInt64, sizeof(double_value_));
2692 Initialize(Representation::Double());
2696 HConstant::HConstant(Handle<Object> object, Representation r)
2697 : HTemplateInstruction<0>(HType::FromValue(object)),
2698 object_(Unique<Object>::CreateUninitialized(object)),
2699 object_map_(Handle<Map>::null()),
2700 bit_field_(HasStableMapValueField::encode(false) |
2701 HasSmiValueField::encode(false) |
2702 HasInt32ValueField::encode(false) |
2703 HasDoubleValueField::encode(false) |
2704 HasExternalReferenceValueField::encode(false) |
2705 IsNotInNewSpaceField::encode(true) |
2706 BooleanValueField::encode(object->BooleanValue()) |
2707 IsUndetectableField::encode(false) |
2708 InstanceTypeField::encode(kUnknownInstanceType)) {
2709 if (object->IsHeapObject()) {
2710 Handle<HeapObject> heap_object = Handle<HeapObject>::cast(object);
2711 Isolate* isolate = heap_object->GetIsolate();
2712 Handle<Map> map(heap_object->map(), isolate);
2713 bit_field_ = IsNotInNewSpaceField::update(
2714 bit_field_, !isolate->heap()->InNewSpace(*object));
2715 bit_field_ = InstanceTypeField::update(bit_field_, map->instance_type());
2717 IsUndetectableField::update(bit_field_, map->is_undetectable());
2718 if (map->is_stable()) object_map_ = Unique<Map>::CreateImmovable(map);
2719 bit_field_ = HasStableMapValueField::update(
2721 HasMapValue() && Handle<Map>::cast(heap_object)->is_stable());
2723 if (object->IsNumber()) {
2724 double n = object->Number();
2725 bool has_int32_value = IsInteger32(n);
2726 bit_field_ = HasInt32ValueField::update(bit_field_, has_int32_value);
2727 int32_value_ = DoubleToInt32(n);
2728 bit_field_ = HasSmiValueField::update(
2729 bit_field_, has_int32_value && Smi::IsValid(int32_value_));
2731 bit_field_ = HasDoubleValueField::update(bit_field_, true);
2732 // TODO(titzer): if this heap number is new space, tenure a new one.
2739 HConstant::HConstant(Unique<Object> object, Unique<Map> object_map,
2740 bool has_stable_map_value, Representation r, HType type,
2741 bool is_not_in_new_space, bool boolean_value,
2742 bool is_undetectable, InstanceType instance_type)
2743 : HTemplateInstruction<0>(type),
2745 object_map_(object_map),
2746 bit_field_(HasStableMapValueField::encode(has_stable_map_value) |
2747 HasSmiValueField::encode(false) |
2748 HasInt32ValueField::encode(false) |
2749 HasDoubleValueField::encode(false) |
2750 HasExternalReferenceValueField::encode(false) |
2751 IsNotInNewSpaceField::encode(is_not_in_new_space) |
2752 BooleanValueField::encode(boolean_value) |
2753 IsUndetectableField::encode(is_undetectable) |
2754 InstanceTypeField::encode(instance_type)) {
2755 DCHECK(!object.handle().is_null());
2756 DCHECK(!type.IsTaggedNumber() || type.IsNone());
2761 HConstant::HConstant(int32_t integer_value, Representation r,
2762 bool is_not_in_new_space, Unique<Object> object)
2764 object_map_(Handle<Map>::null()),
2765 bit_field_(HasStableMapValueField::encode(false) |
2766 HasSmiValueField::encode(Smi::IsValid(integer_value)) |
2767 HasInt32ValueField::encode(true) |
2768 HasDoubleValueField::encode(true) |
2769 HasExternalReferenceValueField::encode(false) |
2770 IsNotInNewSpaceField::encode(is_not_in_new_space) |
2771 BooleanValueField::encode(integer_value != 0) |
2772 IsUndetectableField::encode(false) |
2773 InstanceTypeField::encode(kUnknownInstanceType)),
2774 int32_value_(integer_value),
2775 double_value_(FastI2D(integer_value)) {
2776 // It's possible to create a constant with a value in Smi-range but stored
2777 // in a (pre-existing) HeapNumber. See crbug.com/349878.
2778 bool could_be_heapobject = r.IsTagged() && !object.handle().is_null();
2779 bool is_smi = HasSmiValue() && !could_be_heapobject;
2780 set_type(is_smi ? HType::Smi() : HType::TaggedNumber());
2785 HConstant::HConstant(double double_value, Representation r,
2786 bool is_not_in_new_space, Unique<Object> object)
2788 object_map_(Handle<Map>::null()),
2789 bit_field_(HasStableMapValueField::encode(false) |
2790 HasInt32ValueField::encode(IsInteger32(double_value)) |
2791 HasDoubleValueField::encode(true) |
2792 HasExternalReferenceValueField::encode(false) |
2793 IsNotInNewSpaceField::encode(is_not_in_new_space) |
2794 BooleanValueField::encode(double_value != 0 &&
2795 !std::isnan(double_value)) |
2796 IsUndetectableField::encode(false) |
2797 InstanceTypeField::encode(kUnknownInstanceType)),
2798 int32_value_(DoubleToInt32(double_value)),
2799 double_value_(double_value) {
2800 bit_field_ = HasSmiValueField::update(
2801 bit_field_, HasInteger32Value() && Smi::IsValid(int32_value_));
2802 // It's possible to create a constant with a value in Smi-range but stored
2803 // in a (pre-existing) HeapNumber. See crbug.com/349878.
2804 bool could_be_heapobject = r.IsTagged() && !object.handle().is_null();
2805 bool is_smi = HasSmiValue() && !could_be_heapobject;
2806 set_type(is_smi ? HType::Smi() : HType::TaggedNumber());
2811 HConstant::HConstant(ExternalReference reference)
2812 : HTemplateInstruction<0>(HType::Any()),
2813 object_(Unique<Object>(Handle<Object>::null())),
2814 object_map_(Handle<Map>::null()),
2816 HasStableMapValueField::encode(false) |
2817 HasSmiValueField::encode(false) | HasInt32ValueField::encode(false) |
2818 HasDoubleValueField::encode(false) |
2819 HasExternalReferenceValueField::encode(true) |
2820 IsNotInNewSpaceField::encode(true) | BooleanValueField::encode(true) |
2821 IsUndetectableField::encode(false) |
2822 InstanceTypeField::encode(kUnknownInstanceType)),
2823 external_reference_value_(reference) {
2824 Initialize(Representation::External());
2828 void HConstant::Initialize(Representation r) {
2830 if (HasSmiValue() && SmiValuesAre31Bits()) {
2831 r = Representation::Smi();
2832 } else if (HasInteger32Value()) {
2833 r = Representation::Integer32();
2834 } else if (HasDoubleValue()) {
2835 r = Representation::Double();
2836 } else if (HasExternalReferenceValue()) {
2837 r = Representation::External();
2839 Handle<Object> object = object_.handle();
2840 if (object->IsJSObject()) {
2841 // Try to eagerly migrate JSObjects that have deprecated maps.
2842 Handle<JSObject> js_object = Handle<JSObject>::cast(object);
2843 if (js_object->map()->is_deprecated()) {
2844 JSObject::TryMigrateInstance(js_object);
2847 r = Representation::Tagged();
2851 // If we have an existing handle, zap it, because it might be a heap
2852 // number which we must not re-use when copying this HConstant to
2853 // Tagged representation later, because having Smi representation now
2854 // could cause heap object checks not to get emitted.
2855 object_ = Unique<Object>(Handle<Object>::null());
2857 if (r.IsSmiOrInteger32() && object_.handle().is_null()) {
2858 // If it's not a heap object, it can't be in new space.
2859 bit_field_ = IsNotInNewSpaceField::update(bit_field_, true);
2861 set_representation(r);
2866 bool HConstant::ImmortalImmovable() const {
2867 if (HasInteger32Value()) {
2870 if (HasDoubleValue()) {
2871 if (IsSpecialDouble()) {
2876 if (HasExternalReferenceValue()) {
2880 DCHECK(!object_.handle().is_null());
2881 Heap* heap = isolate()->heap();
2882 DCHECK(!object_.IsKnownGlobal(heap->minus_zero_value()));
2883 DCHECK(!object_.IsKnownGlobal(heap->nan_value()));
2885 #define IMMORTAL_IMMOVABLE_ROOT(name) \
2886 object_.IsKnownGlobal(heap->root(Heap::k##name##RootIndex)) ||
2887 IMMORTAL_IMMOVABLE_ROOT_LIST(IMMORTAL_IMMOVABLE_ROOT)
2888 #undef IMMORTAL_IMMOVABLE_ROOT
2889 #define INTERNALIZED_STRING(name, value) \
2890 object_.IsKnownGlobal(heap->name()) ||
2891 INTERNALIZED_STRING_LIST(INTERNALIZED_STRING)
2892 #undef INTERNALIZED_STRING
2893 #define STRING_TYPE(NAME, size, name, Name) \
2894 object_.IsKnownGlobal(heap->name##_map()) ||
2895 STRING_TYPE_LIST(STRING_TYPE)
2901 bool HConstant::EmitAtUses() {
2903 if (block()->graph()->has_osr() &&
2904 block()->graph()->IsStandardConstant(this)) {
2905 // TODO(titzer): this seems like a hack that should be fixed by custom OSR.
2908 if (HasNoUses()) return true;
2909 if (IsCell()) return false;
2910 if (representation().IsDouble()) return false;
2911 if (representation().IsExternal()) return false;
2916 HConstant* HConstant::CopyToRepresentation(Representation r, Zone* zone) const {
2917 if (r.IsSmi() && !HasSmiValue()) return NULL;
2918 if (r.IsInteger32() && !HasInteger32Value()) return NULL;
2919 if (r.IsDouble() && !HasDoubleValue()) return NULL;
2920 if (r.IsExternal() && !HasExternalReferenceValue()) return NULL;
2921 if (HasInteger32Value()) {
2922 return new (zone) HConstant(int32_value_, r, NotInNewSpace(), object_);
2924 if (HasDoubleValue()) {
2925 return new (zone) HConstant(double_value_, r, NotInNewSpace(), object_);
2927 if (HasExternalReferenceValue()) {
2928 return new(zone) HConstant(external_reference_value_);
2930 DCHECK(!object_.handle().is_null());
2931 return new (zone) HConstant(object_, object_map_, HasStableMapValue(), r,
2932 type_, NotInNewSpace(), BooleanValue(),
2933 IsUndetectable(), GetInstanceType());
2937 Maybe<HConstant*> HConstant::CopyToTruncatedInt32(Zone* zone) {
2938 HConstant* res = NULL;
2939 if (HasInteger32Value()) {
2940 res = new (zone) HConstant(int32_value_, Representation::Integer32(),
2941 NotInNewSpace(), object_);
2942 } else if (HasDoubleValue()) {
2944 HConstant(DoubleToInt32(double_value_), Representation::Integer32(),
2945 NotInNewSpace(), object_);
2947 return res != NULL ? Just(res) : Nothing<HConstant*>();
2951 Maybe<HConstant*> HConstant::CopyToTruncatedNumber(Isolate* isolate,
2953 HConstant* res = NULL;
2954 Handle<Object> handle = this->handle(isolate);
2955 if (handle->IsBoolean()) {
2956 res = handle->BooleanValue() ?
2957 new(zone) HConstant(1) : new(zone) HConstant(0);
2958 } else if (handle->IsUndefined()) {
2959 res = new (zone) HConstant(std::numeric_limits<double>::quiet_NaN());
2960 } else if (handle->IsNull()) {
2961 res = new(zone) HConstant(0);
2963 return res != NULL ? Just(res) : Nothing<HConstant*>();
2967 std::ostream& HConstant::PrintDataTo(std::ostream& os) const { // NOLINT
2968 if (HasInteger32Value()) {
2969 os << int32_value_ << " ";
2970 } else if (HasDoubleValue()) {
2971 os << double_value_ << " ";
2972 } else if (HasExternalReferenceValue()) {
2973 os << reinterpret_cast<void*>(external_reference_value_.address()) << " ";
2975 // The handle() method silently and lazily mutates the object.
2976 Handle<Object> h = const_cast<HConstant*>(this)->handle(Isolate::Current());
2977 os << Brief(*h) << " ";
2978 if (HasStableMapValue()) os << "[stable-map] ";
2979 if (HasObjectMap()) os << "[map " << *ObjectMap().handle() << "] ";
2981 if (!NotInNewSpace()) os << "[new space] ";
2986 std::ostream& HBinaryOperation::PrintDataTo(std::ostream& os) const { // NOLINT
2987 os << NameOf(left()) << " " << NameOf(right());
2988 if (CheckFlag(kCanOverflow)) os << " !";
2989 if (CheckFlag(kBailoutOnMinusZero)) os << " -0?";
2994 void HBinaryOperation::InferRepresentation(HInferRepresentationPhase* h_infer) {
2995 DCHECK(CheckFlag(kFlexibleRepresentation));
2996 Representation new_rep = RepresentationFromInputs();
2997 UpdateRepresentation(new_rep, h_infer, "inputs");
2999 if (representation().IsSmi() && HasNonSmiUse()) {
3000 UpdateRepresentation(
3001 Representation::Integer32(), h_infer, "use requirements");
3004 if (observed_output_representation_.IsNone()) {
3005 new_rep = RepresentationFromUses();
3006 UpdateRepresentation(new_rep, h_infer, "uses");
3008 new_rep = RepresentationFromOutput();
3009 UpdateRepresentation(new_rep, h_infer, "output");
3014 Representation HBinaryOperation::RepresentationFromInputs() {
3015 // Determine the worst case of observed input representations and
3016 // the currently assumed output representation.
3017 Representation rep = representation();
3018 for (int i = 1; i <= 2; ++i) {
3019 rep = rep.generalize(observed_input_representation(i));
3021 // If any of the actual input representations is more general than what we
3022 // have so far but not Tagged, use that representation instead.
3023 Representation left_rep = left()->representation();
3024 Representation right_rep = right()->representation();
3025 if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
3026 if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);
3032 bool HBinaryOperation::IgnoreObservedOutputRepresentation(
3033 Representation current_rep) {
3034 return ((current_rep.IsInteger32() && CheckUsesForFlag(kTruncatingToInt32)) ||
3035 (current_rep.IsSmi() && CheckUsesForFlag(kTruncatingToSmi))) &&
3036 // Mul in Integer32 mode would be too precise.
3037 (!this->IsMul() || HMul::cast(this)->MulMinusOne());
3041 Representation HBinaryOperation::RepresentationFromOutput() {
3042 Representation rep = representation();
3043 // Consider observed output representation, but ignore it if it's Double,
3044 // this instruction is not a division, and all its uses are truncating
3045 // to Integer32.
3046 if (observed_output_representation_.is_more_general_than(rep) &&
3047 !IgnoreObservedOutputRepresentation(rep)) {
3048 return observed_output_representation_;
3050 return Representation::None();
3054 void HBinaryOperation::AssumeRepresentation(Representation r) {
3055 set_observed_input_representation(1, r);
3056 set_observed_input_representation(2, r);
3057 HValue::AssumeRepresentation(r);
3061 void HMathMinMax::InferRepresentation(HInferRepresentationPhase* h_infer) {
3062 DCHECK(CheckFlag(kFlexibleRepresentation));
3063 Representation new_rep = RepresentationFromInputs();
3064 UpdateRepresentation(new_rep, h_infer, "inputs");
3065 // Do not care about uses.
3069 Range* HBitwise::InferRange(Zone* zone) {
3070 if (op() == Token::BIT_XOR) {
3071 if (left()->HasRange() && right()->HasRange()) {
3072 // The maximum value has the high bit, and all bits below, set:
3073 // 0111...1111.
3074 // If the range can be negative, the minimum int is a negative number with
3075 // the high bit, and all bits below, unset:
3076 // 1000...0000.
3077 // If it cannot be negative, conservatively choose 0 as minimum int.
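      // Worked example (illustrative): for the ranges [0, 5] ^ [0, 3] the
      // most significant set bit of 5 | 3 == 7 is bit 2, so the limit is
      // 1 << 3 == 8 and the inferred range is [0, 7].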
3078 int64_t left_upper = left()->range()->upper();
3079 int64_t left_lower = left()->range()->lower();
3080 int64_t right_upper = right()->range()->upper();
3081 int64_t right_lower = right()->range()->lower();
3083 if (left_upper < 0) left_upper = ~left_upper;
3084 if (left_lower < 0) left_lower = ~left_lower;
3085 if (right_upper < 0) right_upper = ~right_upper;
3086 if (right_lower < 0) right_lower = ~right_lower;
3088 int high = MostSignificantBit(
3089 static_cast<uint32_t>(
3090 left_upper | left_lower | right_upper | right_lower));
3092 int64_t limit = 1;
3093 limit <<= high + 1;
3094 int32_t min = (left()->range()->CanBeNegative() ||
3095 right()->range()->CanBeNegative())
3096 ? static_cast<int32_t>(-limit) : 0;
3097 return new(zone) Range(min, static_cast<int32_t>(limit - 1));
3099 Range* result = HValue::InferRange(zone);
3100 result->set_can_be_minus_zero(false);
3103 const int32_t kDefaultMask = static_cast<int32_t>(0xffffffff);
3104 int32_t left_mask = (left()->range() != NULL)
3105 ? left()->range()->Mask()
3107 int32_t right_mask = (right()->range() != NULL)
3108 ? right()->range()->Mask()
3110 int32_t result_mask = (op() == Token::BIT_AND)
3111 ? left_mask & right_mask
3112 : left_mask | right_mask;
3113 if (result_mask >= 0) return new(zone) Range(0, result_mask);
3115 Range* result = HValue::InferRange(zone);
3116 result->set_can_be_minus_zero(false);
3121 Range* HSar::InferRange(Zone* zone) {
3122 if (right()->IsConstant()) {
3123 HConstant* c = HConstant::cast(right());
3124 if (c->HasInteger32Value()) {
3125 Range* result = (left()->range() != NULL)
3126 ? left()->range()->Copy(zone)
3127 : new(zone) Range();
3128 result->Sar(c->Integer32Value());
3132 return HValue::InferRange(zone);
3136 Range* HShr::InferRange(Zone* zone) {
3137 if (right()->IsConstant()) {
3138 HConstant* c = HConstant::cast(right());
3139 if (c->HasInteger32Value()) {
3140 int shift_count = c->Integer32Value() & 0x1f;
3141 if (left()->range()->CanBeNegative()) {
3142 // Only compute bounds if the result always fits into an int32.
3143 return (shift_count >= 1)
3144 ? new(zone) Range(0,
3145 static_cast<uint32_t>(0xffffffff) >> shift_count)
3146 : new(zone) Range();
3148 // For positive inputs we can use the >> operator.
3149 Range* result = (left()->range() != NULL)
3150 ? left()->range()->Copy(zone)
3151 : new(zone) Range();
3152 result->Sar(c->Integer32Value());
3157 return HValue::InferRange(zone);
3161 Range* HShl::InferRange(Zone* zone) {
3162 if (right()->IsConstant()) {
3163 HConstant* c = HConstant::cast(right());
3164 if (c->HasInteger32Value()) {
3165 Range* result = (left()->range() != NULL)
3166 ? left()->range()->Copy(zone)
3167 : new(zone) Range();
3168 result->Shl(c->Integer32Value());
3172 return HValue::InferRange(zone);
3176 Range* HLoadNamedField::InferRange(Zone* zone) {
3177 if (access().representation().IsInteger8()) {
3178 return new(zone) Range(kMinInt8, kMaxInt8);
3180 if (access().representation().IsUInteger8()) {
3181 return new(zone) Range(kMinUInt8, kMaxUInt8);
3183 if (access().representation().IsInteger16()) {
3184 return new(zone) Range(kMinInt16, kMaxInt16);
3186 if (access().representation().IsUInteger16()) {
3187 return new(zone) Range(kMinUInt16, kMaxUInt16);
3189 if (access().IsStringLength()) {
3190 return new(zone) Range(0, String::kMaxLength);
3192 return HValue::InferRange(zone);
3196 Range* HLoadKeyed::InferRange(Zone* zone) {
3197 switch (elements_kind()) {
3198 case EXTERNAL_INT8_ELEMENTS:
3199 return new(zone) Range(kMinInt8, kMaxInt8);
3200 case EXTERNAL_UINT8_ELEMENTS:
3201 case EXTERNAL_UINT8_CLAMPED_ELEMENTS:
3202 return new(zone) Range(kMinUInt8, kMaxUInt8);
3203 case EXTERNAL_INT16_ELEMENTS:
3204 return new(zone) Range(kMinInt16, kMaxInt16);
3205 case EXTERNAL_UINT16_ELEMENTS:
3206 return new(zone) Range(kMinUInt16, kMaxUInt16);
3208 return HValue::InferRange(zone);
3213 std::ostream& HCompareGeneric::PrintDataTo(std::ostream& os) const { // NOLINT
3214 os << Token::Name(token()) << " ";
3215 return HBinaryOperation::PrintDataTo(os);
3219 std::ostream& HStringCompareAndBranch::PrintDataTo(
3220 std::ostream& os) const { // NOLINT
3221 os << Token::Name(token()) << " ";
3222 return HControlInstruction::PrintDataTo(os);
3226 std::ostream& HCompareNumericAndBranch::PrintDataTo(
3227 std::ostream& os) const { // NOLINT
3228 os << Token::Name(token()) << " " << NameOf(left()) << " " << NameOf(right());
3229 return HControlInstruction::PrintDataTo(os);
3233 std::ostream& HCompareObjectEqAndBranch::PrintDataTo(
3234 std::ostream& os) const { // NOLINT
3235 os << NameOf(left()) << " " << NameOf(right());
3236 return HControlInstruction::PrintDataTo(os);
3240 bool HCompareObjectEqAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3241 if (known_successor_index() != kNoKnownSuccessorIndex) {
3242 *block = SuccessorAt(known_successor_index());
3245 if (FLAG_fold_constants && left()->IsConstant() && right()->IsConstant()) {
3246 *block = HConstant::cast(left())->DataEquals(HConstant::cast(right()))
3247 ? FirstSuccessor() : SecondSuccessor();
3255 bool ConstantIsObject(HConstant* constant, Isolate* isolate) {
3256 if (constant->HasNumberValue()) return false;
3257 if (constant->GetUnique().IsKnownGlobal(isolate->heap()->null_value())) {
3260 if (constant->IsUndetectable()) return false;
3261 InstanceType type = constant->GetInstanceType();
3262 return (FIRST_NONCALLABLE_SPEC_OBJECT_TYPE <= type) &&
3263 (type <= LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
3267 bool HIsObjectAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3268 if (FLAG_fold_constants && value()->IsConstant()) {
3269 *block = ConstantIsObject(HConstant::cast(value()), isolate())
3270 ? FirstSuccessor() : SecondSuccessor();
3278 bool HIsStringAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3279 if (known_successor_index() != kNoKnownSuccessorIndex) {
3280 *block = SuccessorAt(known_successor_index());
3283 if (FLAG_fold_constants && value()->IsConstant()) {
3284 *block = HConstant::cast(value())->HasStringValue()
3285 ? FirstSuccessor() : SecondSuccessor();
3288 if (value()->type().IsString()) {
3289 *block = FirstSuccessor();
3292 if (value()->type().IsSmi() ||
3293 value()->type().IsNull() ||
3294 value()->type().IsBoolean() ||
3295 value()->type().IsUndefined() ||
3296 value()->type().IsJSObject()) {
3297 *block = SecondSuccessor();
3305 bool HIsUndetectableAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3306 if (FLAG_fold_constants && value()->IsConstant()) {
3307 *block = HConstant::cast(value())->IsUndetectable()
3308 ? FirstSuccessor() : SecondSuccessor();
3316 bool HHasInstanceTypeAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3317 if (FLAG_fold_constants && value()->IsConstant()) {
3318 InstanceType type = HConstant::cast(value())->GetInstanceType();
3319 *block = (from_ <= type) && (type <= to_)
3320 ? FirstSuccessor() : SecondSuccessor();
3328 void HCompareHoleAndBranch::InferRepresentation(
3329 HInferRepresentationPhase* h_infer) {
3330 ChangeRepresentation(value()->representation());
3334 bool HCompareNumericAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3335 if (left() == right() &&
3336 left()->representation().IsSmiOrInteger32()) {
3337 *block = (token() == Token::EQ ||
3338 token() == Token::EQ_STRICT ||
3339 token() == Token::LTE ||
3340 token() == Token::GTE)
3341 ? FirstSuccessor() : SecondSuccessor();
3349 bool HCompareMinusZeroAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3350 if (FLAG_fold_constants && value()->IsConstant()) {
3351 HConstant* constant = HConstant::cast(value());
3352 if (constant->HasDoubleValue()) {
3353 *block = IsMinusZero(constant->DoubleValue())
3354 ? FirstSuccessor() : SecondSuccessor();
3358 if (value()->representation().IsSmiOrInteger32()) {
3359 // A Smi or Integer32 cannot contain minus zero.
3360 *block = SecondSuccessor();
3368 void HCompareMinusZeroAndBranch::InferRepresentation(
3369 HInferRepresentationPhase* h_infer) {
3370 ChangeRepresentation(value()->representation());
3374 std::ostream& HGoto::PrintDataTo(std::ostream& os) const { // NOLINT
3375 return os << *SuccessorAt(0);
3379 void HCompareNumericAndBranch::InferRepresentation(
3380 HInferRepresentationPhase* h_infer) {
3381 Representation left_rep = left()->representation();
3382 Representation right_rep = right()->representation();
3383 Representation observed_left = observed_input_representation(0);
3384 Representation observed_right = observed_input_representation(1);
3386 Representation rep = Representation::None();
3387 rep = rep.generalize(observed_left);
3388 rep = rep.generalize(observed_right);
3389 if (rep.IsNone() || rep.IsSmiOrInteger32()) {
3390 if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
3391 if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);
3393 rep = Representation::Double();
3396 if (rep.IsDouble()) {
3397 // According to the ES5 spec (11.9.3, 11.8.5), Equality comparisons (==, ===
3398 // and !=) have special handling of undefined, e.g. undefined == undefined
3399 // is 'true'. Relational comparisons have different semantics, first
3400 // calling ToPrimitive() on their arguments. The standard Crankshaft
3401 // tagged-to-double conversion to ensure the HCompareNumericAndBranch's
3402 // inputs are doubles caused 'undefined' to be converted to NaN. That's
3403 // compatible out-of-the box with ordered relational comparisons (<, >, <=,
3404 // >=). However, for equality comparisons (and for 'in' and 'instanceof'),
3405 // it is not consistent with the spec. For example, it would cause undefined
3406 // == undefined (should be true) to be evaluated as NaN == NaN
3407 // (false). Therefore, any comparisons other than ordered relational
3408 // comparisons must cause a deopt when one of their arguments is undefined.
3410 if (Token::IsOrderedRelationalCompareOp(token_)) {
3411 SetFlag(kAllowUndefinedAsNaN);
3414 ChangeRepresentation(rep);
3418 std::ostream& HParameter::PrintDataTo(std::ostream& os) const { // NOLINT
3419 return os << index();
3423 std::ostream& HLoadNamedField::PrintDataTo(std::ostream& os) const { // NOLINT
3424 os << NameOf(object()) << access_;
3426 if (maps() != NULL) {
3427 os << " [" << *maps()->at(0).handle();
3428 for (int i = 1; i < maps()->size(); ++i) {
3429 os << "," << *maps()->at(i).handle();
3434 if (HasDependency()) os << " " << NameOf(dependency());
3439 std::ostream& HLoadNamedGeneric::PrintDataTo(
3440 std::ostream& os) const { // NOLINT
3441 Handle<String> n = Handle<String>::cast(name());
3442 return os << NameOf(object()) << "." << n->ToCString().get();
3446 std::ostream& HLoadKeyed::PrintDataTo(std::ostream& os) const { // NOLINT
3447 if (!is_external()) {
3448 os << NameOf(elements());
3450 DCHECK(elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND &&
3451 elements_kind() <= LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
3452 os << NameOf(elements()) << "." << ElementsKindToString(elements_kind());
3455 os << "[" << NameOf(key());
3456 if (IsDehoisted()) os << " + " << base_offset();
3459 if (HasDependency()) os << " " << NameOf(dependency());
3460 if (RequiresHoleCheck()) os << " check_hole";
3465 bool HLoadKeyed::TryIncreaseBaseOffset(uint32_t increase_by_value) {
3466 // The base offset is usually simply the size of the array header, except
3467 // that dehoisting adds an additional offset due to array index key
3468 // manipulation, in which case it becomes (array header size +
3469 // constant-offset-from-key * kPointerSize).
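  // For example (illustrative): with an array header of
  // FixedArray::kHeaderSize and a dehoisted constant key offset of 2, the
  // base offset becomes FixedArray::kHeaderSize + 2 * kPointerSize.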
3470 uint32_t base_offset = BaseOffsetField::decode(bit_field_);
3471 v8::base::internal::CheckedNumeric<uint32_t> addition_result = base_offset;
3472 addition_result += increase_by_value;
3473 if (!addition_result.IsValid()) return false;
3474 base_offset = addition_result.ValueOrDie();
3475 if (!BaseOffsetField::is_valid(base_offset)) return false;
3476 bit_field_ = BaseOffsetField::update(bit_field_, base_offset);
3481 bool HLoadKeyed::UsesMustHandleHole() const {
3482 if (IsFastPackedElementsKind(elements_kind())) {
3486 if (IsExternalArrayElementsKind(elements_kind())) {
3490 if (hole_mode() == ALLOW_RETURN_HOLE) {
3491 if (IsFastDoubleElementsKind(elements_kind())) {
3492 return AllUsesCanTreatHoleAsNaN();
3497 if (IsFastDoubleElementsKind(elements_kind())) {
3501 // Holes are only returned as tagged values.
3502 if (!representation().IsTagged()) {
3506 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
3507 HValue* use = it.value();
3508 if (!use->IsChange()) return false;
3515 bool HLoadKeyed::AllUsesCanTreatHoleAsNaN() const {
3516 return IsFastDoubleElementsKind(elements_kind()) &&
3517 CheckUsesForFlag(HValue::kAllowUndefinedAsNaN);
3521 bool HLoadKeyed::RequiresHoleCheck() const {
3522 if (IsFastPackedElementsKind(elements_kind())) {
3526 if (IsExternalArrayElementsKind(elements_kind())) {
3530 return !UsesMustHandleHole();
3534 std::ostream& HLoadKeyedGeneric::PrintDataTo(
3535 std::ostream& os) const { // NOLINT
3536 return os << NameOf(object()) << "[" << NameOf(key()) << "]";
3540 HValue* HLoadKeyedGeneric::Canonicalize() {
3541 // Recognize generic keyed loads that use property name generated
3542 // by for-in statement as a key and rewrite them into fast property load
3543 // by index.
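  // Illustrative example: in "for (var key in obj) { use(obj[key]); }" the
  // key is loaded from the for-in enum cache, so "obj[key]" can be rewritten
  // into an HLoadFieldByIndex through the matching index cache.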
3544 if (key()->IsLoadKeyed()) {
3545 HLoadKeyed* key_load = HLoadKeyed::cast(key());
3546 if (key_load->elements()->IsForInCacheArray()) {
3547 HForInCacheArray* names_cache =
3548 HForInCacheArray::cast(key_load->elements());
3550 if (names_cache->enumerable() == object()) {
3551 HForInCacheArray* index_cache =
3552 names_cache->index_cache();
3553 HCheckMapValue* map_check = HCheckMapValue::New(
3554 block()->graph()->isolate(), block()->graph()->zone(),
3555 block()->graph()->GetInvalidContext(), object(),
3556 names_cache->map());
3557 HInstruction* index = HLoadKeyed::New(
3558 block()->graph()->isolate(), block()->graph()->zone(),
3559 block()->graph()->GetInvalidContext(), index_cache, key_load->key(),
3560 key_load->key(), key_load->elements_kind());
3561 map_check->InsertBefore(this);
3562 index->InsertBefore(this);
3563 return Prepend(new(block()->zone()) HLoadFieldByIndex(
3573 std::ostream& HStoreNamedGeneric::PrintDataTo(
3574 std::ostream& os) const { // NOLINT
3575 Handle<String> n = Handle<String>::cast(name());
3576 return os << NameOf(object()) << "." << n->ToCString().get() << " = "
3581 std::ostream& HStoreNamedField::PrintDataTo(std::ostream& os) const { // NOLINT
3582 os << NameOf(object()) << access_ << " = " << NameOf(value());
3583 if (NeedsWriteBarrier()) os << " (write-barrier)";
3584 if (has_transition()) os << " (transition map " << *transition_map() << ")";
3589 std::ostream& HStoreKeyed::PrintDataTo(std::ostream& os) const { // NOLINT
3590 if (!is_external()) {
3591 os << NameOf(elements());
3593 DCHECK(elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND &&
3594 elements_kind() <= LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
3595 os << NameOf(elements()) << "." << ElementsKindToString(elements_kind());
3598 os << "[" << NameOf(key());
3599 if (IsDehoisted()) os << " + " << base_offset();
3600 return os << "] = " << NameOf(value());
3604 std::ostream& HStoreKeyedGeneric::PrintDataTo(
3605 std::ostream& os) const { // NOLINT
3606 return os << NameOf(object()) << "[" << NameOf(key())
3607 << "] = " << NameOf(value());
3611 std::ostream& HTransitionElementsKind::PrintDataTo(
3612 std::ostream& os) const { // NOLINT
3613 os << NameOf(object());
3614 ElementsKind from_kind = original_map().handle()->elements_kind();
3615 ElementsKind to_kind = transitioned_map().handle()->elements_kind();
3616 os << " " << *original_map().handle() << " ["
3617 << ElementsAccessor::ForKind(from_kind)->name() << "] -> "
3618 << *transitioned_map().handle() << " ["
3619 << ElementsAccessor::ForKind(to_kind)->name() << "]";
3620 if (IsSimpleMapChangeTransition(from_kind, to_kind)) os << " (simple)";
3625 std::ostream& HLoadGlobalGeneric::PrintDataTo(
3626 std::ostream& os) const { // NOLINT
3627 return os << name()->ToCString().get() << " ";
3631 std::ostream& HInnerAllocatedObject::PrintDataTo(
3632 std::ostream& os) const { // NOLINT
3633 os << NameOf(base_object()) << " offset ";
3634 return offset()->PrintTo(os);
3638 std::ostream& HLoadContextSlot::PrintDataTo(std::ostream& os) const { // NOLINT
3639 return os << NameOf(value()) << "[" << slot_index() << "]";
3643 std::ostream& HStoreContextSlot::PrintDataTo(
3644 std::ostream& os) const { // NOLINT
3645 return os << NameOf(context()) << "[" << slot_index()
3646 << "] = " << NameOf(value());
3650 // Implementation of type inference and type conversions. Calculates
3651 // the inferred type of this instruction based on the input operands.
3653 HType HValue::CalculateInferredType() {
3658 HType HPhi::CalculateInferredType() {
3659 if (OperandCount() == 0) return HType::Tagged();
3660 HType result = OperandAt(0)->type();
3661 for (int i = 1; i < OperandCount(); ++i) {
3662 HType current = OperandAt(i)->type();
3663 result = result.Combine(current);
3669 HType HChange::CalculateInferredType() {
3670 if (from().IsDouble() && to().IsTagged()) return HType::HeapNumber();
3675 Representation HUnaryMathOperation::RepresentationFromInputs() {
3676 if (SupportsFlexibleFloorAndRound() &&
3677 (op_ == kMathFloor || op_ == kMathRound)) {
3678 // Floor and Round always take a double input. The integral result can be
3679 // used as an integer or a double. Infer the representation from the uses.
3680 return Representation::None();
3682 Representation rep = representation();
3683 // If any of the actual input representations is more general than what we
3684 // have so far but not Tagged, use that representation instead.
3685 Representation input_rep = value()->representation();
3686 if (!input_rep.IsTagged()) {
3687 rep = rep.generalize(input_rep);
3693 bool HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
3694 HValue* dominator) {
3695 DCHECK(side_effect == kNewSpacePromotion);
3696 Zone* zone = block()->zone();
3697 Isolate* isolate = block()->isolate();
3698 if (!FLAG_use_allocation_folding) return false;
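  // Illustrative example: if a dominating allocation of 24 bytes is followed
  // by this allocation of at most 16 bytes, folding grows the dominator to
  // 40 bytes and replaces this instruction with an HInnerAllocatedObject at
  // offset 24 into the dominator's allocation.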
3700 // Try to fold allocations together with their dominating allocations.
3701 if (!dominator->IsAllocate()) {
3702 if (FLAG_trace_allocation_folding) {
3703 PrintF("#%d (%s) cannot fold into #%d (%s)\n",
3704 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3709 // Check whether we are folding within the same block for local folding.
3710 if (FLAG_use_local_allocation_folding && dominator->block() != block()) {
3711 if (FLAG_trace_allocation_folding) {
3712 PrintF("#%d (%s) cannot fold into #%d (%s), crosses basic blocks\n",
3713 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3718 HAllocate* dominator_allocate = HAllocate::cast(dominator);
3719 HValue* dominator_size = dominator_allocate->size();
3720 HValue* current_size = size();
3722 // TODO(hpayer): Add support for non-constant allocation in dominator.
3723 if (!dominator_size->IsInteger32Constant()) {
3724 if (FLAG_trace_allocation_folding) {
3725 PrintF("#%d (%s) cannot fold into #%d (%s), "
3726 "dynamic allocation size in dominator\n",
3727 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3732 dominator_allocate = GetFoldableDominator(dominator_allocate);
3733 if (dominator_allocate == NULL) {
3737 if (!has_size_upper_bound()) {
3738 if (FLAG_trace_allocation_folding) {
3739 PrintF("#%d (%s) cannot fold into #%d (%s), "
3740 "can't estimate total allocation size\n",
3741 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3746 if (!current_size->IsInteger32Constant()) {
3747 // If it's not constant then it is a size_in_bytes calculation graph
3748 // like this: (const_header_size + const_element_size * size).
3749 DCHECK(current_size->IsInstruction());
3751 HInstruction* current_instr = HInstruction::cast(current_size);
3752 if (!current_instr->Dominates(dominator_allocate)) {
3753 if (FLAG_trace_allocation_folding) {
3754 PrintF("#%d (%s) cannot fold into #%d (%s), dynamic size "
3755 "value does not dominate target allocation\n",
3756 id(), Mnemonic(), dominator_allocate->id(),
3757 dominator_allocate->Mnemonic());
3764 (IsNewSpaceAllocation() && dominator_allocate->IsNewSpaceAllocation()) ||
3765 (IsOldDataSpaceAllocation() &&
3766 dominator_allocate->IsOldDataSpaceAllocation()) ||
3767 (IsOldPointerSpaceAllocation() &&
3768 dominator_allocate->IsOldPointerSpaceAllocation()));
3770 // First update the size of the dominator allocate instruction.
3771 dominator_size = dominator_allocate->size();
3772 int32_t original_object_size =
3773 HConstant::cast(dominator_size)->GetInteger32Constant();
3774 int32_t dominator_size_constant = original_object_size;
3776 if (MustAllocateDoubleAligned()) {
3777 if ((dominator_size_constant & kDoubleAlignmentMask) != 0) {
3778 dominator_size_constant += kDoubleSize / 2;
3782 int32_t current_size_max_value = size_upper_bound()->GetInteger32Constant();
3783 int32_t new_dominator_size = dominator_size_constant + current_size_max_value;
3785 // Since we clear the first word after folded memory, we cannot use the
3786 // whole Page::kMaxRegularHeapObjectSize memory.
3787 if (new_dominator_size > Page::kMaxRegularHeapObjectSize - kPointerSize) {
3788 if (FLAG_trace_allocation_folding) {
3789 PrintF("#%d (%s) cannot fold into #%d (%s) due to size: %d\n",
3790 id(), Mnemonic(), dominator_allocate->id(),
3791 dominator_allocate->Mnemonic(), new_dominator_size);
3796 HInstruction* new_dominator_size_value;
3798 if (current_size->IsInteger32Constant()) {
3799 new_dominator_size_value = HConstant::CreateAndInsertBefore(
3800 isolate, zone, context(), new_dominator_size, Representation::None(),
3801 dominator_allocate);
3803 HValue* new_dominator_size_constant = HConstant::CreateAndInsertBefore(
3804 isolate, zone, context(), dominator_size_constant,
3805 Representation::Integer32(), dominator_allocate);
3807 // Add old and new size together and insert.
3808 current_size->ChangeRepresentation(Representation::Integer32());
3810 new_dominator_size_value = HAdd::New(
3811 isolate, zone, context(), new_dominator_size_constant, current_size);
3812 new_dominator_size_value->ClearFlag(HValue::kCanOverflow);
3813 new_dominator_size_value->ChangeRepresentation(Representation::Integer32());
3815 new_dominator_size_value->InsertBefore(dominator_allocate);
3818 dominator_allocate->UpdateSize(new_dominator_size_value);
3820 if (MustAllocateDoubleAligned()) {
3821 if (!dominator_allocate->MustAllocateDoubleAligned()) {
3822 dominator_allocate->MakeDoubleAligned();
3826 bool keep_new_space_iterable = FLAG_log_gc || FLAG_heap_stats;
3828 keep_new_space_iterable = keep_new_space_iterable || FLAG_verify_heap;
3831 if (keep_new_space_iterable && dominator_allocate->IsNewSpaceAllocation()) {
3832 dominator_allocate->MakePrefillWithFiller();
3834 // TODO(hpayer): This is a short-term hack to make allocation mementos
3835 // work again in new space.
3836 dominator_allocate->ClearNextMapWord(original_object_size);
3839 dominator_allocate->UpdateClearNextMapWord(MustClearNextMapWord());
3841 // After that replace the dominated allocate instruction.
3842 HInstruction* inner_offset = HConstant::CreateAndInsertBefore(
3843 isolate, zone, context(), dominator_size_constant, Representation::None(),
3846 HInstruction* dominated_allocate_instr = HInnerAllocatedObject::New(
3847 isolate, zone, context(), dominator_allocate, inner_offset, type());
3848 dominated_allocate_instr->InsertBefore(this);
3849 DeleteAndReplaceWith(dominated_allocate_instr);
3850 if (FLAG_trace_allocation_folding) {
3851 PrintF("#%d (%s) folded into #%d (%s)\n",
3852 id(), Mnemonic(), dominator_allocate->id(),
3853 dominator_allocate->Mnemonic());
3859 HAllocate* HAllocate::GetFoldableDominator(HAllocate* dominator) {
3860 if (!IsFoldable(dominator)) {
3861 // We cannot hoist old space allocations over new space allocations.
3862 if (IsNewSpaceAllocation() || dominator->IsNewSpaceAllocation()) {
3863 if (FLAG_trace_allocation_folding) {
3864 PrintF("#%d (%s) cannot fold into #%d (%s), new space hoisting\n", id(),
3865 Mnemonic(), dominator->id(), dominator->Mnemonic());
3870 HAllocate* dominator_dominator = dominator->dominating_allocate_;
3872 // We can hoist old data space allocations over an old pointer space
3873 // allocation and vice versa. For that we have to check the dominator
3874 // of the dominator allocate instruction.
3875 if (dominator_dominator == NULL) {
3876 dominating_allocate_ = dominator;
3877 if (FLAG_trace_allocation_folding) {
3878 PrintF("#%d (%s) cannot fold into #%d (%s), different spaces\n", id(),
3879 Mnemonic(), dominator->id(), dominator->Mnemonic());
3884 // We can just fold old space allocations that are in the same basic block,
3885 // since it is not guaranteed that we fill up the whole allocated old
3886 // space memory.
3887 // TODO(hpayer): Remove this limitation and add filler maps for each
3888 // allocation as soon as we have store elimination.
3889 if (block()->block_id() != dominator_dominator->block()->block_id()) {
3890 if (FLAG_trace_allocation_folding) {
3891 PrintF("#%d (%s) cannot fold into #%d (%s), different basic blocks\n",
3892 id(), Mnemonic(), dominator_dominator->id(),
3893 dominator_dominator->Mnemonic());
3898 DCHECK((IsOldDataSpaceAllocation() &&
3899 dominator_dominator->IsOldDataSpaceAllocation()) ||
3900 (IsOldPointerSpaceAllocation() &&
3901 dominator_dominator->IsOldPointerSpaceAllocation()));
3903 int32_t current_size = HConstant::cast(size())->GetInteger32Constant();
3904 HStoreNamedField* dominator_free_space_size =
3905 dominator->filler_free_space_size_;
3906 if (dominator_free_space_size != NULL) {
3907 // We already hoisted one old space allocation, i.e., we already installed
3908 // a filler map. Hence, we just have to update the free space size.
3909 dominator->UpdateFreeSpaceFiller(current_size);
3911 // This is the first old space allocation that gets hoisted. We have to
3912 // install a filler map since the following allocation may cause a GC.
3913 dominator->CreateFreeSpaceFiller(current_size);
3916 // We can hoist the old space allocation over the actual dominator.
3917 return dominator_dominator;
3923 void HAllocate::UpdateFreeSpaceFiller(int32_t free_space_size) {
3924 DCHECK(filler_free_space_size_ != NULL);
3925 Zone* zone = block()->zone();
3926 // We must explicitly force Smi representation here because on x64 we
3927 // would otherwise automatically choose int32, but the actual store
3928 // requires a Smi-tagged value.
3929 HConstant* new_free_space_size = HConstant::CreateAndInsertBefore(
3930 block()->isolate(), zone, context(),
3931 filler_free_space_size_->value()->GetInteger32Constant() +
3933 Representation::Smi(), filler_free_space_size_);
3934 filler_free_space_size_->UpdateValue(new_free_space_size);
3938 void HAllocate::CreateFreeSpaceFiller(int32_t free_space_size) {
3939 DCHECK(filler_free_space_size_ == NULL);
3940 Isolate* isolate = block()->isolate();
3941 Zone* zone = block()->zone();
3942 HInstruction* free_space_instr =
3943 HInnerAllocatedObject::New(isolate, zone, context(), dominating_allocate_,
3944 dominating_allocate_->size(), type());
3945 free_space_instr->InsertBefore(this);
3946 HConstant* filler_map = HConstant::CreateAndInsertAfter(
3947 zone, Unique<Map>::CreateImmovable(isolate->factory()->free_space_map()),
3948 true, free_space_instr);
3949 HInstruction* store_map =
3950 HStoreNamedField::New(isolate, zone, context(), free_space_instr,
3951 HObjectAccess::ForMap(), filler_map);
3952 store_map->SetFlag(HValue::kHasNoObservableSideEffects);
3953 store_map->InsertAfter(filler_map);
3955 // We must explicitly force Smi representation here because on x64 we
3956 // would otherwise automatically choose int32, but the actual store
3957 // requires a Smi-tagged value.
3958 HConstant* filler_size =
3959 HConstant::CreateAndInsertAfter(isolate, zone, context(), free_space_size,
3960 Representation::Smi(), store_map);
3961 // Must force Smi representation for x64 (see comment above).
3962 HObjectAccess access = HObjectAccess::ForMapAndOffset(
3963 isolate->factory()->free_space_map(), FreeSpace::kSizeOffset,
3964 Representation::Smi());
3965 HStoreNamedField* store_size = HStoreNamedField::New(
3966 isolate, zone, context(), free_space_instr, access, filler_size);
3967 store_size->SetFlag(HValue::kHasNoObservableSideEffects);
3968 store_size->InsertAfter(filler_size);
3969 filler_free_space_size_ = store_size;
3973 void HAllocate::ClearNextMapWord(int offset) {
3974 if (MustClearNextMapWord()) {
3975 Zone* zone = block()->zone();
3976 HObjectAccess access =
3977 HObjectAccess::ForObservableJSObjectOffset(offset);
3978 HStoreNamedField* clear_next_map =
3979 HStoreNamedField::New(block()->isolate(), zone, context(), this, access,
3980 block()->graph()->GetConstant0());
3981 clear_next_map->ClearAllSideEffects();
3982 clear_next_map->InsertAfter(this);


std::ostream& HAllocate::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << NameOf(size()) << " (";
  if (IsNewSpaceAllocation()) os << "N";
  if (IsOldPointerSpaceAllocation()) os << "P";
  if (IsOldDataSpaceAllocation()) os << "D";
  if (MustAllocateDoubleAligned()) os << "A";
  if (MustPrefillWithFiller()) os << "F";
  return os << ")";
}


bool HStoreKeyed::TryIncreaseBaseOffset(uint32_t increase_by_value) {
  // The base offset is usually just the size of the array header, except
  // when dehoisting adds an additional offset due to array index key
  // manipulation, in which case it becomes
  // (array header size + constant-offset-from-key * kPointerSize).
  v8::base::internal::CheckedNumeric<uint32_t> addition_result = base_offset_;
  addition_result += increase_by_value;
  if (!addition_result.IsValid()) return false;
  base_offset_ = addition_result.ValueOrDie();
  return true;
}
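

// Worked example (illustrative): for a keyed store into a FixedArray the
// base offset starts at the array header size. If dehoisting rewrites the
// key `i + 4` into the plain key `i`, the constant part of the key is folded
// into the base offset instead, giving (array header size + 4 * kPointerSize).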


bool HStoreKeyed::NeedsCanonicalization() {
  switch (value()->opcode()) {
    case kLoadKeyed: {
      ElementsKind load_kind = HLoadKeyed::cast(value())->elements_kind();
      return IsExternalFloatOrDoubleElementsKind(load_kind) ||
             IsFixedFloatElementsKind(load_kind);
    }
    case kChange: {
      Representation from = HChange::cast(value())->from();
      return from.IsTagged() || from.IsHeapObject();
    }
    case kLoadNamedField:
    case kPhi: {
      // Better safe than sorry...
      return true;
    }
    default:
      return false;
  }
}
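

// Canonicalization background: V8 marks holes in double arrays with one
// specific NaN bit pattern. A keyed double store must therefore not write an
// arbitrary NaN (e.g. one read from an external float array or an untyped
// heap value), or it could later be mistaken for the hole; such values are
// canonicalized to the standard quiet NaN first.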


#define H_CONSTANT_INT(val) \
  HConstant::New(isolate, zone, context, static_cast<int32_t>(val))
#define H_CONSTANT_DOUBLE(val) \
  HConstant::New(isolate, zone, context, static_cast<double>(val))

#define DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HInstr, op)                      \
  HInstruction* HInstr::New(Isolate* isolate, Zone* zone, HValue* context,    \
                            HValue* left, HValue* right) {                    \
    if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {   \
      HConstant* c_left = HConstant::cast(left);                              \
      HConstant* c_right = HConstant::cast(right);                            \
      if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {          \
        double double_res = c_left->DoubleValue() op c_right->DoubleValue();  \
        if (IsInt32Double(double_res)) {                                      \
          return H_CONSTANT_INT(double_res);                                  \
        }                                                                     \
        return H_CONSTANT_DOUBLE(double_res);                                 \
      }                                                                       \
    }                                                                         \
    return new (zone) HInstr(context, left, right);                           \
  }


DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HAdd, +)
DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HMul, *)
DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HSub, -)

#undef DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR
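

// For example, HAdd::New on the constants 1 and 2 folds to the integer
// HConstant 3 via H_CONSTANT_INT, while 0.5 + 0.25 folds to the double
// HConstant 0.75 via H_CONSTANT_DOUBLE because the result is not an int32.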


HInstruction* HStringAdd::New(Isolate* isolate, Zone* zone, HValue* context,
                              HValue* left, HValue* right,
                              PretenureFlag pretenure_flag,
                              StringAddFlags flags,
                              Handle<AllocationSite> allocation_site) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_right = HConstant::cast(right);
    HConstant* c_left = HConstant::cast(left);
    if (c_left->HasStringValue() && c_right->HasStringValue()) {
      Handle<String> left_string = c_left->StringValue();
      Handle<String> right_string = c_right->StringValue();
      // Prevent a possible exception due to an invalid string length.
      if (left_string->length() + right_string->length() < String::kMaxLength) {
        MaybeHandle<String> concat = isolate->factory()->NewConsString(
            c_left->StringValue(), c_right->StringValue());
        return HConstant::New(isolate, zone, context, concat.ToHandleChecked());
      }
    }
  }
  return new(zone) HStringAdd(
      context, left, right, pretenure_flag, flags, allocation_site);
}


std::ostream& HStringAdd::PrintDataTo(std::ostream& os) const {  // NOLINT
  if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) {
    os << "_CheckBoth";
  } else if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_LEFT) {
    os << "_CheckLeft";
  } else if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_RIGHT) {
    os << "_CheckRight";
  }
  HBinaryOperation::PrintDataTo(os);
  os << " (";
  if (pretenure_flag() == NOT_TENURED)
    os << "N";
  else if (pretenure_flag() == TENURED)
    os << "D";
  return os << ")";
}


HInstruction* HStringCharFromCode::New(Isolate* isolate, Zone* zone,
                                       HValue* context, HValue* char_code) {
  if (FLAG_fold_constants && char_code->IsConstant()) {
    HConstant* c_code = HConstant::cast(char_code);
    if (c_code->HasNumberValue()) {
      if (std::isfinite(c_code->DoubleValue())) {
        uint32_t code = c_code->NumberValueAsInteger32() & 0xffff;
        return HConstant::New(
            isolate, zone, context,
            isolate->factory()->LookupSingleCharacterStringFromCode(code));
      }
      return HConstant::New(isolate, zone, context,
                            isolate->factory()->empty_string());
    }
  }
  return new(zone) HStringCharFromCode(context, char_code);
}


HInstruction* HUnaryMathOperation::New(Isolate* isolate, Zone* zone,
                                       HValue* context, HValue* value,
                                       BuiltinFunctionId op) {
  do {
    if (!FLAG_fold_constants) break;
    if (!value->IsConstant()) break;
    HConstant* constant = HConstant::cast(value);
    if (!constant->HasNumberValue()) break;
    double d = constant->DoubleValue();
    if (std::isnan(d)) {  // NaN poisons everything.
      return H_CONSTANT_DOUBLE(std::numeric_limits<double>::quiet_NaN());
    }
    if (std::isinf(d)) {  // +Infinity and -Infinity.
      switch (op) {
        case kMathExp:
          return H_CONSTANT_DOUBLE((d > 0.0) ? d : 0.0);
        case kMathLog:
        case kMathSqrt:
          return H_CONSTANT_DOUBLE(
              (d > 0.0) ? d : std::numeric_limits<double>::quiet_NaN());
        case kMathPowHalf:
        case kMathAbs:
          return H_CONSTANT_DOUBLE((d > 0.0) ? d : -d);
        case kMathRound:
        case kMathFround:
        case kMathFloor:
          return H_CONSTANT_DOUBLE(d);
        case kMathClz32:
          return H_CONSTANT_INT(32);
        default:
          UNREACHABLE();
          break;
      }
    }
    switch (op) {
      case kMathExp:
        return H_CONSTANT_DOUBLE(fast_exp(d));
      case kMathLog:
        return H_CONSTANT_DOUBLE(std::log(d));
      case kMathSqrt:
        return H_CONSTANT_DOUBLE(fast_sqrt(d));
      case kMathPowHalf:
        return H_CONSTANT_DOUBLE(power_double_double(d, 0.5));
      case kMathAbs:
        return H_CONSTANT_DOUBLE((d >= 0.0) ? d + 0.0 : -d);
      case kMathRound:
        // -0.5 .. -0.0 round to -0.0.
        if ((d >= -0.5 && Double(d).Sign() < 0)) return H_CONSTANT_DOUBLE(-0.0);
        // Doubles are represented as Significand * 2 ^ Exponent. If the
        // Exponent is not negative, the double value is already an integer.
        if (Double(d).Exponent() >= 0) return H_CONSTANT_DOUBLE(d);
        return H_CONSTANT_DOUBLE(Floor(d + 0.5));
      case kMathFround:
        return H_CONSTANT_DOUBLE(static_cast<double>(static_cast<float>(d)));
      case kMathFloor:
        return H_CONSTANT_DOUBLE(Floor(d));
      case kMathClz32: {
        uint32_t i = DoubleToUint32(d);
        return H_CONSTANT_INT(base::bits::CountLeadingZeros32(i));
      }
      default:
        UNREACHABLE();
        break;
    }
  } while (false);
  return new(zone) HUnaryMathOperation(context, value, op);
}
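

// Rounding examples for the kMathRound folding above: -0.3 lies in
// [-0.5, -0) and therefore rounds to -0.0; 2.5 has a negative exponent and
// rounds via Floor(2.5 + 0.5) to 3; a value like 2^52 already has a
// non-negative exponent and is returned unchanged.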


Representation HUnaryMathOperation::RepresentationFromUses() {
  if (op_ != kMathFloor && op_ != kMathRound) {
    return HValue::RepresentationFromUses();
  }

  // The instruction can have an int32 or double output. Prefer a double
  // representation if there are double uses.
  bool use_double = false;

  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    int use_index = it.index();
    Representation rep_observed = use->observed_input_representation(use_index);
    Representation rep_required = use->RequiredInputRepresentation(use_index);
    use_double |= (rep_observed.IsDouble() || rep_required.IsDouble());
    if (use_double && !FLAG_trace_representation) {
      // Having seen one double use is enough.
      break;
    }
    if (FLAG_trace_representation) {
      if (!rep_required.IsDouble() || rep_observed.IsDouble()) {
        PrintF("#%d %s is used by #%d %s as %s%s\n",
               id(), Mnemonic(), use->id(),
               use->Mnemonic(), rep_observed.Mnemonic(),
               (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
      } else {
        PrintF("#%d %s is required by #%d %s as %s%s\n",
               id(), Mnemonic(), use->id(),
               use->Mnemonic(), rep_required.Mnemonic(),
               (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
      }
    }
  }
  return use_double ? Representation::Double() : Representation::Integer32();
}


HInstruction* HPower::New(Isolate* isolate, Zone* zone, HValue* context,
                          HValue* left, HValue* right) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
      double result = power_helper(c_left->DoubleValue(),
                                   c_right->DoubleValue());
      return H_CONSTANT_DOUBLE(std::isnan(result)
                                   ? std::numeric_limits<double>::quiet_NaN()
                                   : result);
    }
  }
  return new(zone) HPower(left, right);
}


HInstruction* HMathMinMax::New(Isolate* isolate, Zone* zone, HValue* context,
                               HValue* left, HValue* right, Operation op) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
      double d_left = c_left->DoubleValue();
      double d_right = c_right->DoubleValue();
      if (op == kMathMin) {
        if (d_left > d_right) return H_CONSTANT_DOUBLE(d_right);
        if (d_left < d_right) return H_CONSTANT_DOUBLE(d_left);
        if (d_left == d_right) {
          // Handle +0 and -0.
          return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_left
                                                                 : d_right);
        }
      } else {
        if (d_left < d_right) return H_CONSTANT_DOUBLE(d_right);
        if (d_left > d_right) return H_CONSTANT_DOUBLE(d_left);
        if (d_left == d_right) {
          // Handle +0 and -0.
          return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_right
                                                                 : d_left);
        }
      }
      // All comparisons failed, must be NaN.
      return H_CONSTANT_DOUBLE(std::numeric_limits<double>::quiet_NaN());
    }
  }
  return new(zone) HMathMinMax(context, left, right, op);
}
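

// The Sign() checks implement the JavaScript corner cases
// Math.min(+0, -0) == -0 and Math.max(+0, -0) == +0, which an ordinary
// double comparison cannot distinguish.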


HInstruction* HMod::New(Isolate* isolate, Zone* zone, HValue* context,
                        HValue* left, HValue* right) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if (c_left->HasInteger32Value() && c_right->HasInteger32Value()) {
      int32_t dividend = c_left->Integer32Value();
      int32_t divisor = c_right->Integer32Value();
      if (dividend == kMinInt && divisor == -1) {
        return H_CONSTANT_DOUBLE(-0.0);
      }
      if (divisor != 0) {
        int32_t res = dividend % divisor;
        if ((res == 0) && (dividend < 0)) {
          return H_CONSTANT_DOUBLE(-0.0);
        }
        return H_CONSTANT_INT(res);
      }
    }
  }
  return new(zone) HMod(context, left, right);
}
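

// JavaScript examples motivating the cases above: kMinInt % -1 is -0 in JS
// but undefined behavior for the C++ % operator, hence the early out;
// -4 % 2 is -0 rather than +0 because the result takes the dividend's sign.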


HInstruction* HDiv::New(Isolate* isolate, Zone* zone, HValue* context,
                        HValue* left, HValue* right) {
  // If left and right are constant values, try to return a constant value.
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
      if (c_right->DoubleValue() != 0) {
        double double_res = c_left->DoubleValue() / c_right->DoubleValue();
        if (IsInt32Double(double_res)) {
          return H_CONSTANT_INT(double_res);
        }
        return H_CONSTANT_DOUBLE(double_res);
      } else {
        int sign = Double(c_left->DoubleValue()).Sign() *
                   Double(c_right->DoubleValue()).Sign();  // Right could be -0.
        return H_CONSTANT_DOUBLE(sign * V8_INFINITY);
      }
    }
  }
  return new(zone) HDiv(context, left, right);
}
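

// The zero-divisor branch implements JavaScript semantics: 1 / 0 is
// +Infinity and 1 / -0 is -Infinity, so the sign of the folded result is the
// product of the operands' signs.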


HInstruction* HBitwise::New(Isolate* isolate, Zone* zone, HValue* context,
                            Token::Value op, HValue* left, HValue* right) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
      int32_t result;
      int32_t v_left = c_left->NumberValueAsInteger32();
      int32_t v_right = c_right->NumberValueAsInteger32();
      switch (op) {
        case Token::BIT_XOR:
          result = v_left ^ v_right;
          break;
        case Token::BIT_AND:
          result = v_left & v_right;
          break;
        case Token::BIT_OR:
          result = v_left | v_right;
          break;
        default:
          result = 0;  // Please the compiler.
          UNREACHABLE();
      }
      return H_CONSTANT_INT(result);
    }
  }
  return new(zone) HBitwise(context, op, left, right);
}


#define DEFINE_NEW_H_BITWISE_INSTR(HInstr, result)                            \
  HInstruction* HInstr::New(Isolate* isolate, Zone* zone, HValue* context,    \
                            HValue* left, HValue* right) {                    \
    if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {   \
      HConstant* c_left = HConstant::cast(left);                              \
      HConstant* c_right = HConstant::cast(right);                            \
      if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {          \
        return H_CONSTANT_INT(result);                                        \
      }                                                                       \
    }                                                                         \
    return new (zone) HInstr(context, left, right);                           \
  }


DEFINE_NEW_H_BITWISE_INSTR(HSar,
c_left->NumberValueAsInteger32() >> (c_right->NumberValueAsInteger32() & 0x1f))
DEFINE_NEW_H_BITWISE_INSTR(HShl,
c_left->NumberValueAsInteger32() << (c_right->NumberValueAsInteger32() & 0x1f))

#undef DEFINE_NEW_H_BITWISE_INSTR


HInstruction* HShr::New(Isolate* isolate, Zone* zone, HValue* context,
                        HValue* left, HValue* right) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
      int32_t left_val = c_left->NumberValueAsInteger32();
      int32_t right_val = c_right->NumberValueAsInteger32() & 0x1f;
      if ((right_val == 0) && (left_val < 0)) {
        return H_CONSTANT_DOUBLE(static_cast<uint32_t>(left_val));
      }
      return H_CONSTANT_INT(static_cast<uint32_t>(left_val) >> right_val);
    }
  }
  return new(zone) HShr(context, left, right);
}
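

// A logical shift of a negative int32 by zero yields a uint32 that may not
// fit in an int32: e.g. -1 >>> 0 is 4294967295 in JavaScript. That case is
// therefore folded to a double constant instead of an integer constant.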


HInstruction* HSeqStringGetChar::New(Isolate* isolate, Zone* zone,
                                     HValue* context, String::Encoding encoding,
                                     HValue* string, HValue* index) {
  if (FLAG_fold_constants && string->IsConstant() && index->IsConstant()) {
    HConstant* c_string = HConstant::cast(string);
    HConstant* c_index = HConstant::cast(index);
    if (c_string->HasStringValue() && c_index->HasInteger32Value()) {
      Handle<String> s = c_string->StringValue();
      int32_t i = c_index->Integer32Value();
      DCHECK_LE(0, i);
      DCHECK_LT(i, s->length());
      return H_CONSTANT_INT(s->Get(i));
    }
  }
  return new(zone) HSeqStringGetChar(encoding, string, index);
}


#undef H_CONSTANT_INT
#undef H_CONSTANT_DOUBLE


std::ostream& HBitwise::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << Token::Name(op_) << " ";
  return HBitwiseBinaryOperation::PrintDataTo(os);
}


void HPhi::SimplifyConstantInputs() {
  // Convert constant inputs to integers when all uses are truncating.
  // This must happen before representation inference takes place.
  if (!CheckUsesForFlag(kTruncatingToInt32)) return;
  for (int i = 0; i < OperandCount(); ++i) {
    if (!OperandAt(i)->IsConstant()) return;
  }
  HGraph* graph = block()->graph();
  for (int i = 0; i < OperandCount(); ++i) {
    HConstant* operand = HConstant::cast(OperandAt(i));
    if (operand->HasInteger32Value()) {
      continue;
    } else if (operand->HasDoubleValue()) {
      HConstant* integer_input = HConstant::New(
          graph->isolate(), graph->zone(), graph->GetInvalidContext(),
          DoubleToInt32(operand->DoubleValue()));
      integer_input->InsertAfter(operand);
      SetOperandAt(i, integer_input);
    } else if (operand->HasBooleanValue()) {
      SetOperandAt(i, operand->BooleanValue() ? graph->GetConstant1()
                                              : graph->GetConstant0());
    } else if (operand->ImmortalImmovable()) {
      SetOperandAt(i, graph->GetConstant0());
    }
  }
  // Overwrite observed input representations because they are likely Tagged.
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    if (use->IsBinaryOperation()) {
      HBinaryOperation::cast(use)->set_observed_input_representation(
          it.index(), Representation::Smi());
    }
  }
}


void HPhi::InferRepresentation(HInferRepresentationPhase* h_infer) {
  DCHECK(CheckFlag(kFlexibleRepresentation));
  Representation new_rep = RepresentationFromUses();
  UpdateRepresentation(new_rep, h_infer, "uses");
  new_rep = RepresentationFromInputs();
  UpdateRepresentation(new_rep, h_infer, "inputs");
  new_rep = RepresentationFromUseRequirements();
  UpdateRepresentation(new_rep, h_infer, "use requirements");
}


Representation HPhi::RepresentationFromInputs() {
  bool has_type_feedback =
      smi_non_phi_uses() + int32_non_phi_uses() + double_non_phi_uses() > 0;
  Representation r = representation();
  for (int i = 0; i < OperandCount(); ++i) {
    // Ignore the conservative Tagged assumption of parameters if we have
    // reason to believe that it is too conservative.
    if (has_type_feedback && OperandAt(i)->IsParameter()) continue;

    r = r.generalize(OperandAt(i)->KnownOptimalRepresentation());
  }
  return r;
}


// Returns a representation if all uses agree on the same representation.
// Integer32 is also returned when some uses are Smi but others are Integer32.
Representation HValue::RepresentationFromUseRequirements() {
  Representation rep = Representation::None();
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    // Ignore the use requirement from never-run code.
    if (it.value()->block()->IsUnreachable()) continue;

    // We check for observed_input_representation elsewhere.
    Representation use_rep =
        it.value()->RequiredInputRepresentation(it.index());
    if (rep.IsNone()) {
      rep = use_rep;
      continue;
    }
    if (use_rep.IsNone() || rep.Equals(use_rep)) continue;
    if (rep.generalize(use_rep).IsInteger32()) {
      rep = Representation::Integer32();
      continue;
    }
    return Representation::None();
  }
  return rep;
}


bool HValue::HasNonSmiUse() {
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    // We check for observed_input_representation elsewhere.
    Representation use_rep =
        it.value()->RequiredInputRepresentation(it.index());
    if (!use_rep.IsNone() &&
        !use_rep.IsSmi() &&
        !use_rep.IsTagged()) {
      return true;
    }
  }
  return false;
}


// Node-specific verification code is only included in debug mode.
#ifdef DEBUG

void HPhi::Verify() {
  DCHECK(OperandCount() == block()->predecessors()->length());
  for (int i = 0; i < OperandCount(); ++i) {
    HValue* value = OperandAt(i);
    HBasicBlock* defining_block = value->block();
    HBasicBlock* predecessor_block = block()->predecessors()->at(i);
    DCHECK(defining_block == predecessor_block ||
           defining_block->Dominates(predecessor_block));
  }
}


void HSimulate::Verify() {
  HInstruction::Verify();
  DCHECK(HasAstId() || next()->IsEnterInlined());
}


void HCheckHeapObject::Verify() {
  HInstruction::Verify();
  DCHECK(HasNoUses());
}


void HCheckValue::Verify() {
  HInstruction::Verify();
  DCHECK(HasNoUses());
}

#endif


HObjectAccess HObjectAccess::ForFixedArrayHeader(int offset) {
  DCHECK(offset >= 0);
  DCHECK(offset < FixedArray::kHeaderSize);
  if (offset == FixedArray::kLengthOffset) return ForFixedArrayLength();
  return HObjectAccess(kInobject, offset);
}


HObjectAccess HObjectAccess::ForMapAndOffset(Handle<Map> map, int offset,
                                             Representation representation) {
  DCHECK(offset >= 0);
  Portion portion = kInobject;

  if (offset == JSObject::kElementsOffset) {
    portion = kElementsPointer;
  } else if (offset == JSObject::kMapOffset) {
    portion = kMaps;
  }
  bool existing_inobject_property = true;
  if (!map.is_null()) {
    existing_inobject_property = (offset <
        map->instance_size() - map->unused_property_fields() * kPointerSize);
  }
  return HObjectAccess(portion, offset, representation, Handle<String>::null(),
                       false, existing_inobject_property);
}


HObjectAccess HObjectAccess::ForAllocationSiteOffset(int offset) {
  switch (offset) {
    case AllocationSite::kTransitionInfoOffset:
      return HObjectAccess(kInobject, offset, Representation::Tagged());
    case AllocationSite::kNestedSiteOffset:
      return HObjectAccess(kInobject, offset, Representation::Tagged());
    case AllocationSite::kPretenureDataOffset:
      return HObjectAccess(kInobject, offset, Representation::Smi());
    case AllocationSite::kPretenureCreateCountOffset:
      return HObjectAccess(kInobject, offset, Representation::Smi());
    case AllocationSite::kDependentCodeOffset:
      return HObjectAccess(kInobject, offset, Representation::Tagged());
    case AllocationSite::kWeakNextOffset:
      return HObjectAccess(kInobject, offset, Representation::Tagged());
    default:
      UNREACHABLE();
  }
  return HObjectAccess(kInobject, offset);
}


HObjectAccess HObjectAccess::ForContextSlot(int index) {
  DCHECK(index >= 0);
  Portion portion = kInobject;
  int offset = Context::kHeaderSize + index * kPointerSize;
  DCHECK_EQ(offset, Context::SlotOffset(index) + kHeapObjectTag);
  return HObjectAccess(portion, offset, Representation::Tagged());
}


HObjectAccess HObjectAccess::ForScriptContext(int index) {
  DCHECK(index >= 0);
  Portion portion = kInobject;
  int offset = ScriptContextTable::GetContextOffset(index);
  return HObjectAccess(portion, offset, Representation::Tagged());
}


HObjectAccess HObjectAccess::ForJSArrayOffset(int offset) {
  DCHECK(offset >= 0);
  Portion portion = kInobject;

  if (offset == JSObject::kElementsOffset) {
    portion = kElementsPointer;
  } else if (offset == JSArray::kLengthOffset) {
    portion = kArrayLengths;
  } else if (offset == JSObject::kMapOffset) {
    portion = kMaps;
  }
  return HObjectAccess(portion, offset);
}


HObjectAccess HObjectAccess::ForBackingStoreOffset(int offset,
    Representation representation) {
  DCHECK(offset >= 0);
  return HObjectAccess(kBackingStore, offset, representation,
                       Handle<String>::null(), false, false);
}


HObjectAccess HObjectAccess::ForField(Handle<Map> map, int index,
                                      Representation representation,
                                      Handle<String> name) {
  if (index < 0) {
    // Negative property indices are in-object properties, indexed
    // from the end of the fixed part of the object.
    int offset = (index * kPointerSize) + map->instance_size();
    return HObjectAccess(kInobject, offset, representation, name, false, true);
  } else {
    // Non-negative property indices are in the properties array.
    int offset = (index * kPointerSize) + FixedArray::kHeaderSize;
    return HObjectAccess(kBackingStore, offset, representation, name,
                         false, false);
  }
}
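

// Layout example (illustrative): with index == -1 the offset is
// map->instance_size() - kPointerSize, i.e. the last in-object slot, while
// index == 0 addresses the first slot after the FixedArray header of the
// out-of-object properties backing store.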


void HObjectAccess::SetGVNFlags(HValue *instr, PropertyAccessType access_type) {
  // Set the appropriate GVN flags for a given load or store instruction.
  if (access_type == STORE) {
    // Track dominating allocations in order to eliminate write barriers.
    instr->SetDependsOnFlag(::v8::internal::kNewSpacePromotion);
    instr->SetFlag(HValue::kTrackSideEffectDominators);
  } else {
    // Try to GVN loads, but don't hoist them above map changes.
    instr->SetFlag(HValue::kUseGVN);
    instr->SetDependsOnFlag(::v8::internal::kMaps);
  }

  switch (portion()) {
    case kArrayLengths:
      if (access_type == STORE) {
        instr->SetChangesFlag(::v8::internal::kArrayLengths);
      } else {
        instr->SetDependsOnFlag(::v8::internal::kArrayLengths);
      }
      break;
    case kStringLengths:
      if (access_type == STORE) {
        instr->SetChangesFlag(::v8::internal::kStringLengths);
      } else {
        instr->SetDependsOnFlag(::v8::internal::kStringLengths);
      }
      break;
    case kInobject:
      if (access_type == STORE) {
        instr->SetChangesFlag(::v8::internal::kInobjectFields);
      } else {
        instr->SetDependsOnFlag(::v8::internal::kInobjectFields);
      }
      break;
    case kDouble:
      if (access_type == STORE) {
        instr->SetChangesFlag(::v8::internal::kDoubleFields);
      } else {
        instr->SetDependsOnFlag(::v8::internal::kDoubleFields);
      }
      break;
    case kBackingStore:
      if (access_type == STORE) {
        instr->SetChangesFlag(::v8::internal::kBackingStoreFields);
      } else {
        instr->SetDependsOnFlag(::v8::internal::kBackingStoreFields);
      }
      break;
    case kElementsPointer:
      if (access_type == STORE) {
        instr->SetChangesFlag(::v8::internal::kElementsPointer);
      } else {
        instr->SetDependsOnFlag(::v8::internal::kElementsPointer);
      }
      break;
    case kMaps:
      if (access_type == STORE) {
        instr->SetChangesFlag(::v8::internal::kMaps);
      } else {
        instr->SetDependsOnFlag(::v8::internal::kMaps);
      }
      break;
    case kExternalMemory:
      if (access_type == STORE) {
        instr->SetChangesFlag(::v8::internal::kExternalMemory);
      } else {
        instr->SetDependsOnFlag(::v8::internal::kExternalMemory);
      }
      break;
  }
}


std::ostream& operator<<(std::ostream& os, const HObjectAccess& access) {
  os << ".";

  switch (access.portion()) {
    case HObjectAccess::kArrayLengths:
    case HObjectAccess::kStringLengths:
      os << "%length";
      break;
    case HObjectAccess::kElementsPointer:
      os << "%elements";
      break;
    case HObjectAccess::kMaps:
      os << "%map";
      break;
    case HObjectAccess::kDouble:  // fall through
    case HObjectAccess::kInobject:
      if (!access.name().is_null()) {
        os << Handle<String>::cast(access.name())->ToCString().get();
      }
      os << "[in-object]";
      break;
    case HObjectAccess::kBackingStore:
      if (!access.name().is_null()) {
        os << Handle<String>::cast(access.name())->ToCString().get();
      }
      os << "[backing-store]";
      break;
    case HObjectAccess::kExternalMemory:
      os << "[external-memory]";
      break;
  }

  return os << "@" << access.offset();
}

} }  // namespace v8::internal