// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/hydrogen-instructions.h"

#include "src/base/bits.h"
#include "src/double.h"
#include "src/elements.h"
#include "src/factory.h"
#include "src/hydrogen-infer-representation.h"

#if V8_TARGET_ARCH_IA32
#include "src/ia32/lithium-ia32.h"  // NOLINT
#elif V8_TARGET_ARCH_X64
#include "src/x64/lithium-x64.h"  // NOLINT
#elif V8_TARGET_ARCH_ARM64
#include "src/arm64/lithium-arm64.h"  // NOLINT
#elif V8_TARGET_ARCH_ARM
#include "src/arm/lithium-arm.h"  // NOLINT
#elif V8_TARGET_ARCH_PPC
#include "src/ppc/lithium-ppc.h"  // NOLINT
#elif V8_TARGET_ARCH_MIPS
#include "src/mips/lithium-mips.h"  // NOLINT
#elif V8_TARGET_ARCH_MIPS64
#include "src/mips64/lithium-mips64.h"  // NOLINT
#elif V8_TARGET_ARCH_X87
#include "src/x87/lithium-x87.h"  // NOLINT
#else
#error Unsupported target architecture.
#endif

#include "src/base/safe_math.h"

namespace v8 {
namespace internal {

#define DEFINE_COMPILE(type)                                         \
  LInstruction* H##type::CompileToLithium(LChunkBuilder* builder) {  \
    return builder->Do##type(this);                                  \
  }
HYDROGEN_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
#undef DEFINE_COMPILE

Isolate* HValue::isolate() const {
  DCHECK(block() != NULL);
  return block()->isolate();
}

void HValue::AssumeRepresentation(Representation r) {
  if (CheckFlag(kFlexibleRepresentation)) {
    ChangeRepresentation(r);
    // The representation of the value is dictated by type feedback and
    // will not be changed later.
    ClearFlag(kFlexibleRepresentation);
  }
}

void HValue::InferRepresentation(HInferRepresentationPhase* h_infer) {
  DCHECK(CheckFlag(kFlexibleRepresentation));
  Representation new_rep = RepresentationFromInputs();
  UpdateRepresentation(new_rep, h_infer, "inputs");
  new_rep = RepresentationFromUses();
  UpdateRepresentation(new_rep, h_infer, "uses");
  if (representation().IsSmi() && HasNonSmiUse()) {
    UpdateRepresentation(
        Representation::Integer32(), h_infer, "use requirements");
  }
}

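// Scans all uses (including, for phis, indirect uses) and returns the most
// general representation they observe for this value, in the fixed order
// Tagged > Double > Integer32 > Smi.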
Representation HValue::RepresentationFromUses() {
  if (HasNoUses()) return Representation::None();

  // Array of use counts for each representation.
  int use_count[Representation::kNumRepresentations] = { 0 };

  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    Representation rep = use->observed_input_representation(it.index());
    if (rep.IsNone()) continue;
    if (FLAG_trace_representation) {
      PrintF("#%d %s is used by #%d %s as %s%s\n",
             id(), Mnemonic(), use->id(), use->Mnemonic(), rep.Mnemonic(),
             (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
    }
    use_count[rep.kind()] += 1;
  }
  if (IsPhi()) HPhi::cast(this)->AddIndirectUsesTo(&use_count[0]);
  int tagged_count = use_count[Representation::kTagged];
  int double_count = use_count[Representation::kDouble];
  int int32_count = use_count[Representation::kInteger32];
  int smi_count = use_count[Representation::kSmi];

  if (tagged_count > 0) return Representation::Tagged();
  if (double_count > 0) return Representation::Double();
  if (int32_count > 0) return Representation::Integer32();
  if (smi_count > 0) return Representation::Smi();

  return Representation::None();
}

void HValue::UpdateRepresentation(Representation new_rep,
                                  HInferRepresentationPhase* h_infer,
                                  const char* reason) {
  Representation r = representation();
  if (new_rep.is_more_general_than(r)) {
    if (CheckFlag(kCannotBeTagged) && new_rep.IsTagged()) return;
    if (FLAG_trace_representation) {
      PrintF("Changing #%d %s representation %s -> %s based on %s\n",
             id(), Mnemonic(), r.Mnemonic(), new_rep.Mnemonic(), reason);
    }
    ChangeRepresentation(new_rep);
    AddDependantsToWorklist(h_infer);
  }
}

void HValue::AddDependantsToWorklist(HInferRepresentationPhase* h_infer) {
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    h_infer->AddToWorklist(it.value());
  }
  for (int i = 0; i < OperandCount(); ++i) {
    h_infer->AddToWorklist(OperandAt(i));
  }
}

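// Helpers for the Range arithmetic below: results are computed in 64 bits
// and then clamped to the bounds of the target representation (Smi or
// Integer32), with *overflow set whenever clamping occurred.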
static int32_t ConvertAndSetOverflow(Representation r,
                                     int64_t result,
                                     bool* overflow) {
  if (r.IsSmi()) {
    if (result > Smi::kMaxValue) {
      *overflow = true;
      return Smi::kMaxValue;
    }
    if (result < Smi::kMinValue) {
      *overflow = true;
      return Smi::kMinValue;
    }
  } else {
    if (result > kMaxInt) {
      *overflow = true;
      return kMaxInt;
    }
    if (result < kMinInt) {
      *overflow = true;
      return kMinInt;
    }
  }
  return static_cast<int32_t>(result);
}

static int32_t AddWithoutOverflow(Representation r,
                                  int32_t a,
                                  int32_t b,
                                  bool* overflow) {
  int64_t result = static_cast<int64_t>(a) + static_cast<int64_t>(b);
  return ConvertAndSetOverflow(r, result, overflow);
}

static int32_t SubWithoutOverflow(Representation r,
                                  int32_t a,
                                  int32_t b,
                                  bool* overflow) {
  int64_t result = static_cast<int64_t>(a) - static_cast<int64_t>(b);
  return ConvertAndSetOverflow(r, result, overflow);
}

static int32_t MulWithoutOverflow(const Representation& r,
                                  int32_t a,
                                  int32_t b,
                                  bool* overflow) {
  int64_t result = static_cast<int64_t>(a) * static_cast<int64_t>(b);
  return ConvertAndSetOverflow(r, result, overflow);
}

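// Returns a bitmask covering every value in the range: the exact value for a
// singleton range, the smallest all-ones mask >= upper_ for non-negative
// ranges, and all 32 bits otherwise.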
int32_t Range::Mask() const {
  if (lower_ == upper_) return lower_;
  if (lower_ >= 0) {
    int32_t res = 1;
    while (res < upper_) {
      res = (res << 1) | 1;
    }
    return res;
  }
  return 0xffffffff;
}

void Range::AddConstant(int32_t value) {
  if (value == 0) return;
  bool may_overflow = false;  // Overflow is ignored here.
  Representation r = Representation::Integer32();
  lower_ = AddWithoutOverflow(r, lower_, value, &may_overflow);
  upper_ = AddWithoutOverflow(r, upper_, value, &may_overflow);
#ifdef DEBUG
  Verify();
#endif
}

void Range::Intersect(Range* other) {
  upper_ = Min(upper_, other->upper_);
  lower_ = Max(lower_, other->lower_);
  bool b = CanBeMinusZero() && other->CanBeMinusZero();
  set_can_be_minus_zero(b);
}

void Range::Union(Range* other) {
  upper_ = Max(upper_, other->upper_);
  lower_ = Min(lower_, other->lower_);
  bool b = CanBeMinusZero() || other->CanBeMinusZero();
  set_can_be_minus_zero(b);
}

void Range::CombinedMax(Range* other) {
  upper_ = Max(upper_, other->upper_);
  lower_ = Max(lower_, other->lower_);
  set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
}

void Range::CombinedMin(Range* other) {
  upper_ = Min(upper_, other->upper_);
  lower_ = Min(lower_, other->lower_);
  set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
}

void Range::Sar(int32_t value) {
  int32_t bits = value & 0x1F;
  lower_ = lower_ >> bits;
  upper_ = upper_ >> bits;
  set_can_be_minus_zero(false);
}

void Range::Shl(int32_t value) {
  int32_t bits = value & 0x1F;
  int old_lower = lower_;
  int old_upper = upper_;
  lower_ = lower_ << bits;
  upper_ = upper_ << bits;
  if (old_lower != lower_ >> bits || old_upper != upper_ >> bits) {
    upper_ = kMaxInt;
    lower_ = kMinInt;
  }
  set_can_be_minus_zero(false);
}

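// The *AndCheckOverflow methods below update the range in place and return
// true when a bound may have been clamped, i.e. when the operation could
// overflow the given representation.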
bool Range::AddAndCheckOverflow(const Representation& r, Range* other) {
  bool may_overflow = false;
  lower_ = AddWithoutOverflow(r, lower_, other->lower(), &may_overflow);
  upper_ = AddWithoutOverflow(r, upper_, other->upper(), &may_overflow);
  KeepOrder();
#ifdef DEBUG
  Verify();
#endif
  return may_overflow;
}

bool Range::SubAndCheckOverflow(const Representation& r, Range* other) {
  bool may_overflow = false;
  lower_ = SubWithoutOverflow(r, lower_, other->upper(), &may_overflow);
  upper_ = SubWithoutOverflow(r, upper_, other->lower(), &may_overflow);
  KeepOrder();
#ifdef DEBUG
  Verify();
#endif
  return may_overflow;
}

void Range::KeepOrder() {
  if (lower_ > upper_) {
    int32_t tmp = lower_;
    lower_ = upper_;
    upper_ = tmp;
  }
}

#ifdef DEBUG
void Range::Verify() const {
  DCHECK(lower_ <= upper_);
}
#endif

bool Range::MulAndCheckOverflow(const Representation& r, Range* other) {
  bool may_overflow = false;
  int v1 = MulWithoutOverflow(r, lower_, other->lower(), &may_overflow);
  int v2 = MulWithoutOverflow(r, lower_, other->upper(), &may_overflow);
  int v3 = MulWithoutOverflow(r, upper_, other->lower(), &may_overflow);
  int v4 = MulWithoutOverflow(r, upper_, other->upper(), &may_overflow);
  lower_ = Min(Min(v1, v2), Min(v3, v4));
  upper_ = Max(Max(v1, v2), Max(v3, v4));
#ifdef DEBUG
  Verify();
#endif
  return may_overflow;
}

bool HValue::IsDefinedAfter(HBasicBlock* other) const {
  return block()->block_id() > other->block_id();
}

HUseListNode* HUseListNode::tail() {
  // Skip and remove dead items in the use list.
  while (tail_ != NULL && tail_->value()->CheckFlag(HValue::kIsDead)) {
    tail_ = tail_->tail_;
  }
  return tail_;
}

bool HValue::CheckUsesForFlag(Flag f) const {
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    if (it.value()->IsSimulate()) continue;
    if (!it.value()->CheckFlag(f)) return false;
  }
  return true;
}

bool HValue::CheckUsesForFlag(Flag f, HValue** value) const {
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    if (it.value()->IsSimulate()) continue;
    if (!it.value()->CheckFlag(f)) {
      *value = it.value();
      return false;
    }
  }
  return true;
}

bool HValue::HasAtLeastOneUseWithFlagAndNoneWithout(Flag f) const {
  bool return_value = false;
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    if (it.value()->IsSimulate()) continue;
    if (!it.value()->CheckFlag(f)) return false;
    return_value = true;
  }
  return return_value;
}

HUseIterator::HUseIterator(HUseListNode* head) : next_(head) {
  Advance();
}

void HUseIterator::Advance() {
  current_ = next_;
  if (current_ != NULL) {
    next_ = current_->tail();
    value_ = current_->value();
    index_ = current_->index();
  }
}

int HValue::UseCount() const {
  int count = 0;
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) ++count;
  return count;
}

HUseListNode* HValue::RemoveUse(HValue* value, int index) {
  HUseListNode* previous = NULL;
  HUseListNode* current = use_list_;
  while (current != NULL) {
    if (current->value() == value && current->index() == index) {
      if (previous == NULL) {
        use_list_ = current->tail();
      } else {
        previous->set_tail(current->tail());
      }
      break;
    }

    previous = current;
    current = current->tail();
  }

#ifdef DEBUG
  // Do not reuse use list nodes in debug mode, zap them.
  if (current != NULL) {
    HUseListNode* temp =
        new(block()->zone())
        HUseListNode(current->value(), current->index(), NULL);
    current->Zap();
    current = temp;
  }
#endif
  return current;
}

bool HValue::Equals(HValue* other) {
  if (other->opcode() != opcode()) return false;
  if (!other->representation().Equals(representation())) return false;
  if (!other->type_.Equals(type_)) return false;
  if (other->flags() != flags()) return false;
  if (OperandCount() != other->OperandCount()) return false;
  for (int i = 0; i < OperandCount(); ++i) {
    if (OperandAt(i)->id() != other->OperandAt(i)->id()) return false;
  }
  bool result = DataEquals(other);
  DCHECK(!result || Hashcode() == other->Hashcode());
  return result;
}

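// Hash function matching Equals above; equal values must produce equal hash
// codes (the DCHECK in Equals relies on this).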
intptr_t HValue::Hashcode() {
  intptr_t result = opcode();
  int count = OperandCount();
  for (int i = 0; i < count; ++i) {
    result = result * 19 + OperandAt(i)->id() + (result >> 7);
  }
  return result;
}

const char* HValue::Mnemonic() const {
  switch (opcode()) {
#define MAKE_CASE(type) case k##type: return #type;
    HYDROGEN_CONCRETE_INSTRUCTION_LIST(MAKE_CASE)
#undef MAKE_CASE
    case kPhi: return "Phi";
    default: return "";
  }
}

bool HValue::CanReplaceWithDummyUses() {
  return FLAG_unreachable_code_elimination &&
      !(block()->IsReachable() ||
        IsBlockEntry() ||
        IsControlInstruction() ||
        IsArgumentsObject() ||
        IsCapturedObject() ||
        IsSimulate() ||
        IsEnterInlined() ||
        IsLeaveInlined());
}

bool HValue::IsInteger32Constant() {
  return IsConstant() && HConstant::cast(this)->HasInteger32Value();
}

int32_t HValue::GetInteger32Constant() {
  return HConstant::cast(this)->Integer32Value();
}

bool HValue::EqualsInteger32Constant(int32_t value) {
  return IsInteger32Constant() && GetInteger32Constant() == value;
}

void HValue::SetOperandAt(int index, HValue* value) {
  RegisterUse(index, value);
  InternalSetOperandAt(index, value);
}

void HValue::DeleteAndReplaceWith(HValue* other) {
  // We replace all uses first, so Delete can assert that there are none.
  if (other != NULL) ReplaceAllUsesWith(other);
  Kill();
  DeleteFromGraph();
}

void HValue::ReplaceAllUsesWith(HValue* other) {
  while (use_list_ != NULL) {
    HUseListNode* list_node = use_list_;
    HValue* value = list_node->value();
    DCHECK(!value->block()->IsStartBlock());
    value->InternalSetOperandAt(list_node->index(), other);
    use_list_ = list_node->tail();
    list_node->set_tail(other->use_list_);
    other->use_list_ = list_node;
  }
}

void HValue::Kill() {
  // Instead of going through the entire use list of each operand, we only
  // check the first item in each use list and rely on the tail() method to
  // skip dead items, removing them lazily next time we traverse the list.
  SetFlag(kIsDead);
  for (int i = 0; i < OperandCount(); ++i) {
    HValue* operand = OperandAt(i);
    if (operand == NULL) continue;
    HUseListNode* first = operand->use_list_;
    if (first != NULL && first->value()->CheckFlag(kIsDead)) {
      operand->use_list_ = first->tail();
    }
  }
}

void HValue::SetBlock(HBasicBlock* block) {
  DCHECK(block_ == NULL || block == NULL);
  block_ = block;
  if (id_ == kNoNumber && block != NULL) {
    id_ = block->graph()->GetNextValueID(this);
  }
}

std::ostream& operator<<(std::ostream& os, const HValue& v) {
  return v.PrintTo(os);
}

std::ostream& operator<<(std::ostream& os, const TypeOf& t) {
  if (t.value->representation().IsTagged() &&
      !t.value->type().Equals(HType::Tagged()))
    return os;
  return os << " type:" << t.value->type();
}

std::ostream& operator<<(std::ostream& os, const ChangesOf& c) {
  GVNFlagSet changes_flags = c.value->ChangesFlags();
  if (changes_flags.IsEmpty()) return os;
  os << " changes[";
  if (changes_flags == c.value->AllSideEffectsFlagSet()) {
    os << "*";
  } else {
    bool add_comma = false;
#define PRINT_DO(Type)                   \
  if (changes_flags.Contains(k##Type)) { \
    if (add_comma) os << ",";            \
    add_comma = true;                    \
    os << #Type;                         \
  }
    GVN_TRACKED_FLAG_LIST(PRINT_DO);
    GVN_UNTRACKED_FLAG_LIST(PRINT_DO);
#undef PRINT_DO
  }
  return os << "]";
}

bool HValue::HasMonomorphicJSObjectType() {
  return !GetMonomorphicJSObjectMap().is_null();
}

bool HValue::UpdateInferredType() {
  HType type = CalculateInferredType();
  bool result = (!type.Equals(type_));
  type_ = type;
  return result;
}

void HValue::RegisterUse(int index, HValue* new_value) {
  HValue* old_value = OperandAt(index);
  if (old_value == new_value) return;

  HUseListNode* removed = NULL;
  if (old_value != NULL) {
    removed = old_value->RemoveUse(this, index);
  }

  if (new_value != NULL) {
    if (removed == NULL) {
      new_value->use_list_ = new(new_value->block()->zone()) HUseListNode(
          this, index, new_value->use_list_);
    } else {
      removed->set_tail(new_value->use_list_);
      new_value->use_list_ = removed;
    }
  }
}

void HValue::AddNewRange(Range* r, Zone* zone) {
  if (!HasRange()) ComputeInitialRange(zone);
  if (!HasRange()) range_ = new(zone) Range();
  DCHECK(HasRange());
  r->StackUpon(range_);
  range_ = r;
}

void HValue::RemoveLastAddedRange() {
  DCHECK(HasRange());
  DCHECK(range_->next() != NULL);
  range_ = range_->next();
}

void HValue::ComputeInitialRange(Zone* zone) {
  DCHECK(!HasRange());
  range_ = InferRange(zone);
  DCHECK(HasRange());
}

std::ostream& HInstruction::PrintTo(std::ostream& os) const {  // NOLINT
  os << Mnemonic() << " ";
  PrintDataTo(os) << ChangesOf(this) << TypeOf(this);
  if (CheckFlag(HValue::kHasNoObservableSideEffects)) os << " [noOSE]";
  if (CheckFlag(HValue::kIsDead)) os << " [dead]";
  return os;
}

std::ostream& HInstruction::PrintDataTo(std::ostream& os) const {  // NOLINT
  for (int i = 0; i < OperandCount(); ++i) {
    if (i > 0) os << " ";
    os << NameOf(OperandAt(i));
  }
  return os;
}

void HInstruction::Unlink() {
  DCHECK(IsLinked());
  DCHECK(!IsControlInstruction());  // Must never move control instructions.
  DCHECK(!IsBlockEntry());  // Doesn't make sense to delete these.
  DCHECK(previous_ != NULL);
  previous_->next_ = next_;
  if (next_ == NULL) {
    DCHECK(block()->last() == this);
    block()->set_last(previous_);
  } else {
    next_->previous_ = previous_;
  }
  clear_block();
}

void HInstruction::InsertBefore(HInstruction* next) {
  DCHECK(!IsLinked());
  DCHECK(!next->IsBlockEntry());
  DCHECK(!IsControlInstruction());
  DCHECK(!next->block()->IsStartBlock());
  DCHECK(next->previous_ != NULL);
  HInstruction* prev = next->previous();
  prev->next_ = this;
  next->previous_ = this;
  next_ = next;
  previous_ = prev;
  SetBlock(next->block());
  if (!has_position() && next->has_position()) {
    set_position(next->position());
  }
}

void HInstruction::InsertAfter(HInstruction* previous) {
  DCHECK(!IsLinked());
  DCHECK(!previous->IsControlInstruction());
  DCHECK(!IsControlInstruction() || previous->next_ == NULL);
  HBasicBlock* block = previous->block();
  // Never insert anything except constants into the start block after finishing
  // the graph.
  if (block->IsStartBlock() && block->IsFinished() && !IsConstant()) {
    DCHECK(block->end()->SecondSuccessor() == NULL);
    InsertAfter(block->end()->FirstSuccessor()->first());
    return;
  }

  // If we're inserting after an instruction with side-effects that is
  // followed by a simulate instruction, we need to insert after the
  // simulate instruction instead.
  HInstruction* next = previous->next_;
  if (previous->HasObservableSideEffects() && next != NULL) {
    DCHECK(next->IsSimulate());
    previous = next;
    next = previous->next_;
  }

  previous_ = previous;
  next_ = next;
  SetBlock(block);
  previous->next_ = this;
  if (next != NULL) next->previous_ = this;
  if (block->last() == previous) {
    block->set_last(this);
  }
  if (!has_position() && previous->has_position()) {
    set_position(previous->position());
  }
}

bool HInstruction::Dominates(HInstruction* other) {
  if (block() != other->block()) {
    return block()->Dominates(other->block());
  }
  // Both instructions are in the same basic block. This instruction
  // should precede the other one in order to dominate it.
  for (HInstruction* instr = next(); instr != NULL; instr = instr->next()) {
    if (instr == other) {
      return true;
    }
  }
  return false;
}

#ifdef DEBUG
void HInstruction::Verify() {
  // Verify that input operands are defined before use.
  HBasicBlock* cur_block = block();
  for (int i = 0; i < OperandCount(); ++i) {
    HValue* other_operand = OperandAt(i);
    if (other_operand == NULL) continue;
    HBasicBlock* other_block = other_operand->block();
    if (cur_block == other_block) {
      if (!other_operand->IsPhi()) {
        HInstruction* cur = this->previous();
        while (cur != NULL) {
          if (cur == other_operand) break;
          cur = cur->previous();
        }
        // Must reach other operand in the same block!
        DCHECK(cur == other_operand);
      }
    } else {
      // If the following assert fires, you may have forgotten an
      // AddInstruction.
      DCHECK(other_block->Dominates(cur_block));
    }
  }

  // Verify that instructions that may have side-effects are followed
  // by a simulate instruction.
  if (HasObservableSideEffects() && !IsOsrEntry()) {
    DCHECK(next()->IsSimulate());
  }

  // Verify that instructions that can be eliminated by GVN have overridden
  // HValue::DataEquals. The default implementation is UNREACHABLE. We
  // don't actually care whether DataEquals returns true or false here.
  if (CheckFlag(kUseGVN)) DataEquals(this);

  // Verify that all uses are in the graph.
  for (HUseIterator use = uses(); !use.Done(); use.Advance()) {
    if (use.value()->IsInstruction()) {
      DCHECK(HInstruction::cast(use.value())->IsLinked());
    }
  }
}
#endif

bool HInstruction::CanDeoptimize() {
  // TODO(titzer): make this a virtual method?
  switch (opcode()) {
    case HValue::kAbnormalExit:
    case HValue::kAccessArgumentsAt:
    case HValue::kAllocate:
    case HValue::kArgumentsElements:
    case HValue::kArgumentsLength:
    case HValue::kArgumentsObject:
    case HValue::kBlockEntry:
    case HValue::kBoundsCheckBaseIndexInformation:
    case HValue::kCallFunction:
    case HValue::kCallNew:
    case HValue::kCallNewArray:
    case HValue::kCallStub:
    case HValue::kCapturedObject:
    case HValue::kClassOfTestAndBranch:
    case HValue::kCompareGeneric:
    case HValue::kCompareHoleAndBranch:
    case HValue::kCompareMap:
    case HValue::kCompareMinusZeroAndBranch:
    case HValue::kCompareNumericAndBranch:
    case HValue::kCompareObjectEqAndBranch:
    case HValue::kConstant:
    case HValue::kConstructDouble:
    case HValue::kContext:
    case HValue::kDebugBreak:
    case HValue::kDeclareGlobals:
    case HValue::kDoubleBits:
    case HValue::kDummyUse:
    case HValue::kEnterInlined:
    case HValue::kEnvironmentMarker:
    case HValue::kForceRepresentation:
    case HValue::kGetCachedArrayIndex:
    case HValue::kGoto:
    case HValue::kHasCachedArrayIndexAndBranch:
    case HValue::kHasInstanceTypeAndBranch:
    case HValue::kInnerAllocatedObject:
    case HValue::kInstanceOf:
    case HValue::kInstanceOfKnownGlobal:
    case HValue::kIsConstructCallAndBranch:
    case HValue::kIsObjectAndBranch:
    case HValue::kIsSmiAndBranch:
    case HValue::kIsStringAndBranch:
    case HValue::kIsUndetectableAndBranch:
    case HValue::kLeaveInlined:
    case HValue::kLoadFieldByIndex:
    case HValue::kLoadGlobalGeneric:
    case HValue::kLoadGlobalViaContext:
    case HValue::kLoadNamedField:
    case HValue::kLoadNamedGeneric:
    case HValue::kLoadRoot:
    case HValue::kMapEnumLength:
    case HValue::kMathMinMax:
    case HValue::kParameter:
    case HValue::kPhi:
    case HValue::kPushArguments:
    case HValue::kRegExpLiteral:
    case HValue::kReturn:
    case HValue::kSeqStringGetChar:
    case HValue::kStoreCodeEntry:
    case HValue::kStoreFrameContext:
    case HValue::kStoreGlobalViaContext:
    case HValue::kStoreKeyed:
    case HValue::kStoreNamedField:
    case HValue::kStoreNamedGeneric:
    case HValue::kStringCharCodeAt:
    case HValue::kStringCharFromCode:
    case HValue::kThisFunction:
    case HValue::kTypeofIsAndBranch:
    case HValue::kUnknownOSRValue:
    case HValue::kUseConst:
      return false;

    case HValue::kAdd:
    case HValue::kAllocateBlockContext:
    case HValue::kApplyArguments:
    case HValue::kBitwise:
    case HValue::kBoundsCheck:
    case HValue::kBranch:
    case HValue::kCallJSFunction:
    case HValue::kCallRuntime:
    case HValue::kCallWithDescriptor:
    case HValue::kChange:
    case HValue::kCheckArrayBufferNotNeutered:
    case HValue::kCheckHeapObject:
    case HValue::kCheckInstanceType:
    case HValue::kCheckMapValue:
    case HValue::kCheckMaps:
    case HValue::kCheckSmi:
    case HValue::kCheckValue:
    case HValue::kClampToUint8:
    case HValue::kDateField:
    case HValue::kDeoptimize:
    case HValue::kDiv:
    case HValue::kForInCacheArray:
    case HValue::kForInPrepareMap:
    case HValue::kFunctionLiteral:
    case HValue::kInvokeFunction:
    case HValue::kLoadContextSlot:
    case HValue::kLoadFunctionPrototype:
    case HValue::kLoadKeyed:
    case HValue::kLoadKeyedGeneric:
    case HValue::kMathFloorOfDiv:
    case HValue::kMaybeGrowElements:
    case HValue::kMod:
    case HValue::kMul:
    case HValue::kOsrEntry:
    case HValue::kPower:
    case HValue::kRor:
    case HValue::kSar:
    case HValue::kSeqStringSetChar:
    case HValue::kShl:
    case HValue::kShr:
    case HValue::kSimulate:
    case HValue::kStackCheck:
    case HValue::kStoreContextSlot:
    case HValue::kStoreKeyedGeneric:
    case HValue::kStringAdd:
    case HValue::kStringCompareAndBranch:
    case HValue::kSub:
    case HValue::kToFastProperties:
    case HValue::kTransitionElementsKind:
    case HValue::kTrapAllocationMemento:
    case HValue::kTypeof:
    case HValue::kUnaryMathOperation:
    case HValue::kWrapReceiver:
      return true;
  }
  UNREACHABLE();
  return true;
}

std::ostream& operator<<(std::ostream& os, const NameOf& v) {
  return os << v.value->representation().Mnemonic() << v.value->id();
}

std::ostream& HDummyUse::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(value());
}

std::ostream& HEnvironmentMarker::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  return os << (kind() == BIND ? "bind" : "lookup") << " var[" << index()
            << "]";
}

std::ostream& HUnaryCall::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(value()) << " #" << argument_count();
}

std::ostream& HCallJSFunction::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(function()) << " #" << argument_count();
}

HCallJSFunction* HCallJSFunction::New(Isolate* isolate, Zone* zone,
                                      HValue* context, HValue* function,
                                      int argument_count,
                                      bool pass_argument_count) {
  bool has_stack_check = false;
  if (function->IsConstant()) {
    HConstant* fun_const = HConstant::cast(function);
    Handle<JSFunction> jsfun =
        Handle<JSFunction>::cast(fun_const->handle(isolate));
    has_stack_check = !jsfun.is_null() &&
        (jsfun->code()->kind() == Code::FUNCTION ||
         jsfun->code()->kind() == Code::OPTIMIZED_FUNCTION);
  }

  return new(zone) HCallJSFunction(
      function, argument_count, pass_argument_count,
      has_stack_check);
}

std::ostream& HBinaryCall::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(first()) << " " << NameOf(second()) << " #"
            << argument_count();
}

std::ostream& HCallFunction::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << NameOf(context()) << " " << NameOf(function());
  if (HasVectorAndSlot()) {
    os << " (type-feedback-vector icslot " << slot().ToInt() << ")";
  }
  return os;
}

void HBoundsCheck::ApplyIndexChange() {
  if (skip_check()) return;

  DecompositionResult decomposition;
  bool index_is_decomposable = index()->TryDecompose(&decomposition);
  if (index_is_decomposable) {
    DCHECK(decomposition.base() == base());
    if (decomposition.offset() == offset() &&
        decomposition.scale() == scale()) return;
  } else {
    return;
  }

  ReplaceAllUsesWith(index());

  HValue* current_index = decomposition.base();
  int actual_offset = decomposition.offset() + offset();
  int actual_scale = decomposition.scale() + scale();

  HGraph* graph = block()->graph();
  Isolate* isolate = graph->isolate();
  Zone* zone = graph->zone();
  HValue* context = graph->GetInvalidContext();
  if (actual_offset != 0) {
    HConstant* add_offset =
        HConstant::New(isolate, zone, context, actual_offset);
    add_offset->InsertBefore(this);
    HInstruction* add =
        HAdd::New(isolate, zone, context, current_index, add_offset);
    add->InsertBefore(this);
    add->AssumeRepresentation(index()->representation());
    add->ClearFlag(kCanOverflow);
    current_index = add;
  }

  if (actual_scale != 0) {
    HConstant* sar_scale = HConstant::New(isolate, zone, context, actual_scale);
    sar_scale->InsertBefore(this);
    HInstruction* sar =
        HSar::New(isolate, zone, context, current_index, sar_scale);
    sar->InsertBefore(this);
    sar->AssumeRepresentation(index()->representation());
    current_index = sar;
  }

  SetOperandAt(0, current_index);

  base_ = NULL;
  offset_ = 0;
  scale_ = 0;
}

std::ostream& HBoundsCheck::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << NameOf(index()) << " " << NameOf(length());
  if (base() != NULL && (offset() != 0 || scale() != 0)) {
    os << " base: ((";
    if (base() != index()) {
      os << NameOf(index());
    } else {
      os << "index";
    }
    os << " + " << offset() << ") >> " << scale() << ")";
  }
  if (skip_check()) os << " [DISABLED]";
  return os;
}

void HBoundsCheck::InferRepresentation(HInferRepresentationPhase* h_infer) {
  DCHECK(CheckFlag(kFlexibleRepresentation));
  HValue* actual_index = index()->ActualValue();
  HValue* actual_length = length()->ActualValue();
  Representation index_rep = actual_index->representation();
  Representation length_rep = actual_length->representation();
  if (index_rep.IsTagged() && actual_index->type().IsSmi()) {
    index_rep = Representation::Smi();
  }
  if (length_rep.IsTagged() && actual_length->type().IsSmi()) {
    length_rep = Representation::Smi();
  }
  Representation r = index_rep.generalize(length_rep);
  if (r.is_more_general_than(Representation::Integer32())) {
    r = Representation::Integer32();
  }
  UpdateRepresentation(r, h_infer, "boundscheck");
}

Range* HBoundsCheck::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32() && length()->HasRange()) {
    int upper = length()->range()->upper() - (allow_equality() ? 0 : 1);
    int lower = 0;

    Range* result = new(zone) Range(lower, upper);
    if (index()->HasRange()) {
      result->Intersect(index()->range());
    }

    // In case of Smi representation, clamp result to Smi::kMaxValue.
    if (r.IsSmi()) result->ClampToSmi();
    return result;
  }
  return HValue::InferRange(zone);
}

std::ostream& HBoundsCheckBaseIndexInformation::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  // TODO(svenpanne) This 2nd base_index() looks wrong...
  return os << "base: " << NameOf(base_index())
            << ", check: " << NameOf(base_index());
}

std::ostream& HCallWithDescriptor::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  for (int i = 0; i < OperandCount(); i++) {
    os << NameOf(OperandAt(i)) << " ";
  }
  return os << "#" << argument_count();
}

std::ostream& HCallNewArray::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << ElementsKindToString(elements_kind()) << " ";
  return HBinaryCall::PrintDataTo(os);
}

std::ostream& HCallRuntime::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << name()->ToCString().get() << " ";
  if (save_doubles() == kSaveFPRegs) os << "[save doubles] ";
  return os << "#" << argument_count();
}

std::ostream& HClassOfTestAndBranch::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  return os << "class_of_test(" << NameOf(value()) << ", \""
            << class_name()->ToCString().get() << "\")";
}

std::ostream& HWrapReceiver::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(receiver()) << " " << NameOf(function());
}

std::ostream& HAccessArgumentsAt::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  return os << NameOf(arguments()) << "[" << NameOf(index()) << "], length "
            << NameOf(length());
}

std::ostream& HAllocateBlockContext::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  return os << NameOf(context()) << " " << NameOf(function());
}

std::ostream& HControlInstruction::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  os << " goto (";
  bool first_block = true;
  for (HSuccessorIterator it(this); !it.Done(); it.Advance()) {
    if (!first_block) os << ", ";
    os << *it.Current();
    first_block = false;
  }
  return os << ")";
}

std::ostream& HUnaryControlInstruction::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  os << NameOf(value());
  return HControlInstruction::PrintDataTo(os);
}

std::ostream& HReturn::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(value()) << " (pop " << NameOf(parameter_count())
            << " values)";
}

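// Maps the ToBoolean type-feedback bits to the representation the branch
// should expect for its input.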
Representation HBranch::observed_input_representation(int index) {
  if (expected_input_types_.Contains(ToBooleanStub::NULL_TYPE) ||
      expected_input_types_.Contains(ToBooleanStub::SPEC_OBJECT) ||
      expected_input_types_.Contains(ToBooleanStub::STRING) ||
      expected_input_types_.Contains(ToBooleanStub::SYMBOL) ||
      expected_input_types_.Contains(ToBooleanStub::SIMD_VALUE)) {
    return Representation::Tagged();
  }
  if (expected_input_types_.Contains(ToBooleanStub::UNDEFINED)) {
    if (expected_input_types_.Contains(ToBooleanStub::HEAP_NUMBER)) {
      return Representation::Double();
    }
    return Representation::Tagged();
  }
  if (expected_input_types_.Contains(ToBooleanStub::HEAP_NUMBER)) {
    return Representation::Double();
  }
  if (expected_input_types_.Contains(ToBooleanStub::SMI)) {
    return Representation::Smi();
  }
  return Representation::None();
}

bool HBranch::KnownSuccessorBlock(HBasicBlock** block) {
  HValue* value = this->value();
  if (value->EmitAtUses()) {
    DCHECK(value->IsConstant());
    DCHECK(!value->representation().IsDouble());
    *block = HConstant::cast(value)->BooleanValue()
        ? FirstSuccessor()
        : SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}

std::ostream& HBranch::PrintDataTo(std::ostream& os) const {  // NOLINT
  return HUnaryControlInstruction::PrintDataTo(os) << " "
                                                   << expected_input_types();
}

std::ostream& HCompareMap::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << NameOf(value()) << " (" << *map().handle() << ")";
  HControlInstruction::PrintDataTo(os);
  if (known_successor_index() == 0) {
    os << " [true]";
  } else if (known_successor_index() == 1) {
    os << " [false]";
  }
  return os;
}

const char* HUnaryMathOperation::OpName() const {
  switch (op()) {
    case kMathFloor:
      return "floor";
    case kMathFround:
      return "fround";
    case kMathRound:
      return "round";
    case kMathAbs:
      return "abs";
    case kMathLog:
      return "log";
    case kMathExp:
      return "exp";
    case kMathSqrt:
      return "sqrt";
    case kMathPowHalf:
      return "pow-half";
    case kMathClz32:
      return "clz32";
    default:
      UNREACHABLE();
      return NULL;
  }
}

Range* HUnaryMathOperation::InferRange(Zone* zone) {
  Representation r = representation();
  if (op() == kMathClz32) return new(zone) Range(0, 32);
  if (r.IsSmiOrInteger32() && value()->HasRange()) {
    if (op() == kMathAbs) {
      int upper = value()->range()->upper();
      int lower = value()->range()->lower();
      bool spans_zero = value()->range()->CanBeZero();
      // Math.abs(kMinInt) overflows its representation, on which the
      // instruction deopts. Hence clamp it to kMaxInt.
      int abs_upper = upper == kMinInt ? kMaxInt : abs(upper);
      int abs_lower = lower == kMinInt ? kMaxInt : abs(lower);
      Range* result =
          new(zone) Range(spans_zero ? 0 : Min(abs_lower, abs_upper),
                          Max(abs_lower, abs_upper));
      // In case of Smi representation, clamp Math.abs(Smi::kMinValue) to
      // Smi::kMaxValue.
      if (r.IsSmi()) result->ClampToSmi();
      return result;
    }
  }
  return HValue::InferRange(zone);
}

std::ostream& HUnaryMathOperation::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  return os << OpName() << " " << NameOf(value());
}

std::ostream& HUnaryOperation::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(value());
}

std::ostream& HHasInstanceTypeAndBranch::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  os << NameOf(value());
  switch (from_) {
    case FIRST_JS_RECEIVER_TYPE:
      if (to_ == LAST_TYPE) os << " spec_object";
      break;
    case JS_REGEXP_TYPE:
      if (to_ == JS_REGEXP_TYPE) os << " reg_exp";
      break;
    case JS_ARRAY_TYPE:
      if (to_ == JS_ARRAY_TYPE) os << " array";
      break;
    case JS_FUNCTION_TYPE:
      if (to_ == JS_FUNCTION_TYPE) os << " function";
      break;
    default:
      break;
  }
  return HControlInstruction::PrintDataTo(os);
}

std::ostream& HTypeofIsAndBranch::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  os << NameOf(value()) << " == " << type_literal()->ToCString().get();
  return HControlInstruction::PrintDataTo(os);
}

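// Compile-time evaluation of JavaScript's typeof for constants; used below
// to fold HTypeofIsAndBranch when its input is known.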
static String* TypeOfString(HConstant* constant, Isolate* isolate) {
  Heap* heap = isolate->heap();
  if (constant->HasNumberValue()) return heap->number_string();
  if (constant->IsUndetectable()) return heap->undefined_string();
  if (constant->HasStringValue()) return heap->string_string();
  switch (constant->GetInstanceType()) {
    case ODDBALL_TYPE: {
      Unique<Object> unique = constant->GetUnique();
      if (unique.IsKnownGlobal(heap->true_value()) ||
          unique.IsKnownGlobal(heap->false_value())) {
        return heap->boolean_string();
      }
      if (unique.IsKnownGlobal(heap->null_value())) {
        return heap->object_string();
      }
      DCHECK(unique.IsKnownGlobal(heap->undefined_value()));
      return heap->undefined_string();
    }
    case SYMBOL_TYPE:
      return heap->symbol_string();
    case FLOAT32X4_TYPE:
      return heap->float32x4_string();
    case JS_FUNCTION_TYPE:
    case JS_FUNCTION_PROXY_TYPE:
      return heap->function_string();
    default:
      return heap->object_string();
  }
}

bool HTypeofIsAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (FLAG_fold_constants && value()->IsConstant()) {
    HConstant* constant = HConstant::cast(value());
    String* type_string = TypeOfString(constant, isolate());
    bool same_type = type_literal_.IsKnownGlobal(type_string);
    *block = same_type ? FirstSuccessor() : SecondSuccessor();
    return true;
  } else if (value()->representation().IsSpecialization()) {
    bool number_type =
        type_literal_.IsKnownGlobal(isolate()->heap()->number_string());
    *block = number_type ? FirstSuccessor() : SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}

std::ostream& HCheckMapValue::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(value()) << " " << NameOf(map());
}

HValue* HCheckMapValue::Canonicalize() {
  if (map()->IsConstant()) {
    HConstant* c_map = HConstant::cast(map());
    return HCheckMaps::CreateAndInsertAfter(
        block()->graph()->zone(), value(), c_map->MapValue(),
        c_map->HasStableMapValue(), this);
  }
  return this;
}

std::ostream& HForInPrepareMap::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(enumerable());
}

std::ostream& HForInCacheArray::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(enumerable()) << " " << NameOf(map()) << "[" << idx_
            << "]";
}

std::ostream& HLoadFieldByIndex::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  return os << NameOf(object()) << " " << NameOf(index());
}

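// Pattern helpers for HBitwise::Canonicalize below: bitwise negation ~x is
// emitted as x ^ ~0, and a double negation ~~x is a common idiom for
// truncating x to an int32.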
static bool MatchLeftIsOnes(HValue* l, HValue* r, HValue** negated) {
  if (!l->EqualsInteger32Constant(~0)) return false;
  *negated = r;
  return true;
}

static bool MatchNegationViaXor(HValue* instr, HValue** negated) {
  if (!instr->IsBitwise()) return false;
  HBitwise* b = HBitwise::cast(instr);
  return (b->op() == Token::BIT_XOR) &&
         (MatchLeftIsOnes(b->left(), b->right(), negated) ||
          MatchLeftIsOnes(b->right(), b->left(), negated));
}

static bool MatchDoubleNegation(HValue* instr, HValue** arg) {
  HValue* negated;
  return MatchNegationViaXor(instr, &negated) &&
         MatchNegationViaXor(negated, arg);
}

HValue* HBitwise::Canonicalize() {
  if (!representation().IsSmiOrInteger32()) return this;
  // If x is an int32, then x & -1 == x, x | 0 == x and x ^ 0 == x.
  int32_t nop_constant = (op() == Token::BIT_AND) ? -1 : 0;
  if (left()->EqualsInteger32Constant(nop_constant) &&
      !right()->CheckFlag(kUint32)) {
    return right();
  }
  if (right()->EqualsInteger32Constant(nop_constant) &&
      !left()->CheckFlag(kUint32)) {
    return left();
  }
  // Optimize double negation, a common pattern used for ToInt32(x).
  HValue* arg;
  if (MatchDoubleNegation(this, &arg) && !arg->CheckFlag(kUint32)) {
    return arg;
  }
  return this;
}

HInstruction* HAdd::New(Isolate* isolate, Zone* zone, HValue* context,
                        HValue* left, HValue* right, Strength strength,
                        ExternalAddType external_add_type) {
  // For everything else, you should use the other factory method without
  // ExternalAddType.
  DCHECK_EQ(external_add_type, AddOfExternalAndTagged);
  return new (zone) HAdd(context, left, right, strength, external_add_type);
}

Representation HAdd::RepresentationFromInputs() {
  Representation left_rep = left()->representation();
  if (left_rep.IsExternal()) {
    return Representation::External();
  }
  return HArithmeticBinaryOperation::RepresentationFromInputs();
}

Representation HAdd::RequiredInputRepresentation(int index) {
  if (index == 2) {
    Representation left_rep = left()->representation();
    if (left_rep.IsExternal()) {
      if (external_add_type_ == AddOfExternalAndTagged) {
        return Representation::Tagged();
      } else {
        return Representation::Integer32();
      }
    }
  }
  return HArithmeticBinaryOperation::RequiredInputRepresentation(index);
}

static bool IsIdentityOperation(HValue* arg1, HValue* arg2, int32_t identity) {
  return arg1->representation().IsSpecialization() &&
         arg2->EqualsInteger32Constant(identity);
}

HValue* HAdd::Canonicalize() {
  // Adding 0 is an identity operation except in case of -0: -0 + 0 = +0
  if (IsIdentityOperation(left(), right(), 0) &&
      !left()->representation().IsDouble()) {  // Left could be -0.
    return right();
  }
  if (IsIdentityOperation(right(), left(), 0) &&
      !left()->representation().IsDouble()) {  // Right could be -0.
    return left();
  }
  return this;
}

HValue* HSub::Canonicalize() {
  if (IsIdentityOperation(left(), right(), 0)) return left();
  return this;
}

HValue* HMul::Canonicalize() {
  if (IsIdentityOperation(left(), right(), 1)) return left();
  if (IsIdentityOperation(right(), left(), 1)) return right();
  return this;
}

bool HMul::MulMinusOne() {
  if (left()->EqualsInteger32Constant(-1) ||
      right()->EqualsInteger32Constant(-1)) {
    return true;
  }

  return false;
}

HValue* HMod::Canonicalize() {
  return this;
}

HValue* HDiv::Canonicalize() {
  if (IsIdentityOperation(left(), right(), 1)) return left();
  return this;
}

HValue* HChange::Canonicalize() {
  return (from().Equals(to())) ? value() : this;
}

HValue* HWrapReceiver::Canonicalize() {
  if (HasNoUses()) return NULL;
  if (receiver()->type().IsJSObject()) {
    return receiver();
  }
  return this;
}

std::ostream& HTypeof::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(value());
}

HInstruction* HForceRepresentation::New(Isolate* isolate, Zone* zone,
                                        HValue* context, HValue* value,
                                        Representation representation) {
  if (FLAG_fold_constants && value->IsConstant()) {
    HConstant* c = HConstant::cast(value);
    c = c->CopyToRepresentation(representation, zone);
    if (c != NULL) return c;
  }
  return new(zone) HForceRepresentation(value, representation);
}

std::ostream& HForceRepresentation::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  return os << representation().Mnemonic() << " " << NameOf(value());
}

std::ostream& HChange::PrintDataTo(std::ostream& os) const {  // NOLINT
  HUnaryOperation::PrintDataTo(os);
  os << " " << from().Mnemonic() << " to " << to().Mnemonic();

  if (CanTruncateToSmi()) os << " truncating-smi";
  if (CanTruncateToInt32()) os << " truncating-int32";
  if (CheckFlag(kBailoutOnMinusZero)) os << " -0?";
  if (CheckFlag(kAllowUndefinedAsNaN)) os << " allow-undefined-as-nan";
  return os;
}

HValue* HUnaryMathOperation::Canonicalize() {
  if (op() == kMathRound || op() == kMathFloor) {
    HValue* val = value();
    if (val->IsChange()) val = HChange::cast(val)->value();
    if (val->representation().IsSmiOrInteger32()) {
      if (val->representation().Equals(representation())) return val;
      return Prepend(new(block()->zone()) HChange(
          val, representation(), false, false));
    }
  }
  if (op() == kMathFloor && value()->IsDiv() && value()->HasOneUse()) {
    HDiv* hdiv = HDiv::cast(value());

    HValue* left = hdiv->left();
    if (left->representation().IsInteger32()) {
      // A value with an integer representation does not need to be transformed.
    } else if (left->IsChange() && HChange::cast(left)->from().IsInteger32()) {
      // A change from an integer32 can be replaced by the integer32 value.
      left = HChange::cast(left)->value();
    } else if (hdiv->observed_input_representation(1).IsSmiOrInteger32()) {
      left = Prepend(new(block()->zone()) HChange(
          left, Representation::Integer32(), false, false));
    } else {
      return this;
    }

    HValue* right = hdiv->right();
    if (right->IsInteger32Constant()) {
      right = Prepend(HConstant::cast(right)->CopyToRepresentation(
          Representation::Integer32(), right->block()->zone()));
    } else if (right->representation().IsInteger32()) {
      // A value with an integer representation does not need to be transformed.
    } else if (right->IsChange() &&
               HChange::cast(right)->from().IsInteger32()) {
      // A change from an integer32 can be replaced by the integer32 value.
      right = HChange::cast(right)->value();
    } else if (hdiv->observed_input_representation(2).IsSmiOrInteger32()) {
      right = Prepend(new(block()->zone()) HChange(
          right, Representation::Integer32(), false, false));
    } else {
      return this;
    }

    return Prepend(HMathFloorOfDiv::New(
        block()->graph()->isolate(), block()->zone(), context(), left, right));
  }
  return this;
}

HValue* HCheckInstanceType::Canonicalize() {
  if ((check_ == IS_SPEC_OBJECT && value()->type().IsJSObject()) ||
      (check_ == IS_JS_ARRAY && value()->type().IsJSArray()) ||
      (check_ == IS_STRING && value()->type().IsString())) {
    return value();
  }

  if (check_ == IS_INTERNALIZED_STRING && value()->IsConstant()) {
    if (HConstant::cast(value())->HasInternalizedStringValue()) {
      return value();
    }
  }
  return this;
}

void HCheckInstanceType::GetCheckInterval(InstanceType* first,
                                          InstanceType* last) {
  DCHECK(is_interval_check());
  switch (check_) {
    case IS_SPEC_OBJECT:
      *first = FIRST_SPEC_OBJECT_TYPE;
      *last = LAST_SPEC_OBJECT_TYPE;
      return;
    case IS_JS_ARRAY:
      *first = *last = JS_ARRAY_TYPE;
      return;
    case IS_JS_DATE:
      *first = *last = JS_DATE_TYPE;
      return;
    default:
      UNREACHABLE();
  }
}

void HCheckInstanceType::GetCheckMaskAndTag(uint8_t* mask, uint8_t* tag) {
  DCHECK(!is_interval_check());
  switch (check_) {
    case IS_STRING:
      *mask = kIsNotStringMask;
      *tag = kStringTag;
      return;
    case IS_INTERNALIZED_STRING:
      *mask = kIsNotStringMask | kIsNotInternalizedMask;
      *tag = kInternalizedTag;
      return;
    default:
      UNREACHABLE();
  }
}

std::ostream& HCheckMaps::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << NameOf(value()) << " [" << *maps()->at(0).handle();
  for (int i = 1; i < maps()->size(); ++i) {
    os << "," << *maps()->at(i).handle();
  }
  os << "]";
  if (IsStabilityCheck()) os << "(stability-check)";
  return os;
}

HValue* HCheckMaps::Canonicalize() {
  if (!IsStabilityCheck() && maps_are_stable() && value()->IsConstant()) {
    HConstant* c_value = HConstant::cast(value());
    if (c_value->HasObjectMap()) {
      for (int i = 0; i < maps()->size(); ++i) {
        if (c_value->ObjectMap() == maps()->at(i)) {
          if (maps()->size() > 1) {
            set_maps(new(block()->graph()->zone()) UniqueSet<Map>(
                    maps()->at(i), block()->graph()->zone()));
          }
          MarkAsStabilityCheck();
          break;
        }
      }
    }
  }
  return this;
}

std::ostream& HCheckValue::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(value()) << " " << Brief(*object().handle());
}

HValue* HCheckValue::Canonicalize() {
  return (value()->IsConstant() &&
          HConstant::cast(value())->EqualsUnique(object_)) ? NULL : this;
}

const char* HCheckInstanceType::GetCheckName() const {
  switch (check_) {
    case IS_SPEC_OBJECT: return "object";
    case IS_JS_ARRAY: return "array";
    case IS_JS_DATE:
      return "date";
    case IS_STRING: return "string";
    case IS_INTERNALIZED_STRING: return "internalized_string";
  }
  UNREACHABLE();
  return "";
}

std::ostream& HCheckInstanceType::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  os << GetCheckName() << " ";
  return HUnaryOperation::PrintDataTo(os);
}

std::ostream& HCallStub::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << CodeStub::MajorName(major_key_, false) << " ";
  return HUnaryCall::PrintDataTo(os);
}

std::ostream& HUnknownOSRValue::PrintDataTo(std::ostream& os) const {  // NOLINT
  const char* type = "expression";
  if (environment_->is_local_index(index_)) type = "local";
  if (environment_->is_special_index(index_)) type = "special";
  if (environment_->is_parameter_index(index_)) type = "parameter";
  return os << type << " @ " << index_;
}

std::ostream& HInstanceOf::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(left()) << " " << NameOf(right()) << " "
            << NameOf(context());
}

Range* HValue::InferRange(Zone* zone) {
  Range* result;
  if (representation().IsSmi() || type().IsSmi()) {
    result = new(zone) Range(Smi::kMinValue, Smi::kMaxValue);
    result->set_can_be_minus_zero(false);
  } else {
    result = new(zone) Range();
    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32));
    // TODO(jkummerow): The range cannot be minus zero when the upper type
    // bound is Integer32.
  }
  return result;
}

Range* HChange::InferRange(Zone* zone) {
  Range* input_range = value()->range();
  if (from().IsInteger32() && !value()->CheckFlag(HInstruction::kUint32) &&
      (to().IsSmi() ||
       (to().IsTagged() &&
        input_range != NULL &&
        input_range->IsInSmiRange()))) {
    set_type(HType::Smi());
    ClearChangesFlag(kNewSpacePromotion);
  }
  if (to().IsSmiOrTagged() &&
      input_range != NULL &&
      input_range->IsInSmiRange() &&
      (!SmiValuesAre32Bits() ||
       !value()->CheckFlag(HValue::kUint32) ||
       input_range->upper() != kMaxInt)) {
    // The Range class can't express upper bounds in the (kMaxInt, kMaxUint32]
    // interval, so we treat kMaxInt as a sentinel for this entire interval.
    ClearFlag(kCanOverflow);
  }
  Range* result = (input_range != NULL)
      ? input_range->Copy(zone)
      : HValue::InferRange(zone);
  result->set_can_be_minus_zero(!to().IsSmiOrInteger32() ||
                                !(CheckFlag(kAllUsesTruncatingToInt32) ||
                                  CheckFlag(kAllUsesTruncatingToSmi)));
  if (to().IsSmi()) result->ClampToSmi();
  return result;
}

Range* HConstant::InferRange(Zone* zone) {
  if (HasInteger32Value()) {
    Range* result = new(zone) Range(int32_value_, int32_value_);
    result->set_can_be_minus_zero(false);
    return result;
  }
  return HValue::InferRange(zone);
}

SourcePosition HPhi::position() const { return block()->first()->position(); }

Range* HPhi::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    if (block()->IsLoopHeader()) {
      Range* range = r.IsSmi()
          ? new(zone) Range(Smi::kMinValue, Smi::kMaxValue)
          : new(zone) Range(kMinInt, kMaxInt);
      return range;
    } else {
      Range* range = OperandAt(0)->range()->Copy(zone);
      for (int i = 1; i < OperandCount(); ++i) {
        range->Union(OperandAt(i)->range());
      }
      return range;
    }
  } else {
    return HValue::InferRange(zone);
  }
}

Range* HAdd::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* res = a->Copy(zone);
    if (!res->AddAndCheckOverflow(r, b) ||
        (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
        (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
      ClearFlag(kCanOverflow);
    }
    res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
                               !CheckFlag(kAllUsesTruncatingToInt32) &&
                               a->CanBeMinusZero() && b->CanBeMinusZero());
    return res;
  } else {
    return HValue::InferRange(zone);
  }
}

Range* HSub::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* res = a->Copy(zone);
    if (!res->SubAndCheckOverflow(r, b) ||
        (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
        (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
      ClearFlag(kCanOverflow);
    }
    res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
                               !CheckFlag(kAllUsesTruncatingToInt32) &&
                               a->CanBeMinusZero() && b->CanBeZero());
    return res;
  } else {
    return HValue::InferRange(zone);
  }
}

Range* HMul::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* res = a->Copy(zone);
    if (!res->MulAndCheckOverflow(r, b) ||
        (((r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
         (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) &&
         !MulMinusOne())) {
      // Truncated int multiplication is too precise and therefore not the
      // same as converting to Double and back.
      // Handle truncated integer multiplication by -1 special.
      ClearFlag(kCanOverflow);
    }
    res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
                               !CheckFlag(kAllUsesTruncatingToInt32) &&
                               ((a->CanBeZero() && b->CanBeNegative()) ||
                                (a->CanBeNegative() && b->CanBeZero())));
    return res;
  } else {
    return HValue::InferRange(zone);
  }
}

Range* HDiv::InferRange(Zone* zone) {
  if (representation().IsInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* result = new(zone) Range();
    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
                                  (a->CanBeMinusZero() ||
                                   (a->CanBeZero() && b->CanBeNegative())));
    if (!a->Includes(kMinInt) || !b->Includes(-1)) {
      ClearFlag(kCanOverflow);
    }

    if (!b->CanBeZero()) {
      ClearFlag(kCanBeDivByZero);
    }
    return result;
  } else {
    return HValue::InferRange(zone);
  }
}

Range* HMathFloorOfDiv::InferRange(Zone* zone) {
  if (representation().IsInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* result = new(zone) Range();
    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
                                  (a->CanBeMinusZero() ||
                                   (a->CanBeZero() && b->CanBeNegative())));
    if (!a->Includes(kMinInt)) {
      ClearFlag(kLeftCanBeMinInt);
    }

    if (!a->CanBeNegative()) {
      ClearFlag(HValue::kLeftCanBeNegative);
    }

    if (!a->CanBePositive()) {
      ClearFlag(HValue::kLeftCanBePositive);
    }

    if (!a->Includes(kMinInt) || !b->Includes(-1)) {
      ClearFlag(kCanOverflow);
    }

    if (!b->CanBeZero()) {
      ClearFlag(kCanBeDivByZero);
    }
    return result;
  } else {
    return HValue::InferRange(zone);
  }
}

// Returns the absolute value of its argument minus one, avoiding undefined
// behavior at kMinInt.
static int32_t AbsMinus1(int32_t a) { return a < 0 ? -(a + 1) : (a - 1); }

Range* HMod::InferRange(Zone* zone) {
  if (representation().IsInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();

    // The magnitude of the modulus is bounded by the right operand.
    int32_t positive_bound = Max(AbsMinus1(b->lower()), AbsMinus1(b->upper()));

    // The result of the modulo operation has the sign of its left operand.
    bool left_can_be_negative = a->CanBeMinusZero() || a->CanBeNegative();
    Range* result = new(zone) Range(left_can_be_negative ? -positive_bound : 0,
                                    a->CanBePositive() ? positive_bound : 0);

    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
                                  left_can_be_negative);

    if (!a->CanBeNegative()) {
      ClearFlag(HValue::kLeftCanBeNegative);
    }

    if (!a->Includes(kMinInt) || !b->Includes(-1)) {
      ClearFlag(HValue::kCanOverflow);
    }

    if (!b->CanBeZero()) {
      ClearFlag(HValue::kCanBeDivByZero);
    }
    return result;
  } else {
    return HValue::InferRange(zone);
  }
}

InductionVariableData* InductionVariableData::ExaminePhi(HPhi* phi) {
  if (phi->block()->loop_information() == NULL) return NULL;
  if (phi->OperandCount() != 2) return NULL;
  int32_t candidate_increment;

  candidate_increment = ComputeIncrement(phi, phi->OperandAt(0));
  if (candidate_increment != 0) {
    return new(phi->block()->graph()->zone())
        InductionVariableData(phi, phi->OperandAt(1), candidate_increment);
  }

  candidate_increment = ComputeIncrement(phi, phi->OperandAt(1));
  if (candidate_increment != 0) {
    return new(phi->block()->graph()->zone())
        InductionVariableData(phi, phi->OperandAt(0), candidate_increment);
  }

  return NULL;
}

/**
 * This function tries to match the following patterns (and all the relevant
 * variants related to |, & and + being commutative):
 * base | constant_or_mask
 * base & constant_and_mask
 * (base + constant_offset) & constant_and_mask
 * (base - constant_offset) & constant_and_mask
 */
void InductionVariableData::DecomposeBitwise(
    HValue* value,
    BitwiseDecompositionResult* result) {
  HValue* base = IgnoreOsrValue(value);
  result->base = value;

  if (!base->representation().IsInteger32()) return;

  if (base->IsBitwise()) {
    bool allow_offset = false;
    int32_t mask = 0;

    HBitwise* bitwise = HBitwise::cast(base);
    if (bitwise->right()->IsInteger32Constant()) {
      mask = bitwise->right()->GetInteger32Constant();
      base = bitwise->left();
    } else if (bitwise->left()->IsInteger32Constant()) {
      mask = bitwise->left()->GetInteger32Constant();
      base = bitwise->right();
    } else {
      return;
    }
    if (bitwise->op() == Token::BIT_AND) {
      result->and_mask = mask;
      allow_offset = true;
    } else if (bitwise->op() == Token::BIT_OR) {
      result->or_mask = mask;
    } else {
      return;
    }

    result->context = bitwise->context();

    if (allow_offset) {
      if (base->IsAdd()) {
        HAdd* add = HAdd::cast(base);
        if (add->right()->IsInteger32Constant()) {
          base = add->left();
        } else if (add->left()->IsInteger32Constant()) {
          base = add->right();
        }
      } else if (base->IsSub()) {
        HSub* sub = HSub::cast(base);
        if (sub->right()->IsInteger32Constant()) {
          base = sub->left();
        }
      }
    }

    result->base = base;
  }
}

void InductionVariableData::AddCheck(HBoundsCheck* check,
                                     int32_t upper_limit) {
  DCHECK(limit_validity() != NULL);
  if (limit_validity() != check->block() &&
      !limit_validity()->Dominates(check->block())) return;
  if (!phi()->block()->current_loop()->IsNestedInThisLoop(
      check->block()->current_loop())) return;

  ChecksRelatedToLength* length_checks = checks();
  while (length_checks != NULL) {
    if (length_checks->length() == check->length()) break;
    length_checks = length_checks->next();
  }
  if (length_checks == NULL) {
    length_checks = new(check->block()->zone())
        ChecksRelatedToLength(check->length(), checks());
    checks_ = length_checks;
  }

  length_checks->AddCheck(check, upper_limit);
}

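// Closes the group of checks collected for the current basic block by
// attaching the upper limit that was established while scanning that block
// (see ChecksRelatedToLength::AddCheck below for how checks are grouped).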
void InductionVariableData::ChecksRelatedToLength::CloseCurrentBlock() {
  if (checks() != NULL) {
    InductionVariableCheck* c = checks();
    HBasicBlock* current_block = c->check()->block();
    while (c != NULL && c->check()->block() == current_block) {
      c->set_upper_limit(current_upper_limit_);
      c = c->next();
    }
  }
}

void InductionVariableData::ChecksRelatedToLength::UseNewIndexInCurrentBlock(
    Token::Value token,
    int32_t mask,
    HValue* index_base,
    HValue* context) {
  DCHECK(first_check_in_block() != NULL);
  HValue* previous_index = first_check_in_block()->index();
  DCHECK(context != NULL);

  Zone* zone = index_base->block()->graph()->zone();
  Isolate* isolate = index_base->block()->graph()->isolate();
  set_added_constant(HConstant::New(isolate, zone, context, mask));
  if (added_index() != NULL) {
    added_constant()->InsertBefore(added_index());
  } else {
    added_constant()->InsertBefore(first_check_in_block());
  }

  if (added_index() == NULL) {
    first_check_in_block()->ReplaceAllUsesWith(first_check_in_block()->index());
    HInstruction* new_index = HBitwise::New(isolate, zone, context, token,
                                            index_base, added_constant());
    DCHECK(new_index->IsBitwise());
    new_index->ClearAllSideEffects();
    new_index->AssumeRepresentation(Representation::Integer32());
    set_added_index(HBitwise::cast(new_index));
    added_index()->InsertBefore(first_check_in_block());
  }
  DCHECK(added_index()->op() == token);

  added_index()->SetOperandAt(1, index_base);
  added_index()->SetOperandAt(2, added_constant());
  first_check_in_block()->SetOperandAt(0, added_index());
  if (previous_index->HasNoUses()) {
    previous_index->DeleteAndReplaceWith(NULL);
  }
}

2159 void InductionVariableData::ChecksRelatedToLength::AddCheck(
2160 HBoundsCheck* check,
2161 int32_t upper_limit) {
2162 BitwiseDecompositionResult decomposition;
2163 InductionVariableData::DecomposeBitwise(check->index(), &decomposition);
2165 if (first_check_in_block() == NULL ||
2166 first_check_in_block()->block() != check->block()) {
2167 CloseCurrentBlock();
2169 first_check_in_block_ = check;
2170 set_added_index(NULL);
2171 set_added_constant(NULL);
2172 current_and_mask_in_block_ = decomposition.and_mask;
2173 current_or_mask_in_block_ = decomposition.or_mask;
2174 current_upper_limit_ = upper_limit;
2176 InductionVariableCheck* new_check = new(check->block()->graph()->zone())
2177 InductionVariableCheck(check, checks_, upper_limit);
2178 checks_ = new_check;
2182 if (upper_limit > current_upper_limit()) {
2183 current_upper_limit_ = upper_limit;
2186 if (decomposition.and_mask != 0 &&
2187 current_or_mask_in_block() == 0) {
2188 if (current_and_mask_in_block() == 0 ||
2189 decomposition.and_mask > current_and_mask_in_block()) {
2190 UseNewIndexInCurrentBlock(Token::BIT_AND,
2191 decomposition.and_mask,
2192 decomposition.base,
2193 decomposition.context);
2194 current_and_mask_in_block_ = decomposition.and_mask;
2196 check->set_skip_check();
2198 if (current_and_mask_in_block() == 0) {
2199 if (decomposition.or_mask > current_or_mask_in_block()) {
2200 UseNewIndexInCurrentBlock(Token::BIT_OR,
2201 decomposition.or_mask,
2202 decomposition.base,
2203 decomposition.context);
2204 current_or_mask_in_block_ = decomposition.or_mask;
2206 check->set_skip_check();
2209 if (!check->skip_check()) {
2210 InductionVariableCheck* new_check = new(check->block()->graph()->zone())
2211 InductionVariableCheck(check, checks_, upper_limit);
2212 checks_ = new_check;
2218 * This method detects if phi is an induction variable, with phi_operand as
2219 * its "incremented" value (the other operand would be the "base" value).
2221 * It checks if phi_operand has the form "phi + constant".
2222 * If yes, the constant is the increment that the induction variable gets at
2223 * every loop iteration.
2224 * Otherwise it returns 0.
2226 int32_t InductionVariableData::ComputeIncrement(HPhi* phi,
2227 HValue* phi_operand) {
2228 if (!phi_operand->representation().IsSmiOrInteger32()) return 0;
2230 if (phi_operand->IsAdd()) {
2231 HAdd* operation = HAdd::cast(phi_operand);
2232 if (operation->left() == phi &&
2233 operation->right()->IsInteger32Constant()) {
2234 return operation->right()->GetInteger32Constant();
2235 } else if (operation->right() == phi &&
2236 operation->left()->IsInteger32Constant()) {
2237 return operation->left()->GetInteger32Constant();
2238 }
2239 } else if (phi_operand->IsSub()) {
2240 HSub* operation = HSub::cast(phi_operand);
2241 if (operation->left() == phi &&
2242 operation->right()->IsInteger32Constant()) {
2243 int constant = operation->right()->GetInteger32Constant();
2244 if (constant == kMinInt) return 0;
2245 return -constant;
2246 }
2247 }
2249 return 0;
2250 }
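// Illustrative example (hypothetical loop): for "for (i = 0; i < n; i++)"
// the phi is i, phi_operand is the HAdd computing "i + 1", and the
// increment is 1; "i -= 1" goes through the HSub path and yields -1. The
// kMinInt guard matters because the HSub case returns the negated
// constant and -kMinInt overflows int32.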
2254 * Swaps the information in "update" with the one contained in "this".
2255 * The swapping is important because this method is used while doing a
2256 * dominator tree traversal, and "update" will retain the old data that
2257 * will be restored while backtracking.
2259 void InductionVariableData::UpdateAdditionalLimit(
2260 InductionVariableLimitUpdate* update) {
2261 DCHECK(update->updated_variable == this);
2262 if (update->limit_is_upper) {
2263 swap(&additional_upper_limit_, &update->limit);
2264 swap(&additional_upper_limit_is_included_, &update->limit_is_included);
2265 } else {
2266 swap(&additional_lower_limit_, &update->limit);
2267 swap(&additional_lower_limit_is_included_, &update->limit_is_included);
2268 }
2269 }
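// Illustrative walk (hypothetical limit): entering a region guarded by
// "x < 100" installs 100 as an additional upper limit and parks the old
// limit (possibly none) in "update"; performing the same swap again while
// backtracking out of the dominator subtree restores the previous state.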
2272 int32_t InductionVariableData::ComputeUpperLimit(int32_t and_mask,
2273 int32_t or_mask) {
2274 // Should be Smi::kMaxValue but it must fit 32 bits; lower is safe anyway.
2275 const int32_t MAX_LIMIT = 1 << 30;
2277 int32_t result = MAX_LIMIT;
2279 if (limit() != NULL &&
2280 limit()->IsInteger32Constant()) {
2281 int32_t limit_value = limit()->GetInteger32Constant();
2282 if (!limit_included()) {
2283 --limit_value;
2284 }
2285 if (limit_value < result) result = limit_value;
2286 }
2288 if (additional_upper_limit() != NULL &&
2289 additional_upper_limit()->IsInteger32Constant()) {
2290 int32_t limit_value = additional_upper_limit()->GetInteger32Constant();
2291 if (!additional_upper_limit_is_included()) {
2292 --limit_value;
2293 }
2294 if (limit_value < result) result = limit_value;
2295 }
2297 if (and_mask > 0 && and_mask < MAX_LIMIT) {
2298 if (and_mask < result) result = and_mask;
2302 // Add the effect of the or_mask.
2305 return result >= MAX_LIMIT ? kNoLimit : result;
2306 }
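// Worked example (hypothetical limits): with an exclusive constant limit of
// 10 ("i < 10"), an additional inclusive limit of 20 and and_mask == 0xff,
// the candidates are 9, 20 and 255, so 9 is returned; if nothing stays
// below MAX_LIMIT, kNoLimit is returned instead.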
2309 HValue* InductionVariableData::IgnoreOsrValue(HValue* v) {
2310 if (!v->IsPhi()) return v;
2311 HPhi* phi = HPhi::cast(v);
2312 if (phi->OperandCount() != 2) return v;
2313 if (phi->OperandAt(0)->block()->is_osr_entry()) {
2314 return phi->OperandAt(1);
2315 } else if (phi->OperandAt(1)->block()->is_osr_entry()) {
2316 return phi->OperandAt(0);
2317 }
2318 return v;
2319 }
2323 InductionVariableData* InductionVariableData::GetInductionVariableData(
2324 HValue* v) {
2325 v = IgnoreOsrValue(v);
2326 if (v->IsPhi()) {
2327 return HPhi::cast(v)->induction_variable_data();
2328 }
2329 return NULL;
2330 }
2334 * Check if a conditional branch to "current_branch" with token "token" is
2335 * the branch that keeps the induction loop running (and, conversely, will
2336 * terminate it if the "other_branch" is taken).
2338 * Three conditions must be met:
2339 * - "current_branch" must be in the induction loop.
2340 * - "other_branch" must be out of the induction loop.
2341 * - "token" and the induction increment must be "compatible": the token should
2342 * be a condition that keeps the execution inside the loop until the limit is
2343 * reached.
2345 bool InductionVariableData::CheckIfBranchIsLoopGuard(
2347 HBasicBlock* current_branch,
2348 HBasicBlock* other_branch) {
2349 if (!phi()->block()->current_loop()->IsNestedInThisLoop(
2350 current_branch->current_loop())) {
2351 return false;
2352 }
2354 if (phi()->block()->current_loop()->IsNestedInThisLoop(
2355 other_branch->current_loop())) {
2356 return false;
2357 }
2359 if (increment() > 0 && (token == Token::LT || token == Token::LTE)) {
2360 return true;
2361 }
2362 if (increment() < 0 && (token == Token::GT || token == Token::GTE)) {
2363 return true;
2364 }
2365 if (Token::IsInequalityOp(token) && (increment() == 1 || increment() == -1)) {
2366 return true;
2367 }
2369 return false;
2370 }
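// Illustrative example (hypothetical loop): in "for (i = 0; i < n; i++)"
// the back edge is guarded by a compare with token LT and increment +1, so
// the in-loop branch is the loop guard. A guard like "i != n" is only
// accepted for increments of +1/-1 because a larger step could jump over
// the limit without ever making the condition false.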
2373 void InductionVariableData::ComputeLimitFromPredecessorBlock(
2375 LimitFromPredecessorBlock* result) {
2376 if (block->predecessors()->length() != 1) return;
2377 HBasicBlock* predecessor = block->predecessors()->at(0);
2378 HInstruction* end = predecessor->last();
2380 if (!end->IsCompareNumericAndBranch()) return;
2381 HCompareNumericAndBranch* branch = HCompareNumericAndBranch::cast(end);
2383 Token::Value token = branch->token();
2384 if (!Token::IsArithmeticCompareOp(token)) return;
2386 HBasicBlock* other_target;
2387 if (block == branch->SuccessorAt(0)) {
2388 other_target = branch->SuccessorAt(1);
2390 other_target = branch->SuccessorAt(0);
2391 token = Token::NegateCompareOp(token);
2392 DCHECK(block == branch->SuccessorAt(1));
2395 InductionVariableData* data;
2397 data = GetInductionVariableData(branch->left());
2398 HValue* limit = branch->right();
2399 if (data == NULL) {
2400 data = GetInductionVariableData(branch->right());
2401 token = Token::ReverseCompareOp(token);
2402 limit = branch->left();
2403 }
2405 if (data != NULL) {
2406 result->variable = data;
2407 result->token = token;
2408 result->limit = limit;
2409 result->other_target = other_target;
2410 }
2411 }
2415 * Compute the limit that is imposed on an induction variable when entering
2416 * "block" (if any).
2417 * If the limit is the "proper" induction limit (the one that makes the loop
2418 * terminate when the induction variable reaches it) it is stored directly in
2419 * the induction variable data.
2420 * Otherwise the limit is written in "additional_limit" and the method
2421 * returns true.
2423 bool InductionVariableData::ComputeInductionVariableLimit(
2425 InductionVariableLimitUpdate* additional_limit) {
2426 LimitFromPredecessorBlock limit;
2427 ComputeLimitFromPredecessorBlock(block, &limit);
2428 if (!limit.LimitIsValid()) return false;
2430 if (limit.variable->CheckIfBranchIsLoopGuard(limit.token,
2432 limit.other_target)) {
2433 limit.variable->limit_ = limit.limit;
2434 limit.variable->limit_included_ = limit.LimitIsIncluded();
2435 limit.variable->limit_validity_ = block;
2436 limit.variable->induction_exit_block_ = block->predecessors()->at(0);
2437 limit.variable->induction_exit_target_ = limit.other_target;
2440 additional_limit->updated_variable = limit.variable;
2441 additional_limit->limit = limit.limit;
2442 additional_limit->limit_is_upper = limit.LimitIsUpper();
2443 additional_limit->limit_is_included = limit.LimitIsIncluded();
2449 Range* HMathMinMax::InferRange(Zone* zone) {
2450 if (representation().IsSmiOrInteger32()) {
2451 Range* a = left()->range();
2452 Range* b = right()->range();
2453 Range* res = a->Copy(zone);
2454 if (operation_ == kMathMax) {
2455 res->CombinedMax(b);
2456 } else {
2457 DCHECK(operation_ == kMathMin);
2458 res->CombinedMin(b);
2459 }
2460 return res;
2461 }
2462 return HValue::InferRange(zone);
2463 }
2467 void HPushArguments::AddInput(HValue* value) {
2468 inputs_.Add(NULL, value->block()->zone());
2469 SetOperandAt(OperandCount() - 1, value);
2473 std::ostream& HPhi::PrintTo(std::ostream& os) const { // NOLINT
2474 os << "[";
2475 for (int i = 0; i < OperandCount(); ++i) {
2476 os << " " << NameOf(OperandAt(i)) << " ";
2478 return os << " uses:" << UseCount() << "_"
2479 << smi_non_phi_uses() + smi_indirect_uses() << "s_"
2480 << int32_non_phi_uses() + int32_indirect_uses() << "i_"
2481 << double_non_phi_uses() + double_indirect_uses() << "d_"
2482 << tagged_non_phi_uses() + tagged_indirect_uses() << "t"
2483 << TypeOf(this) << "]";
2487 void HPhi::AddInput(HValue* value) {
2488 inputs_.Add(NULL, value->block()->zone());
2489 SetOperandAt(OperandCount() - 1, value);
2490 // Mark phis that may have 'arguments' directly or indirectly as an operand.
2491 if (!CheckFlag(kIsArguments) && value->CheckFlag(kIsArguments)) {
2492 SetFlag(kIsArguments);
2497 bool HPhi::HasRealUses() {
2498 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
2499 if (!it.value()->IsPhi()) return true;
2500 }
2501 return false;
2502 }
2505 HValue* HPhi::GetRedundantReplacement() {
2506 HValue* candidate = NULL;
2507 int count = OperandCount();
2508 int position = 0;
2509 while (position < count && candidate == NULL) {
2510 HValue* current = OperandAt(position++);
2511 if (current != this) candidate = current;
2512 }
2513 while (position < count) {
2514 HValue* current = OperandAt(position++);
2515 if (current != this && current != candidate) return NULL;
2516 }
2517 DCHECK(candidate != this);
2518 return candidate;
2519 }
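// Illustrative example (hypothetical phi): phi(x, x, phi) only ever sees
// the value x, so x is returned as its replacement; phi(x, y) with two
// distinct inputs returns NULL and must be kept.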
2522 void HPhi::DeleteFromGraph() {
2523 DCHECK(block() != NULL);
2524 block()->RemovePhi(this);
2525 DCHECK(block() == NULL);
2529 void HPhi::InitRealUses(int phi_id) {
2530 // Initialize real uses.
2531 phi_id_ = phi_id;
2532 // Compute a conservative approximation of truncating uses before inferring
2533 // representations. The proper, exact computation will be done later, when
2534 // inserting representation changes.
2535 SetFlag(kTruncatingToSmi);
2536 SetFlag(kTruncatingToInt32);
2537 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
2538 HValue* value = it.value();
2539 if (!value->IsPhi()) {
2540 Representation rep = value->observed_input_representation(it.index());
2541 non_phi_uses_[rep.kind()] += 1;
2542 if (FLAG_trace_representation) {
2543 PrintF("#%d Phi is used by real #%d %s as %s\n",
2544 id(), value->id(), value->Mnemonic(), rep.Mnemonic());
2546 if (!value->IsSimulate()) {
2547 if (!value->CheckFlag(kTruncatingToSmi)) {
2548 ClearFlag(kTruncatingToSmi);
2550 if (!value->CheckFlag(kTruncatingToInt32)) {
2551 ClearFlag(kTruncatingToInt32);
2559 void HPhi::AddNonPhiUsesFrom(HPhi* other) {
2560 if (FLAG_trace_representation) {
2561 PrintF("adding to #%d Phi uses of #%d Phi: s%d i%d d%d t%d\n",
2562 id(), other->id(),
2563 other->non_phi_uses_[Representation::kSmi],
2564 other->non_phi_uses_[Representation::kInteger32],
2565 other->non_phi_uses_[Representation::kDouble],
2566 other->non_phi_uses_[Representation::kTagged]);
2569 for (int i = 0; i < Representation::kNumRepresentations; i++) {
2570 indirect_uses_[i] += other->non_phi_uses_[i];
2575 void HPhi::AddIndirectUsesTo(int* dest) {
2576 for (int i = 0; i < Representation::kNumRepresentations; i++) {
2577 dest[i] += indirect_uses_[i];
2582 void HSimulate::MergeWith(ZoneList<HSimulate*>* list) {
2583 while (!list->is_empty()) {
2584 HSimulate* from = list->RemoveLast();
2585 ZoneList<HValue*>* from_values = &from->values_;
2586 for (int i = 0; i < from_values->length(); ++i) {
2587 if (from->HasAssignedIndexAt(i)) {
2588 int index = from->GetAssignedIndexAt(i);
2589 if (HasValueForIndex(index)) continue;
2590 AddAssignedValue(index, from_values->at(i));
2591 } else {
2592 if (pop_count_ > 0) {
2593 pop_count_--;
2594 } else {
2595 AddPushedValue(from_values->at(i));
2596 }
2597 }
2598 }
2599 pop_count_ += from->pop_count_;
2600 from->DeleteAndReplaceWith(NULL);
2605 std::ostream& HSimulate::PrintDataTo(std::ostream& os) const { // NOLINT
2606 os << "id=" << ast_id().ToInt();
2607 if (pop_count_ > 0) os << " pop " << pop_count_;
2608 if (values_.length() > 0) {
2609 if (pop_count_ > 0) os << " /";
2610 for (int i = values_.length() - 1; i >= 0; --i) {
2611 if (HasAssignedIndexAt(i)) {
2612 os << " var[" << GetAssignedIndexAt(i) << "] = ";
2616 os << NameOf(values_[i]);
2617 if (i > 0) os << ",";
2624 void HSimulate::ReplayEnvironment(HEnvironment* env) {
2625 if (is_done_with_replay()) return;
2626 DCHECK(env != NULL);
2627 env->set_ast_id(ast_id());
2628 env->Drop(pop_count());
2629 for (int i = values()->length() - 1; i >= 0; --i) {
2630 HValue* value = values()->at(i);
2631 if (HasAssignedIndexAt(i)) {
2632 env->Bind(GetAssignedIndexAt(i), value);
2633 } else {
2634 env->Push(value);
2635 }
2636 }
2637 set_done_with_replay();
2638 }
2641 static void ReplayEnvironmentNested(const ZoneList<HValue*>* values,
2642 HCapturedObject* other) {
2643 for (int i = 0; i < values->length(); ++i) {
2644 HValue* value = values->at(i);
2645 if (value->IsCapturedObject()) {
2646 if (HCapturedObject::cast(value)->capture_id() == other->capture_id()) {
2647 values->at(i) = other;
2648 } else {
2649 ReplayEnvironmentNested(HCapturedObject::cast(value)->values(), other);
2650 }
2651 }
2652 }
2653 }
2656 // Replay captured objects by replacing all captured objects with the
2657 // same capture id in the current and all outer environments.
2658 void HCapturedObject::ReplayEnvironment(HEnvironment* env) {
2659 DCHECK(env != NULL);
2660 while (env != NULL) {
2661 ReplayEnvironmentNested(env->values(), this);
2662 env = env->outer();
2663 }
2664 }
2667 std::ostream& HCapturedObject::PrintDataTo(std::ostream& os) const { // NOLINT
2668 os << "#" << capture_id() << " ";
2669 return HDematerializedObject::PrintDataTo(os);
2673 void HEnterInlined::RegisterReturnTarget(HBasicBlock* return_target,
2675 DCHECK(return_target->IsInlineReturnTarget());
2676 return_targets_.Add(return_target, zone);
2680 std::ostream& HEnterInlined::PrintDataTo(std::ostream& os) const { // NOLINT
2681 return os << function()->debug_name()->ToCString().get();
2685 static bool IsInteger32(double value) {
2686 if (value >= std::numeric_limits<int32_t>::min() &&
2687 value <= std::numeric_limits<int32_t>::max()) {
2688 double roundtrip_value = static_cast<double>(static_cast<int32_t>(value));
2689 return bit_cast<int64_t>(roundtrip_value) == bit_cast<int64_t>(value);
2690 }
2691 return false;
2692 }
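// Note: comparing bit patterns via bit_cast (instead of a plain ==)
// deliberately rejects -0.0: static_cast<int32_t>(-0.0) is 0, which
// round-trips to +0.0, and +0.0/-0.0 differ in their bit patterns even
// though they compare equal as doubles.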
2695 HConstant::HConstant(Special special)
2696 : HTemplateInstruction<0>(HType::TaggedNumber()),
2697 object_(Handle<Object>::null()),
2698 object_map_(Handle<Map>::null()),
2699 bit_field_(HasDoubleValueField::encode(true) |
2700 InstanceTypeField::encode(kUnknownInstanceType)),
2702 DCHECK_EQ(kHoleNaN, special);
2703 std::memcpy(&double_value_, &kHoleNanInt64, sizeof(double_value_));
2704 Initialize(Representation::Double());
2708 HConstant::HConstant(Handle<Object> object, Representation r)
2709 : HTemplateInstruction<0>(HType::FromValue(object)),
2710 object_(Unique<Object>::CreateUninitialized(object)),
2711 object_map_(Handle<Map>::null()),
2712 bit_field_(HasStableMapValueField::encode(false) |
2713 HasSmiValueField::encode(false) |
2714 HasInt32ValueField::encode(false) |
2715 HasDoubleValueField::encode(false) |
2716 HasExternalReferenceValueField::encode(false) |
2717 IsNotInNewSpaceField::encode(true) |
2718 BooleanValueField::encode(object->BooleanValue()) |
2719 IsUndetectableField::encode(false) |
2720 InstanceTypeField::encode(kUnknownInstanceType)) {
2721 if (object->IsHeapObject()) {
2722 Handle<HeapObject> heap_object = Handle<HeapObject>::cast(object);
2723 Isolate* isolate = heap_object->GetIsolate();
2724 Handle<Map> map(heap_object->map(), isolate);
2725 bit_field_ = IsNotInNewSpaceField::update(
2726 bit_field_, !isolate->heap()->InNewSpace(*object));
2727 bit_field_ = InstanceTypeField::update(bit_field_, map->instance_type());
2729 IsUndetectableField::update(bit_field_, map->is_undetectable());
2730 if (map->is_stable()) object_map_ = Unique<Map>::CreateImmovable(map);
2731 bit_field_ = HasStableMapValueField::update(
2733 HasMapValue() && Handle<Map>::cast(heap_object)->is_stable());
2735 if (object->IsNumber()) {
2736 double n = object->Number();
2737 bool has_int32_value = IsInteger32(n);
2738 bit_field_ = HasInt32ValueField::update(bit_field_, has_int32_value);
2739 int32_value_ = DoubleToInt32(n);
2740 bit_field_ = HasSmiValueField::update(
2741 bit_field_, has_int32_value && Smi::IsValid(int32_value_));
2743 bit_field_ = HasDoubleValueField::update(bit_field_, true);
2744 // TODO(titzer): if this heap number is new space, tenure a new one.
2751 HConstant::HConstant(Unique<Object> object, Unique<Map> object_map,
2752 bool has_stable_map_value, Representation r, HType type,
2753 bool is_not_in_new_space, bool boolean_value,
2754 bool is_undetectable, InstanceType instance_type)
2755 : HTemplateInstruction<0>(type),
2757 object_map_(object_map),
2758 bit_field_(HasStableMapValueField::encode(has_stable_map_value) |
2759 HasSmiValueField::encode(false) |
2760 HasInt32ValueField::encode(false) |
2761 HasDoubleValueField::encode(false) |
2762 HasExternalReferenceValueField::encode(false) |
2763 IsNotInNewSpaceField::encode(is_not_in_new_space) |
2764 BooleanValueField::encode(boolean_value) |
2765 IsUndetectableField::encode(is_undetectable) |
2766 InstanceTypeField::encode(instance_type)) {
2767 DCHECK(!object.handle().is_null());
2768 DCHECK(!type.IsTaggedNumber() || type.IsNone());
2773 HConstant::HConstant(int32_t integer_value, Representation r,
2774 bool is_not_in_new_space, Unique<Object> object)
2776 object_map_(Handle<Map>::null()),
2777 bit_field_(HasStableMapValueField::encode(false) |
2778 HasSmiValueField::encode(Smi::IsValid(integer_value)) |
2779 HasInt32ValueField::encode(true) |
2780 HasDoubleValueField::encode(true) |
2781 HasExternalReferenceValueField::encode(false) |
2782 IsNotInNewSpaceField::encode(is_not_in_new_space) |
2783 BooleanValueField::encode(integer_value != 0) |
2784 IsUndetectableField::encode(false) |
2785 InstanceTypeField::encode(kUnknownInstanceType)),
2786 int32_value_(integer_value),
2787 double_value_(FastI2D(integer_value)) {
2788 // It's possible to create a constant with a value in Smi-range but stored
2789 // in a (pre-existing) HeapNumber. See crbug.com/349878.
2790 bool could_be_heapobject = r.IsTagged() && !object.handle().is_null();
2791 bool is_smi = HasSmiValue() && !could_be_heapobject;
2792 set_type(is_smi ? HType::Smi() : HType::TaggedNumber());
2797 HConstant::HConstant(double double_value, Representation r,
2798 bool is_not_in_new_space, Unique<Object> object)
2800 object_map_(Handle<Map>::null()),
2801 bit_field_(HasStableMapValueField::encode(false) |
2802 HasInt32ValueField::encode(IsInteger32(double_value)) |
2803 HasDoubleValueField::encode(true) |
2804 HasExternalReferenceValueField::encode(false) |
2805 IsNotInNewSpaceField::encode(is_not_in_new_space) |
2806 BooleanValueField::encode(double_value != 0 &&
2807 !std::isnan(double_value)) |
2808 IsUndetectableField::encode(false) |
2809 InstanceTypeField::encode(kUnknownInstanceType)),
2810 int32_value_(DoubleToInt32(double_value)),
2811 double_value_(double_value) {
2812 bit_field_ = HasSmiValueField::update(
2813 bit_field_, HasInteger32Value() && Smi::IsValid(int32_value_));
2814 // It's possible to create a constant with a value in Smi-range but stored
2815 // in a (pre-existing) HeapNumber. See crbug.com/349878.
2816 bool could_be_heapobject = r.IsTagged() && !object.handle().is_null();
2817 bool is_smi = HasSmiValue() && !could_be_heapobject;
2818 set_type(is_smi ? HType::Smi() : HType::TaggedNumber());
2823 HConstant::HConstant(ExternalReference reference)
2824 : HTemplateInstruction<0>(HType::Any()),
2825 object_(Unique<Object>(Handle<Object>::null())),
2826 object_map_(Handle<Map>::null()),
2828 HasStableMapValueField::encode(false) |
2829 HasSmiValueField::encode(false) | HasInt32ValueField::encode(false) |
2830 HasDoubleValueField::encode(false) |
2831 HasExternalReferenceValueField::encode(true) |
2832 IsNotInNewSpaceField::encode(true) | BooleanValueField::encode(true) |
2833 IsUndetectableField::encode(false) |
2834 InstanceTypeField::encode(kUnknownInstanceType)),
2835 external_reference_value_(reference) {
2836 Initialize(Representation::External());
2840 void HConstant::Initialize(Representation r) {
2842 if (HasSmiValue() && SmiValuesAre31Bits()) {
2843 r = Representation::Smi();
2844 } else if (HasInteger32Value()) {
2845 r = Representation::Integer32();
2846 } else if (HasDoubleValue()) {
2847 r = Representation::Double();
2848 } else if (HasExternalReferenceValue()) {
2849 r = Representation::External();
2851 Handle<Object> object = object_.handle();
2852 if (object->IsJSObject()) {
2853 // Try to eagerly migrate JSObjects that have deprecated maps.
2854 Handle<JSObject> js_object = Handle<JSObject>::cast(object);
2855 if (js_object->map()->is_deprecated()) {
2856 JSObject::TryMigrateInstance(js_object);
2859 r = Representation::Tagged();
2863 // If we have an existing handle, zap it, because it might be a heap
2864 // number which we must not re-use when copying this HConstant to
2865 // Tagged representation later, because having Smi representation now
2866 // could cause heap object checks not to get emitted.
2867 object_ = Unique<Object>(Handle<Object>::null());
2869 if (r.IsSmiOrInteger32() && object_.handle().is_null()) {
2870 // If it's not a heap object, it can't be in new space.
2871 bit_field_ = IsNotInNewSpaceField::update(bit_field_, true);
2873 set_representation(r);
2878 bool HConstant::ImmortalImmovable() const {
2879 if (HasInteger32Value()) {
2880 return false;
2881 }
2882 if (HasDoubleValue()) {
2883 if (IsSpecialDouble()) {
2884 return true;
2885 }
2886 return false;
2887 }
2888 if (HasExternalReferenceValue()) {
2889 return false;
2890 }
2892 DCHECK(!object_.handle().is_null());
2893 Heap* heap = isolate()->heap();
2894 DCHECK(!object_.IsKnownGlobal(heap->minus_zero_value()));
2895 DCHECK(!object_.IsKnownGlobal(heap->nan_value()));
2896 return
2897 #define IMMORTAL_IMMOVABLE_ROOT(name) \
2898 object_.IsKnownGlobal(heap->root(Heap::k##name##RootIndex)) ||
2899 IMMORTAL_IMMOVABLE_ROOT_LIST(IMMORTAL_IMMOVABLE_ROOT)
2900 #undef IMMORTAL_IMMOVABLE_ROOT
2901 #define INTERNALIZED_STRING(name, value) \
2902 object_.IsKnownGlobal(heap->name()) ||
2903 INTERNALIZED_STRING_LIST(INTERNALIZED_STRING)
2904 #undef INTERNALIZED_STRING
2905 #define STRING_TYPE(NAME, size, name, Name) \
2906 object_.IsKnownGlobal(heap->name##_map()) ||
2907 STRING_TYPE_LIST(STRING_TYPE)
2908 #undef STRING_TYPE
2909 false;
2910 }
2913 bool HConstant::EmitAtUses() {
2915 if (block()->graph()->has_osr() &&
2916 block()->graph()->IsStandardConstant(this)) {
2917 // TODO(titzer): this seems like a hack that should be fixed by custom OSR.
2918 return true;
2919 }
2920 if (HasNoUses()) return true;
2921 if (IsCell()) return false;
2922 if (representation().IsDouble()) return false;
2923 if (representation().IsExternal()) return false;
2925 return true;
2926 }
2928 HConstant* HConstant::CopyToRepresentation(Representation r, Zone* zone) const {
2929 if (r.IsSmi() && !HasSmiValue()) return NULL;
2930 if (r.IsInteger32() && !HasInteger32Value()) return NULL;
2931 if (r.IsDouble() && !HasDoubleValue()) return NULL;
2932 if (r.IsExternal() && !HasExternalReferenceValue()) return NULL;
2933 if (HasInteger32Value()) {
2934 return new (zone) HConstant(int32_value_, r, NotInNewSpace(), object_);
2936 if (HasDoubleValue()) {
2937 return new (zone) HConstant(double_value_, r, NotInNewSpace(), object_);
2939 if (HasExternalReferenceValue()) {
2940 return new(zone) HConstant(external_reference_value_);
2942 DCHECK(!object_.handle().is_null());
2943 return new (zone) HConstant(object_, object_map_, HasStableMapValue(), r,
2944 type_, NotInNewSpace(), BooleanValue(),
2945 IsUndetectable(), GetInstanceType());
2949 Maybe<HConstant*> HConstant::CopyToTruncatedInt32(Zone* zone) {
2950 HConstant* res = NULL;
2951 if (HasInteger32Value()) {
2952 res = new (zone) HConstant(int32_value_, Representation::Integer32(),
2953 NotInNewSpace(), object_);
2954 } else if (HasDoubleValue()) {
2956 HConstant(DoubleToInt32(double_value_), Representation::Integer32(),
2957 NotInNewSpace(), object_);
2959 return res != NULL ? Just(res) : Nothing<HConstant*>();
2963 Maybe<HConstant*> HConstant::CopyToTruncatedNumber(Isolate* isolate,
2965 HConstant* res = NULL;
2966 Handle<Object> handle = this->handle(isolate);
2967 if (handle->IsBoolean()) {
2968 res = handle->BooleanValue() ?
2969 new(zone) HConstant(1) : new(zone) HConstant(0);
2970 } else if (handle->IsUndefined()) {
2971 res = new (zone) HConstant(std::numeric_limits<double>::quiet_NaN());
2972 } else if (handle->IsNull()) {
2973 res = new(zone) HConstant(0);
2975 return res != NULL ? Just(res) : Nothing<HConstant*>();
2979 std::ostream& HConstant::PrintDataTo(std::ostream& os) const { // NOLINT
2980 if (HasInteger32Value()) {
2981 os << int32_value_ << " ";
2982 } else if (HasDoubleValue()) {
2983 os << double_value_ << " ";
2984 } else if (HasExternalReferenceValue()) {
2985 os << reinterpret_cast<void*>(external_reference_value_.address()) << " ";
2987 // The handle() method is silently and lazily mutating the object.
2988 Handle<Object> h = const_cast<HConstant*>(this)->handle(isolate());
2989 os << Brief(*h) << " ";
2990 if (HasStableMapValue()) os << "[stable-map] ";
2991 if (HasObjectMap()) os << "[map " << *ObjectMap().handle() << "] ";
2993 if (!NotInNewSpace()) os << "[new space] ";
2998 std::ostream& HBinaryOperation::PrintDataTo(std::ostream& os) const { // NOLINT
2999 os << NameOf(left()) << " " << NameOf(right());
3000 if (CheckFlag(kCanOverflow)) os << " !";
3001 if (CheckFlag(kBailoutOnMinusZero)) os << " -0?";
3006 void HBinaryOperation::InferRepresentation(HInferRepresentationPhase* h_infer) {
3007 DCHECK(CheckFlag(kFlexibleRepresentation));
3008 Representation new_rep = RepresentationFromInputs();
3009 UpdateRepresentation(new_rep, h_infer, "inputs");
3011 if (representation().IsSmi() && HasNonSmiUse()) {
3012 UpdateRepresentation(
3013 Representation::Integer32(), h_infer, "use requirements");
3016 if (observed_output_representation_.IsNone()) {
3017 new_rep = RepresentationFromUses();
3018 UpdateRepresentation(new_rep, h_infer, "uses");
3020 new_rep = RepresentationFromOutput();
3021 UpdateRepresentation(new_rep, h_infer, "output");
3026 Representation HBinaryOperation::RepresentationFromInputs() {
3027 // Determine the worst case of observed input representations and
3028 // the currently assumed output representation.
3029 Representation rep = representation();
3030 for (int i = 1; i <= 2; ++i) {
3031 rep = rep.generalize(observed_input_representation(i));
3033 // If any of the actual input representations is more general than what we
3034 // have so far but not Tagged, use that representation instead.
3035 Representation left_rep = left()->representation();
3036 Representation right_rep = right()->representation();
3037 if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
3038 if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);
3044 bool HBinaryOperation::IgnoreObservedOutputRepresentation(
3045 Representation current_rep) {
3046 return ((current_rep.IsInteger32() && CheckUsesForFlag(kTruncatingToInt32)) ||
3047 (current_rep.IsSmi() && CheckUsesForFlag(kTruncatingToSmi))) &&
3048 // Mul in Integer32 mode would be too precise.
3049 (!this->IsMul() || HMul::cast(this)->MulMinusOne());
3053 Representation HBinaryOperation::RepresentationFromOutput() {
3054 Representation rep = representation();
3055 // Consider observed output representation, but ignore it if it's Double,
3056 // this instruction is not a division, and all its uses are truncating
3057 // to Integer32 or Smi.
3058 if (observed_output_representation_.is_more_general_than(rep) &&
3059 !IgnoreObservedOutputRepresentation(rep)) {
3060 return observed_output_representation_;
3062 return Representation::None();
3066 void HBinaryOperation::AssumeRepresentation(Representation r) {
3067 set_observed_input_representation(1, r);
3068 set_observed_input_representation(2, r);
3069 HValue::AssumeRepresentation(r);
3073 void HMathMinMax::InferRepresentation(HInferRepresentationPhase* h_infer) {
3074 DCHECK(CheckFlag(kFlexibleRepresentation));
3075 Representation new_rep = RepresentationFromInputs();
3076 UpdateRepresentation(new_rep, h_infer, "inputs");
3077 // Do not care about uses.
3081 Range* HBitwise::InferRange(Zone* zone) {
3082 if (op() == Token::BIT_XOR) {
3083 if (left()->HasRange() && right()->HasRange()) {
3084 // The maximum value has the high bit, and all bits below, set:
3085 // 0x7f...f
3086 // If the range can be negative, the minimum int is a negative number with
3087 // the high bit, and all bits below, unset:
3088 // 0x80...0
3089 // If it cannot be negative, conservatively choose 0 as minimum int.
3090 int64_t left_upper = left()->range()->upper();
3091 int64_t left_lower = left()->range()->lower();
3092 int64_t right_upper = right()->range()->upper();
3093 int64_t right_lower = right()->range()->lower();
3095 if (left_upper < 0) left_upper = ~left_upper;
3096 if (left_lower < 0) left_lower = ~left_lower;
3097 if (right_upper < 0) right_upper = ~right_upper;
3098 if (right_lower < 0) right_lower = ~right_lower;
3100 int high = MostSignificantBit(
3101 static_cast<uint32_t>(
3102 left_upper | left_lower | right_upper | right_lower));
3106 int32_t min = (left()->range()->CanBeNegative() ||
3107 right()->range()->CanBeNegative())
3108 ? static_cast<int32_t>(-limit) : 0;
3109 return new(zone) Range(min, static_cast<int32_t>(limit - 1));
3111 Range* result = HValue::InferRange(zone);
3112 result->set_can_be_minus_zero(false);
3115 const int32_t kDefaultMask = static_cast<int32_t>(0xffffffff);
3116 int32_t left_mask = (left()->range() != NULL)
3117 ? left()->range()->Mask()
3119 int32_t right_mask = (right()->range() != NULL)
3120 ? right()->range()->Mask()
3122 int32_t result_mask = (op() == Token::BIT_AND)
3123 ? left_mask & right_mask
3124 : left_mask | right_mask;
3125 if (result_mask >= 0) return new(zone) Range(0, result_mask);
3127 Range* result = HValue::InferRange(zone);
3128 result->set_can_be_minus_zero(false);
3129 return result;
3130 }
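// Worked mask example (hypothetical ranges): if the left range only allows
// bits 0x0000000f and the right range allows 0x000000ff, then BIT_AND
// yields result_mask == 0x0f and the range [0, 15]; BIT_OR ors the masks
// instead. A negative result mask means the sign bit may be set, so only
// can_be_minus_zero can be ruled out.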
3133 Range* HSar::InferRange(Zone* zone) {
3134 if (right()->IsConstant()) {
3135 HConstant* c = HConstant::cast(right());
3136 if (c->HasInteger32Value()) {
3137 Range* result = (left()->range() != NULL)
3138 ? left()->range()->Copy(zone)
3139 : new(zone) Range();
3140 result->Sar(c->Integer32Value());
3144 return HValue::InferRange(zone);
3148 Range* HShr::InferRange(Zone* zone) {
3149 if (right()->IsConstant()) {
3150 HConstant* c = HConstant::cast(right());
3151 if (c->HasInteger32Value()) {
3152 int shift_count = c->Integer32Value() & 0x1f;
3153 if (left()->range()->CanBeNegative()) {
3154 // Only compute bounds if the result always fits into an int32.
3155 return (shift_count >= 1)
3156 ? new(zone) Range(0,
3157 static_cast<uint32_t>(0xffffffff) >> shift_count)
3158 : new(zone) Range();
3160 // For positive inputs we can use the >> operator.
3161 Range* result = (left()->range() != NULL)
3162 ? left()->range()->Copy(zone)
3163 : new(zone) Range();
3164 result->Sar(c->Integer32Value());
3165 return result;
3166 }
3167 }
3169 return HValue::InferRange(zone);
3170 }
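// Illustrative example (hypothetical range): for "x >>> 3" where x may be
// negative, the result is bounded by Range(0, 0xffffffff >> 3), i.e.
// [0, 0x1fffffff]; for a shift count of 0 a negative input reinterpreted
// as unsigned may not fit in an int32, so a generic Range() is used.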
3173 Range* HShl::InferRange(Zone* zone) {
3174 if (right()->IsConstant()) {
3175 HConstant* c = HConstant::cast(right());
3176 if (c->HasInteger32Value()) {
3177 Range* result = (left()->range() != NULL)
3178 ? left()->range()->Copy(zone)
3179 : new(zone) Range();
3180 result->Shl(c->Integer32Value());
3184 return HValue::InferRange(zone);
3188 Range* HLoadNamedField::InferRange(Zone* zone) {
3189 if (access().representation().IsInteger8()) {
3190 return new(zone) Range(kMinInt8, kMaxInt8);
3192 if (access().representation().IsUInteger8()) {
3193 return new(zone) Range(kMinUInt8, kMaxUInt8);
3195 if (access().representation().IsInteger16()) {
3196 return new(zone) Range(kMinInt16, kMaxInt16);
3198 if (access().representation().IsUInteger16()) {
3199 return new(zone) Range(kMinUInt16, kMaxUInt16);
3201 if (access().IsStringLength()) {
3202 return new(zone) Range(0, String::kMaxLength);
3204 return HValue::InferRange(zone);
3208 Range* HLoadKeyed::InferRange(Zone* zone) {
3209 switch (elements_kind()) {
3210 case EXTERNAL_INT8_ELEMENTS:
3212 return new(zone) Range(kMinInt8, kMaxInt8);
3213 case EXTERNAL_UINT8_ELEMENTS:
3214 case EXTERNAL_UINT8_CLAMPED_ELEMENTS:
3215 case UINT8_ELEMENTS:
3216 case UINT8_CLAMPED_ELEMENTS:
3217 return new(zone) Range(kMinUInt8, kMaxUInt8);
3218 case EXTERNAL_INT16_ELEMENTS:
3219 case INT16_ELEMENTS:
3220 return new(zone) Range(kMinInt16, kMaxInt16);
3221 case EXTERNAL_UINT16_ELEMENTS:
3222 case UINT16_ELEMENTS:
3223 return new(zone) Range(kMinUInt16, kMaxUInt16);
3225 return HValue::InferRange(zone);
3230 std::ostream& HCompareGeneric::PrintDataTo(std::ostream& os) const { // NOLINT
3231 os << Token::Name(token()) << " ";
3232 return HBinaryOperation::PrintDataTo(os);
3236 std::ostream& HStringCompareAndBranch::PrintDataTo(
3237 std::ostream& os) const { // NOLINT
3238 os << Token::Name(token()) << " ";
3239 return HControlInstruction::PrintDataTo(os);
3243 std::ostream& HCompareNumericAndBranch::PrintDataTo(
3244 std::ostream& os) const { // NOLINT
3245 os << Token::Name(token()) << " " << NameOf(left()) << " " << NameOf(right());
3246 return HControlInstruction::PrintDataTo(os);
3250 std::ostream& HCompareObjectEqAndBranch::PrintDataTo(
3251 std::ostream& os) const { // NOLINT
3252 os << NameOf(left()) << " " << NameOf(right());
3253 return HControlInstruction::PrintDataTo(os);
3257 bool HCompareObjectEqAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3258 if (known_successor_index() != kNoKnownSuccessorIndex) {
3259 *block = SuccessorAt(known_successor_index());
3262 if (FLAG_fold_constants && left()->IsConstant() && right()->IsConstant()) {
3263 *block = HConstant::cast(left())->DataEquals(HConstant::cast(right()))
3264 ? FirstSuccessor() : SecondSuccessor();
3272 bool ConstantIsObject(HConstant* constant, Isolate* isolate) {
3273 if (constant->HasNumberValue()) return false;
3274 if (constant->GetUnique().IsKnownGlobal(isolate->heap()->null_value())) {
3275 return true;
3276 }
3277 if (constant->IsUndetectable()) return false;
3278 InstanceType type = constant->GetInstanceType();
3279 return (FIRST_NONCALLABLE_SPEC_OBJECT_TYPE <= type) &&
3280 (type <= LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
3284 bool HIsObjectAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3285 if (FLAG_fold_constants && value()->IsConstant()) {
3286 *block = ConstantIsObject(HConstant::cast(value()), isolate())
3287 ? FirstSuccessor() : SecondSuccessor();
3295 bool HIsStringAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3296 if (known_successor_index() != kNoKnownSuccessorIndex) {
3297 *block = SuccessorAt(known_successor_index());
3300 if (FLAG_fold_constants && value()->IsConstant()) {
3301 *block = HConstant::cast(value())->HasStringValue()
3302 ? FirstSuccessor() : SecondSuccessor();
3305 if (value()->type().IsString()) {
3306 *block = FirstSuccessor();
3309 if (value()->type().IsSmi() ||
3310 value()->type().IsNull() ||
3311 value()->type().IsBoolean() ||
3312 value()->type().IsUndefined() ||
3313 value()->type().IsJSObject()) {
3314 *block = SecondSuccessor();
3322 bool HIsUndetectableAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3323 if (FLAG_fold_constants && value()->IsConstant()) {
3324 *block = HConstant::cast(value())->IsUndetectable()
3325 ? FirstSuccessor() : SecondSuccessor();
3333 bool HHasInstanceTypeAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3334 if (FLAG_fold_constants && value()->IsConstant()) {
3335 InstanceType type = HConstant::cast(value())->GetInstanceType();
3336 *block = (from_ <= type) && (type <= to_)
3337 ? FirstSuccessor() : SecondSuccessor();
3345 void HCompareHoleAndBranch::InferRepresentation(
3346 HInferRepresentationPhase* h_infer) {
3347 ChangeRepresentation(value()->representation());
3351 bool HCompareNumericAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3352 if (left() == right() &&
3353 left()->representation().IsSmiOrInteger32()) {
3354 *block = (token() == Token::EQ ||
3355 token() == Token::EQ_STRICT ||
3356 token() == Token::LTE ||
3357 token() == Token::GTE)
3358 ? FirstSuccessor() : SecondSuccessor();
3366 bool HCompareMinusZeroAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3367 if (FLAG_fold_constants && value()->IsConstant()) {
3368 HConstant* constant = HConstant::cast(value());
3369 if (constant->HasDoubleValue()) {
3370 *block = IsMinusZero(constant->DoubleValue())
3371 ? FirstSuccessor() : SecondSuccessor();
3375 if (value()->representation().IsSmiOrInteger32()) {
3376 // A Smi or Integer32 cannot contain minus zero.
3377 *block = SecondSuccessor();
3385 void HCompareMinusZeroAndBranch::InferRepresentation(
3386 HInferRepresentationPhase* h_infer) {
3387 ChangeRepresentation(value()->representation());
3391 std::ostream& HGoto::PrintDataTo(std::ostream& os) const { // NOLINT
3392 return os << *SuccessorAt(0);
3396 void HCompareNumericAndBranch::InferRepresentation(
3397 HInferRepresentationPhase* h_infer) {
3398 Representation left_rep = left()->representation();
3399 Representation right_rep = right()->representation();
3400 Representation observed_left = observed_input_representation(0);
3401 Representation observed_right = observed_input_representation(1);
3403 Representation rep = Representation::None();
3404 rep = rep.generalize(observed_left);
3405 rep = rep.generalize(observed_right);
3406 if (rep.IsNone() || rep.IsSmiOrInteger32()) {
3407 if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
3408 if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);
3410 rep = Representation::Double();
3413 if (rep.IsDouble()) {
3414 // According to the ES5 spec (11.9.3, 11.8.5), Equality comparisons (==, ===
3415 // and !=) have special handling of undefined, e.g. undefined == undefined
3416 // is 'true'. Relational comparisons have a different semantic, first
3417 // calling ToPrimitive() on their arguments. The standard Crankshaft
3418 // tagged-to-double conversion to ensure the HCompareNumericAndBranch's
3419 // inputs are doubles caused 'undefined' to be converted to NaN. That's
3420 // compatible out of the box with ordered relational comparisons (<, >, <=,
3421 // >=). However, for equality comparisons (and for 'in' and 'instanceof'),
3422 // it is not consistent with the spec. For example, it would cause undefined
3423 // == undefined (should be true) to be evaluated as NaN == NaN
3424 // (false). Therefore, any comparisons other than ordered relational
3425 // comparisons must cause a deopt when one of their arguments is undefined.
3427 if (Token::IsOrderedRelationalCompareOp(token_) && !is_strong(strength())) {
3428 SetFlag(kAllowUndefinedAsNaN);
3431 ChangeRepresentation(rep);
3435 std::ostream& HParameter::PrintDataTo(std::ostream& os) const { // NOLINT
3436 return os << index();
3440 std::ostream& HLoadNamedField::PrintDataTo(std::ostream& os) const { // NOLINT
3441 os << NameOf(object()) << access_;
3443 if (maps() != NULL) {
3444 os << " [" << *maps()->at(0).handle();
3445 for (int i = 1; i < maps()->size(); ++i) {
3446 os << "," << *maps()->at(i).handle();
3451 if (HasDependency()) os << " " << NameOf(dependency());
3456 std::ostream& HLoadNamedGeneric::PrintDataTo(
3457 std::ostream& os) const { // NOLINT
3458 Handle<String> n = Handle<String>::cast(name());
3459 return os << NameOf(object()) << "." << n->ToCString().get();
3463 std::ostream& HLoadKeyed::PrintDataTo(std::ostream& os) const { // NOLINT
3464 if (!is_external()) {
3465 os << NameOf(elements());
3467 DCHECK(elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND &&
3468 elements_kind() <= LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
3469 os << NameOf(elements()) << "." << ElementsKindToString(elements_kind());
3472 os << "[" << NameOf(key());
3473 if (IsDehoisted()) os << " + " << base_offset();
3476 if (HasDependency()) os << " " << NameOf(dependency());
3477 if (RequiresHoleCheck()) os << " check_hole";
3482 bool HLoadKeyed::TryIncreaseBaseOffset(uint32_t increase_by_value) {
3483 // The base offset is usually simply the size of the array header, except
3484 // that dehoisting adds an additional offset due to array index key
3485 // manipulation, in which case it becomes (array header size +
3486 // constant-offset-from-key * kPointerSize).
3487 uint32_t base_offset = BaseOffsetField::decode(bit_field_);
3488 v8::base::internal::CheckedNumeric<uint32_t> addition_result = base_offset;
3489 addition_result += increase_by_value;
3490 if (!addition_result.IsValid()) return false;
3491 base_offset = addition_result.ValueOrDie();
3492 if (!BaseOffsetField::is_valid(base_offset)) return false;
3493 bit_field_ = BaseOffsetField::update(bit_field_, base_offset);
3494 return true;
3495 }
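// Illustrative overflow case (hypothetical numbers): with base_offset ==
// 0xfffffff0, increasing by 0x20 would wrap past 2^32; CheckedNumeric
// reports the addition as invalid, so dehoisting is rejected rather than
// silently storing a small, bogus offset.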
3498 bool HLoadKeyed::UsesMustHandleHole() const {
3499 if (IsFastPackedElementsKind(elements_kind())) {
3503 if (IsExternalArrayElementsKind(elements_kind())) {
3507 if (hole_mode() == ALLOW_RETURN_HOLE) {
3508 if (IsFastDoubleElementsKind(elements_kind())) {
3509 return AllUsesCanTreatHoleAsNaN();
3514 if (IsFastDoubleElementsKind(elements_kind())) {
3518 // Holes are only returned as tagged values.
3519 if (!representation().IsTagged()) {
3523 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
3524 HValue* use = it.value();
3525 if (!use->IsChange()) return false;
3532 bool HLoadKeyed::AllUsesCanTreatHoleAsNaN() const {
3533 return IsFastDoubleElementsKind(elements_kind()) &&
3534 CheckUsesForFlag(HValue::kAllowUndefinedAsNaN);
3538 bool HLoadKeyed::RequiresHoleCheck() const {
3539 if (IsFastPackedElementsKind(elements_kind())) {
3543 if (IsExternalArrayElementsKind(elements_kind())) {
3547 if (hole_mode() == CONVERT_HOLE_TO_UNDEFINED) {
3551 return !UsesMustHandleHole();
3555 std::ostream& HLoadKeyedGeneric::PrintDataTo(
3556 std::ostream& os) const { // NOLINT
3557 return os << NameOf(object()) << "[" << NameOf(key()) << "]";
3561 HValue* HLoadKeyedGeneric::Canonicalize() {
3562 // Recognize generic keyed loads that use property name generated
3563 // by for-in statement as a key and rewrite them into fast property load
3564 // by index.
3565 if (key()->IsLoadKeyed()) {
3566 HLoadKeyed* key_load = HLoadKeyed::cast(key());
3567 if (key_load->elements()->IsForInCacheArray()) {
3568 HForInCacheArray* names_cache =
3569 HForInCacheArray::cast(key_load->elements());
3571 if (names_cache->enumerable() == object()) {
3572 HForInCacheArray* index_cache =
3573 names_cache->index_cache();
3574 HCheckMapValue* map_check = HCheckMapValue::New(
3575 block()->graph()->isolate(), block()->graph()->zone(),
3576 block()->graph()->GetInvalidContext(), object(),
3577 names_cache->map());
3578 HInstruction* index = HLoadKeyed::New(
3579 block()->graph()->isolate(), block()->graph()->zone(),
3580 block()->graph()->GetInvalidContext(), index_cache, key_load->key(),
3581 key_load->key(), key_load->elements_kind());
3582 map_check->InsertBefore(this);
3583 index->InsertBefore(this);
3584 return Prepend(new(block()->zone()) HLoadFieldByIndex(
3585 object(), index));
3586 }
3587 }
3588 }
3590 return this;
3591 }
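// Illustrative JS sketch of the pattern being recognized (hypothetical
// code):
//
//   for (var k in obj) { var v = obj[k]; }
//
// The key of obj[k] comes from the for-in name cache, so the generic load
// is rewritten into a map check plus an HLoadFieldByIndex fed by the
// parallel index cache, bypassing the generic keyed lookup.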
3594 std::ostream& HStoreNamedGeneric::PrintDataTo(
3595 std::ostream& os) const { // NOLINT
3596 Handle<String> n = Handle<String>::cast(name());
3597 return os << NameOf(object()) << "." << n->ToCString().get() << " = "
3602 std::ostream& HStoreGlobalViaContext::PrintDataTo(
3603 std::ostream& os) const { // NOLINT
3604 return os << " depth:" << depth() << " slot:" << slot_index() << " = "
3609 std::ostream& HStoreNamedField::PrintDataTo(std::ostream& os) const { // NOLINT
3610 os << NameOf(object()) << access_ << " = " << NameOf(value());
3611 if (NeedsWriteBarrier()) os << " (write-barrier)";
3612 if (has_transition()) os << " (transition map " << *transition_map() << ")";
3617 std::ostream& HStoreKeyed::PrintDataTo(std::ostream& os) const { // NOLINT
3618 if (!is_external()) {
3619 os << NameOf(elements());
3621 DCHECK(elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND &&
3622 elements_kind() <= LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
3623 os << NameOf(elements()) << "." << ElementsKindToString(elements_kind());
3626 os << "[" << NameOf(key());
3627 if (IsDehoisted()) os << " + " << base_offset();
3628 return os << "] = " << NameOf(value());
3632 std::ostream& HStoreKeyedGeneric::PrintDataTo(
3633 std::ostream& os) const { // NOLINT
3634 return os << NameOf(object()) << "[" << NameOf(key())
3635 << "] = " << NameOf(value());
3639 std::ostream& HTransitionElementsKind::PrintDataTo(
3640 std::ostream& os) const { // NOLINT
3641 os << NameOf(object());
3642 ElementsKind from_kind = original_map().handle()->elements_kind();
3643 ElementsKind to_kind = transitioned_map().handle()->elements_kind();
3644 os << " " << *original_map().handle() << " ["
3645 << ElementsAccessor::ForKind(from_kind)->name() << "] -> "
3646 << *transitioned_map().handle() << " ["
3647 << ElementsAccessor::ForKind(to_kind)->name() << "]";
3648 if (IsSimpleMapChangeTransition(from_kind, to_kind)) os << " (simple)";
3653 std::ostream& HLoadGlobalGeneric::PrintDataTo(
3654 std::ostream& os) const { // NOLINT
3655 return os << name()->ToCString().get() << " ";
3659 std::ostream& HLoadGlobalViaContext::PrintDataTo(
3660 std::ostream& os) const { // NOLINT
3661 return os << "depth:" << depth() << " slot:" << slot_index();
3665 std::ostream& HInnerAllocatedObject::PrintDataTo(
3666 std::ostream& os) const { // NOLINT
3667 os << NameOf(base_object()) << " offset ";
3668 return offset()->PrintTo(os);
3672 std::ostream& HLoadContextSlot::PrintDataTo(std::ostream& os) const { // NOLINT
3673 return os << NameOf(value()) << "[" << slot_index() << "]";
3677 std::ostream& HStoreContextSlot::PrintDataTo(
3678 std::ostream& os) const { // NOLINT
3679 return os << NameOf(context()) << "[" << slot_index()
3680 << "] = " << NameOf(value());
3684 // Implementation of type inference and type conversions. Calculates
3685 // the inferred type of this instruction based on the input operands.
3687 HType HValue::CalculateInferredType() {
3692 HType HPhi::CalculateInferredType() {
3693 if (OperandCount() == 0) return HType::Tagged();
3694 HType result = OperandAt(0)->type();
3695 for (int i = 1; i < OperandCount(); ++i) {
3696 HType current = OperandAt(i)->type();
3697 result = result.Combine(current);
3703 HType HChange::CalculateInferredType() {
3704 if (from().IsDouble() && to().IsTagged()) return HType::HeapNumber();
3709 Representation HUnaryMathOperation::RepresentationFromInputs() {
3710 if (SupportsFlexibleFloorAndRound() &&
3711 (op_ == kMathFloor || op_ == kMathRound)) {
3712 // Floor and Round always take a double input. The integral result can be
3713 // used as an integer or a double. Infer the representation from the uses.
3714 return Representation::None();
3716 Representation rep = representation();
3717 // If any of the actual input representations is more general than what we
3718 // have so far but not Tagged, use that representation instead.
3719 Representation input_rep = value()->representation();
3720 if (!input_rep.IsTagged()) {
3721 rep = rep.generalize(input_rep);
3727 bool HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
3728 HValue* dominator) {
3729 DCHECK(side_effect == kNewSpacePromotion);
3730 Zone* zone = block()->zone();
3731 Isolate* isolate = block()->isolate();
3732 if (!FLAG_use_allocation_folding) return false;
3734 // Try to fold allocations together with their dominating allocations.
3735 if (!dominator->IsAllocate()) {
3736 if (FLAG_trace_allocation_folding) {
3737 PrintF("#%d (%s) cannot fold into #%d (%s)\n",
3738 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3743 // Check whether we are folding within the same block for local folding.
3744 if (FLAG_use_local_allocation_folding && dominator->block() != block()) {
3745 if (FLAG_trace_allocation_folding) {
3746 PrintF("#%d (%s) cannot fold into #%d (%s), crosses basic blocks\n",
3747 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3752 HAllocate* dominator_allocate = HAllocate::cast(dominator);
3753 HValue* dominator_size = dominator_allocate->size();
3754 HValue* current_size = size();
3756 // TODO(hpayer): Add support for non-constant allocation in dominator.
3757 if (!dominator_size->IsInteger32Constant()) {
3758 if (FLAG_trace_allocation_folding) {
3759 PrintF("#%d (%s) cannot fold into #%d (%s), "
3760 "dynamic allocation size in dominator\n",
3761 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3767 if (!IsFoldable(dominator_allocate)) {
3768 if (FLAG_trace_allocation_folding) {
3769 PrintF("#%d (%s) cannot fold into #%d (%s), different spaces\n", id(),
3770 Mnemonic(), dominator->id(), dominator->Mnemonic());
3775 if (!has_size_upper_bound()) {
3776 if (FLAG_trace_allocation_folding) {
3777 PrintF("#%d (%s) cannot fold into #%d (%s), "
3778 "can't estimate total allocation size\n",
3779 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3784 if (!current_size->IsInteger32Constant()) {
3785 // If it's not constant then it is a size_in_bytes calculation graph
3786 // like this: (const_header_size + const_element_size * size).
3787 DCHECK(current_size->IsInstruction());
3789 HInstruction* current_instr = HInstruction::cast(current_size);
3790 if (!current_instr->Dominates(dominator_allocate)) {
3791 if (FLAG_trace_allocation_folding) {
3792 PrintF("#%d (%s) cannot fold into #%d (%s), dynamic size "
3793 "value does not dominate target allocation\n",
3794 id(), Mnemonic(), dominator_allocate->id(),
3795 dominator_allocate->Mnemonic());
3802 (IsNewSpaceAllocation() && dominator_allocate->IsNewSpaceAllocation()) ||
3803 (IsOldSpaceAllocation() && dominator_allocate->IsOldSpaceAllocation()));
3805 // First update the size of the dominator allocate instruction.
3806 dominator_size = dominator_allocate->size();
3807 int32_t original_object_size =
3808 HConstant::cast(dominator_size)->GetInteger32Constant();
3809 int32_t dominator_size_constant = original_object_size;
3811 if (MustAllocateDoubleAligned()) {
3812 if ((dominator_size_constant & kDoubleAlignmentMask) != 0) {
3813 dominator_size_constant += kDoubleSize / 2;
3817 int32_t current_size_max_value = size_upper_bound()->GetInteger32Constant();
3818 int32_t new_dominator_size = dominator_size_constant + current_size_max_value;
3820 // Since we clear the first word after folded memory, we cannot use the
3821 // whole Page::kMaxRegularHeapObjectSize memory.
3822 if (new_dominator_size > Page::kMaxRegularHeapObjectSize - kPointerSize) {
3823 if (FLAG_trace_allocation_folding) {
3824 PrintF("#%d (%s) cannot fold into #%d (%s) due to size: %d\n",
3825 id(), Mnemonic(), dominator_allocate->id(),
3826 dominator_allocate->Mnemonic(), new_dominator_size);
3831 HInstruction* new_dominator_size_value;
3833 if (current_size->IsInteger32Constant()) {
3834 new_dominator_size_value = HConstant::CreateAndInsertBefore(
3835 isolate, zone, context(), new_dominator_size, Representation::None(),
3836 dominator_allocate);
3838 HValue* new_dominator_size_constant = HConstant::CreateAndInsertBefore(
3839 isolate, zone, context(), dominator_size_constant,
3840 Representation::Integer32(), dominator_allocate);
3842 // Add old and new size together and insert.
3843 current_size->ChangeRepresentation(Representation::Integer32());
3845 new_dominator_size_value = HAdd::New(
3846 isolate, zone, context(), new_dominator_size_constant, current_size);
3847 new_dominator_size_value->ClearFlag(HValue::kCanOverflow);
3848 new_dominator_size_value->ChangeRepresentation(Representation::Integer32());
3850 new_dominator_size_value->InsertBefore(dominator_allocate);
3853 dominator_allocate->UpdateSize(new_dominator_size_value);
3855 if (MustAllocateDoubleAligned()) {
3856 if (!dominator_allocate->MustAllocateDoubleAligned()) {
3857 dominator_allocate->MakeDoubleAligned();
3861 bool keep_new_space_iterable = FLAG_log_gc || FLAG_heap_stats;
3863 keep_new_space_iterable = keep_new_space_iterable || FLAG_verify_heap;
3866 if (keep_new_space_iterable && dominator_allocate->IsNewSpaceAllocation()) {
3867 dominator_allocate->MakePrefillWithFiller();
3869 // TODO(hpayer): This is a short-term hack to make allocation mementos
3870 // work again in new space.
3871 dominator_allocate->ClearNextMapWord(original_object_size);
3874 dominator_allocate->UpdateClearNextMapWord(MustClearNextMapWord());
3876 // After that replace the dominated allocate instruction.
3877 HInstruction* inner_offset = HConstant::CreateAndInsertBefore(
3878 isolate, zone, context(), dominator_size_constant, Representation::None(),
3881 HInstruction* dominated_allocate_instr = HInnerAllocatedObject::New(
3882 isolate, zone, context(), dominator_allocate, inner_offset, type());
3883 dominated_allocate_instr->InsertBefore(this);
3884 DeleteAndReplaceWith(dominated_allocate_instr);
3885 if (FLAG_trace_allocation_folding) {
3886 PrintF("#%d (%s) folded into #%d (%s)\n",
3887 id(), Mnemonic(), dominator_allocate->id(),
3888 dominator_allocate->Mnemonic());
3889 }
3890 return true;
3891 }
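// Illustrative sketch of a successful fold (sizes are hypothetical): if the
// dominating HAllocate reserves 32 bytes and this allocation needs at most
// 16, the dominator's size is raised to 48 and this instruction is replaced
// by an HInnerAllocatedObject pointing 32 bytes into the dominator's chunk.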
3894 void HAllocate::UpdateFreeSpaceFiller(int32_t free_space_size) {
3895 DCHECK(filler_free_space_size_ != NULL);
3896 Zone* zone = block()->zone();
3897 // We must explicitly force Smi representation here because on x64 we
3898 // would otherwise automatically choose int32, but the actual store
3899 // requires a Smi-tagged value.
3900 HConstant* new_free_space_size = HConstant::CreateAndInsertBefore(
3901 block()->isolate(), zone, context(),
3902 filler_free_space_size_->value()->GetInteger32Constant() +
3904 Representation::Smi(), filler_free_space_size_);
3905 filler_free_space_size_->UpdateValue(new_free_space_size);
3909 void HAllocate::CreateFreeSpaceFiller(int32_t free_space_size) {
3910 DCHECK(filler_free_space_size_ == NULL);
3911 Isolate* isolate = block()->isolate();
3912 Zone* zone = block()->zone();
3913 HInstruction* free_space_instr =
3914 HInnerAllocatedObject::New(isolate, zone, context(), dominating_allocate_,
3915 dominating_allocate_->size(), type());
3916 free_space_instr->InsertBefore(this);
3917 HConstant* filler_map = HConstant::CreateAndInsertAfter(
3918 zone, Unique<Map>::CreateImmovable(isolate->factory()->free_space_map()),
3919 true, free_space_instr);
3920 HInstruction* store_map =
3921 HStoreNamedField::New(isolate, zone, context(), free_space_instr,
3922 HObjectAccess::ForMap(), filler_map);
3923 store_map->SetFlag(HValue::kHasNoObservableSideEffects);
3924 store_map->InsertAfter(filler_map);
3926 // We must explicitly force Smi representation here because on x64 we
3927 // would otherwise automatically choose int32, but the actual store
3928 // requires a Smi-tagged value.
3929 HConstant* filler_size =
3930 HConstant::CreateAndInsertAfter(isolate, zone, context(), free_space_size,
3931 Representation::Smi(), store_map);
3932 // Must force Smi representation for x64 (see comment above).
3933 HObjectAccess access = HObjectAccess::ForMapAndOffset(
3934 isolate->factory()->free_space_map(), FreeSpace::kSizeOffset,
3935 Representation::Smi());
3936 HStoreNamedField* store_size = HStoreNamedField::New(
3937 isolate, zone, context(), free_space_instr, access, filler_size);
3938 store_size->SetFlag(HValue::kHasNoObservableSideEffects);
3939 store_size->InsertAfter(filler_size);
3940 filler_free_space_size_ = store_size;
3944 void HAllocate::ClearNextMapWord(int offset) {
3945 if (MustClearNextMapWord()) {
3946 Zone* zone = block()->zone();
3947 HObjectAccess access =
3948 HObjectAccess::ForObservableJSObjectOffset(offset);
3949 HStoreNamedField* clear_next_map =
3950 HStoreNamedField::New(block()->isolate(), zone, context(), this, access,
3951 block()->graph()->GetConstant0());
3952 clear_next_map->ClearAllSideEffects();
3953 clear_next_map->InsertAfter(this);
3958 std::ostream& HAllocate::PrintDataTo(std::ostream& os) const { // NOLINT
3959 os << NameOf(size()) << " (";
3960 if (IsNewSpaceAllocation()) os << "N";
3961 if (IsOldSpaceAllocation()) os << "P";
3962 if (MustAllocateDoubleAligned()) os << "A";
3963 if (MustPrefillWithFiller()) os << "F";
3968 bool HStoreKeyed::TryIncreaseBaseOffset(uint32_t increase_by_value) {
3969 // The base offset is usually simply the size of the array header, except
3970 // that dehoisting adds an additional offset due to array index key
3971 // manipulation, in which case it becomes (array header size +
3972 // constant-offset-from-key * kPointerSize).
3973 v8::base::internal::CheckedNumeric<uint32_t> addition_result = base_offset_;
3974 addition_result += increase_by_value;
3975 if (!addition_result.IsValid()) return false;
3976 base_offset_ = addition_result.ValueOrDie();
3977 return true;
3978 }
3981 bool HStoreKeyed::NeedsCanonicalization() {
3982 switch (value()->opcode()) {
3984 ElementsKind load_kind = HLoadKeyed::cast(value())->elements_kind();
3985 return IsExternalFloatOrDoubleElementsKind(load_kind) ||
3986 IsFixedFloatElementsKind(load_kind);
3989 Representation from = HChange::cast(value())->from();
3990 return from.IsTagged() || from.IsHeapObject();
3992 case kLoadNamedField:
3994 // Better safe than sorry...


#define H_CONSTANT_INT(val) \
  HConstant::New(isolate, zone, context, static_cast<int32_t>(val))
#define H_CONSTANT_DOUBLE(val) \
  HConstant::New(isolate, zone, context, static_cast<double>(val))


#define DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HInstr, op)                      \
  HInstruction* HInstr::New(Isolate* isolate, Zone* zone, HValue* context,    \
                            HValue* left, HValue* right, Strength strength) { \
    if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {   \
      HConstant* c_left = HConstant::cast(left);                              \
      HConstant* c_right = HConstant::cast(right);                            \
      if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {          \
        double double_res = c_left->DoubleValue() op c_right->DoubleValue();  \
        if (IsInt32Double(double_res)) {                                      \
          return H_CONSTANT_INT(double_res);                                  \
        }                                                                     \
        return H_CONSTANT_DOUBLE(double_res);                                 \
      }                                                                       \
    }                                                                         \
    return new (zone) HInstr(context, left, right, strength);                 \
  }


DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HAdd, +)
DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HMul, *)
DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HSub, -)

#undef DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR
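
// Illustrative sketch (standalone; IsInt32DoubleSketch is a hypothetical
// stand-in for V8's IsInt32Double): the folding above prefers an int32
// constant only when the double result is a whole number in int32 range and
// is not -0.0:
//
//   #include <cmath>
//   #include <cstdint>
//   bool IsInt32DoubleSketch(double d) {
//     if (!std::isfinite(d)) return false;                      // NaN, +/-inf.
//     if (d < -2147483648.0 || d > 2147483647.0) return false;  // Range.
//     int32_t i = static_cast<int32_t>(d);
//     if (static_cast<double>(i) != d) return false;            // Fractional.
//     return !(i == 0 && std::signbit(d));                      // Exclude -0.
//   }
//
// So 1.0 + 2.0 folds to the int32 constant 3, while 0.1 + 0.2 stays a double.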


HInstruction* HStringAdd::New(Isolate* isolate, Zone* zone, HValue* context,
                              HValue* left, HValue* right, Strength strength,
                              PretenureFlag pretenure_flag,
                              StringAddFlags flags,
                              Handle<AllocationSite> allocation_site) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_right = HConstant::cast(right);
    HConstant* c_left = HConstant::cast(left);
    if (c_left->HasStringValue() && c_right->HasStringValue()) {
      Handle<String> left_string = c_left->StringValue();
      Handle<String> right_string = c_right->StringValue();
      // Prevent a possible exception from an invalid string length.
      if (left_string->length() + right_string->length() < String::kMaxLength) {
        MaybeHandle<String> concat = isolate->factory()->NewConsString(
            c_left->StringValue(), c_right->StringValue());
        return HConstant::New(isolate, zone, context, concat.ToHandleChecked());
      }
    }
  }
  return new (zone) HStringAdd(context, left, right, strength, pretenure_flag,
                               flags, allocation_site);
}


std::ostream& HStringAdd::PrintDataTo(std::ostream& os) const {  // NOLINT
  if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) {
    os << "_CheckBoth";
  } else if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_LEFT) {
    os << "_CheckLeft";
  } else if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_RIGHT) {
    os << "_CheckRight";
  }
  HBinaryOperation::PrintDataTo(os);
  os << " (";
  if (pretenure_flag() == NOT_TENURED)
    os << "N";
  else if (pretenure_flag() == TENURED)
    os << "D";
  return os << ")";
}


HInstruction* HStringCharFromCode::New(Isolate* isolate, Zone* zone,
                                       HValue* context, HValue* char_code) {
  if (FLAG_fold_constants && char_code->IsConstant()) {
    HConstant* c_code = HConstant::cast(char_code);
    if (c_code->HasNumberValue()) {
      if (std::isfinite(c_code->DoubleValue())) {
        uint32_t code = c_code->NumberValueAsInteger32() & 0xffff;
        return HConstant::New(
            isolate, zone, context,
            isolate->factory()->LookupSingleCharacterStringFromCode(code));
      }
      return HConstant::New(isolate, zone, context,
                            isolate->factory()->empty_string());
    }
  }
  return new(zone) HStringCharFromCode(context, char_code);
}
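
// Illustrative sketch (not V8 code): the "& 0xffff" above mirrors the
// ECMAScript ToUint16 conversion that String.fromCharCode applies, so code
// points outside the 16-bit range wrap around:
//
//   #include <cstdint>
//   int main() {
//     uint32_t code = 0x1F600 & 0xffff;  // 0xF600: high bits dropped,
//     return code == 0xF600 ? 0 : 1;     // as in String.fromCharCode(0x1F600).
//   }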


HInstruction* HUnaryMathOperation::New(Isolate* isolate, Zone* zone,
                                       HValue* context, HValue* value,
                                       BuiltinFunctionId op) {
  do {
    if (!FLAG_fold_constants) break;
    if (!value->IsConstant()) break;
    HConstant* constant = HConstant::cast(value);
    if (!constant->HasNumberValue()) break;
    double d = constant->DoubleValue();
    if (std::isnan(d)) {  // NaN poisons everything.
      return H_CONSTANT_DOUBLE(std::numeric_limits<double>::quiet_NaN());
    }
    if (std::isinf(d)) {  // +Infinity and -Infinity.
      switch (op) {
        case kMathExp:
          return H_CONSTANT_DOUBLE((d > 0.0) ? d : 0.0);
        case kMathLog:
        case kMathSqrt:
          return H_CONSTANT_DOUBLE(
              (d > 0.0) ? d : std::numeric_limits<double>::quiet_NaN());
        case kMathPowHalf:
        case kMathAbs:
          return H_CONSTANT_DOUBLE((d > 0.0) ? d : -d);
        case kMathRound:
        case kMathFround:
        case kMathFloor:
          return H_CONSTANT_DOUBLE(d);
        case kMathClz32:
          return H_CONSTANT_INT(32);
        default:
          UNREACHABLE();
          break;
      }
    }
    switch (op) {
      case kMathExp:
        return H_CONSTANT_DOUBLE(fast_exp(d));
      case kMathLog:
        return H_CONSTANT_DOUBLE(std::log(d));
      case kMathSqrt:
        return H_CONSTANT_DOUBLE(fast_sqrt(d));
      case kMathPowHalf:
        return H_CONSTANT_DOUBLE(power_double_double(d, 0.5));
      case kMathAbs:
        return H_CONSTANT_DOUBLE((d >= 0.0) ? d + 0.0 : -d);
      case kMathRound:
        // -0.5 .. -0.0 round to -0.0.
        if ((d >= -0.5 && Double(d).Sign() < 0)) return H_CONSTANT_DOUBLE(-0.0);
        // Doubles are represented as Significant * 2 ^ Exponent. If the
        // Exponent is not negative, the double value is already an integer.
        if (Double(d).Exponent() >= 0) return H_CONSTANT_DOUBLE(d);
        return H_CONSTANT_DOUBLE(Floor(d + 0.5));
      case kMathFround:
        return H_CONSTANT_DOUBLE(static_cast<double>(static_cast<float>(d)));
      case kMathFloor:
        return H_CONSTANT_DOUBLE(Floor(d));
      case kMathClz32: {
        uint32_t i = DoubleToUint32(d);
        return H_CONSTANT_INT(base::bits::CountLeadingZeros32(i));
      }
      default:
        UNREACHABLE();
        break;
    }
  } while (false);
  return new(zone) HUnaryMathOperation(context, value, op);
}
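
// Illustrative sketch (standalone; js_round is a hypothetical name): the
// kMathRound folding above rounds ties toward +infinity and keeps the
// negative zero for inputs in [-0.5, -0.0]:
//
//   #include <cmath>
//   double js_round(double d) {
//     if (d >= -0.5 && std::signbit(d)) return -0.0;  // [-0.5, -0.0] -> -0.
//     return std::floor(d + 0.5);
//   }
//
// The Exponent() >= 0 early-out in the real code also avoids precision loss
// from computing d + 0.5 on values that are already integers.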


Representation HUnaryMathOperation::RepresentationFromUses() {
  if (op_ != kMathFloor && op_ != kMathRound) {
    return HValue::RepresentationFromUses();
  }

  // The instruction can have an int32 or double output. Prefer a double
  // representation if there are double uses.
  bool use_double = false;

  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    int use_index = it.index();
    Representation rep_observed = use->observed_input_representation(use_index);
    Representation rep_required = use->RequiredInputRepresentation(use_index);
    use_double |= (rep_observed.IsDouble() || rep_required.IsDouble());
    if (use_double && !FLAG_trace_representation) {
      // Having seen one double is enough.
      break;
    }
    if (FLAG_trace_representation) {
      if (!rep_required.IsDouble() || rep_observed.IsDouble()) {
        PrintF("#%d %s is used by #%d %s as %s%s\n",
               id(), Mnemonic(), use->id(),
               use->Mnemonic(), rep_observed.Mnemonic(),
               (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
      } else {
        PrintF("#%d %s is required by #%d %s as %s%s\n",
               id(), Mnemonic(), use->id(),
               use->Mnemonic(), rep_required.Mnemonic(),
               (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
      }
    }
  }

  return use_double ? Representation::Double() : Representation::Integer32();
}


HInstruction* HPower::New(Isolate* isolate, Zone* zone, HValue* context,
                          HValue* left, HValue* right) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
      double result = power_helper(c_left->DoubleValue(),
                                   c_right->DoubleValue());
      return H_CONSTANT_DOUBLE(std::isnan(result)
                                   ? std::numeric_limits<double>::quiet_NaN()
                                   : result);
    }
  }
  return new(zone) HPower(left, right);
}


HInstruction* HMathMinMax::New(Isolate* isolate, Zone* zone, HValue* context,
                               HValue* left, HValue* right, Operation op) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
      double d_left = c_left->DoubleValue();
      double d_right = c_right->DoubleValue();
      if (op == kMathMin) {
        if (d_left > d_right) return H_CONSTANT_DOUBLE(d_right);
        if (d_left < d_right) return H_CONSTANT_DOUBLE(d_left);
        if (d_left == d_right) {
          // Handle +0 and -0.
          return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_left
                                                                 : d_right);
        }
      } else {
        if (d_left < d_right) return H_CONSTANT_DOUBLE(d_right);
        if (d_left > d_right) return H_CONSTANT_DOUBLE(d_left);
        if (d_left == d_right) {
          // Handle +0 and -0.
          return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_right
                                                                 : d_left);
        }
      }
      // All comparisons failed, must be NaN.
      return H_CONSTANT_DOUBLE(std::numeric_limits<double>::quiet_NaN());
    }
  }
  return new(zone) HMathMinMax(context, left, right, op);
}
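
// Illustrative sketch (not V8 code): the Sign() checks above exist because
// +0.0 and -0.0 compare equal, yet JS Math.min(0, -0) must return -0 and
// Math.max(0, -0) must return +0:
//
//   #include <cmath>
//   int main() {
//     double a = -0.0, b = 0.0;
//     bool equal = (a == b);                  // true: == can't tell them apart
//     bool a_is_negative = std::signbit(a);   // true: the sign bit can
//     return equal && a_is_negative ? 0 : 1;
//   }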


HInstruction* HMod::New(Isolate* isolate, Zone* zone, HValue* context,
                        HValue* left, HValue* right, Strength strength) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if (c_left->HasInteger32Value() && c_right->HasInteger32Value()) {
      int32_t dividend = c_left->Integer32Value();
      int32_t divisor = c_right->Integer32Value();
      if (dividend == kMinInt && divisor == -1) {
        return H_CONSTANT_DOUBLE(-0.0);
      }
      if (divisor != 0) {
        int32_t res = dividend % divisor;
        if ((res == 0) && (dividend < 0)) {
          return H_CONSTANT_DOUBLE(-0.0);
        }
        return H_CONSTANT_INT(res);
      }
    }
  }
  return new (zone) HMod(context, left, right, strength);
}
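
// Illustrative sketch (standalone; TryFoldMod is a hypothetical helper that
// mirrors the guards above): the -0.0 special cases come from JS semantics,
// and the kMinInt check also sidesteps C++ undefined behavior, since
// INT32_MIN % -1 overflows the quotient:
//
//   #include <cstdint>
//   bool TryFoldMod(int32_t dividend, int32_t divisor, int32_t* result) {
//     if (divisor == 0) return false;                             // JS: NaN.
//     if (dividend == INT32_MIN && divisor == -1) return false;   // JS: -0.
//     if (dividend % divisor == 0 && dividend < 0) return false;  // JS: -0,
//     *result = dividend % divisor;      // e.g. (-4) % 2 === -0, which no
//     return true;                       // int32 constant can represent.
//   }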


HInstruction* HDiv::New(Isolate* isolate, Zone* zone, HValue* context,
                        HValue* left, HValue* right, Strength strength) {
  // If left and right are constant values, try to return a constant value.
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
      if (c_right->DoubleValue() != 0) {
        double double_res = c_left->DoubleValue() / c_right->DoubleValue();
        if (IsInt32Double(double_res)) {
          return H_CONSTANT_INT(double_res);
        }
        return H_CONSTANT_DOUBLE(double_res);
      } else {
        int sign = Double(c_left->DoubleValue()).Sign() *
                   Double(c_right->DoubleValue()).Sign();  // Right could be -0.
        return H_CONSTANT_DOUBLE(sign * V8_INFINITY);
      }
    }
  }
  return new (zone) HDiv(context, left, right, strength);
}
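
// Illustrative sketch (not V8 code): the Sign() product above is needed
// because, under IEEE-754, dividing by zero must honor the sign of a
// negative-zero divisor even though -0.0 == 0.0:
//
//   int main() {
//     double pos = 1.0 / 0.0;   // +infinity
//     double neg = 1.0 / -0.0;  // -infinity
//     return (pos > 0 && neg < 0) ? 0 : 1;
//   }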


HInstruction* HBitwise::New(Isolate* isolate, Zone* zone, HValue* context,
                            Token::Value op, HValue* left, HValue* right,
                            Strength strength) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
      int32_t result;
      int32_t v_left = c_left->NumberValueAsInteger32();
      int32_t v_right = c_right->NumberValueAsInteger32();
      switch (op) {
        case Token::BIT_XOR:
          result = v_left ^ v_right;
          break;
        case Token::BIT_AND:
          result = v_left & v_right;
          break;
        case Token::BIT_OR:
          result = v_left | v_right;
          break;
        default:
          result = 0;  // Please the compiler.
          UNREACHABLE();
      }
      return H_CONSTANT_INT(result);
    }
  }
  return new (zone) HBitwise(context, op, left, right, strength);
}


#define DEFINE_NEW_H_BITWISE_INSTR(HInstr, result)                            \
  HInstruction* HInstr::New(Isolate* isolate, Zone* zone, HValue* context,    \
                            HValue* left, HValue* right, Strength strength) { \
    if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {   \
      HConstant* c_left = HConstant::cast(left);                              \
      HConstant* c_right = HConstant::cast(right);                            \
      if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {          \
        return H_CONSTANT_INT(result);                                        \
      }                                                                       \
    }                                                                         \
    return new (zone) HInstr(context, left, right, strength);                 \
  }


DEFINE_NEW_H_BITWISE_INSTR(HSar,
c_left->NumberValueAsInteger32() >> (c_right->NumberValueAsInteger32() & 0x1f))
DEFINE_NEW_H_BITWISE_INSTR(HShl,
c_left->NumberValueAsInteger32() << (c_right->NumberValueAsInteger32() & 0x1f))

#undef DEFINE_NEW_H_BITWISE_INSTR


HInstruction* HShr::New(Isolate* isolate, Zone* zone, HValue* context,
                        HValue* left, HValue* right, Strength strength) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
      int32_t left_val = c_left->NumberValueAsInteger32();
      int32_t right_val = c_right->NumberValueAsInteger32() & 0x1f;
      if ((right_val == 0) && (left_val < 0)) {
        return H_CONSTANT_DOUBLE(static_cast<uint32_t>(left_val));
      }
      return H_CONSTANT_INT(static_cast<uint32_t>(left_val) >> right_val);
    }
  }
  return new (zone) HShr(context, left, right, strength);
}
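
// Illustrative sketch (not V8 code): the special case above exists because a
// zero-distance unsigned shift of a negative int32 yields a uint32 value that
// does not fit in an int32 constant; in JS, (-1 >>> 0) === 4294967295:
//
//   #include <cstdint>
//   int main() {
//     int32_t left = -1;
//     uint32_t shifted = static_cast<uint32_t>(left) >> 0;  // 0xffffffff
//     return shifted == 4294967295u ? 0 : 1;
//   }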


HInstruction* HSeqStringGetChar::New(Isolate* isolate, Zone* zone,
                                     HValue* context, String::Encoding encoding,
                                     HValue* string, HValue* index) {
  if (FLAG_fold_constants && string->IsConstant() && index->IsConstant()) {
    HConstant* c_string = HConstant::cast(string);
    HConstant* c_index = HConstant::cast(index);
    if (c_string->HasStringValue() && c_index->HasInteger32Value()) {
      Handle<String> s = c_string->StringValue();
      int32_t i = c_index->Integer32Value();
      DCHECK_LE(0, i);
      DCHECK_LT(i, s->length());
      return H_CONSTANT_INT(s->Get(i));
    }
  }
  return new(zone) HSeqStringGetChar(encoding, string, index);
}


#undef H_CONSTANT_INT
#undef H_CONSTANT_DOUBLE


std::ostream& HBitwise::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << Token::Name(op_) << " ";
  return HBitwiseBinaryOperation::PrintDataTo(os);
}


void HPhi::SimplifyConstantInputs() {
  // Convert constant inputs to integers when all uses are truncating.
  // This must happen before representation inference takes place.
  if (!CheckUsesForFlag(kTruncatingToInt32)) return;
  for (int i = 0; i < OperandCount(); ++i) {
    if (!OperandAt(i)->IsConstant()) return;
  }
  HGraph* graph = block()->graph();
  for (int i = 0; i < OperandCount(); ++i) {
    HConstant* operand = HConstant::cast(OperandAt(i));
    if (operand->HasInteger32Value()) {
      continue;
    } else if (operand->HasDoubleValue()) {
      HConstant* integer_input = HConstant::New(
          graph->isolate(), graph->zone(), graph->GetInvalidContext(),
          DoubleToInt32(operand->DoubleValue()));
      integer_input->InsertAfter(operand);
      SetOperandAt(i, integer_input);
    } else if (operand->HasBooleanValue()) {
      SetOperandAt(i, operand->BooleanValue() ? graph->GetConstant1()
                                              : graph->GetConstant0());
    } else if (operand->ImmortalImmovable()) {
      SetOperandAt(i, graph->GetConstant0());
    }
  }
  // Overwrite observed input representations because they are likely Tagged.
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    if (use->IsBinaryOperation()) {
      HBinaryOperation::cast(use)->set_observed_input_representation(
          it.index(), Representation::Smi());
    }
  }
}


void HPhi::InferRepresentation(HInferRepresentationPhase* h_infer) {
  DCHECK(CheckFlag(kFlexibleRepresentation));
  Representation new_rep = RepresentationFromUses();
  UpdateRepresentation(new_rep, h_infer, "uses");
  new_rep = RepresentationFromInputs();
  UpdateRepresentation(new_rep, h_infer, "inputs");
  new_rep = RepresentationFromUseRequirements();
  UpdateRepresentation(new_rep, h_infer, "use requirements");
}


Representation HPhi::RepresentationFromInputs() {
  bool has_type_feedback =
      smi_non_phi_uses() + int32_non_phi_uses() + double_non_phi_uses() > 0;
  Representation r = representation();
  for (int i = 0; i < OperandCount(); ++i) {
    // Ignore the conservative Tagged assumption of parameters if we have
    // reason to believe that it's too conservative.
    if (has_type_feedback && OperandAt(i)->IsParameter()) continue;

    r = r.generalize(OperandAt(i)->KnownOptimalRepresentation());
  }
  return r;
}


// Returns a representation if all uses agree on the same representation.
// Integer32 is also returned when some uses are Smi but others are Integer32.
Representation HValue::RepresentationFromUseRequirements() {
  Representation rep = Representation::None();
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    // Ignore the use requirement from never-run code.
    if (it.value()->block()->IsUnreachable()) continue;

    // We check for observed_input_representation elsewhere.
    Representation use_rep =
        it.value()->RequiredInputRepresentation(it.index());
    if (rep.IsNone()) {
      rep = use_rep;
      continue;
    }
    if (use_rep.IsNone() || rep.Equals(use_rep)) continue;
    if (rep.generalize(use_rep).IsInteger32()) {
      rep = Representation::Integer32();
      continue;
    }
    return Representation::None();
  }
  return rep;
}


bool HValue::HasNonSmiUse() {
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    // We check for observed_input_representation elsewhere.
    Representation use_rep =
        it.value()->RequiredInputRepresentation(it.index());
    if (!use_rep.IsNone() &&
        !use_rep.IsSmi() &&
        !use_rep.IsTagged()) {
      return true;
    }
  }
  return false;
}


// Node-specific verification code is only included in debug mode.
#ifdef DEBUG

void HPhi::Verify() {
  DCHECK(OperandCount() == block()->predecessors()->length());
  for (int i = 0; i < OperandCount(); ++i) {
    HValue* value = OperandAt(i);
    HBasicBlock* defining_block = value->block();
    HBasicBlock* predecessor_block = block()->predecessors()->at(i);
    DCHECK(defining_block == predecessor_block ||
           defining_block->Dominates(predecessor_block));
  }
}


void HSimulate::Verify() {
  HInstruction::Verify();
  DCHECK(HasAstId() || next()->IsEnterInlined());
}


void HCheckHeapObject::Verify() {
  HInstruction::Verify();
  DCHECK(HasNoUses());
}


void HCheckValue::Verify() {
  HInstruction::Verify();
  DCHECK(HasNoUses());
}

#endif


HObjectAccess HObjectAccess::ForFixedArrayHeader(int offset) {
  DCHECK(offset >= 0);
  DCHECK(offset < FixedArray::kHeaderSize);
  if (offset == FixedArray::kLengthOffset) return ForFixedArrayLength();
  return HObjectAccess(kInobject, offset);
}


HObjectAccess HObjectAccess::ForMapAndOffset(Handle<Map> map, int offset,
                                             Representation representation) {
  DCHECK(offset >= 0);
  Portion portion = kInobject;

  if (offset == JSObject::kElementsOffset) {
    portion = kElementsPointer;
  } else if (offset == JSObject::kMapOffset) {
    portion = kMaps;
  }
  bool existing_inobject_property = true;
  if (!map.is_null()) {
    existing_inobject_property = (offset <
        map->instance_size() - map->unused_property_fields() * kPointerSize);
  }
  return HObjectAccess(portion, offset, representation, Handle<String>::null(),
                       false, existing_inobject_property);
}


HObjectAccess HObjectAccess::ForAllocationSiteOffset(int offset) {
  switch (offset) {
    case AllocationSite::kTransitionInfoOffset:
      return HObjectAccess(kInobject, offset, Representation::Tagged());
    case AllocationSite::kNestedSiteOffset:
      return HObjectAccess(kInobject, offset, Representation::Tagged());
    case AllocationSite::kPretenureDataOffset:
      return HObjectAccess(kInobject, offset, Representation::Smi());
    case AllocationSite::kPretenureCreateCountOffset:
      return HObjectAccess(kInobject, offset, Representation::Smi());
    case AllocationSite::kDependentCodeOffset:
      return HObjectAccess(kInobject, offset, Representation::Tagged());
    case AllocationSite::kWeakNextOffset:
      return HObjectAccess(kInobject, offset, Representation::Tagged());
    default:
      UNREACHABLE();
  }
  return HObjectAccess(kInobject, offset);
}


HObjectAccess HObjectAccess::ForContextSlot(int index) {
  DCHECK(index >= 0);
  Portion portion = kInobject;
  int offset = Context::kHeaderSize + index * kPointerSize;
  DCHECK_EQ(offset, Context::SlotOffset(index) + kHeapObjectTag);
  return HObjectAccess(portion, offset, Representation::Tagged());
}


HObjectAccess HObjectAccess::ForScriptContext(int index) {
  DCHECK(index >= 0);
  Portion portion = kInobject;
  int offset = ScriptContextTable::GetContextOffset(index);
  return HObjectAccess(portion, offset, Representation::Tagged());
}


HObjectAccess HObjectAccess::ForJSArrayOffset(int offset) {
  DCHECK(offset >= 0);
  Portion portion = kInobject;

  if (offset == JSObject::kElementsOffset) {
    portion = kElementsPointer;
  } else if (offset == JSArray::kLengthOffset) {
    portion = kArrayLengths;
  } else if (offset == JSObject::kMapOffset) {
    portion = kMaps;
  }
  return HObjectAccess(portion, offset);
}


HObjectAccess HObjectAccess::ForBackingStoreOffset(int offset,
    Representation representation) {
  DCHECK(offset >= 0);
  return HObjectAccess(kBackingStore, offset, representation,
                       Handle<String>::null(), false, false);
}


HObjectAccess HObjectAccess::ForField(Handle<Map> map, int index,
                                      Representation representation,
                                      Handle<String> name) {
  if (index < 0) {
    // Negative property indices are in-object properties, indexed
    // from the end of the fixed part of the object.
    int offset = (index * kPointerSize) + map->instance_size();
    return HObjectAccess(kInobject, offset, representation, name, false, true);
  } else {
    // Non-negative property indices are in the properties array.
    int offset = (index * kPointerSize) + FixedArray::kHeaderSize;
    return HObjectAccess(kBackingStore, offset, representation, name,
                         false, false);
  }
}


void HObjectAccess::SetGVNFlags(HValue *instr, PropertyAccessType access_type) {
  // Set the appropriate GVN flags for a given load or store instruction.
  if (access_type == STORE) {
    // Track dominating allocations in order to eliminate write barriers.
    instr->SetDependsOnFlag(::v8::internal::kNewSpacePromotion);
    instr->SetFlag(HValue::kTrackSideEffectDominators);
  } else {
    // Try to GVN loads, but don't hoist above map changes.
    instr->SetFlag(HValue::kUseGVN);
    instr->SetDependsOnFlag(::v8::internal::kMaps);
  }

  switch (portion()) {
    case kArrayLengths:
      if (access_type == STORE) {
        instr->SetChangesFlag(::v8::internal::kArrayLengths);
      } else {
        instr->SetDependsOnFlag(::v8::internal::kArrayLengths);
      }
      break;
    case kStringLengths:
      if (access_type == STORE) {
        instr->SetChangesFlag(::v8::internal::kStringLengths);
      } else {
        instr->SetDependsOnFlag(::v8::internal::kStringLengths);
      }
      break;
    case kInobject:
      if (access_type == STORE) {
        instr->SetChangesFlag(::v8::internal::kInobjectFields);
      } else {
        instr->SetDependsOnFlag(::v8::internal::kInobjectFields);
      }
      break;
    case kDouble:
      if (access_type == STORE) {
        instr->SetChangesFlag(::v8::internal::kDoubleFields);
      } else {
        instr->SetDependsOnFlag(::v8::internal::kDoubleFields);
      }
      break;
    case kBackingStore:
      if (access_type == STORE) {
        instr->SetChangesFlag(::v8::internal::kBackingStoreFields);
      } else {
        instr->SetDependsOnFlag(::v8::internal::kBackingStoreFields);
      }
      break;
    case kElementsPointer:
      if (access_type == STORE) {
        instr->SetChangesFlag(::v8::internal::kElementsPointer);
      } else {
        instr->SetDependsOnFlag(::v8::internal::kElementsPointer);
      }
      break;
    case kMaps:
      if (access_type == STORE) {
        instr->SetChangesFlag(::v8::internal::kMaps);
      } else {
        instr->SetDependsOnFlag(::v8::internal::kMaps);
      }
      break;
    case kExternalMemory:
      if (access_type == STORE) {
        instr->SetChangesFlag(::v8::internal::kExternalMemory);
      } else {
        instr->SetDependsOnFlag(::v8::internal::kExternalMemory);
      }
      break;
  }
}


std::ostream& operator<<(std::ostream& os, const HObjectAccess& access) {
  os << ".";

  switch (access.portion()) {
    case HObjectAccess::kArrayLengths:
    case HObjectAccess::kStringLengths:
      os << "%length";
      break;
    case HObjectAccess::kElementsPointer:
      os << "%elements";
      break;
    case HObjectAccess::kMaps:
      os << "%map";
      break;
    case HObjectAccess::kDouble:  // fall through
    case HObjectAccess::kInobject:
      if (!access.name().is_null()) {
        os << Handle<String>::cast(access.name())->ToCString().get();
      }
      os << "[in-object]";
      break;
    case HObjectAccess::kBackingStore:
      if (!access.name().is_null()) {
        os << Handle<String>::cast(access.name())->ToCString().get();
      }
      os << "[backing-store]";
      break;
    case HObjectAccess::kExternalMemory:
      os << "[external-memory]";
      break;
  }

  return os << "@" << access.offset();
}

}  // namespace internal
}  // namespace v8