1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #include "src/base/bits.h"
8 #include "src/double.h"
9 #include "src/factory.h"
10 #include "src/hydrogen-infer-representation.h"
11 #include "src/property-details-inl.h"
13 #if V8_TARGET_ARCH_IA32
14 #include "src/ia32/lithium-ia32.h" // NOLINT
15 #elif V8_TARGET_ARCH_X64
16 #include "src/x64/lithium-x64.h" // NOLINT
17 #elif V8_TARGET_ARCH_ARM64
18 #include "src/arm64/lithium-arm64.h" // NOLINT
19 #elif V8_TARGET_ARCH_ARM
20 #include "src/arm/lithium-arm.h" // NOLINT
21 #elif V8_TARGET_ARCH_PPC
22 #include "src/ppc/lithium-ppc.h" // NOLINT
23 #elif V8_TARGET_ARCH_MIPS
24 #include "src/mips/lithium-mips.h" // NOLINT
25 #elif V8_TARGET_ARCH_MIPS64
26 #include "src/mips64/lithium-mips64.h" // NOLINT
27 #elif V8_TARGET_ARCH_X87
28 #include "src/x87/lithium-x87.h" // NOLINT
30 #error Unsupported target architecture.
33 #include "src/base/safe_math.h"
38 #define DEFINE_COMPILE(type) \
39 LInstruction* H##type::CompileToLithium(LChunkBuilder* builder) { \
40 return builder->Do##type(this); \
42 HYDROGEN_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
46 Isolate* HValue::isolate() const {
47 DCHECK(block() != NULL);
48 return block()->isolate();
52 void HValue::AssumeRepresentation(Representation r) {
53 if (CheckFlag(kFlexibleRepresentation)) {
54 ChangeRepresentation(r);
55 // The representation of the value is dictated by type feedback and
56 // will not be changed later.
57 ClearFlag(kFlexibleRepresentation);
62 void HValue::InferRepresentation(HInferRepresentationPhase* h_infer) {
63 DCHECK(CheckFlag(kFlexibleRepresentation));
64 Representation new_rep = RepresentationFromInputs();
65 UpdateRepresentation(new_rep, h_infer, "inputs");
66 new_rep = RepresentationFromUses();
67 UpdateRepresentation(new_rep, h_infer, "uses");
68 if (representation().IsSmi() && HasNonSmiUse()) {
70 Representation::Integer32(), h_infer, "use requirements");
75 Representation HValue::RepresentationFromUses() {
76 if (HasNoUses()) return Representation::None();
78 // Array of use counts for each representation.
79 int use_count[Representation::kNumRepresentations] = { 0 };
81 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
82 HValue* use = it.value();
83 Representation rep = use->observed_input_representation(it.index());
84 if (rep.IsNone()) continue;
85 if (FLAG_trace_representation) {
86 PrintF("#%d %s is used by #%d %s as %s%s\n",
87 id(), Mnemonic(), use->id(), use->Mnemonic(), rep.Mnemonic(),
88 (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
90 use_count[rep.kind()] += 1;
92 if (IsPhi()) HPhi::cast(this)->AddIndirectUsesTo(&use_count[0]);
93 int tagged_count = use_count[Representation::kTagged];
94 int double_count = use_count[Representation::kDouble];
95 int int32_count = use_count[Representation::kInteger32];
96 int smi_count = use_count[Representation::kSmi];
98 if (tagged_count > 0) return Representation::Tagged();
99 if (double_count > 0) return Representation::Double();
100 if (int32_count > 0) return Representation::Integer32();
101 if (smi_count > 0) return Representation::Smi();
103 return Representation::None();
107 void HValue::UpdateRepresentation(Representation new_rep,
108 HInferRepresentationPhase* h_infer,
109 const char* reason) {
110 Representation r = representation();
111 if (new_rep.is_more_general_than(r)) {
112 if (CheckFlag(kCannotBeTagged) && new_rep.IsTagged()) return;
113 if (FLAG_trace_representation) {
114 PrintF("Changing #%d %s representation %s -> %s based on %s\n",
115 id(), Mnemonic(), r.Mnemonic(), new_rep.Mnemonic(), reason);
117 ChangeRepresentation(new_rep);
118 AddDependantsToWorklist(h_infer);
123 void HValue::AddDependantsToWorklist(HInferRepresentationPhase* h_infer) {
124 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
125 h_infer->AddToWorklist(it.value());
127 for (int i = 0; i < OperandCount(); ++i) {
128 h_infer->AddToWorklist(OperandAt(i));
133 static int32_t ConvertAndSetOverflow(Representation r,
137 if (result > Smi::kMaxValue) {
139 return Smi::kMaxValue;
141 if (result < Smi::kMinValue) {
143 return Smi::kMinValue;
146 if (result > kMaxInt) {
150 if (result < kMinInt) {
155 return static_cast<int32_t>(result);
159 static int32_t AddWithoutOverflow(Representation r,
163 int64_t result = static_cast<int64_t>(a) + static_cast<int64_t>(b);
164 return ConvertAndSetOverflow(r, result, overflow);
168 static int32_t SubWithoutOverflow(Representation r,
172 int64_t result = static_cast<int64_t>(a) - static_cast<int64_t>(b);
173 return ConvertAndSetOverflow(r, result, overflow);
177 static int32_t MulWithoutOverflow(const Representation& r,
181 int64_t result = static_cast<int64_t>(a) * static_cast<int64_t>(b);
182 return ConvertAndSetOverflow(r, result, overflow);
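// Returns a conservative bit mask that covers every value in the range
// (the exact value when the range is a single constant).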
186 int32_t Range::Mask() const {
187 if (lower_ == upper_) return lower_;
190 while (res < upper_) {
191 res = (res << 1) | 1;
199 void Range::AddConstant(int32_t value) {
200 if (value == 0) return;
201 bool may_overflow = false; // Overflow is ignored here.
202 Representation r = Representation::Integer32();
203 lower_ = AddWithoutOverflow(r, lower_, value, &may_overflow);
204 upper_ = AddWithoutOverflow(r, upper_, value, &may_overflow);
211 void Range::Intersect(Range* other) {
212 upper_ = Min(upper_, other->upper_);
213 lower_ = Max(lower_, other->lower_);
214 bool b = CanBeMinusZero() && other->CanBeMinusZero();
215 set_can_be_minus_zero(b);
219 void Range::Union(Range* other) {
220 upper_ = Max(upper_, other->upper_);
221 lower_ = Min(lower_, other->lower_);
222 bool b = CanBeMinusZero() || other->CanBeMinusZero();
223 set_can_be_minus_zero(b);
227 void Range::CombinedMax(Range* other) {
228 upper_ = Max(upper_, other->upper_);
229 lower_ = Max(lower_, other->lower_);
230 set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
234 void Range::CombinedMin(Range* other) {
235 upper_ = Min(upper_, other->upper_);
236 lower_ = Min(lower_, other->lower_);
237 set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
241 void Range::Sar(int32_t value) {
242 int32_t bits = value & 0x1F;
243 lower_ = lower_ >> bits;
244 upper_ = upper_ >> bits;
245 set_can_be_minus_zero(false);
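// Shifts both bounds left by (value & 0x1F); if a bound does not survive the
// round trip through a right shift, the shift overflowed and the range is
// widened conservatively.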
249 void Range::Shl(int32_t value) {
250 int32_t bits = value & 0x1F;
251 int old_lower = lower_;
252 int old_upper = upper_;
253 lower_ = lower_ << bits;
254 upper_ = upper_ << bits;
255 if (old_lower != lower_ >> bits || old_upper != upper_ >> bits) {
259 set_can_be_minus_zero(false);
263 bool Range::AddAndCheckOverflow(const Representation& r, Range* other) {
264 bool may_overflow = false;
265 lower_ = AddWithoutOverflow(r, lower_, other->lower(), &may_overflow);
266 upper_ = AddWithoutOverflow(r, upper_, other->upper(), &may_overflow);
275 bool Range::SubAndCheckOverflow(const Representation& r, Range* other) {
276 bool may_overflow = false;
277 lower_ = SubWithoutOverflow(r, lower_, other->upper(), &may_overflow);
278 upper_ = SubWithoutOverflow(r, upper_, other->lower(), &may_overflow);
287 void Range::KeepOrder() {
288 if (lower_ > upper_) {
289 int32_t tmp = lower_;
297 void Range::Verify() const {
298 DCHECK(lower_ <= upper_);
303 bool Range::MulAndCheckOverflow(const Representation& r, Range* other) {
304 bool may_overflow = false;
305 int v1 = MulWithoutOverflow(r, lower_, other->lower(), &may_overflow);
306 int v2 = MulWithoutOverflow(r, lower_, other->upper(), &may_overflow);
307 int v3 = MulWithoutOverflow(r, upper_, other->lower(), &may_overflow);
308 int v4 = MulWithoutOverflow(r, upper_, other->upper(), &may_overflow);
309 lower_ = Min(Min(v1, v2), Min(v3, v4));
310 upper_ = Max(Max(v1, v2), Max(v3, v4));
318 bool HValue::IsDefinedAfter(HBasicBlock* other) const {
319 return block()->block_id() > other->block_id();
323 HUseListNode* HUseListNode::tail() {
324 // Skip and remove dead items in the use list.
325 while (tail_ != NULL && tail_->value()->CheckFlag(HValue::kIsDead)) {
326 tail_ = tail_->tail_;
332 bool HValue::CheckUsesForFlag(Flag f) const {
333 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
334 if (it.value()->IsSimulate()) continue;
335 if (!it.value()->CheckFlag(f)) return false;
341 bool HValue::CheckUsesForFlag(Flag f, HValue** value) const {
342 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
343 if (it.value()->IsSimulate()) continue;
344 if (!it.value()->CheckFlag(f)) {
353 bool HValue::HasAtLeastOneUseWithFlagAndNoneWithout(Flag f) const {
354 bool return_value = false;
355 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
356 if (it.value()->IsSimulate()) continue;
357 if (!it.value()->CheckFlag(f)) return false;
364 HUseIterator::HUseIterator(HUseListNode* head) : next_(head) {
369 void HUseIterator::Advance() {
371 if (current_ != NULL) {
372 next_ = current_->tail();
373 value_ = current_->value();
374 index_ = current_->index();
379 int HValue::UseCount() const {
381 for (HUseIterator it(uses()); !it.Done(); it.Advance()) ++count;
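// Removes the use recorded for (value, index) from this value's use list and
// returns the detached node so that RegisterUse can reuse it.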
386 HUseListNode* HValue::RemoveUse(HValue* value, int index) {
387 HUseListNode* previous = NULL;
388 HUseListNode* current = use_list_;
389 while (current != NULL) {
390 if (current->value() == value && current->index() == index) {
391 if (previous == NULL) {
392 use_list_ = current->tail();
394 previous->set_tail(current->tail());
400 current = current->tail();
404 // Do not reuse use list nodes in debug mode, zap them.
405 if (current != NULL) {
408 HUseListNode(current->value(), current->index(), NULL);
417 bool HValue::Equals(HValue* other) {
418 if (other->opcode() != opcode()) return false;
419 if (!other->representation().Equals(representation())) return false;
420 if (!other->type_.Equals(type_)) return false;
421 if (other->flags() != flags()) return false;
422 if (OperandCount() != other->OperandCount()) return false;
423 for (int i = 0; i < OperandCount(); ++i) {
424 if (OperandAt(i)->id() != other->OperandAt(i)->id()) return false;
426 bool result = DataEquals(other);
427 DCHECK(!result || Hashcode() == other->Hashcode());
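// The hash is computed from the opcode and the operand ids, so values that
// compare equal via Equals() hash to the same value (see the DCHECK above).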
432 intptr_t HValue::Hashcode() {
433 intptr_t result = opcode();
434 int count = OperandCount();
435 for (int i = 0; i < count; ++i) {
436 result = result * 19 + OperandAt(i)->id() + (result >> 7);
442 const char* HValue::Mnemonic() const {
444 #define MAKE_CASE(type) case k##type: return #type;
445 HYDROGEN_CONCRETE_INSTRUCTION_LIST(MAKE_CASE)
447 case kPhi: return "Phi";
453 bool HValue::CanReplaceWithDummyUses() {
454 return FLAG_unreachable_code_elimination &&
455 !(block()->IsReachable() ||
457 IsControlInstruction() ||
458 IsArgumentsObject() ||
459 IsCapturedObject() ||
466 bool HValue::IsInteger32Constant() {
467 return IsConstant() && HConstant::cast(this)->HasInteger32Value();
471 int32_t HValue::GetInteger32Constant() {
472 return HConstant::cast(this)->Integer32Value();
476 bool HValue::EqualsInteger32Constant(int32_t value) {
477 return IsInteger32Constant() && GetInteger32Constant() == value;
481 void HValue::SetOperandAt(int index, HValue* value) {
482 RegisterUse(index, value);
483 InternalSetOperandAt(index, value);
487 void HValue::DeleteAndReplaceWith(HValue* other) {
488 // We replace all uses first, so Delete can assert that there are none.
489 if (other != NULL) ReplaceAllUsesWith(other);
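// Rewires every use of this value to point at |other| and splices the use
// list nodes onto |other|'s use list.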
495 void HValue::ReplaceAllUsesWith(HValue* other) {
496 while (use_list_ != NULL) {
497 HUseListNode* list_node = use_list_;
498 HValue* value = list_node->value();
499 DCHECK(!value->block()->IsStartBlock());
500 value->InternalSetOperandAt(list_node->index(), other);
501 use_list_ = list_node->tail();
502 list_node->set_tail(other->use_list_);
503 other->use_list_ = list_node;
508 void HValue::Kill() {
509 // Instead of going through the entire use list of each operand, we only
510 // check the first item in each use list and rely on the tail() method to
511 // skip dead items, removing them lazily next time we traverse the list.
513 for (int i = 0; i < OperandCount(); ++i) {
514 HValue* operand = OperandAt(i);
515 if (operand == NULL) continue;
516 HUseListNode* first = operand->use_list_;
517 if (first != NULL && first->value()->CheckFlag(kIsDead)) {
518 operand->use_list_ = first->tail();
524 void HValue::SetBlock(HBasicBlock* block) {
525 DCHECK(block_ == NULL || block == NULL);
527 if (id_ == kNoNumber && block != NULL) {
528 id_ = block->graph()->GetNextValueID(this);
533 std::ostream& operator<<(std::ostream& os, const HValue& v) {
534 return v.PrintTo(os);
538 std::ostream& operator<<(std::ostream& os, const TypeOf& t) {
539 if (t.value->representation().IsTagged() &&
540 !t.value->type().Equals(HType::Tagged()))
542 return os << " type:" << t.value->type();
546 std::ostream& operator<<(std::ostream& os, const ChangesOf& c) {
547 GVNFlagSet changes_flags = c.value->ChangesFlags();
548 if (changes_flags.IsEmpty()) return os;
550 if (changes_flags == c.value->AllSideEffectsFlagSet()) {
553 bool add_comma = false;
554 #define PRINT_DO(Type) \
555 if (changes_flags.Contains(k##Type)) { \
556 if (add_comma) os << ","; \
560 GVN_TRACKED_FLAG_LIST(PRINT_DO);
561 GVN_UNTRACKED_FLAG_LIST(PRINT_DO);
568 bool HValue::HasMonomorphicJSObjectType() {
569 return !GetMonomorphicJSObjectMap().is_null();
573 bool HValue::UpdateInferredType() {
574 HType type = CalculateInferredType();
575 bool result = (!type.Equals(type_));
581 void HValue::RegisterUse(int index, HValue* new_value) {
582 HValue* old_value = OperandAt(index);
583 if (old_value == new_value) return;
585 HUseListNode* removed = NULL;
586 if (old_value != NULL) {
587 removed = old_value->RemoveUse(this, index);
590 if (new_value != NULL) {
591 if (removed == NULL) {
592 new_value->use_list_ = new(new_value->block()->zone()) HUseListNode(
593 this, index, new_value->use_list_);
595 removed->set_tail(new_value->use_list_);
596 new_value->use_list_ = removed;
602 void HValue::AddNewRange(Range* r, Zone* zone) {
603 if (!HasRange()) ComputeInitialRange(zone);
604 if (!HasRange()) range_ = new(zone) Range();
606 r->StackUpon(range_);
611 void HValue::RemoveLastAddedRange() {
613 DCHECK(range_->next() != NULL);
614 range_ = range_->next();
618 void HValue::ComputeInitialRange(Zone* zone) {
620 range_ = InferRange(zone);
625 std::ostream& HInstruction::PrintTo(std::ostream& os) const { // NOLINT
626 os << Mnemonic() << " ";
627 PrintDataTo(os) << ChangesOf(this) << TypeOf(this);
628 if (CheckFlag(HValue::kHasNoObservableSideEffects)) os << " [noOSE]";
629 if (CheckFlag(HValue::kIsDead)) os << " [dead]";
634 std::ostream& HInstruction::PrintDataTo(std::ostream& os) const { // NOLINT
635 for (int i = 0; i < OperandCount(); ++i) {
636 if (i > 0) os << " ";
637 os << NameOf(OperandAt(i));
643 void HInstruction::Unlink() {
645 DCHECK(!IsControlInstruction()); // Must never move control instructions.
646 DCHECK(!IsBlockEntry()); // Doesn't make sense to delete these.
647 DCHECK(previous_ != NULL);
648 previous_->next_ = next_;
650 DCHECK(block()->last() == this);
651 block()->set_last(previous_);
653 next_->previous_ = previous_;
659 void HInstruction::InsertBefore(HInstruction* next) {
661 DCHECK(!next->IsBlockEntry());
662 DCHECK(!IsControlInstruction());
663 DCHECK(!next->block()->IsStartBlock());
664 DCHECK(next->previous_ != NULL);
665 HInstruction* prev = next->previous();
667 next->previous_ = this;
670 SetBlock(next->block());
671 if (!has_position() && next->has_position()) {
672 set_position(next->position());
677 void HInstruction::InsertAfter(HInstruction* previous) {
679 DCHECK(!previous->IsControlInstruction());
680 DCHECK(!IsControlInstruction() || previous->next_ == NULL);
681 HBasicBlock* block = previous->block();
682 // Never insert anything except constants into the start block after finishing the graph.
684 if (block->IsStartBlock() && block->IsFinished() && !IsConstant()) {
685 DCHECK(block->end()->SecondSuccessor() == NULL);
686 InsertAfter(block->end()->FirstSuccessor()->first());
690 // If we're inserting after an instruction with side-effects that is
691 // followed by a simulate instruction, we need to insert after the
692 // simulate instruction instead.
693 HInstruction* next = previous->next_;
694 if (previous->HasObservableSideEffects() && next != NULL) {
695 DCHECK(next->IsSimulate());
697 next = previous->next_;
700 previous_ = previous;
703 previous->next_ = this;
704 if (next != NULL) next->previous_ = this;
705 if (block->last() == previous) {
706 block->set_last(this);
708 if (!has_position() && previous->has_position()) {
709 set_position(previous->position());
714 bool HInstruction::Dominates(HInstruction* other) {
715 if (block() != other->block()) {
716 return block()->Dominates(other->block());
718 // Both instructions are in the same basic block. This instruction
719 // should precede the other one in order to dominate it.
720 for (HInstruction* instr = next(); instr != NULL; instr = instr->next()) {
721 if (instr == other) {
730 void HInstruction::Verify() {
731 // Verify that input operands are defined before use.
732 HBasicBlock* cur_block = block();
733 for (int i = 0; i < OperandCount(); ++i) {
734 HValue* other_operand = OperandAt(i);
735 if (other_operand == NULL) continue;
736 HBasicBlock* other_block = other_operand->block();
737 if (cur_block == other_block) {
738 if (!other_operand->IsPhi()) {
739 HInstruction* cur = this->previous();
740 while (cur != NULL) {
741 if (cur == other_operand) break;
742 cur = cur->previous();
744 // Must reach other operand in the same block!
745 DCHECK(cur == other_operand);
748 // If the following assert fires, you may have forgotten an
750 DCHECK(other_block->Dominates(cur_block));
754 // Verify that instructions that may have side-effects are followed
755 // by a simulate instruction.
756 if (HasObservableSideEffects() && !IsOsrEntry()) {
757 DCHECK(next()->IsSimulate());
760 // Verify that instructions that can be eliminated by GVN have overridden
761 // HValue::DataEquals. The default implementation is UNREACHABLE. We
762 // don't actually care whether DataEquals returns true or false here.
763 if (CheckFlag(kUseGVN)) DataEquals(this);
765 // Verify that all uses are in the graph.
766 for (HUseIterator use = uses(); !use.Done(); use.Advance()) {
767 if (use.value()->IsInstruction()) {
768 DCHECK(HInstruction::cast(use.value())->IsLinked());
775 bool HInstruction::CanDeoptimize() {
776 // TODO(titzer): make this a virtual method?
778 case HValue::kAbnormalExit:
779 case HValue::kAccessArgumentsAt:
780 case HValue::kAllocate:
781 case HValue::kArgumentsElements:
782 case HValue::kArgumentsLength:
783 case HValue::kArgumentsObject:
784 case HValue::kBlockEntry:
785 case HValue::kBoundsCheckBaseIndexInformation:
786 case HValue::kCallFunction:
787 case HValue::kCallNew:
788 case HValue::kCallNewArray:
789 case HValue::kCallStub:
790 case HValue::kCapturedObject:
791 case HValue::kClassOfTestAndBranch:
792 case HValue::kCompareGeneric:
793 case HValue::kCompareHoleAndBranch:
794 case HValue::kCompareMap:
795 case HValue::kCompareMinusZeroAndBranch:
796 case HValue::kCompareNumericAndBranch:
797 case HValue::kCompareObjectEqAndBranch:
798 case HValue::kConstant:
799 case HValue::kConstructDouble:
800 case HValue::kContext:
801 case HValue::kDebugBreak:
802 case HValue::kDeclareGlobals:
803 case HValue::kDoubleBits:
804 case HValue::kDummyUse:
805 case HValue::kEnterInlined:
806 case HValue::kEnvironmentMarker:
807 case HValue::kForceRepresentation:
808 case HValue::kGetCachedArrayIndex:
810 case HValue::kHasCachedArrayIndexAndBranch:
811 case HValue::kHasInstanceTypeAndBranch:
812 case HValue::kInnerAllocatedObject:
813 case HValue::kInstanceOf:
814 case HValue::kInstanceOfKnownGlobal:
815 case HValue::kIsConstructCallAndBranch:
816 case HValue::kIsObjectAndBranch:
817 case HValue::kIsSmiAndBranch:
818 case HValue::kIsStringAndBranch:
819 case HValue::kIsUndetectableAndBranch:
820 case HValue::kLeaveInlined:
821 case HValue::kLoadFieldByIndex:
822 case HValue::kLoadGlobalGeneric:
823 case HValue::kLoadNamedField:
824 case HValue::kLoadNamedGeneric:
825 case HValue::kLoadRoot:
826 case HValue::kMapEnumLength:
827 case HValue::kMathMinMax:
828 case HValue::kParameter:
830 case HValue::kPushArguments:
831 case HValue::kRegExpLiteral:
832 case HValue::kReturn:
833 case HValue::kSeqStringGetChar:
834 case HValue::kStoreCodeEntry:
835 case HValue::kStoreFrameContext:
836 case HValue::kStoreKeyed:
837 case HValue::kStoreNamedField:
838 case HValue::kStoreNamedGeneric:
839 case HValue::kStringCharCodeAt:
840 case HValue::kStringCharFromCode:
841 case HValue::kTailCallThroughMegamorphicCache:
842 case HValue::kThisFunction:
843 case HValue::kTypeofIsAndBranch:
844 case HValue::kUnknownOSRValue:
845 case HValue::kUseConst:
849 case HValue::kAllocateBlockContext:
850 case HValue::kApplyArguments:
851 case HValue::kBitwise:
852 case HValue::kBoundsCheck:
853 case HValue::kBranch:
854 case HValue::kCallJSFunction:
855 case HValue::kCallRuntime:
856 case HValue::kCallWithDescriptor:
857 case HValue::kChange:
858 case HValue::kCheckHeapObject:
859 case HValue::kCheckInstanceType:
860 case HValue::kCheckMapValue:
861 case HValue::kCheckMaps:
862 case HValue::kCheckSmi:
863 case HValue::kCheckValue:
864 case HValue::kClampToUint8:
865 case HValue::kDateField:
866 case HValue::kDeoptimize:
868 case HValue::kForInCacheArray:
869 case HValue::kForInPrepareMap:
870 case HValue::kFunctionLiteral:
871 case HValue::kInvokeFunction:
872 case HValue::kLoadContextSlot:
873 case HValue::kLoadFunctionPrototype:
874 case HValue::kLoadGlobalCell:
875 case HValue::kLoadKeyed:
876 case HValue::kLoadKeyedGeneric:
877 case HValue::kMathFloorOfDiv:
880 case HValue::kOsrEntry:
884 case HValue::kSeqStringSetChar:
887 case HValue::kSimulate:
888 case HValue::kStackCheck:
889 case HValue::kStoreContextSlot:
890 case HValue::kStoreGlobalCell:
891 case HValue::kStoreKeyedGeneric:
892 case HValue::kStringAdd:
893 case HValue::kStringCompareAndBranch:
895 case HValue::kToFastProperties:
896 case HValue::kTransitionElementsKind:
897 case HValue::kTrapAllocationMemento:
898 case HValue::kTypeof:
899 case HValue::kUnaryMathOperation:
900 case HValue::kWrapReceiver:
908 std::ostream& operator<<(std::ostream& os, const NameOf& v) {
909 return os << v.value->representation().Mnemonic() << v.value->id();
912 std::ostream& HDummyUse::PrintDataTo(std::ostream& os) const { // NOLINT
913 return os << NameOf(value());
917 std::ostream& HEnvironmentMarker::PrintDataTo(
918 std::ostream& os) const { // NOLINT
919 return os << (kind() == BIND ? "bind" : "lookup") << " var[" << index()
924 std::ostream& HUnaryCall::PrintDataTo(std::ostream& os) const { // NOLINT
925 return os << NameOf(value()) << " #" << argument_count();
929 std::ostream& HCallJSFunction::PrintDataTo(std::ostream& os) const { // NOLINT
930 return os << NameOf(function()) << " #" << argument_count();
934 HCallJSFunction* HCallJSFunction::New(Isolate* isolate, Zone* zone,
935 HValue* context, HValue* function,
937 bool pass_argument_count) {
938 bool has_stack_check = false;
939 if (function->IsConstant()) {
940 HConstant* fun_const = HConstant::cast(function);
941 Handle<JSFunction> jsfun =
942 Handle<JSFunction>::cast(fun_const->handle(isolate));
943 has_stack_check = !jsfun.is_null() &&
944 (jsfun->code()->kind() == Code::FUNCTION ||
945 jsfun->code()->kind() == Code::OPTIMIZED_FUNCTION);
948 return new(zone) HCallJSFunction(
949 function, argument_count, pass_argument_count,
954 std::ostream& HBinaryCall::PrintDataTo(std::ostream& os) const { // NOLINT
955 return os << NameOf(first()) << " " << NameOf(second()) << " #"
960 std::ostream& HCallFunction::PrintDataTo(std::ostream& os) const { // NOLINT
961 os << NameOf(context()) << " " << NameOf(function());
962 if (HasVectorAndSlot()) {
963 os << " (type-feedback-vector icslot " << slot().ToInt() << ")";
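// Rewrites the bounds check to use the decomposed index: the offset addition
// and scale shift are materialized right before this instruction and
// installed as the new index operand.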
969 void HBoundsCheck::ApplyIndexChange() {
970 if (skip_check()) return;
972 DecompositionResult decomposition;
973 bool index_is_decomposable = index()->TryDecompose(&decomposition);
974 if (index_is_decomposable) {
975 DCHECK(decomposition.base() == base());
976 if (decomposition.offset() == offset() &&
977 decomposition.scale() == scale()) return;
982 ReplaceAllUsesWith(index());
984 HValue* current_index = decomposition.base();
985 int actual_offset = decomposition.offset() + offset();
986 int actual_scale = decomposition.scale() + scale();
988 HGraph* graph = block()->graph();
989 Isolate* isolate = graph->isolate();
990 Zone* zone = graph->zone();
991 HValue* context = graph->GetInvalidContext();
992 if (actual_offset != 0) {
993 HConstant* add_offset =
994 HConstant::New(isolate, zone, context, actual_offset);
995 add_offset->InsertBefore(this);
997 HAdd::New(isolate, zone, context, current_index, add_offset);
998 add->InsertBefore(this);
999 add->AssumeRepresentation(index()->representation());
1000 add->ClearFlag(kCanOverflow);
1001 current_index = add;
1004 if (actual_scale != 0) {
1005 HConstant* sar_scale = HConstant::New(isolate, zone, context, actual_scale);
1006 sar_scale->InsertBefore(this);
1008 HSar::New(isolate, zone, context, current_index, sar_scale);
1009 sar->InsertBefore(this);
1010 sar->AssumeRepresentation(index()->representation());
1011 current_index = sar;
1014 SetOperandAt(0, current_index);
1022 std::ostream& HBoundsCheck::PrintDataTo(std::ostream& os) const { // NOLINT
1023 os << NameOf(index()) << " " << NameOf(length());
1024 if (base() != NULL && (offset() != 0 || scale() != 0)) {
1026 if (base() != index()) {
1027 os << NameOf(index());
1031 os << " + " << offset() << ") >> " << scale() << ")";
1033 if (skip_check()) os << " [DISABLED]";
1038 void HBoundsCheck::InferRepresentation(HInferRepresentationPhase* h_infer) {
1039 DCHECK(CheckFlag(kFlexibleRepresentation));
1040 HValue* actual_index = index()->ActualValue();
1041 HValue* actual_length = length()->ActualValue();
1042 Representation index_rep = actual_index->representation();
1043 Representation length_rep = actual_length->representation();
1044 if (index_rep.IsTagged() && actual_index->type().IsSmi()) {
1045 index_rep = Representation::Smi();
1047 if (length_rep.IsTagged() && actual_length->type().IsSmi()) {
1048 length_rep = Representation::Smi();
1050 Representation r = index_rep.generalize(length_rep);
1051 if (r.is_more_general_than(Representation::Integer32())) {
1052 r = Representation::Integer32();
1054 UpdateRepresentation(r, h_infer, "boundscheck");
1058 Range* HBoundsCheck::InferRange(Zone* zone) {
1059 Representation r = representation();
1060 if (r.IsSmiOrInteger32() && length()->HasRange()) {
1061 int upper = length()->range()->upper() - (allow_equality() ? 0 : 1);
1064 Range* result = new(zone) Range(lower, upper);
1065 if (index()->HasRange()) {
1066 result->Intersect(index()->range());
1069 // In case of Smi representation, clamp result to Smi::kMaxValue.
1070 if (r.IsSmi()) result->ClampToSmi();
1073 return HValue::InferRange(zone);
1077 std::ostream& HBoundsCheckBaseIndexInformation::PrintDataTo(
1078 std::ostream& os) const { // NOLINT
1079 // TODO(svenpanne) This 2nd base_index() looks wrong...
1080 return os << "base: " << NameOf(base_index())
1081 << ", check: " << NameOf(base_index());
1085 std::ostream& HCallWithDescriptor::PrintDataTo(
1086 std::ostream& os) const { // NOLINT
1087 for (int i = 0; i < OperandCount(); i++) {
1088 os << NameOf(OperandAt(i)) << " ";
1090 return os << "#" << argument_count();
1094 std::ostream& HCallNewArray::PrintDataTo(std::ostream& os) const { // NOLINT
1095 os << ElementsKindToString(elements_kind()) << " ";
1096 return HBinaryCall::PrintDataTo(os);
1100 std::ostream& HCallRuntime::PrintDataTo(std::ostream& os) const { // NOLINT
1101 os << name()->ToCString().get() << " ";
1102 if (save_doubles() == kSaveFPRegs) os << "[save doubles] ";
1103 return os << "#" << argument_count();
1107 std::ostream& HClassOfTestAndBranch::PrintDataTo(
1108 std::ostream& os) const { // NOLINT
1109 return os << "class_of_test(" << NameOf(value()) << ", \""
1110 << class_name()->ToCString().get() << "\")";
1114 std::ostream& HWrapReceiver::PrintDataTo(std::ostream& os) const { // NOLINT
1115 return os << NameOf(receiver()) << " " << NameOf(function());
1119 std::ostream& HAccessArgumentsAt::PrintDataTo(
1120 std::ostream& os) const { // NOLINT
1121 return os << NameOf(arguments()) << "[" << NameOf(index()) << "], length "
1122 << NameOf(length());
1126 std::ostream& HAllocateBlockContext::PrintDataTo(
1127 std::ostream& os) const { // NOLINT
1128 return os << NameOf(context()) << " " << NameOf(function());
1132 std::ostream& HControlInstruction::PrintDataTo(
1133 std::ostream& os) const { // NOLINT
1135 bool first_block = true;
1136 for (HSuccessorIterator it(this); !it.Done(); it.Advance()) {
1137 if (!first_block) os << ", ";
1138 os << *it.Current();
1139 first_block = false;
1145 std::ostream& HUnaryControlInstruction::PrintDataTo(
1146 std::ostream& os) const { // NOLINT
1147 os << NameOf(value());
1148 return HControlInstruction::PrintDataTo(os);
1152 std::ostream& HReturn::PrintDataTo(std::ostream& os) const { // NOLINT
1153 return os << NameOf(value()) << " (pop " << NameOf(parameter_count())
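// Derives the branch input representation from the collected ToBoolean type
// feedback: types that require a tagged value force Tagged, heap numbers
// allow Double, and pure smis allow Smi.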
1158 Representation HBranch::observed_input_representation(int index) {
1159 if (expected_input_types_.Contains(ToBooleanStub::NULL_TYPE) ||
1160 expected_input_types_.Contains(ToBooleanStub::SPEC_OBJECT) ||
1161 expected_input_types_.Contains(ToBooleanStub::STRING) ||
1162 expected_input_types_.Contains(ToBooleanStub::SYMBOL)) {
1163 return Representation::Tagged();
1165 if (expected_input_types_.Contains(ToBooleanStub::UNDEFINED)) {
1166 if (expected_input_types_.Contains(ToBooleanStub::HEAP_NUMBER)) {
1167 return Representation::Double();
1169 return Representation::Tagged();
1171 if (expected_input_types_.Contains(ToBooleanStub::HEAP_NUMBER)) {
1172 return Representation::Double();
1174 if (expected_input_types_.Contains(ToBooleanStub::SMI)) {
1175 return Representation::Smi();
1177 return Representation::None();
1181 bool HBranch::KnownSuccessorBlock(HBasicBlock** block) {
1182 HValue* value = this->value();
1183 if (value->EmitAtUses()) {
1184 DCHECK(value->IsConstant());
1185 DCHECK(!value->representation().IsDouble());
1186 *block = HConstant::cast(value)->BooleanValue()
1188 : SecondSuccessor();
1196 std::ostream& HBranch::PrintDataTo(std::ostream& os) const { // NOLINT
1197 return HUnaryControlInstruction::PrintDataTo(os) << " "
1198 << expected_input_types();
1202 std::ostream& HCompareMap::PrintDataTo(std::ostream& os) const { // NOLINT
1203 os << NameOf(value()) << " (" << *map().handle() << ")";
1204 HControlInstruction::PrintDataTo(os);
1205 if (known_successor_index() == 0) {
1207 } else if (known_successor_index() == 1) {
1214 const char* HUnaryMathOperation::OpName() const {
1241 Range* HUnaryMathOperation::InferRange(Zone* zone) {
1242 Representation r = representation();
1243 if (op() == kMathClz32) return new(zone) Range(0, 32);
1244 if (r.IsSmiOrInteger32() && value()->HasRange()) {
1245 if (op() == kMathAbs) {
1246 int upper = value()->range()->upper();
1247 int lower = value()->range()->lower();
1248 bool spans_zero = value()->range()->CanBeZero();
1249 // Math.abs(kMinInt) overflows its representation, on which the
1250 // instruction deopts. Hence clamp it to kMaxInt.
1251 int abs_upper = upper == kMinInt ? kMaxInt : abs(upper);
1252 int abs_lower = lower == kMinInt ? kMaxInt : abs(lower);
1254 new(zone) Range(spans_zero ? 0 : Min(abs_lower, abs_upper),
1255 Max(abs_lower, abs_upper));
1256 // In case of Smi representation, clamp Math.abs(Smi::kMinValue) to Smi::kMaxValue.
1258 if (r.IsSmi()) result->ClampToSmi();
1262 return HValue::InferRange(zone);
1266 std::ostream& HUnaryMathOperation::PrintDataTo(
1267 std::ostream& os) const { // NOLINT
1268 return os << OpName() << " " << NameOf(value());
1272 std::ostream& HUnaryOperation::PrintDataTo(std::ostream& os) const { // NOLINT
1273 return os << NameOf(value());
1277 std::ostream& HHasInstanceTypeAndBranch::PrintDataTo(
1278 std::ostream& os) const { // NOLINT
1279 os << NameOf(value());
1281 case FIRST_JS_RECEIVER_TYPE:
1282 if (to_ == LAST_TYPE) os << " spec_object";
1284 case JS_REGEXP_TYPE:
1285 if (to_ == JS_REGEXP_TYPE) os << " reg_exp";
1288 if (to_ == JS_ARRAY_TYPE) os << " array";
1290 case JS_FUNCTION_TYPE:
1291 if (to_ == JS_FUNCTION_TYPE) os << " function";
1300 std::ostream& HTypeofIsAndBranch::PrintDataTo(
1301 std::ostream& os) const { // NOLINT
1302 os << NameOf(value()) << " == " << type_literal()->ToCString().get();
1303 return HControlInstruction::PrintDataTo(os);
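// Computes the string that the typeof operator would return for the given
// constant; used below to statically resolve typeof comparisons.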
1307 static String* TypeOfString(HConstant* constant, Isolate* isolate) {
1308 Heap* heap = isolate->heap();
1309 if (constant->HasNumberValue()) return heap->number_string();
1310 if (constant->IsUndetectable()) return heap->undefined_string();
1311 if (constant->HasStringValue()) return heap->string_string();
1312 switch (constant->GetInstanceType()) {
1313 case ODDBALL_TYPE: {
1314 Unique<Object> unique = constant->GetUnique();
1315 if (unique.IsKnownGlobal(heap->true_value()) ||
1316 unique.IsKnownGlobal(heap->false_value())) {
1317 return heap->boolean_string();
1319 if (unique.IsKnownGlobal(heap->null_value())) {
1320 return heap->object_string();
1322 DCHECK(unique.IsKnownGlobal(heap->undefined_value()));
1323 return heap->undefined_string();
1326 return heap->symbol_string();
1327 case JS_FUNCTION_TYPE:
1328 case JS_FUNCTION_PROXY_TYPE:
1329 return heap->function_string();
1331 return heap->object_string();
1336 bool HTypeofIsAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
1337 if (FLAG_fold_constants && value()->IsConstant()) {
1338 HConstant* constant = HConstant::cast(value());
1339 String* type_string = TypeOfString(constant, isolate());
1340 bool same_type = type_literal_.IsKnownGlobal(type_string);
1341 *block = same_type ? FirstSuccessor() : SecondSuccessor();
1343 } else if (value()->representation().IsSpecialization()) {
1345 type_literal_.IsKnownGlobal(isolate()->heap()->number_string());
1346 *block = number_type ? FirstSuccessor() : SecondSuccessor();
1354 std::ostream& HCheckMapValue::PrintDataTo(std::ostream& os) const { // NOLINT
1355 return os << NameOf(value()) << " " << NameOf(map());
1359 HValue* HCheckMapValue::Canonicalize() {
1360 if (map()->IsConstant()) {
1361 HConstant* c_map = HConstant::cast(map());
1362 return HCheckMaps::CreateAndInsertAfter(
1363 block()->graph()->zone(), value(), c_map->MapValue(),
1364 c_map->HasStableMapValue(), this);
1370 std::ostream& HForInPrepareMap::PrintDataTo(std::ostream& os) const { // NOLINT
1371 return os << NameOf(enumerable());
1375 std::ostream& HForInCacheArray::PrintDataTo(std::ostream& os) const { // NOLINT
1376 return os << NameOf(enumerable()) << " " << NameOf(map()) << "[" << idx_
1381 std::ostream& HLoadFieldByIndex::PrintDataTo(
1382 std::ostream& os) const { // NOLINT
1383 return os << NameOf(object()) << " " << NameOf(index());
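// Pattern matchers for HBitwise::Canonicalize: recognize "x ^ ~0" as a
// bitwise negation and "~~x" as the double negation commonly used for
// ToInt32(x).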
1387 static bool MatchLeftIsOnes(HValue* l, HValue* r, HValue** negated) {
1388 if (!l->EqualsInteger32Constant(~0)) return false;
1394 static bool MatchNegationViaXor(HValue* instr, HValue** negated) {
1395 if (!instr->IsBitwise()) return false;
1396 HBitwise* b = HBitwise::cast(instr);
1397 return (b->op() == Token::BIT_XOR) &&
1398 (MatchLeftIsOnes(b->left(), b->right(), negated) ||
1399 MatchLeftIsOnes(b->right(), b->left(), negated));
1403 static bool MatchDoubleNegation(HValue* instr, HValue** arg) {
1405 return MatchNegationViaXor(instr, &negated) &&
1406 MatchNegationViaXor(negated, arg);
1410 HValue* HBitwise::Canonicalize() {
1411 if (!representation().IsSmiOrInteger32()) return this;
1412 // If x is an int32, then x & -1 == x, x | 0 == x and x ^ 0 == x.
1413 int32_t nop_constant = (op() == Token::BIT_AND) ? -1 : 0;
1414 if (left()->EqualsInteger32Constant(nop_constant) &&
1415 !right()->CheckFlag(kUint32)) {
1418 if (right()->EqualsInteger32Constant(nop_constant) &&
1419 !left()->CheckFlag(kUint32)) {
1422 // Optimize double negation, a common pattern used for ToInt32(x).
1424 if (MatchDoubleNegation(this, &arg) && !arg->CheckFlag(kUint32)) {
1431 Representation HAdd::RepresentationFromInputs() {
1432 Representation left_rep = left()->representation();
1433 if (left_rep.IsExternal()) {
1434 return Representation::External();
1436 return HArithmeticBinaryOperation::RepresentationFromInputs();
1440 Representation HAdd::RequiredInputRepresentation(int index) {
1442 Representation left_rep = left()->representation();
1443 if (left_rep.IsExternal()) {
1444 return Representation::Integer32();
1447 return HArithmeticBinaryOperation::RequiredInputRepresentation(index);
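// Returns true if arg2 is the given integer identity element and arg1 already
// has an untagged (specialized) numeric representation, so the operation can
// be folded away.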
1451 static bool IsIdentityOperation(HValue* arg1, HValue* arg2, int32_t identity) {
1452 return arg1->representation().IsSpecialization() &&
1453 arg2->EqualsInteger32Constant(identity);
1457 HValue* HAdd::Canonicalize() {
1458 // Adding 0 is an identity operation except in case of -0: -0 + 0 = +0
1459 if (IsIdentityOperation(left(), right(), 0) &&
1460 !left()->representation().IsDouble()) { // Left could be -0.
1463 if (IsIdentityOperation(right(), left(), 0) &&
1464 !left()->representation().IsDouble()) { // Right could be -0.
1471 HValue* HSub::Canonicalize() {
1472 if (IsIdentityOperation(left(), right(), 0)) return left();
1477 HValue* HMul::Canonicalize() {
1478 if (IsIdentityOperation(left(), right(), 1)) return left();
1479 if (IsIdentityOperation(right(), left(), 1)) return right();
1484 bool HMul::MulMinusOne() {
1485 if (left()->EqualsInteger32Constant(-1) ||
1486 right()->EqualsInteger32Constant(-1)) {
1494 HValue* HMod::Canonicalize() {
1499 HValue* HDiv::Canonicalize() {
1500 if (IsIdentityOperation(left(), right(), 1)) return left();
1505 HValue* HChange::Canonicalize() {
1506 return (from().Equals(to())) ? value() : this;
1510 HValue* HWrapReceiver::Canonicalize() {
1511 if (HasNoUses()) return NULL;
1512 if (receiver()->type().IsJSObject()) {
1519 std::ostream& HTypeof::PrintDataTo(std::ostream& os) const { // NOLINT
1520 return os << NameOf(value());
1524 HInstruction* HForceRepresentation::New(Isolate* isolate, Zone* zone,
1525 HValue* context, HValue* value,
1526 Representation representation) {
1527 if (FLAG_fold_constants && value->IsConstant()) {
1528 HConstant* c = HConstant::cast(value);
1529 c = c->CopyToRepresentation(representation, zone);
1530 if (c != NULL) return c;
1532 return new(zone) HForceRepresentation(value, representation);
1536 std::ostream& HForceRepresentation::PrintDataTo(
1537 std::ostream& os) const { // NOLINT
1538 return os << representation().Mnemonic() << " " << NameOf(value());
1542 std::ostream& HChange::PrintDataTo(std::ostream& os) const { // NOLINT
1543 HUnaryOperation::PrintDataTo(os);
1544 os << " " << from().Mnemonic() << " to " << to().Mnemonic();
1546 if (CanTruncateToSmi()) os << " truncating-smi";
1547 if (CanTruncateToInt32()) os << " truncating-int32";
1548 if (CheckFlag(kBailoutOnMinusZero)) os << " -0?";
1549 if (CheckFlag(kAllowUndefinedAsNaN)) os << " allow-undefined-as-nan";
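// Math.floor and Math.round of a value that is already an integer are no-ops,
// and Math.floor of an int32 division can be strength-reduced to
// HMathFloorOfDiv below.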
1554 HValue* HUnaryMathOperation::Canonicalize() {
1555 if (op() == kMathRound || op() == kMathFloor) {
1556 HValue* val = value();
1557 if (val->IsChange()) val = HChange::cast(val)->value();
1558 if (val->representation().IsSmiOrInteger32()) {
1559 if (val->representation().Equals(representation())) return val;
1560 return Prepend(new(block()->zone()) HChange(
1561 val, representation(), false, false));
1564 if (op() == kMathFloor && value()->IsDiv() && value()->HasOneUse()) {
1565 HDiv* hdiv = HDiv::cast(value());
1567 HValue* left = hdiv->left();
1568 if (left->representation().IsInteger32()) {
1569 // A value with an integer representation does not need to be transformed.
1570 } else if (left->IsChange() && HChange::cast(left)->from().IsInteger32()) {
1571 // A change from an integer32 can be replaced by the integer32 value.
1572 left = HChange::cast(left)->value();
1573 } else if (hdiv->observed_input_representation(1).IsSmiOrInteger32()) {
1574 left = Prepend(new(block()->zone()) HChange(
1575 left, Representation::Integer32(), false, false));
1580 HValue* right = hdiv->right();
1581 if (right->IsInteger32Constant()) {
1582 right = Prepend(HConstant::cast(right)->CopyToRepresentation(
1583 Representation::Integer32(), right->block()->zone()));
1584 } else if (right->representation().IsInteger32()) {
1585 // A value with an integer representation does not need to be transformed.
1586 } else if (right->IsChange() &&
1587 HChange::cast(right)->from().IsInteger32()) {
1588 // A change from an integer32 can be replaced by the integer32 value.
1589 right = HChange::cast(right)->value();
1590 } else if (hdiv->observed_input_representation(2).IsSmiOrInteger32()) {
1591 right = Prepend(new(block()->zone()) HChange(
1592 right, Representation::Integer32(), false, false));
1597 return Prepend(HMathFloorOfDiv::New(
1598 block()->graph()->isolate(), block()->zone(), context(), left, right));
1604 HValue* HCheckInstanceType::Canonicalize() {
1605 if ((check_ == IS_SPEC_OBJECT && value()->type().IsJSObject()) ||
1606 (check_ == IS_JS_ARRAY && value()->type().IsJSArray()) ||
1607 (check_ == IS_STRING && value()->type().IsString())) {
1611 if (check_ == IS_INTERNALIZED_STRING && value()->IsConstant()) {
1612 if (HConstant::cast(value())->HasInternalizedStringValue()) {
1620 void HCheckInstanceType::GetCheckInterval(InstanceType* first,
1621 InstanceType* last) {
1622 DCHECK(is_interval_check());
1624 case IS_SPEC_OBJECT:
1625 *first = FIRST_SPEC_OBJECT_TYPE;
1626 *last = LAST_SPEC_OBJECT_TYPE;
1629 *first = *last = JS_ARRAY_TYPE;
1637 void HCheckInstanceType::GetCheckMaskAndTag(uint8_t* mask, uint8_t* tag) {
1638 DCHECK(!is_interval_check());
1641 *mask = kIsNotStringMask;
1644 case IS_INTERNALIZED_STRING:
1645 *mask = kIsNotStringMask | kIsNotInternalizedMask;
1646 *tag = kInternalizedTag;
1654 std::ostream& HCheckMaps::PrintDataTo(std::ostream& os) const { // NOLINT
1655 os << NameOf(value()) << " [" << *maps()->at(0).handle();
1656 for (int i = 1; i < maps()->size(); ++i) {
1657 os << "," << *maps()->at(i).handle();
1660 if (IsStabilityCheck()) os << "(stability-check)";
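// If the checked value is a constant whose stable map is among the expected
// maps, narrow the map set to that map and demote the check to a pure
// stability check.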
1665 HValue* HCheckMaps::Canonicalize() {
1666 if (!IsStabilityCheck() && maps_are_stable() && value()->IsConstant()) {
1667 HConstant* c_value = HConstant::cast(value());
1668 if (c_value->HasObjectMap()) {
1669 for (int i = 0; i < maps()->size(); ++i) {
1670 if (c_value->ObjectMap() == maps()->at(i)) {
1671 if (maps()->size() > 1) {
1672 set_maps(new(block()->graph()->zone()) UniqueSet<Map>(
1673 maps()->at(i), block()->graph()->zone()));
1675 MarkAsStabilityCheck();
1685 std::ostream& HCheckValue::PrintDataTo(std::ostream& os) const { // NOLINT
1686 return os << NameOf(value()) << " " << Brief(*object().handle());
1690 HValue* HCheckValue::Canonicalize() {
1691 return (value()->IsConstant() &&
1692 HConstant::cast(value())->EqualsUnique(object_)) ? NULL : this;
1696 const char* HCheckInstanceType::GetCheckName() const {
1698 case IS_SPEC_OBJECT: return "object";
1699 case IS_JS_ARRAY: return "array";
1700 case IS_STRING: return "string";
1701 case IS_INTERNALIZED_STRING: return "internalized_string";
1708 std::ostream& HCheckInstanceType::PrintDataTo(
1709 std::ostream& os) const { // NOLINT
1710 os << GetCheckName() << " ";
1711 return HUnaryOperation::PrintDataTo(os);
1715 std::ostream& HCallStub::PrintDataTo(std::ostream& os) const { // NOLINT
1716 os << CodeStub::MajorName(major_key_, false) << " ";
1717 return HUnaryCall::PrintDataTo(os);
1721 Code::Flags HTailCallThroughMegamorphicCache::flags() const {
1722 Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags(
1723 Code::ComputeHandlerFlags(Code::LOAD_IC));
1728 std::ostream& HTailCallThroughMegamorphicCache::PrintDataTo(
1729 std::ostream& os) const { // NOLINT
1730 for (int i = 0; i < OperandCount(); i++) {
1731 os << NameOf(OperandAt(i)) << " ";
1733 return os << "flags: " << flags();
1737 std::ostream& HUnknownOSRValue::PrintDataTo(std::ostream& os) const { // NOLINT
1738 const char* type = "expression";
1739 if (environment_->is_local_index(index_)) type = "local";
1740 if (environment_->is_special_index(index_)) type = "special";
1741 if (environment_->is_parameter_index(index_)) type = "parameter";
1742 return os << type << " @ " << index_;
1746 std::ostream& HInstanceOf::PrintDataTo(std::ostream& os) const { // NOLINT
1747 return os << NameOf(left()) << " " << NameOf(right()) << " "
1748 << NameOf(context());
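// Default range inference: Smi-representation values get the full Smi range,
// everything else gets an unconstrained range that may include -0 unless all
// uses truncate to int32.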
1752 Range* HValue::InferRange(Zone* zone) {
1754 if (representation().IsSmi() || type().IsSmi()) {
1755 result = new(zone) Range(Smi::kMinValue, Smi::kMaxValue);
1756 result->set_can_be_minus_zero(false);
1758 result = new(zone) Range();
1759 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32));
1760 // TODO(jkummerow): The range cannot be minus zero when the upper type
1761 // bound is Integer32.
1767 Range* HChange::InferRange(Zone* zone) {
1768 Range* input_range = value()->range();
1769 if (from().IsInteger32() && !value()->CheckFlag(HInstruction::kUint32) &&
1772 input_range != NULL &&
1773 input_range->IsInSmiRange()))) {
1774 set_type(HType::Smi());
1775 ClearChangesFlag(kNewSpacePromotion);
1777 if (to().IsSmiOrTagged() &&
1778 input_range != NULL &&
1779 input_range->IsInSmiRange() &&
1780 (!SmiValuesAre32Bits() ||
1781 !value()->CheckFlag(HValue::kUint32) ||
1782 input_range->upper() != kMaxInt)) {
1783 // The Range class can't express upper bounds in the (kMaxInt, kMaxUint32]
1784 // interval, so we treat kMaxInt as a sentinel for this entire interval.
1785 ClearFlag(kCanOverflow);
1787 Range* result = (input_range != NULL)
1788 ? input_range->Copy(zone)
1789 : HValue::InferRange(zone);
1790 result->set_can_be_minus_zero(!to().IsSmiOrInteger32() ||
1791 !(CheckFlag(kAllUsesTruncatingToInt32) ||
1792 CheckFlag(kAllUsesTruncatingToSmi)));
1793 if (to().IsSmi()) result->ClampToSmi();
1798 Range* HConstant::InferRange(Zone* zone) {
1799 if (HasInteger32Value()) {
1800 Range* result = new(zone) Range(int32_value_, int32_value_);
1801 result->set_can_be_minus_zero(false);
1804 return HValue::InferRange(zone);
1808 SourcePosition HPhi::position() const { return block()->first()->position(); }
1811 Range* HPhi::InferRange(Zone* zone) {
1812 Representation r = representation();
1813 if (r.IsSmiOrInteger32()) {
1814 if (block()->IsLoopHeader()) {
1815 Range* range = r.IsSmi()
1816 ? new(zone) Range(Smi::kMinValue, Smi::kMaxValue)
1817 : new(zone) Range(kMinInt, kMaxInt);
1820 Range* range = OperandAt(0)->range()->Copy(zone);
1821 for (int i = 1; i < OperandCount(); ++i) {
1822 range->Union(OperandAt(i)->range());
1827 return HValue::InferRange(zone);
1832 Range* HAdd::InferRange(Zone* zone) {
1833 Representation r = representation();
1834 if (r.IsSmiOrInteger32()) {
1835 Range* a = left()->range();
1836 Range* b = right()->range();
1837 Range* res = a->Copy(zone);
1838 if (!res->AddAndCheckOverflow(r, b) ||
1839 (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
1840 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
1841 ClearFlag(kCanOverflow);
1843 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
1844 !CheckFlag(kAllUsesTruncatingToInt32) &&
1845 a->CanBeMinusZero() && b->CanBeMinusZero());
1848 return HValue::InferRange(zone);
1853 Range* HSub::InferRange(Zone* zone) {
1854 Representation r = representation();
1855 if (r.IsSmiOrInteger32()) {
1856 Range* a = left()->range();
1857 Range* b = right()->range();
1858 Range* res = a->Copy(zone);
1859 if (!res->SubAndCheckOverflow(r, b) ||
1860 (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
1861 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
1862 ClearFlag(kCanOverflow);
1864 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
1865 !CheckFlag(kAllUsesTruncatingToInt32) &&
1866 a->CanBeMinusZero() && b->CanBeZero());
1869 return HValue::InferRange(zone);
1874 Range* HMul::InferRange(Zone* zone) {
1875 Representation r = representation();
1876 if (r.IsSmiOrInteger32()) {
1877 Range* a = left()->range();
1878 Range* b = right()->range();
1879 Range* res = a->Copy(zone);
1880 if (!res->MulAndCheckOverflow(r, b) ||
1881 (((r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
1882 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) &&
1884 // Truncated int multiplication is too precise and therefore not the
1885 // same as converting to Double and back.
1886 // Handle truncated integer multiplication by -1 as a special case.
1887 ClearFlag(kCanOverflow);
1889 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
1890 !CheckFlag(kAllUsesTruncatingToInt32) &&
1891 ((a->CanBeZero() && b->CanBeNegative()) ||
1892 (a->CanBeNegative() && b->CanBeZero())));
1895 return HValue::InferRange(zone);
1900 Range* HDiv::InferRange(Zone* zone) {
1901 if (representation().IsInteger32()) {
1902 Range* a = left()->range();
1903 Range* b = right()->range();
1904 Range* result = new(zone) Range();
1905 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
1906 (a->CanBeMinusZero() ||
1907 (a->CanBeZero() && b->CanBeNegative())));
1908 if (!a->Includes(kMinInt) || !b->Includes(-1)) {
1909 ClearFlag(kCanOverflow);
1912 if (!b->CanBeZero()) {
1913 ClearFlag(kCanBeDivByZero);
1917 return HValue::InferRange(zone);
1922 Range* HMathFloorOfDiv::InferRange(Zone* zone) {
1923 if (representation().IsInteger32()) {
1924 Range* a = left()->range();
1925 Range* b = right()->range();
1926 Range* result = new(zone) Range();
1927 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
1928 (a->CanBeMinusZero() ||
1929 (a->CanBeZero() && b->CanBeNegative())));
1930 if (!a->Includes(kMinInt)) {
1931 ClearFlag(kLeftCanBeMinInt);
1934 if (!a->CanBeNegative()) {
1935 ClearFlag(HValue::kLeftCanBeNegative);
1938 if (!a->CanBePositive()) {
1939 ClearFlag(HValue::kLeftCanBePositive);
1942 if (!a->Includes(kMinInt) || !b->Includes(-1)) {
1943 ClearFlag(kCanOverflow);
1946 if (!b->CanBeZero()) {
1947 ClearFlag(kCanBeDivByZero);
1951 return HValue::InferRange(zone);
1956 // Returns the absolute value of its argument minus one, avoiding undefined
1957 // behavior at kMinInt.
1958 static int32_t AbsMinus1(int32_t a) { return a < 0 ? -(a + 1) : (a - 1); }
1961 Range* HMod::InferRange(Zone* zone) {
1962 if (representation().IsInteger32()) {
1963 Range* a = left()->range();
1964 Range* b = right()->range();
1966 // The magnitude of the modulus is bounded by the right operand.
1967 int32_t positive_bound = Max(AbsMinus1(b->lower()), AbsMinus1(b->upper()));
1969 // The result of the modulo operation has the sign of its left operand.
1970 bool left_can_be_negative = a->CanBeMinusZero() || a->CanBeNegative();
1971 Range* result = new(zone) Range(left_can_be_negative ? -positive_bound : 0,
1972 a->CanBePositive() ? positive_bound : 0);
1974 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
1975 left_can_be_negative);
1977 if (!a->CanBeNegative()) {
1978 ClearFlag(HValue::kLeftCanBeNegative);
1981 if (!a->Includes(kMinInt) || !b->Includes(-1)) {
1982 ClearFlag(HValue::kCanOverflow);
1985 if (!b->CanBeZero()) {
1986 ClearFlag(HValue::kCanBeDivByZero);
1990 return HValue::InferRange(zone);
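// Recognizes a two-operand loop phi whose back-edge value has the form
// "phi +/- constant" and wraps it in an InductionVariableData; returns NULL
// for phis that are not induction variables.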
1995 InductionVariableData* InductionVariableData::ExaminePhi(HPhi* phi) {
1996 if (phi->block()->loop_information() == NULL) return NULL;
1997 if (phi->OperandCount() != 2) return NULL;
1998 int32_t candidate_increment;
2000 candidate_increment = ComputeIncrement(phi, phi->OperandAt(0));
2001 if (candidate_increment != 0) {
2002 return new(phi->block()->graph()->zone())
2003 InductionVariableData(phi, phi->OperandAt(1), candidate_increment);
2006 candidate_increment = ComputeIncrement(phi, phi->OperandAt(1));
2007 if (candidate_increment != 0) {
2008 return new(phi->block()->graph()->zone())
2009 InductionVariableData(phi, phi->OperandAt(0), candidate_increment);
2017 * This function tries to match the following patterns (and all the relevant
2018 * variants related to |, & and + being commutative):
2019 * base | constant_or_mask
2020 * base & constant_and_mask
2021 * (base + constant_offset) & constant_and_mask
2022 * (base - constant_offset) & constant_and_mask
2024 void InductionVariableData::DecomposeBitwise(
2026 BitwiseDecompositionResult* result) {
2027 HValue* base = IgnoreOsrValue(value);
2028 result->base = value;
2030 if (!base->representation().IsInteger32()) return;
2032 if (base->IsBitwise()) {
2033 bool allow_offset = false;
2036 HBitwise* bitwise = HBitwise::cast(base);
2037 if (bitwise->right()->IsInteger32Constant()) {
2038 mask = bitwise->right()->GetInteger32Constant();
2039 base = bitwise->left();
2040 } else if (bitwise->left()->IsInteger32Constant()) {
2041 mask = bitwise->left()->GetInteger32Constant();
2042 base = bitwise->right();
2046 if (bitwise->op() == Token::BIT_AND) {
2047 result->and_mask = mask;
2048 allow_offset = true;
2049 } else if (bitwise->op() == Token::BIT_OR) {
2050 result->or_mask = mask;
2055 result->context = bitwise->context();
2058 if (base->IsAdd()) {
2059 HAdd* add = HAdd::cast(base);
2060 if (add->right()->IsInteger32Constant()) {
2062 } else if (add->left()->IsInteger32Constant()) {
2063 base = add->right();
2065 } else if (base->IsSub()) {
2066 HSub* sub = HSub::cast(base);
2067 if (sub->right()->IsInteger32Constant()) {
2073 result->base = base;
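// Records a bounds check controlled by this induction variable, grouping
// checks that test against the same length so they can share index
// computations.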
2078 void InductionVariableData::AddCheck(HBoundsCheck* check,
2079 int32_t upper_limit) {
2080 DCHECK(limit_validity() != NULL);
2081 if (limit_validity() != check->block() &&
2082 !limit_validity()->Dominates(check->block())) return;
2083 if (!phi()->block()->current_loop()->IsNestedInThisLoop(
2084 check->block()->current_loop())) return;
2086 ChecksRelatedToLength* length_checks = checks();
2087 while (length_checks != NULL) {
2088 if (length_checks->length() == check->length()) break;
2089 length_checks = length_checks->next();
2091 if (length_checks == NULL) {
2092 length_checks = new(check->block()->zone())
2093 ChecksRelatedToLength(check->length(), checks());
2094 checks_ = length_checks;
2097 length_checks->AddCheck(check, upper_limit);
2101 void InductionVariableData::ChecksRelatedToLength::CloseCurrentBlock() {
2102 if (checks() != NULL) {
2103 InductionVariableCheck* c = checks();
2104 HBasicBlock* current_block = c->check()->block();
2105 while (c != NULL && c->check()->block() == current_block) {
2106 c->set_upper_limit(current_upper_limit_);
2113 void InductionVariableData::ChecksRelatedToLength::UseNewIndexInCurrentBlock(
2118 DCHECK(first_check_in_block() != NULL);
2119 HValue* previous_index = first_check_in_block()->index();
2120 DCHECK(context != NULL);
2122 Zone* zone = index_base->block()->graph()->zone();
2123 Isolate* isolate = index_base->block()->graph()->isolate();
2124 set_added_constant(HConstant::New(isolate, zone, context, mask));
2125 if (added_index() != NULL) {
2126 added_constant()->InsertBefore(added_index());
2128 added_constant()->InsertBefore(first_check_in_block());
2131 if (added_index() == NULL) {
2132 first_check_in_block()->ReplaceAllUsesWith(first_check_in_block()->index());
2133 HInstruction* new_index = HBitwise::New(isolate, zone, context, token,
2134 index_base, added_constant());
2135 DCHECK(new_index->IsBitwise());
2136 new_index->ClearAllSideEffects();
2137 new_index->AssumeRepresentation(Representation::Integer32());
2138 set_added_index(HBitwise::cast(new_index));
2139 added_index()->InsertBefore(first_check_in_block());
2141 DCHECK(added_index()->op() == token);
2143 added_index()->SetOperandAt(1, index_base);
2144 added_index()->SetOperandAt(2, added_constant());
2145 first_check_in_block()->SetOperandAt(0, added_index());
2146 if (previous_index->HasNoUses()) {
2147 previous_index->DeleteAndReplaceWith(NULL);
2151 void InductionVariableData::ChecksRelatedToLength::AddCheck(
2152 HBoundsCheck* check,
2153 int32_t upper_limit) {
2154 BitwiseDecompositionResult decomposition;
2155 InductionVariableData::DecomposeBitwise(check->index(), &decomposition);
2157 if (first_check_in_block() == NULL ||
2158 first_check_in_block()->block() != check->block()) {
2159 CloseCurrentBlock();
2161 first_check_in_block_ = check;
2162 set_added_index(NULL);
2163 set_added_constant(NULL);
2164 current_and_mask_in_block_ = decomposition.and_mask;
2165 current_or_mask_in_block_ = decomposition.or_mask;
2166 current_upper_limit_ = upper_limit;
2168 InductionVariableCheck* new_check = new(check->block()->graph()->zone())
2169 InductionVariableCheck(check, checks_, upper_limit);
2170 checks_ = new_check;
2174 if (upper_limit > current_upper_limit()) {
2175 current_upper_limit_ = upper_limit;
2178 if (decomposition.and_mask != 0 &&
2179 current_or_mask_in_block() == 0) {
2180 if (current_and_mask_in_block() == 0 ||
2181 decomposition.and_mask > current_and_mask_in_block()) {
2182 UseNewIndexInCurrentBlock(Token::BIT_AND,
2183 decomposition.and_mask,
2185 decomposition.context);
2186 current_and_mask_in_block_ = decomposition.and_mask;
2188 check->set_skip_check();
2190 if (current_and_mask_in_block() == 0) {
2191 if (decomposition.or_mask > current_or_mask_in_block()) {
2192 UseNewIndexInCurrentBlock(Token::BIT_OR,
2193 decomposition.or_mask,
2195 decomposition.context);
2196 current_or_mask_in_block_ = decomposition.or_mask;
2198 check->set_skip_check();
2201 if (!check->skip_check()) {
2202 InductionVariableCheck* new_check = new(check->block()->graph()->zone())
2203 InductionVariableCheck(check, checks_, upper_limit);
2204 checks_ = new_check;
2210 * This method detects if phi is an induction variable, with phi_operand as
2211 * its "incremented" value (the other operand would be the "base" value).
2213 * It checks if phi_operand has the form "phi + constant".
2214 * If yes, the constant is the increment that the induction variable gets at
2215 * every loop iteration.
2216 * Otherwise it returns 0.
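 *
 * An illustrative sketch (not part of the original comment): for the loop
 *   for (var i = 0; i < n; i++) { ... }
 * the loop phi for "i" has operands [0, i + 1]; ComputeIncrement(phi, i + 1)
 * matches the HAdd "phi + 1" and returns 1, while a decrementing loop using
 * "i--" would go through the HSub case and return -1.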
2218 int32_t InductionVariableData::ComputeIncrement(HPhi* phi,
2219 HValue* phi_operand) {
2220 if (!phi_operand->representation().IsSmiOrInteger32()) return 0;
2222 if (phi_operand->IsAdd()) {
2223 HAdd* operation = HAdd::cast(phi_operand);
2224 if (operation->left() == phi &&
2225 operation->right()->IsInteger32Constant()) {
2226 return operation->right()->GetInteger32Constant();
2227 } else if (operation->right() == phi &&
2228 operation->left()->IsInteger32Constant()) {
2229 return operation->left()->GetInteger32Constant();
2231 } else if (phi_operand->IsSub()) {
2232 HSub* operation = HSub::cast(phi_operand);
2233 if (operation->left() == phi &&
2234 operation->right()->IsInteger32Constant()) {
2235 return -operation->right()->GetInteger32Constant();
2244 * Swaps the information in "update" with the one contained in "this".
2245 * The swapping is important because this method is used while doing a
2246 * dominator tree traversal, and "update" will retain the old data that
2247 * will be restored while backtracking.
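 *
 * Usage sketch (an inferred flow, not stated in the original comment): when
 * the dominator walk enters a guarded block it calls
 * UpdateAdditionalLimit(&update), installing the new limit and leaving the
 * previous one in "update"; calling it again with the same "update" while
 * leaving the block swaps the old limit back into place.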
2249 void InductionVariableData::UpdateAdditionalLimit(
2250 InductionVariableLimitUpdate* update) {
2251 DCHECK(update->updated_variable == this);
2252 if (update->limit_is_upper) {
2253 swap(&additional_upper_limit_, &update->limit);
2254 swap(&additional_upper_limit_is_included_, &update->limit_is_included);
2256 swap(&additional_lower_limit_, &update->limit);
2257 swap(&additional_lower_limit_is_included_, &update->limit_is_included);
2262 int32_t InductionVariableData::ComputeUpperLimit(int32_t and_mask,
2264 // Should be Smi::kMaxValue but it must fit 32 bits; lower is safe anyway.
2265 const int32_t MAX_LIMIT = 1 << 30;
2267 int32_t result = MAX_LIMIT;
2269 if (limit() != NULL &&
2270 limit()->IsInteger32Constant()) {
2271 int32_t limit_value = limit()->GetInteger32Constant();
2272 if (!limit_included()) {
2275 if (limit_value < result) result = limit_value;
2278 if (additional_upper_limit() != NULL &&
2279 additional_upper_limit()->IsInteger32Constant()) {
2280 int32_t limit_value = additional_upper_limit()->GetInteger32Constant();
2281 if (!additional_upper_limit_is_included()) {
2284 if (limit_value < result) result = limit_value;
2287 if (and_mask > 0 && and_mask < MAX_LIMIT) {
2288 if (and_mask < result) result = and_mask;
2292 // Add the effect of the or_mask.
2295 return result >= MAX_LIMIT ? kNoLimit : result;
2299 HValue* InductionVariableData::IgnoreOsrValue(HValue* v) {
2300 if (!v->IsPhi()) return v;
2301 HPhi* phi = HPhi::cast(v);
2302 if (phi->OperandCount() != 2) return v;
2303 if (phi->OperandAt(0)->block()->is_osr_entry()) {
2304 return phi->OperandAt(1);
2305 } else if (phi->OperandAt(1)->block()->is_osr_entry()) {
2306 return phi->OperandAt(0);
2313 InductionVariableData* InductionVariableData::GetInductionVariableData(
2315 v = IgnoreOsrValue(v);
2317 return HPhi::cast(v)->induction_variable_data();
2324 * Check if a conditional branch to "current_branch" with token "token" is
2325 * the branch that keeps the induction loop running (and, conversely, will
2326 * terminate it if the "other_branch" is taken).
2328 * Three conditions must be met:
2329 * - "current_branch" must be in the induction loop.
2330 * - "other_branch" must be out of the induction loop.
2331 * - "token" and the induction increment must be "compatible": the token should
2332 * be a condition that keeps the execution inside the loop until the limit is reached.
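 *
 * Example (a sketch, not from the original comment): in
 *   for (var i = 0; i < n; i++) { body }
 * the branch on "i < n" that falls into the loop body is the loop guard: the
 * body is inside the loop, the exit edge leaves it, and Token::LT is
 * compatible with the positive increment of +1.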
2335 bool InductionVariableData::CheckIfBranchIsLoopGuard(
2337 HBasicBlock* current_branch,
2338 HBasicBlock* other_branch) {
2339 if (!phi()->block()->current_loop()->IsNestedInThisLoop(
2340 current_branch->current_loop())) {
2344 if (phi()->block()->current_loop()->IsNestedInThisLoop(
2345 other_branch->current_loop())) {
2349 if (increment() > 0 && (token == Token::LT || token == Token::LTE)) {
2352 if (increment() < 0 && (token == Token::GT || token == Token::GTE)) {
2355 if (Token::IsInequalityOp(token) && (increment() == 1 || increment() == -1)) {
2363 void InductionVariableData::ComputeLimitFromPredecessorBlock(
2365 LimitFromPredecessorBlock* result) {
2366 if (block->predecessors()->length() != 1) return;
2367 HBasicBlock* predecessor = block->predecessors()->at(0);
2368 HInstruction* end = predecessor->last();
2370 if (!end->IsCompareNumericAndBranch()) return;
2371 HCompareNumericAndBranch* branch = HCompareNumericAndBranch::cast(end);
2373 Token::Value token = branch->token();
2374 if (!Token::IsArithmeticCompareOp(token)) return;
2376 HBasicBlock* other_target;
2377 if (block == branch->SuccessorAt(0)) {
2378 other_target = branch->SuccessorAt(1);
2380 other_target = branch->SuccessorAt(0);
2381 token = Token::NegateCompareOp(token);
2382 DCHECK(block == branch->SuccessorAt(1));
2385 InductionVariableData* data;
2387 data = GetInductionVariableData(branch->left());
2388 HValue* limit = branch->right();
2390 data = GetInductionVariableData(branch->right());
2391 token = Token::ReverseCompareOp(token);
2392 limit = branch->left();
2396 result->variable = data;
2397 result->token = token;
2398 result->limit = limit;
2399 result->other_target = other_target;
2405 * Compute the limit that is imposed on an induction variable when entering "block".
2407 * If the limit is the "proper" induction limit (the one that makes the loop
2408 * terminate when the induction variable reaches it) it is stored directly in
2409 * the induction variable data.
2410 * Otherwise the limit is written in "additional_limit" and the method returns true.
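 *
 * Sketch (not part of the original comment): in
 *   for (var i = 0; i < n; i++) { if (i < k) { ... } }
 * the guard "i < n" is the proper induction limit and is stored on the
 * induction variable data itself, whereas the inner "i < k" branch only
 * contributes an additional limit recorded in "additional_limit".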
2413 bool InductionVariableData::ComputeInductionVariableLimit(
2415 InductionVariableLimitUpdate* additional_limit) {
2416 LimitFromPredecessorBlock limit;
2417 ComputeLimitFromPredecessorBlock(block, &limit);
2418 if (!limit.LimitIsValid()) return false;
2420 if (limit.variable->CheckIfBranchIsLoopGuard(limit.token,
2422 limit.other_target)) {
2423 limit.variable->limit_ = limit.limit;
2424 limit.variable->limit_included_ = limit.LimitIsIncluded();
2425 limit.variable->limit_validity_ = block;
2426 limit.variable->induction_exit_block_ = block->predecessors()->at(0);
2427 limit.variable->induction_exit_target_ = limit.other_target;
2430 additional_limit->updated_variable = limit.variable;
2431 additional_limit->limit = limit.limit;
2432 additional_limit->limit_is_upper = limit.LimitIsUpper();
2433 additional_limit->limit_is_included = limit.LimitIsIncluded();
2439 Range* HMathMinMax::InferRange(Zone* zone) {
2440 if (representation().IsSmiOrInteger32()) {
2441 Range* a = left()->range();
2442 Range* b = right()->range();
2443 Range* res = a->Copy(zone);
2444 if (operation_ == kMathMax) {
2445 res->CombinedMax(b);
2447 DCHECK(operation_ == kMathMin);
2448 res->CombinedMin(b);
2452 return HValue::InferRange(zone);
2457 void HPushArguments::AddInput(HValue* value) {
2458 inputs_.Add(NULL, value->block()->zone());
2459 SetOperandAt(OperandCount() - 1, value);
2463 std::ostream& HPhi::PrintTo(std::ostream& os) const { // NOLINT
2465 for (int i = 0; i < OperandCount(); ++i) {
2466 os << " " << NameOf(OperandAt(i)) << " ";
2468 return os << " uses:" << UseCount() << "_"
2469 << smi_non_phi_uses() + smi_indirect_uses() << "s_"
2470 << int32_non_phi_uses() + int32_indirect_uses() << "i_"
2471 << double_non_phi_uses() + double_indirect_uses() << "d_"
2472 << tagged_non_phi_uses() + tagged_indirect_uses() << "t"
2473 << TypeOf(this) << "]";
2477 void HPhi::AddInput(HValue* value) {
2478 inputs_.Add(NULL, value->block()->zone());
2479 SetOperandAt(OperandCount() - 1, value);
2480 // Mark phis that may have 'arguments' directly or indirectly as an operand.
2481 if (!CheckFlag(kIsArguments) && value->CheckFlag(kIsArguments)) {
2482 SetFlag(kIsArguments);
2487 bool HPhi::HasRealUses() {
2488 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
2489 if (!it.value()->IsPhi()) return true;
2495 HValue* HPhi::GetRedundantReplacement() {
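// A phi is redundant when every operand is either the phi itself or one
// single other value (the "candidate" found below); that candidate can then
// replace the phi, and NULL is returned when no such candidate exists.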
2496 HValue* candidate = NULL;
2497 int count = OperandCount();
2499 while (position < count && candidate == NULL) {
2500 HValue* current = OperandAt(position++);
2501 if (current != this) candidate = current;
2503 while (position < count) {
2504 HValue* current = OperandAt(position++);
2505 if (current != this && current != candidate) return NULL;
2507 DCHECK(candidate != this);
2512 void HPhi::DeleteFromGraph() {
2513 DCHECK(block() != NULL);
2514 block()->RemovePhi(this);
2515 DCHECK(block() == NULL);
2519 void HPhi::InitRealUses(int phi_id) {
2520 // Initialize real uses.
2522 // Compute a conservative approximation of truncating uses before inferring
2523 // representations. The proper, exact computation will be done later, when
2524 // inserting representation changes.
2525 SetFlag(kTruncatingToSmi);
2526 SetFlag(kTruncatingToInt32);
2527 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
2528 HValue* value = it.value();
2529 if (!value->IsPhi()) {
2530 Representation rep = value->observed_input_representation(it.index());
2531 non_phi_uses_[rep.kind()] += 1;
2532 if (FLAG_trace_representation) {
2533 PrintF("#%d Phi is used by real #%d %s as %s\n",
2534 id(), value->id(), value->Mnemonic(), rep.Mnemonic());
2536 if (!value->IsSimulate()) {
2537 if (!value->CheckFlag(kTruncatingToSmi)) {
2538 ClearFlag(kTruncatingToSmi);
2540 if (!value->CheckFlag(kTruncatingToInt32)) {
2541 ClearFlag(kTruncatingToInt32);
2549 void HPhi::AddNonPhiUsesFrom(HPhi* other) {
2550 if (FLAG_trace_representation) {
2551 PrintF("adding to #%d Phi uses of #%d Phi: s%d i%d d%d t%d\n",
2553 other->non_phi_uses_[Representation::kSmi],
2554 other->non_phi_uses_[Representation::kInteger32],
2555 other->non_phi_uses_[Representation::kDouble],
2556 other->non_phi_uses_[Representation::kTagged]);
2559 for (int i = 0; i < Representation::kNumRepresentations; i++) {
2560 indirect_uses_[i] += other->non_phi_uses_[i];
2565 void HPhi::AddIndirectUsesTo(int* dest) {
2566 for (int i = 0; i < Representation::kNumRepresentations; i++) {
2567 dest[i] += indirect_uses_[i];
2572 void HSimulate::MergeWith(ZoneList<HSimulate*>* list) {
2573 while (!list->is_empty()) {
2574 HSimulate* from = list->RemoveLast();
2575 ZoneList<HValue*>* from_values = &from->values_;
2576 for (int i = 0; i < from_values->length(); ++i) {
2577 if (from->HasAssignedIndexAt(i)) {
2578 int index = from->GetAssignedIndexAt(i);
2579 if (HasValueForIndex(index)) continue;
2580 AddAssignedValue(index, from_values->at(i));
2582 if (pop_count_ > 0) {
2585 AddPushedValue(from_values->at(i));
2589 pop_count_ += from->pop_count_;
2590 from->DeleteAndReplaceWith(NULL);
2595 std::ostream& HSimulate::PrintDataTo(std::ostream& os) const { // NOLINT
2596 os << "id=" << ast_id().ToInt();
2597 if (pop_count_ > 0) os << " pop " << pop_count_;
2598 if (values_.length() > 0) {
2599 if (pop_count_ > 0) os << " /";
2600 for (int i = values_.length() - 1; i >= 0; --i) {
2601 if (HasAssignedIndexAt(i)) {
2602 os << " var[" << GetAssignedIndexAt(i) << "] = ";
2606 os << NameOf(values_[i]);
2607 if (i > 0) os << ",";
2614 void HSimulate::ReplayEnvironment(HEnvironment* env) {
2615 if (is_done_with_replay()) return;
2616 DCHECK(env != NULL);
2617 env->set_ast_id(ast_id());
2618 env->Drop(pop_count());
2619 for (int i = values()->length() - 1; i >= 0; --i) {
2620 HValue* value = values()->at(i);
2621 if (HasAssignedIndexAt(i)) {
2622 env->Bind(GetAssignedIndexAt(i), value);
2627 set_done_with_replay();
2631 static void ReplayEnvironmentNested(const ZoneList<HValue*>* values,
2632 HCapturedObject* other) {
2633 for (int i = 0; i < values->length(); ++i) {
2634 HValue* value = values->at(i);
2635 if (value->IsCapturedObject()) {
2636 if (HCapturedObject::cast(value)->capture_id() == other->capture_id()) {
2637 values->at(i) = other;
2639 ReplayEnvironmentNested(HCapturedObject::cast(value)->values(), other);
2646 // Replay captured objects by replacing all captured objects with the
2647 // same capture id in the current and all outer environments.
2648 void HCapturedObject::ReplayEnvironment(HEnvironment* env) {
2649 DCHECK(env != NULL);
2650 while (env != NULL) {
2651 ReplayEnvironmentNested(env->values(), this);
2657 std::ostream& HCapturedObject::PrintDataTo(std::ostream& os) const { // NOLINT
2658 os << "#" << capture_id() << " ";
2659 return HDematerializedObject::PrintDataTo(os);
2663 void HEnterInlined::RegisterReturnTarget(HBasicBlock* return_target,
2665 DCHECK(return_target->IsInlineReturnTarget());
2666 return_targets_.Add(return_target, zone);
2670 std::ostream& HEnterInlined::PrintDataTo(std::ostream& os) const { // NOLINT
2671 return os << function()->debug_name()->ToCString().get();
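// Returns true only when |value| survives a round trip through int32 with an
// identical bit pattern; this rejects out-of-range values, fractions, NaN and
// also -0.0, whose round trip yields +0.0 and therefore differs bitwise.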
2675 static bool IsInteger32(double value) {
2676 if (value >= std::numeric_limits<int32_t>::min() &&
2677 value <= std::numeric_limits<int32_t>::max()) {
2678 double roundtrip_value = static_cast<double>(static_cast<int32_t>(value));
2679 return bit_cast<int64_t>(roundtrip_value) == bit_cast<int64_t>(value);
2685 HConstant::HConstant(Special special)
2686 : HTemplateInstruction<0>(HType::TaggedNumber()),
2687 object_(Handle<Object>::null()),
2688 object_map_(Handle<Map>::null()),
2689 bit_field_(HasDoubleValueField::encode(true) |
2690 InstanceTypeField::encode(kUnknownInstanceType)),
2692 DCHECK_EQ(kHoleNaN, special);
2693 std::memcpy(&double_value_, &kHoleNanInt64, sizeof(double_value_));
2694 Initialize(Representation::Double());
2698 HConstant::HConstant(Handle<Object> object, Representation r)
2699 : HTemplateInstruction<0>(HType::FromValue(object)),
2700 object_(Unique<Object>::CreateUninitialized(object)),
2701 object_map_(Handle<Map>::null()),
2702 bit_field_(HasStableMapValueField::encode(false) |
2703 HasSmiValueField::encode(false) |
2704 HasInt32ValueField::encode(false) |
2705 HasDoubleValueField::encode(false) |
2706 HasExternalReferenceValueField::encode(false) |
2707 IsNotInNewSpaceField::encode(true) |
2708 BooleanValueField::encode(object->BooleanValue()) |
2709 IsUndetectableField::encode(false) |
2710 InstanceTypeField::encode(kUnknownInstanceType)) {
2711 if (object->IsHeapObject()) {
2712 Handle<HeapObject> heap_object = Handle<HeapObject>::cast(object);
2713 Isolate* isolate = heap_object->GetIsolate();
2714 Handle<Map> map(heap_object->map(), isolate);
2715 bit_field_ = IsNotInNewSpaceField::update(
2716 bit_field_, !isolate->heap()->InNewSpace(*object));
2717 bit_field_ = InstanceTypeField::update(bit_field_, map->instance_type());
2719 IsUndetectableField::update(bit_field_, map->is_undetectable());
2720 if (map->is_stable()) object_map_ = Unique<Map>::CreateImmovable(map);
2721 bit_field_ = HasStableMapValueField::update(
2723 HasMapValue() && Handle<Map>::cast(heap_object)->is_stable());
2725 if (object->IsNumber()) {
2726 double n = object->Number();
2727 bool has_int32_value = IsInteger32(n);
2728 bit_field_ = HasInt32ValueField::update(bit_field_, has_int32_value);
2729 int32_value_ = DoubleToInt32(n);
2730 bit_field_ = HasSmiValueField::update(
2731 bit_field_, has_int32_value && Smi::IsValid(int32_value_));
2733 bit_field_ = HasDoubleValueField::update(bit_field_, true);
2734 // TODO(titzer): if this heap number is new space, tenure a new one.
2741 HConstant::HConstant(Unique<Object> object, Unique<Map> object_map,
2742 bool has_stable_map_value, Representation r, HType type,
2743 bool is_not_in_new_space, bool boolean_value,
2744 bool is_undetectable, InstanceType instance_type)
2745 : HTemplateInstruction<0>(type),
2747 object_map_(object_map),
2748 bit_field_(HasStableMapValueField::encode(has_stable_map_value) |
2749 HasSmiValueField::encode(false) |
2750 HasInt32ValueField::encode(false) |
2751 HasDoubleValueField::encode(false) |
2752 HasExternalReferenceValueField::encode(false) |
2753 IsNotInNewSpaceField::encode(is_not_in_new_space) |
2754 BooleanValueField::encode(boolean_value) |
2755 IsUndetectableField::encode(is_undetectable) |
2756 InstanceTypeField::encode(instance_type)) {
2757 DCHECK(!object.handle().is_null());
2758 DCHECK(!type.IsTaggedNumber() || type.IsNone());
2763 HConstant::HConstant(int32_t integer_value, Representation r,
2764 bool is_not_in_new_space, Unique<Object> object)
2766 object_map_(Handle<Map>::null()),
2767 bit_field_(HasStableMapValueField::encode(false) |
2768 HasSmiValueField::encode(Smi::IsValid(integer_value)) |
2769 HasInt32ValueField::encode(true) |
2770 HasDoubleValueField::encode(true) |
2771 HasExternalReferenceValueField::encode(false) |
2772 IsNotInNewSpaceField::encode(is_not_in_new_space) |
2773 BooleanValueField::encode(integer_value != 0) |
2774 IsUndetectableField::encode(false) |
2775 InstanceTypeField::encode(kUnknownInstanceType)),
2776 int32_value_(integer_value),
2777 double_value_(FastI2D(integer_value)) {
2778 // It's possible to create a constant with a value in Smi-range but stored
2779 // in a (pre-existing) HeapNumber. See crbug.com/349878.
2780 bool could_be_heapobject = r.IsTagged() && !object.handle().is_null();
2781 bool is_smi = HasSmiValue() && !could_be_heapobject;
2782 set_type(is_smi ? HType::Smi() : HType::TaggedNumber());
2787 HConstant::HConstant(double double_value, Representation r,
2788 bool is_not_in_new_space, Unique<Object> object)
2790 object_map_(Handle<Map>::null()),
2791 bit_field_(HasStableMapValueField::encode(false) |
2792 HasInt32ValueField::encode(IsInteger32(double_value)) |
2793 HasDoubleValueField::encode(true) |
2794 HasExternalReferenceValueField::encode(false) |
2795 IsNotInNewSpaceField::encode(is_not_in_new_space) |
2796 BooleanValueField::encode(double_value != 0 &&
2797 !std::isnan(double_value)) |
2798 IsUndetectableField::encode(false) |
2799 InstanceTypeField::encode(kUnknownInstanceType)),
2800 int32_value_(DoubleToInt32(double_value)),
2801 double_value_(double_value) {
2802 bit_field_ = HasSmiValueField::update(
2803 bit_field_, HasInteger32Value() && Smi::IsValid(int32_value_));
2804 // It's possible to create a constant with a value in Smi-range but stored
2805 // in a (pre-existing) HeapNumber. See crbug.com/349878.
2806 bool could_be_heapobject = r.IsTagged() && !object.handle().is_null();
2807 bool is_smi = HasSmiValue() && !could_be_heapobject;
2808 set_type(is_smi ? HType::Smi() : HType::TaggedNumber());
2813 HConstant::HConstant(ExternalReference reference)
2814 : HTemplateInstruction<0>(HType::Any()),
2815 object_(Unique<Object>(Handle<Object>::null())),
2816 object_map_(Handle<Map>::null()),
2818 HasStableMapValueField::encode(false) |
2819 HasSmiValueField::encode(false) | HasInt32ValueField::encode(false) |
2820 HasDoubleValueField::encode(false) |
2821 HasExternalReferenceValueField::encode(true) |
2822 IsNotInNewSpaceField::encode(true) | BooleanValueField::encode(true) |
2823 IsUndetectableField::encode(false) |
2824 InstanceTypeField::encode(kUnknownInstanceType)),
2825 external_reference_value_(reference) {
2826 Initialize(Representation::External());
2830 void HConstant::Initialize(Representation r) {
2832 if (HasSmiValue() && SmiValuesAre31Bits()) {
2833 r = Representation::Smi();
2834 } else if (HasInteger32Value()) {
2835 r = Representation::Integer32();
2836 } else if (HasDoubleValue()) {
2837 r = Representation::Double();
2838 } else if (HasExternalReferenceValue()) {
2839 r = Representation::External();
2841 Handle<Object> object = object_.handle();
2842 if (object->IsJSObject()) {
2843 // Try to eagerly migrate JSObjects that have deprecated maps.
2844 Handle<JSObject> js_object = Handle<JSObject>::cast(object);
2845 if (js_object->map()->is_deprecated()) {
2846 JSObject::TryMigrateInstance(js_object);
2849 r = Representation::Tagged();
2853 // If we have an existing handle, zap it, because it might be a heap
2854 // number which we must not re-use when copying this HConstant to
2855 // Tagged representation later, because having Smi representation now
2856 // could cause heap object checks not to get emitted.
2857 object_ = Unique<Object>(Handle<Object>::null());
2859 if (r.IsSmiOrInteger32() && object_.handle().is_null()) {
2860 // If it's not a heap object, it can't be in new space.
2861 bit_field_ = IsNotInNewSpaceField::update(bit_field_, true);
2863 set_representation(r);
2868 bool HConstant::ImmortalImmovable() const {
2869 if (HasInteger32Value()) {
2872 if (HasDoubleValue()) {
2873 if (IsSpecialDouble()) {
2878 if (HasExternalReferenceValue()) {
2882 DCHECK(!object_.handle().is_null());
2883 Heap* heap = isolate()->heap();
2884 DCHECK(!object_.IsKnownGlobal(heap->minus_zero_value()));
2885 DCHECK(!object_.IsKnownGlobal(heap->nan_value()));
2887 #define IMMORTAL_IMMOVABLE_ROOT(name) \
2888 object_.IsKnownGlobal(heap->root(Heap::k##name##RootIndex)) ||
2889 IMMORTAL_IMMOVABLE_ROOT_LIST(IMMORTAL_IMMOVABLE_ROOT)
2890 #undef IMMORTAL_IMMOVABLE_ROOT
2891 #define INTERNALIZED_STRING(name, value) \
2892 object_.IsKnownGlobal(heap->name()) ||
2893 INTERNALIZED_STRING_LIST(INTERNALIZED_STRING)
2894 #undef INTERNALIZED_STRING
2895 #define STRING_TYPE(NAME, size, name, Name) \
2896 object_.IsKnownGlobal(heap->name##_map()) ||
2897 STRING_TYPE_LIST(STRING_TYPE)
2903 bool HConstant::EmitAtUses() {
2905 if (block()->graph()->has_osr() &&
2906 block()->graph()->IsStandardConstant(this)) {
2907 // TODO(titzer): this seems like a hack that should be fixed by custom OSR.
2910 if (HasNoUses()) return true;
2911 if (IsCell()) return false;
2912 if (representation().IsDouble()) return false;
2913 if (representation().IsExternal()) return false;
2918 HConstant* HConstant::CopyToRepresentation(Representation r, Zone* zone) const {
2919 if (r.IsSmi() && !HasSmiValue()) return NULL;
2920 if (r.IsInteger32() && !HasInteger32Value()) return NULL;
2921 if (r.IsDouble() && !HasDoubleValue()) return NULL;
2922 if (r.IsExternal() && !HasExternalReferenceValue()) return NULL;
2923 if (HasInteger32Value()) {
2924 return new (zone) HConstant(int32_value_, r, NotInNewSpace(), object_);
2926 if (HasDoubleValue()) {
2927 return new (zone) HConstant(double_value_, r, NotInNewSpace(), object_);
2929 if (HasExternalReferenceValue()) {
2930 return new(zone) HConstant(external_reference_value_);
2932 DCHECK(!object_.handle().is_null());
2933 return new (zone) HConstant(object_, object_map_, HasStableMapValue(), r,
2934 type_, NotInNewSpace(), BooleanValue(),
2935 IsUndetectable(), GetInstanceType());
2939 Maybe<HConstant*> HConstant::CopyToTruncatedInt32(Zone* zone) {
2940 HConstant* res = NULL;
2941 if (HasInteger32Value()) {
2942 res = new (zone) HConstant(int32_value_, Representation::Integer32(),
2943 NotInNewSpace(), object_);
2944 } else if (HasDoubleValue()) {
2946 HConstant(DoubleToInt32(double_value_), Representation::Integer32(),
2947 NotInNewSpace(), object_);
2949 return Maybe<HConstant*>(res != NULL, res);
2953 Maybe<HConstant*> HConstant::CopyToTruncatedNumber(Isolate* isolate,
2955 HConstant* res = NULL;
2956 Handle<Object> handle = this->handle(isolate);
2957 if (handle->IsBoolean()) {
2958 res = handle->BooleanValue() ?
2959 new(zone) HConstant(1) : new(zone) HConstant(0);
2960 } else if (handle->IsUndefined()) {
2961 res = new (zone) HConstant(std::numeric_limits<double>::quiet_NaN());
2962 } else if (handle->IsNull()) {
2963 res = new(zone) HConstant(0);
2965 return Maybe<HConstant*>(res != NULL, res);
2969 std::ostream& HConstant::PrintDataTo(std::ostream& os) const { // NOLINT
2970 if (HasInteger32Value()) {
2971 os << int32_value_ << " ";
2972 } else if (HasDoubleValue()) {
2973 os << double_value_ << " ";
2974 } else if (HasExternalReferenceValue()) {
2975 os << reinterpret_cast<void*>(external_reference_value_.address()) << " ";
2977 // The handle() method is silently and lazily mutating the object.
2978 Handle<Object> h = const_cast<HConstant*>(this)->handle(Isolate::Current());
2979 os << Brief(*h) << " ";
2980 if (HasStableMapValue()) os << "[stable-map] ";
2981 if (HasObjectMap()) os << "[map " << *ObjectMap().handle() << "] ";
2983 if (!NotInNewSpace()) os << "[new space] ";
2988 std::ostream& HBinaryOperation::PrintDataTo(std::ostream& os) const { // NOLINT
2989 os << NameOf(left()) << " " << NameOf(right());
2990 if (CheckFlag(kCanOverflow)) os << " !";
2991 if (CheckFlag(kBailoutOnMinusZero)) os << " -0?";
2996 void HBinaryOperation::InferRepresentation(HInferRepresentationPhase* h_infer) {
2997 DCHECK(CheckFlag(kFlexibleRepresentation));
2998 Representation new_rep = RepresentationFromInputs();
2999 UpdateRepresentation(new_rep, h_infer, "inputs");
3001 if (representation().IsSmi() && HasNonSmiUse()) {
3002 UpdateRepresentation(
3003 Representation::Integer32(), h_infer, "use requirements");
3006 if (observed_output_representation_.IsNone()) {
3007 new_rep = RepresentationFromUses();
3008 UpdateRepresentation(new_rep, h_infer, "uses");
3010 new_rep = RepresentationFromOutput();
3011 UpdateRepresentation(new_rep, h_infer, "output");
3016 Representation HBinaryOperation::RepresentationFromInputs() {
3017 // Determine the worst case of observed input representations and
3018 // the currently assumed output representation.
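// For example (illustrative): if the current representation is Smi but one
// input was observed as Double, generalizing yields Double here.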
3019 Representation rep = representation();
3020 for (int i = 1; i <= 2; ++i) {
3021 rep = rep.generalize(observed_input_representation(i));
3023 // If any of the actual input representations is more general than what we
3024 // have so far but not Tagged, use that representation instead.
3025 Representation left_rep = left()->representation();
3026 Representation right_rep = right()->representation();
3027 if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
3028 if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);
3034 bool HBinaryOperation::IgnoreObservedOutputRepresentation(
3035 Representation current_rep) {
3036 return ((current_rep.IsInteger32() && CheckUsesForFlag(kTruncatingToInt32)) ||
3037 (current_rep.IsSmi() && CheckUsesForFlag(kTruncatingToSmi))) &&
3038 // Mul in Integer32 mode would be too precise.
3039 (!this->IsMul() || HMul::cast(this)->MulMinusOne());
3043 Representation HBinaryOperation::RepresentationFromOutput() {
3044 Representation rep = representation();
3045 // Consider observed output representation, but ignore it if it's Double,
3046 // this instruction is not a division, and all its uses are truncating to Integer32.
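// Illustrative example: in "(a + b) | 0" the add may have been observed
// producing a Double after an overflow, but since every use truncates to
// int32 the observed Double output is ignored and the add can stay in
// Integer32. HMul (except multiplication by -1) is excluded by
// IgnoreObservedOutputRepresentation because an int32 multiply would be too
// precise compared with the truncated double product.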
3048 if (observed_output_representation_.is_more_general_than(rep) &&
3049 !IgnoreObservedOutputRepresentation(rep)) {
3050 return observed_output_representation_;
3052 return Representation::None();
3056 void HBinaryOperation::AssumeRepresentation(Representation r) {
3057 set_observed_input_representation(1, r);
3058 set_observed_input_representation(2, r);
3059 HValue::AssumeRepresentation(r);
3063 void HMathMinMax::InferRepresentation(HInferRepresentationPhase* h_infer) {
3064 DCHECK(CheckFlag(kFlexibleRepresentation));
3065 Representation new_rep = RepresentationFromInputs();
3066 UpdateRepresentation(new_rep, h_infer, "inputs");
3067 // Do not care about uses.
3071 Range* HBitwise::InferRange(Zone* zone) {
3072 if (op() == Token::BIT_XOR) {
3073 if (left()->HasRange() && right()->HasRange()) {
3074 // The maximum value has the high bit, and all bits below, set (0x7f...f).
3076 // If the range can be negative, the minimum int is a negative number with
3077 // the high bit, and all bits below, unset (0x80...0).
3079 // If it cannot be negative, conservatively choose 0 as minimum int.
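// Worked example (illustrative): with left in [1, 5] and right in [2, 6]
// every input fits in the low three bits, so the XOR result is bounded above
// by 7; both ranges are non-negative, so the minimum is 0. If an input could
// be negative, the minimum would be the matching negative power of two,
// e.g. -8 here.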
3080 int64_t left_upper = left()->range()->upper();
3081 int64_t left_lower = left()->range()->lower();
3082 int64_t right_upper = right()->range()->upper();
3083 int64_t right_lower = right()->range()->lower();
3085 if (left_upper < 0) left_upper = ~left_upper;
3086 if (left_lower < 0) left_lower = ~left_lower;
3087 if (right_upper < 0) right_upper = ~right_upper;
3088 if (right_lower < 0) right_lower = ~right_lower;
3090 int high = MostSignificantBit(
3091 static_cast<uint32_t>(
3092 left_upper | left_lower | right_upper | right_lower));
3096 int32_t min = (left()->range()->CanBeNegative() ||
3097 right()->range()->CanBeNegative())
3098 ? static_cast<int32_t>(-limit) : 0;
3099 return new(zone) Range(min, static_cast<int32_t>(limit - 1));
3101 Range* result = HValue::InferRange(zone);
3102 result->set_can_be_minus_zero(false);
3105 const int32_t kDefaultMask = static_cast<int32_t>(0xffffffff);
3106 int32_t left_mask = (left()->range() != NULL)
3107 ? left()->range()->Mask()
3109 int32_t right_mask = (right()->range() != NULL)
3110 ? right()->range()->Mask()
3112 int32_t result_mask = (op() == Token::BIT_AND)
3113 ? left_mask & right_mask
3114 : left_mask | right_mask;
3115 if (result_mask >= 0) return new(zone) Range(0, result_mask);
3117 Range* result = HValue::InferRange(zone);
3118 result->set_can_be_minus_zero(false);
3123 Range* HSar::InferRange(Zone* zone) {
3124 if (right()->IsConstant()) {
3125 HConstant* c = HConstant::cast(right());
3126 if (c->HasInteger32Value()) {
3127 Range* result = (left()->range() != NULL)
3128 ? left()->range()->Copy(zone)
3129 : new(zone) Range();
3130 result->Sar(c->Integer32Value());
3134 return HValue::InferRange(zone);
3138 Range* HShr::InferRange(Zone* zone) {
3139 if (right()->IsConstant()) {
3140 HConstant* c = HConstant::cast(right());
3141 if (c->HasInteger32Value()) {
3142 int shift_count = c->Integer32Value() & 0x1f;
3143 if (left()->range()->CanBeNegative()) {
3144 // Only compute bounds if the result always fits into an int32.
3145 return (shift_count >= 1)
3146 ? new(zone) Range(0,
3147 static_cast<uint32_t>(0xffffffff) >> shift_count)
3148 : new(zone) Range();
3150 // For positive inputs we can use the >> operator.
3151 Range* result = (left()->range() != NULL)
3152 ? left()->range()->Copy(zone)
3153 : new(zone) Range();
3154 result->Sar(c->Integer32Value());
3159 return HValue::InferRange(zone);
3163 Range* HShl::InferRange(Zone* zone) {
3164 if (right()->IsConstant()) {
3165 HConstant* c = HConstant::cast(right());
3166 if (c->HasInteger32Value()) {
3167 Range* result = (left()->range() != NULL)
3168 ? left()->range()->Copy(zone)
3169 : new(zone) Range();
3170 result->Shl(c->Integer32Value());
3174 return HValue::InferRange(zone);
3178 Range* HLoadNamedField::InferRange(Zone* zone) {
3179 if (access().representation().IsInteger8()) {
3180 return new(zone) Range(kMinInt8, kMaxInt8);
3182 if (access().representation().IsUInteger8()) {
3183 return new(zone) Range(kMinUInt8, kMaxUInt8);
3185 if (access().representation().IsInteger16()) {
3186 return new(zone) Range(kMinInt16, kMaxInt16);
3188 if (access().representation().IsUInteger16()) {
3189 return new(zone) Range(kMinUInt16, kMaxUInt16);
3191 if (access().IsStringLength()) {
3192 return new(zone) Range(0, String::kMaxLength);
3194 return HValue::InferRange(zone);
3198 Range* HLoadKeyed::InferRange(Zone* zone) {
3199 switch (elements_kind()) {
3200 case EXTERNAL_INT8_ELEMENTS:
3201 return new(zone) Range(kMinInt8, kMaxInt8);
3202 case EXTERNAL_UINT8_ELEMENTS:
3203 case EXTERNAL_UINT8_CLAMPED_ELEMENTS:
3204 return new(zone) Range(kMinUInt8, kMaxUInt8);
3205 case EXTERNAL_INT16_ELEMENTS:
3206 return new(zone) Range(kMinInt16, kMaxInt16);
3207 case EXTERNAL_UINT16_ELEMENTS:
3208 return new(zone) Range(kMinUInt16, kMaxUInt16);
3210 return HValue::InferRange(zone);
3215 std::ostream& HCompareGeneric::PrintDataTo(std::ostream& os) const { // NOLINT
3216 os << Token::Name(token()) << " ";
3217 return HBinaryOperation::PrintDataTo(os);
3221 std::ostream& HStringCompareAndBranch::PrintDataTo(
3222 std::ostream& os) const { // NOLINT
3223 os << Token::Name(token()) << " ";
3224 return HControlInstruction::PrintDataTo(os);
3228 std::ostream& HCompareNumericAndBranch::PrintDataTo(
3229 std::ostream& os) const { // NOLINT
3230 os << Token::Name(token()) << " " << NameOf(left()) << " " << NameOf(right());
3231 return HControlInstruction::PrintDataTo(os);
3235 std::ostream& HCompareObjectEqAndBranch::PrintDataTo(
3236 std::ostream& os) const { // NOLINT
3237 os << NameOf(left()) << " " << NameOf(right());
3238 return HControlInstruction::PrintDataTo(os);
3242 bool HCompareObjectEqAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3243 if (known_successor_index() != kNoKnownSuccessorIndex) {
3244 *block = SuccessorAt(known_successor_index());
3247 if (FLAG_fold_constants && left()->IsConstant() && right()->IsConstant()) {
3248 *block = HConstant::cast(left())->DataEquals(HConstant::cast(right()))
3249 ? FirstSuccessor() : SecondSuccessor();
3257 bool ConstantIsObject(HConstant* constant, Isolate* isolate) {
3258 if (constant->HasNumberValue()) return false;
3259 if (constant->GetUnique().IsKnownGlobal(isolate->heap()->null_value())) {
3262 if (constant->IsUndetectable()) return false;
3263 InstanceType type = constant->GetInstanceType();
3264 return (FIRST_NONCALLABLE_SPEC_OBJECT_TYPE <= type) &&
3265 (type <= LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
3269 bool HIsObjectAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3270 if (FLAG_fold_constants && value()->IsConstant()) {
3271 *block = ConstantIsObject(HConstant::cast(value()), isolate())
3272 ? FirstSuccessor() : SecondSuccessor();
3280 bool HIsStringAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3281 if (known_successor_index() != kNoKnownSuccessorIndex) {
3282 *block = SuccessorAt(known_successor_index());
3285 if (FLAG_fold_constants && value()->IsConstant()) {
3286 *block = HConstant::cast(value())->HasStringValue()
3287 ? FirstSuccessor() : SecondSuccessor();
3290 if (value()->type().IsString()) {
3291 *block = FirstSuccessor();
3294 if (value()->type().IsSmi() ||
3295 value()->type().IsNull() ||
3296 value()->type().IsBoolean() ||
3297 value()->type().IsUndefined() ||
3298 value()->type().IsJSObject()) {
3299 *block = SecondSuccessor();
3307 bool HIsUndetectableAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3308 if (FLAG_fold_constants && value()->IsConstant()) {
3309 *block = HConstant::cast(value())->IsUndetectable()
3310 ? FirstSuccessor() : SecondSuccessor();
3318 bool HHasInstanceTypeAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3319 if (FLAG_fold_constants && value()->IsConstant()) {
3320 InstanceType type = HConstant::cast(value())->GetInstanceType();
3321 *block = (from_ <= type) && (type <= to_)
3322 ? FirstSuccessor() : SecondSuccessor();
3330 void HCompareHoleAndBranch::InferRepresentation(
3331 HInferRepresentationPhase* h_infer) {
3332 ChangeRepresentation(value()->representation());
3336 bool HCompareNumericAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3337 if (left() == right() &&
3338 left()->representation().IsSmiOrInteger32()) {
3339 *block = (token() == Token::EQ ||
3340 token() == Token::EQ_STRICT ||
3341 token() == Token::LTE ||
3342 token() == Token::GTE)
3343 ? FirstSuccessor() : SecondSuccessor();
3351 bool HCompareMinusZeroAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3352 if (FLAG_fold_constants && value()->IsConstant()) {
3353 HConstant* constant = HConstant::cast(value());
3354 if (constant->HasDoubleValue()) {
3355 *block = IsMinusZero(constant->DoubleValue())
3356 ? FirstSuccessor() : SecondSuccessor();
3360 if (value()->representation().IsSmiOrInteger32()) {
3361 // A Smi or Integer32 cannot contain minus zero.
3362 *block = SecondSuccessor();
3370 void HCompareMinusZeroAndBranch::InferRepresentation(
3371 HInferRepresentationPhase* h_infer) {
3372 ChangeRepresentation(value()->representation());
3376 std::ostream& HGoto::PrintDataTo(std::ostream& os) const { // NOLINT
3377 return os << *SuccessorAt(0);
3381 void HCompareNumericAndBranch::InferRepresentation(
3382 HInferRepresentationPhase* h_infer) {
3383 Representation left_rep = left()->representation();
3384 Representation right_rep = right()->representation();
3385 Representation observed_left = observed_input_representation(0);
3386 Representation observed_right = observed_input_representation(1);
3388 Representation rep = Representation::None();
3389 rep = rep.generalize(observed_left);
3390 rep = rep.generalize(observed_right);
3391 if (rep.IsNone() || rep.IsSmiOrInteger32()) {
3392 if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
3393 if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);
3395 rep = Representation::Double();
3398 if (rep.IsDouble()) {
3399 // According to the ES5 spec (11.9.3, 11.8.5), Equality comparisons (==, ===
3400 // and !=) have special handling of undefined, e.g. undefined == undefined
3401 // is 'true'. Relational comparisons have a different semantic, first
3402 // calling ToPrimitive() on their arguments. The standard Crankshaft
3403 // tagged-to-double conversion to ensure the HCompareNumericAndBranch's
3404 // inputs are doubles caused 'undefined' to be converted to NaN. That's
3405 // compatible out-of-the box with ordered relational comparisons (<, >, <=,
3406 // >=). However, for equality comparisons (and for 'in' and 'instanceof'),
3407 // it is not consistent with the spec. For example, it would cause undefined
3408 // == undefined (should be true) to be evaluated as NaN == NaN
3409 // (false). Therefore, any comparisons other than ordered relational
3410 // comparisons must cause a deopt when one of their arguments is undefined.
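// For instance, ordered comparisons remain correct under this conversion:
// "undefined < 1" evaluates to false either way, because every ordered
// comparison involving NaN is false.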
3412 if (Token::IsOrderedRelationalCompareOp(token_)) {
3413 SetFlag(kAllowUndefinedAsNaN);
3416 ChangeRepresentation(rep);
3420 std::ostream& HParameter::PrintDataTo(std::ostream& os) const { // NOLINT
3421 return os << index();
3425 std::ostream& HLoadNamedField::PrintDataTo(std::ostream& os) const { // NOLINT
3426 os << NameOf(object()) << access_;
3428 if (maps() != NULL) {
3429 os << " [" << *maps()->at(0).handle();
3430 for (int i = 1; i < maps()->size(); ++i) {
3431 os << "," << *maps()->at(i).handle();
3436 if (HasDependency()) os << " " << NameOf(dependency());
3441 std::ostream& HLoadNamedGeneric::PrintDataTo(
3442 std::ostream& os) const { // NOLINT
3443 Handle<String> n = Handle<String>::cast(name());
3444 return os << NameOf(object()) << "." << n->ToCString().get();
3448 std::ostream& HLoadKeyed::PrintDataTo(std::ostream& os) const { // NOLINT
3449 if (!is_external()) {
3450 os << NameOf(elements());
3452 DCHECK(elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND &&
3453 elements_kind() <= LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
3454 os << NameOf(elements()) << "." << ElementsKindToString(elements_kind());
3457 os << "[" << NameOf(key());
3458 if (IsDehoisted()) os << " + " << base_offset();
3461 if (HasDependency()) os << " " << NameOf(dependency());
3462 if (RequiresHoleCheck()) os << " check_hole";
3467 bool HLoadKeyed::TryIncreaseBaseOffset(uint32_t increase_by_value) {
3468 // The base offset is usually simply the size of the array header, except
3469 // when dehoisting adds an additional offset due to an array index key
3470 // manipulation, in which case it becomes (array header size +
3471 // constant-offset-from-key * kPointerSize).
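// Illustrative sketch: after dehoisting a load such as "a[i + 2]" the key
// becomes "i" and the constant 2 is folded into the base offset, so for a
// FAST_ELEMENTS backing store it would be
// FixedArray::kHeaderSize + 2 * kPointerSize.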
3472 uint32_t base_offset = BaseOffsetField::decode(bit_field_);
3473 v8::base::internal::CheckedNumeric<uint32_t> addition_result = base_offset;
3474 addition_result += increase_by_value;
3475 if (!addition_result.IsValid()) return false;
3476 base_offset = addition_result.ValueOrDie();
3477 if (!BaseOffsetField::is_valid(base_offset)) return false;
3478 bit_field_ = BaseOffsetField::update(bit_field_, base_offset);
3483 bool HLoadKeyed::UsesMustHandleHole() const {
3484 if (IsFastPackedElementsKind(elements_kind())) {
3488 if (IsExternalArrayElementsKind(elements_kind())) {
3492 if (hole_mode() == ALLOW_RETURN_HOLE) {
3493 if (IsFastDoubleElementsKind(elements_kind())) {
3494 return AllUsesCanTreatHoleAsNaN();
3499 if (IsFastDoubleElementsKind(elements_kind())) {
3503 // Holes are only returned as tagged values.
3504 if (!representation().IsTagged()) {
3508 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
3509 HValue* use = it.value();
3510 if (!use->IsChange()) return false;
3517 bool HLoadKeyed::AllUsesCanTreatHoleAsNaN() const {
3518 return IsFastDoubleElementsKind(elements_kind()) &&
3519 CheckUsesForFlag(HValue::kAllowUndefinedAsNaN);
3523 bool HLoadKeyed::RequiresHoleCheck() const {
3524 if (IsFastPackedElementsKind(elements_kind())) {
3528 if (IsExternalArrayElementsKind(elements_kind())) {
3532 return !UsesMustHandleHole();
3536 std::ostream& HLoadKeyedGeneric::PrintDataTo(
3537 std::ostream& os) const { // NOLINT
3538 return os << NameOf(object()) << "[" << NameOf(key()) << "]";
3542 HValue* HLoadKeyedGeneric::Canonicalize() {
3543 // Recognize generic keyed loads that use a property name generated
3544 // by a for-in statement as a key and rewrite them into a fast property load by index.
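// Sketch of the pattern being matched: in
//   for (var name in obj) { var v = obj[name]; }
// "name" is itself loaded from the for-in cache array, so when the enumerated
// object matches, the generic load is replaced by a map check plus an
// HLoadFieldByIndex that goes through the parallel index cache.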
3546 if (key()->IsLoadKeyed()) {
3547 HLoadKeyed* key_load = HLoadKeyed::cast(key());
3548 if (key_load->elements()->IsForInCacheArray()) {
3549 HForInCacheArray* names_cache =
3550 HForInCacheArray::cast(key_load->elements());
3552 if (names_cache->enumerable() == object()) {
3553 HForInCacheArray* index_cache =
3554 names_cache->index_cache();
3555 HCheckMapValue* map_check = HCheckMapValue::New(
3556 block()->graph()->isolate(), block()->graph()->zone(),
3557 block()->graph()->GetInvalidContext(), object(),
3558 names_cache->map());
3559 HInstruction* index = HLoadKeyed::New(
3560 block()->graph()->isolate(), block()->graph()->zone(),
3561 block()->graph()->GetInvalidContext(), index_cache, key_load->key(),
3562 key_load->key(), key_load->elements_kind());
3563 map_check->InsertBefore(this);
3564 index->InsertBefore(this);
3565 return Prepend(new(block()->zone()) HLoadFieldByIndex(
3575 std::ostream& HStoreNamedGeneric::PrintDataTo(
3576 std::ostream& os) const { // NOLINT
3577 Handle<String> n = Handle<String>::cast(name());
3578 return os << NameOf(object()) << "." << n->ToCString().get() << " = "
3583 std::ostream& HStoreNamedField::PrintDataTo(std::ostream& os) const { // NOLINT
3584 os << NameOf(object()) << access_ << " = " << NameOf(value());
3585 if (NeedsWriteBarrier()) os << " (write-barrier)";
3586 if (has_transition()) os << " (transition map " << *transition_map() << ")";
3591 std::ostream& HStoreKeyed::PrintDataTo(std::ostream& os) const { // NOLINT
3592 if (!is_external()) {
3593 os << NameOf(elements());
3595 DCHECK(elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND &&
3596 elements_kind() <= LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
3597 os << NameOf(elements()) << "." << ElementsKindToString(elements_kind());
3600 os << "[" << NameOf(key());
3601 if (IsDehoisted()) os << " + " << base_offset();
3602 return os << "] = " << NameOf(value());
3606 std::ostream& HStoreKeyedGeneric::PrintDataTo(
3607 std::ostream& os) const { // NOLINT
3608 return os << NameOf(object()) << "[" << NameOf(key())
3609 << "] = " << NameOf(value());
3613 std::ostream& HTransitionElementsKind::PrintDataTo(
3614 std::ostream& os) const { // NOLINT
3615 os << NameOf(object());
3616 ElementsKind from_kind = original_map().handle()->elements_kind();
3617 ElementsKind to_kind = transitioned_map().handle()->elements_kind();
3618 os << " " << *original_map().handle() << " ["
3619 << ElementsAccessor::ForKind(from_kind)->name() << "] -> "
3620 << *transitioned_map().handle() << " ["
3621 << ElementsAccessor::ForKind(to_kind)->name() << "]";
3622 if (IsSimpleMapChangeTransition(from_kind, to_kind)) os << " (simple)";
3627 std::ostream& HLoadGlobalCell::PrintDataTo(std::ostream& os) const { // NOLINT
3628 os << "[" << *cell().handle() << "]";
3629 if (details_.IsConfigurable()) os << " (configurable)";
3630 if (details_.IsReadOnly()) os << " (read-only)";
3635 bool HLoadGlobalCell::RequiresHoleCheck() const {
3636 if (!details_.IsConfigurable()) return false;
3637 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
3638 HValue* use = it.value();
3639 if (!use->IsChange()) return true;
3645 std::ostream& HLoadGlobalGeneric::PrintDataTo(
3646 std::ostream& os) const { // NOLINT
3647 return os << name()->ToCString().get() << " ";
3651 std::ostream& HInnerAllocatedObject::PrintDataTo(
3652 std::ostream& os) const { // NOLINT
3653 os << NameOf(base_object()) << " offset ";
3654 return offset()->PrintTo(os);
3658 std::ostream& HStoreGlobalCell::PrintDataTo(std::ostream& os) const { // NOLINT
3659 os << "[" << *cell().handle() << "] = " << NameOf(value());
3660 if (details_.IsConfigurable()) os << " (configurable)";
3661 if (details_.IsReadOnly()) os << " (read-only)";
3666 std::ostream& HLoadContextSlot::PrintDataTo(std::ostream& os) const { // NOLINT
3667 return os << NameOf(value()) << "[" << slot_index() << "]";
3671 std::ostream& HStoreContextSlot::PrintDataTo(
3672 std::ostream& os) const { // NOLINT
3673 return os << NameOf(context()) << "[" << slot_index()
3674 << "] = " << NameOf(value());
3678 // Implementation of type inference and type conversions. Calculates
3679 // the inferred type of this instruction based on the input operands.
3681 HType HValue::CalculateInferredType() {
3686 HType HPhi::CalculateInferredType() {
3687 if (OperandCount() == 0) return HType::Tagged();
3688 HType result = OperandAt(0)->type();
3689 for (int i = 1; i < OperandCount(); ++i) {
3690 HType current = OperandAt(i)->type();
3691 result = result.Combine(current);
3697 HType HChange::CalculateInferredType() {
3698 if (from().IsDouble() && to().IsTagged()) return HType::HeapNumber();
3703 Representation HUnaryMathOperation::RepresentationFromInputs() {
3704 if (SupportsFlexibleFloorAndRound() &&
3705 (op_ == kMathFloor || op_ == kMathRound)) {
3706 // Floor and Round always take a double input. The integral result can be
3707 // used as an integer or a double. Infer the representation from the uses.
3708 return Representation::None();
3710 Representation rep = representation();
3711 // If any of the actual input representations is more general than what we
3712 // have so far but not Tagged, use that representation instead.
3713 Representation input_rep = value()->representation();
3714 if (!input_rep.IsTagged()) {
3715 rep = rep.generalize(input_rep);
3721 bool HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
3722 HValue* dominator) {
3723 DCHECK(side_effect == kNewSpacePromotion);
3724 Zone* zone = block()->zone();
3725 Isolate* isolate = block()->isolate();
3726 if (!FLAG_use_allocation_folding) return false;
3728 // Try to fold allocations together with their dominating allocations.
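// Folding sketch (illustrative sizes): if the dominating allocation reserves
// 24 bytes and this allocation needs at most 16 bytes, the dominator is grown
// to 40 bytes and this instruction is replaced by an HInnerAllocatedObject at
// offset 24 inside the dominator's object.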
3729 if (!dominator->IsAllocate()) {
3730 if (FLAG_trace_allocation_folding) {
3731 PrintF("#%d (%s) cannot fold into #%d (%s)\n",
3732 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3737 // Check whether we are folding within the same block for local folding.
3738 if (FLAG_use_local_allocation_folding && dominator->block() != block()) {
3739 if (FLAG_trace_allocation_folding) {
3740 PrintF("#%d (%s) cannot fold into #%d (%s), crosses basic blocks\n",
3741 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3746 HAllocate* dominator_allocate = HAllocate::cast(dominator);
3747 HValue* dominator_size = dominator_allocate->size();
3748 HValue* current_size = size();
3750 // TODO(hpayer): Add support for non-constant allocation in dominator.
3751 if (!dominator_size->IsInteger32Constant()) {
3752 if (FLAG_trace_allocation_folding) {
3753 PrintF("#%d (%s) cannot fold into #%d (%s), "
3754 "dynamic allocation size in dominator\n",
3755 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3760 dominator_allocate = GetFoldableDominator(dominator_allocate);
3761 if (dominator_allocate == NULL) {
3765 if (!has_size_upper_bound()) {
3766 if (FLAG_trace_allocation_folding) {
3767 PrintF("#%d (%s) cannot fold into #%d (%s), "
3768 "can't estimate total allocation size\n",
3769 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3774 if (!current_size->IsInteger32Constant()) {
3775 // If it's not constant then it is a size_in_bytes calculation graph
3776 // like this: (const_header_size + const_element_size * size).
3777 DCHECK(current_size->IsInstruction());
3779 HInstruction* current_instr = HInstruction::cast(current_size);
3780 if (!current_instr->Dominates(dominator_allocate)) {
3781 if (FLAG_trace_allocation_folding) {
3782 PrintF("#%d (%s) cannot fold into #%d (%s), dynamic size "
3783 "value does not dominate target allocation\n",
3784 id(), Mnemonic(), dominator_allocate->id(),
3785 dominator_allocate->Mnemonic());
3791 DCHECK((IsNewSpaceAllocation() &&
3792 dominator_allocate->IsNewSpaceAllocation()) ||
3793 (IsOldDataSpaceAllocation() &&
3794 dominator_allocate->IsOldDataSpaceAllocation()) ||
3795 (IsOldPointerSpaceAllocation() &&
3796 dominator_allocate->IsOldPointerSpaceAllocation()));
3798 // First update the size of the dominator allocate instruction.
3799 dominator_size = dominator_allocate->size();
3800 int32_t original_object_size =
3801 HConstant::cast(dominator_size)->GetInteger32Constant();
3802 int32_t dominator_size_constant = original_object_size;
3804 if (MustAllocateDoubleAligned()) {
3805 if ((dominator_size_constant & kDoubleAlignmentMask) != 0) {
3806 dominator_size_constant += kDoubleSize / 2;
3810 int32_t current_size_max_value = size_upper_bound()->GetInteger32Constant();
3811 int32_t new_dominator_size = dominator_size_constant + current_size_max_value;
3813 // Since we clear the first word after folded memory, we cannot use the
3814 // whole Page::kMaxRegularHeapObjectSize memory.
3815 if (new_dominator_size > Page::kMaxRegularHeapObjectSize - kPointerSize) {
3816 if (FLAG_trace_allocation_folding) {
3817 PrintF("#%d (%s) cannot fold into #%d (%s) due to size: %d\n",
3818 id(), Mnemonic(), dominator_allocate->id(),
3819 dominator_allocate->Mnemonic(), new_dominator_size);
3824 HInstruction* new_dominator_size_value;
3826 if (current_size->IsInteger32Constant()) {
3827 new_dominator_size_value = HConstant::CreateAndInsertBefore(
3828 isolate, zone, context(), new_dominator_size, Representation::None(),
3829 dominator_allocate);
3831 HValue* new_dominator_size_constant = HConstant::CreateAndInsertBefore(
3832 isolate, zone, context(), dominator_size_constant,
3833 Representation::Integer32(), dominator_allocate);
3835 // Add old and new size together and insert.
3836 current_size->ChangeRepresentation(Representation::Integer32());
3838 new_dominator_size_value = HAdd::New(
3839 isolate, zone, context(), new_dominator_size_constant, current_size);
3840 new_dominator_size_value->ClearFlag(HValue::kCanOverflow);
3841 new_dominator_size_value->ChangeRepresentation(Representation::Integer32());
3843 new_dominator_size_value->InsertBefore(dominator_allocate);
3846 dominator_allocate->UpdateSize(new_dominator_size_value);
3848 if (MustAllocateDoubleAligned()) {
3849 if (!dominator_allocate->MustAllocateDoubleAligned()) {
3850 dominator_allocate->MakeDoubleAligned();
3854 bool keep_new_space_iterable = FLAG_log_gc || FLAG_heap_stats;
3856 keep_new_space_iterable = keep_new_space_iterable || FLAG_verify_heap;
3859 if (keep_new_space_iterable && dominator_allocate->IsNewSpaceAllocation()) {
3860 dominator_allocate->MakePrefillWithFiller();
3862 // TODO(hpayer): This is a short-term hack to make allocation mementos
3863 // work again in new space.
3864 dominator_allocate->ClearNextMapWord(original_object_size);
3867 dominator_allocate->UpdateClearNextMapWord(MustClearNextMapWord());
3869 // After that replace the dominated allocate instruction.
3870 HInstruction* inner_offset = HConstant::CreateAndInsertBefore(
3871 isolate, zone, context(), dominator_size_constant, Representation::None(),
3874 HInstruction* dominated_allocate_instr = HInnerAllocatedObject::New(
3875 isolate, zone, context(), dominator_allocate, inner_offset, type());
3876 dominated_allocate_instr->InsertBefore(this);
3877 DeleteAndReplaceWith(dominated_allocate_instr);
3878 if (FLAG_trace_allocation_folding) {
3879 PrintF("#%d (%s) folded into #%d (%s)\n",
3880 id(), Mnemonic(), dominator_allocate->id(),
3881 dominator_allocate->Mnemonic());
3887 HAllocate* HAllocate::GetFoldableDominator(HAllocate* dominator) {
3888 if (!IsFoldable(dominator)) {
3889 // We cannot hoist old space allocations over new space allocations.
3890 if (IsNewSpaceAllocation() || dominator->IsNewSpaceAllocation()) {
3891 if (FLAG_trace_allocation_folding) {
3892 PrintF("#%d (%s) cannot fold into #%d (%s), new space hoisting\n",
3893 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3898 HAllocate* dominator_dominator = dominator->dominating_allocate_;
3900 // We can hoist old data space allocations over an old pointer space
3901 // allocation and vice versa. For that we have to check the dominator
3902 // of the dominator allocate instruction.
3903 if (dominator_dominator == NULL) {
3904 dominating_allocate_ = dominator;
3905 if (FLAG_trace_allocation_folding) {
3906 PrintF("#%d (%s) cannot fold into #%d (%s), different spaces\n",
3907 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3912 // We can just fold old space allocations that are in the same basic block,
3913 // since it is not guaranteed that we fill up the whole allocated old space memory.
3915 // TODO(hpayer): Remove this limitation and add filler maps for each
3916 // allocation as soon as we have store elimination.
3917 if (block()->block_id() != dominator_dominator->block()->block_id()) {
3918 if (FLAG_trace_allocation_folding) {
3919 PrintF("#%d (%s) cannot fold into #%d (%s), different basic blocks\n",
3920 id(), Mnemonic(), dominator_dominator->id(),
3921 dominator_dominator->Mnemonic());
3926 DCHECK((IsOldDataSpaceAllocation() &&
3927 dominator_dominator->IsOldDataSpaceAllocation()) ||
3928 (IsOldPointerSpaceAllocation() &&
3929 dominator_dominator->IsOldPointerSpaceAllocation()));
3931 int32_t current_size = HConstant::cast(size())->GetInteger32Constant();
3932 HStoreNamedField* dominator_free_space_size =
3933 dominator->filler_free_space_size_;
3934 if (dominator_free_space_size != NULL) {
3935 // We already hoisted one old space allocation, i.e., we already installed
3936 // a filler map. Hence, we just have to update the free space size.
3937 dominator->UpdateFreeSpaceFiller(current_size);
3939 // This is the first old space allocation that gets hoisted. We have to
3940 // install a filler map since the following allocation may cause a GC.
3941 dominator->CreateFreeSpaceFiller(current_size);
3944 // We can hoist the old space allocation over the actual dominator.
3945 return dominator_dominator;
3951 void HAllocate::UpdateFreeSpaceFiller(int32_t free_space_size) {
3952 DCHECK(filler_free_space_size_ != NULL);
3953 Zone* zone = block()->zone();
3954 // We must explicitly force Smi representation here because on x64 we
3955 // would otherwise automatically choose int32, but the actual store
3956 // requires a Smi-tagged value.
3957 HConstant* new_free_space_size = HConstant::CreateAndInsertBefore(
3958 block()->isolate(), zone, context(),
3959 filler_free_space_size_->value()->GetInteger32Constant() +
3961 Representation::Smi(), filler_free_space_size_);
3962 filler_free_space_size_->UpdateValue(new_free_space_size);
3966 void HAllocate::CreateFreeSpaceFiller(int32_t free_space_size) {
3967 DCHECK(filler_free_space_size_ == NULL);
3968 Isolate* isolate = block()->isolate();
3969 Zone* zone = block()->zone();
3970 HInstruction* free_space_instr =
3971 HInnerAllocatedObject::New(isolate, zone, context(), dominating_allocate_,
3972 dominating_allocate_->size(), type());
3973 free_space_instr->InsertBefore(this);
3974 HConstant* filler_map = HConstant::CreateAndInsertAfter(
3975 zone, Unique<Map>::CreateImmovable(isolate->factory()->free_space_map()),
3976 true, free_space_instr);
3977 HInstruction* store_map =
3978 HStoreNamedField::New(isolate, zone, context(), free_space_instr,
3979 HObjectAccess::ForMap(), filler_map);
3980 store_map->SetFlag(HValue::kHasNoObservableSideEffects);
3981 store_map->InsertAfter(filler_map);
3983 // We must explicitly force Smi representation here because on x64 we
3984 // would otherwise automatically choose int32, but the actual store
3985 // requires a Smi-tagged value.
3986 HConstant* filler_size =
3987 HConstant::CreateAndInsertAfter(isolate, zone, context(), free_space_size,
3988 Representation::Smi(), store_map);
3989 // Must force Smi representation for x64 (see comment above).
3990 HObjectAccess access = HObjectAccess::ForMapAndOffset(
3991 isolate->factory()->free_space_map(), FreeSpace::kSizeOffset,
3992 Representation::Smi());
3993 HStoreNamedField* store_size = HStoreNamedField::New(
3994 isolate, zone, context(), free_space_instr, access, filler_size);
3995 store_size->SetFlag(HValue::kHasNoObservableSideEffects);
3996 store_size->InsertAfter(filler_size);
3997 filler_free_space_size_ = store_size;
3998 }
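// Resulting filler layout at the old end of the dominating allocation
// (sketch for illustration only):
//
//   free_space_instr --> [ free_space_map | size (Smi) | uninitialized ... ]
//
// The two stores above write exactly the map and size fields a FreeSpace
// object needs, so a GC that runs before the folded allocation initializes
// its own object still sees a well-formed heap.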
4001 void HAllocate::ClearNextMapWord(int offset) {
4002 if (MustClearNextMapWord()) {
4003 Zone* zone = block()->zone();
4004 HObjectAccess access =
4005 HObjectAccess::ForObservableJSObjectOffset(offset);
4006 HStoreNamedField* clear_next_map =
4007 HStoreNamedField::New(block()->isolate(), zone, context(), this, access,
4008 block()->graph()->GetConstant0());
4009 clear_next_map->ClearAllSideEffects();
4010 clear_next_map->InsertAfter(this);
4015 std::ostream& HAllocate::PrintDataTo(std::ostream& os) const { // NOLINT
4016 os << NameOf(size()) << " (";
4017 if (IsNewSpaceAllocation()) os << "N";
4018 if (IsOldPointerSpaceAllocation()) os << "P";
4019 if (IsOldDataSpaceAllocation()) os << "D";
4020 if (MustAllocateDoubleAligned()) os << "A";
4021 if (MustPrefillWithFiller()) os << "F";
4022 return os << ")";
4023 }
4026 bool HStoreKeyed::TryIncreaseBaseOffset(uint32_t increase_by_value) {
4027 // The base offset is usually simply the size of the array header, except
4028 // that dehoisting can add an additional offset due to array index key
4029 // manipulation, in which case it becomes (array header size +
4030 // constant-offset-from-key * kPointerSize).
4031 v8::base::internal::CheckedNumeric<uint32_t> addition_result = base_offset_;
4032 addition_result += increase_by_value;
4033 if (!addition_result.IsValid()) return false;
4034 base_offset_ = addition_result.ValueOrDie();
4035 return true;
4036 }
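// Worked example for the checked addition above: dehoisting a store with a
// constant key offset of 3 on a FixedArray increases base_offset_ by
// 3 * kPointerSize on top of the array header size. CheckedNumeric<uint32_t>
// makes the overflow case explicit: if the sum no longer fits in uint32,
// IsValid() is false and the caller keeps the original base offset.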
4039 bool HStoreKeyed::NeedsCanonicalization() {
4040 switch (value()->opcode()) {
4041 case kLoadKeyed: {
4042 ElementsKind load_kind = HLoadKeyed::cast(value())->elements_kind();
4043 return IsExternalFloatOrDoubleElementsKind(load_kind) ||
4044 IsFixedFloatElementsKind(load_kind);
4045 }
4046 case kChange: {
4047 Representation from = HChange::cast(value())->from();
4048 return from.IsTagged() || from.IsHeapObject();
4049 }
4050 case kLoadNamedField:
4051 case kPhi: {
4052 // Better safe than sorry...
4053 return true;
4054 }
4055 default:
4056 return false;
4057 }
4058 }
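// Background for the cases above: stores into double arrays must canonicalize
// NaNs because V8 reserves one particular NaN bit pattern as the "hole"
// marker in FixedDoubleArray. Values loaded from float/double-typed arrays or
// converted from tagged values may carry arbitrary NaN payloads, while plain
// arithmetic results are already canonical, hence the per-opcode distinction.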
4061 #define H_CONSTANT_INT(val) \
4062 HConstant::New(isolate, zone, context, static_cast<int32_t>(val))
4063 #define H_CONSTANT_DOUBLE(val) \
4064 HConstant::New(isolate, zone, context, static_cast<double>(val))
4066 #define DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HInstr, op) \
4067 HInstruction* HInstr::New(Isolate* isolate, Zone* zone, HValue* context, \
4068 HValue* left, HValue* right) { \
4069 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \
4070 HConstant* c_left = HConstant::cast(left); \
4071 HConstant* c_right = HConstant::cast(right); \
4072 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \
4073 double double_res = c_left->DoubleValue() op c_right->DoubleValue(); \
4074 if (IsInt32Double(double_res)) { \
4075 return H_CONSTANT_INT(double_res); \
4077 return H_CONSTANT_DOUBLE(double_res); \
4080 return new (zone) HInstr(context, left, right); \
4084 DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HAdd, +)
4085 DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HMul, *)
4086 DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HSub, -)
4088 #undef DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR
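// Example of the folding generated by the macro above: HAdd::New with
// constant operands 3 and 4 produces an HConstant 7 via H_CONSTANT_INT
// (7 is a valid int32 double), whereas 1e308 + 1e308 overflows to +Infinity
// and is emitted via H_CONSTANT_DOUBLE. Non-constant operands fall through
// to a regular arithmetic instruction.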
4091 HInstruction* HStringAdd::New(Isolate* isolate, Zone* zone, HValue* context,
4092 HValue* left, HValue* right,
4093 PretenureFlag pretenure_flag,
4094 StringAddFlags flags,
4095 Handle<AllocationSite> allocation_site) {
4096 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4097 HConstant* c_right = HConstant::cast(right);
4098 HConstant* c_left = HConstant::cast(left);
4099 if (c_left->HasStringValue() && c_right->HasStringValue()) {
4100 Handle<String> left_string = c_left->StringValue();
4101 Handle<String> right_string = c_right->StringValue();
4102 // Prevent a possible exception from an invalid string length.
4103 if (left_string->length() + right_string->length() < String::kMaxLength) {
4104 MaybeHandle<String> concat = isolate->factory()->NewConsString(
4105 c_left->StringValue(), c_right->StringValue());
4106 return HConstant::New(isolate, zone, context, concat.ToHandleChecked());
4110 return new(zone) HStringAdd(
4111 context, left, right, pretenure_flag, flags, allocation_site);
4115 std::ostream& HStringAdd::PrintDataTo(std::ostream& os) const { // NOLINT
4116 if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) {
4117 os << "_CheckBoth";
4118 } else if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_LEFT) {
4119 os << "_CheckLeft";
4120 } else if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_RIGHT) {
4121 os << "_CheckRight";
4122 }
4123 HBinaryOperation::PrintDataTo(os);
4125 if (pretenure_flag() == NOT_TENURED)
4126 os << "N";
4127 else if (pretenure_flag() == TENURED)
4128 os << "D";
4129 return os;
4130 }
4133 HInstruction* HStringCharFromCode::New(Isolate* isolate, Zone* zone,
4134 HValue* context, HValue* char_code) {
4135 if (FLAG_fold_constants && char_code->IsConstant()) {
4136 HConstant* c_code = HConstant::cast(char_code);
4137 if (c_code->HasNumberValue()) {
4138 if (std::isfinite(c_code->DoubleValue())) {
4139 uint32_t code = c_code->NumberValueAsInteger32() & 0xffff;
4140 return HConstant::New(
4141 isolate, zone, context,
4142 isolate->factory()->LookupSingleCharacterStringFromCode(code));
4144 return HConstant::New(isolate, zone, context,
4145 isolate->factory()->empty_string());
4148 return new(zone) HStringCharFromCode(context, char_code);
4152 HInstruction* HUnaryMathOperation::New(Isolate* isolate, Zone* zone,
4153 HValue* context, HValue* value,
4154 BuiltinFunctionId op) {
4155 do {
4156 if (!FLAG_fold_constants) break;
4157 if (!value->IsConstant()) break;
4158 HConstant* constant = HConstant::cast(value);
4159 if (!constant->HasNumberValue()) break;
4160 double d = constant->DoubleValue();
4161 if (std::isnan(d)) { // NaN poisons everything.
4162 return H_CONSTANT_DOUBLE(std::numeric_limits<double>::quiet_NaN());
4163 }
4164 if (std::isinf(d)) { // +Infinity and -Infinity.
4165 switch (op) {
4166 case kMathExp:
4167 return H_CONSTANT_DOUBLE((d > 0.0) ? d : 0.0);
4168 case kMathLog:
4169 case kMathSqrt:
4170 return H_CONSTANT_DOUBLE(
4171 (d > 0.0) ? d : std::numeric_limits<double>::quiet_NaN());
4172 case kMathPowHalf:
4173 case kMathAbs:
4174 return H_CONSTANT_DOUBLE((d > 0.0) ? d : -d);
4175 case kMathRound:
4176 case kMathFround:
4177 case kMathFloor:
4178 return H_CONSTANT_DOUBLE(d);
4179 case kMathClz32:
4180 return H_CONSTANT_INT(32);
4181 default:
4182 UNREACHABLE();
4183 break;
4184 }
4185 }
4186 switch (op) {
4187 case kMathExp:
4188 return H_CONSTANT_DOUBLE(fast_exp(d));
4189 case kMathLog:
4190 return H_CONSTANT_DOUBLE(std::log(d));
4191 case kMathSqrt:
4192 return H_CONSTANT_DOUBLE(fast_sqrt(d));
4193 case kMathPowHalf:
4194 return H_CONSTANT_DOUBLE(power_double_double(d, 0.5));
4195 case kMathAbs:
4196 return H_CONSTANT_DOUBLE((d >= 0.0) ? d + 0.0 : -d);
4197 case kMathRound:
4198 // -0.5 .. -0.0 round to -0.0.
4199 if ((d >= -0.5 && Double(d).Sign() < 0)) return H_CONSTANT_DOUBLE(-0.0);
4200 // Doubles are represented as Significand * 2 ^ Exponent. If the
4201 // Exponent is not negative, the double value is already an integer.
4202 if (Double(d).Exponent() >= 0) return H_CONSTANT_DOUBLE(d);
4203 return H_CONSTANT_DOUBLE(Floor(d + 0.5));
4204 case kMathFround:
4205 return H_CONSTANT_DOUBLE(static_cast<double>(static_cast<float>(d)));
4206 case kMathFloor:
4207 return H_CONSTANT_DOUBLE(Floor(d));
4208 case kMathClz32: {
4209 uint32_t i = DoubleToUint32(d);
4210 return H_CONSTANT_INT(base::bits::CountLeadingZeros32(i));
4211 }
4212 default:
4213 break;
4214 }
4215 } while (false);
4217 return new(zone) HUnaryMathOperation(context, value, op);
4218 }
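// Worked examples for the kMathRound branch above: d = -0.25 satisfies
// d >= -0.5 with a negative sign bit and folds to -0.0; d = 2.5 has a
// negative exponent (2.5 = 5 * 2^-1) and folds to Floor(3.0) = 3; d = 2^52
// already has a non-negative exponent and is returned unchanged.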
4221 Representation HUnaryMathOperation::RepresentationFromUses() {
4222 if (op_ != kMathFloor && op_ != kMathRound) {
4223 return HValue::RepresentationFromUses();
4226 // The instruction can have an int32 or double output. Prefer a double
4227 // representation if there are double uses.
4228 bool use_double = false;
4230 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
4231 HValue* use = it.value();
4232 int use_index = it.index();
4233 Representation rep_observed = use->observed_input_representation(use_index);
4234 Representation rep_required = use->RequiredInputRepresentation(use_index);
4235 use_double |= (rep_observed.IsDouble() || rep_required.IsDouble());
4236 if (use_double && !FLAG_trace_representation) {
4237 // Having seen one double is enough.
4238 break;
4239 }
4240 if (FLAG_trace_representation) {
4241 if (!rep_required.IsDouble() || rep_observed.IsDouble()) {
4242 PrintF("#%d %s is used by #%d %s as %s%s\n",
4243 id(), Mnemonic(), use->id(),
4244 use->Mnemonic(), rep_observed.Mnemonic(),
4245 (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
4246 } else {
4247 PrintF("#%d %s is required by #%d %s as %s%s\n",
4248 id(), Mnemonic(), use->id(),
4249 use->Mnemonic(), rep_required.Mnemonic(),
4250 (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
4254 return use_double ? Representation::Double() : Representation::Integer32();
4258 HInstruction* HPower::New(Isolate* isolate, Zone* zone, HValue* context,
4259 HValue* left, HValue* right) {
4260 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4261 HConstant* c_left = HConstant::cast(left);
4262 HConstant* c_right = HConstant::cast(right);
4263 if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
4264 double result = power_helper(c_left->DoubleValue(),
4265 c_right->DoubleValue());
4266 return H_CONSTANT_DOUBLE(std::isnan(result)
4267 ? std::numeric_limits<double>::quiet_NaN()
4268 : result);
4271 return new(zone) HPower(left, right);
4275 HInstruction* HMathMinMax::New(Isolate* isolate, Zone* zone, HValue* context,
4276 HValue* left, HValue* right, Operation op) {
4277 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4278 HConstant* c_left = HConstant::cast(left);
4279 HConstant* c_right = HConstant::cast(right);
4280 if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
4281 double d_left = c_left->DoubleValue();
4282 double d_right = c_right->DoubleValue();
4283 if (op == kMathMin) {
4284 if (d_left > d_right) return H_CONSTANT_DOUBLE(d_right);
4285 if (d_left < d_right) return H_CONSTANT_DOUBLE(d_left);
4286 if (d_left == d_right) {
4287 // Handle +0 and -0.
4288 return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_left
4289 : d_right);
4290 }
4291 } else {
4292 if (d_left < d_right) return H_CONSTANT_DOUBLE(d_right);
4293 if (d_left > d_right) return H_CONSTANT_DOUBLE(d_left);
4294 if (d_left == d_right) {
4295 // Handle +0 and -0.
4296 return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_right
4297 : d_left);
4298 }
4299 }
4300 // All comparisons failed, must be NaN.
4301 return H_CONSTANT_DOUBLE(std::numeric_limits<double>::quiet_NaN());
4304 return new(zone) HMathMinMax(context, left, right, op);
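// The Sign() checks above exist because +0 and -0 compare equal: Math.min(+0,
// -0) must be -0 and Math.max(+0, -0) must be +0, so when d_left == d_right
// the result is chosen by the sign bit rather than by the comparison.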
4308 HInstruction* HMod::New(Isolate* isolate, Zone* zone, HValue* context,
4309 HValue* left, HValue* right) {
4310 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4311 HConstant* c_left = HConstant::cast(left);
4312 HConstant* c_right = HConstant::cast(right);
4313 if (c_left->HasInteger32Value() && c_right->HasInteger32Value()) {
4314 int32_t dividend = c_left->Integer32Value();
4315 int32_t divisor = c_right->Integer32Value();
4316 if (dividend == kMinInt && divisor == -1) {
4317 return H_CONSTANT_DOUBLE(-0.0);
4318 }
4319 if (divisor != 0) {
4320 int32_t res = dividend % divisor;
4321 if ((res == 0) && (dividend < 0)) {
4322 return H_CONSTANT_DOUBLE(-0.0);
4324 return H_CONSTANT_INT(res);
4328 return new(zone) HMod(context, left, right);
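// Examples for the negative-zero cases above: in JS, (-2147483648) % (-1) is
// -0 and (-4) % 2 is also -0, so both fold to H_CONSTANT_DOUBLE(-0.0) rather
// than to an int32 constant, which cannot represent -0.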
4332 HInstruction* HDiv::New(Isolate* isolate, Zone* zone, HValue* context,
4333 HValue* left, HValue* right) {
4334 // If left and right are constant values, try to return a constant value.
4335 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4336 HConstant* c_left = HConstant::cast(left);
4337 HConstant* c_right = HConstant::cast(right);
4338 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
4339 if (c_right->DoubleValue() != 0) {
4340 double double_res = c_left->DoubleValue() / c_right->DoubleValue();
4341 if (IsInt32Double(double_res)) {
4342 return H_CONSTANT_INT(double_res);
4343 }
4344 return H_CONSTANT_DOUBLE(double_res);
4345 } else {
4346 int sign = Double(c_left->DoubleValue()).Sign() *
4347 Double(c_right->DoubleValue()).Sign(); // Right could be -0.
4348 return H_CONSTANT_DOUBLE(sign * V8_INFINITY);
4352 return new(zone) HDiv(context, left, right);
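// Example for the division-by-zero branch above: 1 / -0 is -Infinity in JS.
// Double::Sign() distinguishes -0 from +0, so multiplying the two signs
// yields the correct signed infinity for every constant zero divisor.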
4356 HInstruction* HBitwise::New(Isolate* isolate, Zone* zone, HValue* context,
4357 Token::Value op, HValue* left, HValue* right) {
4358 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4359 HConstant* c_left = HConstant::cast(left);
4360 HConstant* c_right = HConstant::cast(right);
4361 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
4362 int32_t result;
4363 int32_t v_left = c_left->NumberValueAsInteger32();
4364 int32_t v_right = c_right->NumberValueAsInteger32();
4365 switch (op) {
4366 case Token::BIT_XOR:
4367 result = v_left ^ v_right;
4368 break;
4369 case Token::BIT_AND:
4370 result = v_left & v_right;
4371 break;
4372 case Token::BIT_OR:
4373 result = v_left | v_right;
4374 break;
4375 default:
4376 result = 0; // Please the compiler.
4377 UNREACHABLE();
4378 }
4379 return H_CONSTANT_INT(result);
4382 return new(zone) HBitwise(context, op, left, right);
4386 #define DEFINE_NEW_H_BITWISE_INSTR(HInstr, result) \
4387 HInstruction* HInstr::New(Isolate* isolate, Zone* zone, HValue* context, \
4388 HValue* left, HValue* right) { \
4389 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \
4390 HConstant* c_left = HConstant::cast(left); \
4391 HConstant* c_right = HConstant::cast(right); \
4392 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \
4393 return H_CONSTANT_INT(result); \
4396 return new (zone) HInstr(context, left, right); \
4400 DEFINE_NEW_H_BITWISE_INSTR(HSar,
4401 c_left->NumberValueAsInteger32() >> (c_right->NumberValueAsInteger32() & 0x1f))
4402 DEFINE_NEW_H_BITWISE_INSTR(HShl,
4403 c_left->NumberValueAsInteger32() << (c_right->NumberValueAsInteger32() & 0x1f))
4405 #undef DEFINE_NEW_H_BITWISE_INSTR
4408 HInstruction* HShr::New(Isolate* isolate, Zone* zone, HValue* context,
4409 HValue* left, HValue* right) {
4410 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4411 HConstant* c_left = HConstant::cast(left);
4412 HConstant* c_right = HConstant::cast(right);
4413 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
4414 int32_t left_val = c_left->NumberValueAsInteger32();
4415 int32_t right_val = c_right->NumberValueAsInteger32() & 0x1f;
4416 if ((right_val == 0) && (left_val < 0)) {
4417 return H_CONSTANT_DOUBLE(static_cast<uint32_t>(left_val));
4419 return H_CONSTANT_INT(static_cast<uint32_t>(left_val) >> right_val);
4422 return new(zone) HShr(context, left, right);
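// Example for the zero-shift case above: (-1) >>> 0 is 4294967295, which does
// not fit in an int32, so the folded result must be a double constant. Any
// nonzero shift amount brings the unsigned result back into int32 range and
// can use H_CONSTANT_INT.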
4426 HInstruction* HSeqStringGetChar::New(Isolate* isolate, Zone* zone,
4427 HValue* context, String::Encoding encoding,
4428 HValue* string, HValue* index) {
4429 if (FLAG_fold_constants && string->IsConstant() && index->IsConstant()) {
4430 HConstant* c_string = HConstant::cast(string);
4431 HConstant* c_index = HConstant::cast(index);
4432 if (c_string->HasStringValue() && c_index->HasInteger32Value()) {
4433 Handle<String> s = c_string->StringValue();
4434 int32_t i = c_index->Integer32Value();
4436 DCHECK_LT(i, s->length());
4437 return H_CONSTANT_INT(s->Get(i));
4440 return new(zone) HSeqStringGetChar(encoding, string, index);
4444 #undef H_CONSTANT_INT
4445 #undef H_CONSTANT_DOUBLE
4448 std::ostream& HBitwise::PrintDataTo(std::ostream& os) const { // NOLINT
4449 os << Token::Name(op_) << " ";
4450 return HBitwiseBinaryOperation::PrintDataTo(os);
4454 void HPhi::SimplifyConstantInputs() {
4455 // Convert constant inputs to integers when all uses are truncating.
4456 // This must happen before representation inference takes place.
4457 if (!CheckUsesForFlag(kTruncatingToInt32)) return;
4458 for (int i = 0; i < OperandCount(); ++i) {
4459 if (!OperandAt(i)->IsConstant()) return;
4461 HGraph* graph = block()->graph();
4462 for (int i = 0; i < OperandCount(); ++i) {
4463 HConstant* operand = HConstant::cast(OperandAt(i));
4464 if (operand->HasInteger32Value()) {
4465 continue;
4466 } else if (operand->HasDoubleValue()) {
4467 HConstant* integer_input = HConstant::New(
4468 graph->isolate(), graph->zone(), graph->GetInvalidContext(),
4469 DoubleToInt32(operand->DoubleValue()));
4470 integer_input->InsertAfter(operand);
4471 SetOperandAt(i, integer_input);
4472 } else if (operand->HasBooleanValue()) {
4473 SetOperandAt(i, operand->BooleanValue() ? graph->GetConstant1()
4474 : graph->GetConstant0());
4475 } else if (operand->ImmortalImmovable()) {
4476 SetOperandAt(i, graph->GetConstant0());
4479 // Overwrite observed input representations because they are likely Tagged.
4480 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
4481 HValue* use = it.value();
4482 if (use->IsBinaryOperation()) {
4483 HBinaryOperation::cast(use)->set_observed_input_representation(
4484 it.index(), Representation::Smi());
4490 void HPhi::InferRepresentation(HInferRepresentationPhase* h_infer) {
4491 DCHECK(CheckFlag(kFlexibleRepresentation));
4492 Representation new_rep = RepresentationFromUses();
4493 UpdateRepresentation(new_rep, h_infer, "uses");
4494 new_rep = RepresentationFromInputs();
4495 UpdateRepresentation(new_rep, h_infer, "inputs");
4496 new_rep = RepresentationFromUseRequirements();
4497 UpdateRepresentation(new_rep, h_infer, "use requirements");
4501 Representation HPhi::RepresentationFromInputs() {
4502 bool has_type_feedback =
4503 smi_non_phi_uses() + int32_non_phi_uses() + double_non_phi_uses() > 0;
4504 Representation r = representation();
4505 for (int i = 0; i < OperandCount(); ++i) {
4506 // Ignore conservative Tagged assumption of parameters if we have
4507 // reason to believe that it's too conservative.
4508 if (has_type_feedback && OperandAt(i)->IsParameter()) continue;
4510 r = r.generalize(OperandAt(i)->KnownOptimalRepresentation());
4511 }
4512 return r;
4513 }
4516 // Returns a representation if all uses agree on the same representation.
4517 // Integer32 is also returned when some uses are Smi but others are Integer32.
4518 Representation HValue::RepresentationFromUseRequirements() {
4519 Representation rep = Representation::None();
4520 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
4521 // Ignore the use requirement from code that never runs.
4522 if (it.value()->block()->IsUnreachable()) continue;
4524 // We check for observed_input_representation elsewhere.
4525 Representation use_rep =
4526 it.value()->RequiredInputRepresentation(it.index());
4527 if (rep.IsNone()) {
4528 rep = use_rep;
4529 continue;
4530 }
4531 if (use_rep.IsNone() || rep.Equals(use_rep)) continue;
4532 if (rep.generalize(use_rep).IsInteger32()) {
4533 rep = Representation::Integer32();
4534 continue;
4535 }
4536 return Representation::None();
4537 }
4538 return rep;
4539 }
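// Example of the generalization above: if one use requires Smi and another
// requires Integer32, the result is Integer32 (their generalization); if a
// further use required Double, the requirements conflict and None is
// returned.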
4542 bool HValue::HasNonSmiUse() {
4543 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
4544 // We check for observed_input_representation elsewhere.
4545 Representation use_rep =
4546 it.value()->RequiredInputRepresentation(it.index());
4547 if (!use_rep.IsNone() &&
4548 !use_rep.IsSmi() &&
4549 !use_rep.IsTagged()) {
4550 return true;
4551 }
4552 }
4553 return false;
4554 }
4557 // Node-specific verification code is only included in debug mode.
4560 void HPhi::Verify() {
4561 DCHECK(OperandCount() == block()->predecessors()->length());
4562 for (int i = 0; i < OperandCount(); ++i) {
4563 HValue* value = OperandAt(i);
4564 HBasicBlock* defining_block = value->block();
4565 HBasicBlock* predecessor_block = block()->predecessors()->at(i);
4566 DCHECK(defining_block == predecessor_block ||
4567 defining_block->Dominates(predecessor_block));
4572 void HSimulate::Verify() {
4573 HInstruction::Verify();
4574 DCHECK(HasAstId() || next()->IsEnterInlined());
4578 void HCheckHeapObject::Verify() {
4579 HInstruction::Verify();
4580 DCHECK(HasNoUses());
4584 void HCheckValue::Verify() {
4585 HInstruction::Verify();
4586 DCHECK(HasNoUses());
4592 HObjectAccess HObjectAccess::ForFixedArrayHeader(int offset) {
4593 DCHECK(offset >= 0);
4594 DCHECK(offset < FixedArray::kHeaderSize);
4595 if (offset == FixedArray::kLengthOffset) return ForFixedArrayLength();
4596 return HObjectAccess(kInobject, offset);
4600 HObjectAccess HObjectAccess::ForMapAndOffset(Handle<Map> map, int offset,
4601 Representation representation) {
4602 DCHECK(offset >= 0);
4603 Portion portion = kInobject;
4605 if (offset == JSObject::kElementsOffset) {
4606 portion = kElementsPointer;
4607 } else if (offset == JSObject::kMapOffset) {
4608 portion = kMaps;
4609 }
4610 bool existing_inobject_property = true;
4611 if (!map.is_null()) {
4612 existing_inobject_property = (offset <
4613 map->instance_size() - map->unused_property_fields() * kPointerSize);
4615 return HObjectAccess(portion, offset, representation, Handle<String>::null(),
4616 false, existing_inobject_property);
4620 HObjectAccess HObjectAccess::ForAllocationSiteOffset(int offset) {
4621 switch (offset) {
4622 case AllocationSite::kTransitionInfoOffset:
4623 return HObjectAccess(kInobject, offset, Representation::Tagged());
4624 case AllocationSite::kNestedSiteOffset:
4625 return HObjectAccess(kInobject, offset, Representation::Tagged());
4626 case AllocationSite::kPretenureDataOffset:
4627 return HObjectAccess(kInobject, offset, Representation::Smi());
4628 case AllocationSite::kPretenureCreateCountOffset:
4629 return HObjectAccess(kInobject, offset, Representation::Smi());
4630 case AllocationSite::kDependentCodeOffset:
4631 return HObjectAccess(kInobject, offset, Representation::Tagged());
4632 case AllocationSite::kWeakNextOffset:
4633 return HObjectAccess(kInobject, offset, Representation::Tagged());
4634 default:
4635 UNREACHABLE();
4636 }
4637 return HObjectAccess(kInobject, offset);
4641 HObjectAccess HObjectAccess::ForContextSlot(int index) {
4643 Portion portion = kInobject;
4644 int offset = Context::kHeaderSize + index * kPointerSize;
4645 DCHECK_EQ(offset, Context::SlotOffset(index) + kHeapObjectTag);
4646 return HObjectAccess(portion, offset, Representation::Tagged());
4650 HObjectAccess HObjectAccess::ForScriptContext(int index) {
4652 Portion portion = kInobject;
4653 int offset = ScriptContextTable::GetContextOffset(index);
4654 return HObjectAccess(portion, offset, Representation::Tagged());
4658 HObjectAccess HObjectAccess::ForJSArrayOffset(int offset) {
4659 DCHECK(offset >= 0);
4660 Portion portion = kInobject;
4662 if (offset == JSObject::kElementsOffset) {
4663 portion = kElementsPointer;
4664 } else if (offset == JSArray::kLengthOffset) {
4665 portion = kArrayLengths;
4666 } else if (offset == JSObject::kMapOffset) {
4667 portion = kMaps;
4668 }
4669 return HObjectAccess(portion, offset);
4673 HObjectAccess HObjectAccess::ForBackingStoreOffset(int offset,
4674 Representation representation) {
4675 DCHECK(offset >= 0);
4676 return HObjectAccess(kBackingStore, offset, representation,
4677 Handle<String>::null(), false, false);
4681 HObjectAccess HObjectAccess::ForField(Handle<Map> map, int index,
4682 Representation representation,
4683 Handle<String> name) {
4684 if (index < 0) {
4685 // Negative property indices are in-object properties, indexed
4686 // from the end of the fixed part of the object.
4687 int offset = (index * kPointerSize) + map->instance_size();
4688 return HObjectAccess(kInobject, offset, representation, name, false, true);
4689 } else {
4690 // Non-negative property indices are in the properties array.
4691 int offset = (index * kPointerSize) + FixedArray::kHeaderSize;
4692 return HObjectAccess(kBackingStore, offset, representation, name,
4693 false, false);
4694 }
4695 }
4698 HObjectAccess HObjectAccess::ForCellPayload(Isolate* isolate) {
4699 return HObjectAccess(kInobject, Cell::kValueOffset, Representation::Tagged(),
4700 isolate->factory()->cell_value_string());
4704 void HObjectAccess::SetGVNFlags(HValue *instr, PropertyAccessType access_type) {
4705 // set the appropriate GVN flags for a given load or store instruction
4706 if (access_type == STORE) {
4707 // track dominating allocations in order to eliminate write barriers
4708 instr->SetDependsOnFlag(::v8::internal::kNewSpacePromotion);
4709 instr->SetFlag(HValue::kTrackSideEffectDominators);
4710 } else {
4711 // try to GVN loads, but don't hoist above map changes
4712 instr->SetFlag(HValue::kUseGVN);
4713 instr->SetDependsOnFlag(::v8::internal::kMaps);
4716 switch (portion()) {
4718 if (access_type == STORE) {
4719 instr->SetChangesFlag(::v8::internal::kArrayLengths);
4721 instr->SetDependsOnFlag(::v8::internal::kArrayLengths);
4724 case kStringLengths:
4725 if (access_type == STORE) {
4726 instr->SetChangesFlag(::v8::internal::kStringLengths);
4728 instr->SetDependsOnFlag(::v8::internal::kStringLengths);
4732 if (access_type == STORE) {
4733 instr->SetChangesFlag(::v8::internal::kInobjectFields);
4735 instr->SetDependsOnFlag(::v8::internal::kInobjectFields);
4739 if (access_type == STORE) {
4740 instr->SetChangesFlag(::v8::internal::kDoubleFields);
4742 instr->SetDependsOnFlag(::v8::internal::kDoubleFields);
4746 if (access_type == STORE) {
4747 instr->SetChangesFlag(::v8::internal::kBackingStoreFields);
4749 instr->SetDependsOnFlag(::v8::internal::kBackingStoreFields);
4752 case kElementsPointer:
4753 if (access_type == STORE) {
4754 instr->SetChangesFlag(::v8::internal::kElementsPointer);
4756 instr->SetDependsOnFlag(::v8::internal::kElementsPointer);
4760 if (access_type == STORE) {
4761 instr->SetChangesFlag(::v8::internal::kMaps);
4763 instr->SetDependsOnFlag(::v8::internal::kMaps);
4766 case kExternalMemory:
4767 if (access_type == STORE) {
4768 instr->SetChangesFlag(::v8::internal::kExternalMemory);
4770 instr->SetDependsOnFlag(::v8::internal::kExternalMemory);
4777 std::ostream& operator<<(std::ostream& os, const HObjectAccess& access) {
4780 switch (access.portion()) {
4781 case HObjectAccess::kArrayLengths:
4782 case HObjectAccess::kStringLengths:
4785 case HObjectAccess::kElementsPointer:
4788 case HObjectAccess::kMaps:
4791 case HObjectAccess::kDouble: // fall through
4792 case HObjectAccess::kInobject:
4793 if (!access.name().is_null()) {
4794 os << Handle<String>::cast(access.name())->ToCString().get();
4796 os << "[in-object]";
4798 case HObjectAccess::kBackingStore:
4799 if (!access.name().is_null()) {
4800 os << Handle<String>::cast(access.name())->ToCString().get();
4802 os << "[backing-store]";
4804 case HObjectAccess::kExternalMemory:
4805 os << "[external-memory]";
4809 return os << "@" << access.offset();
4812 } } // namespace v8::internal