1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 #include "hydrogen-infer-representation.h"
33 #include "property-details-inl.h"
35 #if V8_TARGET_ARCH_IA32
36 #include "ia32/lithium-ia32.h"
37 #elif V8_TARGET_ARCH_X64
38 #include "x64/lithium-x64.h"
39 #elif V8_TARGET_ARCH_ARM64
40 #include "arm64/lithium-arm64.h"
41 #elif V8_TARGET_ARCH_ARM
42 #include "arm/lithium-arm.h"
43 #elif V8_TARGET_ARCH_MIPS
44 #include "mips/lithium-mips.h"
46 #error Unsupported target architecture.
52 #define DEFINE_COMPILE(type) \
53 LInstruction* H##type::CompileToLithium(LChunkBuilder* builder) { \
54 return builder->Do##type(this); \
56 HYDROGEN_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
60 Isolate* HValue::isolate() const {
61 ASSERT(block() != NULL);
62 return block()->isolate();
66 void HValue::AssumeRepresentation(Representation r) {
67 if (CheckFlag(kFlexibleRepresentation)) {
68 ChangeRepresentation(r);
69 // The representation of the value is dictated by type feedback and
70 // will not be changed later.
71 ClearFlag(kFlexibleRepresentation);
76 void HValue::InferRepresentation(HInferRepresentationPhase* h_infer) {
77 ASSERT(CheckFlag(kFlexibleRepresentation));
78 Representation new_rep = RepresentationFromInputs();
79 UpdateRepresentation(new_rep, h_infer, "inputs");
80 new_rep = RepresentationFromUses();
81 UpdateRepresentation(new_rep, h_infer, "uses");
82 if (representation().IsSmi() && HasNonSmiUse()) {
84 Representation::Integer32(), h_infer, "use requirements");
89 Representation HValue::RepresentationFromUses() {
90 if (HasNoUses()) return Representation::None();
92 // Array of use counts for each representation.
93 int use_count[Representation::kNumRepresentations] = { 0 };
95 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
96 HValue* use = it.value();
97 Representation rep = use->observed_input_representation(it.index());
98 if (rep.IsNone()) continue;
99 if (FLAG_trace_representation) {
100 PrintF("#%d %s is used by #%d %s as %s%s\n",
101 id(), Mnemonic(), use->id(), use->Mnemonic(), rep.Mnemonic(),
102 (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
104 use_count[rep.kind()] += 1;
106 if (IsPhi()) HPhi::cast(this)->AddIndirectUsesTo(&use_count[0]);
107 int tagged_count = use_count[Representation::kTagged];
108 int double_count = use_count[Representation::kDouble];
109 int int32_count = use_count[Representation::kInteger32];
110 int smi_count = use_count[Representation::kSmi];
112 if (tagged_count > 0) return Representation::Tagged();
113 if (double_count > 0) return Representation::Double();
114 if (int32_count > 0) return Representation::Integer32();
115 if (smi_count > 0) return Representation::Smi();
117 return Representation::None();
121 void HValue::UpdateRepresentation(Representation new_rep,
122 HInferRepresentationPhase* h_infer,
123 const char* reason) {
124 Representation r = representation();
125 if (new_rep.is_more_general_than(r)) {
126 if (CheckFlag(kCannotBeTagged) && new_rep.IsTagged()) return;
127 if (FLAG_trace_representation) {
128 PrintF("Changing #%d %s representation %s -> %s based on %s\n",
129 id(), Mnemonic(), r.Mnemonic(), new_rep.Mnemonic(), reason);
131 ChangeRepresentation(new_rep);
132 AddDependantsToWorklist(h_infer);
137 void HValue::AddDependantsToWorklist(HInferRepresentationPhase* h_infer) {
138 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
139 h_infer->AddToWorklist(it.value());
141 for (int i = 0; i < OperandCount(); ++i) {
142 h_infer->AddToWorklist(OperandAt(i));
147 static int32_t ConvertAndSetOverflow(Representation r,
151 if (result > Smi::kMaxValue) {
153 return Smi::kMaxValue;
155 if (result < Smi::kMinValue) {
157 return Smi::kMinValue;
160 if (result > kMaxInt) {
164 if (result < kMinInt) {
169 return static_cast<int32_t>(result);
173 static int32_t AddWithoutOverflow(Representation r,
177 int64_t result = static_cast<int64_t>(a) + static_cast<int64_t>(b);
178 return ConvertAndSetOverflow(r, result, overflow);
182 static int32_t SubWithoutOverflow(Representation r,
186 int64_t result = static_cast<int64_t>(a) - static_cast<int64_t>(b);
187 return ConvertAndSetOverflow(r, result, overflow);
191 static int32_t MulWithoutOverflow(const Representation& r,
195 int64_t result = static_cast<int64_t>(a) * static_cast<int64_t>(b);
196 return ConvertAndSetOverflow(r, result, overflow);
200 int32_t Range::Mask() const {
201 if (lower_ == upper_) return lower_;
204 while (res < upper_) {
205 res = (res << 1) | 1;
213 void Range::AddConstant(int32_t value) {
214 if (value == 0) return;
215 bool may_overflow = false; // Overflow is ignored here.
216 Representation r = Representation::Integer32();
217 lower_ = AddWithoutOverflow(r, lower_, value, &may_overflow);
218 upper_ = AddWithoutOverflow(r, upper_, value, &may_overflow);
225 void Range::Intersect(Range* other) {
226 upper_ = Min(upper_, other->upper_);
227 lower_ = Max(lower_, other->lower_);
228 bool b = CanBeMinusZero() && other->CanBeMinusZero();
229 set_can_be_minus_zero(b);
233 void Range::Union(Range* other) {
234 upper_ = Max(upper_, other->upper_);
235 lower_ = Min(lower_, other->lower_);
236 bool b = CanBeMinusZero() || other->CanBeMinusZero();
237 set_can_be_minus_zero(b);
241 void Range::CombinedMax(Range* other) {
242 upper_ = Max(upper_, other->upper_);
243 lower_ = Max(lower_, other->lower_);
244 set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
248 void Range::CombinedMin(Range* other) {
249 upper_ = Min(upper_, other->upper_);
250 lower_ = Min(lower_, other->lower_);
251 set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
255 void Range::Sar(int32_t value) {
256 int32_t bits = value & 0x1F;
257 lower_ = lower_ >> bits;
258 upper_ = upper_ >> bits;
259 set_can_be_minus_zero(false);
263 void Range::Shl(int32_t value) {
264 int32_t bits = value & 0x1F;
265 int old_lower = lower_;
266 int old_upper = upper_;
267 lower_ = lower_ << bits;
268 upper_ = upper_ << bits;
269 if (old_lower != lower_ >> bits || old_upper != upper_ >> bits) {
273 set_can_be_minus_zero(false);
277 bool Range::AddAndCheckOverflow(const Representation& r, Range* other) {
278 bool may_overflow = false;
279 lower_ = AddWithoutOverflow(r, lower_, other->lower(), &may_overflow);
280 upper_ = AddWithoutOverflow(r, upper_, other->upper(), &may_overflow);
289 bool Range::SubAndCheckOverflow(const Representation& r, Range* other) {
290 bool may_overflow = false;
291 lower_ = SubWithoutOverflow(r, lower_, other->upper(), &may_overflow);
292 upper_ = SubWithoutOverflow(r, upper_, other->lower(), &may_overflow);
301 void Range::KeepOrder() {
302 if (lower_ > upper_) {
303 int32_t tmp = lower_;
311 void Range::Verify() const {
312 ASSERT(lower_ <= upper_);
317 bool Range::MulAndCheckOverflow(const Representation& r, Range* other) {
318 bool may_overflow = false;
319 int v1 = MulWithoutOverflow(r, lower_, other->lower(), &may_overflow);
320 int v2 = MulWithoutOverflow(r, lower_, other->upper(), &may_overflow);
321 int v3 = MulWithoutOverflow(r, upper_, other->lower(), &may_overflow);
322 int v4 = MulWithoutOverflow(r, upper_, other->upper(), &may_overflow);
323 lower_ = Min(Min(v1, v2), Min(v3, v4));
324 upper_ = Max(Max(v1, v2), Max(v3, v4));
332 const char* HType::ToString() {
333 // Note: The c1visualizer syntax for locals allows only a sequence of the
334 // following characters: A-Za-z0-9_-|:
336 case kNone: return "none";
337 case kTagged: return "tagged";
338 case kTaggedPrimitive: return "primitive";
339 case kTaggedNumber: return "number";
340 case kSmi: return "smi";
341 case kHeapNumber: return "heap-number";
342 case kFloat32x4: return "float32x4";
343 case kInt32x4: return "int32x4";
344 case kString: return "string";
345 case kBoolean: return "boolean";
346 case kNonPrimitive: return "non-primitive";
347 case kJSArray: return "array";
348 case kJSObject: return "object";
351 return "unreachable";
355 HType HType::TypeFromValue(Handle<Object> value) {
356 HType result = HType::Tagged();
357 if (value->IsSmi()) {
358 result = HType::Smi();
359 } else if (value->IsHeapNumber()) {
360 result = HType::HeapNumber();
361 } else if (value->IsFloat32x4()) {
362 result = HType::Float32x4();
363 } else if (value->IsInt32x4()) {
364 result = HType::Int32x4();
365 } else if (value->IsString()) {
366 result = HType::String();
367 } else if (value->IsBoolean()) {
368 result = HType::Boolean();
369 } else if (value->IsJSObject()) {
370 result = HType::JSObject();
371 } else if (value->IsJSArray()) {
372 result = HType::JSArray();
378 HType HType::TypeFromRepresentation(Representation representation) {
379 HType result = HType::Tagged();
380 if (representation.IsSmi()) {
381 result = HType::Smi();
382 } else if (representation.IsDouble()) {
383 result = HType::HeapNumber();
384 } else if (representation.IsFloat32x4()) {
385 result = HType::Float32x4();
386 } else if (representation.IsInt32x4()) {
387 result = HType::Int32x4();
393 bool HValue::IsDefinedAfter(HBasicBlock* other) const {
394 return block()->block_id() > other->block_id();
398 HUseListNode* HUseListNode::tail() {
399 // Skip and remove dead items in the use list.
400 while (tail_ != NULL && tail_->value()->CheckFlag(HValue::kIsDead)) {
401 tail_ = tail_->tail_;
407 bool HValue::CheckUsesForFlag(Flag f) const {
408 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
409 if (it.value()->IsSimulate()) continue;
410 if (!it.value()->CheckFlag(f)) return false;
416 bool HValue::CheckUsesForFlag(Flag f, HValue** value) const {
417 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
418 if (it.value()->IsSimulate()) continue;
419 if (!it.value()->CheckFlag(f)) {
428 bool HValue::HasAtLeastOneUseWithFlagAndNoneWithout(Flag f) const {
429 bool return_value = false;
430 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
431 if (it.value()->IsSimulate()) continue;
432 if (!it.value()->CheckFlag(f)) return false;
439 HUseIterator::HUseIterator(HUseListNode* head) : next_(head) {
444 void HUseIterator::Advance() {
446 if (current_ != NULL) {
447 next_ = current_->tail();
448 value_ = current_->value();
449 index_ = current_->index();
454 int HValue::UseCount() const {
456 for (HUseIterator it(uses()); !it.Done(); it.Advance()) ++count;
461 HUseListNode* HValue::RemoveUse(HValue* value, int index) {
462 HUseListNode* previous = NULL;
463 HUseListNode* current = use_list_;
464 while (current != NULL) {
465 if (current->value() == value && current->index() == index) {
466 if (previous == NULL) {
467 use_list_ = current->tail();
469 previous->set_tail(current->tail());
475 current = current->tail();
479 // Do not reuse use list nodes in debug mode, zap them.
480 if (current != NULL) {
483 HUseListNode(current->value(), current->index(), NULL);
492 bool HValue::Equals(HValue* other) {
493 if (other->opcode() != opcode()) return false;
494 if (!other->representation().Equals(representation())) return false;
495 if (!other->type_.Equals(type_)) return false;
496 if (other->flags() != flags()) return false;
497 if (OperandCount() != other->OperandCount()) return false;
498 for (int i = 0; i < OperandCount(); ++i) {
499 if (OperandAt(i)->id() != other->OperandAt(i)->id()) return false;
501 bool result = DataEquals(other);
502 ASSERT(!result || Hashcode() == other->Hashcode());
507 intptr_t HValue::Hashcode() {
508 intptr_t result = opcode();
509 int count = OperandCount();
510 for (int i = 0; i < count; ++i) {
511 result = result * 19 + OperandAt(i)->id() + (result >> 7);
517 const char* HValue::Mnemonic() const {
519 #define MAKE_CASE(type) case k##type: return #type;
520 HYDROGEN_CONCRETE_INSTRUCTION_LIST(MAKE_CASE)
522 case kPhi: return "Phi";
528 bool HValue::CanReplaceWithDummyUses() {
529 return FLAG_unreachable_code_elimination &&
530 !(block()->IsReachable() ||
532 IsControlInstruction() ||
539 bool HValue::IsInteger32Constant() {
540 return IsConstant() && HConstant::cast(this)->HasInteger32Value();
544 int32_t HValue::GetInteger32Constant() {
545 return HConstant::cast(this)->Integer32Value();
549 bool HValue::EqualsInteger32Constant(int32_t value) {
550 return IsInteger32Constant() && GetInteger32Constant() == value;
554 void HValue::SetOperandAt(int index, HValue* value) {
555 RegisterUse(index, value);
556 InternalSetOperandAt(index, value);
560 void HValue::DeleteAndReplaceWith(HValue* other) {
561 // We replace all uses first, so Delete can assert that there are none.
562 if (other != NULL) ReplaceAllUsesWith(other);
568 void HValue::ReplaceAllUsesWith(HValue* other) {
569 while (use_list_ != NULL) {
570 HUseListNode* list_node = use_list_;
571 HValue* value = list_node->value();
572 ASSERT(!value->block()->IsStartBlock());
573 value->InternalSetOperandAt(list_node->index(), other);
574 use_list_ = list_node->tail();
575 list_node->set_tail(other->use_list_);
576 other->use_list_ = list_node;
581 void HValue::Kill() {
582 // Instead of going through the entire use list of each operand, we only
583 // check the first item in each use list and rely on the tail() method to
584 // skip dead items, removing them lazily next time we traverse the list.
586 for (int i = 0; i < OperandCount(); ++i) {
587 HValue* operand = OperandAt(i);
588 if (operand == NULL) continue;
589 HUseListNode* first = operand->use_list_;
590 if (first != NULL && first->value()->CheckFlag(kIsDead)) {
591 operand->use_list_ = first->tail();
597 void HValue::SetBlock(HBasicBlock* block) {
598 ASSERT(block_ == NULL || block == NULL);
600 if (id_ == kNoNumber && block != NULL) {
601 id_ = block->graph()->GetNextValueID(this);
606 void HValue::PrintTypeTo(StringStream* stream) {
607 if (!representation().IsTagged() || type().Equals(HType::Tagged())) return;
608 stream->Add(" type:%s", type().ToString());
612 void HValue::PrintRangeTo(StringStream* stream) {
613 if (range() == NULL || range()->IsMostGeneric()) return;
614 // Note: The c1visualizer syntax for locals allows only a sequence of the
615 // following characters: A-Za-z0-9_-|:
616 stream->Add(" range:%d_%d%s",
619 range()->CanBeMinusZero() ? "_m0" : "");
623 void HValue::PrintChangesTo(StringStream* stream) {
624 GVNFlagSet changes_flags = ChangesFlags();
625 if (changes_flags.IsEmpty()) return;
626 stream->Add(" changes[");
627 if (changes_flags == AllSideEffectsFlagSet()) {
630 bool add_comma = false;
631 #define PRINT_DO(Type) \
632 if (changes_flags.Contains(k##Type)) { \
633 if (add_comma) stream->Add(","); \
635 stream->Add(#Type); \
637 GVN_TRACKED_FLAG_LIST(PRINT_DO);
638 GVN_UNTRACKED_FLAG_LIST(PRINT_DO);
645 void HValue::PrintNameTo(StringStream* stream) {
646 stream->Add("%s%d", representation_.Mnemonic(), id());
650 bool HValue::HasMonomorphicJSObjectType() {
651 return !GetMonomorphicJSObjectMap().is_null();
655 bool HValue::UpdateInferredType() {
656 HType type = CalculateInferredType();
657 bool result = (!type.Equals(type_));
663 void HValue::RegisterUse(int index, HValue* new_value) {
664 HValue* old_value = OperandAt(index);
665 if (old_value == new_value) return;
667 HUseListNode* removed = NULL;
668 if (old_value != NULL) {
669 removed = old_value->RemoveUse(this, index);
672 if (new_value != NULL) {
673 if (removed == NULL) {
674 new_value->use_list_ = new(new_value->block()->zone()) HUseListNode(
675 this, index, new_value->use_list_);
677 removed->set_tail(new_value->use_list_);
678 new_value->use_list_ = removed;
684 void HValue::AddNewRange(Range* r, Zone* zone) {
685 if (!HasRange()) ComputeInitialRange(zone);
686 if (!HasRange()) range_ = new(zone) Range();
688 r->StackUpon(range_);
693 void HValue::RemoveLastAddedRange() {
695 ASSERT(range_->next() != NULL);
696 range_ = range_->next();
700 void HValue::ComputeInitialRange(Zone* zone) {
702 range_ = InferRange(zone);
707 void HSourcePosition::PrintTo(FILE* out) {
711 if (FLAG_hydrogen_track_positions) {
712 PrintF(out, "<%d:%d>", inlining_id(), position());
714 PrintF(out, "<0:%d>", raw());
720 void HInstruction::PrintTo(StringStream* stream) {
721 PrintMnemonicTo(stream);
723 PrintRangeTo(stream);
724 PrintChangesTo(stream);
726 if (CheckFlag(HValue::kHasNoObservableSideEffects)) {
727 stream->Add(" [noOSE]");
729 if (CheckFlag(HValue::kIsDead)) {
730 stream->Add(" [dead]");
735 void HInstruction::PrintDataTo(StringStream *stream) {
736 for (int i = 0; i < OperandCount(); ++i) {
737 if (i > 0) stream->Add(" ");
738 OperandAt(i)->PrintNameTo(stream);
743 void HInstruction::PrintMnemonicTo(StringStream* stream) {
744 stream->Add("%s ", Mnemonic());
748 void HInstruction::Unlink() {
750 ASSERT(!IsControlInstruction()); // Must never move control instructions.
751 ASSERT(!IsBlockEntry()); // Doesn't make sense to delete these.
752 ASSERT(previous_ != NULL);
753 previous_->next_ = next_;
755 ASSERT(block()->last() == this);
756 block()->set_last(previous_);
758 next_->previous_ = previous_;
764 void HInstruction::InsertBefore(HInstruction* next) {
766 ASSERT(!next->IsBlockEntry());
767 ASSERT(!IsControlInstruction());
768 ASSERT(!next->block()->IsStartBlock());
769 ASSERT(next->previous_ != NULL);
770 HInstruction* prev = next->previous();
772 next->previous_ = this;
775 SetBlock(next->block());
776 if (!has_position() && next->has_position()) {
777 set_position(next->position());
782 void HInstruction::InsertAfter(HInstruction* previous) {
784 ASSERT(!previous->IsControlInstruction());
785 ASSERT(!IsControlInstruction() || previous->next_ == NULL);
786 HBasicBlock* block = previous->block();
787 // Never insert anything except constants into the start block after finishing
789 if (block->IsStartBlock() && block->IsFinished() && !IsConstant()) {
790 ASSERT(block->end()->SecondSuccessor() == NULL);
791 InsertAfter(block->end()->FirstSuccessor()->first());
795 // If we're inserting after an instruction with side-effects that is
796 // followed by a simulate instruction, we need to insert after the
797 // simulate instruction instead.
798 HInstruction* next = previous->next_;
799 if (previous->HasObservableSideEffects() && next != NULL) {
800 ASSERT(next->IsSimulate());
802 next = previous->next_;
805 previous_ = previous;
808 previous->next_ = this;
809 if (next != NULL) next->previous_ = this;
810 if (block->last() == previous) {
811 block->set_last(this);
813 if (!has_position() && previous->has_position()) {
814 set_position(previous->position());
820 void HInstruction::Verify() {
821 // Verify that input operands are defined before use.
822 HBasicBlock* cur_block = block();
823 for (int i = 0; i < OperandCount(); ++i) {
824 HValue* other_operand = OperandAt(i);
825 if (other_operand == NULL) continue;
826 HBasicBlock* other_block = other_operand->block();
827 if (cur_block == other_block) {
828 if (!other_operand->IsPhi()) {
829 HInstruction* cur = this->previous();
830 while (cur != NULL) {
831 if (cur == other_operand) break;
832 cur = cur->previous();
834 // Must reach other operand in the same block!
835 ASSERT(cur == other_operand);
838 // If the following assert fires, you may have forgotten an
840 ASSERT(other_block->Dominates(cur_block));
844 // Verify that instructions that may have side-effects are followed
845 // by a simulate instruction.
846 if (HasObservableSideEffects() && !IsOsrEntry()) {
847 ASSERT(next()->IsSimulate());
850 // Verify that instructions that can be eliminated by GVN have overridden
851 // HValue::DataEquals. The default implementation is UNREACHABLE. We
852 // don't actually care whether DataEquals returns true or false here.
853 if (CheckFlag(kUseGVN)) DataEquals(this);
855 // Verify that all uses are in the graph.
856 for (HUseIterator use = uses(); !use.Done(); use.Advance()) {
857 if (use.value()->IsInstruction()) {
858 ASSERT(HInstruction::cast(use.value())->IsLinked());
865 static bool HasPrimitiveRepresentation(HValue* instr) {
866 return instr->representation().IsInteger32() ||
867 instr->representation().IsDouble();
// Conservatively classifies whether this instruction can trigger a
// deoptimization: the long allow-list of opcodes below cannot, a few
// arithmetic/conversion opcodes cannot as long as they (and their inputs)
// have unboxed int32/double representations, and everything else is assumed
// able to deopt.
// NOTE(review): this copy of the function appears to have lines elided
// (the switch header on opcode(), several case labels, the return
// statements, and the default arm are missing) — restore from the original
// before compiling.
bool HInstruction::CanDeoptimize() {
  // TODO(titzer): make this a virtual method?
    // Opcodes listed here never deoptimize.
    case HValue::kAccessArgumentsAt:
    case HValue::kApplyArguments:
    case HValue::kArgumentsElements:
    case HValue::kArgumentsLength:
    case HValue::kArgumentsObject:
    case HValue::kBoundsCheckBaseIndexInformation:
    case HValue::kCapturedObject:
    case HValue::kClampToUint8:
    case HValue::kConstant:
    case HValue::kContext:
    case HValue::kDateField:
    case HValue::kDebugBreak:
    case HValue::kDeclareGlobals:
    case HValue::kDummyUse:
    case HValue::kEnterInlined:
    case HValue::kEnvironmentMarker:
    case HValue::kForInCacheArray:
    case HValue::kForInPrepareMap:
    case HValue::kFunctionLiteral:
    case HValue::kGetCachedArrayIndex:
    case HValue::kInnerAllocatedObject:
    case HValue::kInstanceOf:
    case HValue::kInstanceOfKnownGlobal:
    case HValue::kInvokeFunction:
    case HValue::kLeaveInlined:
    case HValue::kLoadContextSlot:
    case HValue::kLoadFieldByIndex:
    case HValue::kLoadFunctionPrototype:
    case HValue::kLoadGlobalCell:
    case HValue::kLoadGlobalGeneric:
    case HValue::kLoadKeyed:
    case HValue::kLoadKeyedGeneric:
    case HValue::kLoadNamedField:
    case HValue::kLoadNamedGeneric:
    case HValue::kLoadRoot:
    case HValue::kMapEnumLength:
    case HValue::kMathFloorOfDiv:
    case HValue::kMathMinMax:
    case HValue::kOsrEntry:
    case HValue::kParameter:
    case HValue::kPushArgument:
    case HValue::kSeqStringGetChar:
    case HValue::kSeqStringSetChar:
    case HValue::kSimulate:
    case HValue::kStackCheck:
    case HValue::kStoreCodeEntry:
    case HValue::kStoreContextSlot:
    case HValue::kStoreGlobalCell:
    case HValue::kStoreKeyed:
    case HValue::kStoreKeyedGeneric:
    case HValue::kStoreNamedField:
    case HValue::kStoreNamedGeneric:
    case HValue::kStringAdd:
    case HValue::kStringCharCodeAt:
    case HValue::kStringCharFromCode:
    case HValue::kThisFunction:
    case HValue::kToFastProperties:
    case HValue::kTransitionElementsKind:
    case HValue::kTrapAllocationMemento:
    case HValue::kTypeof:
    case HValue::kUnaryMathOperation:
    case HValue::kUseConst:
    case HValue::kWrapReceiver:
    case HValue::kForceRepresentation:
    case HValue::kBitwise:
    case HValue::kChange:
    case HValue::kCompareGeneric:
      // These instructions might deoptimize if they are not primitive.
      if (!HasPrimitiveRepresentation(this)) return true;
      for (int i = 0; i < OperandCount(); i++) {
        HValue* input = OperandAt(i);
        if (!HasPrimitiveRepresentation(input)) return true;
      }
966 void HDummyUse::PrintDataTo(StringStream* stream) {
967 value()->PrintNameTo(stream);
971 void HEnvironmentMarker::PrintDataTo(StringStream* stream) {
972 stream->Add("%s var[%d]", kind() == BIND ? "bind" : "lookup", index());
976 void HUnaryCall::PrintDataTo(StringStream* stream) {
977 value()->PrintNameTo(stream);
979 stream->Add("#%d", argument_count());
983 void HCallJSFunction::PrintDataTo(StringStream* stream) {
984 function()->PrintNameTo(stream);
986 stream->Add("#%d", argument_count());
990 HCallJSFunction* HCallJSFunction::New(
995 bool pass_argument_count) {
996 bool has_stack_check = false;
997 if (function->IsConstant()) {
998 HConstant* fun_const = HConstant::cast(function);
999 Handle<JSFunction> jsfun =
1000 Handle<JSFunction>::cast(fun_const->handle(zone->isolate()));
1001 has_stack_check = !jsfun.is_null() &&
1002 (jsfun->code()->kind() == Code::FUNCTION ||
1003 jsfun->code()->kind() == Code::OPTIMIZED_FUNCTION);
1006 return new(zone) HCallJSFunction(
1007 function, argument_count, pass_argument_count,
1014 void HBinaryCall::PrintDataTo(StringStream* stream) {
1015 first()->PrintNameTo(stream);
1017 second()->PrintNameTo(stream);
1019 stream->Add("#%d", argument_count());
1023 void HBoundsCheck::ApplyIndexChange() {
1024 if (skip_check()) return;
1026 DecompositionResult decomposition;
1027 bool index_is_decomposable = index()->TryDecompose(&decomposition);
1028 if (index_is_decomposable) {
1029 ASSERT(decomposition.base() == base());
1030 if (decomposition.offset() == offset() &&
1031 decomposition.scale() == scale()) return;
1036 ReplaceAllUsesWith(index());
1038 HValue* current_index = decomposition.base();
1039 int actual_offset = decomposition.offset() + offset();
1040 int actual_scale = decomposition.scale() + scale();
1042 Zone* zone = block()->graph()->zone();
1043 HValue* context = block()->graph()->GetInvalidContext();
1044 if (actual_offset != 0) {
1045 HConstant* add_offset = HConstant::New(zone, context, actual_offset);
1046 add_offset->InsertBefore(this);
1047 HInstruction* add = HAdd::New(zone, context,
1048 current_index, add_offset);
1049 add->InsertBefore(this);
1050 add->AssumeRepresentation(index()->representation());
1051 add->ClearFlag(kCanOverflow);
1052 current_index = add;
1055 if (actual_scale != 0) {
1056 HConstant* sar_scale = HConstant::New(zone, context, actual_scale);
1057 sar_scale->InsertBefore(this);
1058 HInstruction* sar = HSar::New(zone, context,
1059 current_index, sar_scale);
1060 sar->InsertBefore(this);
1061 sar->AssumeRepresentation(index()->representation());
1062 current_index = sar;
1065 SetOperandAt(0, current_index);
1073 void HBoundsCheck::PrintDataTo(StringStream* stream) {
1074 index()->PrintNameTo(stream);
1076 length()->PrintNameTo(stream);
1077 if (base() != NULL && (offset() != 0 || scale() != 0)) {
1078 stream->Add(" base: ((");
1079 if (base() != index()) {
1080 index()->PrintNameTo(stream);
1082 stream->Add("index");
1084 stream->Add(" + %d) >> %d)", offset(), scale());
1087 stream->Add(" [DISABLED]");
1092 void HBoundsCheck::InferRepresentation(HInferRepresentationPhase* h_infer) {
1093 ASSERT(CheckFlag(kFlexibleRepresentation));
1094 HValue* actual_index = index()->ActualValue();
1095 HValue* actual_length = length()->ActualValue();
1096 Representation index_rep = actual_index->representation();
1097 Representation length_rep = actual_length->representation();
1098 if (index_rep.IsTagged() && actual_index->type().IsSmi()) {
1099 index_rep = Representation::Smi();
1101 if (length_rep.IsTagged() && actual_length->type().IsSmi()) {
1102 length_rep = Representation::Smi();
1104 Representation r = index_rep.generalize(length_rep);
1105 if (r.is_more_general_than(Representation::Integer32())) {
1106 r = Representation::Integer32();
1108 UpdateRepresentation(r, h_infer, "boundscheck");
1112 Range* HBoundsCheck::InferRange(Zone* zone) {
1113 Representation r = representation();
1114 if (r.IsSmiOrInteger32() && length()->HasRange()) {
1115 int upper = length()->range()->upper() - (allow_equality() ? 0 : 1);
1118 Range* result = new(zone) Range(lower, upper);
1119 if (index()->HasRange()) {
1120 result->Intersect(index()->range());
1123 // In case of Smi representation, clamp result to Smi::kMaxValue.
1124 if (r.IsSmi()) result->ClampToSmi();
1127 return HValue::InferRange(zone);
1131 void HBoundsCheckBaseIndexInformation::PrintDataTo(StringStream* stream) {
1132 stream->Add("base: ");
1133 base_index()->PrintNameTo(stream);
1134 stream->Add(", check: ");
1135 base_index()->PrintNameTo(stream);
1139 void HCallWithDescriptor::PrintDataTo(StringStream* stream) {
1140 for (int i = 0; i < OperandCount(); i++) {
1141 OperandAt(i)->PrintNameTo(stream);
1144 stream->Add("#%d", argument_count());
1148 void HCallNewArray::PrintDataTo(StringStream* stream) {
1149 stream->Add(ElementsKindToString(elements_kind()));
1151 HBinaryCall::PrintDataTo(stream);
1155 void HCallRuntime::PrintDataTo(StringStream* stream) {
1156 stream->Add("%o ", *name());
1157 if (save_doubles() == kSaveFPRegs) {
1158 stream->Add("[save doubles] ");
1160 stream->Add("#%d", argument_count());
1164 void HClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
1165 stream->Add("class_of_test(");
1166 value()->PrintNameTo(stream);
1167 stream->Add(", \"%o\")", *class_name());
1171 void HWrapReceiver::PrintDataTo(StringStream* stream) {
1172 receiver()->PrintNameTo(stream);
1174 function()->PrintNameTo(stream);
1178 void HAccessArgumentsAt::PrintDataTo(StringStream* stream) {
1179 arguments()->PrintNameTo(stream);
1181 index()->PrintNameTo(stream);
1182 stream->Add("], length ");
1183 length()->PrintNameTo(stream);
1187 void HControlInstruction::PrintDataTo(StringStream* stream) {
1188 stream->Add(" goto (");
1189 bool first_block = true;
1190 for (HSuccessorIterator it(this); !it.Done(); it.Advance()) {
1191 stream->Add(first_block ? "B%d" : ", B%d", it.Current()->block_id());
1192 first_block = false;
1198 void HUnaryControlInstruction::PrintDataTo(StringStream* stream) {
1199 value()->PrintNameTo(stream);
1200 HControlInstruction::PrintDataTo(stream);
1204 void HReturn::PrintDataTo(StringStream* stream) {
1205 value()->PrintNameTo(stream);
1206 stream->Add(" (pop ");
1207 parameter_count()->PrintNameTo(stream);
1208 stream->Add(" values)");
1212 Representation HBranch::observed_input_representation(int index) {
1213 static const ToBooleanStub::Types tagged_types(
1214 ToBooleanStub::NULL_TYPE |
1215 ToBooleanStub::SPEC_OBJECT |
1216 ToBooleanStub::STRING |
1217 ToBooleanStub::SYMBOL);
1218 if (expected_input_types_.ContainsAnyOf(tagged_types)) {
1219 return Representation::Tagged();
1221 if (expected_input_types_.Contains(ToBooleanStub::UNDEFINED)) {
1222 if (expected_input_types_.Contains(ToBooleanStub::HEAP_NUMBER)) {
1223 return Representation::Double();
1225 return Representation::Tagged();
1227 if (expected_input_types_.Contains(ToBooleanStub::HEAP_NUMBER)) {
1228 return Representation::Double();
1230 if (expected_input_types_.Contains(ToBooleanStub::SMI)) {
1231 return Representation::Smi();
1233 return Representation::None();
1237 bool HBranch::KnownSuccessorBlock(HBasicBlock** block) {
1238 HValue* value = this->value();
1239 if (value->EmitAtUses()) {
1240 ASSERT(value->IsConstant());
1241 ASSERT(!value->representation().IsDouble());
1242 *block = HConstant::cast(value)->BooleanValue()
1244 : SecondSuccessor();
1252 void HCompareMap::PrintDataTo(StringStream* stream) {
1253 value()->PrintNameTo(stream);
1254 stream->Add(" (%p)", *map().handle());
1255 HControlInstruction::PrintDataTo(stream);
1256 if (known_successor_index() == 0) {
1257 stream->Add(" [true]");
1258 } else if (known_successor_index() == 1) {
1259 stream->Add(" [false]");
1264 const char* HUnaryMathOperation::OpName() const {
1266 case kMathFloor: return "floor";
1267 case kMathRound: return "round";
1268 case kMathAbs: return "abs";
1269 case kMathLog: return "log";
1270 case kMathExp: return "exp";
1271 case kMathSqrt: return "sqrt";
1272 case kMathPowHalf: return "pow-half";
1273 case kMathClz32: return "clz32";
1281 Range* HUnaryMathOperation::InferRange(Zone* zone) {
1282 Representation r = representation();
1283 if (op() == kMathClz32) return new(zone) Range(0, 32);
1284 if (r.IsSmiOrInteger32() && value()->HasRange()) {
1285 if (op() == kMathAbs) {
1286 int upper = value()->range()->upper();
1287 int lower = value()->range()->lower();
1288 bool spans_zero = value()->range()->CanBeZero();
1289 // Math.abs(kMinInt) overflows its representation, on which the
1290 // instruction deopts. Hence clamp it to kMaxInt.
1291 int abs_upper = upper == kMinInt ? kMaxInt : abs(upper);
1292 int abs_lower = lower == kMinInt ? kMaxInt : abs(lower);
1294 new(zone) Range(spans_zero ? 0 : Min(abs_lower, abs_upper),
1295 Max(abs_lower, abs_upper));
1296 // In case of Smi representation, clamp Math.abs(Smi::kMinValue) to
1298 if (r.IsSmi()) result->ClampToSmi();
1302 return HValue::InferRange(zone);
1306 void HUnaryMathOperation::PrintDataTo(StringStream* stream) {
1307 const char* name = OpName();
1308 stream->Add("%s ", name);
1309 value()->PrintNameTo(stream);
1313 void HUnaryOperation::PrintDataTo(StringStream* stream) {
1314 value()->PrintNameTo(stream);
1318 void HHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
1319 value()->PrintNameTo(stream);
1321 case FIRST_JS_RECEIVER_TYPE:
1322 if (to_ == LAST_TYPE) stream->Add(" spec_object");
1324 case JS_REGEXP_TYPE:
1325 if (to_ == JS_REGEXP_TYPE) stream->Add(" reg_exp");
1328 if (to_ == JS_ARRAY_TYPE) stream->Add(" array");
1330 case JS_FUNCTION_TYPE:
1331 if (to_ == JS_FUNCTION_TYPE) stream->Add(" function");
1339 void HTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
1340 value()->PrintNameTo(stream);
1341 stream->Add(" == %o", *type_literal_.handle());
1342 HControlInstruction::PrintDataTo(stream);
1346 static String* TypeOfString(HConstant* constant, Isolate* isolate) {
1347 Heap* heap = isolate->heap();
1348 if (constant->HasNumberValue()) return heap->number_string();
1349 if (constant->IsUndetectable()) return heap->undefined_string();
1350 if (constant->HasStringValue()) return heap->string_string();
1351 switch (constant->GetInstanceType()) {
1352 case ODDBALL_TYPE: {
1353 Unique<Object> unique = constant->GetUnique();
1354 if (unique.IsKnownGlobal(heap->true_value()) ||
1355 unique.IsKnownGlobal(heap->false_value())) {
1356 return heap->boolean_string();
1358 if (unique.IsKnownGlobal(heap->null_value())) {
1359 return FLAG_harmony_typeof ? heap->null_string()
1360 : heap->object_string();
1362 ASSERT(unique.IsKnownGlobal(heap->undefined_value()));
1363 return heap->undefined_string();
1366 return heap->symbol_string();
1367 case JS_FUNCTION_TYPE:
1368 case JS_FUNCTION_PROXY_TYPE:
1369 return heap->function_string();
1371 return heap->object_string();
1376 bool HTypeofIsAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
1377 if (FLAG_fold_constants && value()->IsConstant()) {
1378 HConstant* constant = HConstant::cast(value());
1379 String* type_string = TypeOfString(constant, isolate());
1380 bool same_type = type_literal_.IsKnownGlobal(type_string);
1381 *block = same_type ? FirstSuccessor() : SecondSuccessor();
1383 } else if (value()->representation().IsSpecialization()) {
1385 type_literal_.IsKnownGlobal(isolate()->heap()->number_string());
1386 *block = number_type ? FirstSuccessor() : SecondSuccessor();
1388 } else if (value()->representation().IsFloat32x4()) {
1389 bool float32x4_type =
1390 type_literal_.IsKnownGlobal(isolate()->heap()->float32x4_string());
1391 *block = float32x4_type ? FirstSuccessor() : SecondSuccessor();
1393 } else if (value()->representation().IsInt32x4()) {
1395 type_literal_.IsKnownGlobal(isolate()->heap()->int32x4_string());
1396 *block = int32x4_type ? FirstSuccessor() : SecondSuccessor();
1405 void HCheckMapValue::PrintDataTo(StringStream* stream) {
1406 value()->PrintNameTo(stream);
1408 map()->PrintNameTo(stream);
1412 void HForInPrepareMap::PrintDataTo(StringStream* stream) {
1413 enumerable()->PrintNameTo(stream);
1417 void HForInCacheArray::PrintDataTo(StringStream* stream) {
1418 enumerable()->PrintNameTo(stream);
1420 map()->PrintNameTo(stream);
1421 stream->Add("[%d]", idx_);
1425 void HLoadFieldByIndex::PrintDataTo(StringStream* stream) {
1426 object()->PrintNameTo(stream);
1428 index()->PrintNameTo(stream);
1432 static bool MatchLeftIsOnes(HValue* l, HValue* r, HValue** negated) {
1433 if (!l->EqualsInteger32Constant(~0)) return false;
1439 static bool MatchNegationViaXor(HValue* instr, HValue** negated) {
1440 if (!instr->IsBitwise()) return false;
1441 HBitwise* b = HBitwise::cast(instr);
1442 return (b->op() == Token::BIT_XOR) &&
1443 (MatchLeftIsOnes(b->left(), b->right(), negated) ||
1444 MatchLeftIsOnes(b->right(), b->left(), negated));
1448 static bool MatchDoubleNegation(HValue* instr, HValue** arg) {
1450 return MatchNegationViaXor(instr, &negated) &&
1451 MatchNegationViaXor(negated, arg);
1455 HValue* HBitwise::Canonicalize() {
1456 if (!representation().IsSmiOrInteger32()) return this;
1457 // If x is an int32, then x & -1 == x, x | 0 == x and x ^ 0 == x.
1458 int32_t nop_constant = (op() == Token::BIT_AND) ? -1 : 0;
1459 if (left()->EqualsInteger32Constant(nop_constant) &&
1460 !right()->CheckFlag(kUint32)) {
1463 if (right()->EqualsInteger32Constant(nop_constant) &&
1464 !left()->CheckFlag(kUint32)) {
1467 // Optimize double negation, a common pattern used for ToInt32(x).
1469 if (MatchDoubleNegation(this, &arg) && !arg->CheckFlag(kUint32)) {
1476 Representation HAdd::RepresentationFromInputs() {
1477 Representation left_rep = left()->representation();
1478 if (left_rep.IsExternal()) {
1479 return Representation::External();
1481 return HArithmeticBinaryOperation::RepresentationFromInputs();
1485 Representation HAdd::RequiredInputRepresentation(int index) {
1487 Representation left_rep = left()->representation();
1488 if (left_rep.IsExternal()) {
1489 return Representation::Integer32();
1492 return HArithmeticBinaryOperation::RequiredInputRepresentation(index);
1496 static bool IsIdentityOperation(HValue* arg1, HValue* arg2, int32_t identity) {
1497 return arg1->representation().IsSpecialization() &&
1498 arg2->EqualsInteger32Constant(identity);
1502 HValue* HAdd::Canonicalize() {
1503 // Adding 0 is an identity operation except in case of -0: -0 + 0 = +0
1504 if (IsIdentityOperation(left(), right(), 0) &&
1505 !left()->representation().IsDouble()) { // Left could be -0.
1508 if (IsIdentityOperation(right(), left(), 0) &&
1509 !left()->representation().IsDouble()) { // Right could be -0.
1516 HValue* HSub::Canonicalize() {
1517 if (IsIdentityOperation(left(), right(), 0)) return left();
1522 HValue* HMul::Canonicalize() {
1523 if (IsIdentityOperation(left(), right(), 1)) return left();
1524 if (IsIdentityOperation(right(), left(), 1)) return right();
1529 bool HMul::MulMinusOne() {
1530 if (left()->EqualsInteger32Constant(-1) ||
1531 right()->EqualsInteger32Constant(-1)) {
1539 HValue* HMod::Canonicalize() {
1544 HValue* HDiv::Canonicalize() {
1545 if (IsIdentityOperation(left(), right(), 1)) return left();
1550 HValue* HChange::Canonicalize() {
1551 return (from().Equals(to())) ? value() : this;
1555 HValue* HWrapReceiver::Canonicalize() {
1556 if (HasNoUses()) return NULL;
1557 if (receiver()->type().IsJSObject()) {
1564 void HTypeof::PrintDataTo(StringStream* stream) {
1565 value()->PrintNameTo(stream);
1569 HInstruction* HForceRepresentation::New(Zone* zone, HValue* context,
1570 HValue* value, Representation representation) {
1571 if (FLAG_fold_constants && value->IsConstant()) {
1572 HConstant* c = HConstant::cast(value);
1573 if (c->HasNumberValue()) {
1574 double double_res = c->DoubleValue();
1575 if (representation.CanContainDouble(double_res)) {
1576 return HConstant::New(zone, context,
1577 static_cast<int32_t>(double_res),
1582 return new(zone) HForceRepresentation(value, representation);
1586 void HForceRepresentation::PrintDataTo(StringStream* stream) {
1587 stream->Add("%s ", representation().Mnemonic());
1588 value()->PrintNameTo(stream);
1592 void HChange::PrintDataTo(StringStream* stream) {
1593 HUnaryOperation::PrintDataTo(stream);
1594 stream->Add(" %s to %s", from().Mnemonic(), to().Mnemonic());
1596 if (CanTruncateToInt32()) stream->Add(" truncating-int32");
1597 if (CheckFlag(kBailoutOnMinusZero)) stream->Add(" -0?");
1598 if (CheckFlag(kAllowUndefinedAsNaN)) stream->Add(" allow-undefined-as-nan");
1602 HValue* HUnaryMathOperation::Canonicalize() {
1603 if (op() == kMathRound || op() == kMathFloor) {
1604 HValue* val = value();
1605 if (val->IsChange()) val = HChange::cast(val)->value();
1606 if (val->representation().IsSmiOrInteger32()) {
1607 if (val->representation().Equals(representation())) return val;
1608 return Prepend(new(block()->zone()) HChange(
1609 val, representation(), false, false));
1612 if (op() == kMathFloor && value()->IsDiv() && value()->UseCount() == 1) {
1613 HDiv* hdiv = HDiv::cast(value());
1615 HValue* left = hdiv->left();
1616 if (left->representation().IsInteger32()) {
1617 // A value with an integer representation does not need to be transformed.
1618 } else if (left->IsChange() && HChange::cast(left)->from().IsInteger32()) {
1619 // A change from an integer32 can be replaced by the integer32 value.
1620 left = HChange::cast(left)->value();
1621 } else if (hdiv->observed_input_representation(1).IsSmiOrInteger32()) {
1622 left = Prepend(new(block()->zone()) HChange(
1623 left, Representation::Integer32(), false, false));
1628 HValue* right = hdiv->right();
1629 if (right->IsInteger32Constant()) {
1630 right = Prepend(HConstant::cast(right)->CopyToRepresentation(
1631 Representation::Integer32(), right->block()->zone()));
1632 } else if (right->representation().IsInteger32()) {
1633 // A value with an integer representation does not need to be transformed.
1634 } else if (right->IsChange() &&
1635 HChange::cast(right)->from().IsInteger32()) {
1636 // A change from an integer32 can be replaced by the integer32 value.
1637 right = HChange::cast(right)->value();
1638 } else if (hdiv->observed_input_representation(2).IsSmiOrInteger32()) {
1639 right = Prepend(new(block()->zone()) HChange(
1640 right, Representation::Integer32(), false, false));
1645 return Prepend(HMathFloorOfDiv::New(
1646 block()->zone(), context(), left, right));
1652 HValue* HCheckInstanceType::Canonicalize() {
1653 if (check_ == IS_STRING && value()->type().IsString()) {
1657 if (check_ == IS_INTERNALIZED_STRING && value()->IsConstant()) {
1658 if (HConstant::cast(value())->HasInternalizedStringValue()) {
1666 void HCheckInstanceType::GetCheckInterval(InstanceType* first,
1667 InstanceType* last) {
1668 ASSERT(is_interval_check());
1670 case IS_SPEC_OBJECT:
1671 *first = FIRST_SPEC_OBJECT_TYPE;
1672 *last = LAST_SPEC_OBJECT_TYPE;
1675 *first = *last = JS_ARRAY_TYPE;
1683 void HCheckInstanceType::GetCheckMaskAndTag(uint8_t* mask, uint8_t* tag) {
1684 ASSERT(!is_interval_check());
1687 *mask = kIsNotStringMask;
1690 case IS_INTERNALIZED_STRING:
1691 *mask = kIsNotStringMask | kIsNotInternalizedMask;
1692 *tag = kInternalizedTag;
1700 bool HCheckMaps::HandleSideEffectDominator(GVNFlag side_effect,
1701 HValue* dominator) {
1702 ASSERT(side_effect == kMaps);
1703 // TODO(mstarzinger): For now we specialize on HStoreNamedField, but once
1704 // type information is rich enough we should generalize this to any HType
1705 // for which the map is known.
1706 if (HasNoUses() && dominator->IsStoreNamedField()) {
1707 HStoreNamedField* store = HStoreNamedField::cast(dominator);
1708 if (!store->has_transition() || store->object() != value()) return false;
1709 HConstant* transition = HConstant::cast(store->transition());
1710 if (map_set_.Contains(Unique<Map>::cast(transition->GetUnique()))) {
1711 DeleteAndReplaceWith(NULL);
1719 void HCheckMaps::PrintDataTo(StringStream* stream) {
1720 value()->PrintNameTo(stream);
1721 stream->Add(" [%p", *map_set_.at(0).handle());
1722 for (int i = 1; i < map_set_.size(); ++i) {
1723 stream->Add(",%p", *map_set_.at(i).handle());
1725 stream->Add("]%s", CanOmitMapChecks() ? "(omitted)" : "");
1729 void HCheckValue::PrintDataTo(StringStream* stream) {
1730 value()->PrintNameTo(stream);
1732 object().handle()->ShortPrint(stream);
1736 HValue* HCheckValue::Canonicalize() {
1737 return (value()->IsConstant() &&
1738 HConstant::cast(value())->EqualsUnique(object_)) ? NULL : this;
1742 const char* HCheckInstanceType::GetCheckName() {
1744 case IS_SPEC_OBJECT: return "object";
1745 case IS_JS_ARRAY: return "array";
1746 case IS_STRING: return "string";
1747 case IS_INTERNALIZED_STRING: return "internalized_string";
1754 void HCheckInstanceType::PrintDataTo(StringStream* stream) {
1755 stream->Add("%s ", GetCheckName());
1756 HUnaryOperation::PrintDataTo(stream);
1760 void HCallStub::PrintDataTo(StringStream* stream) {
1762 CodeStub::MajorName(major_key_, false));
1763 HUnaryCall::PrintDataTo(stream);
1767 void HUnknownOSRValue::PrintDataTo(StringStream *stream) {
1768 const char* type = "expression";
1769 if (environment_->is_local_index(index_)) type = "local";
1770 if (environment_->is_special_index(index_)) type = "special";
1771 if (environment_->is_parameter_index(index_)) type = "parameter";
1772 stream->Add("%s @ %d", type, index_);
1776 void HInstanceOf::PrintDataTo(StringStream* stream) {
1777 left()->PrintNameTo(stream);
1779 right()->PrintNameTo(stream);
1781 context()->PrintNameTo(stream);
1785 Range* HValue::InferRange(Zone* zone) {
1787 if (representation().IsSmi() || type().IsSmi()) {
1788 result = new(zone) Range(Smi::kMinValue, Smi::kMaxValue);
1789 result->set_can_be_minus_zero(false);
1791 result = new(zone) Range();
1792 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32));
1793 // TODO(jkummerow): The range cannot be minus zero when the upper type
1794 // bound is Integer32.
1800 Range* HChange::InferRange(Zone* zone) {
1801 Range* input_range = value()->range();
1802 if (from().IsInteger32() && !value()->CheckFlag(HInstruction::kUint32) &&
1805 input_range != NULL &&
1806 input_range->IsInSmiRange()))) {
1807 set_type(HType::Smi());
1808 ClearChangesFlag(kNewSpacePromotion);
1810 if (to().IsSmiOrTagged() &&
1811 input_range != NULL &&
1812 input_range->IsInSmiRange() &&
1813 (!SmiValuesAre32Bits() ||
1814 !value()->CheckFlag(HValue::kUint32) ||
1815 input_range->upper() != kMaxInt)) {
1816 // The Range class can't express upper bounds in the (kMaxInt, kMaxUint32]
1817 // interval, so we treat kMaxInt as a sentinel for this entire interval.
1818 ClearFlag(kCanOverflow);
1820 Range* result = (input_range != NULL)
1821 ? input_range->Copy(zone)
1822 : HValue::InferRange(zone);
1823 result->set_can_be_minus_zero(!to().IsSmiOrInteger32() ||
1824 !(CheckFlag(kAllUsesTruncatingToInt32) ||
1825 CheckFlag(kAllUsesTruncatingToSmi)));
1826 if (to().IsSmi()) result->ClampToSmi();
1831 Range* HConstant::InferRange(Zone* zone) {
1832 if (has_int32_value_) {
1833 Range* result = new(zone) Range(int32_value_, int32_value_);
1834 result->set_can_be_minus_zero(false);
1837 return HValue::InferRange(zone);
1841 HSourcePosition HPhi::position() const {
1842 return block()->first()->position();
1846 Range* HPhi::InferRange(Zone* zone) {
1847 Representation r = representation();
1848 if (r.IsSmiOrInteger32()) {
1849 if (block()->IsLoopHeader()) {
1850 Range* range = r.IsSmi()
1851 ? new(zone) Range(Smi::kMinValue, Smi::kMaxValue)
1852 : new(zone) Range(kMinInt, kMaxInt);
1855 Range* range = OperandAt(0)->range()->Copy(zone);
1856 for (int i = 1; i < OperandCount(); ++i) {
1857 range->Union(OperandAt(i)->range());
1862 return HValue::InferRange(zone);
1867 Range* HAdd::InferRange(Zone* zone) {
1868 Representation r = representation();
1869 if (r.IsSmiOrInteger32()) {
1870 Range* a = left()->range();
1871 Range* b = right()->range();
1872 Range* res = a->Copy(zone);
1873 if (!res->AddAndCheckOverflow(r, b) ||
1874 (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
1875 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
1876 ClearFlag(kCanOverflow);
1878 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
1879 !CheckFlag(kAllUsesTruncatingToInt32) &&
1880 a->CanBeMinusZero() && b->CanBeMinusZero());
1883 return HValue::InferRange(zone);
1888 Range* HSub::InferRange(Zone* zone) {
1889 Representation r = representation();
1890 if (r.IsSmiOrInteger32()) {
1891 Range* a = left()->range();
1892 Range* b = right()->range();
1893 Range* res = a->Copy(zone);
1894 if (!res->SubAndCheckOverflow(r, b) ||
1895 (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
1896 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
1897 ClearFlag(kCanOverflow);
1899 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
1900 !CheckFlag(kAllUsesTruncatingToInt32) &&
1901 a->CanBeMinusZero() && b->CanBeZero());
1904 return HValue::InferRange(zone);
1909 Range* HMul::InferRange(Zone* zone) {
1910 Representation r = representation();
1911 if (r.IsSmiOrInteger32()) {
1912 Range* a = left()->range();
1913 Range* b = right()->range();
1914 Range* res = a->Copy(zone);
1915 if (!res->MulAndCheckOverflow(r, b) ||
1916 (((r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
1917 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) &&
1919 // Truncated int multiplication is too precise and therefore not the
1920 // same as converting to Double and back.
1921 // Handle truncated integer multiplication by -1 special.
1922 ClearFlag(kCanOverflow);
1924 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
1925 !CheckFlag(kAllUsesTruncatingToInt32) &&
1926 ((a->CanBeZero() && b->CanBeNegative()) ||
1927 (a->CanBeNegative() && b->CanBeZero())));
1930 return HValue::InferRange(zone);
1935 Range* HDiv::InferRange(Zone* zone) {
1936 if (representation().IsInteger32()) {
1937 Range* a = left()->range();
1938 Range* b = right()->range();
1939 Range* result = new(zone) Range();
1940 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
1941 (a->CanBeMinusZero() ||
1942 (a->CanBeZero() && b->CanBeNegative())));
1943 if (!a->Includes(kMinInt) || !b->Includes(-1)) {
1944 ClearFlag(kCanOverflow);
1947 if (!b->CanBeZero()) {
1948 ClearFlag(kCanBeDivByZero);
1952 return HValue::InferRange(zone);
1957 Range* HMathFloorOfDiv::InferRange(Zone* zone) {
1958 if (representation().IsInteger32()) {
1959 Range* a = left()->range();
1960 Range* b = right()->range();
1961 Range* result = new(zone) Range();
1962 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
1963 (a->CanBeMinusZero() ||
1964 (a->CanBeZero() && b->CanBeNegative())));
1965 if (!a->Includes(kMinInt)) {
1966 ClearFlag(kLeftCanBeMinInt);
1969 if (!a->CanBeNegative()) {
1970 ClearFlag(HValue::kLeftCanBeNegative);
1973 if (!a->CanBePositive()) {
1974 ClearFlag(HValue::kLeftCanBePositive);
1977 if (!a->Includes(kMinInt) || !b->Includes(-1)) {
1978 ClearFlag(kCanOverflow);
1981 if (!b->CanBeZero()) {
1982 ClearFlag(kCanBeDivByZero);
1986 return HValue::InferRange(zone);
1991 Range* HMod::InferRange(Zone* zone) {
1992 if (representation().IsInteger32()) {
1993 Range* a = left()->range();
1994 Range* b = right()->range();
1996 // The magnitude of the modulus is bounded by the right operand. Note that
1997 // apart for the cases involving kMinInt, the calculation below is the same
1998 // as Max(Abs(b->lower()), Abs(b->upper())) - 1.
1999 int32_t positive_bound = -(Min(NegAbs(b->lower()), NegAbs(b->upper())) + 1);
2001 // The result of the modulo operation has the sign of its left operand.
2002 bool left_can_be_negative = a->CanBeMinusZero() || a->CanBeNegative();
2003 Range* result = new(zone) Range(left_can_be_negative ? -positive_bound : 0,
2004 a->CanBePositive() ? positive_bound : 0);
2006 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
2007 left_can_be_negative);
2009 if (!a->CanBeNegative()) {
2010 ClearFlag(HValue::kLeftCanBeNegative);
2013 if (!a->Includes(kMinInt) || !b->Includes(-1)) {
2014 ClearFlag(HValue::kCanOverflow);
2017 if (!b->CanBeZero()) {
2018 ClearFlag(HValue::kCanBeDivByZero);
2022 return HValue::InferRange(zone);
2027 InductionVariableData* InductionVariableData::ExaminePhi(HPhi* phi) {
2028 if (phi->block()->loop_information() == NULL) return NULL;
2029 if (phi->OperandCount() != 2) return NULL;
2030 int32_t candidate_increment;
2032 candidate_increment = ComputeIncrement(phi, phi->OperandAt(0));
2033 if (candidate_increment != 0) {
2034 return new(phi->block()->graph()->zone())
2035 InductionVariableData(phi, phi->OperandAt(1), candidate_increment);
2038 candidate_increment = ComputeIncrement(phi, phi->OperandAt(1));
2039 if (candidate_increment != 0) {
2040 return new(phi->block()->graph()->zone())
2041 InductionVariableData(phi, phi->OperandAt(0), candidate_increment);
/**
 * This function tries to match the following patterns (and all the relevant
 * variants related to |, & and + being commutative):
 * base | constant_or_mask
 * base & constant_and_mask
 * (base + constant_offset) & constant_and_mask
 * (base - constant_offset) & constant_and_mask
 */
2056 void InductionVariableData::DecomposeBitwise(
2058 BitwiseDecompositionResult* result) {
2059 HValue* base = IgnoreOsrValue(value);
2060 result->base = value;
2062 if (!base->representation().IsInteger32()) return;
2064 if (base->IsBitwise()) {
2065 bool allow_offset = false;
2068 HBitwise* bitwise = HBitwise::cast(base);
2069 if (bitwise->right()->IsInteger32Constant()) {
2070 mask = bitwise->right()->GetInteger32Constant();
2071 base = bitwise->left();
2072 } else if (bitwise->left()->IsInteger32Constant()) {
2073 mask = bitwise->left()->GetInteger32Constant();
2074 base = bitwise->right();
2078 if (bitwise->op() == Token::BIT_AND) {
2079 result->and_mask = mask;
2080 allow_offset = true;
2081 } else if (bitwise->op() == Token::BIT_OR) {
2082 result->or_mask = mask;
2087 result->context = bitwise->context();
2090 if (base->IsAdd()) {
2091 HAdd* add = HAdd::cast(base);
2092 if (add->right()->IsInteger32Constant()) {
2094 } else if (add->left()->IsInteger32Constant()) {
2095 base = add->right();
2097 } else if (base->IsSub()) {
2098 HSub* sub = HSub::cast(base);
2099 if (sub->right()->IsInteger32Constant()) {
2105 result->base = base;
2110 void InductionVariableData::AddCheck(HBoundsCheck* check,
2111 int32_t upper_limit) {
2112 ASSERT(limit_validity() != NULL);
2113 if (limit_validity() != check->block() &&
2114 !limit_validity()->Dominates(check->block())) return;
2115 if (!phi()->block()->current_loop()->IsNestedInThisLoop(
2116 check->block()->current_loop())) return;
2118 ChecksRelatedToLength* length_checks = checks();
2119 while (length_checks != NULL) {
2120 if (length_checks->length() == check->length()) break;
2121 length_checks = length_checks->next();
2123 if (length_checks == NULL) {
2124 length_checks = new(check->block()->zone())
2125 ChecksRelatedToLength(check->length(), checks());
2126 checks_ = length_checks;
2129 length_checks->AddCheck(check, upper_limit);
2133 void InductionVariableData::ChecksRelatedToLength::CloseCurrentBlock() {
2134 if (checks() != NULL) {
2135 InductionVariableCheck* c = checks();
2136 HBasicBlock* current_block = c->check()->block();
2137 while (c != NULL && c->check()->block() == current_block) {
2138 c->set_upper_limit(current_upper_limit_);
2145 void InductionVariableData::ChecksRelatedToLength::UseNewIndexInCurrentBlock(
2150 ASSERT(first_check_in_block() != NULL);
2151 HValue* previous_index = first_check_in_block()->index();
2152 ASSERT(context != NULL);
2154 Zone* zone = index_base->block()->graph()->zone();
2155 set_added_constant(HConstant::New(zone, context, mask));
2156 if (added_index() != NULL) {
2157 added_constant()->InsertBefore(added_index());
2159 added_constant()->InsertBefore(first_check_in_block());
2162 if (added_index() == NULL) {
2163 first_check_in_block()->ReplaceAllUsesWith(first_check_in_block()->index());
2164 HInstruction* new_index = HBitwise::New(zone, context, token, index_base,
2166 ASSERT(new_index->IsBitwise());
2167 new_index->ClearAllSideEffects();
2168 new_index->AssumeRepresentation(Representation::Integer32());
2169 set_added_index(HBitwise::cast(new_index));
2170 added_index()->InsertBefore(first_check_in_block());
2172 ASSERT(added_index()->op() == token);
2174 added_index()->SetOperandAt(1, index_base);
2175 added_index()->SetOperandAt(2, added_constant());
2176 first_check_in_block()->SetOperandAt(0, added_index());
2177 if (previous_index->UseCount() == 0) {
2178 previous_index->DeleteAndReplaceWith(NULL);
2182 void InductionVariableData::ChecksRelatedToLength::AddCheck(
2183 HBoundsCheck* check,
2184 int32_t upper_limit) {
2185 BitwiseDecompositionResult decomposition;
2186 InductionVariableData::DecomposeBitwise(check->index(), &decomposition);
2188 if (first_check_in_block() == NULL ||
2189 first_check_in_block()->block() != check->block()) {
2190 CloseCurrentBlock();
2192 first_check_in_block_ = check;
2193 set_added_index(NULL);
2194 set_added_constant(NULL);
2195 current_and_mask_in_block_ = decomposition.and_mask;
2196 current_or_mask_in_block_ = decomposition.or_mask;
2197 current_upper_limit_ = upper_limit;
2199 InductionVariableCheck* new_check = new(check->block()->graph()->zone())
2200 InductionVariableCheck(check, checks_, upper_limit);
2201 checks_ = new_check;
2205 if (upper_limit > current_upper_limit()) {
2206 current_upper_limit_ = upper_limit;
2209 if (decomposition.and_mask != 0 &&
2210 current_or_mask_in_block() == 0) {
2211 if (current_and_mask_in_block() == 0 ||
2212 decomposition.and_mask > current_and_mask_in_block()) {
2213 UseNewIndexInCurrentBlock(Token::BIT_AND,
2214 decomposition.and_mask,
2216 decomposition.context);
2217 current_and_mask_in_block_ = decomposition.and_mask;
2219 check->set_skip_check();
2221 if (current_and_mask_in_block() == 0) {
2222 if (decomposition.or_mask > current_or_mask_in_block()) {
2223 UseNewIndexInCurrentBlock(Token::BIT_OR,
2224 decomposition.or_mask,
2226 decomposition.context);
2227 current_or_mask_in_block_ = decomposition.or_mask;
2229 check->set_skip_check();
2232 if (!check->skip_check()) {
2233 InductionVariableCheck* new_check = new(check->block()->graph()->zone())
2234 InductionVariableCheck(check, checks_, upper_limit);
2235 checks_ = new_check;
/**
 * This method detects if phi is an induction variable, with phi_operand as
 * its "incremented" value (the other operand would be the "base" value).
 *
 * It checks if phi_operand has the form "phi + constant".
 * If yes, the constant is the increment that the induction variable gets at
 * every loop iteration.
 * Otherwise it returns 0.
 */
2249 int32_t InductionVariableData::ComputeIncrement(HPhi* phi,
2250 HValue* phi_operand) {
2251 if (!phi_operand->representation().IsInteger32()) return 0;
2253 if (phi_operand->IsAdd()) {
2254 HAdd* operation = HAdd::cast(phi_operand);
2255 if (operation->left() == phi &&
2256 operation->right()->IsInteger32Constant()) {
2257 return operation->right()->GetInteger32Constant();
2258 } else if (operation->right() == phi &&
2259 operation->left()->IsInteger32Constant()) {
2260 return operation->left()->GetInteger32Constant();
2262 } else if (phi_operand->IsSub()) {
2263 HSub* operation = HSub::cast(phi_operand);
2264 if (operation->left() == phi &&
2265 operation->right()->IsInteger32Constant()) {
2266 return -operation->right()->GetInteger32Constant();
/**
 * Swaps the information in "update" with the one contained in "this".
 * The swapping is important because this method is used while doing a
 * dominator tree traversal, and "update" will retain the old data that
 * will be restored while backtracking.
 */
2280 void InductionVariableData::UpdateAdditionalLimit(
2281 InductionVariableLimitUpdate* update) {
2282 ASSERT(update->updated_variable == this);
2283 if (update->limit_is_upper) {
2284 swap(&additional_upper_limit_, &update->limit);
2285 swap(&additional_upper_limit_is_included_, &update->limit_is_included);
2287 swap(&additional_lower_limit_, &update->limit);
2288 swap(&additional_lower_limit_is_included_, &update->limit_is_included);
2293 int32_t InductionVariableData::ComputeUpperLimit(int32_t and_mask,
2295 // Should be Smi::kMaxValue but it must fit 32 bits; lower is safe anyway.
2296 const int32_t MAX_LIMIT = 1 << 30;
2298 int32_t result = MAX_LIMIT;
2300 if (limit() != NULL &&
2301 limit()->IsInteger32Constant()) {
2302 int32_t limit_value = limit()->GetInteger32Constant();
2303 if (!limit_included()) {
2306 if (limit_value < result) result = limit_value;
2309 if (additional_upper_limit() != NULL &&
2310 additional_upper_limit()->IsInteger32Constant()) {
2311 int32_t limit_value = additional_upper_limit()->GetInteger32Constant();
2312 if (!additional_upper_limit_is_included()) {
2315 if (limit_value < result) result = limit_value;
2318 if (and_mask > 0 && and_mask < MAX_LIMIT) {
2319 if (and_mask < result) result = and_mask;
2323 // Add the effect of the or_mask.
2326 return result >= MAX_LIMIT ? kNoLimit : result;
2330 HValue* InductionVariableData::IgnoreOsrValue(HValue* v) {
2331 if (!v->IsPhi()) return v;
2332 HPhi* phi = HPhi::cast(v);
2333 if (phi->OperandCount() != 2) return v;
2334 if (phi->OperandAt(0)->block()->is_osr_entry()) {
2335 return phi->OperandAt(1);
2336 } else if (phi->OperandAt(1)->block()->is_osr_entry()) {
2337 return phi->OperandAt(0);
2344 InductionVariableData* InductionVariableData::GetInductionVariableData(
2346 v = IgnoreOsrValue(v);
2348 return HPhi::cast(v)->induction_variable_data();
/*
 * Check if a conditional branch to "current_branch" with token "token" is
 * the branch that keeps the induction loop running (and, conversely, will
 * terminate it if the "other_branch" is taken).
 *
 * Three conditions must be met:
 * - "current_branch" must be in the induction loop.
 * - "other_branch" must be out of the induction loop.
 * - "token" and the induction increment must be "compatible": the token should
 *   be a condition that keeps the execution inside the loop until the limit is
 *   reached.
 */
2366 bool InductionVariableData::CheckIfBranchIsLoopGuard(
2368 HBasicBlock* current_branch,
2369 HBasicBlock* other_branch) {
2370 if (!phi()->block()->current_loop()->IsNestedInThisLoop(
2371 current_branch->current_loop())) {
2375 if (phi()->block()->current_loop()->IsNestedInThisLoop(
2376 other_branch->current_loop())) {
2380 if (increment() > 0 && (token == Token::LT || token == Token::LTE)) {
2383 if (increment() < 0 && (token == Token::GT || token == Token::GTE)) {
2386 if (Token::IsInequalityOp(token) && (increment() == 1 || increment() == -1)) {
2394 void InductionVariableData::ComputeLimitFromPredecessorBlock(
2396 LimitFromPredecessorBlock* result) {
2397 if (block->predecessors()->length() != 1) return;
2398 HBasicBlock* predecessor = block->predecessors()->at(0);
2399 HInstruction* end = predecessor->last();
2401 if (!end->IsCompareNumericAndBranch()) return;
2402 HCompareNumericAndBranch* branch = HCompareNumericAndBranch::cast(end);
2404 Token::Value token = branch->token();
2405 if (!Token::IsArithmeticCompareOp(token)) return;
2407 HBasicBlock* other_target;
2408 if (block == branch->SuccessorAt(0)) {
2409 other_target = branch->SuccessorAt(1);
2411 other_target = branch->SuccessorAt(0);
2412 token = Token::NegateCompareOp(token);
2413 ASSERT(block == branch->SuccessorAt(1));
2416 InductionVariableData* data;
2418 data = GetInductionVariableData(branch->left());
2419 HValue* limit = branch->right();
2421 data = GetInductionVariableData(branch->right());
2422 token = Token::ReverseCompareOp(token);
2423 limit = branch->left();
2427 result->variable = data;
2428 result->token = token;
2429 result->limit = limit;
2430 result->other_target = other_target;
/*
 * Compute the limit that is imposed on an induction variable when entering
 * "block" (if any).
 * If the limit is the "proper" induction limit (the one that makes the loop
 * terminate when the induction variable reaches it) it is stored directly in
 * the induction variable data.
 * Otherwise the limit is written in "additional_limit" and the method
 * returns true.
 */
2444 bool InductionVariableData::ComputeInductionVariableLimit(
2446 InductionVariableLimitUpdate* additional_limit) {
2447 LimitFromPredecessorBlock limit;
2448 ComputeLimitFromPredecessorBlock(block, &limit);
2449 if (!limit.LimitIsValid()) return false;
2451 if (limit.variable->CheckIfBranchIsLoopGuard(limit.token,
2453 limit.other_target)) {
2454 limit.variable->limit_ = limit.limit;
2455 limit.variable->limit_included_ = limit.LimitIsIncluded();
2456 limit.variable->limit_validity_ = block;
2457 limit.variable->induction_exit_block_ = block->predecessors()->at(0);
2458 limit.variable->induction_exit_target_ = limit.other_target;
2461 additional_limit->updated_variable = limit.variable;
2462 additional_limit->limit = limit.limit;
2463 additional_limit->limit_is_upper = limit.LimitIsUpper();
2464 additional_limit->limit_is_included = limit.LimitIsIncluded();
// Range of min/max: combine the operand ranges with CombinedMax/CombinedMin
// when operating on Smi/Integer32; otherwise fall back to the generic
// HValue::InferRange.
2470 Range* HMathMinMax::InferRange(Zone* zone) {
2471 if (representation().IsSmiOrInteger32()) {
2472 Range* a = left()->range();
2473 Range* b = right()->range();
// Copy so the left operand's own range is not mutated.
2474 Range* res = a->Copy(zone);
2475 if (operation_ == kMathMax) {
2476 res->CombinedMax(b);
2478 ASSERT(operation_ == kMathMin);
2479 res->CombinedMin(b);
2483 return HValue::InferRange(zone);
// Debug printer: lists the phi's operands followed by per-representation use
// counts (uses:TOTAL_SMIs_INT32i_DOUBLEd_TAGGEDt), then range and type.
2488 void HPhi::PrintTo(StringStream* stream) {
2490 for (int i = 0; i < OperandCount(); ++i) {
2491 HValue* value = OperandAt(i);
2493 value->PrintNameTo(stream);
// Each count is direct (non-phi) uses plus uses reached through other phis.
2496 stream->Add(" uses:%d_%ds_%di_%dd_%dt",
2498 smi_non_phi_uses() + smi_indirect_uses(),
2499 int32_non_phi_uses() + int32_indirect_uses(),
2500 double_non_phi_uses() + double_indirect_uses(),
2501 tagged_non_phi_uses() + tagged_indirect_uses());
2502 PrintRangeTo(stream);
2503 PrintTypeTo(stream);
// Appends "value" as a new operand of this phi (grow the operand list with a
// placeholder, then install the value so use-lists are updated).
2508 void HPhi::AddInput(HValue* value) {
2509 inputs_.Add(NULL, value->block()->zone());
2510 SetOperandAt(OperandCount() - 1, value);
2511 // Mark phis that may have 'arguments' directly or indirectly as an operand.
2512 if (!CheckFlag(kIsArguments) && value->CheckFlag(kIsArguments)) {
2513 SetFlag(kIsArguments);
// True iff this phi has at least one use that is not itself a phi.
2518 bool HPhi::HasRealUses() {
2519 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
2520 if (!it.value()->IsPhi()) return true;
// If all operands of this phi are either the phi itself or one single other
// value, that value can replace the phi; returns NULL when two distinct
// non-self operands exist (the phi is not redundant).
2526 HValue* HPhi::GetRedundantReplacement() {
2527 HValue* candidate = NULL;
2528 int count = OperandCount();
// First pass: find the first operand that is not the phi itself.
2530 while (position < count && candidate == NULL) {
2531 HValue* current = OperandAt(position++);
2532 if (current != this) candidate = current;
// Second pass: every remaining operand must be the phi or the candidate.
2534 while (position < count) {
2535 HValue* current = OperandAt(position++);
2536 if (current != this && current != candidate) return NULL;
2538 ASSERT(candidate != this);
// Detaches this phi from its block; RemovePhi is expected to clear the
// back-pointer, which the trailing assert checks.
2543 void HPhi::DeleteFromGraph() {
2544 ASSERT(block() != NULL);
2545 block()->RemovePhi(this);
2546 ASSERT(block() == NULL);
// Tallies this phi's non-phi ("real") uses per observed representation and
// conservatively seeds the truncation flags: start with both set, clear one
// as soon as any non-Simulate use does not truncate.
2550 void HPhi::InitRealUses(int phi_id) {
2551 // Initialize real uses.
2553 // Compute a conservative approximation of truncating uses before inferring
2554 // representations. The proper, exact computation will be done later, when
2555 // inserting representation changes.
2556 SetFlag(kTruncatingToSmi);
2557 SetFlag(kTruncatingToInt32);
2558 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
2559 HValue* value = it.value();
2560 if (!value->IsPhi()) {
2561 Representation rep = value->observed_input_representation(it.index());
2562 non_phi_uses_[rep.kind()] += 1;
2563 if (FLAG_trace_representation) {
2564 PrintF("#%d Phi is used by real #%d %s as %s\n",
2565 id(), value->id(), value->Mnemonic(), rep.Mnemonic());
// Simulates (deopt bookkeeping) do not constrain truncation.
2567 if (!value->IsSimulate()) {
2568 if (!value->CheckFlag(kTruncatingToSmi)) {
2569 ClearFlag(kTruncatingToSmi);
2571 if (!value->CheckFlag(kTruncatingToInt32)) {
2572 ClearFlag(kTruncatingToInt32);
// Accumulates another phi's direct (non-phi) use counts into this phi's
// indirect-use counters, one bucket per representation.
2580 void HPhi::AddNonPhiUsesFrom(HPhi* other) {
2581 if (FLAG_trace_representation) {
2582 PrintF("adding to #%d Phi uses of #%d Phi: s%d i%d d%d t%d\n",
2584 other->non_phi_uses_[Representation::kSmi],
2585 other->non_phi_uses_[Representation::kInteger32],
2586 other->non_phi_uses_[Representation::kDouble],
2587 other->non_phi_uses_[Representation::kTagged]);
2590 for (int i = 0; i < Representation::kNumRepresentations; i++) {
2591 indirect_uses_[i] += other->non_phi_uses_[i];
// Adds this phi's per-representation indirect-use counts into "dest"
// (caller-provided array of kNumRepresentations counters).
2596 void HPhi::AddIndirectUsesTo(int* dest) {
2597 for (int i = 0; i < Representation::kNumRepresentations; i++) {
2598 dest[i] += indirect_uses_[i];
// Folds every simulate in "list" into this one: indexed assignments are kept
// unless this simulate already has a value for that index; pop counts are
// accumulated; the merged simulates are deleted from the graph.
// NOTE(review): sampled excerpt — the else-branch between the numbered lines
// (handling non-assigned/pushed values) is not fully visible here.
2603 void HSimulate::MergeWith(ZoneList<HSimulate*>* list) {
2604 while (!list->is_empty()) {
2605 HSimulate* from = list->RemoveLast();
2606 ZoneList<HValue*>* from_values = &from->values_;
2607 for (int i = 0; i < from_values->length(); ++i) {
2608 if (from->HasAssignedIndexAt(i)) {
2609 int index = from->GetAssignedIndexAt(i);
// Earlier (this) simulate wins when both assign the same index.
2610 if (HasValueForIndex(index)) continue;
2611 AddAssignedValue(index, from_values->at(i));
2613 if (pop_count_ > 0) {
2616 AddPushedValue(from_values->at(i));
2620 pop_count_ += from->pop_count_;
2621 from->DeleteAndReplaceWith(NULL);
// Debug printer: AST id, pop count, then values newest-first, each shown
// either as an indexed assignment ("var[i] = ") or a push.
2626 void HSimulate::PrintDataTo(StringStream* stream) {
2627 stream->Add("id=%d", ast_id().ToInt());
2628 if (pop_count_ > 0) stream->Add(" pop %d", pop_count_);
2629 if (values_.length() > 0) {
2630 if (pop_count_ > 0) stream->Add(" /");
2631 for (int i = values_.length() - 1; i >= 0; --i) {
2632 if (HasAssignedIndexAt(i)) {
2633 stream->Add(" var[%d] = ", GetAssignedIndexAt(i));
2635 stream->Add(" push ");
2637 values_[i]->PrintNameTo(stream);
2638 if (i > 0) stream->Add(",");
// Applies this simulate's recorded effects to "env": sets the AST id, drops
// the popped values, then binds indexed values (pushes are handled in the
// unsampled else-branch). Guarded by done_with_replay_ so replay runs once.
2644 void HSimulate::ReplayEnvironment(HEnvironment* env) {
2645 if (done_with_replay_) return;
2646 ASSERT(env != NULL);
2647 env->set_ast_id(ast_id());
2648 env->Drop(pop_count());
// Iterate newest-first, mirroring the order values were recorded.
2649 for (int i = values()->length() - 1; i >= 0; --i) {
2650 HValue* value = values()->at(i);
2651 if (HasAssignedIndexAt(i)) {
2652 env->Bind(GetAssignedIndexAt(i), value);
2657 done_with_replay_ = true;
// Helper for HCapturedObject::ReplayEnvironment: within "values", replaces
// every captured object carrying the same capture id as "other" with "other",
// recursing into non-matching captured objects' own value lists.
2661 static void ReplayEnvironmentNested(const ZoneList<HValue*>* values,
2662 HCapturedObject* other) {
2663 for (int i = 0; i < values->length(); ++i) {
2664 HValue* value = values->at(i);
2665 if (value->IsCapturedObject()) {
2666 if (HCapturedObject::cast(value)->capture_id() == other->capture_id()) {
2667 values->at(i) = other;
2669 ReplayEnvironmentNested(HCapturedObject::cast(value)->values(), other);
2676 // Replay captured objects by replacing all captured objects with the
2677 // same capture id in the current and all outer environments.
2678 void HCapturedObject::ReplayEnvironment(HEnvironment* env) {
2679 ASSERT(env != NULL);
// Walk the whole environment chain (outer environments included).
2680 while (env != NULL) {
2681 ReplayEnvironmentNested(env->values(), this);
// Debug printer: capture id followed by the dematerialized-object fields.
2687 void HCapturedObject::PrintDataTo(StringStream* stream) {
2688 stream->Add("#%d ", capture_id());
2689 HDematerializedObject::PrintDataTo(stream);
// Records a return-target block for this inlined function; the block must
// already be marked as an inline return target.
2693 void HEnterInlined::RegisterReturnTarget(HBasicBlock* return_target,
2695 ASSERT(return_target->IsInlineReturnTarget());
2696 return_targets_.Add(return_target, zone);
// Debug printer: inlined function's debug name and function id.
2700 void HEnterInlined::PrintDataTo(StringStream* stream) {
2701 SmartArrayPointer<char> name = function()->debug_name()->ToCString();
2702 stream->Add("%s, id=%d", name.get(), function()->id().ToInt());
// True iff "value" is exactly representable as an int32: round-trip through
// int32_t and compare the raw bit patterns (a bitwise compare, unlike ==,
// also rejects -0.0, whose round-trip yields +0.0).
2706 static bool IsInteger32(double value) {
2707 double roundtrip_value = static_cast<double>(static_cast<int32_t>(value));
2708 return BitCast<int64_t>(roundtrip_value) == BitCast<int64_t>(value);
// Constructs a constant from a handle: derives instance type, undetectability
// and new-space residency for heap objects, and caches smi/int32/double
// views for numbers.
2712 HConstant::HConstant(Handle<Object> handle, Representation r)
2713 : HTemplateInstruction<0>(HType::TypeFromValue(handle)),
2714 object_(Unique<Object>::CreateUninitialized(handle)),
2715 has_smi_value_(false),
2716 has_int32_value_(false),
2717 has_double_value_(false),
2718 has_external_reference_value_(false),
2719 is_not_in_new_space_(true),
2720 boolean_value_(handle->BooleanValue()),
2721 is_undetectable_(false),
2722 instance_type_(kUnknownInstanceType) {
2723 if (handle->IsHeapObject()) {
2724 Handle<HeapObject> heap_obj = Handle<HeapObject>::cast(handle);
2725 Heap* heap = heap_obj->GetHeap();
2726 is_not_in_new_space_ = !heap->InNewSpace(*handle);
2727 instance_type_ = heap_obj->map()->instance_type();
2728 is_undetectable_ = heap_obj->map()->is_undetectable();
2730 if (handle->IsNumber()) {
2731 double n = handle->Number();
// Cache the numeric views; smi only when the int32 value fits a Smi.
2732 has_int32_value_ = IsInteger32(n);
2733 int32_value_ = DoubleToInt32(n);
2734 has_smi_value_ = has_int32_value_ && Smi::IsValid(int32_value_);
2736 has_double_value_ = true;
2737 // TODO(titzer): if this heap number is new space, tenure a new one.
// Constructs a non-numeric constant from an already-uniquified object with
// explicitly supplied metadata; numeric caches stay unset (asserted below).
2744 HConstant::HConstant(Unique<Object> unique,
2747 bool is_not_in_new_space,
2749 bool is_undetectable,
2750 InstanceType instance_type)
2751 : HTemplateInstruction<0>(type),
2753 has_smi_value_(false),
2754 has_int32_value_(false),
2755 has_double_value_(false),
2756 has_external_reference_value_(false),
2757 is_not_in_new_space_(is_not_in_new_space),
2758 boolean_value_(boolean_value),
2759 is_undetectable_(is_undetectable),
2760 instance_type_(instance_type) {
2761 ASSERT(!unique.handle().is_null());
// Tagged numbers must go through the handle/int32/double constructors.
2762 ASSERT(!type.IsTaggedNumber());
// Constructs an int32 constant; the double view is always derivable
// (FastI2D), the smi view only when the value fits a Smi.
2767 HConstant::HConstant(int32_t integer_value,
2769 bool is_not_in_new_space,
2770 Unique<Object> object)
2772 has_smi_value_(Smi::IsValid(integer_value)),
2773 has_int32_value_(true),
2774 has_double_value_(true),
2775 has_external_reference_value_(false),
2776 is_not_in_new_space_(is_not_in_new_space),
2777 boolean_value_(integer_value != 0),
2778 is_undetectable_(false),
2779 int32_value_(integer_value),
2780 double_value_(FastI2D(integer_value)),
2781 instance_type_(kUnknownInstanceType) {
2782 // It's possible to create a constant with a value in Smi-range but stored
2783 // in a (pre-existing) HeapNumber. See crbug.com/349878.
2784 bool could_be_heapobject = r.IsTagged() && !object.handle().is_null();
2785 bool is_smi = has_smi_value_ && !could_be_heapobject;
2786 set_type(is_smi ? HType::Smi() : HType::TaggedNumber());
// Constructs a double constant; int32/smi views are cached when the value
// round-trips exactly. NaN is falsy, hence the isnan check in boolean_value_.
2791 HConstant::HConstant(double double_value,
2793 bool is_not_in_new_space,
2794 Unique<Object> object)
2796 has_int32_value_(IsInteger32(double_value)),
2797 has_double_value_(true),
2798 has_external_reference_value_(false),
2799 is_not_in_new_space_(is_not_in_new_space),
2800 boolean_value_(double_value != 0 && !std::isnan(double_value)),
2801 is_undetectable_(false),
2802 int32_value_(DoubleToInt32(double_value)),
2803 double_value_(double_value),
2804 instance_type_(kUnknownInstanceType) {
2805 has_smi_value_ = has_int32_value_ && Smi::IsValid(int32_value_);
2806 // It's possible to create a constant with a value in Smi-range but stored
2807 // in a (pre-existing) HeapNumber. See crbug.com/349878.
2808 bool could_be_heapobject = r.IsTagged() && !object.handle().is_null();
2809 bool is_smi = has_smi_value_ && !could_be_heapobject;
2810 set_type(is_smi ? HType::Smi() : HType::TaggedNumber());
// Constructs an external-reference constant (e.g. a runtime address); it has
// no object handle and no numeric views, and is always truthy.
2815 HConstant::HConstant(ExternalReference reference)
2816 : HTemplateInstruction<0>(HType::None()),
2817 object_(Unique<Object>(Handle<Object>::null())),
2818 has_smi_value_(false),
2819 has_int32_value_(false),
2820 has_double_value_(false),
2821 has_external_reference_value_(true),
2822 is_not_in_new_space_(true),
2823 boolean_value_(true),
2824 is_undetectable_(false),
2825 external_reference_value_(reference),
2826 instance_type_(kUnknownInstanceType) {
2827 Initialize(Representation::External());
// Finalizes the representation: picks the most specific one the cached value
// admits (smi -> int32 -> double -> external, falling back to tagged) and,
// for JSObjects with deprecated maps, eagerly migrates the instance.
2831 void HConstant::Initialize(Representation r) {
// Smi representation only when smis are 31-bit on this platform.
2833 if (has_smi_value_ && SmiValuesAre31Bits()) {
2834 r = Representation::Smi();
2835 } else if (has_int32_value_) {
2836 r = Representation::Integer32();
2837 } else if (has_double_value_) {
2838 r = Representation::Double();
2839 } else if (has_external_reference_value_) {
2840 r = Representation::External();
2842 Handle<Object> object = object_.handle();
2843 if (object->IsJSObject()) {
2844 // Try to eagerly migrate JSObjects that have deprecated maps.
2845 Handle<JSObject> js_object = Handle<JSObject>::cast(object);
2846 if (js_object->map()->is_deprecated()) {
2847 JSObject::TryMigrateInstance(js_object);
2850 r = Representation::Tagged();
2853 set_representation(r);
// Whether this constant refers to a heap value that is both immortal and
// immovable (so code can embed its address without relocation): checks the
// heap's immortal-immovable roots, internalized strings and string maps.
// NOTE(review): sampled excerpt — the early returns for the numeric/external
// cases are not visible between the numbered lines.
2858 bool HConstant::ImmortalImmovable() const {
2859 if (has_int32_value_) {
2862 if (has_double_value_) {
// Special doubles (e.g. -0, NaN) are canonical heap numbers.
2863 if (IsSpecialDouble()) {
2868 if (has_external_reference_value_) {
2872 ASSERT(!object_.handle().is_null());
2873 Heap* heap = isolate()->heap();
// -0 and NaN must have been caught by the special-double path above.
2874 ASSERT(!object_.IsKnownGlobal(heap->minus_zero_value()));
2875 ASSERT(!object_.IsKnownGlobal(heap->nan_value()));
2877 #define IMMORTAL_IMMOVABLE_ROOT(name) \
2878 object_.IsKnownGlobal(heap->name()) ||
2879 IMMORTAL_IMMOVABLE_ROOT_LIST(IMMORTAL_IMMOVABLE_ROOT)
2880 #undef IMMORTAL_IMMOVABLE_ROOT
2881 #define INTERNALIZED_STRING(name, value) \
2882 object_.IsKnownGlobal(heap->name()) ||
2883 INTERNALIZED_STRING_LIST(INTERNALIZED_STRING)
2884 #undef INTERNALIZED_STRING
2885 #define STRING_TYPE(NAME, size, name, Name) \
2886 object_.IsKnownGlobal(heap->name##_map()) ||
2887 STRING_TYPE_LIST(STRING_TYPE)
// Whether this constant should be materialized at each use site instead of
// once in its defining block: not for standard constants in OSR graphs, not
// for cells or doubles; always when it is unused.
2893 bool HConstant::EmitAtUses() {
2895 if (block()->graph()->has_osr() &&
2896 block()->graph()->IsStandardConstant(this)) {
2897 // TODO(titzer): this seems like a hack that should be fixed by custom OSR.
2900 if (UseCount() == 0) return true;
2901 if (IsCell()) return false;
2902 if (representation().IsDouble()) return false;
// Clones this constant in representation "r", or returns NULL when the cached
// value cannot represent "r" (e.g. asking for Smi without a smi view).
2907 HConstant* HConstant::CopyToRepresentation(Representation r, Zone* zone) const {
2908 if (r.IsSmi() && !has_smi_value_) return NULL;
2909 if (r.IsInteger32() && !has_int32_value_) return NULL;
2910 if (r.IsDouble() && !has_double_value_) return NULL;
2911 if (r.IsExternal() && !has_external_reference_value_) return NULL;
// Prefer the most specific cached view when constructing the copy.
2912 if (has_int32_value_) {
2913 return new(zone) HConstant(int32_value_, r, is_not_in_new_space_, object_);
2915 if (has_double_value_) {
2916 return new(zone) HConstant(double_value_, r, is_not_in_new_space_, object_);
2918 if (has_external_reference_value_) {
2919 return new(zone) HConstant(external_reference_value_);
2921 ASSERT(!object_.handle().is_null());
2922 return new(zone) HConstant(object_,
2925 is_not_in_new_space_,
// Produces an Integer32 constant from this one using int32 truncation
// semantics (DoubleToInt32 for double values); the Maybe is empty when the
// constant has neither an int32 nor a double view.
2932 Maybe<HConstant*> HConstant::CopyToTruncatedInt32(Zone* zone) {
2933 HConstant* res = NULL;
2934 if (has_int32_value_) {
2935 res = new(zone) HConstant(int32_value_,
2936 Representation::Integer32(),
2937 is_not_in_new_space_,
2939 } else if (has_double_value_) {
2940 res = new(zone) HConstant(DoubleToInt32(double_value_),
2941 Representation::Integer32(),
2942 is_not_in_new_space_,
2945 return Maybe<HConstant*>(res != NULL, res);
// Converts non-number constants to their ToNumber value: true/false -> 1/0,
// undefined -> NaN, null -> 0; empty Maybe for anything else.
2949 Maybe<HConstant*> HConstant::CopyToTruncatedNumber(Zone* zone) {
2950 HConstant* res = NULL;
2951 Handle<Object> handle = this->handle(zone->isolate());
2952 if (handle->IsBoolean()) {
2953 res = handle->BooleanValue() ?
2954 new(zone) HConstant(1) : new(zone) HConstant(0);
2955 } else if (handle->IsUndefined()) {
2956 res = new(zone) HConstant(OS::nan_value());
2957 } else if (handle->IsNull()) {
2958 res = new(zone) HConstant(0);
2960 return Maybe<HConstant*>(res != NULL, res);
// Debug printer: renders the most specific cached view (int32, double,
// external address, or the object's ShortPrint) plus a new-space marker.
2964 void HConstant::PrintDataTo(StringStream* stream) {
2965 if (has_int32_value_) {
2966 stream->Add("%d ", int32_value_);
2967 } else if (has_double_value_) {
2968 stream->Add("%f ", FmtElm(double_value_));
2969 } else if (has_external_reference_value_) {
2970 stream->Add("%p ", reinterpret_cast<void*>(
2971 external_reference_value_.address()));
2973 handle(Isolate::Current())->ShortPrint(stream);
2975 if (!is_not_in_new_space_) {
2976 stream->Add("[new space] ");
// Debug printer: both operands plus overflow ("!") and minus-zero-bailout
// ("-0?") flag markers.
2981 void HBinaryOperation::PrintDataTo(StringStream* stream) {
2982 left()->PrintNameTo(stream);
2984 right()->PrintNameTo(stream);
2985 if (CheckFlag(kCanOverflow)) stream->Add(" !");
2986 if (CheckFlag(kBailoutOnMinusZero)) stream->Add(" -0?");
// Representation inference for binary ops: start from the inputs, widen Smi
// to Integer32 if any use needs more than Smi, then refine from either the
// uses (no observed output) or the observed output representation.
2990 void HBinaryOperation::InferRepresentation(HInferRepresentationPhase* h_infer) {
2991 ASSERT(CheckFlag(kFlexibleRepresentation));
2992 Representation new_rep = RepresentationFromInputs();
2993 UpdateRepresentation(new_rep, h_infer, "inputs");
2995 if (representation().IsSmi() && HasNonSmiUse()) {
2996 UpdateRepresentation(
2997 Representation::Integer32(), h_infer, "use requirements");
3000 if (observed_output_representation_.IsNone()) {
3001 new_rep = RepresentationFromUses();
3002 UpdateRepresentation(new_rep, h_infer, "uses");
3004 new_rep = RepresentationFromOutput();
3005 UpdateRepresentation(new_rep, h_infer, "output");
3010 Representation HBinaryOperation::RepresentationFromInputs() {
3011 // Determine the worst case of observed input representations and
3012 // the currently assumed output representation.
3013 Representation rep = representation();
// Inputs 1 and 2 are the left and right operands.
3014 for (int i = 1; i <= 2; ++i) {
3015 rep = rep.generalize(observed_input_representation(i));
3017 // If any of the actual input representation is more general than what we
3018 // have so far but not Tagged, use that representation instead.
3019 Representation left_rep = left()->representation();
3020 Representation right_rep = right()->representation();
3021 if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
3022 if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);
// True when the observed (double) output representation can be ignored: all
// uses truncate to the current int32/smi representation, and the operation
// is not a multiplication that could produce -0 (MulMinusOne).
3028 bool HBinaryOperation::IgnoreObservedOutputRepresentation(
3029 Representation current_rep) {
3030 return ((current_rep.IsInteger32() && CheckUsesForFlag(kTruncatingToInt32)) ||
3031 (current_rep.IsSmi() && CheckUsesForFlag(kTruncatingToSmi))) &&
3032 // Mul in Integer32 mode would be too precise.
3033 (!this->IsMul() || HMul::cast(this)->MulMinusOne());
// Returns the observed output representation when it is strictly more
// general than the current one and may not be ignored; otherwise None
// (meaning: no constraint from the output).
3037 Representation HBinaryOperation::RepresentationFromOutput() {
3038 Representation rep = representation();
3039 // Consider observed output representation, but ignore it if it's Double,
3040 // this instruction is not a division, and all its uses are truncating
3042 if (observed_output_representation_.is_more_general_than(rep) &&
3043 !IgnoreObservedOutputRepresentation(rep)) {
3044 return observed_output_representation_;
3046 return Representation::None();
// Forces representation "r" on this operation and records it as the observed
// representation of both operands before delegating to the base class.
3050 void HBinaryOperation::AssumeRepresentation(Representation r) {
3051 set_observed_input_representation(1, r);
3052 set_observed_input_representation(2, r);
3053 HValue::AssumeRepresentation(r);
// Representation inference for min/max: driven purely by the inputs; the
// uses deliberately do not constrain the choice.
3057 void HMathMinMax::InferRepresentation(HInferRepresentationPhase* h_infer) {
3058 ASSERT(CheckFlag(kFlexibleRepresentation));
3059 Representation new_rep = RepresentationFromInputs();
3060 UpdateRepresentation(new_rep, h_infer, "inputs");
3061 // Do not care about uses.
// Range inference for bitwise ops. XOR: bound the result by the highest bit
// that can differ between the operands. AND/OR: combine the operand bit
// masks; a non-negative mask directly bounds the result.
// NOTE(review): sampled excerpt — some intermediate lines (e.g. the "limit"
// computation from "high", default-mask fallbacks) are not visible here.
3065 Range* HBitwise::InferRange(Zone* zone) {
3066 if (op() == Token::BIT_XOR) {
3067 if (left()->HasRange() && right()->HasRange()) {
3068 // The maximum value has the high bit, and all bits below, set:
3070 // If the range can be negative, the minimum int is a negative number with
3071 // the high bit, and all bits below, unset:
3073 // If it cannot be negative, conservatively choose 0 as minimum int.
3074 int64_t left_upper = left()->range()->upper();
3075 int64_t left_lower = left()->range()->lower();
3076 int64_t right_upper = right()->range()->upper();
3077 int64_t right_lower = right()->range()->lower();
// Fold negative bounds to their complement so only magnitude bits remain.
3079 if (left_upper < 0) left_upper = ~left_upper;
3080 if (left_lower < 0) left_lower = ~left_lower;
3081 if (right_upper < 0) right_upper = ~right_upper;
3082 if (right_lower < 0) right_lower = ~right_lower;
3084 int high = MostSignificantBit(
3085 static_cast<uint32_t>(
3086 left_upper | left_lower | right_upper | right_lower));
3090 int32_t min = (left()->range()->CanBeNegative() ||
3091 right()->range()->CanBeNegative())
3092 ? static_cast<int32_t>(-limit) : 0;
3093 return new(zone) Range(min, static_cast<int32_t>(limit - 1));
3095 Range* result = HValue::InferRange(zone);
// Bitwise results are ints; -0 is impossible.
3096 result->set_can_be_minus_zero(false);
3099 const int32_t kDefaultMask = static_cast<int32_t>(0xffffffff);
3100 int32_t left_mask = (left()->range() != NULL)
3101 ? left()->range()->Mask()
3103 int32_t right_mask = (right()->range() != NULL)
3104 ? right()->range()->Mask()
3106 int32_t result_mask = (op() == Token::BIT_AND)
3107 ? left_mask & right_mask
3108 : left_mask | right_mask;
// A non-negative mask means the sign bit cannot be set, so the mask itself
// is an upper bound on the result.
3109 if (result_mask >= 0) return new(zone) Range(0, result_mask);
3111 Range* result = HValue::InferRange(zone);
3112 result->set_can_be_minus_zero(false);
// Range of arithmetic shift right: with a constant shift amount, shift the
// left operand's range; otherwise fall back to the generic inference.
3117 Range* HSar::InferRange(Zone* zone) {
3118 if (right()->IsConstant()) {
3119 HConstant* c = HConstant::cast(right());
3120 if (c->HasInteger32Value()) {
3121 Range* result = (left()->range() != NULL)
3122 ? left()->range()->Copy(zone)
3123 : new(zone) Range();
3124 result->Sar(c->Integer32Value());
3128 return HValue::InferRange(zone);
// Range of logical shift right with a constant shift amount. Negative inputs
// become large unsigned values, so bounds are only given when shifting by at
// least one bit guarantees an int32-representable result; non-negative
// inputs behave like an arithmetic shift.
3132 Range* HShr::InferRange(Zone* zone) {
3133 if (right()->IsConstant()) {
3134 HConstant* c = HConstant::cast(right());
3135 if (c->HasInteger32Value()) {
// Shift amounts are taken mod 32, as in JS.
3136 int shift_count = c->Integer32Value() & 0x1f;
3137 if (left()->range()->CanBeNegative()) {
3138 // Only compute bounds if the result always fits into an int32.
3139 return (shift_count >= 1)
3140 ? new(zone) Range(0,
3141 static_cast<uint32_t>(0xffffffff) >> shift_count)
3142 : new(zone) Range();
3144 // For positive inputs we can use the >> operator.
3145 Range* result = (left()->range() != NULL)
3146 ? left()->range()->Copy(zone)
3147 : new(zone) Range();
3148 result->Sar(c->Integer32Value());
3153 return HValue::InferRange(zone);
// Range of shift left: with a constant shift amount, shift the left
// operand's range; otherwise fall back to the generic inference.
3157 Range* HShl::InferRange(Zone* zone) {
3158 if (right()->IsConstant()) {
3159 HConstant* c = HConstant::cast(right());
3160 if (c->HasInteger32Value()) {
3161 Range* result = (left()->range() != NULL)
3162 ? left()->range()->Copy(zone)
3163 : new(zone) Range();
3164 result->Shl(c->Integer32Value());
3168 return HValue::InferRange(zone);
// Range of a named field load, derived from the field's storage width
// (int8/uint8/int16/uint16) or, for string lengths, [0, kMaxLength].
3172 Range* HLoadNamedField::InferRange(Zone* zone) {
3173 if (access().representation().IsInteger8()) {
3174 return new(zone) Range(kMinInt8, kMaxInt8);
3176 if (access().representation().IsUInteger8()) {
3177 return new(zone) Range(kMinUInt8, kMaxUInt8);
3179 if (access().representation().IsInteger16()) {
3180 return new(zone) Range(kMinInt16, kMaxInt16);
3182 if (access().representation().IsUInteger16()) {
3183 return new(zone) Range(kMinUInt16, kMaxUInt16);
3185 if (access().IsStringLength()) {
3186 return new(zone) Range(0, String::kMaxLength);
3188 return HValue::InferRange(zone);
// Range of a keyed load from an external (typed) array, derived from the
// element width; other element kinds use the generic inference.
3192 Range* HLoadKeyed::InferRange(Zone* zone) {
3193 switch (elements_kind()) {
3194 case EXTERNAL_INT8_ELEMENTS:
3195 return new(zone) Range(kMinInt8, kMaxInt8);
3196 case EXTERNAL_UINT8_ELEMENTS:
3197 case EXTERNAL_UINT8_CLAMPED_ELEMENTS:
3198 return new(zone) Range(kMinUInt8, kMaxUInt8);
3199 case EXTERNAL_INT16_ELEMENTS:
3200 return new(zone) Range(kMinInt16, kMaxInt16);
3201 case EXTERNAL_UINT16_ELEMENTS:
3202 return new(zone) Range(kMinUInt16, kMaxUInt16);
3204 return HValue::InferRange(zone);
// Debug printer: comparison token followed by the binary-operation operands.
3209 void HCompareGeneric::PrintDataTo(StringStream* stream) {
3210 stream->Add(Token::Name(token()));
3212 HBinaryOperation::PrintDataTo(stream);
// Debug printer: comparison token followed by the control-flow successors.
3216 void HStringCompareAndBranch::PrintDataTo(StringStream* stream) {
3217 stream->Add(Token::Name(token()));
3219 HControlInstruction::PrintDataTo(stream);
// Debug printer: token, both operands, then the control-flow successors.
3223 void HCompareNumericAndBranch::PrintDataTo(StringStream* stream) {
3224 stream->Add(Token::Name(token()));
3226 left()->PrintNameTo(stream);
3228 right()->PrintNameTo(stream);
3229 HControlInstruction::PrintDataTo(stream);
// Debug printer: both operands, then the control-flow successors.
3233 void HCompareObjectEqAndBranch::PrintDataTo(StringStream* stream) {
3234 left()->PrintNameTo(stream);
3236 right()->PrintNameTo(stream);
3237 HControlInstruction::PrintDataTo(stream);
// Statically resolves the branch when possible: either a pre-computed known
// successor index, or (with constant folding enabled) both operands being
// constants whose identity can be compared directly.
3241 bool HCompareObjectEqAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3242 if (known_successor_index() != kNoKnownSuccessorIndex) {
3243 *block = SuccessorAt(known_successor_index());
3246 if (FLAG_fold_constants && left()->IsConstant() && right()->IsConstant()) {
3247 *block = HConstant::cast(left())->DataEquals(HConstant::cast(right()))
3248 ? FirstSuccessor() : SecondSuccessor();
// Evaluates `typeof x == "object"`-style object-ness for a constant: numbers
// and undetectables are not objects, null is (handled in the unsampled
// branch), otherwise the instance type must be a non-callable spec object.
3256 bool ConstantIsObject(HConstant* constant, Isolate* isolate) {
3257 if (constant->HasNumberValue()) return false;
3258 if (constant->GetUnique().IsKnownGlobal(isolate->heap()->null_value())) {
3261 if (constant->IsUndetectable()) return false;
3262 InstanceType type = constant->GetInstanceType();
3263 return (FIRST_NONCALLABLE_SPEC_OBJECT_TYPE <= type) &&
3264 (type <= LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
// Folds the is-object branch when the tested value is a constant.
3268 bool HIsObjectAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3269 if (FLAG_fold_constants && value()->IsConstant()) {
3270 *block = ConstantIsObject(HConstant::cast(value()), isolate())
3271 ? FirstSuccessor() : SecondSuccessor();
// Folds the is-string branch when the tested value is a constant.
3279 bool HIsStringAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3280 if (FLAG_fold_constants && value()->IsConstant()) {
3281 *block = HConstant::cast(value())->HasStringValue()
3282 ? FirstSuccessor() : SecondSuccessor();
// Folds the is-undetectable branch when the tested value is a constant.
3290 bool HIsUndetectableAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3291 if (FLAG_fold_constants && value()->IsConstant()) {
3292 *block = HConstant::cast(value())->IsUndetectable()
3293 ? FirstSuccessor() : SecondSuccessor();
// Folds the instance-type-range branch when the tested value is a constant.
3301 bool HHasInstanceTypeAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3302 if (FLAG_fold_constants && value()->IsConstant()) {
3303 InstanceType type = HConstant::cast(value())->GetInstanceType();
3304 *block = (from_ <= type) && (type <= to_)
3305 ? FirstSuccessor() : SecondSuccessor();
// The hole check simply adopts the representation of the tested value.
3313 void HCompareHoleAndBranch::InferRepresentation(
3314 HInferRepresentationPhase* h_infer) {
3315 ChangeRepresentation(value()->representation());
// Folds the minus-zero check: a double constant is tested directly, and any
// Smi/Integer32 value can never be -0, so the false successor is taken.
3319 bool HCompareMinusZeroAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3320 if (FLAG_fold_constants && value()->IsConstant()) {
3321 HConstant* constant = HConstant::cast(value());
3322 if (constant->HasDoubleValue()) {
3323 *block = IsMinusZero(constant->DoubleValue())
3324 ? FirstSuccessor() : SecondSuccessor();
3328 if (value()->representation().IsSmiOrInteger32()) {
3329 // A Smi or Integer32 cannot contain minus zero.
3330 *block = SecondSuccessor();
// The minus-zero check adopts the representation of the tested value.
3338 void HCompareMinusZeroAndBranch::InferRepresentation(
3339 HInferRepresentationPhase* h_infer) {
3340 ChangeRepresentation(value()->representation());
// Debug printer: the single successor's block id.
3345 void HGoto::PrintDataTo(StringStream* stream) {
3346 stream->Add("B%d", SuccessorAt(0)->block_id());
// Chooses the comparison representation from observed and actual operand
// representations; only ordered relational comparisons may treat undefined
// as NaN in Double mode (see the ES5 rationale comment below).
3350 void HCompareNumericAndBranch::InferRepresentation(
3351 HInferRepresentationPhase* h_infer) {
3352 Representation left_rep = left()->representation();
3353 Representation right_rep = right()->representation();
3354 Representation observed_left = observed_input_representation(0);
3355 Representation observed_right = observed_input_representation(1);
3357 Representation rep = Representation::None();
3358 rep = rep.generalize(observed_left);
3359 rep = rep.generalize(observed_right);
3360 if (rep.IsNone() || rep.IsSmiOrInteger32()) {
// Keep Smi/Integer32 if the actual (non-tagged) inputs allow it.
3361 if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
3362 if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);
3364 rep = Representation::Double();
3367 if (rep.IsDouble()) {
3368 // According to the ES5 spec (11.9.3, 11.8.5), Equality comparisons (==, ===
3369 // and !=) have special handling of undefined, e.g. undefined == undefined
3370 // is 'true'. Relational comparisons have a different semantic, first
3371 // calling ToPrimitive() on their arguments. The standard Crankshaft
3372 // tagged-to-double conversion to ensure the HCompareNumericAndBranch's
3373 // inputs are doubles caused 'undefined' to be converted to NaN. That's
3374 // compatible out-of-the box with ordered relational comparisons (<, >, <=,
3375 // >=). However, for equality comparisons (and for 'in' and 'instanceof'),
3376 // it is not consistent with the spec. For example, it would cause undefined
3377 // == undefined (should be true) to be evaluated as NaN == NaN
3378 // (false). Therefore, any comparisons other than ordered relational
3379 // comparisons must cause a deopt when one of their arguments is undefined.
3381 if (Token::IsOrderedRelationalCompareOp(token_)) {
3382 SetFlag(kAllowUndefinedAsNaN);
3385 ChangeRepresentation(rep);
// Debug printer: the parameter's index.
3389 void HParameter::PrintDataTo(StringStream* stream) {
3390 stream->Add("%u", index());
// Debug printer: object, field access description, and any load dependency.
3394 void HLoadNamedField::PrintDataTo(StringStream* stream) {
3395 object()->PrintNameTo(stream);
3396 access_.PrintTo(stream);
3398 if (HasDependency()) {
3400 dependency()->PrintNameTo(stream);
// Factory for a single-map check. When the value is a constant already known
// to have that (omittable) map, the runtime check is elided and, for maps
// that can still transition, a compilation dependency is registered so the
// code deopts if the map changes later.
3405 HCheckMaps* HCheckMaps::New(Zone* zone,
3409 CompilationInfo* info,
3410 HValue* typecheck) {
3411 HCheckMaps* check_map = new(zone) HCheckMaps(value, zone, typecheck);
3412 check_map->Add(map, zone);
3413 if (map->CanOmitMapChecks() &&
3414 value->IsConstant() &&
3415 HConstant::cast(value)->HasMap(map)) {
3416 // TODO(titzer): collect dependent map checks into a list.
3417 check_map->omit_ = true;
3418 if (map->CanTransition()) {
3419 map->AddDependentCompilationInfo(
3420 DependentCode::kPrototypeCheckGroup, info);
// Debug printer: receiver and the property name being loaded.
3427 void HLoadNamedGeneric::PrintDataTo(StringStream* stream) {
3428 object()->PrintNameTo(stream);
3430 stream->Add(String::cast(*name())->ToCString().get());
// Debug printer: elements (with elements-kind suffix for external arrays),
// key, dehoisting offset, dependency, and a hole-check marker.
3434 void HLoadKeyed::PrintDataTo(StringStream* stream) {
3435 if (!is_external()) {
3436 elements()->PrintNameTo(stream);
3438 ASSERT(elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND &&
3439 elements_kind() <= LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
3440 elements()->PrintNameTo(stream);
3442 stream->Add(ElementsKindToString(elements_kind()));
3446 key()->PrintNameTo(stream);
// Dehoisted loads fold a constant offset into the index expression.
3447 if (IsDehoisted()) {
3448 stream->Add(" + %d]", index_offset());
3453 if (HasDependency()) {
3455 dependency()->PrintNameTo(stream);
3458 if (RequiresHoleCheck()) {
3459 stream->Add(" check_hole");
// Whether the uses of this load can themselves cope with reading the hole
// (so no hole check is needed): never for packed or external kinds; for
// ALLOW_RETURN_HOLE double arrays all uses must accept hole-as-NaN;
// otherwise the hole is a tagged value and every use must be an HChange.
3464 bool HLoadKeyed::UsesMustHandleHole() const {
// Packed arrays contain no holes at all.
3465 if (IsFastPackedElementsKind(elements_kind())) {
3469 if (IsExternalArrayElementsKind(elements_kind())) {
3473 if (hole_mode() == ALLOW_RETURN_HOLE) {
3474 if (IsFastDoubleElementsKind(elements_kind())) {
3475 return AllUsesCanTreatHoleAsNaN();
3480 if (IsFastDoubleElementsKind(elements_kind())) {
3484 // Holes are only returned as tagged values.
3485 if (!representation().IsTagged()) {
3489 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
3490 HValue* use = it.value();
3491 if (!use->IsChange()) return false;
// True when this is a fast-double load and every use tolerates NaN in place
// of the hole (kAllowUndefinedAsNaN on all uses).
3498 bool HLoadKeyed::AllUsesCanTreatHoleAsNaN() const {
3499 return IsFastDoubleElementsKind(elements_kind()) &&
3500 CheckUsesForFlag(HValue::kAllowUndefinedAsNaN);
// A hole check is required unless the elements are packed or external, or
// all uses can handle the hole themselves.
3504 bool HLoadKeyed::RequiresHoleCheck() const {
3505 if (IsFastPackedElementsKind(elements_kind())) {
3509 if (IsExternalArrayElementsKind(elements_kind())) {
3513 return !UsesMustHandleHole();
// Debug printer: receiver and key of the generic keyed load.
3517 void HLoadKeyedGeneric::PrintDataTo(StringStream* stream) {
3518 object()->PrintNameTo(stream);
3520 key()->PrintNameTo(stream);
// Strength-reduces a generic keyed load whose key comes from a for-in name
// cache over the same enumerable: verifies the map with HCheckMapValue, loads
// the property index from the parallel index cache, and replaces the generic
// load with a fast HLoadFieldByIndex.
3525 HValue* HLoadKeyedGeneric::Canonicalize() {
3526 // Recognize generic keyed loads that use property name generated
3527 // by for-in statement as a key and rewrite them into fast property load
3529 if (key()->IsLoadKeyed()) {
3530 HLoadKeyed* key_load = HLoadKeyed::cast(key());
3531 if (key_load->elements()->IsForInCacheArray()) {
3532 HForInCacheArray* names_cache =
3533 HForInCacheArray::cast(key_load->elements());
// The optimization only applies when loading from the object the for-in
// enumerated, so the cached names/indices match its map.
3535 if (names_cache->enumerable() == object()) {
3536 HForInCacheArray* index_cache =
3537 names_cache->index_cache();
3538 HCheckMapValue* map_check =
3539 HCheckMapValue::New(block()->graph()->zone(),
3540 block()->graph()->GetInvalidContext(),
3542 names_cache->map());
3543 HInstruction* index = HLoadKeyed::New(
3544 block()->graph()->zone(),
3545 block()->graph()->GetInvalidContext(),
3549 key_load->elements_kind());
3550 map_check->InsertBefore(this);
3551 index->InsertBefore(this);
3552 return Prepend(new(block()->zone()) HLoadFieldByIndex(
// Debug printer: receiver, property name, and the stored value.
3562 void HStoreNamedGeneric::PrintDataTo(StringStream* stream) {
3563 object()->PrintNameTo(stream);
3565 ASSERT(name()->IsString());
3566 stream->Add(String::cast(*name())->ToCString().get());
3568 value()->PrintNameTo(stream);
// Debug printer: object, field access, value, plus write-barrier and
// map-transition annotations.
3572 void HStoreNamedField::PrintDataTo(StringStream* stream) {
3573 object()->PrintNameTo(stream);
3574 access_.PrintTo(stream);
3576 value()->PrintNameTo(stream);
3577 if (NeedsWriteBarrier()) {
3578 stream->Add(" (write-barrier)");
3580 if (has_transition()) {
3581 stream->Add(" (transition map %p)", *transition_map());
// Debug printer: elements (with kind suffix for external arrays), key,
// dehoisting offset, and the stored value.
3586 void HStoreKeyed::PrintDataTo(StringStream* stream) {
3587 if (!is_external()) {
3588 elements()->PrintNameTo(stream);
3590 elements()->PrintNameTo(stream);
3592 stream->Add(ElementsKindToString(elements_kind()));
3593 ASSERT(elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND &&
3594 elements_kind() <= LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
3598 key()->PrintNameTo(stream);
3599 if (IsDehoisted()) {
3600 stream->Add(" + %d] = ", index_offset());
3602 stream->Add("] = ");
3605 value()->PrintNameTo(stream);
// Trace printer: receiver[key] = value.
3609 void HStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
3610 object()->PrintNameTo(stream);
3612 key()->PrintNameTo(stream);
3613 stream->Add("] = ");
3614 value()->PrintNameTo(stream);
// Trace printer: shows the from/to maps and their elements kinds, and tags
// transitions that only need a map swap (no elements copy) as "(simple)".
3618 void HTransitionElementsKind::PrintDataTo(StringStream* stream) {
3619 object()->PrintNameTo(stream);
3620 ElementsKind from_kind = original_map().handle()->elements_kind();
3621 ElementsKind to_kind = transitioned_map().handle()->elements_kind();
3622 stream->Add(" %p [%s] -> %p [%s]",
3623 *original_map().handle(),
3624 ElementsAccessor::ForKind(from_kind)->name(),
3625 *transitioned_map().handle(),
3626 ElementsAccessor::ForKind(to_kind)->name());
3627 if (IsSimpleMapChangeTransition(from_kind, to_kind)) stream->Add(" (simple)");
// Trace printer: cell address plus property-detail flags.
3631 void HLoadGlobalCell::PrintDataTo(StringStream* stream) {
3632 stream->Add("[%p]", *cell().handle());
3633 if (!details_.IsDontDelete()) stream->Add(" (deleteable)");
3634 if (details_.IsReadOnly()) stream->Add(" (read-only)");
// A global cell load needs a hole check unless the property can never be
// deleted (DontDelete and not ReadOnly). Otherwise, any use that is not a
// representation change forces the check (loop tail elided in excerpt).
3638 bool HLoadGlobalCell::RequiresHoleCheck() const {
3639 if (details_.IsDontDelete() && !details_.IsReadOnly()) return false;
3640 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
3641 HValue* use = it.value();
3642 if (!use->IsChange()) return true;
// Trace printer: the global property name being loaded.
3648 void HLoadGlobalGeneric::PrintDataTo(StringStream* stream) {
3649 stream->Add("%o ", *name());
// Trace printer: base allocation and the offset of this inner object.
3653 void HInnerAllocatedObject::PrintDataTo(StringStream* stream) {
3654 base_object()->PrintNameTo(stream);
3655 stream->Add(" offset ");
3656 offset()->PrintTo(stream);
// Trace printer: [cell] = value plus property-detail flags.
3660 void HStoreGlobalCell::PrintDataTo(StringStream* stream) {
3661 stream->Add("[%p] = ", *cell().handle());
3662 value()->PrintNameTo(stream);
3663 if (!details_.IsDontDelete()) stream->Add(" (deleteable)");
3664 if (details_.IsReadOnly()) stream->Add(" (read-only)");
// Trace printer: context value and slot index being loaded.
3668 void HLoadContextSlot::PrintDataTo(StringStream* stream) {
3669 value()->PrintNameTo(stream);
3670 stream->Add("[%d]", slot_index());
// Trace printer: context[slot] = value.
3674 void HStoreContextSlot::PrintDataTo(StringStream* stream) {
3675 context()->PrintNameTo(stream);
3676 stream->Add("[%d] = ", slot_index());
3677 value()->PrintNameTo(stream);
3681 // Implementation of type inference and type conversions. Calculates
3682 // the inferred type of this instruction based on the input operands.
// Base-class fallback for type inference; body elided in this excerpt —
// presumably returns the statically declared type_. TODO confirm.
3684 HType HValue::CalculateInferredType() {
// A phi's type is the least upper bound (Combine) of all operand types;
// an operand-less phi conservatively stays Tagged.
3689 HType HPhi::CalculateInferredType() {
3690 if (OperandCount() == 0) return HType::Tagged();
3691 HType result = OperandAt(0)->type();
3692 for (int i = 1; i < OperandCount(); ++i) {
3693 HType current = OperandAt(i)->type();
3694 result = result.Combine(current);
// Boxing a double into a tagged value always yields a HeapNumber;
// remaining cases are elided in this excerpt.
3700 HType HChange::CalculateInferredType() {
3701 if (from().IsDouble() && to().IsTagged()) return HType::HeapNumber();
// Generalizes this operation's representation toward its input's, but never
// degrades to Tagged just because the input is Tagged.
3706 Representation HUnaryMathOperation::RepresentationFromInputs() {
3707 Representation rep = representation();
3708 // If any of the actual input representation is more general than what we
3709 // have so far but not Tagged, use that representation instead.
3710 Representation input_rep = value()->representation();
3711 if (!input_rep.IsTagged()) {
3712 rep = rep.generalize(input_rep);
// Allocation folding: when a dominating HAllocate exists, grow its size to
// also cover this allocation and replace `this` with an HInnerAllocatedObject
// pointing into the dominator's chunk. Bails out (returning false in elided
// lines — TODO confirm) when folding is disabled, the dominator is not an
// allocate, sizes are not constant, folding crosses basic blocks in local
// mode, or the combined size exceeds the regular-page limit.
3718 bool HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
3719 HValue* dominator) {
3720 ASSERT(side_effect == kNewSpacePromotion);
3721 Zone* zone = block()->zone();
3722 if (!FLAG_use_allocation_folding) return false;
3724 // Try to fold allocations together with their dominating allocations.
3725 if (!dominator->IsAllocate()) {
3726 if (FLAG_trace_allocation_folding) {
3727 PrintF("#%d (%s) cannot fold into #%d (%s)\n",
3728 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3733 // Check whether we are folding within the same block for local folding.
3734 if (FLAG_use_local_allocation_folding && dominator->block() != block()) {
3735 if (FLAG_trace_allocation_folding) {
3736 PrintF("#%d (%s) cannot fold into #%d (%s), crosses basic blocks\n",
3737 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3742 HAllocate* dominator_allocate = HAllocate::cast(dominator);
3743 HValue* dominator_size = dominator_allocate->size();
3744 HValue* current_size = size();
3746 // TODO(hpayer): Add support for non-constant allocation in dominator.
3747 if (!current_size->IsInteger32Constant() ||
3748 !dominator_size->IsInteger32Constant()) {
3749 if (FLAG_trace_allocation_folding) {
3750 PrintF("#%d (%s) cannot fold into #%d (%s), dynamic allocation size\n",
3751 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
// Possibly hoist past the immediate dominator (old-space over old-space).
3756 dominator_allocate = GetFoldableDominator(dominator_allocate);
3757 if (dominator_allocate == NULL) {
// Folding only combines allocations targeting the same space.
3761 ASSERT((IsNewSpaceAllocation() &&
3762 dominator_allocate->IsNewSpaceAllocation()) ||
3763 (IsOldDataSpaceAllocation() &&
3764 dominator_allocate->IsOldDataSpaceAllocation()) ||
3765 (IsOldPointerSpaceAllocation() &&
3766 dominator_allocate->IsOldPointerSpaceAllocation()));
3768 // First update the size of the dominator allocate instruction.
3769 dominator_size = dominator_allocate->size();
3770 int32_t original_object_size =
3771 HConstant::cast(dominator_size)->GetInteger32Constant();
3772 int32_t dominator_size_constant = original_object_size;
3773 int32_t current_size_constant =
3774 HConstant::cast(current_size)->GetInteger32Constant();
3775 int32_t new_dominator_size = dominator_size_constant + current_size_constant;
3777 if (MustAllocateDoubleAligned()) {
3778 if (!dominator_allocate->MustAllocateDoubleAligned()) {
3779 dominator_allocate->MakeDoubleAligned();
// Pad so this object's start within the folded chunk is double-aligned.
3781 if ((dominator_size_constant & kDoubleAlignmentMask) != 0) {
3782 dominator_size_constant += kDoubleSize / 2;
3783 new_dominator_size += kDoubleSize / 2;
3787 // Since we clear the first word after folded memory, we cannot use the
3788 // whole Page::kMaxRegularHeapObjectSize memory.
3789 if (new_dominator_size > Page::kMaxRegularHeapObjectSize - kPointerSize) {
3790 if (FLAG_trace_allocation_folding) {
3791 PrintF("#%d (%s) cannot fold into #%d (%s) due to size: %d\n",
3792 id(), Mnemonic(), dominator_allocate->id(),
3793 dominator_allocate->Mnemonic(), new_dominator_size);
3798 HInstruction* new_dominator_size_constant = HConstant::CreateAndInsertBefore(
3802 Representation::None(),
3803 dominator_allocate);
3804 dominator_allocate->UpdateSize(new_dominator_size_constant);
3807 if (FLAG_verify_heap && dominator_allocate->IsNewSpaceAllocation()) {
3808 dominator_allocate->MakePrefillWithFiller();
3810 // TODO(hpayer): This is a short-term hack to make allocation mementos
3811 // work again in new space.
3812 dominator_allocate->ClearNextMapWord(original_object_size);
3815 // TODO(hpayer): This is a short-term hack to make allocation mementos
3816 // work again in new space.
3817 dominator_allocate->ClearNextMapWord(original_object_size);
3820 dominator_allocate->UpdateClearNextMapWord(MustClearNextMapWord());
3822 // After that replace the dominated allocate instruction.
3823 HInstruction* inner_offset = HConstant::CreateAndInsertBefore(
3826 dominator_size_constant,
3827 Representation::None(),
3830 HInstruction* dominated_allocate_instr =
3831 HInnerAllocatedObject::New(zone,
3836 dominated_allocate_instr->InsertBefore(this);
3837 DeleteAndReplaceWith(dominated_allocate_instr);
3838 if (FLAG_trace_allocation_folding) {
3839 PrintF("#%d (%s) folded into #%d (%s)\n",
3840 id(), Mnemonic(), dominator_allocate->id(),
3841 dominator_allocate->Mnemonic());
// Finds an allocation this one can actually fold into. If the immediate
// dominator targets a different space, old-space allocations may be hoisted
// over it to the dominator's own dominating allocate — installing or updating
// a free-space filler so GC can parse the skipped-over region. Returns NULL
// (via elided lines — TODO confirm) when no foldable dominator exists.
3847 HAllocate* HAllocate::GetFoldableDominator(HAllocate* dominator) {
3848 if (!IsFoldable(dominator)) {
3849 // We cannot hoist old space allocations over new space allocations.
3850 if (IsNewSpaceAllocation() || dominator->IsNewSpaceAllocation()) {
3851 if (FLAG_trace_allocation_folding) {
3852 PrintF("#%d (%s) cannot fold into #%d (%s), new space hoisting\n",
3853 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3858 HAllocate* dominator_dominator = dominator->dominating_allocate_;
3860 // We can hoist old data space allocations over an old pointer space
3861 // allocation and vice versa. For that we have to check the dominator
3862 // of the dominator allocate instruction.
3863 if (dominator_dominator == NULL) {
// Remember the blocker so a later allocation can hoist over it.
3864 dominating_allocate_ = dominator;
3865 if (FLAG_trace_allocation_folding) {
3866 PrintF("#%d (%s) cannot fold into #%d (%s), different spaces\n",
3867 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3872 // We can just fold old space allocations that are in the same basic block,
3873 // since it is not guaranteed that we fill up the whole allocated old
3875 // TODO(hpayer): Remove this limitation and add filler maps for each each
3876 // allocation as soon as we have store elimination.
3877 if (block()->block_id() != dominator_dominator->block()->block_id()) {
3878 if (FLAG_trace_allocation_folding) {
3879 PrintF("#%d (%s) cannot fold into #%d (%s), different basic blocks\n",
3880 id(), Mnemonic(), dominator_dominator->id(),
3881 dominator_dominator->Mnemonic());
3886 ASSERT((IsOldDataSpaceAllocation() &&
3887 dominator_dominator->IsOldDataSpaceAllocation()) ||
3888 (IsOldPointerSpaceAllocation() &&
3889 dominator_dominator->IsOldPointerSpaceAllocation()));
3891 int32_t current_size = HConstant::cast(size())->GetInteger32Constant();
3892 HStoreNamedField* dominator_free_space_size =
3893 dominator->filler_free_space_size_;
3894 if (dominator_free_space_size != NULL) {
3895 // We already hoisted one old space allocation, i.e., we already installed
3896 // a filler map. Hence, we just have to update the free space size.
3897 dominator->UpdateFreeSpaceFiller(current_size);
3899 // This is the first old space allocation that gets hoisted. We have to
3900 // install a filler map since the follwing allocation may cause a GC.
3901 dominator->CreateFreeSpaceFiller(current_size);
3904 // We can hoist the old space allocation over the actual dominator.
3905 return dominator_dominator;
// Grows the size field of an already-installed free-space filler by
// free_space_size. Requires CreateFreeSpaceFiller to have run first.
3911 void HAllocate::UpdateFreeSpaceFiller(int32_t free_space_size) {
3912 ASSERT(filler_free_space_size_ != NULL);
3913 Zone* zone = block()->zone();
3914 // We must explicitly force Smi representation here because on x64 we
3915 // would otherwise automatically choose int32, but the actual store
3916 // requires a Smi-tagged value.
3917 HConstant* new_free_space_size = HConstant::CreateAndInsertBefore(
3920 filler_free_space_size_->value()->GetInteger32Constant() +
3922 Representation::Smi(),
3923 filler_free_space_size_)
3924 filler_free_space_size_->UpdateValue(new_free_space_size);
// Installs a FreeSpace filler object (map + Smi size field) immediately after
// the dominating allocation, so the heap stays iterable if a GC happens
// before the hoisted allocation's stores run. Records the size-store so
// UpdateFreeSpaceFiller can grow it later.
3928 void HAllocate::CreateFreeSpaceFiller(int32_t free_space_size) {
3929 ASSERT(filler_free_space_size_ == NULL);
3930 Zone* zone = block()->zone();
// The filler lives at the end of the dominating allocation's chunk.
3931 HInstruction* free_space_instr =
3932 HInnerAllocatedObject::New(zone, context(), dominating_allocate_,
3933 dominating_allocate_->size(), type());
3934 free_space_instr->InsertBefore(this);
3935 HConstant* filler_map = HConstant::New(
3938 isolate()->factory()->free_space_map());
3939 filler_map->FinalizeUniqueness(); // TODO(titzer): should be init'd a'ready
3940 filler_map->InsertAfter(free_space_instr);
3941 HInstruction* store_map = HStoreNamedField::New(zone, context(),
3942 free_space_instr, HObjectAccess::ForMap(), filler_map);
3943 store_map->SetFlag(HValue::kHasNoObservableSideEffects);
3944 store_map->InsertAfter(filler_map);
3946 // We must explicitly force Smi representation here because on x64 we
3947 // would otherwise automatically choose int32, but the actual store
3948 // requires a Smi-tagged value.
3949 HConstant* filler_size = HConstant::CreateAndInsertAfter(
3950 zone, context(), free_space_size, Representation::Smi(), store_map);
3951 // Must force Smi representation for x64 (see comment above).
3952 HObjectAccess access =
3953 HObjectAccess::ForMapAndOffset(isolate()->factory()->free_space_map(),
3954 FreeSpace::kSizeOffset,
3955 Representation::Smi());
3956 HStoreNamedField* store_size = HStoreNamedField::New(zone, context(),
3957 free_space_instr, access, filler_size);
3958 store_size->SetFlag(HValue::kHasNoObservableSideEffects);
3959 store_size->InsertAfter(filler_size);
3960 filler_free_space_size_ = store_size;
// Zeroes the word just past the object at `offset` (stores constant 0) so an
// allocation-memento scan never reads a stale map there. No-op unless
// MustClearNextMapWord() is set.
3964 void HAllocate::ClearNextMapWord(int offset) {
3965 if (MustClearNextMapWord()) {
3966 Zone* zone = block()->zone();
3967 HObjectAccess access =
3968 HObjectAccess::ForObservableJSObjectOffset(offset);
3969 HStoreNamedField* clear_next_map =
3970 HStoreNamedField::New(zone, context(), this, access,
3971 block()->graph()->GetConstant0());
3972 clear_next_map->ClearAllSideEffects();
3973 clear_next_map->InsertAfter(this);
// Trace printer: allocation size plus one-letter flags —
// N(ew)/P(ointer-old)/D(ata-old) space, A(ligned), F(iller-prefill).
3978 void HAllocate::PrintDataTo(StringStream* stream) {
3979 size()->PrintNameTo(stream);
3981 if (IsNewSpaceAllocation()) stream->Add("N");
3982 if (IsOldPointerSpaceAllocation()) stream->Add("P");
3983 if (IsOldDataSpaceAllocation()) stream->Add("D");
3984 if (MustAllocateDoubleAligned()) stream->Add("A");
3985 if (MustPrefillWithFiller()) stream->Add("F");
// Whether the stored double value must be canonicalized (NaN normalization)
// before the store. Constants, integer-sourced changes and most keyed loads
// are known-safe; keyed loads from external float/double arrays can carry
// arbitrary NaN bit patterns and do need canonicalization. (Early returns
// are elided in this excerpt.)
3990 bool HStoreKeyed::NeedsCanonicalization() {
3991 // If value is an integer or smi or comes from the result of a keyed load or
3992 // constant then it is either be a non-hole value or in the case of a constant
3993 // the hole is only being stored explicitly: no need for canonicalization.
3995 // The exception to that is keyed loads from external float or double arrays:
3996 // these can load arbitrary representation of NaN.
3998 if (value()->IsConstant()) {
4002 if (value()->IsLoadKeyed()) {
4003 return IsExternalFloatOrDoubleElementsKind(
4004 HLoadKeyed::cast(value())->elements_kind());
4007 if (value()->IsChange()) {
4008 if (HChange::cast(value())->from().IsSmiOrInteger32()) {
4011 if (HChange::cast(value())->value()->type().IsSmi()) {
// Constant-folding helpers: wrap a host value in an HConstant of the
// matching int32/double representation. #undef'd after the New() factories.
4019 #define H_CONSTANT_INT(val) \
4020 HConstant::New(zone, context, static_cast<int32_t>(val))
4021 #define H_CONSTANT_DOUBLE(val) \
4022 HConstant::New(zone, context, static_cast<double>(val))
// Factory generator for +, *, -: folds two numeric constants at graph-build
// time (preferring an int32 constant when the double result is a
// representable int32), otherwise emits the instruction.
4024 #define DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HInstr, op) \
4025 HInstruction* HInstr::New( \
4026 Zone* zone, HValue* context, HValue* left, HValue* right) { \
4027 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \
4028 HConstant* c_left = HConstant::cast(left); \
4029 HConstant* c_right = HConstant::cast(right); \
4030 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \
4031 double double_res = c_left->DoubleValue() op c_right->DoubleValue(); \
4032 if (IsInt32Double(double_res)) { \
4033 return H_CONSTANT_INT(double_res); \
4035 return H_CONSTANT_DOUBLE(double_res); \
4038 return new(zone) HInstr(context, left, right); \
4042 DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HAdd, +)
4043 DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HMul, *)
4044 DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HSub, -)
4046 #undef DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR
// Factory for string concatenation: folds two string constants into a flat
// concatenated constant when the combined length stays under
// String::kMaxLength (avoiding a range exception); otherwise emits the
// HStringAdd instruction.
4049 HInstruction* HStringAdd::New(Zone* zone,
4053 PretenureFlag pretenure_flag,
4054 StringAddFlags flags,
4055 Handle<AllocationSite> allocation_site) {
4056 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4057 HConstant* c_right = HConstant::cast(right);
4058 HConstant* c_left = HConstant::cast(left);
4059 if (c_left->HasStringValue() && c_right->HasStringValue()) {
4060 Handle<String> left_string = c_left->StringValue();
4061 Handle<String> right_string = c_right->StringValue();
4062 // Prevent possible exception by invalid string length.
4063 if (left_string->length() + right_string->length() < String::kMaxLength) {
4064 Handle<String> concat = zone->isolate()->factory()->NewFlatConcatString(
4065 c_left->StringValue(), c_right->StringValue());
4066 ASSERT(!concat.is_null());
4067 return HConstant::New(zone, context, concat);
4071 return new(zone) HStringAdd(
4072 context, left, right, pretenure_flag, flags, allocation_site);
// Trace printer: which operands get string checks, the operands themselves,
// and the pretenuring decision (N = not tenured, D = tenured).
4076 void HStringAdd::PrintDataTo(StringStream* stream) {
4077 if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) {
4078 stream->Add("_CheckBoth");
4079 } else if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_LEFT) {
4080 stream->Add("_CheckLeft");
4081 } else if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_RIGHT) {
4082 stream->Add("_CheckRight");
4084 HBinaryOperation::PrintDataTo(stream);
4086 if (pretenure_flag() == NOT_TENURED) stream->Add("N");
4087 else if (pretenure_flag() == TENURED) stream->Add("D");
// Factory: folds a constant char code into the corresponding one-character
// string constant (code masked to 16 bits); a non-finite code folds to the
// empty string. Otherwise emits the instruction.
4092 HInstruction* HStringCharFromCode::New(
4093 Zone* zone, HValue* context, HValue* char_code) {
4094 if (FLAG_fold_constants && char_code->IsConstant()) {
4095 HConstant* c_code = HConstant::cast(char_code);
4096 Isolate* isolate = zone->isolate();
4097 if (c_code->HasNumberValue()) {
4098 if (std::isfinite(c_code->DoubleValue())) {
4099 uint32_t code = c_code->NumberValueAsInteger32() & 0xffff;
4100 return HConstant::New(zone, context,
4101 LookupSingleCharacterStringFromCode(isolate, code));
4103 return HConstant::New(zone, context, isolate->factory()->empty_string());
4106 return new(zone) HStringCharFromCode(context, char_code);
// Factory for unary math ops: folds constant numeric inputs per operation
// (the switch-on-op lines are elided in this excerpt). Handles NaN and
// +/-Infinity up front, then floor/round/abs/log/exp/sqrt/clz cases.
4110 HInstruction* HUnaryMathOperation::New(
4111 Zone* zone, HValue* context, HValue* value, BuiltinFunctionId op) {
4113 if (!FLAG_fold_constants) break;
4114 if (!value->IsConstant()) break;
4115 HConstant* constant = HConstant::cast(value);
4116 if (!constant->HasNumberValue()) break;
4117 double d = constant->DoubleValue();
4118 if (std::isnan(d)) { // NaN poisons everything.
4119 return H_CONSTANT_DOUBLE(OS::nan_value());
4121 if (std::isinf(d)) { // +Infinity and -Infinity.
4124 return H_CONSTANT_DOUBLE((d > 0.0) ? d : 0.0);
4127 return H_CONSTANT_DOUBLE((d > 0.0) ? d : OS::nan_value());
4130 return H_CONSTANT_DOUBLE((d > 0.0) ? d : -d);
4133 return H_CONSTANT_DOUBLE(d);
4135 return H_CONSTANT_INT(32);
4143 return H_CONSTANT_DOUBLE(fast_exp(d));
4145 return H_CONSTANT_DOUBLE(std::log(d));
4147 return H_CONSTANT_DOUBLE(fast_sqrt(d));
4149 return H_CONSTANT_DOUBLE(power_double_double(d, 0.5));
// abs: d + 0.0 also normalizes -0.0 to +0.0.
4151 return H_CONSTANT_DOUBLE((d >= 0.0) ? d + 0.0 : -d);
4153 // -0.5 .. -0.0 round to -0.0.
4154 if ((d >= -0.5 && Double(d).Sign() < 0)) return H_CONSTANT_DOUBLE(-0.0);
4155 // Doubles are represented as Significant * 2 ^ Exponent. If the
4156 // Exponent is not negative, the double value is already an integer.
4157 if (Double(d).Exponent() >= 0) return H_CONSTANT_DOUBLE(d);
4158 return H_CONSTANT_DOUBLE(std::floor(d + 0.5));
4160 return H_CONSTANT_DOUBLE(std::floor(d));
4162 uint32_t i = DoubleToUint32(d);
4163 return H_CONSTANT_INT(
4164 (i == 0) ? 32 : CompilerIntrinsics::CountLeadingZeros(i));
4171 return new(zone) HUnaryMathOperation(context, value, op);
// Factory for Math.pow: folds two numeric constants via power_helper,
// normalizing a NaN result to the canonical NaN; otherwise emits HPower.
4175 HInstruction* HPower::New(Zone* zone,
4179 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4180 HConstant* c_left = HConstant::cast(left);
4181 HConstant* c_right = HConstant::cast(right);
4182 if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
4183 double result = power_helper(c_left->DoubleValue(),
4184 c_right->DoubleValue());
4185 return H_CONSTANT_DOUBLE(std::isnan(result) ? OS::nan_value() : result);
4188 return new(zone) HPower(left, right);
// Factory for Math.min/max: folds constant operands, taking care to get the
// +0/-0 tie right (min prefers -0, max prefers +0 via the sign check). If
// all ordered comparisons fail, an operand is NaN and NaN is returned.
4192 HInstruction* HMathMinMax::New(
4193 Zone* zone, HValue* context, HValue* left, HValue* right, Operation op) {
4194 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4195 HConstant* c_left = HConstant::cast(left);
4196 HConstant* c_right = HConstant::cast(right);
4197 if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
4198 double d_left = c_left->DoubleValue();
4199 double d_right = c_right->DoubleValue();
4200 if (op == kMathMin) {
4201 if (d_left > d_right) return H_CONSTANT_DOUBLE(d_right);
4202 if (d_left < d_right) return H_CONSTANT_DOUBLE(d_left);
4203 if (d_left == d_right) {
4204 // Handle +0 and -0.
4205 return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_left
4209 if (d_left < d_right) return H_CONSTANT_DOUBLE(d_right);
4210 if (d_left > d_right) return H_CONSTANT_DOUBLE(d_left);
4211 if (d_left == d_right) {
4212 // Handle +0 and -0.
4213 return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_right
4217 // All comparisons failed, must be NaN.
4218 return H_CONSTANT_DOUBLE(OS::nan_value());
4221 return new(zone) HMathMinMax(context, left, right, op);
// Factory for modulus: folds int32 constants. kMinInt % -1 and a zero result
// with negative dividend must produce -0.0 (a double constant) to match JS
// semantics; other results stay int32.
4225 HInstruction* HMod::New(Zone* zone,
4229 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4230 HConstant* c_left = HConstant::cast(left);
4231 HConstant* c_right = HConstant::cast(right);
4232 if (c_left->HasInteger32Value() && c_right->HasInteger32Value()) {
4233 int32_t dividend = c_left->Integer32Value();
4234 int32_t divisor = c_right->Integer32Value();
4235 if (dividend == kMinInt && divisor == -1) {
4236 return H_CONSTANT_DOUBLE(-0.0);
4239 int32_t res = dividend % divisor;
4240 if ((res == 0) && (dividend < 0)) {
4241 return H_CONSTANT_DOUBLE(-0.0);
4243 return H_CONSTANT_INT(res);
4247 return new(zone) HMod(context, left, right);
// Factory for division: folds numeric constants. Division by zero yields a
// signed infinity whose sign is the product of the operand signs (so
// x / -0 gets the right sign).
4251 HInstruction* HDiv::New(
4252 Zone* zone, HValue* context, HValue* left, HValue* right) {
4253 // If left and right are constant values, try to return a constant value.
4254 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4255 HConstant* c_left = HConstant::cast(left);
4256 HConstant* c_right = HConstant::cast(right);
4257 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
4258 if (c_right->DoubleValue() != 0) {
4259 double double_res = c_left->DoubleValue() / c_right->DoubleValue();
4260 if (IsInt32Double(double_res)) {
4261 return H_CONSTANT_INT(double_res);
4263 return H_CONSTANT_DOUBLE(double_res);
4265 int sign = Double(c_left->DoubleValue()).Sign() *
4266 Double(c_right->DoubleValue()).Sign(); // Right could be -0.
4267 return H_CONSTANT_DOUBLE(sign * V8_INFINITY);
4271 return new(zone) HDiv(context, left, right);
// Factory for &, |, ^: folds numeric constants by converting both operands
// to int32 (JS ToInt32 semantics) and applying the operator.
4275 HInstruction* HBitwise::New(
4276 Zone* zone, HValue* context, Token::Value op, HValue* left, HValue* right) {
4277 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4278 HConstant* c_left = HConstant::cast(left);
4279 HConstant* c_right = HConstant::cast(right);
4280 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
4282 int32_t v_left = c_left->NumberValueAsInteger32();
4283 int32_t v_right = c_right->NumberValueAsInteger32();
4285 case Token::BIT_XOR:
4286 result = v_left ^ v_right;
4288 case Token::BIT_AND:
4289 result = v_left & v_right;
4292 result = v_left | v_right;
4295 result = 0; // Please the compiler.
4298 return H_CONSTANT_INT(result);
4301 return new(zone) HBitwise(context, op, left, right);
// Factory generator for shift ops whose folded result is always int32; the
// shift amount is masked to 5 bits (JS shift semantics). Instantiated for
// HSar (arithmetic right) and HShl (left).
4305 #define DEFINE_NEW_H_BITWISE_INSTR(HInstr, result) \
4306 HInstruction* HInstr::New( \
4307 Zone* zone, HValue* context, HValue* left, HValue* right) { \
4308 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \
4309 HConstant* c_left = HConstant::cast(left); \
4310 HConstant* c_right = HConstant::cast(right); \
4311 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \
4312 return H_CONSTANT_INT(result); \
4315 return new(zone) HInstr(context, left, right); \
4319 DEFINE_NEW_H_BITWISE_INSTR(HSar,
4320 c_left->NumberValueAsInteger32() >> (c_right->NumberValueAsInteger32() & 0x1f))
4321 DEFINE_NEW_H_BITWISE_INSTR(HShl,
4322 c_left->NumberValueAsInteger32() << (c_right->NumberValueAsInteger32() & 0x1f))
4324 #undef DEFINE_NEW_H_BITWISE_INSTR
// Factory for >>> (unsigned right shift): folded separately from HSar/HShl
// because a zero shift of a negative value yields a result above kMaxInt,
// which must become a double constant rather than an int32.
4327 HInstruction* HShr::New(
4328 Zone* zone, HValue* context, HValue* left, HValue* right) {
4329 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4330 HConstant* c_left = HConstant::cast(left);
4331 HConstant* c_right = HConstant::cast(right);
4332 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
4333 int32_t left_val = c_left->NumberValueAsInteger32();
4334 int32_t right_val = c_right->NumberValueAsInteger32() & 0x1f;
4335 if ((right_val == 0) && (left_val < 0)) {
4336 return H_CONSTANT_DOUBLE(static_cast<uint32_t>(left_val));
4338 return H_CONSTANT_INT(static_cast<uint32_t>(left_val) >> right_val);
4341 return new(zone) HShr(context, left, right);
// Factory for sequential-string char access: folds a constant string and
// constant in-range index into the char code at that position.
4345 HInstruction* HSeqStringGetChar::New(Zone* zone,
4347 String::Encoding encoding,
4350 if (FLAG_fold_constants && string->IsConstant() && index->IsConstant()) {
4351 HConstant* c_string = HConstant::cast(string);
4352 HConstant* c_index = HConstant::cast(index);
4353 if (c_string->HasStringValue() && c_index->HasInteger32Value()) {
4354 Handle<String> s = c_string->StringValue();
4355 int32_t i = c_index->Integer32Value();
4357 ASSERT_LT(i, s->length());
4358 return H_CONSTANT_INT(s->Get(i));
4361 return new(zone) HSeqStringGetChar(encoding, string, index);
4365 #undef H_CONSTANT_INT
4366 #undef H_CONSTANT_DOUBLE
// Trace printer: token name of the bitwise op, then the standard binary-op
// operand dump.
4369 void HBitwise::PrintDataTo(StringStream* stream) {
4370 stream->Add(Token::Name(op_));
4372 HBitwiseBinaryOperation::PrintDataTo(stream);
// If every use of this phi truncates to int32 and every operand is a
// constant, rewrite non-int32 constant operands into equivalent int32
// constants (doubles via ToInt32, booleans to 0/1, immortal immovables to 0)
// so representation inference can pick Integer32. Must run before inference.
4376 void HPhi::SimplifyConstantInputs() {
4377 // Convert constant inputs to integers when all uses are truncating.
4378 // This must happen before representation inference takes place.
4379 if (!CheckUsesForFlag(kTruncatingToInt32)) return;
4380 for (int i = 0; i < OperandCount(); ++i) {
4381 if (!OperandAt(i)->IsConstant()) return;
4383 HGraph* graph = block()->graph();
4384 for (int i = 0; i < OperandCount(); ++i) {
4385 HConstant* operand = HConstant::cast(OperandAt(i));
4386 if (operand->HasInteger32Value()) {
4388 } else if (operand->HasDoubleValue()) {
4389 HConstant* integer_input =
4390 HConstant::New(graph->zone(), graph->GetInvalidContext(),
4391 DoubleToInt32(operand->DoubleValue()));
4392 integer_input->InsertAfter(operand);
4393 SetOperandAt(i, integer_input);
4394 } else if (operand->HasBooleanValue()) {
4395 SetOperandAt(i, operand->BooleanValue() ? graph->GetConstant1()
4396 : graph->GetConstant0());
4397 } else if (operand->ImmortalImmovable()) {
4398 SetOperandAt(i, graph->GetConstant0());
4401 // Overwrite observed input representations because they are likely Tagged.
4402 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
4403 HValue* use = it.value();
4404 if (use->IsBinaryOperation()) {
4405 HBinaryOperation::cast(use)->set_observed_input_representation(
4406 it.index(), Representation::Smi());
// Representation inference for phis: successively generalize from operand
// representations, then from uses, then from hard use requirements.
4412 void HPhi::InferRepresentation(HInferRepresentationPhase* h_infer) {
4413 ASSERT(CheckFlag(kFlexibleRepresentation));
4414 Representation new_rep = RepresentationFromInputs();
4415 UpdateRepresentation(new_rep, h_infer, "inputs");
4416 new_rep = RepresentationFromUses();
4417 UpdateRepresentation(new_rep, h_infer, "uses");
4418 new_rep = RepresentationFromUseRequirements();
4419 UpdateRepresentation(new_rep, h_infer, "use requirements");
// Join of the known-optimal representations of all operands.
4423 Representation HPhi::RepresentationFromInputs() {
4424 Representation r = Representation::None();
4425 for (int i = 0; i < OperandCount(); ++i) {
4426 r = r.generalize(OperandAt(i)->KnownOptimalRepresentation());
4432 // Returns a representation if all uses agree on the same representation.
4433 // Integer32 is also returned when some uses are Smi but others are Integer32.
4434 Representation HValue::RepresentationFromUseRequirements() {
4435 Representation rep = Representation::None();
4436 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
4437 // Ignore the use requirement from never run code
4438 if (it.value()->block()->IsUnreachable()) continue;
4440 // We check for observed_input_representation elsewhere.
4441 Representation use_rep =
4442 it.value()->RequiredInputRepresentation(it.index());
// Agreeing or unconstrained uses are fine; Smi/Integer32 mixes widen to
// Integer32; any other disagreement means no consensus (None).
4447 if (use_rep.IsNone() || rep.Equals(use_rep)) continue;
4448 if (rep.generalize(use_rep).IsInteger32()) {
4449 rep = Representation::Integer32();
4452 return Representation::None();
// True if any use requires a concrete representation that is neither Smi
// nor Tagged (middle condition elided in this excerpt — TODO confirm it
// tests !use_rep.IsSmi()).
4458 bool HValue::HasNonSmiUse() {
4459 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
4460 // We check for observed_input_representation elsewhere.
4461 Representation use_rep =
4462 it.value()->RequiredInputRepresentation(it.index());
4463 if (!use_rep.IsNone() &&
4465 !use_rep.IsTagged()) {
4473 // Node-specific verification code is only included in debug mode.
// Checks the SSA invariant: operand i must be defined in (or dominate)
// predecessor i of the phi's block.
4476 void HPhi::Verify() {
4477 ASSERT(OperandCount() == block()->predecessors()->length());
4478 for (int i = 0; i < OperandCount(); ++i) {
4479 HValue* value = OperandAt(i);
4480 HBasicBlock* defining_block = value->block();
4481 HBasicBlock* predecessor_block = block()->predecessors()->at(i);
4482 ASSERT(defining_block == predecessor_block ||
4483 defining_block->Dominates(predecessor_block));
// Debug verification: base-instruction checks (plus elided simulate-specific
// assertions).
4488 void HSimulate::Verify() {
4489 HInstruction::Verify();
// Debug verification: a heap-object check is a pure guard and has no uses.
4494 void HCheckHeapObject::Verify() {
4495 HInstruction::Verify();
4496 ASSERT(HasNoUses());
// Debug verification: a value check is a pure guard and has no uses.
4500 void HCheckValue::Verify() {
4501 HInstruction::Verify();
4502 ASSERT(HasNoUses());
// Access descriptor for a FixedArray header field; the length slot gets the
// dedicated kArrayLengths portion via ForFixedArrayLength().
4508 HObjectAccess HObjectAccess::ForFixedArrayHeader(int offset) {
4509 ASSERT(offset >= 0);
4510 ASSERT(offset < FixedArray::kHeaderSize);
4511 if (offset == FixedArray::kLengthOffset) return ForFixedArrayLength();
4512 return HObjectAccess(kInobject, offset);
// Access descriptor for an arbitrary in-object offset, classifying the
// elements pointer and map slots into their dedicated portions. When a map
// is supplied, records whether the offset is an existing in-object property
// (i.e. below the unused-property-fields region).
4516 HObjectAccess HObjectAccess::ForMapAndOffset(Handle<Map> map, int offset,
4517 Representation representation) {
4518 ASSERT(offset >= 0);
4519 Portion portion = kInobject;
4521 if (offset == JSObject::kElementsOffset) {
4522 portion = kElementsPointer;
4523 } else if (offset == JSObject::kMapOffset) {
4526 bool existing_inobject_property = true;
4527 if (!map.is_null()) {
4528 existing_inobject_property = (offset <
4529 map->instance_size() - map->unused_property_fields() * kPointerSize);
4531 return HObjectAccess(portion, offset, representation, Handle<String>::null(),
4532 false, existing_inobject_property);
// Access descriptor for AllocationSite fields, choosing Tagged or Smi
// representation per field; unknown offsets fall through to a plain
// in-object access.
4536 HObjectAccess HObjectAccess::ForAllocationSiteOffset(int offset) {
4538 case AllocationSite::kTransitionInfoOffset:
4539 return HObjectAccess(kInobject, offset, Representation::Tagged());
4540 case AllocationSite::kNestedSiteOffset:
4541 return HObjectAccess(kInobject, offset, Representation::Tagged());
4542 case AllocationSite::kPretenureDataOffset:
4543 return HObjectAccess(kInobject, offset, Representation::Smi());
4544 case AllocationSite::kPretenureCreateCountOffset:
4545 return HObjectAccess(kInobject, offset, Representation::Smi());
4546 case AllocationSite::kDependentCodeOffset:
4547 return HObjectAccess(kInobject, offset, Representation::Tagged());
4548 case AllocationSite::kWeakNextOffset:
4549 return HObjectAccess(kInobject, offset, Representation::Tagged());
4553 return HObjectAccess(kInobject, offset);
// Access descriptor for a context slot at the given index, expressed as a
// tagged in-object offset past the context header.
4557 HObjectAccess HObjectAccess::ForContextSlot(int index) {
4559 Portion portion = kInobject;
4560 int offset = Context::kHeaderSize + index * kPointerSize;
4561 ASSERT_EQ(offset, Context::SlotOffset(index) + kHeapObjectTag);
4562 return HObjectAccess(portion, offset, Representation::Tagged());
// Access descriptor for a JSArray offset, classifying the elements pointer,
// array length, and map slots into their dedicated portions.
4566 HObjectAccess HObjectAccess::ForJSArrayOffset(int offset) {
4567 ASSERT(offset >= 0);
4568 Portion portion = kInobject;
4570 if (offset == JSObject::kElementsOffset) {
4571 portion = kElementsPointer;
4572 } else if (offset == JSArray::kLengthOffset) {
4573 portion = kArrayLengths;
4574 } else if (offset == JSObject::kMapOffset) {
4577 return HObjectAccess(portion, offset);
// Access descriptor for a slot in an out-of-object properties backing store.
4581 HObjectAccess HObjectAccess::ForBackingStoreOffset(int offset,
4582 Representation representation) {
4583 ASSERT(offset >= 0);
4584 return HObjectAccess(kBackingStore, offset, representation,
4585 Handle<String>::null(), false, false);
// Access descriptor for a named property found by LookupResult: resolves the
// field index and representation either from the map itself or from the
// transition target's descriptors, then maps negative indices to in-object
// slots (counted back from instance_size) and non-negative indices to the
// properties backing store.
4589 HObjectAccess HObjectAccess::ForField(Handle<Map> map,
4590 LookupResult* lookup,
4591 Handle<String> name) {
4592 ASSERT(lookup->IsField() || lookup->IsTransitionToField());
4594 Representation representation;
4595 if (lookup->IsField()) {
4596 index = lookup->GetLocalFieldIndexFromMap(*map);
4597 representation = lookup->representation();
4599 Map* transition = lookup->GetTransitionTarget();
4600 int descriptor = transition->LastAdded();
4601 index = transition->instance_descriptors()->GetFieldIndex(descriptor) -
4602 map->inobject_properties();
4603 PropertyDetails details =
4604 transition->instance_descriptors()->GetDetails(descriptor);
4605 representation = details.representation();
4608 // Negative property indices are in-object properties, indexed
4609 // from the end of the fixed part of the object.
4610 int offset = (index * kPointerSize) + map->instance_size();
4611 return HObjectAccess(kInobject, offset, representation, name, false, true);
4613 // Non-negative property indices are in the properties array.
4614 int offset = (index * kPointerSize) + FixedArray::kHeaderSize;
4615 return HObjectAccess(kBackingStore, offset, representation, name,
4621 HObjectAccess HObjectAccess::ForCellPayload(Isolate* isolate) {
4622 return HObjectAccess(
4623 kInobject, Cell::kValueOffset, Representation::Tagged(),
4624 Handle<String>(isolate->heap()->cell_value_string()));
4628 void HObjectAccess::SetGVNFlags(HValue *instr, PropertyAccessType access_type) {
4629 // set the appropriate GVN flags for a given load or store instruction
4630 if (access_type == STORE) {
4631 // track dominating allocations in order to eliminate write barriers
4632 instr->SetDependsOnFlag(::v8::internal::kNewSpacePromotion);
4633 instr->SetFlag(HValue::kTrackSideEffectDominators);
4635 // try to GVN loads, but don't hoist above map changes
4636 instr->SetFlag(HValue::kUseGVN);
4637 instr->SetDependsOnFlag(::v8::internal::kMaps);
4640 switch (portion()) {
4642 if (access_type == STORE) {
4643 instr->SetChangesFlag(::v8::internal::kArrayLengths);
4645 instr->SetDependsOnFlag(::v8::internal::kArrayLengths);
4648 case kStringLengths:
4649 if (access_type == STORE) {
4650 instr->SetChangesFlag(::v8::internal::kStringLengths);
4652 instr->SetDependsOnFlag(::v8::internal::kStringLengths);
4656 if (access_type == STORE) {
4657 instr->SetChangesFlag(::v8::internal::kInobjectFields);
4659 instr->SetDependsOnFlag(::v8::internal::kInobjectFields);
4663 if (access_type == STORE) {
4664 instr->SetChangesFlag(::v8::internal::kDoubleFields);
4666 instr->SetDependsOnFlag(::v8::internal::kDoubleFields);
4670 if (access_type == STORE) {
4671 instr->SetChangesFlag(::v8::internal::kBackingStoreFields);
4673 instr->SetDependsOnFlag(::v8::internal::kBackingStoreFields);
4676 case kElementsPointer:
4677 if (access_type == STORE) {
4678 instr->SetChangesFlag(::v8::internal::kElementsPointer);
4680 instr->SetDependsOnFlag(::v8::internal::kElementsPointer);
4684 if (access_type == STORE) {
4685 instr->SetChangesFlag(::v8::internal::kMaps);
4687 instr->SetDependsOnFlag(::v8::internal::kMaps);
4690 case kExternalMemory:
4691 if (access_type == STORE) {
4692 instr->SetChangesFlag(::v8::internal::kExternalMemory);
4694 instr->SetDependsOnFlag(::v8::internal::kExternalMemory);
4701 void HObjectAccess::PrintTo(StringStream* stream) const {
4704 switch (portion()) {
4706 case kStringLengths:
4707 stream->Add("%length");
4709 case kElementsPointer:
4710 stream->Add("%elements");
4713 stream->Add("%map");
4715 case kDouble: // fall through
4717 if (!name_.is_null()) {
4718 stream->Add(String::cast(*name_)->ToCString().get());
4720 stream->Add("[in-object]");
4723 if (!name_.is_null()) {
4724 stream->Add(String::cast(*name_)->ToCString().get());
4726 stream->Add("[backing-store]");
4728 case kExternalMemory:
4729 stream->Add("[external-memory]");
4733 stream->Add("@%d", offset());
4737 HInstruction* HNullarySIMDOperation::New(
4738 Zone* zone, HValue* context, BuiltinFunctionId op) {
4739 return new(zone) HNullarySIMDOperation(context, op);
4743 HInstruction* HUnarySIMDOperation::New(
4744 Zone* zone, HValue* context, HValue* value, BuiltinFunctionId op,
4745 Representation to) {
4746 return new(zone) HUnarySIMDOperation(context, value, op, to);
4750 HInstruction* HBinarySIMDOperation::New(
4751 Zone* zone, HValue* context, HValue* left, HValue* right,
4752 BuiltinFunctionId op) {
4753 return new(zone) HBinarySIMDOperation(context, left, right, op);
4757 HInstruction* HTernarySIMDOperation::New(
4758 Zone* zone, HValue* context, HValue* mask, HValue* left, HValue* right,
4759 BuiltinFunctionId op) {
4760 return new(zone) HTernarySIMDOperation(context, mask, left, right, op);
4764 HInstruction* HQuarternarySIMDOperation::New(
4765 Zone* zone, HValue* context, HValue* x, HValue* y, HValue* z, HValue* w,
4766 BuiltinFunctionId op) {
4767 return new(zone) HQuarternarySIMDOperation(context, x, y, z, w, op);
4771 const char* HNullarySIMDOperation::OpName() const {
4773 #define SIMD_NULLARY_OPERATION_CASE_ITEM(module, function, name, p4) \
4775 return #module "." #function;
4776 SIMD_NULLARY_OPERATIONS(SIMD_NULLARY_OPERATION_CASE_ITEM)
4777 #undef SIMD_NULLARY_OPERATION_CASE_ITEM
4785 void HNullarySIMDOperation::PrintDataTo(StringStream* stream) {
4786 const char* name = OpName();
4787 stream->Add("%s", name);
4791 const char* HUnarySIMDOperation::OpName() const {
4793 #define SIMD_UNARY_OPERATION_CASE_ITEM(module, function, name, p4, p5) \
4795 return #module "." #function;
4796 SIMD_UNARY_OPERATIONS(SIMD_UNARY_OPERATION_CASE_ITEM)
4797 SIMD_UNARY_OPERATIONS_FOR_PROPERTY_ACCESS(SIMD_UNARY_OPERATION_CASE_ITEM)
4798 #undef SIMD_UNARY_OPERATION_CASE_ITEM
4806 void HUnarySIMDOperation::PrintDataTo(StringStream* stream) {
4807 const char* name = OpName();
4808 stream->Add("%s ", name);
4809 value()->PrintNameTo(stream);
4813 const char* HBinarySIMDOperation::OpName() const {
4815 #define SIMD_BINARY_OPERATION_CASE_ITEM(module, function, name, p4, p5, p6) \
4817 return #module "." #function;
4818 SIMD_BINARY_OPERATIONS(SIMD_BINARY_OPERATION_CASE_ITEM)
4819 #undef SIMD_BINARY_OPERATION_CASE_ITEM
4827 void HBinarySIMDOperation::PrintDataTo(StringStream* stream) {
4828 const char* name = OpName();
4829 stream->Add("%s ", name);
4830 left()->PrintNameTo(stream);
4832 right()->PrintNameTo(stream);
4836 const char* HTernarySIMDOperation::OpName() const {
4838 #define SIMD_TERNARY_OPERATION_CASE_ITEM(module, function, name, p4, p5, p6, \
4841 return #module "." #function;
4842 SIMD_TERNARY_OPERATIONS(SIMD_TERNARY_OPERATION_CASE_ITEM)
4843 #undef SIMD_TERNARY_OPERATION_CASE_ITEM
4851 void HTernarySIMDOperation::PrintDataTo(StringStream* stream) {
4852 const char* name = OpName();
4853 stream->Add("%s ", name);
4854 first()->PrintNameTo(stream);
4856 second()->PrintNameTo(stream);
4858 third()->PrintNameTo(stream);
4862 const char* HQuarternarySIMDOperation::OpName() const {
4864 #define SIMD_QUARTERNARY_OPERATION_CASE_ITEM(module, function, name, p4, p5, \
4867 return #module "." #function;
4868 SIMD_QUARTERNARY_OPERATIONS(SIMD_QUARTERNARY_OPERATION_CASE_ITEM)
4869 #undef SIMD_QUARTERNARY_OPERATION_CASE_ITEM
4877 void HQuarternarySIMDOperation::PrintDataTo(StringStream* stream) {
4878 const char* name = OpName();
4879 stream->Add("%s ", name);
4880 x()->PrintNameTo(stream);
4882 y()->PrintNameTo(stream);
4884 z()->PrintNameTo(stream);
4886 w()->PrintNameTo(stream);
4890 } } // namespace v8::internal