1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 #include "hydrogen-infer-representation.h"
34 #if V8_TARGET_ARCH_IA32
35 #include "ia32/lithium-ia32.h"
36 #elif V8_TARGET_ARCH_X64
37 #include "x64/lithium-x64.h"
38 #elif V8_TARGET_ARCH_ARM
39 #include "arm/lithium-arm.h"
40 #elif V8_TARGET_ARCH_MIPS
41 #include "mips/lithium-mips.h"
43 #error Unsupported target architecture.
// Generates an H<Type>::CompileToLithium stub for every concrete hydrogen
// instruction; each stub just dispatches to the matching LChunkBuilder
// Do<Type> method.  NOTE(review): the macro's closing brace and the usual
// #undef DEFINE_COMPILE sit on lines not visible in this chunk.
49 #define DEFINE_COMPILE(type) \
50 LInstruction* H##type::CompileToLithium(LChunkBuilder* builder) { \
51 return builder->Do##type(this); \
53 HYDROGEN_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
// Weight used when counting representation "votes" from uses: deeper loop
// nesting gets exponentially larger weight (powers of FLAG_loop_weight),
// clamped to the last entry of the 5-element table.
57 int HValue::LoopWeight() const {
58 const int w = FLAG_loop_weight;
59 static const int weights[] = { 1, w, w*w, w*w*w, w*w*w*w };
60 return weights[Min(block()->LoopNestingDepth(),
61 static_cast<int>(ARRAY_SIZE(weights)-1))];
// Returns the isolate of the basic block owning this value; the value must
// already be attached to a block.
65 Isolate* HValue::isolate() const {
66 ASSERT(block() != NULL);
67 return block()->isolate();
// Pins this value to representation |r| if it is still flexible: change the
// representation, then clear the flexible flag so later inference passes
// leave it alone.
71 void HValue::AssumeRepresentation(Representation r) {
72 if (CheckFlag(kFlexibleRepresentation)) {
73 ChangeRepresentation(r);
74 // The representation of the value is dictated by type feedback and
75 // will not be changed later.
76 ClearFlag(kFlexibleRepresentation);
// Infers a representation for a still-flexible value: first widen based on
// the inputs, then based on the uses.  If that left us at Smi but some use
// cannot take a Smi, widen once more to Integer32 ("use requirements").
// NOTE(review): the UpdateRepresentation( call opening the last widening
// sits on a line not visible in this chunk.
81 void HValue::InferRepresentation(HInferRepresentationPhase* h_infer) {
82 ASSERT(CheckFlag(kFlexibleRepresentation));
83 Representation new_rep = RepresentationFromInputs();
84 UpdateRepresentation(new_rep, h_infer, "inputs");
85 new_rep = RepresentationFromUses();
86 UpdateRepresentation(new_rep, h_infer, "uses");
87 if (representation().IsSmi() && HasNonSmiUse()) {
89 Representation::Integer32(), h_infer, "use requirements");
// Votes on a representation by tallying, per representation kind, the
// loop-depth-weighted count of uses observing that representation (phis
// also fold in their indirect uses).  The result prefers the most general
// kind with any votes: Tagged > Double > Integer32 > Smi, else None.
94 Representation HValue::RepresentationFromUses() {
95 if (HasNoUses()) return Representation::None();
97 // Array of use counts for each representation.
98 int use_count[Representation::kNumRepresentations] = { 0 };
100 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
101 HValue* use = it.value();
102 Representation rep = use->observed_input_representation(it.index());
103 if (rep.IsNone()) continue;
104 if (FLAG_trace_representation) {
105 PrintF("#%d %s is used by #%d %s as %s%s\n",
106 id(), Mnemonic(), use->id(), use->Mnemonic(), rep.Mnemonic(),
107 (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
109 use_count[rep.kind()] += use->LoopWeight();
111 if (IsPhi()) HPhi::cast(this)->AddIndirectUsesTo(&use_count[0]);
112 int tagged_count = use_count[Representation::kTagged];
113 int double_count = use_count[Representation::kDouble];
114 int int32_count = use_count[Representation::kInteger32];
115 int smi_count = use_count[Representation::kSmi];
117 if (tagged_count > 0) return Representation::Tagged();
118 if (double_count > 0) return Representation::Double();
119 if (int32_count > 0) return Representation::Integer32();
120 if (smi_count > 0) return Representation::Smi();
122 return Representation::None();
// Widens this value's representation to |new_rep| if it is strictly more
// general than the current one (never narrows).  Values flagged
// kCannotBeTagged refuse a Tagged widening.  On a change, all dependants
// are re-queued so the inference fixpoint can propagate.
126 void HValue::UpdateRepresentation(Representation new_rep,
127 HInferRepresentationPhase* h_infer,
128 const char* reason) {
129 Representation r = representation();
130 if (new_rep.is_more_general_than(r)) {
131 if (CheckFlag(kCannotBeTagged) && new_rep.IsTagged()) return;
132 if (FLAG_trace_representation) {
133 PrintF("Changing #%d %s representation %s -> %s based on %s\n",
134 id(), Mnemonic(), r.Mnemonic(), new_rep.Mnemonic(), reason);
136 ChangeRepresentation(new_rep);
137 AddDependantsToWorklist(h_infer);
// Re-queues everything whose inferred representation may depend on this
// value: all of its uses and all of its operands.
142 void HValue::AddDependantsToWorklist(HInferRepresentationPhase* h_infer) {
143 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
144 h_infer->AddToWorklist(it.value());
146 for (int i = 0; i < OperandCount(); ++i) {
147 h_infer->AddToWorklist(OperandAt(i));
// Clamps a 64-bit intermediate result into the range of representation |r|
// (Smi range or int32 range) and truncates to int32_t.  NOTE(review): the
// remaining parameters (result, overflow), the IsSmi branch, and the
// "*overflow = true" stores are on lines not visible in this chunk.
152 static int32_t ConvertAndSetOverflow(Representation r,
156 if (result > Smi::kMaxValue) {
158 return Smi::kMaxValue;
160 if (result < Smi::kMinValue) {
162 return Smi::kMinValue;
165 if (result > kMaxInt) {
169 if (result < kMinInt) {
174 return static_cast<int32_t>(result);
// Adds two int32 values in 64-bit arithmetic, then clamps the result to the
// range of |r|, recording overflow via |overflow|.
178 static int32_t AddWithoutOverflow(Representation r,
182 int64_t result = static_cast<int64_t>(a) + static_cast<int64_t>(b);
183 return ConvertAndSetOverflow(r, result, overflow);
// Subtracts in 64-bit arithmetic, then clamps to the range of |r|,
// recording overflow via |overflow|.
187 static int32_t SubWithoutOverflow(Representation r,
191 int64_t result = static_cast<int64_t>(a) - static_cast<int64_t>(b);
192 return ConvertAndSetOverflow(r, result, overflow);
// Multiplies in 64-bit arithmetic, then clamps to the range of |r|,
// recording overflow via |overflow|.
196 static int32_t MulWithoutOverflow(const Representation& r,
200 int64_t result = static_cast<int64_t>(a) * static_cast<int64_t>(b);
201 return ConvertAndSetOverflow(r, result, overflow);
// Returns a conservative bitmask of the bits values in this range may have
// set: exact value when the range is a single point, otherwise an all-ones
// mask grown until it covers upper_.  NOTE(review): the initialization of
// |res| and the final return are on lines not visible in this chunk.
205 int32_t Range::Mask() const {
206 if (lower_ == upper_) return lower_;
209 while (res < upper_) {
210 res = (res << 1) | 1;
// Shifts both bounds by a constant.  Overflow is deliberately ignored here
// (the may_overflow result of the clamped adds is discarded).
218 void Range::AddConstant(int32_t value) {
219 if (value == 0) return;
220 bool may_overflow = false; // Overflow is ignored here.
221 Representation r = Representation::Integer32();
222 lower_ = AddWithoutOverflow(r, lower_, value, &may_overflow);
223 upper_ = AddWithoutOverflow(r, upper_, value, &may_overflow);
// Narrows this range to its intersection with |other|; the result can be
// minus zero only if both operands could be.
230 void Range::Intersect(Range* other) {
231 upper_ = Min(upper_, other->upper_);
232 lower_ = Max(lower_, other->lower_);
233 bool b = CanBeMinusZero() && other->CanBeMinusZero();
234 set_can_be_minus_zero(b);
// Widens this range to cover |other| as well; the result can be minus zero
// if either operand could be.
238 void Range::Union(Range* other) {
239 upper_ = Max(upper_, other->upper_);
240 lower_ = Min(lower_, other->lower_);
241 bool b = CanBeMinusZero() || other->CanBeMinusZero();
242 set_can_be_minus_zero(b);
// Range of Max(a, b): both bounds take the maximum of the operands' bounds.
246 void Range::CombinedMax(Range* other) {
247 upper_ = Max(upper_, other->upper_);
248 lower_ = Max(lower_, other->lower_);
249 set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
// Range of Min(a, b): both bounds take the minimum of the operands' bounds.
253 void Range::CombinedMin(Range* other) {
254 upper_ = Min(upper_, other->upper_);
255 lower_ = Min(lower_, other->lower_);
256 set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
// Arithmetic shift right of both bounds by value & 0x1F (mirrors the
// masking of the shift count in generated code); an int result can never
// be minus zero.
260 void Range::Sar(int32_t value) {
261 int32_t bits = value & 0x1F;
262 lower_ = lower_ >> bits;
263 upper_ = upper_ >> bits;
264 set_can_be_minus_zero(false);
// Shift left of both bounds by value & 0x1F.  Shifting back right and
// comparing detects when a bound overflowed the int32 range; the handling
// inside that branch is on lines not visible in this chunk (presumably
// widening to the full int32 range — TODO confirm).
268 void Range::Shl(int32_t value) {
269 int32_t bits = value & 0x1F;
270 int old_lower = lower_;
271 int old_upper = upper_;
272 lower_ = lower_ << bits;
273 upper_ = upper_ << bits;
274 if (old_lower != lower_ >> bits || old_upper != upper_ >> bits) {
278 set_can_be_minus_zero(false);
// Adds |other| to this range bound-wise with clamping.  NOTE(review): the
// tail (minus-zero update, KeepOrder, and the return of may_overflow) is on
// lines not visible in this chunk.
282 bool Range::AddAndCheckOverflow(const Representation& r, Range* other) {
283 bool may_overflow = false;
284 lower_ = AddWithoutOverflow(r, lower_, other->lower(), &may_overflow);
285 upper_ = AddWithoutOverflow(r, upper_, other->upper(), &may_overflow);
// Subtracts |other| from this range: new lower uses other's upper and new
// upper uses other's lower (interval subtraction).  NOTE(review): the tail
// (minus-zero update, KeepOrder, return) is on lines not visible here.
294 bool Range::SubAndCheckOverflow(const Representation& r, Range* other) {
295 bool may_overflow = false;
296 lower_ = SubWithoutOverflow(r, lower_, other->upper(), &may_overflow);
297 upper_ = SubWithoutOverflow(r, upper_, other->lower(), &may_overflow);
// Restores the lower_ <= upper_ invariant by swapping the bounds if the
// preceding clamped arithmetic inverted them.  NOTE(review): the rest of
// the swap is on lines not visible in this chunk.
306 void Range::KeepOrder() {
307 if (lower_ > upper_) {
308 int32_t tmp = lower_;
// Debug check of the range invariant.
316 void Range::Verify() const {
317 ASSERT(lower_ <= upper_);
// Multiplies two ranges by evaluating all four corner products (signs can
// flip the extremes) and taking their min/max.  NOTE(review): the return of
// may_overflow is on a line not visible in this chunk.
322 bool Range::MulAndCheckOverflow(const Representation& r, Range* other) {
323 bool may_overflow = false;
324 int v1 = MulWithoutOverflow(r, lower_, other->lower(), &may_overflow);
325 int v2 = MulWithoutOverflow(r, lower_, other->upper(), &may_overflow);
326 int v3 = MulWithoutOverflow(r, upper_, other->lower(), &may_overflow);
327 int v4 = MulWithoutOverflow(r, upper_, other->upper(), &may_overflow);
328 lower_ = Min(Min(v1, v2), Min(v3, v4));
329 upper_ = Max(Max(v1, v2), Max(v3, v4));
// Maps the HType kind to a short name; names are restricted to characters
// the c1visualizer local-variable syntax accepts (see comment below).
// NOTE(review): the switch( header line is not visible in this chunk.
337 const char* HType::ToString() {
338 // Note: The c1visualizer syntax for locals allows only a sequence of the
339 // following characters: A-Za-z0-9_-|:
341 case kNone: return "none";
342 case kTagged: return "tagged";
343 case kTaggedPrimitive: return "primitive";
344 case kTaggedNumber: return "number";
345 case kSmi: return "smi";
346 case kHeapNumber: return "heap-number";
347 case kString: return "string";
348 case kBoolean: return "boolean";
349 case kNonPrimitive: return "non-primitive";
350 case kJSArray: return "array";
351 case kJSObject: return "object";
354 return "unreachable";
// Classifies a heap value into the most precise HType, defaulting to
// Tagged.  Note the JSObject check precedes the JSArray check, so the
// JSArray branch only refines values that were not already JSObject per
// the predicates' semantics.
358 HType HType::TypeFromValue(Handle<Object> value) {
359 HType result = HType::Tagged();
360 if (value->IsSmi()) {
361 result = HType::Smi();
362 } else if (value->IsHeapNumber()) {
363 result = HType::HeapNumber();
364 } else if (value->IsString()) {
365 result = HType::String();
366 } else if (value->IsBoolean()) {
367 result = HType::Boolean();
368 } else if (value->IsJSObject()) {
369 result = HType::JSObject();
370 } else if (value->IsJSArray()) {
371 result = HType::JSArray();
// True when this value's block comes after |other| in block-id order.
377 bool HValue::IsDefinedAfter(HBasicBlock* other) const {
378 return block()->block_id() > other->block_id();
// Lazily prunes dead entries: skips over (and unlinks) successor nodes
// whose value has been killed before returning the tail.
382 HUseListNode* HUseListNode::tail() {
383 // Skip and remove dead items in the use list.
384 while (tail_ != NULL && tail_->value()->CheckFlag(HValue::kIsDead)) {
385 tail_ = tail_->tail_;
// True if every use (ignoring HSimulate uses) carries flag |f|; returns
// false as soon as one use without the flag is found.
391 bool HValue::CheckUsesForFlag(Flag f) const {
392 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
393 if (it.value()->IsSimulate()) continue;
394 if (!it.value()->CheckFlag(f)) return false;
// True only if at least one non-simulate use has flag |f| and no
// non-simulate use lacks it.  NOTE(review): the line setting return_value
// to true and the final return are not visible in this chunk.
400 bool HValue::HasAtLeastOneUseWithFlagAndNoneWithout(Flag f) const {
401 bool return_value = false;
402 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
403 if (it.value()->IsSimulate()) continue;
404 if (!it.value()->CheckFlag(f)) return false;
411 HUseIterator::HUseIterator(HUseListNode* head) : next_(head) {
// Steps to the next use-list node, caching its value and operand index so
// value()/index() stay valid after the node is consumed.
416 void HUseIterator::Advance() {
418 if (current_ != NULL) {
419 next_ = current_->tail();
420 value_ = current_->value();
421 index_ = current_->index();
// Counts uses by walking the (lazily pruned) use list.
426 int HValue::UseCount() const {
428 for (HUseIterator it(uses()); !it.Done(); it.Advance()) ++count;
// Unlinks and returns the use-list node recording that |value| uses this
// at operand |index|; in debug builds the removed node is replaced by a
// fresh zapped copy so it cannot be reused accidentally.
433 HUseListNode* HValue::RemoveUse(HValue* value, int index) {
434 HUseListNode* previous = NULL;
435 HUseListNode* current = use_list_;
436 while (current != NULL) {
437 if (current->value() == value && current->index() == index) {
438 if (previous == NULL) {
439 use_list_ = current->tail();
441 previous->set_tail(current->tail());
447 current = current->tail();
451 // Do not reuse use list nodes in debug mode, zap them.
452 if (current != NULL) {
455 HUseListNode(current->value(), current->index(), NULL);
// Structural equality used by GVN: same opcode, representation, type,
// flags, and operand ids, then instruction-specific DataEquals.  The
// assert enforces that equal values hash equally.
464 bool HValue::Equals(HValue* other) {
465 if (other->opcode() != opcode()) return false;
466 if (!other->representation().Equals(representation())) return false;
467 if (!other->type_.Equals(type_)) return false;
468 if (other->flags() != flags()) return false;
469 if (OperandCount() != other->OperandCount()) return false;
470 for (int i = 0; i < OperandCount(); ++i) {
471 if (OperandAt(i)->id() != other->OperandAt(i)->id()) return false;
473 bool result = DataEquals(other);
474 ASSERT(!result || Hashcode() == other->Hashcode());
// Hash for GVN value numbering: folds the operand ids into the opcode with
// a simple multiply/shift mix.
479 intptr_t HValue::Hashcode() {
480 intptr_t result = opcode();
481 int count = OperandCount();
482 for (int i = 0; i < count; ++i) {
483 result = result * 19 + OperandAt(i)->id() + (result >> 7);
// Human-readable opcode name; concrete instructions are generated via the
// instruction-list macro, with Phi handled explicitly.
489 const char* HValue::Mnemonic() const {
491 #define MAKE_CASE(type) case k##type: return #type;
492 HYDROGEN_CONCRETE_INSTRUCTION_LIST(MAKE_CASE)
494 case kPhi: return "Phi";
// True when this is an HConstant holding an int32 value.
500 bool HValue::IsInteger32Constant() {
501 return IsConstant() && HConstant::cast(this)->HasInteger32Value();
// Extracts the int32 value; only valid after IsInteger32Constant().
505 int32_t HValue::GetInteger32Constant() {
506 return HConstant::cast(this)->Integer32Value();
// True when this is an int32 constant equal to |value|.
510 bool HValue::EqualsInteger32Constant(int32_t value) {
511 return IsInteger32Constant() && GetInteger32Constant() == value;
// Sets an operand while keeping the use lists consistent: RegisterUse
// moves the use-list bookkeeping before the raw pointer is written.
515 void HValue::SetOperandAt(int index, HValue* value) {
516 RegisterUse(index, value);
517 InternalSetOperandAt(index, value);
// Replaces all uses with |other| (when given) before deletion so the
// delete step can assert the use list is empty.  NOTE(review): the
// kill/delete tail is on lines not visible in this chunk.
521 void HValue::DeleteAndReplaceWith(HValue* other) {
522 // We replace all uses first, so Delete can assert that there are none.
523 if (other != NULL) ReplaceAllUsesWith(other);
// Rewires every use of this value to |other|: each use-list node is
// pointed at the new operand and spliced onto |other|'s use list, reusing
// the node (no allocation).
529 void HValue::ReplaceAllUsesWith(HValue* other) {
530 while (use_list_ != NULL) {
531 HUseListNode* list_node = use_list_;
532 HValue* value = list_node->value();
533 ASSERT(!value->block()->IsStartBlock());
534 value->InternalSetOperandAt(list_node->index(), other);
535 use_list_ = list_node->tail();
536 list_node->set_tail(other->use_list_);
537 other->use_list_ = list_node;
// Marks this value dead cheaply: only the head of each operand's use list
// is checked; deeper dead entries are pruned lazily by tail() on the next
// traversal.  NOTE(review): the SetFlag(kIsDead) call itself is on a line
// not visible in this chunk.
542 void HValue::Kill() {
543 // Instead of going through the entire use list of each operand, we only
544 // check the first item in each use list and rely on the tail() method to
545 // skip dead items, removing them lazily next time we traverse the list.
547 for (int i = 0; i < OperandCount(); ++i) {
548 HValue* operand = OperandAt(i);
549 if (operand == NULL) continue;
550 HUseListNode* first = operand->use_list_;
551 if (first != NULL && first->value()->CheckFlag(kIsDead)) {
552 operand->use_list_ = first->tail();
// Attaches (or detaches, when |block| is NULL) this value to a basic
// block; the value id is allocated lazily on first attachment.
558 void HValue::SetBlock(HBasicBlock* block) {
559 ASSERT(block_ == NULL || block == NULL);
561 if (id_ == kNoNumber && block != NULL) {
562 id_ = block->graph()->GetNextValueID(this);
// Prints the inferred type, but only when it adds information: tagged
// representation with a non-trivial (non-Tagged) type.
567 void HValue::PrintTypeTo(StringStream* stream) {
568 if (!representation().IsTagged() || type().Equals(HType::Tagged())) return;
569 stream->Add(" type:%s", type().ToString());
// Prints the value range unless it is absent or fully general; underscores
// keep the output legal for c1visualizer.  NOTE(review): the lower/upper
// arguments of the Add call are on lines not visible in this chunk.
573 void HValue::PrintRangeTo(StringStream* stream) {
574 if (range() == NULL || range()->IsMostGeneric()) return;
575 // Note: The c1visualizer syntax for locals allows only a sequence of the
576 // following characters: A-Za-z0-9_-|:
577 stream->Add(" range:%d_%d%s",
580 range()->CanBeMinusZero() ? "_m0" : "");
// Prints the GVN "changes" flag set: a wildcard when all side effects are
// present (branch body not visible here), otherwise a comma-separated list
// built from the tracked and untracked flag lists.
584 void HValue::PrintChangesTo(StringStream* stream) {
585 GVNFlagSet changes_flags = ChangesFlags();
586 if (changes_flags.IsEmpty()) return;
587 stream->Add(" changes[");
588 if (changes_flags == AllSideEffectsFlagSet()) {
591 bool add_comma = false;
592 #define PRINT_DO(type) \
593 if (changes_flags.Contains(kChanges##type)) { \
594 if (add_comma) stream->Add(","); \
596 stream->Add(#type); \
598 GVN_TRACKED_FLAG_LIST(PRINT_DO);
599 GVN_UNTRACKED_FLAG_LIST(PRINT_DO);
// Prints the value's short name: representation mnemonic plus id.
606 void HValue::PrintNameTo(StringStream* stream) {
607 stream->Add("%s%d", representation_.Mnemonic(), id());
// True when a single (monomorphic) JSObject map is known for this value.
611 bool HValue::HasMonomorphicJSObjectType() {
612 return !GetMonomorphicJSObjectMap().is_null();
// Recomputes the inferred type; returns whether it changed.  NOTE(review):
// the assignment of the new type is on a line not visible in this chunk.
616 bool HValue::UpdateInferredType() {
617 HType type = CalculateInferredType();
618 bool result = (!type.Equals(type_));
// Moves the use bookkeeping for operand |index| from the old value to
// |new_value|: the node removed from the old value's list is reused on the
// new value's list when possible, otherwise a fresh node is allocated in
// the new value's zone.
624 void HValue::RegisterUse(int index, HValue* new_value) {
625 HValue* old_value = OperandAt(index);
626 if (old_value == new_value) return;
628 HUseListNode* removed = NULL;
629 if (old_value != NULL) {
630 removed = old_value->RemoveUse(this, index);
633 if (new_value != NULL) {
634 if (removed == NULL) {
635 new_value->use_list_ = new(new_value->block()->zone()) HUseListNode(
636 this, index, new_value->use_list_);
638 removed->set_tail(new_value->use_list_);
639 new_value->use_list_ = removed;
// Pushes range |r| on top of this value's range stack, computing (or
// defaulting) the initial range first.
645 void HValue::AddNewRange(Range* r, Zone* zone) {
646 if (!HasRange()) ComputeInitialRange(zone);
647 if (!HasRange()) range_ = new(zone) Range();
649 r->StackUpon(range_);
// Pops the most recently stacked range, restoring the previous one.
654 void HValue::RemoveLastAddedRange() {
656 ASSERT(range_->next() != NULL);
657 range_ = range_->next();
// Lazily computes the instruction-specific initial range.
661 void HValue::ComputeInitialRange(Zone* zone) {
663 range_ = InferRange(zone);
// Full textual dump of an instruction: mnemonic, operands (via
// PrintDataTo on a line not visible here), range, GVN changes, and a
// marker for instructions with no observable side effects.
668 void HInstruction::PrintTo(StringStream* stream) {
669 PrintMnemonicTo(stream);
671 PrintRangeTo(stream);
672 PrintChangesTo(stream);
674 if (CheckFlag(HValue::kHasNoObservableSideEffects)) {
675 stream->Add(" [noOSE]");
// Default operand printer: space-separated operand names.
680 void HInstruction::PrintDataTo(StringStream *stream) {
681 for (int i = 0; i < OperandCount(); ++i) {
682 if (i > 0) stream->Add(" ");
683 OperandAt(i)->PrintNameTo(stream);
// Prints the mnemonic followed by a space.
688 void HInstruction::PrintMnemonicTo(StringStream* stream) {
689 stream->Add("%s ", Mnemonic());
// Detaches this instruction from its block's doubly-linked list; control
// instructions and block entries must never be unlinked.  If it is the
// block's last instruction the block's last pointer is rewound.
693 void HInstruction::Unlink() {
695 ASSERT(!IsControlInstruction()); // Must never move control instructions.
696 ASSERT(!IsBlockEntry()); // Doesn't make sense to delete these.
697 ASSERT(previous_ != NULL);
698 previous_->next_ = next_;
700 ASSERT(block()->last() == this);
701 block()->set_last(previous_);
703 next_->previous_ = previous_;
// Splices this (unlinked, non-control) instruction into the list just
// before |next|, adopting |next|'s block.  NOTE(review): the link from
// prev to this is on a line not visible in this chunk.
709 void HInstruction::InsertBefore(HInstruction* next) {
711 ASSERT(!next->IsBlockEntry());
712 ASSERT(!IsControlInstruction());
713 ASSERT(!next->block()->IsStartBlock());
714 ASSERT(next->previous_ != NULL);
715 HInstruction* prev = next->previous();
717 next->previous_ = this;
720 SetBlock(next->block());
// Splices this instruction after |previous|, with two adjustments: only
// constants may be appended to a finished start block (others are
// redirected to the first successor), and insertion after a side-effecting
// instruction skips past its mandatory HSimulate.
724 void HInstruction::InsertAfter(HInstruction* previous) {
726 ASSERT(!previous->IsControlInstruction());
727 ASSERT(!IsControlInstruction() || previous->next_ == NULL);
728 HBasicBlock* block = previous->block();
729 // Never insert anything except constants into the start block after finishing
731 if (block->IsStartBlock() && block->IsFinished() && !IsConstant()) {
732 ASSERT(block->end()->SecondSuccessor() == NULL);
733 InsertAfter(block->end()->FirstSuccessor()->first());
737 // If we're inserting after an instruction with side-effects that is
738 // followed by a simulate instruction, we need to insert after the
739 // simulate instruction instead.
740 HInstruction* next = previous->next_;
741 if (previous->HasObservableSideEffects() && next != NULL) {
742 ASSERT(next->IsSimulate());
744 next = previous->next_;
747 previous_ = previous;
750 previous->next_ = this;
751 if (next != NULL) next->previous_ = this;
752 if (block->last() == previous) {
753 block->set_last(this);
// Debug-mode consistency checks for one instruction: operands dominate
// their use (same-block operands must appear earlier unless phis),
// side-effecting instructions are followed by HSimulate, GVN-eligible
// instructions override DataEquals, and all uses are linked in the graph.
759 void HInstruction::Verify() {
760 // Verify that input operands are defined before use.
761 HBasicBlock* cur_block = block();
762 for (int i = 0; i < OperandCount(); ++i) {
763 HValue* other_operand = OperandAt(i);
764 if (other_operand == NULL) continue;
765 HBasicBlock* other_block = other_operand->block();
766 if (cur_block == other_block) {
767 if (!other_operand->IsPhi()) {
768 HInstruction* cur = this->previous();
769 while (cur != NULL) {
770 if (cur == other_operand) break;
771 cur = cur->previous();
773 // Must reach other operand in the same block!
774 ASSERT(cur == other_operand);
777 // If the following assert fires, you may have forgotten an
779 ASSERT(other_block->Dominates(cur_block));
783 // Verify that instructions that may have side-effects are followed
784 // by a simulate instruction.
785 if (HasObservableSideEffects() && !IsOsrEntry()) {
786 ASSERT(next()->IsSimulate());
789 // Verify that instructions that can be eliminated by GVN have overridden
790 // HValue::DataEquals. The default implementation is UNREACHABLE. We
791 // don't actually care whether DataEquals returns true or false here.
792 if (CheckFlag(kUseGVN)) DataEquals(this);
794 // Verify that all uses are in the graph.
795 for (HUseIterator use = uses(); !use.Done(); use.Advance()) {
796 if (use.value()->IsInstruction()) {
797 ASSERT(HInstruction::cast(use.value())->IsLinked());
// Prints the single wrapped value.
804 void HDummyUse::PrintDataTo(StringStream* stream) {
805 value()->PrintNameTo(stream);
// Prints whether this marker binds or looks up the environment slot.
809 void HEnvironmentMarker::PrintDataTo(StringStream* stream) {
810 stream->Add("%s var[%d]", kind() == BIND ? "bind" : "lookup", index());
// Prints the callee/value operand followed by the argument count.
814 void HUnaryCall::PrintDataTo(StringStream* stream) {
815 value()->PrintNameTo(stream);
817 stream->Add("#%d", argument_count());
// Prints both operands followed by the argument count.
821 void HBinaryCall::PrintDataTo(StringStream* stream) {
822 first()->PrintNameTo(stream);
824 second()->PrintNameTo(stream);
826 stream->Add("#%d", argument_count());
// Rewrites the bounds check to operate on the decomposed index
// (base + offset) >> scale: materializes an HAdd for any combined offset
// and an HSar for any combined scale in front of this check, then makes
// the rewritten expression operand 0.  Skipped checks are left untouched.
// NOTE(review): several lines of this function (the non-decomposable path
// and offset/scale resets) are not visible in this chunk.
830 void HBoundsCheck::ApplyIndexChange() {
831 if (skip_check()) return;
833 DecompositionResult decomposition;
834 bool index_is_decomposable = index()->TryDecompose(&decomposition);
835 if (index_is_decomposable) {
836 ASSERT(decomposition.base() == base());
837 if (decomposition.offset() == offset() &&
838 decomposition.scale() == scale()) return;
843 ReplaceAllUsesWith(index());
845 HValue* current_index = decomposition.base();
846 int actual_offset = decomposition.offset() + offset();
847 int actual_scale = decomposition.scale() + scale();
849 Zone* zone = block()->graph()->zone();
850 HValue* context = block()->graph()->GetInvalidContext();
851 if (actual_offset != 0) {
852 HConstant* add_offset = HConstant::New(zone, context, actual_offset);
853 add_offset->InsertBefore(this);
854 HInstruction* add = HAdd::New(zone, context,
855 current_index, add_offset);
856 add->InsertBefore(this);
857 add->AssumeRepresentation(index()->representation());
858 add->ClearFlag(kCanOverflow);
862 if (actual_scale != 0) {
863 HConstant* sar_scale = HConstant::New(zone, context, actual_scale);
864 sar_scale->InsertBefore(this);
865 HInstruction* sar = HSar::New(zone, context,
866 current_index, sar_scale);
867 sar->InsertBefore(this);
868 sar->AssumeRepresentation(index()->representation());
872 SetOperandAt(0, current_index);
// Prints index and length; when an index decomposition is attached, also
// shows the (base + offset) >> scale form, and flags skipped checks.
880 void HBoundsCheck::PrintDataTo(StringStream* stream) {
881 index()->PrintNameTo(stream);
883 length()->PrintNameTo(stream);
884 if (base() != NULL && (offset() != 0 || scale() != 0)) {
885 stream->Add(" base: ((");
886 if (base() != index()) {
887 index()->PrintNameTo(stream);
889 stream->Add("index");
891 stream->Add(" + %d) >> %d)", offset(), scale());
894 stream->Add(" [DISABLED]");
// Picks a representation for the check from the actual index and length:
// tagged values with Smi type count as Smi, the two are generalized, and
// anything wider than Integer32 is clamped back to Integer32.
899 void HBoundsCheck::InferRepresentation(HInferRepresentationPhase* h_infer) {
900 ASSERT(CheckFlag(kFlexibleRepresentation));
901 HValue* actual_index = index()->ActualValue();
902 HValue* actual_length = length()->ActualValue();
903 Representation index_rep = actual_index->representation();
904 Representation length_rep = actual_length->representation();
905 if (index_rep.IsTagged() && actual_index->type().IsSmi()) {
906 index_rep = Representation::Smi();
908 if (length_rep.IsTagged() && actual_length->type().IsSmi()) {
909 length_rep = Representation::Smi();
911 Representation r = index_rep.generalize(length_rep);
912 if (r.is_more_general_than(Representation::Integer32())) {
913 r = Representation::Integer32();
915 UpdateRepresentation(r, h_infer, "boundscheck");
// Prints the base index twice, labeled "base:" and "check:".
// NOTE(review): the second PrintNameTo printing base_index() for "check"
// looks suspicious (one would expect the bounds check operand), but the
// lines around it are not visible here — verify against the full source.
919 void HBoundsCheckBaseIndexInformation::PrintDataTo(StringStream* stream) {
920 stream->Add("base: ");
921 base_index()->PrintNameTo(stream);
922 stream->Add(", check: ");
923 base_index()->PrintNameTo(stream);
// Prints either the "optimized apply" marker or the callee's debug name,
// then the argument count.
927 void HCallConstantFunction::PrintDataTo(StringStream* stream) {
928 if (IsApplyFunction()) {
929 stream->Add("optimized apply ");
931 stream->Add("%o ", function()->shared()->DebugName());
933 stream->Add("#%d", argument_count());
// Prints the property name, then the generic unary-call data.
937 void HCallNamed::PrintDataTo(StringStream* stream) {
938 stream->Add("%o ", *name());
939 HUnaryCall::PrintDataTo(stream);
// Prints the global name, then the generic unary-call data.
943 void HCallGlobal::PrintDataTo(StringStream* stream) {
944 stream->Add("%o ", *name());
945 HUnaryCall::PrintDataTo(stream);
// Prints the known target's debug name and the argument count.
949 void HCallKnownGlobal::PrintDataTo(StringStream* stream) {
950 stream->Add("%o ", target()->shared()->DebugName());
951 stream->Add("#%d", argument_count());
// Prints the elements kind, then the generic binary-call data.
955 void HCallNewArray::PrintDataTo(StringStream* stream) {
956 stream->Add(ElementsKindToString(elements_kind()));
958 HBinaryCall::PrintDataTo(stream);
// Prints the runtime function name and the argument count.
962 void HCallRuntime::PrintDataTo(StringStream* stream) {
963 stream->Add("%o ", *name());
964 stream->Add("#%d", argument_count());
// Prints class_of_test(<value>, "<class name>").
968 void HClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
969 stream->Add("class_of_test(");
970 value()->PrintNameTo(stream);
971 stream->Add(", \"%o\")", *class_name());
// Prints receiver and function operands.
975 void HWrapReceiver::PrintDataTo(StringStream* stream) {
976 receiver()->PrintNameTo(stream);
978 function()->PrintNameTo(stream);
// Prints arguments[index], length <length>.
982 void HAccessArgumentsAt::PrintDataTo(StringStream* stream) {
983 arguments()->PrintNameTo(stream);
985 index()->PrintNameTo(stream);
986 stream->Add("], length ");
987 length()->PrintNameTo(stream);
// Prints the successor list: " goto (B1, B2, ...)".  NOTE(review): the
// line clearing first_block after the first iteration is not visible here.
991 void HControlInstruction::PrintDataTo(StringStream* stream) {
992 stream->Add(" goto (");
993 bool first_block = true;
994 for (HSuccessorIterator it(this); !it.Done(); it.Advance()) {
995 stream->Add(first_block ? "B%d" : ", B%d", it.Current()->block_id());
// Prints the tested value, then the successor list.
1002 void HUnaryControlInstruction::PrintDataTo(StringStream* stream) {
1003 value()->PrintNameTo(stream);
1004 HControlInstruction::PrintDataTo(stream);
// Prints the returned value and how many parameters are popped.
1008 void HReturn::PrintDataTo(StringStream* stream) {
1009 value()->PrintNameTo(stream);
1010 stream->Add(" (pop ");
1011 parameter_count()->PrintNameTo(stream);
1012 stream->Add(" values)");
// Chooses the input representation ToBoolean feedback supports: any
// tagged-only type forces Tagged; UNDEFINED+HEAP_NUMBER allows Double
// (undefined-as-NaN) while UNDEFINED alone forces Tagged; pure HEAP_NUMBER
// gives Double; SMI gives Smi; otherwise no constraint.
1016 Representation HBranch::observed_input_representation(int index) {
1017 static const ToBooleanStub::Types tagged_types(
1018 ToBooleanStub::NULL_TYPE |
1019 ToBooleanStub::SPEC_OBJECT |
1020 ToBooleanStub::STRING |
1021 ToBooleanStub::SYMBOL);
1022 if (expected_input_types_.ContainsAnyOf(tagged_types)) {
1023 return Representation::Tagged();
1025 if (expected_input_types_.Contains(ToBooleanStub::UNDEFINED)) {
1026 if (expected_input_types_.Contains(ToBooleanStub::HEAP_NUMBER)) {
1027 return Representation::Double();
1029 return Representation::Tagged();
1031 if (expected_input_types_.Contains(ToBooleanStub::HEAP_NUMBER)) {
1032 return Representation::Double();
1034 if (expected_input_types_.Contains(ToBooleanStub::SMI)) {
1035 return Representation::Smi();
1037 return Representation::None();
// Prints the compared value and the expected map pointer.
1041 void HCompareMap::PrintDataTo(StringStream* stream) {
1042 value()->PrintNameTo(stream);
1043 stream->Add(" (%p)", *map());
1044 HControlInstruction::PrintDataTo(stream);
// Maps the math operation enum to its printable name.  NOTE(review): the
// switch header and default/UNREACHABLE tail are not visible here.
1048 const char* HUnaryMathOperation::OpName() const {
1050 case kMathFloor: return "floor";
1051 case kMathRound: return "round";
1052 case kMathAbs: return "abs";
1053 case kMathLog: return "log";
1054 case kMathSin: return "sin";
1055 case kMathCos: return "cos";
1056 case kMathTan: return "tan";
1057 case kMathExp: return "exp";
1058 case kMathSqrt: return "sqrt";
1059 case kMathPowHalf: return "pow-half";
// Range inference: only kMathAbs on a ranged smi/int32 input is refined.
// abs(kMinInt) would overflow (the instruction deopts), so it is clamped
// to kMaxInt; a range spanning zero lowers the result's lower bound to 0.
// Smi representation additionally clamps to the Smi range.  Everything
// else falls back to the generic HValue range.
1067 Range* HUnaryMathOperation::InferRange(Zone* zone) {
1068 Representation r = representation();
1069 if (r.IsSmiOrInteger32() && value()->HasRange()) {
1070 if (op() == kMathAbs) {
1071 int upper = value()->range()->upper();
1072 int lower = value()->range()->lower();
1073 bool spans_zero = value()->range()->CanBeZero();
1074 // Math.abs(kMinInt) overflows its representation, on which the
1075 // instruction deopts. Hence clamp it to kMaxInt.
1076 int abs_upper = upper == kMinInt ? kMaxInt : abs(upper);
1077 int abs_lower = lower == kMinInt ? kMaxInt : abs(lower);
1079 new(zone) Range(spans_zero ? 0 : Min(abs_lower, abs_upper),
1080 Max(abs_lower, abs_upper));
1081 // In case of Smi representation, clamp Math.abs(Smi::kMinValue) to
1083 if (r.IsSmi()) result->ClampToSmi();
1087 return HValue::InferRange(zone);
// Prints the operation name and its operand.
1091 void HUnaryMathOperation::PrintDataTo(StringStream* stream) {
1092 const char* name = OpName();
1093 stream->Add("%s ", name);
1094 value()->PrintNameTo(stream);
// Prints the single operand.
1098 void HUnaryOperation::PrintDataTo(StringStream* stream) {
1099 value()->PrintNameTo(stream);
// Prints a symbolic tag for well-known [from_, to_] instance-type windows
// (spec_object / reg_exp / array / function).  NOTE(review): the switch
// header on from_ and some case labels are on lines not visible here.
1103 void HHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
1104 value()->PrintNameTo(stream);
1106 case FIRST_JS_RECEIVER_TYPE:
1107 if (to_ == LAST_TYPE) stream->Add(" spec_object");
1109 case JS_REGEXP_TYPE:
1110 if (to_ == JS_REGEXP_TYPE) stream->Add(" reg_exp");
1113 if (to_ == JS_ARRAY_TYPE) stream->Add(" array");
1115 case JS_FUNCTION_TYPE:
1116 if (to_ == JS_FUNCTION_TYPE) stream->Add(" function");
// Prints "<value> == <type literal>" and the successor list.
1124 void HTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
1125 value()->PrintNameTo(stream);
1126 stream->Add(" == %o", *type_literal_);
1127 HControlInstruction::PrintDataTo(stream);
// Prints the checked value and the map operand.
1131 void HCheckMapValue::PrintDataTo(StringStream* stream) {
1132 value()->PrintNameTo(stream);
1134 map()->PrintNameTo(stream);
// Prints the enumerable operand.
1138 void HForInPrepareMap::PrintDataTo(StringStream* stream) {
1139 enumerable()->PrintNameTo(stream);
// Prints enumerable, map, and the cache index.
1143 void HForInCacheArray::PrintDataTo(StringStream* stream) {
1144 enumerable()->PrintNameTo(stream);
1146 map()->PrintNameTo(stream);
1147 stream->Add("[%d]", idx_);
// Prints the object and the field index operand.
1151 void HLoadFieldByIndex::PrintDataTo(StringStream* stream) {
1152 object()->PrintNameTo(stream);
1154 index()->PrintNameTo(stream);
// Pattern helper: matches l == ~0 (all ones); on success *negated is set
// to r on a line not visible in this chunk.
1158 static bool MatchLeftIsOnes(HValue* l, HValue* r, HValue** negated) {
1159 if (!l->EqualsInteger32Constant(~0)) return false;
// Pattern helper: matches bitwise negation written as (x ^ ~0) or
// (~0 ^ x); stores the negated operand in *negated.
1165 static bool MatchNegationViaXor(HValue* instr, HValue** negated) {
1166 if (!instr->IsBitwise()) return false;
1167 HBitwise* b = HBitwise::cast(instr);
1168 return (b->op() == Token::BIT_XOR) &&
1169 (MatchLeftIsOnes(b->left(), b->right(), negated) ||
1170 MatchLeftIsOnes(b->right(), b->left(), negated));
// Pattern helper: matches ~~x (double negation via xor) and stores the
// innermost value in *arg.
1174 static bool MatchDoubleNegation(HValue* instr, HValue** arg) {
1176 return MatchNegationViaXor(instr, &negated) &&
1177 MatchNegationViaXor(negated, arg);
// Simplifications for int32 bitwise ops: drop the identity constant
// (x & -1, x | 0, x ^ 0) and collapse double negation ~~x, in both cases
// only when the surviving operand is not uint32 (its bit pattern would
// otherwise be reinterpreted).  NOTE(review): the return statements inside
// the matched branches are on lines not visible in this chunk.
1181 HValue* HBitwise::Canonicalize() {
1182 if (!representation().IsSmiOrInteger32()) return this;
1183 // If x is an int32, then x & -1 == x, x | 0 == x and x ^ 0 == x.
1184 int32_t nop_constant = (op() == Token::BIT_AND) ? -1 : 0;
1185 if (left()->EqualsInteger32Constant(nop_constant) &&
1186 !right()->CheckFlag(kUint32)) {
1189 if (right()->EqualsInteger32Constant(nop_constant) &&
1190 !left()->CheckFlag(kUint32)) {
1193 // Optimize double negation, a common pattern used for ToInt32(x).
1195 if (MatchDoubleNegation(this, &arg) && !arg->CheckFlag(kUint32)) {
// True when arg2 is the identity constant for the operation and arg1 has a
// specialized (non-tagged) representation, so the op can be elided.
1202 static bool IsIdentityOperation(HValue* arg1, HValue* arg2, int32_t identity) {
1203 return arg1->representation().IsSpecialization() &&
1204 arg2->EqualsInteger32Constant(identity);
// x + 0 => x and 0 + x => x (for specialized representations).
1208 HValue* HAdd::Canonicalize() {
1209 if (IsIdentityOperation(left(), right(), 0)) return left();
1210 if (IsIdentityOperation(right(), left(), 0)) return right();
// x - 0 => x (subtraction is not commutative, so only the left form).
1215 HValue* HSub::Canonicalize() {
1216 if (IsIdentityOperation(left(), right(), 0)) return left();
// x * 1 => x and 1 * x => x (for specialized representations).
1221 HValue* HMul::Canonicalize() {
1222 if (IsIdentityOperation(left(), right(), 1)) return left();
1223 if (IsIdentityOperation(right(), left(), 1)) return right();
1228 HValue* HMod::Canonicalize() {
1233 HValue* HDiv::Canonicalize() {
// A representation change to the same representation is a no-op.
1238 HValue* HChange::Canonicalize() {
1239 return (from().Equals(to())) ? value() : this;
// Unused wraps disappear; a receiver already known to be a JSObject needs
// no wrapping.  NOTE(review): the branch body and further cases are on
// lines not visible in this chunk.
1243 HValue* HWrapReceiver::Canonicalize() {
1244 if (HasNoUses()) return NULL;
1245 if (receiver()->type().IsJSObject()) {
// Prints the operand whose type is queried.
1252 void HTypeof::PrintDataTo(StringStream* stream) {
1253 value()->PrintNameTo(stream);
// Prints the forced representation mnemonic and the operand.
1257 void HForceRepresentation::PrintDataTo(StringStream* stream) {
1258 stream->Add("%s ", representation().Mnemonic());
1259 value()->PrintNameTo(stream);
// Prints the conversion (from -> to) plus flags affecting its semantics:
// int32 truncation, minus-zero bailout, undefined-as-NaN.
1263 void HChange::PrintDataTo(StringStream* stream) {
1264 HUnaryOperation::PrintDataTo(stream);
1265 stream->Add(" %s to %s", from().Mnemonic(), to().Mnemonic());
1267 if (CanTruncateToInt32()) stream->Add(" truncating-int32");
1268 if (CheckFlag(kBailoutOnMinusZero)) stream->Add(" -0?");
1269 if (CheckFlag(kAllowUndefinedAsNaN)) stream->Add(" allow-undefined-as-nan");
// For Math.floor(a / b): returns a dividend usable directly as int32 —
// the value itself if already Integer32, or the pre-conversion value when
// it is a change from Integer32.  NOTE(review): the returns for the first
// case and the fall-through (NULL) are on lines not visible here.
1273 static HValue* SimplifiedDividendForMathFloorOfDiv(HValue* dividend) {
1274 // A value with an integer representation does not need to be transformed.
1275 if (dividend->representation().IsInteger32()) {
1278 // A change from an integer32 can be replaced by the integer32 value.
1279 if (dividend->IsChange() &&
1280 HChange::cast(dividend)->from().IsInteger32()) {
1281 return HChange::cast(dividend)->value();
// Canonicalizes Math.round/Math.floor:
//  1. round/floor of a value already in smi/int32 representation is the
//     value itself (inserting a representation change when the math op's
//     representation differs).
//  2. Math.floor(a / b), where the floor is the division's only use, is
//     strength-reduced to one HMathFloorOfDiv instruction and the original
//     HDiv is deleted.
// NOTE(review): several lines (returns, #endif for the ARM guard, closing
// braces) are elided in this excerpt.
1287 HValue* HUnaryMathOperation::Canonicalize() {
1288 if (op() == kMathRound || op() == kMathFloor) {
1289 HValue* val = value();
1290 if (val->IsChange()) val = HChange::cast(val)->value();
1292 // If the input is smi or integer32 then we replace the instruction with its
1294 if (val->representation().IsSmiOrInteger32()) {
1295 if (!val->representation().Equals(representation())) {
1296 HChange* result = new(block()->zone()) HChange(
1297 val, representation(), false, false);
1298 result->InsertBefore(this);
1305 if (op() == kMathFloor) {
1306 HValue* val = value();
1307 if (val->IsChange()) val = HChange::cast(val)->value();
// Only fire when this floor is the sole consumer of the division, so the
// division can be deleted afterwards.
1308 if (val->IsDiv() && (val->UseCount() == 1)) {
1309 HDiv* hdiv = HDiv::cast(val);
1310 HValue* left = hdiv->left();
1311 HValue* right = hdiv->right();
1312 // Try to simplify left and right values of the division.
1313 HValue* new_left = SimplifiedDividendForMathFloorOfDiv(left);
1314 if (new_left == NULL &&
1315 hdiv->observed_input_representation(1).IsSmiOrInteger32()) {
1316 new_left = new(block()->zone()) HChange(
1317 left, Representation::Integer32(), false, false);
1318 HChange::cast(new_left)->InsertBefore(this);
1321 LChunkBuilder::SimplifiedDivisorForMathFloorOfDiv(right);
// On ARM the divisor path additionally requires hardware integer division
// (SUDIV) support.
1322 if (new_right == NULL &&
1323 #if V8_TARGET_ARCH_ARM
1324 CpuFeatures::IsSupported(SUDIV) &&
1326 hdiv->observed_input_representation(2).IsSmiOrInteger32()) {
1327 new_right = new(block()->zone()) HChange(
1328 right, Representation::Integer32(), false, false);
1329 HChange::cast(new_right)->InsertBefore(this);
1332 // Return if left or right are not optimizable.
1333 if ((new_left == NULL) || (new_right == NULL)) return this;
1335 // Insert the new values in the graph.
1336 if (new_left->IsInstruction() &&
1337 !HInstruction::cast(new_left)->IsLinked()) {
1338 HInstruction::cast(new_left)->InsertBefore(this);
1340 if (new_right->IsInstruction() &&
1341 !HInstruction::cast(new_right)->IsLinked()) {
1342 HInstruction::cast(new_right)->InsertBefore(this);
1344 HMathFloorOfDiv* instr =
1345 HMathFloorOfDiv::New(block()->zone(), context(), new_left, new_right);
1346 // Replace this HMathFloor instruction by the new HMathFloorOfDiv.
1347 instr->InsertBefore(this);
1348 ReplaceAllUsesWith(instr);
1350 // We know the division had no other uses than this HMathFloor. Delete it.
1351 // Dead code elimination will deal with |left| and |right| if
1353 hdiv->DeleteAndReplaceWith(NULL);
1355 // Return NULL to remove this instruction from the graph.
// The instance-type check is redundant when the value's static type already
// proves it: a value typed String for IS_STRING, or an internalized-string
// constant for IS_INTERNALIZED_STRING. (The returns inside the branches are
// elided in this excerpt.)
1363 HValue* HCheckInstanceType::Canonicalize() {
1364 if (check_ == IS_STRING && value()->type().IsString()) {
1368 if (check_ == IS_INTERNALIZED_STRING && value()->IsConstant()) {
1369 if (HConstant::cast(value())->HasInternalizedStringValue()) {
// Translates an interval-style check into the [first, last] instance-type
// range it accepts. Only valid for interval checks; the switch is partially
// elided here.
1377 void HCheckInstanceType::GetCheckInterval(InstanceType* first,
1378 InstanceType* last) {
1379 ASSERT(is_interval_check());
1381 case IS_SPEC_OBJECT:
1382 *first = FIRST_SPEC_OBJECT_TYPE;
1383 *last = LAST_SPEC_OBJECT_TYPE;
1386 *first = *last = JS_ARRAY_TYPE;
// Translates a mask-style check into the instance-type bit mask and tag the
// generated code tests against. Only valid for non-interval checks.
1394 void HCheckInstanceType::GetCheckMaskAndTag(uint8_t* mask, uint8_t* tag) {
1395 ASSERT(!is_interval_check());
1398 *mask = kIsNotStringMask;
1401 case IS_INTERNALIZED_STRING:
1402 *mask = kIsNotInternalizedMask;
1403 *tag = kInternalizedTag;
// GVN hook: if this (otherwise unused) map check is dominated by a field
// store that transitions the same object to a map contained in this check's
// map set, the check is guaranteed to pass and can be deleted.
1411 void HCheckMaps::HandleSideEffectDominator(GVNFlag side_effect,
1412 HValue* dominator) {
1413 ASSERT(side_effect == kChangesMaps);
1414 // TODO(mstarzinger): For now we specialize on HStoreNamedField, but once
1415 // type information is rich enough we should generalize this to any HType
1416 // for which the map is known.
1417 if (HasNoUses() && dominator->IsStoreNamedField()) {
1418 HStoreNamedField* store = HStoreNamedField::cast(dominator);
// Bail out unless the store transitions the very object we are checking.
1419 if (!store->has_transition() || store->object() != value()) return;
1420 HConstant* transition = HConstant::cast(store->transition());
1421 for (int i = 0; i < map_set()->length(); i++) {
1422 if (transition->UniqueValueIdsMatch(map_unique_ids_.at(i))) {
1423 DeleteAndReplaceWith(NULL);
// Debug printing: the checked value followed by its allowed map pointers,
// annotated when the check can be omitted entirely.
1431 void HCheckMaps::PrintDataTo(StringStream* stream) {
1432 value()->PrintNameTo(stream);
1433 stream->Add(" [%p", *map_set()->first());
1434 for (int i = 1; i < map_set()->length(); ++i) {
1435 stream->Add(",%p", *map_set()->at(i));
1437 stream->Add("]%s", CanOmitMapChecks() ? "(omitted)" : "");
// Debug printing: the checked value and the expected function pointer.
1441 void HCheckFunction::PrintDataTo(StringStream* stream) {
1442 value()->PrintNameTo(stream);
1443 stream->Add(" %p", *target());
// The function check is redundant when the value is a constant whose unique
// id matches the expected target. (The ?: result expression is elided in
// this excerpt.)
1447 HValue* HCheckFunction::Canonicalize() {
1448 return (value()->IsConstant() &&
1449 HConstant::cast(value())->UniqueValueIdsMatch(target_unique_id_))
// Human-readable name of the instance-type check, for debug output.
1455 const char* HCheckInstanceType::GetCheckName() {
1457 case IS_SPEC_OBJECT: return "object";
1458 case IS_JS_ARRAY: return "array";
1459 case IS_STRING: return "string";
1460 case IS_INTERNALIZED_STRING: return "internalized_string";
// Debug printing: check name followed by the operand.
1467 void HCheckInstanceType::PrintDataTo(StringStream* stream) {
1468 stream->Add("%s ", GetCheckName());
1469 HUnaryOperation::PrintDataTo(stream);
// Debug printing: stub major name plus the call operand. (The stream->Add
// call that consumes MajorName is partially elided in this excerpt.)
1473 void HCallStub::PrintDataTo(StringStream* stream) {
1475 CodeStub::MajorName(major_key_, false));
1476 HUnaryCall::PrintDataTo(stream);
// Debug printing: left, right, and the context operand.
1480 void HInstanceOf::PrintDataTo(StringStream* stream) {
1481 left()->PrintNameTo(stream);
1483 right()->PrintNameTo(stream);
1485 context()->PrintNameTo(stream);
// Default range inference: smi-typed values get the full Smi range (and can
// never be -0); everything else gets an unconstrained Range that may be -0
// unless every use truncates to int32.
1489 Range* HValue::InferRange(Zone* zone) {
1491 if (representation().IsSmi() || type().IsSmi()) {
1492 result = new(zone) Range(Smi::kMinValue, Smi::kMaxValue);
1493 result->set_can_be_minus_zero(false);
1495 result = new(zone) Range();
1496 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32));
1497 // TODO(jkummerow): The range cannot be minus zero when the upper type
1498 // bound is Integer32.
// Range of a representation change: start from the input's range (or the
// generic HValue range). An int32 (non-uint32) input whose range fits in a
// Smi lets us strengthen the type to Smi and drop the new-space-promotion
// GVN flag. -0 is only possible when the target is not smi/int32 or not all
// uses truncate; a Smi target clamps the range to Smi bounds.
// NOTE(review): part of the smi-strengthening condition is elided in this
// excerpt.
1504 Range* HChange::InferRange(Zone* zone) {
1505 Range* input_range = value()->range();
1506 if (from().IsInteger32() && !value()->CheckFlag(HInstruction::kUint32) &&
1509 input_range != NULL &&
1510 input_range->IsInSmiRange()))) {
1511 set_type(HType::Smi());
1512 ClearGVNFlag(kChangesNewSpacePromotion);
1514 Range* result = (input_range != NULL)
1515 ? input_range->Copy(zone)
1516 : HValue::InferRange(zone);
1517 result->set_can_be_minus_zero(!to().IsSmiOrInteger32() ||
1518 !(CheckFlag(kAllUsesTruncatingToInt32) ||
1519 CheckFlag(kAllUsesTruncatingToSmi)));
1520 if (to().IsSmi()) result->ClampToSmi();
// A constant with an int32 value has the degenerate exact range [v, v] and
// cannot be -0; other constants fall back to the generic inference.
1525 Range* HConstant::InferRange(Zone* zone) {
1526 if (has_int32_value_) {
1527 Range* result = new(zone) Range(int32_value_, int32_value_);
1528 result->set_can_be_minus_zero(false);
1531 return HValue::InferRange(zone);
// Phi range: loop-header phis conservatively get the full Smi or int32
// range (a back edge could widen them); other phis get the union of their
// operands' ranges. Non-integer phis use the generic inference.
1535 Range* HPhi::InferRange(Zone* zone) {
1536 Representation r = representation();
1537 if (r.IsSmiOrInteger32()) {
1538 if (block()->IsLoopHeader()) {
1539 Range* range = r.IsSmi()
1540 ? new(zone) Range(Smi::kMinValue, Smi::kMaxValue)
1541 : new(zone) Range(kMinInt, kMaxInt);
1544 Range* range = OperandAt(0)->range()->Copy(zone);
1545 for (int i = 1; i < OperandCount(); ++i) {
1546 range->Union(OperandAt(i)->range());
1551 return HValue::InferRange(zone);
// Addition range: left's range plus right's range. kCanOverflow is cleared
// when the interval arithmetic proves no overflow, or when every use
// truncates anyway. Result can be -0 only if both operands can be -0 and no
// use truncates.
1556 Range* HAdd::InferRange(Zone* zone) {
1557 Representation r = representation();
1558 if (r.IsSmiOrInteger32()) {
1559 Range* a = left()->range();
1560 Range* b = right()->range();
1561 Range* res = a->Copy(zone);
1562 if (!res->AddAndCheckOverflow(r, b) ||
1563 (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
1564 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
1565 ClearFlag(kCanOverflow);
1567 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
1568 !CheckFlag(kAllUsesTruncatingToInt32) &&
1569 a->CanBeMinusZero() && b->CanBeMinusZero());
1572 return HValue::InferRange(zone);
// Subtraction range: left's range minus right's range; overflow flag
// handling mirrors HAdd. Result can be -0 only if the left operand can be
// -0 and the right operand can be zero (e.g. -0 - 0).
1577 Range* HSub::InferRange(Zone* zone) {
1578 Representation r = representation();
1579 if (r.IsSmiOrInteger32()) {
1580 Range* a = left()->range();
1581 Range* b = right()->range();
1582 Range* res = a->Copy(zone);
1583 if (!res->SubAndCheckOverflow(r, b) ||
1584 (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
1585 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
1586 ClearFlag(kCanOverflow);
1588 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
1589 !CheckFlag(kAllUsesTruncatingToInt32) &&
1590 a->CanBeMinusZero() && b->CanBeZero());
1593 return HValue::InferRange(zone);
// Multiplication range. Unlike add/sub, truncating uses do NOT allow
// clearing kCanOverflow (see the inline comment). Result can be -0 when one
// operand can be zero and the other negative (0 * -n == -0).
1598 Range* HMul::InferRange(Zone* zone) {
1599 Representation r = representation();
1600 if (r.IsSmiOrInteger32()) {
1601 Range* a = left()->range();
1602 Range* b = right()->range();
1603 Range* res = a->Copy(zone);
1604 if (!res->MulAndCheckOverflow(r, b)) {
1605 // Clearing the kCanOverflow flag when kAllUsesAreTruncatingToInt32
1606 // would be wrong, because truncated integer multiplication is too
1607 // precise and therefore not the same as converting to Double and back.
1608 ClearFlag(kCanOverflow);
1610 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
1611 !CheckFlag(kAllUsesTruncatingToInt32) &&
1612 ((a->CanBeZero() && b->CanBeNegative()) ||
1613 (a->CanBeNegative() && b->CanBeZero())));
1616 return HValue::InferRange(zone);
// Division range: result is unconstrained, but we can clear the overflow
// flag unless kMinInt / -1 is possible (the only overflowing int32
// division), clear div-by-zero when the divisor excludes 0, and mark -0 as
// possible only for -0 dividends or 0 / negative.
1621 Range* HDiv::InferRange(Zone* zone) {
1622 if (representation().IsInteger32()) {
1623 Range* a = left()->range();
1624 Range* b = right()->range();
1625 Range* result = new(zone) Range();
1626 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
1627 (a->CanBeMinusZero() ||
1628 (a->CanBeZero() && b->CanBeNegative())));
1629 if (!a->Includes(kMinInt) || !b->Includes(-1)) {
1630 ClearFlag(HValue::kCanOverflow);
1633 if (!b->CanBeZero()) {
1634 ClearFlag(HValue::kCanBeDivByZero);
1638 return HValue::InferRange(zone);
// Modulus range: magnitude bounded by the divisor, sign follows the
// dividend (JS semantics). Overflow/div-by-zero flags are cleared under the
// same conditions as HDiv.
1643 Range* HMod::InferRange(Zone* zone) {
1644 if (representation().IsInteger32()) {
1645 Range* a = left()->range();
1646 Range* b = right()->range();
1648 // The magnitude of the modulus is bounded by the right operand. Note that
1649 // apart from the cases involving kMinInt, the calculation below is the same
1650 // as Max(Abs(b->lower()), Abs(b->upper())) - 1.
1651 int32_t positive_bound = -(Min(NegAbs(b->lower()), NegAbs(b->upper())) + 1);
1653 // The result of the modulo operation has the sign of its left operand.
1654 bool left_can_be_negative = a->CanBeMinusZero() || a->CanBeNegative();
1655 Range* result = new(zone) Range(left_can_be_negative ? -positive_bound : 0,
1656 a->CanBePositive() ? positive_bound : 0);
1658 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
1659 left_can_be_negative);
1661 if (!a->Includes(kMinInt) || !b->Includes(-1)) {
1662 ClearFlag(HValue::kCanOverflow);
1665 if (!b->CanBeZero()) {
1666 ClearFlag(HValue::kCanBeDivByZero);
1670 return HValue::InferRange(zone);
// Tries to recognize |phi| as an induction variable: it must live in a loop
// and have exactly two operands, one of which is "phi +/- constant" (the
// incremented value) — the other operand is the base. Returns NULL when the
// pattern does not match.
1675 InductionVariableData* InductionVariableData::ExaminePhi(HPhi* phi) {
1676 if (phi->block()->loop_information() == NULL) return NULL;
1677 if (phi->OperandCount() != 2) return NULL;
1678 int32_t candidate_increment;
1680 candidate_increment = ComputeIncrement(phi, phi->OperandAt(0));
1681 if (candidate_increment != 0) {
1682 return new(phi->block()->graph()->zone())
1683 InductionVariableData(phi, phi->OperandAt(1), candidate_increment);
1686 candidate_increment = ComputeIncrement(phi, phi->OperandAt(1));
1687 if (candidate_increment != 0) {
1688 return new(phi->block()->graph()->zone())
1689 InductionVariableData(phi, phi->OperandAt(0), candidate_increment);
1697 * This function tries to match the following patterns (and all the relevant
1698 * variants related to |, & and + being commutative):
1699 * base | constant_or_mask
1700 * base & constant_and_mask
1701 * (base + constant_offset) & constant_and_mask
1702 * (base - constant_offset) & constant_and_mask
// Decomposes |value| into base / and-mask / or-mask per the pattern list in
// the comment above. Falls back to result->base == value when nothing
// matches. Only a BIT_AND decomposition additionally allows peeling a
// constant add/sub offset off the base.
// NOTE(review): several lines (early returns, offset handling inside the
// add/sub branches, closing braces) are elided in this excerpt.
1704 void InductionVariableData::DecomposeBitwise(
1706 BitwiseDecompositionResult* result) {
1707 HValue* base = IgnoreOsrValue(value);
1708 result->base = value;
1710 if (!base->representation().IsInteger32()) return;
1712 if (base->IsBitwise()) {
1713 bool allow_offset = false;
1716 HBitwise* bitwise = HBitwise::cast(base);
// The constant mask may sit on either side of the bitwise op.
1717 if (bitwise->right()->IsInteger32Constant()) {
1718 mask = bitwise->right()->GetInteger32Constant();
1719 base = bitwise->left();
1720 } else if (bitwise->left()->IsInteger32Constant()) {
1721 mask = bitwise->left()->GetInteger32Constant();
1722 base = bitwise->right();
1726 if (bitwise->op() == Token::BIT_AND) {
1727 result->and_mask = mask;
1728 allow_offset = true;
1729 } else if (bitwise->op() == Token::BIT_OR) {
1730 result->or_mask = mask;
1735 result->context = bitwise->context();
1738 if (base->IsAdd()) {
1739 HAdd* add = HAdd::cast(base);
1740 if (add->right()->IsInteger32Constant()) {
1742 } else if (add->left()->IsInteger32Constant()) {
1743 base = add->right();
1745 } else if (base->IsSub()) {
1746 HSub* sub = HSub::cast(base);
1747 if (sub->right()->IsInteger32Constant()) {
1753 result->base = base;
// Records a bounds check against this induction variable, grouping checks
// by their length operand. Checks are ignored unless the current limit is
// valid at the check's block and the check is inside the induction loop.
1758 void InductionVariableData::AddCheck(HBoundsCheck* check,
1759 int32_t upper_limit) {
1760 ASSERT(limit_validity() != NULL);
1761 if (limit_validity() != check->block() &&
1762 !limit_validity()->Dominates(check->block())) return;
1763 if (!phi()->block()->current_loop()->IsNestedInThisLoop(
1764 check->block()->current_loop())) return;
// Find (or lazily create) the per-length bucket for this check.
1766 ChecksRelatedToLength* length_checks = checks();
1767 while (length_checks != NULL) {
1768 if (length_checks->length() == check->length()) break;
1769 length_checks = length_checks->next();
1771 if (length_checks == NULL) {
1772 length_checks = new(check->block()->zone())
1773 ChecksRelatedToLength(check->length(), checks());
1774 checks_ = length_checks;
1777 length_checks->AddCheck(check, upper_limit);
// Finishes the current basic block: stamps the accumulated upper limit onto
// every recorded check belonging to that block (the checks list is grouped
// so same-block entries are contiguous at the head).
1781 void InductionVariableData::ChecksRelatedToLength::CloseCurrentBlock() {
1782 if (checks() != NULL) {
1783 InductionVariableCheck* c = checks();
1784 HBasicBlock* current_block = c->check()->block();
1785 while (c != NULL && c->check()->block() == current_block) {
1786 c->set_upper_limit(current_upper_limit_);
// Rewrites the first bounds check of the current block to use a masked
// index ("index_base <token> mask"): creates the constant and, on first
// use, a side-effect-free HBitwise; on later uses it just retargets the
// existing added_index's operands. The old index is deleted if it became
// dead. NOTE(review): some parameter lines and closing braces are elided in
// this excerpt.
1793 void InductionVariableData::ChecksRelatedToLength::UseNewIndexInCurrentBlock(
1798 ASSERT(first_check_in_block() != NULL);
1799 HValue* previous_index = first_check_in_block()->index();
1800 ASSERT(context != NULL);
1802 Zone* zone = index_base->block()->graph()->zone();
1803 set_added_constant(HConstant::New(zone, context, mask));
// Keep the constant ahead of whatever will consume it.
1804 if (added_index() != NULL) {
1805 added_constant()->InsertBefore(added_index());
1807 added_constant()->InsertBefore(first_check_in_block());
1810 if (added_index() == NULL) {
1811 first_check_in_block()->ReplaceAllUsesWith(first_check_in_block()->index());
1812 HInstruction* new_index = HBitwise::New(zone, context, token, index_base,
1814 ASSERT(new_index->IsBitwise());
// The bitwise op exists purely to tighten the index range; it must not be
// treated as having observable effects.
1815 new_index->ClearAllSideEffects();
1816 new_index->AssumeRepresentation(Representation::Integer32());
1817 set_added_index(HBitwise::cast(new_index));
1818 added_index()->InsertBefore(first_check_in_block());
1820 ASSERT(added_index()->op() == token);
1822 added_index()->SetOperandAt(1, index_base);
1823 added_index()->SetOperandAt(2, added_constant());
1824 first_check_in_block()->SetOperandAt(0, added_index());
1825 if (previous_index->UseCount() == 0) {
1826 previous_index->DeleteAndReplaceWith(NULL);
// Adds one bounds check to this length bucket. On entering a new basic
// block the previous block is closed and per-block state reset. Within a
// block, a check whose index is a bitwise-masked variant of an already
// checked index can be subsumed: the mask is merged into the block's
// canonical index (UseNewIndexInCurrentBlock) and the check is skipped.
// NOTE(review): several lines (early return after the new-block branch,
// else/closing braces) are elided in this excerpt.
1830 void InductionVariableData::ChecksRelatedToLength::AddCheck(
1831 HBoundsCheck* check,
1832 int32_t upper_limit) {
1833 BitwiseDecompositionResult decomposition;
1834 InductionVariableData::DecomposeBitwise(check->index(), &decomposition);
1836 if (first_check_in_block() == NULL ||
1837 first_check_in_block()->block() != check->block()) {
1838 CloseCurrentBlock();
1840 first_check_in_block_ = check;
1841 set_added_index(NULL);
1842 set_added_constant(NULL);
1843 current_and_mask_in_block_ = decomposition.and_mask;
1844 current_or_mask_in_block_ = decomposition.or_mask;
1845 current_upper_limit_ = upper_limit;
1847 InductionVariableCheck* new_check = new(check->block()->graph()->zone())
1848 InductionVariableCheck(check, checks_, upper_limit);
1849 checks_ = new_check;
// Within the same block: keep the widest limit seen so far.
1853 if (upper_limit > current_upper_limit()) {
1854 current_upper_limit_ = upper_limit;
// AND-masked index (no OR mask active): a wider AND mask than the current
// one still bounds the index, so merge it and skip the check.
1857 if (decomposition.and_mask != 0 &&
1858 current_or_mask_in_block() == 0) {
1859 if (current_and_mask_in_block() == 0 ||
1860 decomposition.and_mask > current_and_mask_in_block()) {
1861 UseNewIndexInCurrentBlock(Token::BIT_AND,
1862 decomposition.and_mask,
1864 decomposition.context);
1865 current_and_mask_in_block_ = decomposition.and_mask;
1867 check->set_skip_check();
// Symmetric handling for an OR-masked index when no AND mask is active.
1869 if (current_and_mask_in_block() == 0) {
1870 if (decomposition.or_mask > current_or_mask_in_block()) {
1871 UseNewIndexInCurrentBlock(Token::BIT_OR,
1872 decomposition.or_mask,
1874 decomposition.context);
1875 current_or_mask_in_block_ = decomposition.or_mask;
1877 check->set_skip_check();
// A check we could not subsume is recorded for later limit processing.
1880 if (!check->skip_check()) {
1881 InductionVariableCheck* new_check = new(check->block()->graph()->zone())
1882 InductionVariableCheck(check, checks_, upper_limit);
1883 checks_ = new_check;
1889 * This method detects if phi is an induction variable, with phi_operand as
1890 * its "incremented" value (the other operand would be the "base" value).
1892 * It checks if phi_operand has the form "phi + constant".
1893 * If yes, the constant is the increment that the induction variable gets at
1894 * every loop iteration.
1895 * Otherwise it returns 0.
// Returns the per-iteration increment if |phi_operand| is "phi + c" or
// "phi - c" with c an int32 constant (negated for Sub), and 0 otherwise.
// Note: for Sub only "phi - c" is matched — "c - phi" is not an increment.
1897 int32_t InductionVariableData::ComputeIncrement(HPhi* phi,
1898 HValue* phi_operand) {
1899 if (!phi_operand->representation().IsInteger32()) return 0;
1901 if (phi_operand->IsAdd()) {
1902 HAdd* operation = HAdd::cast(phi_operand);
1903 if (operation->left() == phi &&
1904 operation->right()->IsInteger32Constant()) {
1905 return operation->right()->GetInteger32Constant();
1906 } else if (operation->right() == phi &&
1907 operation->left()->IsInteger32Constant()) {
1908 return operation->left()->GetInteger32Constant();
1910 } else if (phi_operand->IsSub()) {
1911 HSub* operation = HSub::cast(phi_operand);
1912 if (operation->left() == phi &&
1913 operation->right()->IsInteger32Constant()) {
1914 return -operation->right()->GetInteger32Constant();
1923 * Swaps the information in "update" with the one contained in "this".
1924 * The swapping is important because this method is used while doing a
1925 * dominator tree traversal, and "update" will retain the old data that
1926 * will be restored while backtracking.
// Applies |update| to the matching additional (upper or lower) limit by
// swapping, as described in the comment above: the old values end up in
// |update| so the dominator-tree walk can restore them on backtrack.
1928 void InductionVariableData::UpdateAdditionalLimit(
1929 InductionVariableLimitUpdate* update) {
1930 ASSERT(update->updated_variable == this);
1931 if (update->limit_is_upper) {
1932 swap(&additional_upper_limit_, &update->limit);
1933 swap(&additional_upper_limit_is_included_, &update->limit_is_included);
1935 swap(&additional_lower_limit_, &update->limit);
1936 swap(&additional_lower_limit_is_included_, &update->limit_is_included);
// Computes the tightest constant upper limit from: the loop-guard limit,
// the additional upper limit, and a positive and_mask (which bounds the
// value). Returns kNoLimit when nothing constrains the variable below
// MAX_LIMIT. NOTE(review): the exclusive-limit adjustments and the or_mask
// handling are partially elided in this excerpt.
1941 int32_t InductionVariableData::ComputeUpperLimit(int32_t and_mask,
1943 // Should be Smi::kMaxValue but it must fit 32 bits; lower is safe anyway.
1944 const int32_t MAX_LIMIT = 1 << 30;
1946 int32_t result = MAX_LIMIT;
1948 if (limit() != NULL &&
1949 limit()->IsInteger32Constant()) {
1950 int32_t limit_value = limit()->GetInteger32Constant();
1951 if (!limit_included()) {
1954 if (limit_value < result) result = limit_value;
1957 if (additional_upper_limit() != NULL &&
1958 additional_upper_limit()->IsInteger32Constant()) {
1959 int32_t limit_value = additional_upper_limit()->GetInteger32Constant();
1960 if (!additional_upper_limit_is_included()) {
1963 if (limit_value < result) result = limit_value;
1966 if (and_mask > 0 && and_mask < MAX_LIMIT) {
1967 if (and_mask < result) result = and_mask;
1971 // Add the effect of the or_mask.
1974 return result >= MAX_LIMIT ? kNoLimit : result;
// For a two-operand phi merging an OSR-entry value with a regular value,
// returns the regular (non-OSR) operand; otherwise returns |v| unchanged.
1978 HValue* InductionVariableData::IgnoreOsrValue(HValue* v) {
1979 if (!v->IsPhi()) return v;
1980 HPhi* phi = HPhi::cast(v);
1981 if (phi->OperandCount() != 2) return v;
1982 if (phi->OperandAt(0)->block()->is_osr_entry()) {
1983 return phi->OperandAt(1);
1984 } else if (phi->OperandAt(1)->block()->is_osr_entry()) {
1985 return phi->OperandAt(0);
// Fetches the induction-variable data attached to |v| (after stripping an
// OSR merge). NOTE(review): the IsPhi guard preceding the cast is elided in
// this excerpt.
1992 InductionVariableData* InductionVariableData::GetInductionVariableData(
1994 v = IgnoreOsrValue(v);
1996 return HPhi::cast(v)->induction_variable_data();
2003 * Check if a conditional branch to "current_branch" with token "token" is
2004 * the branch that keeps the induction loop running (and, conversely, will
2005 * terminate it if the "other_branch" is taken).
2007 * Three conditions must be met:
2008 * - "current_branch" must be in the induction loop.
2009 * - "other_branch" must be out of the induction loop.
2010 * - "token" and the induction increment must be "compatible": the token should
2011 * be a condition that keeps the execution inside the loop until the limit is
// See the contract in the comment above: the taken branch must stay in the
// loop, the other branch must leave it, and the comparison direction must
// match the increment sign (LT/LTE with positive step, GT/GTE with
// negative, or !=/== with step +/-1).
2014 bool InductionVariableData::CheckIfBranchIsLoopGuard(
2016 HBasicBlock* current_branch,
2017 HBasicBlock* other_branch) {
2018 if (!phi()->block()->current_loop()->IsNestedInThisLoop(
2019 current_branch->current_loop())) {
2023 if (phi()->block()->current_loop()->IsNestedInThisLoop(
2024 other_branch->current_loop())) {
2028 if (increment() > 0 && (token == Token::LT || token == Token::LTE)) {
2031 if (increment() < 0 && (token == Token::GT || token == Token::GTE)) {
2034 if (Token::IsInequalityOp(token) && (increment() == 1 || increment() == -1)) {
// If |block| has a single predecessor ending in a numeric compare-and-
// branch, extracts the limit that comparison imposes on an induction
// variable entering |block|: normalizes the token so that taking |block|
// corresponds to the comparison holding (negating when |block| is the false
// successor, reversing when the induction variable is on the right).
2042 void InductionVariableData::ComputeLimitFromPredecessorBlock(
2044 LimitFromPredecessorBlock* result) {
2045 if (block->predecessors()->length() != 1) return;
2046 HBasicBlock* predecessor = block->predecessors()->at(0);
2047 HInstruction* end = predecessor->last();
2049 if (!end->IsCompareNumericAndBranch()) return;
2050 HCompareNumericAndBranch* branch = HCompareNumericAndBranch::cast(end);
2052 Token::Value token = branch->token();
2053 if (!Token::IsArithmeticCompareOp(token)) return;
2055 HBasicBlock* other_target;
2056 if (block == branch->SuccessorAt(0)) {
2057 other_target = branch->SuccessorAt(1);
2059 other_target = branch->SuccessorAt(0);
2060 token = Token::NegateCompareOp(token);
2061 ASSERT(block == branch->SuccessorAt(1));
2064 InductionVariableData* data;
2066 data = GetInductionVariableData(branch->left());
2067 HValue* limit = branch->right();
2069 data = GetInductionVariableData(branch->right());
2070 token = Token::ReverseCompareOp(token);
2071 limit = branch->left();
2075 result->variable = data;
2076 result->token = token;
2077 result->limit = limit;
2078 result->other_target = other_target;
2084 * Compute the limit that is imposed on an induction variable when entering
2086 * If the limit is the "proper" induction limit (the one that makes the loop
2087 * terminate when the induction variable reaches it) it is stored directly in
2088 * the induction variable data.
2089 * Otherwise the limit is written in "additional_limit" and the method
// See the comment above: derives the limit imposed on entry to |block|. A
// proper loop-guard limit is stored directly on the induction variable
// (with its validity and exit blocks); any other limit is reported through
// |additional_limit| for scoped application during the dominator walk.
2092 bool InductionVariableData::ComputeInductionVariableLimit(
2094 InductionVariableLimitUpdate* additional_limit) {
2095 LimitFromPredecessorBlock limit;
2096 ComputeLimitFromPredecessorBlock(block, &limit);
2097 if (!limit.LimitIsValid()) return false;
2099 if (limit.variable->CheckIfBranchIsLoopGuard(limit.token,
2101 limit.other_target)) {
2102 limit.variable->limit_ = limit.limit;
2103 limit.variable->limit_included_ = limit.LimitIsIncluded();
2104 limit.variable->limit_validity_ = block;
2105 limit.variable->induction_exit_block_ = block->predecessors()->at(0);
2106 limit.variable->induction_exit_target_ = limit.other_target;
2109 additional_limit->updated_variable = limit.variable;
2110 additional_limit->limit = limit.limit;
2111 additional_limit->limit_is_upper = limit.LimitIsUpper();
2112 additional_limit->limit_is_included = limit.LimitIsIncluded();
// Min/max range: combine the two operand ranges with the matching
// element-wise Range operation; non-integer representations use the
// generic inference.
2118 Range* HMathMinMax::InferRange(Zone* zone) {
2119 if (representation().IsSmiOrInteger32()) {
2120 Range* a = left()->range();
2121 Range* b = right()->range();
2122 Range* res = a->Copy(zone);
2123 if (operation_ == kMathMax) {
2124 res->CombinedMax(b);
2126 ASSERT(operation_ == kMathMin);
2127 res->CombinedMin(b);
2131 return HValue::InferRange(zone);
// Debug printing: each operand, then aggregate use counts per
// representation (smi/int32/double/tagged, direct + indirect), range and
// type.
2136 void HPhi::PrintTo(StringStream* stream) {
2138 for (int i = 0; i < OperandCount(); ++i) {
2139 HValue* value = OperandAt(i);
2141 value->PrintNameTo(stream);
2144 stream->Add(" uses:%d_%ds_%di_%dd_%dt",
2146 smi_non_phi_uses() + smi_indirect_uses(),
2147 int32_non_phi_uses() + int32_indirect_uses(),
2148 double_non_phi_uses() + double_indirect_uses(),
2149 tagged_non_phi_uses() + tagged_indirect_uses());
2150 PrintRangeTo(stream);
2151 PrintTypeTo(stream);
// Appends an operand to the phi (grow-then-set, since SetOperandAt also
// maintains use lists), propagating the kIsArguments taint from the input.
2156 void HPhi::AddInput(HValue* value) {
2157 inputs_.Add(NULL, value->block()->zone());
2158 SetOperandAt(OperandCount() - 1, value);
2159 // Mark phis that may have 'arguments' directly or indirectly as an operand.
2160 if (!CheckFlag(kIsArguments) && value->CheckFlag(kIsArguments)) {
2161 SetFlag(kIsArguments);
// True if any use of this phi is a non-phi instruction.
2166 bool HPhi::HasRealUses() {
2167 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
2168 if (!it.value()->IsPhi()) return true;
// If every operand is either this phi itself or a single common value, the
// phi is redundant and that value is returned; returns NULL otherwise. The
// second loop verifies no other distinct operand exists.
2174 HValue* HPhi::GetRedundantReplacement() {
2175 HValue* candidate = NULL;
2176 int count = OperandCount();
2178 while (position < count && candidate == NULL) {
2179 HValue* current = OperandAt(position++);
2180 if (current != this) candidate = current;
2182 while (position < count) {
2183 HValue* current = OperandAt(position++);
2184 if (current != this && current != candidate) return NULL;
2186 ASSERT(candidate != this);
// Unlinks this phi from its block (RemovePhi clears the back-pointer, as
// the asserts document).
2191 void HPhi::DeleteFromGraph() {
2192 ASSERT(block() != NULL);
2193 block()->RemovePhi(this);
2194 ASSERT(block() == NULL);
// Tallies this phi's non-phi uses per observed representation and computes
// a conservative truncation approximation: start assuming all uses
// truncate, then clear the flags on the first non-truncating real use
// (simulates are ignored for truncation purposes).
2198 void HPhi::InitRealUses(int phi_id) {
2199 // Initialize real uses.
2201 // Compute a conservative approximation of truncating uses before inferring
2202 // representations. The proper, exact computation will be done later, when
2203 // inserting representation changes.
2204 SetFlag(kTruncatingToSmi)
2205 SetFlag(kTruncatingToInt32);
2206 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
2207 HValue* value = it.value();
2208 if (!value->IsPhi()) {
2209 Representation rep = value->observed_input_representation(it.index());
2210 non_phi_uses_[rep.kind()] += value->LoopWeight();
2211 if (FLAG_trace_representation) {
2212 PrintF("#%d Phi is used by real #%d %s as %s\n",
2213 id(), value->id(), value->Mnemonic(), rep.Mnemonic());
2215 if (!value->IsSimulate()) {
2216 if (!value->CheckFlag(kTruncatingToSmi)) {
2217 ClearFlag(kTruncatingToSmi);
2219 if (!value->CheckFlag(kTruncatingToInt32)) {
2220 ClearFlag(kTruncatingToInt32);
// Folds another phi's direct (non-phi) use counts into this phi's indirect
// use counts, one slot per representation kind.
2228 void HPhi::AddNonPhiUsesFrom(HPhi* other) {
2229 if (FLAG_trace_representation) {
2230 PrintF("adding to #%d Phi uses of #%d Phi: s%d i%d d%d t%d\n",
2232 other->non_phi_uses_[Representation::kSmi],
2233 other->non_phi_uses_[Representation::kInteger32],
2234 other->non_phi_uses_[Representation::kDouble],
2235 other->non_phi_uses_[Representation::kTagged]);
2238 for (int i = 0; i < Representation::kNumRepresentations; i++) {
2239 indirect_uses_[i] += other->non_phi_uses_[i];
// Accumulates this phi's indirect use counts into the caller's array.
2244 void HPhi::AddIndirectUsesTo(int* dest) {
2245 for (int i = 0; i < Representation::kNumRepresentations; i++) {
2246 dest[i] += indirect_uses_[i];
// Merges a list of later simulates into this one: assigned slots are kept
// only if this simulate has no value for that index yet; pushed values
// either cancel against this simulate's pending pops or are re-pushed.
// Each merged simulate is then deleted. NOTE(review): the pop_count_
// decrement / else branch between lines 2261 and 2264 is elided in this
// excerpt.
2251 void HSimulate::MergeWith(ZoneList<HSimulate*>* list) {
2252 while (!list->is_empty()) {
2253 HSimulate* from = list->RemoveLast();
2254 ZoneList<HValue*>* from_values = &from->values_;
2255 for (int i = 0; i < from_values->length(); ++i) {
2256 if (from->HasAssignedIndexAt(i)) {
2257 int index = from->GetAssignedIndexAt(i);
2258 if (HasValueForIndex(index)) continue;
2259 AddAssignedValue(index, from_values->at(i));
2261 if (pop_count_ > 0) {
2264 AddPushedValue(from_values->at(i));
2268 pop_count_ += from->pop_count_;
2269 from->DeleteAndReplaceWith(NULL);
// Debug printing: AST id, pop count, then the simulated values from newest
// to oldest, distinguishing environment-slot assignments from pushes.
2274 void HSimulate::PrintDataTo(StringStream* stream) {
2275 stream->Add("id=%d", ast_id().ToInt());
2276 if (pop_count_ > 0) stream->Add(" pop %d", pop_count_);
2277 if (values_.length() > 0) {
2278 if (pop_count_ > 0) stream->Add(" /");
2279 for (int i = values_.length() - 1; i >= 0; --i) {
2280 if (HasAssignedIndexAt(i)) {
2281 stream->Add(" var[%d] = ", GetAssignedIndexAt(i));
2283 stream->Add(" push ");
2285 values_[i]->PrintNameTo(stream);
2286 if (i > 0) stream->Add(",");
// Records a block that returns from this inlined function.
2292 void HEnterInlined::RegisterReturnTarget(HBasicBlock* return_target,
2294 ASSERT(return_target->IsInlineReturnTarget());
2295 return_targets_.Add(return_target, zone);
// Debug printing: inlined function's debug name and AST function id.
2299 void HEnterInlined::PrintDataTo(StringStream* stream) {
2300 SmartArrayPointer<char> name = function()->debug_name()->ToCString();
2301 stream->Add("%s, id=%d", *name, function()->id().ToInt());
// True if |value| round-trips double -> int32 -> double bit-exactly. The
// bit comparison (rather than ==) rejects -0.0 and NaN.
2305 static bool IsInteger32(double value) {
2306 double roundtrip_value = static_cast<double>(static_cast<int32_t>(value));
2307 return BitCast<int64_t>(roundtrip_value) == BitCast<int64_t>(value);
// Constructor from an arbitrary heap handle: classifies the value (new
// space residency, numeric int32/smi/double forms, internalized string,
// cell) so later representation decisions need no heap access. Some
// initializer and branch lines are elided in this excerpt.
2311 HConstant::HConstant(Handle<Object> handle, Representation r)
2312 : HTemplateInstruction<0>(HType::TypeFromValue(handle)),
2315 has_smi_value_(false),
2316 has_int32_value_(false),
2317 has_double_value_(false),
2318 has_external_reference_value_(false),
2319 is_internalized_string_(false),
2320 is_not_in_new_space_(true),
2322 boolean_value_(handle->BooleanValue()) {
2323 if (handle_->IsHeapObject()) {
2324 Heap* heap = Handle<HeapObject>::cast(handle)->GetHeap();
2325 is_not_in_new_space_ = !heap->InNewSpace(*handle);
2327 if (handle_->IsNumber()) {
2328 double n = handle_->Number();
2329 has_int32_value_ = IsInteger32(n);
2330 int32_value_ = DoubleToInt32(n);
2331 has_smi_value_ = has_int32_value_ && Smi::IsValid(int32_value_);
2333 has_double_value_ = true;
2335 is_internalized_string_ = handle_->IsInternalizedString();
2338 is_cell_ = !handle_.is_null() &&
2339 (handle_->IsCell() || handle_->IsPropertyCell());
// Constructor with a precomputed classification (unique id, type,
// internalized/new-space flags supplied by the caller), avoiding any heap
// inspection. Tagged-number types are rejected because the numeric flags
// are all false here.
2344 HConstant::HConstant(Handle<Object> handle,
2345 UniqueValueId unique_id,
2348 bool is_internalize_string,
2349 bool is_not_in_new_space,
2352 : HTemplateInstruction<0>(type),
2354 unique_id_(unique_id),
2355 has_smi_value_(false),
2356 has_int32_value_(false),
2357 has_double_value_(false),
2358 has_external_reference_value_(false),
2359 is_internalized_string_(is_internalize_string),
2360 is_not_in_new_space_(is_not_in_new_space),
2362 boolean_value_(boolean_value) {
2363 ASSERT(!handle.is_null());
2364 ASSERT(!type.IsTaggedNumber());
// Constructor from an int32: always carries both int32 and double forms;
// smi form only if the value fits. Boolean value follows JS truthiness
// (non-zero).
2369 HConstant::HConstant(int32_t integer_value,
2371 bool is_not_in_new_space,
2372 Handle<Object> optional_handle)
2373 : handle_(optional_handle),
2375 has_smi_value_(Smi::IsValid(integer_value)),
2376 has_int32_value_(true),
2377 has_double_value_(true),
2378 has_external_reference_value_(false),
2379 is_internalized_string_(false),
2380 is_not_in_new_space_(is_not_in_new_space),
2382 boolean_value_(integer_value != 0),
2383 int32_value_(integer_value),
2384 double_value_(FastI2D(integer_value)) {
2385 set_type(has_smi_value_ ? HType::Smi() : HType::TaggedNumber());
// Constructor from a double: int32/smi forms are derived only when the
// double is a bit-exact int32 (so -0.0 and NaN stay double-only). Boolean
// value follows JS truthiness: false for 0 and NaN.
2390 HConstant::HConstant(double double_value,
2392 bool is_not_in_new_space,
2393 Handle<Object> optional_handle)
2394 : handle_(optional_handle),
2396 has_int32_value_(IsInteger32(double_value)),
2397 has_double_value_(true),
2398 has_external_reference_value_(false),
2399 is_internalized_string_(false),
2400 is_not_in_new_space_(is_not_in_new_space),
2402 boolean_value_(double_value != 0 && !std::isnan(double_value)),
2403 int32_value_(DoubleToInt32(double_value)),
2404 double_value_(double_value) {
2405 has_smi_value_ = has_int32_value_ && Smi::IsValid(int32_value_);
2406 set_type(has_smi_value_ ? HType::Smi() : HType::TaggedNumber());
// Constructor from an external (C++) reference: no numeric forms, always
// truthy, always uses the External representation.
2411 HConstant::HConstant(ExternalReference reference)
2412 : HTemplateInstruction<0>(HType::None()),
2413 has_smi_value_(false),
2414 has_int32_value_(false),
2415 has_double_value_(false),
2416 has_external_reference_value_(true),
2417 is_internalized_string_(false),
2418 is_not_in_new_space_(true),
2420 boolean_value_(true),
2421 external_reference_value_(reference) {
2422 Initialize(Representation::External());
// Eagerly migrates a JSObject constant off a deprecated map so compiled
// code does not embed stale maps. Best-effort: TryMigrateInstance's result
// is intentionally ignored.
2426 static void PrepareConstant(Handle<Object> object) {
2427 if (!object->IsJSObject()) return;
2428 Handle<JSObject> js_object = Handle<JSObject>::cast(object);
2429 if (!js_object->map()->is_deprecated()) return;
2430 JSObject::TryMigrateInstance(js_object);
// Picks the strongest representation the constant's known forms support
// (the surrounding condition selecting when |r| is refined is elided in
// this excerpt). Smi representation is only forced when smis are 31-bit —
// presumably to avoid pessimizing 32-bit-smi targets; confirm against
// kSmiValueSize usage elsewhere.
2434 void HConstant::Initialize(Representation r) {
2436 if (has_smi_value_ && kSmiValueSize == 31) {
2437 r = Representation::Smi();
2438 } else if (has_int32_value_) {
2439 r = Representation::Integer32();
2440 } else if (has_double_value_) {
2441 r = Representation::Double();
2442 } else if (has_external_reference_value_) {
2443 r = Representation::External();
2445 PrepareConstant(handle_);
2446 r = Representation::Tagged();
2449 set_representation(r);
// Returns true when this constant should be (re)materialized at each use
// instead of being emitted once.  With OSR, only "standard" graph constants
// may be duplicated; cells and doubles are never emitted at uses.
2454 bool HConstant::EmitAtUses() {
2456 if (block()->graph()->has_osr()) {
2457 return block()->graph()->IsStandardConstant(this);
2459 if (IsCell()) return false;
2460 if (representation().IsDouble()) return false;
// Clones this constant into the requested representation, or returns NULL
// when the cached value needed for that representation is absent (caller
// must handle the NULL).
2465 HConstant* HConstant::CopyToRepresentation(Representation r, Zone* zone) const {
2466 if (r.IsSmi() && !has_smi_value_) return NULL;
2467 if (r.IsInteger32() && !has_int32_value_) return NULL;
2468 if (r.IsDouble() && !has_double_value_) return NULL;
2469 if (r.IsExternal() && !has_external_reference_value_) return NULL;
// Prefer the int32 cache, then double, then external; otherwise fall back
// to the tagged handle (which must exist at this point).
2470 if (has_int32_value_) {
2471 return new(zone) HConstant(int32_value_, r, is_not_in_new_space_, handle_);
2473 if (has_double_value_) {
2474 return new(zone) HConstant(double_value_, r, is_not_in_new_space_, handle_);
2476 if (has_external_reference_value_) {
2477 return new(zone) HConstant(external_reference_value_);
2479 ASSERT(!handle_.is_null());
2480 return new(zone) HConstant(handle_,
2484 is_internalized_string_,
2485 is_not_in_new_space_,
// Produces an Integer32 constant using JS truncation semantics: int32 values
// copy directly, doubles go through DoubleToInt32, and non-numbers are first
// converted via CopyToTruncatedNumber and then recursed on.  Returns an
// empty Maybe when no conversion exists.
2491 Maybe<HConstant*> HConstant::CopyToTruncatedInt32(Zone* zone) {
2492 HConstant* res = NULL;
2493 if (has_int32_value_) {
2494 res = new(zone) HConstant(int32_value_,
2495 Representation::Integer32(),
2496 is_not_in_new_space_,
2498 } else if (has_double_value_) {
2499 res = new(zone) HConstant(DoubleToInt32(double_value_),
2500 Representation::Integer32(),
2501 is_not_in_new_space_,
2504 ASSERT(!HasNumberValue());
2505 Maybe<HConstant*> number = CopyToTruncatedNumber(zone);
2506 if (number.has_value) return number.value->CopyToTruncatedInt32(zone);
2508 return Maybe<HConstant*>(res != NULL, res);
// ToNumber for the oddball constants: true→1, false→0, undefined→NaN,
// null→0.  Anything else yields an empty Maybe.
2512 Maybe<HConstant*> HConstant::CopyToTruncatedNumber(Zone* zone) {
2513 HConstant* res = NULL;
2514 if (handle()->IsBoolean()) {
2515 res = handle()->BooleanValue() ?
2516 new(zone) HConstant(1) : new(zone) HConstant(0);
2517 } else if (handle()->IsUndefined()) {
2518 res = new(zone) HConstant(OS::nan_value());
2519 } else if (handle()->IsNull()) {
2520 res = new(zone) HConstant(0);
2522 return Maybe<HConstant*>(res != NULL, res);
// Debug printing: picks the most specific cached value (int32, then double,
// then external address), otherwise short-prints the tagged handle.
2526 void HConstant::PrintDataTo(StringStream* stream) {
2527 if (has_int32_value_) {
2528 stream->Add("%d ", int32_value_);
2529 } else if (has_double_value_) {
2530 stream->Add("%f ", FmtElm(double_value_));
2531 } else if (has_external_reference_value_) {
2532 stream->Add("%p ", reinterpret_cast<void*>(
2533 external_reference_value_.address()));
2535 handle()->ShortPrint(stream);
// Debug printing for binary ops: both operand names plus flag markers
// ("!" = can overflow, "-0?" = bails out on minus zero).
2540 void HBinaryOperation::PrintDataTo(StringStream* stream) {
2541 left()->PrintNameTo(stream);
2543 right()->PrintNameTo(stream);
2544 if (CheckFlag(kCanOverflow)) stream->Add(" !");
2545 if (CheckFlag(kBailoutOnMinusZero)) stream->Add(" -0?");
// Representation inference for binary operations: generalize over inputs,
// then (only when there is no observed output representation from type
// feedback) over uses, then over the observed output.  Finally demote Smi
// to Integer32 when some use cannot take a Smi.
2549 void HBinaryOperation::InferRepresentation(HInferRepresentationPhase* h_infer) {
2550 ASSERT(CheckFlag(kFlexibleRepresentation));
2551 Representation new_rep = RepresentationFromInputs();
2552 UpdateRepresentation(new_rep, h_infer, "inputs");
2553 if (observed_output_representation_.IsNone()) {
2554 new_rep = RepresentationFromUses();
2555 UpdateRepresentation(new_rep, h_infer, "uses");
2557 new_rep = RepresentationFromOutput();
2558 UpdateRepresentation(new_rep, h_infer, "output");
2561 if (representation().IsSmi() && HasNonSmiUse()) {
2562 UpdateRepresentation(
2563 Representation::Integer32(), h_infer, "use requirements");
2568 Representation HBinaryOperation::RepresentationFromInputs() {
2569 // Determine the worst case of observed input representations and
2570 // the currently assumed output representation.
2571 Representation rep = representation();
// Inputs 1 and 2 are the left/right operands (input 0 is the context).
2572 for (int i = 1; i <= 2; ++i) {
2573 rep = rep.generalize(observed_input_representation(i));
2575 // If any of the actual input representation is more general than what we
2576 // have so far but not Tagged, use that representation instead.
2577 Representation left_rep = left()->representation();
2578 Representation right_rep = right()->representation();
2579 if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
2580 if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);
// True when the observed (feedback) output representation may be ignored:
// all uses truncate to the current int32/smi representation anyway.
// NOTE(review): the trailing Mul-precision condition is elided in this
// excerpt — confirm the full predicate against upstream.
2586 bool HBinaryOperation::IgnoreObservedOutputRepresentation(
2587 Representation current_rep) {
2588 return ((current_rep.IsInteger32() && CheckUsesForFlag(kTruncatingToInt32)) ||
2589 (current_rep.IsSmi() && CheckUsesForFlag(kTruncatingToSmi))) &&
2590 // Mul in Integer32 mode would be too precise.
2595 Representation HBinaryOperation::RepresentationFromOutput() {
2596 Representation rep = representation();
2597 // Consider observed output representation, but ignore it if it's Double,
2598 // this instruction is not a division, and all its uses are truncating
// Only a strictly more general observed output can widen the result.
2600 if (observed_output_representation_.is_more_general_than(rep) &&
2601 !IgnoreObservedOutputRepresentation(rep)) {
2602 return observed_output_representation_;
2604 return Representation::None();
// Forces both observed input representations and the value's own
// representation to r in one step.
2608 void HBinaryOperation::AssumeRepresentation(Representation r) {
2609 set_observed_input_representation(1, r);
2610 set_observed_input_representation(2, r);
2611 HValue::AssumeRepresentation(r);
// Min/max infers its representation from inputs only; use-site feedback is
// deliberately ignored (see comment below).
2615 void HMathMinMax::InferRepresentation(HInferRepresentationPhase* h_infer) {
2616 ASSERT(CheckFlag(kFlexibleRepresentation));
2617 Representation new_rep = RepresentationFromInputs();
2618 UpdateRepresentation(new_rep, h_infer, "inputs");
2619 // Do not care about uses.
// Range inference for bitwise ops.  XOR gets a bound derived from the
// highest bit that can differ between the operands; AND/OR use the sign
// masks of the operand ranges.  Widening to int64_t below avoids overflow
// while manipulating the 32-bit bounds.
2623 Range* HBitwise::InferRange(Zone* zone) {
2624 if (op() == Token::BIT_XOR) {
2625 if (left()->HasRange() && right()->HasRange()) {
2626 // The maximum value has the high bit, and all bits below, set:
2628 // If the range can be negative, the minimum int is a negative number with
2629 // the high bit, and all bits below, unset:
2631 // If it cannot be negative, conservatively choose 0 as minimum int.
2632 int64_t left_upper = left()->range()->upper();
2633 int64_t left_lower = left()->range()->lower();
2634 int64_t right_upper = right()->range()->upper();
2635 int64_t right_lower = right()->range()->lower();
// Fold negative bounds into their one's complement so all four values are
// non-negative before locating the most significant bit.
2637 if (left_upper < 0) left_upper = ~left_upper;
2638 if (left_lower < 0) left_lower = ~left_lower;
2639 if (right_upper < 0) right_upper = ~right_upper;
2640 if (right_lower < 0) right_lower = ~right_lower;
2642 int high = MostSignificantBit(
2643 static_cast<uint32_t>(
2644 left_upper | left_lower | right_upper | right_lower));
// NOTE(review): the computation of 'limit' from 'high' is elided in this
// excerpt — confirm against upstream before modifying.
2648 int32_t min = (left()->range()->CanBeNegative() ||
2649 right()->range()->CanBeNegative())
2650 ? static_cast<int32_t>(-limit) : 0;
2651 return new(zone) Range(min, static_cast<int32_t>(limit - 1));
2653 Range* result = HValue::InferRange(zone);
// Bitwise results are int32s; they can never be -0.
2654 result->set_can_be_minus_zero(false);
2657 const int32_t kDefaultMask = static_cast<int32_t>(0xffffffff);
2658 int32_t left_mask = (left()->range() != NULL)
2659 ? left()->range()->Mask()
2661 int32_t right_mask = (right()->range() != NULL)
2662 ? right()->range()->Mask()
2664 int32_t result_mask = (op() == Token::BIT_AND)
2665 ? left_mask & right_mask
2666 : left_mask | right_mask;
// A non-negative mask bounds the result in [0, mask].
2667 if (result_mask >= 0) return new(zone) Range(0, result_mask);
2669 Range* result = HValue::InferRange(zone);
2670 result->set_can_be_minus_zero(false);
// Range inference for arithmetic shift right with a constant shift count:
// shift the operand's range (or a fresh unconstrained range) accordingly.
2675 Range* HSar::InferRange(Zone* zone) {
2676 if (right()->IsConstant()) {
2677 HConstant* c = HConstant::cast(right());
2678 if (c->HasInteger32Value()) {
2679 Range* result = (left()->range() != NULL)
2680 ? left()->range()->Copy(zone)
2681 : new(zone) Range();
2682 result->Sar(c->Integer32Value());
2686 return HValue::InferRange(zone);
// Range inference for logical shift right.  With a possibly-negative input,
// only shift counts >= 1 keep the result inside int32, so a conservative
// [0, 0xffffffff >> count] range is used; positive inputs behave like Sar.
2690 Range* HShr::InferRange(Zone* zone) {
2691 if (right()->IsConstant()) {
2692 HConstant* c = HConstant::cast(right());
2693 if (c->HasInteger32Value()) {
// Shift counts are taken mod 32, matching the machine semantics.
2694 int shift_count = c->Integer32Value() & 0x1f;
2695 if (left()->range()->CanBeNegative()) {
2696 // Only compute bounds if the result always fits into an int32.
2697 return (shift_count >= 1)
2698 ? new(zone) Range(0,
2699 static_cast<uint32_t>(0xffffffff) >> shift_count)
2700 : new(zone) Range();
2702 // For positive inputs we can use the >> operator.
2703 Range* result = (left()->range() != NULL)
2704 ? left()->range()->Copy(zone)
2705 : new(zone) Range();
2706 result->Sar(c->Integer32Value());
2711 return HValue::InferRange(zone);
// Range inference for shift left with a constant shift count.
2715 Range* HShl::InferRange(Zone* zone) {
2716 if (right()->IsConstant()) {
2717 HConstant* c = HConstant::cast(right());
2718 if (c->HasInteger32Value()) {
2719 Range* result = (left()->range() != NULL)
2720 ? left()->range()->Copy(zone)
2721 : new(zone) Range();
2722 result->Shl(c->Integer32Value());
2726 return HValue::InferRange(zone);
// String length is bounded by String::kMaxLength; other named loads get the
// default (unconstrained) range.
2730 Range* HLoadNamedField::InferRange(Zone* zone) {
2731 if (access().IsStringLength()) {
2732 return new(zone) Range(0, String::kMaxLength);
2734 return HValue::InferRange(zone);
// Keyed loads from typed external arrays have ranges fixed by the element
// type's width and signedness.
2738 Range* HLoadKeyed::InferRange(Zone* zone) {
2739 switch (elements_kind()) {
2740 case EXTERNAL_PIXEL_ELEMENTS:
// Pixel elements are clamped uint8.
2741 return new(zone) Range(0, 255);
2742 case EXTERNAL_BYTE_ELEMENTS:
2743 return new(zone) Range(-128, 127);
2744 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
2745 return new(zone) Range(0, 255);
2746 case EXTERNAL_SHORT_ELEMENTS:
2747 return new(zone) Range(-32768, 32767);
2748 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
2749 return new(zone) Range(0, 65535);
2751 return HValue::InferRange(zone);
// Debug printers for the comparison and goto instructions: each emits the
// token/operand names followed by the base-class data (successor blocks for
// control instructions).
2756 void HCompareGeneric::PrintDataTo(StringStream* stream) {
2757 stream->Add(Token::Name(token()));
2759 HBinaryOperation::PrintDataTo(stream);
2763 void HStringCompareAndBranch::PrintDataTo(StringStream* stream) {
2764 stream->Add(Token::Name(token()));
2766 HControlInstruction::PrintDataTo(stream);
2770 void HCompareNumericAndBranch::PrintDataTo(StringStream* stream) {
2771 stream->Add(Token::Name(token()));
2773 left()->PrintNameTo(stream);
2775 right()->PrintNameTo(stream);
2776 HControlInstruction::PrintDataTo(stream);
2780 void HCompareObjectEqAndBranch::PrintDataTo(StringStream* stream) {
2781 left()->PrintNameTo(stream);
2783 right()->PrintNameTo(stream);
2784 HControlInstruction::PrintDataTo(stream);
2788 void HCompareHoleAndBranch::PrintDataTo(StringStream* stream) {
2789 object()->PrintNameTo(stream);
2790 HControlInstruction::PrintDataTo(stream);
// A hole check simply adopts the representation of its operand.
2794 void HCompareHoleAndBranch::InferRepresentation(
2795 HInferRepresentationPhase* h_infer) {
2796 ChangeRepresentation(object()->representation());
2800 void HGoto::PrintDataTo(StringStream* stream) {
2801 stream->Add("B%d", SuccessorAt(0)->block_id());
// Picks the comparison representation from observed and actual operand
// representations.  Smi/Integer32 feedback is refined with the operands'
// non-tagged representations; anything else falls back to Double, which in
// turn requires deopt-on-undefined for non-ordered comparisons (see the
// long comment below).
2805 void HCompareNumericAndBranch::InferRepresentation(
2806 HInferRepresentationPhase* h_infer) {
2807 Representation left_rep = left()->representation();
2808 Representation right_rep = right()->representation();
// NOTE(review): observed inputs are read at indices 0/1 here, while
// HBinaryOperation uses 1/2 — this class indexes its operands differently;
// confirm against the class declaration.
2809 Representation observed_left = observed_input_representation(0);
2810 Representation observed_right = observed_input_representation(1);
2812 Representation rep = Representation::None();
2813 rep = rep.generalize(observed_left);
2814 rep = rep.generalize(observed_right);
2815 if (rep.IsNone() || rep.IsSmiOrInteger32()) {
2816 if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
2817 if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);
2819 rep = Representation::Double();
2822 if (rep.IsDouble()) {
2823 // According to the ES5 spec (11.9.3, 11.8.5), Equality comparisons (==, ===
2824 // and !=) have special handling of undefined, e.g. undefined == undefined
2825 // is 'true'. Relational comparisons have a different semantic, first
2826 // calling ToPrimitive() on their arguments. The standard Crankshaft
2827 // tagged-to-double conversion to ensure the HCompareNumericAndBranch's
2828 // inputs are doubles caused 'undefined' to be converted to NaN. That's
2829 // compatible out-of-the box with ordered relational comparisons (<, >, <=,
2830 // >=). However, for equality comparisons (and for 'in' and 'instanceof'),
2831 // it is not consistent with the spec. For example, it would cause undefined
2832 // == undefined (should be true) to be evaluated as NaN == NaN
2833 // (false). Therefore, any comparisons other than ordered relational
2834 // comparisons must cause a deopt when one of their arguments is undefined.
2836 if (Token::IsOrderedRelationalCompareOp(token_)) {
2837 SetFlag(kAllowUndefinedAsNaN);
2840 ChangeRepresentation(rep);
// Debug printer: a parameter is identified by its index.
2844 void HParameter::PrintDataTo(StringStream* stream) {
2845 stream->Add("%u", index());
// Debug printer: object, field access, and optional type-check input.
2849 void HLoadNamedField::PrintDataTo(StringStream* stream) {
2850 object()->PrintNameTo(stream);
2851 access_.PrintTo(stream);
2852 if (HasTypeCheck()) {
2854 typecheck()->PrintNameTo(stream);
// Factory for a single-map check.  When the map allows omitting checks and
// the value is a constant already known to be an instance of that map, the
// check is marked omittable (dependencies registered via 'info').
2859 HCheckMaps* HCheckMaps::New(Zone* zone,
2863 CompilationInfo* info,
2864 HValue* typecheck) {
2865 HCheckMaps* check_map = new(zone) HCheckMaps(value, zone, typecheck);
2866 check_map->Add(map, zone);
2867 if (map->CanOmitMapChecks() &&
2868 value->IsConstant() &&
2869 HConstant::cast(value)->InstanceOf(map)) {
2870 check_map->omit(info);
// Lazily snapshots the map set into UniqueValueIds; idempotent.
2876 void HCheckMaps::FinalizeUniqueValueId() {
2877 if (!map_unique_ids_.is_empty()) return;
2878 Zone* zone = block()->zone();
2879 map_unique_ids_.Initialize(map_set_.length(), zone);
2880 for (int i = 0; i < map_set_.length(); i++) {
2881 map_unique_ids_.Add(UniqueValueId(map_set_.at(i)), zone);
// Debug printer: object plus the property name being loaded.
2886 void HLoadNamedGeneric::PrintDataTo(StringStream* stream) {
2887 object()->PrintNameTo(stream);
2889 stream->Add(*String::cast(*name())->ToCString());
// Debug printer: elements (with kind annotation for external arrays), key,
// dehoisting offset, optional dependency and hole-check markers.
2893 void HLoadKeyed::PrintDataTo(StringStream* stream) {
2894 if (!is_external()) {
2895 elements()->PrintNameTo(stream);
2897 ASSERT(elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND &&
2898 elements_kind() <= LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
2899 elements()->PrintNameTo(stream);
2901 stream->Add(ElementsKindToString(elements_kind()));
2905 key()->PrintNameTo(stream);
2906 if (IsDehoisted()) {
2907 stream->Add(" + %d]", index_offset());
2912 if (HasDependency()) {
2914 dependency()->PrintNameTo(stream);
2917 if (RequiresHoleCheck()) {
2918 stream->Add(" check_hole");
// Whether every use of this load can tolerate receiving the hole value.
// Packed and external kinds never produce holes; ALLOW_RETURN_HOLE mode on
// double arrays requires all uses to treat the hole (NaN) as undefined;
// otherwise only tagged loads whose uses are all HChange qualify.
// NOTE(review): several return statements are elided in this excerpt.
2923 bool HLoadKeyed::UsesMustHandleHole() const {
2924 if (IsFastPackedElementsKind(elements_kind())) {
2928 if (IsExternalArrayElementsKind(elements_kind())) {
2932 if (hole_mode() == ALLOW_RETURN_HOLE) {
2933 if (IsFastDoubleElementsKind(elements_kind())) {
2934 return AllUsesCanTreatHoleAsNaN();
2939 if (IsFastDoubleElementsKind(elements_kind())) {
2943 // Holes are only returned as tagged values.
2944 if (!representation().IsTagged()) {
2948 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
2949 HValue* use = it.value();
2950 if (!use->IsChange()) return false;
// Double-array holes read back as NaN, so every use must accept
// undefined-as-NaN for the hole to be returned directly.
2957 bool HLoadKeyed::AllUsesCanTreatHoleAsNaN() const {
2958 return IsFastDoubleElementsKind(elements_kind()) &&
2959 CheckUsesForFlag(HValue::kAllowUndefinedAsNaN);
// A hole check is needed unless the kind cannot contain holes or every use
// handles the hole itself.
2963 bool HLoadKeyed::RequiresHoleCheck() const {
2964 if (IsFastPackedElementsKind(elements_kind())) {
2968 if (IsExternalArrayElementsKind(elements_kind())) {
2972 return !UsesMustHandleHole();
// Debug printer: object[key].
2976 void HLoadKeyedGeneric::PrintDataTo(StringStream* stream) {
2977 object()->PrintNameTo(stream);
2979 key()->PrintNameTo(stream);
// Rewrites a generic keyed load whose key comes from a for-in enum-cache
// array over the same enumerable into a map check + index-cache load +
// fast HLoadFieldByIndex, all inserted before this instruction.
2984 HValue* HLoadKeyedGeneric::Canonicalize() {
2985 // Recognize generic keyed loads that use property name generated
2986 // by for-in statement as a key and rewrite them into fast property load
2988 if (key()->IsLoadKeyed()) {
2989 HLoadKeyed* key_load = HLoadKeyed::cast(key());
2990 if (key_load->elements()->IsForInCacheArray()) {
2991 HForInCacheArray* names_cache =
2992 HForInCacheArray::cast(key_load->elements());
// Only safe when the cache was built for this very receiver.
2994 if (names_cache->enumerable() == object()) {
2995 HForInCacheArray* index_cache =
2996 names_cache->index_cache();
2997 HCheckMapValue* map_check =
2998 HCheckMapValue::New(block()->graph()->zone(),
2999 block()->graph()->GetInvalidContext(),
3001 names_cache->map());
3002 HInstruction* index = HLoadKeyed::New(
3003 block()->graph()->zone(),
3004 block()->graph()->GetInvalidContext(),
3008 key_load->elements_kind());
3009 map_check->InsertBefore(this);
3010 index->InsertBefore(this);
3011 HLoadFieldByIndex* load = new(block()->zone()) HLoadFieldByIndex(
3013 load->InsertBefore(this);
// Debug printers for the store instructions: receiver/field/key plus the
// stored value, with extra annotations (write barrier, transition map,
// elements kind, dehoisting offset) where applicable.
3023 void HStoreNamedGeneric::PrintDataTo(StringStream* stream) {
3024 object()->PrintNameTo(stream);
3026 ASSERT(name()->IsString());
3027 stream->Add(*String::cast(*name())->ToCString());
3029 value()->PrintNameTo(stream);
3033 void HStoreNamedField::PrintDataTo(StringStream* stream) {
3034 object()->PrintNameTo(stream);
3035 access_.PrintTo(stream);
3037 value()->PrintNameTo(stream);
3038 if (NeedsWriteBarrier()) {
3039 stream->Add(" (write-barrier)");
3041 if (has_transition()) {
3042 stream->Add(" (transition map %p)", *transition_map());
3047 void HStoreKeyed::PrintDataTo(StringStream* stream) {
3048 if (!is_external()) {
3049 elements()->PrintNameTo(stream);
3051 elements()->PrintNameTo(stream);
3053 stream->Add(ElementsKindToString(elements_kind()));
3054 ASSERT(elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND &&
3055 elements_kind() <= LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
3059 key()->PrintNameTo(stream);
3060 if (IsDehoisted()) {
3061 stream->Add(" + %d] = ", index_offset());
3063 stream->Add("] = ");
3066 value()->PrintNameTo(stream);
3070 void HStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
3071 object()->PrintNameTo(stream);
3073 key()->PrintNameTo(stream);
3074 stream->Add("] = ");
3075 value()->PrintNameTo(stream);
// Debug printer: "obj %p [kind] -> %p [kind]" for the elements-kind
// transition's source and target maps.
3079 void HTransitionElementsKind::PrintDataTo(StringStream* stream) {
3080 object()->PrintNameTo(stream);
3081 ElementsKind from_kind = original_map()->elements_kind();
3082 ElementsKind to_kind = transitioned_map()->elements_kind();
3083 stream->Add(" %p [%s] -> %p [%s]",
3085 ElementsAccessor::ForKind(from_kind)->name(),
3086 *transitioned_map(),
3087 ElementsAccessor::ForKind(to_kind)->name());
// Debug printer for global cell loads, with property-detail annotations.
3091 void HLoadGlobalCell::PrintDataTo(StringStream* stream) {
3092 stream->Add("[%p]", *cell());
3093 if (!details_.IsDontDelete()) stream->Add(" (deleteable)");
3094 if (details_.IsReadOnly()) stream->Add(" (read-only)");
// A non-deletable, writable cell can never hold the hole; otherwise a hole
// check is required as soon as any use is not an HChange.
3098 bool HLoadGlobalCell::RequiresHoleCheck() const {
3099 if (details_.IsDontDelete() && !details_.IsReadOnly()) return false;
3100 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
3101 HValue* use = it.value();
3102 if (!use->IsChange()) return true;
3108 void HLoadGlobalGeneric::PrintDataTo(StringStream* stream) {
3109 stream->Add("%o ", *name());
// Debug printer: base object plus byte offset of the inner allocation.
3113 void HInnerAllocatedObject::PrintDataTo(StringStream* stream) {
3114 base_object()->PrintNameTo(stream);
3115 stream->Add(" offset %d", offset());
3119 void HStoreGlobalCell::PrintDataTo(StringStream* stream) {
3120 stream->Add("[%p] = ", *cell());
3121 value()->PrintNameTo(stream);
3122 if (!details_.IsDontDelete()) stream->Add(" (deleteable)");
3123 if (details_.IsReadOnly()) stream->Add(" (read-only)");
3127 void HStoreGlobalGeneric::PrintDataTo(StringStream* stream) {
3128 stream->Add("%o = ", *name());
3129 value()->PrintNameTo(stream);
3133 void HLoadContextSlot::PrintDataTo(StringStream* stream) {
3134 value()->PrintNameTo(stream);
3135 stream->Add("[%d]", slot_index());
3139 void HStoreContextSlot::PrintDataTo(StringStream* stream) {
3140 context()->PrintNameTo(stream);
3141 stream->Add("[%d] = ", slot_index());
3142 value()->PrintNameTo(stream);
3146 // Implementation of type inference and type conversions. Calculates
3147 // the inferred type of this instruction based on the input operands.
3149 HType HValue::CalculateInferredType() {
// A phi's type is the pointwise Combine of all operand types; a phi with no
// operands is conservatively Tagged.
3154 HType HPhi::CalculateInferredType() {
3155 if (OperandCount() == 0) return HType::Tagged();
3156 HType result = OperandAt(0)->type();
3157 for (int i = 1; i < OperandCount(); ++i) {
3158 HType current = OperandAt(i)->type();
3159 result = result.Combine(current);
// Boxing a double always yields a HeapNumber; other changes fall through
// (elided here) to the default.
3165 HType HChange::CalculateInferredType() {
3166 if (from().IsDouble() && to().IsTagged()) return HType::HeapNumber();
// Unary math ops generalize their representation with the (non-tagged)
// input representation.
3171 Representation HUnaryMathOperation::RepresentationFromInputs() {
3172 Representation rep = representation();
3173 // If any of the actual input representation is more general than what we
3174 // have so far but not Tagged, use that representation instead.
3175 Representation input_rep = value()->representation();
3176 if (!input_rep.IsTagged()) {
3177 rep = rep.generalize(input_rep);
// Allocation folding: when this allocation is dominated by another new-space
// allocation with a constant size, grow the dominator by this allocation's
// (constant) size and replace this instruction with an HInnerAllocatedObject
// pointing into the dominator's reservation.  Bails out (with optional
// tracing) whenever any precondition fails.
3183 void HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
3184 HValue* dominator) {
3185 ASSERT(side_effect == kChangesNewSpacePromotion);
3186 if (!FLAG_use_allocation_folding) return;
3188 // Try to fold allocations together with their dominating allocations.
3189 if (!dominator->IsAllocate()) {
3190 if (FLAG_trace_allocation_folding) {
3191 PrintF("#%d (%s) cannot fold into #%d (%s)\n",
3192 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3197 HAllocate* dominator_allocate_instr = HAllocate::cast(dominator);
3198 HValue* dominator_size = dominator_allocate_instr->size();
3199 HValue* current_size = size();
3200 // We can just fold allocations that are guaranteed in new space.
3201 // TODO(hpayer): Add support for non-constant allocation in dominator.
3202 if (!IsNewSpaceAllocation() || !current_size->IsInteger32Constant() ||
3203 !dominator_allocate_instr->IsNewSpaceAllocation() ||
3204 !dominator_size->IsInteger32Constant()) {
3205 if (FLAG_trace_allocation_folding) {
3206 PrintF("#%d (%s) cannot fold into #%d (%s)\n",
3207 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3212 // First update the size of the dominator allocate instruction.
3213 int32_t dominator_size_constant =
3214 HConstant::cast(dominator_size)->GetInteger32Constant();
3215 int32_t current_size_constant =
3216 HConstant::cast(current_size)->GetInteger32Constant();
3217 int32_t new_dominator_size = dominator_size_constant + current_size_constant;
// Double alignment: pad the dominator so this object's start is aligned.
3219 if (MustAllocateDoubleAligned()) {
3220 if (!dominator_allocate_instr->MustAllocateDoubleAligned()) {
3221 dominator_allocate_instr->MakeDoubleAligned();
3223 if ((dominator_size_constant & kDoubleAlignmentMask) != 0) {
3224 dominator_size_constant += kDoubleSize / 2;
3225 new_dominator_size += kDoubleSize / 2;
// A folded allocation must still fit in a single new-space page.
3229 if (new_dominator_size > Page::kMaxNonCodeHeapObjectSize) {
3230 if (FLAG_trace_allocation_folding) {
3231 PrintF("#%d (%s) cannot fold into #%d (%s) due to size: %d\n",
3232 id(), Mnemonic(), dominator->id(), dominator->Mnemonic(),
3233 new_dominator_size);
3237 HBasicBlock* block = dominator->block();
3238 Zone* zone = block->zone();
3239 HInstruction* new_dominator_size_constant =
3240 HConstant::New(zone, context(), new_dominator_size);
3241 new_dominator_size_constant->InsertBefore(dominator_allocate_instr);
3242 dominator_allocate_instr->UpdateSize(new_dominator_size_constant);
// Under --verify-heap, pre-fill the grown region with filler objects so the
// heap stays iterable between the two initializations.
3245 if (FLAG_verify_heap) {
3246 dominator_allocate_instr->MakePrefillWithFiller();
3250 // After that replace the dominated allocate instruction.
3251 HInstruction* dominated_allocate_instr =
3252 HInnerAllocatedObject::New(zone,
3254 dominator_allocate_instr,
3255 dominator_size_constant,
3257 dominated_allocate_instr->InsertBefore(this);
3258 DeleteAndReplaceWith(dominated_allocate_instr);
3259 if (FLAG_trace_allocation_folding) {
3260 PrintF("#%d (%s) folded into #%d (%s)\n",
3261 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
// Debug printer: size plus single-letter space/alignment/filler flags.
3266 void HAllocate::PrintDataTo(StringStream* stream) {
3267 size()->PrintNameTo(stream);
3269 if (IsNewSpaceAllocation()) stream->Add("N");
3270 if (IsOldPointerSpaceAllocation()) stream->Add("P");
3271 if (IsOldDataSpaceAllocation()) stream->Add("D");
3272 if (MustAllocateDoubleAligned()) stream->Add("A");
3273 if (MustPrefillWithFiller()) stream->Add("F");
// The EnsureAndPropagateNotMinusZero family: each instruction decides
// whether it must bail out on a -0 result and/or which operand the -0
// obligation propagates to.  Return values (the operand to continue the
// walk on, or NULL to stop) are partially elided in this excerpt.
3278 HValue* HUnaryMathOperation::EnsureAndPropagateNotMinusZero(
3279 BitVector* visited) {
3281 if (representation().IsSmiOrInteger32() &&
3282 !value()->representation().Equals(representation())) {
3283 if (value()->range() == NULL || value()->range()->CanBeMinusZero()) {
3284 SetFlag(kBailoutOnMinusZero);
3287 if (RequiredInputRepresentation(0).IsSmiOrInteger32() &&
3288 representation().Equals(RequiredInputRepresentation(0))) {
3295 HValue* HChange::EnsureAndPropagateNotMinusZero(BitVector* visited) {
// Int32-sourced or truncating changes cannot produce -0.
3297 if (from().IsSmiOrInteger32()) return NULL;
3298 if (CanTruncateToInt32()) return NULL;
3299 if (value()->range() == NULL || value()->range()->CanBeMinusZero()) {
3300 SetFlag(kBailoutOnMinusZero);
3302 ASSERT(!from().IsSmiOrInteger32() || !to().IsSmiOrInteger32());
3307 HValue* HForceRepresentation::EnsureAndPropagateNotMinusZero(
3308 BitVector* visited) {
3314 HValue* HMod::EnsureAndPropagateNotMinusZero(BitVector* visited) {
3316 if (range() == NULL || range()->CanBeMinusZero()) {
3317 SetFlag(kBailoutOnMinusZero);
3324 HValue* HDiv::EnsureAndPropagateNotMinusZero(BitVector* visited) {
3326 if (range() == NULL || range()->CanBeMinusZero()) {
3327 SetFlag(kBailoutOnMinusZero);
3333 HValue* HMathFloorOfDiv::EnsureAndPropagateNotMinusZero(BitVector* visited) {
3335 SetFlag(kBailoutOnMinusZero);
3340 HValue* HMul::EnsureAndPropagateNotMinusZero(BitVector* visited) {
3342 if (range() == NULL || range()->CanBeMinusZero()) {
3343 SetFlag(kBailoutOnMinusZero);
3349 HValue* HSub::EnsureAndPropagateNotMinusZero(BitVector* visited) {
3351 // Propagate to the left argument. If the left argument cannot be -0, then
3352 // the result of the add operation cannot be either.
3353 if (range() == NULL || range()->CanBeMinusZero()) {
3360 HValue* HAdd::EnsureAndPropagateNotMinusZero(BitVector* visited) {
3362 // Propagate to the left argument. If the left argument cannot be -0, then
3363 // the result of the sub operation cannot be either.
3364 if (range() == NULL || range()->CanBeMinusZero()) {
// Whether the stored value must be NaN-canonicalized before going into a
// double array.  Values provably produced as int32/smi (constants, changes
// from small integers) can't be signaling NaNs; only loads from external
// float/double arrays can carry arbitrary NaN bit patterns.
3371 bool HStoreKeyed::NeedsCanonicalization() {
3372 // If value is an integer or smi or comes from the result of a keyed load or
3373 // constant then it is either be a non-hole value or in the case of a constant
3374 // the hole is only being stored explicitly: no need for canonicalization.
3376 // The exception to that is keyed loads from external float or double arrays:
3377 // these can load arbitrary representation of NaN.
3379 if (value()->IsConstant()) {
3383 if (value()->IsLoadKeyed()) {
3384 return IsExternalFloatOrDoubleElementsKind(
3385 HLoadKeyed::cast(value())->elements_kind());
3388 if (value()->IsChange()) {
3389 if (HChange::cast(value())->from().IsSmiOrInteger32()) {
3392 if (HChange::cast(value())->value()->type().IsSmi()) {
// Constant-folding helpers: wrap a C++ value as an int32 or double HConstant
// in the current zone/context.
3400 #define H_CONSTANT_INT(val) \
3401 HConstant::New(zone, context, static_cast<int32_t>(val))
3402 #define H_CONSTANT_DOUBLE(val) \
3403 HConstant::New(zone, context, static_cast<double>(val))
// Generates HInstr::New factories for +, * and - that fold two numeric
// constants at graph-build time, preferring an int32 constant when the
// double result is exactly representable as int32.
3405 #define DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HInstr, op) \
3406 HInstruction* HInstr::New( \
3407 Zone* zone, HValue* context, HValue* left, HValue* right) { \
3408 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \
3409 HConstant* c_left = HConstant::cast(left); \
3410 HConstant* c_right = HConstant::cast(right); \
3411 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \
3412 double double_res = c_left->DoubleValue() op c_right->DoubleValue(); \
3413 if (TypeInfo::IsInt32Double(double_res)) { \
3414 return H_CONSTANT_INT(double_res); \
3416 return H_CONSTANT_DOUBLE(double_res); \
3419 return new(zone) HInstr(context, left, right); \
3423 DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HAdd, +)
3424 DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HMul, *)
3425 DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HSub, -)
3427 #undef DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR
// Factory for string concatenation; folds two string constants into a flat
// concatenated string constant when constant folding is enabled.
3430 HInstruction* HStringAdd::New(Zone* zone,
3434 StringAddFlags flags) {
3435 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3436 HConstant* c_right = HConstant::cast(right);
3437 HConstant* c_left = HConstant::cast(left);
3438 if (c_left->HasStringValue() && c_right->HasStringValue()) {
3439 Handle<String> concat = zone->isolate()->factory()->NewFlatConcatString(
3440 c_left->StringValue(), c_right->StringValue());
3441 return HConstant::New(zone, context, concat);
3444 return new(zone) HStringAdd(context, left, right, flags);
// Factory for String.fromCharCode; folds a constant numeric code (masked to
// 16 bits, matching the runtime) into the single-character string, and a
// non-finite code into the empty string.
3448 HInstruction* HStringCharFromCode::New(
3449 Zone* zone, HValue* context, HValue* char_code) {
3450 if (FLAG_fold_constants && char_code->IsConstant()) {
3451 HConstant* c_code = HConstant::cast(char_code);
3452 Isolate* isolate = Isolate::Current();
3453 if (c_code->HasNumberValue()) {
3454 if (std::isfinite(c_code->DoubleValue())) {
3455 uint32_t code = c_code->NumberValueAsInteger32() & 0xffff;
3456 return HConstant::New(zone, context,
3457 LookupSingleCharacterStringFromCode(isolate, code));
3459 return HConstant::New(zone, context, isolate->factory()->empty_string());
3462 return new(zone) HStringCharFromCode(context, char_code);
// Factory for unary math operations.  Folds a constant numeric operand per
// op: NaN poisons everything; infinities, sign-sensitive ops (abs, round,
// floor) and the fast transcendental helpers each get dedicated constant
// results.  The switch/case scaffolding is partially elided in this excerpt.
3466 HInstruction* HUnaryMathOperation::New(
3467 Zone* zone, HValue* context, HValue* value, BuiltinFunctionId op) {
3469 if (!FLAG_fold_constants) break;
3470 if (!value->IsConstant()) break;
3471 HConstant* constant = HConstant::cast(value);
3472 if (!constant->HasNumberValue()) break;
3473 double d = constant->DoubleValue();
3474 if (std::isnan(d)) { // NaN poisons everything.
3475 return H_CONSTANT_DOUBLE(OS::nan_value());
3477 if (std::isinf(d)) { // +Infinity and -Infinity.
3482 return H_CONSTANT_DOUBLE(OS::nan_value());
3484 return H_CONSTANT_DOUBLE((d > 0.0) ? d : 0.0);
3487 return H_CONSTANT_DOUBLE((d > 0.0) ? d : OS::nan_value());
3490 return H_CONSTANT_DOUBLE((d > 0.0) ? d : -d);
3493 return H_CONSTANT_DOUBLE(d);
3501 return H_CONSTANT_DOUBLE(fast_sin(d));
3503 return H_CONSTANT_DOUBLE(fast_cos(d));
3505 return H_CONSTANT_DOUBLE(fast_tan(d));
3507 return H_CONSTANT_DOUBLE(fast_exp(d));
3509 return H_CONSTANT_DOUBLE(fast_log(d));
3511 return H_CONSTANT_DOUBLE(fast_sqrt(d));
3513 return H_CONSTANT_DOUBLE(power_double_double(d, 0.5));
// abs: d + 0.0 normalizes -0 to +0 for non-negative inputs.
3515 return H_CONSTANT_DOUBLE((d >= 0.0) ? d + 0.0 : -d);
3517 // -0.5 .. -0.0 round to -0.0.
3518 if ((d >= -0.5 && Double(d).Sign() < 0)) return H_CONSTANT_DOUBLE(-0.0);
3519 // Doubles are represented as Significant * 2 ^ Exponent. If the
3520 // Exponent is not negative, the double value is already an integer.
3521 if (Double(d).Exponent() >= 0) return H_CONSTANT_DOUBLE(d);
3522 return H_CONSTANT_DOUBLE(floor(d + 0.5));
3524 return H_CONSTANT_DOUBLE(floor(d));
3530 return new(zone) HUnaryMathOperation(context, value, op);
// Factory for Math.pow; folds two numeric constants via power_helper,
// normalizing NaN results to the canonical NaN.
3534 HInstruction* HPower::New(Zone* zone,
3538 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3539 HConstant* c_left = HConstant::cast(left);
3540 HConstant* c_right = HConstant::cast(right);
3541 if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
3542 double result = power_helper(c_left->DoubleValue(),
3543 c_right->DoubleValue());
3544 return H_CONSTANT_DOUBLE(std::isnan(result) ? OS::nan_value() : result);
3547 return new(zone) HPower(left, right);
// Factory for Math.min/max; folds two numeric constants, using the sign bit
// to distinguish +0 from -0 when the values compare equal (min picks -0,
// max picks +0).  If all comparisons fail, an operand is NaN.
3551 HInstruction* HMathMinMax::New(
3552 Zone* zone, HValue* context, HValue* left, HValue* right, Operation op) {
3553 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3554 HConstant* c_left = HConstant::cast(left);
3555 HConstant* c_right = HConstant::cast(right);
3556 if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
3557 double d_left = c_left->DoubleValue();
3558 double d_right = c_right->DoubleValue();
3559 if (op == kMathMin) {
3560 if (d_left > d_right) return H_CONSTANT_DOUBLE(d_right);
3561 if (d_left < d_right) return H_CONSTANT_DOUBLE(d_left);
3562 if (d_left == d_right) {
3563 // Handle +0 and -0.
3564 return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_left
3568 if (d_left < d_right) return H_CONSTANT_DOUBLE(d_right);
3569 if (d_left > d_right) return H_CONSTANT_DOUBLE(d_left);
3570 if (d_left == d_right) {
3571 // Handle +0 and -0.
3572 return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_right
3576 // All comparisons failed, must be NaN.
3577 return H_CONSTANT_DOUBLE(OS::nan_value());
3580 return new(zone) HMathMinMax(context, left, right, op);
// Factory for the modulo operator. Folds constant int32 operands, with two
// special cases that must produce a double -0.0 rather than an int result:
//   * kMinInt % -1 (the C++ expression would overflow / be UB), and
//   * a zero remainder with a negative dividend (JS requires -0 here).
// NOTE(review): lines 3585-3587 (remaining parameters), 3597-3598 (likely
// the divisor == 0 guard) and closing braces are elided in this listing.
3584 HInstruction* HMod::New(Zone* zone,
3588                         Maybe<int> fixed_right_arg) {
3589   if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3590     HConstant* c_left = HConstant::cast(left);
3591     HConstant* c_right = HConstant::cast(right);
3592     if (c_left->HasInteger32Value() && c_right->HasInteger32Value()) {
3593       int32_t dividend = c_left->Integer32Value();
3594       int32_t divisor = c_right->Integer32Value();
3595       if (dividend == kMinInt && divisor == -1) {
3596         return H_CONSTANT_DOUBLE(-0.0);
3599         int32_t res = dividend % divisor;
3600         if ((res == 0) && (dividend < 0)) {
3601           return H_CONSTANT_DOUBLE(-0.0);
3603         return H_CONSTANT_INT(res);
3607   return new(zone) HMod(context, left, right, fixed_right_arg);
// Factory for division. For constant number operands: a nonzero divisor
// folds to an int constant when the quotient is an exact int32, otherwise a
// double constant; a zero divisor folds to +/-Infinity with the sign taken
// from the product of the operand signs (sign-bit based, so -0 divisors are
// handled correctly).
3611 HInstruction* HDiv::New(
3612     Zone* zone, HValue* context, HValue* left, HValue* right) {
3613   // If left and right are constant values, try to return a constant value.
3614   if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3615     HConstant* c_left = HConstant::cast(left);
3616     HConstant* c_right = HConstant::cast(right);
3617     if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
3618       if (c_right->DoubleValue() != 0) {
3619         double double_res = c_left->DoubleValue() / c_right->DoubleValue();
3620         if (TypeInfo::IsInt32Double(double_res)) {
3621           return H_CONSTANT_INT(double_res);
3623         return H_CONSTANT_DOUBLE(double_res);
3625         int sign = Double(c_left->DoubleValue()).Sign() *
3626                    Double(c_right->DoubleValue()).Sign();  // Right could be -0.
3627         return H_CONSTANT_DOUBLE(sign * V8_INFINITY);
3631   return new(zone) HDiv(context, left, right);
// Factory for bitwise AND/OR/XOR. Constant number operands are truncated to
// int32 (NumberValueAsInteger32) and folded to an int constant; otherwise an
// HBitwise node is emitted.
// NOTE(review): the `int32_t result;` declaration, `switch (op)` header,
// `break`s and the BIT_OR/default case labels are elided in this listing.
3635 HInstruction* HBitwise::New(
3636     Zone* zone, HValue* context, Token::Value op, HValue* left, HValue* right) {
3637   if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3638     HConstant* c_left = HConstant::cast(left);
3639     HConstant* c_right = HConstant::cast(right);
3640     if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
3642       int32_t v_left = c_left->NumberValueAsInteger32();
3643       int32_t v_right = c_right->NumberValueAsInteger32();
3645         case Token::BIT_XOR:
3646           result = v_left ^ v_right;
3648         case Token::BIT_AND:
3649           result = v_left & v_right;
3652           result = v_left | v_right;
3655           result = 0;  // Please the compiler.
3658       return H_CONSTANT_INT(result);
3661   return new(zone) HBitwise(context, op, left, right);
// Generates a constant-folding New() factory for a shift instruction: when
// both operands are constant numbers, evaluates `result` (which references
// c_left/c_right) and returns an int constant; otherwise emits the node.
// Instantiated below for HSar (arithmetic shift right) and HShl (shift
// left); the shift count is masked with 0x1f per JS semantics.
// NOTE(review): no comments may be inserted inside or directly after the
// macro body -- every line ends in a backslash continuation, so an inserted
// line would be spliced into the macro. Lines 3673-3677 are also elided.
3665 #define DEFINE_NEW_H_BITWISE_INSTR(HInstr, result)                          \
3666 HInstruction* HInstr::New(                                                   \
3667     Zone* zone, HValue* context, HValue* left, HValue* right) {              \
3668   if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {    \
3669     HConstant* c_left = HConstant::cast(left);                               \
3670     HConstant* c_right = HConstant::cast(right);                             \
3671     if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {           \
3672       return H_CONSTANT_INT(result);                                         \
3675   return new(zone) HInstr(context, left, right);                             \
3679 DEFINE_NEW_H_BITWISE_INSTR(HSar,
3680 c_left->NumberValueAsInteger32() >> (c_right->NumberValueAsInteger32() & 0x1f))
3681 DEFINE_NEW_H_BITWISE_INSTR(HShl,
3682 c_left->NumberValueAsInteger32() << (c_right->NumberValueAsInteger32() & 0x1f))
3684 #undef DEFINE_NEW_H_BITWISE_INSTR
// Factory for unsigned shift right (>>>). Defined by hand rather than via
// the macro above because of the zero-shift special case: a zero shift of a
// negative int32 yields a value above kMaxInt, which must be represented as
// a double constant (via the uint32 reinterpretation), not an int constant.
3687 HInstruction* HShr::New(
3688     Zone* zone, HValue* context, HValue* left, HValue* right) {
3689   if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3690     HConstant* c_left = HConstant::cast(left);
3691     HConstant* c_right = HConstant::cast(right);
3692     if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
3693       int32_t left_val = c_left->NumberValueAsInteger32();
3694       int32_t right_val = c_right->NumberValueAsInteger32() & 0x1f;
3695       if ((right_val == 0) && (left_val < 0)) {
3696         return H_CONSTANT_DOUBLE(static_cast<uint32_t>(left_val));
3698       return H_CONSTANT_INT(static_cast<uint32_t>(left_val) >> right_val);
3701   return new(zone) HShr(context, left, right);
3705 #undef H_CONSTANT_INT
3706 #undef H_CONSTANT_DOUBLE
// Debug printing: emits the token name of the bitwise operator, then
// delegates to the base class for the common operand output.
3709 void HBitwise::PrintDataTo(StringStream* stream) {
3710   stream->Add(Token::Name(op_));
3712   HBitwiseBinaryOperation::PrintDataTo(stream);
// If every use of this phi truncates to int32 and every operand is a
// constant, rewrites the operands in place as int32 constants (doubles via
// DoubleToInt32, booleans via the canonical 0/1 constants, other immortal
// immovable constants as 0), then forces the observed input representation
// of binary-operation uses to Smi, since the previously observed values
// were likely Tagged.
3716 void HPhi::SimplifyConstantInputs() {
3717   // Convert constant inputs to integers when all uses are truncating.
3718   // This must happen before representation inference takes place.
3719   if (!CheckUsesForFlag(kTruncatingToInt32)) return;
3720   for (int i = 0; i < OperandCount(); ++i) {
3721     if (!OperandAt(i)->IsConstant()) return;
3723   HGraph* graph = block()->graph();
3724   for (int i = 0; i < OperandCount(); ++i) {
3725     HConstant* operand = HConstant::cast(OperandAt(i));
3726     if (operand->HasInteger32Value()) {
3728     } else if (operand->HasDoubleValue()) {
3729       HConstant* integer_input =
3730           HConstant::New(graph->zone(), graph->GetInvalidContext(),
3731                          DoubleToInt32(operand->DoubleValue()));
3732       integer_input->InsertAfter(operand);
3733       SetOperandAt(i, integer_input);
3734     } else if (operand->HasBooleanValue()) {
3735       SetOperandAt(i, operand->BooleanValue() ? graph->GetConstant1()
3736                                               : graph->GetConstant0());
3737     } else if (operand->ImmortalImmovable()) {
3738       SetOperandAt(i, graph->GetConstant0());
3741   // Overwrite observed input representations because they are likely Tagged.
3742   for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
3743     HValue* use = it.value();
3744     if (use->IsBinaryOperation()) {
3745       HBinaryOperation::cast(use)->set_observed_input_representation(
3746           it.index(), Representation::Smi());
// Representation inference for phis: successively generalizes this phi's
// representation from its inputs, then from its uses, then from hard use
// requirements, logging the reason string with each update.
3752 void HPhi::InferRepresentation(HInferRepresentationPhase* h_infer) {
3753   ASSERT(CheckFlag(kFlexibleRepresentation));
3754   Representation new_rep = RepresentationFromInputs();
3755   UpdateRepresentation(new_rep, h_infer, "inputs");
3756   new_rep = RepresentationFromUses();
3757   UpdateRepresentation(new_rep, h_infer, "uses");
3758   new_rep = RepresentationFromUseRequirements();
3759   UpdateRepresentation(new_rep, h_infer, "use requirements");
// Computes the join (generalization) of the known optimal representations
// of all operands, starting from None.
// NOTE(review): the `return r;` / closing lines are elided in this listing.
3763 Representation HPhi::RepresentationFromInputs() {
3764   Representation r = Representation::None();
3765   for (int i = 0; i < OperandCount(); ++i) {
3766     r = r.generalize(OperandAt(i)->KnownOptimalRepresentation());
3772 // Returns a representation if all uses agree on the same representation.
3773 // Integer32 is also returned when some uses are Smi but others are Integer32.
3774 Representation HValue::RepresentationFromUseRequirements() {
3775   Representation rep = Representation::None();
3776   for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
3777     // We check for observed_input_representation elsewhere.
3778     Representation use_rep =
3779         it.value()->RequiredInputRepresentation(it.index());
         // Skip uses with no requirement; a use matching the current
         // candidate keeps it; a Smi/Integer32 mix widens to Integer32.
3784     if (use_rep.IsNone() || rep.Equals(use_rep)) continue;
3785     if (rep.generalize(use_rep).IsInteger32()) {
3786       rep = Representation::Integer32();
         // Conflicting requirements: no single representation satisfies all
         // uses, so report None.
3789     return Representation::None();
// Scans all uses for one whose required input representation is neither
// None, (presumably) Smi, nor Tagged -- i.e. a use that forces a non-Smi
// value.  NOTE(review): line 3801 (likely the `!use_rep.IsSmi() &&` clause)
// and the function's return statements are elided in this listing.
3795 bool HValue::HasNonSmiUse() {
3796   for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
3797     // We check for observed_input_representation elsewhere.
3798     Representation use_rep =
3799         it.value()->RequiredInputRepresentation(it.index());
3800     if (!use_rep.IsNone() &&
3802         !use_rep.IsTagged()) {
3810 // Node-specific verification code is only included in debug mode.
// NOTE(review): the #ifdef DEBUG guard (around line 3811-3812) is elided
// from this listing; these Verify() bodies are debug-only.
// Checks the phi invariant: one operand per predecessor, and each operand's
// defining block is the corresponding predecessor or dominates it.
3813 void HPhi::Verify() {
3814   ASSERT(OperandCount() == block()->predecessors()->length());
3815   for (int i = 0; i < OperandCount(); ++i) {
3816     HValue* value = OperandAt(i);
3817     HBasicBlock* defining_block = value->block();
3818     HBasicBlock* predecessor_block = block()->predecessors()->at(i);
3819     ASSERT(defining_block == predecessor_block ||
3820            defining_block->Dominates(predecessor_block));
// Debug-only verification; delegates to the generic instruction checks.
// NOTE(review): additional body lines (3827+) are elided in this listing.
3825 void HSimulate::Verify() {
3826   HInstruction::Verify();
// Debug-only verification: generic checks plus the invariant that a heap
// object check has no uses (it exists only for its deopt side effect).
3831 void HCheckHeapObject::Verify() {
3832   HInstruction::Verify();
3833   ASSERT(HasNoUses());
// Debug-only verification: generic checks plus the invariant that a
// function check has no uses.
3837 void HCheckFunction::Verify() {
3838   HInstruction::Verify();
3839   ASSERT(HasNoUses());
// Access descriptor for a field inside a FixedArray header. The length
// field gets its dedicated descriptor (so GVN can track array lengths);
// any other header offset is a plain in-object access.
3845 HObjectAccess HObjectAccess::ForFixedArrayHeader(int offset) {
3846   ASSERT(offset >= 0);
3847   ASSERT(offset < FixedArray::kHeaderSize);
3848   if (offset == FixedArray::kLengthOffset) return ForFixedArrayLength();
3849   return HObjectAccess(kInobject, offset);
// Access descriptor for a JSObject field at a raw offset, classifying the
// well-known offsets (elements pointer, map) into their dedicated portions
// so GVN dependencies are tracked precisely.
// NOTE(review): lines 3861-3862 (presumably `portion = kMaps;` and the
// closing brace of the else-if chain) are elided in this listing.
3853 HObjectAccess HObjectAccess::ForJSObjectOffset(int offset,
3854     Representation representation) {
3855   ASSERT(offset >= 0);
3856   Portion portion = kInobject;
3858   if (offset == JSObject::kElementsOffset) {
3859     portion = kElementsPointer;
3860   } else if (offset == JSObject::kMapOffset) {
3863   return HObjectAccess(portion, offset, representation);
// Access descriptor for a JSArray field at a raw offset; like
// ForJSObjectOffset but additionally classifies the array length field
// into the kArrayLengths portion.
// NOTE(review): lines 3876-3877 (presumably the map-portion assignment and
// brace) are elided in this listing.
3867 HObjectAccess HObjectAccess::ForJSArrayOffset(int offset) {
3868   ASSERT(offset >= 0);
3869   Portion portion = kInobject;
3871   if (offset == JSObject::kElementsOffset) {
3872     portion = kElementsPointer;
3873   } else if (offset == JSArray::kLengthOffset) {
3874     portion = kArrayLengths;
3875   } else if (offset == JSObject::kMapOffset) {
3878   return HObjectAccess(portion, offset);
// Access descriptor for a field in an object's out-of-object properties
// backing store, at the given byte offset.
3882 HObjectAccess HObjectAccess::ForBackingStoreOffset(int offset,
3883     Representation representation) {
3884   ASSERT(offset >= 0);
3885   return HObjectAccess(kBackingStore, offset, representation);
// Builds an access descriptor for a named property found by a LookupResult:
// either an existing field or a field being added by a map transition. The
// field index (relative to the end of the in-object area) determines whether
// the property lives in-object or in the properties backing store.
// NOTE(review): lines 3892 (`int index;`), 3897 (the `} else {`), and
// 3905-3906 / 3911 (the `if (index < 0) {` / `} else {` seam) are elided in
// this listing -- confirm against the full file.
3889 HObjectAccess HObjectAccess::ForField(Handle<Map> map,
3890     LookupResult *lookup, Handle<String> name) {
3891   ASSERT(lookup->IsField() || lookup->IsTransitionToField(*map));
3893   Representation representation;
3894   if (lookup->IsField()) {
3895     index = lookup->GetLocalFieldIndexFromMap(*map);
3896     representation = lookup->representation();
3898     Map* transition = lookup->GetTransitionMapFromMap(*map);
3899     int descriptor = transition->LastAdded();
3900     index = transition->instance_descriptors()->GetFieldIndex(descriptor) -
3901         map->inobject_properties();
3902     PropertyDetails details =
3903         transition->instance_descriptors()->GetDetails(descriptor);
3904     representation = details.representation();
3907     // Negative property indices are in-object properties, indexed
3908     // from the end of the fixed part of the object.
3909     int offset = (index * kPointerSize) + map->instance_size();
3910     return HObjectAccess(kInobject, offset, representation);
3912     // Non-negative property indices are in the properties array.
3913     int offset = (index * kPointerSize) + FixedArray::kHeaderSize;
3914     return HObjectAccess(kBackingStore, offset, representation, name);
// Access descriptor for a Cell's value slot, tagged, named with the
// canonical "%cell_value%"-style string from the heap for debug output.
3919 HObjectAccess HObjectAccess::ForCellPayload(Isolate* isolate) {
3920   return HObjectAccess(
3921       kInobject, Cell::kValueOffset, Representation::Tagged(),
3922       Handle<String>(isolate->heap()->cell_value_string()));
// Tags a load or store instruction with the GVN change/dependency flags for
// the portion of the object this access touches: stores set the matching
// kChanges* flag, loads the matching kDependsOn* flag (plus kUseGVN).
// NOTE(review): this listing elides the `if (is_store)`/`else` seam
// (3928/3932), several `case` labels (kArrayLengths, kInobject, kDouble,
// kBackingStore, kMaps) and all `break` statements -- confirm against the
// full file before editing.
3926 void HObjectAccess::SetGVNFlags(HValue *instr, bool is_store) {
3927   // set the appropriate GVN flags for a given load or store instruction
3929     // track dominating allocations in order to eliminate write barriers
3930     instr->SetGVNFlag(kDependsOnNewSpacePromotion);
3931     instr->SetFlag(HValue::kTrackSideEffectDominators);
3933     // try to GVN loads, but don't hoist above map changes
3934     instr->SetFlag(HValue::kUseGVN);
3935     instr->SetGVNFlag(kDependsOnMaps);
3938   switch (portion()) {
3940       instr->SetGVNFlag(is_store
3941           ? kChangesArrayLengths : kDependsOnArrayLengths);
3943     case kStringLengths:
3944       instr->SetGVNFlag(is_store
3945           ? kChangesStringLengths : kDependsOnStringLengths);
3948       instr->SetGVNFlag(is_store
3949           ? kChangesInobjectFields : kDependsOnInobjectFields);
3952       instr->SetGVNFlag(is_store
3953           ? kChangesDoubleFields : kDependsOnDoubleFields);
3956       instr->SetGVNFlag(is_store
3957           ? kChangesBackingStoreFields : kDependsOnBackingStoreFields);
3959     case kElementsPointer:
3960       instr->SetGVNFlag(is_store
3961           ? kChangesElementsPointer : kDependsOnElementsPointer);
3964       instr->SetGVNFlag(is_store
3965           ? kChangesMaps : kDependsOnMaps);
3967     case kExternalMemory:
3968       instr->SetGVNFlag(is_store
3969           ? kChangesExternalMemory : kDependsOnExternalMemory);
// Debug printing of an access descriptor: a symbolic tag per portion
// (%length, %elements, %map, [in-object], [backing-store],
// [external-memory]), the field name when one is attached, and finally the
// byte offset.
// NOTE(review): several `case` labels and `break`s (e.g. kArrayLengths,
// kMaps, kInobject, kBackingStore) are elided in this listing, as is the
// trailing "@%d" guard around line 4000-4002.
3975 void HObjectAccess::PrintTo(StringStream* stream) {
3978   switch (portion()) {
3980     case kStringLengths:
3981       stream->Add("%length");
3983     case kElementsPointer:
3984       stream->Add("%elements");
3987       stream->Add("%map");
3989     case kDouble:    // fall through
3991       if (!name_.is_null()) stream->Add(*String::cast(*name_)->ToCString());
3992       stream->Add("[in-object]");
3995       if (!name_.is_null()) stream->Add(*String::cast(*name_)->ToCString());
3996       stream->Add("[backing-store]");
3998     case kExternalMemory:
3999       stream->Add("[external-memory]");
4003   stream->Add("@%d", offset());
4006 } } // namespace v8::internal