1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 #include "hydrogen-infer-representation.h"
34 #if V8_TARGET_ARCH_IA32
35 #include "ia32/lithium-ia32.h"
36 #elif V8_TARGET_ARCH_X64
37 #include "x64/lithium-x64.h"
38 #elif V8_TARGET_ARCH_ARM
39 #include "arm/lithium-arm.h"
40 #elif V8_TARGET_ARCH_MIPS
41 #include "mips/lithium-mips.h"
43 #error Unsupported target architecture.
// Generates one CompileToLithium stub per concrete hydrogen instruction;
// each stub simply dispatches to the matching LChunkBuilder::Do<Type>().
49 #define DEFINE_COMPILE(type) \
50 LInstruction* H##type::CompileToLithium(LChunkBuilder* builder) { \
51 return builder->Do##type(this); \
53 HYDROGEN_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
// Returns the isolate owning this value, via its basic block.
57 Isolate* HValue::isolate() const {
58 ASSERT(block() != NULL);
59 return block()->isolate();
// Pins the representation to r (from type feedback) and makes it
// non-flexible so later inference passes cannot change it.
63 void HValue::AssumeRepresentation(Representation r) {
64 if (CheckFlag(kFlexibleRepresentation)) {
65 ChangeRepresentation(r);
66 // The representation of the value is dictated by type feedback and
67 // will not be changed later.
68 ClearFlag(kFlexibleRepresentation);
// Infers a representation first from inputs, then from uses; if a Smi
// representation was chosen but some use cannot take a Smi, widen it
// (the call on the elided line 80 passes Integer32 below).
73 void HValue::InferRepresentation(HInferRepresentationPhase* h_infer) {
74 ASSERT(CheckFlag(kFlexibleRepresentation));
75 Representation new_rep = RepresentationFromInputs();
76 UpdateRepresentation(new_rep, h_infer, "inputs");
77 new_rep = RepresentationFromUses();
78 UpdateRepresentation(new_rep, h_infer, "uses");
79 if (representation().IsSmi() && HasNonSmiUse()) {
81 Representation::Integer32(), h_infer, "use requirements");
// Chooses a representation by counting how each use observes this value.
// Preference order (most to least general): Tagged, Double, Integer32, Smi.
86 Representation HValue::RepresentationFromUses() {
87 if (HasNoUses()) return Representation::None();
89 // Array of use counts for each representation.
90 int use_count[Representation::kNumRepresentations] = { 0 };
92 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
93 HValue* use = it.value();
94 Representation rep = use->observed_input_representation(it.index());
95 if (rep.IsNone()) continue;
96 if (FLAG_trace_representation) {
97 PrintF("#%d %s is used by #%d %s as %s%s\n",
98 id(), Mnemonic(), use->id(), use->Mnemonic(), rep.Mnemonic(),
99 (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
101 use_count[rep.kind()] += 1;
// Phis also fold in uses reached indirectly through connected phis.
103 if (IsPhi()) HPhi::cast(this)->AddIndirectUsesTo(&use_count[0]);
104 int tagged_count = use_count[Representation::kTagged];
105 int double_count = use_count[Representation::kDouble];
106 int int32_count = use_count[Representation::kInteger32];
107 int smi_count = use_count[Representation::kSmi];
109 if (tagged_count > 0) return Representation::Tagged();
110 if (double_count > 0) return Representation::Double();
111 if (int32_count > 0) return Representation::Integer32();
112 if (smi_count > 0) return Representation::Smi();
114 return Representation::None();
// Upgrades this value's representation to new_rep when it is strictly more
// general; re-queues dependants so the fixpoint iteration sees the change.
118 void HValue::UpdateRepresentation(Representation new_rep,
119 HInferRepresentationPhase* h_infer,
120 const char* reason) {
121 Representation r = representation();
122 if (new_rep.is_more_general_than(r)) {
// Values flagged kCannotBeTagged refuse a Tagged representation.
123 if (CheckFlag(kCannotBeTagged) && new_rep.IsTagged()) return;
124 if (FLAG_trace_representation) {
125 PrintF("Changing #%d %s representation %s -> %s based on %s\n",
126 id(), Mnemonic(), r.Mnemonic(), new_rep.Mnemonic(), reason);
128 ChangeRepresentation(new_rep);
129 AddDependantsToWorklist(h_infer);
// Adds both uses and operands to the inference worklist; a representation
// change here can affect either direction.
134 void HValue::AddDependantsToWorklist(HInferRepresentationPhase* h_infer) {
135 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
136 h_infer->AddToWorklist(it.value());
138 for (int i = 0; i < OperandCount(); ++i) {
139 h_infer->AddToWorklist(OperandAt(i));
// Saturates a 64-bit result into the range of representation r
// (Smi::kMin/MaxValue or kMinInt/kMaxInt) and records the overflow in the
// out-parameter declared on the elided signature lines (145-147).
// NOTE(review): the branches that set the overflow flag are elided in this
// excerpt; only the clamped returns are visible.
144 static int32_t ConvertAndSetOverflow(Representation r,
148 if (result > Smi::kMaxValue) {
150 return Smi::kMaxValue;
152 if (result < Smi::kMinValue) {
154 return Smi::kMinValue;
157 if (result > kMaxInt) {
161 if (result < kMinInt) {
166 return static_cast<int32_t>(result);
// The three helpers below do the arithmetic in int64_t so the exact result
// is available, then clamp/flag through ConvertAndSetOverflow.
170 static int32_t AddWithoutOverflow(Representation r,
174 int64_t result = static_cast<int64_t>(a) + static_cast<int64_t>(b);
175 return ConvertAndSetOverflow(r, result, overflow);
179 static int32_t SubWithoutOverflow(Representation r,
183 int64_t result = static_cast<int64_t>(a) - static_cast<int64_t>(b);
184 return ConvertAndSetOverflow(r, result, overflow);
188 static int32_t MulWithoutOverflow(const Representation& r,
192 int64_t result = static_cast<int64_t>(a) * static_cast<int64_t>(b);
193 return ConvertAndSetOverflow(r, result, overflow);
// Returns a conservative bit mask covering every value in [lower_, upper_]:
// exact when the range is a single value, otherwise grown to the next
// all-ones pattern that covers upper_ (initialization of res is elided).
197 int32_t Range::Mask() const {
198 if (lower_ == upper_) return lower_;
201 while (res < upper_) {
202 res = (res << 1) | 1;
// Shifts both bounds by a constant; saturates at int32 limits instead of
// tracking overflow (hence the ignored may_overflow flag).
210 void Range::AddConstant(int32_t value) {
211 if (value == 0) return;
212 bool may_overflow = false; // Overflow is ignored here.
213 Representation r = Representation::Integer32();
214 lower_ = AddWithoutOverflow(r, lower_, value, &may_overflow);
215 upper_ = AddWithoutOverflow(r, upper_, value, &may_overflow);
// Set intersection: tightest bounds; -0 only possible if both allow it.
222 void Range::Intersect(Range* other) {
223 upper_ = Min(upper_, other->upper_);
224 lower_ = Max(lower_, other->lower_);
225 bool b = CanBeMinusZero() && other->CanBeMinusZero();
226 set_can_be_minus_zero(b);
// Set union: widest bounds; -0 possible if either side allows it.
230 void Range::Union(Range* other) {
231 upper_ = Max(upper_, other->upper_);
232 lower_ = Min(lower_, other->lower_);
233 bool b = CanBeMinusZero() || other->CanBeMinusZero();
234 set_can_be_minus_zero(b);
// Range of Max(a, b): both bounds take the maximum.
238 void Range::CombinedMax(Range* other) {
239 upper_ = Max(upper_, other->upper_);
240 lower_ = Max(lower_, other->lower_);
241 set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
// Range of Min(a, b): both bounds take the minimum.
245 void Range::CombinedMin(Range* other) {
246 upper_ = Min(upper_, other->upper_);
247 lower_ = Min(lower_, other->lower_);
248 set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
// Arithmetic right shift by (value & 0x1F); order of bounds is preserved
// because >> is monotonic on int32.
252 void Range::Sar(int32_t value) {
253 int32_t bits = value & 0x1F;
254 lower_ = lower_ >> bits;
255 upper_ = upper_ >> bits;
256 set_can_be_minus_zero(false);
// Left shift by (value & 0x1F); the shift-back comparison detects bounds
// that overflowed (the widening taken on overflow is elided here).
260 void Range::Shl(int32_t value) {
261 int32_t bits = value & 0x1F;
262 int old_lower = lower_;
263 int old_upper = upper_;
264 lower_ = lower_ << bits;
265 upper_ = upper_ << bits;
266 if (old_lower != lower_ >> bits || old_upper != upper_ >> bits) {
270 set_can_be_minus_zero(false);
// Adds other to this range; reports whether either bound may overflow r.
274 bool Range::AddAndCheckOverflow(const Representation& r, Range* other) {
275 bool may_overflow = false;
276 lower_ = AddWithoutOverflow(r, lower_, other->lower(), &may_overflow);
277 upper_ = AddWithoutOverflow(r, upper_, other->upper(), &may_overflow);
// Subtracts other; note bounds cross over (lower - other->upper, etc.).
286 bool Range::SubAndCheckOverflow(const Representation& r, Range* other) {
287 bool may_overflow = false;
288 lower_ = SubWithoutOverflow(r, lower_, other->upper(), &may_overflow);
289 upper_ = SubWithoutOverflow(r, upper_, other->lower(), &may_overflow);
// Restores the lower_ <= upper_ invariant by swapping if needed.
298 void Range::KeepOrder() {
299 if (lower_ > upper_) {
300 int32_t tmp = lower_;
// Debug invariant check.
308 void Range::Verify() const {
309 ASSERT(lower_ <= upper_);
// Multiplies the ranges: since sign combinations can flip ordering, all
// four corner products are computed and the min/max taken.
314 bool Range::MulAndCheckOverflow(const Representation& r, Range* other) {
315 bool may_overflow = false;
316 int v1 = MulWithoutOverflow(r, lower_, other->lower(), &may_overflow);
317 int v2 = MulWithoutOverflow(r, lower_, other->upper(), &may_overflow);
318 int v3 = MulWithoutOverflow(r, upper_, other->lower(), &may_overflow);
319 int v4 = MulWithoutOverflow(r, upper_, other->upper(), &may_overflow);
320 lower_ = Min(Min(v1, v2), Min(v3, v4));
321 upper_ = Max(Max(v1, v2), Max(v3, v4));
// Human-readable name of this HType, restricted to characters the
// c1visualizer tool accepts for local names.
329 const char* HType::ToString() {
330 // Note: The c1visualizer syntax for locals allows only a sequence of the
331 // following characters: A-Za-z0-9_-|:
333 case kNone: return "none";
334 case kTagged: return "tagged";
335 case kTaggedPrimitive: return "primitive";
336 case kTaggedNumber: return "number";
337 case kSmi: return "smi";
338 case kHeapNumber: return "heap-number";
339 case kString: return "string";
340 case kBoolean: return "boolean";
341 case kNonPrimitive: return "non-primitive";
342 case kJSArray: return "array";
343 case kJSObject: return "object";
346 return "unreachable";
// Maps a concrete heap value to the most specific HType that describes it;
// defaults to Tagged when no specific check matches.
350 HType HType::TypeFromValue(Handle<Object> value) {
351 HType result = HType::Tagged();
352 if (value->IsSmi()) {
353 result = HType::Smi();
354 } else if (value->IsHeapNumber()) {
355 result = HType::HeapNumber();
356 } else if (value->IsString()) {
357 result = HType::String();
358 } else if (value->IsBoolean()) {
359 result = HType::Boolean();
360 } else if (value->IsJSObject()) {
361 result = HType::JSObject();
362 } else if (value->IsJSArray()) {
363 result = HType::JSArray();
// A value is "defined after" a block if its own block has a larger id.
369 bool HValue::IsDefinedAfter(HBasicBlock* other) const {
370 return block()->block_id() > other->block_id();
// Lazily prunes dead entries from the use list while walking to the tail.
374 HUseListNode* HUseListNode::tail() {
375 // Skip and remove dead items in the use list.
376 while (tail_ != NULL && tail_->value()->CheckFlag(HValue::kIsDead)) {
377 tail_ = tail_->tail_;
// True when every non-simulate use carries flag f.
383 bool HValue::CheckUsesForFlag(Flag f) const {
384 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
385 if (it.value()->IsSimulate()) continue;
386 if (!it.value()->CheckFlag(f)) return false;
// Same as above, but also reports the offending use through *value
// (the assignment happens on lines elided from this excerpt).
392 bool HValue::CheckUsesForFlag(Flag f, HValue** value) const {
393 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
394 if (it.value()->IsSimulate()) continue;
395 if (!it.value()->CheckFlag(f)) {
// True only if at least one non-simulate use has flag f and none lacks it.
404 bool HValue::HasAtLeastOneUseWithFlagAndNoneWithout(Flag f) const {
405 bool return_value = false;
406 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
407 if (it.value()->IsSimulate()) continue;
408 if (!it.value()->CheckFlag(f)) return false;
// Iterator over a use list; Advance() caches value/index from the node.
415 HUseIterator::HUseIterator(HUseListNode* head) : next_(head) {
420 void HUseIterator::Advance() {
422 if (current_ != NULL) {
423 next_ = current_->tail();
424 value_ = current_->value();
425 index_ = current_->index();
// Counts uses by iterating the (lazily pruned) use list.
430 int HValue::UseCount() const {
432 for (HUseIterator it(uses()); !it.Done(); it.Advance()) ++count;
// Unlinks the use-list node recording that `value` uses this at operand
// `index`, and returns it for reuse. In debug builds the removed node is
// zapped (re-created with a NULL tail) so stale tails are never followed.
437 HUseListNode* HValue::RemoveUse(HValue* value, int index) {
438 HUseListNode* previous = NULL;
439 HUseListNode* current = use_list_;
440 while (current != NULL) {
441 if (current->value() == value && current->index() == index) {
442 if (previous == NULL) {
443 use_list_ = current->tail();
445 previous->set_tail(current->tail());
451 current = current->tail();
455 // Do not reuse use list nodes in debug mode, zap them.
456 if (current != NULL) {
459 HUseListNode(current->value(), current->index(), NULL);
// Structural equality used by GVN: same opcode, representation, type,
// flags, and identical operands (compared by id); the final word goes to
// the instruction-specific DataEquals hook.
468 bool HValue::Equals(HValue* other) {
469 if (other->opcode() != opcode()) return false;
470 if (!other->representation().Equals(representation())) return false;
471 if (!other->type_.Equals(type_)) return false;
472 if (other->flags() != flags()) return false;
473 if (OperandCount() != other->OperandCount()) return false;
474 for (int i = 0; i < OperandCount(); ++i) {
475 if (OperandAt(i)->id() != other->OperandAt(i)->id()) return false;
477 bool result = DataEquals(other);
// Equal values must hash equally, or GVN's table lookups would miss.
478 ASSERT(!result || Hashcode() == other->Hashcode());
// Hash consistent with Equals: folds opcode and operand ids together.
483 intptr_t HValue::Hashcode() {
484 intptr_t result = opcode();
485 int count = OperandCount();
486 for (int i = 0; i < count; ++i) {
487 result = result * 19 + OperandAt(i)->id() + (result >> 7);
// Instruction name for tracing; generated per concrete opcode.
493 const char* HValue::Mnemonic() const {
495 #define MAKE_CASE(type) case k##type: return #type;
496 HYDROGEN_CONCRETE_INSTRUCTION_LIST(MAKE_CASE)
498 case kPhi: return "Phi";
// Dead-code elimination may replace this value with dummy uses only when
// the flag is on and the conditions on the (partially elided) expression
// below hold — NOTE(review): clause list is incomplete in this excerpt.
504 bool HValue::CanReplaceWithDummyUses() {
505 return FLAG_unreachable_code_elimination &&
506 !(block()->IsReachable() ||
508 IsControlInstruction() ||
// Convenience predicates for integer-constant operands.
515 bool HValue::IsInteger32Constant() {
516 return IsConstant() && HConstant::cast(this)->HasInteger32Value();
520 int32_t HValue::GetInteger32Constant() {
521 return HConstant::cast(this)->Integer32Value();
525 bool HValue::EqualsInteger32Constant(int32_t value) {
526 return IsInteger32Constant() && GetInteger32Constant() == value;
// Sets an operand and keeps the use lists in sync (RegisterUse first, so
// the old operand's use node can be recycled for the new one).
530 void HValue::SetOperandAt(int index, HValue* value) {
531 RegisterUse(index, value);
532 InternalSetOperandAt(index, value);
536 void HValue::DeleteAndReplaceWith(HValue* other) {
537 // We replace all uses first, so Delete can assert that there are none.
538 if (other != NULL) ReplaceAllUsesWith(other);
// Moves every use node from this value onto other's use list, rewriting
// each user's operand slot to point at other.
544 void HValue::ReplaceAllUsesWith(HValue* other) {
545 while (use_list_ != NULL) {
546 HUseListNode* list_node = use_list_;
547 HValue* value = list_node->value();
548 ASSERT(!value->block()->IsStartBlock());
549 value->InternalSetOperandAt(list_node->index(), other);
550 use_list_ = list_node->tail();
551 list_node->set_tail(other->use_list_);
552 other->use_list_ = list_node;
557 void HValue::Kill() {
558 // Instead of going through the entire use list of each operand, we only
559 // check the first item in each use list and rely on the tail() method to
560 // skip dead items, removing them lazily next time we traverse the list.
562 for (int i = 0; i < OperandCount(); ++i) {
563 HValue* operand = OperandAt(i);
564 if (operand == NULL) continue;
565 HUseListNode* first = operand->use_list_;
566 if (first != NULL && first->value()->CheckFlag(kIsDead)) {
567 operand->use_list_ = first->tail();
// Associates the value with a block; the first association also assigns
// its graph-wide id.
573 void HValue::SetBlock(HBasicBlock* block) {
574 ASSERT(block_ == NULL || block == NULL);
576 if (id_ == kNoNumber && block != NULL) {
577 id_ = block->graph()->GetNextValueID(this);
// Prints the type annotation, but only when it adds information beyond the
// representation (non-tagged reps imply the type; plain Tagged is default).
582 void HValue::PrintTypeTo(StringStream* stream) {
583 if (!representation().IsTagged() || type().Equals(HType::Tagged())) return;
584 stream->Add(" type:%s", type().ToString());
// Prints the inferred range unless it is the most general one.
588 void HValue::PrintRangeTo(StringStream* stream) {
589 if (range() == NULL || range()->IsMostGeneric()) return;
590 // Note: The c1visualizer syntax for locals allows only a sequence of the
591 // following characters: A-Za-z0-9_-|:
592 stream->Add(" range:%d_%d%s",
595 range()->CanBeMinusZero() ? "_m0" : "");
// Prints the GVN "changes" flag set, either as the catch-all marker or as
// a comma-separated list expanded from the flag lists.
599 void HValue::PrintChangesTo(StringStream* stream) {
600 GVNFlagSet changes_flags = ChangesFlags();
601 if (changes_flags.IsEmpty()) return;
602 stream->Add(" changes[");
603 if (changes_flags == AllSideEffectsFlagSet()) {
606 bool add_comma = false;
607 #define PRINT_DO(type) \
608 if (changes_flags.Contains(kChanges##type)) { \
609 if (add_comma) stream->Add(","); \
611 stream->Add(#type); \
613 GVN_TRACKED_FLAG_LIST(PRINT_DO);
614 GVN_UNTRACKED_FLAG_LIST(PRINT_DO);
// A value's printed name is its representation mnemonic plus its id.
621 void HValue::PrintNameTo(StringStream* stream) {
622 stream->Add("%s%d", representation_.Mnemonic(), id());
626 bool HValue::HasMonomorphicJSObjectType() {
627 return !GetMonomorphicJSObjectMap().is_null();
// Recomputes the inferred type; returns whether it changed (the update to
// type_ happens on lines elided from this excerpt).
631 bool HValue::UpdateInferredType() {
632 HType type = CalculateInferredType();
633 bool result = (!type.Equals(type_));
// Maintains use lists when operand `index` changes from old_value to
// new_value: removes the old use node and reuses it (or allocates a fresh
// one in the new value's zone) to record the new use.
639 void HValue::RegisterUse(int index, HValue* new_value) {
640 HValue* old_value = OperandAt(index);
641 if (old_value == new_value) return;
643 HUseListNode* removed = NULL;
644 if (old_value != NULL) {
645 removed = old_value->RemoveUse(this, index);
648 if (new_value != NULL) {
649 if (removed == NULL) {
650 new_value->use_list_ = new(new_value->block()->zone()) HUseListNode(
651 this, index, new_value->use_list_);
653 removed->set_tail(new_value->use_list_);
654 new_value->use_list_ = removed;
// Pushes a refinement range on top of the current one (ranges stack so
// RemoveLastAddedRange can undo the refinement).
660 void HValue::AddNewRange(Range* r, Zone* zone) {
661 if (!HasRange()) ComputeInitialRange(zone);
662 if (!HasRange()) range_ = new(zone) Range();
664 r->StackUpon(range_);
669 void HValue::RemoveLastAddedRange() {
671 ASSERT(range_->next() != NULL);
672 range_ = range_->next();
// Lazily computes the base range via the per-instruction InferRange hook.
676 void HValue::ComputeInitialRange(Zone* zone) {
678 range_ = InferRange(zone);
// Full trace line for an instruction: mnemonic, operands (via the elided
// PrintDataTo call), range, changes, and a no-observable-side-effects tag.
683 void HInstruction::PrintTo(StringStream* stream) {
684 PrintMnemonicTo(stream);
686 PrintRangeTo(stream);
687 PrintChangesTo(stream);
689 if (CheckFlag(HValue::kHasNoObservableSideEffects)) {
690 stream->Add(" [noOSE]");
// Default operand printing: space-separated operand names.
695 void HInstruction::PrintDataTo(StringStream *stream) {
696 for (int i = 0; i < OperandCount(); ++i) {
697 if (i > 0) stream->Add(" ");
698 OperandAt(i)->PrintNameTo(stream);
703 void HInstruction::PrintMnemonicTo(StringStream* stream) {
704 stream->Add("%s ", Mnemonic());
// Removes the instruction from its block's doubly linked list; control
// instructions and block entries must never be unlinked.
708 void HInstruction::Unlink() {
710 ASSERT(!IsControlInstruction()); // Must never move control instructions.
711 ASSERT(!IsBlockEntry()); // Doesn't make sense to delete these.
712 ASSERT(previous_ != NULL);
713 previous_->next_ = next_;
715 ASSERT(block()->last() == this);
716 block()->set_last(previous_);
718 next_->previous_ = previous_;
// Splices this (unlinked) instruction into the list immediately before
// `next`, adopting next's block and, if unset, next's source position.
724 void HInstruction::InsertBefore(HInstruction* next) {
726 ASSERT(!next->IsBlockEntry());
727 ASSERT(!IsControlInstruction());
728 ASSERT(!next->block()->IsStartBlock());
729 ASSERT(next->previous_ != NULL);
730 HInstruction* prev = next->previous();
732 next->previous_ = this;
735 SetBlock(next->block());
736 if (position() == RelocInfo::kNoPosition &&
737 next->position() != RelocInfo::kNoPosition) {
738 set_position(next->position());
// Splices this instruction in immediately after `previous`, with two
// special cases: insertion into a finished start block is redirected to
// its successor, and insertion after a side-effecting instruction skips
// past the mandatory following simulate.
743 void HInstruction::InsertAfter(HInstruction* previous) {
745 ASSERT(!previous->IsControlInstruction());
746 ASSERT(!IsControlInstruction() || previous->next_ == NULL);
747 HBasicBlock* block = previous->block();
748 // Never insert anything except constants into the start block after finishing
750 if (block->IsStartBlock() && block->IsFinished() && !IsConstant()) {
751 ASSERT(block->end()->SecondSuccessor() == NULL);
752 InsertAfter(block->end()->FirstSuccessor()->first());
756 // If we're inserting after an instruction with side-effects that is
757 // followed by a simulate instruction, we need to insert after the
758 // simulate instruction instead.
759 HInstruction* next = previous->next_;
760 if (previous->HasObservableSideEffects() && next != NULL) {
761 ASSERT(next->IsSimulate());
763 next = previous->next_;
766 previous_ = previous;
769 previous->next_ = this;
770 if (next != NULL) next->previous_ = this;
771 if (block->last() == previous) {
772 block->set_last(this);
774 if (position() == RelocInfo::kNoPosition &&
775 previous->position() != RelocInfo::kNoPosition) {
776 set_position(previous->position());
// Debug-only consistency checks for a linked instruction: operands
// dominate their uses, side effects are followed by a simulate, GVN-able
// instructions override DataEquals, and all users are linked.
782 void HInstruction::Verify() {
783 // Verify that input operands are defined before use.
784 HBasicBlock* cur_block = block();
785 for (int i = 0; i < OperandCount(); ++i) {
786 HValue* other_operand = OperandAt(i);
787 if (other_operand == NULL) continue;
788 HBasicBlock* other_block = other_operand->block();
789 if (cur_block == other_block) {
790 if (!other_operand->IsPhi()) {
// Walk backwards in the block: a non-phi operand in the same block
// must appear before this instruction.
791 HInstruction* cur = this->previous();
792 while (cur != NULL) {
793 if (cur == other_operand) break;
794 cur = cur->previous();
796 // Must reach other operand in the same block!
797 ASSERT(cur == other_operand);
800 // If the following assert fires, you may have forgotten an
802 ASSERT(other_block->Dominates(cur_block));
806 // Verify that instructions that may have side-effects are followed
807 // by a simulate instruction.
808 if (HasObservableSideEffects() && !IsOsrEntry()) {
809 ASSERT(next()->IsSimulate());
812 // Verify that instructions that can be eliminated by GVN have overridden
813 // HValue::DataEquals. The default implementation is UNREACHABLE. We
814 // don't actually care whether DataEquals returns true or false here.
815 if (CheckFlag(kUseGVN)) DataEquals(this);
817 // Verify that all uses are in the graph.
818 for (HUseIterator use = uses(); !use.Done(); use.Advance()) {
819 if (use.value()->IsInstruction()) {
820 ASSERT(HInstruction::cast(use.value())->IsLinked());
// Trace helpers for simple one- and two-operand instructions.
827 void HDummyUse::PrintDataTo(StringStream* stream) {
828 value()->PrintNameTo(stream);
832 void HEnvironmentMarker::PrintDataTo(StringStream* stream) {
833 stream->Add("%s var[%d]", kind() == BIND ? "bind" : "lookup", index());
837 void HUnaryCall::PrintDataTo(StringStream* stream) {
838 value()->PrintNameTo(stream);
840 stream->Add("#%d", argument_count());
844 void HCallJSFunction::PrintDataTo(StringStream* stream) {
845 function()->PrintNameTo(stream);
847 stream->Add("#%d", argument_count());
// Factory for HCallJSFunction: when the callee is a known constant
// function, precompute whether the call target contains a stack check
// (full-codegen or optimized code), so codegen can skip its own.
851 HCallJSFunction* HCallJSFunction::New(
856 bool pass_argument_count) {
857 bool has_stack_check = false;
858 if (function->IsConstant()) {
859 HConstant* fun_const = HConstant::cast(function);
860 Handle<JSFunction> jsfun =
861 Handle<JSFunction>::cast(fun_const->handle(zone->isolate()));
862 has_stack_check = !jsfun.is_null() &&
863 (jsfun->code()->kind() == Code::FUNCTION ||
864 jsfun->code()->kind() == Code::OPTIMIZED_FUNCTION);
867 return new(zone) HCallJSFunction(
868 function, argument_count, pass_argument_count,
875 void HBinaryCall::PrintDataTo(StringStream* stream) {
876 first()->PrintNameTo(stream);
878 second()->PrintNameTo(stream);
880 stream->Add("#%d", argument_count());
// Rewrites the checked index as (base + offset) >> scale when the index
// decomposes that way: materializes the add/shift instructions before this
// check and makes the decomposed base the checked operand.
884 void HBoundsCheck::ApplyIndexChange() {
885 if (skip_check()) return;
887 DecompositionResult decomposition;
888 bool index_is_decomposable = index()->TryDecompose(&decomposition);
889 if (index_is_decomposable) {
890 ASSERT(decomposition.base() == base());
// Already in canonical form; nothing to do.
891 if (decomposition.offset() == offset() &&
892 decomposition.scale() == scale()) return;
897 ReplaceAllUsesWith(index());
899 HValue* current_index = decomposition.base();
900 int actual_offset = decomposition.offset() + offset();
901 int actual_scale = decomposition.scale() + scale();
903 Zone* zone = block()->graph()->zone();
904 HValue* context = block()->graph()->GetInvalidContext();
905 if (actual_offset != 0) {
906 HConstant* add_offset = HConstant::New(zone, context, actual_offset);
907 add_offset->InsertBefore(this);
908 HInstruction* add = HAdd::New(zone, context,
909 current_index, add_offset);
910 add->InsertBefore(this);
911 add->AssumeRepresentation(index()->representation());
// The combined offset is known to fit, so overflow checks are dropped.
912 add->ClearFlag(kCanOverflow);
916 if (actual_scale != 0) {
917 HConstant* sar_scale = HConstant::New(zone, context, actual_scale);
918 sar_scale->InsertBefore(this);
919 HInstruction* sar = HSar::New(zone, context,
920 current_index, sar_scale);
921 sar->InsertBefore(this);
922 sar->AssumeRepresentation(index()->representation());
926 SetOperandAt(0, current_index);
// Prints "index length" plus the base/offset/scale decomposition when set.
934 void HBoundsCheck::PrintDataTo(StringStream* stream) {
935 index()->PrintNameTo(stream);
937 length()->PrintNameTo(stream);
938 if (base() != NULL && (offset() != 0 || scale() != 0)) {
939 stream->Add(" base: ((");
940 if (base() != index()) {
941 index()->PrintNameTo(stream);
943 stream->Add("index");
945 stream->Add(" + %d) >> %d)", offset(), scale());
948 stream->Add(" [DISABLED]");
// The check's representation is the generalization of index and length
// (upgraded from Tagged to Smi when type feedback says Smi), capped at
// Integer32 since bounds checks are integral comparisons.
953 void HBoundsCheck::InferRepresentation(HInferRepresentationPhase* h_infer) {
954 ASSERT(CheckFlag(kFlexibleRepresentation));
955 HValue* actual_index = index()->ActualValue();
956 HValue* actual_length = length()->ActualValue();
957 Representation index_rep = actual_index->representation();
958 Representation length_rep = actual_length->representation();
959 if (index_rep.IsTagged() && actual_index->type().IsSmi()) {
960 index_rep = Representation::Smi();
962 if (length_rep.IsTagged() && actual_length->type().IsSmi()) {
963 length_rep = Representation::Smi();
965 Representation r = index_rep.generalize(length_rep);
966 if (r.is_more_general_than(Representation::Integer32())) {
967 r = Representation::Integer32();
969 UpdateRepresentation(r, h_infer, "boundscheck");
// A passed bounds check bounds its result by the length's range (minus one
// unless equality is allowed), intersected with the index's own range.
973 Range* HBoundsCheck::InferRange(Zone* zone) {
974 Representation r = representation();
975 if (r.IsSmiOrInteger32() && length()->HasRange()) {
976 int upper = length()->range()->upper() - (allow_equality() ? 0 : 1);
979 Range* result = new(zone) Range(lower, upper);
980 if (index()->HasRange()) {
981 result->Intersect(index()->range());
984 // In case of Smi representation, clamp result to Smi::kMaxValue.
985 if (r.IsSmi()) result->ClampToSmi();
988 return HValue::InferRange(zone);
// Trace helpers for calls, checks, and control instructions; each prints
// its operands and any instruction-specific detail.
992 void HBoundsCheckBaseIndexInformation::PrintDataTo(StringStream* stream) {
993 stream->Add("base: ");
994 base_index()->PrintNameTo(stream);
995 stream->Add(", check: ");
996 base_index()->PrintNameTo(stream);
1000 void HCallWithDescriptor::PrintDataTo(StringStream* stream) {
1001 for (int i = 0; i < OperandCount(); i++) {
1002 OperandAt(i)->PrintNameTo(stream);
1005 stream->Add("#%d", argument_count());
1009 void HCallNewArray::PrintDataTo(StringStream* stream) {
1010 stream->Add(ElementsKindToString(elements_kind()));
1012 HBinaryCall::PrintDataTo(stream);
1016 void HCallRuntime::PrintDataTo(StringStream* stream) {
1017 stream->Add("%o ", *name());
1018 if (save_doubles() == kSaveFPRegs) {
1019 stream->Add("[save doubles] ");
1021 stream->Add("#%d", argument_count());
1025 void HClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
1026 stream->Add("class_of_test(");
1027 value()->PrintNameTo(stream);
1028 stream->Add(", \"%o\")", *class_name());
1032 void HWrapReceiver::PrintDataTo(StringStream* stream) {
1033 receiver()->PrintNameTo(stream);
1035 function()->PrintNameTo(stream);
1039 void HAccessArgumentsAt::PrintDataTo(StringStream* stream) {
1040 arguments()->PrintNameTo(stream);
1042 index()->PrintNameTo(stream);
1043 stream->Add("], length ");
1044 length()->PrintNameTo(stream);
// Lists all successor blocks of a control instruction.
1048 void HControlInstruction::PrintDataTo(StringStream* stream) {
1049 stream->Add(" goto (");
1050 bool first_block = true;
1051 for (HSuccessorIterator it(this); !it.Done(); it.Advance()) {
1052 stream->Add(first_block ? "B%d" : ", B%d", it.Current()->block_id());
1053 first_block = false;
1059 void HUnaryControlInstruction::PrintDataTo(StringStream* stream) {
1060 value()->PrintNameTo(stream);
1061 HControlInstruction::PrintDataTo(stream);
1065 void HReturn::PrintDataTo(StringStream* stream) {
1066 value()->PrintNameTo(stream);
1067 stream->Add(" (pop ");
1068 parameter_count()->PrintNameTo(stream);
1069 stream->Add(" values)");
// Chooses the input representation a branch expects from its ToBoolean
// type feedback: anything requiring a heap object forces Tagged; pure
// number feedback allows Double; pure Smi feedback allows Smi.
1073 Representation HBranch::observed_input_representation(int index) {
1074 static const ToBooleanStub::Types tagged_types(
1075 ToBooleanStub::NULL_TYPE |
1076 ToBooleanStub::SPEC_OBJECT |
1077 ToBooleanStub::STRING |
1078 ToBooleanStub::SYMBOL);
1079 if (expected_input_types_.ContainsAnyOf(tagged_types)) {
1080 return Representation::Tagged();
1082 if (expected_input_types_.Contains(ToBooleanStub::UNDEFINED)) {
// Undefined mixed with heap numbers can be represented as Double
// (undefined converts to NaN); otherwise it stays Tagged.
1083 if (expected_input_types_.Contains(ToBooleanStub::HEAP_NUMBER)) {
1084 return Representation::Double();
1086 return Representation::Tagged();
1088 if (expected_input_types_.Contains(ToBooleanStub::HEAP_NUMBER)) {
1089 return Representation::Double();
1091 if (expected_input_types_.Contains(ToBooleanStub::SMI)) {
1092 return Representation::Smi();
1094 return Representation::None();
// When the branch condition is a constant emitted at its uses, the taken
// successor is statically known.
1098 bool HBranch::KnownSuccessorBlock(HBasicBlock** block) {
1099 HValue* value = this->value();
1100 if (value->EmitAtUses()) {
1101 ASSERT(value->IsConstant());
1102 ASSERT(!value->representation().IsDouble());
1103 *block = HConstant::cast(value)->BooleanValue()
1105 : SecondSuccessor();
1113 void HCompareMap::PrintDataTo(StringStream* stream) {
1114 value()->PrintNameTo(stream);
1115 stream->Add(" (%p)", *map().handle());
1116 HControlInstruction::PrintDataTo(stream);
// Human-readable name of the unary math operation.
1120 const char* HUnaryMathOperation::OpName() const {
1122 case kMathFloor: return "floor";
1123 case kMathRound: return "round";
1124 case kMathAbs: return "abs";
1125 case kMathLog: return "log";
1126 case kMathExp: return "exp";
1127 case kMathSqrt: return "sqrt";
1128 case kMathPowHalf: return "pow-half";
// Range inference for Math.abs on integral representations: the result is
// [0 or min(|lower|,|upper|), max(|lower|,|upper|)], with kMinInt clamped
// to kMaxInt because abs(kMinInt) deopts rather than overflowing.
1136 Range* HUnaryMathOperation::InferRange(Zone* zone) {
1137 Representation r = representation();
1138 if (r.IsSmiOrInteger32() && value()->HasRange()) {
1139 if (op() == kMathAbs) {
1140 int upper = value()->range()->upper();
1141 int lower = value()->range()->lower();
1142 bool spans_zero = value()->range()->CanBeZero();
1143 // Math.abs(kMinInt) overflows its representation, on which the
1144 // instruction deopts. Hence clamp it to kMaxInt.
1145 int abs_upper = upper == kMinInt ? kMaxInt : abs(upper);
1146 int abs_lower = lower == kMinInt ? kMaxInt : abs(lower);
1148 new(zone) Range(spans_zero ? 0 : Min(abs_lower, abs_upper),
1149 Max(abs_lower, abs_upper));
1150 // In case of Smi representation, clamp Math.abs(Smi::kMinValue) to
1152 if (r.IsSmi()) result->ClampToSmi();
1156 return HValue::InferRange(zone);
// Trace helpers for unary operations and type tests.
1160 void HUnaryMathOperation::PrintDataTo(StringStream* stream) {
1161 const char* name = OpName();
1162 stream->Add("%s ", name);
1163 value()->PrintNameTo(stream);
1167 void HUnaryOperation::PrintDataTo(StringStream* stream) {
1168 value()->PrintNameTo(stream);
// Prints a friendly tag for well-known instance-type intervals.
1172 void HHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
1173 value()->PrintNameTo(stream);
1175 case FIRST_JS_RECEIVER_TYPE:
1176 if (to_ == LAST_TYPE) stream->Add(" spec_object");
1178 case JS_REGEXP_TYPE:
1179 if (to_ == JS_REGEXP_TYPE) stream->Add(" reg_exp");
1182 if (to_ == JS_ARRAY_TYPE) stream->Add(" array");
1184 case JS_FUNCTION_TYPE:
1185 if (to_ == JS_FUNCTION_TYPE) stream->Add(" function");
1193 void HTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
1194 value()->PrintNameTo(stream);
1195 stream->Add(" == %o", *type_literal_);
1196 HControlInstruction::PrintDataTo(stream);
// For a specialized (untagged) representation, the typeof result is known
// statically, so the successor can be resolved at compile time.
1200 bool HTypeofIsAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
1201 if (value()->representation().IsSpecialization()) {
1202 if (compares_number_type()) {
1203 *block = FirstSuccessor();
1205 *block = SecondSuccessor();
1214 void HCheckMapValue::PrintDataTo(StringStream* stream) {
1215 value()->PrintNameTo(stream);
1217 map()->PrintNameTo(stream);
1221 void HForInPrepareMap::PrintDataTo(StringStream* stream) {
1222 enumerable()->PrintNameTo(stream);
1226 void HForInCacheArray::PrintDataTo(StringStream* stream) {
1227 enumerable()->PrintNameTo(stream);
1229 map()->PrintNameTo(stream);
1230 stream->Add("[%d]", idx_);
1234 void HLoadFieldByIndex::PrintDataTo(StringStream* stream) {
1235 object()->PrintNameTo(stream);
1237 index()->PrintNameTo(stream);
1241 static bool MatchLeftIsOnes(HValue* l, HValue* r, HValue** negated) {
1242 if (!l->EqualsInteger32Constant(~0)) return false;
1248 static bool MatchNegationViaXor(HValue* instr, HValue** negated) {
1249 if (!instr->IsBitwise()) return false;
1250 HBitwise* b = HBitwise::cast(instr);
1251 return (b->op() == Token::BIT_XOR) &&
1252 (MatchLeftIsOnes(b->left(), b->right(), negated) ||
1253 MatchLeftIsOnes(b->right(), b->left(), negated));
1257 static bool MatchDoubleNegation(HValue* instr, HValue** arg) {
1259 return MatchNegationViaXor(instr, &negated) &&
1260 MatchNegationViaXor(negated, arg);
1264 HValue* HBitwise::Canonicalize() {
1265 if (!representation().IsSmiOrInteger32()) return this;
1266 // If x is an int32, then x & -1 == x, x | 0 == x and x ^ 0 == x.
1267 int32_t nop_constant = (op() == Token::BIT_AND) ? -1 : 0;
1268 if (left()->EqualsInteger32Constant(nop_constant) &&
1269 !right()->CheckFlag(kUint32)) {
1272 if (right()->EqualsInteger32Constant(nop_constant) &&
1273 !left()->CheckFlag(kUint32)) {
1276 // Optimize double negation, a common pattern used for ToInt32(x).
1278 if (MatchDoubleNegation(this, &arg) && !arg->CheckFlag(kUint32)) {
// HAdd::RepresentationFromInputs: an add whose left input is External
// (a raw pointer, e.g. typed-array backing store arithmetic) stays External;
// otherwise defer to the generic arithmetic inference.
1285 Representation HAdd::RepresentationFromInputs() {
1286 Representation left_rep = left()->representation();
1287 if (left_rep.IsExternal()) {
1288 return Representation::External();
1290 return HArithmeticBinaryOperation::RepresentationFromInputs();
// HAdd::RequiredInputRepresentation: for External + offset adds the other
// operand must be an Integer32 offset; otherwise use the generic rule.
1294 Representation HAdd::RequiredInputRepresentation(int index) {
1296 Representation left_rep = left()->representation();
1297 if (left_rep.IsExternal()) {
1298 return Representation::Integer32();
1301 return HArithmeticBinaryOperation::RequiredInputRepresentation(index);
// IsIdentityOperation: true when |arg2| is the given integer identity constant
// and |arg1| already has a specialized (untagged) representation, so the
// operation can be replaced by |arg1| alone.
1305 static bool IsIdentityOperation(HValue* arg1, HValue* arg2, int32_t identity) {
1306 return arg1->representation().IsSpecialization() &&
1307 arg2->EqualsInteger32Constant(identity);
// HAdd::Canonicalize: x + 0 => x. Excluded for double x because -0 + 0 is +0,
// so dropping the add would change the sign of zero.
// NOTE(review): listing gaps — the "return left()/right();" lines and final
// "return this;" are missing from this excerpt.
1311 HValue* HAdd::Canonicalize() {
1312 // Adding 0 is an identity operation except in case of -0: -0 + 0 = +0
1313 if (IsIdentityOperation(left(), right(), 0) &&
1314 !left()->representation().IsDouble()) { // Left could be -0.
1317 if (IsIdentityOperation(right(), left(), 0) &&
1318 !left()->representation().IsDouble()) { // Right could be -0.
// HSub::Canonicalize: x - 0 => x.
1325 HValue* HSub::Canonicalize() {
1326 if (IsIdentityOperation(left(), right(), 0)) return left();
// HMul::Canonicalize: x * 1 => x, for either operand order.
1331 HValue* HMul::Canonicalize() {
1332 if (IsIdentityOperation(left(), right(), 1)) return left();
1333 if (IsIdentityOperation(right(), left(), 1)) return right();
// HMul::MulMinusOne: whether either factor is the constant -1 (multiplication
// by -1 needs special overflow/minus-zero handling).
1338 bool HMul::MulMinusOne() {
1339 if (left()->EqualsInteger32Constant(-1) ||
1340 right()->EqualsInteger32Constant(-1)) {
// HMod::Canonicalize (body not visible in this excerpt).
1348 HValue* HMod::Canonicalize() {
// HDiv::Canonicalize: x / 1 => x.
1353 HValue* HDiv::Canonicalize() {
1354 if (IsIdentityOperation(left(), right(), 1)) return left();
// HChange::Canonicalize: a representation change from R to R is a no-op and
// folds to its input value.
1359 HValue* HChange::Canonicalize() {
1360 return (from().Equals(to())) ? value() : this;
// HWrapReceiver::Canonicalize: dead wrappers are removed; a receiver already
// known to be a JSObject needs no wrapping (the "return receiver();" line is
// missing from this excerpt).
1364 HValue* HWrapReceiver::Canonicalize() {
1365 if (HasNoUses()) return NULL;
1366 if (receiver()->type().IsJSObject()) {
// HTypeof::PrintDataTo: prints the operand whose type is being queried.
1373 void HTypeof::PrintDataTo(StringStream* stream) {
1374 value()->PrintNameTo(stream);
// HForceRepresentation::New: factory with constant folding. If the input is a
// numeric constant whose double value is exactly representable as int32, emit
// a new constant directly in the required representation instead of a
// HForceRepresentation node.
1378 HInstruction* HForceRepresentation::New(Zone* zone, HValue* context,
1379 HValue* value, Representation required_representation) {
1380 if (FLAG_fold_constants && value->IsConstant()) {
1381 HConstant* c = HConstant::cast(value);
1382 if (c->HasNumberValue()) {
1383 double double_res = c->DoubleValue();
1384 if (IsInt32Double(double_res)) {
1385 return HConstant::New(zone, context,
1386 static_cast<int32_t>(double_res),
1387 required_representation);
// Fallback: no folding possible, allocate the instruction in |zone|.
1391 return new(zone) HForceRepresentation(value, required_representation);
// HForceRepresentation::PrintDataTo: prints "<rep-mnemonic> <value>".
1395 void HForceRepresentation::PrintDataTo(StringStream* stream) {
1396 stream->Add("%s ", representation().Mnemonic());
1397 value()->PrintNameTo(stream);
// HChange::PrintDataTo: prints the operand plus "<from> to <to>" and any
// truncation / deopt-related flags set on this conversion.
1401 void HChange::PrintDataTo(StringStream* stream) {
1402 HUnaryOperation::PrintDataTo(stream);
1403 stream->Add(" %s to %s", from().Mnemonic(), to().Mnemonic());
1405 if (CanTruncateToInt32()) stream->Add(" truncating-int32");
1406 if (CheckFlag(kBailoutOnMinusZero)) stream->Add(" -0?");
1407 if (CheckFlag(kAllowUndefinedAsNaN)) stream->Add(" allow-undefined-as-nan");
// HUnaryMathOperation::Canonicalize: two strength reductions.
// 1. round/floor of a value that is already smi/int32 (possibly behind an
//    HChange) is the value itself, modulo a representation change.
// 2. floor(a / b) with int32-convertible inputs becomes the dedicated
//    HMathFloorOfDiv instruction (only when this is the div's sole use).
// NOTE(review): numbered listing with gaps; several lines (returns, closing
// braces, bail-out paths) are missing from this excerpt. Code kept verbatim.
1411 HValue* HUnaryMathOperation::Canonicalize() {
1412 if (op() == kMathRound || op() == kMathFloor) {
1413 HValue* val = value();
// Look through an intervening representation change to the raw input.
1414 if (val->IsChange()) val = HChange::cast(val)->value();
1415 if (val->representation().IsSmiOrInteger32()) {
1416 if (val->representation().Equals(representation())) return val;
1417 return Prepend(new(block()->zone()) HChange(
1418 val, representation(), false, false));
// Only fold floor-of-div when this is the division's single use, so the
// original HDiv can be eliminated.
1421 if (op() == kMathFloor && value()->IsDiv() && value()->UseCount() == 1) {
1422 HDiv* hdiv = HDiv::cast(value());
1424 HValue* left = hdiv->left();
1425 if (left->representation().IsInteger32()) {
1426 // A value with an integer representation does not need to be transformed.
1427 } else if (left->IsChange() && HChange::cast(left)->from().IsInteger32()) {
1428 // A change from an integer32 can be replaced by the integer32 value.
1429 left = HChange::cast(left)->value();
1430 } else if (hdiv->observed_input_representation(1).IsSmiOrInteger32()) {
1431 left = Prepend(new(block()->zone()) HChange(
1432 left, Representation::Integer32(), false, false));
// Same normalization for the divisor; integer constants are copied into an
// Integer32 representation rather than converted.
1437 HValue* right = hdiv->right();
1438 if (right->IsInteger32Constant()) {
1439 right = Prepend(HConstant::cast(right)->CopyToRepresentation(
1440 Representation::Integer32(), right->block()->zone()));
1441 } else if (right->representation().IsInteger32()) {
1442 // A value with an integer representation does not need to be transformed.
1443 } else if (right->IsChange() &&
1444 HChange::cast(right)->from().IsInteger32()) {
1445 // A change from an integer32 can be replaced by the integer32 value.
1446 right = HChange::cast(right)->value();
1447 } else if (hdiv->observed_input_representation(2).IsSmiOrInteger32()) {
1448 right = Prepend(new(block()->zone()) HChange(
1449 right, Representation::Integer32(), false, false));
1454 return Prepend(HMathFloorOfDiv::New(
1455 block()->zone(), context(), left, right));
// HCheckInstanceType::Canonicalize: the check is redundant (and the "return
// NULL;" lines are missing from this excerpt) when the value's static type
// already proves the property: a known string for IS_STRING, or a constant
// internalized string for IS_INTERNALIZED_STRING.
1461 HValue* HCheckInstanceType::Canonicalize() {
1462 if (check_ == IS_STRING && value()->type().IsString()) {
1466 if (check_ == IS_INTERNALIZED_STRING && value()->IsConstant()) {
1467 if (HConstant::cast(value())->HasInternalizedStringValue()) {
// GetCheckInterval: for interval-style checks, returns the [first, last]
// instance-type range to test. Only valid when is_interval_check().
// NOTE(review): the switch header and remaining cases are missing from this
// excerpt.
1475 void HCheckInstanceType::GetCheckInterval(InstanceType* first,
1476 InstanceType* last) {
1477 ASSERT(is_interval_check());
1479 case IS_SPEC_OBJECT:
1480 *first = FIRST_SPEC_OBJECT_TYPE;
1481 *last = LAST_SPEC_OBJECT_TYPE;
1484 *first = *last = JS_ARRAY_TYPE;
// GetCheckMaskAndTag: for mask-style checks, returns the instance-type bitmask
// and expected tag. Only valid when the check is NOT an interval check.
1492 void HCheckInstanceType::GetCheckMaskAndTag(uint8_t* mask, uint8_t* tag) {
1493 ASSERT(!is_interval_check());
1496 *mask = kIsNotStringMask;
1499 case IS_INTERNALIZED_STRING:
1500 *mask = kIsNotStringMask | kIsNotInternalizedMask;
1501 *tag = kInternalizedTag;
// HCheckMaps::HandleSideEffectDominator: GVN hook. If a dominating
// HStoreNamedField transitions this object's map to a map already covered by
// this check's map set, the (otherwise unused) check is redundant and removed.
1509 void HCheckMaps::HandleSideEffectDominator(GVNFlag side_effect,
1510 HValue* dominator) {
1511 ASSERT(side_effect == kChangesMaps);
1512 // TODO(mstarzinger): For now we specialize on HStoreNamedField, but once
1513 // type information is rich enough we should generalize this to any HType
1514 // for which the map is known.
1515 if (HasNoUses() && dominator->IsStoreNamedField()) {
1516 HStoreNamedField* store = HStoreNamedField::cast(dominator);
// Bail out unless the store performs a map transition on this same object.
1517 if (!store->has_transition() || store->object() != value()) return;
1518 HConstant* transition = HConstant::cast(store->transition());
1519 if (map_set_.Contains(transition->GetUnique())) {
1520 DeleteAndReplaceWith(NULL);
// HCheckMaps::PrintDataTo: prints the value and its allowed map pointers, plus
// "(omitted)" when the check can be elided.
1527 void HCheckMaps::PrintDataTo(StringStream* stream) {
1528 value()->PrintNameTo(stream);
1529 stream->Add(" [%p", *map_set_.at(0).handle());
1530 for (int i = 1; i < map_set_.size(); ++i) {
1531 stream->Add(",%p", *map_set_.at(i).handle());
1533 stream->Add("]%s", CanOmitMapChecks() ? "(omitted)" : "");
// HCheckValue::PrintDataTo: prints the operand and a short dump of the
// expected object.
1537 void HCheckValue::PrintDataTo(StringStream* stream) {
1538 value()->PrintNameTo(stream);
1540 object().handle()->ShortPrint(stream);
// HCheckValue::Canonicalize: the check folds away when the value is a constant
// equal to the expected object. (The "? NULL : this" result lines are missing
// from this excerpt.)
1544 HValue* HCheckValue::Canonicalize() {
1545 return (value()->IsConstant() &&
1546 HConstant::cast(value())->GetUnique() == object_)
// GetCheckName: human-readable name of the instance-type check, used by
// PrintDataTo below. (Switch header/default are missing from this excerpt.)
1552 const char* HCheckInstanceType::GetCheckName() {
1554 case IS_SPEC_OBJECT: return "object";
1555 case IS_JS_ARRAY: return "array";
1556 case IS_STRING: return "string";
1557 case IS_INTERNALIZED_STRING: return "internalized_string";
// HCheckInstanceType::PrintDataTo: "<check-name> <operand>".
1564 void HCheckInstanceType::PrintDataTo(StringStream* stream) {
1565 stream->Add("%s ", GetCheckName());
1566 HUnaryOperation::PrintDataTo(stream);
// HCallStub::PrintDataTo: prints the code stub's major name followed by the
// generic unary-call data. (The stream->Add format line is missing from this
// excerpt.)
1570 void HCallStub::PrintDataTo(StringStream* stream) {
1572 CodeStub::MajorName(major_key_, false));
1573 HUnaryCall::PrintDataTo(stream);
// HUnknownOSRValue::PrintDataTo: classifies the OSR environment slot. Later
// checks override earlier ones, so the printed type is the most specific
// matching category for index_.
1577 void HUnknownOSRValue::PrintDataTo(StringStream *stream) {
1578 const char* type = "expression";
1579 if (environment_->is_local_index(index_)) type = "local";
1580 if (environment_->is_special_index(index_)) type = "special";
1581 if (environment_->is_parameter_index(index_)) type = "parameter";
1582 stream->Add("%s @ %d", type, index_);
// HInstanceOf::PrintDataTo: prints left, right and context operands.
1586 void HInstanceOf::PrintDataTo(StringStream* stream) {
1587 left()->PrintNameTo(stream);
1589 right()->PrintNameTo(stream);
1591 context()->PrintNameTo(stream);
// HValue::InferRange: default range for a value. Smi-typed values get the full
// smi range (never -0); everything else gets an unconstrained Range whose
// minus-zero bit depends on whether all uses truncate to int32.
// NOTE(review): listing gaps — the "Range* result;" declaration and the final
// "return result;" are missing from this excerpt.
1595 Range* HValue::InferRange(Zone* zone) {
1597 if (representation().IsSmi() || type().IsSmi()) {
1598 result = new(zone) Range(Smi::kMinValue, Smi::kMaxValue);
1599 result->set_can_be_minus_zero(false);
1601 result = new(zone) Range();
1602 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32));
1603 // TODO(jkummerow): The range cannot be minus zero when the upper type
1604 // bound is Integer32.
// HChange::InferRange: propagates the input's range through a representation
// change. An int32 (non-uint32) input whose range fits in a smi lets the
// change be retyped as Smi and lose its new-space-promotion GVN flag.
// NOTE(review): listing gaps — parts of the multi-line condition and the final
// "return result;" are missing from this excerpt.
1610 Range* HChange::InferRange(Zone* zone) {
1611 Range* input_range = value()->range();
1612 if (from().IsInteger32() && !value()->CheckFlag(HInstruction::kUint32) &&
1615 input_range != NULL &&
1616 input_range->IsInSmiRange()))) {
1617 set_type(HType::Smi());
1618 ClearGVNFlag(kChangesNewSpacePromotion);
// Copy the input range if available, otherwise fall back to the default.
1620 Range* result = (input_range != NULL)
1621 ? input_range->Copy(zone)
1622 : HValue::InferRange(zone);
// Minus zero survives only into non-integer representations or when some use
// does not truncate.
1623 result->set_can_be_minus_zero(!to().IsSmiOrInteger32() ||
1624 !(CheckFlag(kAllUsesTruncatingToInt32) ||
1625 CheckFlag(kAllUsesTruncatingToSmi)));
1626 if (to().IsSmi()) result->ClampToSmi();
// HConstant::InferRange: an int32 constant has the degenerate range [c, c]
// and can never be -0; other constants use the default inference.
// (The "return result;" line is missing from this excerpt.)
1631 Range* HConstant::InferRange(Zone* zone) {
1632 if (has_int32_value_) {
1633 Range* result = new(zone) Range(int32_value_, int32_value_);
1634 result->set_can_be_minus_zero(false);
1637 return HValue::InferRange(zone);
// HPhi::position: phis carry no source position of their own; report the
// position of the first real instruction in their block.
1641 int HPhi::position() const {
1642 return block()->first()->position();
// HPhi::InferRange: for smi/int32 phis, a loop-header phi conservatively gets
// the full range of its representation (its operands' ranges are not yet
// stable), while an ordinary phi gets the union of its operands' ranges.
// (The "return range;" lines are missing from this excerpt.)
1646 Range* HPhi::InferRange(Zone* zone) {
1647 Representation r = representation();
1648 if (r.IsSmiOrInteger32()) {
1649 if (block()->IsLoopHeader()) {
1650 Range* range = r.IsSmi()
1651 ? new(zone) Range(Smi::kMinValue, Smi::kMaxValue)
1652 : new(zone) Range(kMinInt, kMaxInt);
1655 Range* range = OperandAt(0)->range()->Copy(zone);
1656 for (int i = 1; i < OperandCount(); ++i) {
1657 range->Union(OperandAt(i)->range());
1662 return HValue::InferRange(zone);
// HAdd::InferRange: interval arithmetic for smi/int32 adds. If the result
// range provably cannot overflow — or every use truncates anyway — the
// kCanOverflow flag (and thus the deopt check) is dropped. The sum can be -0
// only if both operands can be. ("return res;" lines missing from excerpt.)
1667 Range* HAdd::InferRange(Zone* zone) {
1668 Representation r = representation();
1669 if (r.IsSmiOrInteger32()) {
1670 Range* a = left()->range();
1671 Range* b = right()->range();
1672 Range* res = a->Copy(zone);
1673 if (!res->AddAndCheckOverflow(r, b) ||
1674 (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
1675 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
1676 ClearFlag(kCanOverflow);
1678 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
1679 !CheckFlag(kAllUsesTruncatingToInt32) &&
1680 a->CanBeMinusZero() && b->CanBeMinusZero());
1683 return HValue::InferRange(zone);
// HSub::InferRange: same structure for subtraction. Note the minus-zero
// condition differs from HAdd: a - b can be -0 when a can be -0 and b can be
// (plain) zero.
1688 Range* HSub::InferRange(Zone* zone) {
1689 Representation r = representation();
1690 if (r.IsSmiOrInteger32()) {
1691 Range* a = left()->range();
1692 Range* b = right()->range();
1693 Range* res = a->Copy(zone);
1694 if (!res->SubAndCheckOverflow(r, b) ||
1695 (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
1696 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
1697 ClearFlag(kCanOverflow);
1699 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
1700 !CheckFlag(kAllUsesTruncatingToInt32) &&
1701 a->CanBeMinusZero() && b->CanBeZero());
1704 return HValue::InferRange(zone);
// HMul::InferRange: interval arithmetic for smi/int32 multiplication. The
// overflow flag can be cleared on truncating uses only when the product is not
// a multiply-by-minus-one (the condition line referencing MulMinusOne is
// missing from this excerpt), because kMinInt * -1 truncated differs from the
// double result. The product can be -0 when one side can be 0 and the other
// negative.
1709 Range* HMul::InferRange(Zone* zone) {
1710 Representation r = representation();
1711 if (r.IsSmiOrInteger32()) {
1712 Range* a = left()->range();
1713 Range* b = right()->range();
1714 Range* res = a->Copy(zone);
1715 if (!res->MulAndCheckOverflow(r, b) ||
1716 (((r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
1717 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) &&
1719 // Truncated int multiplication is too precise and therefore not the
1720 // same as converting to Double and back.
1721 // Handle truncated integer multiplication by -1 special.
1722 ClearFlag(kCanOverflow);
1724 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
1725 !CheckFlag(kAllUsesTruncatingToInt32) &&
1726 ((a->CanBeZero() && b->CanBeNegative()) ||
1727 (a->CanBeNegative() && b->CanBeZero())));
1730 return HValue::InferRange(zone);
// HDiv::InferRange: for int32 division, clears the overflow flag unless the
// only overflowing case (kMinInt / -1) is possible, and the div-by-zero flag
// when the divisor range excludes zero. The quotient can be -0 when the
// dividend can be -0, or can be 0 with a negative divisor.
// ("return result;" lines are missing from this excerpt.)
1735 Range* HDiv::InferRange(Zone* zone) {
1736 if (representation().IsInteger32()) {
1737 Range* a = left()->range();
1738 Range* b = right()->range();
1739 Range* result = new(zone) Range();
1740 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
1741 (a->CanBeMinusZero() ||
1742 (a->CanBeZero() && b->CanBeNegative())));
1743 if (!a->Includes(kMinInt) || !b->Includes(-1)) {
1744 ClearFlag(HValue::kCanOverflow);
1747 if (!b->CanBeZero()) {
1748 ClearFlag(HValue::kCanBeDivByZero);
1752 return HValue::InferRange(zone);
// HMod::InferRange: bounds the remainder by the divisor's magnitude and gives
// it the sign behavior of the left operand (JS % semantics).
1757 Range* HMod::InferRange(Zone* zone) {
1758 if (representation().IsInteger32()) {
1759 Range* a = left()->range();
1760 Range* b = right()->range();
1762 // The magnitude of the modulus is bounded by the right operand. Note that
1763 // apart from the cases involving kMinInt, the calculation below is the same
1764 // as Max(Abs(b->lower()), Abs(b->upper())) - 1.
1765 int32_t positive_bound = -(Min(NegAbs(b->lower()), NegAbs(b->upper())) + 1);
1767 // The result of the modulo operation has the sign of its left operand.
1768 bool left_can_be_negative = a->CanBeMinusZero() || a->CanBeNegative();
1769 Range* result = new(zone) Range(left_can_be_negative ? -positive_bound : 0,
1770 a->CanBePositive() ? positive_bound : 0);
1772 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
1773 left_can_be_negative);
// kMinInt % -1 is the only overflowing case, mirroring HDiv above.
1775 if (!a->Includes(kMinInt) || !b->Includes(-1)) {
1776 ClearFlag(HValue::kCanOverflow);
1779 if (!b->CanBeZero()) {
1780 ClearFlag(HValue::kCanBeDivByZero);
1784 return HValue::InferRange(zone);
// ExaminePhi: recognizes a two-operand loop phi as an induction variable if
// either operand is "phi +/- constant"; the other operand is the base value.
// Returns NULL when the phi does not match (the trailing "return NULL;" is
// missing from this excerpt).
1789 InductionVariableData* InductionVariableData::ExaminePhi(HPhi* phi) {
1790 if (phi->block()->loop_information() == NULL) return NULL;
1791 if (phi->OperandCount() != 2) return NULL;
1792 int32_t candidate_increment;
1794 candidate_increment = ComputeIncrement(phi, phi->OperandAt(0));
1795 if (candidate_increment != 0) {
1796 return new(phi->block()->graph()->zone())
1797 InductionVariableData(phi, phi->OperandAt(1), candidate_increment);
1800 candidate_increment = ComputeIncrement(phi, phi->OperandAt(1));
1801 if (candidate_increment != 0) {
1802 return new(phi->block()->graph()->zone())
1803 InductionVariableData(phi, phi->OperandAt(0), candidate_increment);
1811 * This function tries to match the following patterns (and all the relevant
1812 * variants related to |, & and + being commutative):
1813 * base | constant_or_mask
1814 * base & constant_and_mask
1815 * (base + constant_offset) & constant_and_mask
1816 * (base - constant_offset) & constant_and_mask
// DecomposeBitwise: splits an index expression of the form documented in the
// comment above (base combined with and/or masks, optionally through a
// constant +/- offset) into its base value and masks, filling |result|.
// NOTE(review): listing gaps — several lines (mask declaration, early returns,
// offset handling inside the Add/Sub arms) are missing from this excerpt.
1818 void InductionVariableData::DecomposeBitwise(
1820 BitwiseDecompositionResult* result) {
// Default: the undecomposed value itself is the base.
1821 HValue* base = IgnoreOsrValue(value);
1822 result->base = value;
1824 if (!base->representation().IsInteger32()) return;
1826 if (base->IsBitwise()) {
1827 bool allow_offset = false;
1830 HBitwise* bitwise = HBitwise::cast(base);
// The integer constant may be on either side of the bitwise op.
1831 if (bitwise->right()->IsInteger32Constant()) {
1832 mask = bitwise->right()->GetInteger32Constant();
1833 base = bitwise->left();
1834 } else if (bitwise->left()->IsInteger32Constant()) {
1835 mask = bitwise->left()->GetInteger32Constant();
1836 base = bitwise->right();
// Only AND masks may look through an additional constant offset.
1840 if (bitwise->op() == Token::BIT_AND) {
1841 result->and_mask = mask;
1842 allow_offset = true;
1843 } else if (bitwise->op() == Token::BIT_OR) {
1844 result->or_mask = mask;
1849 result->context = bitwise->context();
1852 if (base->IsAdd()) {
1853 HAdd* add = HAdd::cast(base);
1854 if (add->right()->IsInteger32Constant()) {
1856 } else if (add->left()->IsInteger32Constant()) {
1857 base = add->right();
1859 } else if (base->IsSub()) {
1860 HSub* sub = HSub::cast(base);
1861 if (sub->right()->IsInteger32Constant()) {
1867 result->base = base;
// AddCheck: records a bounds check against this induction variable, grouped
// by the length value it checks against. The check is ignored unless the
// current limit is valid at the check's block and the check lies inside the
// induction loop.
1872 void InductionVariableData::AddCheck(HBoundsCheck* check,
1873 int32_t upper_limit) {
1874 ASSERT(limit_validity() != NULL);
1875 if (limit_validity() != check->block() &&
1876 !limit_validity()->Dominates(check->block())) return;
1877 if (!phi()->block()->current_loop()->IsNestedInThisLoop(
1878 check->block()->current_loop())) return;
// Find (or lazily create) the per-length bucket for this check.
1880 ChecksRelatedToLength* length_checks = checks();
1881 while (length_checks != NULL) {
1882 if (length_checks->length() == check->length()) break;
1883 length_checks = length_checks->next();
1885 if (length_checks == NULL) {
1886 length_checks = new(check->block()->zone())
1887 ChecksRelatedToLength(check->length(), checks());
1888 checks_ = length_checks;
1891 length_checks->AddCheck(check, upper_limit);
// CloseCurrentBlock: stamps the block's accumulated upper limit onto every
// check recorded in the (most recent) basic block. (The loop's advance to
// c->next() and closing braces are missing from this excerpt.)
1895 void InductionVariableData::ChecksRelatedToLength::CloseCurrentBlock() {
1896 if (checks() != NULL) {
1897 InductionVariableCheck* c = checks();
1898 HBasicBlock* current_block = c->check()->block();
1899 while (c != NULL && c->check()->block() == current_block) {
1900 c->set_upper_limit(current_upper_limit_);
// UseNewIndexInCurrentBlock: rewrites the first bounds check of the current
// block to use "index_base <token> mask" (a masked index), creating or
// updating the HConstant/HBitwise pair that implements it and retiring the
// previous index if it became dead.
// NOTE(review): the parameter list (token, mask, index_base, context) and
// several lines are missing from this excerpt; code kept verbatim.
1907 void InductionVariableData::ChecksRelatedToLength::UseNewIndexInCurrentBlock(
1912 ASSERT(first_check_in_block() != NULL);
1913 HValue* previous_index = first_check_in_block()->index();
1914 ASSERT(context != NULL);
// The mask constant is inserted before the earliest point that uses it.
1916 Zone* zone = index_base->block()->graph()->zone();
1917 set_added_constant(HConstant::New(zone, context, mask));
1918 if (added_index() != NULL) {
1919 added_constant()->InsertBefore(added_index());
1921 added_constant()->InsertBefore(first_check_in_block());
// First rewrite in this block: build the masking HBitwise; later rewrites
// only retarget the existing one's operands.
1924 if (added_index() == NULL) {
1925 first_check_in_block()->ReplaceAllUsesWith(first_check_in_block()->index());
1926 HInstruction* new_index = HBitwise::New(zone, context, token, index_base,
1928 ASSERT(new_index->IsBitwise());
1929 new_index->ClearAllSideEffects();
1930 new_index->AssumeRepresentation(Representation::Integer32());
1931 set_added_index(HBitwise::cast(new_index));
1932 added_index()->InsertBefore(first_check_in_block());
1934 ASSERT(added_index()->op() == token);
1936 added_index()->SetOperandAt(1, index_base);
1937 added_index()->SetOperandAt(2, added_constant());
1938 first_check_in_block()->SetOperandAt(0, added_index());
1939 if (previous_index->UseCount() == 0) {
1940 previous_index->DeleteAndReplaceWith(NULL);
// ChecksRelatedToLength::AddCheck: records one bounds check for this length.
// Within a basic block, later checks can be subsumed by the first one: a check
// whose index is "base & mask" or "base | mask" with a mask no larger than one
// already applied is marked skip_check; otherwise the first check's index is
// widened via UseNewIndexInCurrentBlock. Non-skippable checks are appended to
// the list. NOTE(review): numbered listing with gaps; some lines (early
// return, index_base arguments, closing braces) are missing.
1944 void InductionVariableData::ChecksRelatedToLength::AddCheck(
1945 HBoundsCheck* check,
1946 int32_t upper_limit) {
1947 BitwiseDecompositionResult decomposition;
1948 InductionVariableData::DecomposeBitwise(check->index(), &decomposition);
// Entering a new basic block: close out the previous block's state and reset
// the per-block masks/limit.
1950 if (first_check_in_block() == NULL ||
1951 first_check_in_block()->block() != check->block()) {
1952 CloseCurrentBlock();
1954 first_check_in_block_ = check;
1955 set_added_index(NULL);
1956 set_added_constant(NULL);
1957 current_and_mask_in_block_ = decomposition.and_mask;
1958 current_or_mask_in_block_ = decomposition.or_mask;
1959 current_upper_limit_ = upper_limit;
1961 InductionVariableCheck* new_check = new(check->block()->graph()->zone())
1962 InductionVariableCheck(check, checks_, upper_limit);
1963 checks_ = new_check;
1967 if (upper_limit > current_upper_limit()) {
1968 current_upper_limit_ = upper_limit;
// AND masks: a strictly larger mask requires rewriting the first check's
// index; otherwise this check is redundant.
1971 if (decomposition.and_mask != 0 &&
1972 current_or_mask_in_block() == 0) {
1973 if (current_and_mask_in_block() == 0 ||
1974 decomposition.and_mask > current_and_mask_in_block()) {
1975 UseNewIndexInCurrentBlock(Token::BIT_AND,
1976 decomposition.and_mask,
1978 decomposition.context);
1979 current_and_mask_in_block_ = decomposition.and_mask;
1981 check->set_skip_check();
// OR masks: symmetric handling when no AND mask is in effect.
1983 if (current_and_mask_in_block() == 0) {
1984 if (decomposition.or_mask > current_or_mask_in_block()) {
1985 UseNewIndexInCurrentBlock(Token::BIT_OR,
1986 decomposition.or_mask,
1988 decomposition.context);
1989 current_or_mask_in_block_ = decomposition.or_mask;
1991 check->set_skip_check();
// Checks we could not prove redundant stay in the list for later processing.
1994 if (!check->skip_check()) {
1995 InductionVariableCheck* new_check = new(check->block()->graph()->zone())
1996 InductionVariableCheck(check, checks_, upper_limit);
1997 checks_ = new_check;
2003 * This method detects if phi is an induction variable, with phi_operand as
2004 * its "incremented" value (the other operand would be the "base" value).
2006 * It checks if phi_operand has the form "phi + constant".
2007 * If yes, the constant is the increment that the induction variable gets at
2008 * every loop iteration.
2009 * Otherwise it returns 0.
// ComputeIncrement: see the comment above — returns the per-iteration
// increment when phi_operand is "phi + c" or "phi - c" (as +c / -c
// respectively), and 0 when the pattern does not match (the trailing
// "return 0;" is missing from this excerpt).
2011 int32_t InductionVariableData::ComputeIncrement(HPhi* phi,
2012 HValue* phi_operand) {
2013 if (!phi_operand->representation().IsInteger32()) return 0;
2015 if (phi_operand->IsAdd()) {
2016 HAdd* operation = HAdd::cast(phi_operand);
// Addition is commutative, so the phi may be on either side.
2017 if (operation->left() == phi &&
2018 operation->right()->IsInteger32Constant()) {
2019 return operation->right()->GetInteger32Constant();
2020 } else if (operation->right() == phi &&
2021 operation->left()->IsInteger32Constant()) {
2022 return operation->left()->GetInteger32Constant();
2024 } else if (phi_operand->IsSub()) {
// Subtraction only matches with the phi on the left; the increment is the
// negated constant.
2025 HSub* operation = HSub::cast(phi_operand);
2026 if (operation->left() == phi &&
2027 operation->right()->IsInteger32Constant()) {
2028 return -operation->right()->GetInteger32Constant();
2037 * Swaps the information in "update" with the one contained in "this".
2038 * The swapping is important because this method is used while doing a
2039 * dominator tree traversal, and "update" will retain the old data that
2040 * will be restored while backtracking.
// UpdateAdditionalLimit: see the comment above — SWAPS (not copies) the
// additional upper or lower limit with the one in |update|, so that the same
// call undoes itself when backtracking the dominator-tree traversal.
2042 void InductionVariableData::UpdateAdditionalLimit(
2043 InductionVariableLimitUpdate* update) {
2044 ASSERT(update->updated_variable == this);
2045 if (update->limit_is_upper) {
2046 swap(&additional_upper_limit_, &update->limit);
2047 swap(&additional_upper_limit_is_included_, &update->limit_is_included);
2049 swap(&additional_lower_limit_, &update->limit);
2050 swap(&additional_lower_limit_is_included_, &update->limit_is_included);
// ComputeUpperLimit: tightest known constant upper bound for the induction
// variable, combining the loop-guard limit, the additional limit, and an AND
// mask. Returns kNoLimit when nothing tightens below MAX_LIMIT.
// NOTE(review): listing gaps — the or_mask parameter, the "limit_value--"
// adjustments for exclusive limits, and the or_mask handling are missing from
// this excerpt.
2055 int32_t InductionVariableData::ComputeUpperLimit(int32_t and_mask,
2057 // Should be Smi::kMaxValue but it must fit 32 bits; lower is safe anyway.
2058 const int32_t MAX_LIMIT = 1 << 30;
2060 int32_t result = MAX_LIMIT;
2062 if (limit() != NULL &&
2063 limit()->IsInteger32Constant()) {
2064 int32_t limit_value = limit()->GetInteger32Constant();
2065 if (!limit_included()) {
2068 if (limit_value < result) result = limit_value;
2071 if (additional_upper_limit() != NULL &&
2072 additional_upper_limit()->IsInteger32Constant()) {
2073 int32_t limit_value = additional_upper_limit()->GetInteger32Constant();
2074 if (!additional_upper_limit_is_included()) {
2077 if (limit_value < result) result = limit_value;
// A positive AND mask bounds the value from above as well.
2080 if (and_mask > 0 && and_mask < MAX_LIMIT) {
2081 if (and_mask < result) result = and_mask;
2085 // Add the effect of the or_mask.
2088 return result >= MAX_LIMIT ? kNoLimit : result;
// IgnoreOsrValue: looks through a two-operand phi that merges an OSR-entry
// value with the regular value, returning the non-OSR operand; otherwise
// returns v unchanged (the final "return v;" is missing from this excerpt).
2092 HValue* InductionVariableData::IgnoreOsrValue(HValue* v) {
2093 if (!v->IsPhi()) return v;
2094 HPhi* phi = HPhi::cast(v);
2095 if (phi->OperandCount() != 2) return v;
2096 if (phi->OperandAt(0)->block()->is_osr_entry()) {
2097 return phi->OperandAt(1);
2098 } else if (phi->OperandAt(1)->block()->is_osr_entry()) {
2099 return phi->OperandAt(0);
// GetInductionVariableData: retrieves the InductionVariableData attached to a
// phi, after stripping an OSR merge. (The IsPhi guard/NULL return lines are
// missing from this excerpt.)
2106 InductionVariableData* InductionVariableData::GetInductionVariableData(
2108 v = IgnoreOsrValue(v);
2110 return HPhi::cast(v)->induction_variable_data();
2117 * Check if a conditional branch to "current_branch" with token "token" is
2118 * the branch that keeps the induction loop running (and, conversely, will
2119 * terminate it if the "other_branch" is taken).
2121 * Three conditions must be met:
2122 * - "current_branch" must be in the induction loop.
2123 * - "other_branch" must be out of the induction loop.
2124 * - "token" and the induction increment must be "compatible": the token should
2125 * be a condition that keeps the execution inside the loop until the limit is
// CheckIfBranchIsLoopGuard: see the comment above — verifies that taking
// current_branch keeps execution in the induction loop, other_branch leaves
// it, and the compare token agrees with the increment's direction (LT/LTE for
// counting up, GT/GTE for counting down, != only for |increment| == 1).
// NOTE(review): the "return true/false;" bodies are missing from this excerpt.
2128 bool InductionVariableData::CheckIfBranchIsLoopGuard(
2130 HBasicBlock* current_branch,
2131 HBasicBlock* other_branch) {
2132 if (!phi()->block()->current_loop()->IsNestedInThisLoop(
2133 current_branch->current_loop())) {
2137 if (phi()->block()->current_loop()->IsNestedInThisLoop(
2138 other_branch->current_loop())) {
2142 if (increment() > 0 && (token == Token::LT || token == Token::LTE)) {
2145 if (increment() < 0 && (token == Token::GT || token == Token::GTE)) {
2148 if (Token::IsInequalityOp(token) && (increment() == 1 || increment() == -1)) {
// ComputeLimitFromPredecessorBlock: if |block| is reached from a single
// predecessor ending in a numeric compare-and-branch over an induction
// variable, extracts (variable, normalized token, limit, other target) into
// |result|. The token is negated when |block| is the false successor and
// reversed when the induction variable is the right operand, so the limit is
// always expressed as "variable <token> limit".
2156 void InductionVariableData::ComputeLimitFromPredecessorBlock(
2158 LimitFromPredecessorBlock* result) {
2159 if (block->predecessors()->length() != 1) return;
2160 HBasicBlock* predecessor = block->predecessors()->at(0);
2161 HInstruction* end = predecessor->last();
2163 if (!end->IsCompareNumericAndBranch()) return;
2164 HCompareNumericAndBranch* branch = HCompareNumericAndBranch::cast(end);
2166 Token::Value token = branch->token();
2167 if (!Token::IsArithmeticCompareOp(token)) return;
2169 HBasicBlock* other_target;
2170 if (block == branch->SuccessorAt(0)) {
2171 other_target = branch->SuccessorAt(1);
2173 other_target = branch->SuccessorAt(0);
2174 token = Token::NegateCompareOp(token);
2175 ASSERT(block == branch->SuccessorAt(1));
// Try the induction variable on the left first, then on the right (with the
// comparison reversed). NOTE(review): the guard around the right-operand case
// and the "HValue* limit" declaration placement are partially missing here.
2178 InductionVariableData* data;
2180 data = GetInductionVariableData(branch->left());
2181 HValue* limit = branch->right();
2183 data = GetInductionVariableData(branch->right());
2184 token = Token::ReverseCompareOp(token);
2185 limit = branch->left();
2189 result->variable = data;
2190 result->token = token;
2191 result->limit = limit;
2192 result->other_target = other_target;
2198 * Compute the limit that is imposed on an induction variable when entering
2200 * If the limit is the "proper" induction limit (the one that makes the loop
2201 * terminate when the induction variable reaches it) it is stored directly in
2202 * the induction variable data.
2203 * Otherwise the limit is written in "additional_limit" and the method
// ComputeInductionVariableLimit: see the comment above — stores a proper loop
// guard limit directly on the induction variable data (including the exit
// block/target used later for hoisting); any other limit goes into
// |additional_limit| for scoped application during the dominator walk.
// (The "return true;" lines are missing from this excerpt.)
2206 bool InductionVariableData::ComputeInductionVariableLimit(
2208 InductionVariableLimitUpdate* additional_limit) {
2209 LimitFromPredecessorBlock limit;
2210 ComputeLimitFromPredecessorBlock(block, &limit);
2211 if (!limit.LimitIsValid()) return false;
2213 if (limit.variable->CheckIfBranchIsLoopGuard(limit.token,
2215 limit.other_target)) {
2216 limit.variable->limit_ = limit.limit;
2217 limit.variable->limit_included_ = limit.LimitIsIncluded();
2218 limit.variable->limit_validity_ = block;
2219 limit.variable->induction_exit_block_ = block->predecessors()->at(0);
2220 limit.variable->induction_exit_target_ = limit.other_target;
2223 additional_limit->updated_variable = limit.variable;
2224 additional_limit->limit = limit.limit;
2225 additional_limit->limit_is_upper = limit.LimitIsUpper();
2226 additional_limit->limit_is_included = limit.LimitIsIncluded();
// HMathMinMax::InferRange: combines the operand ranges with CombinedMax or
// CombinedMin according to operation_. (The "return res;" line is missing
// from this excerpt.)
2232 Range* HMathMinMax::InferRange(Zone* zone) {
2233 if (representation().IsSmiOrInteger32()) {
2234 Range* a = left()->range();
2235 Range* b = right()->range();
2236 Range* res = a->Copy(zone);
2237 if (operation_ == kMathMax) {
2238 res->CombinedMax(b);
2240 ASSERT(operation_ == kMathMin);
2241 res->CombinedMin(b);
2245 return HValue::InferRange(zone);
// HPhi::PrintTo: prints every operand, then a per-representation use-count
// summary (total_smi_int32_double_tagged), followed by range and type.
2250 void HPhi::PrintTo(StringStream* stream) {
2252 for (int i = 0; i < OperandCount(); ++i) {
2253 HValue* value = OperandAt(i);
2255 value->PrintNameTo(stream);
2258 stream->Add(" uses:%d_%ds_%di_%dd_%dt",
2260 smi_non_phi_uses() + smi_indirect_uses(),
2261 int32_non_phi_uses() + int32_indirect_uses(),
2262 double_non_phi_uses() + double_indirect_uses(),
2263 tagged_non_phi_uses() + tagged_indirect_uses());
2264 PrintRangeTo(stream);
2265 PrintTypeTo(stream);
// HPhi::AddInput: appends an operand slot then sets it (SetOperandAt handles
// the use-list bookkeeping), and propagates the kIsArguments taint so phis
// reachable from 'arguments' are detectable.
2270 void HPhi::AddInput(HValue* value) {
2271 inputs_.Add(NULL, value->block()->zone());
2272 SetOperandAt(OperandCount() - 1, value);
2273 // Mark phis that may have 'arguments' directly or indirectly as an operand.
2274 if (!CheckFlag(kIsArguments) && value->CheckFlag(kIsArguments)) {
2275 SetFlag(kIsArguments);
// HPhi::HasRealUses: true if any use is a non-phi instruction. (The trailing
// "return false;" is missing from this excerpt.)
2280 bool HPhi::HasRealUses() {
2281 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
2282 if (!it.value()->IsPhi()) return true;
// GetRedundantReplacement: if every operand is either this phi itself or one
// single other value, the phi is redundant and that value replaces it; NULL
// otherwise. (The initial "int position = 0;" and final "return candidate;"
// are missing from this excerpt.)
2288 HValue* HPhi::GetRedundantReplacement() {
2289 HValue* candidate = NULL;
2290 int count = OperandCount();
2292 while (position < count && candidate == NULL) {
2293 HValue* current = OperandAt(position++);
2294 if (current != this) candidate = current;
2296 while (position < count) {
2297 HValue* current = OperandAt(position++);
2298 if (current != this && current != candidate) return NULL;
2300 ASSERT(candidate != this);
// DeleteFromGraph: removal goes through the owning block, which clears the
// back-pointer (asserted after the call).
2305 void HPhi::DeleteFromGraph() {
2306 ASSERT(block() != NULL);
2307 block()->RemovePhi(this);
2308 ASSERT(block() == NULL);
// HPhi::InitRealUses: tallies this phi's non-phi uses per observed input
// representation and conservatively computes its truncation flags: start with
// both truncation flags set, then clear each one as soon as a real
// (non-simulate) use does not truncate.
2312 void HPhi::InitRealUses(int phi_id) {
2313 // Initialize real uses.
2315 // Compute a conservative approximation of truncating uses before inferring
2316 // representations. The proper, exact computation will be done later, when
2317 // inserting representation changes.
2318 SetFlag(kTruncatingToSmi);
2319 SetFlag(kTruncatingToInt32);
2320 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
2321 HValue* value = it.value();
2322 if (!value->IsPhi()) {
2323 Representation rep = value->observed_input_representation(it.index());
2324 non_phi_uses_[rep.kind()] += 1;
2325 if (FLAG_trace_representation) {
2326 PrintF("#%d Phi is used by real #%d %s as %s\n",
2327 id(), value->id(), value->Mnemonic(), rep.Mnemonic());
// Simulates (deopt environments) don't constrain truncation.
2329 if (!value->IsSimulate()) {
2330 if (!value->CheckFlag(kTruncatingToSmi)) {
2331 ClearFlag(kTruncatingToSmi);
2333 if (!value->CheckFlag(kTruncatingToInt32)) {
2334 ClearFlag(kTruncatingToInt32);
// AddNonPhiUsesFrom: folds another phi's direct (non-phi) use counts into this
// phi's indirect-use counters, used when connected phis share representation
// pressure.
2342 void HPhi::AddNonPhiUsesFrom(HPhi* other) {
2343 if (FLAG_trace_representation) {
2344 PrintF("adding to #%d Phi uses of #%d Phi: s%d i%d d%d t%d\n",
2346 other->non_phi_uses_[Representation::kSmi],
2347 other->non_phi_uses_[Representation::kInteger32],
2348 other->non_phi_uses_[Representation::kDouble],
2349 other->non_phi_uses_[Representation::kTagged]);
2352 for (int i = 0; i < Representation::kNumRepresentations; i++) {
2353 indirect_uses_[i] += other->non_phi_uses_[i];
// AddIndirectUsesTo: accumulates this phi's indirect-use counters into the
// caller-provided array (indexed by representation kind).
2358 void HPhi::AddIndirectUsesTo(int* dest) {
2359 for (int i = 0; i < Representation::kNumRepresentations; i++) {
2360 dest[i] += indirect_uses_[i];
// HSimulate::MergeWith: folds a list of later simulates into this one.
// Assigned values win only if this simulate has no value for that index yet;
// pushed values are transferred (cancelling against pending pops first — the
// "pop_count_--; continue;" lines are missing from this excerpt), and pop
// counts accumulate. Merged simulates are deleted.
2365 void HSimulate::MergeWith(ZoneList<HSimulate*>* list) {
2366 while (!list->is_empty()) {
2367 HSimulate* from = list->RemoveLast();
2368 ZoneList<HValue*>* from_values = &from->values_;
2369 for (int i = 0; i < from_values->length(); ++i) {
2370 if (from->HasAssignedIndexAt(i)) {
2371 int index = from->GetAssignedIndexAt(i);
2372 if (HasValueForIndex(index)) continue;
2373 AddAssignedValue(index, from_values->at(i));
2375 if (pop_count_ > 0) {
2378 AddPushedValue(from_values->at(i));
2382 pop_count_ += from->pop_count_;
2383 from->DeleteAndReplaceWith(NULL);
// HSimulate::PrintDataTo: "id=<ast> pop N / var[i] = v ... push v", values
// printed in reverse (stack) order.
2388 void HSimulate::PrintDataTo(StringStream* stream) {
2389 stream->Add("id=%d", ast_id().ToInt());
2390 if (pop_count_ > 0) stream->Add(" pop %d", pop_count_);
2391 if (values_.length() > 0) {
2392 if (pop_count_ > 0) stream->Add(" /");
2393 for (int i = values_.length() - 1; i >= 0; --i) {
2394 if (HasAssignedIndexAt(i)) {
2395 stream->Add(" var[%d] = ", GetAssignedIndexAt(i));
2397 stream->Add(" push ");
2399 values_[i]->PrintNameTo(stream);
2400 if (i > 0) stream->Add(",");
// HSimulate::ReplayEnvironment: applies this simulate's recorded effects to an
// environment: set the ast id, drop popped values, then bind assigned slots /
// push plain values (the "else env->Push(value);" branch is missing from this
// excerpt). Values are replayed from the end of the list.
2406 void HSimulate::ReplayEnvironment(HEnvironment* env) {
2407 ASSERT(env != NULL);
2408 env->set_ast_id(ast_id());
2409 env->Drop(pop_count());
2410 for (int i = values()->length() - 1; i >= 0; --i) {
2411 HValue* value = values()->at(i);
2412 if (HasAssignedIndexAt(i)) {
2413 env->Bind(GetAssignedIndexAt(i), value);
// ReplayEnvironmentNested: recursively replaces every captured object with a
// matching capture id by |other|, descending into nested captured objects'
// value lists.
2421 static void ReplayEnvironmentNested(const ZoneList<HValue*>* values,
2422 HCapturedObject* other) {
2423 for (int i = 0; i < values->length(); ++i) {
2424 HValue* value = values->at(i);
2425 if (value->IsCapturedObject()) {
2426 if (HCapturedObject::cast(value)->capture_id() == other->capture_id()) {
2427 values->at(i) = other;
2429 ReplayEnvironmentNested(HCapturedObject::cast(value)->values(), other);
2436 // Replay captured objects by replacing all captured objects with the
2437 // same capture id in the current and all outer environments.
// (The "env = env->outer();" advance in the loop is missing from this
// excerpt.)
2438 void HCapturedObject::ReplayEnvironment(HEnvironment* env) {
2439 ASSERT(env != NULL);
2440 while (env != NULL) {
2441 ReplayEnvironmentNested(env->values(), this);
// HCapturedObject::PrintDataTo: "#<capture-id> " followed by the
// dematerialized-object field dump.
2447 void HCapturedObject::PrintDataTo(StringStream* stream) {
2448 stream->Add("#%d ", capture_id());
2449 HDematerializedObject::PrintDataTo(stream);
// HEnterInlined::RegisterReturnTarget: records a block to which the inlined
// function may return; must be flagged as an inline return target.
2453 void HEnterInlined::RegisterReturnTarget(HBasicBlock* return_target,
2455 ASSERT(return_target->IsInlineReturnTarget());
2456 return_targets_.Add(return_target, zone);
// HEnterInlined::PrintDataTo: "<function-name>, id=<function-id>".
2460 void HEnterInlined::PrintDataTo(StringStream* stream) {
2461 SmartArrayPointer<char> name = function()->debug_name()->ToCString();
2462 stream->Add("%s, id=%d", name.get(), function()->id().ToInt());
2466 static bool IsInteger32(double value) {
2467 double roundtrip_value = static_cast<double>(static_cast<int32_t>(value));
2468 return BitCast<int64_t>(roundtrip_value) == BitCast<int64_t>(value);
// Constructor from an arbitrary handle. Classifies the value up front:
// number-ness (smi/int32/double flags), internalized-string-ness,
// cell-ness, and whether the object lives outside new space.
2472 HConstant::HConstant(Handle<Object> handle, Representation r)
2473 : HTemplateInstruction<0>(HType::TypeFromValue(handle)),
2474 object_(Unique<Object>::CreateUninitialized(handle)),
2475 has_smi_value_(false),
2476 has_int32_value_(false),
2477 has_double_value_(false),
2478 has_external_reference_value_(false),
2479 is_internalized_string_(false),
2480 is_not_in_new_space_(true),
2482 boolean_value_(handle->BooleanValue()) {
2483 if (handle->IsHeapObject()) {
2484 Heap* heap = Handle<HeapObject>::cast(handle)->GetHeap();
2485 is_not_in_new_space_ = !heap->InNewSpace(*handle);
// Numbers get their int32/smi/double flags derived from the double value.
2487 if (handle->IsNumber()) {
2488 double n = handle->Number();
2489 has_int32_value_ = IsInteger32(n);
2490 int32_value_ = DoubleToInt32(n);
2491 has_smi_value_ = has_int32_value_ && Smi::IsValid(int32_value_);
2493 has_double_value_ = true;
2494 // TODO(titzer): if this heap number is new space, tenure a new one.
2496 is_internalized_string_ = handle->IsInternalizedString();
// Cells are tracked so EmitAtUses() can refuse to rematerialize them.
2499 is_cell_ = !handle.is_null() &&
2500 (handle->IsCell() || handle->IsPropertyCell());

// Constructor from a pre-classified Unique<Object>; the caller supplies
// the flags instead of them being derived from the handle. Several
// parameter lines (representation, type, boolean flag) are elided here.
2505 HConstant::HConstant(Unique<Object> unique,
2508 bool is_internalize_string,
2509 bool is_not_in_new_space,
2512 : HTemplateInstruction<0>(type),
2514 has_smi_value_(false),
2515 has_int32_value_(false),
2516 has_double_value_(false),
2517 has_external_reference_value_(false),
2518 is_internalized_string_(is_internalize_string),
2519 is_not_in_new_space_(is_not_in_new_space),
2521 boolean_value_(boolean_value) {
2522 ASSERT(!unique.handle().is_null());
2523 ASSERT(!type.IsTaggedNumber());

// Constructor from a known int32 value; double_value_ is the exact
// conversion, so both int32 and double flags are set.
2528 HConstant::HConstant(int32_t integer_value,
2530 bool is_not_in_new_space,
2531 Unique<Object> object)
2533 has_smi_value_(Smi::IsValid(integer_value)),
2534 has_int32_value_(true),
2535 has_double_value_(true),
2536 has_external_reference_value_(false),
2537 is_internalized_string_(false),
2538 is_not_in_new_space_(is_not_in_new_space),
2540 boolean_value_(integer_value != 0),
2541 int32_value_(integer_value),
2542 double_value_(FastI2D(integer_value)) {
2543 set_type(has_smi_value_ ? HType::Smi() : HType::TaggedNumber());

// Constructor from a known double; int32 flags set only when the value
// round-trips (IsInteger32). NaN and -0 are falsy per the boolean flag.
2548 HConstant::HConstant(double double_value,
2550 bool is_not_in_new_space,
2551 Unique<Object> object)
2553 has_int32_value_(IsInteger32(double_value)),
2554 has_double_value_(true),
2555 has_external_reference_value_(false),
2556 is_internalized_string_(false),
2557 is_not_in_new_space_(is_not_in_new_space),
2559 boolean_value_(double_value != 0 && !std::isnan(double_value)),
2560 int32_value_(DoubleToInt32(double_value)),
2561 double_value_(double_value) {
2562 has_smi_value_ = has_int32_value_ && Smi::IsValid(int32_value_);
2563 set_type(has_smi_value_ ? HType::Smi() : HType::TaggedNumber());

// Constructor for an external (C++) address constant; always truthy,
// always Representation::External.
2568 HConstant::HConstant(ExternalReference reference)
2569 : HTemplateInstruction<0>(HType::None()),
2570 object_(Unique<Object>(Handle<Object>::null())),
2571 has_smi_value_(false),
2572 has_int32_value_(false),
2573 has_double_value_(false),
2574 has_external_reference_value_(true),
2575 is_internalized_string_(false),
2576 is_not_in_new_space_(true),
2578 boolean_value_(true),
2579 external_reference_value_(reference) {
2580 Initialize(Representation::External());
// Picks the most specific representation this constant supports when the
// requested one is None, then sets it. Also eagerly migrates JSObject
// constants whose map has been deprecated.
2584 void HConstant::Initialize(Representation r) {
// Smi is only chosen on 31-bit-smi targets; 32-bit-smi targets
// (e.g. x64) fall through to Integer32 — TODO confirm rationale.
2586 if (has_smi_value_ && SmiValuesAre31Bits()) {
2587 r = Representation::Smi();
2588 } else if (has_int32_value_) {
2589 r = Representation::Integer32();
2590 } else if (has_double_value_) {
2591 r = Representation::Double();
2592 } else if (has_external_reference_value_) {
2593 r = Representation::External();
// Fallback path (elided else header): a heap-object constant.
2595 Handle<Object> object = object_.handle();
2596 if (object->IsJSObject()) {
2597 // Try to eagerly migrate JSObjects that have deprecated maps.
2598 Handle<JSObject> js_object = Handle<JSObject>::cast(object);
2599 if (js_object->map()->is_deprecated()) {
2600 JSObject::TryMigrateInstance(js_object);
2603 r = Representation::Tagged();
2606 set_representation(r);

// Whether this constant should be (re)materialized at each use instead of
// being emitted once at its definition.
2611 bool HConstant::EmitAtUses() {
2613 if (block()->graph()->has_osr() &&
2614 block()->graph()->IsStandardConstant(this)) {
2615 // TODO(titzer): this seems like a hack that should be fixed by custom OSR.
// (Return value of the OSR special case is on an elided line.)
2618 if (UseCount() == 0) return true;
2619 if (IsCell()) return false;
2620 if (representation().IsDouble()) return false;

// Copies this constant with representation |r|, or returns NULL when the
// constant cannot hold that representation.
2625 HConstant* HConstant::CopyToRepresentation(Representation r, Zone* zone) const {
2626 if (r.IsSmi() && !has_smi_value_) return NULL;
2627 if (r.IsInteger32() && !has_int32_value_) return NULL;
2628 if (r.IsDouble() && !has_double_value_) return NULL;
2629 if (r.IsExternal() && !has_external_reference_value_) return NULL;
2630 if (has_int32_value_) {
2631 return new(zone) HConstant(int32_value_, r, is_not_in_new_space_, object_);
2633 if (has_double_value_) {
2634 return new(zone) HConstant(double_value_, r, is_not_in_new_space_, object_);
2636 if (has_external_reference_value_) {
2637 return new(zone) HConstant(external_reference_value_);
2639 ASSERT(!object_.handle().is_null());
2640 return new(zone) HConstant(object_,
2643 is_internalized_string_,
2644 is_not_in_new_space_,

// Copies this constant as a truncated Integer32 constant; doubles go
// through DoubleToInt32 (ECMA ToInt32 truncation). Returns "nothing" when
// the constant is neither int32 nor double.
2650 Maybe<HConstant*> HConstant::CopyToTruncatedInt32(Zone* zone) {
2651 HConstant* res = NULL;
2652 if (has_int32_value_) {
2653 res = new(zone) HConstant(int32_value_,
2654 Representation::Integer32(),
2655 is_not_in_new_space_,
2657 } else if (has_double_value_) {
2658 res = new(zone) HConstant(DoubleToInt32(double_value_),
2659 Representation::Integer32(),
2660 is_not_in_new_space_,
2663 return Maybe<HConstant*>(res != NULL, res);

// Converts oddball constants to their numeric equivalents:
// true -> 1, false -> 0, undefined -> NaN, null -> 0.
2667 Maybe<HConstant*> HConstant::CopyToTruncatedNumber(Zone* zone) {
2668 HConstant* res = NULL;
2669 Handle<Object> handle = this->handle(zone->isolate());
2670 if (handle->IsBoolean()) {
2671 res = handle->BooleanValue() ?
2672 new(zone) HConstant(1) : new(zone) HConstant(0);
2673 } else if (handle->IsUndefined()) {
2674 res = new(zone) HConstant(OS::nan_value());
2675 } else if (handle->IsNull()) {
2676 res = new(zone) HConstant(0);
2678 return Maybe<HConstant*>(res != NULL, res);

// Debug print: most specific value form first, falling back to the
// handle's ShortPrint; flags new-space residency.
2682 void HConstant::PrintDataTo(StringStream* stream) {
2683 if (has_int32_value_) {
2684 stream->Add("%d ", int32_value_);
2685 } else if (has_double_value_) {
2686 stream->Add("%f ", FmtElm(double_value_));
2687 } else if (has_external_reference_value_) {
2688 stream->Add("%p ", reinterpret_cast<void*>(
2689 external_reference_value_.address()));
// Fallback (elided else header): print the tagged handle.
2691 handle(Isolate::Current())->ShortPrint(stream);
2693 if (!is_not_in_new_space_) {
2694 stream->Add("[new space] ");
// Debug print: both operands plus overflow / minus-zero-bailout markers.
2699 void HBinaryOperation::PrintDataTo(StringStream* stream) {
2700 left()->PrintNameTo(stream);
2702 right()->PrintNameTo(stream);
2703 if (CheckFlag(kCanOverflow)) stream->Add(" !");
2704 if (CheckFlag(kBailoutOnMinusZero)) stream->Add(" -0?");

// Representation inference for binary ops: start from the inputs, widen
// Smi to Integer32 when a use cannot take a Smi, then consult either the
// uses or the recorded output representation.
2708 void HBinaryOperation::InferRepresentation(HInferRepresentationPhase* h_infer) {
2709 ASSERT(CheckFlag(kFlexibleRepresentation));
2710 Representation new_rep = RepresentationFromInputs();
2711 UpdateRepresentation(new_rep, h_infer, "inputs");
2713 if (representation().IsSmi() && HasNonSmiUse()) {
2714 UpdateRepresentation(
2715 Representation::Integer32(), h_infer, "use requirements");
// Without observed output feedback, derive from uses; otherwise (elided
// else header) trust the recorded output representation.
2718 if (observed_output_representation_.IsNone()) {
2719 new_rep = RepresentationFromUses();
2720 UpdateRepresentation(new_rep, h_infer, "uses");
2722 new_rep = RepresentationFromOutput();
2723 UpdateRepresentation(new_rep, h_infer, "output");

2728 Representation HBinaryOperation::RepresentationFromInputs() {
2729 // Determine the worst case of observed input representations and
2730 // the currently assumed output representation.
2731 Representation rep = representation();
2732 for (int i = 1; i <= 2; ++i) {
2733 rep = rep.generalize(observed_input_representation(i));
2735 // If any of the actual input representation is more general than what we
2736 // have so far but not Tagged, use that representation instead.
2737 Representation left_rep = left()->representation();
2738 Representation right_rep = right()->representation();
2739 if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
2740 if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);

// The observed (double) output can be ignored when all uses truncate to
// the current int-ish representation — except for Mul, where Integer32
// would be too precise unless it is a multiply by -1.
2746 bool HBinaryOperation::IgnoreObservedOutputRepresentation(
2747 Representation current_rep) {
2748 return ((current_rep.IsInteger32() && CheckUsesForFlag(kTruncatingToInt32)) ||
2749 (current_rep.IsSmi() && CheckUsesForFlag(kTruncatingToSmi))) &&
2750 // Mul in Integer32 mode would be too precise.
2751 (!this->IsMul() || HMul::cast(this)->MulMinusOne());

2755 Representation HBinaryOperation::RepresentationFromOutput() {
2756 Representation rep = representation();
2757 // Consider observed output representation, but ignore it if it's Double,
2758 // this instruction is not a division, and all its uses are truncating
2760 if (observed_output_representation_.is_more_general_than(rep) &&
2761 !IgnoreObservedOutputRepresentation(rep)) {
2762 return observed_output_representation_;
2764 return Representation::None();

// Force a representation on both observed inputs and the value itself.
2768 void HBinaryOperation::AssumeRepresentation(Representation r) {
2769 set_observed_input_representation(1, r);
2770 set_observed_input_representation(2, r);
2771 HValue::AssumeRepresentation(r);

// Min/max derives its representation from inputs only, never from uses.
2775 void HMathMinMax::InferRepresentation(HInferRepresentationPhase* h_infer) {
2776 ASSERT(CheckFlag(kFlexibleRepresentation));
2777 Representation new_rep = RepresentationFromInputs();
2778 UpdateRepresentation(new_rep, h_infer, "inputs");
2779 // Do not care about uses.
// Range inference for bitwise ops. XOR with two known ranges gets a tight
// power-of-two bound; AND/OR fall back to a sign-mask argument.
2783 Range* HBitwise::InferRange(Zone* zone) {
2784 if (op() == Token::BIT_XOR) {
2785 if (left()->HasRange() && right()->HasRange()) {
2786 // The maximum value has the high bit, and all bits below, set:
2788 // If the range can be negative, the minimum int is a negative number with
2789 // the high bit, and all bits below, unset:
2791 // If it cannot be negative, conservatively choose 0 as minimum int.
2792 int64_t left_upper = left()->range()->upper();
2793 int64_t left_lower = left()->range()->lower();
2794 int64_t right_upper = right()->range()->upper();
2795 int64_t right_lower = right()->range()->lower();
// Map negative bounds to their one's complement so that all four values
// contribute only magnitude bits to the high-bit computation below.
2797 if (left_upper < 0) left_upper = ~left_upper;
2798 if (left_lower < 0) left_lower = ~left_lower;
2799 if (right_upper < 0) right_upper = ~right_upper;
2800 if (right_lower < 0) right_lower = ~right_lower;
2802 int high = MostSignificantBit(
2803 static_cast<uint32_t>(
2804 left_upper | left_lower | right_upper | right_lower));
// (The computation of |limit| from |high| is on elided lines —
// presumably 2^(high+1); confirm against the full source.)
2808 int32_t min = (left()->range()->CanBeNegative() ||
2809 right()->range()->CanBeNegative())
2810 ? static_cast<int32_t>(-limit) : 0;
2811 return new(zone) Range(min, static_cast<int32_t>(limit - 1));
// XOR without both ranges: generic range, minus-zero impossible.
2813 Range* result = HValue::InferRange(zone);
2814 result->set_can_be_minus_zero(false);
// AND/OR path: combine the operand sign-masks; a non-negative mask
// bounds the result from above.
2817 const int32_t kDefaultMask = static_cast<int32_t>(0xffffffff);
2818 int32_t left_mask = (left()->range() != NULL)
2819 ? left()->range()->Mask()
2821 int32_t right_mask = (right()->range() != NULL)
2822 ? right()->range()->Mask()
2824 int32_t result_mask = (op() == Token::BIT_AND)
2825 ? left_mask & right_mask
2826 : left_mask | right_mask;
2827 if (result_mask >= 0) return new(zone) Range(0, result_mask);
2829 Range* result = HValue::InferRange(zone);
2830 result->set_can_be_minus_zero(false);

// Arithmetic shift right by a constant: shift the operand's range.
2835 Range* HSar::InferRange(Zone* zone) {
2836 if (right()->IsConstant()) {
2837 HConstant* c = HConstant::cast(right());
2838 if (c->HasInteger32Value()) {
2839 Range* result = (left()->range() != NULL)
2840 ? left()->range()->Copy(zone)
2841 : new(zone) Range();
2842 result->Sar(c->Integer32Value());
2846 return HValue::InferRange(zone);

// Logical shift right by a constant. Negative inputs with a non-zero
// shift are bounded by 0xffffffff >> count; purely positive inputs
// behave like an arithmetic shift.
2850 Range* HShr::InferRange(Zone* zone) {
2851 if (right()->IsConstant()) {
2852 HConstant* c = HConstant::cast(right());
2853 if (c->HasInteger32Value()) {
// Only the low five shift bits matter, matching JS semantics.
2854 int shift_count = c->Integer32Value() & 0x1f;
2855 if (left()->range()->CanBeNegative()) {
2856 // Only compute bounds if the result always fits into an int32.
2857 return (shift_count >= 1)
2858 ? new(zone) Range(0,
2859 static_cast<uint32_t>(0xffffffff) >> shift_count)
2860 : new(zone) Range();
2862 // For positive inputs we can use the >> operator.
2863 Range* result = (left()->range() != NULL)
2864 ? left()->range()->Copy(zone)
2865 : new(zone) Range();
2866 result->Sar(c->Integer32Value());
2871 return HValue::InferRange(zone);

// Shift left by a constant: shift the operand's range.
2875 Range* HShl::InferRange(Zone* zone) {
2876 if (right()->IsConstant()) {
2877 HConstant* c = HConstant::cast(right());
2878 if (c->HasInteger32Value()) {
2879 Range* result = (left()->range() != NULL)
2880 ? left()->range()->Copy(zone)
2881 : new(zone) Range();
2882 result->Shl(c->Integer32Value());
2886 return HValue::InferRange(zone);
// Range of a named-field load follows the field's storage width
// (int8/uint8/int16/uint16) or, for string lengths, [0, kMaxLength].
2890 Range* HLoadNamedField::InferRange(Zone* zone) {
2891 if (access().representation().IsInteger8()) {
2892 return new(zone) Range(kMinInt8, kMaxInt8);
2894 if (access().representation().IsUInteger8()) {
2895 return new(zone) Range(kMinUInt8, kMaxUInt8);
2897 if (access().representation().IsInteger16()) {
2898 return new(zone) Range(kMinInt16, kMaxInt16);
2900 if (access().representation().IsUInteger16()) {
2901 return new(zone) Range(kMinUInt16, kMaxUInt16);
2903 if (access().IsStringLength()) {
2904 return new(zone) Range(0, String::kMaxLength);
2906 return HValue::InferRange(zone);

// Range of a keyed load from an external (typed) array follows the
// element type's width; other kinds use the generic range.
2910 Range* HLoadKeyed::InferRange(Zone* zone) {
2911 switch (elements_kind()) {
2912 case EXTERNAL_INT8_ELEMENTS:
2913 return new(zone) Range(kMinInt8, kMaxInt8);
2914 case EXTERNAL_UINT8_ELEMENTS:
2915 case EXTERNAL_UINT8_CLAMPED_ELEMENTS:
2916 return new(zone) Range(kMinUInt8, kMaxUInt8);
2917 case EXTERNAL_INT16_ELEMENTS:
2918 return new(zone) Range(kMinInt16, kMaxInt16);
2919 case EXTERNAL_UINT16_ELEMENTS:
2920 return new(zone) Range(kMinUInt16, kMaxUInt16);
// Default case (elided label): no element-type-derived bound.
2922 return HValue::InferRange(zone);
// Debug print: comparison token, then the shared binary-operation data.
2927 void HCompareGeneric::PrintDataTo(StringStream* stream) {
2928 stream->Add(Token::Name(token()));
2930 HBinaryOperation::PrintDataTo(stream);

// Debug print: comparison token, then the control-instruction successors.
2934 void HStringCompareAndBranch::PrintDataTo(StringStream* stream) {
2935 stream->Add(Token::Name(token()));
2937 HControlInstruction::PrintDataTo(stream);

// Debug print: token, both operands, successors.
2941 void HCompareNumericAndBranch::PrintDataTo(StringStream* stream) {
2942 stream->Add(Token::Name(token()));
2944 left()->PrintNameTo(stream);
2946 right()->PrintNameTo(stream);
2947 HControlInstruction::PrintDataTo(stream);

// Debug print: both operands, then successors.
2951 void HCompareObjectEqAndBranch::PrintDataTo(StringStream* stream) {
2952 left()->PrintNameTo(stream);
2954 right()->PrintNameTo(stream);
2955 HControlInstruction::PrintDataTo(stream);

// If both operands are constants the branch is statically decided:
// pick the successor matching constant identity.
2959 bool HCompareObjectEqAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
2960 if (left()->IsConstant() && right()->IsConstant()) {
2961 bool comparison_result =
2962 HConstant::cast(left())->Equals(HConstant::cast(right()));
2963 *block = comparison_result
2965 : SecondSuccessor();
// (The true-successor arm and the non-constant fallback are elided.)

// Hole comparison adopts the representation of its input value.
2973 void HCompareHoleAndBranch::InferRepresentation(
2974 HInferRepresentationPhase* h_infer) {
2975 ChangeRepresentation(value()->representation());

// Minus-zero test on a Smi/Integer32 value is statically false:
// take the second successor.
2979 bool HCompareMinusZeroAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
2980 if (value()->representation().IsSmiOrInteger32()) {
2981 // A Smi or Integer32 cannot contain minus zero.
2982 *block = SecondSuccessor();

// Minus-zero comparison adopts the representation of its input value.
2990 void HCompareMinusZeroAndBranch::InferRepresentation(
2991 HInferRepresentationPhase* h_infer) {
2992 ChangeRepresentation(value()->representation());

// Debug print: the (single) successor block id.
2997 void HGoto::PrintDataTo(StringStream* stream) {
2998 stream->Add("B%d", SuccessorAt(0)->block_id());
// Chooses the comparison representation from type feedback and actual
// input representations; a Double result restricts the undefined-as-NaN
// optimization to ordered relational compares (see long comment below).
3002 void HCompareNumericAndBranch::InferRepresentation(
3003 HInferRepresentationPhase* h_infer) {
3004 Representation left_rep = left()->representation();
3005 Representation right_rep = right()->representation();
// Note: observed inputs are indexed 0/1 here, unlike HBinaryOperation's
// 1/2 indexing above.
3006 Representation observed_left = observed_input_representation(0);
3007 Representation observed_right = observed_input_representation(1);
3009 Representation rep = Representation::None();
3010 rep = rep.generalize(observed_left);
3011 rep = rep.generalize(observed_right);
3012 if (rep.IsNone() || rep.IsSmiOrInteger32()) {
3013 if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
3014 if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);
// Else-branch (elided header): anything wider becomes a double compare.
3016 rep = Representation::Double();
3019 if (rep.IsDouble()) {
3020 // According to the ES5 spec (11.9.3, 11.8.5), Equality comparisons (==, ===
3021 // and !=) have special handling of undefined, e.g. undefined == undefined
3022 // is 'true'. Relational comparisons have a different semantic, first
3023 // calling ToPrimitive() on their arguments. The standard Crankshaft
3024 // tagged-to-double conversion to ensure the HCompareNumericAndBranch's
3025 // inputs are doubles caused 'undefined' to be converted to NaN. That's
3026 // compatible out-of-the box with ordered relational comparisons (<, >, <=,
3027 // >=). However, for equality comparisons (and for 'in' and 'instanceof'),
3028 // it is not consistent with the spec. For example, it would cause undefined
3029 // == undefined (should be true) to be evaluated as NaN == NaN
3030 // (false). Therefore, any comparisons other than ordered relational
3031 // comparisons must cause a deopt when one of their arguments is undefined.
3033 if (Token::IsOrderedRelationalCompareOp(token_)) {
3034 SetFlag(kAllowUndefinedAsNaN);
3037 ChangeRepresentation(rep);

// Debug print: the parameter index.
3041 void HParameter::PrintDataTo(StringStream* stream) {
3042 stream->Add("%u", index());

// Debug print: receiver object and the field access descriptor.
3046 void HLoadNamedField::PrintDataTo(StringStream* stream) {
3047 object()->PrintNameTo(stream);
3048 access_.PrintTo(stream);
// Factory for a single-map check. When the checked value is a constant
// already known to have |map| and the map allows omitting checks, the
// check is marked omittable and a compilation dependency is registered
// so a later map transition invalidates the code.
3052 HCheckMaps* HCheckMaps::New(Zone* zone,
3056 CompilationInfo* info,
3057 HValue* typecheck) {
3058 HCheckMaps* check_map = new(zone) HCheckMaps(value, zone, typecheck);
3059 check_map->Add(map, zone);
3060 if (map->CanOmitMapChecks() &&
3061 value->IsConstant() &&
3062 HConstant::cast(value)->HasMap(map)) {
3063 // TODO(titzer): collect dependent map checks into a list.
3064 check_map->omit_ = true;
3065 if (map->CanTransition()) {
3066 map->AddDependentCompilationInfo(
3067 DependentCode::kPrototypeCheckGroup, info);

// Debug print: receiver and property name.
3074 void HLoadNamedGeneric::PrintDataTo(StringStream* stream) {
3075 object()->PrintNameTo(stream);
3077 stream->Add(String::cast(*name())->ToCString().get());

// Debug print: elements (annotated with kind for external arrays), key,
// dehoisting offset, dependency, and hole-check marker.
3081 void HLoadKeyed::PrintDataTo(StringStream* stream) {
3082 if (!is_external()) {
3083 elements()->PrintNameTo(stream);
// Else-branch (elided header): external array loads.
3085 ASSERT(elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND &&
3086 elements_kind() <= LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
3087 elements()->PrintNameTo(stream);
3089 stream->Add(ElementsKindToString(elements_kind()));
3093 key()->PrintNameTo(stream);
3094 if (IsDehoisted()) {
3095 stream->Add(" + %d]", index_offset());
3100 if (HasDependency()) {
3102 dependency()->PrintNameTo(stream);
3105 if (RequiresHoleCheck()) {
3106 stream->Add(" check_hole");

// Whether some use of this load requires the hole value to be preserved
// (rather than being checked away). Packed and external kinds can never
// produce the hole. (Several return statements are elided.)
3111 bool HLoadKeyed::UsesMustHandleHole() const {
3112 if (IsFastPackedElementsKind(elements_kind())) {
3116 if (IsExternalArrayElementsKind(elements_kind())) {
3120 if (hole_mode() == ALLOW_RETURN_HOLE) {
3121 if (IsFastDoubleElementsKind(elements_kind())) {
3122 return AllUsesCanTreatHoleAsNaN();
3127 if (IsFastDoubleElementsKind(elements_kind())) {
3131 // Holes are only returned as tagged values.
3132 if (!representation().IsTagged()) {
// All uses must be HChange conversions for holes to be handled.
3136 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
3137 HValue* use = it.value();
3138 if (!use->IsChange()) return false;

// Holes can read as NaN only for double arrays whose every use accepts
// undefined-as-NaN.
3145 bool HLoadKeyed::AllUsesCanTreatHoleAsNaN() const {
3146 return IsFastDoubleElementsKind(elements_kind()) &&
3147 CheckUsesForFlag(HValue::kAllowUndefinedAsNaN);

// A hole check is needed unless the kind cannot contain holes (packed /
// external) or the uses handle holes themselves.
3151 bool HLoadKeyed::RequiresHoleCheck() const {
3152 if (IsFastPackedElementsKind(elements_kind())) {
3156 if (IsExternalArrayElementsKind(elements_kind())) {
3160 return !UsesMustHandleHole();

// Debug print: receiver and key.
3164 void HLoadKeyedGeneric::PrintDataTo(StringStream* stream) {
3165 object()->PrintNameTo(stream);
3167 key()->PrintNameTo(stream);
3172 HValue* HLoadKeyedGeneric::Canonicalize() {
3173 // Recognize generic keyed loads that use property name generated
3174 // by for-in statement as a key and rewrite them into fast property load
// (by index — tail of this comment is elided).
3176 if (key()->IsLoadKeyed()) {
3177 HLoadKeyed* key_load = HLoadKeyed::cast(key());
3178 if (key_load->elements()->IsForInCacheArray()) {
3179 HForInCacheArray* names_cache =
3180 HForInCacheArray::cast(key_load->elements());
// The rewrite only applies when the cache was built for this receiver.
3182 if (names_cache->enumerable() == object()) {
3183 HForInCacheArray* index_cache =
3184 names_cache->index_cache();
// Guard the receiver's map so the cached enumeration stays valid.
3185 HCheckMapValue* map_check =
3186 HCheckMapValue::New(block()->graph()->zone(),
3187 block()->graph()->GetInvalidContext(),
3189 names_cache->map());
// Load the field index from the parallel index cache.
3190 HInstruction* index = HLoadKeyed::New(
3191 block()->graph()->zone(),
3192 block()->graph()->GetInvalidContext(),
3196 key_load->elements_kind());
3197 map_check->InsertBefore(this);
3198 index->InsertBefore(this);
// Replace this generic load with a direct field-by-index load.
3199 return Prepend(new(block()->zone()) HLoadFieldByIndex(
// Debug print: receiver, property name (must be a string), and value.
3209 void HStoreNamedGeneric::PrintDataTo(StringStream* stream) {
3210 object()->PrintNameTo(stream);
3212 ASSERT(name()->IsString());
3213 stream->Add(String::cast(*name())->ToCString().get());
3215 value()->PrintNameTo(stream);

// Debug print: receiver, field access, value, plus write-barrier and
// map-transition annotations.
3219 void HStoreNamedField::PrintDataTo(StringStream* stream) {
3220 object()->PrintNameTo(stream);
3221 access_.PrintTo(stream);
3223 value()->PrintNameTo(stream);
3224 if (NeedsWriteBarrier()) {
3225 stream->Add(" (write-barrier)");
3227 if (has_transition()) {
3228 stream->Add(" (transition map %p)", *transition_map());

// Debug print: elements (with kind for external stores), key,
// dehoisting offset, and value.
3233 void HStoreKeyed::PrintDataTo(StringStream* stream) {
3234 if (!is_external()) {
3235 elements()->PrintNameTo(stream);
// Else-branch (elided header): external array stores.
3237 elements()->PrintNameTo(stream);
3239 stream->Add(ElementsKindToString(elements_kind()));
3240 ASSERT(elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND &&
3241 elements_kind() <= LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
3245 key()->PrintNameTo(stream);
3246 if (IsDehoisted()) {
3247 stream->Add(" + %d] = ", index_offset());
3249 stream->Add("] = ");
3252 value()->PrintNameTo(stream);

// Debug print: receiver, key, value.
3256 void HStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
3257 object()->PrintNameTo(stream);
3259 key()->PrintNameTo(stream);
3260 stream->Add("] = ");
3261 value()->PrintNameTo(stream);

// Debug print: source and target maps with their elements kinds; flags
// transitions that only change the map word.
3265 void HTransitionElementsKind::PrintDataTo(StringStream* stream) {
3266 object()->PrintNameTo(stream);
3267 ElementsKind from_kind = original_map().handle()->elements_kind();
3268 ElementsKind to_kind = transitioned_map().handle()->elements_kind();
3269 stream->Add(" %p [%s] -> %p [%s]",
3270 *original_map().handle(),
3271 ElementsAccessor::ForKind(from_kind)->name(),
3272 *transitioned_map().handle(),
3273 ElementsAccessor::ForKind(to_kind)->name());
3274 if (IsSimpleMapChangeTransition(from_kind, to_kind)) stream->Add(" (simple)");

// Debug print: cell address plus deletability/read-only property details.
3278 void HLoadGlobalCell::PrintDataTo(StringStream* stream) {
3279 stream->Add("[%p]", *cell().handle());
3280 if (!details_.IsDontDelete()) stream->Add(" (deleteable)");
3281 if (details_.IsReadOnly()) stream->Add(" (read-only)");

// A non-deletable, writable cell never holds the hole; otherwise the
// check can only be skipped if every use is an HChange conversion.
3285 bool HLoadGlobalCell::RequiresHoleCheck() const {
3286 if (details_.IsDontDelete() && !details_.IsReadOnly()) return false;
3287 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
3288 HValue* use = it.value();
3289 if (!use->IsChange()) return true;

// Debug print: global property name.
3295 void HLoadGlobalGeneric::PrintDataTo(StringStream* stream) {
3296 stream->Add("%o ", *name());

// Debug print: base object and byte offset within the folded allocation.
3300 void HInnerAllocatedObject::PrintDataTo(StringStream* stream) {
3301 base_object()->PrintNameTo(stream);
3302 stream->Add(" offset %d", offset());

// Debug print: cell address, stored value, property details.
3306 void HStoreGlobalCell::PrintDataTo(StringStream* stream) {
3307 stream->Add("[%p] = ", *cell().handle());
3308 value()->PrintNameTo(stream);
3309 if (!details_.IsDontDelete()) stream->Add(" (deleteable)");
3310 if (details_.IsReadOnly()) stream->Add(" (read-only)");

// Debug print: context value and slot index.
3314 void HLoadContextSlot::PrintDataTo(StringStream* stream) {
3315 value()->PrintNameTo(stream);
3316 stream->Add("[%d]", slot_index());

// Debug print: context, slot index, and stored value.
3320 void HStoreContextSlot::PrintDataTo(StringStream* stream) {
3321 context()->PrintNameTo(stream);
3322 stream->Add("[%d] = ", slot_index());
3323 value()->PrintNameTo(stream);
3327 // Implementation of type inference and type conversions. Calculates
3328 // the inferred type of this instruction based on the input operands.
// Base implementation; its (one-line) body is elided in this listing.
3330 HType HValue::CalculateInferredType() {

// A phi's type is the join (Combine) of all operand types; an operand-less
// phi conservatively gets Tagged.
3335 HType HPhi::CalculateInferredType() {
3336 if (OperandCount() == 0) return HType::Tagged();
3337 HType result = OperandAt(0)->type();
3338 for (int i = 1; i < OperandCount(); ++i) {
3339 HType current = OperandAt(i)->type();
3340 result = result.Combine(current);

// Boxing a double to a tagged value always yields a heap number.
// (The fallback return for other conversions is elided.)
3346 HType HChange::CalculateInferredType() {
3347 if (from().IsDouble() && to().IsTagged()) return HType::HeapNumber();

3352 Representation HUnaryMathOperation::RepresentationFromInputs() {
3353 Representation rep = representation();
3354 // If any of the actual input representation is more general than what we
3355 // have so far but not Tagged, use that representation instead.
3356 Representation input_rep = value()->representation();
3357 if (!input_rep.IsTagged()) {
3358 rep = rep.generalize(input_rep);
// Allocation folding: when this allocation is dominated by another
// allocation into the same space with constant sizes, grow the dominator
// by this allocation's size and replace this instruction with an
// HInnerAllocatedObject pointing into the dominator's memory.
3364 void HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
3365 HValue* dominator) {
3366 ASSERT(side_effect == kChangesNewSpacePromotion);
3367 Zone* zone = block()->zone();
3368 if (!FLAG_use_allocation_folding) return;
3370 // Try to fold allocations together with their dominating allocations.
3371 if (!dominator->IsAllocate()) {
3372 if (FLAG_trace_allocation_folding) {
3373 PrintF("#%d (%s) cannot fold into #%d (%s)\n",
3374 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
// (Early return after the trace is on an elided line.)
3379 HAllocate* dominator_allocate = HAllocate::cast(dominator);
3380 HValue* dominator_size = dominator_allocate->size();
3381 HValue* current_size = size();
3383 // TODO(hpayer): Add support for non-constant allocation in dominator.
3384 if (!current_size->IsInteger32Constant() ||
3385 !dominator_size->IsInteger32Constant()) {
3386 if (FLAG_trace_allocation_folding) {
3387 PrintF("#%d (%s) cannot fold into #%d (%s), dynamic allocation size\n",
3388 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
// May swap in the dominator's own dominator for old-space hoisting.
3393 dominator_allocate = GetFoldableDominator(dominator_allocate);
3394 if (dominator_allocate == NULL) {
// Both allocations must target the same space at this point.
3398 ASSERT((IsNewSpaceAllocation() &&
3399 dominator_allocate->IsNewSpaceAllocation()) ||
3400 (IsOldDataSpaceAllocation() &&
3401 dominator_allocate->IsOldDataSpaceAllocation()) ||
3402 (IsOldPointerSpaceAllocation() &&
3403 dominator_allocate->IsOldPointerSpaceAllocation()));
3405 // First update the size of the dominator allocate instruction.
3406 dominator_size = dominator_allocate->size();
3407 int32_t original_object_size =
3408 HConstant::cast(dominator_size)->GetInteger32Constant();
3409 int32_t dominator_size_constant = original_object_size;
3410 int32_t current_size_constant =
3411 HConstant::cast(current_size)->GetInteger32Constant();
3412 int32_t new_dominator_size = dominator_size_constant + current_size_constant;
// Double alignment may require padding (half a double) before this
// object's slice of the folded allocation.
3414 if (MustAllocateDoubleAligned()) {
3415 if (!dominator_allocate->MustAllocateDoubleAligned()) {
3416 dominator_allocate->MakeDoubleAligned();
3418 if ((dominator_size_constant & kDoubleAlignmentMask) != 0) {
3419 dominator_size_constant += kDoubleSize / 2;
3420 new_dominator_size += kDoubleSize / 2;
3424 // Since we clear the first word after folded memory, we cannot use the
3425 // whole Page::kMaxRegularHeapObjectSize memory.
3426 if (new_dominator_size > Page::kMaxRegularHeapObjectSize - kPointerSize) {
3427 if (FLAG_trace_allocation_folding) {
3428 PrintF("#%d (%s) cannot fold into #%d (%s) due to size: %d\n",
3429 id(), Mnemonic(), dominator_allocate->id(),
3430 dominator_allocate->Mnemonic(), new_dominator_size);
// Install the grown size constant on the dominator.
3435 HInstruction* new_dominator_size_constant = HConstant::CreateAndInsertBefore(
3439 Representation::None(),
3440 dominator_allocate);
3441 dominator_allocate->UpdateSize(new_dominator_size_constant);
3444 if (FLAG_verify_heap && dominator_allocate->IsNewSpaceAllocation()) {
3445 dominator_allocate->MakePrefillWithFiller();
3447 // TODO(hpayer): This is a short-term hack to make allocation mementos
3448 // work again in new space.
3449 dominator_allocate->ClearNextMapWord(original_object_size);
// Else-branch (elided header): same map-word clearing without prefill.
3452 // TODO(hpayer): This is a short-term hack to make allocation mementos
3453 // work again in new space.
3454 dominator_allocate->ClearNextMapWord(original_object_size);
3457 dominator_allocate->UpdateClearNextMapWord(MustClearNextMapWord());
3459 // After that replace the dominated allocate instruction.
3460 HInstruction* inner_offset = HConstant::CreateAndInsertBefore(
3463 dominator_size_constant,
3464 Representation::None(),
3467 HInstruction* dominated_allocate_instr =
3468 HInnerAllocatedObject::New(zone,
3473 dominated_allocate_instr->InsertBefore(this);
3474 DeleteAndReplaceWith(dominated_allocate_instr);
3475 if (FLAG_trace_allocation_folding) {
3476 PrintF("#%d (%s) folded into #%d (%s)\n",
3477 id(), Mnemonic(), dominator_allocate->id(),
3478 dominator_allocate->Mnemonic());
// Returns the allocation this one may fold into, or NULL. When the
// spaces differ, an old-space allocation may be hoisted over the
// dominator to the dominator's own dominating allocation, installing a
// free-space filler so the skipped region stays iterable for the GC.
3483 HAllocate* HAllocate::GetFoldableDominator(HAllocate* dominator) {
3484 if (!IsFoldable(dominator)) {
3485 // We cannot hoist old space allocations over new space allocations.
3486 if (IsNewSpaceAllocation() || dominator->IsNewSpaceAllocation()) {
3487 if (FLAG_trace_allocation_folding) {
3488 PrintF("#%d (%s) cannot fold into #%d (%s), new space hoisting\n",
3489 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
// (Return NULL after the trace is on an elided line.)
3494 HAllocate* dominator_dominator = dominator->dominating_allocate_;
3496 // We can hoist old data space allocations over an old pointer space
3497 // allocation and vice versa. For that we have to check the dominator
3498 // of the dominator allocate instruction.
3499 if (dominator_dominator == NULL) {
// Remember the dominator so a later allocation can hoist over it.
3500 dominating_allocate_ = dominator;
3501 if (FLAG_trace_allocation_folding) {
3502 PrintF("#%d (%s) cannot fold into #%d (%s), different spaces\n",
3503 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3508 // We can just fold old space allocations that are in the same basic block,
3509 // since it is not guaranteed that we fill up the whole allocated old
// (space — tail of this comment is elided.)
3511 // TODO(hpayer): Remove this limitation and add filler maps for each each
3512 // allocation as soon as we have store elimination.
3513 if (block()->block_id() != dominator_dominator->block()->block_id()) {
3514 if (FLAG_trace_allocation_folding) {
3515 PrintF("#%d (%s) cannot fold into #%d (%s), different basic blocks\n",
3516 id(), Mnemonic(), dominator_dominator->id(),
3517 dominator_dominator->Mnemonic());
3522 ASSERT((IsOldDataSpaceAllocation() &&
3523 dominator_dominator->IsOldDataSpaceAllocation()) ||
3524 (IsOldPointerSpaceAllocation() &&
3525 dominator_dominator->IsOldPointerSpaceAllocation()));
3527 int32_t current_size = HConstant::cast(size())->GetInteger32Constant();
3528 HStoreNamedField* dominator_free_space_size =
3529 dominator->filler_free_space_size_;
3530 if (dominator_free_space_size != NULL) {
3531 // We already hoisted one old space allocation, i.e., we already installed
3532 // a filler map. Hence, we just have to update the free space size.
3533 dominator->UpdateFreeSpaceFiller(current_size);
// Else-branch (elided header): first hoist, install a fresh filler.
3535 // This is the first old space allocation that gets hoisted. We have to
3536 // install a filler map since the follwing allocation may cause a GC.
3537 dominator->CreateFreeSpaceFiller(current_size);
3540 // We can hoist the old space allocation over the actual dominator.
3541 return dominator_dominator;
3547 void HAllocate::UpdateFreeSpaceFiller(int32_t free_space_size) {
3548 ASSERT(filler_free_space_size_ != NULL);
3549 Zone* zone = block()->zone();
3550 // We must explicitly force Smi representation here because on x64 we
3551 // would otherwise automatically choose int32, but the actual store
3552 // requires a Smi-tagged value.
3553 HConstant* new_free_space_size = HConstant::CreateAndInsertBefore(
3556 filler_free_space_size_->value()->GetInteger32Constant() +
3558 Representation::Smi(),
3559 filler_free_space_size_);
3560 filler_free_space_size_->UpdateValue(new_free_space_size);
// Installs a FreeSpace filler (map word + size field) in the slack left when
// this allocation is folded into a dominating one, so the heap stays
// iterable if a GC runs before the space is reused.
3564 void HAllocate::CreateFreeSpaceFiller(int32_t free_space_size) {
3565 ASSERT(filler_free_space_size_ == NULL);
3566 Zone* zone = block()->zone();
// The filler starts immediately after the dominating allocation's object.
3567 HInstruction* free_space_instr =
3568 HInnerAllocatedObject::New(zone, context(), dominating_allocate_,
3569 dominating_allocate_->size(), type());
3570 free_space_instr->InsertBefore(this);
3571 HConstant* filler_map = HConstant::New(
3574 isolate()->factory()->free_space_map());
3575 filler_map->FinalizeUniqueness(); // TODO(titzer): should be init'd a'ready
3576 filler_map->InsertAfter(free_space_instr);
// Write the free-space map into the filler's map word; invisible to GVN.
3577 HInstruction* store_map = HStoreNamedField::New(zone, context(),
3578 free_space_instr, HObjectAccess::ForMap(), filler_map);
3579 store_map->SetFlag(HValue::kHasNoObservableSideEffects);
3580 store_map->InsertAfter(filler_map);
3582 // We must explicitly force Smi representation here because on x64 we
3583 // would otherwise automatically choose int32, but the actual store
3584 // requires a Smi-tagged value.
3585 HConstant* filler_size = HConstant::CreateAndInsertAfter(
3586 zone, context(), free_space_size, Representation::Smi(), store_map);
3587 // Must force Smi representation for x64 (see comment above).
3588 HObjectAccess access =
3589 HObjectAccess::ForJSObjectOffset(FreeSpace::kSizeOffset,
3590 Representation::Smi());
3591 HStoreNamedField* store_size = HStoreNamedField::New(zone, context(),
3592 free_space_instr, access, filler_size);
3593 store_size->SetFlag(HValue::kHasNoObservableSideEffects);
3594 store_size->InsertAfter(filler_size);
// Remember the size store so UpdateFreeSpaceFiller can enlarge it later.
3595 filler_free_space_size_ = store_size;
// If required, zeroes the word at |offset| past this allocation so stale
// data there is not misread as a map pointer by the heap iterator.
3599 void HAllocate::ClearNextMapWord(int offset) {
3600 if (MustClearNextMapWord()) {
3601 Zone* zone = block()->zone();
3602 HObjectAccess access = HObjectAccess::ForJSObjectOffset(offset);
3603 HStoreNamedField* clear_next_map =
3604 HStoreNamedField::New(zone, context(), this, access,
3605 block()->graph()->GetConstant0());
// Pure heap-invariant fix-up: hide it from GVN/effect tracking entirely.
3606 clear_next_map->ClearAllSideEffects();
3607 clear_next_map->InsertAfter(this);
// Prints the allocation size followed by single-letter flags:
// N=new space, P=old pointer space, D=old data space,
// A=double aligned, F=prefill with filler.
3612 void HAllocate::PrintDataTo(StringStream* stream) {
3613 size()->PrintNameTo(stream);
3615 if (IsNewSpaceAllocation()) stream->Add("N");
3616 if (IsOldPointerSpaceAllocation()) stream->Add("P");
3617 if (IsOldDataSpaceAllocation()) stream->Add("D");
3618 if (MustAllocateDoubleAligned()) stream->Add("A");
3619 if (MustPrefillWithFiller()) stream->Add("F");
// Requests a -0 deopt check when this int-typed math operation could see a
// -0 input, and decides whether the check must propagate to the operand.
3624 HValue* HUnaryMathOperation::EnsureAndPropagateNotMinusZero(
3625 BitVector* visited) {
3627 if (representation().IsSmiOrInteger32() &&
3628 !value()->representation().Equals(representation())) {
// No range info, or the range admits -0: a runtime check is needed here.
3629 if (value()->range() == NULL || value()->range()->CanBeMinusZero()) {
3630 SetFlag(kBailoutOnMinusZero);
3633 if (RequiredInputRepresentation(0).IsSmiOrInteger32() &&
3634 representation().Equals(RequiredInputRepresentation(0))) {
// -0 handling for representation changes: an int32/Smi source or a
// truncating change can never produce -0, so no check is needed then.
3641 HValue* HChange::EnsureAndPropagateNotMinusZero(BitVector* visited) {
3643 if (from().IsSmiOrInteger32()) return NULL;
3644 if (CanTruncateToInt32()) return NULL;
3645 if (value()->range() == NULL || value()->range()->CanBeMinusZero()) {
3646 SetFlag(kBailoutOnMinusZero);
3648 ASSERT(!from().IsSmiOrInteger32() || !to().IsSmiOrInteger32());
// -0 propagation hook for HForceRepresentation.
// NOTE(review): the body is not visible in this view.
3653 HValue* HForceRepresentation::EnsureAndPropagateNotMinusZero(
3654 BitVector* visited) {
// Mod can produce -0 (negative dividend, zero result); request a deopt
// check when the computed range admits it.
3660 HValue* HMod::EnsureAndPropagateNotMinusZero(BitVector* visited) {
3662 if (range() == NULL || range()->CanBeMinusZero()) {
3663 SetFlag(kBailoutOnMinusZero);
// Division can produce -0; request a deopt check when the range admits it.
3670 HValue* HDiv::EnsureAndPropagateNotMinusZero(BitVector* visited) {
3672 if (range() == NULL || range()->CanBeMinusZero()) {
3673 SetFlag(kBailoutOnMinusZero);
// Flooring division is handled conservatively: always request the -0 check.
3679 HValue* HMathFloorOfDiv::EnsureAndPropagateNotMinusZero(BitVector* visited) {
3681 SetFlag(kBailoutOnMinusZero);
// Multiplication can produce -0 (e.g. 0 * -1); request a deopt check when
// the computed range admits it.
3686 HValue* HMul::EnsureAndPropagateNotMinusZero(BitVector* visited) {
3688 if (range() == NULL || range()->CanBeMinusZero()) {
3689 SetFlag(kBailoutOnMinusZero);
// -0 propagation for subtraction.
3695 HValue* HSub::EnsureAndPropagateNotMinusZero(BitVector* visited) {
3697 // Propagate to the left argument. If the left argument cannot be -0, then
3698 // the result of the sub operation cannot be either.
3699 if (range() == NULL || range()->CanBeMinusZero()) {
// -0 propagation for addition.
3706 HValue* HAdd::EnsureAndPropagateNotMinusZero(BitVector* visited) {
3708 // Propagate to the left argument. If the left argument cannot be -0, then
3709 // the result of the add operation cannot be either.
3710 if (range() == NULL || range()->CanBeMinusZero()) {
// Decides whether the stored value must be canonicalized before the keyed
// store, to avoid writing a hole-like NaN bit pattern.
3717 bool HStoreKeyed::NeedsCanonicalization() {
3718 // If value is an integer or smi or comes from the result of a keyed load or
3719 // constant then it is either a non-hole value or in the case of a constant
3720 // the hole is only being stored explicitly: no need for canonicalization.
3722 // The exception to that is keyed loads from external float or double arrays:
3723 // these can load arbitrary representations of NaN.
3725 if (value()->IsConstant()) {
3729 if (value()->IsLoadKeyed()) {
3730 return IsExternalFloatOrDoubleElementsKind(
3731 HLoadKeyed::cast(value())->elements_kind());
3734 if (value()->IsChange()) {
// A change from an int32/Smi source, or of a statically-known Smi value,
// cannot carry an arbitrary NaN pattern.
3735 if (HChange::cast(value())->from().IsSmiOrInteger32()) {
3738 if (HChange::cast(value())->value()->type().IsSmi()) {
// Helpers used by the constant-folding factories below to wrap a folded
// result in a new HConstant of the appropriate kind.
3746 #define H_CONSTANT_INT(val) \
3747 HConstant::New(zone, context, static_cast<int32_t>(val))
3748 #define H_CONSTANT_DOUBLE(val) \
3749 HConstant::New(zone, context, static_cast<double>(val))
// Factory macro for HAdd/HMul/HSub: folds the operation at graph-build time
// when both inputs are numeric constants, preferring an int32 constant when
// the double result is exactly representable as int32.
3751 #define DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HInstr, op) \
3752 HInstruction* HInstr::New( \
3753 Zone* zone, HValue* context, HValue* left, HValue* right) { \
3754 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \
3755 HConstant* c_left = HConstant::cast(left); \
3756 HConstant* c_right = HConstant::cast(right); \
3757 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \
3758 double double_res = c_left->DoubleValue() op c_right->DoubleValue(); \
3759 if (IsInt32Double(double_res)) { \
3760 return H_CONSTANT_INT(double_res); \
3762 return H_CONSTANT_DOUBLE(double_res); \
3765 return new(zone) HInstr(context, left, right); \
3769 DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HAdd, +)
3770 DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HMul, *)
3771 DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HSub, -)
3773 #undef DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR
// Constant-folds string concatenation when both operands are constant
// strings; otherwise emits an HStringAdd instruction.
3776 HInstruction* HStringAdd::New(Zone* zone,
3780 PretenureFlag pretenure_flag,
3781 StringAddFlags flags,
3782 Handle<AllocationSite> allocation_site) {
3783 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3784 HConstant* c_right = HConstant::cast(right);
3785 HConstant* c_left = HConstant::cast(left);
3786 if (c_left->HasStringValue() && c_right->HasStringValue()) {
// Fold to a flat concatenated string constant built by the factory.
3787 Handle<String> concat = zone->isolate()->factory()->NewFlatConcatString(
3788 c_left->StringValue(), c_right->StringValue());
3789 return HConstant::New(zone, context, concat);
3792 return new(zone) HStringAdd(
3793 context, left, right, pretenure_flag, flags, allocation_site);
// Prints which operands receive a string check (_CheckBoth/_CheckLeft/
// _CheckRight) and the pretenuring decision (N = not tenured, D = tenured).
3797 void HStringAdd::PrintDataTo(StringStream* stream) {
3798 if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) {
3799 stream->Add("_CheckBoth");
3800 } else if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_LEFT) {
3801 stream->Add("_CheckLeft");
3802 } else if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_RIGHT) {
3803 stream->Add("_CheckRight");
3806 if (pretenure_flag() == NOT_TENURED) stream->Add("N");
3807 else if (pretenure_flag() == TENURED) stream->Add("D");
// Constant-folds String.fromCharCode for a constant finite code (masked to
// 16 bits); a non-finite code folds to the empty string.
3812 HInstruction* HStringCharFromCode::New(
3813 Zone* zone, HValue* context, HValue* char_code) {
3814 if (FLAG_fold_constants && char_code->IsConstant()) {
3815 HConstant* c_code = HConstant::cast(char_code);
3816 Isolate* isolate = zone->isolate();
3817 if (c_code->HasNumberValue()) {
3818 if (std::isfinite(c_code->DoubleValue())) {
3819 uint32_t code = c_code->NumberValueAsInteger32() & 0xffff;
3820 return HConstant::New(zone, context,
3821 LookupSingleCharacterStringFromCode(isolate, code));
3823 return HConstant::New(zone, context, isolate->factory()->empty_string());
3826 return new(zone) HStringCharFromCode(context, char_code);
// Constant-folds unary Math operations on a constant numeric input. NaN
// poisons every operation. NOTE(review): the enclosing switch and its case
// labels (per BuiltinFunctionId) are elided in this view; each return below
// belongs to one op (exp/log/sqrt/powhalf/abs/round/floor and the +/-Inf
// special cases).
3830 HInstruction* HUnaryMathOperation::New(
3831 Zone* zone, HValue* context, HValue* value, BuiltinFunctionId op) {
3833 if (!FLAG_fold_constants) break;
3834 if (!value->IsConstant()) break;
3835 HConstant* constant = HConstant::cast(value);
3836 if (!constant->HasNumberValue()) break;
3837 double d = constant->DoubleValue();
3838 if (std::isnan(d)) { // NaN poisons everything.
3839 return H_CONSTANT_DOUBLE(OS::nan_value());
3841 if (std::isinf(d)) { // +Infinity and -Infinity.
3844 return H_CONSTANT_DOUBLE((d > 0.0) ? d : 0.0);
3847 return H_CONSTANT_DOUBLE((d > 0.0) ? d : OS::nan_value());
3850 return H_CONSTANT_DOUBLE((d > 0.0) ? d : OS::nan_value());
3853 return H_CONSTANT_DOUBLE(d);
3861 return H_CONSTANT_DOUBLE(fast_exp(d));
3863 return H_CONSTANT_DOUBLE(std::log(d));
3865 return H_CONSTANT_DOUBLE(fast_sqrt(d));
3867 return H_CONSTANT_DOUBLE(power_double_double(d, 0.5));
3869 return H_CONSTANT_DOUBLE((d >= 0.0) ? d + 0.0 : -d);
3871 // -0.5 .. -0.0 round to -0.0.
3872 if ((d >= -0.5 && Double(d).Sign() < 0)) return H_CONSTANT_DOUBLE(-0.0);
3873 // Doubles are represented as Significant * 2 ^ Exponent. If the
3874 // Exponent is not negative, the double value is already an integer.
3875 if (Double(d).Exponent() >= 0) return H_CONSTANT_DOUBLE(d);
3876 return H_CONSTANT_DOUBLE(std::floor(d + 0.5));
3878 return H_CONSTANT_DOUBLE(std::floor(d));
3884 return new(zone) HUnaryMathOperation(context, value, op);
// Constant-folds exponentiation via power_helper; a NaN result is
// normalized to the canonical NaN value.
3888 HInstruction* HPower::New(Zone* zone,
3892 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3893 HConstant* c_left = HConstant::cast(left);
3894 HConstant* c_right = HConstant::cast(right);
3895 if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
3896 double result = power_helper(c_left->DoubleValue(),
3897 c_right->DoubleValue());
3898 return H_CONSTANT_DOUBLE(std::isnan(result) ? OS::nan_value() : result);
3901 return new(zone) HPower(left, right);
// Constant-folds Math.min/Math.max. Equal magnitudes are disambiguated via
// the sign bit so that -0 < +0; if every ordered comparison fails, one of
// the inputs was NaN and the result is NaN.
3905 HInstruction* HMathMinMax::New(
3906 Zone* zone, HValue* context, HValue* left, HValue* right, Operation op) {
3907 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3908 HConstant* c_left = HConstant::cast(left);
3909 HConstant* c_right = HConstant::cast(right);
3910 if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
3911 double d_left = c_left->DoubleValue();
3912 double d_right = c_right->DoubleValue();
3913 if (op == kMathMin) {
3914 if (d_left > d_right) return H_CONSTANT_DOUBLE(d_right);
3915 if (d_left < d_right) return H_CONSTANT_DOUBLE(d_left);
3916 if (d_left == d_right) {
3917 // Handle +0 and -0.
3918 return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_left
3922 if (d_left < d_right) return H_CONSTANT_DOUBLE(d_right);
3923 if (d_left > d_right) return H_CONSTANT_DOUBLE(d_left);
3924 if (d_left == d_right) {
3925 // Handle +0 and -0.
3926 return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_right
3930 // All comparisons failed, must be NaN.
3931 return H_CONSTANT_DOUBLE(OS::nan_value());
3934 return new(zone) HMathMinMax(context, left, right, op);
// Constant-folds integer modulus. kMinInt % -1 (which would overflow) and a
// zero result with a negative dividend both fold to the double -0.0 to
// preserve JS semantics.
3938 HInstruction* HMod::New(Zone* zone,
3942 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3943 HConstant* c_left = HConstant::cast(left);
3944 HConstant* c_right = HConstant::cast(right);
3945 if (c_left->HasInteger32Value() && c_right->HasInteger32Value()) {
3946 int32_t dividend = c_left->Integer32Value();
3947 int32_t divisor = c_right->Integer32Value();
3948 if (dividend == kMinInt && divisor == -1) {
3949 return H_CONSTANT_DOUBLE(-0.0);
3952 int32_t res = dividend % divisor;
3953 if ((res == 0) && (dividend < 0)) {
3954 return H_CONSTANT_DOUBLE(-0.0);
3956 return H_CONSTANT_INT(res);
3960 return new(zone) HMod(context, left, right);
// Constant-folds division. Division by zero folds to +/-Infinity with the
// sign derived from both operands' sign bits (the right side may be -0).
3964 HInstruction* HDiv::New(
3965 Zone* zone, HValue* context, HValue* left, HValue* right) {
3966 // If left and right are constant values, try to return a constant value.
3967 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3968 HConstant* c_left = HConstant::cast(left);
3969 HConstant* c_right = HConstant::cast(right);
3970 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
3971 if (c_right->DoubleValue() != 0) {
3972 double double_res = c_left->DoubleValue() / c_right->DoubleValue();
// Prefer an int32 constant when the quotient is exactly representable.
3973 if (IsInt32Double(double_res)) {
3974 return H_CONSTANT_INT(double_res);
3976 return H_CONSTANT_DOUBLE(double_res);
3978 int sign = Double(c_left->DoubleValue()).Sign() *
3979 Double(c_right->DoubleValue()).Sign(); // Right could be -0.
3980 return H_CONSTANT_DOUBLE(sign * V8_INFINITY);
3984 return new(zone) HDiv(context, left, right);
// Constant-folds bitwise AND/OR/XOR on the int32 truncations of two numeric
// constants. NOTE(review): the switch header and break statements around
// the case labels are elided in this view.
3988 HInstruction* HBitwise::New(
3989 Zone* zone, HValue* context, Token::Value op, HValue* left, HValue* right) {
3990 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3991 HConstant* c_left = HConstant::cast(left);
3992 HConstant* c_right = HConstant::cast(right);
3993 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
3995 int32_t v_left = c_left->NumberValueAsInteger32();
3996 int32_t v_right = c_right->NumberValueAsInteger32();
3998 case Token::BIT_XOR:
3999 result = v_left ^ v_right;
4001 case Token::BIT_AND:
4002 result = v_left & v_right;
4005 result = v_left | v_right;
4008 result = 0; // Please the compiler.
4011 return H_CONSTANT_INT(result);
4014 return new(zone) HBitwise(context, op, left, right);
// Factory macro for constant-folding shifts. The shift count is masked with
// 0x1f, matching JS semantics and avoiding undefined C++ shifts >= 32.
4018 #define DEFINE_NEW_H_BITWISE_INSTR(HInstr, result) \
4019 HInstruction* HInstr::New( \
4020 Zone* zone, HValue* context, HValue* left, HValue* right) { \
4021 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \
4022 HConstant* c_left = HConstant::cast(left); \
4023 HConstant* c_right = HConstant::cast(right); \
4024 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \
4025 return H_CONSTANT_INT(result); \
4028 return new(zone) HInstr(context, left, right); \
4032 DEFINE_NEW_H_BITWISE_INSTR(HSar,
4033 c_left->NumberValueAsInteger32() >> (c_right->NumberValueAsInteger32() & 0x1f))
4034 DEFINE_NEW_H_BITWISE_INSTR(HShl,
4035 c_left->NumberValueAsInteger32() << (c_right->NumberValueAsInteger32() & 0x1f))
4037 #undef DEFINE_NEW_H_BITWISE_INSTR
// Constant-folds unsigned right shift. A zero shift of a negative value
// yields a uint32 outside int32 range, so that case folds to a double.
4040 HInstruction* HShr::New(
4041 Zone* zone, HValue* context, HValue* left, HValue* right) {
4042 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4043 HConstant* c_left = HConstant::cast(left);
4044 HConstant* c_right = HConstant::cast(right);
4045 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
4046 int32_t left_val = c_left->NumberValueAsInteger32();
4047 int32_t right_val = c_right->NumberValueAsInteger32() & 0x1f;
4048 if ((right_val == 0) && (left_val < 0)) {
4049 return H_CONSTANT_DOUBLE(static_cast<uint32_t>(left_val));
4051 return H_CONSTANT_INT(static_cast<uint32_t>(left_val) >> right_val);
4054 return new(zone) HShr(context, left, right);
// Constant-folds a character read from a constant string at a constant
// in-bounds index.
4058 HInstruction* HSeqStringGetChar::New(Zone* zone,
4060 String::Encoding encoding,
4063 if (FLAG_fold_constants && string->IsConstant() && index->IsConstant()) {
4064 HConstant* c_string = HConstant::cast(string);
4065 HConstant* c_index = HConstant::cast(index);
4066 if (c_string->HasStringValue() && c_index->HasInteger32Value()) {
4067 Handle<String> s = c_string->StringValue();
4068 int32_t i = c_index->Integer32Value();
4070 ASSERT_LT(i, s->length());
4071 return H_CONSTANT_INT(s->Get(i));
4074 return new(zone) HSeqStringGetChar(encoding, string, index);
4078 #undef H_CONSTANT_INT
4079 #undef H_CONSTANT_DOUBLE
// Prints the bitwise token name, then the generic binary-operation data.
4082 void HBitwise::PrintDataTo(StringStream* stream) {
4083 stream->Add(Token::Name(op_));
4085 HBitwiseBinaryOperation::PrintDataTo(stream);
// Rewrites this phi's all-constant inputs as int32-flavored constants when
// every use truncates to int32, so representation inference can choose Smi.
4089 void HPhi::SimplifyConstantInputs() {
4090 // Convert constant inputs to integers when all uses are truncating.
4091 // This must happen before representation inference takes place.
4092 if (!CheckUsesForFlag(kTruncatingToInt32)) return;
// Bail out entirely unless every operand is a constant.
4093 for (int i = 0; i < OperandCount(); ++i) {
4094 if (!OperandAt(i)->IsConstant()) return;
4096 HGraph* graph = block()->graph();
4097 for (int i = 0; i < OperandCount(); ++i) {
4098 HConstant* operand = HConstant::cast(OperandAt(i));
4099 if (operand->HasInteger32Value()) {
4101 } else if (operand->HasDoubleValue()) {
// Replace a double constant with its truncated-to-int32 equivalent.
4102 HConstant* integer_input =
4103 HConstant::New(graph->zone(), graph->GetInvalidContext(),
4104 DoubleToInt32(operand->DoubleValue()));
4105 integer_input->InsertAfter(operand);
4106 SetOperandAt(i, integer_input);
4107 } else if (operand->HasBooleanValue()) {
4108 SetOperandAt(i, operand->BooleanValue() ? graph->GetConstant1()
4109 : graph->GetConstant0());
// Immortal immovable constants (e.g. oddballs) truncate to 0.
4110 } else if (operand->ImmortalImmovable()) {
4111 SetOperandAt(i, graph->GetConstant0());
4114 // Overwrite observed input representations because they are likely Tagged.
4115 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
4116 HValue* use = it.value();
4117 if (use->IsBinaryOperation()) {
4118 HBinaryOperation::cast(use)->set_observed_input_representation(
4119 it.index(), Representation::Smi());
// Infers this phi's representation in three passes — from its inputs, from
// its uses, and from explicit use requirements — updating after each.
4125 void HPhi::InferRepresentation(HInferRepresentationPhase* h_infer) {
4126 ASSERT(CheckFlag(kFlexibleRepresentation));
4127 Representation new_rep = RepresentationFromInputs();
4128 UpdateRepresentation(new_rep, h_infer, "inputs");
4129 new_rep = RepresentationFromUses();
4130 UpdateRepresentation(new_rep, h_infer, "uses");
4131 new_rep = RepresentationFromUseRequirements();
4132 UpdateRepresentation(new_rep, h_infer, "use requirements");
// Generalizes over the known-optimal representations of all operands,
// starting from None.
4136 Representation HPhi::RepresentationFromInputs() {
4137 Representation r = Representation::None();
4138 for (int i = 0; i < OperandCount(); ++i) {
4139 r = r.generalize(OperandAt(i)->KnownOptimalRepresentation());
4145 // Returns a representation if all uses agree on the same representation.
4146 // Integer32 is also returned when some uses are Smi but others are Integer32.
4147 Representation HValue::RepresentationFromUseRequirements() {
4148 Representation rep = Representation::None();
4149 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
4150 // Ignore the use requirement from never run code
4151 if (it.value()->block()->IsUnreachable()) continue;
4153 // We check for observed_input_representation elsewhere.
4154 Representation use_rep =
4155 it.value()->RequiredInputRepresentation(it.index());
4160 if (use_rep.IsNone() || rep.Equals(use_rep)) continue;
// Smi and Integer32 requirements are compatible: widen to Integer32.
4161 if (rep.generalize(use_rep).IsInteger32()) {
4162 rep = Representation::Integer32();
// Any other disagreement means no single representation satisfies all uses.
4165 return Representation::None();
// Scans all uses for a required input representation that rules out Smi.
// NOTE(review): part of the condition (between the !IsNone() and !IsTagged()
// tests) is elided in this view.
4171 bool HValue::HasNonSmiUse() {
4172 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
4173 // We check for observed_input_representation elsewhere.
4174 Representation use_rep =
4175 it.value()->RequiredInputRepresentation(it.index());
4176 if (!use_rep.IsNone() &&
4178 !use_rep.IsTagged()) {
4186 // Node-specific verification code is only included in debug mode.
// Debug check: each phi operand must be defined in, or in a block that
// dominates, the corresponding predecessor of this phi's block.
4189 void HPhi::Verify() {
4190 ASSERT(OperandCount() == block()->predecessors()->length());
4191 for (int i = 0; i < OperandCount(); ++i) {
4192 HValue* value = OperandAt(i);
4193 HBasicBlock* defining_block = value->block();
4194 HBasicBlock* predecessor_block = block()->predecessors()->at(i);
4195 ASSERT(defining_block == predecessor_block ||
4196 defining_block->Dominates(predecessor_block));
// Debug check: defers to the generic instruction verification.
4201 void HSimulate::Verify() {
4202 HInstruction::Verify();
// Debug check: a heap-object check is a pure guard and must have no uses.
4207 void HCheckHeapObject::Verify() {
4208 HInstruction::Verify();
4209 ASSERT(HasNoUses());
// Debug check: a value check is a pure guard and must have no uses.
4213 void HCheckValue::Verify() {
4214 HInstruction::Verify();
4215 ASSERT(HasNoUses());
// Access for a slot inside the FixedArray header; the length slot gets the
// specialized array-length access.
4221 HObjectAccess HObjectAccess::ForFixedArrayHeader(int offset) {
4222 ASSERT(offset >= 0);
4223 ASSERT(offset < FixedArray::kHeaderSize);
4224 if (offset == FixedArray::kLengthOffset) return ForFixedArrayLength();
4225 return HObjectAccess(kInobject, offset);
// Access for an in-object JSObject slot with an explicit representation;
// the elements offset and the map offset are routed to their dedicated
// portions for GVN purposes.
4229 HObjectAccess HObjectAccess::ForJSObjectOffset(int offset,
4230 Representation representation) {
4231 ASSERT(offset >= 0);
4232 Portion portion = kInobject;
4234 if (offset == JSObject::kElementsOffset) {
4235 portion = kElementsPointer;
4236 } else if (offset == JSObject::kMapOffset) {
4239 return HObjectAccess(portion, offset, representation);
// Access for a named AllocationSite field, with the representation fixed
// per field: Smi for the pretenuring counters, Tagged for the rest.
4243 HObjectAccess HObjectAccess::ForAllocationSiteOffset(int offset) {
4245 case AllocationSite::kTransitionInfoOffset:
4246 return HObjectAccess(kInobject, offset, Representation::Tagged());
4247 case AllocationSite::kNestedSiteOffset:
4248 return HObjectAccess(kInobject, offset, Representation::Tagged());
4249 case AllocationSite::kPretenureDataOffset:
4250 return HObjectAccess(kInobject, offset, Representation::Smi());
4251 case AllocationSite::kPretenureCreateCountOffset:
4252 return HObjectAccess(kInobject, offset, Representation::Smi());
4253 case AllocationSite::kDependentCodeOffset:
4254 return HObjectAccess(kInobject, offset, Representation::Tagged());
4255 case AllocationSite::kWeakNextOffset:
4256 return HObjectAccess(kInobject, offset, Representation::Tagged());
4260 return HObjectAccess(kInobject, offset);
// Access for a context slot, computed from the context header size and
// cross-checked against Context::SlotOffset.
4264 HObjectAccess HObjectAccess::ForContextSlot(int index) {
4266 Portion portion = kInobject;
4267 int offset = Context::kHeaderSize + index * kPointerSize;
4268 ASSERT_EQ(offset, Context::SlotOffset(index) + kHeapObjectTag);
4269 return HObjectAccess(portion, offset, Representation::Tagged());
// Access for a JSArray slot; the elements, length and map offsets are
// routed to their dedicated GVN portions.
4273 HObjectAccess HObjectAccess::ForJSArrayOffset(int offset) {
4274 ASSERT(offset >= 0);
4275 Portion portion = kInobject;
4277 if (offset == JSObject::kElementsOffset) {
4278 portion = kElementsPointer;
4279 } else if (offset == JSArray::kLengthOffset) {
4280 portion = kArrayLengths;
4281 } else if (offset == JSObject::kMapOffset) {
4284 return HObjectAccess(portion, offset);
// Access for a slot in an out-of-object (backing store) properties array.
4288 HObjectAccess HObjectAccess::ForBackingStoreOffset(int offset,
4289 Representation representation) {
4290 ASSERT(offset >= 0);
4291 return HObjectAccess(kBackingStore, offset, representation);
// Builds the access for a named field found by map lookup — either an
// existing field or the target of a field transition — deriving the index
// and representation from the map's descriptors.
4295 HObjectAccess HObjectAccess::ForField(Handle<Map> map,
4296 LookupResult *lookup, Handle<String> name) {
4297 ASSERT(lookup->IsField() || lookup->IsTransitionToField(*map));
4299 Representation representation;
4300 if (lookup->IsField()) {
4301 index = lookup->GetLocalFieldIndexFromMap(*map);
4302 representation = lookup->representation();
// Transition case: read index and representation from the target map's
// last-added descriptor.
4304 Map* transition = lookup->GetTransitionMapFromMap(*map);
4305 int descriptor = transition->LastAdded();
4306 index = transition->instance_descriptors()->GetFieldIndex(descriptor) -
4307 map->inobject_properties();
4308 PropertyDetails details =
4309 transition->instance_descriptors()->GetDetails(descriptor);
4310 representation = details.representation();
4313 // Negative property indices are in-object properties, indexed
4314 // from the end of the fixed part of the object.
4315 int offset = (index * kPointerSize) + map->instance_size();
4316 return HObjectAccess(kInobject, offset, representation, name);
4318 // Non-negative property indices are in the properties array.
4319 int offset = (index * kPointerSize) + FixedArray::kHeaderSize;
4320 return HObjectAccess(kBackingStore, offset, representation, name);
// Access for the value slot of a (property) Cell, labeled with the
// canonical "cell value" string for printing.
4325 HObjectAccess HObjectAccess::ForCellPayload(Isolate* isolate) {
4326 return HObjectAccess(
4327 kInobject, Cell::kValueOffset, Representation::Tagged(),
4328 Handle<String>(isolate->heap()->cell_value_string()));
// Sets GVN dependency/change flags on a load or store instruction according
// to which portion of the object this access touches. NOTE(review): the
// store/load branch structure and the break statements between case labels
// are elided in this view.
4332 void HObjectAccess::SetGVNFlags(HValue *instr, bool is_store) {
4333 // set the appropriate GVN flags for a given load or store instruction
4335 // track dominating allocations in order to eliminate write barriers
4336 instr->SetGVNFlag(kDependsOnNewSpacePromotion);
4337 instr->SetFlag(HValue::kTrackSideEffectDominators);
4339 // try to GVN loads, but don't hoist above map changes
4340 instr->SetFlag(HValue::kUseGVN);
4341 instr->SetGVNFlag(kDependsOnMaps);
// Each portion maps to a paired Changes/DependsOn flag chosen by is_store.
4344 switch (portion()) {
4346 instr->SetGVNFlag(is_store
4347 ? kChangesArrayLengths : kDependsOnArrayLengths);
4349 case kStringLengths:
4350 instr->SetGVNFlag(is_store
4351 ? kChangesStringLengths : kDependsOnStringLengths);
4354 instr->SetGVNFlag(is_store
4355 ? kChangesInobjectFields : kDependsOnInobjectFields);
4358 instr->SetGVNFlag(is_store
4359 ? kChangesDoubleFields : kDependsOnDoubleFields);
4362 instr->SetGVNFlag(is_store
4363 ? kChangesBackingStoreFields : kDependsOnBackingStoreFields);
4365 case kElementsPointer:
4366 instr->SetGVNFlag(is_store
4367 ? kChangesElementsPointer : kDependsOnElementsPointer);
4370 instr->SetGVNFlag(is_store
4371 ? kChangesMaps : kDependsOnMaps);
4373 case kExternalMemory:
4374 instr->SetGVNFlag(is_store
4375 ? kChangesExternalMemory : kDependsOnExternalMemory);
// Prints a human-readable description of the access: a portion marker
// (%length, %elements, %map), the field name when known, or a generic
// [in-object]/[backing-store]/[external-memory] label, plus the offset.
// NOTE(review): some case labels and break statements are elided here.
4381 void HObjectAccess::PrintTo(StringStream* stream) {
4384 switch (portion()) {
4386 case kStringLengths:
4387 stream->Add("%length");
4389 case kElementsPointer:
4390 stream->Add("%elements");
4393 stream->Add("%map");
4395 case kDouble: // fall through
4397 if (!name_.is_null()) {
4398 stream->Add(String::cast(*name_)->ToCString().get());
4400 stream->Add("[in-object]");
4403 if (!name_.is_null()) {
4404 stream->Add(String::cast(*name_)->ToCString().get());
4406 stream->Add("[backing-store]");
4408 case kExternalMemory:
4409 stream->Add("[external-memory]");
4413 stream->Add("@%d", offset());
4416 } } // namespace v8::internal